diff --git a/.pytool/Plugin/LineEndingCheck/LineEndingCheck.py b/.pytool/Plugin/LineEndingCheck/LineEndingCheck.py new file mode 100644 index 0000000000..b431b6403f --- /dev/null +++ b/.pytool/Plugin/LineEndingCheck/LineEndingCheck.py @@ -0,0 +1,324 @@ +# @file LineEndingCheck.py +# +# An edk2-pytool based plugin that checks line endings. +# +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import glob +from io import StringIO +import logging +import os +import shutil +from pathlib import Path +from typing import Any, Callable, Dict, List, Tuple + +from edk2toolext.environment.plugin_manager import PluginManager +from edk2toolext.environment.plugintypes.ci_build_plugin import ICiBuildPlugin +from edk2toolext.environment.plugintypes.uefi_helper_plugin import \ + HelperFunctions +from edk2toolext.environment.var_dict import VarDict +from edk2toollib.gitignore_parser import parse_gitignore_lines +from edk2toollib.log.junit_report_format import JunitReportTestCase +from edk2toollib.uefi.edk2.path_utilities import Edk2Path +from edk2toollib.utility_functions import RunCmd +from git import Repo + + +PLUGIN_NAME = "LineEndingCheck" + +LINE_ENDINGS = [ + b'\r\n', + b'\n\r', + b'\n', + b'\r' +] + +ALLOWED_LINE_ENDING = b'\r\n' + +# +# Based on a solution for binary file detection presented in +# https://stackoverflow.com/a/7392391. +# +_TEXT_CHARS = bytearray( + {7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f}) + + +def _is_binary_string(_bytes: bytes) -> bool: + return bool(_bytes.translate(None, _TEXT_CHARS)) + + +class LineEndingCheckBadLineEnding(Exception): + pass + +class LineEndingCheckGitIgnoreFileException(Exception): + pass + +class LineEndingCheck(ICiBuildPlugin): + """ + A CiBuildPlugin that checks whether line endings are a certain format. + + By default, the plugin runs against all files in a package unless a + specific file or file extension is excluded. 
+ + Configuration options: + "LineEndingCheck": { + "IgnoreFiles": [], # File patterns to ignore. + } + """ + + def GetTestName(self, packagename: str, environment: VarDict) -> Tuple: + """ Provide the testcase name and classname for use in reporting + + Args: + packagename: String containing name of package to build. + environment: The VarDict for the test to run in. + + Returns: + A tuple containing the testcase name and the classname + (testcasename, classname) + testclassname: a descriptive string for the testcase can + include whitespace + classname: Should be patterned . + . + """ + return ("Check line endings in " + packagename, packagename + + "." + PLUGIN_NAME) + + # Note: This function access git via the command line + # + # function to check and warn if git config reports that + # autocrlf is configured to TRUE + def _check_autocrlf(self): + r = Repo(self._abs_workspace_path) + try: + result = r.config_reader().get_value("core", "autocrlf") + if result: + logging.warning(f"git config core.autocrlf is set to {result} " + f"recommended setting is false " + f"git config --global core.autocrlf false") + except Exception: + logging.warning(f"git config core.autocrlf is not set " + f"recommended setting is false " + f"git config --global core.autocrlf false") + return + + # Note: This function currently accesses git via the git command to prevent + # introducing a new Python git module dependency in mu_basecore + # on gitpython. + # + # After gitpython is adopted by edk2-pytool-extensions, this + # implementation can be updated to use the gitpython interface. + def _get_git_ignored_paths(self) -> List[Path]: + """" + Gets paths ignored by git. + + Returns: + List[str]: A list of file absolute path strings to all files + ignored in this git repository. + + If git is not found, an empty list will be returned. + """ + if not shutil.which("git"): + logging.warning( + "Git is not found on this system. 
Git submodule paths will " + "not be considered.") + return [] + + outstream_buffer = StringIO() + exit_code = RunCmd("git", "ls-files --other", + workingdir=self._abs_workspace_path, + outstream=outstream_buffer, + logging_level=logging.NOTSET) + if (exit_code != 0): + raise LineEndingCheckGitIgnoreFileException( + f"An error occurred reading git ignore settings. This will " + f"prevent LineEndingCheck from running against the expected " + f"set of files.") + + # Note: This will potentially be a large list, but at least sorted + rel_paths = outstream_buffer.getvalue().strip().splitlines() + abs_paths = [] + for path in rel_paths: + abs_paths.append(Path( + os.path.normpath(os.path.join(self._abs_workspace_path, path)))) + return abs_paths + + # Note: This function currently accesses git via the git command to prevent + # introducing a new Python git module dependency in mu_basecore + # on gitpython. + # + # After gitpython is adopted by edk2-pytool-extensions, this + # implementation can be updated to use the gitpython interface. + def _get_git_submodule_paths(self) -> List[Path]: + """ + Gets submodule paths recognized by git. + + Returns: + List[str]: A list of directory absolute path strings to the root + of each submodule in the workspace repository. + + If git is not found, an empty list will be returned. + """ + if not shutil.which("git"): + logging.warning( + "Git is not found on this system. Git submodule paths will " + "not be considered.") + return [] + + if os.path.isfile(os.path.join(self._abs_workspace_path, ".gitmodules")): + logging.info( + ".gitmodules file found. 
Excluding submodules in " + "LineEndingCheck.") + + outstream_buffer = StringIO() + exit_code = RunCmd("git", + "config --file .gitmodules --get-regexp path", + workingdir=self._abs_workspace_path, + outstream=outstream_buffer, + logging_level=logging.NOTSET) + if (exit_code != 0): + raise LineEndingCheckGitIgnoreFileException( + f".gitmodule file detected but an error occurred reading " + f"the file. Cannot proceed with unknown submodule paths.") + + submodule_paths = [] + for line in outstream_buffer.getvalue().strip().splitlines(): + submodule_paths.append(Path( + os.path.normpath(os.path.join(self._abs_workspace_path, line.split()[1])))) + + return submodule_paths + else: + return [] + + def _get_files_ignored_in_config(self, + pkg_config: Dict[str, List[str]], + base_dir: str) -> Callable[[str], bool]: + """" + Returns a function that returns true if a given file string path is + ignored in the plugin configuration file and false otherwise. + + Args: + pkg_config: Dictionary with the package configuration + base_dir: Base directory of the package + + Returns: + Callable[[None], None]: A test case function. + """ + ignored_files = [] + if pkg_config.get("IgnoreFilesWithNoExtension", False): + ignored_files.extend(['*', '!*.*', '!*/']) + if "IgnoreFiles" in pkg_config: + ignored_files.extend(pkg_config["IgnoreFiles"]) + + # Pass "Package configuration file" as the source file path since + # the actual configuration file name is unknown to this plugin and + # this provides a generic description of the file that provided + # the ignore file content. + # + # This information is only used for reporting (not used here) and + # the ignore lines are being passed directly as they are given to + # this plugin. 
+ return parse_gitignore_lines(ignored_files, + "Package configuration file", + base_dir) + + def RunBuildPlugin(self, package_rel_path: str, edk2_path: Edk2Path, + package_config: Dict[str, List[str]], + environment_config: Any, + plugin_manager: PluginManager, + plugin_manager_helper: HelperFunctions, + tc: JunitReportTestCase, output_stream=None) -> int: + """ + External function of plugin. This function is used to perform the task + of the CiBuild Plugin. + + Args: + - package_rel_path: edk2 workspace relative path to the package + - edk2_path: Edk2Path object with workspace and packages paths + - package_config: Dictionary with the package configuration + - environment_config: Environment configuration + - plugin_manager: Plugin Manager Instance + - plugin_manager_helper: Plugin Manager Helper Instance + - tc: JUnit test case + - output_stream: The StringIO output stream from this plugin + (logging) + + Returns: + >0 : Number of errors found + 0 : Ran successfully + -1 : Skipped due to a missing pre-requisite + """ + self._abs_workspace_path = edk2_path.WorkspacePath + self._check_autocrlf() + self._abs_pkg_path = \ + edk2_path.GetAbsolutePathOnThisSystemFromEdk2RelativePath( + package_rel_path) + + if self._abs_pkg_path is None: + tc.SetSkipped() + tc.LogStdError(f"Package folder not found {self._abs_pkg_path}") + return 0 + + ignore_files = set(self._get_git_ignored_paths()) + ignore_dirs = set(self._get_git_submodule_paths()) + ignore_filter = self._get_files_ignored_in_config(package_config, self._abs_pkg_path) + + file_count = 0 + line_ending_count = dict.fromkeys(LINE_ENDINGS, 0) + for file in Path(self._abs_pkg_path).rglob('*'): + if file.is_dir(): + continue + + if any(file.is_relative_to(ignore_dir) for ignore_dir in ignore_dirs): + continue + + if ignore_filter(file): + continue + + if file in ignore_files: + continue + + with open(file.resolve(), 'rb') as fb: + if not fb.readable() or _is_binary_string(fb.read(1024)): + continue + fb.seek(0) + + for 
lineno, line in enumerate(fb, 1):
+                    try:
+                        for e in LINE_ENDINGS:
+                            if line.endswith(e):
+                                line_ending_count[e] += 1
+
+                                if e != ALLOWED_LINE_ENDING:
+                                    file_path = file.relative_to(
+                                        Path(self._abs_workspace_path)).as_posix()
+                                    file_count += 1
+
+                                    tc.LogStdError(
+                                        f"Line ending on Line {lineno} in "
+                                        f"{file_path} is not allowed.\nLine "
+                                        f"ending is {e} and should be "
+                                        f"{ALLOWED_LINE_ENDING}.")
+                                    logging.error(
+                                        f"Line ending on Line {lineno} in "
+                                        f"{file_path} is not allowed.\nLine "
+                                        f"ending is {e} and should be "
+                                        f"{ALLOWED_LINE_ENDING}.")
+                                    raise LineEndingCheckBadLineEnding
+                                break
+                    except LineEndingCheckBadLineEnding:
+                        break
+
+        del line_ending_count[ALLOWED_LINE_ENDING]
+
+        if any(line_ending_count.values()):
+            tc.SetFailed(
+                f"{PLUGIN_NAME} failed due to {file_count} files with "
+                f"incorrect line endings.",
+                "CHECK_FAILED")
+        else:
+            tc.SetSuccess()
+
+        return sum(line_ending_count.values())
diff --git a/.pytool/Plugin/LineEndingCheck/Readme.md b/.pytool/Plugin/LineEndingCheck/Readme.md
new file mode 100644
index 0000000000..5699f27aa0
--- /dev/null
+++ b/.pytool/Plugin/LineEndingCheck/Readme.md
@@ -0,0 +1,33 @@
+# Line Ending Check Plugin
+
+This CiBuildPlugin scans all the files in a package to verify that the line endings are CRLF.
+
+> _Note:_ If you encounter a line ending issue found by this plugin, update your development environment to avoid
+> issues again in the future.
+>
+> Most problems are caused by `autocrlf=true` in git settings, which will automatically adjust line endings upon
+> checkout and commit which distorts the actual line endings from being consistent locally and remotely. In
+> other cases, developing within a Linux workspace will natively use LF by default.
+>
+> It is simplest to set `autocrlf=false` to prevent manipulation of line endings outside of the actual values and set
+> up your editor to use CRLF line endings within the project.
+
+## Configuration
+
+The plugin can be configured to ignore certain files.
+
+``` yaml
+"LineEndingCheck": {
+    "IgnoreFiles": [],
+    "IgnoreFilesWithNoExtension": False
+}
+```
+
+### IgnoreFiles
+
+An **optional** list of git ignore patterns relative to the package root used to exclude files from being checked.
+
+### IgnoreFilesWithNoExtension
+
+An **optional** value that, if True, will insert the gitignore rules necessary to have this check ignore files
+that do not contain a file extension. Necessary for binary files and/or POSIX like executables.
diff --git a/.pytool/Plugin/LineEndingCheck/line_ending_check_plug_in.yaml b/.pytool/Plugin/LineEndingCheck/line_ending_check_plug_in.yaml
new file mode 100644
index 0000000000..6e67ca8d61
--- /dev/null
+++ b/.pytool/Plugin/LineEndingCheck/line_ending_check_plug_in.yaml
@@ -0,0 +1,11 @@
+## @file
+# CiBuildPlugin used to check line ending format.
+#
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+{
+    "scope": "cibuild",
+    "name": "Line Ending Check Test",
+    "module": "LineEndingCheck"
+}
diff --git a/.pytool/Plugin/UncrustifyCheck/UncrustifyCheck.py b/.pytool/Plugin/UncrustifyCheck/UncrustifyCheck.py
index 337339f537..ec464d2ee2 100644
--- a/.pytool/Plugin/UncrustifyCheck/UncrustifyCheck.py
+++ b/.pytool/Plugin/UncrustifyCheck/UncrustifyCheck.py
@@ -495,7 +495,6 @@ def _initialize_file_to_format_info(self) -> None:
         for path in rel_file_paths_to_format:
             self._abs_file_paths_to_format.extend(
                 [str(path.resolve()) for path in pathlib.Path(self._abs_package_path).rglob(path)])
-        # Remove files ignore in the plugin configuration file
         plugin_ignored_files = list(filter(self._get_files_ignored_in_config(), self._abs_file_paths_to_format))
@@ -641,7 +640,7 @@ def 
_remove_readonly(func, path, _):
         """
         Private function to attempt to change permissions on file/folder being deleted.
         """
         os.chmod(path, stat.S_IWRITE)
         func(path)
 
     for _ in range(3):  # retry up to 3 times
diff --git a/BaseTools/Plugin/BmpCheck/BmpCheckPlugin.py b/BaseTools/Plugin/BmpCheck/BmpCheckPlugin.py
index c5ee90002a..ce8234a85d 100644
--- a/BaseTools/Plugin/BmpCheck/BmpCheckPlugin.py
+++ b/BaseTools/Plugin/BmpCheck/BmpCheckPlugin.py
@@ -1,170 +1,170 @@
-# @file BmpCheckPlugin.py
-# Plugin to support checking BMP's included in the FDF for proper usage.
-#
-##
-# Copyright (c) Microsoft Corporation.
-# SPDX-License-Identifier: BSD-2-Clause-Patent
-#
-###
-
-import logging
-import os
-import time
-try:
-    from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin
-    from edk2toollib.uefi.edk2.parsers.fdf_parser import FdfParser
-    from edk2toollib.uefi.edk2.parsers.dsc_parser import DscParser
-    from edk2toollib.uefi import bmp_object
-except Exception:
-    pass
-
-
-def timing(f):
-    def wrap(*args):
-        time1 = time.time()
-        ret = f(*args)
-        time2 = time.time()
-        logging.debug('{:s} function took {:.3f} ms'.format(f.__name__, (time2-time1)*1000.0))
-
-        return ret
-    return wrap
-
-
-# the tests that we run on the BMP object
-class UefiBmpSupportTests(object):
-
-    def __init__(self, BmpObject, max_width=0, max_height=0):
-        self.Bmp = BmpObject
-        self.logger = logging.getLogger(__name__)
-        self.max_width = max_width
-        self.max_height = max_height
-
-    def Test1(self):
-        self.logger.info("Test1: Pixel Data Size in file matches computed Size of Pixel data")
-        #test1
-        DataSizePerLine = ((self.Bmp.PixelWidth * self.Bmp.BitPerPixel + 31) >> 3) & (~0x3)
-        DataSize2 = ((self.Bmp.PixelWidth * self.Bmp.BitPerPixel +31) / 32) * 4
-        self.logger.debug("DataSize2 = {}".format(DataSize2))
-        self.logger.debug(" DataSizePerLine: {}".format(DataSizePerLine))
-        RawDataSize = self.Bmp.PixelHeight * DataSizePerLine
-        
self.logger.debug(" RawDataSize: 0x%X" % RawDataSize) - ComputeSize = (self.Bmp.Size - self.Bmp.ImageOffset) - self.logger.debug(" File Calculated Data Size: 0x%X" % ComputeSize) - - if(ComputeSize != RawDataSize): - self.logger.error(" BMP Test1 - Fail") - return 1 - else: - self.logger.info(" BMP Test1 - Pass") - return 0 - - def Test2(self): - self.logger.info(" BMP Test2: File Header and Img Header as expected") - #test2 - if self.Bmp.CharB != b'B' and self.Bmp.CharB != b'B': - self.logger.error("Logo check - B header failed {}".format(self.Bmp.CharB)) - return 1 - if self.Bmp.CharM != b'M' and self.Bmp.CharM != 'M': - self.logger.error("Logo check - M header failed {}".format(self.Bmp.CharM)) - return 1 - - self.logger.info(" Test2 - Pass") - return 0 - - def Test3(self): - if self.max_width > 0 and self.Bmp.PixelWidth > self.max_width: - self.logger.error("Image is too wide") - return 1 - if self.max_height > 0 and self.Bmp.PixelHeight > self.max_height: - self.logger.error("Image is too tall") - return 1 - return 0 - - - -class BmpCheckPlugin(IUefiBuildPlugin): - - def __init__(self): - self.logger = logging.getLogger(__name__) - - @staticmethod - def CheckBmp(BmpFilePath, max_width=0, max_height=0): - if not os.path.isfile(BmpFilePath): - return 1 - bmp = open(BmpFilePath, "rb") - BmpObj = bmp_object.BmpObject(bmp) - bmp.close() - #run tests - Tests = UefiBmpSupportTests(BmpObj) - ret = Tests.Test1() - ret += Tests.Test2() - ret += Tests.Test3() - if ret > 0: - Tests.logger.error(f"BMP {BmpFilePath} failed.") - return ret - - @timing - def do_pre_build(self, thebuilder): - try: - error_count = 0 - ''' - # this scans the whole build directory for bmp's - bmp_search_path = os.path.join(thebuilder.ws,"**","*.bmp"); - for found_item in glob.iglob(bmp_search_path, recursive=True): - if CheckBmp(found_item): - logging.error("{} failed image check".format(found_item)) - error_count += 1 - return error_count - ''' - - fp = FdfParser() - dp = DscParser() - - edk2 = 
thebuilder.edk2path - - ActiveDsc = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath( - thebuilder.env.GetValue("ACTIVE_PLATFORM")) - ActiveFdf = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath( - thebuilder.env.GetValue("FLASH_DEFINITION")) - - if ActiveFdf is None: - self.logger.info("No FDF found- BMP check skipped") - return 0 - # parse the DSC and the FDF - env_vars = thebuilder.env.GetAllBuildKeyValues() - dp.SetEdk2Path(edk2) - dp.SetInputVars(env_vars).ParseFile(ActiveDsc) - - env_vars.update(dp.LocalVars) - fp.SetEdk2Path(edk2) - fp.SetInputVars(env_vars).ParseFile(ActiveFdf) # give FDF parser the vars from DSC - - # for each FV section in the DSC - for FV_name in fp.FVs: - FV_files = fp.FVs[FV_name]["Files"] - # now look for images in each file of this FV - for fv_file_name in FV_files: - fv_file = FV_files[fv_file_name] - if fv_file["type"].upper() != 'FREEFORM': - continue - fv_file_raw = fv_file['RAW'] - fv_file_raw_list = [] - if isinstance(fv_file_raw, list): - fv_file_raw_list = fv_file_raw - else: - fv_file_raw_list.append(fv_file_raw) - # for each file that is RAW type - for fv_file_raw_item in fv_file_raw_list: - # check if it ends with a bmp - if fv_file_raw_item.lower().endswith(".bmp"): - logging.debug(fv_file_raw_item) - BmpFilePath = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath(fv_file_raw_item) - logging.debug(BmpFilePath) - if BmpCheckPlugin.CheckBmp(BmpFilePath): # do the check - self.logger.error("{} failed image check".format(fv_file_raw_item)) - error_count += 1 - return error_count - except Exception: - self.logger.warning( - "Unable to read the FDF. Please update your Edk2-Pytools-* Packages") - return 0 +# @file BmpCheckPlugin.py +# Plugin to support checking BMP's included in the FDF for proper usage. +# +## +# Copyright (c) Microsoft Corporation. 
+# SPDX-License-Identifier: BSD-2-Clause-Patent +# +### + +import logging +import os +import time +try: + from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin + from edk2toollib.uefi.edk2.parsers.fdf_parser import FdfParser + from edk2toollib.uefi.edk2.parsers.dsc_parser import DscParser + from edk2toollib.uefi import bmp_object +except Exception: + pass + + +def timing(f): + def wrap(*args): + time1 = time.time() + ret = f(*args) + time2 = time.time() + logging.debug('{:s} function took {:.3f} ms'.format(f.__name__, (time2-time1)*1000.0)) + + return ret + return wrap + + +# the tests that we run on the BMP object +class UefiBmpSupportTests(object): + + def __init__(self, BmpObject, max_width=0, max_height=0): + self.Bmp = BmpObject + self.logger = logging.getLogger(__name__) + self.max_width = max_width + self.max_height = max_height + + def Test1(self): + self.logger.info("Test1: Pixel Data Size in file matches computed Size of Pixel data") + #test1 + DataSizePerLine = ((self.Bmp.PixelWidth * self.Bmp.BitPerPixel + 31) >> 3) & (~0x3) + DataSize2 = ((self.Bmp.PixelWidth * self.Bmp.BitPerPixel +31) / 32) * 4 + self.logger.debug("DataSize2 = {}".format(DataSize2)) + self.logger.debug(" DataSizePerLine: {}".format(DataSizePerLine)) + RawDataSize = self.Bmp.PixelHeight * DataSizePerLine + self.logger.debug(" RawDataSize: 0x%X" % RawDataSize) + ComputeSize = (self.Bmp.Size - self.Bmp.ImageOffset) + self.logger.debug(" File Calculated Data Size: 0x%X" % ComputeSize) + + if(ComputeSize != RawDataSize): + self.logger.error(" BMP Test1 - Fail") + return 1 + else: + self.logger.info(" BMP Test1 - Pass") + return 0 + + def Test2(self): + self.logger.info(" BMP Test2: File Header and Img Header as expected") + #test2 + if self.Bmp.CharB != b'B' and self.Bmp.CharB != b'B': + self.logger.error("Logo check - B header failed {}".format(self.Bmp.CharB)) + return 1 + if self.Bmp.CharM != b'M' and self.Bmp.CharM != 'M': + self.logger.error("Logo check - 
M header failed {}".format(self.Bmp.CharM)) + return 1 + + self.logger.info(" Test2 - Pass") + return 0 + + def Test3(self): + if self.max_width > 0 and self.Bmp.PixelWidth > self.max_width: + self.logger.error("Image is too wide") + return 1 + if self.max_height > 0 and self.Bmp.PixelHeight > self.max_height: + self.logger.error("Image is too tall") + return 1 + return 0 + + + +class BmpCheckPlugin(IUefiBuildPlugin): + + def __init__(self): + self.logger = logging.getLogger(__name__) + + @staticmethod + def CheckBmp(BmpFilePath, max_width=0, max_height=0): + if not os.path.isfile(BmpFilePath): + return 1 + bmp = open(BmpFilePath, "rb") + BmpObj = bmp_object.BmpObject(bmp) + bmp.close() + #run tests + Tests = UefiBmpSupportTests(BmpObj) + ret = Tests.Test1() + ret += Tests.Test2() + ret += Tests.Test3() + if ret > 0: + Tests.logger.error(f"BMP {BmpFilePath} failed.") + return ret + + @timing + def do_pre_build(self, thebuilder): + try: + error_count = 0 + ''' + # this scans the whole build directory for bmp's + bmp_search_path = os.path.join(thebuilder.ws,"**","*.bmp"); + for found_item in glob.iglob(bmp_search_path, recursive=True): + if CheckBmp(found_item): + logging.error("{} failed image check".format(found_item)) + error_count += 1 + return error_count + ''' + + fp = FdfParser() + dp = DscParser() + + edk2 = thebuilder.edk2path + + ActiveDsc = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath( + thebuilder.env.GetValue("ACTIVE_PLATFORM")) + ActiveFdf = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath( + thebuilder.env.GetValue("FLASH_DEFINITION")) + + if ActiveFdf is None: + self.logger.info("No FDF found- BMP check skipped") + return 0 + # parse the DSC and the FDF + env_vars = thebuilder.env.GetAllBuildKeyValues() + dp.SetEdk2Path(edk2) + dp.SetInputVars(env_vars).ParseFile(ActiveDsc) + + env_vars.update(dp.LocalVars) + fp.SetEdk2Path(edk2) + fp.SetInputVars(env_vars).ParseFile(ActiveFdf) # give FDF parser the vars from DSC + + # for each FV section 
in the DSC + for FV_name in fp.FVs: + FV_files = fp.FVs[FV_name]["Files"] + # now look for images in each file of this FV + for fv_file_name in FV_files: + fv_file = FV_files[fv_file_name] + if fv_file["type"].upper() != 'FREEFORM': + continue + fv_file_raw = fv_file['RAW'] + fv_file_raw_list = [] + if isinstance(fv_file_raw, list): + fv_file_raw_list = fv_file_raw + else: + fv_file_raw_list.append(fv_file_raw) + # for each file that is RAW type + for fv_file_raw_item in fv_file_raw_list: + # check if it ends with a bmp + if fv_file_raw_item.lower().endswith(".bmp"): + logging.debug(fv_file_raw_item) + BmpFilePath = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath(fv_file_raw_item) + logging.debug(BmpFilePath) + if BmpCheckPlugin.CheckBmp(BmpFilePath): # do the check + self.logger.error("{} failed image check".format(fv_file_raw_item)) + error_count += 1 + return error_count + except Exception: + self.logger.warning( + "Unable to read the FDF. Please update your Edk2-Pytools-* Packages") + return 0 diff --git a/BaseTools/Plugin/Edk2ToolHelper/Edk2ToolHelper.py b/BaseTools/Plugin/Edk2ToolHelper/Edk2ToolHelper.py index 1145e79150..61fab328ba 100644 --- a/BaseTools/Plugin/Edk2ToolHelper/Edk2ToolHelper.py +++ b/BaseTools/Plugin/Edk2ToolHelper/Edk2ToolHelper.py @@ -1,166 +1,166 @@ -from edk2toolext.environment.plugintypes.uefi_helper_plugin import IUefiHelperPlugin -import logging -import os -import uuid -from edk2toollib.utility_functions import RunCmd -from edk2toollib.utility_functions import RunPythonScript -from edk2toollib.utility_functions import CatalogSignWithSignTool -import shutil -import datetime -from Common.Edk2.Capsule.FmpPayloadHeader import FmpPayloadHeaderClass -from Common.Uefi.Capsule.FmpCapsuleHeader import FmpCapsuleHeaderClass -from edk2toollib.windows.locate_tools import FindToolInWinSdk - - -class Edk2ToolHelper(IUefiHelperPlugin): - - def RegisterHelpers(self, obj): - fp = os.path.abspath(__file__) - obj.Register("PackageMsFmpHeader", 
Edk2ToolHelper.PackageMsFmpHeader, fp) - obj.Register("PackageFmpImageAuth", Edk2ToolHelper.PackageFmpImageAuth, fp) - obj.Register("PackageFmpCapsuleHeader", Edk2ToolHelper.PackageFmpCapsuleHeader, fp) - obj.Register("PackageCapsuleHeader", Edk2ToolHelper.PackageCapsuleHeader, fp) - - - ## - # Function to Create binary with MsFmp Header prepended with data supplied - # InputBin: Input binary to wrap with new header (file path) - # OutputBin: file path to write Output binary to - # VersionInt: integer parameter for the version - # LsvInt: Integer parameter for the lowest supported version - # DepList: (optional) list of dependences. Dep format is tuple (FmpGuidForDep, FmpIndex, IntFmpMinVersion, IntFlags ) - ### Dep format can change overtime. Flags can be added for new behavior. See the version and library implementing behavior. - ### V1 details. - ####Flag bit 0: dep MUST be in system if 1. Otherwise dep only applied if fmp found in system. - ####Flag bit 1: dep version MUST be exact match if 1. Otherwise dep must be equal or greater than version. - ## - @staticmethod - def PackageMsFmpHeader(InputBin, OutputBin, VersionInt, LsvInt, DepList = []): - # NOTE: Crash if deps are passed. Return a useful error. - # Currently not ported to the new tooling. - if len(DepList) > 0: - raise RuntimeError("PackageMsFmpHeader has not been ported to support dependencies yet!") - # Should not take a capsule whose Version <= LSV - if (VersionInt < LsvInt): - logging.error("Version number 0x%08x lower than Lowest supported version 0x%08x is not allowed!" 
% (VersionInt, LsvInt)) - return -1 - - #append depedency if supplied - # for dep in DepList: - # depGuid = dep[0] - # depIndex = int(dep[1]) - # depMinVer = hex(dep[2]) - # depFlag = hex(dep[3]) - # logging.debug("Adding a Dependency:\n\tFMP Guid: %s \nt\tFmp Descriptor Index: %d \n\tFmp DepVersion: %s \n\tFmp Flags: %s\n" % (depGuid, depIndex, depMinVer, depFlag)) - # params += " --dep " + depGuid + " " + str(depIndex) + " " + depMinVer + " " + depFlag - # raise Exception("GenMsPayloadHeader Failed with errorcode %d" % ret) - - # Attempt to write the payload to the file. - # This would normally - with open(InputBin, 'rb') as in_file: - payload_data = in_file.read() - - fmp_header = FmpPayloadHeaderClass() - fmp_header.FwVersion = VersionInt - fmp_header.LowestSupportedVersion = LsvInt - fmp_header.Payload = payload_data - - with open(OutputBin, 'wb') as out_file: - out_file.write(fmp_header.Encode()) - - return 0 - - ## - # Function to create binary wrapped with FmpImage Auth using input supplied - # InputBin: Input binary to wrap with new fmp image auth header (file path) - # OutputBin: file path to write final output binary to - # DevPfxFilePath: (optional) file path to dev pfx file to sign with. If not supplied production signing is assumed. 
- # - ## - @staticmethod - def PackageFmpImageAuth(InputBin, OutputBin, DevPfxFilePath = None, DevPfxPassword = None, DetachedSignatureFile = None, Eku = None): - logging.debug("CapsulePackage: Fmp Image Auth Header/Signing") - - #temp output dir is in the outputbin folder - ret = 0 - TempOutDir = os.path.join(os.path.dirname(os.path.abspath(OutputBin)), "_Temp_FmpImageAuth_" + str(datetime.datetime.now().time()).replace(":", "_")) - logging.debug("Temp Output dir for FmpImageAuth: %s" % TempOutDir) - os.mkdir(TempOutDir) - cmd = "GenFmpImageAuth.py" - params = "-o " + OutputBin - params = params + " -p " + InputBin + " -m 1" - params = params + " --debug" - params = params + " -l " + os.path.join(TempOutDir, "GenFmpImageAuth_Log.log") - if(DevPfxFilePath is not None): - logging.debug("FmpImageAuth is dev signed. Do entire process in 1 step locally.") - - #Find Signtool - SignToolPath = FindToolInWinSdk("signtool.exe") - if not os.path.exists(SignToolPath): - raise Exception("Can't find signtool on this machine.") - - params = params + " --SignTool \"" + SignToolPath + "\"" - - params = params + " --pfxfile " + DevPfxFilePath - if( DevPfxPassword is not None): - params += " --pfxpass " + DevPfxPassword - if (Eku is not None): - params += " --eku " + Eku - ret = RunPythonScript(cmd, params, workingdir=TempOutDir) - #delete the temp dir - shutil.rmtree(TempOutDir, ignore_errors=True) - else: - #production - logging.debug("FmpImageAuth is Production signed") - - if(DetachedSignatureFile is None): - logging.debug("FmpImageAuth Step1: Make ToBeSigned file for production") - params = params + " --production" - ret = RunPythonScript(cmd, params, workingdir=TempOutDir) - if(ret != 0): - raise Exception("GenFmpImageAuth Failed production signing: step 1. 
Errorcode %d" % ret) - #now we have a file to sign at - TBS = os.path.join(os.path.dirname(OutputBin), "payload.Temp.ToBeSigned") - if(not os.path.exists(TBS)): - raise Exception("GenFmpImageAuth didn't create ToBeSigned file") - os.rename(TBS, OutputBin) - - else: - logging.debug("FmpImageAuth Step3: Final Packaging of production signed") - params = params + " --production -s " + DetachedSignatureFile - ret = RunPythonScript(cmd, params, workingdir=TempOutDir) - #delete the temp dir - shutil.rmtree(TempOutDir, ignore_errors=True) - - if(ret != 0): - raise Exception("GenFmpImageAuth Failed with errorcode %d" % ret) - return ret - - @staticmethod - def PackageFmpCapsuleHeader(InputBin, OutputBin, FmpGuid): - with open(InputBin, 'rb') as in_file: - capsule_data = in_file.read() - - fmp_capsule = FmpCapsuleHeaderClass() - fmp_capsule.AddPayload(uuid.UUID(FmpGuid), capsule_data) - - with open(OutputBin, 'wb') as out_file: - out_file.write(fmp_capsule.Encode()) - - return 0 - - @staticmethod - def PackageCapsuleHeader(InputBin, OutputBin, FmpDeviceGuid=None): - logging.debug("CapsulePackage: Final Capsule Header") - if(FmpDeviceGuid == None): - logging.debug("CapsulePackage: Using default industry standard FMP guid") - FmpDeviceGuid = "6dcbd5ed-e82d-4c44-bda1-7194199ad92a" - - params = "-o " + OutputBin - params = params + " -g " + FmpDeviceGuid - params = params + " --capsule -v -f " + InputBin - params = params + " --capFlag PersistAcrossReset --capFlag InitiateReset" - ret = RunCmd("genfv", params) - if(ret != 0): - raise Exception("GenFv Failed with errorcode" % ret) - return ret +from edk2toolext.environment.plugintypes.uefi_helper_plugin import IUefiHelperPlugin +import logging +import os +import uuid +from edk2toollib.utility_functions import RunCmd +from edk2toollib.utility_functions import RunPythonScript +from edk2toollib.utility_functions import CatalogSignWithSignTool +import shutil +import datetime +from Common.Edk2.Capsule.FmpPayloadHeader import 
FmpPayloadHeaderClass +from Common.Uefi.Capsule.FmpCapsuleHeader import FmpCapsuleHeaderClass +from edk2toollib.windows.locate_tools import FindToolInWinSdk + + +class Edk2ToolHelper(IUefiHelperPlugin): + + def RegisterHelpers(self, obj): + fp = os.path.abspath(__file__) + obj.Register("PackageMsFmpHeader", Edk2ToolHelper.PackageMsFmpHeader, fp) + obj.Register("PackageFmpImageAuth", Edk2ToolHelper.PackageFmpImageAuth, fp) + obj.Register("PackageFmpCapsuleHeader", Edk2ToolHelper.PackageFmpCapsuleHeader, fp) + obj.Register("PackageCapsuleHeader", Edk2ToolHelper.PackageCapsuleHeader, fp) + + + ## + # Function to Create binary with MsFmp Header prepended with data supplied + # InputBin: Input binary to wrap with new header (file path) + # OutputBin: file path to write Output binary to + # VersionInt: integer parameter for the version + # LsvInt: Integer parameter for the lowest supported version + # DepList: (optional) list of dependences. Dep format is tuple (FmpGuidForDep, FmpIndex, IntFmpMinVersion, IntFlags ) + ### Dep format can change overtime. Flags can be added for new behavior. See the version and library implementing behavior. + ### V1 details. + ####Flag bit 0: dep MUST be in system if 1. Otherwise dep only applied if fmp found in system. + ####Flag bit 1: dep version MUST be exact match if 1. Otherwise dep must be equal or greater than version. + ## + @staticmethod + def PackageMsFmpHeader(InputBin, OutputBin, VersionInt, LsvInt, DepList = []): + # NOTE: Crash if deps are passed. Return a useful error. + # Currently not ported to the new tooling. + if len(DepList) > 0: + raise RuntimeError("PackageMsFmpHeader has not been ported to support dependencies yet!") + # Should not take a capsule whose Version <= LSV + if (VersionInt < LsvInt): + logging.error("Version number 0x%08x lower than Lowest supported version 0x%08x is not allowed!" 
% (VersionInt, LsvInt)) + return -1 + + #append depedency if supplied + # for dep in DepList: + # depGuid = dep[0] + # depIndex = int(dep[1]) + # depMinVer = hex(dep[2]) + # depFlag = hex(dep[3]) + # logging.debug("Adding a Dependency:\n\tFMP Guid: %s \nt\tFmp Descriptor Index: %d \n\tFmp DepVersion: %s \n\tFmp Flags: %s\n" % (depGuid, depIndex, depMinVer, depFlag)) + # params += " --dep " + depGuid + " " + str(depIndex) + " " + depMinVer + " " + depFlag + # raise Exception("GenMsPayloadHeader Failed with errorcode %d" % ret) + + # Attempt to write the payload to the file. + # This would normally + with open(InputBin, 'rb') as in_file: + payload_data = in_file.read() + + fmp_header = FmpPayloadHeaderClass() + fmp_header.FwVersion = VersionInt + fmp_header.LowestSupportedVersion = LsvInt + fmp_header.Payload = payload_data + + with open(OutputBin, 'wb') as out_file: + out_file.write(fmp_header.Encode()) + + return 0 + + ## + # Function to create binary wrapped with FmpImage Auth using input supplied + # InputBin: Input binary to wrap with new fmp image auth header (file path) + # OutputBin: file path to write final output binary to + # DevPfxFilePath: (optional) file path to dev pfx file to sign with. If not supplied production signing is assumed. 
+ # + ## + @staticmethod + def PackageFmpImageAuth(InputBin, OutputBin, DevPfxFilePath = None, DevPfxPassword = None, DetachedSignatureFile = None, Eku = None): + logging.debug("CapsulePackage: Fmp Image Auth Header/Signing") + + #temp output dir is in the outputbin folder + ret = 0 + TempOutDir = os.path.join(os.path.dirname(os.path.abspath(OutputBin)), "_Temp_FmpImageAuth_" + str(datetime.datetime.now().time()).replace(":", "_")) + logging.debug("Temp Output dir for FmpImageAuth: %s" % TempOutDir) + os.mkdir(TempOutDir) + cmd = "GenFmpImageAuth.py" + params = "-o " + OutputBin + params = params + " -p " + InputBin + " -m 1" + params = params + " --debug" + params = params + " -l " + os.path.join(TempOutDir, "GenFmpImageAuth_Log.log") + if(DevPfxFilePath is not None): + logging.debug("FmpImageAuth is dev signed. Do entire process in 1 step locally.") + + #Find Signtool + SignToolPath = FindToolInWinSdk("signtool.exe") + if not os.path.exists(SignToolPath): + raise Exception("Can't find signtool on this machine.") + + params = params + " --SignTool \"" + SignToolPath + "\"" + + params = params + " --pfxfile " + DevPfxFilePath + if( DevPfxPassword is not None): + params += " --pfxpass " + DevPfxPassword + if (Eku is not None): + params += " --eku " + Eku + ret = RunPythonScript(cmd, params, workingdir=TempOutDir) + #delete the temp dir + shutil.rmtree(TempOutDir, ignore_errors=True) + else: + #production + logging.debug("FmpImageAuth is Production signed") + + if(DetachedSignatureFile is None): + logging.debug("FmpImageAuth Step1: Make ToBeSigned file for production") + params = params + " --production" + ret = RunPythonScript(cmd, params, workingdir=TempOutDir) + if(ret != 0): + raise Exception("GenFmpImageAuth Failed production signing: step 1. 
Errorcode %d" % ret) + #now we have a file to sign at + TBS = os.path.join(os.path.dirname(OutputBin), "payload.Temp.ToBeSigned") + if(not os.path.exists(TBS)): + raise Exception("GenFmpImageAuth didn't create ToBeSigned file") + os.rename(TBS, OutputBin) + + else: + logging.debug("FmpImageAuth Step3: Final Packaging of production signed") + params = params + " --production -s " + DetachedSignatureFile + ret = RunPythonScript(cmd, params, workingdir=TempOutDir) + #delete the temp dir + shutil.rmtree(TempOutDir, ignore_errors=True) + + if(ret != 0): + raise Exception("GenFmpImageAuth Failed with errorcode %d" % ret) + return ret + + @staticmethod + def PackageFmpCapsuleHeader(InputBin, OutputBin, FmpGuid): + with open(InputBin, 'rb') as in_file: + capsule_data = in_file.read() + + fmp_capsule = FmpCapsuleHeaderClass() + fmp_capsule.AddPayload(uuid.UUID(FmpGuid), capsule_data) + + with open(OutputBin, 'wb') as out_file: + out_file.write(fmp_capsule.Encode()) + + return 0 + + @staticmethod + def PackageCapsuleHeader(InputBin, OutputBin, FmpDeviceGuid=None): + logging.debug("CapsulePackage: Final Capsule Header") + if(FmpDeviceGuid == None): + logging.debug("CapsulePackage: Using default industry standard FMP guid") + FmpDeviceGuid = "6dcbd5ed-e82d-4c44-bda1-7194199ad92a" + + params = "-o " + OutputBin + params = params + " -g " + FmpDeviceGuid + params = params + " --capsule -v -f " + InputBin + params = params + " --capFlag PersistAcrossReset --capFlag InitiateReset" + ret = RunCmd("genfv", params) + if(ret != 0): + raise Exception("GenFv Failed with errorcode" % ret) + return ret diff --git a/BaseTools/Plugin/Sample/HelloWorld.py b/BaseTools/Plugin/Sample/HelloWorld.py index e123984175..d7000fb606 100644 --- a/BaseTools/Plugin/Sample/HelloWorld.py +++ b/BaseTools/Plugin/Sample/HelloWorld.py @@ -1,23 +1,23 @@ -## @file HelloWorld.py -# Sample Project Mu pre/post build plugin -## -# Copyright (c) Microsoft Corporation. 
-# SPDX-License-Identifier: BSD-2-Clause-Patent -## -### -from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin -import logging - -class HelloWorld(IUefiBuildPlugin): - - def do_post_build(self, thebuilder): - t = "PLUGIN HelloWorld: Hello World! - Post Build Plugin Hook" - print(t) - logging.debug(t) - return 0 - - def do_pre_build(self, thebuilder): - t ="PLUGIN HelloWorld: Hello World! - Pre Build Plugin Hook" - print(t) - logging.debug(t) - return 0 +## @file HelloWorld.py +# Sample Project Mu pre/post build plugin +## +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## +### +from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin +import logging + +class HelloWorld(IUefiBuildPlugin): + + def do_post_build(self, thebuilder): + t = "PLUGIN HelloWorld: Hello World! - Post Build Plugin Hook" + print(t) + logging.debug(t) + return 0 + + def do_pre_build(self, thebuilder): + t ="PLUGIN HelloWorld: Hello World! - Pre Build Plugin Hook" + print(t) + logging.debug(t) + return 0 diff --git a/BaseTools/Plugin/WindowsCapsuleSupportHelper/WindowsCapsuleSupportHelper.py b/BaseTools/Plugin/WindowsCapsuleSupportHelper/WindowsCapsuleSupportHelper.py index 538bc3a084..8bcf7b56b7 100644 --- a/BaseTools/Plugin/WindowsCapsuleSupportHelper/WindowsCapsuleSupportHelper.py +++ b/BaseTools/Plugin/WindowsCapsuleSupportHelper/WindowsCapsuleSupportHelper.py @@ -1,62 +1,62 @@ -## -# UefiBuild Plugin that supports Window Capsule files based on the -# Windows Firmware Update Platform spec. -# Creates INF, Cat, and then signs it -# -# Copyright (c) Microsoft Corporation. 
-# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -import sys -import re -import datetime -import os -import logging -from edk2toolext.environment.plugintypes.uefi_helper_plugin import IUefiHelperPlugin -from edk2toollib.windows.capsule.cat_generator import * -from edk2toollib.windows.capsule.inf_generator import * -from edk2toollib.utility_functions import CatalogSignWithSignTool -from edk2toollib.windows.locate_tools import FindToolInWinSdk - - -class WindowsCapsuleSupportHelper(IUefiHelperPlugin): - - def RegisterHelpers(self, obj): - fp = os.path.abspath(__file__) - obj.Register("PackageWindowsCapsuleFiles", WindowsCapsuleSupportHelper.PackageWindowsCapsuleFiles, fp) - - - @staticmethod - def PackageWindowsCapsuleFiles(OutputFolder, ProductName, ProductFmpGuid, CapsuleVersion_DotString, - CapsuleVersion_HexString, ProductFwProvider, ProductFwMfgName, ProductFwDesc, CapsuleFileName, PfxFile=None, PfxPass=None, - Rollback=False, Arch='amd64', OperatingSystem_String='Win10'): - - logging.debug("CapsulePackage: Create Windows Capsule Files") - - #Make INF - InfFilePath = os.path.join(OutputFolder, ProductName + ".inf") - InfTool = InfGenerator(ProductName, ProductFwProvider, ProductFmpGuid, Arch, ProductFwDesc, CapsuleVersion_DotString, CapsuleVersion_HexString) - InfTool.Manufacturer = ProductFwMfgName #optional - ret = InfTool.MakeInf(InfFilePath, CapsuleFileName, Rollback) - if(ret != 0): - raise Exception("CreateWindowsInf Failed with errorcode %d" % ret) - - #Make CAT - CatFilePath = os.path.realpath(os.path.join(OutputFolder, ProductName + ".cat")) - CatTool = CatGenerator(Arch, OperatingSystem_String) - ret = CatTool.MakeCat(CatFilePath) - - if(ret != 0): - raise Exception("Creating Cat file Failed with errorcode %d" % ret) - - if(PfxFile is not None): - #Find Signtool - SignToolPath = FindToolInWinSdk("signtool.exe") - if not os.path.exists(SignToolPath): - raise Exception("Can't find signtool on this machine.") - #dev sign the cat file - ret = 
CatalogSignWithSignTool(SignToolPath, CatFilePath, PfxFile, PfxPass) - if(ret != 0): - raise Exception("Signing Cat file Failed with errorcode %d" % ret) - - return ret +## +# UefiBuild Plugin that supports Window Capsule files based on the +# Windows Firmware Update Platform spec. +# Creates INF, Cat, and then signs it +# +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import sys +import re +import datetime +import os +import logging +from edk2toolext.environment.plugintypes.uefi_helper_plugin import IUefiHelperPlugin +from edk2toollib.windows.capsule.cat_generator import * +from edk2toollib.windows.capsule.inf_generator import * +from edk2toollib.utility_functions import CatalogSignWithSignTool +from edk2toollib.windows.locate_tools import FindToolInWinSdk + + +class WindowsCapsuleSupportHelper(IUefiHelperPlugin): + + def RegisterHelpers(self, obj): + fp = os.path.abspath(__file__) + obj.Register("PackageWindowsCapsuleFiles", WindowsCapsuleSupportHelper.PackageWindowsCapsuleFiles, fp) + + + @staticmethod + def PackageWindowsCapsuleFiles(OutputFolder, ProductName, ProductFmpGuid, CapsuleVersion_DotString, + CapsuleVersion_HexString, ProductFwProvider, ProductFwMfgName, ProductFwDesc, CapsuleFileName, PfxFile=None, PfxPass=None, + Rollback=False, Arch='amd64', OperatingSystem_String='Win10'): + + logging.debug("CapsulePackage: Create Windows Capsule Files") + + #Make INF + InfFilePath = os.path.join(OutputFolder, ProductName + ".inf") + InfTool = InfGenerator(ProductName, ProductFwProvider, ProductFmpGuid, Arch, ProductFwDesc, CapsuleVersion_DotString, CapsuleVersion_HexString) + InfTool.Manufacturer = ProductFwMfgName #optional + ret = InfTool.MakeInf(InfFilePath, CapsuleFileName, Rollback) + if(ret != 0): + raise Exception("CreateWindowsInf Failed with errorcode %d" % ret) + + #Make CAT + CatFilePath = os.path.realpath(os.path.join(OutputFolder, ProductName + ".cat")) + CatTool = CatGenerator(Arch, 
OperatingSystem_String) + ret = CatTool.MakeCat(CatFilePath) + + if(ret != 0): + raise Exception("Creating Cat file Failed with errorcode %d" % ret) + + if(PfxFile is not None): + #Find Signtool + SignToolPath = FindToolInWinSdk("signtool.exe") + if not os.path.exists(SignToolPath): + raise Exception("Can't find signtool on this machine.") + #dev sign the cat file + ret = CatalogSignWithSignTool(SignToolPath, CatFilePath, PfxFile, PfxPass) + if(ret != 0): + raise Exception("Signing Cat file Failed with errorcode %d" % ret) + + return ret diff --git a/BaseTools/Scripts/GenFmpImageAuth.py b/BaseTools/Scripts/GenFmpImageAuth.py index 5cbc5dd318..bd7cd22d31 100644 --- a/BaseTools/Scripts/GenFmpImageAuth.py +++ b/BaseTools/Scripts/GenFmpImageAuth.py @@ -1,231 +1,231 @@ -## -## Script to Generate a UEFI 2.4B FMP compliant Image Auth Header wrapped -## around the payload file. -## -## For dev purposes this script takes a payload file and signs it and encapsulates it -## in the correct headers. This file is then ready to be put into a FMP capsule. -## -## For production use this script has a production flag and a DetachedSignature parameter -## which allows the signing to be done offline. -## -## General process: -## Phase 1: Create payload file by combining payload and monotonic count -## Phase 2: Sign it using signtool -## Phase 3: Wrap payload in headers to create final FMP Image header/payload -## -## -## Copyright (c) Microsoft Corporation. 
-## SPDX-License-Identifier: BSD-2-Clause-Patent -## - - -import os, sys -from optparse import OptionParser -import logging -import datetime -import struct -import subprocess -import uuid -from edk2toollib.utility_functions import RunCmd -from edk2toollib.utility_functions import DetachedSignWithSignTool - - -gPhase3PackageOnly = False - -# -#main script function -# -def main(): - parser = OptionParser() - #Output debug log - parser.add_option("-l", dest="OutputLog", help="Create an output log file: ie -l out.txt", default=None) - parser.add_option("-o", "--OutputFile", dest="OutputFile", help="Result/Output file", default=None) - parser.add_option("-p", "--payload", dest="Payload", help="Input unsigned payload file", default=None) - parser.add_option("--production", dest="ProductionSign", action="store_true", help="Production Sign Process (no dev signing)", default=False) - parser.add_option("-m", dest="MonotonicCount", help="Monotonic Count Value", default=0) - parser.add_option("-s", dest="DetachedSignature", help="Detached Signature file (production signed phase 3 step only)", default=None) - parser.add_option("--pfxfile", dest="PfxPath", help="Path to PFX file for dev signing", default=None) - parser.add_option("--pfxpass", dest="PfxPass", help="Optional - PFX password for dev signing with PFX cert", default=None) - parser.add_option("--eku", dest="Eku", help="Option -specify EKU value to pass to signtool if required", default=None) - parser.add_option("--SignTool", dest="SignToolPath", help="Path to signtool.exe") - #Turn on dubug level logging - parser.add_option("--debug", action="store_true", dest="debug", help="turn on debug logging level for file log", default=False) - parser.add_option("--dirty", action="store_true", dest="dirty", help="turn on dirty flag to keep intermediate files. 
Default is to delete them.", default=False) - - (options, args) = parser.parse_args() - - #setup file based logging if outputReport specified - if(options.OutputLog): - if(len(options.OutputLog) < 2): - logging.critical("the output log file parameter is invalid") - return -2 - else: - #setup file based logging - filelogger = logging.FileHandler(filename=options.OutputLog, mode='w') - if(options.debug): - filelogger.setLevel(logging.DEBUG) - else: - filelogger.setLevel(logging.INFO) - - filelogger.setFormatter(formatter) - logging.getLogger('').addHandler(filelogger) - - logging.info("Log Started: " + datetime.datetime.strftime(datetime.datetime.now(), "%A, %B %d, %Y %I:%M%p" )) - - #check for valid files - if not options.Payload: - logging.critical("No Payload file specified") - return -1 - - if not os.path.isfile(options.Payload): - logging.critical("Invalid Path to payload file") - return -2 - - if not options.DetachedSignature: - logging.debug("No Detached Signature File.") - else: - logging.debug("Parameter for detached signature file specified. " + options.DetachedSignature) - logging.debug("Entering Phase2-PackageOnly Mode") - global gPhase3PackageOnly - gPhase3PackageOnly = True - - if not options.OutputFile: - logging.debug("No output file specified. Using default. AuthPayload.FmImageAuth") - options.OutputFile = "AuthPayload.FmImageAuth" - - if(not gPhase3PackageOnly and not options.ProductionSign): - #must have a pfx file - if not options.PfxPath: - logging.critical("No Pfx File given.") - return -7 - if not os.path.isfile(options.PfxPath): - logging.critical("Invalid PFX Path. File doesn't exist. 
" + options.PfxPath) - return -6 - - logging.debug("Using PFX file: " + str(options.PfxPath)) - - - logging.debug("Production Mode: " + str(options.ProductionSign)) - logging.debug("Monotonic Count: " + str(options.MonotonicCount)) - logging.debug("Output File: " + str(options.OutputFile)) - logging.debug("Dirty Mode: " + str(options.dirty)) - - FileToSign = os.path.join("payload.Temp.ToBeSigned") - - - - #if not doing phase2 only then we need to do presign stuff - if not gPhase3PackageOnly: - #Since we are not in phase3packageonly mode we know no DetachedSignature file speficied. Set to the default output. - OutputDir = os.path.dirname(os.path.abspath(options.OutputFile)) - logging.debug("Temp files will be written to: " + str(OutputDir)) - - #change the path to temp location - FileToSign = os.path.join(OutputDir, FileToSign) - options.DetachedSignature = FileToSign + ".p7" - - #Create a temp file with payload + monotonic count - f = open(FileToSign, "wb") - pf = open(options.Payload, "rb") - f.write(pf.read()) - mc = struct.pack("Q", int(options.MonotonicCount)) - f.write(mc) - pf.close() - f.close() - - - #if not doing production signing then sign it - if not options.ProductionSign: - #check sign tool - if(os.path.exists(options.SignToolPath)): - logging.debug("Signtool.exe found at location: " + options.SignToolPath) - else: - logging.critical("Can't find signtool at location: " + options.SignToolPath) - return -5 - - ret = DetachedSignWithSignTool( - options.SignToolPath, - FileToSign, - options.DetachedSignature, - options.PfxPath, - PfxPass=options.PfxPass, - Eku=options.Eku - ) - - if ret != 0: - logging.critical("DetachedSignWithSignTool Failed: " + str(ret)) - return ret - - if not options.dirty: - logging.debug("Delete temp file: " + str(FileToSign)) - os.remove(FileToSign) - - - else: - logging.critical("File To Production Sign Created: " + FileToSign) - return 0 - - #package the final output (phase 3) - wcugSize = 
os.path.getsize(options.DetachedSignature) - logging.debug("PKCS7 Signed Data is size: " + str(wcugSize)) - wcugSize = wcugSize + 4 + 2 + 2 + 16 # matches the hdr + guid below - - # - #Header layout and structures defined in UEFI 2.4 Errata B. - # - - #EFI_FIRMWARE_IMAGE_AUTH - #UINT64 Monotonic Count <--count value used when signing it - #WIN_CERTIFICATE_UEFI_GUID AuthInfo - #WIN_CERTIFICATE Hdr - #UINT32 dwLength <--Length of cert header - #UINT16 wRevision <--Revision level of win cert current 0x0200 - #UINT16 wCertType <--WIN_CERT_TYPE_EFI_GUID 0x0EF1 - #EFI_GUID CertType <--gEfiCertPkcs7Guid = { 0x4aafd29d, 0x68df, 0x49ee, {0x8a, 0xa9, 0x34, 0x7d, 0x37, 0x56, 0x65, 0xa7 }} - #UINT8[] PKCS7 SignedData <--DetachedSignature from signtool - #UINT8[] Payload <--Payload file - - #struct format for the header - header = struct.pack("QLHH", int(options.MonotonicCount), int(wcugSize), int("200", 16), int("0EF1", 16)) - pkcsguid = uuid.UUID('{4aafd29d-68df-49ee-8aa9-347d375665a7}') - - f = open(options.OutputFile, "wb") - f.write(header) - f.write(pkcsguid.bytes_le) - sd = open(options.DetachedSignature, "rb") - f.write(sd.read()) - sd.close() - p = open(options.Payload, "rb") - f.write(p.read()) - p.close() - f.close() - logging.critical("Final FMP compliant Authenticated Payload Image File created:\n " + os.path.abspath(str(options.OutputFile))) - - #if user wants temp files deleted and didn't pass in the p7 file....then delete it now - if not options.dirty: - if not gPhase3PackageOnly: - logging.debug("Delete temp file: " + str(options.DetachedSignature)) - os.remove(options.DetachedSignature) - - - return 0 - - -if __name__ == '__main__': - #setup main console as logger - logger = logging.getLogger('') - logger.setLevel(logging.DEBUG) - formatter = logging.Formatter("%(levelname)s - %(message)s") - console = logging.StreamHandler() - console.setLevel(logging.CRITICAL) - console.setFormatter(formatter) - logger.addHandler(console) - - #call main worker function - 
retcode = main() - - if retcode != 0: - logging.critical("Failed. Return Code: %i" % retcode) - #end logging - logging.shutdown() - sys.exit(retcode) +## +## Script to Generate a UEFI 2.4B FMP compliant Image Auth Header wrapped +## around the payload file. +## +## For dev purposes this script takes a payload file and signs it and encapsulates it +## in the correct headers. This file is then ready to be put into a FMP capsule. +## +## For production use this script has a production flag and a DetachedSignature parameter +## which allows the signing to be done offline. +## +## General process: +## Phase 1: Create payload file by combining payload and monotonic count +## Phase 2: Sign it using signtool +## Phase 3: Wrap payload in headers to create final FMP Image header/payload +## +## +## Copyright (c) Microsoft Corporation. +## SPDX-License-Identifier: BSD-2-Clause-Patent +## + + +import os, sys +from optparse import OptionParser +import logging +import datetime +import struct +import subprocess +import uuid +from edk2toollib.utility_functions import RunCmd +from edk2toollib.utility_functions import DetachedSignWithSignTool + + +gPhase3PackageOnly = False + +# +#main script function +# +def main(): + parser = OptionParser() + #Output debug log + parser.add_option("-l", dest="OutputLog", help="Create an output log file: ie -l out.txt", default=None) + parser.add_option("-o", "--OutputFile", dest="OutputFile", help="Result/Output file", default=None) + parser.add_option("-p", "--payload", dest="Payload", help="Input unsigned payload file", default=None) + parser.add_option("--production", dest="ProductionSign", action="store_true", help="Production Sign Process (no dev signing)", default=False) + parser.add_option("-m", dest="MonotonicCount", help="Monotonic Count Value", default=0) + parser.add_option("-s", dest="DetachedSignature", help="Detached Signature file (production signed phase 3 step only)", default=None) + parser.add_option("--pfxfile", dest="PfxPath", 
help="Path to PFX file for dev signing", default=None) + parser.add_option("--pfxpass", dest="PfxPass", help="Optional - PFX password for dev signing with PFX cert", default=None) + parser.add_option("--eku", dest="Eku", help="Option -specify EKU value to pass to signtool if required", default=None) + parser.add_option("--SignTool", dest="SignToolPath", help="Path to signtool.exe") + #Turn on dubug level logging + parser.add_option("--debug", action="store_true", dest="debug", help="turn on debug logging level for file log", default=False) + parser.add_option("--dirty", action="store_true", dest="dirty", help="turn on dirty flag to keep intermediate files. Default is to delete them.", default=False) + + (options, args) = parser.parse_args() + + #setup file based logging if outputReport specified + if(options.OutputLog): + if(len(options.OutputLog) < 2): + logging.critical("the output log file parameter is invalid") + return -2 + else: + #setup file based logging + filelogger = logging.FileHandler(filename=options.OutputLog, mode='w') + if(options.debug): + filelogger.setLevel(logging.DEBUG) + else: + filelogger.setLevel(logging.INFO) + + filelogger.setFormatter(formatter) + logging.getLogger('').addHandler(filelogger) + + logging.info("Log Started: " + datetime.datetime.strftime(datetime.datetime.now(), "%A, %B %d, %Y %I:%M%p" )) + + #check for valid files + if not options.Payload: + logging.critical("No Payload file specified") + return -1 + + if not os.path.isfile(options.Payload): + logging.critical("Invalid Path to payload file") + return -2 + + if not options.DetachedSignature: + logging.debug("No Detached Signature File.") + else: + logging.debug("Parameter for detached signature file specified. " + options.DetachedSignature) + logging.debug("Entering Phase2-PackageOnly Mode") + global gPhase3PackageOnly + gPhase3PackageOnly = True + + if not options.OutputFile: + logging.debug("No output file specified. Using default. 
AuthPayload.FmImageAuth") + options.OutputFile = "AuthPayload.FmImageAuth" + + if(not gPhase3PackageOnly and not options.ProductionSign): + #must have a pfx file + if not options.PfxPath: + logging.critical("No Pfx File given.") + return -7 + if not os.path.isfile(options.PfxPath): + logging.critical("Invalid PFX Path. File doesn't exist. " + options.PfxPath) + return -6 + + logging.debug("Using PFX file: " + str(options.PfxPath)) + + + logging.debug("Production Mode: " + str(options.ProductionSign)) + logging.debug("Monotonic Count: " + str(options.MonotonicCount)) + logging.debug("Output File: " + str(options.OutputFile)) + logging.debug("Dirty Mode: " + str(options.dirty)) + + FileToSign = os.path.join("payload.Temp.ToBeSigned") + + + + #if not doing phase2 only then we need to do presign stuff + if not gPhase3PackageOnly: + #Since we are not in phase3packageonly mode we know no DetachedSignature file speficied. Set to the default output. + OutputDir = os.path.dirname(os.path.abspath(options.OutputFile)) + logging.debug("Temp files will be written to: " + str(OutputDir)) + + #change the path to temp location + FileToSign = os.path.join(OutputDir, FileToSign) + options.DetachedSignature = FileToSign + ".p7" + + #Create a temp file with payload + monotonic count + f = open(FileToSign, "wb") + pf = open(options.Payload, "rb") + f.write(pf.read()) + mc = struct.pack("Q", int(options.MonotonicCount)) + f.write(mc) + pf.close() + f.close() + + + #if not doing production signing then sign it + if not options.ProductionSign: + #check sign tool + if(os.path.exists(options.SignToolPath)): + logging.debug("Signtool.exe found at location: " + options.SignToolPath) + else: + logging.critical("Can't find signtool at location: " + options.SignToolPath) + return -5 + + ret = DetachedSignWithSignTool( + options.SignToolPath, + FileToSign, + options.DetachedSignature, + options.PfxPath, + PfxPass=options.PfxPass, + Eku=options.Eku + ) + + if ret != 0: + 
logging.critical("DetachedSignWithSignTool Failed: " + str(ret)) + return ret + + if not options.dirty: + logging.debug("Delete temp file: " + str(FileToSign)) + os.remove(FileToSign) + + + else: + logging.critical("File To Production Sign Created: " + FileToSign) + return 0 + + #package the final output (phase 3) + wcugSize = os.path.getsize(options.DetachedSignature) + logging.debug("PKCS7 Signed Data is size: " + str(wcugSize)) + wcugSize = wcugSize + 4 + 2 + 2 + 16 # matches the hdr + guid below + + # + #Header layout and structures defined in UEFI 2.4 Errata B. + # + + #EFI_FIRMWARE_IMAGE_AUTH + #UINT64 Monotonic Count <--count value used when signing it + #WIN_CERTIFICATE_UEFI_GUID AuthInfo + #WIN_CERTIFICATE Hdr + #UINT32 dwLength <--Length of cert header + #UINT16 wRevision <--Revision level of win cert current 0x0200 + #UINT16 wCertType <--WIN_CERT_TYPE_EFI_GUID 0x0EF1 + #EFI_GUID CertType <--gEfiCertPkcs7Guid = { 0x4aafd29d, 0x68df, 0x49ee, {0x8a, 0xa9, 0x34, 0x7d, 0x37, 0x56, 0x65, 0xa7 }} + #UINT8[] PKCS7 SignedData <--DetachedSignature from signtool + #UINT8[] Payload <--Payload file + + #struct format for the header + header = struct.pack("QLHH", int(options.MonotonicCount), int(wcugSize), int("200", 16), int("0EF1", 16)) + pkcsguid = uuid.UUID('{4aafd29d-68df-49ee-8aa9-347d375665a7}') + + f = open(options.OutputFile, "wb") + f.write(header) + f.write(pkcsguid.bytes_le) + sd = open(options.DetachedSignature, "rb") + f.write(sd.read()) + sd.close() + p = open(options.Payload, "rb") + f.write(p.read()) + p.close() + f.close() + logging.critical("Final FMP compliant Authenticated Payload Image File created:\n " + os.path.abspath(str(options.OutputFile))) + + #if user wants temp files deleted and didn't pass in the p7 file....then delete it now + if not options.dirty: + if not gPhase3PackageOnly: + logging.debug("Delete temp file: " + str(options.DetachedSignature)) + os.remove(options.DetachedSignature) + + + return 0 + + +if __name__ == '__main__': + 
#setup main console as logger + logger = logging.getLogger('') + logger.setLevel(logging.DEBUG) + formatter = logging.Formatter("%(levelname)s - %(message)s") + console = logging.StreamHandler() + console.setLevel(logging.CRITICAL) + console.setFormatter(formatter) + logger.addHandler(console) + + #call main worker function + retcode = main() + + if retcode != 0: + logging.critical("Failed. Return Code: %i" % retcode) + #end logging + logging.shutdown() + sys.exit(retcode) diff --git a/BaseTools/Scripts/WindowsCapsuleFileGen.py b/BaseTools/Scripts/WindowsCapsuleFileGen.py index 56a3c4408f..9a7ef20c79 100644 --- a/BaseTools/Scripts/WindowsCapsuleFileGen.py +++ b/BaseTools/Scripts/WindowsCapsuleFileGen.py @@ -1,129 +1,129 @@ -## -# Tool to create a Windows Capsule files that complies with -# the Windows Firmware Update Platform specification. -# -# Gen INF, CAT, and then dev sign the CAT if PFX supplied. -# -# -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -import os -import sys -import logging -import argparse -import datetime - -from edk2toollib.windows.capsule.cat_generator import * -from edk2toollib.windows.capsule.inf_generator import * -from edk2toollib.utility_functions import CatalogSignWithSignTool - -def main(): - parser = argparse.ArgumentParser(description='Generate Windows Firmware Update Platform Files for Capsules') - parser.add_argument("name", help="Firmware Name. 
No spaces") - parser.add_argument("provider", help="Firmware provider listed in INF") - parser.add_argument("description", help="Firmware description listed in INF") - parser.add_argument("version_string", help="Version String in form of XX.XX.XX[.XX]") - parser.add_argument("version_hex", help="Version String in Hex 0xAABBCCDD must be representable within 32bit") - parser.add_argument("esrt_guid", help="guid string in registry format (########-####-####-####-############) for this ESRT entry") - parser.add_argument("firmware_bin_file_path", help="full path to firmware bin / capsule file") - parser.add_argument('arch', choices=InfGenerator.SUPPORTED_ARCH, help="Architecture targeted by INF and CAT") - parser.add_argument('operating_sytem', choices=CatGenerator.SUPPORTED_OS, help="operating system targeted by INF and CAT") - parser.add_argument("--mfgname", help="Manufacturer name listed in INF") - parser.add_argument("--rollback", action="store_true", dest="rollback", help="build a rollback capsule", default=False) - parser.add_argument("--pfx_file", help="Full Path to PFX file. If not set then signing will not be performed.") - parser.add_argument("--pfx_pass", help="Password for PFX file. 
Optional based on PFX file") - - - #Turn on dubug level logging - parser.add_argument("--debug", action="store_true", dest="debug", help="turn on debug logging level for file log", default=False) - #Output debug log - parser.add_argument("-l", dest="OutputLog", help="Create an output debug log file: ie -l out.txt", default=None) - - args = parser.parse_args() - - #setup file based logging if outputReport specified - if(args.OutputLog): - if(len(args.OutputLog) < 2): - logging.critical("the output log file parameter is invalid") - return -2 - else: - #setup file based logging - filelogger = logging.FileHandler(filename=args.OutputLog, mode='w') - if(args.debug): - filelogger.setLevel(logging.DEBUG) - else: - filelogger.setLevel(logging.INFO) - - filelogger.setFormatter(formatter) - logging.getLogger('').addHandler(filelogger) - - logging.info("Log Started: " + datetime.datetime.strftime(datetime.datetime.now(), "%A, %B %d, %Y %I:%M%p" )) - OutputFolder = os.path.dirname(args.firmware_bin_file_path) - FirmwareFile = os.path.basename(args.firmware_bin_file_path) - - logging.debug("Make INF") - #Make INF - InfFilePath = os.path.join(OutputFolder, args.name + ".inf") - InfTool = InfGenerator(args.name, args.provider, args.esrt_guid, args.arch, args.description, args.version_string, args.version_hex) - if(args.mfgname is not None): - InfTool.Manufacturer = args.mfgname #optional - ret = InfTool.MakeInf(InfFilePath, FirmwareFile, args.rollback) - if(ret != 0): - logging.critical("CreateWindowsInf Failed with errorcode %d" % ret) - return ret - - #Make CAT - CatFilePath = os.path.realpath(os.path.join(OutputFolder, args.name + ".cat")) - CatTool = CatGenerator(args.arch, args.operating_sytem) - ret = CatTool.MakeCat(CatFilePath) - - if(ret != 0): - logging.critical("Creating Cat file Failed with errorcode %d" % ret) - return ret - - if(args.pfx_file is not None): - logging.debug("PFX file set. 
Going to do signing") - #Find Signtool - SignToolPath = os.path.join(os.getenv("ProgramFiles(x86)"), "Windows Kits", "8.1", "bin", "x64", "signtool.exe") - if not os.path.exists(SignToolPath): - logging.debug("Failed to find 8.1 version of signtool. Trying 10") - SignToolPath = SignToolPath.replace('8.1', '10') - - if not os.path.exists(SignToolPath): - logging.critical("Can't find signtool on this machine.") - return -3 - #dev sign the cat file - ret = CatalogSignWithSignTool(SignToolPath, CatFilePath, args.pfx_file, args.pfx_pass) - if(ret != 0): - logging.critical("Signing Cat file Failed with errorcode %d" % ret) - return ret - else: - logging.info("No PFX. Not signing") - - return ret - - -#-------------------------------- -# Control starts here -# -#-------------------------------- -if __name__ == '__main__': - #setup main console as logger - logger = logging.getLogger('') - logger.setLevel(logging.DEBUG) - formatter = logging.Formatter("%(levelname)s - %(message)s") - console = logging.StreamHandler() - console.setLevel(logging.CRITICAL) - console.setFormatter(formatter) - logger.addHandler(console) - - #call main worker function - retcode = main() - - if retcode != 0: - logging.critical("Failed. Return Code: %i" % retcode) - #end logging - logging.shutdown() - sys.exit(retcode) +## +# Tool to create a Windows Capsule files that complies with +# the Windows Firmware Update Platform specification. +# +# Gen INF, CAT, and then dev sign the CAT if PFX supplied. +# +# +# Copyright (c) Microsoft Corporation. 
+# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import os +import sys +import logging +import argparse +import datetime + +from edk2toollib.windows.capsule.cat_generator import * +from edk2toollib.windows.capsule.inf_generator import * +from edk2toollib.utility_functions import CatalogSignWithSignTool + +def main(): + parser = argparse.ArgumentParser(description='Generate Windows Firmware Update Platform Files for Capsules') + parser.add_argument("name", help="Firmware Name. No spaces") + parser.add_argument("provider", help="Firmware provider listed in INF") + parser.add_argument("description", help="Firmware description listed in INF") + parser.add_argument("version_string", help="Version String in form of XX.XX.XX[.XX]") + parser.add_argument("version_hex", help="Version String in Hex 0xAABBCCDD must be representable within 32bit") + parser.add_argument("esrt_guid", help="guid string in registry format (########-####-####-####-############) for this ESRT entry") + parser.add_argument("firmware_bin_file_path", help="full path to firmware bin / capsule file") + parser.add_argument('arch', choices=InfGenerator.SUPPORTED_ARCH, help="Architecture targeted by INF and CAT") + parser.add_argument('operating_sytem', choices=CatGenerator.SUPPORTED_OS, help="operating system targeted by INF and CAT") + parser.add_argument("--mfgname", help="Manufacturer name listed in INF") + parser.add_argument("--rollback", action="store_true", dest="rollback", help="build a rollback capsule", default=False) + parser.add_argument("--pfx_file", help="Full Path to PFX file. If not set then signing will not be performed.") + parser.add_argument("--pfx_pass", help="Password for PFX file. 
Optional based on PFX file") + + + #Turn on dubug level logging + parser.add_argument("--debug", action="store_true", dest="debug", help="turn on debug logging level for file log", default=False) + #Output debug log + parser.add_argument("-l", dest="OutputLog", help="Create an output debug log file: ie -l out.txt", default=None) + + args = parser.parse_args() + + #setup file based logging if outputReport specified + if(args.OutputLog): + if(len(args.OutputLog) < 2): + logging.critical("the output log file parameter is invalid") + return -2 + else: + #setup file based logging + filelogger = logging.FileHandler(filename=args.OutputLog, mode='w') + if(args.debug): + filelogger.setLevel(logging.DEBUG) + else: + filelogger.setLevel(logging.INFO) + + filelogger.setFormatter(formatter) + logging.getLogger('').addHandler(filelogger) + + logging.info("Log Started: " + datetime.datetime.strftime(datetime.datetime.now(), "%A, %B %d, %Y %I:%M%p" )) + OutputFolder = os.path.dirname(args.firmware_bin_file_path) + FirmwareFile = os.path.basename(args.firmware_bin_file_path) + + logging.debug("Make INF") + #Make INF + InfFilePath = os.path.join(OutputFolder, args.name + ".inf") + InfTool = InfGenerator(args.name, args.provider, args.esrt_guid, args.arch, args.description, args.version_string, args.version_hex) + if(args.mfgname is not None): + InfTool.Manufacturer = args.mfgname #optional + ret = InfTool.MakeInf(InfFilePath, FirmwareFile, args.rollback) + if(ret != 0): + logging.critical("CreateWindowsInf Failed with errorcode %d" % ret) + return ret + + #Make CAT + CatFilePath = os.path.realpath(os.path.join(OutputFolder, args.name + ".cat")) + CatTool = CatGenerator(args.arch, args.operating_sytem) + ret = CatTool.MakeCat(CatFilePath) + + if(ret != 0): + logging.critical("Creating Cat file Failed with errorcode %d" % ret) + return ret + + if(args.pfx_file is not None): + logging.debug("PFX file set. 
Going to do signing") + #Find Signtool + SignToolPath = os.path.join(os.getenv("ProgramFiles(x86)"), "Windows Kits", "8.1", "bin", "x64", "signtool.exe") + if not os.path.exists(SignToolPath): + logging.debug("Failed to find 8.1 version of signtool. Trying 10") + SignToolPath = SignToolPath.replace('8.1', '10') + + if not os.path.exists(SignToolPath): + logging.critical("Can't find signtool on this machine.") + return -3 + #dev sign the cat file + ret = CatalogSignWithSignTool(SignToolPath, CatFilePath, args.pfx_file, args.pfx_pass) + if(ret != 0): + logging.critical("Signing Cat file Failed with errorcode %d" % ret) + return ret + else: + logging.info("No PFX. Not signing") + + return ret + + +#-------------------------------- +# Control starts here +# +#-------------------------------- +if __name__ == '__main__': + #setup main console as logger + logger = logging.getLogger('') + logger.setLevel(logging.DEBUG) + formatter = logging.Formatter("%(levelname)s - %(message)s") + console = logging.StreamHandler() + console.setLevel(logging.CRITICAL) + console.setFormatter(formatter) + logger.addHandler(console) + + #call main worker function + retcode = main() + + if retcode != 0: + logging.critical("Failed. Return Code: %i" % retcode) + #end logging + logging.shutdown() + sys.exit(retcode) diff --git a/MdeModulePkg/Library/BaseExceptionPersistenceLibNull/BaseExceptionPersistenceLibNull.inf b/MdeModulePkg/Library/BaseExceptionPersistenceLibNull/BaseExceptionPersistenceLibNull.inf index 7a9b21f249..d95247967c 100644 --- a/MdeModulePkg/Library/BaseExceptionPersistenceLibNull/BaseExceptionPersistenceLibNull.inf +++ b/MdeModulePkg/Library/BaseExceptionPersistenceLibNull/BaseExceptionPersistenceLibNull.inf @@ -1,26 +1,26 @@ -## @file BaseExceptionPersistenceLibNull.inf -# -# NULL implementation of ExceptionPersistenceLib -# -# Copyright (c) Microsoft Corporation. 
-# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = BaseExceptionPersistenceLibNull - FILE_GUID = adefa38d-ea8c-4418-beb5-ff9e13ea2260 - MODULE_TYPE = BASE - VERSION_STRING = 1.0 - LIBRARY_CLASS = ExceptionPersistenceLib - -# -# VALID_ARCHITECTURES = IA32 X64 -# - -[Sources] - BaseExceptionPersistenceLibNull.c - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec +## @file BaseExceptionPersistenceLibNull.inf +# +# NULL implementation of ExceptionPersistenceLib +# +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = BaseExceptionPersistenceLibNull + FILE_GUID = adefa38d-ea8c-4418-beb5-ff9e13ea2260 + MODULE_TYPE = BASE + VERSION_STRING = 1.0 + LIBRARY_CLASS = ExceptionPersistenceLib + +# +# VALID_ARCHITECTURES = IA32 X64 +# + +[Sources] + BaseExceptionPersistenceLibNull.c + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec diff --git a/MdeModulePkg/Library/CapsulePersistLibNull/CapsulePersistLibNull.inf b/MdeModulePkg/Library/CapsulePersistLibNull/CapsulePersistLibNull.inf index 16c03ea4ab..5b182ccb84 100644 --- a/MdeModulePkg/Library/CapsulePersistLibNull/CapsulePersistLibNull.inf +++ b/MdeModulePkg/Library/CapsulePersistLibNull/CapsulePersistLibNull.inf @@ -1,36 +1,36 @@ -## @file CapsulePersistLibNull.inf -# A null implementation of the CapsulePersistLib -# -## -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -# -## - - -[Defines] - INF_VERSION = 0x00010017 - BASE_NAME = CapsulePersistLibNull - FILE_GUID = 96AAE710-21AB-4881-9D92-8AD19479BB36 - VERSION_STRING = 1.0 - MODULE_TYPE = DXE_RUNTIME_DRIVER - LIBRARY_CLASS = CapsulePersistLib - -# -# The following information is for reference only and not required by the build tools. 
-# -# VALID_ARCHITECTURES = IA32 X64 -# - - -[Sources] - CapsulePersistLibNull.c - - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - - -[LibraryClasses] - +## @file CapsulePersistLibNull.inf +# A null implementation of the CapsulePersistLib +# +## +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +# +## + + +[Defines] + INF_VERSION = 0x00010017 + BASE_NAME = CapsulePersistLibNull + FILE_GUID = 96AAE710-21AB-4881-9D92-8AD19479BB36 + VERSION_STRING = 1.0 + MODULE_TYPE = DXE_RUNTIME_DRIVER + LIBRARY_CLASS = CapsulePersistLib + +# +# The following information is for reference only and not required by the build tools. +# +# VALID_ARCHITECTURES = IA32 X64 +# + + +[Sources] + CapsulePersistLibNull.c + + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + + +[LibraryClasses] + diff --git a/MdeModulePkg/Library/DeviceStateLib/DeviceStateLib.inf b/MdeModulePkg/Library/DeviceStateLib/DeviceStateLib.inf index 185b8d03bf..10914da8df 100644 --- a/MdeModulePkg/Library/DeviceStateLib/DeviceStateLib.inf +++ b/MdeModulePkg/Library/DeviceStateLib/DeviceStateLib.inf @@ -1,33 +1,33 @@ -## @file -# Library to get and set the device state -# -# Copyright (C) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -# -# MU_CHANGE: new file -## - - -[Defines] -INF_VERSION = 0x00010017 -BASE_NAME = DeviceStateLib -FILE_GUID = 49F18455-FE0D-4DFC-B88B-BEC283BB46DD -VERSION_STRING = 1.0 -MODULE_TYPE = BASE -LIBRARY_CLASS = DeviceStateLib - -[LibraryClasses] -DebugLib -MemoryAllocationLib -BaseMemoryLib -PcdLib - -[Packages] -MdePkg/MdePkg.dec -MdeModulePkg/MdeModulePkg.dec - -[Sources] -DeviceStateLib.c - -[Pcd] -gEfiMdeModulePkgTokenSpaceGuid.PcdDeviceStateBitmask +## @file +# Library to get and set the device state +# +# Copyright (C) Microsoft Corporation. 
+# SPDX-License-Identifier: BSD-2-Clause-Patent +# +# MU_CHANGE: new file +## + + +[Defines] +INF_VERSION = 0x00010017 +BASE_NAME = DeviceStateLib +FILE_GUID = 49F18455-FE0D-4DFC-B88B-BEC283BB46DD +VERSION_STRING = 1.0 +MODULE_TYPE = BASE +LIBRARY_CLASS = DeviceStateLib + +[LibraryClasses] +DebugLib +MemoryAllocationLib +BaseMemoryLib +PcdLib + +[Packages] +MdePkg/MdePkg.dec +MdeModulePkg/MdeModulePkg.dec + +[Sources] +DeviceStateLib.c + +[Pcd] +gEfiMdeModulePkgTokenSpaceGuid.PcdDeviceStateBitmask diff --git a/MdeModulePkg/Library/DeviceStateLib/Readme.md b/MdeModulePkg/Library/DeviceStateLib/Readme.md index 4e07c171b0..fcfd9c390a 100644 --- a/MdeModulePkg/Library/DeviceStateLib/Readme.md +++ b/MdeModulePkg/Library/DeviceStateLib/Readme.md @@ -1,32 +1,32 @@ -# DeviceStateLib - -## About - -The DeviceStateLib provides the necessary functions to store platform specific device -states. These device states can then be queried by any element within the boot -environment to enable special code paths. In this library implementation a -bitmask is stored in a PCD to signify what modes are active. - -The default bits in the bitmask are set in DeviceStateLib.h - but each platform -is expected to implement its own header to define the platform specific device -states or to define any of the unused bits: - -* BIT 0: DEVICE_STATE_SECUREBOOT_OFF - UEFI Secure Boot disabled -* BIT 1: DEVICE_STATE_MANUFACTURING_MODE - Device is in an OEM defined - manufacturing mode -* BIT 2: DEVICE_STATE_DEVELOPMENT_BUILD_ENABLED - Device is a development - build. Non-production features might be enabled -* BIT 3: DEVICE_STATE_SOURCE_DEBUG_ENABLED - Source debug mode is enabled - allowing a user to connect and control the device -* BIT 4: DEVICE_STATE_UNDEFINED - Set by the platform -* BIT 5: DEVICE_STATE_UNIT_TEST_MODE - Device has a unit test build. 
Some - features are disabled to allow for unit tests in UEFI Shell -* BIT 24: DEVICE_STATE_PLATFORM_MODE_0 -* BIT 25: DEVICE_STATE_PLATFORM_MODE_1 -* BIT 26: DEVICE_STATE_PLATFORM_MODE_2 -* BIT 27: DEVICE_STATE_PLATFORM_MODE_3 - -## Copyright - -Copyright (C) Microsoft Corporation. -SPDX-License-Identifier: BSD-2-Clause-Patent +# DeviceStateLib + +## About + +The DeviceStateLib provides the necessary functions to store platform specific device +states. These device states can then be queried by any element within the boot +environment to enable special code paths. In this library implementation a +bitmask is stored in a PCD to signify what modes are active. + +The default bits in the bitmask are set in DeviceStateLib.h - but each platform +is expected to implement its own header to define the platform specific device +states or to define any of the unused bits: + +* BIT 0: DEVICE_STATE_SECUREBOOT_OFF - UEFI Secure Boot disabled +* BIT 1: DEVICE_STATE_MANUFACTURING_MODE - Device is in an OEM defined + manufacturing mode +* BIT 2: DEVICE_STATE_DEVELOPMENT_BUILD_ENABLED - Device is a development + build. Non-production features might be enabled +* BIT 3: DEVICE_STATE_SOURCE_DEBUG_ENABLED - Source debug mode is enabled + allowing a user to connect and control the device +* BIT 4: DEVICE_STATE_UNDEFINED - Set by the platform +* BIT 5: DEVICE_STATE_UNIT_TEST_MODE - Device has a unit test build. Some + features are disabled to allow for unit tests in UEFI Shell +* BIT 24: DEVICE_STATE_PLATFORM_MODE_0 +* BIT 25: DEVICE_STATE_PLATFORM_MODE_1 +* BIT 26: DEVICE_STATE_PLATFORM_MODE_2 +* BIT 27: DEVICE_STATE_PLATFORM_MODE_3 + +## Copyright + +Copyright (C) Microsoft Corporation. 
+SPDX-License-Identifier: BSD-2-Clause-Patent diff --git a/MdeModulePkg/Library/MemoryProtectionHobLib/DxeMemoryProtectionHobLib.inf b/MdeModulePkg/Library/MemoryProtectionHobLib/DxeMemoryProtectionHobLib.inf index 0797f2a58f..1e749279f7 100644 --- a/MdeModulePkg/Library/MemoryProtectionHobLib/DxeMemoryProtectionHobLib.inf +++ b/MdeModulePkg/Library/MemoryProtectionHobLib/DxeMemoryProtectionHobLib.inf @@ -1,34 +1,34 @@ -## @file -# DXE library instance to support platform-specific global controls for all memory protections. -# -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = DxeMemoryProtectionHobLib - FILE_GUID = f497f7de-b9ab-4b9f-807e-89778922542d - MODULE_TYPE = UEFI_DRIVER - VERSION_STRING = 1.0 - LIBRARY_CLASS = DxeMemoryProtectionHobLib|DXE_DRIVER DXE_CORE UEFI_APPLICATION - CONSTRUCTOR = DxeMemoryProtectionHobLibConstructor - -# -# VALID_ARCHITECTURES = IA32 X64 AARCH64 -# - -[Sources] - DxeMemoryProtectionHobLib.c - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - -[LibraryClasses] - HobLib - DebugLib - BaseMemoryLib - -[Guids] - gDxeMemoryProtectionSettingsGuid +## @file +# DXE library instance to support platform-specific global controls for all memory protections. +# +# Copyright (c) Microsoft Corporation. 
+# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = DxeMemoryProtectionHobLib + FILE_GUID = f497f7de-b9ab-4b9f-807e-89778922542d + MODULE_TYPE = UEFI_DRIVER + VERSION_STRING = 1.0 + LIBRARY_CLASS = DxeMemoryProtectionHobLib|DXE_DRIVER DXE_CORE UEFI_APPLICATION + CONSTRUCTOR = DxeMemoryProtectionHobLibConstructor + +# +# VALID_ARCHITECTURES = IA32 X64 AARCH64 +# + +[Sources] + DxeMemoryProtectionHobLib.c + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + +[LibraryClasses] + HobLib + DebugLib + BaseMemoryLib + +[Guids] + gDxeMemoryProtectionSettingsGuid diff --git a/MdeModulePkg/Library/MemoryProtectionHobLib/SmmMemoryProtectionHobLib.inf b/MdeModulePkg/Library/MemoryProtectionHobLib/SmmMemoryProtectionHobLib.inf index fd697be518..024f30644c 100644 --- a/MdeModulePkg/Library/MemoryProtectionHobLib/SmmMemoryProtectionHobLib.inf +++ b/MdeModulePkg/Library/MemoryProtectionHobLib/SmmMemoryProtectionHobLib.inf @@ -1,35 +1,35 @@ -## @file -# SMM library instance to support platform-specific global controls for all memory protections. -# -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = SmmMemoryProtectionHobLib - FILE_GUID = dc9666f4-917f-400d-8026-2b3beeeff195 - MODULE_TYPE = DXE_SMM_DRIVER - VERSION_STRING = 1.0 - LIBRARY_CLASS = MmMemoryProtectionHobLib|SMM_CORE DXE_SMM_DRIVER - CONSTRUCTOR = SmmMemoryProtectionHobLibConstructor - -# -# VALID_ARCHITECTURES = IA32 X64 -# - -[Sources] - MmCommonMemoryProtectionHobLib.c - SmmMemoryProtectionHobLib.c - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - -[LibraryClasses] - HobLib - DebugLib - BaseMemoryLib - -[Guids] - gMmMemoryProtectionSettingsGuid +## @file +# SMM library instance to support platform-specific global controls for all memory protections. +# +# Copyright (c) Microsoft Corporation. 
+# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = SmmMemoryProtectionHobLib + FILE_GUID = dc9666f4-917f-400d-8026-2b3beeeff195 + MODULE_TYPE = DXE_SMM_DRIVER + VERSION_STRING = 1.0 + LIBRARY_CLASS = MmMemoryProtectionHobLib|SMM_CORE DXE_SMM_DRIVER + CONSTRUCTOR = SmmMemoryProtectionHobLibConstructor + +# +# VALID_ARCHITECTURES = IA32 X64 +# + +[Sources] + MmCommonMemoryProtectionHobLib.c + SmmMemoryProtectionHobLib.c + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + +[LibraryClasses] + HobLib + DebugLib + BaseMemoryLib + +[Guids] + gMmMemoryProtectionSettingsGuid diff --git a/MdeModulePkg/Library/MemoryProtectionHobLib/StandaloneMmMemoryProtectionHobLib.inf b/MdeModulePkg/Library/MemoryProtectionHobLib/StandaloneMmMemoryProtectionHobLib.inf index 1d5259b378..3cadb5ec6e 100644 --- a/MdeModulePkg/Library/MemoryProtectionHobLib/StandaloneMmMemoryProtectionHobLib.inf +++ b/MdeModulePkg/Library/MemoryProtectionHobLib/StandaloneMmMemoryProtectionHobLib.inf @@ -1,36 +1,36 @@ -## @file -# SMM library instance to support platform-specific global controls for all memory protections. -# -# Copyright (c) Microsoft Corporation. 
-# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = StandaloneMmMemoryProtectionHobLib - FILE_GUID = C0A0D9C4-A249-483A-86EA-D73146D397B3 - MODULE_TYPE = MM_CORE_STANDALONE - PI_SPECIFICATION_VERSION = 0x00010032 - VERSION_STRING = 1.0 - LIBRARY_CLASS = MmMemoryProtectionHobLib|MM_CORE_STANDALONE MM_STANDALONE - CONSTRUCTOR = StandaloneMmMemoryProtectionHobLibConstructor - -# -# VALID_ARCHITECTURES = IA32 X64 AARCH64 -# - -[Sources] - MmCommonMemoryProtectionHobLib.c - StandaloneMmMemoryProtectionHobLib.c - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - -[LibraryClasses] - HobLib - DebugLib - BaseMemoryLib - -[Guids] - gMmMemoryProtectionSettingsGuid +## @file +# SMM library instance to support platform-specific global controls for all memory protections. +# +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = StandaloneMmMemoryProtectionHobLib + FILE_GUID = C0A0D9C4-A249-483A-86EA-D73146D397B3 + MODULE_TYPE = MM_CORE_STANDALONE + PI_SPECIFICATION_VERSION = 0x00010032 + VERSION_STRING = 1.0 + LIBRARY_CLASS = MmMemoryProtectionHobLib|MM_CORE_STANDALONE MM_STANDALONE + CONSTRUCTOR = StandaloneMmMemoryProtectionHobLibConstructor + +# +# VALID_ARCHITECTURES = IA32 X64 AARCH64 +# + +[Sources] + MmCommonMemoryProtectionHobLib.c + StandaloneMmMemoryProtectionHobLib.c + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + +[LibraryClasses] + HobLib + DebugLib + BaseMemoryLib + +[Guids] + gMmMemoryProtectionSettingsGuid diff --git a/MdeModulePkg/Library/MemoryProtectionHobLibNull/DxeMemoryProtectionHobLibNull.inf b/MdeModulePkg/Library/MemoryProtectionHobLibNull/DxeMemoryProtectionHobLibNull.inf index 463e6a8c5d..6a3166a23b 100644 --- a/MdeModulePkg/Library/MemoryProtectionHobLibNull/DxeMemoryProtectionHobLibNull.inf +++ 
b/MdeModulePkg/Library/MemoryProtectionHobLibNull/DxeMemoryProtectionHobLibNull.inf @@ -1,25 +1,25 @@ -## @file -# NULL library which defines gDxeMps -# -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = DxeMemoryProtectionHobLibNull - FILE_GUID = a35c1dc1-0769-421b-a8bc-9db69fae4334 - MODULE_TYPE = BASE - VERSION_STRING = 1.0 - LIBRARY_CLASS = DxeMemoryProtectionHobLib - -# -# VALID_ARCHITECTURES = IA32 X64 AARCH64 -# - -[Sources] - DxeMemoryProtectionHobLibNull.c - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec +## @file +# NULL library which defines gDxeMps +# +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = DxeMemoryProtectionHobLibNull + FILE_GUID = a35c1dc1-0769-421b-a8bc-9db69fae4334 + MODULE_TYPE = BASE + VERSION_STRING = 1.0 + LIBRARY_CLASS = DxeMemoryProtectionHobLib + +# +# VALID_ARCHITECTURES = IA32 X64 AARCH64 +# + +[Sources] + DxeMemoryProtectionHobLibNull.c + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec diff --git a/MdeModulePkg/Library/MemoryProtectionHobLibNull/MmMemoryProtectionHobLibNull.inf b/MdeModulePkg/Library/MemoryProtectionHobLibNull/MmMemoryProtectionHobLibNull.inf index 19bc3d05e3..61f50921ee 100644 --- a/MdeModulePkg/Library/MemoryProtectionHobLibNull/MmMemoryProtectionHobLibNull.inf +++ b/MdeModulePkg/Library/MemoryProtectionHobLibNull/MmMemoryProtectionHobLibNull.inf @@ -1,26 +1,26 @@ -## @file -# NULL library which defines gMmMps -# -# Copyright (c) Microsoft Corporation. 
-# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = MmMemoryProtectionHobLibNull - FILE_GUID = 4e3f6fd9-4ab5-4911-b80b-009d3338b4b2 - MODULE_TYPE = BASE - VERSION_STRING = 1.0 - LIBRARY_CLASS = MmMemoryProtectionHobLib - -# -# VALID_ARCHITECTURES = IA32 X64 AARCH64 -# - -[Sources] - MmMemoryProtectionHobLibNull.c - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - +## @file +# NULL library which defines gMmMps +# +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = MmMemoryProtectionHobLibNull + FILE_GUID = 4e3f6fd9-4ab5-4911-b80b-009d3338b4b2 + MODULE_TYPE = BASE + VERSION_STRING = 1.0 + LIBRARY_CLASS = MmMemoryProtectionHobLib + +# +# VALID_ARCHITECTURES = IA32 X64 AARCH64 +# + +[Sources] + MmMemoryProtectionHobLibNull.c + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + diff --git a/MdeModulePkg/Library/MemoryTypeInfoSecVarCheckLib/MemoryTypeInfoSecVarCheckLib.inf b/MdeModulePkg/Library/MemoryTypeInfoSecVarCheckLib/MemoryTypeInfoSecVarCheckLib.inf index 3ae746a3c8..a057a33eec 100644 --- a/MdeModulePkg/Library/MemoryTypeInfoSecVarCheckLib/MemoryTypeInfoSecVarCheckLib.inf +++ b/MdeModulePkg/Library/MemoryTypeInfoSecVarCheckLib/MemoryTypeInfoSecVarCheckLib.inf @@ -1,45 +1,45 @@ -## @file -# NULL class library to register var check handler and variable property set for MemoryTypeInformation variable. -# -# Copyright (c) 2018, Intel Corporation. All rights reserved.
-# Copyright (C) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -# -## - -# MU_CHANGE TCBZ1086 [WHOLE FILE] - Mitigate potential system brick due to uefi MemoryTypeInformation var changes - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = MemoryTypeInfoSecVarCheckLib - FILE_GUID = C69D75E8-E39F-4F79-9D74-50B8C759D09B - MODULE_TYPE = BASE - VERSION_STRING = 1.0 - LIBRARY_CLASS = MemoryTypeInfoSecVarCheckLib|DXE_RUNTIME_DRIVER DXE_SMM_DRIVER MM_STANDALONE - CONSTRUCTOR = MemoryTypeInfoSecVarCheckLibConstructor - -# -# The following information is for reference only and not required by the build tools. -# -# VALID_ARCHITECTURES = IA32 X64 -# - -[Sources] - MemoryTypeInfoSecVarCheckLib.c - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - -[LibraryClasses] - BaseLib - BaseMemoryLib - DebugLib - VarCheckLib - SafeIntLib - -[Guids] - gEfiMemoryTypeInformationGuid ## CONSUMES ## Variable:L"MemoryTypeInformation" - -[Pcd] - gEfiMdeModulePkgTokenSpaceGuid.PcdMaxMemoryTypeInfoPages ## CONSUMES +## @file +# NULL class library to register var check handler and variable property set for MemoryTypeInformation variable. +# +# Copyright (c) 2018, Intel Corporation. All rights reserved.
+# Copyright (C) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +# +## + +# MU_CHANGE TCBZ1086 [WHOLE FILE] - Mitigate potential system brick due to uefi MemoryTypeInformation var changes + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = MemoryTypeInfoSecVarCheckLib + FILE_GUID = C69D75E8-E39F-4F79-9D74-50B8C759D09B + MODULE_TYPE = BASE + VERSION_STRING = 1.0 + LIBRARY_CLASS = MemoryTypeInfoSecVarCheckLib|DXE_RUNTIME_DRIVER DXE_SMM_DRIVER MM_STANDALONE + CONSTRUCTOR = MemoryTypeInfoSecVarCheckLibConstructor + +# +# The following information is for reference only and not required by the build tools. +# +# VALID_ARCHITECTURES = IA32 X64 +# + +[Sources] + MemoryTypeInfoSecVarCheckLib.c + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + +[LibraryClasses] + BaseLib + BaseMemoryLib + DebugLib + VarCheckLib + SafeIntLib + +[Guids] + gEfiMemoryTypeInformationGuid ## CONSUMES ## Variable:L"MemoryTypeInformation" + +[Pcd] + gEfiMdeModulePkgTokenSpaceGuid.PcdMaxMemoryTypeInfoPages ## CONSUMES diff --git a/MdeModulePkg/Library/MemoryTypeInformationChangeLibNull/MemoryTypeInformationChangeLibNull.inf b/MdeModulePkg/Library/MemoryTypeInformationChangeLibNull/MemoryTypeInformationChangeLibNull.inf index cbe538cbaf..5ac6c87721 100644 --- a/MdeModulePkg/Library/MemoryTypeInformationChangeLibNull/MemoryTypeInformationChangeLibNull.inf +++ b/MdeModulePkg/Library/MemoryTypeInformationChangeLibNull/MemoryTypeInformationChangeLibNull.inf @@ -1,26 +1,26 @@ -## @file MemoryTypeInformationChangeLibNull.inf -# Null library, returns success. -## -# Copyright (c) Microsoft Corporation. 
-# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -# MU_CHANGE NEW FILE - -[Defines] - INF_VERSION = 0x00010017 - BASE_NAME = MemoryTypeInformationChangeLibNull - FILE_GUID = eb4cc801-4301-43ba-979a-d4153c55f43c - VERSION_STRING = 1.0 - MODULE_TYPE = BASE - LIBRARY_CLASS = MemoryTypeInformationChangeLib - -[Sources] - MemoryTypeInformationChangeLibNull.c - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - -[LibraryClasses] - BaseLib +## @file MemoryTypeInformationChangeLibNull.inf +# Null library, returns success. +## +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +# MU_CHANGE NEW FILE + +[Defines] + INF_VERSION = 0x00010017 + BASE_NAME = MemoryTypeInformationChangeLibNull + FILE_GUID = eb4cc801-4301-43ba-979a-d4153c55f43c + VERSION_STRING = 1.0 + MODULE_TYPE = BASE + LIBRARY_CLASS = MemoryTypeInformationChangeLib + +[Sources] + MemoryTypeInformationChangeLibNull.c + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + +[LibraryClasses] + BaseLib diff --git a/MdeModulePkg/Library/ParallelLzmaCustomDecompressLib/ParallelLzmaCustomDecompressLib.inf b/MdeModulePkg/Library/ParallelLzmaCustomDecompressLib/ParallelLzmaCustomDecompressLib.inf index 844a8a9dd8..50243fb678 100644 --- a/MdeModulePkg/Library/ParallelLzmaCustomDecompressLib/ParallelLzmaCustomDecompressLib.inf +++ b/MdeModulePkg/Library/ParallelLzmaCustomDecompressLib/ParallelLzmaCustomDecompressLib.inf @@ -1,42 +1,42 @@ -## @file -# ParallelLzmaCustomDecompressLib produces the Parallel LZMA custom decompression algorithm. -# -# This relies on the standard LzmaCustomDecompress lib to do the work and expects to be -# linked against it with a NULL| library instance. 
-# -# Copyright (c) Microsoft Corporation -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 1.27 - BASE_NAME = ParallelLzmaDecompressLib - FILE_GUID = 16979EFB-EC84-4390-BC4E-923B69B02CDA - MODULE_TYPE = BASE - VERSION_STRING = 1.0 - LIBRARY_CLASS = NULL - CONSTRUCTOR = ParallelLzmaDecompressLibConstructor - -# -# The following information is for reference only and not required by the build tools. -# -# VALID_ARCHITECTURES = IA32 X64 AARCH64 ARM -# - -[Sources] - ParallelLzmaCustomDecompressLib.c - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - -[Guids] - gParallelLzmaCustomDecompressGuid ## PRODUCES # specifies LZMA custom decompress algorithm. - gParallelLzmaCustomDecompressHobGuid ## CONSUMES - -[LibraryClasses] - BaseLib - BaseMemoryLib - DebugLib - ExtractGuidedSectionLib - HobLib +## @file +# ParallelLzmaCustomDecompressLib produces the Parallel LZMA custom decompression algorithm. +# +# This relies on the standard LzmaCustomDecompress lib to do the work and expects to be +# linked against it with a NULL| library instance. +# +# Copyright (c) Microsoft Corporation +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 1.27 + BASE_NAME = ParallelLzmaDecompressLib + FILE_GUID = 16979EFB-EC84-4390-BC4E-923B69B02CDA + MODULE_TYPE = BASE + VERSION_STRING = 1.0 + LIBRARY_CLASS = NULL + CONSTRUCTOR = ParallelLzmaDecompressLibConstructor + +# +# The following information is for reference only and not required by the build tools. +# +# VALID_ARCHITECTURES = IA32 X64 AARCH64 ARM +# + +[Sources] + ParallelLzmaCustomDecompressLib.c + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + +[Guids] + gParallelLzmaCustomDecompressGuid ## PRODUCES # specifies LZMA custom decompress algorithm. 
+ gParallelLzmaCustomDecompressHobGuid ## CONSUMES + +[LibraryClasses] + BaseLib + BaseMemoryLib + DebugLib + ExtractGuidedSectionLib + HobLib diff --git a/MdeModulePkg/Library/SecurityLockAuditDebugMessageLib/SecurityLockAuditDebugMessageLib.inf b/MdeModulePkg/Library/SecurityLockAuditDebugMessageLib/SecurityLockAuditDebugMessageLib.inf index a1bb55039e..144b79a480 100644 --- a/MdeModulePkg/Library/SecurityLockAuditDebugMessageLib/SecurityLockAuditDebugMessageLib.inf +++ b/MdeModulePkg/Library/SecurityLockAuditDebugMessageLib/SecurityLockAuditDebugMessageLib.inf @@ -1,27 +1,27 @@ -## @file -# Library that implements logging and reporting for security locks -# Using DebugLib -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# SPDX-License-Identifier: BSD-2-Clause-Patent -# -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = SecurityLockAuditDebugMessageLib - FILE_GUID = 459d0456-d6be-458e-9cc8-e9b21745f9aa - MODULE_TYPE = BASE - VERSION_STRING = 1.0 - LIBRARY_CLASS = SecurityLockAuditLib - -[Sources.common] - SecurityLockAuditDebugMessageLib.c - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - -[LibraryClasses] - BaseLib - DebugLib +## @file +# Library that implements logging and reporting for security locks +# Using DebugLib +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# SPDX-License-Identifier: BSD-2-Clause-Patent +# +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = SecurityLockAuditDebugMessageLib + FILE_GUID = 459d0456-d6be-458e-9cc8-e9b21745f9aa + MODULE_TYPE = BASE + VERSION_STRING = 1.0 + LIBRARY_CLASS = SecurityLockAuditLib + +[Sources.common] + SecurityLockAuditDebugMessageLib.c + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + +[LibraryClasses] + BaseLib + DebugLib diff --git a/MdeModulePkg/Library/SecurityLockAuditLibNull/SecurityLockAuditLibNull.inf b/MdeModulePkg/Library/SecurityLockAuditLibNull/SecurityLockAuditLibNull.inf index b30494751c..82dd361e2f 100644 --- a/MdeModulePkg/Library/SecurityLockAuditLibNull/SecurityLockAuditLibNull.inf +++ b/MdeModulePkg/Library/SecurityLockAuditLibNull/SecurityLockAuditLibNull.inf @@ -1,25 +1,25 @@ -## @file -# Null library for security lock logging that does nothing but meet compile requirements -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# SPDX-License-Identifier: BSD-2-Clause-Patent -# -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = SecurityLockAuditLibNull - FILE_GUID = 1d333a6a-90a7-45cb-9897-0a172ee35066 - MODULE_TYPE = BASE - VERSION_STRING = 1.0 - LIBRARY_CLASS = SecurityLockAuditLib - -[Sources.common] - SecurityLockAuditLibNull.c - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - -[LibraryClasses] - BaseLib +## @file +# Null library for security lock logging that does nothing but meet compile requirements +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# SPDX-License-Identifier: BSD-2-Clause-Patent +# +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = SecurityLockAuditLibNull + FILE_GUID = 1d333a6a-90a7-45cb-9897-0a172ee35066 + MODULE_TYPE = BASE + VERSION_STRING = 1.0 + LIBRARY_CLASS = SecurityLockAuditLib + +[Sources.common] + SecurityLockAuditLibNull.c + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + +[LibraryClasses] + BaseLib diff --git a/MdeModulePkg/Library/VarCheckPolicyLib/VarCheckPolicyLibVariableDxe.inf b/MdeModulePkg/Library/VarCheckPolicyLib/VarCheckPolicyLibVariableDxe.inf index 828273e112..e5e315ff0d 100644 --- a/MdeModulePkg/Library/VarCheckPolicyLib/VarCheckPolicyLibVariableDxe.inf +++ b/MdeModulePkg/Library/VarCheckPolicyLib/VarCheckPolicyLibVariableDxe.inf @@ -1,31 +1,31 @@ -## @file VarCheckPolicyLib.inf -# This is an instance of a VarCheck lib that leverages the business logic behind -# the VariablePolicy code to make its decisions. -# -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = VarCheckPolicyLibVariableDxe - FILE_GUID = C17DF9DB-A744-4011-A796-4EDA2ED97C2F - MODULE_TYPE = DXE_RUNTIME_DRIVER - VERSION_STRING = 1.0 - LIBRARY_CLASS = NULL|DXE_RUNTIME_DRIVER - CONSTRUCTOR = VarCheckPolicyLibConstructor - - -[Sources] - VarCheckPolicyLibVariableDxe.c - - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - - -[LibraryClasses] - DebugLib - VarCheckLib - VariablePolicyLib +## @file VarCheckPolicyLib.inf +# This is an instance of a VarCheck lib that leverages the business logic behind +# the VariablePolicy code to make its decisions. +# +# Copyright (c) Microsoft Corporation. 
+# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = VarCheckPolicyLibVariableDxe + FILE_GUID = C17DF9DB-A744-4011-A796-4EDA2ED97C2F + MODULE_TYPE = DXE_RUNTIME_DRIVER + VERSION_STRING = 1.0 + LIBRARY_CLASS = NULL|DXE_RUNTIME_DRIVER + CONSTRUCTOR = VarCheckPolicyLibConstructor + + +[Sources] + VarCheckPolicyLibVariableDxe.c + + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + + +[LibraryClasses] + DebugLib + VarCheckLib + VariablePolicyLib diff --git a/MdeModulePkg/Library/VariablePolicyLib/VariablePolicyUnitTest/VariablePolicyUnitTest.inf b/MdeModulePkg/Library/VariablePolicyLib/VariablePolicyUnitTest/VariablePolicyUnitTest.inf index 10b1256abb..06489f21d6 100644 --- a/MdeModulePkg/Library/VariablePolicyLib/VariablePolicyUnitTest/VariablePolicyUnitTest.inf +++ b/MdeModulePkg/Library/VariablePolicyLib/VariablePolicyUnitTest/VariablePolicyUnitTest.inf @@ -1,45 +1,45 @@ -## @file VariablePolicyUnitTest.inf -# UnitTest for... -# Business logic for Variable Policy enforcement. -# -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - - -[Defines] - INF_VERSION = 0x00010006 - BASE_NAME = VariablePolicyUnitTest - FILE_GUID = 1200A2E4-D756-418C-9768-528C2D181A98 - MODULE_TYPE = HOST_APPLICATION - VERSION_STRING = 1.0 - -# -# The following information is for reference only and not required by the build tools. -# -# VALID_ARCHITECTURES = IA32 X64 ARM AARCH64 -# - -[Sources] - VariablePolicyUnitTest.c - - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - UnitTestFrameworkPkg/UnitTestFrameworkPkg.dec - - -[LibraryClasses] - BaseLib - DebugLib - UnitTestLib - PrintLib - VariablePolicyLib - BaseMemoryLib - MemoryAllocationLib - - -[BuildOptions] - MSFT:NOOPT_*_*_CC_FLAGS = -DINTERNAL_UNIT_TEST - GCC:NOOPT_*_*_CC_FLAGS = -DINTERNAL_UNIT_TEST +## @file VariablePolicyUnitTest.inf +# UnitTest for... +# Business logic for Variable Policy enforcement. 
+# +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + + +[Defines] + INF_VERSION = 0x00010006 + BASE_NAME = VariablePolicyUnitTest + FILE_GUID = 1200A2E4-D756-418C-9768-528C2D181A98 + MODULE_TYPE = HOST_APPLICATION + VERSION_STRING = 1.0 + +# +# The following information is for reference only and not required by the build tools. +# +# VALID_ARCHITECTURES = IA32 X64 ARM AARCH64 +# + +[Sources] + VariablePolicyUnitTest.c + + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + UnitTestFrameworkPkg/UnitTestFrameworkPkg.dec + + +[LibraryClasses] + BaseLib + DebugLib + UnitTestLib + PrintLib + VariablePolicyLib + BaseMemoryLib + MemoryAllocationLib + + +[BuildOptions] + MSFT:NOOPT_*_*_CC_FLAGS = -DINTERNAL_UNIT_TEST + GCC:NOOPT_*_*_CC_FLAGS = -DINTERNAL_UNIT_TEST diff --git a/MdeModulePkg/Test/ShellTest/VariablePolicyFuncTestApp/Readme.md b/MdeModulePkg/Test/ShellTest/VariablePolicyFuncTestApp/Readme.md index 2cc57b063a..e4a2d753a8 100644 --- a/MdeModulePkg/Test/ShellTest/VariablePolicyFuncTestApp/Readme.md +++ b/MdeModulePkg/Test/ShellTest/VariablePolicyFuncTestApp/Readme.md @@ -1,67 +1,67 @@ -# Variable Policy Unit Tests - -## Copyright - -Copyright (C) Microsoft Corporation. -SPDX-License-Identifier: BSD-2-Clause-Patent - -## About This Test - -This test verifies functionality of the Variable Policy Protocol by registering various variable policies and exercising -them, as well as tests locking the policy, disabling it, and dumping the policy entries. - -Only policies that are created as a part of this test will be tested. - -1. Try getting test context, if empty then get VP protocol, confirm that VP is not disabled by calling IsVariablePolicyEnabled. - Log VP revision. -2. "No lock" policies: - * check minsize enforcement - * check maxsize enforcement - * check musthave attr enforcement - * check canthave attr enforcement - * check one of the above with empty string policy i.e. 
name wildcard - * check another one of the above with a "#" containing policy string - * check policy prioritization by having a namespace-wide policy, a policy with a # wildcard, - and a one-var specific policy and testing which one is enforced -3. "Lock now" policies (means if the var doesn't exist, it won't be created; if one exists, it - can't be updated): - * test a policy for an already existing variable, verify we can't write into that variable - * create a policy for a non-existing variable and attempt to register such var -4. "Lock on create" policies (means the var can still be created, but no updates later, existing - vars can't be updated): - * create a var, lock it with LockOnCreate, attempt to update its contents - * create LockOnCreate VP, attempt to create var with invalid size, then invalid attr, then create - valid var, attempt to update its contents -5. "Lock on var state" policies (means the var protected by this policy can't be created or updated - once the trigger is set) - * create VP, trigger lock with a valid var, attempt to create a locked var, then modify the - trigger var, create locked var - * create VP, create targeted var, modify it, trigger lock, attempt to modify var - * create VP, trigger lock with invalid (larger than one byte) var, see if VPE allows creation - of the locked var (it should allow) - * create VP, set locking var with wrong value, see if VPE allows creation of the locked var (should allow) -6. Attempt registering invalid policy entries - * invalid required and banned attributes - * large min size - let's say 2GB - * max size equal to 0 - * invalid policy type -7. Exercise dumping policy. No need to check the validity of the dump blob. -8. Test registering a policy with a random version. -9. Lock VPE, make sure old policies are enforced, new ones can't be registered. - * Register a LockOnCreate policy - * Lock VPE - * Test locking it again. 
- * Verify one of the prior policies is enforced - * Make sure we can create variables even if those are protected by LockOnCreate policy, after locking the VPE - * Attempt to register new policies - * Make sure can't disable VPE - * Cleanup: save context and reboot -10. Disable variable policy and try some things - * Locate Variable Policy Protocol - * Make sure VP is enabled - * Register a policy - * Disable VPE - * Call IsVariablePolicyEnabled to confirm it's disabled. - * Make sure can't lock policy - * Make sure the policy from a is no longer enforced - * Final cleanup: delete vars that were created in some earlier test suites +# Variable Policy Unit Tests + +## Copyright + +Copyright (C) Microsoft Corporation. +SPDX-License-Identifier: BSD-2-Clause-Patent + +## About This Test + +This test verifies functionality of the Variable Policy Protocol by registering various variable policies and exercising +them, as well as tests locking the policy, disabling it, and dumping the policy entries. + +Only policies that are created as a part of this test will be tested. + +1. Try getting test context, if empty then get VP protocol, confirm that VP is not disabled by calling IsVariablePolicyEnabled. + Log VP revision. +2. "No lock" policies: + * check minsize enforcement + * check maxsize enforcement + * check musthave attr enforcement + * check canthave attr enforcement + * check one of the above with empty string policy i.e. name wildcard + * check another one of the above with a "#" containing policy string + * check policy prioritization by having a namespace-wide policy, a policy with a # wildcard, + and a one-var specific policy and testing which one is enforced +3. "Lock now" policies (means if the var doesn't exist, it won't be created; if one exists, it + can't be updated): + * test a policy for an already existing variable, verify we can't write into that variable + * create a policy for a non-existing variable and attempt to register such var +4. 
"Lock on create" policies (means the var can still be created, but no updates later, existing + vars can't be updated): + * create a var, lock it with LockOnCreate, attempt to update its contents + * create LockOnCreate VP, attempt to create var with invalid size, then invalid attr, then create + valid var, attempt to update its contents +5. "Lock on var state" policies (means the var protected by this policy can't be created or updated + once the trigger is set) + * create VP, trigger lock with a valid var, attempt to create a locked var, then modify the + trigger var, create locked var + * create VP, create targeted var, modify it, trigger lock, attempt to modify var + * create VP, trigger lock with invalid (larger than one byte) var, see if VPE allows creation + of the locked var (it should allow) + * create VP, set locking var with wrong value, see if VPE allows creation of the locked var (should allow) +6. Attempt registering invalid policy entries + * invalid required and banned attributes + * large min size - let's say 2GB + * max size equal to 0 + * invalid policy type +7. Exercise dumping policy. No need to check the validity of the dump blob. +8. Test registering a policy with a random version. +9. Lock VPE, make sure old policies are enforced, new ones can't be registered. + * Register a LockOnCreate policy + * Lock VPE + * Test locking it again. + * Verify one of the prior policies is enforced + * Make sure we can create variables even if those are protected by LockOnCreate policy, after locking the VPE + * Attempt to register new policies + * Make sure can't disable VPE + * Cleanup: save context and reboot +10. Disable variable policy and try some things + * Locate Variable Policy Protocol + * Make sure VP is enabled + * Register a policy + * Disable VPE + * Call IsVariablePolicyEnabled to confirm it's disabled. 
+ * Make sure can't lock policy + * Make sure the policy from a is no longer enforced + * Final cleanup: delete vars that were created in some earlier test suites diff --git a/MdeModulePkg/Test/ShellTest/VariablePolicyFuncTestApp/VariablePolicyFuncTestApp.inf b/MdeModulePkg/Test/ShellTest/VariablePolicyFuncTestApp/VariablePolicyFuncTestApp.inf index 85edafdf42..7f605a2b99 100644 --- a/MdeModulePkg/Test/ShellTest/VariablePolicyFuncTestApp/VariablePolicyFuncTestApp.inf +++ b/MdeModulePkg/Test/ShellTest/VariablePolicyFuncTestApp/VariablePolicyFuncTestApp.inf @@ -1,48 +1,48 @@ -## @file -# Uefi Shell based Application that unit tests the Variable Policy Protocol -# -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = VariablePolicyFuncTestApp - FILE_GUID = B653C4C3-3FCC-4B6C-8051-5F692AEAECBA - MODULE_TYPE = UEFI_APPLICATION - VERSION_STRING = 1.0 - ENTRY_POINT = UefiMain - -# -# The following information is for reference only and not required by the build tools. -# -# VALID_ARCHITECTURES = X64 AARCH64 -# - -[Sources] - VariablePolicyFuncTestApp.c - VariablePolicyFuncTestAppData.c - VariablePolicyFuncTestInternal.h - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - UnitTestFrameworkPkg/UnitTestFrameworkPkg.dec - -[LibraryClasses] - UefiApplicationEntryPoint - BaseLib - BaseMemoryLib - UnitTestLib - UnitTestBootLib - PrintLib - UefiBootServicesTableLib - UefiRuntimeServicesTableLib - MemoryAllocationLib - VariablePolicyHelperLib - -[Guids] - gEfiCertPkcs7Guid - -[Protocols] - gEdkiiVariablePolicyProtocolGuid +## @file +# Uefi Shell based Application that unit tests the Variable Policy Protocol +# +# Copyright (c) Microsoft Corporation. 
+# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = VariablePolicyFuncTestApp + FILE_GUID = B653C4C3-3FCC-4B6C-8051-5F692AEAECBA + MODULE_TYPE = UEFI_APPLICATION + VERSION_STRING = 1.0 + ENTRY_POINT = UefiMain + +# +# The following information is for reference only and not required by the build tools. +# +# VALID_ARCHITECTURES = X64 AARCH64 +# + +[Sources] + VariablePolicyFuncTestApp.c + VariablePolicyFuncTestAppData.c + VariablePolicyFuncTestInternal.h + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + UnitTestFrameworkPkg/UnitTestFrameworkPkg.dec + +[LibraryClasses] + UefiApplicationEntryPoint + BaseLib + BaseMemoryLib + UnitTestLib + UnitTestBootLib + PrintLib + UefiBootServicesTableLib + UefiRuntimeServicesTableLib + MemoryAllocationLib + VariablePolicyHelperLib + +[Guids] + gEfiCertPkcs7Guid + +[Protocols] + gEdkiiVariablePolicyProtocolGuid diff --git a/MdeModulePkg/Universal/Variable/RuntimeDxe/RuntimeDxeUnitTest/VariableRuntimeDxeUnitTest.inf b/MdeModulePkg/Universal/Variable/RuntimeDxe/RuntimeDxeUnitTest/VariableRuntimeDxeUnitTest.inf index 8031e35555..dc1b418599 100644 --- a/MdeModulePkg/Universal/Variable/RuntimeDxe/RuntimeDxeUnitTest/VariableRuntimeDxeUnitTest.inf +++ b/MdeModulePkg/Universal/Variable/RuntimeDxe/RuntimeDxeUnitTest/VariableRuntimeDxeUnitTest.inf @@ -1,159 +1,159 @@ -## @file VariableRuntimeDxeUnitTest.inf -# Host-based unit test for the VariableRuntimeDxe driver. Will -# use mocks for all external interfaces. -# -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - - -[Defines] - INF_VERSION = 0x00010017 - BASE_NAME = VariableRuntimeDxeUnitTest - FILE_GUID = E85B8F09-1BA3-4C36-B871-61D40F1C466F - VERSION_STRING = 1.0 - MODULE_TYPE = HOST_APPLICATION - -# -# The following information is for reference only and not required by the build tools. 
-# -# VALID_ARCHITECTURES = IA32 X64 -# - - -[Sources] - # Test Harness - SctShim.c - SctShim.h - TestData.c - VariableRuntimeDxeUnitTest.c - VariableRuntimeDxeUnitTest.h - VariableTestDataHelpers.c - BlackBoxTest/Guid.c - BlackBoxTest/Guid.h - BlackBoxTest/VariableServicesBBTestConformance.c - BlackBoxTest/VariableServicesBBTestFunction.c - BlackBoxTest/VariableServicesBBTestMain.h - BlackBoxTest/VariableServicesBBTestStress.c - SctInclude/StandardTestLibrary.h - SctInclude/TestLoggingLibrary.h - - # Files Under Test - ../Measurement.c - ../PrivilegePolymorphic.h - ../Reclaim.c - ../SpeculationBarrierDxe.c - ../TcgMorLockDxe.c - ../Variable.c - ../Variable.h - ../VariableExLib.c - ../VariableNonVolatile.c - ../VariableNonVolatile.h - ../VariableParsing.c - ../VariableParsing.h - ../VariableRuntimeCache.c - ../VariableRuntimeCache.h - - -[Packages] - MdePkg/MdePkg.dec - MdeModulePkg/MdeModulePkg.dec - - -[LibraryClasses] - UnitTestLib - MemoryAllocationLib - BaseLib - SynchronizationLib - UefiLib - UefiBootServicesTableLib - BaseMemoryLib - DebugLib - UefiRuntimeLib - DxeServicesTableLib - PcdLib - TpmMeasurementLib - AuthVariableLib - VarCheckLib - VariablePolicyLib - VariablePolicyHelperLib - VariableFlashInfoLib - PrintLib - - -[Protocols] - gEdkiiVariablePolicyProtocolGuid ## CONSUMES - - -[Guids] - gVariableFlashInfoHobGuid - gEfiHardwareErrorVariableGuid - - ## SOMETIMES_CONSUMES ## GUID # Signature of Variable store header - ## SOMETIMES_PRODUCES ## GUID # Signature of Variable store header - ## SOMETIMES_CONSUMES ## HOB - ## SOMETIMES_PRODUCES ## SystemTable - gEfiAuthenticatedVariableGuid - - ## SOMETIMES_CONSUMES ## GUID # Signature of Variable store header - ## SOMETIMES_PRODUCES ## GUID # Signature of Variable store header - ## SOMETIMES_CONSUMES ## HOB - ## SOMETIMES_PRODUCES ## SystemTable - gEfiVariableGuid - - ## SOMETIMES_CONSUMES ## Variable:L"PlatformLang" - ## SOMETIMES_PRODUCES ## Variable:L"PlatformLang" - ## SOMETIMES_CONSUMES ## 
Variable:L"Lang" - ## SOMETIMES_PRODUCES ## Variable:L"Lang" - ## SOMETIMES_CONSUMES ## Variable:L"PK" - ## SOMETIMES_CONSUMES ## Variable:L"KEK" - ## SOMETIMES_CONSUMES ## Variable:L"SecureBoot" - gEfiGlobalVariableGuid - - gEfiMemoryOverwriteControlDataGuid ## SOMETIMES_CONSUMES ## Variable:L"MemoryOverwriteRequestControl" - gEfiMemoryOverwriteRequestControlLockGuid ## SOMETIMES_PRODUCES ## Variable:L"MemoryOverwriteRequestControlLock" - - gEfiEventVirtualAddressChangeGuid ## CONSUMES ## Event - gEfiSystemNvDataFvGuid ## CONSUMES ## GUID - gEfiEndOfDxeEventGroupGuid ## CONSUMES ## Event - gEdkiiFaultTolerantWriteGuid ## SOMETIMES_CONSUMES ## HOB - - ## SOMETIMES_CONSUMES ## Variable:L"VarErrorFlag" - ## SOMETIMES_PRODUCES ## Variable:L"VarErrorFlag" - gEdkiiVarErrorFlagGuid - - ## SOMETIMES_CONSUMES ## Variable:L"db" - ## SOMETIMES_CONSUMES ## Variable:L"dbx" - ## SOMETIMES_CONSUMES ## Variable:L"dbt" - gEfiImageSecurityDatabaseGuid - ## SOMETIMES_CONSUMES ## Variable:L"devdb" - gEfiDeviceSignatureDatabaseGuid - -[Pcd] - gEfiMdeModulePkgTokenSpaceGuid.PcdFlashNvStorageVariableSize ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdFlashNvStorageVariableBase ## SOMETIMES_CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdFlashNvStorageVariableBase64 ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdMaxVariableSize ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdMaxAuthVariableSize ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdMaxVolatileVariableSize ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdMaxHardwareErrorVariableSize ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdVariableStoreSize ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdHwErrStorageSize ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdMaxUserNvVariableSpaceSize ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdBoottimeReservedNvVariableSpaceSize ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdReclaimVariableSpaceAtEndOfDxe ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdEmuVariableNvModeEnable ## 
SOMETIMES_CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdEmuVariableNvStoreReserved ## SOMETIMES_CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdTcgPfpMeasurementRevision ## CONSUMES - gEfiMdeModulePkgTokenSpaceGuid.PcdEnableSpdmDeviceAuthentication ## PRODUCES AND CONSUMES - -[FeaturePcd] - gEfiMdeModulePkgTokenSpaceGuid.PcdVariableCollectStatistics ## CONSUMES # statistic the information of variable. - gEfiMdePkgTokenSpaceGuid.PcdUefiVariableDefaultLangDeprecate ## CONSUMES # Auto update PlatformLang/Lang - - - -[BuildOptions] - # NOTE: For some reason the MSABI VA_ARGS funcs are causing a SegFault in Linux - GCC:NOOPT_*_*_CC_FLAGS = -include Uefi.h -DNO_MSABI_VA_FUNCS - MSFT:NOOPT_*_*_CC_FLAGS = /FIUefi.h +## @file VariableRuntimeDxeUnitTest.inf +# Host-based unit test for the VariableRuntimeDxe driver. Will +# use mocks for all external interfaces. +# +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + + +[Defines] + INF_VERSION = 0x00010017 + BASE_NAME = VariableRuntimeDxeUnitTest + FILE_GUID = E85B8F09-1BA3-4C36-B871-61D40F1C466F + VERSION_STRING = 1.0 + MODULE_TYPE = HOST_APPLICATION + +# +# The following information is for reference only and not required by the build tools. 
+# +# VALID_ARCHITECTURES = IA32 X64 +# + + +[Sources] + # Test Harness + SctShim.c + SctShim.h + TestData.c + VariableRuntimeDxeUnitTest.c + VariableRuntimeDxeUnitTest.h + VariableTestDataHelpers.c + BlackBoxTest/Guid.c + BlackBoxTest/Guid.h + BlackBoxTest/VariableServicesBBTestConformance.c + BlackBoxTest/VariableServicesBBTestFunction.c + BlackBoxTest/VariableServicesBBTestMain.h + BlackBoxTest/VariableServicesBBTestStress.c + SctInclude/StandardTestLibrary.h + SctInclude/TestLoggingLibrary.h + + # Files Under Test + ../Measurement.c + ../PrivilegePolymorphic.h + ../Reclaim.c + ../SpeculationBarrierDxe.c + ../TcgMorLockDxe.c + ../Variable.c + ../Variable.h + ../VariableExLib.c + ../VariableNonVolatile.c + ../VariableNonVolatile.h + ../VariableParsing.c + ../VariableParsing.h + ../VariableRuntimeCache.c + ../VariableRuntimeCache.h + + +[Packages] + MdePkg/MdePkg.dec + MdeModulePkg/MdeModulePkg.dec + + +[LibraryClasses] + UnitTestLib + MemoryAllocationLib + BaseLib + SynchronizationLib + UefiLib + UefiBootServicesTableLib + BaseMemoryLib + DebugLib + UefiRuntimeLib + DxeServicesTableLib + PcdLib + TpmMeasurementLib + AuthVariableLib + VarCheckLib + VariablePolicyLib + VariablePolicyHelperLib + VariableFlashInfoLib + PrintLib + + +[Protocols] + gEdkiiVariablePolicyProtocolGuid ## CONSUMES + + +[Guids] + gVariableFlashInfoHobGuid + gEfiHardwareErrorVariableGuid + + ## SOMETIMES_CONSUMES ## GUID # Signature of Variable store header + ## SOMETIMES_PRODUCES ## GUID # Signature of Variable store header + ## SOMETIMES_CONSUMES ## HOB + ## SOMETIMES_PRODUCES ## SystemTable + gEfiAuthenticatedVariableGuid + + ## SOMETIMES_CONSUMES ## GUID # Signature of Variable store header + ## SOMETIMES_PRODUCES ## GUID # Signature of Variable store header + ## SOMETIMES_CONSUMES ## HOB + ## SOMETIMES_PRODUCES ## SystemTable + gEfiVariableGuid + + ## SOMETIMES_CONSUMES ## Variable:L"PlatformLang" + ## SOMETIMES_PRODUCES ## Variable:L"PlatformLang" + ## SOMETIMES_CONSUMES ## 
Variable:L"Lang" + ## SOMETIMES_PRODUCES ## Variable:L"Lang" + ## SOMETIMES_CONSUMES ## Variable:L"PK" + ## SOMETIMES_CONSUMES ## Variable:L"KEK" + ## SOMETIMES_CONSUMES ## Variable:L"SecureBoot" + gEfiGlobalVariableGuid + + gEfiMemoryOverwriteControlDataGuid ## SOMETIMES_CONSUMES ## Variable:L"MemoryOverwriteRequestControl" + gEfiMemoryOverwriteRequestControlLockGuid ## SOMETIMES_PRODUCES ## Variable:L"MemoryOverwriteRequestControlLock" + + gEfiEventVirtualAddressChangeGuid ## CONSUMES ## Event + gEfiSystemNvDataFvGuid ## CONSUMES ## GUID + gEfiEndOfDxeEventGroupGuid ## CONSUMES ## Event + gEdkiiFaultTolerantWriteGuid ## SOMETIMES_CONSUMES ## HOB + + ## SOMETIMES_CONSUMES ## Variable:L"VarErrorFlag" + ## SOMETIMES_PRODUCES ## Variable:L"VarErrorFlag" + gEdkiiVarErrorFlagGuid + + ## SOMETIMES_CONSUMES ## Variable:L"db" + ## SOMETIMES_CONSUMES ## Variable:L"dbx" + ## SOMETIMES_CONSUMES ## Variable:L"dbt" + gEfiImageSecurityDatabaseGuid + ## SOMETIMES_CONSUMES ## Variable:L"devdb" + gEfiDeviceSignatureDatabaseGuid + +[Pcd] + gEfiMdeModulePkgTokenSpaceGuid.PcdFlashNvStorageVariableSize ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdFlashNvStorageVariableBase ## SOMETIMES_CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdFlashNvStorageVariableBase64 ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdMaxVariableSize ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdMaxAuthVariableSize ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdMaxVolatileVariableSize ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdMaxHardwareErrorVariableSize ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdVariableStoreSize ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdHwErrStorageSize ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdMaxUserNvVariableSpaceSize ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdBoottimeReservedNvVariableSpaceSize ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdReclaimVariableSpaceAtEndOfDxe ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdEmuVariableNvModeEnable ## 
SOMETIMES_CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdEmuVariableNvStoreReserved ## SOMETIMES_CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdTcgPfpMeasurementRevision ## CONSUMES + gEfiMdeModulePkgTokenSpaceGuid.PcdEnableSpdmDeviceAuthentication ## PRODUCES AND CONSUMES + +[FeaturePcd] + gEfiMdeModulePkgTokenSpaceGuid.PcdVariableCollectStatistics ## CONSUMES # statistic the information of variable. + gEfiMdePkgTokenSpaceGuid.PcdUefiVariableDefaultLangDeprecate ## CONSUMES # Auto update PlatformLang/Lang + + + +[BuildOptions] + # NOTE: For some reason the MSABI VA_ARGS funcs are causing a SegFault in Linux + GCC:NOOPT_*_*_CC_FLAGS = -include Uefi.h -DNO_MSABI_VA_FUNCS + MSFT:NOOPT_*_*_CC_FLAGS = /FIUefi.h diff --git a/MdePkg/Library/BaseLib/AArch64/CpuBreakAssert.S b/MdePkg/Library/BaseLib/AArch64/CpuBreakAssert.S index 7c167d3bcd..2858d9933a 100644 --- a/MdePkg/Library/BaseLib/AArch64/CpuBreakAssert.S +++ b/MdePkg/Library/BaseLib/AArch64/CpuBreakAssert.S @@ -1,28 +1,28 @@ -# @file CpuBreakAssert.S -# -# CpuBreakAssert function for AArch64. -# -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -# - -.text -.p2align 2 -GCC_ASM_EXPORT(CpuBreakAssert) - -#/** -# Generates a debugger assertion break on the CPU. -# -# This does a special break into the debugger such that the debugger knows -# that the code running has hit an assertion, not a generic breakpoint. -# -#**/ -#VOID -#EFIAPI -#CpuBreakAssert ( -# VOID -# ); -# -ASM_PFX(CpuBreakAssert): - brk 0xf001 - ret +# @file CpuBreakAssert.S +# +# CpuBreakAssert function for AArch64. +# +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +# + +.text +.p2align 2 +GCC_ASM_EXPORT(CpuBreakAssert) + +#/** +# Generates a debugger assertion break on the CPU. +# +# This does a special break into the debugger such that the debugger knows +# that the code running has hit an assertion, not a generic breakpoint. 
+# +#**/ +#VOID +#EFIAPI +#CpuBreakAssert ( +# VOID +# ); +# +ASM_PFX(CpuBreakAssert): + brk 0xf001 + ret diff --git a/MdePkg/Library/BaseMmuLibNull/BaseMmuLibNull.inf b/MdePkg/Library/BaseMmuLibNull/BaseMmuLibNull.inf index 64b2282b2e..75e521931c 100644 --- a/MdePkg/Library/BaseMmuLibNull/BaseMmuLibNull.inf +++ b/MdePkg/Library/BaseMmuLibNull/BaseMmuLibNull.inf @@ -1,29 +1,29 @@ -## @file -# This lib abstracts some of the MMU accesses currently hardcoded against -# an Arm lib. It's likely that this will need to be refactored at some point. -# -## -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - - -[Defines] - INF_VERSION = 0x00010017 - BASE_NAME = BaseMmuLibNull - FILE_GUID = 97196A48-00C0-4487-802A-CC5540583EEB - VERSION_STRING = 1.0 - MODULE_TYPE = BASE - LIBRARY_CLASS = MmuLib - - -[Sources] - BaseMmuLibNull.c - - -[LibraryClasses] - DebugLib - - -[Packages] - MdePkg/MdePkg.dec +## @file +# This lib abstracts some of the MMU accesses currently hardcoded against +# an Arm lib. It's likely that this will need to be refactored at some point. +# +## +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + + +[Defines] + INF_VERSION = 0x00010017 + BASE_NAME = BaseMmuLibNull + FILE_GUID = 97196A48-00C0-4487-802A-CC5540583EEB + VERSION_STRING = 1.0 + MODULE_TYPE = BASE + LIBRARY_CLASS = MmuLib + + +[Sources] + BaseMmuLibNull.c + + +[LibraryClasses] + DebugLib + + +[Packages] + MdePkg/MdePkg.dec diff --git a/MdePkg/Library/CompilerIntrinsicsLib/ArmCompilerIntrinsicsLib.inf b/MdePkg/Library/CompilerIntrinsicsLib/ArmCompilerIntrinsicsLib.inf index 8e00557f93..fb941274d6 100644 --- a/MdePkg/Library/CompilerIntrinsicsLib/ArmCompilerIntrinsicsLib.inf +++ b/MdePkg/Library/CompilerIntrinsicsLib/ArmCompilerIntrinsicsLib.inf @@ -1,78 +1,78 @@ -#/** @file -# Base Library implementation. -# -# Copyright (c) 2009, Apple Inc. All rights reserved.
-# Copyright (c) 2011-2013, ARM Limited. All rights reserved. -# -# SPDX-License-Identifier: BSD-2-Clause-Patent -# -# -#**/ - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = CompilerIntrinsicsLib - FILE_GUID = 855274FA-3575-4C20-9709-C031DC5589FA - MODULE_TYPE = BASE - VERSION_STRING = 1.0 - LIBRARY_CLASS = CompilerIntrinsicsLib - -[Sources] - - memcpy.c | GCC - memset.c | GCC - - memcpy_ms.c | MSFT - memset_ms.c | MSFT - memcmp_ms.c | MSFT - memmove_ms.c | MSFT - -[Sources.ARM] - - Arm/ashrdi3.S | GCC - Arm/ashldi3.S | GCC - Arm/div.S | GCC - Arm/divdi3.S | GCC - Arm/divsi3.S | GCC - Arm/lshrdi3.S | GCC - Arm/memmove.S | GCC - Arm/modsi3.S | GCC - Arm/moddi3.S | GCC - Arm/muldi3.S | GCC - Arm/mullu.S | GCC - Arm/udivsi3.S | GCC - Arm/umodsi3.S | GCC - Arm/udivdi3.S | GCC - Arm/umoddi3.S | GCC - Arm/udivmoddi4.S | GCC - Arm/clzsi2.S | GCC - Arm/ctzsi2.S | GCC - Arm/ucmpdi2.S | GCC - Arm/switch8.S | GCC - Arm/switchu8.S | GCC - Arm/switch16.S | GCC - Arm/switch32.S | GCC - Arm/sourcery.S | GCC - Arm/uldiv.S | GCC - Arm/ldivmod.S | GCC - Arm/lasr.S | GCC - Arm/llsr.S | GCC - Arm/llsl.S | GCC - Arm/uread.S | GCC - Arm/uwrite.S | GCC - - Arm/div.asm | MSFT - Arm/uldiv.asm | MSFT - Arm/ldivmod.asm | MSFT - Arm/llsr.asm | MSFT - -[Sources.AARCH64] - AArch64/Atomics.S | GCC - -[Packages] - MdePkg/MdePkg.dec - #ArmPkg/ArmPkg.dec - -[BuildOptions] - MSFT:*_*_*_CC_FLAGS = /GL- - MSFT:*_*_ARM_ASM_FLAGS = /oldit +#/** @file +# Base Library implementation. +# +# Copyright (c) 2009, Apple Inc. All rights reserved.
+# Copyright (c) 2011-2013, ARM Limited. All rights reserved. +# +# SPDX-License-Identifier: BSD-2-Clause-Patent +# +# +#**/ + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = CompilerIntrinsicsLib + FILE_GUID = 855274FA-3575-4C20-9709-C031DC5589FA + MODULE_TYPE = BASE + VERSION_STRING = 1.0 + LIBRARY_CLASS = CompilerIntrinsicsLib + +[Sources] + + memcpy.c | GCC + memset.c | GCC + + memcpy_ms.c | MSFT + memset_ms.c | MSFT + memcmp_ms.c | MSFT + memmove_ms.c | MSFT + +[Sources.ARM] + + Arm/ashrdi3.S | GCC + Arm/ashldi3.S | GCC + Arm/div.S | GCC + Arm/divdi3.S | GCC + Arm/divsi3.S | GCC + Arm/lshrdi3.S | GCC + Arm/memmove.S | GCC + Arm/modsi3.S | GCC + Arm/moddi3.S | GCC + Arm/muldi3.S | GCC + Arm/mullu.S | GCC + Arm/udivsi3.S | GCC + Arm/umodsi3.S | GCC + Arm/udivdi3.S | GCC + Arm/umoddi3.S | GCC + Arm/udivmoddi4.S | GCC + Arm/clzsi2.S | GCC + Arm/ctzsi2.S | GCC + Arm/ucmpdi2.S | GCC + Arm/switch8.S | GCC + Arm/switchu8.S | GCC + Arm/switch16.S | GCC + Arm/switch32.S | GCC + Arm/sourcery.S | GCC + Arm/uldiv.S | GCC + Arm/ldivmod.S | GCC + Arm/lasr.S | GCC + Arm/llsr.S | GCC + Arm/llsl.S | GCC + Arm/uread.S | GCC + Arm/uwrite.S | GCC + + Arm/div.asm | MSFT + Arm/uldiv.asm | MSFT + Arm/ldivmod.asm | MSFT + Arm/llsr.asm | MSFT + +[Sources.AARCH64] + AArch64/Atomics.S | GCC + +[Packages] + MdePkg/MdePkg.dec + #ArmPkg/ArmPkg.dec + +[BuildOptions] + MSFT:*_*_*_CC_FLAGS = /GL- + MSFT:*_*_ARM_ASM_FLAGS = /oldit diff --git a/MdePkg/Library/FltUsedLib/FltUsedLib.inf b/MdePkg/Library/FltUsedLib/FltUsedLib.inf index ef13e0c240..220b6c3b29 100644 --- a/MdePkg/Library/FltUsedLib/FltUsedLib.inf +++ b/MdePkg/Library/FltUsedLib/FltUsedLib.inf @@ -1,31 +1,31 @@ -## @file -# Lib to include if using floats -# -# Copyright (C) Microsoft Corporation. 
-# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = FltUsedLib - FILE_GUID = C004F180-9FE2-4D2B-8318-BADC2A231774 - MODULE_TYPE = BASE - VERSION_STRING = 1.0 - LIBRARY_CLASS = FltUsedLib - -# -# The following information is for reference only and not required by the build tools. -# -# VALID_ARCHITECTURES = IA32 X64 AARCH64 -# - -[Sources] - FltUsedLib.c - -[Packages] - MdePkg/MdePkg.dec - -[BuildOptions] - # Disable GL due to linker error LNK1237 - # https://docs.microsoft.com/en-us/cpp/error-messages/tool-errors/linker-tools-error-lnk1237?view=vs-2017 - MSFT:*_*_*_CC_FLAGS = /GL- +## @file +# Lib to include if using floats +# +# Copyright (C) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = FltUsedLib + FILE_GUID = C004F180-9FE2-4D2B-8318-BADC2A231774 + MODULE_TYPE = BASE + VERSION_STRING = 1.0 + LIBRARY_CLASS = FltUsedLib + +# +# The following information is for reference only and not required by the build tools. +# +# VALID_ARCHITECTURES = IA32 X64 AARCH64 +# + +[Sources] + FltUsedLib.c + +[Packages] + MdePkg/MdePkg.dec + +[BuildOptions] + # Disable GL due to linker error LNK1237 + # https://docs.microsoft.com/en-us/cpp/error-messages/tool-errors/linker-tools-error-lnk1237?view=vs-2017 + MSFT:*_*_*_CC_FLAGS = /GL- diff --git a/MdePkg/Library/FltUsedLib/Readme.md b/MdePkg/Library/FltUsedLib/Readme.md index aa2053f6d8..df0133ccf1 100644 --- a/MdePkg/Library/FltUsedLib/Readme.md +++ b/MdePkg/Library/FltUsedLib/Readme.md @@ -1,15 +1,15 @@ -# FltUsedLib - -This library provides a global (fltused) that needs to be defined anywhere floating point operations are used. -The C compiler produces the _fltused symbol by default, this is just to satisfy the linker. - -## Using - -To use FltUsedLib, just include it in the INF of the module that uses floating point. 
- -```inf -[LibraryClasses] - BaseLib - BaseMemoryLib - FltUsedLib -``` +# FltUsedLib + +This library provides a global (fltused) that needs to be defined anywhere floating point operations are used. +The C compiler produces the _fltused symbol by default, this is just to satisfy the linker. + +## Using + +To use FltUsedLib, just include it in the INF of the module that uses floating point. + +```inf +[LibraryClasses] + BaseLib + BaseMemoryLib + FltUsedLib +``` diff --git a/MdePkg/Library/VsIntrinsicLib/IA32/llmul.asm b/MdePkg/Library/VsIntrinsicLib/IA32/llmul.asm index 17c9fb9078..d6581be7fe 100644 --- a/MdePkg/Library/VsIntrinsicLib/IA32/llmul.asm +++ b/MdePkg/Library/VsIntrinsicLib/IA32/llmul.asm @@ -1,98 +1,98 @@ -;*** -;llmul.asm - long multiply routine -; -; Copyright (c) Microsoft Corporation. -; SPDX-License-Identifier: BSD-2-Clause-Patent -; -;Purpose: -; Defines long multiply routine -; Both signed and unsigned routines are the same, since multiply's -; work out the same in 2's complement -; creates the following routine: -; __allmul -; -;Original Implemenation: MSVC 14.12.25827 -; -;******************************************************************************* - .686 - .model flat,C - .code - - -;*** -;llmul - long multiply routine -; -;Purpose: -; Does a long multiply (same for signed/unsigned) -; Parameters are not changed. 
-; -;Entry: -; Parameters are passed on the stack: -; 1st pushed: multiplier (QWORD) -; 2nd pushed: multiplicand (QWORD) -; -;Exit: -; EDX:EAX - product of multiplier and multiplicand -; NOTE: parameters are removed from the stack -; -;Uses: -; ECX -; -;Exceptions: -; -;******************************************************************************* -_allmul PROC NEAR - -A EQU [esp + 4] ; stack address of a -B EQU [esp + 12] ; stack address of b - -HIGH_PART EQU [4] ; -LOW_PART EQU [0] - -; -; AHI, BHI : upper 32 bits of A and B -; ALO, BLO : lower 32 bits of A and B -; -; ALO * BLO -; ALO * BHI -; + BLO * AHI -; --------------------- -; - - mov eax,HIGH_PART(A) - mov ecx,HIGH_PART(B) - or ecx,eax ;test for both high dwords zero. - mov ecx,LOW_PART(B) - jnz short hard ;both are zero, just mult ALO and BLO - - mov eax,LOW_PART(A) - mul ecx - - ret 16 ; callee restores the stack - -hard: - push ebx - -; must redefine A and B since esp has been altered - -A2 EQU [esp + 8] ; stack address of a -B2 EQU [esp + 16] ; stack address of b - - mul ecx ;eax has AHI, ecx has BLO, so AHI * BLO - mov ebx,eax ;save result - - mov eax,LOW_PART(A2) - mul dword ptr HIGH_PART(B2) ;ALO * BHI - add ebx,eax ;ebx = ((ALO * BHI) + (AHI * BLO)) - - mov eax,LOW_PART(A2);ecx = BLO - mul ecx ;so edx:eax = ALO*BLO - add edx,ebx ;now edx has all the LO*HI stuff - - pop ebx - - ret 16 ; callee restores the stack - -_allmul ENDP - - end +;*** +;llmul.asm - long multiply routine +; +; Copyright (c) Microsoft Corporation. 
+; SPDX-License-Identifier: BSD-2-Clause-Patent +; +;Purpose: +; Defines long multiply routine +; Both signed and unsigned routines are the same, since multiply's +; work out the same in 2's complement +; creates the following routine: +; __allmul +; +;Original Implemenation: MSVC 14.12.25827 +; +;******************************************************************************* + .686 + .model flat,C + .code + + +;*** +;llmul - long multiply routine +; +;Purpose: +; Does a long multiply (same for signed/unsigned) +; Parameters are not changed. +; +;Entry: +; Parameters are passed on the stack: +; 1st pushed: multiplier (QWORD) +; 2nd pushed: multiplicand (QWORD) +; +;Exit: +; EDX:EAX - product of multiplier and multiplicand +; NOTE: parameters are removed from the stack +; +;Uses: +; ECX +; +;Exceptions: +; +;******************************************************************************* +_allmul PROC NEAR + +A EQU [esp + 4] ; stack address of a +B EQU [esp + 12] ; stack address of b + +HIGH_PART EQU [4] ; +LOW_PART EQU [0] + +; +; AHI, BHI : upper 32 bits of A and B +; ALO, BLO : lower 32 bits of A and B +; +; ALO * BLO +; ALO * BHI +; + BLO * AHI +; --------------------- +; + + mov eax,HIGH_PART(A) + mov ecx,HIGH_PART(B) + or ecx,eax ;test for both high dwords zero. 
+ mov ecx,LOW_PART(B) + jnz short hard ;both are zero, just mult ALO and BLO + + mov eax,LOW_PART(A) + mul ecx + + ret 16 ; callee restores the stack + +hard: + push ebx + +; must redefine A and B since esp has been altered + +A2 EQU [esp + 8] ; stack address of a +B2 EQU [esp + 16] ; stack address of b + + mul ecx ;eax has AHI, ecx has BLO, so AHI * BLO + mov ebx,eax ;save result + + mov eax,LOW_PART(A2) + mul dword ptr HIGH_PART(B2) ;ALO * BHI + add ebx,eax ;ebx = ((ALO * BHI) + (AHI * BLO)) + + mov eax,LOW_PART(A2);ecx = BLO + mul ecx ;so edx:eax = ALO*BLO + add edx,ebx ;now edx has all the LO*HI stuff + + pop ebx + + ret 16 ; callee restores the stack + +_allmul ENDP + + end diff --git a/MdePkg/Library/VsIntrinsicLib/VsIntrinsicLib.inf b/MdePkg/Library/VsIntrinsicLib/VsIntrinsicLib.inf index f755f6b00d..a0c950653b 100644 --- a/MdePkg/Library/VsIntrinsicLib/VsIntrinsicLib.inf +++ b/MdePkg/Library/VsIntrinsicLib/VsIntrinsicLib.inf @@ -1,35 +1,35 @@ -## @file -# Since the C compiler does very aggressive full program optimizations there are cases -# where some small number of compiler inserted functions can not be avoided. -# To handle that case this NULL library can be injected into all 32bit modules -# so that the link time dependency is met and the modules compile. -# -# The routines are based on src delivered with the visual studio product. it is -# critical that calling convention, stack usage, register usage, etc is in line -# with what the compiler expects as there is no way to influence the behaviors -# for compiler inserted functions. -# -# Copyright (c) Microsoft Corporation. 
-# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = VsIntrinsicLib - MODULE_UNI_FILE = VsIntrinsicLib.uni - FILE_GUID = ed449fc0-3265-40ed-91b8-435b8df0aa5f - MODULE_TYPE = BASE - VERSION_STRING = 1.0 - LIBRARY_CLASS = NULL - -# -# VALID_ARCHITECTURES = IA32 -# - -[Sources] - -[Sources.Ia32] - IA32/llmul.asm - -[Packages] - MdePkg/MdePkg.dec +## @file +# Since the C compiler does very aggressive full program optimizations there are cases +# where some small number of compiler inserted functions can not be avoided. +# To handle that case this NULL library can be injected into all 32bit modules +# so that the link time dependency is met and the modules compile. +# +# The routines are based on src delivered with the visual studio product. it is +# critical that calling convention, stack usage, register usage, etc is in line +# with what the compiler expects as there is no way to influence the behaviors +# for compiler inserted functions. +# +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = VsIntrinsicLib + MODULE_UNI_FILE = VsIntrinsicLib.uni + FILE_GUID = ed449fc0-3265-40ed-91b8-435b8df0aa5f + MODULE_TYPE = BASE + VERSION_STRING = 1.0 + LIBRARY_CLASS = NULL + +# +# VALID_ARCHITECTURES = IA32 +# + +[Sources] + +[Sources.Ia32] + IA32/llmul.asm + +[Packages] + MdePkg/MdePkg.dec diff --git a/MdePkg/Library/VsIntrinsicLib/VsIntrinsicLib.uni b/MdePkg/Library/VsIntrinsicLib/VsIntrinsicLib.uni index 6b4a0f4600..4e0a5598f5 100644 --- a/MdePkg/Library/VsIntrinsicLib/VsIntrinsicLib.uni +++ b/MdePkg/Library/VsIntrinsicLib/VsIntrinsicLib.uni @@ -1,13 +1,13 @@ -// /** @file -// VsIntrinsic Library implementation. -// -// Copyright (c) Microsoft Corporation. 
-// SPDX-License-Identifier: BSD-2-Clause-Patent -// -// **/ - - -#string STR_MODULE_ABSTRACT #language en-US "VsIntrinsic Library implementation" - -#string STR_MODULE_DESCRIPTION #language en-US "VsIntrinsic Library implementation" - +// /** @file +// VsIntrinsic Library implementation. +// +// Copyright (c) Microsoft Corporation. +// SPDX-License-Identifier: BSD-2-Clause-Patent +// +// **/ + + +#string STR_MODULE_ABSTRACT #language en-US "VsIntrinsic Library implementation" + +#string STR_MODULE_DESCRIPTION #language en-US "VsIntrinsic Library implementation" + diff --git a/MdePkg/Test/Library/RngLibHostTestLfsr/RngLibHostTestLfsr.inf b/MdePkg/Test/Library/RngLibHostTestLfsr/RngLibHostTestLfsr.inf index 03228bbfc3..18d9bf17fd 100644 --- a/MdePkg/Test/Library/RngLibHostTestLfsr/RngLibHostTestLfsr.inf +++ b/MdePkg/Test/Library/RngLibHostTestLfsr/RngLibHostTestLfsr.inf @@ -1,25 +1,25 @@ -## @file -# A minimal implementation of RngLib that supports host based testing -# with a simple LFSR: -# https://en.wikipedia.org/wiki/Linear-feedback_shift_register -# -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - - -[Defines] - INF_VERSION = 0x00010017 - BASE_NAME = RngLibHostTestLfsr - FILE_GUID = E96C1E06-1052-4967-9FF2-F3E07EE02D8B - VERSION_STRING = 1.0 - MODULE_TYPE = HOST_APPLICATION - LIBRARY_CLASS = RngLib|HOST_APPLICATION - - -[Sources] - RngLibHostTestLfsr.c - - -[Packages] - MdePkg/MdePkg.dec +## @file +# A minimal implementation of RngLib that supports host based testing +# with a simple LFSR: +# https://en.wikipedia.org/wiki/Linear-feedback_shift_register +# +# Copyright (c) Microsoft Corporation. 
+# SPDX-License-Identifier: BSD-2-Clause-Patent +## + + +[Defines] + INF_VERSION = 0x00010017 + BASE_NAME = RngLibHostTestLfsr + FILE_GUID = E96C1E06-1052-4967-9FF2-F3E07EE02D8B + VERSION_STRING = 1.0 + MODULE_TYPE = HOST_APPLICATION + LIBRARY_CLASS = RngLib|HOST_APPLICATION + + +[Sources] + RngLibHostTestLfsr.c + + +[Packages] + MdePkg/MdePkg.dec diff --git a/MdePkg/Test/Mock/Library/Stub/StubUefiLib/StubUefiLib.inf b/MdePkg/Test/Mock/Library/Stub/StubUefiLib/StubUefiLib.inf index 487a502b53..c6d83c1496 100644 --- a/MdePkg/Test/Mock/Library/Stub/StubUefiLib/StubUefiLib.inf +++ b/MdePkg/Test/Mock/Library/Stub/StubUefiLib/StubUefiLib.inf @@ -1,37 +1,37 @@ -## @file -# Mock instance of UEFI Library. -# -# The UEFI Library provides functions and macros that simplify the development of -# UEFI Drivers and UEFI Applications. These functions and macros help manage EFI -# events, build simple locks utilizing EFI Task Priority Levels (TPLs), install -# EFI Driver Model related protocols, manage Unicode string tables for UEFI Drivers, -# and print messages on the console output and standard error devices. -# -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -[Defines] - INF_VERSION = 0x00010005 - BASE_NAME = StubUefiLib - FILE_GUID = 2834A68A-7201-4931-B953-53EA5E45AA14 - MODULE_TYPE = HOST_APPLICATION - VERSION_STRING = 1.0 - LIBRARY_CLASS = UefiLib|HOST_APPLICATION - - -# -# VALID_ARCHITECTURES = IA32 X64 EBC -# - -[Sources] - StubUefiLib.c - - -[Packages] - MdePkg/MdePkg.dec - UnitTestFrameworkPkg/UnitTestFrameworkPkg.dec - - -[LibraryClasses] - DebugLib +## @file +# Mock instance of UEFI Library. +# +# The UEFI Library provides functions and macros that simplify the development of +# UEFI Drivers and UEFI Applications. 
These functions and macros help manage EFI +# events, build simple locks utilizing EFI Task Priority Levels (TPLs), install +# EFI Driver Model related protocols, manage Unicode string tables for UEFI Drivers, +# and print messages on the console output and standard error devices. +# +# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +[Defines] + INF_VERSION = 0x00010005 + BASE_NAME = StubUefiLib + FILE_GUID = 2834A68A-7201-4931-B953-53EA5E45AA14 + MODULE_TYPE = HOST_APPLICATION + VERSION_STRING = 1.0 + LIBRARY_CLASS = UefiLib|HOST_APPLICATION + + +# +# VALID_ARCHITECTURES = IA32 X64 EBC +# + +[Sources] + StubUefiLib.c + + +[Packages] + MdePkg/MdePkg.dec + UnitTestFrameworkPkg/UnitTestFrameworkPkg.dec + + +[LibraryClasses] + DebugLib diff --git a/PolicyServicePkg/CommonPolicy/Template_PolicyHeader.yaml b/PolicyServicePkg/CommonPolicy/Template_PolicyHeader.yaml index 4a281e0964..386e392642 100644 --- a/PolicyServicePkg/CommonPolicy/Template_PolicyHeader.yaml +++ b/PolicyServicePkg/CommonPolicy/Template_PolicyHeader.yaml @@ -1,27 +1,27 @@ -## @file -# -# Slim Bootloader CFGDATA Default File. -# -# Copyright (c) 2020, Intel Corporation. All rights reserved.
-# SPDX-License-Identifier: BSD-2-Clause-Patent -# -## - -# Template section for common policy header, template name has to end with `_TMPL` -# Policy structure metadata, will be used for policy headers and genereating unique macro definitions -POLICY_HEADER_TMPL: > - # Unique identifier for this polisy structure. Duplicate category in an active platform will cause build break - - category : $(1) - # Signature field for verfied policy header - - signature : - - length : 0x08 - - value : $(2) - # Major version field for verfied policy header - - majver : - - length : 0x02 - - value : $(3) - # Minor version field for verfied policy header is automatically populated with the highest minor version from fields - # Size field for verfied policy header, should be what your - - size : - - length : 0x04 - - value : $(4) +## @file +# +# Slim Bootloader CFGDATA Default File. +# +# Copyright (c) 2020, Intel Corporation. All rights reserved.
+# SPDX-License-Identifier: BSD-2-Clause-Patent +# +## + +# Template section for common policy header, template name has to end with `_TMPL` +# Policy structure metadata, will be used for policy headers and genereating unique macro definitions +POLICY_HEADER_TMPL: > + # Unique identifier for this polisy structure. Duplicate category in an active platform will cause build break + - category : $(1) + # Signature field for verfied policy header + - signature : + - length : 0x08 + - value : $(2) + # Major version field for verfied policy header + - majver : + - length : 0x02 + - value : $(3) + # Minor version field for verfied policy header is automatically populated with the highest minor version from fields + # Size field for verfied policy header, should be what your + - size : + - length : 0x04 + - value : $(4) diff --git a/PolicyServicePkg/Plugins/UpdatePolicyHdr/UpdatePolicyHdr.py b/PolicyServicePkg/Plugins/UpdatePolicyHdr/UpdatePolicyHdr.py index f54ac87836..4f9c843f5a 100644 --- a/PolicyServicePkg/Plugins/UpdatePolicyHdr/UpdatePolicyHdr.py +++ b/PolicyServicePkg/Plugins/UpdatePolicyHdr/UpdatePolicyHdr.py @@ -1,262 +1,262 @@ -## -# This plugin generates policy header files -# from platform supplied YAML policy. 
-# -# Copyright (c) Microsoft Corporation -# SPDX-License-Identifier: BSD-2-Clause-Patent -## - -import logging -import os -import shutil -from collections import OrderedDict -from copy import deepcopy -import xml.etree.ElementTree -import hashlib -import json -import time -import re -import xml.etree.ElementTree as ET -from edk2toolext.environment import shell_environment -from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin -from edk2toollib.utility_functions import RunPythonScript -from edk2toollib.uefi.edk2.path_utilities import Edk2Path - -import sys - -import yaml -sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'Tools')) -from GenCfgData import CGenCfgData - -class UpdatePolicyHdr(IUefiBuildPlugin): - - def trimTreeBaseOnMinver (self, tree, list): - - if type(tree) is not OrderedDict: - raise Exception ("Incorrect tree type!!!") - - try: - ver = int(tree["minver"], 0) - except: - ver = 0 - - trim_list = [] - for idx in range(len(list)): - if idx < ver and list[idx] != None: - # trim the entry if this minver is higher than it belongs - list[idx] = None - trim_list.append(idx) - - for value in tree: - if type(tree[value]) is OrderedDict: - sub_list = [] - for idx in range(len(list)): - if list[idx] != None: - sub_list.append(list[idx][value]) - else: - sub_list.append(None) - sub_trim_list = self.trimTreeBaseOnMinver (tree[value], sub_list) - for item in sub_trim_list: - del list[item][value] - - return trim_list - - # in-place prettyprint formatter - @staticmethod - def indent(elem, level=0): - i = "\n" + level*" " - if len(elem): - if not elem.text or not elem.text.strip(): - elem.text = i + " " - if not elem.tail or not elem.tail.strip(): - elem.tail = i - for elem in elem: - UpdatePolicyHdr.indent(elem, level+1) - if not elem.tail or not elem.tail.strip(): - elem.tail = i - else: - if level and (not elem.tail or not elem.tail.strip()): - elem.tail = i - - # Attempt to run GenCfgData to 
generate C header files - # - # Consumes build environement variables: "BUILD_OUTPUT_BASE", "UPDATE_SETTINGS", - # and either of "POLICY_REPORT_FOLDER" or "ACTIVE_PLATFORM" - def do_pre_build(self, thebuilder): - need_check = thebuilder.env.GetValue("UPDATE_SETTINGS") - if need_check is not None and need_check.upper() == "FALSE": - logging.warn ("Platform indicated as not checking YAML file changes, will not be updated!") - return 0 - - yaml_list = [] - exception_list = [] - ws = thebuilder.ws - pp = thebuilder.pp.split(os.pathsep) - edk2 = Edk2Path(ws, pp) - - # Form the exception list of formatted absolute paths. And always ignore our own samples. - exception_list.append (thebuilder.mws.join (thebuilder.ws, "PolicyServicePkg", "Samples")) - platform_exception = thebuilder.env.GetValue("POLICY_IGNORE_PATHS") - if platform_exception is not None: - plat_list = platform_exception.split(';') - for each in plat_list: - exception_list.append(os.path.normpath (thebuilder.mws.join (thebuilder.ws, each))) - - # Look for *_policy_def.yaml files in all package paths. 
- for pkg_path in pp: - for subdir, dirs, files in os.walk(pkg_path): - for file in files: - if file.endswith ("_policy_def.yaml") or file.endswith ("_policy_def.yml"): - yaml_path = os.path.normpath(os.path.join (subdir, file)) - ignore = False - for exception in exception_list: - if yaml_path.startswith (exception): - ignore = True - break - if ignore: - continue - yaml_list.append (yaml_path) - logging.debug (yaml_path) - - err_count = 0 - type = 'POLICY' - report_dir = thebuilder.env.GetValue("%s_REPORT_FOLDER" % type) - if report_dir is None: - report_dir = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath ( - edk2.GetContainingPackage( - edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath( - thebuilder.env.GetValue("ACTIVE_PLATFORM")))) - - report_file = os.path.join (report_dir, "%s_REPORT.xml" % type) - - if os.path.isfile (report_file): - tree = ET.parse(report_file).getroot() - else: - tree = None - - comment = ET.Comment(' === Auto-Generated. Please do not change anything!!! 
=== ') - root = ET.Element('Settings') - root.insert(0, comment) - - for setting in yaml_list: - - if not os.path.normcase(setting).startswith(os.path.normcase(report_dir.rstrip(os.sep)) + os.sep): - continue - - logging.info ("Processing settings from %s" % setting) - - final_dir = os.path.join (edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath( - edk2.GetContainingPackage (setting)), "Include") - if not os.path.isdir(final_dir): - os.mkdir (final_dir) - - # Set up a playground first - op_dir = thebuilder.mws.join(thebuilder.ws, thebuilder.env.GetValue("BUILD_OUTPUT_BASE"), "ConfPolicy") - if not os.path.isdir(op_dir): - os.makedirs(op_dir) - - cmd = thebuilder.mws.join(thebuilder.ws, "PolicyServicePkg", "Tools", "GenCfgData.py") - - conf_file = setting - if conf_file is None: - logging.warn ("YAML file not specified, system might not work as expected!!!") - return 0 - if not os.path.isfile(conf_file): - logging.error ("YAML file specified is not found!!!") - return 1 - - gen_cfg_data = CGenCfgData() - - if gen_cfg_data.load_yaml(conf_file, shallow_load=True) != 0: - raise Exception(gen_cfg_data.get_last_error()) - - merged_cfg_tree = gen_cfg_data.get_cfg_tree() - - minor_tree_list = [] - max_minver = gen_cfg_data.findMaxMinver (merged_cfg_tree) - # each minor version needs a spot, thus plus 1 here - for _ in range(max_minver + 1): - new_tree = deepcopy (merged_cfg_tree) - minor_tree_list.append (new_tree) - self.trimTreeBaseOnMinver (merged_cfg_tree, minor_tree_list) - - target = merged_cfg_tree['PolicyHeader']['category'] - major_version = int (merged_cfg_tree['PolicyHeader']['majver']['value'], 0) - - # Insert xml leaf for this conf/policy/etc - leaf = ET.Element(target) - leaf.set("MajorVersion", '0x%04X' % major_version) - leaf.set("MinorVersion", '0x%04X' % max_minver) - - for idx in range(len(minor_tree_list)): - minhash_item = ET.Element("Hash-v%x.%x" % (major_version, idx)) - hash_obj = hashlib.md5() - tree_js = json.dumps(minor_tree_list[idx]) - 
hash_obj.update(tree_js.encode('utf-8')) - result = hash_obj.hexdigest() - minhash_item.text = result - leaf.append (minhash_item) - - cached_root = None - if tree != None: - cached_root = tree.find (target) - if cached_root != None: - cached_maj_ver = int (cached_root.get("MajorVersion"), 0) - - if cached_maj_ver == None or major_version != cached_maj_ver: - # Print error message here and we will fail the build later on - logging.error ("Platform major verison does not match YAML files. Please update the %s descriptor file." % type) - err_count = err_count + 1 - - count = 0 - - for idx in range(len(minor_tree_list)): - saved_res = cached_root.find("Hash-v%x.%x" % (major_version, idx)) - calc_ret = leaf.find("Hash-v%x.%x" % (major_version, idx)) - if saved_res == None or saved_res.text != calc_ret.text: - count = count + 1 - if idx == 0: - logging.error ("Minor version 0 has changed, please consider bumping up major version") - logging.error ("%d minor version fields have changed, please update your report file" % idx) - err_count = err_count + 1 - - # Just to check if the cached hash file has extra entries compared to reality - for res in cached_root: - calc_ret = leaf.find(res.tag) - if calc_ret == None: - logging.error ("A tag from cached xml (%s) is not found" % res.tag) - err_count = err_count + 1 - - tree.remove (cached_root) - else: - logging.error ("%s report file not found, please add the autogen xml file to your %s_REPORT_FOLDER" % (type, type)) - err_count = err_count + 1 - - # Now that we have the PKL file, output the header files - params = ["GENHDR"] - params.append(conf_file) - params.append("PolicyDataStruct%s.h" % target) - - ret = RunPythonScript(cmd, " ".join(params), workingdir=final_dir) - if ret != 0: - return ret - - root.append (leaf) - - if tree != None and 0 != len(tree): - logging.error ("There is stale policy from cached xml %s, please remove them or use the newly created report." 
% (str([i.tag for i in tree]))) - err_count = err_count + len(tree) - - if err_count != 0: - UpdatePolicyHdr.indent(root) - hash_obj = hashlib.md5() - tree_xml = ET.tostring(root, encoding="utf-8", xml_declaration=True) - hash_obj.update(tree_xml) - xml_hash = hash_obj.hexdigest() - new_file = os.path.join (report_dir, "%s_REPORT_%s.xml" % (type, xml_hash)) - xml_file = open(new_file, 'wb') - xml_file.write(tree_xml) - xml_file.close() - logging.info ("New %s report xml was generated at %s, please replace %s with this new file." % (type, report_file, new_file)) - - return err_count +## +# This plugin generates policy header files +# from platform supplied YAML policy. +# +# Copyright (c) Microsoft Corporation +# SPDX-License-Identifier: BSD-2-Clause-Patent +## + +import logging +import os +import shutil +from collections import OrderedDict +from copy import deepcopy +import xml.etree.ElementTree +import hashlib +import json +import time +import re +import xml.etree.ElementTree as ET +from edk2toolext.environment import shell_environment +from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin +from edk2toollib.utility_functions import RunPythonScript +from edk2toollib.uefi.edk2.path_utilities import Edk2Path + +import sys + +import yaml +sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'Tools')) +from GenCfgData import CGenCfgData + +class UpdatePolicyHdr(IUefiBuildPlugin): + + def trimTreeBaseOnMinver (self, tree, list): + + if type(tree) is not OrderedDict: + raise Exception ("Incorrect tree type!!!") + + try: + ver = int(tree["minver"], 0) + except: + ver = 0 + + trim_list = [] + for idx in range(len(list)): + if idx < ver and list[idx] != None: + # trim the entry if this minver is higher than it belongs + list[idx] = None + trim_list.append(idx) + + for value in tree: + if type(tree[value]) is OrderedDict: + sub_list = [] + for idx in range(len(list)): + if list[idx] != None: + 
sub_list.append(list[idx][value]) + else: + sub_list.append(None) + sub_trim_list = self.trimTreeBaseOnMinver (tree[value], sub_list) + for item in sub_trim_list: + del list[item][value] + + return trim_list + + # in-place prettyprint formatter + @staticmethod + def indent(elem, level=0): + i = "\n" + level*" " + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + " " + if not elem.tail or not elem.tail.strip(): + elem.tail = i + for elem in elem: + UpdatePolicyHdr.indent(elem, level+1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + + # Attempt to run GenCfgData to generate C header files + # + # Consumes build environement variables: "BUILD_OUTPUT_BASE", "UPDATE_SETTINGS", + # and either of "POLICY_REPORT_FOLDER" or "ACTIVE_PLATFORM" + def do_pre_build(self, thebuilder): + need_check = thebuilder.env.GetValue("UPDATE_SETTINGS") + if need_check is not None and need_check.upper() == "FALSE": + logging.warn ("Platform indicated as not checking YAML file changes, will not be updated!") + return 0 + + yaml_list = [] + exception_list = [] + ws = thebuilder.ws + pp = thebuilder.pp.split(os.pathsep) + edk2 = Edk2Path(ws, pp) + + # Form the exception list of formatted absolute paths. And always ignore our own samples. + exception_list.append (thebuilder.mws.join (thebuilder.ws, "PolicyServicePkg", "Samples")) + platform_exception = thebuilder.env.GetValue("POLICY_IGNORE_PATHS") + if platform_exception is not None: + plat_list = platform_exception.split(';') + for each in plat_list: + exception_list.append(os.path.normpath (thebuilder.mws.join (thebuilder.ws, each))) + + # Look for *_policy_def.yaml files in all package paths. 
+ for pkg_path in pp: + for subdir, dirs, files in os.walk(pkg_path): + for file in files: + if file.endswith ("_policy_def.yaml") or file.endswith ("_policy_def.yml"): + yaml_path = os.path.normpath(os.path.join (subdir, file)) + ignore = False + for exception in exception_list: + if yaml_path.startswith (exception): + ignore = True + break + if ignore: + continue + yaml_list.append (yaml_path) + logging.debug (yaml_path) + + err_count = 0 + type = 'POLICY' + report_dir = thebuilder.env.GetValue("%s_REPORT_FOLDER" % type) + if report_dir is None: + report_dir = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath ( + edk2.GetContainingPackage( + edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath( + thebuilder.env.GetValue("ACTIVE_PLATFORM")))) + + report_file = os.path.join (report_dir, "%s_REPORT.xml" % type) + + if os.path.isfile (report_file): + tree = ET.parse(report_file).getroot() + else: + tree = None + + comment = ET.Comment(' === Auto-Generated. Please do not change anything!!! 
=== ') + root = ET.Element('Settings') + root.insert(0, comment) + + for setting in yaml_list: + + if not os.path.normcase(setting).startswith(os.path.normcase(report_dir.rstrip(os.sep)) + os.sep): + continue + + logging.info ("Processing settings from %s" % setting) + + final_dir = os.path.join (edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath( + edk2.GetContainingPackage (setting)), "Include") + if not os.path.isdir(final_dir): + os.mkdir (final_dir) + + # Set up a playground first + op_dir = thebuilder.mws.join(thebuilder.ws, thebuilder.env.GetValue("BUILD_OUTPUT_BASE"), "ConfPolicy") + if not os.path.isdir(op_dir): + os.makedirs(op_dir) + + cmd = thebuilder.mws.join(thebuilder.ws, "PolicyServicePkg", "Tools", "GenCfgData.py") + + conf_file = setting + if conf_file is None: + logging.warn ("YAML file not specified, system might not work as expected!!!") + return 0 + if not os.path.isfile(conf_file): + logging.error ("YAML file specified is not found!!!") + return 1 + + gen_cfg_data = CGenCfgData() + + if gen_cfg_data.load_yaml(conf_file, shallow_load=True) != 0: + raise Exception(gen_cfg_data.get_last_error()) + + merged_cfg_tree = gen_cfg_data.get_cfg_tree() + + minor_tree_list = [] + max_minver = gen_cfg_data.findMaxMinver (merged_cfg_tree) + # each minor version needs a spot, thus plus 1 here + for _ in range(max_minver + 1): + new_tree = deepcopy (merged_cfg_tree) + minor_tree_list.append (new_tree) + self.trimTreeBaseOnMinver (merged_cfg_tree, minor_tree_list) + + target = merged_cfg_tree['PolicyHeader']['category'] + major_version = int (merged_cfg_tree['PolicyHeader']['majver']['value'], 0) + + # Insert xml leaf for this conf/policy/etc + leaf = ET.Element(target) + leaf.set("MajorVersion", '0x%04X' % major_version) + leaf.set("MinorVersion", '0x%04X' % max_minver) + + for idx in range(len(minor_tree_list)): + minhash_item = ET.Element("Hash-v%x.%x" % (major_version, idx)) + hash_obj = hashlib.md5() + tree_js = json.dumps(minor_tree_list[idx]) + 
hash_obj.update(tree_js.encode('utf-8')) + result = hash_obj.hexdigest() + minhash_item.text = result + leaf.append (minhash_item) + + cached_root = None + if tree != None: + cached_root = tree.find (target) + if cached_root != None: + cached_maj_ver = int (cached_root.get("MajorVersion"), 0) + + if cached_maj_ver == None or major_version != cached_maj_ver: + # Print error message here and we will fail the build later on + logging.error ("Platform major verison does not match YAML files. Please update the %s descriptor file." % type) + err_count = err_count + 1 + + count = 0 + + for idx in range(len(minor_tree_list)): + saved_res = cached_root.find("Hash-v%x.%x" % (major_version, idx)) + calc_ret = leaf.find("Hash-v%x.%x" % (major_version, idx)) + if saved_res == None or saved_res.text != calc_ret.text: + count = count + 1 + if idx == 0: + logging.error ("Minor version 0 has changed, please consider bumping up major version") + logging.error ("%d minor version fields have changed, please update your report file" % idx) + err_count = err_count + 1 + + # Just to check if the cached hash file has extra entries compared to reality + for res in cached_root: + calc_ret = leaf.find(res.tag) + if calc_ret == None: + logging.error ("A tag from cached xml (%s) is not found" % res.tag) + err_count = err_count + 1 + + tree.remove (cached_root) + else: + logging.error ("%s report file not found, please add the autogen xml file to your %s_REPORT_FOLDER" % (type, type)) + err_count = err_count + 1 + + # Now that we have the PKL file, output the header files + params = ["GENHDR"] + params.append(conf_file) + params.append("PolicyDataStruct%s.h" % target) + + ret = RunPythonScript(cmd, " ".join(params), workingdir=final_dir) + if ret != 0: + return ret + + root.append (leaf) + + if tree != None and 0 != len(tree): + logging.error ("There is stale policy from cached xml %s, please remove them or use the newly created report." 
% (str([i.tag for i in tree]))) + err_count = err_count + len(tree) + + if err_count != 0: + UpdatePolicyHdr.indent(root) + hash_obj = hashlib.md5() + tree_xml = ET.tostring(root, encoding="utf-8", xml_declaration=True) + hash_obj.update(tree_xml) + xml_hash = hash_obj.hexdigest() + new_file = os.path.join (report_dir, "%s_REPORT_%s.xml" % (type, xml_hash)) + xml_file = open(new_file, 'wb') + xml_file.write(tree_xml) + xml_file.close() + logging.info ("New %s report xml was generated at %s, please replace %s with this new file." % (type, report_file, new_file)) + + return err_count diff --git a/PolicyServicePkg/Samples/PolicyDefinitions/GFX_policy_def.yaml b/PolicyServicePkg/Samples/PolicyDefinitions/GFX_policy_def.yaml index 2e7945fe7a..04b82d3c0d 100644 --- a/PolicyServicePkg/Samples/PolicyDefinitions/GFX_policy_def.yaml +++ b/PolicyServicePkg/Samples/PolicyDefinitions/GFX_policy_def.yaml @@ -1,43 +1,43 @@ -## @file -# -# Slim Bootloader CFGDATA Default File. -# -# Copyright (c) 2020, Intel Corporation. All rights reserved.
-# SPDX-License-Identifier: BSD-2-Clause-Patent -# -## - -# Template section for common structure definitions, template name has to end with `_TMPL` -template: - - !include PolicyServicePkg/CommonPolicy/Template_PolicyHeader.yaml - - # Template for minor version 0 of GFX structure, $(#) are replacable parameters through !expand syntax - GFX_POLICY_TMPL: > - - Power_State_Port_$(1) : - name : Power state of GFX port $(1) - length : 0x02 - value : $(2) - - # Template for minor version 1 of GFX structure, these should always be appended after - # all existed minor 0 structures, $(#) are replacable parameters through !expand syntax - GFX_POLICY_SUP1_TMPL: > - - Skip_Check_$(1) : - name : Flag to skip this controller or not - length : 0x02 - value : $(2) - minver : 0x01 - -configs: - # Policy structure metadata, will be used for policy headers and genereating unique macro definitions - - PolicyHeader: - - !expand { POLICY_HEADER_TMPL : [GFX, 'PDATAGFX', 0x01, _LENGTH_GFX_POLICY_DATA_] } - - # Main structure definitions for this policy - - GFX_POLICY_DATA : - # Structure filled with populated templates, minor version 0 first - - !expand { GFX_POLICY_TMPL : [ 0, 1] } - - !expand { GFX_POLICY_TMPL : [ 1, 1] } - - # Structure filled with populated templates, minor version 1 second - - !expand { GFX_POLICY_SUP1_TMPL : [ 0, 0] } - - !expand { GFX_POLICY_SUP1_TMPL : [ 1, 0] } +## @file +# +# Slim Bootloader CFGDATA Default File. +# +# Copyright (c) 2020, Intel Corporation. All rights reserved.
+# SPDX-License-Identifier: BSD-2-Clause-Patent +# +## + +# Template section for common structure definitions, template name has to end with `_TMPL` +template: + - !include PolicyServicePkg/CommonPolicy/Template_PolicyHeader.yaml + + # Template for minor version 0 of GFX structure, $(#) are replacable parameters through !expand syntax + GFX_POLICY_TMPL: > + - Power_State_Port_$(1) : + name : Power state of GFX port $(1) + length : 0x02 + value : $(2) + + # Template for minor version 1 of GFX structure, these should always be appended after + # all existed minor 0 structures, $(#) are replacable parameters through !expand syntax + GFX_POLICY_SUP1_TMPL: > + - Skip_Check_$(1) : + name : Flag to skip this controller or not + length : 0x02 + value : $(2) + minver : 0x01 + +configs: + # Policy structure metadata, will be used for policy headers and genereating unique macro definitions + - PolicyHeader: + - !expand { POLICY_HEADER_TMPL : [GFX, 'PDATAGFX', 0x01, _LENGTH_GFX_POLICY_DATA_] } + + # Main structure definitions for this policy + - GFX_POLICY_DATA : + # Structure filled with populated templates, minor version 0 first + - !expand { GFX_POLICY_TMPL : [ 0, 1] } + - !expand { GFX_POLICY_TMPL : [ 1, 1] } + + # Structure filled with populated templates, minor version 1 second + - !expand { GFX_POLICY_SUP1_TMPL : [ 0, 0] } + - !expand { GFX_POLICY_SUP1_TMPL : [ 1, 0] } diff --git a/PolicyServicePkg/Tools/GenCfgData.py b/PolicyServicePkg/Tools/GenCfgData.py index 620a69d097..c725e25092 100644 --- a/PolicyServicePkg/Tools/GenCfgData.py +++ b/PolicyServicePkg/Tools/GenCfgData.py @@ -1,2564 +1,2564 @@ -## @ GenCfgData.py -# -# Copyright (c) 2020, Intel Corporation. All rights reserved.
-# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: BSD-2-Clause-Patent -# -## - -import os -import sys -import re -import struct -import marshal -import pprint -import string -import operator as op -import ast -import binascii -from typing import Type, final -from unicodedata import category -import uuid -from datetime import date -from collections import OrderedDict - -from CommonUtility import * - -# Generated file copyright header -__copyright_tmp__ = """/** @file - - Platform Configuration %s File. - - Copyright (c) %4d, Intel Corporation. All rights reserved.
- Copyright (c) Microsoft Corporation. - SPDX-License-Identifier: BSD-2-Clause-Patent - - This file is automatically generated. Please do NOT modify !!! - -**/ -""" - -def get_copyright_header (file_type, allow_modify = False): - file_description = { - 'yaml': 'Boot Setting', - 'dlt' : 'Delta', - 'inc' : 'C Binary Blob', - 'h' : 'C Struct Header' - } - if file_type in ['yaml', 'dlt']: - comment_char = '#' - else: - comment_char = '' - lines = __copyright_tmp__.split('\n') - if allow_modify: - lines = [line for line in lines if 'Please do NOT modify' not in line] - copyright_hdr = '\n'.join('%s%s' % (comment_char, line) for line in lines)[:-1] + '\n' - return copyright_hdr % (file_description[file_type], date.today().year) - -def check_quote (text): - if (text[0] == "'" and text[-1] == "'") or (text[0] == '"' and text[-1] == '"'): - return True - return False - -def strip_quote (text): - new_text = text.strip() - if check_quote (new_text): - return new_text[1:-1] - return text - -def strip_delimiter (text, delim): - new_text = text.strip() - if new_text: - if new_text[0] == delim[0] and new_text[-1] == delim[-1]: - return new_text[1:-1] - return text - -def bytes_to_bracket_str (bytes): - return '{ %s }' % (', '.join('0x%02x' % i for i in bytes)) - -def array_str_to_value (val_str): - val_str = val_str.strip() - val_str = strip_delimiter (val_str, '{}') - val_str = strip_quote (val_str) - value = 0 - for each in val_str.split(',')[::-1]: - each = each.strip() - value = (value << 8) | int(each, 0) - return value - -def write_lines (lines, file): - fo = open(file, "w") - fo.write (''.join ([x[0] for x in lines])) - fo.close () - -def read_lines (file): - if not os.path.exists(file): - test_file = os.path.basename(file) - if os.path.exists(test_file): - file = test_file - fi = open (file, 'r') - lines = fi.readlines () - fi.close () - return lines - -def expand_file_value (path, value_str): - result = bytearray() - match = re.match("\{\s*FILE:(.+)\}", value_str) - if 
match: - file_list = match.group(1).split(',') - for file in file_list: - file = file.strip() - bin_path = os.path.join(path, file) - result.extend(bytearray(open(bin_path, 'rb').read())) - return result - -class ExpressionEval(ast.NodeVisitor): - operators = { - ast.Add: op.add, - ast.Sub: op.sub, - ast.Mult: op.mul, - ast.Div: op.floordiv, - ast.Mod: op.mod, - ast.Eq: op.eq, - ast.NotEq: op.ne, - ast.Gt: op.gt, - ast.Lt: op.lt, - ast.GtE: op.ge, - ast.LtE: op.le, - ast.BitXor: op.xor, - ast.BitAnd: op.and_, - ast.BitOr: op.or_, - ast.Invert: op.invert, - ast.USub: op.neg - } - - - def __init__(self): - self._debug = False - self._expression = '' - self._namespace = {} - self._get_variable = None - - def eval(self, expr, vars={}): - self._expression = expr - if type(vars) is dict: - self._namespace = vars - self._get_variable = None - else: - self._namespace = {} - self._get_variable = vars - node = ast.parse(self._expression, mode='eval') - result = self.visit(node.body) - if self._debug: - print ('EVAL [ %s ] = %s' % (expr, str(result))) - return result - - def visit_Name(self, node): - if self._get_variable is not None: - return self._get_variable(node.id) - else: - return self._namespace[node.id] - - def visit_Num(self, node): - return node.n - - def visit_NameConstant(self, node): - return node.value - - def visit_BoolOp(self, node): - result = False - if isinstance(node.op, ast.And): - for value in node.values: - result = self.visit(value) - if not result: - break - elif isinstance(node.op, ast.Or): - for value in node.values: - result = self.visit(value) - if result: - break - return True if result else False - - def visit_UnaryOp(self, node): - val = self.visit(node.operand) - return operators[type(node.op)](val) - - def visit_BinOp(self, node): - lhs = self.visit(node.left) - rhs = self.visit(node.right) - return ExpressionEval.operators[type(node.op)](lhs, rhs) - - def visit_Compare(self, node): - right = self.visit(node.left) - result = True - for 
operation, comp in zip(node.ops, node.comparators): - if not result: - break - left = right - right = self.visit(comp) - result = ExpressionEval.operators[type(operation)](left, right) - return result - - def visit_Call(self, node): - if node.func.id in ['ternary']: - condition = self.visit (node.args[0]) - val_true = self.visit (node.args[1]) - val_false = self.visit (node.args[2]) - return val_true if condition else val_false - elif node.func.id in ['offset', 'length']: - if self._get_variable is not None: - return self._get_variable(node.args[0].s, node.func.id) - else: - raise ValueError("Unsupported function: " + repr(node)) - - def generic_visit(self, node): - raise ValueError("malformed node or string: " + repr(node)) - - -class CFG_YAML(): - TEMPLATE = 'template' - CONFIGS = 'configs' - VARIABLE = 'variable' - - def __init__ (self, is_policy=False): - self.log_line = False - self.allow_template = False - self.cfg_tree = None - self.tmp_tree = None - self.var_dict = None - self.def_dict = {} - self.yaml_path = '' - self.lines = [] - self.full_lines = [] - self.index = 0 - self.is_policy = is_policy - self.re_expand = re.compile (r'(.+:\s+|\s*\-\s*)!expand\s+\{\s*(\w+_TMPL)\s*:\s*\[(.+)]\s*\}') - self.re_include = re.compile (r'(.+:\s+|\s*\-\s*)!include\s+(.+)') - - @staticmethod - def count_indent (line): - return next((i for i, c in enumerate(line) if not c.isspace()), len(line)) - - @staticmethod - def substitue_args (text, arg_dict): - for arg in arg_dict: - text = text.replace ('$' + arg, arg_dict[arg]) - return text - - @staticmethod - def dprint (*args): - pass - - def process_include (self, line, insert = True): - match = self.re_include.match (line) - if not match: - raise Exception ("Invalid !include format '%s' !" 
% line.strip()) - - prefix = match.group(1) - include = match.group(2) - if prefix.strip() == '-': - prefix = '' - adjust = 0 - else: - adjust = 2 - - include = strip_quote (include) - request = CFG_YAML.count_indent (line) + adjust - - if self.log_line: - # remove the include line itself - del self.full_lines[-1] - - inc_path = os.path.join (self.yaml_path, include) - if not os.path.exists(inc_path): - # try relative path to project root - try_path = os.path.join(os.path.dirname (os.path.realpath(__file__)), "../..", include) - if os.path.exists(try_path): - inc_path = try_path - else: - raise Exception ("ERROR: Cannot open file '%s'." % inc_path) - - lines = read_lines (inc_path) - - current = 0 - same_line = False - for idx, each in enumerate (lines): - start = each.lstrip() - if start == '' or start[0] == '#': - continue - - if start[0] == '>': - # append the content directly at the same line - same_line = True - - start = idx - current = CFG_YAML.count_indent (each) - break - - lines = lines[start+1:] if same_line else lines[start:] - leading = '' - if same_line: - request = len(prefix) - leading = '>' - - lines = [prefix + '%s\n' % leading] + [' ' * request + i[current:] for i in lines] - if insert: - self.lines = lines + self.lines - - return lines - - def process_expand (self, line): - match = self.re_expand.match(line) - if not match: - raise Exception ("Invalid !expand format '%s' !" % line.strip()) - lines = [] - prefix = match.group(1) - temp_name = match.group(2) - args = match.group(3) - - if prefix.strip() == '-': - indent = 0 - else: - indent = 2 - lines = self.process_expand_template (temp_name, prefix, args, indent) - self.lines = lines + self.lines - - - def process_expand_template (self, temp_name, prefix, args, indent = 2): - # expand text with arg substitution - if temp_name not in self.tmp_tree: - raise Exception ("Could not find template '%s' !" 
% temp_name) - parts = args.split(',') - parts = [i.strip() for i in parts] - num = len(parts) - arg_dict = dict(zip( ['(%d)' % (i + 1) for i in range(num)], parts)) - str_data = self.tmp_tree[temp_name] - text = DefTemplate(str_data).safe_substitute(self.def_dict) - text = CFG_YAML.substitue_args (text, arg_dict) - target = CFG_YAML.count_indent (prefix) + indent - current = CFG_YAML.count_indent (text) - padding = target * ' ' - if indent == 0: - leading = [] - else: - leading = [prefix + '\n'] - text = leading + [(padding + i + '\n')[current:] for i in text.splitlines()] - return text - - - def load_file (self, yaml_file): - self.index = 0 - self.lines = read_lines (yaml_file) - - - def peek_line (self): - if len(self.lines) == 0: - return None - else: - return self.lines[0] - - - def put_line (self, line): - self.lines.insert (0, line) - if self.log_line: - del self.full_lines[-1] - - - def get_line (self): - if len(self.lines) == 0: - return None - else: - line = self.lines.pop(0) - if self.log_line: - self.full_lines.append (line.rstrip()) - return line - - - def get_multiple_line (self, indent): - text = '' - newind = indent + 1 - while True: - line = self.peek_line () - if line is None: - break - sline = line.strip() - if sline != '': - newind = CFG_YAML.count_indent(line) - if newind <= indent: - break - self.get_line () - if sline != '': - text = text + line - return text - - - def traverse_cfg_tree (self, handler): - def _traverse_cfg_tree (root, level = 0): - # config structure - for key in root: - if type(root[key]) is OrderedDict: - level += 1 - handler (key, root[key], level) - _traverse_cfg_tree (root[key], level) - level -= 1 - _traverse_cfg_tree (self.cfg_tree) - - - def count (self): - def _count (name, cfgs, level): - num[0] += 1 - num = [0] - self.traverse_cfg_tree (_count) - return num[0] - - - def parse (self, parent_name = '', curr = None, level = 0): - child = None - last_indent = None - temp_chk = {} - - while True: - line = self.get_line 
() - if line is None: - break - - curr_line = line.strip() - if curr_line == '' or curr_line[0] == '#': - continue - - indent = CFG_YAML.count_indent(line) - if last_indent is None: - last_indent = indent - - if indent != last_indent: - # outside of current block, put the line back to queue - self.put_line (' ' * indent + curr_line) - - if curr_line.endswith (': >'): - # multiline marker - old_count = len(self.full_lines) - line = self.get_multiple_line (indent) - if self.log_line and not self.allow_template and '!include ' in line: - # expand include in template - new_lines = [] - lines = line.splitlines() - for idx, each in enumerate(lines): - if '!include ' in each: - new_line = ''.join(self.process_include (each, False)) - new_lines.append(new_line) - else: - new_lines.append(each) - self.full_lines = self.full_lines[:old_count] + new_lines - curr_line = curr_line + line - - if indent > last_indent: - # child nodes - if child is None: - raise Exception ('Unexpected format at line: %s' % (curr_line)) - - level += 1 - self.parse (key, child, level) - level -= 1 - - line = self.peek_line () - if line is not None: - curr_line = line.strip() - indent = CFG_YAML.count_indent(line) - if indent >= last_indent: - # consume the line - self.get_line () - else: - # end of file - indent = -1 - - if curr is None: - curr = OrderedDict() - - if indent < last_indent: - return curr - - marker1 = curr_line[0] - marker2 = curr_line[-1] - start = 1 if marker1 == '-' else 0 - pos = curr_line.find(': ') - if pos > 0: - child = None - key = curr_line[start:pos].strip() - if curr_line[pos + 2] == '>': - curr[key] = curr_line[pos + 3:] - else: - # XXXX: !include / !expand - if '!include ' in curr_line: - self.process_include (line) - elif '!expand ' in curr_line: - if self.allow_template and not self.log_line: - self.process_expand (line) - else: - value_str = curr_line[pos + 2:].strip() - if key == "IdTag" or key == "ArrayIdTag": - # Insert the headers corresponds to this ID tag from 
here, most contents are hardcoded for now - cfg_hdr = OrderedDict() - cfg_hdr['length'] = '0x04' - cfg_hdr['value'] = '{0x01:2b, (_LENGTH_%s_/4):10b, %d:4b, 0:4b, %s:12b}' % (parent_name, 0 if key == "IdTag" else 1, value_str) - curr['CfgHeader'] = cfg_hdr - - cnd_val = OrderedDict() - cnd_val['length'] = '0x04' - cnd_val['value'] = '0x00000000' - curr['CondValue'] = cnd_val - else: - curr[key] = value_str - if self.log_line and value_str[0] == '{': - # expand {FILE: xxxx} format in the log line - if value_str[1:].rstrip().startswith('FILE:'): - value_bytes = expand_file_value (self.yaml_path, value_str) - value_str = bytes_to_bracket_str (value_bytes) - self.full_lines[-1] = line[:indent] + curr_line[:pos + 2] + value_str - - elif marker2 == ':': - child = OrderedDict() - key = curr_line[start:-1].strip() - if key == '$ACTION': - # special virtual nodes, rename to ensure unique key - key = '$ACTION_%04X' % self.index - self.index += 1 - if key in curr: - if key not in temp_chk: - # check for duplicated keys at same level - temp_chk[key] = 1 - else: - raise Exception ("Duplicated item '%s:%s' found !" 
% (parent_name, key)) - - curr[key] = child - if self.var_dict is None and key == CFG_YAML.VARIABLE: - self.var_dict = child - if self.tmp_tree is None and key == CFG_YAML.TEMPLATE: - self.tmp_tree = child - if self.var_dict: - for each in self.var_dict: - txt = self.var_dict[each] - if type(txt) is str: - self.def_dict['(%s)' % each] = txt - if self.tmp_tree and key == CFG_YAML.CONFIGS: - if not self.is_policy: - # apply template for the main configs - self.allow_template = True - child['Signature'] = OrderedDict() - child['Signature']['length'] = '0x04' - child['Signature']['value'] = "{'CFGD'}" - - child['HeaderLength'] = OrderedDict() - child['HeaderLength']['length'] = '0x01' - child['HeaderLength']['value'] = '0x10' - - child['Reserved'] = OrderedDict() - child['Reserved']['length'] = '0x03' - child['Reserved']['value'] = '{0,0,0}' - - child['UsedLength'] = OrderedDict() - child['UsedLength']['length'] = '0x04' - child['UsedLength']['value'] = '_LENGTH_' - - # This will be rounded up to 4KB aligned - child['TotalLength'] = OrderedDict() - child['TotalLength']['length'] = '0x04' - child['TotalLength']['value'] = '(_LENGTH_/0x1000 + 1)*0x1000' - else: - child = None - # - !include cfg_opt.yaml - if '!include ' in curr_line: - self.process_include (line) - - return curr - - - def load_yaml (self, opt_file): - self.var_dict = None - self.yaml_path = os.path.dirname (opt_file) - self.load_file (opt_file) - yaml_tree = self.parse () - self.tmp_tree = yaml_tree[CFG_YAML.TEMPLATE] - self.cfg_tree = yaml_tree[CFG_YAML.CONFIGS] - return self.cfg_tree - - - def expand_yaml (self, opt_file): - self.log_line = True - self.load_yaml (opt_file) - self.log_line = False - text = '\n'.join (self.full_lines) - self.full_lines = [] - return text - - -class DefTemplate(string.Template): - idpattern = '\([_A-Z][_A-Z0-9]*\)|[_A-Z][_A-Z0-9]*' - - -class CGenCfgData: - STRUCT = '$STRUCT' - bits_width = {'b':1, 'B':8, 'W':16, 'D':32, 'Q':64} - builtin_option = {'$EN_DIS' : [('0', 
'Disable'), ('1', 'Enable')]} - exclude_struct = ['GPIO_GPP_*', 'GPIO_CFG_DATA', 'GpioConfPad*', 'GpioPinConfig', - 'BOOT_OPTION*', 'PLATFORMID_CFG_DATA', '\w+_Half[01]'] - include_tag = ['GPIO_CFG_DATA'] - keyword_set = set(['name', 'type', 'option', 'help', 'length', 'value', 'order', 'struct', 'condition', 'minver']) - - def __init__(self): - self.initialize () - - - def initialize (self): - self._cfg_tree = {} - self._tmp_tree = {} - self._cfg_list = [] - self._cfg_page = {'root': {'title': '', 'child': []}} - self._cur_page = '' - self._var_dict = {} - self._def_dict = {} - self._yaml_path = '' - - - @staticmethod - def deep_convert_dict (layer): - # convert OrderedDict to list + dict - new_list = layer - if isinstance(layer, OrderedDict): - new_list = list (layer.items()) - for idx, pair in enumerate (new_list): - new_node = CGenCfgData.deep_convert_dict (pair[1]) - new_list[idx] = dict({pair[0] : new_node}) - return new_list - - - @staticmethod - def deep_convert_list (layer): - if isinstance(layer, list): - od = OrderedDict({}) - for each in layer: - if isinstance(each, dict): - key = next(iter(each)) - od[key] = CGenCfgData.deep_convert_list(each[key]) - return od - else: - return layer - - - @staticmethod - def expand_include_files (file_path, cur_dir = ''): - if cur_dir == '': - cur_dir = os.path.dirname(file_path) - file_path = os.path.basename(file_path) - - input_file_path = os.path.join(cur_dir, file_path) - file = open(input_file_path, "r") - lines = file.readlines() - file.close() - - new_lines = [] - for line_num, line in enumerate(lines): - match = re.match("^!include\s*(.+)?$", line.strip()) - if match: - inc_path = match.group(1) - tmp_path = os.path.join(cur_dir, inc_path) - org_path = tmp_path - if not os.path.exists(tmp_path): - cur_dir = os.path.join(os.path.dirname (os.path.realpath(__file__)), "..", "..") - tmp_path = os.path.join(cur_dir, inc_path) - if not os.path.exists(tmp_path): - raise Exception ("ERROR: Cannot open include file 
'%s'." % org_path) - else: - new_lines.append (('# Included from file: %s\n' % inc_path, tmp_path, 0)) - new_lines.append (('# %s\n' % ('=' * 80), tmp_path, 0)) - new_lines.extend (CGenCfgData.expand_include_files (inc_path, cur_dir)) - else: - new_lines.append ((line, input_file_path, line_num)) - - return new_lines - - - @staticmethod - def format_struct_field_name (input, count = 0): - name = '' - cap = True - if '_' in input: - input = input.lower() - for each in input: - if each == '_': - cap = True - continue - elif cap: - each = each.upper() - cap = False - name = name + each - - if count > 1: - name = '%s[%d]' % (name, count) - - return name - - def get_last_error (self): - return '' - - - def get_variable (self, var, attr = 'value'): - if var in self._var_dict: - var = self._var_dict[var] - return var - - item = self.locate_cfg_item (var, False) - if item is None: - raise ValueError ("Cannot find variable '%s' !" % var) - - if item: - if 'indx' in item: - item = self.get_item_by_index (item['indx']) - if attr == 'offset': - var = item['offset'] - elif attr == 'length': - var = item['length'] - elif attr == 'value': - var = self.get_cfg_item_value (item) - else: - raise ValueError ("Unsupported variable attribute '%s' !" % attr) - return var - - - def eval (self, expr): - def _handler (pattern): - if pattern.group(1): - target = 1 - else: - target = 2 - result = self.get_variable(pattern.group(target)) - if result is None: - raise ValueError('Unknown variable $(%s) !' 
% pattern.group(target)) - return hex(result) - - expr_eval = ExpressionEval () - if '$' in expr: - # replace known variable first - expr = re.sub(r'\$\(([_a-zA-Z][\w\.]*)\)|\$([_a-zA-Z][\w\.]*)', _handler, expr) - return expr_eval.eval(expr, self.get_variable) - - - def get_cfg_list (self, page_id = None): - if page_id is None: - # return full list - return self._cfg_list - else: - # build a new list for items under a page ID - cfgs = [i for i in self._cfg_list if i['cname'] and (i['page'] == page_id)] - return cfgs - - - def get_cfg_page (self): - return self._cfg_page - - def get_cfg_item_length (self, item): - return item['length'] - - def get_cfg_item_value (self, item, array = False): - value_str = item['value'] - length = item['length'] - return self.get_value (value_str, length, array) - - - def format_value_to_str (self, value, bit_length, old_value = ''): - # value is always int - length = (bit_length + 7) // 8 - fmt = '' - if old_value.startswith ('0x'): - fmt = '0x' - elif old_value and (old_value[0] in ['"', "'", '{']): - fmt = old_value[0] - else: - fmt = '' - - bvalue = value_to_bytearray (value, length) - if fmt in ['"', "'"]: - svalue = bvalue.rstrip(b'\x00').decode() - value_str = fmt + svalue + fmt - elif fmt == "{": - value_str = '{ ' + ', '.join(['0x%02x' % i for i in bvalue]) + ' }' - elif fmt == '0x': - hex_len = length * 2 - if len(old_value) == hex_len + 2: - fstr = '0x%%0%dX' % hex_len - else: - fstr = '0x%X' - value_str = fstr % value - else: - if length <= 2: - value_str = '%d' % value - elif length <= 8: - value_str = '0x%x' % value - else: - value_str = '{ ' + ', '.join(['0x%02x' % i for i in bvalue]) + ' }' - return value_str - - - def reformat_value_str (self, value_str, bit_length, old_value = None): - value = self.parse_value (value_str, bit_length, False) - if old_value is None: - old_value = value_str - new_value = self.format_value_to_str (value, bit_length, old_value) - return new_value - - - def get_value (self, value_str, 
bit_length, array = True): - value_str = value_str.strip() - if len(value_str) == 0: - return 0 - if value_str[0] == "'" and value_str[-1] == "'" or \ - value_str[0] == '"' and value_str[-1] == '"': - value_str = value_str[1:-1] - bvalue = bytearray (value_str.encode()) - if len(bvalue) == 0: - bvalue = bytearray(b'\x00') - if array: - return bvalue - else: - return bytes_to_value (bvalue) - else: - if value_str[0] in '{' : - value_str = value_str[1:-1].strip() - value = 0 - for each in value_str.split(',')[::-1]: - each = each.strip() - value = (value << 8) | int(each, 0) - if array: - length = (bit_length + 7) // 8 - return value_to_bytearray (value, length) - else: - return value - - - def parse_value (self, value_str, bit_length, array = True): - length = (bit_length + 7) // 8 - if check_quote(value_str): - value_str = bytes_to_bracket_str(value_str[1:-1].encode()) - elif (',' in value_str) and (value_str[0] != '{'): - value_str = '{ %s }' % value_str - if value_str[0] == '{': - result = expand_file_value (self._yaml_path, value_str) - if len(result) == 0 : - bin_list = value_str[1:-1].split(',') - value = 0 - bit_len = 0 - unit_len = 1 - for idx, element in enumerate(bin_list): - each = element.strip() - if len(each) == 0: - continue - - in_bit_field = False - if each[0] in "'" + '"': - each_value = bytearray(each[1:-1], 'utf-8') - elif ':' in each: - match = re.match("^(.+):(\d+)([b|B|W|D|Q])$", each) - if match is None: - raise SystemExit("Exception: Invald value list format '%s' !" % each) - if match.group(1) == '0' and match.group(2) == '0': - unit_len = CGenCfgData.bits_width[match.group(3)] // 8 - cur_bit_len = int(match.group(2)) * CGenCfgData.bits_width[match.group(3)] - value += ((self.eval(match.group(1)) & (1< 0: - if bit_len % 8 != 0: - raise SystemExit("Exception: Invalid bit field alignment '%s' !" 
% value_str) - result.extend(value_to_bytes(value, bit_len // 8)) - value = 0 - bit_len = 0 - - result.extend(each_value) - - elif check_quote (value_str): - result = bytearray(value_str[1:-1], 'utf-8') # Excluding quotes - else: - result = value_to_bytearray (self.eval(value_str), length) - - if len(result) < length: - result.extend(b'\x00' * (length - len(result))) - elif len(result) > length: - raise SystemExit ("Exception: Value '%s' is too big to fit into %d bytes !" % (value_str, length)) - - if array: - return result - else: - return bytes_to_value(result) - - return result - - - def get_cfg_item_options (self, item): - tmp_list = [] - if item['type'] == "Combo": - if item['option'] in CGenCfgData.builtin_option: - for op_val, op_str in CGenCfgData.builtin_option[item['option']]: - tmp_list.append((op_val, op_str)) - else: - opt_list = item['option'].split(',') - for option in opt_list: - option = option.strip() - try: - (op_val, op_str) = option.split(':') - except: - raise SystemExit ("Exception: Invalid option format '%s' for item '%s' !" 
% (option, item['cname'])) - tmp_list.append((op_val, op_str)) - return tmp_list - - - def get_page_title(self, page_id, top = None): - if top is None: - top = self.get_cfg_page()['root'] - for node in top['child']: - page_key = next(iter(node)) - if page_id == page_key: - return node[page_key]['title'] - else: - result = self.get_page_title (page_id, node[page_key]) - if result is not None: - return result - return None - - - def print_pages(self, top=None, level=0): - if top is None: - top = self.get_cfg_page()['root'] - for node in top['child']: - page_id = next(iter(node)) - print('%s%s: %s' % (' ' * level, page_id, node[page_id]['title'])) - level += 1 - self.print_pages(node[page_id], level) - level -= 1 - - - def get_item_by_index (self, index): - return self._cfg_list[index] - - - def get_item_by_path (self, path): - node = self.locate_cfg_item (path) - if node: - return self.get_item_by_index (node['indx']) - else: - return None - - def locate_cfg_path (self, item): - def _locate_cfg_path (root, level = 0): - # config structure - if item is root: - return path - for key in root: - if type(root[key]) is OrderedDict: - level += 1 - path.append(key) - ret = _locate_cfg_path (root[key], level) - if ret: - return ret - path.pop() - return None - path = [] - return _locate_cfg_path (self._cfg_tree) - - - def locate_cfg_item (self, path, allow_exp = True): - def _locate_cfg_item (root, path, level = 0): - if len(path) == level: - return root - next_root = root.get(path[level], None) - if next_root is None: - if allow_exp: - raise Exception ('Not a valid CFG config option path: %s' % '.'.join(path[:level+1])) - else: - return None - return _locate_cfg_item (next_root, path, level + 1) - - path_nodes = path.split('.') - return _locate_cfg_item (self._cfg_tree, path_nodes) - - - def traverse_cfg_tree (self, handler, top = None): - def _traverse_cfg_tree (root, level = 0): - # config structure - for key in root: - if type(root[key]) is OrderedDict: - level += 1 - 
handler (key, root[key], level) - _traverse_cfg_tree (root[key], level) - level -= 1 - - if top is None: - top = self._cfg_tree - _traverse_cfg_tree (top) - - - def print_cfgs(self, root = None, short = True, print_level = 256): - def _print_cfgs (name, cfgs, level): - - if 'indx' in cfgs: - act_cfg = self.get_item_by_index (cfgs['indx']) - else: - offset = 0 - length = 0 - value = '' - path='' - if CGenCfgData.STRUCT in cfgs: - cfg = cfgs[CGenCfgData.STRUCT] - offset = int(cfg['offset']) - length = int(cfg['length']) - if 'value' in cfg: - value = cfg['value'] - if length == 0: - return - act_cfg = dict({'value' : value, 'offset' : offset, 'length' : length}) - value = act_cfg['value'] - bit_len = act_cfg['length'] - offset = (act_cfg['offset'] + 7) // 8 - if value != '': - try: - value = self.reformat_value_str (act_cfg['value'], act_cfg['length']) - except: - value = act_cfg['value'] - length = bit_len // 8 - bit_len = '(%db)' % bit_len if bit_len % 8 else '' * 4 - if level <= print_level: - if short and len(value) > 40: - value = '%s ... 
%s' % (value[:20] , value[-20:]) - print('%04X:%04X%-6s %s%s : %s' % (offset, length, bit_len, ' ' * level, name, value)) - - self.traverse_cfg_tree (_print_cfgs) - - - def get_cfg_tree(self): - return self._cfg_tree - - - def set_cfg_tree(self, cfg_tree): - self._cfg_tree = cfg_tree - - - def merge_cfg_tree(self, root, other_root): - ret = OrderedDict () - prev_key = None - for other_key in other_root: - if other_key not in root: - ret[other_key] = other_root[other_key] - else: - # this is a good time to check to see if we miss anything from previous root elements - found_last = False - for key in root: - if key == prev_key: - found_last = True - continue - if prev_key == None: - found_last = True - if found_last: - ret[key] = root[key] - if key == other_key: - prev_key = other_key - break - - if type(root[other_key]) is OrderedDict and type(other_root[other_key]) is OrderedDict: - # if they are both non-leaf, great, process recursively - ret[other_key] = self.merge_cfg_tree (root[other_key], other_root[other_key]) - elif type(root[other_key]) is OrderedDict or type(other_root[other_key]) is OrderedDict: - raise Exception ("Two yamls files have hierachy mismatch!!!") - else: - # this is duplicate value in from both roots, take original root as principal - ret[other_key] = root[other_key] - - # See if there is any leftovers - found_last = False - for key in root: - if key == prev_key: - found_last = True - continue - if prev_key == None: - found_last = True - if found_last: - ret[key] = root[key] - if key == other_key: - prev_key = other_key - break - return ret - - - def build_var_dict (self): - def _build_var_dict (name, cfgs, level): - if level <= 2: - if CGenCfgData.STRUCT in cfgs: - struct_info = cfgs[CGenCfgData.STRUCT] - self._var_dict['_LENGTH_%s_' % name] = struct_info['length'] // 8 - self._var_dict['_OFFSET_%s_' % name] = struct_info['offset'] // 8 - - self._var_dict = {} - self.traverse_cfg_tree (_build_var_dict) - self._var_dict['_LENGTH_'] = 
self._cfg_tree[CGenCfgData.STRUCT]['length'] // 8 - return 0 - - - def add_cfg_page(self, child, parent, title=''): - def _add_cfg_page(cfg_page, child, parent): - key = next(iter(cfg_page)) - if parent == key: - cfg_page[key]['child'].append({child: {'title': title, - 'child': []}}) - return True - else: - result = False - for each in cfg_page[key]['child']: - if _add_cfg_page(each, child, parent): - result = True - break - return result - - return _add_cfg_page(self._cfg_page, child, parent) - - - def set_cur_page(self, page_str): - if not page_str: - return - - if ',' in page_str: - page_list = page_str.split(',') - else: - page_list = [page_str] - for page_str in page_list: - parts = page_str.split(':') - if len(parts) in [1, 3]: - page = parts[0].strip() - if len(parts) == 3: - # it is a new page definition, add it into tree - parent = parts[1] if parts[1] else 'root' - parent = parent.strip() - if parts[2][0] == '"' and parts[2][-1] == '"': - parts[2] = parts[2][1:-1] - - if not self.add_cfg_page(page, parent, parts[2]): - raise SystemExit("Error: Cannot find parent page '%s'!" % parent) - else: - raise SystemExit("Error: Invalid page format '%s' !" 
% page_str) - self._cur_page = page - - - def extend_variable (self, line): - # replace all variables - if line == '': - return line - loop = 2 - while loop > 0: - line_after = DefTemplate(line).safe_substitute(self._def_dict) - if line == line_after: - break - loop -= 1 - line = line_after - return line_after - - def reformat_number_per_type (self, itype, value): - if check_quote(value) or value.startswith('{'): - return value - parts = itype.split(',') - if len(parts) > 3 and parts[0] == 'EditNum': - num_fmt = parts[1].strip() - else: - num_fmt = '' - if num_fmt == 'HEX' and not value.startswith('0x'): - value = '0x%X' % int(value, 10) - elif num_fmt == 'DEC' and value.startswith('0x'): - value = '%d' % int(value, 16) - return value - - def add_cfg_item(self, name, item, offset, path): - - self.set_cur_page (item.get('page', '')) - - if name[0] == '$': - # skip all virtual node - return 0 - - - if not set(item).issubset(CGenCfgData.keyword_set): - for each in list(item): - if each not in CGenCfgData.keyword_set: - raise Exception ("Invalid attribute '%s' for '%s'!" % (each, '.'.join(path))) - - length = item.get('length', 0) - if type(length) is str: - match = re.match("^(\d+)([b|B|W|D|Q])([B|W|D|Q]?)\s*$", length) - if match: - unit_len = CGenCfgData.bits_width[match.group(2)] - length = int(match.group(1), 10) * unit_len - else: - try: - length = int(length, 0) * 8 - except: - raise Exception ("Invalid length field '%s' for '%s' !" % (length, '.'.join(path))) - - if offset % 8 > 0: - raise Exception ("Invalid alignment for field '%s' for '%s' !" % (name, '.'.join(path))) - else: - # define is length in bytes - length = length * 8 - - if not name.isidentifier(): - raise Exception ("Invalid config name '%s' for '%s' !" 
% (name, '.'.join(path))) - - - itype = str(item.get('type', 'Reserved')) - value = str(item.get('value', '')) - if value: - if not (check_quote(value) or value.startswith('{')): - if ',' in value: - value = '{ %s }' % value - else: - value = self.reformat_number_per_type (itype, value) - - help = str(item.get('help', '')) - if '\n' in help: - help = ' '.join ([i.strip() for i in help.splitlines()]) - - option = str(item.get('option', '')) - if '\n' in option: - option = ' '.join ([i.strip() for i in option.splitlines()]) - - # extend variables for value and condition - condition = str(item.get('condition', '')) - if condition: - condition = self.extend_variable (condition) - value = self.extend_variable (value) - - order = str(item.get('order', '')) - if order: - if '.' in order: - (major, minor) = order.split('.') - order = int (major, 16) - else: - order = int (order, 16) - else: - order = offset - - cfg_item = dict() - cfg_item['length'] = length - cfg_item['offset'] = offset - cfg_item['value'] = value - cfg_item['type'] = itype - cfg_item['cname'] = str(name) - cfg_item['name'] = str(item.get('name', '')) - cfg_item['help'] = help - cfg_item['option'] = option - cfg_item['page'] = self._cur_page - cfg_item['order'] = order - cfg_item['path'] = '.'.join(path) - cfg_item['condition'] = condition - if 'struct' in item: - cfg_item['struct'] = item['struct'] - self._cfg_list.append(cfg_item) - - item['indx'] = len(self._cfg_list) - 1 - - # remove used info for reducing pkl size - item.pop('option', None) - item.pop('condition', None) - item.pop('help', None) - item.pop('name', None) - item.pop('page', None) - - return length - - - def build_cfg_list (self, cfg_name ='', top = None, path = [], info = {'offset': 0}): - if top is None: - top = self._cfg_tree - - start = info['offset'] - is_leaf = True - for key in top: - path.append(key) - if type(top[key]) is OrderedDict: - is_leaf = False - self.build_cfg_list(key, top[key], path, info) - path.pop() - - if is_leaf: 
- length = self.add_cfg_item(cfg_name, top, info['offset'], path) - info['offset'] += length - elif cfg_name == '' or (cfg_name and cfg_name[0] != '$'): - # check first element for struct - first = next(iter(top)) - struct_str = CGenCfgData.STRUCT - if first != struct_str: - struct_node = OrderedDict({}) - top[struct_str] = struct_node - top.move_to_end (struct_str, False) - else: - struct_node = top[struct_str] - struct_node['offset'] = start - if len(path) == 1: - # Round up first layer tree to be 4 Byte aligned - info['offset'] = (info['offset'] + 31) & (~31) - struct_node['length'] = (info['offset'] - start + 31) & (~31) - else: - struct_node['length'] = info['offset'] - start - if struct_node['length'] % 8 != 0: - raise SystemExit("Error: Bits length not aligned for %s !" % str(path)) - - - def get_field_value (self, top = None): - def _get_field_value (name, cfgs, level): - if 'indx' in cfgs: - act_cfg = self.get_item_by_index (cfgs['indx']) - if act_cfg['length'] == 0: - return - value = self.get_value (act_cfg['value'], act_cfg['length'], False) - set_bits_to_bytes (result, act_cfg['offset'] - struct_info['offset'], act_cfg['length'], value) - - if top is None: - top = self._cfg_tree - struct_info = top[CGenCfgData.STRUCT] - result = bytearray ((struct_info['length'] + 7) // 8) - self.traverse_cfg_tree (_get_field_value, top) - return result - - - def set_field_value (self, top, value_bytes, force = False): - def _set_field_value (name, cfgs, level): - if 'indx' not in cfgs: - return - act_cfg = self.get_item_by_index (cfgs['indx']) - if force or act_cfg['value'] == '': - value = get_bits_from_bytes (full_bytes, act_cfg['offset'] - struct_info['offset'], act_cfg['length']) - act_val = act_cfg['value'] - if act_val == '': - act_val = '%d' % value - act_val = self.reformat_number_per_type (act_cfg['type'], act_val) - act_cfg['value'] = self.format_value_to_str (value, act_cfg['length'], act_val) - - if 'indx' in top: - # it is config option - value = 
bytes_to_value (value_bytes) - act_cfg = self.get_item_by_index (top['indx']) - act_cfg['value'] = self.format_value_to_str (value, act_cfg['length'], act_cfg['value']) - else: - # it is structure - struct_info = top[CGenCfgData.STRUCT] - length = struct_info['length'] // 8 - full_bytes = bytearray(value_bytes[:length]) - if len(full_bytes) < length: - full_bytes.extend(bytearray(length - len(value_bytes))) - self.traverse_cfg_tree (_set_field_value, top) - - - def update_def_value (self): - def _update_def_value (name, cfgs, level): - if 'indx' in cfgs: - act_cfg = self.get_item_by_index (cfgs['indx']) - if act_cfg['value'] != '' and act_cfg['length'] > 0: - try: - act_cfg['value'] = self.reformat_value_str (act_cfg['value'], act_cfg['length']) - except: - raise Exception ("Invalid value expression '%s' for '%s' !" % (act_cfg['value'], act_cfg['path'])) - else: - if CGenCfgData.STRUCT in cfgs and 'value' in cfgs[CGenCfgData.STRUCT]: - curr = cfgs[CGenCfgData.STRUCT] - value_bytes = value_to_bytearray (self.eval(curr['value']), (curr['length'] + 7) // 8) - self.set_field_value (cfgs, value_bytes) - - self.traverse_cfg_tree (_update_def_value, self._cfg_tree) - - - def evaluate_condition (self, item): - expr = item['condition'] - result = self.parse_value (expr, 1, False) - return result - - - def load_default_from_bin (self, bin_data): - self.set_field_value(self._cfg_tree, bin_data, True) - - - def generate_binary_array (self): - return self.get_field_value() - - def generate_binary (self, bin_file_name): - bin_file = open(bin_file_name, "wb") - bin_file.write (self.generate_binary_array ()) - bin_file.close() - return 0 - - def write_delta_file (self, out_file, platform_id, out_lines): - dlt_fd = open (out_file, "w") - dlt_fd.write ("%s\n" % get_copyright_header('dlt', True)) - dlt_fd.write ('#\n') - dlt_fd.write ('# Delta configuration values for platform ID 0x%04X\n' % platform_id) - dlt_fd.write ('#\n\n') - for line in out_lines: - dlt_fd.write ('%s\n' % line) 
- dlt_fd.close() - - - def override_default_value(self, dlt_file): - error = 0 - dlt_lines = CGenCfgData.expand_include_files(dlt_file) - - platform_id = None - for line, file_path, line_num in dlt_lines: - line = line.strip() - if not line or line.startswith('#'): - continue - match = re.match("\s*([\w\.]+)\s*\|\s*(.+)", line) - if not match: - raise Exception("Unrecognized line '%s' (File:'%s' Line:%d) !" % - (line, file_path, line_num + 1)) - - path = match.group(1) - value_str = match.group(2) - top = self.locate_cfg_item (path) - if not top: - raise Exception( - "Invalid configuration '%s' (File:'%s' Line:%d) !" % - (path, file_path, line_num + 1)) - - if 'indx' in top: - act_cfg = self.get_item_by_index (top['indx']) - bit_len = act_cfg['length'] - else: - struct_info = top[CGenCfgData.STRUCT] - bit_len = struct_info['length'] - - value_bytes = self.parse_value (value_str, bit_len) - self.set_field_value (top, value_bytes, True) - - if path == 'PLATFORMID_CFG_DATA.PlatformId': - platform_id = value_str - - if platform_id is None: - platform_id = 0 - print("PLATFORMID_CFG_DATA.PlatformId is missing in file '%s' !" 
% (dlt_file)) - - return error - - - def generate_delta_file_from_bin (self, delta_file, old_data, new_data, full=False): - self.load_default_from_bin (new_data) - lines = [] - tag_name = '' - level = 0 - platform_id = None - def_platform_id = 0 - - for item in self._cfg_list: - old_val = get_bits_from_bytes (old_data, item['offset'], item['length']) - new_val = get_bits_from_bytes (new_data, item['offset'], item['length']) - - full_name = item['path'] - if 'PLATFORMID_CFG_DATA.PlatformId' == full_name: - def_platform_id = old_val - platform_id = new_val - elif item['type'] != 'Reserved' and ((new_val != old_val) or full): - val_str = self.reformat_value_str (item['value'], item['length']) - text = '%-40s | %s' % (full_name, val_str) - lines.append(text) - - if def_platform_id == platform_id: - platform_id = def_platform_id - - lines.insert(0, '%-40s | %s\n\n' % - ('PLATFORMID_CFG_DATA.PlatformId', '0x%04X' % platform_id)) - - if platform_id is None: - print ("Platform ID is not set and will be configured to 0") - platform_id = 0 - - self.write_delta_file (delta_file, platform_id, lines) - return 0 - - - def generate_delta_svd_from_bin (self, old_data, new_data): - self.load_default_from_bin (new_data) - lines = [] - tag_name = '' - level = 0 - platform_id = None - def_platform_id = 0 - items = [] - - for item in self._cfg_list: - old_val = get_bits_from_bytes (old_data, item['offset'], item['length']) - new_val = get_bits_from_bytes (new_data, item['offset'], item['length']) - - full_name = item['path'] - if 'PLATFORMID_CFG_DATA.PlatformId' == full_name: - def_platform_id = old_val - platform_id = new_val - elif item['type'] != 'Reserved' and (new_val != old_val): - val_str = self.reformat_value_str (item['value'], item['length']) - text = '%-40s | %s' % (full_name, val_str) - item = self.locate_cfg_item(item['path']) - if item is None: - raise Exception ("Failed to locate item from path: %s" % item['path']) - items.append(item) - - execs = [] - # The idea is that 
the 1st level tag content will be regenerated if changed - for item in items: - exec = self.locate_exec_from_item (item) - if exec == None: - raise Exception ("Failed to find the immediate executive tree for an item") - if exec not in execs: - execs.append (exec) - - bytes_array = [] - for exec in execs: - bytes = self.get_field_value (exec) - offset = 0 - offset += int(exec['CfgHeader']['length'], 0) - offset += int(exec['CondValue']['length'], 0) - bytes_array.append (bytes[offset:]) - - # self.write_delta_file (delta_file, platform_id, lines) - return (execs, bytes_array) - - def locate_exec_from_item (self, item): - - def _locate_exec_from_item (name, cfgs, level): - if level == 1: - exec[0] = cfgs - elif cfgs == item: - exec[1] = exec[0] - - exec = [None, None] - self.traverse_cfg_tree (_locate_exec_from_item, self._cfg_tree) - return exec[1] - - def locate_exec_from_tag (self, tag): - - def _locate_exec_from_tag (name, cfgs, level): - if level == 1: - exec[0] = cfgs - if CGenCfgData.STRUCT in cfgs: - cfghdr = self.get_item_by_index (cfgs['CfgHeader']['indx']) - tag_val = array_str_to_value(cfghdr['value']) >> 20 - if tag_val == tag: - exec[1] = exec[0] - - exec = [None, None] - self.traverse_cfg_tree (_locate_exec_from_tag, self._cfg_tree) - return exec[1] - - def generate_delta_file(self, delta_file, bin_file, bin_file2, full=False): - fd = open (bin_file, 'rb') - new_data = bytearray(fd.read()) - fd.close() - - if bin_file2 == '': - old_data = self.generate_binary_array() - else: - old_data = new_data - fd = open (bin_file2, 'rb') - new_data = bytearray(fd.read()) - fd.close() - - return self.generate_delta_file_from_bin (delta_file, old_data, new_data, full) - - - def prepare_marshal (self, is_save): - if is_save: - # Ordered dict is not marshallable, convert to list - self._cfg_tree = CGenCfgData.deep_convert_dict (self._cfg_tree) - else: - # Revert it back - self._cfg_tree = CGenCfgData.deep_convert_list (self._cfg_tree) - - def generate_yml_file (self, 
in_file, out_file): - cfg_yaml = CFG_YAML() - text = cfg_yaml.expand_yaml (in_file) - yml_fd = open(out_file, "w") - yml_fd.write (text) - yml_fd.close () - return 0 - - - def write_cfg_header_file (self, hdr_file_name, tag_mode, tag_dict, struct_list): - lines = [] - lines.append ('\n\n') - tag_list = sorted(list(tag_dict.items()), key=lambda x: x[1]) - for tagname, tagval in tag_list: - if (tag_mode == 0 and tagval >= 0x100) or (tag_mode == 1 and tagval < 0x100): - continue - lines.append ('#define %-30s 0x%03X\n' % ('CDATA_%s_TAG' % tagname[:-9], tagval)) - lines.append ('\n\n') - - name_dict = {} - new_dict = {} - for each in struct_list: - if (tag_mode == 0 and each['tag'] >= 0x100) or (tag_mode == 1 and each['tag'] < 0x100): - continue - new_dict[each['name']] = (each['alias'], each['count']) - if each['alias'] not in name_dict: - name_dict[each['alias']] = 1 - lines.extend(self.create_struct (each['alias'], each['node'], new_dict)) - - - self.write_header_file (lines, hdr_file_name) - - - def findMaxMinver (self, tree): - - if type(tree) is not OrderedDict: - raise Exception ("Incorrect tree type %s!!!" % type(tree)) - - # In-order tree traversal to make sure all minor versions are non-descending - try: - ver = int(tree["minver"], 0) - except: - ver = 0 - - parent_minver = ver - - max_minver = parent_minver - for value in tree: - if type(tree[value]) is OrderedDict: - temp_ver = self.findMaxMinver (tree[value]) - if temp_ver >= max_minver: - max_minver = temp_ver - else: - raise Exception ("Higher minor version detected %d between older fields at %s. 
New minor version fields should only be appended!!!\ - Consider append new fields, or remove the minor version and bump major version" % (temp_ver, max_minver, value)) - - return max_minver - - - def write_policy_header_file (self, hdr_file_name, tag_mode, struct_list): - lines = [] - max_minver = self.findMaxMinver(self._cfg_tree) - category = '' - - # Step 1: Macro definitions - for struct in struct_list: - if struct["name"] == "PolicyHeader": - category = struct['node']['category'] - lines.append ('#define %-30s 0x%016X\n' % ('PDATA_%s_SIGNATURE' % (category), int.from_bytes(bytes(struct['node']['signature']["value"].strip("'"), 'utf-8'), 'little'))) - lines.append ('#define %-30s 0x%02X\n' % ('PDATA_%s_MAJOR_VER' % (category), int(struct['node']['majver']["value"], 0))) - lines.append ('#define %-30s 0x%02X\n' % ('PDATA_%s_MINOR_VER' % (category), max_minver)) - lines.append ('\n') - - if category == '': - raise Exception ("No category field set in the Policy header!!!") - - # Step 2: Structure definitions - name_dict = {} - new_dict = {} - for each in struct_list: - if each['name'] == "PolicyHeader": - continue - if (tag_mode == 0 and each['tag'] >= 0x100) or (tag_mode == 1 and each['tag'] < 0x100): - continue - new_dict[each['name']] = (each['alias'], each['count']) - if each['alias'] not in name_dict: - name_dict[each['alias']] = 1 - lines.extend(self.create_struct (each['alias'], each['node'], new_dict)) - - # Step 3: Template verified policy header - for struct in struct_list: - if struct["name"] == "PolicyHeader": - lines.append ('STATIC CONST VERIFIED_POLICY_HEADER POLICY_%s_DESC = {\n' % (category)) - lines.append (' .Signature = %s,\n' % ('PDATA_%s_SIGNATURE' % (category))) - lines.append (' .MajorVersion = %s,\n' % ('PDATA_%s_MAJOR_VER' % (category))) - lines.append (' .MinorVersion = %s,\n' % ('PDATA_%s_MINOR_VER' % (category))) - lines.append (' .Size = 0x%02X,\n' % (self._var_dict[struct["node"]["size"]["value"]])) - lines.append ('};\n') - 
lines.append ('\n') - - # Step 4: Get/set accessors for each field per minor version - setter_def_all = [] - getter_def_all = [] - for struct in struct_list: - if struct["name"] == "PolicyHeader": - continue - for minver in range (max_minver + 1): - lines.append ('/* Get accessors for MIN_VER %d */\n' % minver) - (getter, getter_def) = self.traverse_struct (struct['node'], new_dict, minver, category, False) - lines.extend(getter) - getter_def_all.append(getter_def) - - lines.append ('/* Set accessors for MIN_VER %d */\n' % minver) - (setter, setter_def) = self.traverse_struct (struct['node'], new_dict, minver, category, True) - lines.extend(setter) - setter_def_all.append(setter_def) - - lines.append ('/* Set accessors for all fields of this structure */\n') - ''' - STATIC - VOID - EFIAPI - SET_%s_default ( - IN EFI_HANDLE _handle, - IN EFI_GUID *Guid - ) { - if ((VERIFIED_POLICY_DESCRIPTOR*)_handle)->MinorVersion >= 0x%x) {" - SET_%s_default (); - } - } - ''' - lines.append("inline\n" ) - lines.append("STATIC\n" ) - lines.append("VOID\n" ) - lines.append("EFIAPI\n" ) - lines.append("SET_%s_default (\n" % struct['name']) - lines.append(" IN EFI_HANDLE _handle,\n" ) - lines.append(" IN EFI_GUID *Guid\n" ) - lines.append(" ) {\n" ) - for idx in range(len(setter_def_all)): - lines.append(" if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->MinorVersion >= 0x%x) {\n"% idx) - for each in setter_def_all[idx]: - lines.append(" %s (_handle, Guid);\n" % each) - lines.append(" }\n") - lines.append("}\n\n") - - self.write_header_file (lines, hdr_file_name) - - - def write_header_file (self, txt_body, file_name, type = 'h'): - file_name_def = os.path.basename(file_name).replace ('.', '_') - file_name_def = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', file_name_def) - file_name_def = re.sub('([a-z0-9])([A-Z])', r'\1_\2', file_name_def).upper() - - lines = [] - lines.append ("%s\n" % get_copyright_header(type)) - lines.append ("#ifndef __%s__\n" % file_name_def) - lines.append ("#define 
__%s__\n\n" % file_name_def) - if type == 'h': - lines.append ("#include \n\n") - lines.append ("#pragma pack(1)\n\n") - lines.extend (txt_body) - if type == 'h': - lines.append ("#pragma pack()\n\n") - lines.append ("#endif\n") - - # Don't rewrite if the contents are the same - create = True - if os.path.exists(file_name): - hdr_file = open(file_name, "r") - org_txt = hdr_file.read() - hdr_file.close() - - new_txt = ''.join(lines) - if org_txt == new_txt: - create = False - - if create: - hdr_file = open(file_name, "w") - hdr_file.write (''.join(lines)) - hdr_file.close() - - - def generate_data_inc_file (self, dat_inc_file_name, bin_file = None): - # Put a prefix GUID before CFGDATA so that it can be located later on - prefix = b'\xa7\xbd\x7f\x73\x20\x1e\x46\xd6\xbe\x8f\x64\x12\x05\x8d\x0a\xa8' - if bin_file: - fin = open (bin_file, 'rb') - bin_dat = prefix + bytearray(fin.read()) - fin.close() - else: - bin_dat = prefix + self.generate_binary_array () - - file_name = os.path.basename(dat_inc_file_name).upper() - file_name = file_name.replace('.', '_') - - txt_lines = [] - - txt_lines.append ("UINT8 mConfigDataBlob[%d] = {\n" % len(bin_dat)) - count = 0 - line = [' '] - for each in bin_dat: - line.append('0x%02X, ' % each) - count = count + 1 - if (count & 0x0F) == 0: - line.append('\n') - txt_lines.append (''.join(line)) - line = [' '] - if len(line) > 1: - txt_lines.append (''.join(line) + '\n') - - txt_lines.append ("};\n\n") - - self.write_header_file (txt_lines, dat_inc_file_name, 'inc') - - return 0 - - - def get_struct_array_info (self, input): - parts = input.split(':') - if len(parts) > 1: - var = parts[1] - input = parts[0] - else: - var = '' - array_str = input.split('[') - name = array_str[0] - if len(array_str) > 1: - num_str = ''.join(c for c in array_str[-1] if c.isdigit()) - num_str = '1000' if len(num_str) == 0 else num_str - array_num = int(num_str) - else: - array_num = 0 - return name, array_num, var - - - def process_multilines (self, string, 
max_char_length): - multilines = '' - string_length = len(string) - current_string_start = 0 - string_offset = 0 - break_line_dict = [] - if len(string) <= max_char_length: - while (string_offset < string_length): - if string_offset >= 1: - if string[string_offset - 1] == '\\' and string[string_offset] == 'n': - break_line_dict.append (string_offset + 1) - string_offset += 1 - if break_line_dict != []: - for each in break_line_dict: - multilines += " %s\n" % string[current_string_start:each].lstrip() - current_string_start = each - if string_length - current_string_start > 0: - multilines += " %s\n" % string[current_string_start:].lstrip() - else: - multilines = " %s\n" % string - else: - new_line_start = 0 - new_line_count = 0 - found_space_char = False - while (string_offset < string_length): - if string_offset >= 1: - if new_line_count >= max_char_length - 1: - if string[string_offset] == ' ' and string_length - string_offset > 10: - break_line_dict.append (new_line_start + new_line_count) - new_line_start = new_line_start + new_line_count - new_line_count = 0 - found_space_char = True - elif string_offset == string_length - 1 and found_space_char == False: - break_line_dict.append (0) - if string[string_offset - 1] == '\\' and string[string_offset] == 'n': - break_line_dict.append (string_offset + 1) - new_line_start = string_offset + 1 - new_line_count = 0 - string_offset += 1 - new_line_count += 1 - if break_line_dict != []: - break_line_dict.sort () - for each in break_line_dict: - if each > 0: - multilines += " %s\n" % string[current_string_start:each].lstrip() - current_string_start = each - if string_length - current_string_start > 0: - multilines += " %s\n" % string[current_string_start:].lstrip() - return multilines - - - def create_field (self, item, name, length, offset, struct, bsf_name, help, option, bits_length = None): - pos_name = 28 - pos_comment = 30 - name_line='' - help_line='' - option_line='' - - if length == 0 and name == 'dummy': - return 
'\n' - - if bits_length == 0: - return '\n' - - is_array = False - if length in [1,2,4,8]: - type = "UINT%d" % (length * 8) - else: - is_array = True - type = "UINT8" - - if item and item['value'].startswith('{'): - type = "UINT8" - is_array = True - - if struct != '': - struct_base = struct.rstrip('*') - name = '*' * (len(struct) - len(struct_base)) + name - struct = struct_base - type = struct - if struct in ['UINT8','UINT16','UINT32','UINT64']: - is_array = True - unit = int(type[4:]) // 8 - length = length / unit - else: - is_array = False - - if is_array: - name = name + '[%d]' % length - - if len(type) < pos_name: - space1 = pos_name - len(type) - else: - space1 = 1 - - if bsf_name != '': - name_line=" %s\n" % bsf_name - else: - name_line="N/A\n" - - if help != '': - help_line = self.process_multilines (help, 80) - - if option != '': - option_line = self.process_multilines (option, 80) - - if offset is None: - offset_str = '????' - else: - offset_str = '0x%04X' % offset - - if bits_length is None: - bits_length = '' - else: - bits_length = ' : %d' % bits_length - - #return "\n/** %s%s%s**/\n %s%s%s%s;\n" % (name_line, help_line, option_line, type, ' ' * space1, name, bits_length) - return "\n /* %s */\n %s%s%s%s;\n" % (name_line.strip(), type, ' ' * space1, name, bits_length) - - - def create_accessor (self, item, category, name, length, offset, struct, bsf_name, help, option, is_set, bits_length = None): - - if length == 0 and name == 'dummy': - return '\n' - - if bits_length == 0: - return '\n' - - is_array = False - if length in [1,2,4,8]: - type = "UINT%d" % (length * 8) - else: - is_array = True - type = "UINT8" - - if item and item['value'].startswith('{'): - type = "UINT8" - is_array = True - - if struct != '': - struct_base = struct.rstrip('*') - name = '*' * (len(struct) - len(struct_base)) + name - struct = struct_base - type = struct - if struct in ['UINT8','UINT16','UINT32','UINT64']: - is_array = True - unit = int(type[4:]) // 8 - length = length 
/ unit - else: - is_array = False - - if is_array: - name = name + '[%d]' % length - - if bits_length is None: - bits_length = '' - else: - bits_length = ' : %d' % bits_length - - path = item['path'].split(".") - final_acs_list = [] - if is_set: - ''' - STATIC - VOID - EFIAPI - SET_%s ( - IN EFI_HANDLE _handle, - IN EFI_GUID *Guid, - IN %s val, - ) { - ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = val; - ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE); - } - ''' - final_acs_list.append("inline" ) - final_acs_list.append("STATIC" ) - final_acs_list.append("VOID" ) - final_acs_list.append("EFIAPI" ) - final_acs_list.append("SET_%s (" % "_".join(path)) - final_acs_list.append(" IN EFI_HANDLE _handle," ) - final_acs_list.append(" IN EFI_GUID *Guid," ) - final_acs_list.append(" IN %s val" % type) - final_acs_list.append(" ) {" ) - final_acs_list.append(" ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = val;" % (path[0], path[1])) - final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 0x%x, TRUE);" % (length, offset)) - final_acs_list.append("}\n\n") - - # Set default value - ''' - STATIC - VOID - EFIAPI - SET_%s_default ( - IN EFI_HANDLE _handle, - IN EFI_GUID *Guid - ) { - ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = 0x%x; - ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE); - } - ''' - final_acs_list.append("inline" ) - final_acs_list.append("STATIC" ) - final_acs_list.append("VOID" ) - final_acs_list.append("EFIAPI" ) - acs_default = "SET_%s_default (" % "_".join(path) - final_acs_list.append(acs_default) - final_acs_list.append(" IN EFI_HANDLE _handle," ) - final_acs_list.append(" IN EFI_GUID *Guid" ) - final_acs_list.append(" ) {" ) - final_acs_list.append(" ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = 0x%x;" % (path[0], path[1], int(item['value'], 0))) - final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 
0x%x, TRUE);" % (length, offset)) - final_acs_list.append("}\n\n") - else: - ''' - STATIC - %s - EFIAPI - GET_%s ( - IN EFI_HANDLE _handle, - IN EFI_GUID *Guid - ) { - %s Temp; - if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->Size >= (OFFSET_OF(%s, %s) + (sizeof (((%s *)0)->%s)) { - Temp = ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s; - } else { - Temp = 0x%x; - } - ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE); - return Temp; - } - ''' - final_acs_list.append("inline" ) - final_acs_list.append("STATIC" ) - final_acs_list.append("%s" % type) - final_acs_list.append("EFIAPI" ) - final_acs_list.append("GET_%s (" % "_".join(path)) - final_acs_list.append(" IN EFI_HANDLE _handle," ) - final_acs_list.append(" IN EFI_GUID *Guid" ) - final_acs_list.append(" ) {" ) - final_acs_list.append(" %s Temp;" % type) - final_acs_list.append(" if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->Size >= (OFFSET_OF(%s, %s) + (sizeof (((%s*)0)->%s)))) {" % (path[0], path[1], path[0], path[1])) - final_acs_list.append(" Temp = ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s;" % (path[0], path[1])) - final_acs_list.append(" } else {" ) - final_acs_list.append(" Temp = 0x%x;" % int(item['value'], 0)) - final_acs_list.append(" }" ) - final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 0x%x, FALSE);" % (length, offset)) - final_acs_list.append(" return Temp;" ) - final_acs_list.append("}\n\n") - - # Get default value - ''' - STATIC - %s - EFIAPI - GET_%s_default ( - IN EFI_HANDLE _handle, - IN EFI_GUID *Guid - ) { - ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE); - return 0x%x; - } - ''' - final_acs_list.append("inline" ) - final_acs_list.append("STATIC" ) - final_acs_list.append("%s" % type) - final_acs_list.append("EFIAPI" ) - acs_default = "GET_%s_default (" % "_".join(path) - final_acs_list.append(acs_default) - final_acs_list.append(" IN EFI_HANDLE _handle," ) - final_acs_list.append(" IN EFI_GUID 
*Guid" ) - final_acs_list.append(" ) {" ) - final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 0x%x, FALSE);" % (length, offset)) - final_acs_list.append(" return 0x%x;" % int(item['value'], 0)) - final_acs_list.append("}\n\n") - - final_acs_str = "\n".join(final_acs_list) - return (final_acs_str, acs_default.rstrip (' (')) - - - def create_struct (self, cname, top, struct_dict): - index = 0 - last = '' - lines = [] - lines.append ('\ntypedef struct {\n') - for field in top: - if field[0] == '$': - continue - - index += 1 - - t_item = top[field] - if 'indx' not in t_item: - if CGenCfgData.STRUCT not in top[field]: - continue - - if struct_dict[field][1] == 0: - continue - - append = True - struct_info = top[field][CGenCfgData.STRUCT] - - if 'struct' in struct_info: - struct, array_num, var = self.get_struct_array_info (struct_info['struct']) - if array_num > 0: - if last == struct: - append = False - last = struct - if var == '': - var = field - - field = CGenCfgData.format_struct_field_name (var, struct_dict[field][1]) - else: - struct = struct_dict[field][0] - field = CGenCfgData.format_struct_field_name (field, struct_dict[field][1]) - - if append: - line = self.create_field (None, field, 0, 0, struct, '', '', '') - lines.append (' %s' % line) - last = struct - continue - - item = self.get_item_by_index (t_item['indx']) - if item['cname'] == 'CfgHeader' and index == 1 or (item['cname'] == 'CondValue' and index == 2): - continue - - bit_length = None - length = (item['length'] + 7) // 8 - match = re.match("^(\d+)([b|B|W|D|Q])([B|W|D|Q]?)", t_item['length']) - if match and match.group(2) == 'b': - bit_length = int(match.group(1)) - if match.group(3) != '': - length = CGenCfgData.bits_width[match.group(3)] // 8 - else: - length = 4 - offset = item['offset'] // 8 - struct = item.get('struct', '') - name = field - prompt = item['name'] - help = item['help'] - option = item['option'] - line = self.create_field (item, name, length, offset, struct, 
prompt, help, option, bit_length) - lines.append (' %s' % line) - last = struct - - lines.append ('\n} %s;\n\n' % cname) - - return lines - - - def traverse_struct (self, top, struct_dict, target_min_ver, category, is_set): - index = 0 - last = '' - lines = [] - defaults = [] - - for field in top: - if field[0] == '$': - continue - - index += 1 - - t_item = top[field] - - try: - minver = int(t_item['minver'], 0) - except: - minver = 0 - - if minver != target_min_ver: - continue - - if 'indx' not in t_item: - if CGenCfgData.STRUCT not in top[field]: - continue - - if struct_dict[field][1] == 0: - continue - - append = True - struct_info = top[field][CGenCfgData.STRUCT] - - if 'struct' in struct_info: - struct, array_num, var = self.get_struct_array_info (struct_info['struct']) - if array_num > 0: - if last == struct: - append = False - last = struct - if var == '': - var = field - - field = CGenCfgData.format_struct_field_name (var, struct_dict[field][1]) - else: - struct = struct_dict[field][0] - field = CGenCfgData.format_struct_field_name (field, struct_dict[field][1]) - - if append: - (line, default) = self.create_accessor (None, category, field, 0, 0, struct, '', '', '', is_set) - lines.append (' %s' % line) - defaults.append (default) - last = struct - continue - - item = self.get_item_by_index (t_item['indx']) - - bit_length = None - length = (item['length'] + 7) // 8 - match = re.match("^(\d+)([b|B|W|D|Q])([B|W|D|Q]?)", t_item['length']) - if match and match.group(2) == 'b': - bit_length = int(match.group(1)) - if match.group(3) != '': - length = CGenCfgData.bits_width[match.group(3)] // 8 - else: - length = 4 - offset = item['offset'] // 8 - struct = item.get('struct', '') - name = field - prompt = item['name'] - help = item['help'] - option = item['option'] - (line, default) = self.create_accessor (item, category, name, length, offset, struct, prompt, help, option, is_set, bit_length) - lines.append ('%s' % line) - defaults.append (default) - last = struct 
- - lines.append ('\n') - - return (lines, defaults) - - - def create_header_file (self, hdr_file_name, com_hdr_file_name = ''): - def _build_header_struct (name, cfgs, level): - if CGenCfgData.STRUCT in cfgs: - if 'CfgHeader' in cfgs: - # collect CFGDATA TAG IDs - cfghdr = self.get_item_by_index (cfgs['CfgHeader']['indx']) - tag_val = array_str_to_value(cfghdr['value']) >> 20 - tag_dict[name] = tag_val - if level == 1: - tag_curr[0] = tag_val - struct_dict[name] = (level, tag_curr[0], cfgs) - - tag_curr = [0] - tag_dict = {} - struct_dict = {} - self.traverse_cfg_tree (_build_header_struct) - - if tag_curr[0] == 0: - hdr_mode = 2 - else: - hdr_mode = 1 - - # filter out the items to be built for tags and structures - struct_list = [] - for each in struct_dict: - match = False - for check in CGenCfgData.exclude_struct: - if re.match (check, each): - match = True - if each in tag_dict: - if each not in CGenCfgData.include_tag: - del tag_dict[each] - break - if not match: - struct_list.append ({'name':each, 'alias':'', 'count' : 0, 'level':struct_dict[each][0], - 'tag':struct_dict[each][1], 'node':struct_dict[each][2]}) - - # sort by level so that the bottom level struct will be build first to satisfy dependencies - struct_list = sorted(struct_list, key=lambda x: x['level'], reverse=True) - - # Convert XXX_[0-9]+ to XXX as an array hint - for each in struct_list: - cfgs = each['node'] - if 'struct' in cfgs['$STRUCT']: - each['alias'], array_num, var = self.get_struct_array_info (cfgs['$STRUCT']['struct']) - else: - match = re.match('(\w+)(_\d+)', each['name']) - if match: - each['alias'] = match.group(1) - else: - each['alias'] = each['name'] - - # count items for array build - for idx, each in enumerate(struct_list): - if idx > 0: - last_struct = struct_list[idx-1]['node']['$STRUCT'] - curr_struct = each['node']['$STRUCT'] - if struct_list[idx-1]['alias'] == each['alias'] and \ - curr_struct['length'] == last_struct['length'] and \ - curr_struct['offset'] == 
last_struct['offset'] + last_struct['length']: - for idx2 in range (idx-1, -1, -1): - if struct_list[idx2]['count'] > 0: - struct_list[idx2]['count'] += 1 - break - continue - each['count'] = 1 - - # generate common header - if com_hdr_file_name: - self.write_cfg_header_file (com_hdr_file_name, 0, tag_dict, struct_list) - - # generate platform header - self.write_cfg_header_file (hdr_file_name, hdr_mode, tag_dict, struct_list) - - return 0 - - - def create_policy_header_file (self, hdr_file_name, com_hdr_file_name = ''): - def _build_header_struct (name, cfgs, level): - if CGenCfgData.STRUCT in cfgs: - if 'PolicyHeader' in cfgs: - # collect macro definitions - cfghdr = self.get_item_by_index (cfgs['PolicyHeader']['indx']) - tag_val = array_str_to_value(cfghdr['value']) >> 20 - tag_dict[name] = tag_val - if level == 1: - tag_curr[0] = tag_val - struct_dict[name] = (level, tag_curr[0], cfgs) - - tag_curr = [0] - tag_dict = {} - struct_dict = {} - self.traverse_cfg_tree (_build_header_struct) - - if tag_curr[0] == 0: - hdr_mode = 2 - else: - hdr_mode = 1 - - # filter out the items to be built for tags and structures - struct_list = [] - for each in struct_dict: - match = False - for check in CGenCfgData.exclude_struct: - if re.match (check, each): - match = True - if each in tag_dict: - if each not in CGenCfgData.include_tag: - del tag_dict[each] - break - if not match: - struct_list.append ({'name':each, 'alias':'', 'count' : 0, 'level':struct_dict[each][0], - 'tag':struct_dict[each][1], 'node':struct_dict[each][2]}) - - # sort by level so that the bottom level struct will be build first to satisfy dependencies - struct_list = sorted(struct_list, key=lambda x: x['level'], reverse=True) - - # Convert XXX_[0-9]+ to XXX as an array hint - for each in struct_list: - cfgs = each['node'] - if 'struct' in cfgs['$STRUCT']: - each['alias'], array_num, var = self.get_struct_array_info (cfgs['$STRUCT']['struct']) - else: - match = re.match('(\w+)(_\d+)', each['name']) - if 
match: - each['alias'] = match.group(1) - else: - each['alias'] = each['name'] - - # count items for array build - for idx, each in enumerate(struct_list): - if idx > 0: - last_struct = struct_list[idx-1]['node']['$STRUCT'] - curr_struct = each['node']['$STRUCT'] - if struct_list[idx-1]['alias'] == each['alias'] and \ - curr_struct['length'] == last_struct['length'] and \ - curr_struct['offset'] == last_struct['offset'] + last_struct['length']: - for idx2 in range (idx-1, -1, -1): - if struct_list[idx2]['count'] > 0: - struct_list[idx2]['count'] += 1 - break - continue - each['count'] = 1 - - # generate platform header - self.write_policy_header_file (hdr_file_name, hdr_mode, struct_list) - - return 0 - - - def load_yaml (self, cfg_file, shallow_load=False, is_policy=False): - cfg_yaml = CFG_YAML() - self.initialize () - self.is_policy = is_policy - self._cfg_tree = cfg_yaml.load_yaml (cfg_file) - self._def_dict = cfg_yaml.def_dict - self._yaml_path = os.path.dirname(cfg_file) - if not shallow_load: - self.build_cfg_list() - self.build_var_dict() - self.update_def_value() - return 0 - - -def usage(): - print ('\n'.join([ - "GenCfgData Version 0.50", - "Usage:", - " GenCfgData GENINC BinFile IncOutFile", - " GenCfgData GENPKL YamlFile PklOutFile", - " GenCfgData GENBIN YamlFile[;DltFile] BinOutFile", - " GenCfgData GENDLT YamlFile[;BinFile] DltOutFile", - " GenCfgData GENHDR YamlFile HdrOutFile" - ])) - - -def main(): - # Parse the options and args - argc = len(sys.argv) - if argc < 4 or argc > 5: - usage() - return 1 - - gen_cfg_data = CGenCfgData() - command = sys.argv[1].upper() - out_file = sys.argv[3] - - file_list = sys.argv[2].split(';') - if len(file_list) >= 2: - yml_file = file_list[0] - dlt_file = file_list[1] - elif len(file_list) == 1: - yml_file = file_list[0] - dlt_file = '' - else: - raise Exception ("ERROR: Invalid parameter '%s' !" 
% sys.argv[2]) - - if command == "GENDLT" and yml_file.endswith('.dlt'): - # It needs to expand an existing DLT file - dlt_file = yml_file - lines = gen_cfg_data.expand_include_files (dlt_file) - write_lines (lines, out_file) - return 0 - - bin_file = '' - if (yml_file.lower().endswith('.bin')) and (command == "GENINC"): - # It is binary file - bin_file = yml_file - yml_file = '' - - if bin_file: - gen_cfg_data.generate_data_inc_file(out_file, bin_file) - return 0 - - cfg_bin_file = '' - cfg_bin_file2 = '' - if dlt_file: - if command == "GENDLT": - cfg_bin_file = dlt_file - dlt_file = '' - if len(file_list) >= 3: - cfg_bin_file2 = file_list[2] - - if yml_file.lower().endswith('.pkl'): - with open(yml_file, "rb") as pkl_file: - gen_cfg_data.__dict__ = marshal.load(pkl_file) - gen_cfg_data.prepare_marshal (False) - else: - if command == 'GENHDR': - gen_cfg_data.load_yaml (yml_file, is_policy=True) - - if command == 'GENPKL': - gen_cfg_data.prepare_marshal (True) - with open(out_file, "wb") as pkl_file: - marshal.dump(gen_cfg_data.__dict__, pkl_file) - json_file = os.path.splitext(out_file)[0] + '.json' - fo = open (json_file, 'w') - path_list = [] - cfgs = {'_cfg_page' : gen_cfg_data._cfg_page, '_cfg_list':gen_cfg_data._cfg_list, '_path_list' : path_list} - # optimize to reduce size - path = None - for each in cfgs['_cfg_list']: - new_path = each['path'][:-len(each['cname'])-1] - if path != new_path: - path = new_path - each['path'] = path - path_list.append(path) - else: - del each['path'] - if each['order'] == each['offset']: - del each['order'] - del each['offset'] - - # value is just used to indicate display type - value = each['value'] - if value.startswith ('0x'): - hex_len = ((each['length'] + 7) // 8) * 2 - if len(value) == hex_len: - value = 'x%d' % hex_len - else: - value = 'x' - each['value'] = value - elif value and value[0] in ['"', "'", '{']: - each['value'] = value[0] - else: - del each['value'] - - fo.write(repr(cfgs)) - fo.close () - return 0 - - if 
dlt_file: - gen_cfg_data.override_default_value(dlt_file) - - if command == "GENBIN": - if len(file_list) == 3: - old_data = gen_cfg_data.generate_binary_array() - fi = open (file_list[2], 'rb') - new_data = bytearray (fi.read ()) - fi.close () - if len(new_data) != len(old_data): - raise Exception ("Binary file '%s' length does not match, ignored !" % file_list[2]) - else: - gen_cfg_data.load_default_from_bin (new_data) - gen_cfg_data.override_default_value(dlt_file) - - gen_cfg_data.generate_binary(out_file) - - elif command == "GENDLT": - gen_cfg_data.generate_delta_file (out_file, cfg_bin_file, cfg_bin_file2) - - elif command == "GENHDR": - out_files = out_file.strip("'").split(';') - brd_out_file = out_files[0].strip() - if len(out_files) > 1: - com_out_file = out_files[1].strip() - else: - com_out_file = '' - gen_cfg_data.create_policy_header_file(brd_out_file, com_out_file) - - elif command == "GENINC": - gen_cfg_data.generate_data_inc_file(out_file) - - elif command == "DEBUG": - gen_cfg_data.print_cfgs() - - else: - raise Exception ("Unsuported command '%s' !" % command) - - return 0 - - -if __name__ == '__main__': - sys.exit(main()) - +## @ GenCfgData.py +# +# Copyright (c) 2020, Intel Corporation. All rights reserved.
+# Copyright (c) Microsoft Corporation. +# SPDX-License-Identifier: BSD-2-Clause-Patent +# +## + +import os +import sys +import re +import struct +import marshal +import pprint +import string +import operator as op +import ast +import binascii +from typing import Type, final +from unicodedata import category +import uuid +from datetime import date +from collections import OrderedDict + +from CommonUtility import * + +# Generated file copyright header +__copyright_tmp__ = """/** @file + + Platform Configuration %s File. + + Copyright (c) %4d, Intel Corporation. All rights reserved.
+ Copyright (c) Microsoft Corporation. + SPDX-License-Identifier: BSD-2-Clause-Patent + + This file is automatically generated. Please do NOT modify !!! + +**/ +""" + +def get_copyright_header (file_type, allow_modify = False): + file_description = { + 'yaml': 'Boot Setting', + 'dlt' : 'Delta', + 'inc' : 'C Binary Blob', + 'h' : 'C Struct Header' + } + if file_type in ['yaml', 'dlt']: + comment_char = '#' + else: + comment_char = '' + lines = __copyright_tmp__.split('\n') + if allow_modify: + lines = [line for line in lines if 'Please do NOT modify' not in line] + copyright_hdr = '\n'.join('%s%s' % (comment_char, line) for line in lines)[:-1] + '\n' + return copyright_hdr % (file_description[file_type], date.today().year) + +def check_quote (text): + if (text[0] == "'" and text[-1] == "'") or (text[0] == '"' and text[-1] == '"'): + return True + return False + +def strip_quote (text): + new_text = text.strip() + if check_quote (new_text): + return new_text[1:-1] + return text + +def strip_delimiter (text, delim): + new_text = text.strip() + if new_text: + if new_text[0] == delim[0] and new_text[-1] == delim[-1]: + return new_text[1:-1] + return text + +def bytes_to_bracket_str (bytes): + return '{ %s }' % (', '.join('0x%02x' % i for i in bytes)) + +def array_str_to_value (val_str): + val_str = val_str.strip() + val_str = strip_delimiter (val_str, '{}') + val_str = strip_quote (val_str) + value = 0 + for each in val_str.split(',')[::-1]: + each = each.strip() + value = (value << 8) | int(each, 0) + return value + +def write_lines (lines, file): + fo = open(file, "w") + fo.write (''.join ([x[0] for x in lines])) + fo.close () + +def read_lines (file): + if not os.path.exists(file): + test_file = os.path.basename(file) + if os.path.exists(test_file): + file = test_file + fi = open (file, 'r') + lines = fi.readlines () + fi.close () + return lines + +def expand_file_value (path, value_str): + result = bytearray() + match = re.match("\{\s*FILE:(.+)\}", value_str) + if 
match: + file_list = match.group(1).split(',') + for file in file_list: + file = file.strip() + bin_path = os.path.join(path, file) + result.extend(bytearray(open(bin_path, 'rb').read())) + return result + +class ExpressionEval(ast.NodeVisitor): + operators = { + ast.Add: op.add, + ast.Sub: op.sub, + ast.Mult: op.mul, + ast.Div: op.floordiv, + ast.Mod: op.mod, + ast.Eq: op.eq, + ast.NotEq: op.ne, + ast.Gt: op.gt, + ast.Lt: op.lt, + ast.GtE: op.ge, + ast.LtE: op.le, + ast.BitXor: op.xor, + ast.BitAnd: op.and_, + ast.BitOr: op.or_, + ast.Invert: op.invert, + ast.USub: op.neg + } + + + def __init__(self): + self._debug = False + self._expression = '' + self._namespace = {} + self._get_variable = None + + def eval(self, expr, vars={}): + self._expression = expr + if type(vars) is dict: + self._namespace = vars + self._get_variable = None + else: + self._namespace = {} + self._get_variable = vars + node = ast.parse(self._expression, mode='eval') + result = self.visit(node.body) + if self._debug: + print ('EVAL [ %s ] = %s' % (expr, str(result))) + return result + + def visit_Name(self, node): + if self._get_variable is not None: + return self._get_variable(node.id) + else: + return self._namespace[node.id] + + def visit_Num(self, node): + return node.n + + def visit_NameConstant(self, node): + return node.value + + def visit_BoolOp(self, node): + result = False + if isinstance(node.op, ast.And): + for value in node.values: + result = self.visit(value) + if not result: + break + elif isinstance(node.op, ast.Or): + for value in node.values: + result = self.visit(value) + if result: + break + return True if result else False + + def visit_UnaryOp(self, node): + val = self.visit(node.operand) + return operators[type(node.op)](val) + + def visit_BinOp(self, node): + lhs = self.visit(node.left) + rhs = self.visit(node.right) + return ExpressionEval.operators[type(node.op)](lhs, rhs) + + def visit_Compare(self, node): + right = self.visit(node.left) + result = True + for 
operation, comp in zip(node.ops, node.comparators): + if not result: + break + left = right + right = self.visit(comp) + result = ExpressionEval.operators[type(operation)](left, right) + return result + + def visit_Call(self, node): + if node.func.id in ['ternary']: + condition = self.visit (node.args[0]) + val_true = self.visit (node.args[1]) + val_false = self.visit (node.args[2]) + return val_true if condition else val_false + elif node.func.id in ['offset', 'length']: + if self._get_variable is not None: + return self._get_variable(node.args[0].s, node.func.id) + else: + raise ValueError("Unsupported function: " + repr(node)) + + def generic_visit(self, node): + raise ValueError("malformed node or string: " + repr(node)) + + +class CFG_YAML(): + TEMPLATE = 'template' + CONFIGS = 'configs' + VARIABLE = 'variable' + + def __init__ (self, is_policy=False): + self.log_line = False + self.allow_template = False + self.cfg_tree = None + self.tmp_tree = None + self.var_dict = None + self.def_dict = {} + self.yaml_path = '' + self.lines = [] + self.full_lines = [] + self.index = 0 + self.is_policy = is_policy + self.re_expand = re.compile (r'(.+:\s+|\s*\-\s*)!expand\s+\{\s*(\w+_TMPL)\s*:\s*\[(.+)]\s*\}') + self.re_include = re.compile (r'(.+:\s+|\s*\-\s*)!include\s+(.+)') + + @staticmethod + def count_indent (line): + return next((i for i, c in enumerate(line) if not c.isspace()), len(line)) + + @staticmethod + def substitue_args (text, arg_dict): + for arg in arg_dict: + text = text.replace ('$' + arg, arg_dict[arg]) + return text + + @staticmethod + def dprint (*args): + pass + + def process_include (self, line, insert = True): + match = self.re_include.match (line) + if not match: + raise Exception ("Invalid !include format '%s' !" 
% line.strip()) + + prefix = match.group(1) + include = match.group(2) + if prefix.strip() == '-': + prefix = '' + adjust = 0 + else: + adjust = 2 + + include = strip_quote (include) + request = CFG_YAML.count_indent (line) + adjust + + if self.log_line: + # remove the include line itself + del self.full_lines[-1] + + inc_path = os.path.join (self.yaml_path, include) + if not os.path.exists(inc_path): + # try relative path to project root + try_path = os.path.join(os.path.dirname (os.path.realpath(__file__)), "../..", include) + if os.path.exists(try_path): + inc_path = try_path + else: + raise Exception ("ERROR: Cannot open file '%s'." % inc_path) + + lines = read_lines (inc_path) + + current = 0 + same_line = False + for idx, each in enumerate (lines): + start = each.lstrip() + if start == '' or start[0] == '#': + continue + + if start[0] == '>': + # append the content directly at the same line + same_line = True + + start = idx + current = CFG_YAML.count_indent (each) + break + + lines = lines[start+1:] if same_line else lines[start:] + leading = '' + if same_line: + request = len(prefix) + leading = '>' + + lines = [prefix + '%s\n' % leading] + [' ' * request + i[current:] for i in lines] + if insert: + self.lines = lines + self.lines + + return lines + + def process_expand (self, line): + match = self.re_expand.match(line) + if not match: + raise Exception ("Invalid !expand format '%s' !" % line.strip()) + lines = [] + prefix = match.group(1) + temp_name = match.group(2) + args = match.group(3) + + if prefix.strip() == '-': + indent = 0 + else: + indent = 2 + lines = self.process_expand_template (temp_name, prefix, args, indent) + self.lines = lines + self.lines + + + def process_expand_template (self, temp_name, prefix, args, indent = 2): + # expand text with arg substitution + if temp_name not in self.tmp_tree: + raise Exception ("Could not find template '%s' !" 
% temp_name) + parts = args.split(',') + parts = [i.strip() for i in parts] + num = len(parts) + arg_dict = dict(zip( ['(%d)' % (i + 1) for i in range(num)], parts)) + str_data = self.tmp_tree[temp_name] + text = DefTemplate(str_data).safe_substitute(self.def_dict) + text = CFG_YAML.substitue_args (text, arg_dict) + target = CFG_YAML.count_indent (prefix) + indent + current = CFG_YAML.count_indent (text) + padding = target * ' ' + if indent == 0: + leading = [] + else: + leading = [prefix + '\n'] + text = leading + [(padding + i + '\n')[current:] for i in text.splitlines()] + return text + + + def load_file (self, yaml_file): + self.index = 0 + self.lines = read_lines (yaml_file) + + + def peek_line (self): + if len(self.lines) == 0: + return None + else: + return self.lines[0] + + + def put_line (self, line): + self.lines.insert (0, line) + if self.log_line: + del self.full_lines[-1] + + + def get_line (self): + if len(self.lines) == 0: + return None + else: + line = self.lines.pop(0) + if self.log_line: + self.full_lines.append (line.rstrip()) + return line + + + def get_multiple_line (self, indent): + text = '' + newind = indent + 1 + while True: + line = self.peek_line () + if line is None: + break + sline = line.strip() + if sline != '': + newind = CFG_YAML.count_indent(line) + if newind <= indent: + break + self.get_line () + if sline != '': + text = text + line + return text + + + def traverse_cfg_tree (self, handler): + def _traverse_cfg_tree (root, level = 0): + # config structure + for key in root: + if type(root[key]) is OrderedDict: + level += 1 + handler (key, root[key], level) + _traverse_cfg_tree (root[key], level) + level -= 1 + _traverse_cfg_tree (self.cfg_tree) + + + def count (self): + def _count (name, cfgs, level): + num[0] += 1 + num = [0] + self.traverse_cfg_tree (_count) + return num[0] + + + def parse (self, parent_name = '', curr = None, level = 0): + child = None + last_indent = None + temp_chk = {} + + while True: + line = self.get_line 
() + if line is None: + break + + curr_line = line.strip() + if curr_line == '' or curr_line[0] == '#': + continue + + indent = CFG_YAML.count_indent(line) + if last_indent is None: + last_indent = indent + + if indent != last_indent: + # outside of current block, put the line back to queue + self.put_line (' ' * indent + curr_line) + + if curr_line.endswith (': >'): + # multiline marker + old_count = len(self.full_lines) + line = self.get_multiple_line (indent) + if self.log_line and not self.allow_template and '!include ' in line: + # expand include in template + new_lines = [] + lines = line.splitlines() + for idx, each in enumerate(lines): + if '!include ' in each: + new_line = ''.join(self.process_include (each, False)) + new_lines.append(new_line) + else: + new_lines.append(each) + self.full_lines = self.full_lines[:old_count] + new_lines + curr_line = curr_line + line + + if indent > last_indent: + # child nodes + if child is None: + raise Exception ('Unexpected format at line: %s' % (curr_line)) + + level += 1 + self.parse (key, child, level) + level -= 1 + + line = self.peek_line () + if line is not None: + curr_line = line.strip() + indent = CFG_YAML.count_indent(line) + if indent >= last_indent: + # consume the line + self.get_line () + else: + # end of file + indent = -1 + + if curr is None: + curr = OrderedDict() + + if indent < last_indent: + return curr + + marker1 = curr_line[0] + marker2 = curr_line[-1] + start = 1 if marker1 == '-' else 0 + pos = curr_line.find(': ') + if pos > 0: + child = None + key = curr_line[start:pos].strip() + if curr_line[pos + 2] == '>': + curr[key] = curr_line[pos + 3:] + else: + # XXXX: !include / !expand + if '!include ' in curr_line: + self.process_include (line) + elif '!expand ' in curr_line: + if self.allow_template and not self.log_line: + self.process_expand (line) + else: + value_str = curr_line[pos + 2:].strip() + if key == "IdTag" or key == "ArrayIdTag": + # Insert the headers corresponds to this ID tag from 
here, most contents are hardcoded for now + cfg_hdr = OrderedDict() + cfg_hdr['length'] = '0x04' + cfg_hdr['value'] = '{0x01:2b, (_LENGTH_%s_/4):10b, %d:4b, 0:4b, %s:12b}' % (parent_name, 0 if key == "IdTag" else 1, value_str) + curr['CfgHeader'] = cfg_hdr + + cnd_val = OrderedDict() + cnd_val['length'] = '0x04' + cnd_val['value'] = '0x00000000' + curr['CondValue'] = cnd_val + else: + curr[key] = value_str + if self.log_line and value_str[0] == '{': + # expand {FILE: xxxx} format in the log line + if value_str[1:].rstrip().startswith('FILE:'): + value_bytes = expand_file_value (self.yaml_path, value_str) + value_str = bytes_to_bracket_str (value_bytes) + self.full_lines[-1] = line[:indent] + curr_line[:pos + 2] + value_str + + elif marker2 == ':': + child = OrderedDict() + key = curr_line[start:-1].strip() + if key == '$ACTION': + # special virtual nodes, rename to ensure unique key + key = '$ACTION_%04X' % self.index + self.index += 1 + if key in curr: + if key not in temp_chk: + # check for duplicated keys at same level + temp_chk[key] = 1 + else: + raise Exception ("Duplicated item '%s:%s' found !" 
% (parent_name, key)) + + curr[key] = child + if self.var_dict is None and key == CFG_YAML.VARIABLE: + self.var_dict = child + if self.tmp_tree is None and key == CFG_YAML.TEMPLATE: + self.tmp_tree = child + if self.var_dict: + for each in self.var_dict: + txt = self.var_dict[each] + if type(txt) is str: + self.def_dict['(%s)' % each] = txt + if self.tmp_tree and key == CFG_YAML.CONFIGS: + if not self.is_policy: + # apply template for the main configs + self.allow_template = True + child['Signature'] = OrderedDict() + child['Signature']['length'] = '0x04' + child['Signature']['value'] = "{'CFGD'}" + + child['HeaderLength'] = OrderedDict() + child['HeaderLength']['length'] = '0x01' + child['HeaderLength']['value'] = '0x10' + + child['Reserved'] = OrderedDict() + child['Reserved']['length'] = '0x03' + child['Reserved']['value'] = '{0,0,0}' + + child['UsedLength'] = OrderedDict() + child['UsedLength']['length'] = '0x04' + child['UsedLength']['value'] = '_LENGTH_' + + # This will be rounded up to 4KB aligned + child['TotalLength'] = OrderedDict() + child['TotalLength']['length'] = '0x04' + child['TotalLength']['value'] = '(_LENGTH_/0x1000 + 1)*0x1000' + else: + child = None + # - !include cfg_opt.yaml + if '!include ' in curr_line: + self.process_include (line) + + return curr + + + def load_yaml (self, opt_file): + self.var_dict = None + self.yaml_path = os.path.dirname (opt_file) + self.load_file (opt_file) + yaml_tree = self.parse () + self.tmp_tree = yaml_tree[CFG_YAML.TEMPLATE] + self.cfg_tree = yaml_tree[CFG_YAML.CONFIGS] + return self.cfg_tree + + + def expand_yaml (self, opt_file): + self.log_line = True + self.load_yaml (opt_file) + self.log_line = False + text = '\n'.join (self.full_lines) + self.full_lines = [] + return text + + +class DefTemplate(string.Template): + idpattern = '\([_A-Z][_A-Z0-9]*\)|[_A-Z][_A-Z0-9]*' + + +class CGenCfgData: + STRUCT = '$STRUCT' + bits_width = {'b':1, 'B':8, 'W':16, 'D':32, 'Q':64} + builtin_option = {'$EN_DIS' : [('0', 
'Disable'), ('1', 'Enable')]} + exclude_struct = ['GPIO_GPP_*', 'GPIO_CFG_DATA', 'GpioConfPad*', 'GpioPinConfig', + 'BOOT_OPTION*', 'PLATFORMID_CFG_DATA', '\w+_Half[01]'] + include_tag = ['GPIO_CFG_DATA'] + keyword_set = set(['name', 'type', 'option', 'help', 'length', 'value', 'order', 'struct', 'condition', 'minver']) + + def __init__(self): + self.initialize () + + + def initialize (self): + self._cfg_tree = {} + self._tmp_tree = {} + self._cfg_list = [] + self._cfg_page = {'root': {'title': '', 'child': []}} + self._cur_page = '' + self._var_dict = {} + self._def_dict = {} + self._yaml_path = '' + + + @staticmethod + def deep_convert_dict (layer): + # convert OrderedDict to list + dict + new_list = layer + if isinstance(layer, OrderedDict): + new_list = list (layer.items()) + for idx, pair in enumerate (new_list): + new_node = CGenCfgData.deep_convert_dict (pair[1]) + new_list[idx] = dict({pair[0] : new_node}) + return new_list + + + @staticmethod + def deep_convert_list (layer): + if isinstance(layer, list): + od = OrderedDict({}) + for each in layer: + if isinstance(each, dict): + key = next(iter(each)) + od[key] = CGenCfgData.deep_convert_list(each[key]) + return od + else: + return layer + + + @staticmethod + def expand_include_files (file_path, cur_dir = ''): + if cur_dir == '': + cur_dir = os.path.dirname(file_path) + file_path = os.path.basename(file_path) + + input_file_path = os.path.join(cur_dir, file_path) + file = open(input_file_path, "r") + lines = file.readlines() + file.close() + + new_lines = [] + for line_num, line in enumerate(lines): + match = re.match("^!include\s*(.+)?$", line.strip()) + if match: + inc_path = match.group(1) + tmp_path = os.path.join(cur_dir, inc_path) + org_path = tmp_path + if not os.path.exists(tmp_path): + cur_dir = os.path.join(os.path.dirname (os.path.realpath(__file__)), "..", "..") + tmp_path = os.path.join(cur_dir, inc_path) + if not os.path.exists(tmp_path): + raise Exception ("ERROR: Cannot open include file 
'%s'." % org_path) + else: + new_lines.append (('# Included from file: %s\n' % inc_path, tmp_path, 0)) + new_lines.append (('# %s\n' % ('=' * 80), tmp_path, 0)) + new_lines.extend (CGenCfgData.expand_include_files (inc_path, cur_dir)) + else: + new_lines.append ((line, input_file_path, line_num)) + + return new_lines + + + @staticmethod + def format_struct_field_name (input, count = 0): + name = '' + cap = True + if '_' in input: + input = input.lower() + for each in input: + if each == '_': + cap = True + continue + elif cap: + each = each.upper() + cap = False + name = name + each + + if count > 1: + name = '%s[%d]' % (name, count) + + return name + + def get_last_error (self): + return '' + + + def get_variable (self, var, attr = 'value'): + if var in self._var_dict: + var = self._var_dict[var] + return var + + item = self.locate_cfg_item (var, False) + if item is None: + raise ValueError ("Cannot find variable '%s' !" % var) + + if item: + if 'indx' in item: + item = self.get_item_by_index (item['indx']) + if attr == 'offset': + var = item['offset'] + elif attr == 'length': + var = item['length'] + elif attr == 'value': + var = self.get_cfg_item_value (item) + else: + raise ValueError ("Unsupported variable attribute '%s' !" % attr) + return var + + + def eval (self, expr): + def _handler (pattern): + if pattern.group(1): + target = 1 + else: + target = 2 + result = self.get_variable(pattern.group(target)) + if result is None: + raise ValueError('Unknown variable $(%s) !' 
% pattern.group(target)) + return hex(result) + + expr_eval = ExpressionEval () + if '$' in expr: + # replace known variable first + expr = re.sub(r'\$\(([_a-zA-Z][\w\.]*)\)|\$([_a-zA-Z][\w\.]*)', _handler, expr) + return expr_eval.eval(expr, self.get_variable) + + + def get_cfg_list (self, page_id = None): + if page_id is None: + # return full list + return self._cfg_list + else: + # build a new list for items under a page ID + cfgs = [i for i in self._cfg_list if i['cname'] and (i['page'] == page_id)] + return cfgs + + + def get_cfg_page (self): + return self._cfg_page + + def get_cfg_item_length (self, item): + return item['length'] + + def get_cfg_item_value (self, item, array = False): + value_str = item['value'] + length = item['length'] + return self.get_value (value_str, length, array) + + + def format_value_to_str (self, value, bit_length, old_value = ''): + # value is always int + length = (bit_length + 7) // 8 + fmt = '' + if old_value.startswith ('0x'): + fmt = '0x' + elif old_value and (old_value[0] in ['"', "'", '{']): + fmt = old_value[0] + else: + fmt = '' + + bvalue = value_to_bytearray (value, length) + if fmt in ['"', "'"]: + svalue = bvalue.rstrip(b'\x00').decode() + value_str = fmt + svalue + fmt + elif fmt == "{": + value_str = '{ ' + ', '.join(['0x%02x' % i for i in bvalue]) + ' }' + elif fmt == '0x': + hex_len = length * 2 + if len(old_value) == hex_len + 2: + fstr = '0x%%0%dX' % hex_len + else: + fstr = '0x%X' + value_str = fstr % value + else: + if length <= 2: + value_str = '%d' % value + elif length <= 8: + value_str = '0x%x' % value + else: + value_str = '{ ' + ', '.join(['0x%02x' % i for i in bvalue]) + ' }' + return value_str + + + def reformat_value_str (self, value_str, bit_length, old_value = None): + value = self.parse_value (value_str, bit_length, False) + if old_value is None: + old_value = value_str + new_value = self.format_value_to_str (value, bit_length, old_value) + return new_value + + + def get_value (self, value_str, 
bit_length, array = True): + value_str = value_str.strip() + if len(value_str) == 0: + return 0 + if value_str[0] == "'" and value_str[-1] == "'" or \ + value_str[0] == '"' and value_str[-1] == '"': + value_str = value_str[1:-1] + bvalue = bytearray (value_str.encode()) + if len(bvalue) == 0: + bvalue = bytearray(b'\x00') + if array: + return bvalue + else: + return bytes_to_value (bvalue) + else: + if value_str[0] in '{' : + value_str = value_str[1:-1].strip() + value = 0 + for each in value_str.split(',')[::-1]: + each = each.strip() + value = (value << 8) | int(each, 0) + if array: + length = (bit_length + 7) // 8 + return value_to_bytearray (value, length) + else: + return value + + + def parse_value (self, value_str, bit_length, array = True): + length = (bit_length + 7) // 8 + if check_quote(value_str): + value_str = bytes_to_bracket_str(value_str[1:-1].encode()) + elif (',' in value_str) and (value_str[0] != '{'): + value_str = '{ %s }' % value_str + if value_str[0] == '{': + result = expand_file_value (self._yaml_path, value_str) + if len(result) == 0 : + bin_list = value_str[1:-1].split(',') + value = 0 + bit_len = 0 + unit_len = 1 + for idx, element in enumerate(bin_list): + each = element.strip() + if len(each) == 0: + continue + + in_bit_field = False + if each[0] in "'" + '"': + each_value = bytearray(each[1:-1], 'utf-8') + elif ':' in each: + match = re.match("^(.+):(\d+)([b|B|W|D|Q])$", each) + if match is None: + raise SystemExit("Exception: Invald value list format '%s' !" % each) + if match.group(1) == '0' and match.group(2) == '0': + unit_len = CGenCfgData.bits_width[match.group(3)] // 8 + cur_bit_len = int(match.group(2)) * CGenCfgData.bits_width[match.group(3)] + value += ((self.eval(match.group(1)) & (1< 0: + if bit_len % 8 != 0: + raise SystemExit("Exception: Invalid bit field alignment '%s' !" 
% value_str) + result.extend(value_to_bytes(value, bit_len // 8)) + value = 0 + bit_len = 0 + + result.extend(each_value) + + elif check_quote (value_str): + result = bytearray(value_str[1:-1], 'utf-8') # Excluding quotes + else: + result = value_to_bytearray (self.eval(value_str), length) + + if len(result) < length: + result.extend(b'\x00' * (length - len(result))) + elif len(result) > length: + raise SystemExit ("Exception: Value '%s' is too big to fit into %d bytes !" % (value_str, length)) + + if array: + return result + else: + return bytes_to_value(result) + + return result + + + def get_cfg_item_options (self, item): + tmp_list = [] + if item['type'] == "Combo": + if item['option'] in CGenCfgData.builtin_option: + for op_val, op_str in CGenCfgData.builtin_option[item['option']]: + tmp_list.append((op_val, op_str)) + else: + opt_list = item['option'].split(',') + for option in opt_list: + option = option.strip() + try: + (op_val, op_str) = option.split(':') + except: + raise SystemExit ("Exception: Invalid option format '%s' for item '%s' !" 
% (option, item['cname'])) + tmp_list.append((op_val, op_str)) + return tmp_list + + + def get_page_title(self, page_id, top = None): + if top is None: + top = self.get_cfg_page()['root'] + for node in top['child']: + page_key = next(iter(node)) + if page_id == page_key: + return node[page_key]['title'] + else: + result = self.get_page_title (page_id, node[page_key]) + if result is not None: + return result + return None + + + def print_pages(self, top=None, level=0): + if top is None: + top = self.get_cfg_page()['root'] + for node in top['child']: + page_id = next(iter(node)) + print('%s%s: %s' % (' ' * level, page_id, node[page_id]['title'])) + level += 1 + self.print_pages(node[page_id], level) + level -= 1 + + + def get_item_by_index (self, index): + return self._cfg_list[index] + + + def get_item_by_path (self, path): + node = self.locate_cfg_item (path) + if node: + return self.get_item_by_index (node['indx']) + else: + return None + + def locate_cfg_path (self, item): + def _locate_cfg_path (root, level = 0): + # config structure + if item is root: + return path + for key in root: + if type(root[key]) is OrderedDict: + level += 1 + path.append(key) + ret = _locate_cfg_path (root[key], level) + if ret: + return ret + path.pop() + return None + path = [] + return _locate_cfg_path (self._cfg_tree) + + + def locate_cfg_item (self, path, allow_exp = True): + def _locate_cfg_item (root, path, level = 0): + if len(path) == level: + return root + next_root = root.get(path[level], None) + if next_root is None: + if allow_exp: + raise Exception ('Not a valid CFG config option path: %s' % '.'.join(path[:level+1])) + else: + return None + return _locate_cfg_item (next_root, path, level + 1) + + path_nodes = path.split('.') + return _locate_cfg_item (self._cfg_tree, path_nodes) + + + def traverse_cfg_tree (self, handler, top = None): + def _traverse_cfg_tree (root, level = 0): + # config structure + for key in root: + if type(root[key]) is OrderedDict: + level += 1 + 
handler (key, root[key], level) + _traverse_cfg_tree (root[key], level) + level -= 1 + + if top is None: + top = self._cfg_tree + _traverse_cfg_tree (top) + + + def print_cfgs(self, root = None, short = True, print_level = 256): + def _print_cfgs (name, cfgs, level): + + if 'indx' in cfgs: + act_cfg = self.get_item_by_index (cfgs['indx']) + else: + offset = 0 + length = 0 + value = '' + path='' + if CGenCfgData.STRUCT in cfgs: + cfg = cfgs[CGenCfgData.STRUCT] + offset = int(cfg['offset']) + length = int(cfg['length']) + if 'value' in cfg: + value = cfg['value'] + if length == 0: + return + act_cfg = dict({'value' : value, 'offset' : offset, 'length' : length}) + value = act_cfg['value'] + bit_len = act_cfg['length'] + offset = (act_cfg['offset'] + 7) // 8 + if value != '': + try: + value = self.reformat_value_str (act_cfg['value'], act_cfg['length']) + except: + value = act_cfg['value'] + length = bit_len // 8 + bit_len = '(%db)' % bit_len if bit_len % 8 else '' * 4 + if level <= print_level: + if short and len(value) > 40: + value = '%s ... 
%s' % (value[:20] , value[-20:]) + print('%04X:%04X%-6s %s%s : %s' % (offset, length, bit_len, ' ' * level, name, value)) + + self.traverse_cfg_tree (_print_cfgs) + + + def get_cfg_tree(self): + return self._cfg_tree + + + def set_cfg_tree(self, cfg_tree): + self._cfg_tree = cfg_tree + + + def merge_cfg_tree(self, root, other_root): + ret = OrderedDict () + prev_key = None + for other_key in other_root: + if other_key not in root: + ret[other_key] = other_root[other_key] + else: + # this is a good time to check to see if we miss anything from previous root elements + found_last = False + for key in root: + if key == prev_key: + found_last = True + continue + if prev_key == None: + found_last = True + if found_last: + ret[key] = root[key] + if key == other_key: + prev_key = other_key + break + + if type(root[other_key]) is OrderedDict and type(other_root[other_key]) is OrderedDict: + # if they are both non-leaf, great, process recursively + ret[other_key] = self.merge_cfg_tree (root[other_key], other_root[other_key]) + elif type(root[other_key]) is OrderedDict or type(other_root[other_key]) is OrderedDict: + raise Exception ("Two yamls files have hierachy mismatch!!!") + else: + # this is duplicate value in from both roots, take original root as principal + ret[other_key] = root[other_key] + + # See if there is any leftovers + found_last = False + for key in root: + if key == prev_key: + found_last = True + continue + if prev_key == None: + found_last = True + if found_last: + ret[key] = root[key] + if key == other_key: + prev_key = other_key + break + return ret + + + def build_var_dict (self): + def _build_var_dict (name, cfgs, level): + if level <= 2: + if CGenCfgData.STRUCT in cfgs: + struct_info = cfgs[CGenCfgData.STRUCT] + self._var_dict['_LENGTH_%s_' % name] = struct_info['length'] // 8 + self._var_dict['_OFFSET_%s_' % name] = struct_info['offset'] // 8 + + self._var_dict = {} + self.traverse_cfg_tree (_build_var_dict) + self._var_dict['_LENGTH_'] = 
self._cfg_tree[CGenCfgData.STRUCT]['length'] // 8 + return 0 + + + def add_cfg_page(self, child, parent, title=''): + def _add_cfg_page(cfg_page, child, parent): + key = next(iter(cfg_page)) + if parent == key: + cfg_page[key]['child'].append({child: {'title': title, + 'child': []}}) + return True + else: + result = False + for each in cfg_page[key]['child']: + if _add_cfg_page(each, child, parent): + result = True + break + return result + + return _add_cfg_page(self._cfg_page, child, parent) + + + def set_cur_page(self, page_str): + if not page_str: + return + + if ',' in page_str: + page_list = page_str.split(',') + else: + page_list = [page_str] + for page_str in page_list: + parts = page_str.split(':') + if len(parts) in [1, 3]: + page = parts[0].strip() + if len(parts) == 3: + # it is a new page definition, add it into tree + parent = parts[1] if parts[1] else 'root' + parent = parent.strip() + if parts[2][0] == '"' and parts[2][-1] == '"': + parts[2] = parts[2][1:-1] + + if not self.add_cfg_page(page, parent, parts[2]): + raise SystemExit("Error: Cannot find parent page '%s'!" % parent) + else: + raise SystemExit("Error: Invalid page format '%s' !" 
% page_str) + self._cur_page = page + + + def extend_variable (self, line): + # replace all variables + if line == '': + return line + loop = 2 + while loop > 0: + line_after = DefTemplate(line).safe_substitute(self._def_dict) + if line == line_after: + break + loop -= 1 + line = line_after + return line_after + + def reformat_number_per_type (self, itype, value): + if check_quote(value) or value.startswith('{'): + return value + parts = itype.split(',') + if len(parts) > 3 and parts[0] == 'EditNum': + num_fmt = parts[1].strip() + else: + num_fmt = '' + if num_fmt == 'HEX' and not value.startswith('0x'): + value = '0x%X' % int(value, 10) + elif num_fmt == 'DEC' and value.startswith('0x'): + value = '%d' % int(value, 16) + return value + + def add_cfg_item(self, name, item, offset, path): + + self.set_cur_page (item.get('page', '')) + + if name[0] == '$': + # skip all virtual node + return 0 + + + if not set(item).issubset(CGenCfgData.keyword_set): + for each in list(item): + if each not in CGenCfgData.keyword_set: + raise Exception ("Invalid attribute '%s' for '%s'!" % (each, '.'.join(path))) + + length = item.get('length', 0) + if type(length) is str: + match = re.match("^(\d+)([b|B|W|D|Q])([B|W|D|Q]?)\s*$", length) + if match: + unit_len = CGenCfgData.bits_width[match.group(2)] + length = int(match.group(1), 10) * unit_len + else: + try: + length = int(length, 0) * 8 + except: + raise Exception ("Invalid length field '%s' for '%s' !" % (length, '.'.join(path))) + + if offset % 8 > 0: + raise Exception ("Invalid alignment for field '%s' for '%s' !" % (name, '.'.join(path))) + else: + # define is length in bytes + length = length * 8 + + if not name.isidentifier(): + raise Exception ("Invalid config name '%s' for '%s' !" 
% (name, '.'.join(path))) + + + itype = str(item.get('type', 'Reserved')) + value = str(item.get('value', '')) + if value: + if not (check_quote(value) or value.startswith('{')): + if ',' in value: + value = '{ %s }' % value + else: + value = self.reformat_number_per_type (itype, value) + + help = str(item.get('help', '')) + if '\n' in help: + help = ' '.join ([i.strip() for i in help.splitlines()]) + + option = str(item.get('option', '')) + if '\n' in option: + option = ' '.join ([i.strip() for i in option.splitlines()]) + + # extend variables for value and condition + condition = str(item.get('condition', '')) + if condition: + condition = self.extend_variable (condition) + value = self.extend_variable (value) + + order = str(item.get('order', '')) + if order: + if '.' in order: + (major, minor) = order.split('.') + order = int (major, 16) + else: + order = int (order, 16) + else: + order = offset + + cfg_item = dict() + cfg_item['length'] = length + cfg_item['offset'] = offset + cfg_item['value'] = value + cfg_item['type'] = itype + cfg_item['cname'] = str(name) + cfg_item['name'] = str(item.get('name', '')) + cfg_item['help'] = help + cfg_item['option'] = option + cfg_item['page'] = self._cur_page + cfg_item['order'] = order + cfg_item['path'] = '.'.join(path) + cfg_item['condition'] = condition + if 'struct' in item: + cfg_item['struct'] = item['struct'] + self._cfg_list.append(cfg_item) + + item['indx'] = len(self._cfg_list) - 1 + + # remove used info for reducing pkl size + item.pop('option', None) + item.pop('condition', None) + item.pop('help', None) + item.pop('name', None) + item.pop('page', None) + + return length + + + def build_cfg_list (self, cfg_name ='', top = None, path = [], info = {'offset': 0}): + if top is None: + top = self._cfg_tree + + start = info['offset'] + is_leaf = True + for key in top: + path.append(key) + if type(top[key]) is OrderedDict: + is_leaf = False + self.build_cfg_list(key, top[key], path, info) + path.pop() + + if is_leaf: 
+ length = self.add_cfg_item(cfg_name, top, info['offset'], path) + info['offset'] += length + elif cfg_name == '' or (cfg_name and cfg_name[0] != '$'): + # check first element for struct + first = next(iter(top)) + struct_str = CGenCfgData.STRUCT + if first != struct_str: + struct_node = OrderedDict({}) + top[struct_str] = struct_node + top.move_to_end (struct_str, False) + else: + struct_node = top[struct_str] + struct_node['offset'] = start + if len(path) == 1: + # Round up first layer tree to be 4 Byte aligned + info['offset'] = (info['offset'] + 31) & (~31) + struct_node['length'] = (info['offset'] - start + 31) & (~31) + else: + struct_node['length'] = info['offset'] - start + if struct_node['length'] % 8 != 0: + raise SystemExit("Error: Bits length not aligned for %s !" % str(path)) + + + def get_field_value (self, top = None): + def _get_field_value (name, cfgs, level): + if 'indx' in cfgs: + act_cfg = self.get_item_by_index (cfgs['indx']) + if act_cfg['length'] == 0: + return + value = self.get_value (act_cfg['value'], act_cfg['length'], False) + set_bits_to_bytes (result, act_cfg['offset'] - struct_info['offset'], act_cfg['length'], value) + + if top is None: + top = self._cfg_tree + struct_info = top[CGenCfgData.STRUCT] + result = bytearray ((struct_info['length'] + 7) // 8) + self.traverse_cfg_tree (_get_field_value, top) + return result + + + def set_field_value (self, top, value_bytes, force = False): + def _set_field_value (name, cfgs, level): + if 'indx' not in cfgs: + return + act_cfg = self.get_item_by_index (cfgs['indx']) + if force or act_cfg['value'] == '': + value = get_bits_from_bytes (full_bytes, act_cfg['offset'] - struct_info['offset'], act_cfg['length']) + act_val = act_cfg['value'] + if act_val == '': + act_val = '%d' % value + act_val = self.reformat_number_per_type (act_cfg['type'], act_val) + act_cfg['value'] = self.format_value_to_str (value, act_cfg['length'], act_val) + + if 'indx' in top: + # it is config option + value = 
bytes_to_value (value_bytes) + act_cfg = self.get_item_by_index (top['indx']) + act_cfg['value'] = self.format_value_to_str (value, act_cfg['length'], act_cfg['value']) + else: + # it is structure + struct_info = top[CGenCfgData.STRUCT] + length = struct_info['length'] // 8 + full_bytes = bytearray(value_bytes[:length]) + if len(full_bytes) < length: + full_bytes.extend(bytearray(length - len(value_bytes))) + self.traverse_cfg_tree (_set_field_value, top) + + + def update_def_value (self): + def _update_def_value (name, cfgs, level): + if 'indx' in cfgs: + act_cfg = self.get_item_by_index (cfgs['indx']) + if act_cfg['value'] != '' and act_cfg['length'] > 0: + try: + act_cfg['value'] = self.reformat_value_str (act_cfg['value'], act_cfg['length']) + except: + raise Exception ("Invalid value expression '%s' for '%s' !" % (act_cfg['value'], act_cfg['path'])) + else: + if CGenCfgData.STRUCT in cfgs and 'value' in cfgs[CGenCfgData.STRUCT]: + curr = cfgs[CGenCfgData.STRUCT] + value_bytes = value_to_bytearray (self.eval(curr['value']), (curr['length'] + 7) // 8) + self.set_field_value (cfgs, value_bytes) + + self.traverse_cfg_tree (_update_def_value, self._cfg_tree) + + + def evaluate_condition (self, item): + expr = item['condition'] + result = self.parse_value (expr, 1, False) + return result + + + def load_default_from_bin (self, bin_data): + self.set_field_value(self._cfg_tree, bin_data, True) + + + def generate_binary_array (self): + return self.get_field_value() + + def generate_binary (self, bin_file_name): + bin_file = open(bin_file_name, "wb") + bin_file.write (self.generate_binary_array ()) + bin_file.close() + return 0 + + def write_delta_file (self, out_file, platform_id, out_lines): + dlt_fd = open (out_file, "w") + dlt_fd.write ("%s\n" % get_copyright_header('dlt', True)) + dlt_fd.write ('#\n') + dlt_fd.write ('# Delta configuration values for platform ID 0x%04X\n' % platform_id) + dlt_fd.write ('#\n\n') + for line in out_lines: + dlt_fd.write ('%s\n' % line) 
+ dlt_fd.close() + + + def override_default_value(self, dlt_file): + error = 0 + dlt_lines = CGenCfgData.expand_include_files(dlt_file) + + platform_id = None + for line, file_path, line_num in dlt_lines: + line = line.strip() + if not line or line.startswith('#'): + continue + match = re.match("\s*([\w\.]+)\s*\|\s*(.+)", line) + if not match: + raise Exception("Unrecognized line '%s' (File:'%s' Line:%d) !" % + (line, file_path, line_num + 1)) + + path = match.group(1) + value_str = match.group(2) + top = self.locate_cfg_item (path) + if not top: + raise Exception( + "Invalid configuration '%s' (File:'%s' Line:%d) !" % + (path, file_path, line_num + 1)) + + if 'indx' in top: + act_cfg = self.get_item_by_index (top['indx']) + bit_len = act_cfg['length'] + else: + struct_info = top[CGenCfgData.STRUCT] + bit_len = struct_info['length'] + + value_bytes = self.parse_value (value_str, bit_len) + self.set_field_value (top, value_bytes, True) + + if path == 'PLATFORMID_CFG_DATA.PlatformId': + platform_id = value_str + + if platform_id is None: + platform_id = 0 + print("PLATFORMID_CFG_DATA.PlatformId is missing in file '%s' !" 
% (dlt_file)) + + return error + + + def generate_delta_file_from_bin (self, delta_file, old_data, new_data, full=False): + self.load_default_from_bin (new_data) + lines = [] + tag_name = '' + level = 0 + platform_id = None + def_platform_id = 0 + + for item in self._cfg_list: + old_val = get_bits_from_bytes (old_data, item['offset'], item['length']) + new_val = get_bits_from_bytes (new_data, item['offset'], item['length']) + + full_name = item['path'] + if 'PLATFORMID_CFG_DATA.PlatformId' == full_name: + def_platform_id = old_val + platform_id = new_val + elif item['type'] != 'Reserved' and ((new_val != old_val) or full): + val_str = self.reformat_value_str (item['value'], item['length']) + text = '%-40s | %s' % (full_name, val_str) + lines.append(text) + + if def_platform_id == platform_id: + platform_id = def_platform_id + + lines.insert(0, '%-40s | %s\n\n' % + ('PLATFORMID_CFG_DATA.PlatformId', '0x%04X' % platform_id)) + + if platform_id is None: + print ("Platform ID is not set and will be configured to 0") + platform_id = 0 + + self.write_delta_file (delta_file, platform_id, lines) + return 0 + + + def generate_delta_svd_from_bin (self, old_data, new_data): + self.load_default_from_bin (new_data) + lines = [] + tag_name = '' + level = 0 + platform_id = None + def_platform_id = 0 + items = [] + + for item in self._cfg_list: + old_val = get_bits_from_bytes (old_data, item['offset'], item['length']) + new_val = get_bits_from_bytes (new_data, item['offset'], item['length']) + + full_name = item['path'] + if 'PLATFORMID_CFG_DATA.PlatformId' == full_name: + def_platform_id = old_val + platform_id = new_val + elif item['type'] != 'Reserved' and (new_val != old_val): + val_str = self.reformat_value_str (item['value'], item['length']) + text = '%-40s | %s' % (full_name, val_str) + item = self.locate_cfg_item(item['path']) + if item is None: + raise Exception ("Failed to locate item from path: %s" % item['path']) + items.append(item) + + execs = [] + # The idea is that 
the 1st level tag content will be regenerated if changed + for item in items: + exec = self.locate_exec_from_item (item) + if exec == None: + raise Exception ("Failed to find the immediate executive tree for an item") + if exec not in execs: + execs.append (exec) + + bytes_array = [] + for exec in execs: + bytes = self.get_field_value (exec) + offset = 0 + offset += int(exec['CfgHeader']['length'], 0) + offset += int(exec['CondValue']['length'], 0) + bytes_array.append (bytes[offset:]) + + # self.write_delta_file (delta_file, platform_id, lines) + return (execs, bytes_array) + + def locate_exec_from_item (self, item): + + def _locate_exec_from_item (name, cfgs, level): + if level == 1: + exec[0] = cfgs + elif cfgs == item: + exec[1] = exec[0] + + exec = [None, None] + self.traverse_cfg_tree (_locate_exec_from_item, self._cfg_tree) + return exec[1] + + def locate_exec_from_tag (self, tag): + + def _locate_exec_from_tag (name, cfgs, level): + if level == 1: + exec[0] = cfgs + if CGenCfgData.STRUCT in cfgs: + cfghdr = self.get_item_by_index (cfgs['CfgHeader']['indx']) + tag_val = array_str_to_value(cfghdr['value']) >> 20 + if tag_val == tag: + exec[1] = exec[0] + + exec = [None, None] + self.traverse_cfg_tree (_locate_exec_from_tag, self._cfg_tree) + return exec[1] + + def generate_delta_file(self, delta_file, bin_file, bin_file2, full=False): + fd = open (bin_file, 'rb') + new_data = bytearray(fd.read()) + fd.close() + + if bin_file2 == '': + old_data = self.generate_binary_array() + else: + old_data = new_data + fd = open (bin_file2, 'rb') + new_data = bytearray(fd.read()) + fd.close() + + return self.generate_delta_file_from_bin (delta_file, old_data, new_data, full) + + + def prepare_marshal (self, is_save): + if is_save: + # Ordered dict is not marshallable, convert to list + self._cfg_tree = CGenCfgData.deep_convert_dict (self._cfg_tree) + else: + # Revert it back + self._cfg_tree = CGenCfgData.deep_convert_list (self._cfg_tree) + + def generate_yml_file (self, 
in_file, out_file): + cfg_yaml = CFG_YAML() + text = cfg_yaml.expand_yaml (in_file) + yml_fd = open(out_file, "w") + yml_fd.write (text) + yml_fd.close () + return 0 + + + def write_cfg_header_file (self, hdr_file_name, tag_mode, tag_dict, struct_list): + lines = [] + lines.append ('\n\n') + tag_list = sorted(list(tag_dict.items()), key=lambda x: x[1]) + for tagname, tagval in tag_list: + if (tag_mode == 0 and tagval >= 0x100) or (tag_mode == 1 and tagval < 0x100): + continue + lines.append ('#define %-30s 0x%03X\n' % ('CDATA_%s_TAG' % tagname[:-9], tagval)) + lines.append ('\n\n') + + name_dict = {} + new_dict = {} + for each in struct_list: + if (tag_mode == 0 and each['tag'] >= 0x100) or (tag_mode == 1 and each['tag'] < 0x100): + continue + new_dict[each['name']] = (each['alias'], each['count']) + if each['alias'] not in name_dict: + name_dict[each['alias']] = 1 + lines.extend(self.create_struct (each['alias'], each['node'], new_dict)) + + + self.write_header_file (lines, hdr_file_name) + + + def findMaxMinver (self, tree): + + if type(tree) is not OrderedDict: + raise Exception ("Incorrect tree type %s!!!" % type(tree)) + + # In-order tree traversal to make sure all minor versions are non-descending + try: + ver = int(tree["minver"], 0) + except: + ver = 0 + + parent_minver = ver + + max_minver = parent_minver + for value in tree: + if type(tree[value]) is OrderedDict: + temp_ver = self.findMaxMinver (tree[value]) + if temp_ver >= max_minver: + max_minver = temp_ver + else: + raise Exception ("Higher minor version detected %d between older fields at %s. 
New minor version fields should only be appended!!!\ + Consider append new fields, or remove the minor version and bump major version" % (temp_ver, max_minver, value)) + + return max_minver + + + def write_policy_header_file (self, hdr_file_name, tag_mode, struct_list): + lines = [] + max_minver = self.findMaxMinver(self._cfg_tree) + category = '' + + # Step 1: Macro definitions + for struct in struct_list: + if struct["name"] == "PolicyHeader": + category = struct['node']['category'] + lines.append ('#define %-30s 0x%016X\n' % ('PDATA_%s_SIGNATURE' % (category), int.from_bytes(bytes(struct['node']['signature']["value"].strip("'"), 'utf-8'), 'little'))) + lines.append ('#define %-30s 0x%02X\n' % ('PDATA_%s_MAJOR_VER' % (category), int(struct['node']['majver']["value"], 0))) + lines.append ('#define %-30s 0x%02X\n' % ('PDATA_%s_MINOR_VER' % (category), max_minver)) + lines.append ('\n') + + if category == '': + raise Exception ("No category field set in the Policy header!!!") + + # Step 2: Structure definitions + name_dict = {} + new_dict = {} + for each in struct_list: + if each['name'] == "PolicyHeader": + continue + if (tag_mode == 0 and each['tag'] >= 0x100) or (tag_mode == 1 and each['tag'] < 0x100): + continue + new_dict[each['name']] = (each['alias'], each['count']) + if each['alias'] not in name_dict: + name_dict[each['alias']] = 1 + lines.extend(self.create_struct (each['alias'], each['node'], new_dict)) + + # Step 3: Template verified policy header + for struct in struct_list: + if struct["name"] == "PolicyHeader": + lines.append ('STATIC CONST VERIFIED_POLICY_HEADER POLICY_%s_DESC = {\n' % (category)) + lines.append (' .Signature = %s,\n' % ('PDATA_%s_SIGNATURE' % (category))) + lines.append (' .MajorVersion = %s,\n' % ('PDATA_%s_MAJOR_VER' % (category))) + lines.append (' .MinorVersion = %s,\n' % ('PDATA_%s_MINOR_VER' % (category))) + lines.append (' .Size = 0x%02X,\n' % (self._var_dict[struct["node"]["size"]["value"]])) + lines.append ('};\n') + 
lines.append ('\n') + + # Step 4: Get/set accessors for each field per minor version + setter_def_all = [] + getter_def_all = [] + for struct in struct_list: + if struct["name"] == "PolicyHeader": + continue + for minver in range (max_minver + 1): + lines.append ('/* Get accessors for MIN_VER %d */\n' % minver) + (getter, getter_def) = self.traverse_struct (struct['node'], new_dict, minver, category, False) + lines.extend(getter) + getter_def_all.append(getter_def) + + lines.append ('/* Set accessors for MIN_VER %d */\n' % minver) + (setter, setter_def) = self.traverse_struct (struct['node'], new_dict, minver, category, True) + lines.extend(setter) + setter_def_all.append(setter_def) + + lines.append ('/* Set accessors for all fields of this structure */\n') + ''' + STATIC + VOID + EFIAPI + SET_%s_default ( + IN EFI_HANDLE _handle, + IN EFI_GUID *Guid + ) { + if ((VERIFIED_POLICY_DESCRIPTOR*)_handle)->MinorVersion >= 0x%x) {" + SET_%s_default (); + } + } + ''' + lines.append("inline\n" ) + lines.append("STATIC\n" ) + lines.append("VOID\n" ) + lines.append("EFIAPI\n" ) + lines.append("SET_%s_default (\n" % struct['name']) + lines.append(" IN EFI_HANDLE _handle,\n" ) + lines.append(" IN EFI_GUID *Guid\n" ) + lines.append(" ) {\n" ) + for idx in range(len(setter_def_all)): + lines.append(" if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->MinorVersion >= 0x%x) {\n"% idx) + for each in setter_def_all[idx]: + lines.append(" %s (_handle, Guid);\n" % each) + lines.append(" }\n") + lines.append("}\n\n") + + self.write_header_file (lines, hdr_file_name) + + + def write_header_file (self, txt_body, file_name, type = 'h'): + file_name_def = os.path.basename(file_name).replace ('.', '_') + file_name_def = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', file_name_def) + file_name_def = re.sub('([a-z0-9])([A-Z])', r'\1_\2', file_name_def).upper() + + lines = [] + lines.append ("%s\n" % get_copyright_header(type)) + lines.append ("#ifndef __%s__\n" % file_name_def) + lines.append ("#define 
__%s__\n\n" % file_name_def) + if type == 'h': + lines.append ("#include \n\n") + lines.append ("#pragma pack(1)\n\n") + lines.extend (txt_body) + if type == 'h': + lines.append ("#pragma pack()\n\n") + lines.append ("#endif\n") + + # Don't rewrite if the contents are the same + create = True + if os.path.exists(file_name): + hdr_file = open(file_name, "r") + org_txt = hdr_file.read() + hdr_file.close() + + new_txt = ''.join(lines) + if org_txt == new_txt: + create = False + + if create: + hdr_file = open(file_name, "w") + hdr_file.write (''.join(lines)) + hdr_file.close() + + + def generate_data_inc_file (self, dat_inc_file_name, bin_file = None): + # Put a prefix GUID before CFGDATA so that it can be located later on + prefix = b'\xa7\xbd\x7f\x73\x20\x1e\x46\xd6\xbe\x8f\x64\x12\x05\x8d\x0a\xa8' + if bin_file: + fin = open (bin_file, 'rb') + bin_dat = prefix + bytearray(fin.read()) + fin.close() + else: + bin_dat = prefix + self.generate_binary_array () + + file_name = os.path.basename(dat_inc_file_name).upper() + file_name = file_name.replace('.', '_') + + txt_lines = [] + + txt_lines.append ("UINT8 mConfigDataBlob[%d] = {\n" % len(bin_dat)) + count = 0 + line = [' '] + for each in bin_dat: + line.append('0x%02X, ' % each) + count = count + 1 + if (count & 0x0F) == 0: + line.append('\n') + txt_lines.append (''.join(line)) + line = [' '] + if len(line) > 1: + txt_lines.append (''.join(line) + '\n') + + txt_lines.append ("};\n\n") + + self.write_header_file (txt_lines, dat_inc_file_name, 'inc') + + return 0 + + + def get_struct_array_info (self, input): + parts = input.split(':') + if len(parts) > 1: + var = parts[1] + input = parts[0] + else: + var = '' + array_str = input.split('[') + name = array_str[0] + if len(array_str) > 1: + num_str = ''.join(c for c in array_str[-1] if c.isdigit()) + num_str = '1000' if len(num_str) == 0 else num_str + array_num = int(num_str) + else: + array_num = 0 + return name, array_num, var + + + def process_multilines (self, string, 
max_char_length): + multilines = '' + string_length = len(string) + current_string_start = 0 + string_offset = 0 + break_line_dict = [] + if len(string) <= max_char_length: + while (string_offset < string_length): + if string_offset >= 1: + if string[string_offset - 1] == '\\' and string[string_offset] == 'n': + break_line_dict.append (string_offset + 1) + string_offset += 1 + if break_line_dict != []: + for each in break_line_dict: + multilines += " %s\n" % string[current_string_start:each].lstrip() + current_string_start = each + if string_length - current_string_start > 0: + multilines += " %s\n" % string[current_string_start:].lstrip() + else: + multilines = " %s\n" % string + else: + new_line_start = 0 + new_line_count = 0 + found_space_char = False + while (string_offset < string_length): + if string_offset >= 1: + if new_line_count >= max_char_length - 1: + if string[string_offset] == ' ' and string_length - string_offset > 10: + break_line_dict.append (new_line_start + new_line_count) + new_line_start = new_line_start + new_line_count + new_line_count = 0 + found_space_char = True + elif string_offset == string_length - 1 and found_space_char == False: + break_line_dict.append (0) + if string[string_offset - 1] == '\\' and string[string_offset] == 'n': + break_line_dict.append (string_offset + 1) + new_line_start = string_offset + 1 + new_line_count = 0 + string_offset += 1 + new_line_count += 1 + if break_line_dict != []: + break_line_dict.sort () + for each in break_line_dict: + if each > 0: + multilines += " %s\n" % string[current_string_start:each].lstrip() + current_string_start = each + if string_length - current_string_start > 0: + multilines += " %s\n" % string[current_string_start:].lstrip() + return multilines + + + def create_field (self, item, name, length, offset, struct, bsf_name, help, option, bits_length = None): + pos_name = 28 + pos_comment = 30 + name_line='' + help_line='' + option_line='' + + if length == 0 and name == 'dummy': + return 
'\n' + + if bits_length == 0: + return '\n' + + is_array = False + if length in [1,2,4,8]: + type = "UINT%d" % (length * 8) + else: + is_array = True + type = "UINT8" + + if item and item['value'].startswith('{'): + type = "UINT8" + is_array = True + + if struct != '': + struct_base = struct.rstrip('*') + name = '*' * (len(struct) - len(struct_base)) + name + struct = struct_base + type = struct + if struct in ['UINT8','UINT16','UINT32','UINT64']: + is_array = True + unit = int(type[4:]) // 8 + length = length / unit + else: + is_array = False + + if is_array: + name = name + '[%d]' % length + + if len(type) < pos_name: + space1 = pos_name - len(type) + else: + space1 = 1 + + if bsf_name != '': + name_line=" %s\n" % bsf_name + else: + name_line="N/A\n" + + if help != '': + help_line = self.process_multilines (help, 80) + + if option != '': + option_line = self.process_multilines (option, 80) + + if offset is None: + offset_str = '????' + else: + offset_str = '0x%04X' % offset + + if bits_length is None: + bits_length = '' + else: + bits_length = ' : %d' % bits_length + + #return "\n/** %s%s%s**/\n %s%s%s%s;\n" % (name_line, help_line, option_line, type, ' ' * space1, name, bits_length) + return "\n /* %s */\n %s%s%s%s;\n" % (name_line.strip(), type, ' ' * space1, name, bits_length) + + + def create_accessor (self, item, category, name, length, offset, struct, bsf_name, help, option, is_set, bits_length = None): + + if length == 0 and name == 'dummy': + return '\n' + + if bits_length == 0: + return '\n' + + is_array = False + if length in [1,2,4,8]: + type = "UINT%d" % (length * 8) + else: + is_array = True + type = "UINT8" + + if item and item['value'].startswith('{'): + type = "UINT8" + is_array = True + + if struct != '': + struct_base = struct.rstrip('*') + name = '*' * (len(struct) - len(struct_base)) + name + struct = struct_base + type = struct + if struct in ['UINT8','UINT16','UINT32','UINT64']: + is_array = True + unit = int(type[4:]) // 8 + length = length 
/ unit + else: + is_array = False + + if is_array: + name = name + '[%d]' % length + + if bits_length is None: + bits_length = '' + else: + bits_length = ' : %d' % bits_length + + path = item['path'].split(".") + final_acs_list = [] + if is_set: + ''' + STATIC + VOID + EFIAPI + SET_%s ( + IN EFI_HANDLE _handle, + IN EFI_GUID *Guid, + IN %s val, + ) { + ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = val; + ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE); + } + ''' + final_acs_list.append("inline" ) + final_acs_list.append("STATIC" ) + final_acs_list.append("VOID" ) + final_acs_list.append("EFIAPI" ) + final_acs_list.append("SET_%s (" % "_".join(path)) + final_acs_list.append(" IN EFI_HANDLE _handle," ) + final_acs_list.append(" IN EFI_GUID *Guid," ) + final_acs_list.append(" IN %s val" % type) + final_acs_list.append(" ) {" ) + final_acs_list.append(" ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = val;" % (path[0], path[1])) + final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 0x%x, TRUE);" % (length, offset)) + final_acs_list.append("}\n\n") + + # Set default value + ''' + STATIC + VOID + EFIAPI + SET_%s_default ( + IN EFI_HANDLE _handle, + IN EFI_GUID *Guid + ) { + ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = 0x%x; + ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE); + } + ''' + final_acs_list.append("inline" ) + final_acs_list.append("STATIC" ) + final_acs_list.append("VOID" ) + final_acs_list.append("EFIAPI" ) + acs_default = "SET_%s_default (" % "_".join(path) + final_acs_list.append(acs_default) + final_acs_list.append(" IN EFI_HANDLE _handle," ) + final_acs_list.append(" IN EFI_GUID *Guid" ) + final_acs_list.append(" ) {" ) + final_acs_list.append(" ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = 0x%x;" % (path[0], path[1], int(item['value'], 0))) + final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 
0x%x, TRUE);" % (length, offset)) + final_acs_list.append("}\n\n") + else: + ''' + STATIC + %s + EFIAPI + GET_%s ( + IN EFI_HANDLE _handle, + IN EFI_GUID *Guid + ) { + %s Temp; + if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->Size >= (OFFSET_OF(%s, %s) + (sizeof (((%s *)0)->%s)) { + Temp = ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s; + } else { + Temp = 0x%x; + } + ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE); + return Temp; + } + ''' + final_acs_list.append("inline" ) + final_acs_list.append("STATIC" ) + final_acs_list.append("%s" % type) + final_acs_list.append("EFIAPI" ) + final_acs_list.append("GET_%s (" % "_".join(path)) + final_acs_list.append(" IN EFI_HANDLE _handle," ) + final_acs_list.append(" IN EFI_GUID *Guid" ) + final_acs_list.append(" ) {" ) + final_acs_list.append(" %s Temp;" % type) + final_acs_list.append(" if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->Size >= (OFFSET_OF(%s, %s) + (sizeof (((%s*)0)->%s)))) {" % (path[0], path[1], path[0], path[1])) + final_acs_list.append(" Temp = ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s;" % (path[0], path[1])) + final_acs_list.append(" } else {" ) + final_acs_list.append(" Temp = 0x%x;" % int(item['value'], 0)) + final_acs_list.append(" }" ) + final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 0x%x, FALSE);" % (length, offset)) + final_acs_list.append(" return Temp;" ) + final_acs_list.append("}\n\n") + + # Get default value + ''' + STATIC + %s + EFIAPI + GET_%s_default ( + IN EFI_HANDLE _handle, + IN EFI_GUID *Guid + ) { + ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE); + return 0x%x; + } + ''' + final_acs_list.append("inline" ) + final_acs_list.append("STATIC" ) + final_acs_list.append("%s" % type) + final_acs_list.append("EFIAPI" ) + acs_default = "GET_%s_default (" % "_".join(path) + final_acs_list.append(acs_default) + final_acs_list.append(" IN EFI_HANDLE _handle," ) + final_acs_list.append(" IN EFI_GUID 
*Guid" ) + final_acs_list.append(" ) {" ) + final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 0x%x, FALSE);" % (length, offset)) + final_acs_list.append(" return 0x%x;" % int(item['value'], 0)) + final_acs_list.append("}\n\n") + + final_acs_str = "\n".join(final_acs_list) + return (final_acs_str, acs_default.rstrip (' (')) + + + def create_struct (self, cname, top, struct_dict): + index = 0 + last = '' + lines = [] + lines.append ('\ntypedef struct {\n') + for field in top: + if field[0] == '$': + continue + + index += 1 + + t_item = top[field] + if 'indx' not in t_item: + if CGenCfgData.STRUCT not in top[field]: + continue + + if struct_dict[field][1] == 0: + continue + + append = True + struct_info = top[field][CGenCfgData.STRUCT] + + if 'struct' in struct_info: + struct, array_num, var = self.get_struct_array_info (struct_info['struct']) + if array_num > 0: + if last == struct: + append = False + last = struct + if var == '': + var = field + + field = CGenCfgData.format_struct_field_name (var, struct_dict[field][1]) + else: + struct = struct_dict[field][0] + field = CGenCfgData.format_struct_field_name (field, struct_dict[field][1]) + + if append: + line = self.create_field (None, field, 0, 0, struct, '', '', '') + lines.append (' %s' % line) + last = struct + continue + + item = self.get_item_by_index (t_item['indx']) + if item['cname'] == 'CfgHeader' and index == 1 or (item['cname'] == 'CondValue' and index == 2): + continue + + bit_length = None + length = (item['length'] + 7) // 8 + match = re.match("^(\d+)([b|B|W|D|Q])([B|W|D|Q]?)", t_item['length']) + if match and match.group(2) == 'b': + bit_length = int(match.group(1)) + if match.group(3) != '': + length = CGenCfgData.bits_width[match.group(3)] // 8 + else: + length = 4 + offset = item['offset'] // 8 + struct = item.get('struct', '') + name = field + prompt = item['name'] + help = item['help'] + option = item['option'] + line = self.create_field (item, name, length, offset, struct, 
prompt, help, option, bit_length) + lines.append (' %s' % line) + last = struct + + lines.append ('\n} %s;\n\n' % cname) + + return lines + + + def traverse_struct (self, top, struct_dict, target_min_ver, category, is_set): + index = 0 + last = '' + lines = [] + defaults = [] + + for field in top: + if field[0] == '$': + continue + + index += 1 + + t_item = top[field] + + try: + minver = int(t_item['minver'], 0) + except: + minver = 0 + + if minver != target_min_ver: + continue + + if 'indx' not in t_item: + if CGenCfgData.STRUCT not in top[field]: + continue + + if struct_dict[field][1] == 0: + continue + + append = True + struct_info = top[field][CGenCfgData.STRUCT] + + if 'struct' in struct_info: + struct, array_num, var = self.get_struct_array_info (struct_info['struct']) + if array_num > 0: + if last == struct: + append = False + last = struct + if var == '': + var = field + + field = CGenCfgData.format_struct_field_name (var, struct_dict[field][1]) + else: + struct = struct_dict[field][0] + field = CGenCfgData.format_struct_field_name (field, struct_dict[field][1]) + + if append: + (line, default) = self.create_accessor (None, category, field, 0, 0, struct, '', '', '', is_set) + lines.append (' %s' % line) + defaults.append (default) + last = struct + continue + + item = self.get_item_by_index (t_item['indx']) + + bit_length = None + length = (item['length'] + 7) // 8 + match = re.match("^(\d+)([b|B|W|D|Q])([B|W|D|Q]?)", t_item['length']) + if match and match.group(2) == 'b': + bit_length = int(match.group(1)) + if match.group(3) != '': + length = CGenCfgData.bits_width[match.group(3)] // 8 + else: + length = 4 + offset = item['offset'] // 8 + struct = item.get('struct', '') + name = field + prompt = item['name'] + help = item['help'] + option = item['option'] + (line, default) = self.create_accessor (item, category, name, length, offset, struct, prompt, help, option, is_set, bit_length) + lines.append ('%s' % line) + defaults.append (default) + last = struct 
+ + lines.append ('\n') + + return (lines, defaults) + + + def create_header_file (self, hdr_file_name, com_hdr_file_name = ''): + def _build_header_struct (name, cfgs, level): + if CGenCfgData.STRUCT in cfgs: + if 'CfgHeader' in cfgs: + # collect CFGDATA TAG IDs + cfghdr = self.get_item_by_index (cfgs['CfgHeader']['indx']) + tag_val = array_str_to_value(cfghdr['value']) >> 20 + tag_dict[name] = tag_val + if level == 1: + tag_curr[0] = tag_val + struct_dict[name] = (level, tag_curr[0], cfgs) + + tag_curr = [0] + tag_dict = {} + struct_dict = {} + self.traverse_cfg_tree (_build_header_struct) + + if tag_curr[0] == 0: + hdr_mode = 2 + else: + hdr_mode = 1 + + # filter out the items to be built for tags and structures + struct_list = [] + for each in struct_dict: + match = False + for check in CGenCfgData.exclude_struct: + if re.match (check, each): + match = True + if each in tag_dict: + if each not in CGenCfgData.include_tag: + del tag_dict[each] + break + if not match: + struct_list.append ({'name':each, 'alias':'', 'count' : 0, 'level':struct_dict[each][0], + 'tag':struct_dict[each][1], 'node':struct_dict[each][2]}) + + # sort by level so that the bottom level struct will be build first to satisfy dependencies + struct_list = sorted(struct_list, key=lambda x: x['level'], reverse=True) + + # Convert XXX_[0-9]+ to XXX as an array hint + for each in struct_list: + cfgs = each['node'] + if 'struct' in cfgs['$STRUCT']: + each['alias'], array_num, var = self.get_struct_array_info (cfgs['$STRUCT']['struct']) + else: + match = re.match('(\w+)(_\d+)', each['name']) + if match: + each['alias'] = match.group(1) + else: + each['alias'] = each['name'] + + # count items for array build + for idx, each in enumerate(struct_list): + if idx > 0: + last_struct = struct_list[idx-1]['node']['$STRUCT'] + curr_struct = each['node']['$STRUCT'] + if struct_list[idx-1]['alias'] == each['alias'] and \ + curr_struct['length'] == last_struct['length'] and \ + curr_struct['offset'] == 
last_struct['offset'] + last_struct['length']: + for idx2 in range (idx-1, -1, -1): + if struct_list[idx2]['count'] > 0: + struct_list[idx2]['count'] += 1 + break + continue + each['count'] = 1 + + # generate common header + if com_hdr_file_name: + self.write_cfg_header_file (com_hdr_file_name, 0, tag_dict, struct_list) + + # generate platform header + self.write_cfg_header_file (hdr_file_name, hdr_mode, tag_dict, struct_list) + + return 0 + + + def create_policy_header_file (self, hdr_file_name, com_hdr_file_name = ''): + def _build_header_struct (name, cfgs, level): + if CGenCfgData.STRUCT in cfgs: + if 'PolicyHeader' in cfgs: + # collect macro definitions + cfghdr = self.get_item_by_index (cfgs['PolicyHeader']['indx']) + tag_val = array_str_to_value(cfghdr['value']) >> 20 + tag_dict[name] = tag_val + if level == 1: + tag_curr[0] = tag_val + struct_dict[name] = (level, tag_curr[0], cfgs) + + tag_curr = [0] + tag_dict = {} + struct_dict = {} + self.traverse_cfg_tree (_build_header_struct) + + if tag_curr[0] == 0: + hdr_mode = 2 + else: + hdr_mode = 1 + + # filter out the items to be built for tags and structures + struct_list = [] + for each in struct_dict: + match = False + for check in CGenCfgData.exclude_struct: + if re.match (check, each): + match = True + if each in tag_dict: + if each not in CGenCfgData.include_tag: + del tag_dict[each] + break + if not match: + struct_list.append ({'name':each, 'alias':'', 'count' : 0, 'level':struct_dict[each][0], + 'tag':struct_dict[each][1], 'node':struct_dict[each][2]}) + + # sort by level so that the bottom level struct will be build first to satisfy dependencies + struct_list = sorted(struct_list, key=lambda x: x['level'], reverse=True) + + # Convert XXX_[0-9]+ to XXX as an array hint + for each in struct_list: + cfgs = each['node'] + if 'struct' in cfgs['$STRUCT']: + each['alias'], array_num, var = self.get_struct_array_info (cfgs['$STRUCT']['struct']) + else: + match = re.match('(\w+)(_\d+)', each['name']) + if 
match: + each['alias'] = match.group(1) + else: + each['alias'] = each['name'] + + # count items for array build + for idx, each in enumerate(struct_list): + if idx > 0: + last_struct = struct_list[idx-1]['node']['$STRUCT'] + curr_struct = each['node']['$STRUCT'] + if struct_list[idx-1]['alias'] == each['alias'] and \ + curr_struct['length'] == last_struct['length'] and \ + curr_struct['offset'] == last_struct['offset'] + last_struct['length']: + for idx2 in range (idx-1, -1, -1): + if struct_list[idx2]['count'] > 0: + struct_list[idx2]['count'] += 1 + break + continue + each['count'] = 1 + + # generate platform header + self.write_policy_header_file (hdr_file_name, hdr_mode, struct_list) + + return 0 + + + def load_yaml (self, cfg_file, shallow_load=False, is_policy=False): + cfg_yaml = CFG_YAML() + self.initialize () + self.is_policy = is_policy + self._cfg_tree = cfg_yaml.load_yaml (cfg_file) + self._def_dict = cfg_yaml.def_dict + self._yaml_path = os.path.dirname(cfg_file) + if not shallow_load: + self.build_cfg_list() + self.build_var_dict() + self.update_def_value() + return 0 + + +def usage(): + print ('\n'.join([ + "GenCfgData Version 0.50", + "Usage:", + " GenCfgData GENINC BinFile IncOutFile", + " GenCfgData GENPKL YamlFile PklOutFile", + " GenCfgData GENBIN YamlFile[;DltFile] BinOutFile", + " GenCfgData GENDLT YamlFile[;BinFile] DltOutFile", + " GenCfgData GENHDR YamlFile HdrOutFile" + ])) + + +def main(): + # Parse the options and args + argc = len(sys.argv) + if argc < 4 or argc > 5: + usage() + return 1 + + gen_cfg_data = CGenCfgData() + command = sys.argv[1].upper() + out_file = sys.argv[3] + + file_list = sys.argv[2].split(';') + if len(file_list) >= 2: + yml_file = file_list[0] + dlt_file = file_list[1] + elif len(file_list) == 1: + yml_file = file_list[0] + dlt_file = '' + else: + raise Exception ("ERROR: Invalid parameter '%s' !" 
% sys.argv[2]) + + if command == "GENDLT" and yml_file.endswith('.dlt'): + # It needs to expand an existing DLT file + dlt_file = yml_file + lines = gen_cfg_data.expand_include_files (dlt_file) + write_lines (lines, out_file) + return 0 + + bin_file = '' + if (yml_file.lower().endswith('.bin')) and (command == "GENINC"): + # It is binary file + bin_file = yml_file + yml_file = '' + + if bin_file: + gen_cfg_data.generate_data_inc_file(out_file, bin_file) + return 0 + + cfg_bin_file = '' + cfg_bin_file2 = '' + if dlt_file: + if command == "GENDLT": + cfg_bin_file = dlt_file + dlt_file = '' + if len(file_list) >= 3: + cfg_bin_file2 = file_list[2] + + if yml_file.lower().endswith('.pkl'): + with open(yml_file, "rb") as pkl_file: + gen_cfg_data.__dict__ = marshal.load(pkl_file) + gen_cfg_data.prepare_marshal (False) + else: + if command == 'GENHDR': + gen_cfg_data.load_yaml (yml_file, is_policy=True) + + if command == 'GENPKL': + gen_cfg_data.prepare_marshal (True) + with open(out_file, "wb") as pkl_file: + marshal.dump(gen_cfg_data.__dict__, pkl_file) + json_file = os.path.splitext(out_file)[0] + '.json' + fo = open (json_file, 'w') + path_list = [] + cfgs = {'_cfg_page' : gen_cfg_data._cfg_page, '_cfg_list':gen_cfg_data._cfg_list, '_path_list' : path_list} + # optimize to reduce size + path = None + for each in cfgs['_cfg_list']: + new_path = each['path'][:-len(each['cname'])-1] + if path != new_path: + path = new_path + each['path'] = path + path_list.append(path) + else: + del each['path'] + if each['order'] == each['offset']: + del each['order'] + del each['offset'] + + # value is just used to indicate display type + value = each['value'] + if value.startswith ('0x'): + hex_len = ((each['length'] + 7) // 8) * 2 + if len(value) == hex_len: + value = 'x%d' % hex_len + else: + value = 'x' + each['value'] = value + elif value and value[0] in ['"', "'", '{']: + each['value'] = value[0] + else: + del each['value'] + + fo.write(repr(cfgs)) + fo.close () + return 0 + + if 
dlt_file: + gen_cfg_data.override_default_value(dlt_file) + + if command == "GENBIN": + if len(file_list) == 3: + old_data = gen_cfg_data.generate_binary_array() + fi = open (file_list[2], 'rb') + new_data = bytearray (fi.read ()) + fi.close () + if len(new_data) != len(old_data): + raise Exception ("Binary file '%s' length does not match, ignored !" % file_list[2]) + else: + gen_cfg_data.load_default_from_bin (new_data) + gen_cfg_data.override_default_value(dlt_file) + + gen_cfg_data.generate_binary(out_file) + + elif command == "GENDLT": + gen_cfg_data.generate_delta_file (out_file, cfg_bin_file, cfg_bin_file2) + + elif command == "GENHDR": + out_files = out_file.strip("'").split(';') + brd_out_file = out_files[0].strip() + if len(out_files) > 1: + com_out_file = out_files[1].strip() + else: + com_out_file = '' + gen_cfg_data.create_policy_header_file(brd_out_file, com_out_file) + + elif command == "GENINC": + gen_cfg_data.generate_data_inc_file(out_file) + + elif command == "DEBUG": + gen_cfg_data.print_cfgs() + + else: + raise Exception ("Unsuported command '%s' !" % command) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) + diff --git a/ShellPkg/Application/ShellCTestApp/TestArgv.log b/ShellPkg/Application/ShellCTestApp/TestArgv.log index e76781ea0e..9ba3cb5421 100644 Binary files a/ShellPkg/Application/ShellCTestApp/TestArgv.log and b/ShellPkg/Application/ShellCTestApp/TestArgv.log differ diff --git a/StandaloneMmPkg/Library/StandaloneMmCoreEntryPointNull/StandaloneMmCoreEntryPointNull.inf b/StandaloneMmPkg/Library/StandaloneMmCoreEntryPointNull/StandaloneMmCoreEntryPointNull.inf index 43742b2beb..97e9f031fc 100644 --- a/StandaloneMmPkg/Library/StandaloneMmCoreEntryPointNull/StandaloneMmCoreEntryPointNull.inf +++ b/StandaloneMmPkg/Library/StandaloneMmCoreEntryPointNull/StandaloneMmCoreEntryPointNull.inf @@ -1,31 +1,31 @@ -## @file -# Module entry point library for DXE core. -# -# Copyright (c) 2017 - 2021, Arm Ltd. All rights reserved.
-# -# SPDX-License-Identifier: BSD-2-Clause-Patent -# -# -## - -[Defines] - INF_VERSION = 0x0001001A - BASE_NAME = StandaloneMmCoreEntryPointNull - FILE_GUID = 5E28E9FA-67DC-4408-A177-05F72CD7E248 - MODULE_TYPE = MM_CORE_STANDALONE - VERSION_STRING = 1.0 - PI_SPECIFICATION_VERSION = 0x00010032 - LIBRARY_CLASS = StandaloneMmCoreEntryPoint|MM_CORE_STANDALONE - -# -# VALID_ARCHITECTURES = IA32 X64 IPF EBC (EBC is for build only) -# - -[Sources] - StandaloneMmCoreEntryPointNull.c - -[Packages] - MdePkg/MdePkg.dec - -[LibraryClasses] - BaseLib +## @file +# Module entry point library for DXE core. +# +# Copyright (c) 2017 - 2021, Arm Ltd. All rights reserved.
+# +# SPDX-License-Identifier: BSD-2-Clause-Patent +# +# +## + +[Defines] + INF_VERSION = 0x0001001A + BASE_NAME = StandaloneMmCoreEntryPointNull + FILE_GUID = 5E28E9FA-67DC-4408-A177-05F72CD7E248 + MODULE_TYPE = MM_CORE_STANDALONE + VERSION_STRING = 1.0 + PI_SPECIFICATION_VERSION = 0x00010032 + LIBRARY_CLASS = StandaloneMmCoreEntryPoint|MM_CORE_STANDALONE + +# +# VALID_ARCHITECTURES = IA32 X64 IPF EBC (EBC is for build only) +# + +[Sources] + StandaloneMmCoreEntryPointNull.c + +[Packages] + MdePkg/MdePkg.dec + +[LibraryClasses] + BaseLib