diff --git a/PolicyServicePkg/CommonPolicy/Template_PolicyHeader.yaml b/PolicyServicePkg/CommonPolicy/Template_PolicyHeader.yaml
new file mode 100644
index 00000000000..4a281e09643
--- /dev/null
+++ b/PolicyServicePkg/CommonPolicy/Template_PolicyHeader.yaml
@@ -0,0 +1,27 @@
+## @file
+#
+# Slim Bootloader CFGDATA Default File.
+#
+# Copyright (c) 2020, Intel Corporation. All rights reserved.
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+# Template section for common policy header, template name has to end with `_TMPL`
+# Policy structure metadata, will be used for policy headers and generating unique macro definitions
+POLICY_HEADER_TMPL: >
+  # Unique identifier for this policy structure. Duplicate category in an active platform will cause build break
+ - category : $(1)
+  # Signature field for verified policy header
+ - signature :
+ - length : 0x08
+ - value : $(2)
+  # Major version field for verified policy header
+ - majver :
+ - length : 0x02
+ - value : $(3)
+  # Minor version field for verified policy header is automatically populated with the highest minor version from fields
+  # Size field for verified policy header, should be the total size of your policy structure
+ - size :
+ - length : 0x04
+ - value : $(4)
diff --git a/PolicyServicePkg/Plugins/UpdatePolicyHdr/UpdatePolicyHdr.py b/PolicyServicePkg/Plugins/UpdatePolicyHdr/UpdatePolicyHdr.py
new file mode 100644
index 00000000000..f54ac87836f
--- /dev/null
+++ b/PolicyServicePkg/Plugins/UpdatePolicyHdr/UpdatePolicyHdr.py
@@ -0,0 +1,262 @@
+##
+# This plugin generates policy header files
+# from platform supplied YAML policy.
+#
+# Copyright (c) Microsoft Corporation
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+
+import logging
+import os
+import shutil
+from collections import OrderedDict
+from copy import deepcopy
+import xml.etree.ElementTree
+import hashlib
+import json
+import time
+import re
+import xml.etree.ElementTree as ET
+from edk2toolext.environment import shell_environment
+from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin
+from edk2toollib.utility_functions import RunPythonScript
+from edk2toollib.uefi.edk2.path_utilities import Edk2Path
+
+import sys
+
+import yaml
+sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'Tools'))
+from GenCfgData import CGenCfgData
+
+class UpdatePolicyHdr(IUefiBuildPlugin):
+
+ def trimTreeBaseOnMinver (self, tree, list):
+
+ if type(tree) is not OrderedDict:
+ raise Exception ("Incorrect tree type!!!")
+
+ try:
+ ver = int(tree["minver"], 0)
+ except:
+ ver = 0
+
+ trim_list = []
+ for idx in range(len(list)):
+ if idx < ver and list[idx] != None:
+ # trim the entry if this minver is higher than it belongs
+ list[idx] = None
+ trim_list.append(idx)
+
+ for value in tree:
+ if type(tree[value]) is OrderedDict:
+ sub_list = []
+ for idx in range(len(list)):
+ if list[idx] != None:
+ sub_list.append(list[idx][value])
+ else:
+ sub_list.append(None)
+ sub_trim_list = self.trimTreeBaseOnMinver (tree[value], sub_list)
+ for item in sub_trim_list:
+ del list[item][value]
+
+ return trim_list
+
+ # in-place prettyprint formatter
+ @staticmethod
+ def indent(elem, level=0):
+ i = "\n" + level*" "
+ if len(elem):
+ if not elem.text or not elem.text.strip():
+ elem.text = i + " "
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ for elem in elem:
+ UpdatePolicyHdr.indent(elem, level+1)
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ else:
+ if level and (not elem.tail or not elem.tail.strip()):
+ elem.tail = i
+
+ # Attempt to run GenCfgData to generate C header files
+ #
+  # Consumes build environment variables: "BUILD_OUTPUT_BASE", "UPDATE_SETTINGS",
+ # and either of "POLICY_REPORT_FOLDER" or "ACTIVE_PLATFORM"
+ def do_pre_build(self, thebuilder):
+ need_check = thebuilder.env.GetValue("UPDATE_SETTINGS")
+ if need_check is not None and need_check.upper() == "FALSE":
+ logging.warn ("Platform indicated as not checking YAML file changes, will not be updated!")
+ return 0
+
+ yaml_list = []
+ exception_list = []
+ ws = thebuilder.ws
+ pp = thebuilder.pp.split(os.pathsep)
+ edk2 = Edk2Path(ws, pp)
+
+ # Form the exception list of formatted absolute paths. And always ignore our own samples.
+ exception_list.append (thebuilder.mws.join (thebuilder.ws, "PolicyServicePkg", "Samples"))
+ platform_exception = thebuilder.env.GetValue("POLICY_IGNORE_PATHS")
+ if platform_exception is not None:
+ plat_list = platform_exception.split(';')
+ for each in plat_list:
+ exception_list.append(os.path.normpath (thebuilder.mws.join (thebuilder.ws, each)))
+
+ # Look for *_policy_def.yaml files in all package paths.
+ for pkg_path in pp:
+ for subdir, dirs, files in os.walk(pkg_path):
+ for file in files:
+ if file.endswith ("_policy_def.yaml") or file.endswith ("_policy_def.yml"):
+ yaml_path = os.path.normpath(os.path.join (subdir, file))
+ ignore = False
+ for exception in exception_list:
+ if yaml_path.startswith (exception):
+ ignore = True
+ break
+ if ignore:
+ continue
+ yaml_list.append (yaml_path)
+ logging.debug (yaml_path)
+
+ err_count = 0
+ type = 'POLICY'
+ report_dir = thebuilder.env.GetValue("%s_REPORT_FOLDER" % type)
+ if report_dir is None:
+ report_dir = edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath (
+ edk2.GetContainingPackage(
+ edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath(
+ thebuilder.env.GetValue("ACTIVE_PLATFORM"))))
+
+ report_file = os.path.join (report_dir, "%s_REPORT.xml" % type)
+
+ if os.path.isfile (report_file):
+ tree = ET.parse(report_file).getroot()
+ else:
+ tree = None
+
+ comment = ET.Comment(' === Auto-Generated. Please do not change anything!!! === ')
+ root = ET.Element('Settings')
+ root.insert(0, comment)
+
+ for setting in yaml_list:
+
+ if not os.path.normcase(setting).startswith(os.path.normcase(report_dir.rstrip(os.sep)) + os.sep):
+ continue
+
+ logging.info ("Processing settings from %s" % setting)
+
+ final_dir = os.path.join (edk2.GetAbsolutePathOnThisSystemFromEdk2RelativePath(
+ edk2.GetContainingPackage (setting)), "Include")
+ if not os.path.isdir(final_dir):
+ os.mkdir (final_dir)
+
+ # Set up a playground first
+ op_dir = thebuilder.mws.join(thebuilder.ws, thebuilder.env.GetValue("BUILD_OUTPUT_BASE"), "ConfPolicy")
+ if not os.path.isdir(op_dir):
+ os.makedirs(op_dir)
+
+ cmd = thebuilder.mws.join(thebuilder.ws, "PolicyServicePkg", "Tools", "GenCfgData.py")
+
+ conf_file = setting
+ if conf_file is None:
+ logging.warn ("YAML file not specified, system might not work as expected!!!")
+ return 0
+ if not os.path.isfile(conf_file):
+ logging.error ("YAML file specified is not found!!!")
+ return 1
+
+ gen_cfg_data = CGenCfgData()
+
+ if gen_cfg_data.load_yaml(conf_file, shallow_load=True) != 0:
+ raise Exception(gen_cfg_data.get_last_error())
+
+ merged_cfg_tree = gen_cfg_data.get_cfg_tree()
+
+ minor_tree_list = []
+ max_minver = gen_cfg_data.findMaxMinver (merged_cfg_tree)
+ # each minor version needs a spot, thus plus 1 here
+ for _ in range(max_minver + 1):
+ new_tree = deepcopy (merged_cfg_tree)
+ minor_tree_list.append (new_tree)
+ self.trimTreeBaseOnMinver (merged_cfg_tree, minor_tree_list)
+
+ target = merged_cfg_tree['PolicyHeader']['category']
+ major_version = int (merged_cfg_tree['PolicyHeader']['majver']['value'], 0)
+
+ # Insert xml leaf for this conf/policy/etc
+ leaf = ET.Element(target)
+ leaf.set("MajorVersion", '0x%04X' % major_version)
+ leaf.set("MinorVersion", '0x%04X' % max_minver)
+
+ for idx in range(len(minor_tree_list)):
+ minhash_item = ET.Element("Hash-v%x.%x" % (major_version, idx))
+ hash_obj = hashlib.md5()
+ tree_js = json.dumps(minor_tree_list[idx])
+ hash_obj.update(tree_js.encode('utf-8'))
+ result = hash_obj.hexdigest()
+ minhash_item.text = result
+ leaf.append (minhash_item)
+
+ cached_root = None
+ if tree != None:
+ cached_root = tree.find (target)
+ if cached_root != None:
+ cached_maj_ver = int (cached_root.get("MajorVersion"), 0)
+
+ if cached_maj_ver == None or major_version != cached_maj_ver:
+ # Print error message here and we will fail the build later on
+ logging.error ("Platform major verison does not match YAML files. Please update the %s descriptor file." % type)
+ err_count = err_count + 1
+
+ count = 0
+
+ for idx in range(len(minor_tree_list)):
+ saved_res = cached_root.find("Hash-v%x.%x" % (major_version, idx))
+ calc_ret = leaf.find("Hash-v%x.%x" % (major_version, idx))
+ if saved_res == None or saved_res.text != calc_ret.text:
+ count = count + 1
+ if idx == 0:
+ logging.error ("Minor version 0 has changed, please consider bumping up major version")
+ logging.error ("%d minor version fields have changed, please update your report file" % idx)
+ err_count = err_count + 1
+
+ # Just to check if the cached hash file has extra entries compared to reality
+ for res in cached_root:
+ calc_ret = leaf.find(res.tag)
+ if calc_ret == None:
+ logging.error ("A tag from cached xml (%s) is not found" % res.tag)
+ err_count = err_count + 1
+
+ tree.remove (cached_root)
+ else:
+ logging.error ("%s report file not found, please add the autogen xml file to your %s_REPORT_FOLDER" % (type, type))
+ err_count = err_count + 1
+
+ # Now that we have the PKL file, output the header files
+ params = ["GENHDR"]
+ params.append(conf_file)
+ params.append("PolicyDataStruct%s.h" % target)
+
+ ret = RunPythonScript(cmd, " ".join(params), workingdir=final_dir)
+ if ret != 0:
+ return ret
+
+ root.append (leaf)
+
+ if tree != None and 0 != len(tree):
+ logging.error ("There is stale policy from cached xml %s, please remove them or use the newly created report." % (str([i.tag for i in tree])))
+ err_count = err_count + len(tree)
+
+ if err_count != 0:
+ UpdatePolicyHdr.indent(root)
+ hash_obj = hashlib.md5()
+ tree_xml = ET.tostring(root, encoding="utf-8", xml_declaration=True)
+ hash_obj.update(tree_xml)
+ xml_hash = hash_obj.hexdigest()
+ new_file = os.path.join (report_dir, "%s_REPORT_%s.xml" % (type, xml_hash))
+ xml_file = open(new_file, 'wb')
+ xml_file.write(tree_xml)
+ xml_file.close()
+ logging.info ("New %s report xml was generated at %s, please replace %s with this new file." % (type, report_file, new_file))
+
+ return err_count
diff --git a/PolicyServicePkg/Plugins/UpdatePolicyHdr/UpdatePolicyHdr_plug_in.yaml b/PolicyServicePkg/Plugins/UpdatePolicyHdr/UpdatePolicyHdr_plug_in.yaml
new file mode 100644
index 00000000000..e8b353d5ea1
--- /dev/null
+++ b/PolicyServicePkg/Plugins/UpdatePolicyHdr/UpdatePolicyHdr_plug_in.yaml
@@ -0,0 +1,12 @@
+## @file UpdatePolicyHdr/UpdatePolicyHdr_plug_in.yaml
+# This plugin generates policy header files
+# from platform supplied YAML policies.
+#
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+{
+ "scope": "policydata",
+ "name": "Update Policy Data Headers",
+ "module": "UpdatePolicyHdr"
+}
\ No newline at end of file
diff --git a/PolicyServicePkg/PolicyServicePkg.ci.yaml b/PolicyServicePkg/PolicyServicePkg.ci.yaml
index 595743f8e81..03792155116 100644
--- a/PolicyServicePkg/PolicyServicePkg.ci.yaml
+++ b/PolicyServicePkg/PolicyServicePkg.ci.yaml
@@ -62,12 +62,19 @@
},
"SpellCheck": {
"AuditOnly": False,
- "ExtendWords": [], # words to extend to the dictionary for this package
+ "ExtendWords": [
+ "PDATA"
+ ], # words to extend to the dictionary for this package
"IgnoreStandardPaths": [], # Standard Plugin defined paths that should be ignore
"AdditionalIncludePaths": [] # Additional paths to spell check (wildcards supported)
},
"MarkdownLintCheck": {
"IgnoreFiles": [
] # package root relative file, folder, or glob pattern to ignore
+ },
+ "UncrustifyCheck": {
+ "IgnoreFiles": [
+ "Samples/PolicyDefinitions/PolicyDataStructGFX.h"
+ ] # Standard Plugin defined paths that should be ignored.
}
}
diff --git a/PolicyServicePkg/README.md b/PolicyServicePkg/README.md
index cf9f989e58d..8c4691f06ab 100644
--- a/PolicyServicePkg/README.md
+++ b/PolicyServicePkg/README.md
@@ -222,3 +222,101 @@ the MM policy service will be readable from the PEI or DXE policies services.
Policies are not shared with the standalone MM module after initialization. Any
policy created in MM will not be readable by DXE and PEI, and any policy made after
MM initialization will not be readable from MM.
+
+## YAML Based Policy Definition
+
+This section provides an overview of YAML based policy definition and how platform can
+integrate them.
+
+### YAML Definition for Policy Structures
+
+When used, the YAML based policy definition is treated as the ground truth of policy structure
+and default data. The YAML parser is largely inherited from [Intel's slim bootloader](https://github.com/slimbootloader/slimbootloader).
+Thus, the YAML syntax follows the specification defined in [slim bootloader](https://slimbootloader.github.io/specs/config.html#configuration-description-yaml-explained)
+as well.
+
+Such YAML definition will be used to generate header files and the field accessors for platform consumption.
+
+### MU Added Rules
+
+In addition to the aforementioned YAML specification from slim bootloader, a few extra rules were added to the existing
+specification to facilitate the adaptation of policy specific usage. These rules will be enforced by a Pre-Build
+plugin, more details in its [implementation section](#Pre-Build-Plugin).
+
+1. Each policy definition group must include a `POLICY_HEADER_TMPL` section, as provided in this template [here](CommonPolicy/Template_PolicyHeader.yaml).
+This section should include a 64-bit signature, an expected major version, a maximally expected minor version and
+a size of such structure. This data will mainly be used as metadata instead of policy data. Platforms could `!include`
+the provided template for easier inclusion.
+
+1. For each non-header field defined in the YAML policy file, developers could optionally add a `minver` field, which
+denotes at which minor version this field is added. If not added, this field will be treated as 0 for default value.
+
+1. Under the same major value, all new minor fields should only be appended after the fields with lower minor version
+values, otherwise the build will break.
+
+1. This YAML definition is not created to support UI configuration features, thus no UI related configuration fields
+will be recognized in the context of policy YAML definition.
+
+### Field Accessors
+
+For each field defined in the YAML structures, 4 accessor functions will be created. These functions will cover the
+functionality of setting this field to target value or default value, get current or default value from policy handle.
+
+All autogen functions will be created under the naming scheme of `SET_POLICY_STRUCTURE_NAME_Field_Name`,
+`SET_POLICY_STRUCTURE_NAME_Field_Name_default`, `GET_POLICY_STRUCTURE_NAME_Field_Name` and
+`GET_POLICY_STRUCTURE_NAME_Field_Name_default`.
+
+The internal implementation of these functions are dependent on `PolicyLib`, specifically the verified policy related
+functionalities.
+
+In order to simplify the usage of policy initialization, a function of `SET_POLICY_STRUCTURE_NAME_default` is created.
+This function could be invoked for a platform to initialize the newly created policy handle.
+
+### Pre-Build Plugin
+
+A pre-build plugin is created to enforce rules indicated in the previous [section](#MU-Added-Rules).
+
+This plugin requires the following build environment variables to execute properly:
+
+- `BUILD_OUTPUT_BASE`: This is used to create a temporary folder to contain intermediate files
+- `UPDATE_SETTINGS`: Setting this to `false` to disable this plugin
+- `POLICY_REPORT_FOLDER`: This optional variable can be used to indicate where the plugin should output the report.
+- `POLICY_IGNORE_PATHS`: This optional variable can be used by platform to specify which directories or files the autogen
+should ignore. Each entry should be a relative UEFI path, separated by semicolons (';').
+If not supplied, this report will be saved to the same folder as `ACTIVE_PLATFORM`.
+
+A policy report is the collateral output after codebase analyzing:
+
+- During pre-build, this plugin will search through all the included package paths
+for files ending with `_policy_def.yaml` or `_policy_def.yml`. Each policy component
+should have its own `*_policy_def.yaml` file. i.e. `USB_policy_def.yaml` for USB
+policies and `PCI_policy_def.yaml` for PCI policies.
+
+- For each discovered policy definition, the plugin will compute a hash value of
+each defined minor version with the structure and its content. This value will
+be compared to the value from previously output report, if this report does not
+exist, the plugin will create one.
+
+- If any of the rules is not met, or any field change is detected, the build
+will halt and notify the developers to fix the unexpected and update the report file.
+
+- The plugin will also generate the header file from this YAML definition automatically,
+which includes the header structure and its accessors.
+
+### Platform Integration
+
+For a platform to integrate the changes, the following needs to be satisfied:
+
+#### Silicon Packages
+
+- Define and create component policy definitions in YAML.
+
+- Use autogen accessors and `PolicyLib` to consume policy values.
+
+#### Platform Packages
+
+- Use autogen accessors and `PolicyLib` to publish and/or override policy values.
+
+- Include `POLICY_REPORT.xml` for version tracking purpose.
+
+- Add `PolicyLib` instances to platform descriptor file.
diff --git a/PolicyServicePkg/Samples/PolicyDefinitions/GFX_policy_def.yaml b/PolicyServicePkg/Samples/PolicyDefinitions/GFX_policy_def.yaml
new file mode 100644
index 00000000000..2e7945fe7a6
--- /dev/null
+++ b/PolicyServicePkg/Samples/PolicyDefinitions/GFX_policy_def.yaml
@@ -0,0 +1,43 @@
+## @file
+#
+# Slim Bootloader CFGDATA Default File.
+#
+# Copyright (c) 2020, Intel Corporation. All rights reserved.
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+# Template section for common structure definitions, template name has to end with `_TMPL`
+template:
+ - !include PolicyServicePkg/CommonPolicy/Template_PolicyHeader.yaml
+
+  # Template for minor version 0 of GFX structure, $(#) are replaceable parameters through !expand syntax
+ GFX_POLICY_TMPL: >
+ - Power_State_Port_$(1) :
+ name : Power state of GFX port $(1)
+ length : 0x02
+ value : $(2)
+
+ # Template for minor version 1 of GFX structure, these should always be appended after
+  # all existing minor 0 structures, $(#) are replaceable parameters through !expand syntax
+ GFX_POLICY_SUP1_TMPL: >
+ - Skip_Check_$(1) :
+ name : Flag to skip this controller or not
+ length : 0x02
+ value : $(2)
+ minver : 0x01
+
+configs:
+  # Policy structure metadata, will be used for policy headers and generating unique macro definitions
+ - PolicyHeader:
+ - !expand { POLICY_HEADER_TMPL : [GFX, 'PDATAGFX', 0x01, _LENGTH_GFX_POLICY_DATA_] }
+
+ # Main structure definitions for this policy
+ - GFX_POLICY_DATA :
+ # Structure filled with populated templates, minor version 0 first
+ - !expand { GFX_POLICY_TMPL : [ 0, 1] }
+ - !expand { GFX_POLICY_TMPL : [ 1, 1] }
+
+ # Structure filled with populated templates, minor version 1 second
+ - !expand { GFX_POLICY_SUP1_TMPL : [ 0, 0] }
+ - !expand { GFX_POLICY_SUP1_TMPL : [ 1, 0] }
diff --git a/PolicyServicePkg/Samples/PolicyDefinitions/PolicyDataStructGFX.h b/PolicyServicePkg/Samples/PolicyDefinitions/PolicyDataStructGFX.h
new file mode 100644
index 00000000000..ab7368ba5d4
--- /dev/null
+++ b/PolicyServicePkg/Samples/PolicyDefinitions/PolicyDataStructGFX.h
@@ -0,0 +1,305 @@
+/** @file
+
+ Platform Configuration C Struct Header File.
+
+ Copyright (c) 2022, Intel Corporation. All rights reserved.
+ Copyright (c) Microsoft Corporation.
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+
+ This file is automatically generated. Please do NOT modify !!!
+
+**/
+
+#ifndef __POLICY_DATA_STRUCT_GFX_H__
+#define __POLICY_DATA_STRUCT_GFX_H__
+
+#include
+
+#pragma pack(1)
+
+#define PDATA_GFX_SIGNATURE 0x5846474154414450
+#define PDATA_GFX_MAJOR_VER 0x01
+#define PDATA_GFX_MINOR_VER 0x01
+
+
+typedef struct {
+
+ /* Power state of GFX port 0 */
+ UINT16 Power_State_Port_0;
+
+ /* Power state of GFX port 1 */
+ UINT16 Power_State_Port_1;
+
+ /* Flag to skip this controller or not */
+ UINT16 Skip_Check_0;
+
+ /* Flag to skip this controller or not */
+ UINT16 Skip_Check_1;
+
+} GFX_POLICY_DATA;
+
+STATIC CONST VERIFIED_POLICY_HEADER POLICY_GFX_DESC = {
+ .Signature = PDATA_GFX_SIGNATURE,
+ .MajorVersion = PDATA_GFX_MAJOR_VER,
+ .MinorVersion = PDATA_GFX_MINOR_VER,
+ .Size = 0x08,
+};
+
+/* Get accessors for MIN_VER 0 */
+inline
+STATIC
+UINT16
+EFIAPI
+GET_GFX_POLICY_DATA_Power_State_Port_0 (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ UINT16 Temp;
+ if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->Size >= (OFFSET_OF(GFX_POLICY_DATA, Power_State_Port_0) + (sizeof (((GFX_POLICY_DATA*)0)->Power_State_Port_0)))) {
+ Temp = ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Power_State_Port_0;
+ } else {
+ Temp = 0x1;
+ }
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x20, FALSE);
+ return Temp;
+}
+
+
+inline
+STATIC
+UINT16
+EFIAPI
+GET_GFX_POLICY_DATA_Power_State_Port_0_default (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x20, FALSE);
+ return 0x1;
+}
+
+inline
+STATIC
+UINT16
+EFIAPI
+GET_GFX_POLICY_DATA_Power_State_Port_1 (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ UINT16 Temp;
+ if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->Size >= (OFFSET_OF(GFX_POLICY_DATA, Power_State_Port_1) + (sizeof (((GFX_POLICY_DATA*)0)->Power_State_Port_1)))) {
+ Temp = ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Power_State_Port_1;
+ } else {
+ Temp = 0x1;
+ }
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x22, FALSE);
+ return Temp;
+}
+
+
+inline
+STATIC
+UINT16
+EFIAPI
+GET_GFX_POLICY_DATA_Power_State_Port_1_default (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x22, FALSE);
+ return 0x1;
+}
+
+
+/* Set accessors for MIN_VER 0 */
+inline
+STATIC
+VOID
+EFIAPI
+SET_GFX_POLICY_DATA_Power_State_Port_0 (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid,
+ IN UINT16 val
+ ) {
+ ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Power_State_Port_0 = val;
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x20, TRUE);
+}
+
+
+inline
+STATIC
+VOID
+EFIAPI
+SET_GFX_POLICY_DATA_Power_State_Port_0_default (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Power_State_Port_0 = 0x1;
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x20, TRUE);
+}
+
+inline
+STATIC
+VOID
+EFIAPI
+SET_GFX_POLICY_DATA_Power_State_Port_1 (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid,
+ IN UINT16 val
+ ) {
+ ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Power_State_Port_1 = val;
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x22, TRUE);
+}
+
+
+inline
+STATIC
+VOID
+EFIAPI
+SET_GFX_POLICY_DATA_Power_State_Port_1_default (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Power_State_Port_1 = 0x1;
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x22, TRUE);
+}
+
+
+/* Get accessors for MIN_VER 1 */
+inline
+STATIC
+UINT16
+EFIAPI
+GET_GFX_POLICY_DATA_Skip_Check_0 (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ UINT16 Temp;
+ if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->Size >= (OFFSET_OF(GFX_POLICY_DATA, Skip_Check_0) + (sizeof (((GFX_POLICY_DATA*)0)->Skip_Check_0)))) {
+ Temp = ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Skip_Check_0;
+ } else {
+ Temp = 0x0;
+ }
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x24, FALSE);
+ return Temp;
+}
+
+
+inline
+STATIC
+UINT16
+EFIAPI
+GET_GFX_POLICY_DATA_Skip_Check_0_default (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x24, FALSE);
+ return 0x0;
+}
+
+inline
+STATIC
+UINT16
+EFIAPI
+GET_GFX_POLICY_DATA_Skip_Check_1 (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ UINT16 Temp;
+ if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->Size >= (OFFSET_OF(GFX_POLICY_DATA, Skip_Check_1) + (sizeof (((GFX_POLICY_DATA*)0)->Skip_Check_1)))) {
+ Temp = ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Skip_Check_1;
+ } else {
+ Temp = 0x0;
+ }
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x26, FALSE);
+ return Temp;
+}
+
+
+inline
+STATIC
+UINT16
+EFIAPI
+GET_GFX_POLICY_DATA_Skip_Check_1_default (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x26, FALSE);
+ return 0x0;
+}
+
+
+/* Set accessors for MIN_VER 1 */
+inline
+STATIC
+VOID
+EFIAPI
+SET_GFX_POLICY_DATA_Skip_Check_0 (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid,
+ IN UINT16 val
+ ) {
+ ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Skip_Check_0 = val;
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x24, TRUE);
+}
+
+
+inline
+STATIC
+VOID
+EFIAPI
+SET_GFX_POLICY_DATA_Skip_Check_0_default (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Skip_Check_0 = 0x0;
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x24, TRUE);
+}
+
+inline
+STATIC
+VOID
+EFIAPI
+SET_GFX_POLICY_DATA_Skip_Check_1 (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid,
+ IN UINT16 val
+ ) {
+ ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Skip_Check_1 = val;
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x26, TRUE);
+}
+
+
+inline
+STATIC
+VOID
+EFIAPI
+SET_GFX_POLICY_DATA_Skip_Check_1_default (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ ((GFX_POLICY_DATA*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->Skip_Check_1 = 0x0;
+ ReportVerifiedPolicyAccess (_handle, Guid, 0x2, 0x26, TRUE);
+}
+
+
+/* Set accessors for all fields of this structure */
+inline
+STATIC
+VOID
+EFIAPI
+SET_GFX_POLICY_DATA_default (
+ IN EFI_HANDLE _handle,
+ IN EFI_GUID *Guid
+ ) {
+ if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->MinorVersion >= 0x0) {
+ SET_GFX_POLICY_DATA_Power_State_Port_0_default (_handle, Guid);
+ SET_GFX_POLICY_DATA_Power_State_Port_1_default (_handle, Guid);
+ }
+ if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->MinorVersion >= 0x1) {
+ SET_GFX_POLICY_DATA_Skip_Check_0_default (_handle, Guid);
+ SET_GFX_POLICY_DATA_Skip_Check_1_default (_handle, Guid);
+ }
+}
+
+#pragma pack()
+
+#endif
diff --git a/PolicyServicePkg/Samples/PolicyDefinitions/README.md b/PolicyServicePkg/Samples/PolicyDefinitions/README.md
new file mode 100644
index 00000000000..152e7c3a648
--- /dev/null
+++ b/PolicyServicePkg/Samples/PolicyDefinitions/README.md
@@ -0,0 +1,9 @@
+
+# Policy YAML Definition Sample
+
+This directory contains sample policy definitions using YAML files to demonstrate
+a basic use of the policy autogen output. The [GFX header file](PolicyDataStructGFX.h)
+demonstrates all the structures, templates and interfaces generated.
+
+The functions are directly invocable from firmware code and templates can be used
+for verified policy operations.
diff --git a/PolicyServicePkg/Tools/CommonUtility.py b/PolicyServicePkg/Tools/CommonUtility.py
new file mode 100644
index 00000000000..1660f18450a
--- /dev/null
+++ b/PolicyServicePkg/Tools/CommonUtility.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python
+## @ CommonUtility.py
+# Common utility script
+#
+# Copyright (c) 2016 - 2020, Intel Corporation. All rights reserved.
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+##
+# Import Modules
+#
+import os
+import sys
+import re
+import shutil
+import subprocess
+import struct
+import hashlib
+import string
+from ctypes import *
+from functools import reduce
+from importlib.machinery import SourceFileLoader
+
+def print_bytes (data, indent=0, offset=0, show_ascii = False):
+ bytes_per_line = 16
+ printable = ' ' + string.ascii_letters + string.digits + string.punctuation
+ str_fmt = '{:s}{:04x}: {:%ds} {:s}' % (bytes_per_line * 3)
+ bytes_per_line
+ data_array = bytearray(data)
+ for idx in range(0, len(data_array), bytes_per_line):
+ hex_str = ' '.join('%02X' % val for val in data_array[idx:idx + bytes_per_line])
+ asc_str = ''.join('%c' % (val if (chr(val) in printable) else '.')
+ for val in data_array[idx:idx + bytes_per_line])
+ print (str_fmt.format(indent * ' ', offset + idx, hex_str, ' ' + asc_str if show_ascii else ''))
+
+def get_bits_from_bytes (bytes, start, length):
+ if length == 0:
+ return 0
+ byte_start = (start) // 8
+ byte_end = (start + length - 1) // 8
+ bit_start = start & 7
+ mask = (1 << length) - 1
+ val = bytes_to_value (bytes[byte_start:byte_end + 1])
+ val = (val >> bit_start) & mask
+ return val
+
+def set_bits_to_bytes (bytes, start, length, bvalue):
+ if length == 0:
+ return
+ byte_start = (start) // 8
+ byte_end = (start + length - 1) // 8
+ bit_start = start & 7
+ mask = (1 << length) - 1
+ val = bytes_to_value (bytes[byte_start:byte_end + 1])
+ val &= ~(mask << bit_start)
+ val |= ((bvalue & mask) << bit_start)
+ bytes[byte_start:byte_end+1] = value_to_bytearray (val, byte_end + 1 - byte_start)
+
+def value_to_bytes (value, length):
+ return value.to_bytes(length, 'little')
+
+def bytes_to_value (bytes):
+ return int.from_bytes (bytes, 'little')
+
+def value_to_bytearray (value, length):
+ return bytearray(value_to_bytes(value, length))
+
+def value_to_bytearray (value, length):
+ return bytearray(value_to_bytes(value, length))
+
+def get_aligned_value (value, alignment = 4):
+ if alignment != (1 << (alignment.bit_length() - 1)):
+ raise Exception ('Alignment (0x%x) should to be power of 2 !' % alignment)
+ value = (value + (alignment - 1)) & ~(alignment - 1)
+ return value
+
+def get_padding_length (data_len, alignment = 4):
+ new_data_len = get_aligned_value (data_len, alignment)
+ return new_data_len - data_len
+
+def get_file_data (file, mode = 'rb'):
+ return open(file, mode).read()
+
+def gen_file_from_object (file, object):
+ open (file, 'wb').write(object)
+
+def gen_file_with_size (file, size):
+ open (file, 'wb').write(b'\xFF' * size);
+
+def check_files_exist (base_name_list, dir = '', ext = ''):
+ for each in base_name_list:
+ if not os.path.exists (os.path.join (dir, each + ext)):
+ return False
+ return True
+
+def load_source (name, filepath):
+ mod = SourceFileLoader (name, filepath).load_module()
+ return mod
+
+def get_openssl_path ():
+ if os.name == 'nt':
+ if 'OPENSSL_PATH' not in os.environ:
+ openssl_dir = "C:\\Openssl\\bin\\"
+ if os.path.exists (openssl_dir):
+ os.environ['OPENSSL_PATH'] = openssl_dir
+ else:
+ os.environ['OPENSSL_PATH'] = "C:\\Openssl\\"
+ if 'OPENSSL_CONF' not in os.environ:
+ openssl_cfg = "C:\\Openssl\\openssl.cfg"
+ if os.path.exists(openssl_cfg):
+ os.environ['OPENSSL_CONF'] = openssl_cfg
+ openssl = os.path.join(os.environ.get ('OPENSSL_PATH', ''), 'openssl.exe')
+ else:
+ # Get openssl path for Linux cases
+ openssl = shutil.which('openssl')
+
+ return openssl
+
+def run_process (arg_list, print_cmd = False, capture_out = False):
+ sys.stdout.flush()
+ if os.name == 'nt' and os.path.splitext(arg_list[0])[1] == '' and \
+ os.path.exists (arg_list[0] + '.exe'):
+ arg_list[0] += '.exe'
+ if print_cmd:
+ print (' '.join(arg_list))
+
+ exc = None
+ result = 0
+ output = ''
+ try:
+ if capture_out:
+ output = subprocess.check_output(arg_list).decode()
+ else:
+ result = subprocess.call (arg_list)
+ except Exception as ex:
+ result = 1
+ exc = ex
+
+ if result:
+ if not print_cmd:
+ print ('Error in running process:\n %s' % ' '.join(arg_list))
+ if exc is None:
+ sys.exit(1)
+ else:
+ raise exc
+
+ return output
diff --git a/PolicyServicePkg/Tools/GenCfgData.py b/PolicyServicePkg/Tools/GenCfgData.py
new file mode 100644
index 00000000000..620a69d097e
--- /dev/null
+++ b/PolicyServicePkg/Tools/GenCfgData.py
@@ -0,0 +1,2564 @@
+## @ GenCfgData.py
+#
+# Copyright (c) 2020, Intel Corporation. All rights reserved.
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+import os
+import sys
+import re
+import struct
+import marshal
+import pprint
+import string
+import operator as op
+import ast
+import binascii
+from typing import Type, final
+from unicodedata import category
+import uuid
+from datetime import date
+from collections import OrderedDict
+
+from CommonUtility import *
+
+# Generated file copyright header
+__copyright_tmp__ = """/** @file
+
+ Platform Configuration %s File.
+
+ Copyright (c) %4d, Intel Corporation. All rights reserved.
+ Copyright (c) Microsoft Corporation.
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+
+ This file is automatically generated. Please do NOT modify !!!
+
+**/
+"""
+
+def get_copyright_header (file_type, allow_modify = False):
+ file_description = {
+ 'yaml': 'Boot Setting',
+ 'dlt' : 'Delta',
+ 'inc' : 'C Binary Blob',
+ 'h' : 'C Struct Header'
+ }
+ if file_type in ['yaml', 'dlt']:
+ comment_char = '#'
+ else:
+ comment_char = ''
+ lines = __copyright_tmp__.split('\n')
+ if allow_modify:
+ lines = [line for line in lines if 'Please do NOT modify' not in line]
+ copyright_hdr = '\n'.join('%s%s' % (comment_char, line) for line in lines)[:-1] + '\n'
+ return copyright_hdr % (file_description[file_type], date.today().year)
+
+def check_quote (text):
+ if (text[0] == "'" and text[-1] == "'") or (text[0] == '"' and text[-1] == '"'):
+ return True
+ return False
+
+def strip_quote (text):
+ new_text = text.strip()
+ if check_quote (new_text):
+ return new_text[1:-1]
+ return text
+
+def strip_delimiter (text, delim):
+ new_text = text.strip()
+ if new_text:
+ if new_text[0] == delim[0] and new_text[-1] == delim[-1]:
+ return new_text[1:-1]
+ return text
+
+def bytes_to_bracket_str (bytes):
+ return '{ %s }' % (', '.join('0x%02x' % i for i in bytes))
+
+def array_str_to_value (val_str):
+ val_str = val_str.strip()
+ val_str = strip_delimiter (val_str, '{}')
+ val_str = strip_quote (val_str)
+ value = 0
+ for each in val_str.split(',')[::-1]:
+ each = each.strip()
+ value = (value << 8) | int(each, 0)
+ return value
+
+def write_lines (lines, file):
+ fo = open(file, "w")
+ fo.write (''.join ([x[0] for x in lines]))
+ fo.close ()
+
+def read_lines (file):
+ if not os.path.exists(file):
+ test_file = os.path.basename(file)
+ if os.path.exists(test_file):
+ file = test_file
+ fi = open (file, 'r')
+ lines = fi.readlines ()
+ fi.close ()
+ return lines
+
+def expand_file_value (path, value_str):
+ result = bytearray()
+ match = re.match("\{\s*FILE:(.+)\}", value_str)
+ if match:
+ file_list = match.group(1).split(',')
+ for file in file_list:
+ file = file.strip()
+ bin_path = os.path.join(path, file)
+ result.extend(bytearray(open(bin_path, 'rb').read()))
+ return result
+
+class ExpressionEval(ast.NodeVisitor):
+ operators = {
+ ast.Add: op.add,
+ ast.Sub: op.sub,
+ ast.Mult: op.mul,
+ ast.Div: op.floordiv,
+ ast.Mod: op.mod,
+ ast.Eq: op.eq,
+ ast.NotEq: op.ne,
+ ast.Gt: op.gt,
+ ast.Lt: op.lt,
+ ast.GtE: op.ge,
+ ast.LtE: op.le,
+ ast.BitXor: op.xor,
+ ast.BitAnd: op.and_,
+ ast.BitOr: op.or_,
+ ast.Invert: op.invert,
+ ast.USub: op.neg
+ }
+
+
+ def __init__(self):
+ self._debug = False
+ self._expression = ''
+ self._namespace = {}
+ self._get_variable = None
+
+ def eval(self, expr, vars={}):
+ self._expression = expr
+ if type(vars) is dict:
+ self._namespace = vars
+ self._get_variable = None
+ else:
+ self._namespace = {}
+ self._get_variable = vars
+ node = ast.parse(self._expression, mode='eval')
+ result = self.visit(node.body)
+ if self._debug:
+ print ('EVAL [ %s ] = %s' % (expr, str(result)))
+ return result
+
+ def visit_Name(self, node):
+ if self._get_variable is not None:
+ return self._get_variable(node.id)
+ else:
+ return self._namespace[node.id]
+
+ def visit_Num(self, node):
+ return node.n
+
+ def visit_NameConstant(self, node):
+ return node.value
+
+ def visit_BoolOp(self, node):
+ result = False
+ if isinstance(node.op, ast.And):
+ for value in node.values:
+ result = self.visit(value)
+ if not result:
+ break
+ elif isinstance(node.op, ast.Or):
+ for value in node.values:
+ result = self.visit(value)
+ if result:
+ break
+ return True if result else False
+
+ def visit_UnaryOp(self, node):
+ val = self.visit(node.operand)
+ return operators[type(node.op)](val)
+
+ def visit_BinOp(self, node):
+ lhs = self.visit(node.left)
+ rhs = self.visit(node.right)
+ return ExpressionEval.operators[type(node.op)](lhs, rhs)
+
+ def visit_Compare(self, node):
+ right = self.visit(node.left)
+ result = True
+ for operation, comp in zip(node.ops, node.comparators):
+ if not result:
+ break
+ left = right
+ right = self.visit(comp)
+ result = ExpressionEval.operators[type(operation)](left, right)
+ return result
+
+ def visit_Call(self, node):
+ if node.func.id in ['ternary']:
+ condition = self.visit (node.args[0])
+ val_true = self.visit (node.args[1])
+ val_false = self.visit (node.args[2])
+ return val_true if condition else val_false
+ elif node.func.id in ['offset', 'length']:
+ if self._get_variable is not None:
+ return self._get_variable(node.args[0].s, node.func.id)
+ else:
+ raise ValueError("Unsupported function: " + repr(node))
+
+ def generic_visit(self, node):
+ raise ValueError("malformed node or string: " + repr(node))
+
+
+class CFG_YAML():
+    """Minimal YAML-subset parser with !include / !expand support."""
+    TEMPLATE = 'template'
+    CONFIGS = 'configs'
+    VARIABLE = 'variable'
+
+    def __init__ (self, is_policy=False):
+        self.log_line = False        # record expanded lines when True
+        self.allow_template = False  # enables !expand processing
+        self.cfg_tree = None
+        self.tmp_tree = None
+        self.var_dict = None
+        self.def_dict = {}
+        self.yaml_path = ''
+        self.lines = []              # pending input lines (consumed front-first)
+        self.full_lines = []         # expanded-output log (see expand_yaml)
+        self.index = 0               # counter for unique $ACTION keys
+        self.is_policy = is_policy
+        self.re_expand = re.compile (r'(.+:\s+|\s*\-\s*)!expand\s+\{\s*(\w+_TMPL)\s*:\s*\[(.+)]\s*\}')
+        self.re_include = re.compile (r'(.+:\s+|\s*\-\s*)!include\s+(.+)')
+
+ @staticmethod
+ def count_indent (line):
+ return next((i for i, c in enumerate(line) if not c.isspace()), len(line))
+
+ @staticmethod
+ def substitue_args (text, arg_dict):
+ for arg in arg_dict:
+ text = text.replace ('$' + arg, arg_dict[arg])
+ return text
+
+    @staticmethod
+    def dprint (*args):
+        # Debug print hook; intentionally a no-op in production.
+        pass
+
+    def process_include (self, line, insert = True):
+        # Handle an '!include <file>' directive.  The included file is
+        # located relative to the YAML directory (falling back to the
+        # project root), re-indented to match the include site, and, when
+        # 'insert' is True, pushed back onto the pending line queue.
+        # Returns the adjusted list of lines.
+        match = self.re_include.match (line)
+        if not match:
+            raise Exception ("Invalid !include format '%s' !" % line.strip())
+
+        prefix = match.group(1)
+        include = match.group(2)
+        if prefix.strip() == '-':
+            # list-style include: keep the '-' marker, no extra indent
+            prefix = ''
+            adjust = 0
+        else:
+            # key-style include: indent the content under the key
+            adjust = 2
+
+        include = strip_quote (include)
+        request = CFG_YAML.count_indent (line) + adjust
+
+        if self.log_line:
+            # remove the include line itself
+            del self.full_lines[-1]
+
+        inc_path = os.path.join (self.yaml_path, include)
+        if not os.path.exists(inc_path):
+            # try relative path to project root
+            try_path = os.path.join(os.path.dirname (os.path.realpath(__file__)), "../..", include)
+            if os.path.exists(try_path):
+                inc_path = try_path
+            else:
+                raise Exception ("ERROR: Cannot open file '%s'." % inc_path)
+
+        lines = read_lines (inc_path)
+
+        # Find the first content line to establish the included file's
+        # base indentation.
+        # NOTE(review): 'start' stays a string if the file holds only
+        # blanks/comments and the slice below would misbehave — assumes
+        # every included file has at least one content line; confirm.
+        current = 0
+        same_line = False
+        for idx, each in enumerate (lines):
+            start = each.lstrip()
+            if start == '' or start[0] == '#':
+                continue
+
+            if start[0] == '>':
+                # append the content directly at the same line
+                same_line = True
+
+            start = idx
+            current = CFG_YAML.count_indent (each)
+            break
+
+        lines = lines[start+1:] if same_line else lines[start:]
+        leading = ''
+        if same_line:
+            request = len(prefix)
+            leading = '>'
+
+        # Re-anchor every included line from its own indent ('current')
+        # to the requested indent at the include site.
+        lines = [prefix + '%s\n' % leading] + [' ' * request + i[current:] for i in lines]
+        if insert:
+            self.lines = lines + self.lines
+
+        return lines
+
+ def process_expand (self, line):
+ match = self.re_expand.match(line)
+ if not match:
+ raise Exception ("Invalid !expand format '%s' !" % line.strip())
+ lines = []
+ prefix = match.group(1)
+ temp_name = match.group(2)
+ args = match.group(3)
+
+ if prefix.strip() == '-':
+ indent = 0
+ else:
+ indent = 2
+ lines = self.process_expand_template (temp_name, prefix, args, indent)
+ self.lines = lines + self.lines
+
+
+    def process_expand_template (self, temp_name, prefix, args, indent = 2):
+        # Instantiate template 'temp_name' with the positional 'args':
+        # apply global $(VAR) substitution, then map '(1)', '(2)', ... to
+        # the arguments, and re-indent the text for the expansion site.
+        # Returns the list of expanded lines (newline-terminated).
+        if temp_name not in self.tmp_tree:
+            raise Exception ("Could not find template '%s' !" % temp_name)
+        parts = args.split(',')
+        parts = [i.strip() for i in parts]
+        num = len(parts)
+        # positional placeholders: '(1)' -> first arg, '(2)' -> second ...
+        arg_dict = dict(zip( ['(%d)' % (i + 1) for i in range(num)], parts))
+        str_data = self.tmp_tree[temp_name]
+        text = DefTemplate(str_data).safe_substitute(self.def_dict)
+        text = CFG_YAML.substitue_args (text, arg_dict)
+        target = CFG_YAML.count_indent (prefix) + indent
+        current = CFG_YAML.count_indent (text)
+        padding = target * ' '
+        if indent == 0:
+            leading = []
+        else:
+            leading = [prefix + '\n']
+        # shift each template line from its own indent to the target one
+        text = leading + [(padding + i + '\n')[current:] for i in text.splitlines()]
+        return text
+
+
+ def load_file (self, yaml_file):
+ self.index = 0
+ self.lines = read_lines (yaml_file)
+
+
+ def peek_line (self):
+ if len(self.lines) == 0:
+ return None
+ else:
+ return self.lines[0]
+
+
+ def put_line (self, line):
+ self.lines.insert (0, line)
+ if self.log_line:
+ del self.full_lines[-1]
+
+
+ def get_line (self):
+ if len(self.lines) == 0:
+ return None
+ else:
+ line = self.lines.pop(0)
+ if self.log_line:
+ self.full_lines.append (line.rstrip())
+ return line
+
+
+    def get_multiple_line (self, indent):
+        # Collect the body of a YAML block scalar (': >'): consume lines
+        # until a non-blank line appears at or left of 'indent'.  Blank
+        # lines are consumed but not appended to the result.
+        text = ''
+        newind = indent + 1
+        while True:
+            line = self.peek_line ()
+            if line is None:
+                break
+            sline = line.strip()
+            if sline != '':
+                newind = CFG_YAML.count_indent(line)
+                if newind <= indent:
+                    break
+            self.get_line ()
+            if sline != '':
+                text = text + line
+        return text
+
+
+ def traverse_cfg_tree (self, handler):
+ def _traverse_cfg_tree (root, level = 0):
+ # config structure
+ for key in root:
+ if type(root[key]) is OrderedDict:
+ level += 1
+ handler (key, root[key], level)
+ _traverse_cfg_tree (root[key], level)
+ level -= 1
+ _traverse_cfg_tree (self.cfg_tree)
+
+
+ def count (self):
+ def _count (name, cfgs, level):
+ num[0] += 1
+ num = [0]
+ self.traverse_cfg_tree (_count)
+ return num[0]
+
+
+    def parse (self, parent_name = '', curr = None, level = 0):
+        # Recursive-descent parse of the queued lines into nested
+        # OrderedDicts.  Recursion depth follows indentation: deeper
+        # indents recurse into the last-created child dict; a shallower
+        # indent returns to the caller.  Side effects: records the
+        # 'variable' and 'template' sections, expands !include/!expand
+        # directives, and injects CFGD header fields for the 'configs'
+        # section when not in policy mode.
+        child = None
+        last_indent = None
+        temp_chk = {}
+
+        while True:
+            line = self.get_line ()
+            if line is None:
+                break
+
+            curr_line = line.strip()
+            if curr_line == '' or curr_line[0] == '#':
+                continue
+
+            indent = CFG_YAML.count_indent(line)
+            if last_indent is None:
+                last_indent = indent
+
+            if indent != last_indent:
+                # outside of current block, put the line back to queue
+                self.put_line (' ' * indent + curr_line)
+
+            if curr_line.endswith (': >'):
+                # multiline marker
+                old_count = len(self.full_lines)
+                line = self.get_multiple_line (indent)
+                if self.log_line and not self.allow_template and '!include ' in line:
+                    # expand include in template
+                    new_lines = []
+                    lines = line.splitlines()
+                    for idx, each in enumerate(lines):
+                        if '!include ' in each:
+                            new_line = ''.join(self.process_include (each, False))
+                            new_lines.append(new_line)
+                        else:
+                            new_lines.append(each)
+                    self.full_lines = self.full_lines[:old_count] + new_lines
+                curr_line = curr_line + line
+
+            if indent > last_indent:
+                # child nodes: recurse into the dict created for 'key'
+                if child is None:
+                    raise Exception ('Unexpected format at line: %s' % (curr_line))
+
+                level += 1
+                self.parse (key, child, level)
+                level -= 1
+
+                line = self.peek_line ()
+                if line is not None:
+                    curr_line = line.strip()
+                    indent = CFG_YAML.count_indent(line)
+                    if indent >= last_indent:
+                        # consume the line
+                        self.get_line ()
+                else:
+                    # end of file
+                    indent = -1
+
+            if curr is None:
+                curr = OrderedDict()
+
+            if indent < last_indent:
+                # dedent: this level is complete
+                return curr
+
+            marker1 = curr_line[0]
+            marker2 = curr_line[-1]
+            start = 1 if marker1 == '-' else 0
+            pos = curr_line.find(': ')
+            if pos > 0:
+                # 'key: value' leaf (or directive) line
+                child = None
+                key = curr_line[start:pos].strip()
+                if curr_line[pos + 2] == '>':
+                    curr[key] = curr_line[pos + 3:]
+                else:
+                    # XXXX: !include / !expand
+                    if '!include ' in curr_line:
+                        self.process_include (line)
+                    elif '!expand ' in curr_line:
+                        if self.allow_template and not self.log_line:
+                            self.process_expand (line)
+                    else:
+                        value_str = curr_line[pos + 2:].strip()
+                        if key == "IdTag" or key == "ArrayIdTag":
+                            # Insert the headers corresponding to this ID
+                            # tag here; most contents are hardcoded for now
+                            cfg_hdr = OrderedDict()
+                            cfg_hdr['length'] = '0x04'
+                            cfg_hdr['value'] = '{0x01:2b, (_LENGTH_%s_/4):10b, %d:4b, 0:4b, %s:12b}' % (parent_name, 0 if key == "IdTag" else 1, value_str)
+                            curr['CfgHeader'] = cfg_hdr
+
+                            cnd_val = OrderedDict()
+                            cnd_val['length'] = '0x04'
+                            cnd_val['value'] = '0x00000000'
+                            curr['CondValue'] = cnd_val
+                        else:
+                            curr[key] = value_str
+                            if self.log_line and value_str[0] == '{':
+                                # expand {FILE: xxxx} format in the log line
+                                if value_str[1:].rstrip().startswith('FILE:'):
+                                    value_bytes = expand_file_value (self.yaml_path, value_str)
+                                    value_str = bytes_to_bracket_str (value_bytes)
+                                    self.full_lines[-1] = line[:indent] + curr_line[:pos + 2] + value_str
+
+            elif marker2 == ':':
+                # 'key:' opens a nested dict
+                child = OrderedDict()
+                key = curr_line[start:-1].strip()
+                if key == '$ACTION':
+                    # special virtual nodes, rename to ensure unique key
+                    key = '$ACTION_%04X' % self.index
+                    self.index += 1
+                if key in curr:
+                    if key not in temp_chk:
+                        # check for duplicated keys at same level
+                        temp_chk[key] = 1
+                    else:
+                        raise Exception ("Duplicated item '%s:%s' found !" % (parent_name, key))
+
+                curr[key] = child
+                if self.var_dict is None and key == CFG_YAML.VARIABLE:
+                    self.var_dict = child
+                if self.tmp_tree is None and key == CFG_YAML.TEMPLATE:
+                    self.tmp_tree = child
+                if self.var_dict:
+                    # publish string-valued variables for $(VAR) expansion
+                    for each in self.var_dict:
+                        txt = self.var_dict[each]
+                        if type(txt) is str:
+                            self.def_dict['(%s)' % each] = txt
+                if self.tmp_tree and key == CFG_YAML.CONFIGS:
+                    if not self.is_policy:
+                        # apply template for the main configs and inject
+                        # the fixed CFGD header fields
+                        self.allow_template = True
+                        child['Signature'] = OrderedDict()
+                        child['Signature']['length'] = '0x04'
+                        child['Signature']['value'] = "{'CFGD'}"
+
+                        child['HeaderLength'] = OrderedDict()
+                        child['HeaderLength']['length'] = '0x01'
+                        child['HeaderLength']['value'] = '0x10'
+
+                        child['Reserved'] = OrderedDict()
+                        child['Reserved']['length'] = '0x03'
+                        child['Reserved']['value'] = '{0,0,0}'
+
+                        child['UsedLength'] = OrderedDict()
+                        child['UsedLength']['length'] = '0x04'
+                        child['UsedLength']['value'] = '_LENGTH_'
+
+                        # This will be rounded up to 4KB aligned
+                        child['TotalLength'] = OrderedDict()
+                        child['TotalLength']['length'] = '0x04'
+                        child['TotalLength']['value'] = '(_LENGTH_/0x1000 + 1)*0x1000'
+            else:
+                child = None
+                # - !include cfg_opt.yaml
+                if '!include ' in curr_line:
+                    self.process_include (line)
+
+        return curr
+
+
+ def load_yaml (self, opt_file):
+ self.var_dict = None
+ self.yaml_path = os.path.dirname (opt_file)
+ self.load_file (opt_file)
+ yaml_tree = self.parse ()
+ self.tmp_tree = yaml_tree[CFG_YAML.TEMPLATE]
+ self.cfg_tree = yaml_tree[CFG_YAML.CONFIGS]
+ return self.cfg_tree
+
+
+ def expand_yaml (self, opt_file):
+ self.log_line = True
+ self.load_yaml (opt_file)
+ self.log_line = False
+ text = '\n'.join (self.full_lines)
+ self.full_lines = []
+ return text
+
+
+class DefTemplate(string.Template):
+    # string.Template variant whose identifier pattern also accepts the
+    # '(NAME)' form, so '$(FOO)' placeholders can be substituted.
+    idpattern = '\([_A-Z][_A-Z0-9]*\)|[_A-Z][_A-Z0-9]*'
+
+
+class CGenCfgData:
+    """Parser/generator for platform configuration YAML data."""
+    STRUCT = '$STRUCT'
+    # width in bits for each length-suffix letter
+    bits_width = {'b':1, 'B':8, 'W':16, 'D':32, 'Q':64}
+    builtin_option = {'$EN_DIS' : [('0', 'Disable'), ('1', 'Enable')]}
+    exclude_struct = ['GPIO_GPP_*', 'GPIO_CFG_DATA', 'GpioConfPad*', 'GpioPinConfig',
+                      'BOOT_OPTION*', 'PLATFORMID_CFG_DATA', '\w+_Half[01]']
+    include_tag = ['GPIO_CFG_DATA']
+    keyword_set = set(['name', 'type', 'option', 'help', 'length', 'value', 'order', 'struct', 'condition', 'minver'])
+
+    def __init__(self):
+        # all mutable state lives in initialize() so instances can reset
+        self.initialize ()
+
+
+    def initialize (self):
+        # Reset all parser/build state to empty defaults.
+        self._cfg_tree = {}
+        self._tmp_tree = {}
+        self._cfg_list = []
+        self._cfg_page = {'root': {'title': '', 'child': []}}
+        self._cur_page = ''
+        self._var_dict = {}
+        self._def_dict = {}
+        self._yaml_path = ''
+
+
+ @staticmethod
+ def deep_convert_dict (layer):
+ # convert OrderedDict to list + dict
+ new_list = layer
+ if isinstance(layer, OrderedDict):
+ new_list = list (layer.items())
+ for idx, pair in enumerate (new_list):
+ new_node = CGenCfgData.deep_convert_dict (pair[1])
+ new_list[idx] = dict({pair[0] : new_node})
+ return new_list
+
+
+ @staticmethod
+ def deep_convert_list (layer):
+ if isinstance(layer, list):
+ od = OrderedDict({})
+ for each in layer:
+ if isinstance(each, dict):
+ key = next(iter(each))
+ od[key] = CGenCfgData.deep_convert_list(each[key])
+ return od
+ else:
+ return layer
+
+
+    @staticmethod
+    def expand_include_files (file_path, cur_dir = ''):
+        # Read 'file_path' and recursively inline any '!include <file>'
+        # references.  Returns a list of (line, source_file, line_number)
+        # tuples so later diagnostics can point back at the origin.
+        if cur_dir == '':
+            cur_dir = os.path.dirname(file_path)
+            file_path = os.path.basename(file_path)
+
+        input_file_path = os.path.join(cur_dir, file_path)
+        file = open(input_file_path, "r")
+        lines = file.readlines()
+        file.close()
+
+        new_lines = []
+        for line_num, line in enumerate(lines):
+            match = re.match("^!include\s*(.+)?$", line.strip())
+            if match:
+                inc_path = match.group(1)
+                tmp_path = os.path.join(cur_dir, inc_path)
+                org_path = tmp_path
+                if not os.path.exists(tmp_path):
+                    # fall back to the repository root
+                    # NOTE(review): this rebinds 'cur_dir' for all later
+                    # iterations of the loop too — confirm intended.
+                    cur_dir = os.path.join(os.path.dirname (os.path.realpath(__file__)), "..", "..")
+                    tmp_path = os.path.join(cur_dir, inc_path)
+                if not os.path.exists(tmp_path):
+                    raise Exception ("ERROR: Cannot open include file '%s'." % org_path)
+                else:
+                    new_lines.append (('# Included from file: %s\n' % inc_path, tmp_path, 0))
+                    new_lines.append (('# %s\n' % ('=' * 80), tmp_path, 0))
+                    new_lines.extend (CGenCfgData.expand_include_files (inc_path, cur_dir))
+            else:
+                new_lines.append ((line, input_file_path, line_num))
+
+        return new_lines
+
+
+ @staticmethod
+ def format_struct_field_name (input, count = 0):
+ name = ''
+ cap = True
+ if '_' in input:
+ input = input.lower()
+ for each in input:
+ if each == '_':
+ cap = True
+ continue
+ elif cap:
+ each = each.upper()
+ cap = False
+ name = name + each
+
+ if count > 1:
+ name = '%s[%d]' % (name, count)
+
+ return name
+
+ def get_last_error (self):
+ return ''
+
+
+    def get_variable (self, var, attr = 'value'):
+        # Resolve 'var' either from the pre-built variable dictionary or,
+        # failing that, from a config item path; 'attr' selects which
+        # property of the item ('value', 'offset' or 'length') to return.
+        if var in self._var_dict:
+            var = self._var_dict[var]
+            return var
+
+        item = self.locate_cfg_item (var, False)
+        if item is None:
+            raise ValueError ("Cannot find variable '%s' !" % var)
+
+        if item:
+            if 'indx' in item:
+                # leaf node: switch to the flat item record
+                item = self.get_item_by_index (item['indx'])
+            if attr == 'offset':
+                var = item['offset']
+            elif attr == 'length':
+                var = item['length']
+            elif attr == 'value':
+                var = self.get_cfg_item_value (item)
+            else:
+                raise ValueError ("Unsupported variable attribute '%s' !" % attr)
+        return var
+
+
+    def eval (self, expr):
+        # Evaluate an arithmetic/logic expression that may reference
+        # config variables via '$(name)' or '$name' syntax; variables are
+        # substituted with their hex values, then the result is computed
+        # by the restricted ExpressionEval AST walker.
+        def _handler (pattern):
+            # replace one matched $-variable with its hex value
+            if pattern.group(1):
+                target = 1
+            else:
+                target = 2
+            result = self.get_variable(pattern.group(target))
+            if result is None:
+                raise ValueError('Unknown variable $(%s) !' % pattern.group(target))
+            return hex(result)
+
+        expr_eval = ExpressionEval ()
+        if '$' in expr:
+            # replace known variable first
+            expr = re.sub(r'\$\(([_a-zA-Z][\w\.]*)\)|\$([_a-zA-Z][\w\.]*)', _handler, expr)
+        return expr_eval.eval(expr, self.get_variable)
+
+
+ def get_cfg_list (self, page_id = None):
+ if page_id is None:
+ # return full list
+ return self._cfg_list
+ else:
+ # build a new list for items under a page ID
+ cfgs = [i for i in self._cfg_list if i['cname'] and (i['page'] == page_id)]
+ return cfgs
+
+
+    def get_cfg_page (self):
+        # Accessor for the page hierarchy.
+        return self._cfg_page
+
+    def get_cfg_item_length (self, item):
+        # Length of a config item, in bits.
+        return item['length']
+
+    def get_cfg_item_value (self, item, array = False):
+        # Decode the item's value string into bytes (array=True) or int.
+        value_str = item['value']
+        length = item['length']
+        return self.get_value (value_str, length, array)
+
+
+    def format_value_to_str (self, value, bit_length, old_value = ''):
+        # Render integer 'value' back to text, mirroring the notation of
+        # 'old_value' (hex, quoted string, or byte list) when one is
+        # recognizable; otherwise pick a format based on byte length.
+        # value is always int
+        length = (bit_length + 7) // 8
+        fmt = ''
+        if old_value.startswith ('0x'):
+            fmt = '0x'
+        elif old_value and (old_value[0] in ['"', "'", '{']):
+            fmt = old_value[0]
+        else:
+            fmt = ''
+
+        bvalue = value_to_bytearray (value, length)
+        if fmt in ['"', "'"]:
+            # quoted text: strip trailing NUL padding before re-quoting
+            svalue = bvalue.rstrip(b'\x00').decode()
+            value_str = fmt + svalue + fmt
+        elif fmt == "{":
+            value_str = '{ ' + ', '.join(['0x%02x' % i for i in bvalue]) + ' }'
+        elif fmt == '0x':
+            hex_len = length * 2
+            if len(old_value) == hex_len + 2:
+                # keep the original zero-padded width
+                fstr = '0x%%0%dX' % hex_len
+            else:
+                fstr = '0x%X'
+            value_str = fstr % value
+        else:
+            if length <= 2:
+                value_str = '%d' % value
+            elif length <= 8:
+                value_str = '0x%x' % value
+            else:
+                value_str = '{ ' + ', '.join(['0x%02x' % i for i in bvalue]) + ' }'
+        return value_str
+
+
+ def reformat_value_str (self, value_str, bit_length, old_value = None):
+ value = self.parse_value (value_str, bit_length, False)
+ if old_value is None:
+ old_value = value_str
+ new_value = self.format_value_to_str (value, bit_length, old_value)
+ return new_value
+
+
+    def get_value (self, value_str, bit_length, array = True):
+        # Decode a value string (quoted text or '{ byte list }') into a
+        # bytearray (array=True) or a little-endian integer.
+        value_str = value_str.strip()
+        if len(value_str) == 0:
+            return 0
+        if value_str[0] == "'" and value_str[-1] == "'" or \
+           value_str[0] == '"' and value_str[-1] == '"':
+            # quoted string -> raw bytes of the text (at least one NUL)
+            value_str = value_str[1:-1]
+            bvalue = bytearray (value_str.encode())
+            if len(bvalue) == 0:
+                bvalue = bytearray(b'\x00')
+            if array:
+                return bvalue
+            else:
+                return bytes_to_value (bvalue)
+        else:
+            if value_str[0] in '{' :
+                value_str = value_str[1:-1].strip()
+            value = 0
+            # accumulate comma-separated bytes, little-endian order
+            for each in value_str.split(',')[::-1]:
+                each = each.strip()
+                value = (value << 8) | int(each, 0)
+            if array:
+                length = (bit_length + 7) // 8
+                return value_to_bytearray (value, length)
+            else:
+                return value
+
+
+    def parse_value (self, value_str, bit_length, array = True):
+        # Parse a value expression (quoted string, '{ ... }' list with
+        # optional bit-field elements, FILE reference, or arithmetic
+        # expression) into a zero-padded bytearray of (bit_length+7)//8
+        # bytes, or an integer when array is False.
+        length = (bit_length + 7) // 8
+        if check_quote(value_str):
+            value_str = bytes_to_bracket_str(value_str[1:-1].encode())
+        elif (',' in value_str) and (value_str[0] != '{'):
+            value_str = '{ %s }' % value_str
+        if value_str[0] == '{':
+            result = expand_file_value (self._yaml_path, value_str)
+            if len(result) == 0 :
+                bin_list = value_str[1:-1].split(',')
+                value = 0
+                bit_len = 0
+                unit_len = 1
+                for idx, element in enumerate(bin_list):
+                    each = element.strip()
+                    if len(each) == 0:
+                        continue
+
+                    in_bit_field = False
+                    if each[0] in "'" + '"':
+                        each_value = bytearray(each[1:-1], 'utf-8')
+                    elif ':' in each:
+                        match = re.match("^(.+):(\d+)([b|B|W|D|Q])$", each)
+                        if match is None:
+                            raise SystemExit("Exception: Invald value list format '%s' !" % each)
+                        if match.group(1) == '0' and match.group(2) == '0':
+                            unit_len = CGenCfgData.bits_width[match.group(3)] // 8
+                        cur_bit_len = int(match.group(2)) * CGenCfgData.bits_width[match.group(3)]
+                        # NOTE(review): the next line is visibly truncated
+                        # in this patch ('(1< 0:' is not valid Python) —
+                        # the bit-field accumulation logic between here and
+                        # the alignment check below appears to have been
+                        # lost when the diff was generated.  Recover it
+                        # from the original GenCfgData.py before merging.
+                        value += ((self.eval(match.group(1)) & (1< 0:
+                        if bit_len % 8 != 0:
+                            raise SystemExit("Exception: Invalid bit field alignment '%s' !" % value_str)
+                        result.extend(value_to_bytes(value, bit_len // 8))
+                        value = 0
+                        bit_len = 0
+
+                    result.extend(each_value)
+
+        elif check_quote (value_str):
+            result = bytearray(value_str[1:-1], 'utf-8') # Excluding quotes
+        else:
+            result = value_to_bytearray (self.eval(value_str), length)
+
+        if len(result) < length:
+            # zero-pad up to the declared byte length
+            result.extend(b'\x00' * (length - len(result)))
+        elif len(result) > length:
+            raise SystemExit ("Exception: Value '%s' is too big to fit into %d bytes !" % (value_str, length))
+
+        if array:
+            return result
+        else:
+            return bytes_to_value(result)
+
+        return result
+
+
+ def get_cfg_item_options (self, item):
+ tmp_list = []
+ if item['type'] == "Combo":
+ if item['option'] in CGenCfgData.builtin_option:
+ for op_val, op_str in CGenCfgData.builtin_option[item['option']]:
+ tmp_list.append((op_val, op_str))
+ else:
+ opt_list = item['option'].split(',')
+ for option in opt_list:
+ option = option.strip()
+ try:
+ (op_val, op_str) = option.split(':')
+ except:
+ raise SystemExit ("Exception: Invalid option format '%s' for item '%s' !" % (option, item['cname']))
+ tmp_list.append((op_val, op_str))
+ return tmp_list
+
+
+ def get_page_title(self, page_id, top = None):
+ if top is None:
+ top = self.get_cfg_page()['root']
+ for node in top['child']:
+ page_key = next(iter(node))
+ if page_id == page_key:
+ return node[page_key]['title']
+ else:
+ result = self.get_page_title (page_id, node[page_key])
+ if result is not None:
+ return result
+ return None
+
+
+ def print_pages(self, top=None, level=0):
+ if top is None:
+ top = self.get_cfg_page()['root']
+ for node in top['child']:
+ page_id = next(iter(node))
+ print('%s%s: %s' % (' ' * level, page_id, node[page_id]['title']))
+ level += 1
+ self.print_pages(node[page_id], level)
+ level -= 1
+
+
+ def get_item_by_index (self, index):
+ return self._cfg_list[index]
+
+
+ def get_item_by_path (self, path):
+ node = self.locate_cfg_item (path)
+ if node:
+ return self.get_item_by_index (node['indx'])
+ else:
+ return None
+
+    def locate_cfg_path (self, item):
+        # Return the list of keys leading from the tree root down to
+        # 'item' (compared by identity), or None when not found.
+        def _locate_cfg_path (root, level = 0):
+            # config structure
+            if item is root:
+                return path
+            for key in root:
+                if type(root[key]) is OrderedDict:
+                    level += 1
+                    path.append(key)
+                    ret = _locate_cfg_path (root[key], level)
+                    if ret:
+                        return ret
+                    # backtrack when this subtree did not contain item
+                    path.pop()
+            return None
+        path = []
+        return _locate_cfg_path (self._cfg_tree)
+
+
+ def locate_cfg_item (self, path, allow_exp = True):
+ def _locate_cfg_item (root, path, level = 0):
+ if len(path) == level:
+ return root
+ next_root = root.get(path[level], None)
+ if next_root is None:
+ if allow_exp:
+ raise Exception ('Not a valid CFG config option path: %s' % '.'.join(path[:level+1]))
+ else:
+ return None
+ return _locate_cfg_item (next_root, path, level + 1)
+
+ path_nodes = path.split('.')
+ return _locate_cfg_item (self._cfg_tree, path_nodes)
+
+
+ def traverse_cfg_tree (self, handler, top = None):
+ def _traverse_cfg_tree (root, level = 0):
+ # config structure
+ for key in root:
+ if type(root[key]) is OrderedDict:
+ level += 1
+ handler (key, root[key], level)
+ _traverse_cfg_tree (root[key], level)
+ level -= 1
+
+ if top is None:
+ top = self._cfg_tree
+ _traverse_cfg_tree (top)
+
+
+    def print_cfgs(self, root = None, short = True, print_level = 256):
+        # Dump every config item as 'offset:length name : value',
+        # limiting nesting depth to 'print_level' and truncating long
+        # values when 'short' is set.
+        def _print_cfgs (name, cfgs, level):
+
+            if 'indx' in cfgs:
+                act_cfg = self.get_item_by_index (cfgs['indx'])
+            else:
+                # non-leaf: synthesize a record from its $STRUCT info
+                offset = 0
+                length = 0
+                value = ''
+                path=''
+                if CGenCfgData.STRUCT in cfgs:
+                    cfg = cfgs[CGenCfgData.STRUCT]
+                    offset = int(cfg['offset'])
+                    length = int(cfg['length'])
+                    if 'value' in cfg:
+                        value = cfg['value']
+                if length == 0:
+                    return
+                act_cfg = dict({'value' : value, 'offset' : offset, 'length' : length})
+            value = act_cfg['value']
+            bit_len = act_cfg['length']
+            offset = (act_cfg['offset'] + 7) // 8
+            if value != '':
+                try:
+                    value = self.reformat_value_str (act_cfg['value'], act_cfg['length'])
+                except:
+                    # fall back to raw text when the value cannot parse
+                    value = act_cfg['value']
+            length = bit_len // 8
+            bit_len = '(%db)' % bit_len if bit_len % 8 else '' * 4
+            if level <= print_level:
+                if short and len(value) > 40:
+                    value = '%s ... %s' % (value[:20] , value[-20:])
+                print('%04X:%04X%-6s %s%s : %s' % (offset, length, bit_len, ' ' * level, name, value))
+
+        self.traverse_cfg_tree (_print_cfgs)
+
+
+    def get_cfg_tree(self):
+        # Accessor for the parsed config tree.
+        return self._cfg_tree
+
+
+    def set_cfg_tree(self, cfg_tree):
+        # Replace the parsed config tree (used after merging).
+        self._cfg_tree = cfg_tree
+
+
+    def merge_cfg_tree(self, root, other_root):
+        # Merge 'other_root' into 'root', preserving the relative key
+        # order of both trees.  On duplicate leaf keys the value from
+        # 'root' wins; a leaf/non-leaf mismatch is an error.
+        ret = OrderedDict ()
+        prev_key = None
+        for other_key in other_root:
+            if other_key not in root:
+                ret[other_key] = other_root[other_key]
+            else:
+                # this is a good time to check to see if we miss anything from previous root elements
+                found_last = False
+                for key in root:
+                    if key == prev_key:
+                        found_last = True
+                        continue
+                    if prev_key == None:
+                        found_last = True
+                    if found_last:
+                        ret[key] = root[key]
+                    if key == other_key:
+                        prev_key = other_key
+                        break
+
+                if type(root[other_key]) is OrderedDict and type(other_root[other_key]) is OrderedDict:
+                    # if they are both non-leaf, great, process recursively
+                    ret[other_key] = self.merge_cfg_tree (root[other_key], other_root[other_key])
+                elif type(root[other_key]) is OrderedDict or type(other_root[other_key]) is OrderedDict:
+                    raise Exception ("Two yamls files have hierachy mismatch!!!")
+                else:
+                    # this is duplicate value in from both roots, take original root as principal
+                    ret[other_key] = root[other_key]
+
+        # See if there is any leftovers
+        found_last = False
+        for key in root:
+            if key == prev_key:
+                found_last = True
+                continue
+            if prev_key == None:
+                found_last = True
+            if found_last:
+                ret[key] = root[key]
+            if key == other_key:
+                prev_key = other_key
+                break
+        return ret
+
+
+ def build_var_dict (self):
+ def _build_var_dict (name, cfgs, level):
+ if level <= 2:
+ if CGenCfgData.STRUCT in cfgs:
+ struct_info = cfgs[CGenCfgData.STRUCT]
+ self._var_dict['_LENGTH_%s_' % name] = struct_info['length'] // 8
+ self._var_dict['_OFFSET_%s_' % name] = struct_info['offset'] // 8
+
+ self._var_dict = {}
+ self.traverse_cfg_tree (_build_var_dict)
+ self._var_dict['_LENGTH_'] = self._cfg_tree[CGenCfgData.STRUCT]['length'] // 8
+ return 0
+
+
+ def add_cfg_page(self, child, parent, title=''):
+ def _add_cfg_page(cfg_page, child, parent):
+ key = next(iter(cfg_page))
+ if parent == key:
+ cfg_page[key]['child'].append({child: {'title': title,
+ 'child': []}})
+ return True
+ else:
+ result = False
+ for each in cfg_page[key]['child']:
+ if _add_cfg_page(each, child, parent):
+ result = True
+ break
+ return result
+
+ return _add_cfg_page(self._cfg_page, child, parent)
+
+
+    def set_cur_page(self, page_str):
+        # Update the current page context.  'page_str' may be a comma
+        # separated list; each entry is either an existing page id or a
+        # new 'id:parent:"Title"' definition that is added to the tree.
+        if not page_str:
+            return
+
+        if ',' in page_str:
+            page_list = page_str.split(',')
+        else:
+            page_list = [page_str]
+        for page_str in page_list:
+            parts = page_str.split(':')
+            if len(parts) in [1, 3]:
+                page = parts[0].strip()
+                if len(parts) == 3:
+                    # it is a new page definition, add it into tree
+                    parent = parts[1] if parts[1] else 'root'
+                    parent = parent.strip()
+                    if parts[2][0] == '"' and parts[2][-1] == '"':
+                        parts[2] = parts[2][1:-1]
+
+                    if not self.add_cfg_page(page, parent, parts[2]):
+                        raise SystemExit("Error: Cannot find parent page '%s'!" % parent)
+            else:
+                raise SystemExit("Error: Invalid page format '%s' !" % page_str)
+            self._cur_page = page
+
+
+ def extend_variable (self, line):
+ # replace all variables
+ if line == '':
+ return line
+ loop = 2
+ while loop > 0:
+ line_after = DefTemplate(line).safe_substitute(self._def_dict)
+ if line == line_after:
+ break
+ loop -= 1
+ line = line_after
+ return line_after
+
+ def reformat_number_per_type (self, itype, value):
+ if check_quote(value) or value.startswith('{'):
+ return value
+ parts = itype.split(',')
+ if len(parts) > 3 and parts[0] == 'EditNum':
+ num_fmt = parts[1].strip()
+ else:
+ num_fmt = ''
+ if num_fmt == 'HEX' and not value.startswith('0x'):
+ value = '0x%X' % int(value, 10)
+ elif num_fmt == 'DEC' and value.startswith('0x'):
+ value = '%d' % int(value, 16)
+ return value
+
+    def add_cfg_item(self, name, item, offset, path):
+        # Validate one leaf node and append it to the flat item list.
+        # 'length' may be given in bits ('3b'), typed units ('2W') or
+        # bytes; the stored length is always in bits.  Returns the item
+        # length in bits (0 for skipped virtual nodes).
+
+        self.set_cur_page (item.get('page', ''))
+
+        if name[0] == '$':
+            # skip all virtual node
+            return 0
+
+
+        if not set(item).issubset(CGenCfgData.keyword_set):
+            for each in list(item):
+                if each not in CGenCfgData.keyword_set:
+                    raise Exception ("Invalid attribute '%s' for '%s'!" % (each, '.'.join(path)))
+
+        length = item.get('length', 0)
+        if type(length) is str:
+            match = re.match("^(\d+)([b|B|W|D|Q])([B|W|D|Q]?)\s*$", length)
+            if match:
+                unit_len = CGenCfgData.bits_width[match.group(2)]
+                length = int(match.group(1), 10) * unit_len
+            else:
+                try:
+                    length = int(length, 0) * 8
+                except:
+                    raise Exception ("Invalid length field '%s' for '%s' !" % (length, '.'.join(path)))
+
+                # NOTE(review): indentation of this alignment check is
+                # ambiguous in the mangled patch — confirm it sits after
+                # a successful byte-length conversion as written here.
+                if offset % 8 > 0:
+                    raise Exception ("Invalid alignment for field '%s' for '%s' !" % (name, '.'.join(path)))
+        else:
+            # define is length in bytes
+            length = length * 8
+
+        if not name.isidentifier():
+            raise Exception ("Invalid config name '%s' for '%s' !" % (name, '.'.join(path)))
+
+
+        itype = str(item.get('type', 'Reserved'))
+        value = str(item.get('value', ''))
+        if value:
+            if not (check_quote(value) or value.startswith('{')):
+                if ',' in value:
+                    value = '{ %s }' % value
+                else:
+                    value = self.reformat_number_per_type (itype, value)
+
+        help = str(item.get('help', ''))
+        if '\n' in help:
+            help = ' '.join ([i.strip() for i in help.splitlines()])
+
+        option = str(item.get('option', ''))
+        if '\n' in option:
+            option = ' '.join ([i.strip() for i in option.splitlines()])
+
+        # extend variables for value and condition
+        condition = str(item.get('condition', ''))
+        if condition:
+            condition = self.extend_variable (condition)
+        value = self.extend_variable (value)
+
+        order = str(item.get('order', ''))
+        if order:
+            if '.' in order:
+                (major, minor) = order.split('.')
+                order = int (major, 16)
+            else:
+                order = int (order, 16)
+        else:
+            order = offset
+
+        cfg_item = dict()
+        cfg_item['length'] = length
+        cfg_item['offset'] = offset
+        cfg_item['value'] = value
+        cfg_item['type'] = itype
+        cfg_item['cname'] = str(name)
+        cfg_item['name'] = str(item.get('name', ''))
+        cfg_item['help'] = help
+        cfg_item['option'] = option
+        cfg_item['page'] = self._cur_page
+        cfg_item['order'] = order
+        cfg_item['path'] = '.'.join(path)
+        cfg_item['condition'] = condition
+        if 'struct' in item:
+            cfg_item['struct'] = item['struct']
+        self._cfg_list.append(cfg_item)
+
+        item['indx'] = len(self._cfg_list) - 1
+
+        # remove used info for reducing pkl size
+        item.pop('option', None)
+        item.pop('condition', None)
+        item.pop('help', None)
+        item.pop('name', None)
+        item.pop('page', None)
+
+        return length
+
+
+ def build_cfg_list (self, cfg_name ='', top = None, path = [], info = {'offset': 0}):
+ if top is None:
+ top = self._cfg_tree
+
+ start = info['offset']
+ is_leaf = True
+ for key in top:
+ path.append(key)
+ if type(top[key]) is OrderedDict:
+ is_leaf = False
+ self.build_cfg_list(key, top[key], path, info)
+ path.pop()
+
+ if is_leaf:
+ length = self.add_cfg_item(cfg_name, top, info['offset'], path)
+ info['offset'] += length
+ elif cfg_name == '' or (cfg_name and cfg_name[0] != '$'):
+ # check first element for struct
+ first = next(iter(top))
+ struct_str = CGenCfgData.STRUCT
+ if first != struct_str:
+ struct_node = OrderedDict({})
+ top[struct_str] = struct_node
+ top.move_to_end (struct_str, False)
+ else:
+ struct_node = top[struct_str]
+ struct_node['offset'] = start
+ if len(path) == 1:
+ # Round up first layer tree to be 4 Byte aligned
+ info['offset'] = (info['offset'] + 31) & (~31)
+ struct_node['length'] = (info['offset'] - start + 31) & (~31)
+ else:
+ struct_node['length'] = info['offset'] - start
+ if struct_node['length'] % 8 != 0:
+ raise SystemExit("Error: Bits length not aligned for %s !" % str(path))
+
+
    def get_field_value (self, top = None):
        """Serialize subtree *top* (default: the whole tree) to a bytearray.

        Walks every leaf under *top* and packs its current value into the
        result at its bit offset relative to the subtree's own start.
        """
        def _get_field_value (name, cfgs, level):
            if 'indx' in cfgs:
                act_cfg = self.get_item_by_index (cfgs['indx'])
                if act_cfg['length'] == 0:
                    return
                value = self.get_value (act_cfg['value'], act_cfg['length'], False)
                # Offsets are absolute; rebase onto the subtree start
                set_bits_to_bytes (result, act_cfg['offset'] - struct_info['offset'], act_cfg['length'], value)

        if top is None:
            top = self._cfg_tree
        struct_info = top[CGenCfgData.STRUCT]
        # Length is in bits; round up to whole bytes
        result = bytearray ((struct_info['length'] + 7) // 8)
        self.traverse_cfg_tree (_get_field_value, top)
        return result
+
+
    def set_field_value (self, top, value_bytes, force = False):
        """Deserialize *value_bytes* into subtree (or single option) *top*.

        When *top* is one config option (it has an 'indx') its value string
        is replaced outright; when it is a structure node every leaf is
        updated from the corresponding bits. Unless *force* is set, leaves
        that already carry a value keep it.
        """
        def _set_field_value (name, cfgs, level):
            if 'indx' not in cfgs:
                return
            act_cfg = self.get_item_by_index (cfgs['indx'])
            if force or act_cfg['value'] == '':
                value = get_bits_from_bytes (full_bytes, act_cfg['offset'] - struct_info['offset'], act_cfg['length'])
                act_val = act_cfg['value']
                if act_val == '':
                    act_val = '%d' % value
                # Preserve the field's existing HEX/DEC display format
                act_val = self.reformat_number_per_type (act_cfg['type'], act_val)
                act_cfg['value'] = self.format_value_to_str (value, act_cfg['length'], act_val)

        if 'indx' in top:
            # it is config option
            value = bytes_to_value (value_bytes)
            act_cfg = self.get_item_by_index (top['indx'])
            act_cfg['value'] = self.format_value_to_str (value, act_cfg['length'], act_cfg['value'])
        else:
            # it is structure
            struct_info = top[CGenCfgData.STRUCT]
            length = struct_info['length'] // 8
            full_bytes = bytearray(value_bytes[:length])
            if len(full_bytes) < length:
                # Zero-pad short input up to the structure length
                full_bytes.extend(bytearray(length - len(value_bytes)))
            self.traverse_cfg_tree (_set_field_value, top)
+
+
+ def update_def_value (self):
+ def _update_def_value (name, cfgs, level):
+ if 'indx' in cfgs:
+ act_cfg = self.get_item_by_index (cfgs['indx'])
+ if act_cfg['value'] != '' and act_cfg['length'] > 0:
+ try:
+ act_cfg['value'] = self.reformat_value_str (act_cfg['value'], act_cfg['length'])
+ except:
+ raise Exception ("Invalid value expression '%s' for '%s' !" % (act_cfg['value'], act_cfg['path']))
+ else:
+ if CGenCfgData.STRUCT in cfgs and 'value' in cfgs[CGenCfgData.STRUCT]:
+ curr = cfgs[CGenCfgData.STRUCT]
+ value_bytes = value_to_bytearray (self.eval(curr['value']), (curr['length'] + 7) // 8)
+ self.set_field_value (cfgs, value_bytes)
+
+ self.traverse_cfg_tree (_update_def_value, self._cfg_tree)
+
+
+ def evaluate_condition (self, item):
+ expr = item['condition']
+ result = self.parse_value (expr, 1, False)
+ return result
+
+
+ def load_default_from_bin (self, bin_data):
+ self.set_field_value(self._cfg_tree, bin_data, True)
+
+
+ def generate_binary_array (self):
+ return self.get_field_value()
+
+ def generate_binary (self, bin_file_name):
+ bin_file = open(bin_file_name, "wb")
+ bin_file.write (self.generate_binary_array ())
+ bin_file.close()
+ return 0
+
+ def write_delta_file (self, out_file, platform_id, out_lines):
+ dlt_fd = open (out_file, "w")
+ dlt_fd.write ("%s\n" % get_copyright_header('dlt', True))
+ dlt_fd.write ('#\n')
+ dlt_fd.write ('# Delta configuration values for platform ID 0x%04X\n' % platform_id)
+ dlt_fd.write ('#\n\n')
+ for line in out_lines:
+ dlt_fd.write ('%s\n' % line)
+ dlt_fd.close()
+
+
+ def override_default_value(self, dlt_file):
+ error = 0
+ dlt_lines = CGenCfgData.expand_include_files(dlt_file)
+
+ platform_id = None
+ for line, file_path, line_num in dlt_lines:
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ match = re.match("\s*([\w\.]+)\s*\|\s*(.+)", line)
+ if not match:
+ raise Exception("Unrecognized line '%s' (File:'%s' Line:%d) !" %
+ (line, file_path, line_num + 1))
+
+ path = match.group(1)
+ value_str = match.group(2)
+ top = self.locate_cfg_item (path)
+ if not top:
+ raise Exception(
+ "Invalid configuration '%s' (File:'%s' Line:%d) !" %
+ (path, file_path, line_num + 1))
+
+ if 'indx' in top:
+ act_cfg = self.get_item_by_index (top['indx'])
+ bit_len = act_cfg['length']
+ else:
+ struct_info = top[CGenCfgData.STRUCT]
+ bit_len = struct_info['length']
+
+ value_bytes = self.parse_value (value_str, bit_len)
+ self.set_field_value (top, value_bytes, True)
+
+ if path == 'PLATFORMID_CFG_DATA.PlatformId':
+ platform_id = value_str
+
+ if platform_id is None:
+ platform_id = 0
+ print("PLATFORMID_CFG_DATA.PlatformId is missing in file '%s' !" % (dlt_file))
+
+ return error
+
+
+ def generate_delta_file_from_bin (self, delta_file, old_data, new_data, full=False):
+ self.load_default_from_bin (new_data)
+ lines = []
+ tag_name = ''
+ level = 0
+ platform_id = None
+ def_platform_id = 0
+
+ for item in self._cfg_list:
+ old_val = get_bits_from_bytes (old_data, item['offset'], item['length'])
+ new_val = get_bits_from_bytes (new_data, item['offset'], item['length'])
+
+ full_name = item['path']
+ if 'PLATFORMID_CFG_DATA.PlatformId' == full_name:
+ def_platform_id = old_val
+ platform_id = new_val
+ elif item['type'] != 'Reserved' and ((new_val != old_val) or full):
+ val_str = self.reformat_value_str (item['value'], item['length'])
+ text = '%-40s | %s' % (full_name, val_str)
+ lines.append(text)
+
+ if def_platform_id == platform_id:
+ platform_id = def_platform_id
+
+ lines.insert(0, '%-40s | %s\n\n' %
+ ('PLATFORMID_CFG_DATA.PlatformId', '0x%04X' % platform_id))
+
+ if platform_id is None:
+ print ("Platform ID is not set and will be configured to 0")
+ platform_id = 0
+
+ self.write_delta_file (delta_file, platform_id, lines)
+ return 0
+
+
+ def generate_delta_svd_from_bin (self, old_data, new_data):
+ self.load_default_from_bin (new_data)
+ lines = []
+ tag_name = ''
+ level = 0
+ platform_id = None
+ def_platform_id = 0
+ items = []
+
+ for item in self._cfg_list:
+ old_val = get_bits_from_bytes (old_data, item['offset'], item['length'])
+ new_val = get_bits_from_bytes (new_data, item['offset'], item['length'])
+
+ full_name = item['path']
+ if 'PLATFORMID_CFG_DATA.PlatformId' == full_name:
+ def_platform_id = old_val
+ platform_id = new_val
+ elif item['type'] != 'Reserved' and (new_val != old_val):
+ val_str = self.reformat_value_str (item['value'], item['length'])
+ text = '%-40s | %s' % (full_name, val_str)
+ item = self.locate_cfg_item(item['path'])
+ if item is None:
+ raise Exception ("Failed to locate item from path: %s" % item['path'])
+ items.append(item)
+
+ execs = []
+ # The idea is that the 1st level tag content will be regenerated if changed
+ for item in items:
+ exec = self.locate_exec_from_item (item)
+ if exec == None:
+ raise Exception ("Failed to find the immediate executive tree for an item")
+ if exec not in execs:
+ execs.append (exec)
+
+ bytes_array = []
+ for exec in execs:
+ bytes = self.get_field_value (exec)
+ offset = 0
+ offset += int(exec['CfgHeader']['length'], 0)
+ offset += int(exec['CondValue']['length'], 0)
+ bytes_array.append (bytes[offset:])
+
+ # self.write_delta_file (delta_file, platform_id, lines)
+ return (execs, bytes_array)
+
+ def locate_exec_from_item (self, item):
+
+ def _locate_exec_from_item (name, cfgs, level):
+ if level == 1:
+ exec[0] = cfgs
+ elif cfgs == item:
+ exec[1] = exec[0]
+
+ exec = [None, None]
+ self.traverse_cfg_tree (_locate_exec_from_item, self._cfg_tree)
+ return exec[1]
+
+ def locate_exec_from_tag (self, tag):
+
+ def _locate_exec_from_tag (name, cfgs, level):
+ if level == 1:
+ exec[0] = cfgs
+ if CGenCfgData.STRUCT in cfgs:
+ cfghdr = self.get_item_by_index (cfgs['CfgHeader']['indx'])
+ tag_val = array_str_to_value(cfghdr['value']) >> 20
+ if tag_val == tag:
+ exec[1] = exec[0]
+
+ exec = [None, None]
+ self.traverse_cfg_tree (_locate_exec_from_tag, self._cfg_tree)
+ return exec[1]
+
+ def generate_delta_file(self, delta_file, bin_file, bin_file2, full=False):
+ fd = open (bin_file, 'rb')
+ new_data = bytearray(fd.read())
+ fd.close()
+
+ if bin_file2 == '':
+ old_data = self.generate_binary_array()
+ else:
+ old_data = new_data
+ fd = open (bin_file2, 'rb')
+ new_data = bytearray(fd.read())
+ fd.close()
+
+ return self.generate_delta_file_from_bin (delta_file, old_data, new_data, full)
+
+
+ def prepare_marshal (self, is_save):
+ if is_save:
+ # Ordered dict is not marshallable, convert to list
+ self._cfg_tree = CGenCfgData.deep_convert_dict (self._cfg_tree)
+ else:
+ # Revert it back
+ self._cfg_tree = CGenCfgData.deep_convert_list (self._cfg_tree)
+
+ def generate_yml_file (self, in_file, out_file):
+ cfg_yaml = CFG_YAML()
+ text = cfg_yaml.expand_yaml (in_file)
+ yml_fd = open(out_file, "w")
+ yml_fd.write (text)
+ yml_fd.close ()
+ return 0
+
+
+ def write_cfg_header_file (self, hdr_file_name, tag_mode, tag_dict, struct_list):
+ lines = []
+ lines.append ('\n\n')
+ tag_list = sorted(list(tag_dict.items()), key=lambda x: x[1])
+ for tagname, tagval in tag_list:
+ if (tag_mode == 0 and tagval >= 0x100) or (tag_mode == 1 and tagval < 0x100):
+ continue
+ lines.append ('#define %-30s 0x%03X\n' % ('CDATA_%s_TAG' % tagname[:-9], tagval))
+ lines.append ('\n\n')
+
+ name_dict = {}
+ new_dict = {}
+ for each in struct_list:
+ if (tag_mode == 0 and each['tag'] >= 0x100) or (tag_mode == 1 and each['tag'] < 0x100):
+ continue
+ new_dict[each['name']] = (each['alias'], each['count'])
+ if each['alias'] not in name_dict:
+ name_dict[each['alias']] = 1
+ lines.extend(self.create_struct (each['alias'], each['node'], new_dict))
+
+
+ self.write_header_file (lines, hdr_file_name)
+
+
+ def findMaxMinver (self, tree):
+
+ if type(tree) is not OrderedDict:
+ raise Exception ("Incorrect tree type %s!!!" % type(tree))
+
+ # In-order tree traversal to make sure all minor versions are non-descending
+ try:
+ ver = int(tree["minver"], 0)
+ except:
+ ver = 0
+
+ parent_minver = ver
+
+ max_minver = parent_minver
+ for value in tree:
+ if type(tree[value]) is OrderedDict:
+ temp_ver = self.findMaxMinver (tree[value])
+ if temp_ver >= max_minver:
+ max_minver = temp_ver
+ else:
+ raise Exception ("Higher minor version detected %d between older fields at %s. New minor version fields should only be appended!!!\
+ Consider append new fields, or remove the minor version and bump major version" % (temp_ver, max_minver, value))
+
+ return max_minver
+
+
    def write_policy_header_file (self, hdr_file_name, tag_mode, struct_list):
        """Generate the verified-policy C header for this YAML policy.

        Emits, in order: signature/version macros taken from the
        'PolicyHeader' node, struct typedefs, a template
        VERIFIED_POLICY_HEADER constant, and per-minor-version get/set
        accessors plus a whole-structure default setter.

        Raises:
            Exception: when no PolicyHeader node supplies a category.
        """
        lines = []
        max_minver = self.findMaxMinver(self._cfg_tree)
        category = ''

        # Step 1: Macro definitions
        for struct in struct_list:
            if struct["name"] == "PolicyHeader":
                category = struct['node']['category']
                # Signature string is packed little-endian into a 64-bit macro
                lines.append ('#define %-30s 0x%016X\n' % ('PDATA_%s_SIGNATURE' % (category), int.from_bytes(bytes(struct['node']['signature']["value"].strip("'"), 'utf-8'), 'little')))
                lines.append ('#define %-30s 0x%02X\n' % ('PDATA_%s_MAJOR_VER' % (category), int(struct['node']['majver']["value"], 0)))
                lines.append ('#define %-30s 0x%02X\n' % ('PDATA_%s_MINOR_VER' % (category), max_minver))
                lines.append ('\n')

        if category == '':
            raise Exception ("No category field set in the Policy header!!!")

        # Step 2: Structure definitions
        name_dict = {}
        new_dict = {}
        for each in struct_list:
            if each['name'] == "PolicyHeader":
                continue
            if (tag_mode == 0 and each['tag'] >= 0x100) or (tag_mode == 1 and each['tag'] < 0x100):
                continue
            new_dict[each['name']] = (each['alias'], each['count'])
            # Emit each alias only once
            if each['alias'] not in name_dict:
                name_dict[each['alias']] = 1
                lines.extend(self.create_struct (each['alias'], each['node'], new_dict))

        # Step 3: Template verified policy header
        for struct in struct_list:
            if struct["name"] == "PolicyHeader":
                lines.append ('STATIC CONST VERIFIED_POLICY_HEADER POLICY_%s_DESC = {\n' % (category))
                lines.append (' .Signature = %s,\n' % ('PDATA_%s_SIGNATURE' % (category)))
                lines.append (' .MajorVersion = %s,\n' % ('PDATA_%s_MAJOR_VER' % (category)))
                lines.append (' .MinorVersion = %s,\n' % ('PDATA_%s_MINOR_VER' % (category)))
                # NOTE(review): assumes the 'size' value names an entry in
                # self._var_dict -- confirm against the YAML template
                lines.append (' .Size = 0x%02X,\n' % (self._var_dict[struct["node"]["size"]["value"]]))
                lines.append ('};\n')
                lines.append ('\n')

        # Step 4: Get/set accessors for each field per minor version
        setter_def_all = []
        getter_def_all = []
        for struct in struct_list:
            if struct["name"] == "PolicyHeader":
                continue
            for minver in range (max_minver + 1):
                lines.append ('/* Get accessors for MIN_VER %d */\n' % minver)
                (getter, getter_def) = self.traverse_struct (struct['node'], new_dict, minver, category, False)
                lines.extend(getter)
                getter_def_all.append(getter_def)

                lines.append ('/* Set accessors for MIN_VER %d */\n' % minver)
                (setter, setter_def) = self.traverse_struct (struct['node'], new_dict, minver, category, True)
                lines.extend(setter)
                setter_def_all.append(setter_def)

            lines.append ('/* Set accessors for all fields of this structure */\n')
            # Template of the emitted C function:
            '''
            STATIC
            VOID
            EFIAPI
            SET_%s_default (
              IN EFI_HANDLE _handle,
              IN EFI_GUID *Guid
              ) {
              if ((VERIFIED_POLICY_DESCRIPTOR*)_handle)->MinorVersion >= 0x%x) {"
                SET_%s_default ();
              }
            }
            '''
            lines.append("inline\n" )
            lines.append("STATIC\n" )
            lines.append("VOID\n" )
            lines.append("EFIAPI\n" )
            lines.append("SET_%s_default (\n" % struct['name'])
            lines.append(" IN EFI_HANDLE _handle,\n" )
            lines.append(" IN EFI_GUID *Guid\n" )
            lines.append(" ) {\n" )
            # One guarded block per minor version, gated on the descriptor
            for idx in range(len(setter_def_all)):
                lines.append(" if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->MinorVersion >= 0x%x) {\n"% idx)
                for each in setter_def_all[idx]:
                    lines.append(" %s (_handle, Guid);\n" % each)
                lines.append(" }\n")
            lines.append("}\n\n")

        self.write_header_file (lines, hdr_file_name)
+
+
+ def write_header_file (self, txt_body, file_name, type = 'h'):
+ file_name_def = os.path.basename(file_name).replace ('.', '_')
+ file_name_def = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', file_name_def)
+ file_name_def = re.sub('([a-z0-9])([A-Z])', r'\1_\2', file_name_def).upper()
+
+ lines = []
+ lines.append ("%s\n" % get_copyright_header(type))
+ lines.append ("#ifndef __%s__\n" % file_name_def)
+ lines.append ("#define __%s__\n\n" % file_name_def)
+ if type == 'h':
+ lines.append ("#include \n\n")
+ lines.append ("#pragma pack(1)\n\n")
+ lines.extend (txt_body)
+ if type == 'h':
+ lines.append ("#pragma pack()\n\n")
+ lines.append ("#endif\n")
+
+ # Don't rewrite if the contents are the same
+ create = True
+ if os.path.exists(file_name):
+ hdr_file = open(file_name, "r")
+ org_txt = hdr_file.read()
+ hdr_file.close()
+
+ new_txt = ''.join(lines)
+ if org_txt == new_txt:
+ create = False
+
+ if create:
+ hdr_file = open(file_name, "w")
+ hdr_file.write (''.join(lines))
+ hdr_file.close()
+
+
+ def generate_data_inc_file (self, dat_inc_file_name, bin_file = None):
+ # Put a prefix GUID before CFGDATA so that it can be located later on
+ prefix = b'\xa7\xbd\x7f\x73\x20\x1e\x46\xd6\xbe\x8f\x64\x12\x05\x8d\x0a\xa8'
+ if bin_file:
+ fin = open (bin_file, 'rb')
+ bin_dat = prefix + bytearray(fin.read())
+ fin.close()
+ else:
+ bin_dat = prefix + self.generate_binary_array ()
+
+ file_name = os.path.basename(dat_inc_file_name).upper()
+ file_name = file_name.replace('.', '_')
+
+ txt_lines = []
+
+ txt_lines.append ("UINT8 mConfigDataBlob[%d] = {\n" % len(bin_dat))
+ count = 0
+ line = [' ']
+ for each in bin_dat:
+ line.append('0x%02X, ' % each)
+ count = count + 1
+ if (count & 0x0F) == 0:
+ line.append('\n')
+ txt_lines.append (''.join(line))
+ line = [' ']
+ if len(line) > 1:
+ txt_lines.append (''.join(line) + '\n')
+
+ txt_lines.append ("};\n\n")
+
+ self.write_header_file (txt_lines, dat_inc_file_name, 'inc')
+
+ return 0
+
+
+ def get_struct_array_info (self, input):
+ parts = input.split(':')
+ if len(parts) > 1:
+ var = parts[1]
+ input = parts[0]
+ else:
+ var = ''
+ array_str = input.split('[')
+ name = array_str[0]
+ if len(array_str) > 1:
+ num_str = ''.join(c for c in array_str[-1] if c.isdigit())
+ num_str = '1000' if len(num_str) == 0 else num_str
+ array_num = int(num_str)
+ else:
+ array_num = 0
+ return name, array_num, var
+
+
    def process_multilines (self, string, max_char_length):
        """Wrap *string* into indented lines of at most *max_char_length*
        characters, honoring literal backslash-n escape pairs as forced
        breaks.

        Returns the wrapped text; every emitted line is indented and
        newline-terminated.
        """
        multilines = ''
        string_length = len(string)
        current_string_start = 0
        string_offset = 0
        # Offsets where the string is split (name kept from original; it
        # is a list, not a dict)
        break_line_dict = []
        if len(string) <= max_char_length:
            # Short string: only split at literal '\n' escape pairs
            while (string_offset < string_length):
                if string_offset >= 1:
                    if string[string_offset - 1] == '\\' and string[string_offset] == 'n':
                        break_line_dict.append (string_offset + 1)
                string_offset += 1
            if break_line_dict != []:
                for each in break_line_dict:
                    multilines += "  %s\n" % string[current_string_start:each].lstrip()
                    current_string_start = each
                if string_length - current_string_start > 0:
                    multilines += "  %s\n" % string[current_string_start:].lstrip()
            else:
                multilines = "  %s\n" % string
        else:
            # Long string: break at spaces near the width limit and at
            # literal '\n' escape pairs
            new_line_start = 0
            new_line_count = 0
            found_space_char = False
            while (string_offset < string_length):
                if string_offset >= 1:
                    if new_line_count >= max_char_length - 1:
                        # Only break at a space if enough text remains
                        if string[string_offset] == ' ' and string_length - string_offset > 10:
                            break_line_dict.append (new_line_start + new_line_count)
                            new_line_start = new_line_start + new_line_count
                            new_line_count = 0
                            found_space_char = True
                        elif string_offset == string_length - 1 and found_space_char == False:
                            break_line_dict.append (0)
                    if string[string_offset - 1] == '\\' and string[string_offset] == 'n':
                        break_line_dict.append (string_offset + 1)
                        new_line_start = string_offset + 1
                        new_line_count = 0
                string_offset += 1
                new_line_count += 1
            if break_line_dict != []:
                break_line_dict.sort ()
                for each in break_line_dict:
                    if each > 0:
                        multilines += "  %s\n" % string[current_string_start:each].lstrip()
                        current_string_start = each
                if string_length - current_string_start > 0:
                    multilines += "  %s\n" % string[current_string_start:].lstrip()
        return multilines
+
+
+ def create_field (self, item, name, length, offset, struct, bsf_name, help, option, bits_length = None):
+ pos_name = 28
+ pos_comment = 30
+ name_line=''
+ help_line=''
+ option_line=''
+
+ if length == 0 and name == 'dummy':
+ return '\n'
+
+ if bits_length == 0:
+ return '\n'
+
+ is_array = False
+ if length in [1,2,4,8]:
+ type = "UINT%d" % (length * 8)
+ else:
+ is_array = True
+ type = "UINT8"
+
+ if item and item['value'].startswith('{'):
+ type = "UINT8"
+ is_array = True
+
+ if struct != '':
+ struct_base = struct.rstrip('*')
+ name = '*' * (len(struct) - len(struct_base)) + name
+ struct = struct_base
+ type = struct
+ if struct in ['UINT8','UINT16','UINT32','UINT64']:
+ is_array = True
+ unit = int(type[4:]) // 8
+ length = length / unit
+ else:
+ is_array = False
+
+ if is_array:
+ name = name + '[%d]' % length
+
+ if len(type) < pos_name:
+ space1 = pos_name - len(type)
+ else:
+ space1 = 1
+
+ if bsf_name != '':
+ name_line=" %s\n" % bsf_name
+ else:
+ name_line="N/A\n"
+
+ if help != '':
+ help_line = self.process_multilines (help, 80)
+
+ if option != '':
+ option_line = self.process_multilines (option, 80)
+
+ if offset is None:
+ offset_str = '????'
+ else:
+ offset_str = '0x%04X' % offset
+
+ if bits_length is None:
+ bits_length = ''
+ else:
+ bits_length = ' : %d' % bits_length
+
+ #return "\n/** %s%s%s**/\n %s%s%s%s;\n" % (name_line, help_line, option_line, type, ' ' * space1, name, bits_length)
+ return "\n /* %s */\n %s%s%s%s;\n" % (name_line.strip(), type, ' ' * space1, name, bits_length)
+
+
    def create_accessor (self, item, category, name, length, offset, struct, bsf_name, help, option, is_set, bits_length = None):
        """Generate C get/set accessor functions for one policy field.

        Args:
            item:        flattened cfg item dict (supplies 'path' and the
                         default 'value').
            category:    policy category string (not referenced in the
                         emitted code).
            name:        field name; may gain array/pointer decoration.
            length:      field length in bytes.
            offset:      field byte offset within the structure.
            struct:      optional struct type override ('*' suffixes mean
                         pointer levels).
            bsf_name/help/option: BSF metadata (unused here).
            is_set:      True emits SET_* accessors, False emits GET_*.
            bits_length: bit-field width; 0 suppresses output.

        Returns:
            A (accessor_text, default_accessor_name) tuple.
            NOTE(review): the two early-exit paths below return a bare
            string, which callers that unpack a 2-tuple cannot handle --
            confirm those cases never occur for policy fields.
        """

        if length == 0 and name == 'dummy':
            return '\n'

        if bits_length == 0:
            return '\n'

        # Scalar widths map onto fixed-size UINT types; others are arrays
        is_array = False
        if length in [1,2,4,8]:
            type = "UINT%d" % (length * 8)
        else:
            is_array = True
            type = "UINT8"

        # A '{...}' default value always forces a byte array
        if item and item['value'].startswith('{'):
            type = "UINT8"
            is_array = True

        if struct != '':
            # Leading '*' characters become pointer markers on the name
            struct_base = struct.rstrip('*')
            name = '*' * (len(struct) - len(struct_base)) + name
            struct = struct_base
            type = struct
            if struct in ['UINT8','UINT16','UINT32','UINT64']:
                is_array = True
                unit = int(type[4:]) // 8
                length = length / unit
            else:
                is_array = False

        if is_array:
            name = name + '[%d]' % length

        if bits_length is None:
            bits_length = ''
        else:
            bits_length = ' : %d' % bits_length

        # path[0] is the structure type name, path[1] the member name
        path = item['path'].split(".")
        final_acs_list = []
        if is_set:
            # Template of the emitted setter:
            '''
            STATIC
            VOID
            EFIAPI
            SET_%s (
              IN EFI_HANDLE _handle,
              IN EFI_GUID *Guid,
              IN %s val,
              ) {
              ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = val;
              ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE);
            }
            '''
            final_acs_list.append("inline" )
            final_acs_list.append("STATIC" )
            final_acs_list.append("VOID" )
            final_acs_list.append("EFIAPI" )
            final_acs_list.append("SET_%s (" % "_".join(path))
            final_acs_list.append(" IN EFI_HANDLE _handle," )
            final_acs_list.append(" IN EFI_GUID *Guid," )
            final_acs_list.append(" IN %s val" % type)
            final_acs_list.append(" ) {" )
            final_acs_list.append(" ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = val;" % (path[0], path[1]))
            # NOTE(review): arguments here are (length, offset) although the
            # template comment reads (offset, size) -- confirm intended order
            final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 0x%x, TRUE);" % (length, offset))
            final_acs_list.append("}\n\n")

            # Set default value
            '''
            STATIC
            VOID
            EFIAPI
            SET_%s_default (
              IN EFI_HANDLE _handle,
              IN EFI_GUID *Guid
              ) {
              ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = 0x%x;
              ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE);
            }
            '''
            final_acs_list.append("inline" )
            final_acs_list.append("STATIC" )
            final_acs_list.append("VOID" )
            final_acs_list.append("EFIAPI" )
            acs_default = "SET_%s_default (" % "_".join(path)
            final_acs_list.append(acs_default)
            final_acs_list.append(" IN EFI_HANDLE _handle," )
            final_acs_list.append(" IN EFI_GUID *Guid" )
            final_acs_list.append(" ) {" )
            final_acs_list.append(" ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s = 0x%x;" % (path[0], path[1], int(item['value'], 0)))
            final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 0x%x, TRUE);" % (length, offset))
            final_acs_list.append("}\n\n")
        else:
            # Template of the emitted getter (falls back to the default
            # value when the stored structure is too small):
            '''
            STATIC
            %s
            EFIAPI
            GET_%s (
              IN EFI_HANDLE _handle,
              IN EFI_GUID *Guid
              ) {
              %s Temp;
              if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->Size >= (OFFSET_OF(%s, %s) + (sizeof (((%s *)0)->%s)) {
                Temp = ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s;
              } else {
                Temp = 0x%x;
              }
              ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE);
              return Temp;
            }
            '''
            final_acs_list.append("inline" )
            final_acs_list.append("STATIC" )
            final_acs_list.append("%s" % type)
            final_acs_list.append("EFIAPI" )
            final_acs_list.append("GET_%s (" % "_".join(path))
            final_acs_list.append(" IN EFI_HANDLE _handle," )
            final_acs_list.append(" IN EFI_GUID *Guid" )
            final_acs_list.append(" ) {" )
            final_acs_list.append(" %s Temp;" % type)
            final_acs_list.append(" if (((VERIFIED_POLICY_DESCRIPTOR*)_handle)->Size >= (OFFSET_OF(%s, %s) + (sizeof (((%s*)0)->%s)))) {" % (path[0], path[1], path[0], path[1]))
            final_acs_list.append(" Temp = ((%s*)((UINTN)_handle + sizeof (VERIFIED_POLICY_DESCRIPTOR)))->%s;" % (path[0], path[1]))
            final_acs_list.append(" } else {" )
            final_acs_list.append(" Temp = 0x%x;" % int(item['value'], 0))
            final_acs_list.append(" }" )
            final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 0x%x, FALSE);" % (length, offset))
            final_acs_list.append(" return Temp;" )
            final_acs_list.append("}\n\n")

            # Get default value
            '''
            STATIC
            %s
            EFIAPI
            GET_%s_default (
              IN EFI_HANDLE _handle,
              IN EFI_GUID *Guid
              ) {
              ReportVerifiedPolicyAccess (_handle, Guid, offset, size, FALSE);
              return 0x%x;
            }
            '''
            final_acs_list.append("inline" )
            final_acs_list.append("STATIC" )
            final_acs_list.append("%s" % type)
            final_acs_list.append("EFIAPI" )
            acs_default = "GET_%s_default (" % "_".join(path)
            final_acs_list.append(acs_default)
            final_acs_list.append(" IN EFI_HANDLE _handle," )
            final_acs_list.append(" IN EFI_GUID *Guid" )
            final_acs_list.append(" ) {" )
            final_acs_list.append(" ReportVerifiedPolicyAccess (_handle, Guid, 0x%x, 0x%x, FALSE);" % (length, offset))
            final_acs_list.append(" return 0x%x;" % int(item['value'], 0))
            final_acs_list.append("}\n\n")

        final_acs_str = "\n".join(final_acs_list)
        return (final_acs_str, acs_default.rstrip (' ('))
+
+
    def create_struct (self, cname, top, struct_dict):
        """Emit a C 'typedef struct { ... } cname;' for tree node *top*.

        *struct_dict* maps member names to (alias, count) pairs and drives
        nested-struct member emission; the CfgHeader/CondValue members at
        the head of a structure are skipped.

        Returns the typedef as a list of output lines.
        """
        index = 0
        last = ''
        lines = []
        lines.append ('\ntypedef struct {\n')
        for field in top:
            # '$'-prefixed keys are metadata, not members
            if field[0] == '$':
                continue

            index += 1

            t_item = top[field]
            if 'indx' not in t_item:
                # Interior node: emit as a nested-struct member
                if CGenCfgData.STRUCT not in top[field]:
                    continue

                if struct_dict[field][1] == 0:
                    continue

                append = True
                struct_info = top[field][CGenCfgData.STRUCT]

                if 'struct' in struct_info:
                    struct, array_num, var = self.get_struct_array_info (struct_info['struct'])
                    if array_num > 0:
                        # Consecutive members of the same struct type fold
                        # into a single array member
                        if last == struct:
                            append = False
                        last = struct
                        if var == '':
                            var = field

                        field = CGenCfgData.format_struct_field_name (var, struct_dict[field][1])
                else:
                    struct = struct_dict[field][0]
                    field = CGenCfgData.format_struct_field_name (field, struct_dict[field][1])

                if append:
                    line = self.create_field (None, field, 0, 0, struct, '', '', '')
                    lines.append (' %s' % line)
                    last = struct
                continue

            item = self.get_item_by_index (t_item['indx'])
            # CfgHeader/CondValue occupy the first two slots and are
            # implicit, so they are not emitted as members
            if item['cname'] == 'CfgHeader' and index == 1 or (item['cname'] == 'CondValue' and index == 2):
                continue

            bit_length = None
            length = (item['length'] + 7) // 8
            # 'Nb' lengths are bit fields; an optional storage-unit suffix
            # (B/W/D/Q) sets the carrier width, defaulting to 4 bytes
            match = re.match("^(\d+)([b|B|W|D|Q])([B|W|D|Q]?)", t_item['length'])
            if match and match.group(2) == 'b':
                bit_length = int(match.group(1))
                if match.group(3) != '':
                    length = CGenCfgData.bits_width[match.group(3)] // 8
                else:
                    length = 4
            offset = item['offset'] // 8
            struct = item.get('struct', '')
            name = field
            prompt = item['name']
            help = item['help']
            option = item['option']
            line = self.create_field (item, name, length, offset, struct, prompt, help, option, bit_length)
            lines.append (' %s' % line)
            last = struct

        lines.append ('\n} %s;\n\n' % cname)

        return lines
+
+
    def traverse_struct (self, top, struct_dict, target_min_ver, category, is_set):
        """Emit accessors for every member of *top* at one minor version.

        Only fields whose 'minver' equals *target_min_ver* are processed
        (a missing/malformed minver counts as 0). *is_set* selects SET_*
        versus GET_* accessors.

        Returns a (lines, defaults) tuple: the accessor text lines and the
        names of the generated *_default accessors.
        """
        index = 0
        last = ''
        lines = []
        defaults = []

        for field in top:
            # '$'-prefixed keys are metadata, not members
            if field[0] == '$':
                continue

            index += 1

            t_item = top[field]

            # Missing or malformed 'minver' defaults to version 0
            try:
                minver = int(t_item['minver'], 0)
            except:
                minver = 0

            if minver != target_min_ver:
                continue

            if 'indx' not in t_item:
                # Interior node: emit accessors for the nested struct
                if CGenCfgData.STRUCT not in top[field]:
                    continue

                if struct_dict[field][1] == 0:
                    continue

                append = True
                struct_info = top[field][CGenCfgData.STRUCT]

                if 'struct' in struct_info:
                    struct, array_num, var = self.get_struct_array_info (struct_info['struct'])
                    if array_num > 0:
                        # Consecutive members of the same struct type fold
                        # into a single array member
                        if last == struct:
                            append = False
                        last = struct
                        if var == '':
                            var = field

                        field = CGenCfgData.format_struct_field_name (var, struct_dict[field][1])
                else:
                    struct = struct_dict[field][0]
                    field = CGenCfgData.format_struct_field_name (field, struct_dict[field][1])

                if append:
                    (line, default) = self.create_accessor (None, category, field, 0, 0, struct, '', '', '', is_set)
                    lines.append (' %s' % line)
                    defaults.append (default)
                    last = struct
                continue

            item = self.get_item_by_index (t_item['indx'])

            bit_length = None
            length = (item['length'] + 7) // 8
            # 'Nb' lengths are bit fields; an optional storage-unit suffix
            # (B/W/D/Q) sets the carrier width, defaulting to 4 bytes
            match = re.match("^(\d+)([b|B|W|D|Q])([B|W|D|Q]?)", t_item['length'])
            if match and match.group(2) == 'b':
                bit_length = int(match.group(1))
                if match.group(3) != '':
                    length = CGenCfgData.bits_width[match.group(3)] // 8
                else:
                    length = 4
            offset = item['offset'] // 8
            struct = item.get('struct', '')
            name = field
            prompt = item['name']
            help = item['help']
            option = item['option']
            (line, default) = self.create_accessor (item, category, name, length, offset, struct, prompt, help, option, is_set, bit_length)
            lines.append ('%s' % line)
            defaults.append (default)
            last = struct

        lines.append ('\n')

        return (lines, defaults)
+
+
    def create_header_file (self, hdr_file_name, com_hdr_file_name = ''):
        """Generate the platform (and optional common) CFGDATA C headers.

        Walks the config tree collecting per-structure tag IDs, filters
        and sorts the structures, infers array groupings from naming and
        layout, then writes the header file(s).

        Returns 0 on success.
        """
        def _build_header_struct (name, cfgs, level):
            if CGenCfgData.STRUCT in cfgs:
                if 'CfgHeader' in cfgs:
                    # collect CFGDATA TAG IDs
                    cfghdr = self.get_item_by_index (cfgs['CfgHeader']['indx'])
                    tag_val = array_str_to_value(cfghdr['value']) >> 20
                    tag_dict[name] = tag_val
                    if level == 1:
                        tag_curr[0] = tag_val
                struct_dict[name] = (level, tag_curr[0], cfgs)

        tag_curr = [0]
        tag_dict = {}
        struct_dict = {}
        self.traverse_cfg_tree (_build_header_struct)

        # Mode 2 = no level-1 tag found; mode 1 = platform-range tags
        if tag_curr[0] == 0:
            hdr_mode = 2
        else:
            hdr_mode = 1

        # filter out the items to be built for tags and structures
        struct_list = []
        for each in struct_dict:
            match = False
            for check in CGenCfgData.exclude_struct:
                if re.match (check, each):
                    match = True
                    if each in tag_dict:
                        if each not in CGenCfgData.include_tag:
                            del tag_dict[each]
                    break
            if not match:
                struct_list.append ({'name':each, 'alias':'', 'count' : 0, 'level':struct_dict[each][0],
                                     'tag':struct_dict[each][1], 'node':struct_dict[each][2]})

        # sort by level so that the bottom level struct will be build first to satisfy dependencies
        struct_list = sorted(struct_list, key=lambda x: x['level'], reverse=True)

        # Convert XXX_[0-9]+ to XXX as an array hint
        for each in struct_list:
            cfgs = each['node']
            if 'struct' in cfgs['$STRUCT']:
                each['alias'], array_num, var = self.get_struct_array_info (cfgs['$STRUCT']['struct'])
            else:
                match = re.match('(\w+)(_\d+)', each['name'])
                if match:
                    each['alias'] = match.group(1)
                else:
                    each['alias'] = each['name']

        # count items for array build
        for idx, each in enumerate(struct_list):
            if idx > 0:
                # A same-alias struct laid out immediately after its
                # predecessor extends that predecessor's array count
                last_struct = struct_list[idx-1]['node']['$STRUCT']
                curr_struct = each['node']['$STRUCT']
                if struct_list[idx-1]['alias'] == each['alias'] and \
                   curr_struct['length'] == last_struct['length'] and \
                   curr_struct['offset'] == last_struct['offset'] + last_struct['length']:
                    for idx2 in range (idx-1, -1, -1):
                        if struct_list[idx2]['count'] > 0:
                            struct_list[idx2]['count'] += 1
                            break
                    continue
            each['count'] = 1

        # generate common header
        if com_hdr_file_name:
            self.write_cfg_header_file (com_hdr_file_name, 0, tag_dict, struct_list)

        # generate platform header
        self.write_cfg_header_file (hdr_file_name, hdr_mode, tag_dict, struct_list)

        return 0
+
+
def create_policy_header_file (self, hdr_file_name, com_hdr_file_name = ''):
    """
    Generate the C header file for policy structures parsed from YAML.

    Mirrors create_header_file but keys on 'PolicyHeader' nodes and emits
    the output through write_policy_header_file.

    :param hdr_file_name:     path of the platform policy header to write
    :param com_hdr_file_name: accepted for signature parity with
                              create_header_file but currently unused —
                              no common header is generated here
    :return: 0 on success
    """
    def _build_header_struct (name, cfgs, level):
        # Callback for traverse_cfg_tree: record each struct node and,
        # when present, the value derived from its PolicyHeader item
        # (upper bits of the header value; the >> 20 shift mirrors the
        # CFGDATA tag extraction — TODO(review) confirm for policies).
        if CGenCfgData.STRUCT in cfgs:
            if 'PolicyHeader' in cfgs:
                # collect macro definitions
                cfghdr = self.get_item_by_index (cfgs['PolicyHeader']['indx'])
                tag_val = array_str_to_value(cfghdr['value']) >> 20
                tag_dict[name] = tag_val
                if level == 1:
                    tag_curr[0] = tag_val
            struct_dict[name] = (level, tag_curr[0], cfgs)

    # tag_curr is a one-element list so the nested callback can mutate it.
    tag_curr = [0]
    tag_dict = {}
    struct_dict = {}
    self.traverse_cfg_tree (_build_header_struct)

    # hdr_mode 2: no top-level header value was found; 1 otherwise.
    if tag_curr[0] == 0:
        hdr_mode = 2
    else:
        hdr_mode = 1

    # filter out the items to be built for tags and structures
    struct_list = []
    for each in struct_dict:
        match = False
        for check in CGenCfgData.exclude_struct:
            if re.match (check, each):
                match = True
                if each in tag_dict:
                    if each not in CGenCfgData.include_tag:
                        del tag_dict[each]
                break
        if not match:
            struct_list.append ({'name':each, 'alias':'', 'count' : 0, 'level':struct_dict[each][0],
                                 'tag':struct_dict[each][1], 'node':struct_dict[each][2]})

    # sort by level so that the bottom level struct will be built first to satisfy dependencies
    struct_list = sorted(struct_list, key=lambda x: x['level'], reverse=True)

    # Convert XXX_[0-9]+ to XXX as an array hint
    for each in struct_list:
        cfgs = each['node']
        if 'struct' in cfgs['$STRUCT']:
            each['alias'], array_num, var = self.get_struct_array_info (cfgs['$STRUCT']['struct'])
        else:
            # BUGFIX: raw string — the plain '(\w+)(_\d+)' form relies on
            # invalid escape sequences (SyntaxWarning on Python 3.12+).
            match = re.match(r'(\w+)(_\d+)', each['name'])
            if match:
                each['alias'] = match.group(1)
            else:
                each['alias'] = each['name']

    # count items for array build: consecutive entries with the same alias,
    # equal length and contiguous offsets fold into the first entry's count.
    for idx, each in enumerate(struct_list):
        if idx > 0:
            last_struct = struct_list[idx-1]['node']['$STRUCT']
            curr_struct = each['node']['$STRUCT']
            if struct_list[idx-1]['alias'] == each['alias'] and \
               curr_struct['length'] == last_struct['length'] and \
               curr_struct['offset'] == last_struct['offset'] + last_struct['length']:
                for idx2 in range (idx-1, -1, -1):
                    if struct_list[idx2]['count'] > 0:
                        struct_list[idx2]['count'] += 1
                        break
                continue
        each['count'] = 1

    # generate platform header
    self.write_policy_header_file (hdr_file_name, hdr_mode, struct_list)

    return 0
+
+
def load_yaml (self, cfg_file, shallow_load=False, is_policy=False):
    """
    Parse a configuration YAML file and (re)initialize this object from it.

    :param cfg_file:     path to the YAML file to parse
    :param shallow_load: when True, only the raw tree / defaults / path are
                         stored; the config list, variable dict, and default
                         values are not built
    :param is_policy:    recorded on the instance to steer policy-specific
                         processing elsewhere in the class
    :return: 0 on success
    """
    parser = CFG_YAML()
    self.initialize()
    self.is_policy = is_policy
    self._cfg_tree = parser.load_yaml(cfg_file)
    self._def_dict = parser.def_dict
    self._yaml_path = os.path.dirname(cfg_file)
    if shallow_load:
        # Caller only wants the parsed tree; skip the derived structures.
        return 0
    self.build_cfg_list()
    self.build_var_dict()
    self.update_def_value()
    return 0
+
+
def usage():
    """Print the command-line help text for this tool to stdout."""
    help_lines = (
        "GenCfgData Version 0.50",
        "Usage:",
        " GenCfgData GENINC BinFile IncOutFile",
        " GenCfgData GENPKL YamlFile PklOutFile",
        " GenCfgData GENBIN YamlFile[;DltFile] BinOutFile",
        " GenCfgData GENDLT YamlFile[;BinFile] DltOutFile",
        " GenCfgData GENHDR YamlFile HdrOutFile",
    )
    for help_line in help_lines:
        print(help_line)
+
+
def main():
    """
    Command-line entry point.

    argv layout: <command> <InFile[;DltFile[;BinFile]]> <OutFile>
    Supported commands: GENINC, GENPKL, GENBIN, GENDLT, GENHDR, DEBUG.

    :return: 0 on success, 1 on bad usage
    :raises Exception: on invalid parameters or processing failures
    """
    # Parse the options and args
    argc = len(sys.argv)
    if argc < 4 or argc > 5:
        usage()
        return 1

    gen_cfg_data = CGenCfgData()
    command = sys.argv[1].upper()
    out_file = sys.argv[3]

    # The input argument may carry "YamlFile;DltFile[;BinFile]".
    file_list = sys.argv[2].split(';')
    if len(file_list) >= 2:
        yml_file = file_list[0]
        dlt_file = file_list[1]
    elif len(file_list) == 1:
        yml_file = file_list[0]
        dlt_file = ''
    else:
        raise Exception ("ERROR: Invalid parameter '%s' !" % sys.argv[2])

    if command == "GENDLT" and yml_file.endswith('.dlt'):
        # It needs to expand an existing DLT file
        dlt_file = yml_file
        lines = gen_cfg_data.expand_include_files (dlt_file)
        write_lines (lines, out_file)
        return 0

    bin_file = ''
    if (yml_file.lower().endswith('.bin')) and (command == "GENINC"):
        # It is binary file
        bin_file = yml_file
        yml_file = ''

    if bin_file:
        gen_cfg_data.generate_data_inc_file(out_file, bin_file)
        return 0

    cfg_bin_file = ''
    cfg_bin_file2 = ''
    if dlt_file:
        if command == "GENDLT":
            cfg_bin_file = dlt_file
            dlt_file = ''
            if len(file_list) >= 3:
                cfg_bin_file2 = file_list[2]

    if yml_file.lower().endswith('.pkl'):
        # Restore a previously marshalled state instead of re-parsing YAML.
        with open(yml_file, "rb") as pkl_file:
            gen_cfg_data.__dict__ = marshal.load(pkl_file)
        gen_cfg_data.prepare_marshal (False)
    else:
        # BUGFIX: previously only GENHDR loaded the YAML, leaving every
        # other command (GENPKL/GENBIN/GENDLT/GENINC/DEBUG) operating on
        # an uninitialized CGenCfgData object. Load it unconditionally;
        # policy mode is still enabled only for GENHDR.
        gen_cfg_data.load_yaml (yml_file, is_policy=(command == 'GENHDR'))

    if command == 'GENPKL':
        gen_cfg_data.prepare_marshal (True)
        with open(out_file, "wb") as pkl_file:
            marshal.dump(gen_cfg_data.__dict__, pkl_file)
        json_file = os.path.splitext(out_file)[0] + '.json'
        path_list = []
        cfgs = {'_cfg_page' : gen_cfg_data._cfg_page, '_cfg_list':gen_cfg_data._cfg_list, '_path_list' : path_list}
        # optimize to reduce size: store each distinct parent path once and
        # drop per-item fields that can be reconstructed.
        path = None
        for each in cfgs['_cfg_list']:
            new_path = each['path'][:-len(each['cname'])-1]
            if path != new_path:
                path = new_path
                each['path'] = path
                path_list.append(path)
            else:
                del each['path']
            if each['order'] == each['offset']:
                del each['order']
            del each['offset']

            # value is just used to indicate display type
            value = each['value']
            if value.startswith ('0x'):
                hex_len = ((each['length'] + 7) // 8) * 2
                if len(value) == hex_len:
                    value = 'x%d' % hex_len
                else:
                    value = 'x'
                each['value'] = value
            elif value and value[0] in ['"', "'", '{']:
                each['value'] = value[0]
            else:
                del each['value']

        # use a context manager so the JSON file is closed even on error
        with open(json_file, 'w') as fo:
            fo.write(repr(cfgs))
        return 0

    if dlt_file:
        gen_cfg_data.override_default_value(dlt_file)

    if command == "GENBIN":
        if len(file_list) == 3:
            old_data = gen_cfg_data.generate_binary_array()
            with open(file_list[2], 'rb') as fi:
                new_data = bytearray (fi.read ())
            if len(new_data) != len(old_data):
                raise Exception ("Binary file '%s' length does not match, ignored !" % file_list[2])
            else:
                gen_cfg_data.load_default_from_bin (new_data)
                gen_cfg_data.override_default_value(dlt_file)

        gen_cfg_data.generate_binary(out_file)

    elif command == "GENDLT":
        gen_cfg_data.generate_delta_file (out_file, cfg_bin_file, cfg_bin_file2)

    elif command == "GENHDR":
        # The output argument may carry "BrdHeader;ComHeader".
        out_files = out_file.strip("'").split(';')
        brd_out_file = out_files[0].strip()
        if len(out_files) > 1:
            com_out_file = out_files[1].strip()
        else:
            com_out_file = ''
        gen_cfg_data.create_policy_header_file(brd_out_file, com_out_file)

    elif command == "GENINC":
        gen_cfg_data.generate_data_inc_file(out_file)

    elif command == "DEBUG":
        gen_cfg_data.print_cfgs()

    else:
        # BUGFIX: corrected "Unsuported" typo in the error message.
        raise Exception ("Unsupported command '%s' !" % command)

    return 0
+
+
if __name__ == '__main__':
    # Script entry point: propagate main()'s return code as the process exit status.
    sys.exit(main())