diff --git a/docs/trestle_author.md b/docs/trestle_author.md
index 6c5c9bcb8..db3f94721 100644
--- a/docs/trestle_author.md
+++ b/docs/trestle_author.md
@@ -409,6 +409,61 @@ Running `trestle author docs validate -tn docs_task -gh="Governed section"` will
 - If `--template-version 1.0.0` (`-tv`) is passed the header field `x-trestle-template-version` will be ignored and document will be forcefully validated against template of version `1.0.0`. Use this for testing purposes _only_ when you need to validate the document against a specific template. By default the template version will be determined based on `x-trestle-template-version` in the document.
 
+### Validating the documents against different templates
+
+As stated before, instances can be validated against multiple templates. Trestle also supports another scenario: keeping multiple documents in the task folder that all correspond to a single template.
+
+For that to work, add the following field to the template's YAML header, set to the type of template being implemented, so the validation can take place:
+
+> x-trestle-template-type: insert_template_type_here
+
+Note that each instance document in the task folder also needs an `x-trestle-template-type: insert_template_type_here` field in its YAML header, with a value matching the template name.
+
+```yaml
+---
+authors: tmp
+owner: tmp
+valid:
+  from: null
+  to: null
+x-trestle-template-type: insert_template_type_here
+---
+```
+
+With that in place, you can create more than one instance document per template and give each instance whatever name you like.
+
+For example, consider the following folder structure:
+
+```text
+trestle_root
+┣ .trestle
+┃ ┣ author
+┃ ┃ ┣ my_task_2
+┃ ┃ ┃ ┣ 0.0.1
+┃ ┃ ┃ ┃ ┣ a_template.md
+┃ ┃ ┃ ┃ ┣ another_template.md
+┃ ┃ ┃ ┃ ┗ architecture.drawio
+┃ ┗ config.ini
+
+trestle_root
+ ┣ .trestle
+ ┣ my_task_2
+ ┃ ┣ sample_folder_0
+ ┃ ┃ ┣ a_template_1.md
+ ┃ ┃ ┣ a_template_2.md
+ ┃ ┃ ┣ architecture_1.drawio
+ ┃ ┃ ┗ another_template_123.md
+
+```
+
+Notice that instance file names no longer need to match the template names exactly; validation is driven by the `x-trestle-template-type` field defined in the instance header, not by the file name.
+
+To validate the documents against their respective templates using `x-trestle-template-type`, run:
+
+> trestle author folders validate -tn my_task_name -vtt
+
+Here `-vtt` stands for validate template type. This option lets you validate more than one instance per template.
+
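To make the `-vtt` matching rule concrete, here is a minimal, self-contained sketch of how a template could be resolved from an instance's `x-trestle-template-type` header instead of from its file name, mirroring the `folders.py` change later in this patch. It is an illustration only, not trestle's actual code: `read_header_field` and `resolve_template` are hypothetical helper names, and PyYAML is used here purely to parse the front matter (trestle has its own markdown header processor).

```python
import pathlib
from typing import Optional

import yaml  # PyYAML, used here only to parse the YAML front matter

TEMPLATE_TYPE_HEADER = 'x-trestle-template-type'


def read_header_field(md_file: pathlib.Path, field: str) -> Optional[str]:
    """Return one field from the YAML front matter of a markdown file, if present."""
    text = md_file.read_text(encoding='utf-8')
    if not text.startswith('---'):
        return None
    parts = text.split('---', 2)
    if len(parts) < 3:
        return None
    header = yaml.safe_load(parts[1]) or {}
    return header.get(field)


def resolve_template(instance_file: pathlib.Path,
                     template_dir: pathlib.Path,
                     validate_template_type: bool) -> Optional[pathlib.Path]:
    """Pick the template file an instance should be validated against."""
    if validate_template_type:
        # -vtt: match on the x-trestle-template-type header rather than the file name,
        # so a_template_1.md and a_template_2.md can both resolve to a_template.md
        template_type = read_header_field(instance_file, TEMPLATE_TYPE_HEADER)
        if template_type is None:
            return None  # instance cannot be validated with -vtt
        candidate = template_dir / f'{template_type}.md'
    else:
        # default behaviour: the instance file name must match a template file name
        candidate = template_dir / instance_file.name
    return candidate if candidate.exists() else None
```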
diff --git a/setup.cfg b/setup.cfg index a0517b587..624f3cd6e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -77,7 +77,7 @@ dev = types-setuptools # # Docs website mkdocs - mkdocstrings[python-legacy]>=0.19.0 + mkdocstrings[python-legacy]==0.19.0 mkdocs-material markdown-include pymdown-extensions diff --git a/tests/data/author/0.0.1/test_1_md_format/bad_instance_reordered.md b/tests/data/author/0.0.1/test_1_md_format/bad_instance_reordered.md deleted file mode 100644 index 545c9cc2e..000000000 --- a/tests/data/author/0.0.1/test_1_md_format/bad_instance_reordered.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -yaml: header ---- - -# Required header 1 - -Here is stuff I need to write. - -# Required header 3 - -This is out of order - -# Required header 2 - -Here is stuff I need to write. - -## Required sub-header 1 - -Here is stuff I need to write. - -## Required sub-header 2 - -Here is stuff I need to write. diff --git a/tests/data/author/governed_folders/good_instance_with_template_type/architecture_test_1.md b/tests/data/author/governed_folders/good_instance_with_template_type/architecture_test_1.md new file mode 100644 index 000000000..233a1a40f --- /dev/null +++ b/tests/data/author/governed_folders/good_instance_with_template_type/architecture_test_1.md @@ -0,0 +1,23 @@ +--- +authors: + - Tim + - Jane + - Sally +owner: Joe +valid: + from: 2020-01-01 + to: 2099-12-31 +x-trestle-template-type: architecture +--- + +# System architecture + +Here is some content + +## Overview + +And some more + +## Security model + +And even more diff --git a/tests/data/author/governed_folders/good_instance_with_template_type/architecture_test_2.md b/tests/data/author/governed_folders/good_instance_with_template_type/architecture_test_2.md new file mode 100644 index 000000000..233a1a40f --- /dev/null +++ b/tests/data/author/governed_folders/good_instance_with_template_type/architecture_test_2.md @@ -0,0 +1,23 @@ +--- +authors: + - Tim + - Jane + - Sally +owner: Joe +valid: + from: 2020-01-01 + to: 2099-12-31 +x-trestle-template-type: architecture +--- + +# System architecture + +Here is some content + +## Overview + +And some more + +## Security model + +And even more diff --git a/tests/data/author/governed_folders/good_instance_with_template_type/network_test_1.md b/tests/data/author/governed_folders/good_instance_with_template_type/network_test_1.md new file mode 100644 index 000000000..46b5d2803 --- /dev/null +++ b/tests/data/author/governed_folders/good_instance_with_template_type/network_test_1.md @@ -0,0 +1,27 @@ +--- +authors: + - Tim + - Jane + - Sally +owner: Joe +valid: + from: 2020-01-01 + to: 2099-12-31 +x-trestle-template-type: network +--- + +# Network architecture + +Lots of stuff about the network overall including some diagrams. + +## External interconnections + +Here I put a table which describes the connections beyond my audit boundary with 3rd parties. + +## Corporate interconnections + +Here I describe interconnections into corporate systems. + +## Out of scope interconnections + +Here I describe interconnections that are out of scope because they occur outside of the current audit boundary. 
diff --git a/tests/data/author/governed_folders/good_instance_with_template_type/network_test_2.md b/tests/data/author/governed_folders/good_instance_with_template_type/network_test_2.md new file mode 100644 index 000000000..46b5d2803 --- /dev/null +++ b/tests/data/author/governed_folders/good_instance_with_template_type/network_test_2.md @@ -0,0 +1,27 @@ +--- +authors: + - Tim + - Jane + - Sally +owner: Joe +valid: + from: 2020-01-01 + to: 2099-12-31 +x-trestle-template-type: network +--- + +# Network architecture + +Lots of stuff about the network overall including some diagrams. + +## External interconnections + +Here I put a table which describes the connections beyond my audit boundary with 3rd parties. + +## Corporate interconnections + +Here I describe interconnections into corporate systems. + +## Out of scope interconnections + +Here I describe interconnections that are out of scope because they occur outside of the current audit boundary. diff --git a/tests/data/author/governed_folders/good_instance_without_template_type/architecture_test_1.md b/tests/data/author/governed_folders/good_instance_without_template_type/architecture_test_1.md new file mode 100644 index 000000000..a683e56bf --- /dev/null +++ b/tests/data/author/governed_folders/good_instance_without_template_type/architecture_test_1.md @@ -0,0 +1,22 @@ +--- +authors: + - Tim + - Jane + - Sally +owner: Joe +valid: + from: 2020-01-01 + to: 2099-12-31 +--- + +# System architecture + +Here is some content + +## Overview + +And some more + +## Security model + +And even more diff --git a/tests/data/author/governed_folders/good_instance_without_template_type/architecture_test_2.md b/tests/data/author/governed_folders/good_instance_without_template_type/architecture_test_2.md new file mode 100644 index 000000000..a683e56bf --- /dev/null +++ b/tests/data/author/governed_folders/good_instance_without_template_type/architecture_test_2.md @@ -0,0 +1,22 @@ +--- +authors: + - Tim + - Jane + - Sally +owner: Joe +valid: + from: 2020-01-01 + to: 2099-12-31 +--- + +# System architecture + +Here is some content + +## Overview + +And some more + +## Security model + +And even more diff --git a/tests/data/author/governed_folders/good_instance_without_template_type/network_test_1.md b/tests/data/author/governed_folders/good_instance_without_template_type/network_test_1.md new file mode 100644 index 000000000..1b1d249f2 --- /dev/null +++ b/tests/data/author/governed_folders/good_instance_without_template_type/network_test_1.md @@ -0,0 +1,26 @@ +--- +authors: + - Tim + - Jane + - Sally +owner: Joe +valid: + from: 2020-01-01 + to: 2099-12-31 +--- + +# Network architecture + +Lots of stuff about the network overall including some diagrams. + +## External interconnections + +Here I put a table which describes the connections beyond my audit boundary with 3rd parties. + +## Corporate interconnections + +Here I describe interconnections into corporate systems. + +## Out of scope interconnections + +Here I describe interconnections that are out of scope because they occur outside of the current audit boundary. 
diff --git a/tests/data/author/governed_folders/good_instance_without_template_type/network_test_2.md b/tests/data/author/governed_folders/good_instance_without_template_type/network_test_2.md new file mode 100644 index 000000000..1b1d249f2 --- /dev/null +++ b/tests/data/author/governed_folders/good_instance_without_template_type/network_test_2.md @@ -0,0 +1,26 @@ +--- +authors: + - Tim + - Jane + - Sally +owner: Joe +valid: + from: 2020-01-01 + to: 2099-12-31 +--- + +# Network architecture + +Lots of stuff about the network overall including some diagrams. + +## External interconnections + +Here I put a table which describes the connections beyond my audit boundary with 3rd parties. + +## Corporate interconnections + +Here I describe interconnections into corporate systems. + +## Out of scope interconnections + +Here I describe interconnections that are out of scope because they occur outside of the current audit boundary. diff --git a/tests/data/author/governed_folders/instance_with_diff_heading_levels/architecture.md b/tests/data/author/governed_folders/instance_with_diff_heading_levels/architecture.md new file mode 100644 index 000000000..0863b79e1 --- /dev/null +++ b/tests/data/author/governed_folders/instance_with_diff_heading_levels/architecture.md @@ -0,0 +1,59 @@ +--- +authors: + - Tim + - Jane + - Sally +owner: Joe +valid: + from: 2020-01-01 + to: 2099-12-31 +--- + +# Vulnerability Management (VULN) Defect Checks +## 0. Vulnerability Management Workflow +### 0.1 Data Sources +### 0.2 Fetchers +### 0.3 Data Store +### 0.4 Policy Engine +### 0.5 Ticketing System +## 1. Facts Data Model +### 1.1 Devices +#### Server +#### KubernetesCluster +#### ContainerImage +### 1.2 Vulnerabilities +#### ResourceScan +#### ResourceScanFinding +#### ResourceScanResult +### 1.3 Thresholds +#### CISOOverride +#### CISAKEV +### 1.4 Risks +#### VulnDeviations +### 1.5 Scanner Definition +#### ScannerConfiguration +## 2. Defect Checks +### Sub-capability: Reduce Software/ Firmware Vulnerabilities +#### Vulnerable Software/ Firmware +##### Purpose +##### Assessment Criteria +###### Inputs +###### Rules +####### vuln_prod_os_scan_duedate_check +######## Type +######## Rationale Statement +######## Impact Statement +######## Implementation Description +######## Audit Procedure(s) +######## Remediation Procedure(s) +######## Parameters +####### vuln_prod_os_scan_warning_duedate_check_warning +######## Type +######## Rationale Statement +######## Impact Statement +######## Implementation Description +######## Audit Procedure(s) +######## Remediation Procedure(s) +######## Parameters +###### Additional Outputs +##### Assessment Objectives diff --git a/tests/data/author/governed_folders/template_folder_headling_levels/architecture.md b/tests/data/author/governed_folders/template_folder_headling_levels/architecture.md new file mode 100644 index 000000000..e633a3061 --- /dev/null +++ b/tests/data/author/governed_folders/template_folder_headling_levels/architecture.md @@ -0,0 +1,30 @@ +--- +authors: + - Tim + - Jane + - Sally +owner: Joe +valid: + from: 2020-01-01 + to: 2099-12-31 +--- + +# { Security Capability Name } Defect Checks +## 1. Facts Data Model +### Sub-Capability: { _insert name of subcapability_} +## 2. 
Defect Checks +### Sub-capability: { _insert sub-capability name_} +#### { _insert defect check name_} +##### Assessment Criteria +###### Inputs +###### Rules +####### { Rule Name} +######## Type +######## Rationale Statement +######## Impact Statement +######## Implementation Description +######## Audit Procedure(s) +######## Remediation Procedure(s) +######## Parameters +###### Additional Outputs +##### Assessment Objectives \ No newline at end of file diff --git a/tests/data/author/governed_folders/template_folder_with_template_type/architecture.md b/tests/data/author/governed_folders/template_folder_with_template_type/architecture.md new file mode 100644 index 000000000..31a41f68a --- /dev/null +++ b/tests/data/author/governed_folders/template_folder_with_template_type/architecture.md @@ -0,0 +1,13 @@ +--- +authors: tmp +owner: tmp +valid: + from: null + to: null +x-trestle-template-type: architecture +--- +# System architecture + +## Overview + +## Security model diff --git a/tests/data/author/governed_folders/template_folder_with_template_type/network.md b/tests/data/author/governed_folders/template_folder_with_template_type/network.md new file mode 100644 index 000000000..cdc431123 --- /dev/null +++ b/tests/data/author/governed_folders/template_folder_with_template_type/network.md @@ -0,0 +1,16 @@ +--- +authors: tmp +owner: tmp +valid: + from: + to: +x-trestle-template-type: network +--- + +# Network architecture + +## External interconnections + +## Corporate interconnections + +## Out of scope interconnections diff --git a/tests/data/csv/bp.sample.v3.csv b/tests/data/csv/bp.sample.v3.csv new file mode 100644 index 000000000..8c9328368 --- /dev/null +++ b/tests/data/csv/bp.sample.v3.csv @@ -0,0 +1,12 @@ +"Reference_Id","Rule_Id","Rule_Description","Check_Id","Check_Description","Fetcher","Fetcher_Description","Profile_Source","Profile_Description","Component_Type","Control_Id_List","Component_Title","Component_Description","Parameter_Id","Parameter_Description","Parameter_Value_Default","Parameter_Value_Alternatives","Namespace" +"column description","column description","column description",,,,,,,,,,,,,,, +3000020,"account_owner_authorized_ip_range_configured","Ensure authorized IP ranges are configured by the account owner","account_owner_authorized_ip_range_configured","Check whether authorized IP ranges are configured by the account owner",,,"https://github.com/usnistgov/oscal-content/blob/main/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_HIGH-baseline_profile.json","NIST Special Publication 800-53 Revision 5 HIGH IMPACT BASELINE","Service","sc-7_smt.a sc-7_smt.b sc-7.3 sc-7.4_smt.a sc-7.5 ia-3","IAM","IAM",,,,,"http://ibm.github.io/compliance-trestle/schemas/oscal/cd" +3000021,"iam_admin_role_users_per_account_maxcount","Ensure there are no more than # IAM administrators configured per account","iam_admin_role_users_per_account_maxcount","Check whether there are no more than # IAM administrators configured per account",,,"https://github.com/usnistgov/oscal-content/blob/main/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_HIGH-baseline_profile.json","NIST Special Publication 800-53 Revision 5 HIGH IMPACT BASELINE","Service","ac-6 ac-5_smt.c","IAM","IAM","allowed_admins_per_account","Maximum allowed administrators per",10,10,"http://ibm.github.io/compliance-trestle/schemas/oscal/cd" +3000022,"iam_cos_public_access_disabled","Ensure Cloud Object Storage public access is disabled in IAM settings (not applicable to ACLs managed using S3 
APIs)","iam_cos_public_access_disabled","Check whether Cloud Object Storage public access is disabled in IAM settings (not applicable to ACLs managed using S3 APIs)",,,"https://github.com/usnistgov/oscal-content/blob/main/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_HIGH-baseline_profile.json","NIST Special Publication 800-53 Revision 5 HIGH IMPACT BASELINE","Service","ac-3 ac-4 ac-6 sc-7_smt.a sc-7_smt.b sc-7.4_smt.a ac-14_smt.a cm-7_smt.a cm-7_smt.b","IAM","IAM",,,,,"http://ibm.github.io/compliance-trestle/schemas/oscal/cd" +3000023,"iam_account_owner_no_api_key","Ensure the account owner does not have an IBM Cloud API key created in IAM","iam_account_owner_no_api_key","Check whether the account owner does not have an IBM Cloud API key created in IAM",,,"https://github.com/usnistgov/oscal-content/blob/main/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_HIGH-baseline_profile.json","NIST Special Publication 800-53 Revision 5 HIGH IMPACT BASELINE","Service","ac-2_smt.d ac-3 ac-5_smt.c ac-6","IAM","IAM",,,,,"http://ibm.github.io/compliance-trestle/schemas/oscal/cd" +3000024,"iam_api_keys_rotation_configured","Ensure IBM Cloud API keys that are managed in IAM are rotated at least every # days","iam_api_keys_rotation_configured","Check whether IBM Cloud API keys that are managed in IAM are rotated at least every # days",,,"https://github.com/usnistgov/oscal-content/blob/main/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_HIGH-baseline_profile.json","NIST Special Publication 800-53 Revision 5 HIGH IMPACT BASELINE","Service","ia-5_smt.g","IAM","IAM","api_keys_rotated_days","API Keys Rotated","x, y, z",,"http://ibm.github.io/compliance-trestle/schemas/oscal/cd" +3000027,"iam_account_owner_api_key_restrictions_configured","Ensure permissions for API key creation are limited and configured in IAM settings for the account owner","iam_account_owner_api_key_restrictions_configured","Check whether permissions for API key creation are limited and configured in IAM settings for the account owner",,,"https://github.com/usnistgov/oscal-content/blob/main/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_HIGH-baseline_profile.json","NIST Special Publication 800-53 Revision 5 HIGH IMPACT BASELINE","Service","ac-2_smt.d ac-3 ac-5_smt.c ac-6","IAM","IAM",,,,,"http://ibm.github.io/compliance-trestle/schemas/oscal/cd" +3000029,"iam_admin_role__user_maxcount","Ensure IAM-enabled services have no more than # users with the IAM administrator role","iam_admin_role__user_maxcount","Check whether IAM-enabled services have no more than # users with the IAM administrator role",,,"https://github.com/usnistgov/oscal-content/blob/main/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_HIGH-baseline_profile.json","NIST Special Publication 800-53 Revision 5 HIGH IMPACT BASELINE","Service","ac-6 ac-5_smt.c ia-7","IAM","IAM","no_of_admins_for_iam","Maximum no of IAM user","a, b, c",,"http://ibm.github.io/compliance-trestle/schemas/oscal/cd" +3000030,"iam_serviceID_policies_attached_to_access_groups_or_roles","Ensure IAM policies for service IDs are attached only to groups or roles","iam_serviceID_policies_attached_to_access_groups_or_roles","Check whether IAM policies for service IDs are attached only to groups or roles",,,"https://github.com/usnistgov/oscal-content/blob/main/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_HIGH-baseline_profile.json","NIST Special Publication 800-53 Revision 5 HIGH IMPACT BASELINE","Service","ac-3 ac-6 ac-2_smt.d ac-5_smt.c 
ia-7","IAM","IAM",,,,,"http://ibm.github.io/compliance-trestle/schemas/oscal/cd" +3000031,"iam_logDNA_enabled","Ensure Identity and Access Management (IAM) is enabled with audit logging","iam_logDNA_enabled","Check whether Identity and Access Management (IAM) is enabled with audit logging",,,"https://github.com/usnistgov/oscal-content/blob/main/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_HIGH-baseline_profile.json","NIST Special Publication 800-53 Revision 5 HIGH IMPACT BASELINE","Service","au-2_smt.a au-2_smt.d si-4_smt.a si-4_smt.b si-4_smt.c au-12_smt.a au-12_smt.b au-12_smt.c au-3 au-8_smt.a au-8_smt.b au-8.1_smt.a au-8.1_smt.b ca-7_smt.d","IAM","IAM",,,,,"http://ibm.github.io/compliance-trestle/schemas/oscal/cd" +3000032,"iam_admin_role_serviceid_maxcount","Ensure IAM-enabled services have no more than # service IDs with the IAM administrator role","iam_admin_role_serviceid_maxcount","Check whether IAM-enabled services have no more than # service IDs with the IAM administrator role",,,"https://github.com/usnistgov/oscal-content/blob/main/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_HIGH-baseline_profile.json","NIST Special Publication 800-53 Revision 5 HIGH IMPACT BASELINE","Service","ac-6 ac-5_smt.c ia-7","IAM","IAM","no_of_service_id_admins_for_iam","Maximum no of IAM Service ID","3, 4, 5",,"http://ibm.github.io/compliance-trestle/schemas/oscal/cd" diff --git a/tests/trestle/core/commands/author/folders_test.py b/tests/trestle/core/commands/author/folders_test.py index 4e7d2d2e7..36f958f43 100644 --- a/tests/trestle/core/commands/author/folders_test.py +++ b/tests/trestle/core/commands/author/folders_test.py @@ -674,3 +674,87 @@ def test_drawio_versioning_validation( monkeypatch.setattr(sys, 'argv', command_string_validate_content.split()) rc = trestle.cli.Trestle().run() assert rc == 0 + + +def test_validate_template_with_type_field( + testdata_dir: pathlib.Path, tmp_trestle_dir: pathlib.Path, monkeypatch: MonkeyPatch +) -> None: + """Test behaviour when validating an instance with x-trestle-template-type field.""" + task_template_folder = tmp_trestle_dir / '.trestle/author/test_task/' + test_template_folder = testdata_dir / 'author/governed_folders/template_folder_with_template_type' + test_instances_folder = testdata_dir / 'author/governed_folders/good_instance_with_template_type' + task_instance_folder = tmp_trestle_dir / 'test_task/folder_1' + + hidden_file = testdata_dir / pathlib.Path( + 'author/governed_folders/template_folder_with_drawio/.hidden_does_not_affect' + ) + test_utils.make_file_hidden(hidden_file) + + test_utils.copy_tree_or_file_with_hidden(test_template_folder, task_template_folder) + + shutil.copytree(test_instances_folder, task_instance_folder) + # test validate short + command_string_validate_content = 'trestle author folders validate -tn test_task -hv -vtt' + monkeypatch.setattr(sys, 'argv', command_string_validate_content.split()) + rc = trestle.cli.Trestle().run() + assert rc == 0 + + # test validate long + command_string_validate_content = 'trestle author folders validate -tn test_task -hv --validate-template-type' + monkeypatch.setattr(sys, 'argv', command_string_validate_content.split()) + rc = trestle.cli.Trestle().run() + assert rc == 0 + + +def test_validate_template_with_type_field_unhappy( + testdata_dir: pathlib.Path, tmp_trestle_dir: pathlib.Path, monkeypatch: MonkeyPatch +) -> None: + """Test hunhappy behaviour when validating an instance with x-trestle-template-type field.""" + task_template_folder = tmp_trestle_dir / 
'.trestle/author/test_task/' + test_template_folder = testdata_dir / 'author/governed_folders/template_folder_with_template_type' + test_instances_folder = testdata_dir / 'author/governed_folders/good_instance_without_template_type' + task_instance_folder = tmp_trestle_dir / 'test_task/folder_1' + + hidden_file = testdata_dir / pathlib.Path( + 'author/governed_folders/template_folder_with_drawio/.hidden_does_not_affect' + ) + test_utils.make_file_hidden(hidden_file) + + test_utils.copy_tree_or_file_with_hidden(test_template_folder, task_template_folder) + + shutil.copytree(test_instances_folder, task_instance_folder) + # test validate short + command_string_validate_content = 'trestle author folders validate -tn test_task -hv -vtt' + monkeypatch.setattr(sys, 'argv', command_string_validate_content.split()) + rc = trestle.cli.Trestle().run() + assert rc == 1 + + # test validate long + command_string_validate_content = 'trestle author folders validate -tn test_task -hv --validate-template-type' + monkeypatch.setattr(sys, 'argv', command_string_validate_content.split()) + rc = trestle.cli.Trestle().run() + assert rc == 1 + + +def test_heading_levels_hierarchy( + testdata_dir: pathlib.Path, tmp_trestle_dir: pathlib.Path, monkeypatch: MonkeyPatch +) -> None: + """Test behaviour when validating drawio instance.""" + task_template_folder = tmp_trestle_dir / '.trestle/author/test_task/' + test_template_folder = testdata_dir / 'author/governed_folders/template_folder_headling_levels' + test_instances_folder = testdata_dir / 'author/governed_folders/instance_with_diff_heading_levels' + task_instance_folder = tmp_trestle_dir / 'test_task/folder_1' + + hidden_file = testdata_dir / pathlib.Path( + 'author/governed_folders/template_folder_with_drawio/.hidden_does_not_affect' + ) + test_utils.make_file_hidden(hidden_file) + + test_utils.copy_tree_or_file_with_hidden(test_template_folder, task_template_folder) + + shutil.copytree(test_instances_folder, task_instance_folder) + + command_string_validate_content = 'trestle author folders validate -tn test_task -hv' + monkeypatch.setattr(sys, 'argv', command_string_validate_content.split()) + rc = trestle.cli.Trestle().run() + assert rc == 0 diff --git a/tests/trestle/core/markdown/markdown_validator_test.py b/tests/trestle/core/markdown/markdown_validator_test.py index 86ca09546..ada27a8ee 100644 --- a/tests/trestle/core/markdown/markdown_validator_test.py +++ b/tests/trestle/core/markdown/markdown_validator_test.py @@ -55,13 +55,6 @@ False, False ), - ( - pathlib.Path('tests/data/author/0.0.1/test_1_md_format/template.md'), - pathlib.Path('tests/data/author/0.0.1/test_1_md_format/bad_instance_reordered.md'), - False, - False, - False - ), ( pathlib.Path('tests/data/author/0.0.1/test_1_md_format/template.md'), pathlib.Path('tests/data/author/0.0.1/test_1_md_format/bad_instance_missing_heading.md'), @@ -125,13 +118,6 @@ False, False ), - ( - pathlib.Path('tests/data/author/0.0.1/test_4_md_format_extras/template.md'), - pathlib.Path('tests/data/author/0.0.1/test_4_md_format_extras/bad_instance_reordered.md'), - False, - False, - False - ), ( pathlib.Path('tests/data/author/0.0.1/test_4_md_format_extras/template.md'), pathlib.Path('tests/data/author/0.0.1/test_4_md_format_extras/bad_instance_missing_heading.md'), diff --git a/tests/trestle/core/profile_resolver_test.py b/tests/trestle/core/profile_resolver_test.py index 0351ee99e..d7eba4d77 100644 --- a/tests/trestle/core/profile_resolver_test.py +++ b/tests/trestle/core/profile_resolver_test.py @@ -441,6 
+441,6 @@ def test_profile_resolver_no_params(tmp_trestle_dir: pathlib.Path) -> None: def test_remote_profile_relative_cat(tmp_trestle_dir: pathlib.Path) -> None: """Test profile resolver with remote profile and import of relative catalog path.""" - profile_path = 'https://raw.githubusercontent.com/usnistgov/oscal-content/feature-basic-end-to-end-example/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_LOW-baseline_profile.json' # noqa E501 + profile_path = 'https://raw.githubusercontent.com/usnistgov/oscal-content/main/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_LOW-baseline_profile.json' # noqa E501 resolved_cat = ProfileResolver.get_resolved_profile_catalog(tmp_trestle_dir, profile_path) assert len(resolved_cat.groups) > 10 diff --git a/tests/trestle/core/repository_test.py b/tests/trestle/core/repository_test.py index fd6c91999..34602117e 100644 --- a/tests/trestle/core/repository_test.py +++ b/tests/trestle/core/repository_test.py @@ -13,17 +13,25 @@ # limitations under the License. """Tests for trestle Repository APIs.""" +import os import pathlib import pytest from tests import test_utils +import trestle.common.const as const import trestle.oscal as oscal import trestle.oscal.catalog as cat +import trestle.oscal.profile as prof from trestle.common.err import TrestleError from trestle.core import generators, parser -from trestle.core.repository import ManagedOSCAL, Repository +from trestle.core.repository import AgileAuthoring, ManagedOSCAL, Repository + +prof_name = 'comp_prof' +ssp_name = 'my_ssp' +cat_name = 'simplified_nist_catalog' +md_dir = 'test_md' def test_repo(tmp_trestle_dir: pathlib.Path) -> None: @@ -384,3 +392,75 @@ def test_managed_validate(tmp_trestle_dir: pathlib.Path) -> None: managed = repo.import_model(catalog_data, 'imported') success = managed.validate() assert success + + +# Test default agile authoring paths to ensure call from repository are correct + + +def test_agile_authoring_catalog(tmp_trestle_dir: pathlib.Path) -> None: + """Test catalog generate and assemble through API.""" + test_utils.load_from_json(tmp_trestle_dir, cat_name, cat_name, cat.Catalog) + + authoring = AgileAuthoring(tmp_trestle_dir) + + md_cat = os.path.join(md_dir, cat_name) + success = authoring.generate_catalog_markdown(cat_name, md_cat) + + assert success + assert pathlib.Path(tmp_trestle_dir / md_cat).exists() + + new_cat = 'temp_cat' + success = authoring.assemble_catalog_markdown(cat_name, new_cat, md_cat) + assert success + assert pathlib.Path(tmp_trestle_dir, const.MODEL_DIR_CATALOG, new_cat).exists() + + +def test_agile_authoring_profile(tmp_trestle_dir: pathlib.Path) -> None: + """Test profile generate and assemble through API.""" + test_utils.load_from_json(tmp_trestle_dir, cat_name, cat_name, cat.Catalog) + test_utils.load_from_json(tmp_trestle_dir, prof_name, prof_name, prof.Profile) + + authoring = AgileAuthoring(tmp_trestle_dir) + + md_prof = os.path.join(md_dir, prof_name) + success = authoring.generate_profile_markdown(prof_name, md_prof) + + assert success + assert pathlib.Path(tmp_trestle_dir / md_prof).exists() + + new_prof = 'temp_prof' + success = authoring.assemble_profile_markdown(prof_name, new_prof, md_prof) + assert success + assert pathlib.Path(tmp_trestle_dir, const.MODEL_DIR_PROFILE, new_prof).exists() + + +def test_agile_authoring_component(tmp_trestle_dir: pathlib.Path) -> None: + """Test component generate and assemble through API.""" + comp_name = test_utils.setup_component_generate(tmp_trestle_dir) + authoring = AgileAuthoring(tmp_trestle_dir) + + 
md_comp = os.path.join(md_dir, comp_name) + success = authoring.generate_component_definition_markdown(comp_name, md_comp) + + assert success + assert pathlib.Path(tmp_trestle_dir / md_comp).exists() + + new_comp = 'temp_comp' + success = authoring.assemble_component_definition_markdown(comp_name, new_comp, md_comp) + assert success + assert pathlib.Path(tmp_trestle_dir, const.MODEL_DIR_COMPDEF, new_comp).exists() + + +def test_agile_authoring_ssp(tmp_trestle_dir: pathlib.Path) -> None: + """Test ssp generate and assemble through API.""" + args, _ = test_utils.setup_for_ssp(tmp_trestle_dir, prof_name, ssp_name) + authoring = AgileAuthoring(tmp_trestle_dir) + + success = authoring.generate_ssp_markdown(args.profile, args.output, args.compdefs) + + assert success + assert pathlib.Path(tmp_trestle_dir / args.output).exists() + + success = authoring.assemble_ssp_markdown(ssp_name, ssp_name, args.output, args.compdefs) + assert success + assert pathlib.Path(tmp_trestle_dir, const.MODEL_DIR_SSP, args.output).exists() diff --git a/tests/trestle/tasks/csv_to_oscal_cd_test.py b/tests/trestle/tasks/csv_to_oscal_cd_test.py index 67a5930bc..82913ef83 100644 --- a/tests/trestle/tasks/csv_to_oscal_cd_test.py +++ b/tests/trestle/tasks/csv_to_oscal_cd_test.py @@ -32,7 +32,7 @@ def monkey_exception() -> None: """Monkey exception.""" - raise Exception('foobar') + raise RuntimeError('foobar') def _get_rows(file_: str) -> List[List[str]]: @@ -139,7 +139,7 @@ def _get_config_section_init(tmp_path: pathlib.Path, fname: str) -> tuple: def test_print_info(tmp_path: pathlib.Path) -> None: """Test print_info.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.print_info() assert retval is None @@ -147,7 +147,7 @@ def test_print_info(tmp_path: pathlib.Path) -> None: def test_simulate(tmp_path: pathlib.Path) -> None: """Test simulate.""" - config, section = _get_config_section(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section(tmp_path, 'test-csv-to-oscal-cd.config') tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.simulate() assert retval == TaskOutcome.SIM_SUCCESS @@ -156,7 +156,7 @@ def test_simulate(tmp_path: pathlib.Path) -> None: def test_execute(tmp_path: pathlib.Path) -> None: """Test execute.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.execute() assert retval == TaskOutcome.SUCCESS @@ -173,7 +173,7 @@ def test_config_missing(tmp_path: pathlib.Path) -> None: def test_config_missing_title(tmp_path: pathlib.Path) -> None: """Test config missing title.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') section.pop('title') tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.execute() @@ -182,7 +182,7 @@ def test_config_missing_title(tmp_path: pathlib.Path) -> None: def test_config_missing_version(tmp_path: pathlib.Path) -> None: """Test config missing version.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') 
section.pop('version') tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.execute() @@ -191,7 +191,7 @@ def test_config_missing_version(tmp_path: pathlib.Path) -> None: def test_config_missing_csv_file_spec(tmp_path: pathlib.Path) -> None: """Test config missing csv file spec.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') section['output-dir'] = str(tmp_path) section.pop('csv-file') tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) @@ -201,7 +201,7 @@ def test_config_missing_csv_file_spec(tmp_path: pathlib.Path) -> None: def test_config_missing_csv_file(tmp_path: pathlib.Path) -> None: """Test config missing csv file.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') section['csv-file'] = 'foobar' tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.execute() @@ -211,7 +211,7 @@ def test_config_missing_csv_file(tmp_path: pathlib.Path) -> None: def test_exception(tmp_path: pathlib.Path, monkeypatch: MonkeyPatch) -> None: """Test exception.""" monkeypatch.setattr(csv_to_oscal_cd._RuleSetIdMgr, 'get_next_rule_set_id', monkey_exception) - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.execute() assert retval == TaskOutcome.FAILURE @@ -219,7 +219,7 @@ def test_exception(tmp_path: pathlib.Path, monkeypatch: MonkeyPatch) -> None: def test_execute_mock(tmp_path: pathlib.Path) -> None: """Test execute mock.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') # get good data & test that mocking works rows = _get_rows('tests/data/csv/ocp4-user.v2.csv') with mock.patch('trestle.tasks.csv_to_oscal_cd.csv.reader') as mock_csv_reader: @@ -260,7 +260,7 @@ def test_execute_validate_controls(tmp_path: pathlib.Path, monkeypatch: MonkeyPa cwd = os.getcwd() try: os.chdir(workspace) - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) section['validate-controls'] = 'warn' retval = tgt.execute() @@ -276,7 +276,7 @@ def test_execute_validate_controls_nist(tmp_path: pathlib.Path, monkeypatch: Mon cwd = os.getcwd() try: os.chdir(workspace) - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') # replace resolved profile (catalog) rows = _get_rows('tests/data/csv/ocp4-user.v2.csv') for i, row in enumerate(rows): @@ -305,7 +305,7 @@ def test_execute_control_invalid(tmp_path: pathlib.Path, monkeypatch: MonkeyPatc cwd = os.getcwd() try: os.chdir(workspace) - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') # inject error rows = _get_rows('tests/data/csv/ocp4-user.v2.csv') row = rows[2] @@ -329,7 +329,7 @@ def test_execute_control_invalid_fail(tmp_path: pathlib.Path, monkeypatch: Monke cwd = os.getcwd() try: os.chdir(workspace) 
- config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') # inject error rows = _get_rows('tests/data/csv/ocp4-user.v2.csv') row = rows[2] @@ -348,7 +348,7 @@ def test_execute_control_invalid_fail(tmp_path: pathlib.Path, monkeypatch: Monke def test_execute_no_overwrite(tmp_path: pathlib.Path) -> None: """Test execute no overwrite.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.execute() assert retval == TaskOutcome.SUCCESS @@ -359,7 +359,7 @@ def test_execute_no_overwrite(tmp_path: pathlib.Path) -> None: def test_execute_verbose(tmp_path: pathlib.Path) -> None: """Test execute verbose.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') section['quiet'] = 'False' tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.execute() @@ -369,7 +369,7 @@ def test_execute_verbose(tmp_path: pathlib.Path) -> None: def test_execute_missing_heading(tmp_path: pathlib.Path) -> None: """Test execute missing heading.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') # inject error rows = _get_rows('tests/data/csv/ocp4-user.v2.csv') row = rows[0] @@ -384,7 +384,7 @@ def test_execute_missing_heading(tmp_path: pathlib.Path) -> None: def test_execute_missing_value(tmp_path: pathlib.Path) -> None: """Test execute missing value.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') # inject error rows = _get_rows('tests/data/csv/ocp4-user.v2.csv') row = rows[2] @@ -399,7 +399,7 @@ def test_execute_missing_value(tmp_path: pathlib.Path) -> None: def test_execute_missing_rule_id(tmp_path: pathlib.Path) -> None: """Test execute missing rule id.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') # inject error rows = _get_rows('tests/data/csv/ocp4-user.v2.csv') row = rows[2] @@ -414,7 +414,7 @@ def test_execute_missing_rule_id(tmp_path: pathlib.Path) -> None: def test_execute_missing_control_id_list(tmp_path: pathlib.Path) -> None: """Test execute missing control id list.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') # inject error rows = _get_rows('tests/data/csv/ocp4-user.v2.csv') row = rows[2] @@ -433,33 +433,46 @@ def test_execute_missing_control_id_list(tmp_path: pathlib.Path) -> None: assert len(component.props) == 423 -def test_execute_missing_parameter_id(tmp_path: pathlib.Path) -> None: - """Test execute missing parameter id.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd.config') - # inject error - rows = _get_rows('tests/data/csv/ocp4-user.v2.csv') - row = rows[2] - assert row[9] == 'scan_interval_max' - row[9] = '' - with mock.patch('trestle.tasks.csv_to_oscal_cd.csv.reader') as mock_csv_reader: - mock_csv_reader.return_value = rows - tgt = 
csv_to_oscal_cd.CsvToOscalComponentDefinition(section) - retval = tgt.execute() - assert retval == TaskOutcome.FAILURE - - def test_execute_bp_sample(tmp_path: pathlib.Path) -> None: """Test execute bp sample.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.execute() assert retval == TaskOutcome.SUCCESS _validate_bp(tmp_path) +def test_execute_bp3_sample(tmp_path: pathlib.Path) -> None: + """Test execute bp3 sample.""" + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + section['csv-file'] = 'tests/data/csv/bp.sample.v3.csv' + tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) + retval = tgt.execute() + assert retval == TaskOutcome.SUCCESS + # read component-definition + fp = pathlib.Path(tmp_path) / 'component-definition.json' + cd = ComponentDefinition.oscal_read(fp) + # spot check + component = cd.components[0] + assert len(component.props) == 59 + assert len(component.control_implementations) == 1 + ci = component.control_implementations[0] + assert len(ci.set_parameters) == 4 + assert len(ci.set_parameters[0].values) == 1 + assert len(ci.set_parameters[1].values) == 3 + assert ci.set_parameters[1].values[0] == 'x' + assert ci.set_parameters[1].values[1] == 'y' + assert ci.set_parameters[1].values[2] == 'z' + assert len(ci.set_parameters[2].values) == 3 + assert len(ci.set_parameters[3].values) == 3 + assert ci.set_parameters[3].values[0] == '3' + assert ci.set_parameters[3].values[1] == '4' + assert ci.set_parameters[3].values[2] == '5' + + def test_execute_bp_cd(tmp_path: pathlib.Path) -> None: """Test execute bp cd.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.execute() @@ -469,7 +482,7 @@ def test_execute_bp_cd(tmp_path: pathlib.Path) -> None: def test_execute_bp_cd_missing(tmp_path: pathlib.Path) -> None: """Test execute bp cd missing.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/foobar/component-definition.json' tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) retval = tgt.execute() @@ -478,7 +491,7 @@ def test_execute_bp_cd_missing(tmp_path: pathlib.Path) -> None: def test_execute_duplicate_rule(tmp_path: pathlib.Path) -> None: """Test execute duplicate rule.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # duplicate rule rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -492,7 +505,7 @@ def test_execute_duplicate_rule(tmp_path: pathlib.Path) -> None: def test_execute_delete_rule(tmp_path: pathlib.Path) -> None: """Test execute delete rule.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 
'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # delete rule rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -517,7 +530,7 @@ def test_execute_delete_rule(tmp_path: pathlib.Path) -> None: def test_execute_delete_all_rules_with_params(tmp_path: pathlib.Path) -> None: """Test execute delete all rules with params.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # delete all rules with params rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -543,7 +556,7 @@ def test_execute_delete_all_rules_with_params(tmp_path: pathlib.Path) -> None: def test_execute_delete_rule_with_params(tmp_path: pathlib.Path) -> None: """Test execute delete rule with params.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # delete rule with params rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -566,7 +579,7 @@ def test_execute_delete_rule_with_params(tmp_path: pathlib.Path) -> None: def test_execute_add_rule(tmp_path: pathlib.Path) -> None: """Test execute add rule.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # add rule rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -650,7 +663,7 @@ def test_execute_add_rule(tmp_path: pathlib.Path) -> None: def test_execute_missing_param_default_value(tmp_path: pathlib.Path) -> None: """Test execute missing param default_value.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') # delete default param default value rows = _get_rows('tests/data/csv/bp.sample.v2.csv') row = rows[3] @@ -666,7 +679,7 @@ def test_execute_missing_param_default_value(tmp_path: pathlib.Path) -> None: def test_execute_change_param_default_value(tmp_path: pathlib.Path) -> None: """Test execute change param default_value.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # change default param default value rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -696,7 +709,7 @@ def test_execute_change_param_default_value(tmp_path: pathlib.Path) -> None: def test_execute_delete_param(tmp_path: pathlib.Path) -> None: """Test execute delete param.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # delete param rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -726,7 +739,7 @@ def test_execute_delete_param(tmp_path: pathlib.Path) -> None: def 
test_execute_delete_params(tmp_path: pathlib.Path) -> None: """Test execute delete params.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # delete params rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -750,7 +763,7 @@ def test_execute_delete_params(tmp_path: pathlib.Path) -> None: def test_execute_add_param(tmp_path: pathlib.Path) -> None: """Test execute add param.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # add param rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -789,7 +802,7 @@ def test_execute_add_param(tmp_path: pathlib.Path) -> None: def test_execute_delete_all_control_id_list(tmp_path: pathlib.Path) -> None: """Test execute delete all control id list.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # delete all control lists rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -810,7 +823,7 @@ def test_execute_delete_all_control_id_list(tmp_path: pathlib.Path) -> None: def test_execute_delete_control_id(tmp_path: pathlib.Path) -> None: """Test execute delete control id.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # delete control id rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -835,7 +848,7 @@ def test_execute_delete_control_id(tmp_path: pathlib.Path) -> None: def test_execute_delete_control_id_multi(tmp_path: pathlib.Path) -> None: """Test execute delete control id multi.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # delete control id multi rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -860,7 +873,7 @@ def test_execute_delete_control_id_multi(tmp_path: pathlib.Path) -> None: def test_execute_delete_control_id_smt(tmp_path: pathlib.Path) -> None: """Test execute delete control id smt.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # delete control id smt rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -887,7 +900,7 @@ def test_execute_delete_control_id_smt(tmp_path: pathlib.Path) -> None: def test_execute_add_control_id(tmp_path: pathlib.Path) -> None: """Test execute add control id.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') 
section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # add control id rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -911,7 +924,7 @@ def test_execute_add_control_id(tmp_path: pathlib.Path) -> None: def test_execute_add_control_id_smt(tmp_path: pathlib.Path) -> None: """Test execute add control mapping smt.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # add control mapping smt rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -941,7 +954,7 @@ def test_execute_add_control_id_smt(tmp_path: pathlib.Path) -> None: def test_execute_delete_property(tmp_path: pathlib.Path) -> None: """Test execute delete property.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # delete property rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -967,7 +980,7 @@ def test_execute_delete_property(tmp_path: pathlib.Path) -> None: def test_execute_add_property(tmp_path: pathlib.Path) -> None: """Test execute add property.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # add property rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -999,7 +1012,7 @@ def test_execute_add_property(tmp_path: pathlib.Path) -> None: def test_execute_add_user_property(tmp_path: pathlib.Path) -> None: """Test execute add user property.""" - config, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' # add user property rows = _get_rows('tests/data/csv/bp.sample.v2.csv') @@ -1020,3 +1033,54 @@ def test_execute_add_user_property(tmp_path: pathlib.Path) -> None: assert len(component.props) == 71 assert component.props[5].name == 'New_Column_Name' assert component.props[5].value == 'new-column-value-2' + + +def test_execute_validation(tmp_path: pathlib.Path) -> None: + """Test execute validation.""" + _, section = _get_config_section_init(tmp_path, 'test-csv-to-oscal-cd-bp.config') + section['component-definition'] = 'tests/data/csv/component-definitions/bp/component-definition.json' + # set validation component type + rows = _get_rows('tests/data/csv/bp.sample.v2.csv') + row = [ + 'validation-reference-id', + 'validation-rule-id', + 'validation-rule-description', + 'validation-check-id', + 'validation-check-description', + 'validation-fetcher', + 'validation-fetcher-description', + 'https://abc.com/validation-profile-reference-url', + 'validation-profile-description', + 'Validation', + 'validation-control-id-list', + 'IAM', + 'IAM', + 'validation-parameter-id', + 'validation-parameter-description', + 'validation-parameter-value-default', + 'validation-parameter-value-alternatives', + 'https://abc.com/validation-namespace' + ] + rows.append(row) + with 
mock.patch('trestle.tasks.csv_to_oscal_cd.csv.reader') as mock_csv_reader: + mock_csv_reader.return_value = rows + tgt = csv_to_oscal_cd.CsvToOscalComponentDefinition(section) + retval = tgt.execute() + assert retval == TaskOutcome.SUCCESS + # read component-definition + fp = pathlib.Path(tmp_path) / 'component-definition.json' + cd = ComponentDefinition.oscal_read(fp) + # spot check + component = cd.components[1] + assert component.type == 'Validation' + assert component.title == 'IAM' + assert component.description == 'IAM' + assert len(component.props) == 6 + assert component.props[0].name == 'Rule_Id' + assert component.props[0].value == 'validation-rule-id' + assert component.props[0].class_ == 'scc_class' + assert component.props[1].name == 'Check_Id' + assert component.props[1].value == 'validation-check-id' + assert component.props[2].name == 'Check_Description' + assert component.props[2].value == 'validation-check-description' + assert len(component.control_implementations) == 0 diff --git a/trestle/common/const.py b/trestle/common/const.py index 0578f116f..02c37b00e 100644 --- a/trestle/common/const.py +++ b/trestle/common/const.py @@ -441,6 +441,10 @@ VALUES = 'values' +GUIDELINES = 'guidelines' + +LABEL = 'label' + SECTIONS_TAG = TRESTLE_TAG + 'sections' EDITABLE_CONTENT = 'Editable Content' diff --git a/trestle/common/model_utils.py b/trestle/common/model_utils.py index b38261300..09a07bc42 100644 --- a/trestle/common/model_utils.py +++ b/trestle/common/model_utils.py @@ -536,7 +536,7 @@ def _parameter_to_dict_recurse(obj: Union[OscalBaseModel, str], partial: bool) - Returns: The converted parameter as dictionary """ - main_fields = ['id', 'label', 'values', 'select', 'choice', 'how_many'] + main_fields = ['id', 'label', 'values', 'select', 'choice', 'how_many', 'guidelines', 'prose'] if isinstance(obj, common.Remarks): return obj.__root__ if isinstance(obj, common.HowMany): diff --git a/trestle/core/catalog/catalog_writer.py b/trestle/core/catalog/catalog_writer.py index fae752221..07c274041 100644 --- a/trestle/core/catalog/catalog_writer.py +++ b/trestle/core/catalog/catalog_writer.py @@ -173,6 +173,7 @@ def _construct_set_parameters_dict( # pull only the values from the actual control dict # all the other elements are from the profile set_param new_dict[const.VALUES] = orig_dict.get(const.VALUES, None) + new_dict[const.GUIDELINES] = orig_dict.get(const.GUIDELINES, None) else: # if the profile doesnt change this param at all, show it in the header with values tmp_dict = ModelUtils.parameter_to_dict(param_dict, True) @@ -181,7 +182,9 @@ def _construct_set_parameters_dict( new_dict.pop('id', None) if display_name: new_dict[const.DISPLAY_NAME] = display_name - set_param_dict[param_id] = new_dict + key_order = (const.LABEL, const.GUIDELINES, const.PROFILE_VALUES, const.VALUES, const.DISPLAY_NAME) + ordered_dict = {k: new_dict[k] for k in key_order if k in new_dict.keys()} + set_param_dict[param_id] = ordered_dict return set_param_dict diff --git a/trestle/core/commands/author/consts.py b/trestle/core/commands/author/consts.py index b61d9cfff..61ccad7fd 100644 --- a/trestle/core/commands/author/consts.py +++ b/trestle/core/commands/author/consts.py @@ -47,9 +47,14 @@ 'Enable to validate README.md files. Required if readme files are included in the' + 'template.' 
) +TEMPLATE_TYPE_VALIDATE_SHORT = '-vtt' +TEMPLATE_TYPE_VALIDATE_LONG = '--validate-template-type' +TEMPLATE_TYPE_VALIDATE_HELP = 'Validate that template and instance files match with x-trestle-template-type field' + START_TEMPLATE_VERSION = '0.0.1' # first ever template version, all templates without version will be defaulted to this TRESTLE_RESOURCES = 'trestle.resources' TEMPLATE_VERSION_HEADER = 'x-trestle-template-version' +TEMPLATE_TYPE_HEADER = 'x-trestle-template-type' # Governed heading - capability: To be removed GH_SHORT = '-gh' diff --git a/trestle/core/commands/author/folders.py b/trestle/core/commands/author/folders.py index 79f6843fd..4576c2a92 100644 --- a/trestle/core/commands/author/folders.py +++ b/trestle/core/commands/author/folders.py @@ -81,6 +81,13 @@ def _init_arguments(self) -> None: action='store_true' ) + self.add_argument( + author_const.TEMPLATE_TYPE_VALIDATE_SHORT, + author_const.TEMPLATE_TYPE_VALIDATE_LONG, + help=author_const.TEMPLATE_TYPE_VALIDATE_HELP, + action='store_true' + ) + def _run(self, args: argparse.Namespace) -> int: try: if self._initialize(args): @@ -102,7 +109,8 @@ def _run(self, args: argparse.Namespace) -> int: args.governed_heading, args.readme_validate, args.template_version, - args.ignore + args.ignore, + args.validate_template_type ) else: raise TrestleIncorrectArgsError(f'Unsupported mode: {args.mode} for folders command.') @@ -186,12 +194,13 @@ def _measure_template_folder( governed_heading: str, readme_validate: bool, template_version: str, - ignore: str + ignore: str, + validate_by_type_field: bool, ) -> bool: """ Validate instances against templates. - Validation will succeed iff: + Validation will succeed if: 1. All template files from the specified version are present in the task 2. All of the instances are valid """ @@ -217,8 +226,24 @@ def _measure_template_folder( if instance_file.suffix == const.MARKDOWN_FILE_EXT: md_api = MarkdownAPI() versioned_template_dir = None + # checks on naming template name out of type header if needed + if validate_by_type_field: + template_name = md_api.processor.fetch_value_from_header( + instance_file, author_const.TEMPLATE_TYPE_HEADER + ) + if template_name is None: + logger.warning( + f'INVALID: Instance file {instance_file_name} does not have' + f' {author_const.TEMPLATE_TYPE_HEADER}' + ' field in its header and can not be validated using optional parameter validate' + ' template type field' + ) + return False + template_name = template_name + '.md' + else: + template_name = instance_file_name if template_version != '': - template_file = self.template_dir / instance_file_name + template_file = self.template_dir / template_name versioned_template_dir = self.template_dir else: instance_version = md_api.processor.fetch_value_from_header( @@ -229,16 +254,29 @@ def _measure_template_folder( versioned_template_dir = TemplateVersioning.get_versioned_template_dir( self.template_dir, instance_version ) - template_file = versioned_template_dir / instance_file_name + template_file = versioned_template_dir / template_name # Check if instance is in the available templates, # additional files are allowed but should not be validated. 
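+                    # With --validate-template-type (-vtt), the lookup below matches the template
+                    # file stem against the instance's x-trestle-template-type header value instead
+                    # of requiring the instance file name to equal the template file name.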
templates = self._get_templates(versioned_template_dir, readme_validate) is_template_present = False + template_type_is_valid = False for template in templates: - if template.name == str(instance_file_name): - is_template_present = True - break + # checks if valdation needs to check on x-trestle-template-type field on header + if validate_by_type_field: + instance_template_type = md_api.processor.fetch_value_from_header( + instance_file, author_const.TEMPLATE_TYPE_HEADER + ) + if template.stem == instance_template_type: + is_template_present = True + template_type_is_valid = True + break + # validation through template type field is not needed and performs validation + # through file name flow as usual + else: + if template.name == str(instance_file_name): + is_template_present = True + break if not is_template_present: logger.info( @@ -252,7 +290,7 @@ def _measure_template_folder( [t.relative_to(versioned_template_dir) for t in templates], False ) - if instance_file_name in all_versioned_templates[instance_version]: + if instance_file_name in all_versioned_templates[instance_version] or template_type_is_valid: # validate md_api.load_validator_with_template( template_file, validate_header, not validate_only_header, governed_heading @@ -266,7 +304,13 @@ def _measure_template_folder( else: logger.info(f'VALID: {instance_file}') # mark template as present - all_versioned_templates[instance_version][instance_file_name] = True + if template_type_is_valid: + template_file_name = [ + temp.relative_to(versioned_template_dir) for temp in templates if temp.name == template_name + ] + all_versioned_templates[instance_version][template_file_name[0]] = True + else: + all_versioned_templates[instance_version][instance_file_name] = True elif instance_file.suffix == const.DRAWIO_FILE_EXT: drawio = draw_io.DrawIO(instance_file) @@ -359,7 +403,8 @@ def validate( governed_heading: str, readme_validate: bool, template_version: str, - ignore: str + ignore: str, + validate_by_type_field: bool, ) -> int: """Validate task.""" if not self.task_path.is_dir(): @@ -376,7 +421,8 @@ def validate( governed_heading, readme_validate, template_version, - ignore + ignore, + validate_by_type_field ) if not result: raise TrestleError( diff --git a/trestle/core/commands/author/profile.py b/trestle/core/commands/author/profile.py index 77d2a4b05..2f4725d4f 100644 --- a/trestle/core/commands/author/profile.py +++ b/trestle/core/commands/author/profile.py @@ -644,8 +644,8 @@ def update_profile_import( Returns: None """ - exclude_with_ids: Set[prof.withId] = set() - components_by_id: Dict(str, List[ssp.ByComponent]) = {} + exclude_with_ids: Set[str] = set() + components_by_id: Dict[str, List[ssp.ByComponent]] = {} # Create dictionary containing all by-components by control for faster searching for implemented_requirement in leveraged_ssp.control_implementation.implemented_requirements: @@ -671,7 +671,7 @@ def update_profile_import( if by_comps is not None and ProfileInherit._is_inherited(by_comps): exclude_with_ids.add(control_id) - include_with_ids: Set[prof.withId] = catalog_control_ids - exclude_with_ids + include_with_ids: Set[str] = catalog_control_ids - exclude_with_ids orig_prof_import.include_controls = [prof.SelectControlById(with_ids=sorted(include_with_ids))] orig_prof_import.exclude_controls = [prof.SelectControlById(with_ids=sorted(exclude_with_ids))] @@ -760,11 +760,11 @@ def initialize_profile( if version: result_profile.metadata.version = version - if ModelUtils.models_are_equivalent(existing_profile, 
result_profile): # type: ignore + if ModelUtils.models_are_equivalent(existing_profile, result_profile): logger.info('Profile is no different from existing version, so no update.') return CmdReturnCodes.SUCCESS.value - ModelUtils.update_last_modified(result_profile) # type: ignore + ModelUtils.update_last_modified(result_profile) ModelUtils.save_top_level_model(result_profile, trestle_root, output_prof_name, FileContentType.JSON) except TrestleError as e: diff --git a/trestle/core/markdown/markdown_validator.py b/trestle/core/markdown/markdown_validator.py index c50f4d647..3cd91cea5 100644 --- a/trestle/core/markdown/markdown_validator.py +++ b/trestle/core/markdown/markdown_validator.py @@ -21,6 +21,7 @@ import trestle.core.markdown.markdown_const as md_const from trestle.common.err import TrestleError +from trestle.common.list_utils import as_list from trestle.core.commands.author.consts import START_TEMPLATE_VERSION, TEMPLATE_VERSION_HEADER from trestle.core.markdown.docs_markdown_node import DocsMarkdownNode @@ -202,30 +203,25 @@ def _validate_headings(self, instance: pathlib.Path, template_keys: List[str], i ) return False template_header_pointer = 0 + present_keys = [] for key in instance_keys: if template_header_pointer >= len(template_keys): break - if key in template_keys and key != template_keys[template_header_pointer]: - logger.warning( - f'Headings in the instance: {instance} were shuffled or modified. ' - f'\nInstance does not have required template heading ' - f'\"{template_keys[template_header_pointer]}\". ' - f'Check if this heading was modified/present in the instance.' - f'\nPlease note that no changes to template headings are allowed, ' - f'including extra spaces.' - ) - return False - elif key in template_keys and key == template_keys[template_header_pointer]: + if key in template_keys and key not in present_keys: + present_keys.append(template_keys[template_keys.index(key)]) template_header_pointer += 1 elif re.search(md_const.SUBSTITUTION_REGEX, template_keys[template_header_pointer]) is not None: + present_keys.append(template_keys[template_header_pointer]) template_header_pointer += 1 # skip headers with substitutions - if template_header_pointer != len(template_keys): + diff_keys = set(template_keys) - set(present_keys) + if template_header_pointer != len(template_keys) and len(diff_keys) > 0: logger.info( f'Headings in the instance: {instance} were removed. ' f'Expected {len(template_keys)} headings, but found only {template_header_pointer}.' 
) + for result in as_list(diff_keys): + logger.info(f'Heading {result} in the instance: {instance} was removed or not present ') return False - return True @classmethod diff --git a/trestle/core/repository.py b/trestle/core/repository.py index fffca8a81..2b89305de 100644 --- a/trestle/core/repository.py +++ b/trestle/core/repository.py @@ -24,6 +24,10 @@ import trestle.common.const as const import trestle.core.commands.assemble as assemblecmd +import trestle.core.commands.author.catalog as catalogauthorcmd +import trestle.core.commands.author.component as componentauthorcmd +import trestle.core.commands.author.profile as profileauthorcmd +import trestle.core.commands.author.ssp as sspauthorcmd import trestle.core.commands.merge as mergecmd import trestle.core.commands.split as splitcmd import trestle.core.commands.validate as validatecmd @@ -362,3 +366,286 @@ def validate_model(self, model_type: Type[OscalBaseModel], name: str) -> bool: logger.debug(f'Model {name} validated successfully.') return success + + +class AgileAuthoring(Repository): + """ + AgileAuthoring extends the Repository class for performing authoring specific operations on Trestle repository. + + This class provides a set of APIs to perform generate and assemble authoring operations in the trestle repository + rather than using the command line. + + """ + + def __init__(self, root_dir: pathlib.Path) -> None: + """Initialize trestle repository object.""" + super().__init__(root_dir) + + def assemble_catalog_markdown( + self, + name: str, + output: str, + markdown_dir: str, + set_parameters: bool = False, + regenerate: bool = False, + version: str = '' + ) -> bool: + """Assemble catalog markdown into OSCAL Catalog in JSON.""" + logger.debug(f'Assembling model {name} of type catalog.') + success = False + + verbose = log.get_current_verbosity_level(logger) + args = argparse.Namespace( + name=name, + output=output, + markdown=markdown_dir, + trestle_root=self.root_dir, + set_parameters=set_parameters, + regenerate=regenerate, + version=version, + verbose=verbose + ) + + try: + ret = catalogauthorcmd.CatalogAssemble()._run(args) + if ret == 0: + success = True + except Exception as e: + raise TrestleError(f'Error assembling catalog {name}: {e}') + + logger.debug(f'Model {name} assembled successfully.') + return success + + def assemble_profile_markdown( + self, + name: str, + output: str, + markdown_dir: str, + set_parameters: bool = False, + regenerate: bool = False, + version: str = '', + sections: str = '', + required_sections: str = '', + allowed_sections: str = '' + ) -> bool: + """Assemble profile markdown into OSCAL Profile in JSON.""" + logger.debug(f'Assembling model {name} of type profile.') + success = False + + verbose = log.get_current_verbosity_level(logger) + args = argparse.Namespace( + name=name, + output=output, + markdown=markdown_dir, + trestle_root=self.root_dir, + set_parameters=set_parameters, + regenerate=regenerate, + version=version, + sections=sections, + required_sections=required_sections, + allowed_sections=allowed_sections, + verbose=verbose + ) + + try: + ret = profileauthorcmd.ProfileAssemble()._run(args) + if ret == 0: + success = True + except Exception as e: + raise TrestleError(f'Error assembling profile {name}: {e}') + + logger.debug(f'Model {name} assembled successfully.') + return success + + def assemble_component_definition_markdown( + self, name: str, output: str, markdown_dir: str, regenerate: bool = False, version: str = '' + ) -> bool: + """Assemble component definition markdown 
into OSCAL Component Definition in JSON.""" + logger.debug(f'Assembling model {name} of type component definition.') + success = False + + verbose = log.get_current_verbosity_level(logger) + args = argparse.Namespace( + name=name, + output=output, + markdown=markdown_dir, + trestle_root=self.root_dir, + regenerate=regenerate, + version=version, + verbose=verbose + ) + + try: + ret = componentauthorcmd.ComponentAssemble()._run(args) + if ret == 0: + success = True + except Exception as e: + raise TrestleError(f'Error assembling component definition {name}: {e}') + + logger.debug(f'Model {name} assembled successfully.') + return success + + def assemble_ssp_markdown( + self, + name: str, + output: str, + markdown_dir: str, + compdefs: str, + regenerate: bool = False, + version: str = '' + ) -> bool: + """Assemble ssp markdown into OSCAL SSP in JSON.""" + logger.debug(f'Assembling model {name} of type ssp.') + success = False + + verbose = log.get_current_verbosity_level(logger) + args = argparse.Namespace( + name=name, + output=output, + markdown=markdown_dir, + compdefs=compdefs, + trestle_root=self.root_dir, + regenerate=regenerate, + version=version, + verbose=verbose + ) + + try: + ret = sspauthorcmd.SSPAssemble()._run(args) + if ret == 0: + success = True + except Exception as e: + raise TrestleError(f'Error assembling ssp {name}: {e}') + + logger.debug(f'Model {name} assembled successfully.') + return success + + def generate_catalog_markdown( + self, + name: str, + output: str, + force_overwrite: bool = False, + yaml_header: str = '', + overwrite_header_values: bool = False + ) -> bool: + """Generate catalog markdown from OSCAL Catalog in JSON.""" + logger.debug(f'Generating markdown for {name} of type catalog.') + success = False + + verbose = log.get_current_verbosity_level(logger) + args = argparse.Namespace( + name=name, + output=output, + trestle_root=self.root_dir, + force_overwrite=force_overwrite, + yaml_header=yaml_header, + overwrite_header_values=overwrite_header_values, + verbose=verbose + ) + + try: + ret = catalogauthorcmd.CatalogGenerate()._run(args) + if ret == 0: + success = True + except Exception as e: + raise TrestleError(f'Error generate markdown for catalog {name}: {e}') + + logger.debug(f'Model {name} markdown generated successfully.') + return success + + def generate_profile_markdown( + self, + name: str, + output: str, + force_overwrite: bool = False, + yaml_header: str = '', + overwrite_header_values: bool = False, + sections: str = '', + required_sections: str = '' + ) -> bool: + """Generate profile markdown from OSCAL Profile in JSON.""" + logger.debug(f'Generating markdown for {name} of type profile.') + success = False + + verbose = log.get_current_verbosity_level(logger) + args = argparse.Namespace( + name=name, + output=output, + trestle_root=self.root_dir, + force_overwrite=force_overwrite, + yaml_header=yaml_header, + overwrite_header_values=overwrite_header_values, + sections=sections, + required_sections=required_sections, + verbose=verbose + ) + + try: + ret = profileauthorcmd.ProfileGenerate()._run(args) + if ret == 0: + success = True + except Exception as e: + raise TrestleError(f'Error generate markdown for profile {name}: {e}') + + logger.debug(f'Model {name} markdown generated successfully.') + return success + + def generate_component_definition_markdown( + self, + name: str, + output: str, + force_overwrite: bool = False, + ) -> bool: + """Generate component definition markdown from OSCAL Component Definition in JSON.""" + 
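+        # Build the argparse.Namespace expected by the component-generate command and delegate
+        # to ComponentGenerate()._run(), mirroring the CLI behavior without shelling out.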
logger.debug(f'Generating markdown for {name} of type component definition.') + success = False + + verbose = log.get_current_verbosity_level(logger) + args = argparse.Namespace( + name=name, output=output, trestle_root=self.root_dir, force_overwrite=force_overwrite, verbose=verbose + ) + + try: + ret = componentauthorcmd.ComponentGenerate()._run(args) + if ret == 0: + success = True + except Exception as e: + raise TrestleError(f'Error generating markdown for component definition {name}: {e}') + + logger.debug(f'Model {name} markdown generated successfully.') + return success + + def generate_ssp_markdown( + self, + profile: str, + output: str, + compdefs: str, + force_overwrite: bool = False, + yaml_header: str = '', + overwrite_header_values: bool = False + ) -> bool: + """Generate ssp markdown from OSCAL Profile and Component Definitions.""" + logger.debug(f'Generating markdown for {output} of type ssp.') + success = False + + verbose = log.get_current_verbosity_level(logger) + args = argparse.Namespace( + profile=profile, + output=output, + compdefs=compdefs, + trestle_root=self.root_dir, + force_overwrite=force_overwrite, + yaml_header=yaml_header, + overwrite_header_values=overwrite_header_values, + verbose=verbose + ) + + try: + ret = sspauthorcmd.SSPGenerate()._run(args) + if ret == 0: + success = True + except Exception as e: + raise TrestleError(f'Error in generating markdown for ssp {output}: {e}') + + logger.debug(f'Model {output} markdown generated successfully.') + return success diff --git a/trestle/tasks/csv_to_oscal_cd.py b/trestle/tasks/csv_to_oscal_cd.py index 9a6f65d89..e792c54cf 100644 --- a/trestle/tasks/csv_to_oscal_cd.py +++ b/trestle/tasks/csv_to_oscal_cd.py @@ -54,11 +54,16 @@ PROFILE_DESCRIPTION = 'Profile_Description' CHECK_ID = 'Check_Id' CHECK_DESCRIPTION = 'Check_Description' +FETCHER_ID = 'Fetcher_Id' +FETCHER_DESCRIPTION = 'Fetcher_Description' PARAMETER_ID = 'Parameter_Id' PARAMETER_DESCRIPTION = 'Parameter_Description' PARAMETER_VALUE_DEFAULT = 'Parameter_Value_Default' PARAMETER_VALUE_ALTERNATIVES = 'Parameter_Value_Alternatives' +validation = 'validation' +prefix_rule_set = 'rule_set_' + logger = logging.getLogger(__name__) @@ -118,11 +123,20 @@ def print_info(self) -> None: text3 = ' [1st row are column headings; 2nd row are column descriptions; 3rd row and beyond is data]' logger.info(text1 + text2 + text3) text1 = ' required columns: ' - for text2 in CsvColumn.columns_required: + for text2 in CsvColumn.get_required_column_names(): + if text2 in ['Rule_Description', 'Profile_Source', 'Profile_Description', 'Control_Id_List']: + text2 += '*' logger.info(text1 + text2) text1 = ' ' text1 = ' optional columns: ' - for text2 in CsvColumn.columns_optional: + for text2 in CsvColumn.get_optional_column_names(): + if text2 in ['Parameter_Id', + 'Parameter_Description', + 'Parameter_Value_Alternatives', + 'Parameter_Value_Default']: + text2 += '*' + if text2 in ['Check_Id', 'Check_Description']: + text2 += '+' logger.info(text1 + text2) text1 = ' ' text1 = ' output-dir = ' @@ -140,6 +154,16 @@ def print_info(self) -> None: text1 = ' validate-controls = ' text2 = '(optional) on, warn, or off [default]; validate controls exist in resolved profile.' 
logger.info(text1 + text2) + # Notes + text1 = '' + text2 = '' + logger.info(text1 + text2) + text1 = 'Notes: ' + text2 = '* column is ignored for validation component type' + logger.info(text1 + text2) + text1 = ' ' + text2 = '+ column is required for validation component type' + logger.info(text1 + text2) def configure(self) -> bool: """Configure.""" @@ -464,18 +488,25 @@ def rules_add(self, add_rules: List[str]) -> None: # props component.props = as_list(component.props) component.props = component.props + self._create_rule_props(rule_key) - # control implementation - source = self._csv_mgr.get_value(rule_key, PROFILE_SOURCE) - description = self._csv_mgr.get_value(rule_key, PROFILE_DESCRIPTION) - control_implementation = self._get_control_implementation(component, source, description) - # set-parameter - set_parameter = self._create_set_parameter(rule_key) - if set_parameter: - control_implementation.set_parameters = as_list(control_implementation.set_parameters) - _OscalHelper.add_set_parameter(control_implementation.set_parameters, set_parameter) - # control-mappings - control_mappings = self._csv_mgr.get_value(rule_key, CONTROL_ID_LIST).split() - self._add_rule_prop(control_implementation, control_mappings, rule_key) + # additional props, when not validation component + if not self._is_validation(rule_key): + # control implementation + source = self._csv_mgr.get_value(rule_key, PROFILE_SOURCE) + description = self._csv_mgr.get_value(rule_key, PROFILE_DESCRIPTION) + control_implementation = self._get_control_implementation(component, source, description) + # set-parameter + set_parameter = self._create_set_parameter(rule_key) + if set_parameter: + control_implementation.set_parameters = as_list(control_implementation.set_parameters) + _OscalHelper.add_set_parameter(control_implementation.set_parameters, set_parameter) + # control-mappings + control_mappings = self._csv_mgr.get_value(rule_key, CONTROL_ID_LIST).split() + self._add_rule_prop(control_implementation, control_mappings, rule_key) + + def _is_validation(self, rule_key: tuple) -> bool: + """Check for validation component.""" + component_type = self._csv_mgr.get_value(rule_key, COMPONENT_TYPE) + return component_type.lower() == validation def _add_rule_prop( self, control_implementation: ControlImplementation, control_mappings: List[str], rule_key: tuple @@ -506,26 +537,29 @@ def _create_rule_props(self, rule_key: tuple) -> List[Property]: rule_set = self._rule_set_id_mgr.get_next_rule_set_id() row_number = self._csv_mgr.get_row_number(rule_key) rule_set_mgr = _RuleSetMgr(row_number, rule_set) - column_names = CsvColumn.get_filtered_required_column_names() + CsvColumn.get_filtered_optional_column_names() namespace = self._get_namespace(rule_key) + if self._is_validation(rule_key): + column_names = CsvColumn.get_check_property_column_names() + else: + column_names = CsvColumn.get_rule_property_column_names() # req'd & optional props for column_name in column_names: prop_name = self._get_prop_name(column_name) prop_value = self._csv_mgr.get_value(rule_key, column_name).strip() rule_set_mgr.add_prop(prop_name, prop_value, namespace, self.get_class(prop_name)) - # parameter columns - column_names = CsvColumn.get_parameter_column_names() - for column_name in column_names: - prop_name = self._get_prop_name(column_name) - prop_value = self._csv_mgr.get_value(rule_key, column_name).strip() - rule_set_mgr.add_prop(prop_name, prop_value, namespace, self.get_class(prop_name)) + if not self._is_validation(rule_key): + # parameter columns 
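+            # Parameter_Id, Parameter_Description and Parameter_Value_Alternatives become
+            # rule-set props only for non-validation components; validation rows skip them.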
+ column_names = CsvColumn.get_parameter_column_names() + for column_name in column_names: + prop_name = self._get_prop_name(column_name) + prop_value = self._csv_mgr.get_value(rule_key, column_name).strip() + rule_set_mgr.add_prop(prop_name, prop_value, namespace, self.get_class(prop_name)) # user props column_names = self._csv_mgr.get_user_column_names() for column_name in column_names: prop_name = self._get_prop_name(column_name) prop_value = self._csv_mgr.get_value(rule_key, column_name).strip() rule_set_mgr.add_prop(prop_name, prop_value, namespace, self.get_class(prop_name)) - rule_set_mgr.validate() return rule_set_mgr.get_props() def _get_control_implementation( @@ -542,14 +576,25 @@ def _get_control_implementation( component.control_implementations.append(control_implementation) return control_implementation + def _str_to_list(self, value: str) -> List[str]: + """Transform string to list.""" + rval = [] + if ',' in value: + values = value.split(',') + # remove leading/trailing whitespace + for v in values: + rval.append(v.strip()) + else: + rval.append(value) + return rval + def _create_set_parameter(self, rule_key: tuple) -> SetParameter: """Create create set parameters.""" set_parameter = None name = self._csv_mgr.get_value(rule_key, PARAMETER_ID) value = self._csv_mgr.get_value(rule_key, PARAMETER_VALUE_DEFAULT) if name and value: - value = self._csv_mgr.get_value(rule_key, PARAMETER_VALUE_DEFAULT) - values = value.split(',') + values = self._str_to_list(value) set_parameter = SetParameter( param_id=name, values=values, @@ -832,7 +877,7 @@ def __init__(self, max_rule_set_number: int, add_rules_count: int) -> None: def get_next_rule_set_id(self) -> str: self._prev_rule_set_number += 1 - rval = f'rule_set_{str(self._prev_rule_set_number).zfill(self._fill_sz)}' + rval = f'{prefix_rule_set}{str(self._prev_rule_set_number).zfill(self._fill_sz)}' return rval @@ -857,15 +902,6 @@ def add_prop(self, name: str, value: str, ns: str, class_: str) -> None: ) self._props[name] = prop - def validate(self) -> None: - """Validate.""" - if PARAMETER_ID not in self._props.keys(): - forbidden = CsvColumn.get_parameter_dependent_column_names() - for name in self._props.keys(): - if name in forbidden: - text = f'row "{self._row_number}" invalid "{name}"' - raise RuntimeError(text) - def get_props(self) -> List[Property]: """Get props.""" rval = [] @@ -900,7 +936,7 @@ def _initialize(self): for profile in self._profile_list: catalog = ProfileResolver.get_resolved_profile_catalog( pathlib.Path(self._root), - pathlib.Path(profile), + profile, ) self._profile_map[profile] = catalog controls = CatalogInterface.get_control_ids_from_catalog(catalog) @@ -1046,7 +1082,7 @@ def accounting_rule_definitions(self, component: DefinedComponent) -> None: value = prop.remarks self._cd_rules_map[key] = value logger.debug(f'cd: {key} {self._cd_rules_map[key]}') - rule_set_number = int(value.replace('rule_set_', '')) + rule_set_number = int(value.replace(f'{prefix_rule_set}', '')) if rule_set_number > self._max_rule_set_number: self._max_rule_set_number = rule_set_number @@ -1207,7 +1243,7 @@ def delete_property(self, component: DefinedComponent, rule_set: str, name: str) class CsvColumn(): """CsvColumn.""" - columns_required = [ + _columns_required = [ f'{COMPONENT_TITLE}', f'{COMPONENT_DESCRIPTION}', f'{COMPONENT_TYPE}', @@ -1219,18 +1255,7 @@ class CsvColumn(): f'{NAMESPACE}', ] - # columns required which do not become properties - columns_required_filtered = [ - f'{COMPONENT_TITLE}', - f'{COMPONENT_DESCRIPTION}', 
- f'{COMPONENT_TYPE}', - f'{PROFILE_SOURCE}', - f'{PROFILE_DESCRIPTION}', - f'{CONTROL_ID_LIST}', - f'{NAMESPACE}', - ] - - columns_optional = [ + _columns_optional = [ f'{PARAMETER_ID}', f'{PARAMETER_DESCRIPTION}', f'{PARAMETER_VALUE_ALTERNATIVES}', @@ -1239,35 +1264,21 @@ class CsvColumn(): f'{CHECK_DESCRIPTION}', ] - # optional columns which do not become properties, initially - columns_optional_filtered = [ - f'{PARAMETER_ID}', - f'{PARAMETER_DESCRIPTION}', - f'{PARAMETER_VALUE_ALTERNATIVES}', - f'{PARAMETER_VALUE_DEFAULT}', - ] - - # optional columns which do become properties, afterwards - columns_parameters = [ - f'{PARAMETER_ID}', - f'{PARAMETER_DESCRIPTION}', - f'{PARAMETER_VALUE_ALTERNATIVES}', - ] - - # optional columns which require Param_Id be present in the row - columns_parameters_dependent = [ - f'{PARAMETER_DESCRIPTION}', - f'{PARAMETER_VALUE_ALTERNATIVES}', - f'{PARAMETER_VALUE_DEFAULT}', + _columns_required_validation = [ + f'{COMPONENT_TITLE}', + f'{COMPONENT_DESCRIPTION}', + f'{COMPONENT_TYPE}', + f'{RULE_ID}', + f'{NAMESPACE}', + f'{CHECK_ID}', + f'{CHECK_DESCRIPTION}', ] - columns_filtered = columns_required_filtered + columns_optional_filtered - @staticmethod def get_order(column_name: str) -> int: """Get order for column_name.""" rval = sys.maxsize - columns_ordered = CsvColumn.columns_required + CsvColumn.columns_optional + columns_ordered = CsvColumn._columns_required + CsvColumn._columns_optional if column_name in columns_ordered: rval = columns_ordered.index(column_name) return rval @@ -1276,23 +1287,74 @@ def get_order(column_name: str) -> int: def get_required_column_names() -> List[str]: """Get required column names.""" rval = [] - rval += CsvColumn.columns_required + rval += CsvColumn._columns_required return rval @staticmethod - def get_filtered_required_column_names() -> List[str]: - """Get filtered required column names.""" + def get_optional_column_names() -> List[str]: + """Get optional column names.""" rval = [] - for column_name in CsvColumn.get_required_column_names(): - if column_name not in CsvColumn.columns_filtered: - rval.append(column_name) + rval += CsvColumn._columns_optional return rval @staticmethod - def get_optional_column_names() -> List[str]: - """Get optional column names.""" + def get_reserved_column_names() -> List[str]: + """Get reserved column names.""" + rval = [] + rval += CsvColumn._columns_required + rval += CsvColumn._columns_optional + return rval + + @staticmethod + def get_required_column_names_validation() -> List[str]: + """Get required column names validation.""" + rval = [] + rval += CsvColumn._columns_required_validation + return rval + + _rule_property_column_names = [ + f'{RULE_ID}', + f'{RULE_DESCRIPTION}', + f'{PARAMETER_ID}', + f'{PARAMETER_DESCRIPTION}', + f'{PARAMETER_VALUE_ALTERNATIVES}', + f'{CHECK_ID}', + f'{CHECK_DESCRIPTION}', + ] + + @staticmethod + def get_rule_property_column_names() -> List[str]: + """Get rule property column names.""" + return CsvColumn._rule_property_column_names + + # columns required which do not become properties + _columns_required_filtered = [ + f'{COMPONENT_TITLE}', + f'{COMPONENT_DESCRIPTION}', + f'{COMPONENT_TYPE}', + f'{PROFILE_SOURCE}', + f'{PROFILE_DESCRIPTION}', + f'{CONTROL_ID_LIST}', + f'{NAMESPACE}', + ] + + # optional columns which do not become properties, initially + _columns_optional_filtered = [ + f'{PARAMETER_ID}', + f'{PARAMETER_DESCRIPTION}', + f'{PARAMETER_VALUE_ALTERNATIVES}', + f'{PARAMETER_VALUE_DEFAULT}', + ] + + _columns_filtered = 
_columns_required_filtered + _columns_optional_filtered + + @staticmethod + def get_filtered_required_column_names() -> List[str]: + """Get filtered required column names.""" rval = [] - rval += CsvColumn.columns_optional + for column_name in CsvColumn.get_required_column_names(): + if column_name not in CsvColumn._columns_filtered: + rval.append(column_name) return rval @staticmethod @@ -1300,31 +1362,41 @@ def get_filtered_optional_column_names() -> List[str]: """Get filtered optional column names.""" rval = [] for column_name in CsvColumn.get_optional_column_names(): - if column_name not in CsvColumn.columns_filtered: + if column_name not in CsvColumn._columns_filtered: rval.append(column_name) return rval + _check_property_column_names = [ + f'{RULE_ID}', + f'{CHECK_ID}', + f'{CHECK_DESCRIPTION}', + ] + @staticmethod - def get_reserved_column_names() -> List[str]: - """Get reserved column names.""" - rval = [] - rval += CsvColumn.columns_required - rval += CsvColumn.columns_optional - return rval + def get_check_property_column_names() -> List[str]: + """Get check property column names.""" + return CsvColumn._check_property_column_names + + # optional columns which do become properties, afterwards + _columns_parameters = [ + f'{PARAMETER_ID}', + f'{PARAMETER_DESCRIPTION}', + f'{PARAMETER_VALUE_ALTERNATIVES}', + ] @staticmethod def get_parameter_column_names() -> List[str]: """Get parameter column names.""" rval = [] - rval += CsvColumn.columns_parameters + rval += CsvColumn._columns_parameters return rval - @staticmethod - def get_parameter_dependent_column_names() -> List[str]: - """Get parameter dependent column names.""" - rval = [] - rval += CsvColumn.columns_parameters_dependent - return rval + # optional columns which require Param_Id be present in the row + _columns_parameters_dependent = [ + f'{PARAMETER_DESCRIPTION}', + f'{PARAMETER_VALUE_ALTERNATIVES}', + f'{PARAMETER_VALUE_DEFAULT}', + ] class _CsvMgr(): @@ -1344,6 +1416,8 @@ def __init__(self, csv_path: pathlib.Path) -> None: self._csv_controls_map = {} self._csv_profile_list = [] for row_num, row in self.row_generator(): + if self._is_no_control(row): + continue self._check_row_minimum_requirements(row_num, row) component_title = self.get_row_value(row, f'{COMPONENT_TITLE}') component_type = self.get_row_value(row, f'{COMPONENT_TYPE}') @@ -1393,20 +1467,42 @@ def row_generator(self) -> Generator[Union[int, Iterator[List[str]]], None, None index += 1 if index < 3: continue - control_mappings = self.get_row_value(row, CONTROL_ID_LIST).strip() - if not len(control_mappings): - continue logger.debug(f'row_gen: {index} {row}') yield index, row def _check_row_minimum_requirements(self, row_num: int, row: List) -> None: """Check row minimum requirements.""" - for column_name in CsvColumn.get_required_column_names(): + if self._is_component_type_validation(row): + column_names = CsvColumn.get_required_column_names_validation() + else: + column_names = CsvColumn.get_required_column_names() + for column_name in column_names: value = self.get_row_value(row, column_name) if value is None or value == '': text = f'row "{row_num}" missing value for "{column_name}"' raise RuntimeError(text) + def _is_no_control(self, row: List) -> bool: + """Check for no control.""" + if self._is_component_type_validation(row): + rval = False + else: + control_id_list = self.get_row_value(row, f'{CONTROL_ID_LIST}') + if control_id_list.strip() == '': + rval = True + else: + rval = False + return rval + + def _is_component_type_validation(self, row: List) 
-> bool: + """Check for component type validation.""" + component_type = self.get_row_value(row, f'{COMPONENT_TYPE}') + if component_type.lower().strip() == validation: + rval = True + else: + rval = False + return rval + def _undecorate_header(self) -> None: """Undecorate header.""" head_row = self._csv[0] diff --git a/trestle/transforms/implementations/xccdf.py b/trestle/transforms/implementations/xccdf.py index e4ee26be6..fa4fbf3f6 100644 --- a/trestle/transforms/implementations/xccdf.py +++ b/trestle/transforms/implementations/xccdf.py @@ -111,7 +111,7 @@ def _ingest_xml(self, blob: str) -> Optional[Results]: results.__root__.append(self._results_factory.result) return results - def _ingest_configmaps(self, jdata: str) -> None: + def _ingest_configmaps(self, jdata: dict) -> None: """Ingest configmaps.""" items = jdata['items'] for item in items: @@ -121,7 +121,7 @@ def _ingest_configmaps(self, jdata: str) -> None: resource = item self._results_factory.ingest(resource) - def _ingest_auditree(self, jdata: str) -> None: + def _ingest_auditree(self, jdata: dict) -> None: """Ingest auditree.""" for key in jdata.keys(): for group in jdata[key]: