Skip to content

Commit

Permalink
Merge pull request #458 from NREL/res-reporting2
Browse files Browse the repository at this point in the history
  • Loading branch information
rajeee authored Jul 19, 2024
2 parents 383b1b8 + cc152d2 commit bf86749
Show file tree
Hide file tree
Showing 51 changed files with 54,100 additions and 56 deletions.
89 changes: 57 additions & 32 deletions buildstockbatch/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,11 +81,16 @@ def get_sampler_class(sampler_name):
return getattr(sampler, sampler_class_name)

@staticmethod
def get_workflow_generator_class(workflow_generator_block):
    """Resolve the workflow generator class for a config block.

    :param workflow_generator_block: the ``workflow_generator`` section of the
        project config; must contain ``type`` and may contain ``version``.
    :returns: the generator class registered for that type and version.
    :raises ValidationError: if the requested version is not registered
        for the given generator type.
    """
    generator_type = workflow_generator_block["type"]
    # version can be missing in older schema -> default to latest
    generator_version = workflow_generator_block.get("version", "2024.07.18")
    if generator_version not in workflow_generator.version2GeneratorClass[generator_type]:
        raise ValidationError(
            f"Invalid generator version {generator_version} for {generator_type}. "
            f"Available versions are {workflow_generator.version2GeneratorClass[generator_type].keys()}"
        )
    return workflow_generator.version2GeneratorClass[generator_type][generator_version]

@property
def sampler(self):
Expand Down Expand Up @@ -141,7 +146,7 @@ def get_reporting_measures(cls, cfg):
if "reporting_measure_names" in cfg:
return cfg["reporting_measure_names"]

WorkflowGenerator = cls.get_workflow_generator_class(cfg["workflow_generator"]["type"])
WorkflowGenerator = cls.get_workflow_generator_class(cfg["workflow_generator"])
wg = WorkflowGenerator(cfg, 1) # Number of datapoints doesn't really matter here
return wg.reporting_measures()

Expand All @@ -150,7 +155,7 @@ def run_batch(self):

@classmethod
def create_osw(cls, cfg, n_datapoints, *args, **kwargs):
    """Create an OpenStudio workflow (OSW) via the configured workflow generator.

    :param cfg: the project configuration dict.
    :param n_datapoints: number of datapoints in the sample.
    :returns: whatever the generator's ``create_osw`` returns (the OSW dict).
    """
    WorkflowGenerator = cls.get_workflow_generator_class(cfg["workflow_generator"])
    osw_generator = WorkflowGenerator(cfg, n_datapoints)
    return osw_generator.create_osw(*args, **kwargs)

Expand Down Expand Up @@ -370,7 +375,7 @@ def validate_buildstock_csv(project_file, buildstock_df):
@classmethod
def validate_workflow_generator(cls, project_file):
    """Validate the ``workflow_generator`` section of the project config.

    :param project_file: path to the project YAML file.
    :returns: the result of the generator's own ``validate()``.
    """
    cfg = get_project_configuration(project_file)
    WorkflowGenerator = cls.get_workflow_generator_class(cfg["workflow_generator"])
    return WorkflowGenerator(cfg, 1).validate()

@staticmethod
Expand Down Expand Up @@ -782,36 +787,56 @@ def validate_reference_scenario(project_file):

return True # Only print the warning, but always pass the validation

@staticmethod
def get_stock_version_info(project_file):
    """Read version constants out of the buildstock's ``resources/buildstock.rb``.

    :param project_file: path to the project YAML file.
    :returns: dict mapping version names (ResStock, ComStock, BuildStockBatch,
        WorkflowGenerator) to version strings; empty dict when the Ruby
        version file does not exist.
    """
    config = get_project_configuration(project_file)
    stock_dir = BuildStockBatchBase.get_buildstock_dir(project_file, config)
    version_file = os.path.join(stock_dir, "resources/buildstock.rb")
    if not os.path.exists(version_file):
        return {}

    # Lines look like e.g. ResStock_Version = '3.1.0'
    pattern = r"^\s*(ResStock|ComStock|BuildStockBatch|WorkflowGenerator)_Version\s*=\s*'(.+)'"
    with open(version_file, "r") as f:
        contents = f.read()
    return {name: ver for name, ver in re.findall(pattern, contents, re.MULTILINE)}

@staticmethod
def validate_resstock_or_comstock_version(project_file):
    """
    Checks the minimum required version of BuildStockBatch against the version being used,
    and that the yaml's workflow generator version matches what the buildstock expects.

    :param project_file: path to the project YAML file.
    :returns: True when validation passes.
    :raises ValidationError: when the installed BuildStockBatch is older than
        the buildstock requires, or the workflow generator version is unknown
        or mismatched.
    """
    cfg = get_project_configuration(project_file)

    version_info = BuildStockBatchBase.get_stock_version_info(project_file)
    if not version_info:
        # No resources/buildstock.rb found -> nothing to check against.
        return True
    BuildStockBatch_Version = semver.Version.parse(version_info["BuildStockBatch"])
    if bsb_version < BuildStockBatch_Version:
        if "ResStock" in version_info:
            stock_version = version_info["ResStock"]
        elif "ComStock" in version_info:
            stock_version = version_info["ComStock"]
        else:
            stock_version = "Unknown"
        val_err = (
            f"BuildStockBatch version {BuildStockBatch_Version} or above is required"
            f" for ResStock or ComStock version {stock_version}. Found {bsb_version}"
        )
        raise ValidationError(val_err)
    # version can be missing in older schema -> default to latest
    wg_version = cfg["workflow_generator"].get("version", "2024.07.18")
    wg_type = cfg["workflow_generator"]["type"]
    if wg_version not in workflow_generator.version2info[wg_type]:
        raise ValidationError(f"Workflow generator version {wg_version} not found")
    expected_version = version_info.get("WorkflowGenerator", "2024.07.18")
    if wg_version != expected_version:
        raise ValidationError(
            f"Workflow generator version {expected_version} is required by the buildstock. "
            f"The yaml is asking for {wg_version}"
        )

    return True

Expand Down
169 changes: 169 additions & 0 deletions buildstockbatch/schemas/v0.4.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,169 @@
schema_version: enum('0.4')
buildstock_directory: str()
project_directory: str(required=True)
weather_files_path: str(required=False)
weather_files_url: str(required=False)
sampler: include('sampler-spec', required=True)
workflow_generator: include('workflow-generator-spec', required=True)
eagle: include('hpc-spec', required=False)
kestrel: include('hpc-spec', required=False)
gcp: include('gcp-spec', required=False)
aws: include('aws-spec', required=False)
output_directory: regex('^(.*\/)?[a-z][a-z0-9_]*\/?$', required=True)
sys_image_dir: str(required=False)
baseline: include('sim-spec', required=True)
os_version: str(required=True)
os_sha: str(required=True)
max_minutes_per_sim: int(max=1440, required=False)
upgrades: list(include('upgrade-spec'), required=False)
postprocessing: include('postprocessing-spec', required=False)
references: map(required=False)
---
gcp-spec:
# The GCP job ID (for Batch and Cloud Run) pattern is `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`.
# For postprocessing job id, we append 3 characters ("-pp"), so this can be up to 60 chars.
job_identifier: regex('^[a-z]([a-z0-9-]{0,58}[a-z0-9])?$', required=True)
project: str(required=True)
region: str(required=True)
service_account: str(required=False)
artifact_registry: include('gcp-ar-spec', required=True)
batch_array_size: num(min=1, max=10000, required=True)
parallelism: num(min=1, max=10000, required=False)
gcs: include('gcs-spec', required=True)
job_environment: include('gcp-job-environment-spec', required=False)
postprocessing_environment: include('gcp-postprocessing_environment-spec', required=False)

gcs-spec:
bucket: str(required=True)
prefix: str(required=True)
upload_chunk_size_mib: num(min=5, max=5000, required=False)

gcp-ar-spec:
repository: str(required=True)

gcp-job-environment-spec:
vcpus: int(min=1, max=224, required=False)
memory_mib: int(min=512, required=False)
boot_disk_mib: int(required=False)
machine_type: str(required=False)
use_spot: bool(required=False)
minutes_per_sim: num(min=0.05, max=480, required=False)

gcp-postprocessing_environment-spec:
# Limits documented at
# https://cloud.google.com/run/docs/configuring/services/memory-limits
# https://cloud.google.com/run/docs/configuring/services/cpu
cpus: int(min=1, max=8, required=False)
memory_mib: int(min=512, max=32768, required=False)

aws-spec:
job_identifier: regex('^[a-zA-Z]\w{,9}$', required=True)
s3: include('s3-aws-postprocessing-spec', required=True)
region: str(required=True)
use_spot: bool(required=False)
spot_bid_percent: num(min=1, max=100, required=False)
batch_array_size: num(min=1, max=10000, required=True)
notifications_email: regex('^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$', name='email', required=True)
dask: include('aws-dask-spec', required=True)
job_environment: include('aws-job-environment', required=False)
tags: map(str(), str(), required=False)

aws-job-environment:
vcpus: int(min=1, max=36, required=False)
memory: int(min=1024, required=False)

aws-dask-spec:
scheduler_cpu: enum(1024, 2048, 4096, 8192, 16384, required=False)
scheduler_memory: int(min=1024, required=False)
worker_cpu: enum(1024, 2048, 4096, 8192, 16384, required=False)
worker_memory: int(min=1024, required=False)
n_workers: int(min=1, required=True)

hpc-spec:
account: str(required=True)
minutes_per_sim: num(min=0.05, max=480, required=True)
n_jobs: int(required=False)
postprocessing: include('hpc-postprocessing-spec', required=False)
sampling: include('sampling-spec', required=False)

hpc-postprocessing-spec:
time: int(required=True)
n_workers: int(min=1, max=32, required=False)
node_memory_mb: int(min=85248, max=751616, required=False)
n_procs: int(min=1, max=36, required=False)
parquet_memory_mb: int(min=100, max=4096, required=False)


sampler-spec:
type: str(required=True)
args: map(key=regex(r'^[a-zA-Z_]\w*$', name='valid variable name'), required=False)

workflow-generator-spec:
type: enum('residential_hpxml', 'commercial_default', required=True)
version: str(required=True)
args: map(key=regex(r'^[a-zA-Z_]\w*$', name='valid variable name'), required=False)

sampling-spec:
time: int(required=True)

sim-spec:
n_buildings_represented: int(required=True)
skip_sims: bool(required=False)
custom_gems: bool(required=False)

upgrade-spec:
upgrade_name: str(required=True)
options: list(include('option-spec'), required=True)
package_apply_logic: include('apply-logic-spec', required=False)
reference_scenario: str(required=False)

option-spec:
option: include('param_option-spec', required=True)
apply_logic: include('apply-logic-spec', required=False)
costs: list(include('cost-spec'), required=False)
lifetime: num(required=False)

param_option-spec: str(exclude=':(){}[]')

apply-logic-spec: >
any(
list(
include('and-spec'),
include('or-spec'),
include('not-spec'),
include('param_option-spec'),
),
include('and-spec'),
include('or-spec'),
include('not-spec'),
include('param_option-spec')
)
or-spec:
or: list(include('apply-logic-spec'))
and-spec:
and: list(include('apply-logic-spec'))
not-spec:
not: any(include('apply-logic-spec'), list(include('apply-logic-spec')))

cost-spec:
value: num(required=True)
multiplier: str(required=True)

postprocessing-spec:
partition_columns: list(str(), required=False)
aws: include('aws-postprocessing-spec', required=False)
keep_individual_timeseries: bool(required=False)

aws-postprocessing-spec:
region_name: str(required=False)
s3: include('s3-aws-postprocessing-spec', required=True)
athena: include('athena-aws-postprocessing-spec', required=False)

s3-aws-postprocessing-spec:
bucket: str(required=True)
prefix: str(required=True)

athena-aws-postprocessing-spec:
glue_service_role: str(required=False)
database_name: regex('^[a-z][a-z0-9_]*$', required=True)
  max_crawling_time: num(required=False)
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@
<schema_version>3.1</schema_version>
<name>upgrade_costs</name>
<uid>ef51212c-acc4-48d7-9b29-cf2a5c6c4449</uid>
<version_id>91316c37-1384-4da6-bfc7-f3ffff7479bd</version_id>
<version_modified>2024-05-16T22:50:36Z</version_modified>
<xml_checksum>9BF1E6AC</xml_checksum>
<version_id>4761c00c-8f1a-4a74-924a-1d45c1a3722f</version_id>
<version_modified>2024-06-07T21:06:10Z</version_modified>
<xml_checksum>B1F14CB4</xml_checksum>
<class_name>UpgradeCosts</class_name>
<display_name>Upgrade Costs</display_name>
<description>Measure that calculates upgrade costs.</description>
Expand Down Expand Up @@ -39,7 +39,7 @@
<attributes>
<attribute>
<name>Measure Type</name>
<value>ReportingMeasure</value>
<value>ModelMeasure</value>
<datatype>string</datatype>
</attribute>
<attribute>
Expand Down Expand Up @@ -75,7 +75,13 @@
<filename>measure.rb</filename>
<filetype>rb</filetype>
<usage_type>script</usage_type>
<checksum>9CBCFE21</checksum>
<checksum>B50B2AE9</checksum>
</file>
<file>
<filename>constants.rb</filename>
<filetype>rb</filetype>
<usage_type>resource</usage_type>
<checksum>64289999</checksum>
</file>
<file>
<filename>MF_1story_UB_Furnace_AC1_FuelTankWH.osw</filename>
Expand Down Expand Up @@ -213,7 +219,7 @@
<filename>upgrade_costs_test.rb</filename>
<filetype>rb</filetype>
<usage_type>test</usage_type>
<checksum>4695A458</checksum>
<checksum>5087DD5F</checksum>
</file>
</files>
</measure>
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
class Version
ResStock_Version = '3.1.0' # Version of ResStock
BuildStockBatch_Version = '2023.5.0' # Minimum required version of BuildStockBatch

WorkflowGenerator_Version = '2024.07.18'
def self.check_buildstockbatch_version
if ENV.keys.include?('BUILDSTOCKBATCH_VERSION') # buildstockbatch is installed
bsb_version = ENV['BUILDSTOCKBATCH_VERSION']
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
schema_version: '0.4'
buildstock_directory: test_openstudio_buildstock
project_directory: project_singlefamilydetached
weather_files_url: https://fake-url
baseline:
n_buildings_represented: 81221016

sampler:
type: residential_quota
args:
n_datapoints: 30

workflow_generator:
type: residential_hpxml
version: "vNotARealVersion"
args:
build_existing_model:
simulation_control_timestep: 60
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
schema_version: '0.4'
buildstock_directory: test_openstudio_buildstock
project_directory: project_singlefamilydetached
weather_files_url: https://fake-url
baseline:
n_buildings_represented: 81221016

sampler:
type: residential_quota
args:
n_datapoints: 30

workflow_generator:
type: residential_hpxml
version: "latest"
args:
build_existing_model:
simulation_control_timestep: 60
Loading

0 comments on commit bf86749

Please sign in to comment.