From d08b9c83c4643aa4ae230ad4e82b1f139873b3ac Mon Sep 17 00:00:00 2001
From: Edwin Lee
Date: Wed, 12 Jun 2024 13:51:43 -0500
Subject: [PATCH 1/5] Tentative attempt to move to boto3

---
 .../diffs/ci_compare_script.py | 94 +++++++++----------
 requirements.txt               |  2 +-
 2 files changed, 47 insertions(+), 49 deletions(-)

diff --git a/energyplus_regressions/diffs/ci_compare_script.py b/energyplus_regressions/diffs/ci_compare_script.py
index f948331..580b9d5 100644
--- a/energyplus_regressions/diffs/ci_compare_script.py
+++ b/energyplus_regressions/diffs/ci_compare_script.py
@@ -7,6 +7,7 @@
 import os
 import sys
 from datetime import datetime
+from tempfile import mkdtemp

 # add the root of the repo to the python path so it can find things relative to it
 # like the energyplus_regressions package
@@ -45,7 +46,7 @@ def process_diffs(diff_name, diffs, this_has_diffs, this_has_small_diffs):
     return this_has_diffs, this_has_small_diffs


-def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, make_public, device_id, test_mode):
+def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, _make_public, device_id, test_mode):
     print("Device id: %s" % device_id)

     # build type really doesn't matter, so use the simplest one, the E+ install
@@ -223,7 +224,7 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, make_public,
     if test_mode:
         print("Skipping Amazon upload in test_mode operation")
     elif has_small_diffs or has_diffs:  # pragma: no cover -- not testing the Amazon upload anytime soon
-        import boto
+        import boto3

         # so ... if you want to run tests of this script including the Amazon side, you need to pass in Amazon creds
         # to the boto connect_s3 method. To run this test, put the amazon key and secret in a file, one per line.
@@ -233,10 +234,9 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, make_public,
         # bucket, making them public, and reporting the URL in the output

         # file_data = open('/path/to/s3/creds.txt').read().split('\n')
-        # conn = boto.connect_s3(file_data[0], file_data[1])
-        conn = boto.connect_s3()
+        # s3 = boto3.client('s3', aws_access_key_id=file_data[0], aws_secret_access_key=file_data[1])
+        s3 = boto3.client('s3')
         bucket_name = 'energyplus'
-        bucket = conn.get_bucket(bucket_name)

         potential_files = get_diff_files(base_dir)
@@ -256,27 +256,20 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, make_public,
                 continue

             try:
-                file_path = "{0}/{1}".format(file_dir, os.path.basename(filename))
+                n = os.path.basename(filename)
+                file_path = f"{file_dir}/{n}"
                 # print("Processing output file: {0}, uploading to: {1}".format(filepath_to_send, filepath))
-
-                key = boto.s3.key.Key(bucket, file_path)
-                with open(file_path_to_send, 'r') as file_to_send:
-                    contents = file_to_send.read()
-                    key.set_contents_from_string(contents)
-
-                if make_public:
-                    key.make_public()
-
-                htmlkey = boto.s3.key.Key(bucket, file_path + ".html")
-
-                if file_path_to_send.endswith('.htm'):
-                    htmlkey.set_contents_from_string(
-                        contents,
-                        headers={"Content-Type": "text/html", "Content-Disposition": "inline"}
-                    )
-                else:
-                    htmlkey.set_contents_from_string(
-                        """
+                if file_path_to_send.endswith('.htm'):
+                    s3.upload_file(
+                        file_path, bucket_name, file_path + ".html",
+                        ExtraArgs={'ACL': 'public-read', "ContentType": "text/html", "ContentDisposition": "inline"}
+                    )
+                else:
+                    # if it's not an HTML file, wrap it inside an HTML wrapper in a temp file and send it
+                    with open(file_path_to_send, 'r') as file_to_send:
+
+                        contents = file_to_send.read()
+                        new_contents = f"""
@@ -286,17 +279,19 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, make_public,

-""" + contents + """
+        {contents}
     
-
-                        """,
-                        headers={"Content-Type": "text/html"}
-                    )
-
-                if make_public:
-                    htmlkey.make_public()
+"""
+                        temp_dir = mkdtemp()
+                        new_file = f"{temp_dir}/{n}.html"
+                        with open(new_file, 'w') as f:
+                            f.write(new_contents)
+                        s3.upload_file(
+                            new_file, bucket_name, file_path,
+                            ExtraArgs={'ACL': 'public-read', "ContentType": "text/html"}
+                        )

                 found_files.append(filename)
             except Exception as e:
@@ -305,7 +300,8 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, _make_public,

         if len(found_files) > 0:
             try:
-                htmlkey = boto.s3.key.Key(bucket, file_dir + "/index.html")
+                temp_dir = mkdtemp()
+                new_file = f"{temp_dir}/index.html"

                 index = """
@@ -329,25 +325,27 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, make_public,
 """
                 for filename in found_files:
-                    filepath = "{0}/{1}".format(file_dir, os.path.basename(filename))
-                    index += ""
-                    index += os.path.basename(filename)
-                    index += "downloadview"
-
+                    n = os.path.basename(filename)
+                    index += f"""
+
+   {n}
+   download
+   view
+  """
                 index += """

-
-                """
+"""

-                htmlkey.set_contents_from_string(index, headers={"Content-Type": "text/html"})
+                with open(new_file, 'w') as f:
+                    f.write(index)

-                if make_public:
-                    htmlkey.make_public()
+                s3.upload_file(
+                    new_file,
+                    bucket_name,
+                    file_dir + "/index.html",
+                    ExtraArgs={'ACL': 'public-read', "ContentType": "text/html"}
+                )

                 url = "http://{0}.s3-website-{1}.amazonaws.com/{2}".format(bucket_name, "us-east-1", file_dir)
                 print("Regression Results".format(url))
diff --git a/requirements.txt b/requirements.txt
index 86b2713..7c6b370 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,7 +3,7 @@ pypubsub
 beautifulsoup4

 # if running with CI, this is needed for talking to s3
-boto
+boto3

 # for running tests
 coveralls

From 0a986554e01036495110026250e266d3da6aba1a Mon Sep 17 00:00:00 2001
From: Edwin Lee
Date: Thu, 13 Jun 2024 09:27:57 -0500
Subject: [PATCH 2/5] Fixup local filename in call to upload

---
 .../diffs/ci_compare_script.py | 48 ++++++++++---------
 1 file changed, 25 insertions(+), 23 deletions(-)

diff --git a/energyplus_regressions/diffs/ci_compare_script.py b/energyplus_regressions/diffs/ci_compare_script.py
index 580b9d5..84de8c1 100644
--- a/energyplus_regressions/diffs/ci_compare_script.py
+++ b/energyplus_regressions/diffs/ci_compare_script.py
@@ -238,36 +238,38 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, _make_public,
         s3 = boto3.client('s3')
         bucket_name = 'energyplus'

-        potential_files = get_diff_files(base_dir)
+        potential_local_diff_file_paths = get_diff_files(base_dir)

         date = datetime.now()
         date_str = "%d-%02d" % (date.year, date.month)
-        file_dir = "regressions/{0}/{1}-{2}/{3}/{4}".format(date_str, base_sha, mod_sha, file_name, device_id)
+        file_dir_once_uploaded = f"regressions/{date_str}/{base_sha}-{mod_sha}/{file_name}/{device_id}"

         found_files = []
-        for filename in potential_files:
-            file_path_to_send = filename
+        for local_file_path in potential_local_diff_file_paths:
+            file_path_to_send = local_file_path
+            # local_file_name = os.path.basename(file_path_to_send)
             # print("Processing output file: {0}".format(filepath_to_send))

             if not os.path.isfile(file_path_to_send):
                 continue
             if not os.stat(file_path_to_send).st_size > 0:
-                print("File is empty, not sending: {0}".format(file_path_to_send))
+                print(f"File is empty, not sending: {file_path_to_send}")
                 continue

             try:
-                n = os.path.basename(filename)
-                file_path = f"{file_dir}/{n}"
+                n = os.path.basename(local_file_path)
+                target_upload_file_path_no_extension = f"{file_dir_once_uploaded}/{n}"
                 # print("Processing output file: {0}, uploading to: {1}".format(filepath_to_send, filepath))
+                # TODO: In the previous code this would upload the raw original contents to s3 to the basename path with
+                # TODO: no extension, regardless of .htm or not. I'm not sure if that's still needed.
                 if file_path_to_send.endswith('.htm'):
                     s3.upload_file(
-                        file_path, bucket_name, file_path + ".html",
+                        file_path_to_send, bucket_name, target_upload_file_path_no_extension + ".html",
                         ExtraArgs={'ACL': 'public-read', "ContentType": "text/html", "ContentDisposition": "inline"}
                     )
                 else:
                     # if it's not an HTML file, wrap it inside an HTML wrapper in a temp file and send it
                     with open(file_path_to_send, 'r') as file_to_send:
-
                         contents = file_to_send.read()
                         new_contents = f"""
@@ -285,15 +287,15 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, _make_public,
 """
                         temp_dir = mkdtemp()
-                        new_file = f"{temp_dir}/{n}.html"
-                        with open(new_file, 'w') as f:
+                        local_fixed_up_file_path = f"{temp_dir}/{n}.html"
+                        with open(local_fixed_up_file_path, 'w') as f:
                             f.write(new_contents)
                         s3.upload_file(
-                            new_file, bucket_name, file_path,
+                            local_fixed_up_file_path, bucket_name, target_upload_file_path_no_extension,
                             ExtraArgs={'ACL': 'public-read', "ContentType": "text/html"}
                         )

-                found_files.append(filename)
+                found_files.append(local_file_path)
             except Exception as e:
                 success = False
                 print("There was a problem processing file: %s" % e)
@@ -301,7 +303,7 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, _make_public,
         if len(found_files) > 0:
             try:
                 temp_dir = mkdtemp()
-                new_file = f"{temp_dir}/index.html"
+                local_fixed_up_file_path = f"{temp_dir}/index.html"

                 index = """
@@ -324,31 +326,31 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, _make_public,
   filename
 """
-                for filename in found_files:
-                    n = os.path.basename(filename)
+                for local_file_path in found_files:
+                    n = os.path.basename(local_file_path)
                     index += f"""

   {n}
-   download
-   view
+   download
+   view
  """
                 index += """

 """
-                with open(new_file, 'w') as f:
+                with open(local_fixed_up_file_path, 'w') as f:
                     f.write(index)

                 s3.upload_file(
-                    new_file,
+                    local_fixed_up_file_path,
                     bucket_name,
-                    file_dir + "/index.html",
+                    file_dir_once_uploaded + "/index.html",
                     ExtraArgs={'ACL': 'public-read', "ContentType": "text/html"}
                 )

-                url = "http://{0}.s3-website-{1}.amazonaws.com/{2}".format(bucket_name, "us-east-1", file_dir)
-                print("Regression Results".format(url))
+                url = f"http://{bucket_name}.s3-website-us-east-1.amazonaws.com/{file_dir_once_uploaded}"
+                print(f"Regression Results")
             except Exception as e:
                 success = False
                 print("There was a problem generating results webpage: %s" % e)

From 21a8e2c051d8e7ede72868f0820f69a22085ec57 Mon Sep 17 00:00:00 2001
From: Edwin Lee
Date: Thu, 13 Jun 2024 09:44:10 -0500
Subject: [PATCH 3/5] Fix main_function argument

---
 .../tests/diffs/test_ci_compare_script.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/energyplus_regressions/tests/diffs/test_ci_compare_script.py b/energyplus_regressions/tests/diffs/test_ci_compare_script.py
index 2e3f89f..32c8925 100644
--- a/energyplus_regressions/tests/diffs/test_ci_compare_script.py
+++ b/energyplus_regressions/tests/diffs/test_ci_compare_script.py
@@ -123,7 +123,7 @@ def test_main_function(self):
             mod_dir=self.temp_mod_dir,
             base_sha='base123',
             mod_sha='mod456',
-            make_public=True,
+            _make_public=True,
             device_id='some_device_id',
             test_mode=True
         )
@@ -148,7 +148,7 @@ def test_main_function(self):
             mod_dir=self.temp_mod_dir,
             base_sha='base123',
             mod_sha='mod456',
-            make_public=True,
+            _make_public=True,
             device_id='some_device_id',
             test_mode=True
         )
@@ -227,7 +227,7 @@ def test_main_function(self):
             mod_dir=self.temp_mod_dir,
             base_sha='base124',
             mod_sha='mod457',
-            make_public=True,
+            _make_public=True,
             device_id='some_device_id',
             test_mode=True
         )
@@ -279,7 +279,7 @@ def test_main_function_not_test_mode(self):  # pragma: no cover
             mod_dir=self.temp_mod_dir,
             base_sha='base123',
             mod_sha='mod456',
-            make_public=True,
+            _make_public=True,
             device_id='some_device_id',
             test_mode=True
         )
@@ -304,7 +304,7 @@ def test_main_function_not_test_mode(self):  # pragma: no cover
             mod_dir=self.temp_mod_dir,
             base_sha='base123',
             mod_sha='mod456',
-            make_public=True,
+            _make_public=True,
             device_id='some_device_id',
             test_mode=True
         )
@@ -369,7 +369,7 @@ def test_main_function_not_test_mode(self):  # pragma: no cover
             mod_dir=self.temp_mod_dir,
             base_sha='base123',
             mod_sha='mod456',
-            make_public=True,
+            _make_public=True,
             device_id='some_device_id',
             test_mode=False
         )

From 58e821de3e662b4929f3982f60a689536db40ffd Mon Sep 17 00:00:00 2001
From: Edwin Lee
Date: Thu, 13 Jun 2024 15:28:06 -0500
Subject: [PATCH 4/5] Tweak upload steps, should be very close now

---
 .../diffs/ci_compare_script.py | 31 +++++++++++--------
 1 file changed, 18 insertions(+), 13 deletions(-)

diff --git a/energyplus_regressions/diffs/ci_compare_script.py b/energyplus_regressions/diffs/ci_compare_script.py
index 84de8c1..3dcd597 100644
--- a/energyplus_regressions/diffs/ci_compare_script.py
+++ b/energyplus_regressions/diffs/ci_compare_script.py
@@ -247,7 +247,6 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, _make_public,
         found_files = []
         for local_file_path in potential_local_diff_file_paths:
             file_path_to_send = local_file_path
-            # local_file_name = os.path.basename(file_path_to_send)
             # print("Processing output file: {0}".format(filepath_to_send))

             if not os.path.isfile(file_path_to_send):
@@ -257,14 +256,20 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, _make_public,
                 continue

             try:
-                n = os.path.basename(local_file_path)
-                target_upload_file_path_no_extension = f"{file_dir_once_uploaded}/{n}"
-                # print("Processing output file: {0}, uploading to: {1}".format(filepath_to_send, filepath))
-                # TODO: In the previous code this would upload the raw original contents to s3 to the basename path with
-                # TODO: no extension, regardless of .htm or not. I'm not sure if that's still needed.
+                local_raw_file_name = os.path.basename(local_file_path)
+                target_upload_file_path = f"{file_dir_once_uploaded}/{local_raw_file_name}"
+                target_upload_file_path_with_html_added = target_upload_file_path + ".html"
+                # always upload the raw file for downloading:
+                # like c:/ci/whatever/a.bnd.diff > /regressions/whatever/a.bnd.diff
+                s3.upload_file(
+                    local_file_path, bucket_name, target_upload_file_path,
+                    ExtraArgs={'ACL': 'public-read', "ContentType": "text/html", "ContentDisposition": "inline"}
+                )
+                # but we also need to upload the HTML "view" of the file as well
                 if file_path_to_send.endswith('.htm'):
+                    # if it's already an html file, then we can just upload the raw contents but renamed as ...htm.html
                     s3.upload_file(
-                        file_path_to_send, bucket_name, target_upload_file_path_no_extension + ".html",
+                        file_path_to_send, bucket_name, target_upload_file_path_with_html_added,
                         ExtraArgs={'ACL': 'public-read', "ContentType": "text/html", "ContentDisposition": "inline"}
                     )
                 else:
@@ -287,11 +292,11 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, _make_public,
 """
                         temp_dir = mkdtemp()
-                        local_fixed_up_file_path = f"{temp_dir}/{n}.html"
+                        local_fixed_up_file_path = f"{temp_dir}/{local_raw_file_name}.html"
                         with open(local_fixed_up_file_path, 'w') as f:
                             f.write(new_contents)
                         s3.upload_file(
-                            local_fixed_up_file_path, bucket_name, target_upload_file_path_no_extension,
+                            local_fixed_up_file_path, bucket_name, target_upload_file_path_with_html_added,
                             ExtraArgs={'ACL': 'public-read', "ContentType": "text/html"}
                         )
@@ -327,12 +332,12 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, _make_public,
 """
                 for local_file_path in found_files:
-                    n = os.path.basename(local_file_path)
+                    local_raw_file_name = os.path.basename(local_file_path)
                     index += f"""

-   {n}
-   download
-   view
+   {local_raw_file_name}
+   download
+   view
  """
                 index += """

From 394a87cd17fb9ac4ed7b34b0c3803a97800dfc96 Mon Sep 17 00:00:00 2001
From: Edwin Lee
Date: Fri, 14 Jun 2024 08:21:05 -0500
Subject: [PATCH 5/5] Add download directive to index.html download column

---
 energyplus_regressions/diffs/ci_compare_script.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/energyplus_regressions/diffs/ci_compare_script.py b/energyplus_regressions/diffs/ci_compare_script.py
index 3dcd597..b2f11e3 100644
--- a/energyplus_regressions/diffs/ci_compare_script.py
+++ b/energyplus_regressions/diffs/ci_compare_script.py
@@ -336,7 +336,7 @@ def main_function(file_name, base_dir, mod_dir, base_sha, mod_sha, _make_public,
                     index += f"""

    {local_raw_file_name}
-   download
+   download
    view
   """
                 index += """
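
Note (not part of the patches above): the heart of this series is swapping boto's connection/bucket/key objects for a single boto3 S3 client plus upload_file calls with ExtraArgs carrying the ACL and content headers. A minimal sketch of that upload pattern follows; the bucket name, local file, and object key here are placeholders, not values from the real CI setup.

    # Sketch of the boto3 upload pattern used in these patches (placeholder names throughout).
    import boto3

    # credentials are resolved from the environment, shared config, or an instance role
    s3 = boto3.client('s3')

    s3.upload_file(
        'eplusout.err.diff.html',                  # hypothetical local file to upload
        'some-results-bucket',                     # hypothetical bucket name
        'regressions/2024-06/some-diff.html',      # hypothetical object key
        ExtraArgs={
            'ACL': 'public-read',                  # replaces the old key.make_public()
            'ContentType': 'text/html',            # replaces the old Content-Type header
            'ContentDisposition': 'inline',        # so browsers render the diff instead of downloading it
        },
    )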