
Merge pull request #436 from USACE/develop
Develop
adamscarberry authored Sep 13, 2023
2 parents bc4d7f3 + c676742 commit 384d991
Showing 5 changed files with 17 additions and 13 deletions.
8 changes: 4 additions & 4 deletions async_packager/packager.py
@@ -67,13 +67,13 @@ def handle_message(message):
if len(PayloadResp.contents) == 0:
handler.update_status(download_id, handler.PACKAGE_STATUS["FAILED"], 0)
# TODO: Add new package_status in database to represent EMPTY condition
logger.info(f"Download Failed Due to Empty Contents: {download_id}")
logger.info(f'Empty Contents: No products selected in the request for download ID "{download_id}"')
else:
package_file = handler.handle_message(PayloadResp, dst.name)

if package_file:
# Upload File to S3
logger.debug(f"ID '{download_id}'; Packaging Successful")
logger.debug(f'Packaging successful for download ID "{download_id}"')
t1 = Timer(logger=None)
t1.start()
s3_upload_worked = s3_upload_file(
@@ -82,7 +82,7 @@ def handle_message(message):
elapsed_time = t1.stop()
if s3_upload_worked:
logger.info(
f"S3 upload '{PayloadResp.output_key}' in {elapsed_time:.4f} seconds"
f'S3 upload "{PayloadResp.output_key}" in {elapsed_time:.4f} seconds'
)
handler.update_status(
download_id,
@@ -101,7 +101,7 @@ def handle_message(message):
)
else:
logger.critical(
f"Failed to package or upload to S3; Download {download_id}"
f'Failed to package or upload "{package_file}" to S3 download ID "{download_id}"'
)

except Exception as ex:
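The packager.py hunks above change only the wording and quoting of log messages, but they sit in the handler that marks empty payloads as FAILED, times the S3 upload, and logs a critical error when packaging or upload fails. Below is a minimal, hedged sketch of that flow; the handler object, the PACKAGE_STATUS values, and s3_upload_file are illustrative stand-ins, not the project's actual implementations.

```python
# Hedged sketch of the flow around the log changes above; the handler, the
# status codes, and s3_upload_file are stand-ins, not the project's code.
import logging
import time
from collections import namedtuple

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("packager")

PACKAGE_STATUS = {"FAILED": -1, "INITIATED": 0, "SUCCESS": 1}  # illustrative values
Payload = namedtuple("Payload", ["contents", "output_key"])


def s3_upload_file(path, key):
    """Stand-in for the real S3 upload; pretend it succeeded."""
    return True


class Handler:
    """Stand-in handler: packages the payload and records status updates."""

    def update_status(self, download_id, status_id, progress):
        logger.debug(f"status={status_id} progress={progress} id={download_id}")

    def handle_message(self, payload, dst):
        return f"{dst}/package.dss" if payload.contents else None


def handle_download(handler, payload, download_id, dst_name):
    if len(payload.contents) == 0:
        # No products selected: mark the download FAILED and log it.
        handler.update_status(download_id, PACKAGE_STATUS["FAILED"], 0)
        logger.info(f'Empty Contents: No products selected in the request for download ID "{download_id}"')
        return None

    package_file = handler.handle_message(payload, dst_name)
    if package_file:
        logger.debug(f'Packaging successful for download ID "{download_id}"')
        t0 = time.perf_counter()  # stand-in for the Timer helper in the diff
        if s3_upload_file(package_file, payload.output_key):
            elapsed_time = time.perf_counter() - t0
            logger.info(f'S3 upload "{payload.output_key}" in {elapsed_time:.4f} seconds')
            handler.update_status(download_id, PACKAGE_STATUS["SUCCESS"], 100)
            return package_file

    logger.critical(f'Failed to package or upload "{package_file}" to S3 download ID "{download_id}"')
    return None


handle_download(Handler(), Payload(contents=["grid.tif"], output_key="out/pkg.dss"), "abc-123", "/tmp")
```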
2 changes: 1 addition & 1 deletion async_packager/src/cumulus_packager/packager/handler.py
@@ -84,7 +84,7 @@ def handle_message(payload_resp: namedtuple, dst: str):
Temporary directory name
"""
logger.info(f"Handle message with plugin '{payload_resp.format}'")
logger.info(f'Handle message with plugin "{payload_resp.format}"')
result = pkg_writer(
plugin=payload_resp.format,
id=payload_resp.download_id,
14 changes: 7 additions & 7 deletions async_packager/src/cumulus_packager/writers/dss7.py
@@ -77,12 +77,13 @@ def writer(
tz_offset = heclib.time_zone[tz_name]
is_interval = 1

+ dssfilename = Path(dst).joinpath(id).with_suffix(".dss").as_posix()

for idx, tif in enumerate(src):
TifCfg = namedtuple("TifCfg", tif)(**tif)
dsspathname = f"/{grid_type_name}/{_extent_name}/{TifCfg.dss_cpart}/{TifCfg.dss_dpart}/{TifCfg.dss_epart}/{TifCfg.dss_fpart}/"

try:
- dssfilename = Path(dst).joinpath(id).with_suffix(".dss").as_posix()
data_type = heclib.data_type[TifCfg.dss_datatype]
ds = gdal.Open(f"/vsis3_streaming/{TifCfg.bucket}/{TifCfg.key}")

@@ -155,17 +156,16 @@ def writer(
elapsed_time = t.stop()
logger.debug(f'Processed "{TifCfg.key}" in {elapsed_time:.4f} seconds')
if result != 0:
logger.warning(f"TiffDss Write Record Fail: {result}")
logger.info(f'TiffDss write record failed for "{TifCfg.key}": {result}')

_progress = int(((idx + 1) / gridcount) * 100)
# Update progress at predefined interval
if idx % PACKAGER_UPDATE_INTERVAL == 0 or idx == gridcount - 1:
- # double the PACKAGER_UPDATE_INTERVAL for logging
- if _progress % (PACKAGER_UPDATE_INTERVAL * 2) == 0:
- logger.info(f"Progress: {_progress}")
update_status(
id=id, status_id=PACKAGE_STATUS["INITIATED"], progress=_progress
)
+ if _progress % PACKAGER_UPDATE_INTERVAL == 0:
+ logger.info(f'Download ID "{id}" progress: {_progress}%')

except (RuntimeError, Exception) as ex:
exc_type, exc_value, exc_traceback = sys.exc_info()
@@ -189,11 +189,11 @@ def writer(
# If no progress was made for any items in the payload (ex: all tifs could not be projected properly),
# don't return a dssfilename
if _progress == 0:
logger.error(f"No files processed - Progress:{_progress}")
logger.error(f'No files processed for download ID "{id}"- Progress:{_progress}')
update_status(id=id, status_id=PACKAGE_STATUS["FAILED"], progress=_progress)
return None

total_time = Timer.timers["accumuluated"]
logger.info(f"Total Processing Time: {total_time:.4f} seconds")
logger.info(f'Total processing time for download ID "{id}" in {total_time:.4f} seconds')

return dssfilename
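The dss7.py change hoists the DSS file path construction out of the grid loop and reworks progress reporting: status is still updated every PACKAGER_UPDATE_INTERVAL grids (and on the last grid), but the percentage is now logged whenever it lands on a PACKAGER_UPDATE_INTERVAL boundary instead of a doubled interval. A minimal, hedged sketch of that gating logic, with the grid processing and database update stubbed out:

```python
# Hedged sketch of the reworked progress gating; update_status and the grid
# processing are stubs, and PACKAGER_UPDATE_INTERVAL mirrors the
# docker-compose default of 5.
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("dss7-sketch")

PACKAGER_UPDATE_INTERVAL = 5


def update_status(id, status_id, progress):
    """Stand-in for the real database status update."""
    pass


def report_progress(id, gridcount):
    for idx in range(gridcount):
        # ... process one grid here ...
        _progress = int(((idx + 1) / gridcount) * 100)
        # Update progress at the predefined interval, and always on the last grid
        if idx % PACKAGER_UPDATE_INTERVAL == 0 or idx == gridcount - 1:
            update_status(id=id, status_id="INITIATED", progress=_progress)
            # Log only when the percentage falls on an interval boundary
            if _progress % PACKAGER_UPDATE_INTERVAL == 0:
                logger.info(f'Download ID "{id}" progress: {_progress}%')


report_progress("abc-123", gridcount=12)
```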
2 changes: 1 addition & 1 deletion docker-compose.yml
@@ -95,7 +95,7 @@ services:
- CUMULUS_API_URL=http://api
- HTTP2=False
- GDAL_DISABLE_READDIR_ON_OPEN=EMPTY_DIR
-   - LOGGER_LEVEL=DEBUG
+   - LOGGER_LEVEL=INFO
- PACKAGER_UPDATE_INTERVAL=5
# - CPL_VSIL_CURL_CHUNK_SIZE=20000000
# - CPL_CURL_VERBOSE=1
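The compose change drops the packager's default log verbosity from DEBUG to INFO and keeps the five-grid progress interval. A hedged sketch of how a service might consume these environment variables; the variable names come from the compose file above, but the reading code itself is illustrative, not the project's:

```python
# Hedged sketch: read LOGGER_LEVEL and PACKAGER_UPDATE_INTERVAL from the
# environment, using the compose-file values as defaults. Illustrative only.
import logging
import os

LOGGER_LEVEL = os.getenv("LOGGER_LEVEL", "INFO").upper()
PACKAGER_UPDATE_INTERVAL = int(os.getenv("PACKAGER_UPDATE_INTERVAL", "5"))

logging.basicConfig(level=getattr(logging, LOGGER_LEVEL, logging.INFO))
logger = logging.getLogger("packager")
logger.info(f"Logging at {LOGGER_LEVEL}; progress interval {PACKAGER_UPDATE_INTERVAL}")
```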
4 changes: 4 additions & 0 deletions sql/common/V2.24.0__watershed_nwdm_ftra_gavins.sql
@@ -0,0 +1,4 @@
+ -- extent to polygon reference order - simple 4 point extents
+ -- xmin,ymax (top left), xmax ymax (top right), xmax ymin (bottom right), xmin ymin (bottom left), xmin ymax (top left again)
+ INSERT INTO watershed (id,slug,"name",geometry,office_id) VALUES
+ ('d41831ea-74ae-4cf9-b1ed-9e173b875ffd','ftra-to-gavins', 'FTRA to Gavins', ST_GeomFromText('POLYGON ((-710000 2302000, -112400 2302000, -112400 2114000, -710000 2114000 , -710000 2302000))',5070), '90173658-2de9-4329-926d-176c1b29089a');
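The migration's comment spells out the vertex order for turning a simple extent into a closed polygon: top left, top right, bottom right, bottom left, then top left again to close the ring. A small, hedged sketch that rebuilds the inserted WKT from the extent corners; the helper below is illustrative and not part of the repository:

```python
# Hedged sketch: build the closed 5-vertex POLYGON WKT from a simple extent,
# following the vertex order described in the migration comment above.
def extent_to_polygon_wkt(xmin, ymin, xmax, ymax):
    ring = [
        (xmin, ymax),  # top left
        (xmax, ymax),  # top right
        (xmax, ymin),  # bottom right
        (xmin, ymin),  # bottom left
        (xmin, ymax),  # top left again, closing the ring
    ]
    coords = ", ".join(f"{x} {y}" for x, y in ring)
    return f"POLYGON (({coords}))"


# Reproduces the geometry inserted above (EPSG:5070 coordinates)
print(extent_to_polygon_wkt(-710000, 2114000, -112400, 2302000))
# POLYGON ((-710000 2302000, -112400 2302000, -112400 2114000, -710000 2114000, -710000 2302000))
```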
