diff --git a/ush/enspost_config.py b/ush/enspost_config.py index f9388dc8..f69a5ee2 100644 --- a/ush/enspost_config.py +++ b/ush/enspost_config.py @@ -1,6 +1,7 @@ -# HRRR-E PYTHON GRIB GENERATION -# CONFIGURATION FILE +""" +HRRR-E python GRIB generation configuration file +""" ## CREATE DICTIONARIES ## grib_discipline = {} grib_category = {} diff --git a/ush/fire_emiss_tools.py b/ush/fire_emiss_tools.py index 6625a638..cc60ec6d 100755 --- a/ush/fire_emiss_tools.py +++ b/ush/fire_emiss_tools.py @@ -1,3 +1,6 @@ +""" +Fire emissions tools. +""" import os import numpy as np import xarray as xr @@ -5,12 +8,25 @@ from netCDF4 import Dataset import interp_tools as i_tools -#Compute average FRP from raw RAVE for the previous 24 hours def averaging_FRP(fcst_dates, cols, rows, intp_dir, rave_to_intp, veg_map, tgt_area, beta, fg_to_ug): - # There are two situations here. - # 1) there is only on fire detection whithin 24 hours so FRP is divided by 2 - # 2) There are more than one fire detection so the average FRP is stimated - # ebb_smoke is always divided by the number of times a fire is detected within 24 hours window + """ + Compute average FRP from raw RAVE for the previous 24 hours. + + There are two situations here. + 1) there is only one fire detection within 24 hours so FRP is divided by 2 + 2) There is more than one fire detection so the average FRP is estimated + ebb_smoke is always divided by the number of times a fire is detected within the 24-hour window + + fcst_dates: ??? + cols: ??? + rows: ??? + intp_dir: ??? + rave_to_intp: ??? + veg_map: ??? + tgt_area: ??? + beta: ??? + fg_to_ug: ??? + """ base_array = np.zeros((cols*rows)) frp_daily = base_array ebb_smoke_total = [] @@ -67,9 +83,25 @@ def averaging_FRP(fcst_dates, cols, rows, intp_dir, rave_to_intp, veg_map, tgt_a return(frp_avg_reshaped, ebb_total_reshaped) def estimate_fire_duration(intp_avail_hours, intp_dir, fcst_dates, current_day, cols, rows, rave_to_intp): - # There are two steps here. 
- # 1) First day simulation no RAVE from previous 24 hours available (fire age is set to zero) - # 2) previus files are present (estimate fire age as the difference between the date of the current cycle and the date whe the fire was last observed whiting 24 hours) + """ + There are two steps here. + + 1. First day simulation no RAVE from previous 24 hours available + (fire age is set to zero) + + 2. Previous files are present (estimate fire age as the difference + between the date of the current cycle and the date when the fire + was last observed within 24 hours) + + intp_avail_hours: ??? + intp_dir: ??? + fcst_dates: ??? + current_day: ??? + cols: ??? + rows: ??? + rave_to_intp: ??? + + """ t_fire = np.zeros((cols, rows)) for date_str in fcst_dates: @@ -103,10 +135,34 @@ def estimate_fire_duration(intp_avail_hours, intp_dir, fcst_dates, current_day, return(te) def save_fire_dur(cols, rows, te): + """ + Reshape the fire duration values into a (cols, rows) array. + + cols: ??? + rows: ??? + te: ??? + + """ fire_dur = np.array(te).reshape(cols, rows) return(fire_dur) def produce_emiss_file(xarr_hwp, frp_avg_reshaped, totprcp_ave_arr, xarr_totprcp, intp_dir, current_day, tgt_latt, tgt_lont, ebb_tot_reshaped, fire_age, cols, rows): + """ + Create the smoke emissions NetCDF file for the current day. + + xarr_hwp: ??? + frp_avg_reshaped: ??? + totprcp_ave_arr: ??? + xarr_totprcp: ??? + intp_dir: ??? + current_day: ??? + tgt_latt: ??? + tgt_lont: ??? + ebb_tot_reshaped: ??? + fire_age: ??? + cols: ??? + rows: ??? 
+ """ # Ensure arrays are not negative or NaN frp_avg_reshaped = np.clip(frp_avg_reshaped, 0, None) frp_avg_reshaped = np.nan_to_num(frp_avg_reshaped) @@ -159,4 +215,4 @@ def produce_emiss_file(xarr_hwp, frp_avg_reshaped, totprcp_ave_arr, xarr_totprcp print(f"Error creating or writing to NetCDF file {file_path}: {e}") return f"Error creating or writing to NetCDF file {file_path}: {e}" - return "Emissions file created successfully" \ No newline at end of file + return "Emissions file created successfully" diff --git a/ush/interp_tools.py b/ush/interp_tools.py index cc939478..6e82aaf3 100755 --- a/ush/interp_tools.py +++ b/ush/interp_tools.py @@ -1,3 +1,7 @@ +""" +??? + +""" import datetime as dt import pandas as pd import os @@ -7,8 +11,13 @@ import numpy as np from netCDF4 import Dataset -#Create date range, this is later used to search for RAVE and HWP from previous 24 hours def date_range(current_day): + """ + Create date range, this is later used to search for RAVE and HWP from previous 24 hours. + + current_day: ??? + + """ print(f'Searching for interpolated RAVE for {current_day}') fcst_datetime = dt.datetime.strptime(current_day, "%Y%m%d%H") @@ -19,8 +28,15 @@ def date_range(current_day): print(f'Current cycle: {fcst_datetime}') return(fcst_dates) -# Check if interoplated RAVE is available for the previous 24 hours def check_for_intp_rave(intp_dir, fcst_dates, rave_to_intp): + """ + Check if interoplated RAVE is available for the previous 24 hours. + + intp_dir: ??? + fcst_dates: ??? + rave_to_intp: ??? + + """ intp_avail_hours = [] intp_non_avail_hours = [] # There are four situations here. 
@@ -48,8 +64,15 @@ def check_for_intp_rave(intp_dir, fcst_dates, rave_to_intp): return(intp_avail_hours, intp_non_avail_hours, inp_files_2use) -#Check if raw RAVE in intp_non_avail_hours list is available for interpolatation def check_for_raw_rave(RAVE, intp_non_avail_hours, intp_avail_hours): + """ + Check if raw RAVE in intp_non_avail_hours list is available for interpolation. + + RAVE: ??? + intp_non_avail_hours: ??? + intp_avail_hours: ??? + + """ rave_avail = [] rave_avail_hours = [] rave_nonavail_hours_test = [] @@ -72,9 +95,16 @@ def check_for_raw_rave(RAVE, intp_non_avail_hours, intp_avail_hours): print(f'FIRST DAY?: {first_day}') return(rave_avail, rave_avail_hours, rave_nonavail_hours_test, first_day) -#Create source and target fields def creates_st_fields(grid_in, grid_out, intp_dir, rave_avail_hours): + """ + Create source and target fields. + grid_in: ??? + grid_out: ??? + intp_dir: ??? + rave_avail_hours: ??? + + """ # Open datasets with context managers with xr.open_dataset(grid_in) as ds_in, xr.open_dataset(grid_out) as ds_out: tgt_area = ds_out['area'] @@ -93,7 +123,12 @@ def creates_st_fields(grid_in, grid_out, intp_dir, rave_avail_hours): #Define output and variable meta data def create_emiss_file(fout, cols, rows): - """Create necessary dimensions for the emission file.""" + """Create necessary dimensions for the emission file. + + fout: ??? + cols: ??? + rows: ??? + """ fout.createDimension('t', None) fout.createDimension('lat', cols) fout.createDimension('lon', rows) @@ -101,7 +136,17 @@ setattr(fout, 'TIME_RANGE', '1 hour') def Store_latlon_by_Level(fout, varname, var, long_name, units, dim, fval, sfactor): - """Store a 2D variable (latitude/longitude) in the file.""" + """Store a 2D variable (latitude/longitude) in the file. + + fout: ??? + varname: ??? + var: ??? + long_name: ??? + units: ??? + dim: ??? + fval: ??? + sfactor: ??? 
+ """ var_out = fout.createVariable(varname, 'f4', ('lat','lon')) var_out.units=units var_out.long_name=long_name @@ -111,7 +156,16 @@ def Store_latlon_by_Level(fout, varname, var, long_name, units, dim, fval, sfact var_out.coordinates='geolat geolon' def Store_by_Level(fout, varname, long_name, units, dim, fval, sfactor): - """Store a 3D variable (time, latitude/longitude) in the file.""" + """Store a 3D variable (time, latitude/longitude) in the file. + + fout: ??? + varname: ??? + long_name: ??? + units: ??? + dim: ??? + fval: ??? + sfactor: ??? + """ var_out = fout.createVariable(varname, 'f4', ('t','lat','lon')) var_out.units=units var_out.long_name = long_name @@ -119,8 +173,18 @@ def Store_by_Level(fout, varname, long_name, units, dim, fval, sfactor): var_out.FillValue=fval var_out.coordinates='t geolat geolon' -#create a dummy rave interpolated file if first day or regrider fails def create_dummy(intp_dir, current_day, tgt_latt, tgt_lont, cols, rows): + """ + Create a dummy rave interpolated file if first day or regrider fails. + + intp_dir: ??? + current_day: ??? + tgt_latt: ??? + tgt_lont: ??? + cols: ??? + rows: ??? + + """ file_path = os.path.join(intp_dir, f'SMOKE_RRFS_data_{current_day}00.nc') dummy_file = np.zeros((cols, rows)) # Changed to 3D to match the '3D' dimensions with Dataset(file_path, 'w') as fout: @@ -143,8 +207,18 @@ def create_dummy(intp_dir, current_day, tgt_latt, tgt_lont, cols, rows): return "Emissions dummy file created successfully" -#generate regridder def generate_regrider(rave_avail_hours, srcfield, tgtfield, weightfile, inp_files_2use, intp_avail_hours): + """ + generate regridder. + + rave_avail_hours: ??? + srcfield: ??? + tgtfield: ??? + weightfile: ??? + inp_files_2use: ??? + intp_avail_hours: ??? 
+ + """ print('Checking conditions for generating regridder.') use_dummy_emiss = len(rave_avail_hours) == 0 and len(intp_avail_hours) == 0 regridder = None @@ -167,9 +241,28 @@ def generate_regrider(rave_avail_hours, srcfield, tgtfield, weightfile, inp_file return(regridder, use_dummy_emiss) -#process RAVE available for interpolation def interpolate_rave(RAVE, rave_avail, rave_avail_hours, use_dummy_emiss, vars_emis, regridder, srcgrid, tgtgrid, rave_to_intp, intp_dir, src_latt, tgt_latt, tgt_lont, cols, rows): + """ + Process RAVE available for interpolation. + + RAVE: ??? + rave_avail: ??? + rave_avail_hours: ??? + use_dummy_emiss: ??? + vars_emis: ??? + regridder: ??? + srcgrid: ??? + tgtgrid: ??? + rave_to_intp: ??? + intp_dir: ??? + src_latt: ??? + tgt_latt: ??? + tgt_lont: ??? + cols: ??? + rows: ??? + + """ for index, current_hour in enumerate(rave_avail_hours): file_name = rave_avail[index] rave_file_path = os.path.join(RAVE, file_name[0]) @@ -221,4 +314,4 @@ def interpolate_rave(RAVE, rave_avail, rave_avail_hours, use_dummy_emiss, vars_e except (OSError, IOError, RuntimeError, FileNotFoundError, TypeError, IndexError, MemoryError) as e: print(f"Error reading NetCDF file {rave_file_path}: {e}") else: - print(f"File not found or dummy emissions required: {rave_file_path}") \ No newline at end of file + print(f"File not found or dummy emissions required: {rave_file_path}")