diff --git a/scm/doc/TechGuide/chap_cases.rst b/scm/doc/TechGuide/chap_cases.rst index ff97454d7..cd83dc07e 100644 --- a/scm/doc/TechGuide/chap_cases.rst +++ b/scm/doc/TechGuide/chap_cases.rst @@ -409,16 +409,16 @@ Activate environment: .. _`ufsicgenerator`: -UFS_IC_generator.py +UFS_case_gen.py ~~~~~~~~~~~~~~~~~~~ -A script exists in ``scm/etc/scripts/UFS_IC_generator.py`` to read in UFS history (output) files and their +A script exists in ``scm/etc/scripts/UFS_case_gen.py`` to read in UFS history (output) files and their initial conditions to generate a SCM case input data file, in DEPHY format. .. code:: bash - ./UFS_IC_generator.py [-h] (-l LOCATION LOCATION | -ij INDEX INDEX) -d + ./UFS_case_gen.py [-h] (-l LOCATION LOCATION | -ij INDEX INDEX) -d DATE -i IN_DIR -g GRID_DIR -f FORCING_DIR -n CASE_NAME [-t {1,2,3,4,5,6,7}] [-a AREA] [-oc] [-lam] [-sc] [-near] diff --git a/scm/etc/scripts/UFS_IC_generator.py b/scm/etc/scripts/UFS_case_gen.py similarity index 68% rename from scm/etc/scripts/UFS_IC_generator.py rename to scm/etc/scripts/UFS_case_gen.py index 5e290d3df..54802d3a7 100755 --- a/scm/etc/scripts/UFS_IC_generator.py +++ b/scm/etc/scripts/UFS_case_gen.py @@ -21,6 +21,7 @@ #Physical constants earth_radius = 6371000.0 #m +Omega = 7.292E-5 rdgas = 287.05 rvgas = 461.50 cp = 1004.6 @@ -30,11 +31,28 @@ deg_to_rad = math.pi/180.0 kappa = rdgas/cp p0 = 100000.0 +one_twelfth = 1.0/12.0 +two_thirds = 2.0/3.0 + +use_oro_for_geos_wind = False #use surface geopotential in the calculation of geostrophic winds (if True, the gradient of geopotential will include the surface geopotential height gradient) +use_actual_geos_wind = False #use geostrophic winds calculated from the large-scale UFS-modeled geopotential (the scale is determined by the modeled winds and the choice of critical Rossby number for which geostrophic balance is assumed to hold) +critical_Rossby = 0.1 #the Rossby number under which geostrophic balance is assumed to be valid (a lower number means a larger horizontal area is required for a given wind speed); geostrophic balance can be assumed when Ro << 1. 
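+# Note (for reference; not used directly in the calculation): geostrophic balance is assumed to hold on
+# horizontal length scales L for which Ro = U/(f*L) <= critical_Rossby, i.e. L >= U/(f*critical_Rossby).
+# For example, U = 10 m s-1 and f = 1.0E-4 s-1 give L >= 1000 km for critical_Rossby = 0.1.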
+
+PBL_top_for_geos = 85000.0 # (Pa) when using the mean UFS-modeled winds as geostrophic winds, geostrophic winds return linearly to 0 at the surface below this pressure level (i.e., at pressures greater than this value)
 missing_value = -9999.0 #9.99e20
+top_pres_for_forcing = 20000.0 # (Pa) above this pressure level (i.e., at lower pressures), wind profiles are linearly returned to zero to handle noisy model output at upper UFS levels
+
+const_nudging_time = 3600.0
+
 n_lam_halo_points = 3
+n_forcing_halo_points = 2 #number of points in each direction used to retrieve forcing (1 = 3x3, 2 = 5x5, 3 = 7x7); must be >=1
+
+#dist_method = 'euclidean'
+dist_method = 'haversine' #faster
+
 missing_variable_snow_layers = 3
 missing_variable_soil_layers = 4
 missing_variable_ice_layers = 2
@@ -66,8 +84,12 @@
 parser.add_argument('-a', '--area', help='area of grid cell in m^2', type=float)
 parser.add_argument('-oc', '--old_chgres', help='flag to denote that the initial conditions use an older data format (pre-chgres_cube)', action='store_true')
 parser.add_argument('-lam', '--lam', help='flag to signal that the ICs and forcing is from a limited-area model run', action='store_true')
-parser.add_argument('-sc', '--save_comp', help='flag to create UFS reference file for comparison', action='store_true')
-parser.add_argument('-near','--use_nearest', help='flag to indicate using the nearest UFS history file gridpoint',action='store_true')
+parser.add_argument('-sc', '--save_comp', help='flag to create UFS reference file for comparison', action='store_true')
+parser.add_argument('-near','--use_nearest', help='flag to indicate using the nearest UFS history file gridpoint', action='store_true')
+parser.add_argument('-fm', '--forcing_method', help='method used to calculate forcing (1=total tendencies from UFS dycore, 2=advective terms calculated from UFS history files, 3=total time tendency terms calculated)', type=int, choices=range(1,4), default=2)
+parser.add_argument('-vm', '--vertical_method', help='method used to calculate vertical advective forcing (1=vertical advective terms calculated from UFS history files and added to total, 2=smoothed vertical velocity provided)', type=int, choices=range(1,3), default=2)
+parser.add_argument('-wn', '--wind_nudge', help='flag to turn on wind nudging to UFS profiles', action='store_true')
+parser.add_argument('-geos','--geostrophic', help='flag to turn on geostrophic wind forcing', action='store_true')

 ########################################################################################
 #
@@ -88,6 +110,10 @@ def parse_arguments():
     lam = args.lam
     save_comp = args.save_comp
     use_nearest = args.use_nearest
+    forcing_method = args.forcing_method
+    vertical_method = args.vertical_method
+    geos_wind_forcing = args.geostrophic
+    wind_nudge = args.wind_nudge

     #validate args
     if not os.path.exists(in_dir):
@@ -127,7 +153,8 @@ def parse_arguments():
         raise Exception(message)

     return (location, index, date_dict, in_dir, grid_dir, forcing_dir, tile, \
-            area, case_name, old_chgres, lam, save_comp, use_nearest)
+            area, case_name, old_chgres, lam, save_comp, use_nearest, forcing_method, \
+            vertical_method, geos_wind_forcing, wind_nudge)

 ########################################################################################
 #
@@ -136,6 +163,74 @@ def setup_logging():
     """Sets up the logging module."""
     logging.basicConfig(format='%(levelname)s: %(message)s', level=LOGLEVEL)

+########################################################################################
+#
+########################################################################################
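+# Great-circle distance (km) between two lon/lat points via a compact haversine form;
+# 12742 is the Earth's diameter in km (2 x 6371 km) and p converts degrees to radians.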
+def haversine_distance(lat1, lon1, lat2, lon2):
+    #lats and lons in degrees
+    p = 0.017453292519943295
+    hav = 0.5 - math.cos((lat2-lat1)*p)/2 + math.cos(lat1*p)*math.cos(lat2*p) * (1-math.cos((lon2-lon1)*p)) / 2
+    return 12742 * math.asin(math.sqrt(hav)) #km
+
+########################################################################################
+#
+########################################################################################
+def sph2cart(az, el, r):
+    """Calculate the Cartesian coordinates from spherical coordinates"""
+
+    rcos_theta = r * np.cos(el)
+    x = rcos_theta * np.cos(az)
+    y = rcos_theta * np.sin(az)
+    z = r * np.sin(el)
+
+    return (x, y, z)
+
+########################################################################################
+#
+########################################################################################
+def moving_average(interval, window_size):
+    convolve_mode = 'same'
+    window = np.ones(int(window_size))/float(window_size)
+
+    output = np.zeros((interval.shape[0]))
+    if (convolve_mode == 'same'):
+        convolution = np.convolve(interval, window, mode='same')
+        output = convolution
+    elif (convolve_mode == 'valid'):
+        convolution = np.convolve(interval, window, mode='valid')
+        num_not_valid_points = interval.shape[0] - (max(interval.shape[0], window_size) - min(interval.shape[0], window_size) + 1)
+        output[num_not_valid_points:] = convolution
+        for k in range(num_not_valid_points):
+            output[k] = interval[k]
+            output[-1-k] = interval[-1-k]
+
+    return output
+
+########################################################################################
+#
+########################################################################################
+def centered_diff_derivative(y, dx):
+    #y is the variable of interest, dx is the space between contiguous points (starting from the first point)
+    cent_diff_coef_three = [-0.5, 0.0, 0.5] #second-order accuracy in space (assuming constant grid)
+    cent_diff_coef_five = [one_twelfth, -two_thirds, 0.0, two_thirds, -one_twelfth] #fourth-order accuracy in space (assuming constant grid)
+
+    if y.shape[0] % 2:
+        center = int((y.shape[0]-1)/2)
+        if y.shape[0] == 3:
+            deriv = np.sum(cent_diff_coef_three*y)/np.mean(dx)
+        elif y.shape[0] == 5:
+            deriv = np.sum(cent_diff_coef_five*y)/np.mean(dx)
+        else:
+            message = 'centered_diff_derivative can only use n_forcing_halo_points = 1 or 2'
+            logging.exception(message)
+            raise Exception(message)
+    else:
+        message = 'An even number of points was passed into centered_diff_derivative'
+        logging.exception(message)
+        raise Exception(message)
+
+    return deriv
+
 ########################################################################################
 #
 ########################################################################################
@@ -291,24 +386,32 @@ def find_loc_indices(loc, dir, tile, lam):
     if loc[0] < 180:
         temp_loc[0] += 360

-    #set up an array to hold the euclidean distance between the given point and every grid cell
-    eucl_dist = np.zeros((longitude.shape[0],longitude.shape[1]))
-
-    #get the Cartesian location of the given point
-    cart_loc = np.asarray(sph2cart(math.radians(temp_loc[0]), math.radians(temp_loc[1]), earth_radius))
-
-    for i in range(len(longitude)):
-        for j in range(len(longitude[i])):
-            #get the Cartesian location of all grid points
-            cart_cell = np.asarray(sph2cart(math.radians(longitude[i,j]), math.radians(latitude[i,j]), earth_radius))
-
-            #calculate the euclidean distance from the given point to the current grid cell
-            eucl_dist[i,j] = np.linalg.norm(cart_loc
- cart_cell) - + #set up an array to hold the distance between the given point and every grid cell + dist = np.zeros((longitude.shape[0],longitude.shape[1])) + if (dist_method == 'euclidean'): + + #get the Cartesian location of the given point + cart_loc = np.asarray(sph2cart(math.radians(loc[0]), math.radians(loc[1]), earth_radius)) + + for i in range(len(longitude)): + for j in range(len(longitude[i])): + #get the Cartesian location of all grid points + cart_cell = np.asarray(sph2cart(math.radians(longitude[i,j]), math.radians(latitude[i,j]), earth_radius)) + + #calculate the euclidean distance from the given point to the current grid cell + dist[i,j] = np.linalg.norm(cart_loc - cart_cell) + else: + for i in range(len(longitude)): + for j in range(len(longitude[i])): + dist[i,j] = haversine_distance(latitude[i,j],longitude[i,j],loc[1],loc[0]) #get the indices of the grid point with the minimum euclidean distance to the given point - i,j = np.unravel_index(eucl_dist.argmin(), eucl_dist.shape) + i,j = np.unravel_index(dist.argmin(), dist.shape) + + #get the direction of the closest point from the given point + theta = math.atan2(math.sin(math.radians(longitude[i,j] - loc[0]))*math.cos(math.radians(latitude[i,j])), math.cos(math.radians(loc[1]))*math.sin(math.radians(latitude[i,j])) - math.sin(math.radians(loc[1]))*math.cos(math.radians(latitude[i,j]))*math.cos(math.radians(longitude[i,j] - loc[0]))) + theta_deg = math.fmod((math.degrees(theta) + 360.0), 360.0) - return (i,j,longitude[i,j]%360.0, latitude[i,j], eucl_dist[i,j]) + return (i,j,longitude[i,j]%360.0, latitude[i,j], dist[i,j], theta_deg) ######################################################################################## # @@ -353,6 +456,147 @@ def find_lon_lat_of_indices(indices, dir, tile, lam): return (longitude[indices[1],indices[0]], latitude[indices[1],indices[0]]) +######################################################################################## +# +######################################################################################## +def find_loc_indices_UFS_history(loc, dir): + """Find the nearest neighbor UFS history file grid point given a lon/lat pair""" + #returns the indices of the nearest neighbor point in the given tile, the lon/lat of the nearest neighbor, + #and the distance (m) from the given point to the nearest neighbor grid cell + + filename_pattern = 'atmf000.nc' + + for f_name in os.listdir(dir): + if fnmatch.fnmatch(f_name, filename_pattern): + filename = f_name + if not filename: + message = 'No filenames matching the pattern {0} found in {1}'.format(filename_pattern,dir) + logging.critical(message) + raise Exception(message) + + nc_file = Dataset('{0}/{1}'.format(dir,filename)) + + longitude = np.asarray(nc_file['lon']) + latitude = np.asarray(nc_file['lat']) + + nc_file.close() + + #set up an array to hold the distance between the given point and every grid cell + dist = np.zeros((longitude.shape[0],longitude.shape[1])) + if (dist_method == 'euclidean'): + + #get the Cartesian location of the given point + cart_loc = np.asarray(sph2cart(math.radians(loc[0]), math.radians(loc[1]), earth_radius)) + + for i in range(len(longitude)): + for j in range(len(longitude[i])): + #get the Cartesian location of all grid points + cart_cell = np.asarray(sph2cart(math.radians(longitude[i,j]), math.radians(latitude[i,j]), earth_radius)) + + #calculate the euclidean distance from the given point to the current grid cell + dist[i,j] = np.linalg.norm(cart_loc - cart_cell) + else: + for i in 
range(len(longitude)): + for j in range(len(longitude[i])): + dist[i,j] = haversine_distance(latitude[i,j],longitude[i,j],loc[1],loc[0]) + #get the indices of the grid point with the minimum euclidean distance to the given point + i,j = np.unravel_index(dist.argmin(), dist.shape) + + #get the direction of the closest point from the given point + theta = math.atan2(math.sin(math.radians(longitude[i,j] - loc[0]))*math.cos(math.radians(latitude[i,j])), math.cos(math.radians(loc[1]))*math.sin(math.radians(latitude[i,j])) - math.sin(math.radians(loc[1]))*math.cos(math.radians(latitude[i,j]))*math.cos(math.radians(longitude[i,j] - loc[0]))) + theta_deg = math.fmod((math.degrees(theta) + 360.0), 360.0) + + if i < n_forcing_halo_points or i > latitude.shape[0]-1-n_forcing_halo_points: + message = 'Chosen point too close to the poles for this algorithm' + logging.critical(message) + raise Exception(message) + else: + #longitude = (n rows of latitude, 2*n columns of longitude), i index increases toward EAST; rows are circles of latitude, columns are meridians + #latitude = (n rows of latitude, 2*n columns of longitude), j index increases toward NORTH; rows are circles of latitude, columns are meridians + neighbors = np.mgrid[-n_forcing_halo_points:n_forcing_halo_points+1,-n_forcing_halo_points:n_forcing_halo_points+1] + neighbors[0,:,:] = neighbors[0,:,:] + i + neighbors[1,:,:] = neighbors[1,:,:] + j + + + # the closest point is near the western edge of the prime meridian, but longitude should be cyclic, so get neighbors on the eastern/western side of the prime meridian as necessary + neighbors[1,:,:] = neighbors[1,:,:]%longitude.shape[1] + + # if j > n_forcing_halo_points: + # if j < latitude.shape[1]-1-n_forcing_halo_points: + # neighbors[1,:,:] = neighbors[1,:,:] + j + # else: + # + # n_points_on_eastern_side = (j + n_forcing_halo_points)%latitude.shape[1] + + + + message = 'nearest history file indices (i,j): ({},{})'.format(i, j) + message += '\n lon/lat : {},{}'.format(longitude[i,j], latitude[i,j]) + message += '\n distance between history file point and desired point: {} km'.format(dist[i,j]) + message += '\n bearing from desired point to history file point: {} degrees'.format(theta_deg) + logging.debug(message) + + message = 'neighboring points' + for ii in neighbors[0,:,0]: + for jj in neighbors[1,0,:]: + message += '\n (i,j,lon,lat,dist): ({:>},{:>},{:f},{:f},{:f})'.format(ii,jj,longitude[ii,jj], latitude[ii,jj], dist[ii,jj]) + logging.debug(message) + + #calc dx, dy (n_forcing_halo_points*2 x n_forcing_halo_points*2) + dx = np.zeros((n_forcing_halo_points*2+1,n_forcing_halo_points*2)) + dy = np.zeros((n_forcing_halo_points*2,n_forcing_halo_points*2+1)) + for ii in range(2*n_forcing_halo_points+1): + for jj in range(2*n_forcing_halo_points): + current_ii_index = neighbors[0,ii,jj] + current_jj_index = neighbors[1,ii,jj] + eastern_jj_index = neighbors[1,ii,jj+1] + dx[ii,jj] = haversine_distance(latitude[current_ii_index,current_jj_index],longitude[current_ii_index,current_jj_index],latitude[current_ii_index,eastern_jj_index],longitude[current_ii_index,eastern_jj_index]) + + for ii in range(2*n_forcing_halo_points): + for jj in range(2*n_forcing_halo_points+1): + current_ii_index = neighbors[0,ii,jj] + northern_ii_index = neighbors[0,ii+1,jj] + current_jj_index = neighbors[1,ii,jj] + dy[ii,jj] = 
haversine_distance(latitude[current_ii_index,current_jj_index],longitude[current_ii_index,current_jj_index],latitude[northern_ii_index,current_jj_index],longitude[northern_ii_index,current_jj_index])
+
+    return (i,j,longitude[i,j], latitude[i,j], dist[i,j], theta_deg, neighbors, dx, dy)
+
+########################################################################################
+#
+########################################################################################
+def find_loc_indices_UFS_IC(loc, dir, lam, tile, indices):
+
+    #find tile containing the point using the supergrid if no tile is specified
+    #if not tile and not lam:
+    if not tile:
+        tile = int(find_tile(loc, dir, lam))
+        if tile < 0:
+            message = 'No tile was found for location {0}'.format(loc)
+            logging.critical(message)
+            raise Exception(message)
+        message = 'Tile found: {0}'.format(tile)
+        logging.debug(message)
+
+    #find index of closest point in the tile if indices are not specified
+    theta = 0.0
+    dist_min = missing_value #only computed below when the closest point is searched for (not when indices are supplied)
+    if not indices:
+        (tile_j, tile_i, point_lon, point_lat, dist_min, theta) = find_loc_indices(loc, dir, tile, lam)
+        message = 'nearest IC file indices in tile {} - (i,j): ({},{})'.format(tile, tile_i, tile_j)
+        message += '\n lon/lat : {},{}'.format(point_lon, point_lat)
+        message += '\n distance between history file point and desired point: {} km'.format(dist_min)
+        message += '\n bearing from desired point to history file point: {} degrees'.format(theta)
+        logging.debug(message)
+    else:
+        tile_i = indices[0]
+        tile_j = indices[1]
+        #still need to grab the lon/lat if the tile and indices are supplied
+        (point_lon, point_lat) = find_lon_lat_of_indices(indices, dir, tile, lam)
+
+        message = 'This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat)
+        logging.debug(message)
+
+    return (tile_i, tile_j, tile, point_lon, point_lat, dist_min, theta)
+
 ########################################################################################
 #
 ########################################################################################
@@ -392,20 +636,54 @@ def get_initial_lon_lat_grid(dir, tile, lam):
     nc_file.close()

-    return (longitude, latitude)
+    return (longitude, latitude)

-########################################################################################
-#
-########################################################################################
-def sph2cart(az, el, r):
-    """Calculate the Cartesian coordiates from spherical coordinates"""
+def compare_hist_and_IC_points(location, hist_lon, hist_lat, IC_lon, IC_lat, hist_dist, angle_to_hist_point, IC_dist, angle_to_IC_point):
+    #determine distance and angle from IC point to hist point
+    dist = haversine_distance(IC_lat,IC_lon,hist_lat,hist_lon)
+    theta = math.atan2(math.sin(math.radians(hist_lon - IC_lon))*math.cos(math.radians(hist_lat)), math.cos(math.radians(IC_lat))*math.sin(math.radians(hist_lat)) - math.sin(math.radians(IC_lat))*math.cos(math.radians(hist_lat))*math.cos(math.radians(hist_lon - IC_lon)))
+    theta_deg = math.fmod((math.degrees(theta) + 360.0), 360.0)

-    rcos_theta = r * np.cos(el)
-    x = rcos_theta * np.cos(az)
-    y = rcos_theta * np.sin(az)
-    z = r * np.sin(el)
+    message = 'Location summary'
+    message += '\n The location as entered is {} deg E {} deg N'.format(location[0],location[1])
+    message += '\n The closest point in the UFS history file is {} deg E {} deg N, or {} km away at a bearing of {} deg'.format(hist_lon, hist_lat, hist_dist, angle_to_hist_point)
+    message += '\n The closest point in the UFS IC files (native grid) is {} deg E {} deg N, or {} km away at a bearing of {} deg'.format(IC_lon, IC_lat, IC_dist, angle_to_IC_point)
+    message += '\n Therefore, the history point (used to define the SCM grid column) is {} km away from the closest UFS native grid point at a bearing of {} deg'.format(dist, theta_deg)
+    logging.info(message)
+
+    return
+
+def check_IC_hist_surface_compatibility(dir, i, j, surface_data, lam, old_chgres, tile):
+
+    # Determine UFS history file format (tiled/quilted)
+    if lam:
+        filename_pattern = '*sfcf000.tile{}.nc'.format(tile)
+    else:
+        filename_pattern = '*sfcf000.nc'
+
+    for f_name in os.listdir(dir):
+        if fnmatch.fnmatch(f_name, filename_pattern):
+            filename = f_name
+    if not filename:
+        message = 'No filenames matching the pattern {0} found in {1}'.format(filename_pattern,dir)
+        logging.critical(message)
+        raise Exception(message)
+
+    nc_file = Dataset('{0}/{1}'.format(dir,filename))

-    return (x, y, z)
+    slmsk_hist = read_NetCDF_surface_var(nc_file, "land", j, i, old_chgres, 0)
+
+    if (slmsk_hist != surface_data["slmsk"]):
+        message = 'There is a mismatch between the UFS IC file and history file land masks: IC={}, history={}'.format(surface_data["slmsk"],slmsk_hist)
+        logging.critical(message)
+        raise Exception(message)
+    else:
+        message = 'UFS IC file and history file land masks match: IC={}, history={}'.format(surface_data["slmsk"],slmsk_hist)
+        logging.info(message)
+
+    nc_file.close()
+
+    return

 ########################################################################################
 #
@@ -456,19 +734,82 @@ def read_NetCDF_surface_var(nc_file, var_name, i, j, old_chgres, vert_dim):
         var = missing_value
     return var

+def get_IC_data_from_UFS_history(dir, i, j, lam, tile):
+
+    # Determine UFS history file format (tiled/quilted)
+    if lam:
+        filename_pattern = '*atmf000.tile{}.nc'.format(tile)
+    else:
+        filename_pattern = '*atmf000.nc'
+
+    for f_name in os.listdir(dir):
+        if fnmatch.fnmatch(f_name, filename_pattern):
+            filename = f_name
+    if not filename:
+        message = 'No filenames matching the pattern {0} found in {1}'.format(filename_pattern,dir)
+        logging.critical(message)
+        raise Exception(message)
+
+    nc_file = Dataset('{0}/{1}'.format(dir,filename))
+
+    pfull = nc_file['pfull'][::-1]*100.0
+    phalf = nc_file['phalf'][::-1]*100.0
+    nlevs = len(pfull)
+    time = nc_file['time'][0] #model hours since being initialized
+    lon = nc_file['lon'][i,j]
+    lat = nc_file['lat'][i,j]
+    logging.debug('Grabbing data from history file point with lon/lat: {}/{}'.format(lon,lat))
+    delz = -1*nc_file['delz'][0,::-1,i,j] #derive zh
+    ugrd = nc_file['ugrd'][0,::-1,i,j]
+    vgrd = nc_file['vgrd'][0,::-1,i,j]
+    spfh = nc_file['spfh'][0,::-1,i,j]
+    o3mr = nc_file['o3mr'][0,::-1,i,j]
+    clwmr = nc_file['clwmr'][0,::-1,i,j]
+    icmr = nc_file['icmr'][0,::-1,i,j]
+    pressfc = nc_file['pressfc'][0,i,j]
+    tmp = nc_file['tmp'][0,::-1,i,j]
+
+    delz = np.asarray(delz)
+    zh = np.zeros(nlevs)
+    zh[0] = 0.5*delz[0]
+    for k in range(1,nlevs):
+        zh[k] = zh[k-1] + 0.5*delz[k-1] + 0.5*delz[k]
+
+    dz = np.zeros(nlevs-1)
+    for k in range(nlevs-1):
+        dz[k] = zh[k+1]-zh[k]
+
+    state = {
+        "nlevs": nlevs,
+        "zh": zh,
+        "dz": dz,
+        "ua": np.asarray(ugrd),
+        "va": np.asarray(vgrd),
+        "qv": np.asarray(spfh),
+        "o3": np.asarray(o3mr),
+        "ql": np.asarray(clwmr),
+        "qi": np.asarray(icmr),
+        "ps": pressfc,
+        "ta": np.asarray(tmp),
+        "pa": np.asarray(pfull),
+        "pa_i": np.asarray(phalf)
+    }
+
+    nc_file.close()
+
+    return state
+
 ########################################################################################
 #
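+# NOTE: two pathways exist for building the initial state: get_IC_data_from_UFS_history()
+# above reads the t=0 column directly from a UFS history file, while
+# get_IC_data_from_UFS_ICs() below (renamed from get_UFS_IC_data) reads the
+# chgres-generated UFS initial-condition files.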
######################################################################################## -def get_UFS_IC_data(dir, grid_dir, tile, i, j, old_chgres, lam): +def get_IC_data_from_UFS_ICs(dir, grid_dir, tile, i, j, old_chgres, lam): """Get the state, surface, and orographic data for the given tile and indices""" #returns dictionaries with the data vgrid_data = get_UFS_vgrid_data(grid_dir) #only needed for ak, bk to calculate pressure (state_data, error_msg) = get_UFS_state_data(vgrid_data, dir, tile, i, j, old_chgres, lam) - surface_data = get_UFS_surface_data(dir, tile, i, j, old_chgres, lam) - oro_data = get_UFS_oro_data(dir, tile, i, j, lam) - return (state_data, surface_data, oro_data, error_msg) + return (state_data, error_msg) ######################################################################################## # @@ -1351,7 +1692,7 @@ def get_UFS_oro_data(dir, tile, i, j, lam): if lam: nc_file = Dataset('{0}/{1}'.format(dir,'oro_data.nc')) else: - filename_pattern = 'oro*.tile{0}.nc'.format(tile) + filename_pattern = 'oro_data.tile{0}.nc'.format(tile) for f_name in os.listdir(dir): if fnmatch.fnmatch(f_name, filename_pattern): filename = f_name @@ -1488,11 +1829,481 @@ def search_in_dict(listin,name): if dictionary["name"] == name: return count +def expand_neighbors_for_geostrophic_balance(dir, hist_i, hist_j, neighbors, dx, dy, nlevs, lam): + + # Determine UFS history file format (tiled/quilted) + if lam: + atm_ftag = 'atmf*.tile{0}.nc'.format(tile) + sfc_ftag = 'sfcf*.tile{0}.nc'.format(tile) + else: + atm_ftag = '*atmf*.nc' + sfc_ftag = '*sfcf*.nc' + + # Get list of UFS history files with 3D ATMospheric state variables. + atm_filenames = [] + for f_name in os.listdir(dir): + if fnmatch.fnmatch(f_name, atm_ftag): + atm_filenames.append(f_name) + if not atm_filenames: + message = 'No filenames matching the pattern {0} found in {1}'. 
\
+                  format(atm_ftag,dir)
+        logging.critical(message)
+        raise Exception(message)
+    atm_filenames = sorted(atm_filenames)
+    n_filesA = len(atm_filenames)
+
+    npts = 2*n_forcing_halo_points+1
+
+    lon = np.zeros((n_filesA,npts,npts))
+    lat = np.zeros((n_filesA,npts,npts))
+    time = np.zeros((n_filesA))
+    u_wind = np.zeros((n_filesA,nlevs,npts,npts))
+    v_wind = np.zeros((n_filesA,nlevs,npts,npts))
+    wind_speed = np.zeros((n_filesA,nlevs))
+    wind_speed_time_mean = np.zeros((nlevs))
+
+    nc_file = Dataset('{0}/{1}'.format(dir,atm_filenames[0]))
+    nc_file.set_always_mask(False)
+    longitude_all = np.asarray(nc_file['lon'])
+    latitude_all = np.asarray(nc_file['lat'])
+    nc_file.close()
+
+    #determine average wind speeds in the existing forcing stencil
+
+    # Read in 3D UFS history files
+    for count, filename in enumerate(atm_filenames, start=1):
+        nc_file = Dataset('{0}/{1}'.format(dir,filename))
+        nc_file.set_always_mask(False)
+
+        time[count-1] = nc_file['time'][0]
+
+        for ii in range(2*n_forcing_halo_points+1):
+            for jj in range(2*n_forcing_halo_points+1):
+                current_ii_index = neighbors[0,ii,jj]
+                current_jj_index = neighbors[1,ii,jj]
+
+                lon[count-1,ii,jj] = nc_file['lon'][current_ii_index,current_jj_index]
+                lat[count-1,ii,jj] = nc_file['lat'][current_ii_index,current_jj_index]
+                u_wind[count-1,:,ii,jj] = nc_file['ugrd'][0,::-1,current_ii_index,current_jj_index]
+                v_wind[count-1,:,ii,jj] = nc_file['vgrd'][0,::-1,current_ii_index,current_jj_index]
+
+        for k in range(nlevs):
+            #speed of the stencil-mean wind at each level
+            wind_speed[count-1,k] = np.sqrt(np.mean(u_wind[count-1,k,:,:])**2 + np.mean(v_wind[count-1,k,:,:])**2)
+
+        nc_file.close()
+
+    critical_geos_scale_npts = np.zeros((nlevs))
+    mean_lat = np.mean(lat[:,:,:])
+    coriolis = 2.0*Omega*np.sin(mean_lat*deg_to_rad)
+    grid_length_scale = np.sqrt(np.mean(dx)**2 + np.mean(dy)**2)*1.0E3
+    for k in range(nlevs):
+        wind_speed_time_mean[k] = np.mean(wind_speed[:,k])
+        critical_geos_scale = wind_speed_time_mean[k]/(coriolis*critical_Rossby)
+        critical_geos_scale_npts[k] = critical_geos_scale/grid_length_scale
+    floor = np.floor(np.mean(critical_geos_scale_npts))
+    ceiling = np.ceil(np.mean(critical_geos_scale_npts))
+    if (floor%2 == 1):
+        odd_pts = int(floor)
+    elif (ceiling%2 == 1):
+        odd_pts = int(ceiling)
+    else:
+        odd_pts = int(ceiling) + 1 #the mean is an even integer; take the next odd number of points
+
+    message = 'Need {} points for approximate geostrophic balance assuming a Rossby number of {}'.format(odd_pts, critical_Rossby)
+    logging.debug(message)
+
+    n_geo_halo_points = int((odd_pts - 1)/2)
+
+    if hist_i < n_geo_halo_points or hist_i > latitude_all.shape[0]-1-n_geo_halo_points:
+        message = 'There are not enough neighboring grid cells between the chosen point and the poles for calculation of geostrophic winds'
+        logging.critical(message)
+        raise Exception(message)
+    else:
+        #longitude = (n rows of latitude, 2*n columns of longitude), i index increases toward EAST; rows are circles of latitude, columns are meridians
+        #latitude = (n rows of latitude, 2*n columns of longitude), j index increases toward NORTH; rows are circles of latitude, columns are meridians
+        new_neighbors = np.mgrid[-n_geo_halo_points:n_geo_halo_points+1,-n_geo_halo_points:n_geo_halo_points+1]
+        new_neighbors[0,:,:] = new_neighbors[0,:,:] + hist_i
+        new_neighbors[1,:,:] = new_neighbors[1,:,:] + hist_j
+
+
+        # the closest point may be near the prime meridian, but longitude should be cyclic, so get neighbors on the eastern/western side of the prime meridian as necessary
+        new_neighbors[1,:,:] = new_neighbors[1,:,:]%longitude_all.shape[1]
+
+        new_neighbors = new_neighbors.astype(int)
+    #message = 'orig
neighbors',neighbors,latitude_all[neighbors[0,:,0],neighbors[1,0,:]],longitude_all[neighbors[0,:,0],neighbors[1,0,:]] + #logging.debug(message) + #message = 'new neighbors',new_neighbors,latitude_all[new_neighbors[0,:,0],new_neighbors[1,0,:]],longitude_all[new_neighbors[0,:,0],new_neighbors[1,0,:]] + #logging.debug(message) + + #calc dx, dy (n_forcing_halo_points*2 x n_forcing_halo_points*2) + new_dx = np.zeros((n_geo_halo_points*2+1,n_geo_halo_points*2)) + new_dy = np.zeros((n_geo_halo_points*2,n_geo_halo_points*2+1)) + for ii in range(2*n_geo_halo_points+1): + for jj in range(2*n_geo_halo_points): + current_ii_index = new_neighbors[0,ii,jj] + current_jj_index = new_neighbors[1,ii,jj] + eastern_jj_index = new_neighbors[1,ii,jj+1] + new_dx[ii,jj] = haversine_distance(latitude_all[current_ii_index,current_jj_index],longitude_all[current_ii_index,current_jj_index],latitude_all[current_ii_index,eastern_jj_index],longitude_all[current_ii_index,eastern_jj_index]) + + for ii in range(2*n_geo_halo_points): + for jj in range(2*n_geo_halo_points+1): + current_ii_index = new_neighbors[0,ii,jj] + northern_ii_index = new_neighbors[0,ii+1,jj] + current_jj_index = new_neighbors[1,ii,jj] + new_dy[ii,jj] = haversine_distance(latitude_all[current_ii_index,current_jj_index],longitude_all[current_ii_index,current_jj_index],latitude_all[northern_ii_index,current_jj_index],longitude_all[northern_ii_index,current_jj_index]) + + return (n_geo_halo_points, new_neighbors, new_dx, new_dy) + +def get_UFS_forcing_data_advective_tendency(dir, i, j, tile, neighbors, dx, dy, nlevs, lam, save_comp_data, vertical_method, geos_wind_forcing, wind_nudge, geo_pts, geo_neighbors, geo_dx, geo_dy, use_actual_geos_wind): + + # Determine UFS history file format (tiled/quilted) + if lam: + atm_ftag = 'atmf*.tile{0}.nc'.format(tile) + sfc_ftag = 'sfcf*.tile{0}.nc'.format(tile) + else: + atm_ftag = '*atmf*.nc' + sfc_ftag = '*sfcf*.nc' + + # Get list of UFS history files with 3D ATMospheric state variables. + atm_filenames = [] + for f_name in os.listdir(dir): + if fnmatch.fnmatch(f_name, atm_ftag): + atm_filenames.append(f_name) + if not atm_filenames: + message = 'No filenames matching the pattern {0} found in {1}'. 
\ + format(atm_ftag,dir) + logging.critical(message) + raise Exception(message) + atm_filenames = sorted(atm_filenames) + n_filesA = len(atm_filenames) + + npts = 2*n_forcing_halo_points+1 + npts_geo = 2*geo_pts+1 + + lon = np.zeros((n_filesA,npts,npts)) + lat = np.zeros((n_filesA,npts,npts)) + time = np.zeros((n_filesA)) + u_wind = np.zeros((n_filesA,nlevs,npts,npts)) + v_wind = np.zeros((n_filesA,nlevs,npts,npts)) + w_wind = np.zeros((n_filesA,nlevs,npts,npts)) + temp = np.zeros((n_filesA,nlevs,npts,npts)) + qv = np.zeros((n_filesA,nlevs,npts,npts)) + pres = np.zeros((n_filesA,nlevs)) + presi = np.zeros((n_filesA,nlevs+1)) + pressfc = np.zeros((n_filesA,npts,npts)) + delz = np.zeros((n_filesA,nlevs,npts,npts)) + if(geos_wind_forcing and use_actual_geos_wind): + temp_geo = np.zeros((n_filesA,nlevs,npts_geo,npts_geo)) + qv_geo = np.zeros((n_filesA,nlevs,npts_geo,npts_geo)) + hgtsfc = np.zeros((n_filesA,npts,npts)) + + # Read in 3D UFS history files + for count, filename in enumerate(atm_filenames, start=1): + nc_file = Dataset('{0}/{1}'.format(dir,filename)) + nc_file.set_always_mask(False) + + time[count-1] = nc_file['time'][0] + + for ii in range(2*n_forcing_halo_points+1): + for jj in range(2*n_forcing_halo_points+1): + current_ii_index = neighbors[0,ii,jj] + current_jj_index = neighbors[1,ii,jj] + + lon[count-1,ii,jj] = nc_file['lon'][current_ii_index,current_jj_index] + lat[count-1,ii,jj] = nc_file['lat'][current_ii_index,current_jj_index] + u_wind[count-1,:,ii,jj] = nc_file['ugrd'][0,::-1,current_ii_index,current_jj_index] + v_wind[count-1,:,ii,jj] = nc_file['vgrd'][0,::-1,current_ii_index,current_jj_index] + w_wind[count-1,:,ii,jj] = nc_file['dzdt'][0,::-1,current_ii_index,current_jj_index] + temp[count-1,:,ii,jj] = nc_file['tmp'][0,::-1,current_ii_index,current_jj_index] + qv[count-1,:,ii,jj] = nc_file['spfh'][0,::-1,current_ii_index,current_jj_index] + pressfc[count-1,ii,jj] = nc_file['pressfc'][0,current_ii_index,current_jj_index] + delz[count-1,:,ii,jj] = -1*nc_file['delz'][0,::-1,current_ii_index,current_jj_index] #derive zh + + if (geos_wind_forcing and use_actual_geos_wind): + for ii in range(npts_geo): + for jj in range(npts_geo): + current_ii_index = geo_neighbors[0,ii,jj] + current_jj_index = geo_neighbors[1,ii,jj] + temp_geo[count-1,:,ii,jj] = nc_file['tmp'][0,::-1,current_ii_index,current_jj_index] + qv_geo[count-1,:,ii,jj] = nc_file['spfh'][0,::-1,current_ii_index,current_jj_index] + hgtsfc[count-1,ii,jj] = nc_file['hgtsfc'][0,current_ii_index,current_jj_index] + + pres[count-1,:] = nc_file['pfull'][::-1]*100.0 + presi[count-1,:] = nc_file['phalf'][::-1]*100.0 + + #check for poor time resolution (e.g. 
if mean number of grid points traversed by the wind between data intervals is much larger than the number of grid points used for calculating the advective terms)
+    # t=0
+    # for k in range(u_wind.shape[1]):
+    #     mean_u = np.mean(np.sqrt(u_wind[t,k,:,:]*u_wind[t,k,:,:]))
+    #     mean_dist = 43200*mean_u*1.0E-3
+    #     print("k, mean_dist, dx, grid points traversed in timestep: ",k, mean_dist, np.mean(dx), mean_dist/np.mean(dx))
+
+    center = n_forcing_halo_points
+    smoothed_u = np.zeros((n_filesA,nlevs,npts,npts))
+    smoothed_v = np.zeros((n_filesA,nlevs,npts,npts))
+    smoothed_w = np.zeros((n_filesA,nlevs))
+    smoothed_T = np.zeros((n_filesA,nlevs,npts,npts))
+    smoothed_qv = np.zeros((n_filesA,nlevs,npts,npts))
+    h_advec_u = np.zeros((n_filesA,nlevs))
+    h_advec_v = np.zeros((n_filesA,nlevs))
+    h_advec_T = np.zeros((n_filesA,nlevs))
+    h_advec_qv = np.zeros((n_filesA,nlevs))
+    v_advec_u = np.zeros((n_filesA,nlevs))
+    v_advec_v = np.zeros((n_filesA,nlevs))
+    v_advec_T = np.zeros((n_filesA,nlevs))
+    v_advec_qv = np.zeros((n_filesA,nlevs))
+    t_advec_u = np.zeros((n_filesA,nlevs))
+    t_advec_v = np.zeros((n_filesA,nlevs))
+    t_advec_T = np.zeros((n_filesA,nlevs))
+    t_advec_qv = np.zeros((n_filesA,nlevs))
+    u_g = np.zeros((n_filesA,nlevs))
+    v_g = np.zeros((n_filesA,nlevs))
+    if(geos_wind_forcing and use_actual_geos_wind):
+        phii = np.zeros((n_filesA,nlevs+1,npts_geo,npts_geo))
+        phil = np.zeros((n_filesA,nlevs,npts_geo,npts_geo))
+    for t in range(n_filesA):
+        #velocities get very noisy in the UFS above the tropopause; define a linear return-to-zero profile above some pressure
+        k_p_top = np.where(pres[t,:] <= top_pres_for_forcing)[0][0]
+
+        #smooth velocity profile
+        n_smooth_points = 5
+        for ii in range(npts):
+            for jj in range(npts):
+                #the last argument defines the averaging "window" in grid layers; greater numbers result in greater smoothing
+                smoothed_u[t,:,ii,jj] = moving_average(u_wind[t,:,ii,jj], n_smooth_points)
+                smoothed_v[t,:,ii,jj] = moving_average(v_wind[t,:,ii,jj], n_smooth_points)
+                smoothed_T[t,:,ii,jj] = moving_average(temp [t,:,ii,jj], n_smooth_points)
+                smoothed_qv[t,:,ii,jj] = moving_average(qv [t,:,ii,jj], n_smooth_points)
+
+                #velocities get very noisy in the UFS above the tropopause; define a linear return-to-zero profile above some pressure
+                for k in range(k_p_top+1,nlevs):
+                    lifrac = pres[t,k]/pres[t,k_p_top]
+                    smoothed_u[t,k,ii,jj] = lifrac*smoothed_u[t,k_p_top,ii,jj]
+                    smoothed_v[t,k,ii,jj] = lifrac*smoothed_v[t,k_p_top,ii,jj]
+
+        smoothed_w[t,:] = moving_average(w_wind[t,:,center,center], n_smooth_points)
+
+        for k in range(k_p_top+1,nlevs):
+            lifrac = pres[t,k]/pres[t,k_p_top]
+            smoothed_w[t,k] = lifrac*smoothed_w[t,k_p_top]
+
+        for k in range(nlevs):
+            dTdx = centered_diff_derivative(smoothed_T[t,k,center,:],dx[center,:]*1.0E3)
+            dTdy = centered_diff_derivative(smoothed_T[t,k,:,center],dy[:,center]*1.0E3)
+            dqdx = centered_diff_derivative(smoothed_qv[t,k,center,:],dx[center,:]*1.0E3)
+            dqdy = centered_diff_derivative(smoothed_qv[t,k,:,center],dy[:,center]*1.0E3)
+            dudx = centered_diff_derivative(smoothed_u[t,k,center,:],dx[center,:]*1.0E3)
+            dudy = centered_diff_derivative(smoothed_u[t,k,:,center],dy[:,center]*1.0E3)
+            dvdx = centered_diff_derivative(smoothed_v[t,k,center,:],dx[center,:]*1.0E3)
+            dvdy = centered_diff_derivative(smoothed_v[t,k,:,center],dy[:,center]*1.0E3)
+            mean_u = np.mean(smoothed_u[t,k,center,center-1:center+1+1])
+            mean_v = np.mean(smoothed_v[t,k,center-1:center+1+1,center])
+
+            h_advec_T[t,k] = -mean_u*dTdx - mean_v*dTdy #K s-1
+            h_advec_qv[t,k] =
-mean_u*dqdx - mean_v*dqdy #kg kg-1 s-1 + h_advec_u[t,k] = -mean_u*dudx - mean_v*dudy #m s-1 s-1 + h_advec_v[t,k] = -mean_u*dvdx - mean_v*dvdy #m s-1 s-1 + + if (vertical_method == 1): + v_advec_u[t,0] = 0.0 + v_advec_u[t,nlevs-1] = 0.0 + v_advec_v[t,0] = 0.0 + v_advec_v[t,nlevs-1] = 0.0 + v_advec_T[t,0] = 0.0 + v_advec_T[t,nlevs-1] = 0.0 + v_advec_qv[t,0] = 0.0 + v_advec_qv[t,nlevs-1] = 0.0 + for k in range(1, nlevs-1): + w_asc = max(smoothed_w[t,k], 0.0) + w_des = min(smoothed_w[t,k], 0.0) + #noisy wind profiles necessitate using smoothed profiles; this doesn't appear to be needed for T and qv + gradient_T_asc = (temp[t,k,center,center] - temp[t,k-1,center,center])/(pres[t,k]-pres[t,k-1]) + gradient_T_des = (temp[t,k+1,center,center] - temp[t,k,center,center])/(pres[t,k+1]-pres[t,k]) + #gradient_T_asc = (smoothed_T[t,k,center,center] - smoothed_T[t,k-1,center,center])/(pres[t,k]-pres[t,k-1]) + #gradient_T_des = (smoothed_T[t,k+1,center,center] - smoothed_T[t,k,center,center])/(pres[t,k+1]-pres[t,k]) + gradient_qv_asc = (qv[t,k,center,center] - qv[t,k-1,center,center])/(pres[t,k]-pres[t,k-1]) + gradient_qv_des = (qv[t,k+1,center,center] - qv[t,k,center,center])/(pres[t,k+1]-pres[t,k]) + #gradient_qv_asc = (smoothed_qv[t,k,center,center] - smoothed_qv[t,k-1,center,center])/(pres[t,k]-pres[t,k-1]) + #gradient_qv_des = (smoothed_qv[t,k+1,center,center] - smoothed_qv[t,k,center,center])/(pres[t,k+1]-pres[t,k]) + #gradient_u_asc = (u_wind[t,k,center,center] - u_wind[t,k-1,center,center])/(pres[t,k]-pres[t,k-1]) + #gradient_u_des = (u_wind[t,k+1,center,center] - u_wind[t,k,center,center])/(pres[t,k+1]-pres[t,k]) + gradient_u_asc = (smoothed_u[t,k,center,center] - smoothed_u[t,k-1,center,center])/(pres[t,k]-pres[t,k-1]) + gradient_u_des = (smoothed_u[t,k+1,center,center] - smoothed_u[t,k,center,center])/(pres[t,k+1]-pres[t,k]) + #gradient_v_asc = (v_wind[t,k,center,center] - v_wind[t,k-1,center,center])/(pres[t,k]-pres[t,k-1]) + #gradient_v_des = (v_wind[t,k+1,center,center] - v_wind[t,k,center,center])/(pres[t,k+1]-pres[t,k]) + gradient_v_asc = (smoothed_v[t,k,center,center] - smoothed_v[t,k-1,center,center])/(pres[t,k]-pres[t,k-1]) + gradient_v_des = (smoothed_v[t,k+1,center,center] - smoothed_v[t,k,center,center])/(pres[t,k+1]-pres[t,k]) + + rho = pres[t,k]/(rdgas*temp[t,k,center,center]) + adiabatic_exp_comp_term = -(w_asc + w_des)*grav/cp + v_advec_u[t,k] = rho*grav*(w_asc*gradient_u_asc + w_des*gradient_u_des) + v_advec_v[t,k] = rho*grav*(w_asc*gradient_v_asc + w_des*gradient_v_des) + v_advec_T[t,k] = rho*grav*(w_asc*gradient_T_asc + w_des*gradient_T_des) + adiabatic_exp_comp_term + v_advec_qv[t,k] = rho*grav*(w_asc*gradient_qv_asc + w_des*gradient_qv_des) + + if (geos_wind_forcing): + coriolis = 2.0*Omega*np.sin(lat[t,center,center]*deg_to_rad) + + if (use_actual_geos_wind): + #calc geopotential at interface levels, starting from surface, convert to full pressure levels + for ii in range(npts_geo): + for jj in range(npts_geo): + if (use_oro_for_geos_wind): + phii[t,0,ii,jj] = hgtsfc[t,ii,jj]*grav + else: + phii[t,0,ii,jj] = 0.0 #don't need to include orography -- geostrophic winds should assume geopotential over the geoid + for k in range(1,nlevs+1): + phii[t,k,ii,jj] = phii[t,k-1,ii,jj] - rdgas*temp_geo[t,k-1,ii,jj]*(1.0 + zvir*qv_geo[t,k-1,ii,jj])*np.log(presi[t,k]/presi[t,k-1]) + + for k in range(0,nlevs): + phil[t,k,ii,jj] = 0.5*(phii[t,k,ii,jj] + phii[t,k+1,ii,jj]) + + for k in range(1,nlevs): + dphidx = np.zeros((npts_geo,npts_geo)) + dphidy = np.zeros((npts_geo,npts_geo)) + #valid 
points: + #dphidx = np.zeros((npts_geo,npts_geo-2)) + #dphidy = np.zeros((npts_geo-2,npts_geo)) + for ii in range(0,npts_geo): + for jj in range(1,npts_geo-1): + dphidx[ii,jj] = centered_diff_derivative(phil[t,k,ii,jj-1:jj+2],geo_dx[ii,jj-1:jj+1]*1.0E3) + mean_dphidx = np.mean(dphidx[0:npts_geo,1:npts_geo-1]) + + for ii in range(1,npts_geo-1): + for jj in range(0,npts_geo): + dphidy[ii,jj] = centered_diff_derivative(phil[t,k,ii-1:ii+2,jj],geo_dy[ii-1:ii+1,jj]*1.0E3) + mean_dphidy = np.mean(dphidy[1:npts_geo-1,0:npts_geo]) + + u_g[t,k] = -mean_dphidy/coriolis + v_g[t,k] = mean_dphidx/coriolis + else: + for k in range(0,nlevs): + u_g[t,k] = np.mean(u_wind[t,k,:,:]) + v_g[t,k] = np.mean(v_wind[t,k,:,:]) + k_pbl_top = np.where(pres[t,:] <= PBL_top_for_geos)[0][0] + u_g[t,0] = 0.0 + v_g[t,0] = 0.0 + for k in range(1,k_pbl_top): + lifrac = (pres[t,k] - pres[t,0])/(pres[t,k_pbl_top] - pres[t,0]) + u_g[t,k] = lifrac*u_g[t,k_pbl_top] + v_g[t,k] = lifrac*v_g[t,k_pbl_top] + + + if (vertical_method == 1): + t_advec_T[:,:] = h_advec_T[:,:] + v_advec_T[:,:] + t_advec_qv[:,:] = h_advec_qv[:,:] + v_advec_qv[:,:] + t_advec_u[:,:] = h_advec_u[:,:] + v_advec_u[:,:] + t_advec_v[:,:] = h_advec_v[:,:] + v_advec_v[:,:] + else: + t_advec_T[:,:] = h_advec_T[:,:] + t_advec_qv[:,:] = h_advec_qv[:,:] + t_advec_u[:,:] = h_advec_u[:,:] + t_advec_v[:,:] = h_advec_v[:,:] + + if (save_comp_data): + # Read in 2D UFS history files + phystends = [{"name":"dtend_temp_lw"}, {"name":"dtend_temp_sw"}, {"name":"dtend_temp_pbl"}, {"name":"dtend_temp_deepcnv"},\ + {"name":"dtend_temp_shalcnv"},{"name":"dtend_temp_mp"}, {"name":"dtend_temp_orogwd"},{"name":"dtend_temp_phys"}, \ + {"name":"dtend_u_pbl"}, {"name":"dtend_v_pbl"}, {"name":"dtend_u_orogwd"}, {"name":"dtend_v_orogwd"}, \ + {"name":"dtend_u_deepcnv"}, {"name":"dtend_v_deepcnv"},{"name":"dtend_u_shalcnv"}, {"name":"dtend_v_shalcnv"}, \ + {"name":"dtend_u_phys"}, {"name":"dtend_v_phys"}, {"name":"dtend_qv_pbl"}, {"name":"dtend_qv_deepcnv"}, \ + {"name":"dtend_qv_shalcnv"}, {"name":"dtend_qv_mp"}, {"name":"dtend_qv_phys"}, {"name":"dtend_poop"}] + for phystend in phystends: phystend["values"] = [] + + # Variables to be added to "UFS comparison file" + vars2d =[{"name":"spfh2m"}, {"name":"tmp2m"}, {"name":"dswrf_ave"}, \ + {"name":"ulwrf_ave"}, {"name":"lhtfl_ave"}, {"name":"shtfl_ave"}, \ + {"name":"dswrf"}, {"name":"ulwrf"}, {"name":"lhtfl"}, \ + {"name":"shtfl"}, {"name":"pwat"}, {"name":"vgrd10m"}, \ + {"name":"ugrd10m"}] + for var2d in vars2d: var2d["values"] = [] + + # Get list of UFS history files with 2D fields. + sfc_filenames = [] + for f_name in os.listdir(dir): + if fnmatch.fnmatch(f_name, sfc_ftag): + sfc_filenames.append(f_name) + if not sfc_filenames: + message = 'No filenames matching the pattern {0} found in {1}'. 
\ + format(sfc_ftag,dir) + logging.critical(message) + raise Exception(message) + sfc_filenames = sorted(sfc_filenames) + n_filesS = len(sfc_filenames) + + if (n_filesS == n_filesA): + n_files = n_filesA + else: + message = 'Number of UFS 2D/3D history files is inconsistent' + logging.critical(message) + raise Exception(message) + + center_ii_index = neighbors[0,center,center] + center_jj_index = neighbors[1,center,center] + for count, filename in enumerate(sfc_filenames, start=1): + nc_file = Dataset('{0}/{1}'.format(dir,filename)) + nc_file.set_always_mask(False) + + for var2d in vars2d: + data = nc_file[var2d["name"]][0,center_ii_index,center_jj_index] + var2d["values"].append(data) + var2d["units"] = nc_file[var2d["name"]].getncattr(name="units") + var2d["long_name"] = nc_file[var2d["name"]].getncattr(name="long_name") + + for phystend in phystends: + if phystend["name"] in nc_file.variables.keys(): + data = nc_file[phystend["name"]][0,::-1,center_ii_index,center_jj_index] + phystend["values"].append(data[:]) + phystend["units"] = nc_file[phystend["name"]].getncattr(name="units") + phystend["long_name"] = nc_file[phystend["name"]].getncattr(name="long_name") + phystend["missing"] = False + else: + phystend["missing"] = True + + nc_file.close() + + # Convert to numpy arrays + for var2d in vars2d: + var2d["values"] = np.asarray(var2d["values"]) + for phystend in phystends: + if (not phystend["missing"]): + phystend["values"] = np.asarray(phystend["values"]) + sec_in_hr = 3600. + time[0] = 0. + forcing = { + "time": time*sec_in_hr, + "wa": smoothed_w, + "ps_forc": pressfc[:,center,center], + "pa_forc": pres, + "tnta_adv": t_advec_T[:,:], + "tnqv_adv": t_advec_qv[:,:], + "tnua_adv": t_advec_u[:,:], + "tnva_adv": t_advec_v[:,:], + "ug" : u_g[:,:], + "vg" : v_g[:,:], + "ua_nud" : u_wind[:,:,center,center], + "va_nud" : v_wind[:,:,center,center] + } + + if (save_comp_data): + comp_data = { + "time": time*sec_in_hr, + "pa" : pres, + "ta" : temp [:,:,center,center], + "qv" : qv [:,:,center,center], + "ua" : u_wind[:,:,center,center], + "va" : v_wind[:,:,center,center], + "vars2d":vars2d, + "phystends":phystends} + else: + comp_data = {} + + + return (forcing, comp_data) + ######################################################################################## # ######################################################################################## def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, grid_dir, - tile, i, j, lam, save_comp_data): + tile, i, j, lam, save_comp_data, exact_mode): """Get the horizontal and vertical advective tendencies for the given tile and indices""" # Determine UFS history file format (tiled/quilted) @@ -1502,17 +2313,21 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr else: atm_ftag = '*atmf*.nc' sfc_ftag = '*sfcf*.nc' - + # end if + # Get list of UFS history files with 3D ATMospheric state variables. atm_filenames = [] for f_name in os.listdir(forcing_dir): if fnmatch.fnmatch(f_name, atm_ftag): atm_filenames.append(f_name) + # end if + # end for if not atm_filenames: message = 'No filenames matching the pattern {0} found in {1}'. 
\
                  format(atm_ftag,forcing_dir)
        logging.critical(message)
        raise Exception(message)
+    # end if
     atm_filenames = sorted(atm_filenames)
     n_filesA = len(atm_filenames)
@@ -1521,11 +2336,14 @@
     for f_name in os.listdir(forcing_dir):
        if fnmatch.fnmatch(f_name, sfc_ftag):
           sfc_filenames.append(f_name)
+       # end if
+    # end for
     if not sfc_filenames:
        message = 'No filenames matching the pattern {0} found in {1}'. \
                  format(sfc_ftag,forcing_dir)
        logging.critical(message)
        raise Exception(message)
+    # end if
     sfc_filenames = sorted(sfc_filenames)
     n_filesS = len(sfc_filenames)
@@ -1535,7 +2353,8 @@
        message = 'Number of UFS 2D/3D history files is inconsistent'
        logging.critical(message)
        raise Exception(message)
-
+    # end if
+
     # Physical constants (used by FV3 remapping functions)
     kord_tm = -9
     kord_tr = 9
@@ -1545,18 +2364,19 @@

     ####################################################################################
     #
-    # Read in atmospheric state, atmf*.nc history files.
+    # Read in 3D UFS history files
     #
     ####################################################################################

     # Find nearest point on UFS history file (quilted) grid.
     if use_nearest:
-        (tile_jj, tile_ii, point_lon, point_lat, dist_min) = find_loc_indices(location, forcing_dir, -999, lam)
+        (tile_jj, tile_ii, point_lon, point_lat, dist_min, theta) = find_loc_indices(location, forcing_dir, -999, lam)
         print('The closest point has indices [{0},{1}]'.format(tile_ii,tile_jj))
         print('This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat))
         print('This grid cell is approximately {0} km away from the desired location of {1} {2}'.format(dist_min/1.0E3,location[0],location[1]))
-
-    #
+    # end if
+
+    # Initialize
     ps = []
     p_lev = []
     p_lay = []
@@ -1582,12 +2402,15 @@
        except:
           data_grid_lon = nc_file['grid_xt'][:,:]
           data_grid_lat = nc_file['grid_yt'][:,:]
+       # end try
        equal_grids = False
        if (ic_grid_lon.shape == data_grid_lon.shape and \
           ic_grid_lat.shape == ic_grid_lat.shape):
           if (np.equal(ic_grid_lon,data_grid_lon).all() and \
              np.equal(ic_grid_lat,data_grid_lat).all()):
             equal_grids = True
+          # end if
+       # end if

        # If necessary, remap history file (data_grid) to IC file (ic_grid).
@@ -1614,6 +2437,7 @@
           v_data = nc_file['vgrd'][0,::-1,:,:]
           i_get = i
           j_get = j
+       # end if
        else:
           print('Using nearest UFS point {} progress = {}%'.format(filename, 100.0*count/(2*n_files)))
           ps_data = nc_file['pressfc'][0,:,:]
           t_data = nc_file['tmp'][0,::-1,:,:]
           qv_data = nc_file['spfh'][0,::-1,:,:]
           u_data = nc_file['ugrd'][0,::-1,:,:]
           v_data = nc_file['vgrd'][0,::-1,:,:]
           j_get = tile_jj
           i_get = tile_ii
+       # end if (use-nearest)

-       # Compute and store vertical grid information.
-       ak = getattr(nc_file, "ak")[::-1]
-       bk = getattr(nc_file, "bk")[::-1]
+       # Store surface pressure.
        ps.append(ps_data[j_get,i_get])
+
+       # Compute and store vertical grid information.
+       ak = getattr(nc_file, "ak")[::-1]
+       bk = getattr(nc_file, "bk")[::-1]
        nlevs = len(nc_file.dimensions['pfull'])
+
+       # Compute and store pressure at layer-interfaces (nlev+1).
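+       # (hybrid sigma-pressure vertical coordinate: p_interface[k] = ak[k] + bk[k]*ps)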
 p_interface = np.zeros(nlevs+1)
        for k in range(nlevs+1):
           p_interface[k]=ak[k]+ps[-1]*bk[k]
+       # end for
        p_lev.append(p_interface)
+
+       # Compute and store pressure at layer-centers (nlev).
        p_layer = np.zeros(nlevs)
        for k in range(nlevs):
           p_layer[k] = ((1.0/(rocp+1.0))*(p_interface[k]**(rocp+1.0) - \
                          p_interface[k+1]**(rocp+1.0))/(p_interface[k] - \
                          p_interface[k+1]))**(1.0/rocp)
+       # end for
        p_lay.append(p_layer)

        # Store state variables.
@@ -1646,9 +2479,10 @@
        u_lay.append(u_data[:,j_get,i_get])
        v_lay.append(v_data[:,j_get,i_get])
        time_hr.append(nc_file['time'][0])
-
+
        # Close file
        nc_file.close()
+    # end for

     # Convert from python list to numpy array
     ps = np.asarray(ps)
@@ -1658,19 +2492,49 @@
     qv_lay = np.asarray(qv_lay)
     u_lay = np.asarray(u_lay)
     v_lay = np.asarray(v_lay)
-    tv_lay = t_lay*(1.0 + zvir*qv_lay)
     time_hr = np.asarray(time_hr)

+    # Compute virtual temperature.
+    tv_lay = t_lay*(1.0 + zvir*qv_lay)
+
+    ####################################################################################
+    #
     # Read in 2D UFS history files
-    vars2d =[{"name":"spfh2m"}, {"name":"tmp2m"}, \
-             {"name":"dswrf_ave"}, {"name":"ulwrf_ave"},\
-             {"name":"lhtfl_ave"}, {"name":"shtfl_ave"},\
-             {"name":"dswrf"}, {"name":"ulwrf"},\
-             {"name":"lhtfl"}, {"name":"shtfl"},\
-             {"name":"pwat"}, {"name":"vgrd10m"},\
+    #
+    # Comments:
+    # These files contain the 3D (diagnostic) dynamic tendencies needed to create "exact"
+    # UFS forcings. These are stored in the python dictionary "dyntends".
+    #
+    # Additionally, a limited set of two-dimensional variables, python dictionary "vars2d",
+    # is output to the "case comparison file".
+    #
+    ####################################################################################
+
+    # Variables needed for "exact" UFS case generation.
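+    # Each entry pairs a required dynamics tendency ("dtend_*_nophys") with the diag_table
+    # line that enables it in UFS output; these are referenced by the warning below when
+    # the fields are missing from the history files.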
+ dyntends = [{"name":"dtend_temp_nophys", "diag_table":'"gfs_dyn", "dtend_temp_nophys", "dtend_temp_nophys", "fv3_history", "all", .false., "none", 2'},\ + {"name":"dtend_u_nophys", "diag_table":'"gfs_dyn", "dtend_u_nophys", "dtend_u_nophys", "fv3_history", "all", .false., "none", 2'},\ + {"name":"dtend_v_nophys", "diag_table":'"gfs_dyn", "dtend_v_nophys", "dtend_v_nophys", "fv3_history", "all", .false., "none", 2'},\ + {"name":"dtend_qv_nophys", "diag_table":'"gfs_dyn", "dtend_qv_nophys", "dtend_qv_nophys", "fv3_history", "all", .false., "none", 2'}] + for dyntend in dyntends: dyntend["values"] = [] + + # Optional diagnostic to be copied into UFS comparison file + phystends = [{"name":"dtend_temp_lw"}, {"name":"dtend_temp_sw"}, {"name":"dtend_temp_pbl"}, {"name":"dtend_temp_deepcnv"},\ + {"name":"dtend_temp_shalcnv"},{"name":"dtend_temp_mp"}, {"name":"dtend_temp_orogwd"},{"name":"dtend_temp_phys"}, \ + {"name":"dtend_u_pbl"}, {"name":"dtend_v_pbl"}, {"name":"dtend_u_orogwd"}, {"name":"dtend_v_orogwd"}, \ + {"name":"dtend_u_deepcnv"}, {"name":"dtend_v_deepcnv"},{"name":"dtend_u_shalcnv"}, {"name":"dtend_v_shalcnv"}, \ + {"name":"dtend_u_phys"}, {"name":"dtend_v_phys"}, {"name":"dtend_qv_pbl"}, {"name":"dtend_qv_deepcnv"}, \ + {"name":"dtend_qv_shalcnv"}, {"name":"dtend_qv_mp"}, {"name":"dtend_qv_phys"}, {"name":"dtend_poop"}] + for phystend in phystends: phystend["values"] = [] + + # Variables to be added to "UFS comparison file" + vars2d =[{"name":"spfh2m"}, {"name":"tmp2m"}, {"name":"dswrf_ave"}, \ + {"name":"ulwrf_ave"}, {"name":"lhtfl_ave"}, {"name":"shtfl_ave"}, \ + {"name":"dswrf"}, {"name":"ulwrf"}, {"name":"lhtfl"}, \ + {"name":"shtfl"}, {"name":"pwat"}, {"name":"vgrd10m"}, \ {"name":"ugrd10m"}] for var2d in vars2d: var2d["values"] = [] + # Read in 2D UFS history files for count, filename in enumerate(sfc_filenames, start=1): nc_file = Dataset('{0}/{1}'.format(forcing_dir,filename)) nc_file.set_always_mask(False) @@ -1683,12 +2547,15 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr except: data_grid_lon = nc_file['grid_xt'][:,:] data_grid_lat = nc_file['grid_yt'][:,:] + # end try equal_grids = False if (ic_grid_lon.shape == data_grid_lon.shape and \ ic_grid_lat.shape == ic_grid_lat.shape): if (np.equal(ic_grid_lon,data_grid_lon).all() and \ np.equal(ic_grid_lat,data_grid_lat).all()): equal_grids = True + # end if + # endif # If necessary, remap history file (data_grid) to IC file (ic_grid). 
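+       # (this mirrors the nearest-point/regridding logic used for the 3D atmf files above)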
     if (not equal_grids):
@@ -1705,53 +2572,117 @@
           else:
             i_get = i
             j_get = j
+          # end if
        else:
           print('Using nearest UFS point {} progress = {}%'.format(filename, 50+50.0*count/(n_files)))
           j_get = tile_jj
           i_get = tile_ii
-
+       # end if
+
        for var2d in vars2d:
           if not use_nearest:
              data = regridder(nc_file[var2d["name"]][0,:,:])
           else:
              data = nc_file[var2d["name"]][0,:,:]
+          # end if
          var2d["values"].append(data[j_get,i_get])
          var2d["units"] = nc_file[var2d["name"]].getncattr(name="units")
          var2d["long_name"] = nc_file[var2d["name"]].getncattr(name="long_name")
-
+       # end for
+
+       for dyntend in dyntends:
+          if dyntend["name"] in nc_file.variables.keys():
+             if not use_nearest:
+                data = regridder(nc_file[dyntend["name"]][0,::-1,:,:])
+             else:
+                data = nc_file[dyntend["name"]][0,::-1,:,:]
+             # end if (use-nearest)
+             dyntend["values"].append(data[:,j_get,i_get])
+             dyntend["units"] = nc_file[dyntend["name"]].getncattr(name="units")
+             dyntend["long_name"] = nc_file[dyntend["name"]].getncattr(name="long_name")
+             dyntend["missing"] = False
+          else:
+             dyntend["missing"] = True
+          # end if
+       # end for
+       for phystend in phystends:
+          if phystend["name"] in nc_file.variables.keys():
+             if not use_nearest:
+                data = regridder(nc_file[phystend["name"]][0,::-1,:,:])
+             else:
+                data = nc_file[phystend["name"]][0,::-1,:,:]
+             # end if (use-nearest)
+             phystend["values"].append(data[:,j_get,i_get])
+             phystend["units"] = nc_file[phystend["name"]].getncattr(name="units")
+             phystend["long_name"] = nc_file[phystend["name"]].getncattr(name="long_name")
+             phystend["missing"] = False
+          else:
+             phystend["missing"] = True
+          # end if
+       # end for
        nc_file.close()
+    # end for
+
+    # Handle the case that "exact-mode" was requested, but the UFS history files do not contain
+    # the necessary fields.
+    # In this scenario, create a warning message that informs the user how to rerun the UFS with
+    # the correct configuration.
+    # Revert to "approximate-mode"
+    if exact_mode:
+       missing_dtend_names = []
+       missing_diag_tables = []
+       for dyntend in dyntends:
+          if dyntend["missing"]:
+             missing_dtend_names.append(dyntend["name"])
+             missing_diag_tables.append(dyntend["diag_table"])
+             exact_mode = False
+          # end if
+       # end for
+       if missing_dtend_names:
+          print("#"*128)
+          print("#"*59,"WARNING","#"*60)
+          print("#"*128)
+          print("Could not find the following fields required for ",'"exact-mode"',':')
+          for missing_dtend_name in missing_dtend_names:
+             print(" -",missing_dtend_name)
+          # end for
+          print("Using ",'"approximate mode"',' instead')
+          print("")
+          print(" *NOTE*")
+          print(" You selected to create the UFS case using ",'"exact-mode"'," which requires additional (diagnostic)")
+          print(" fields in the UFS history files.")
+          print("")
+          print(" These optional diagnostic tendencies can be output from any UFS forecast by:")
+          print(" a) Modifying the physics namelist, gfs_physics_nml, so that:")
+          print(" *) ldiag3d = .true. ")
+          print(" *) qdiag3d = .true. 
") + print(" b) Addding the following lines to the UFS diag_table:") + for missing_diag_table in missing_diag_tables: + print(' *) ',missing_diag_table ) + # end for + print("") + print("#"*128) + print("#"*128) + # end if + # end if (exact-mode) # Convert to numpy arrays for var2d in vars2d: var2d["values"] = np.asarray(var2d["values"]) - - # - # Create dictionary with full "Native" state (IC@t=0,ATMF*@t>0) - # - ps_IC = np.zeros([1]) - ps_IC[0] = state_IC["ps"] - pi_IC = np.zeros([1,nlevs+1]) - pi_IC[0,:] = state_IC["pa_i"] - p_IC = np.zeros([1,nlevs]) - p_IC[0,:] = state_IC["pa"] - t_IC = np.zeros([1,nlevs]) - t_IC[0,:] = state_IC["ta"] - qv_IC = np.zeros([1,nlevs]) - qv_IC[0,:] = state_IC["qv"] - u_IC = np.zeros([1,nlevs]) - u_IC[0,:] = state_IC["ua"] - v_IC = np.zeros([1,nlevs]) - v_IC[0,:] = state_IC["va"] - tv_IC = t_IC*(1.0 + zvir*qv_IC) - stateNATIVE = {"time": np.concatenate((np.asarray([0.]), time_hr[:])), \ - "ps": np.concatenate((ps_IC, ps), axis=0), \ - "p_lev": np.concatenate((pi_IC, p_lev), axis=0), \ - "p_lay": np.concatenate((p_IC, p_lay), axis=0), \ - "t_lay": np.concatenate((t_IC, t_lay), axis=0), \ - "qv_lay": np.concatenate((qv_IC, qv_lay), axis=0), \ - "u_lay": np.concatenate((u_IC, u_lay), axis=0), \ - "v_lay": np.concatenate((v_IC, v_lay), axis=0), \ - "tv_lay": np.concatenate((tv_IC, tv_lay), axis=0)} + # end for + if exact_mode: + for dyntend in dyntends: + dyntend["values"] = np.asarray(dyntend["values"]) + # end for + dtend_temp_nophys = dyntends[0]["values"] + dtend_u_nophys = dyntends[1]["values"] + dtend_v_nophys = dyntends[2]["values"] + dtend_qv_nophys = dyntends[3]["values"] + # end if (exact-mode) + for phystend in phystends: + if (not phystend["missing"]): + phystend["values"] = np.asarray(phystend["values"]) + # end if + # end for #################################################################################### # @@ -1775,8 +2706,8 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr # #################################################################################### - # - nlevs = len(p_lay[0,:]) + # Initialize + #nlevs = len(p_lay[0,:]) dummy = np.zeros(1) from_p = np.zeros([1,nlevs+1]) to_p = np.zeros([1,nlevs+1]) @@ -1796,6 +2727,7 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr # # First timestep... + # (Same for Exact-mode or Approximate mode) # # Interpolation range @@ -1810,7 +2742,7 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr tv_rev_new = fv3_remap.map_scalar(nlevs, log_from_p, tv_init_rev[np.newaxis, :], \ dummy, nlevs, log_to_p, 0, 0, 1, \ np.abs(kord_tm), t_min) - + # IC Specific humidity on vertical-grid of first UFS history file. qv_init_rev = state_IC["qv"][::-1] for k in range(0,nlevs): dp2[0,k] = from_p[0,k+1] - from_p[0,k] @@ -1826,7 +2758,7 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr v_init_rev = state_IC["va"][::-1] v_rev_new = fv3_remap.map1_ppm(nlevs, from_p, v_init_rev[np.newaxis, :], 0.0, \ nlevs, to_p, 0, 0, -1, kord_tm ) - + # Store p_layr[0,:] = p_lay[0,:] p_levr[0,:] = p_lev[0,:] @@ -1834,115 +2766,142 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr u_layr[0,:] = u_rev_new[0,::-1] tv_layr[0,:] = tv_rev_new[0,::-1] qv_layr[0,:] = qv_rev_new[0,::-1] - - # - # Subsequent timestep(s)... - # (current state on vertical grid of subsequent time-step(s). Used for differencing) - # - - # - for t in range(n_files-1): + + # Subsequent timestep(s). 
-
-    #
-    # Subsequent timestep(s)...
-    # (current state on vertical grid of subsequent time-step(s). Used for differencing)
-    #
-
-    #
-    for t in range(n_files-1):
+
+    # Subsequent timestep(s) (exact-mode only)
+    if exact_mode:
+        for t in range(n_files-1):
+            # Interpolation range
+            from_p[0,:]     = p_lev[t,::-1]
+            to_p[0,:]       = p_lev[t+1,::-1]
+            log_from_p[0,:] = np.log(p_lev[t,::-1])
+            log_to_p[0,:]   = np.log(p_lev[t+1,::-1])
+
+            # Virtual Temperature @ time > 0
+            tv_rev[0,:] = tv_lay[t,::-1]
+            tv_rev_new  = fv3_remap.map_scalar(nlevs, log_from_p, tv_rev, dummy, nlevs, \
+                                               log_to_p, 0, 0, 1, np.abs(kord_tm), t_min)
+            # Specific humidity @ time > 0
+            qv_rev[0,:] = qv_lay[t,::-1]
+            for k in range(0,nlevs): dp2[0,k] = to_p[0,k+1] - to_p[0,k]
+            qv_rev_new = fv3_remap.map1_q2(nlevs, from_p, qv_rev, nlevs, to_p, dp2, \
+                                           0, 0, 0, kord_tr, q_min)
+            # Zonal wind @ time > 0
+            u_rev[0,:] = u_lay[t,::-1]
+            u_rev_new  = fv3_remap.map1_ppm(nlevs, from_p, u_rev, 0.0, nlevs, to_p, \
+                                            0, 0, -1, kord_tm )
+            # Meridional wind @ time > 0
+            v_rev[0,:] = v_lay[t,::-1]
+            v_rev_new  = fv3_remap.map1_ppm(nlevs, from_p, v_rev, 0.0, nlevs, to_p, \
+                                            0, 0, -1, kord_tm )
+            # Store
+            p_layr[t+1,:]  = p_lay[t+1,:]
+            p_levr[t+1,:]  = p_lev[t+1,:]
+            tv_layr[t+1,:] = tv_rev_new[0,::-1]
+            qv_layr[t+1,:] = qv_rev_new[0,::-1]
+            u_layr[t+1,:]  = u_rev_new[0,::-1]
+            v_layr[t+1,:]  = v_rev_new[0,::-1]
+        # end for
+        #
-        from_p[0,:]     = p_lev[t,::-1]
-        to_p[0,:]       = p_lev[t+1,::-1]
-        log_from_p[0,:] = np.log(p_lev[t,::-1])
-        log_to_p[0,:]   = np.log(p_lev[t+1,::-1])
-
-        # Virtual Temperature @ time > 0
-        tv_rev[0,:] = tv_lay[t,::-1]
-        tv_rev_new  = fv3_remap.map_scalar(nlevs, log_from_p, tv_rev, dummy, nlevs, \
-                                           log_to_p, 0, 0, 1, np.abs(kord_tm), t_min)
-        # Specific humidity @ time > 0
-        qv_rev[0,:] = qv_lay[t,::-1]
-        for k in range(0,nlevs): dp2[0,k] = to_p[0,k+1] - to_p[0,k]
-        qv_rev_new = fv3_remap.map1_q2(nlevs, from_p, qv_rev, nlevs, to_p, dp2, \
-                                       0, 0, 0, kord_tr, q_min)
-        # Zonal wind @ time > 0
-        u_rev[0,:] = u_lay[t,::-1]
-        u_rev_new  = fv3_remap.map1_ppm(nlevs, from_p, u_rev, 0.0, nlevs, to_p, \
-                                        0, 0, -1, kord_tm )
-        # Meridional wind @ time > 0
-        v_rev[0,:] = v_lay[t,::-1]
-        v_rev_new  = fv3_remap.map1_ppm(nlevs, from_p, v_rev, 0.0, nlevs, to_p, \
-                                        0, 0, -1, kord_tm )
-
-        # Store
-        p_layr[t+1,:]  = p_lay[t+1,:]
-        p_levr[t+1,:]  = p_lev[t+1,:]
-        tv_layr[t+1,:] = tv_rev_new[0,::-1]
-        qv_layr[t+1,:] = qv_rev_new[0,::-1]
-        u_layr[t+1,:]  = u_rev_new[0,::-1]
-        v_layr[t+1,:]  = v_rev_new[0,::-1]
-
-        #
-        p_layr[t+2,:]  = p_layr[t+1,:]
-        p_levr[t+2,:]  = p_levr[t+1,:]
-        tv_layr[t+2,:] = tv_layr[t+1,:]
-        qv_layr[t+2,:] = qv_layr[t+1,:]
-        u_layr[t+2,:]  = u_layr[t+1,:]
-        v_layr[t+2,:]  = v_layr[t+1,:]
-
+        p_layr[t+2,:]  = p_layr[t+1,:]
+        p_levr[t+2,:]  = p_levr[t+1,:]
+        tv_layr[t+2,:] = tv_layr[t+1,:]
+        qv_layr[t+2,:] = qv_layr[t+1,:]
+        u_layr[t+2,:]  = u_layr[t+1,:]
+        v_layr[t+2,:]  = v_layr[t+1,:]
+    # end if (exact-mode)
+
     # Temperature
     t_layr = tv_layr/(1.0 + zvir*qv_layr)

-    # Create dictionary with "Regridded" state
-    stateREGRID = {"time":   np.concatenate((np.asarray([0.]), time_hr[:])), \
-                   "ps":     np.concatenate((ps[0:1], ps), axis=0), \
-                   "p_lev":  p_levr, \
-                   "p_lay":  p_layr, \
-                   "t_lay":  t_layr, \
-                   "qv_lay": qv_layr, \
-                   "u_lay":  u_layr, \
-                   "v_lay":  v_layr, \
-                   "tv_lay": tv_layr }
+    # Save regridded initial state.
+    stateInit = {"p_lay":  p_lay[0,:], \
+                 "t_lay":  t_lay[0,:], \
+                 "qv_lay": qv_lay[0,:], \
+                 "u_lay":  u_lay[0,:], \
+                 "v_lay":  v_lay[0,:]}
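# Layer temperature is recovered from the remapped virtual temperature via
# T = Tv/(1 + zvir*qv), the inverse of Tv = T*(1 + zvir*qv) with zvir = rvgas/rdgas - 1
# (about 0.608). A quick, self-contained round-trip check of that relation:
import numpy as np

zvir_chk = 461.50/287.05 - 1.0                  # rvgas/rdgas - 1
ta_chk   = np.array([290.0, 280.0])             # K
qv_chk   = np.array([0.012, 0.008])             # kg kg-1
tv_chk   = ta_chk*(1.0 + zvir_chk*qv_chk)       # forward: virtual temperature
assert np.allclose(tv_chk/(1.0 + zvir_chk*qv_chk), ta_chk)  # inverse recovers T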
     ####################################################################################
     #
-    # Compute tendencies advective = total - remapping
-    #
-    ####################################################################################
-
+    # Compute forcing.
+    #
+    ####################################################################################
+    # Initialize tendencies.
     dtdt_adv   = np.zeros([n_files+1,nlevs])
     dqvdt_adv  = np.zeros([n_files+1,nlevs])
     dudt_adv   = np.zeros([n_files+1,nlevs])
     dvdt_adv   = np.zeros([n_files+1,nlevs])
     pres_adv   = np.zeros([n_files+1,nlevs])
     pres_i_adv = np.zeros([n_files+1,nlevs+1])
-    tend_remap = np.zeros([1,nlevs])
-    tend_total = np.zeros([1,nlevs])
-
-    #
-    for t in range(n_files):
-        #
-        dtime_sec = (stateNATIVE["time"][t+1] - stateNATIVE["time"][t])*sec_in_hr
-        #
-        pres_adv[t,:]   = stateNATIVE["p_lay"][t,:]
-        pres_i_adv[t,:] = stateNATIVE["p_lev"][t,:]
-        #
-        tend_total[0,:] = stateREGRID["t_lay"][t+1,:] - stateREGRID["t_lay"][t,:]
-        tend_remap[0,:] = stateREGRID["t_lay"][t,:]   - stateNATIVE["t_lay"][t,:]
-        dtdt_adv[t,:]   = (tend_total[0,:] - tend_remap[0,:]) / dtime_sec
-        #
-        tend_total[0,:] = stateREGRID["qv_lay"][t+1,:] - stateREGRID["qv_lay"][t,:]
-        tend_remap[0,:] = stateREGRID["qv_lay"][t,:]   - stateNATIVE["qv_lay"][t,:]
-        dqvdt_adv[t,:]  = (tend_total[0,:] - tend_remap[0,:]) / dtime_sec
-        #
-        tend_total[0,:] = stateREGRID["u_lay"][t+1,:] - stateREGRID["u_lay"][t,:]
-        tend_remap[0,:] = stateREGRID["u_lay"][t,:]   - stateNATIVE["u_lay"][t,:]
-        dudt_adv[t,:]   = (tend_total[0,:] - tend_remap[0,:]) / dtime_sec
-        #
-        tend_total[0,:] = stateREGRID["v_lay"][t+1,:] - stateREGRID["v_lay"][t,:]
-        tend_remap[0,:] = stateREGRID["v_lay"][t,:]   - stateNATIVE["v_lay"][t,:]
-        dvdt_adv[t,:]   = (tend_total[0,:] - tend_remap[0,:]) / dtime_sec
-
-        #
-        dtdt_adv[t+1,:]   = dtdt_adv[t,:]
-        dqvdt_adv[t+1,:]  = dqvdt_adv[t,:]
-        dudt_adv[t+1,:]   = dudt_adv[t,:]
-        dvdt_adv[t+1,:]   = dvdt_adv[t,:]
-        pres_adv[t+1,:]   = pres_adv[t,:]
-        pres_i_adv[t+1,:] = pres_i_adv[t,:]
+    dtdtr = np.zeros([1,nlevs])
+    dqdtr = np.zeros([1,nlevs])
+    dudtr = np.zeros([1,nlevs])
+    dvdtr = np.zeros([1,nlevs])
+
+    # First timestep.
+    pres_adv[0,:]   = state_IC["pa"]
+    pres_i_adv[0,:] = state_IC["pa_i"]
+    dt = 3600.0*(time_hr[0])
+    if exact_mode:
+        dqdtr[0,:]     = (qv_layr[0,:] - state_IC["qv"][:])/dt
+        dqvdt_adv[0,:] = dtend_qv_nophys[0,:] - dqdtr[0,:]
+        dudtr[0,:]     = (u_layr[0,:] - state_IC["ua"][:])/dt
+        dudt_adv[0,:]  = (dtend_u_nophys[0,:] - dudtr[0,:])
+        dvdtr[0,:]     = (v_layr[0,:] - state_IC["va"][:])/dt
+        dvdt_adv[0,:]  = (dtend_v_nophys[0,:] - dvdtr[0,:])
+        dtdtr[0,:]     = (t_layr[0,:] - state_IC["ta"][:])/dt
+        dtdt_adv[0,:]  = (dtend_temp_nophys[0,:] - dtdtr[0,:])
+    else:
+        dqdtt          = qv_lay[0,:] - state_IC["qv"][:]
+        dqvdt_adv[0,:] = dqdtt/dt
+        dudtt          = u_lay[0,:] - state_IC["ua"][:]
+        dudt_adv[0,:]  = dudtt/dt
+        dvdtt          = v_lay[0,:] - state_IC["va"][:]
+        dvdt_adv[0,:]  = dvdtt/dt
+        dtdtt          = t_lay[0,:] - state_IC["ta"][:]
+        dtdt_adv[0,:]  = dtdtt/dt
+
+    # Subsequent timestep(s).
+    dtdtr = np.zeros([n_files-1,nlevs])
+    dqdtr = np.zeros([n_files-1,nlevs])
+    dudtr = np.zeros([n_files-1,nlevs])
+    dvdtr = np.zeros([n_files-1,nlevs])
+    dtdtt = np.zeros([n_files-1,nlevs])
+    dqdtt = np.zeros([n_files-1,nlevs])
+    dudtt = np.zeros([n_files-1,nlevs])
+    dvdtt = np.zeros([n_files-1,nlevs])
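# Scalar sketch of how the two branches compute the advective forcing for any state
# variable q (illustrative; the function names are hypothetical). Exact mode removes
# the increment implied by vertical remapping from the dycore's total no-physics
# tendency; approximate mode uses the raw time difference of the history-file states:
def advective_tendency_exact(dtend_nophys, q_regrid_next, q_now, dt):
    remap_tend = (q_regrid_next - q_now)/dt   # tendency implied by remapping/differencing
    return dtend_nophys - remap_tend

def advective_tendency_approx(q_next, q_now, dt):
    return (q_next - q_now)/dt                # total time tendency used as forcing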
+    for t in range(n_files-1):
+        dt = (time_hr[t+1] - time_hr[t])*3600.
+        pres_adv[t+1,:]   = p_lay[t]
+        pres_i_adv[t+1,:] = p_lev[t]
+        if exact_mode:
+            dqdtr[t,:]       = (qv_layr[t+1,:] - qv_lay[t,:])/dt
+            dqvdt_adv[t+1,:] = dtend_qv_nophys[t+1,:] - dqdtr[t,:]
+            dtdtr[t,:]       = (t_layr[t+1,:] - t_lay[t,:])/dt
+            dtdt_adv[t+1,:]  = dtend_temp_nophys[t+1,:] - dtdtr[t,:]
+            dudtr[t,:]       = (u_layr[t+1,:] - u_lay[t,:])/dt
+            dudt_adv[t+1,:]  = dtend_u_nophys[t+1,:] - dudtr[t,:]
+            dvdtr[t,:]       = (v_layr[t+1,:] - v_lay[t,:])/dt
+            dvdt_adv[t+1,:]  = dtend_v_nophys[t+1,:] - dvdtr[t,:]
+        else:
+            dqdtt            = qv_lay[t+1,:] - qv_lay[t,:]
+            dqvdt_adv[t+1,:] = (dqdtt - dqdtr[t,:])/dt
+            dudtt            = u_lay[t+1,:] - u_lay[t,:]
+            dudt_adv[t+1,:]  = (dudtt - dudtr[t,:])/dt
+            dvdtt            = v_lay[t+1,:] - v_lay[t,:]
+            dvdt_adv[t+1,:]  = (dvdtt - dvdtr[t,:])/dt
+            dtdtt            = t_lay[t+1,:] - t_lay[t,:]
+            dtdt_adv[t+1,:]  = (dtdtt - dtdtr[t,:])/dt
+        # end if (exact-mode)
+    # end for (ufs history files)
+
+    # Last timestep.
+    dtdt_adv[t+2,:]   = dtdt_adv[t+1,:]
+    dqvdt_adv[t+2,:]  = dqvdt_adv[t+1,:]
+    dudt_adv[t+2,:]   = dudt_adv[t+1,:]
+    dvdt_adv[t+2,:]   = dvdt_adv[t+1,:]
+    pres_adv[t+2,:]   = pres_adv[t+1,:]
+    pres_i_adv[t+2,:] = pres_i_adv[t+1,:]

     ####################################################################################
     #
@@ -1958,8 +2917,8 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr
     #          step)
     #
     ####################################################################################
-    time_method = 'constant_simple' #this is not implemented in the SCM code yet
-    #time_method = 'constant_interp'
+    #time_method = 'constant_simple' #this is not implemented in the SCM code yet
+    time_method = 'constant_interp'
     #time_method = 'gradient' #this produced wonky results in the SCM; avoid until investigated more

     if (time_method == 'constant_simple'):
@@ -1989,6 +2948,7 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr
             tot_advec_qv[:,t] = dqvdt_adv[t,:]
             tot_advec_u[:,t]  = dudt_adv[t,:]
             tot_advec_v[:,t]  = dvdt_adv[t,:]
+        # end for
     elif (time_method == 'constant_interp'):
         print('Forcing can be interpolated in time, but the time values are chosen such that forcing will effectively be held constant during a diagnostic time interval.')
         ntimes = 2*n_files
@@ -2033,15 +2993,7 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr
             tot_advec_u[:,2*t+1] = tot_advec_u[:,2*t]
             tot_advec_v[:,2*t]   = dvdt_adv[t,:]
             tot_advec_v[:,2*t+1] = tot_advec_v[:,2*t]
-            #
-            #p_s[2*t-1]             = 0.5*(p_s[2*t] + p_s[2*t-1])
-            #pressure_forc[:,2*t-1] = 0.5*(pressure_forc[:,2*t] + pressure_forc[:,2*t-1])
-            #tot_advec_T[:,2*t-1]   = 0.5*(tot_advec_T[:,2*t] + tot_advec_T[:,2*t-1])
-            #tot_advec_qv[:,2*t-1]  = 0.5*(tot_advec_qv[:,2*t] + tot_advec_qv[:,2*t-1])
-            #tot_advec_u[:,2*t-1]   = 0.5*(tot_advec_u[:,2*t] + tot_advec_u[:,2*t-1])
-            #tot_advec_v[:,2*t-1]   = 0.5*(tot_advec_v[:,2*t] + tot_advec_v[:,2*t-1])
-
-
+        # end for
     elif (time_method == 'gradient'): #this produced wonky results in the SCM; avoid until investigated more
         print('Forcing can be interpolated in time since the forcing terms are assumed to follow a constant time-gradient.')
@@ -2090,7 +3042,8 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr
     else:
         print('Unrecognized forcing time method. Exiting.')
         exit()
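# In the 'constant_interp' branch above, each forcing profile is written twice (columns
# 2*t and 2*t+1) so that the SCM's linear time interpolation effectively holds the
# forcing constant over each UFS history interval. Array sketch (hypothetical helper):
import numpy as np

def duplicate_for_constant_interp(forcing):      # forcing shape: (nlevs, n_files)
    nlevs, nt = forcing.shape
    out = np.zeros((nlevs, 2*nt))
    for t in range(nt):
        out[:, 2*t]   = forcing[:, t]
        out[:, 2*t+1] = out[:, 2*t]              # repeated column -> piecewise constant
    return out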
-
+    # end if
+    #
     w_ls  = np.zeros((nlevs,ntimes),dtype=float)
     omega = np.zeros((nlevs,ntimes),dtype=float)
@@ -2110,23 +3063,25 @@ def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, gr
     }

     if (save_comp_data):
+        time_hr[0] = 0.0
         comp_data = {
-          "time":   stateNATIVE["time"]*sec_in_hr,
-          "pa":     stateNATIVE["p_lay"][:,:],
-          "ta":     stateNATIVE["t_lay"][:,:],
-          "qv":     stateNATIVE["qv_lay"][:,:],
-          "ua":     stateNATIVE["u_lay"][:,:],
-          "va":     stateNATIVE["v_lay"][:,:],
-          "vars2d": vars2d}
+          "time" : time_hr*sec_in_hr,
+          "pa"   : p_lay,#[:,::-1],
+          "ta"   : t_lay,#[:,::-1],
+          "qv"   : qv_lay,#[:,::-1],
+          "ua"   : u_lay,#[:,::-1],
+          "va"   : v_lay,#[:,::-1],
+          "vars2d":vars2d,
+          "phystends":phystends}
     else:
         comp_data = {}

-    return (forcing, comp_data, stateREGRID)
+    return (forcing, comp_data, stateInit)

########################################################################################
#
########################################################################################
-def write_SCM_case_file(state, surface, oro, forcing, case, date, stateREGRID):
+def write_SCM_case_file(state, surface, oro, forcing, init, case, date, forcing_method, vertical_method, geos_wind_forcing, wind_nudge):
     """Write all data to a netCDF file in the DEPHY-SCM format"""

     # Working types
@@ -2193,11 +3148,21 @@ def write_SCM_case_file(state, surface, oro, forcing, case, date, stateREGRID):
     nc_file.adv_qt = forcing_off
     nc_file.adv_rv = forcing_off
     nc_file.adv_rt = forcing_off
-    nc_file.forc_wa = forcing_off
+    if (vertical_method == 2):
+        nc_file.forc_wa = forcing_on
+    else:
+        nc_file.forc_wa = forcing_off
     nc_file.forc_wap = forcing_off
-    nc_file.forc_geo = forcing_off
-    nc_file.nudging_ua = forcing_off
-    nc_file.nudging_va = forcing_off
+    if (geos_wind_forcing):
+        nc_file.forc_geo = forcing_on
+    else:
+        nc_file.forc_geo = forcing_off
+    if (wind_nudge):
+        nc_file.nudging_ua = forcing_on*const_nudging_time
+        nc_file.nudging_va = forcing_on*const_nudging_time
+    else:
+        nc_file.nudging_ua = forcing_off
+        nc_file.nudging_va = forcing_off
     nc_file.nudging_ta = forcing_off
     nc_file.nudging_theta = forcing_off
     nc_file.nudging_thetal = forcing_off
@@ -2230,7 +3195,6 @@ def write_SCM_case_file(state, surface, oro, forcing, case, date, stateREGRID):
     nc_file.adv_qv = forcing_on
     nc_file.adv_ua = forcing_on
     nc_file.adv_va = forcing_on
-    #
     nc_file.surface_forcing_temp     = 'none'
     nc_file.surface_forcing_moisture = 'none'
     nc_file.surface_forcing_wind     = 'none'
@@ -2330,24 +3294,20 @@ def write_SCM_case_file(state, surface, oro, forcing, case, date, stateREGRID):
     ########################################################################################
     var_dict = [{"name": "orog",   "type":wp, "dimd": ('t0'        ), "units": "m",       "desc": "surface_altitude"},\
                 {"name": "zh",     "type":wp, "dimd": ('t0', 'lev'), "units": "m",       "desc": "height"},\
-                {"name": "pa",     "type":wp, "dimd": ('t0', 'lev'), "units": "Pa",      "desc": "air_ressure"}, \
-                {"name": "ta",     "type":wp, "dimd": ('t0', 'lev'), "units": "K",       "desc": "air_temperature","default_value": stateREGRID["t_lay"][1,:], "override": True}, \
+                {"name": "pa",     "type":wp, "dimd": ('t0', 'lev'), "units": "Pa",      "desc": "air_pressure"}, \
                 {"name": "theta",  "type":wp, "dimd": ('t0', 'lev'), "units": "K",       "desc": "air_potential_temperature"}, \
                 {"name": "thetal", "type":wp, "dimd": ('t0', 'lev'), "units": "K",       "desc": "air_liquid_potential_temperature"}, \
                 {"name": "rv",     "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "humidity_mixing_ratio"}, \
                 {"name": "rl",     "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "cloud_liquid_water_mixing_ratio"}, \
"dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "cloud_liquid_water_mixing_ratio"}, \ {"name": "ri", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "cloud_ice_water_mixing_ratio"}, \ {"name": "rt", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "water_mixing_ratio"}, \ - {"name": "qv", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "specific_humidity","default_value": stateREGRID["qv_lay"][1,:], "override": True}, \ {"name": "ql", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "mass_fraction_of_cloud_liquid_water_in_air"}, \ {"name": "qi", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "mass_fraction_of_cloud_ice_water_in_air", "default_value": 0.0}, \ {"name": "qt", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "mass_fraction_of_water_in_air"}, \ {"name": "hur", "type":wp, "dimd": ('t0', 'lev'), "units": "%", "desc": "relative_humidity"}, \ {"name": "tke", "type":wp, "dimd": ('t0', 'lev'), "units": "m2 s-2", "desc": "specific_turbulen_kinetic_energy", "default_value": 0.0}, \ - {"name": "ua", "type":wp, "dimd": ('t0', 'lev'), "units": "m s-1", "desc": "eastward_wind", "default_value": stateREGRID["u_lay"][1,:], "override": True}, \ - {"name": "va", "type":wp, "dimd": ('t0', 'lev'), "units": "m s-1", "desc": "northward_wind", "default_value": stateREGRID["v_lay"][1,:], "override": True}, \ {"name": "ts", "type":wp, "dimd": ('t0' ), "units": "K", "desc": "surface_temperature"},\ - {"name": "tskin", "type":wp, "dimd": ('t0' ), "units": "K", "desc": "surface_skin_pressure"}, \ + {"name": "tskin", "type":wp, "dimd": ('t0' ), "units": "K", "desc": "surface_skin_temperature"}, \ {"name": "ps", "type":wp, "dimd": ('t0' ), "units": "Pa", "desc": "surface_air_pressure"}, \ {"name": "beta", "type":wp, "dimd": ('t0' ), "units": "m", "desc": "soil_water_stress_factor"}, \ {"name": "mrsos", "type":wp, "dimd": ('t0' ), "units": "kg m-2", "desc": "mass_content_of_water_in_soil_layer"}, \ @@ -2357,6 +3317,17 @@ def write_SCM_case_file(state, surface, oro, forcing, case, date, stateREGRID): {"name": "alb", "type":wp, "dimd": ('t0' ), "units": "1", "desc": "surface_albedo"}, \ {"name": "emis", "type":wp, "dimd": ('t0' ), "units": "1", "desc": "surface_longwave_emissivity"}, \ {"name": "slmsk", "type":wp, "dimd": ('t0' ), "units": "none", "desc": "land_sea_ice_mask"}] + if (forcing_method == 1 or forcing_method == 3): + var_dict.insert(3, {"name": "ta", "type":wp, "dimd": ('t0', 'lev'), "units": "K", "desc": "air_temperature","default_value": init["t_lay"][:], "override": True}) + var_dict.insert(10,{"name": "qv", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "specific_humidity","default_value": init["qv_lay"][:], "override": True}) + var_dict.insert(16,{"name": "ua", "type":wp, "dimd": ('t0', 'lev'), "units": "m s-1", "desc": "eastward_wind","default_value": init["u_lay"][:], "override": True}) + var_dict.insert(17,{"name": "va", "type":wp, "dimd": ('t0', 'lev'), "units": "m s-1", "desc": "northward_wind","default_value": init["v_lay"][:], "override": True}) + else: + var_dict.insert(3, {"name": "ta", "type":wp, "dimd": ('t0', 'lev'), "units": "K", "desc": "air_temperature"}) + var_dict.insert(10,{"name": "qv", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "specific_humidity"}) + var_dict.insert(16,{"name": "ua", "type":wp, "dimd": ('t0', 'lev'), "units": "m s-1", "desc": "eastward_wind"}) + var_dict.insert(17,{"name": "va", "type":wp, "dimd": ('t0', 'lev'), "units": "m s-1", "desc": 
"northward_wind"}) + # var_frc = [{"name": "zh_forc", "type":wp, "dimd": ('time', 'lev'), "units": "m", "desc": "height_forcing","default_value": 1.},\ {"name": "pa_forc", "type":wp, "dimd": ('time', 'lev'), "units": "Pa", "desc": "air_pressure_forcing"}, \ @@ -2593,7 +3564,8 @@ def write_comparison_file(comp_data, case_name, date, surface): # Dimensions lev_dim = nc_file.createDimension('lev', size=nlevs) time_dim = nc_file.createDimension('time', size=ntime) - time_ufs_history_dim = nc_file.createDimension('time_ufs_history', size=ntime-1) + #time_ufs_history_dim = nc_file.createDimension('time_ufs_history', size=ntime-1) #GJF why are we ignoring the first history file? + time_ufs_history_dim = nc_file.createDimension('time_ufs_history', size=ntime) # Varaibles time_var = nc_file.createVariable('time', wp, ('time',)) @@ -2604,7 +3576,7 @@ def write_comparison_file(comp_data, case_name, date, surface): time2_var = nc_file.createVariable('time_ufs_history', wp, ('time_ufs_history',)) time2_var.units = 'second' time2_var.long_name = 'UFS history file time' - time2_var[:] = comp_data['time'][1::] + time2_var[:] = comp_data['time'] lev_var = nc_file.createVariable('levs', wp, ('time','lev',)) lev_var.units = 'Pa' @@ -2637,6 +3609,15 @@ def write_comparison_file(comp_data, case_name, date, surface): tempVar.long_name = var2d["long_name"] tempVar[:] = var2d["values"] + for phystend in comp_data["phystends"]: + if (not phystend["missing"]): + tempVar = nc_file.createVariable(phystend["name"], wp, ('time', 'lev',)) + tempVar.units = phystend["units"] + tempVar.long_name = phystend["long_name"] + tempVar[:] = phystend["values"] + # end if + # end for + # end if nc_file.close() return @@ -2684,62 +3665,86 @@ def main(): #read in arguments (location, indices, date, in_dir, grid_dir, forcing_dir, tile, area, case_name, - old_chgres, lam, save_comp, use_nearest) = parse_arguments() + old_chgres, lam, save_comp, use_nearest, forcing_method, vertical_method, geos_wind_forcing, wind_nudge) = parse_arguments() + + #find indices corresponding to both UFS history files and initial condition (IC) files + (hist_i, hist_j, hist_lon, hist_lat, hist_dist_min, angle_to_hist_point, neighbors, dx, dy) = find_loc_indices_UFS_history(location, forcing_dir) + + (IC_i, IC_j, tile, IC_lon, IC_lat, IC_dist_min, angle_to_IC_point) = find_loc_indices_UFS_IC(location, grid_dir, lam, tile, indices) - #find tile containing the point using the supergrid if no tile is specified - #if not tile and not lam: - if not tile: - tile = int(find_tile(location, grid_dir, lam)) - if tile < 0: - message = 'No tile was found for location {0}'.format(location) - logging.critical(message) - raise Exception(message) - print('Tile found: {0}'.format(tile)) + #compare the locations of the found history file point and UFS IC point + compare_hist_and_IC_points(location, hist_lon, hist_lat, IC_lon, IC_lat, hist_dist_min, angle_to_hist_point, IC_dist_min, angle_to_IC_point) - #find index of closest point in the tile if indices are not specified - if not indices: - (tile_j, tile_i, point_lon, point_lat, dist_min) = find_loc_indices(location, grid_dir, tile, lam) - print('The closest point in tile {0} has indices [{1},{2}]'.format(tile,tile_i,tile_j)) - print('This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat)) - print('This grid cell is approximately {0} km away from the desired location of {1} {2}'.format(dist_min/1.0E3,location[0],location[1])) + #read in surface data for the intial conditions at the IC point + 
+    #read in surface data for the initial conditions at the IC point
+    surface_data = get_UFS_surface_data(in_dir, tile, IC_i, IC_j, old_chgres, lam)
+
+    #when using the advective forcing methods, initial conditions are taken from the history files; check that the history file surface type is the same as the UFS IC point from which data is read
+    check_IC_hist_surface_compatibility(forcing_dir, hist_i, hist_j, surface_data, lam, old_chgres, tile)
+
+    #read in orographic data for the initial conditions at the IC point
+    oro_data = get_UFS_oro_data(in_dir, tile, IC_i, IC_j, lam)
+
+    #read in the initial condition profiles
+
+    if (forcing_method == 1 or forcing_method == 3):
+        #read initial condition profiles from UFS IC files
+        (state_data, error_msg) = get_IC_data_from_UFS_ICs(in_dir, grid_dir, tile, IC_i,\
+                                                           IC_j, old_chgres, lam)
     else:
-        tile_i = indices[0]
-        tile_j = indices[1]
-        #still need to grab the lon/lat if the tile and indices are supplied
-        (point_lon, point_lat) = find_lon_lat_of_indices(indices, grid_dir, tile, lam)
-
-        print('This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat))
+        #read initial condition profiles from UFS history files (not true "initial" conditions, but after first UFS timestep)
+        state_data = get_IC_data_from_UFS_history(forcing_dir, hist_i, hist_j, lam, tile)
+        error_msg = ''

     # get UFS IC data (TODO: flag to read in RESTART data rather than IC data and implement
     # different file reads)
-    (state_data, surface_data, oro_data, error_msg) = get_UFS_IC_data(in_dir, grid_dir, tile, tile_i,\
-                                                                      tile_j, old_chgres, lam)
+
     if (error_msg):
-        print(error_msg)
-        print("STOPPING")
+        print("ERROR: unknown grid orientation")
         exit()

     if not date:
         # date was not included on command line; look in atmf* file for initial date
         date = find_date(forcing_dir)

-    #get grid cell area if not given
     if not area:
-        area = get_UFS_grid_area(grid_dir, tile, tile_i, tile_j, lam)
+        area = get_UFS_grid_area(grid_dir, tile, IC_i, IC_j, lam)
     surface_data["area"] = area
-    surface_data["lon"]  = point_lon
-    surface_data["lat"]  = point_lat
+    surface_data["lon"]  = hist_lon
+    surface_data["lat"]  = hist_lat

     # Get UFS forcing data
-    (forcing_data, comp_data, stateREGRID) = get_UFS_forcing_data(state_data["nlevs"], state_data, \
-                                                                  location, use_nearest, forcing_dir, \
-                                                                  grid_dir, tile, tile_i, tile_j, lam,\
-                                                                  save_comp)
+    stateInit = {}
+    if forcing_method == 1:
+        exact_mode = True
+        (forcing_data, comp_data, stateInit) = get_UFS_forcing_data(state_data["nlevs"], state_data, \
+                                                                    location, use_nearest, forcing_dir, \
+                                                                    grid_dir, tile, IC_i, IC_j, lam,\
+                                                                    save_comp, exact_mode)
+    elif forcing_method == 2:
+        geo_pts       = n_forcing_halo_points
+        geo_neighbors = ''
+        geo_dx        = ''
+        geo_dy        = ''
+        if (geos_wind_forcing and use_actual_geos_wind):
+            #need to check and potentially get more neighboring data for geostrophic wind (for low enough Rossby numbers where geostrophic balance can be assumed)
+            (geo_pts, geo_neighbors, geo_dx, geo_dy) = expand_neighbors_for_geostrophic_balance(forcing_dir, hist_i, hist_j, neighbors, dx, dy, state_data["nlevs"], lam)
+        (forcing_data, comp_data) = get_UFS_forcing_data_advective_tendency(forcing_dir, hist_i, hist_j, tile, neighbors, dx, dy, state_data["nlevs"], lam, save_comp, vertical_method, \
+                                                                            geos_wind_forcing, wind_nudge, geo_pts, geo_neighbors, geo_dx, geo_dy, use_actual_geos_wind)
+    elif forcing_method == 3:
+        exact_mode = False
+        (forcing_data, comp_data, stateInit) = get_UFS_forcing_data(state_data["nlevs"], state_data, \
+                                                                    location, use_nearest, forcing_dir, \
+                                                                    grid_dir, tile, IC_i, IC_j, lam,\
+                                                                    save_comp, exact_mode)
+    else:
+        message = "Unsupported forcing_method chosen"
+        logging.critical(message)
+        raise Exception(message)
+
     # Write SCM case file
-    fileOUT = write_SCM_case_file(state_data, surface_data, oro_data, forcing_data, case_name, date, \
-                                  stateREGRID)
+    fileOUT = write_SCM_case_file(state_data, surface_data, oro_data, forcing_data, stateInit, case_name, date, forcing_method, vertical_method, geos_wind_forcing, wind_nudge)

     # read in and remap the state variables to the first history file pressure profile and
     # write them out to compare SCM output to (atmf for state variables and sfcf for physics
diff --git a/scm/etc/scripts/UFS_forcing_ensemble_generator.py b/scm/etc/scripts/UFS_forcing_ensemble_generator.py
index c4a533127..26b15e89a 100755
--- a/scm/etc/scripts/UFS_forcing_ensemble_generator.py
+++ b/scm/etc/scripts/UFS_forcing_ensemble_generator.py
@@ -14,19 +14,23 @@
 # Argument list
 ###############################################################################
 parser = argparse.ArgumentParser()
-parser.add_argument('-d', '--dir', help='path to UFS Regression Test output', required=True)
-parser.add_argument('-n', '--case_name', help='name of case', required=True)
-parser.add_argument('-lonl', '--lon_limits', help='longitude range, separated by a space', nargs=2, type=float, required=False)
-parser.add_argument('-latl', '--lat_limits', help='latitude range, separated by a space', nargs=2, type=float, required=False)
-parser.add_argument('-lons', '--lon_list', help='longitudes, separated by a space', nargs='*', type=float, required=False)
-parser.add_argument('-lats', '--lat_list', help='latitudes, separated by a space', nargs='*', type=float, required=False)
-parser.add_argument('-fxy', '--lonlat_file', help='file containing longitudes and latitude',nargs=1, required=False)
-parser.add_argument('-nens', '--nensmembers', help='number of SCM UFS ensemble memebers to create', type=int, required=False)
-parser.add_argument('-dt', '--timestep', help='SCM timestep, in seconds', type=int, default = 3600)
-parser.add_argument('-cres', '--C_RES', help='UFS spatial resolution', type=int, default = 96)
-parser.add_argument('-sdf', '--suite', help='CCPP suite definition file to use for ensemble', default = 'SCM_GFS_v16')
-parser.add_argument('-sc', '--save_comp', help='flag to save a file with UFS data for comparisons', action='store_true')
-parser.add_argument('-near', '--use_nearest', help='flag to indicate using the nearest UFS history file gridpoint, no regridding',action='store_true')
+parser.add_argument('-d',    '--dir',             help='path to UFS Regression Test output', required=True)
+parser.add_argument('-n',    '--case_name',       help='name of case', required=True)
+parser.add_argument('-lonl', '--lon_limits',      help='longitude range, separated by a space', nargs=2, type=float, required=False)
+parser.add_argument('-latl', '--lat_limits',      help='latitude range, separated by a space', nargs=2, type=float, required=False)
+parser.add_argument('-lons', '--lon_list',        help='longitudes, separated by a space', nargs='*', type=float, required=False)
+parser.add_argument('-lats', '--lat_list',        help='latitudes, separated by a space', nargs='*', type=float, required=False)
+parser.add_argument('-fxy',  '--lonlat_file',     help='file containing longitudes and latitudes', nargs=1, required=False)
+parser.add_argument('-nens', '--nensmembers',     help='number of SCM UFS ensemble members to create', type=int, required=False)
+parser.add_argument('-dt',   '--timestep',        help='SCM timestep, in seconds', type=int, default = 3600)
+parser.add_argument('-cres', '--C_RES',           help='UFS spatial resolution', type=int, default = 96)
+parser.add_argument('-sdf',  '--suite',           help='CCPP suite definition file to use for ensemble', default = 'SCM_GFS_v16')
+parser.add_argument('-sc',   '--save_comp',       help='flag to save a file with UFS data for comparisons', action='store_true')
+parser.add_argument('-near', '--use_nearest',     help='flag to indicate using the nearest UFS history file gridpoint, no regridding', action='store_true')
+parser.add_argument('-fm',   '--forcing_method',  help='method used to calculate forcing (1=total tendencies from UFS dycore, 2=advective terms calculated from UFS history files, 3=total time tendency terms calculated)', type=int, choices=range(1,4), default=2)
+parser.add_argument('-vm',   '--vertical_method', help='method used to calculate vertical advective forcing (1=vertical advective terms calculated from UFS history files and added to total, 2=smoothed vertical velocity provided)', type=int, choices=range(1,3), default=2)
+parser.add_argument('-wn',   '--wind_nudge',      help='flag to turn on wind nudging to UFS profiles', action='store_true')
+parser.add_argument('-geos', '--geostrophic',     help='flag to turn on geostrophic wind forcing', action='store_true')

###############################################################################
# Main program
@@ -134,10 +138,14 @@ def main():
                    {"name": "dt",    "values": str(args.timestep)}, \
                    {"name": "C_RES", "values": str(args.C_RES)}]

-    # What, if any, options neeed to be passsed to UFS_IC_generator.py?
+    # What, if any, options need to be passed to UFS_case_gen.py?
     com_config = ''
-    if args.save_comp:   com_config = com_config + ' -sc'
-    if args.use_nearest: com_config = com_config + ' -near'
+    if args.save_comp:       com_config = com_config + ' -sc'
+    if args.use_nearest:     com_config = com_config + ' -near'
+    if args.forcing_method:  com_config = com_config + ' -fm ' + str(args.forcing_method)
+    if args.vertical_method: com_config = com_config + ' -vm ' + str(args.vertical_method)
+    if args.wind_nudge:      com_config = com_config + ' -wn'
+    if args.geostrophic:     com_config = com_config + ' -geos'

     # Create inputs to SCM
     case_list = ""
@@ -145,10 +153,10 @@ def main():
     count    = 0
     run_list = []
     for pt in range(0,npts):
-        # Call UFS_IC_generator.py
+        # Call UFS_case_gen.py
         case_name     = args.case_name +"_n" + str(pt).zfill(3)
         file_scminput = "../../data/processed_case_input/"+case_name+"_SCM_driver.nc"
-        com = "./UFS_IC_generator.py -l " +str(lons[pt]) + " " + str(lats[pt]) + \
+        com = "./UFS_case_gen.py -l " +str(lons[pt]) + " " + str(lats[pt]) + \
              " -i " + args.dir_ic + " -g " + args.dir_grid + " -f " + args.dir_forcing + " -n " + case_name + com_config
         print(com)
         os.system(com)
@@ -211,12 +219,12 @@ def main():
     # Display run commands
     #
     ###########################################################################
-    print("-------------------------------------------------------------------------------------------")
+    print("#"*128)
     print("Command(s) to execute in ccpp-scm/scm/bin/: ")
     print(" ")
     print("./run_scm.py --npz_type gfs --file " + fileOUT + " --timestep " + str(args.timestep))
     print("")
-    print("-------------------------------------------------------------------------------------------")
+    print("#"*128)

 if __name__ == '__main__':
     main()
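# For reference, a hedged sketch of what com_config and the per-point command above
# resolve to when every new flag is supplied (paths and lon/lat are hypothetical):
args_like = {"save_comp": True, "use_nearest": True, "forcing_method": 2,
             "vertical_method": 2, "wind_nudge": True, "geostrophic": True}
com_config = ""
if args_like["save_comp"]:       com_config += " -sc"
if args_like["use_nearest"]:     com_config += " -near"
if args_like["forcing_method"]:  com_config += " -fm " + str(args_like["forcing_method"])
if args_like["vertical_method"]: com_config += " -vm " + str(args_like["vertical_method"])
if args_like["wind_nudge"]:      com_config += " -wn"
if args_like["geostrophic"]:     com_config += " -geos"
print("./UFS_case_gen.py -l 262.5 38.5 -i ICS -g GRID -f FORCING -n demo_n000" + com_config)
# -> ./UFS_case_gen.py -l 262.5 38.5 -i ICS -g GRID -f FORCING -n demo_n000 -sc -near -fm 2 -vm 2 -wn -geos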