diff --git a/compass/landice/__init__.py b/compass/landice/__init__.py
index 97b33a010..2b531dfdb 100644
--- a/compass/landice/__init__.py
+++ b/compass/landice/__init__.py
@@ -10,6 +10,7 @@
 from compass.landice.tests.humboldt import Humboldt
 from compass.landice.tests.hydro_radial import HydroRadial
 from compass.landice.tests.ismip6_forcing import Ismip6Forcing
+from compass.landice.tests.ismip6_GrIS_forcing import Ismip6GrISForcing
 from compass.landice.tests.ismip6_run import Ismip6Run
 from compass.landice.tests.isunnguata_sermia import IsunnguataSermia
 from compass.landice.tests.kangerlussuaq import Kangerlussuaq
@@ -43,6 +44,7 @@ def __init__(self):
         self.add_test_group(Humboldt(mpas_core=self))
         self.add_test_group(HydroRadial(mpas_core=self))
         self.add_test_group(Ismip6Forcing(mpas_core=self))
+        self.add_test_group(Ismip6GrISForcing(mpas_core=self))
         self.add_test_group(Ismip6Run(mpas_core=self))
         self.add_test_group(IsunnguataSermia(mpas_core=self))
         self.add_test_group(Kangerlussuaq(mpas_core=self))
diff --git a/compass/landice/tests/ismip6_GrIS_forcing/__init__.py b/compass/landice/tests/ismip6_GrIS_forcing/__init__.py
new file mode 100644
index 000000000..232d8428f
--- /dev/null
+++ b/compass/landice/tests/ismip6_GrIS_forcing/__init__.py
@@ -0,0 +1,36 @@
+import os
+
+import yaml
+
+from compass.landice.tests.ismip6_GrIS_forcing.forcing_gen import ForcingGen
+from compass.testgroup import TestGroup
+
+
+class Ismip6GrISForcing(TestGroup):
+    """
+    A test group for processing the forcing for ISMIP6 GrIS projections
+    """
+    def __init__(self, mpas_core):
+        """
+        mpas_core : compass.landice.Landice
+            the MPAS core that this test group belongs to
+        """
+        super().__init__(mpas_core=mpas_core, name='ismip6_GrIS_forcing')
+
+        self.add_test_case(ForcingGen(test_group=self))
+
+        # open, read, and validate the experiment file
+        self._read_and_validate_experiment_file()
+
+    def _read_and_validate_experiment_file(self):
+
+        # get the path to this module, needed to open the experiment file
+        module_path = os.path.dirname(os.path.realpath(__file__))
+
+        with open(f"{module_path}/experiments.yml", "r") as f:
+            experiments = yaml.safe_load(f)
+
+        # TODO: validate the experiments dictionary before storing it on the
+        # test group; a possible check is sketched below
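+        # (a possible check, sketched here but not enabled: every experiment
+        #  entry is expected to define the keys used by the steps)
+        # required = {"Scenario", "GCM", "start", "end"}
+        # for name, params in experiments.items():
+        #     if not required.issubset(params):
+        #         raise KeyError(f"{name} is missing required keys")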
+ + self.experiments = experiments diff --git a/compass/landice/tests/ismip6_GrIS_forcing/create_mapping_files.py b/compass/landice/tests/ismip6_GrIS_forcing/create_mapping_files.py new file mode 100644 index 000000000..511983276 --- /dev/null +++ b/compass/landice/tests/ismip6_GrIS_forcing/create_mapping_files.py @@ -0,0 +1,207 @@ +import glob +import os +import shutil + +import xarray as xr +from mpas_tools.logging import check_call +from mpas_tools.scrip.from_mpas import scrip_from_mpas +from pyproj import Proj +from pyremap.descriptor import ProjectionGridDescriptor + +from compass.step import Step + + +class CreateMappingFiles(Step): + """ + """ + + def __init__(self, test_case): + """ + """ + name = "create_mapping_files" + subdir = "mapping_files" + + super().__init__(test_case=test_case, name=name, subdir=subdir, + cpus_per_task=128, min_cpus_per_task=1) + + # initalize the attributes with empty values for now, attributes will + # be propely in `setup` method so user specified config options can + # be parsed + self.mali_mesh = None + self.racmo_grid = None + + def setup(self): + """ + """ + + # parse user specified parameters from the config + config = self.config + forcing_section = config["ISMIP6_GrIS_Forcing"] + smb_ref_section = config["smb_ref_climatology"] + + # read the mesh path/name from the config file + mali_mesh = forcing_section.get("MALI_mesh_fp") + + # make sure the mesh path/name is valid and accessible + if not os.path.exists(mali_mesh): + raise FileNotFoundError(f"{mali_mesh}") + + # add mesh name as an attribute and an input file to the step + self.mali_mesh = mali_mesh + self.add_input_file(filename=self.mali_mesh) + + # + racmo_directory = smb_ref_section.get("racmo_directory") + # this filename should probably just be hardcoded..... + racmo_grid_fn = smb_ref_section.get("racmo_grid_fn") + # combine the directory and filename + racmo_grid_fp = os.path.join(racmo_directory, racmo_grid_fn) + + # make sure the combined filename exists + if not os.path.exists(racmo_grid_fp): + # check if the parent directory exists + if not os.path.exists(racmo_directory): + raise FileNotFoundError(f"{racmo_directory} does not exist") + # the parent directory exists but the forcing file does not + else: + raise FileNotFoundError(f"{racmo_grid_fp} does not exist") + + # add the racmo grid as an attribute and as an input file + self.racmo_grid = racmo_grid_fp + self.add_input_file(filename=racmo_grid_fp) + + # loop over mapping file `test_case` attributes and add the current + # steps `work_dir` to the absolute path. 
This will ensure other steps + # will be able to access the mapping files created here + for attr in ["mali_mesh_scrip", + "racmo_gis_scrip", + "ismip6_gis_scrip", + "racmo_2_mali_weights", + "ismip6_2_mali_weights"]: + # get the attribute values + file_name = getattr(self.test_case, attr) + # combine filename w/ the testcases work dir + full_path = os.path.join(self.work_dir, file_name) + # update the attribute value to include the full path + setattr(self.test_case, attr, full_path) + + # Add the testcase attributes as output files; since they'll be + # generated as part of this step + self.add_output_file(filename=self.test_case.mali_mesh_scrip) + self.add_output_file(filename=self.test_case.racmo_gis_scrip) + self.add_output_file(filename=self.test_case.ismip6_gis_scrip) + self.add_output_file(filename=self.test_case.racmo_2_mali_weights) + self.add_output_file(filename=self.test_case.ismip6_2_mali_weights) + + def run(self): + """ + """ + + # unpack the filepaths stored as attributes of the test_case + mali_mesh_scrip = self.test_case.mali_mesh_scrip + racmo_gis_scrip = self.test_case.racmo_gis_scrip + ismip6_gis_scrip = self.test_case.ismip6_gis_scrip + racmo_2_mali_weights = self.test_case.racmo_2_mali_weights + ismip6_2_mali_weights = self.test_case.ismip6_2_mali_weights + + # create scrip file describing MALI mesh that forcing variables + # will be interpolated onto + scrip_from_mpas(self.mali_mesh, mali_mesh_scrip) + + # WGS 84 / NSIDC Sea Ice Polar Stereographic North Projection + epsg_3413 = Proj("EPSG:3413") + + # make scrip file for racmo grid and mapping weights for racmo->mpas + self.make_forcing_descriptor_and_weights(self.racmo_grid, + racmo_gis_scrip, + mali_mesh_scrip, + epsg_3413, + racmo_2_mali_weights) + + # finding the right ismip6 forcing file is complicated, + # so let's call a helper function to do that. + ismip6_forcing_fp = self._find_ismip6_forcing_files() + + # make scrip file for ismip6 grid and mapping weights for ismip6->mpas + self.make_forcing_descriptor_and_weights(ismip6_forcing_fp, + ismip6_gis_scrip, + mali_mesh_scrip, + epsg_3413, + ismip6_2_mali_weights) + + def make_forcing_descriptor_and_weights(self, + forcing_file_fp, + forcing_scrip_fp, + mali_mesh_scrip_fp, + forcing_proj, + weights_file_fp): + + # open up forcing grid; get x and y coordinates as numpy arrays + with xr.open_dataset(forcing_file_fp) as ds: + x = ds.x.values + y = ds.y.values + + # create pyremap `MeshDescriptor` obj for the forcing grid + forcingDescriptor = ProjectionGridDescriptor.create( + projection=forcing_proj, x=x, y=y, meshName=forcing_file_fp) + + # write scrip file describing forcing grid + forcingDescriptor.to_scrip(forcing_scrip_fp) + + # generate mapping weights between forcing data and the MALI mesh + args = ['srun', '-n', '128', 'ESMF_RegridWeightGen', + '-s', forcing_scrip_fp, + '-d', mali_mesh_scrip_fp, + '-w', weights_file_fp, + '--method', 'conserve', + "--netcdf4", + # "--no_log", + "--dst_regional", "--src_regional", '--ignore_unmapped'] + + check_call(args, logger=self.logger) + + # `ESMF_RegridWeightGen` will generate a log file for each processor, + # which really clutters up the directory. 
So, if log files are created + # move them to their own `logs` directory + log_files = glob.glob("PET*.RegridWeightGen.Log") + + # check if glob returns an empty list + if log_files: + # split the output weights filename so that only the basename w/ + # no extension is left, in order to clean up the log files + base_name = os.path.splitext(os.path.basename(weights_file_fp))[0] + dir_name = f"{base_name}.Logs/" + + # check if there is already a Log files directory, if so wipe it + if os.path.exists(dir_name): + shutil.rmtree(dir_name) + + # make the log directory + os.mkdir(dir_name) + + # copy the log files to the log directory; using list comprehension + _ = [shutil.move(f, dir_name) for f in log_files] + + def _find_ismip6_forcing_files(self): + """ + """ + # get a list of all projection experiments requesed. + # (i.e. experiments names that use ISMIP6 forcing) + proj_exprs = list(filter(lambda x: "Exp" in x, + self.test_case.experiments.keys())) + + if proj_exprs: + # b/c all GrIS forcing files are on the same grid, it doesn't + # matter what expr we use; so just use the first suitable candiate + expr = proj_exprs[0] + + expr_params = self.test_case.experiments[expr] + GCM = expr_params["GCM"] + scenario = expr_params["Scenario"] + variable = "thermal_forcing" + + # get the filepath for any given forcing file (since all files are on + # the same grid), in order to generate a scrip file for the grid + forcing_fp = self.test_case.findForcingFiles(GCM, scenario, variable) + + return forcing_fp diff --git a/compass/landice/tests/ismip6_GrIS_forcing/experiments.yml b/compass/landice/tests/ismip6_GrIS_forcing/experiments.yml new file mode 100644 index 000000000..e5b959fba --- /dev/null +++ b/compass/landice/tests/ismip6_GrIS_forcing/experiments.yml @@ -0,0 +1,35 @@ +ctrl: + Scenario: ctrl + GCM: RACMO + start: 1960 + end: 1989 + +hist: + Scenario: ctrl + GCM: RACMO + start: 1989 + end: 2015 + +Exp05: + Scenario: rcp8.5 + GCM: MIROC5 + start: 2015 + end: 2100 + +Exp06: + Scenario: rcp8.5 + GCM: NorESM1-M + start: 2015 + end: 2100 + +Exp07: + Scenario: rcp2.6 + GCM: MIROC5 + start: 2015 + end: 2100 + +Exp08: + Scenario: rcp8.5 + GCM: HadGEM2-ES + start: 2015 + end: 2100 diff --git a/compass/landice/tests/ismip6_GrIS_forcing/file_finders.py b/compass/landice/tests/ismip6_GrIS_forcing/file_finders.py new file mode 100644 index 000000000..9f41ca0eb --- /dev/null +++ b/compass/landice/tests/ismip6_GrIS_forcing/file_finders.py @@ -0,0 +1,137 @@ +import glob +import os + +import xarray as xr + +# create mapping dictionary of ISMIP6 variables to MALI variable names +{"thermal_forcing": "ismip6_2dThermalForcing", + "basin_runoff": "basin_runoff"} + + +def check_year(fn, start=2015, end=2100): + + # strip the file extension from filename and + # convert the year at end of str to an int + fn_year = int(os.path.splitext(fn)[0].split("-")[-1]) + + # check if year is within range + return start <= fn_year <= end + + +class oceanFileFinder: + + def __init__(self, archive_fp, version="v4"): + + self.version = version + + # file strucutre within Ocean_Forcing directory for navigating + file_struct = f"Ocean_Forcing/Melt_Implementation/{version}" + + # file path to directory containing forcings files from each GCM + dir2GCMs = os.path.join(archive_fp, file_struct) + + # should do some checking here that all the fps work... 
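+        # (one way to do that, sketched here as a comment: fail early if the
+        #  expected Melt_Implementation directory is missing)
+        # if not os.path.isdir(dir2GCMs):
+        #     raise FileNotFoundError(f"{dir2GCMs} does not exist")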
+ self.dir2GCMs = dir2GCMs + + def get_filename(self, GCM, scenario, variable): + + # convert from variable name within NetCDF file, to variable name as it + # appears in the filename + if variable == "thermal_forcing": + fn_var = "oceanThermalForcing" + elif variable == "basin_runoff": + fn_var = "basinRunoff" + else: + raise ValueError(f"invalid varibale name: {variable}") + + # within filename scenario have for removed + scenario_no_dot = "".join(scenario.split(".")) + + fp = (f"{GCM.lower()}_{scenario}/" + f"MAR3.9_{GCM}_{scenario_no_dot}_{fn_var}_{self.version}.nc") + + # check that file exists!!! + return os.path.join(self.dir2GCMs, fp) + + +class atmosphereFileFinder: + + def __init__(self, archive_fp, version="v1", workdir="./"): + + self.version = version + + if os.path.exists(workdir): + self.workdir = workdir + else: + print("gotta do error checking") + + # file strucutre within Atmosphere_Forcing directory for navigating + file_struct = f"Atmosphere_Forcing/aSMB_observed/{version}" + + # file path to directory containing forcings files from each GCM + dir2GCMs = os.path.join(archive_fp, file_struct) + + # should do some checking here that all the fps work... + self.dir2GCMs = dir2GCMs + + def get_filename(self, GCM, scenario, variable, start=2015, end=2100): + """ + """ + if GCM == "NorESM1-M": + GCM = "NorESM1" + # get a list of all the yearly files within the period of intrest + yearly_files = self.__find_yearly_files(GCM, scenario, variable, + start, end) + # still need to make the output filename to write combined files to + out_fn = f"MAR3.9_{GCM}_{scenario}_{variable}_{start}--{end}.nc" + # relative to the workdir, which we've already checked if if existed + # make the filepath for the nested GCM/scenario/var direcotry struct. + top_fp = os.path.join(self.workdir, f"{GCM}-{scenario}/{variable}") + + # make the nested directory strucure; if needed + if not os.path.exists(top_fp): + os.makedirs(top_fp, exist_ok=True) + + out_fp = os.path.join(top_fp, out_fn) + + # only combine the files if they don't exist yet + if not os.path.exists(out_fp): + # with output filename, combine the files to a single + self.__combine_files(yearly_files, out_fp) + + return out_fp + + def __find_yearly_files(self, GCM, scenario, variable, start, end): + """ + """ + # within filename scenario have for removed + scenario_no_dot = "".join(scenario.split(".")) + + # create filename template to be globbed + fp = (f"{GCM}-{scenario_no_dot}/{variable}/" + f"{variable}_MARv3.9-yearly-{GCM}-{scenario_no_dot}-*.nc") + + # glob the files + all_files = glob.glob(os.path.join(self.dir2GCMs, fp)) + # all directories should have same number of files; so check here to + # make sure things worked properly + + # get the files within the desired period + files = sorted(f for f in all_files if check_year(f, start, end)) + + # make sure the length of fns returned matches the length (in years) + # of the period + period = (end - start) + 1 + + assert len(files) == period + return files + + def __combine_files(self, files, out_fn): + """ + """ + ds = xr.open_mfdataset(files, concat_dim="time", combine="nested", + data_vars='minimal', coords='minimal', + compat="broadcast_equals", + combine_attrs="override") + + ds.to_netcdf(out_fn) diff --git a/compass/landice/tests/ismip6_GrIS_forcing/forcing_gen/__init__.py b/compass/landice/tests/ismip6_GrIS_forcing/forcing_gen/__init__.py new file mode 100644 index 000000000..78a721df3 --- /dev/null +++ b/compass/landice/tests/ismip6_GrIS_forcing/forcing_gen/__init__.py @@ -0,0 
+1,105 @@ +from compass.landice.tests.ismip6_GrIS_forcing.create_mapping_files import ( + CreateMappingFiles, +) +from compass.landice.tests.ismip6_GrIS_forcing.file_finders import ( + atmosphereFileFinder, + oceanFileFinder, +) +from compass.landice.tests.ismip6_GrIS_forcing.process_forcing import ( + ProcessForcing, +) +from compass.landice.tests.ismip6_GrIS_forcing.ref_smb_climatology import ( + SMBRefClimatology, +) +from compass.testcase import TestCase + + +class ForcingGen(TestCase): + """ + A TestCase for remapping (i.e. interpolating) ISMIP6 GrIS forcing files + onto a MALI mesh + + Attributes + ---------- + + mali_mesh_scrip : str + filepath to the scrip file describing the MALI mesh + + ismip6_GrIS_scrip : str + filepath to the scrip file describing the grid the ISMIP6 GrIS + forcing data is on. (Note: All forcing files are on the same grid, + so only need one scrip file for all forcing files) + + remapping_weights : str + filepath to the `ESMF_RegirdWeightGen` generated mapping file + + experiments : dict + + """ + + def __init__(self, test_group): + """ + Parameters + ---------- + test_group : compass.landice.tests... + The test group that this test case belongs to + """ + name = "forcing_gen" + super().__init__(test_group=test_group, name=name) + + # NOTE: scrip and weight filenames are stored at the testcase level + # so they will be accesible to all steps in the testcase + + # filenames for scrip files (i.e. grid descriptors) + self.mali_mesh_scrip = "mali_mesh.scrip.nc" + self.racmo_gis_scrip = "racmo_gis.scrip.nc" + self.ismip6_gis_scrip = "ismip6_GrIS.scrip.nc" + # filenames of remapping files + self.racmo_2_mali_weights = "racmo_2_mali.weights.nc" + self.ismip6_2_mali_weights = "ismip6_2_mali.weights.nc" + # filename of the reference climatology + self.smb_ref_climatology = None + # place holder for file finders that will be initialized in `configure` + self.__atmFF = None + self.__ocnFF = None + + # precusssory step that builds scrip file for mali mesh, + # and generates a common weights file to be used in remapping + self.add_step(CreateMappingFiles(test_case=self)) + + # step that deals with racmo, do all I need to do is remap and average? + self.add_step(SMBRefClimatology(test_case=self)) + + # add steps that re-maps and processes downscaled GCM data for each + # experiment. 
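+        # Only one ProcessForcing step is added; it loops over the requested
+        # experiments and reuses the ismip6-to-MALI weights generated by the
+        # mapping step above.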
+ self.add_step(ProcessForcing(test_case=self)) + + def configure(self): + + config = self.config + # add ouputdir path to the remapping files + + # get the list of requested experiments + expr2run = config.getlist("ISMIP6_GrIS_Forcing", "experiments") + # get the dictionary of experiments, as defined in the yaml file + all_exprs = self.test_group.experiments + # get subset of dictionaries, based on requested expriments + self.experiments = {e: all_exprs[e] for e in expr2run} + + archive_fp = config.get("ISMIP6_GrIS_Forcing", "archive_fp") + + # initalize the oceanFileFinder + self.__ocnFF = oceanFileFinder(archive_fp) + self.__atmFF = atmosphereFileFinder(archive_fp) # , workdir=workdir) + + def findForcingFiles(self, GCM, scenario, variable): + """ + """ + + if variable in ["basin_runoff", "thermal_forcing"]: + forcing_fp = self.__ocnFF.get_filename(GCM, scenario, variable) + + if variable in ["aSMB", "aST", "dSMBdz", "dSTdz"]: + forcing_fp = self.__atmFF.get_filename(GCM, scenario, variable) + + return forcing_fp diff --git a/compass/landice/tests/ismip6_GrIS_forcing/forcing_gen/forcing_gen.cfg b/compass/landice/tests/ismip6_GrIS_forcing/forcing_gen/forcing_gen.cfg new file mode 100644 index 000000000..becc05ce4 --- /dev/null +++ b/compass/landice/tests/ismip6_GrIS_forcing/forcing_gen/forcing_gen.cfg @@ -0,0 +1,22 @@ +[smb_ref_climatology] +# path to racmo data... or could just have it on lcrc +racmo_directory = /global/cfs/cdirs/fanssie/standard_datasets/RACMO2p3 +# should this be included here, or just hardcoded w/in the step? +racmo_grid_fn = Icemask_Topo_Iceclasses_lon_lat_average_1km_GrIS.nc +# see above about hardcoding +racmo_smb_fn = smb_rec.1958-2019.BN_RACMO2.3p2_FGRN055_GrIS.MM.nc +# +climatology_start = 1960 +climatology_end = 1989 + +[ISMIP6_GrIS_Forcing] + +# full path to MALI mesh forcing data will be regridded onto +MALI_mesh_fp = /global/cfs/cdirs/fanssie/MALI_input_files/GIS_1to10km_r02/GIS_1to10km_r02_20230202_relaxed.nc + +# Path to GrIS folder of the ISMIP6-Forcing-Ghub archive +archive_fp = /global/cfs/cdirs/fanssie/standard_datasets/ISMIP6-Forcing-Ghub/GrIS + +# list of experiments to generate forcing for. +# See `experiments.yml` for a list of supported experiments +experiments = ctrl, Exp05 diff --git a/compass/landice/tests/ismip6_GrIS_forcing/process_forcing.py b/compass/landice/tests/ismip6_GrIS_forcing/process_forcing.py new file mode 100644 index 000000000..d8f8b0cf4 --- /dev/null +++ b/compass/landice/tests/ismip6_GrIS_forcing/process_forcing.py @@ -0,0 +1,145 @@ +import os + +import xarray as xr +from mpas_tools.io import write_netcdf + +from compass.landice.tests.ismip6_GrIS_forcing.utilities import ( + add_xtime, + remap_variables, +) +from compass.step import Step + +renaming_dict = {"thermal_forcing": "ismip6_2dThermalForcing", + "basin_runoff": "ismip6Runoff", + "aSMB": "sfcMassBal", + "dSMBdz": "sfcMassBal_lapseRate", + "aST": "surfaceAirTemperature", + "dSTdz": "surfaceAirTemperature_lapseRate"} + + +class ProcessForcing(Step): + """ + + """ + + def __init__(self, test_case): + """ + """ + + name = "process_forcing" + + super().__init__(test_case=test_case, name=name, subdir=None, + cpus_per_task=1, min_cpus_per_task=1) + # read and store the experiment dictionary + + # initalize the FileFinders are store them as dict? 
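+        # (as currently written, neither is needed here: the experiments and
+        #  the file finders both live on the test case, and `run` reaches
+        #  them through `self.test_case`)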
+ + def run(self): + + # list of variables to process + atmosphere_vars = ["aSMB", "aST", "dSMBdz", "dSTdz"] + ocean_vars = ["basin_runoff", "thermal_forcing"] + + # loop over experiments passed via config file + for expr_name, expr_dict in self.test_case.experiments.items(): + # print to logger which expriment we are on (i/n) + + GCM = expr_dict["GCM"] + scenario = expr_dict["Scenario"] + start = expr_dict["start"] + end = expr_dict["end"] + + # need a special way to treat the RACMO datasets + if expr_name in ["ctrl", "hist"]: + continue + + atm_fn = f"gis_atm_forcing_{GCM}_{scenario}_{start}--{end}.nc" + self.process_variables(GCM, scenario, start, end, atmosphere_vars, + atm_fn) + + ocn_fn = f"gis_ocn_forcing_{GCM}_{scenario}_{start}--{end}.nc" + self.process_variables(GCM, scenario, start, end, ocean_vars, + ocn_fn) + + def process_variables(self, GCM, scenario, start, end, + variables, output_fn): + + forcing_datastes = [] + + for var in variables: + var_fp = self.test_case.findForcingFiles(GCM, scenario, var) + + ds = self.process_variable(GCM, scenario, var, var_fp, start, end) + + forcing_datastes.append(ds) + + forcing_ds = xr.merge(forcing_datastes) + + write_netcdf(forcing_ds, output_fn) + + def process_variable(self, GCM, scenario, var, forcing_fp, start, end): + + # + config = self.config + # + renamed_var = renaming_dict[var] + + # create the remapped file name, using original variable name + remapped_fn = f"remapped_{GCM}_{scenario}_{var}_{start}-{end}.nc" + # follow the directory structure of the concatenated source files + remapped_fp = os.path.join(f"{GCM}-{scenario}/{var}", remapped_fn) + + # remap the forcing file + remap_variables(forcing_fp, + remapped_fp, + self.test_case.ismip6_2_mali_weights, + variables=[var], + logger=self.logger) + + # Now that forcing has been regridded onto the MALI grid, we can drop + # ISMIP6 grid variables. Special note: the char field `mapping` + # particularily causes problem with `xtime` (40byte vs. 
64byte chars) + vars_2_drop = ["time_bounds", "lat_vertices", "lon_vertices", "area", + "mapping", "lat", "lon", "polar_stereographic"] + + # open the remapped file for post processing + remapped_ds = xr.open_dataset(remapped_fp, + drop_variables=vars_2_drop, + use_cftime=True) + + # mask of desired time indices + mask = remapped_ds.time.dt.year.isin(range(start, end)) + # drop the non-desired time indices from remapped dataset + remapped_ds = remapped_ds.where(mask, drop=True) + # add xtime variable based on `time` + remapped_ds["xtime"] = add_xtime(remapped_ds, var="time") + + # rename the variable/dimensions to match MPAS/MALI conventions + remapped_ds = remapped_ds.rename({"ncol": "nCells", + "time": "Time", + var: renamed_var}) + + # drop the unneeded attributes from the dataset + for _var in remapped_ds: + remapped_ds[_var].attrs.pop("grid_mapping", None) + remapped_ds[_var].attrs.pop("cell_measures", None) + + # SMB is not a var in ISMIP6 forcing files, need to use `SMB_ref` + # and the processed `aSMB` to produce a `SMB` forcing + if renamed_var == "sfcMassBal": + # open the reference climatology file + smb_ref = xr.open_dataset(self.test_case.smb_ref_climatology) + # squeeze the empty time dimension so that broadcasting works + smb_ref = smb_ref.squeeze("Time") + # add climatology to anomoly to make full forcing field + remapped_ds[renamed_var] += smb_ref[renamed_var] + + if renamed_var == "surfaceAirTemperature": + # read the mesh path/name from the config file + mali_mesh_fp = config.get("ISMIP6_GrIS_Forcing", "MALI_mesh_fp") + # squeeze the empty time dimension so that broadcasting works + mali_ds = xr.open_dataset(mali_mesh_fp).squeeze("Time") + + remapped_ds[renamed_var] += mali_ds[renamed_var] + + return remapped_ds diff --git a/compass/landice/tests/ismip6_GrIS_forcing/ref_smb_climatology.py b/compass/landice/tests/ismip6_GrIS_forcing/ref_smb_climatology.py new file mode 100644 index 000000000..e9b6b718c --- /dev/null +++ b/compass/landice/tests/ismip6_GrIS_forcing/ref_smb_climatology.py @@ -0,0 +1,121 @@ +import os + +import xarray as xr +from mpas_tools.io import write_netcdf +from mpas_tools.logging import check_call + +from compass.step import Step + + +class SMBRefClimatology(Step): + """ + """ + + def __init__(self, test_case): + """ + """ + name = "smb_ref_climatology" + subdir = None + + super().__init__(test_case=test_case, name=name, subdir=subdir) + + # initalize the attributes with empty values for now, attributes will + # be propely in `setup` method so user specified config options can + # be parsed + self.racmo_smb_fp = None + self.smb_ref_climatology = None + + def setup(self): + """ + """ + + # parse user specified parameters from the config + config = self.config + smb_ref_section = config["smb_ref_climatology"] + + # + racmo_directory = smb_ref_section.get("racmo_directory") + # this filename should probably just be hardcoded..... 
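+        # (note: the climatology indexing in `run` assumes this file holds
+        #  monthly `SMB_rec` values starting in 1958, as in the default
+        #  RACMO2.3p2 reconstruction named in the config)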
+ racmo_smb_fn = smb_ref_section.get("racmo_smb_fn") + # combine the directory and filename + racmo_smb_fp = os.path.join(racmo_directory, racmo_smb_fn) + + # make sure the combined filename exists + if not os.path.exists(racmo_smb_fp): + # check if the parent directory exists + if not os.path.exists(racmo_directory): + raise FileNotFoundError(f"{racmo_directory} does not exist") + # the parent directory exists but the forcing file does not + else: + raise FileNotFoundError(f"{racmo_smb_fp} does not exist") + + # add the racmo smb as an attribute and as an input file + self.racmo_smb = racmo_smb_fp + self.add_input_file(filename=racmo_smb_fp) + + # get the start and end dates for the climatological mean + clima_start = smb_ref_section.getint("climatology_start") + clima_end = smb_ref_section.getint("climatology_end") + + # make a descriptive filename based on climatology period + clima_fn = f"racmo_climatology_{clima_start}--{clima_end}.nc" + # add the steps workdir to the filename to make it a full path + clima_fp = os.path.join(self.work_dir, clima_fn) + + # set the testcase attribute and add it as an output file for this + # step so the climatology will by useable by other steps + self.test_case.smb_ref_climatology = clima_fp + self.add_output_file(filename=self.test_case.smb_ref_climatology) + + def run(self): + """ + """ + + # parse user specified parameters from the config + config = self.config + smb_ref_section = config["smb_ref_climatology"] + # get the start and end dates for the climatological mean + clima_start = smb_ref_section.getint("climatology_start") + clima_end = smb_ref_section.getint("climatology_end") + + # remap the gridded racmo data onto the mpas grid + self.remap_variable(self.racmo_smb, + self.test_case.smb_ref_climatology, + self.test_case.racmo_2_mali_weights) + + ds = xr.open_dataset(self.test_case.smb_ref_climatology, + decode_times=False) + + # find indices of climatology start/end (TO DO: make more robust) + s_idx = ((clima_start - 1958) * 12) - 1 + e_idx = ((clima_end - 1958) * 12) - 1 + + # calculate climatology + ds["SMB_rec"] = ds.SMB_rec.isel(time=slice(s_idx, e_idx)).mean("time") + # rename variables to match MALI/MPAS conventiosn + ds = ds.rename(SMB_rec="sfcMassBal", ncol="nCells") + # drop unused dimensions + ds = ds.drop_dims(["time", "nv"]) + # drop un-needed varibales + ds = ds.drop_vars(["area", "lat", "lon"]) + # convert `sfcMassBal` to MPAS units + ds["sfcMassBal"] /= (60 * 60 * 24 * 365) / 12. 
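+        # (the divisor above is seconds per month for a 365-day year; if the
+        #  RACMO field is a monthly total in kg m-2 (mm w.e.), this yields a
+        #  mean flux in kg m-2 s-1, matching the units attribute set below)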
+ # add a units attribute to `sfcMassBal` + ds["sfcMassBal"].attrs["units"] = "kg m-2 s-1" + # expand sfcMassBal dimension to match what MALI expects + ds["sfcMassBal"] = ds.sfcMassBal.expand_dims("Time") + # write the file + write_netcdf(ds, self.test_case.smb_ref_climatology) + + def remap_variable(self, input_file, output_file, weights_file): + """ + """ + + # remap the forcing file onto the MALI mesh + args = ["ncremap", + "-i", input_file, + "-o", output_file, + "-m", weights_file, + "-v", "SMB_rec"] + + check_call(args, logger=self.logger) diff --git a/compass/landice/tests/ismip6_GrIS_forcing/utilities.py b/compass/landice/tests/ismip6_GrIS_forcing/utilities.py new file mode 100644 index 000000000..b553ef8eb --- /dev/null +++ b/compass/landice/tests/ismip6_GrIS_forcing/utilities.py @@ -0,0 +1,50 @@ +import xarray as xr +from mpas_tools.logging import check_call + + +def _datetime_2_xtime(ds, var="Time", xtime_fmt="%Y-%m-%d_%H:%M:%S"): + + return xr.apply_ufunc(lambda t: t.strftime(xtime_fmt).ljust(64), + ds[var], vectorize=True, output_dtypes=["S"]) + + +def add_xtime(ds, var="Time"): + """ + ds["xtime"] = add_xtime(ds) + """ + + # ensure time variable has been properly parsed as a datetime object + if not hasattr(ds[var], "dt"): + raise TypeError(f"The {var} variable passed has not been parsed as" + " a datetime object, so conversion to xtime string" + " will not work.\n\nTry using ds = xr.open_dataset" + "(..., use_cftime=True).") + + # xtime related attributes + attrs = {"units": "unitless", + "long_name": "model time, with format \'YYYY-MM-DD_HH:MM:SS\'"} + + # compute xtime dataarray from time variable passed + xtime = _datetime_2_xtime(ds, var=var) + + # update attributes of dataarray + xtime.attrs = attrs + + return xtime + + +def remap_variables(in_fp, out_fp, weights_fp, variables=None, logger=None): + """ + """ + + args = ["ncremap", + "-i", in_fp, + "-o", out_fp, + "-m", weights_fp] + + if variables and not isinstance(variables, list): + raise TypeError("`variables` kwarg must be a list of strings.") + elif variables: + args += ["-v"] + variables + + check_call(args, logger=logger)
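+
+
+# A minimal usage sketch for the helpers above (file names are placeholders;
+# the weights file would come from the create_mapping_files step):
+#
+#     remap_variables("forcing.nc", "remapped.nc", "weights.nc",
+#                     variables=["aSMB"])
+#     ds = xr.open_dataset("remapped.nc", use_cftime=True)
+#     ds["xtime"] = add_xtime(ds, var="time")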