diff --git a/1.11.0rc1/_images/ant_melt.png b/1.11.0rc1/_images/ant_melt.png new file mode 100644 index 000000000..8b25842e7 Binary files /dev/null and b/1.11.0rc1/_images/ant_melt.png differ diff --git a/1.11.0rc1/_images/berg_conc_sh.png b/1.11.0rc1/_images/berg_conc_sh.png new file mode 100644 index 000000000..d060a5d96 Binary files /dev/null and b/1.11.0rc1/_images/berg_conc_sh.png differ diff --git a/1.11.0rc1/_images/bgc.png b/1.11.0rc1/_images/bgc.png new file mode 100644 index 000000000..1c9ec7001 Binary files /dev/null and b/1.11.0rc1/_images/bgc.png differ diff --git a/1.11.0rc1/_images/bsf.png b/1.11.0rc1/_images/bsf.png new file mode 100644 index 000000000..10746159d Binary files /dev/null and b/1.11.0rc1/_images/bsf.png differ diff --git a/1.11.0rc1/_images/clim_argo_salin.png b/1.11.0rc1/_images/clim_argo_salin.png new file mode 100644 index 000000000..d1cc9bf0c Binary files /dev/null and b/1.11.0rc1/_images/clim_argo_salin.png differ diff --git a/1.11.0rc1/_images/clim_argo_temp.png b/1.11.0rc1/_images/clim_argo_temp.png new file mode 100644 index 000000000..dd0e8754e Binary files /dev/null and b/1.11.0rc1/_images/clim_argo_temp.png differ diff --git a/1.11.0rc1/_images/clim_ohc.png b/1.11.0rc1/_images/clim_ohc.png new file mode 100644 index 000000000..cc82c2b2f Binary files /dev/null and b/1.11.0rc1/_images/clim_ohc.png differ diff --git a/1.11.0rc1/_images/clim_sose_temp.png b/1.11.0rc1/_images/clim_sose_temp.png new file mode 100644 index 000000000..ab554ccb9 Binary files /dev/null and b/1.11.0rc1/_images/clim_sose_temp.png differ diff --git a/1.11.0rc1/_images/clim_woa_temp.png b/1.11.0rc1/_images/clim_woa_temp.png new file mode 100644 index 000000000..32043e7f9 Binary files /dev/null and b/1.11.0rc1/_images/clim_woa_temp.png differ diff --git a/1.11.0rc1/_images/colormaps.png b/1.11.0rc1/_images/colormaps.png new file mode 100644 index 000000000..fdba52eb0 Binary files /dev/null and b/1.11.0rc1/_images/colormaps.png differ diff --git 
a/1.11.0rc1/_images/drake_passage_transport.png b/1.11.0rc1/_images/drake_passage_transport.png new file mode 100644 index 000000000..d04dcdd08 Binary files /dev/null and b/1.11.0rc1/_images/drake_passage_transport.png differ diff --git a/1.11.0rc1/_images/eke.png b/1.11.0rc1/_images/eke.png new file mode 100644 index 000000000..5aa1ba865 Binary files /dev/null and b/1.11.0rc1/_images/eke.png differ diff --git a/1.11.0rc1/_images/geojson_transect.png b/1.11.0rc1/_images/geojson_transect.png new file mode 100644 index 000000000..10f0669de Binary files /dev/null and b/1.11.0rc1/_images/geojson_transect.png differ diff --git a/1.11.0rc1/_images/histogram_ssh_aviso_atl.png b/1.11.0rc1/_images/histogram_ssh_aviso_atl.png new file mode 100644 index 000000000..166351a4b Binary files /dev/null and b/1.11.0rc1/_images/histogram_ssh_aviso_atl.png differ diff --git a/1.11.0rc1/_images/hovmoller_salin.png b/1.11.0rc1/_images/hovmoller_salin.png new file mode 100644 index 000000000..6c8ec1a30 Binary files /dev/null and b/1.11.0rc1/_images/hovmoller_salin.png differ diff --git a/1.11.0rc1/_images/hovmoller_temp.png b/1.11.0rc1/_images/hovmoller_temp.png new file mode 100644 index 000000000..7dfcf6e3a Binary files /dev/null and b/1.11.0rc1/_images/hovmoller_temp.png differ diff --git a/1.11.0rc1/_images/hovmoller_weddell.png b/1.11.0rc1/_images/hovmoller_weddell.png new file mode 100644 index 000000000..acf0650e3 Binary files /dev/null and b/1.11.0rc1/_images/hovmoller_weddell.png differ diff --git a/1.11.0rc1/_images/ice_area_nh.png b/1.11.0rc1/_images/ice_area_nh.png new file mode 100644 index 000000000..dbfbfee3d Binary files /dev/null and b/1.11.0rc1/_images/ice_area_nh.png differ diff --git a/1.11.0rc1/_images/ice_conc_nh.png b/1.11.0rc1/_images/ice_conc_nh.png new file mode 100644 index 000000000..039edfd0b Binary files /dev/null and b/1.11.0rc1/_images/ice_conc_nh.png differ diff --git a/1.11.0rc1/_images/ice_conc_sh.png b/1.11.0rc1/_images/ice_conc_sh.png new file mode 
100644 index 000000000..dd81d6587 Binary files /dev/null and b/1.11.0rc1/_images/ice_conc_sh.png differ diff --git a/1.11.0rc1/_images/ice_melting_nh.png b/1.11.0rc1/_images/ice_melting_nh.png new file mode 100644 index 000000000..1b419e4d5 Binary files /dev/null and b/1.11.0rc1/_images/ice_melting_nh.png differ diff --git a/1.11.0rc1/_images/ice_melting_sh.png b/1.11.0rc1/_images/ice_melting_sh.png new file mode 100644 index 000000000..3b0f48b36 Binary files /dev/null and b/1.11.0rc1/_images/ice_melting_sh.png differ diff --git a/1.11.0rc1/_images/ice_production_nh.png b/1.11.0rc1/_images/ice_production_nh.png new file mode 100644 index 000000000..0f333d9d3 Binary files /dev/null and b/1.11.0rc1/_images/ice_production_nh.png differ diff --git a/1.11.0rc1/_images/ice_production_sh.png b/1.11.0rc1/_images/ice_production_sh.png new file mode 100644 index 000000000..c9574d2e1 Binary files /dev/null and b/1.11.0rc1/_images/ice_production_sh.png differ diff --git a/1.11.0rc1/_images/ice_thick_nh.png b/1.11.0rc1/_images/ice_thick_nh.png new file mode 100644 index 000000000..d1c33a947 Binary files /dev/null and b/1.11.0rc1/_images/ice_thick_nh.png differ diff --git a/1.11.0rc1/_images/ice_thick_sh.png b/1.11.0rc1/_images/ice_thick_sh.png new file mode 100644 index 000000000..9a3766dcc Binary files /dev/null and b/1.11.0rc1/_images/ice_thick_sh.png differ diff --git a/1.11.0rc1/_images/melt_flux_east_ant.png b/1.11.0rc1/_images/melt_flux_east_ant.png new file mode 100644 index 000000000..d66314b93 Binary files /dev/null and b/1.11.0rc1/_images/melt_flux_east_ant.png differ diff --git a/1.11.0rc1/_images/mht.png b/1.11.0rc1/_images/mht.png new file mode 100644 index 000000000..f19b9e6d8 Binary files /dev/null and b/1.11.0rc1/_images/mht.png differ diff --git a/1.11.0rc1/_images/mld.png b/1.11.0rc1/_images/mld.png new file mode 100644 index 000000000..8d146dcaa Binary files /dev/null and b/1.11.0rc1/_images/mld.png differ diff --git a/1.11.0rc1/_images/moc.png 
b/1.11.0rc1/_images/moc.png new file mode 100644 index 000000000..432f86579 Binary files /dev/null and b/1.11.0rc1/_images/moc.png differ diff --git a/1.11.0rc1/_images/nino.png b/1.11.0rc1/_images/nino.png new file mode 100644 index 000000000..2e008551d Binary files /dev/null and b/1.11.0rc1/_images/nino.png differ diff --git a/1.11.0rc1/_images/ocean_profile.png b/1.11.0rc1/_images/ocean_profile.png new file mode 100644 index 000000000..b9c0aa0ca Binary files /dev/null and b/1.11.0rc1/_images/ocean_profile.png differ diff --git a/1.11.0rc1/_images/peak_period.png b/1.11.0rc1/_images/peak_period.png new file mode 100644 index 000000000..0379b5942 Binary files /dev/null and b/1.11.0rc1/_images/peak_period.png differ diff --git a/1.11.0rc1/_images/schmidtko_temp.png b/1.11.0rc1/_images/schmidtko_temp.png new file mode 100644 index 000000000..459db2df9 Binary files /dev/null and b/1.11.0rc1/_images/schmidtko_temp.png differ diff --git a/1.11.0rc1/_images/so_ts_diag.png b/1.11.0rc1/_images/so_ts_diag.png new file mode 100644 index 000000000..ed0edc6d3 Binary files /dev/null and b/1.11.0rc1/_images/so_ts_diag.png differ diff --git a/1.11.0rc1/_images/sose_transect.png b/1.11.0rc1/_images/sose_transect.png new file mode 100644 index 000000000..0dae74b99 Binary files /dev/null and b/1.11.0rc1/_images/sose_transect.png differ diff --git a/1.11.0rc1/_images/ssh.png b/1.11.0rc1/_images/ssh.png new file mode 100644 index 000000000..6d93dcf78 Binary files /dev/null and b/1.11.0rc1/_images/ssh.png differ diff --git a/1.11.0rc1/_images/sss.png b/1.11.0rc1/_images/sss.png new file mode 100644 index 000000000..a1650fd9c Binary files /dev/null and b/1.11.0rc1/_images/sss.png differ diff --git a/1.11.0rc1/_images/sst.png b/1.11.0rc1/_images/sst.png new file mode 100644 index 000000000..467732517 Binary files /dev/null and b/1.11.0rc1/_images/sst.png differ diff --git a/1.11.0rc1/_images/sst_example.png b/1.11.0rc1/_images/sst_example.png new file mode 100644 index 
000000000..9235ed56e Binary files /dev/null and b/1.11.0rc1/_images/sst_example.png differ diff --git a/1.11.0rc1/_images/swh.png b/1.11.0rc1/_images/swh.png new file mode 100644 index 000000000..bce08f473 Binary files /dev/null and b/1.11.0rc1/_images/swh.png differ diff --git a/1.11.0rc1/_images/time_series_moc.png b/1.11.0rc1/_images/time_series_moc.png new file mode 100644 index 000000000..dfc7b56de Binary files /dev/null and b/1.11.0rc1/_images/time_series_moc.png differ diff --git a/1.11.0rc1/_images/time_series_ohc.png b/1.11.0rc1/_images/time_series_ohc.png new file mode 100644 index 000000000..e6e6f88cc Binary files /dev/null and b/1.11.0rc1/_images/time_series_ohc.png differ diff --git a/1.11.0rc1/_images/time_series_sst.png b/1.11.0rc1/_images/time_series_sst.png new file mode 100644 index 000000000..045b9d809 Binary files /dev/null and b/1.11.0rc1/_images/time_series_sst.png differ diff --git a/1.11.0rc1/_images/total_mass_flux.png b/1.11.0rc1/_images/total_mass_flux.png new file mode 100644 index 000000000..efd40eabf Binary files /dev/null and b/1.11.0rc1/_images/total_mass_flux.png differ diff --git a/1.11.0rc1/_images/west_ross_shelf_temp.png b/1.11.0rc1/_images/west_ross_shelf_temp.png new file mode 100644 index 000000000..5c5b7bff9 Binary files /dev/null and b/1.11.0rc1/_images/west_ross_shelf_temp.png differ diff --git a/1.11.0rc1/_images/woce_transect.png b/1.11.0rc1/_images/woce_transect.png new file mode 100644 index 000000000..4ded78f12 Binary files /dev/null and b/1.11.0rc1/_images/woce_transect.png differ diff --git a/1.11.0rc1/_modules/index.html b/1.11.0rc1/_modules/index.html new file mode 100644 index 000000000..2212158e2 --- /dev/null +++ b/1.11.0rc1/_modules/index.html @@ -0,0 +1,189 @@ + + + + + + Overview: module code — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +

All modules for which code is available

+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/__main__.html b/1.11.0rc1/_modules/mpas_analysis/__main__.html new file mode 100644 index 000000000..8ca349c3f --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/__main__.html @@ -0,0 +1,1182 @@ + + + + + + mpas_analysis.__main__ — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for mpas_analysis.__main__

+#!/usr/bin/env python
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+"""
+Runs MPAS-Analysis via a configuration file (e.g. `analysis.cfg`)
+specifying analysis options.
+"""
+# Authors
+# -------
+# Xylar Asay-Davis, Phillip J. Wolfram, Milena Veneziani
+
+import mpas_analysis
+
+import argparse
+import traceback
+import sys
+import shutil
+import os
+from collections import OrderedDict
+import progressbar
+import logging
+import xarray
+import time
+from importlib.resources import contents
+
+from mache import discover_machine, MachineInfo
+
+from mpas_tools.config import MpasConfigParser
+
+from mpas_analysis.shared.analysis_task import AnalysisFormatter
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories, copyfile
+
+from mpas_analysis.shared.html import generate_html
+
+from mpas_analysis.shared import AnalysisTask
+from mpas_analysis.shared.analysis_task import \
+    update_time_bounds_from_file_names
+
+from mpas_analysis.shared.plot.colormap import register_custom_colormaps, \
+    _plot_color_gradients
+
+from mpas_analysis import ocean
+from mpas_analysis import sea_ice
+from mpas_analysis.shared.climatology import MpasClimatologyTask, \
+    RefYearMpasClimatologyTask
+from mpas_analysis.shared.time_series import MpasTimeSeriesTask
+
+from mpas_analysis.shared.regions import ComputeRegionMasks
+
+
+def update_time_bounds_in_config(config):
+    """
+    Updates the start and end year (and associated full date) for
+    climatologies, time series and climate indices based on the files that are
+    actually available.
+
+    Parameters
+    ----------
+    config : mpas_tools.config.MpasConfigParser
+        contains config options
+
+    """
+    # By updating the bounds for each component, we should end up with the
+    # more constrained time bounds if any component has less output than others
+    for componentName in ['ocean', 'seaIce']:
+        for section in ['climatology', 'timeSeries', 'index']:
+            update_time_bounds_from_file_names(config, section, componentName)
+
+
+
+[docs] +def build_analysis_list(config, controlConfig): + """ + Build a list of analysis tasks. New tasks should be added here, following + the approach used for existing analysis tasks. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + contains config options + + controlConfig : mpas_tools.config.MpasConfigParser or None + contains config options for a control run, or ``None`` if no config + file for a control run was specified + + Returns + ------- + analyses : list of ``AnalysisTask`` objects + A list of all analysis tasks + """ + # Authors + # ------- + # Xylar Asay-Davis + + analyses = [] + + # Ocean Analyses + oceanClimatologyTasks = {} + for op in ['avg', 'min', 'max']: + oceanClimatologyTasks[op] = MpasClimatologyTask(config=config, + componentName='ocean', + op=op) + oceanTimeSeriesTask = MpasTimeSeriesTask(config=config, + componentName='ocean') + oceanIndexTask = MpasTimeSeriesTask(config=config, + componentName='ocean', + section='index') + + oceanRefYearClimatologyTask = RefYearMpasClimatologyTask( + config=config, componentName='ocean') + + oceanRegionMasksTask = ComputeRegionMasks(config=config, + conponentName='ocean') + + for op in oceanClimatologyTasks: + analyses.append(oceanClimatologyTasks[op]) + analyses.append(oceanRefYearClimatologyTask) + + analyses.append(ocean.ClimatologyMapMLD(config, + oceanClimatologyTasks['avg'], + controlConfig)) + + analyses.append(ocean.ClimatologyMapMLDMinMax(config, + oceanClimatologyTasks, + controlConfig)) + + analyses.append(ocean.ClimatologyMapSST(config, + oceanClimatologyTasks['avg'], + controlConfig)) + analyses.append(ocean.ClimatologyMapSSS(config, + oceanClimatologyTasks['avg'], + controlConfig)) + analyses.append(ocean.ClimatologyMapSSH(config, + oceanClimatologyTasks['avg'], + controlConfig)) + analyses.append(ocean.ClimatologyMapEKE(config, + oceanClimatologyTasks['avg'], + controlConfig)) + analyses.append(ocean.ClimatologyMapBSF(config, + oceanClimatologyTasks['avg'], + 
controlConfig)) + analyses.append(ocean.ClimatologyMapOHCAnomaly( + config, oceanClimatologyTasks['avg'], oceanRefYearClimatologyTask, + controlConfig)) + + analyses.append(ocean.ClimatologyMapSose( + config, oceanClimatologyTasks['avg'], controlConfig)) + analyses.append(ocean.ClimatologyMapWoa( + config, oceanClimatologyTasks['avg'], controlConfig)) + analyses.append(ocean.ClimatologyMapBGC(config, + oceanClimatologyTasks['avg'], + controlConfig)) + + analyses.append(ocean.ClimatologyMapArgoTemperature( + config, oceanClimatologyTasks['avg'], controlConfig)) + analyses.append(ocean.ClimatologyMapArgoSalinity( + config, oceanClimatologyTasks['avg'], controlConfig)) + + analyses.append(ocean.ClimatologyMapSchmidtko( + config, oceanClimatologyTasks['avg'], controlConfig)) + + analyses.append(ocean.ClimatologyMapAntarcticMelt( + config, oceanClimatologyTasks['avg'], oceanRegionMasksTask, + controlConfig)) + + analyses.append(ocean.ConservationTask( + config, controlConfig)) + + analyses.append(ocean.RegionalTSDiagrams( + config, oceanClimatologyTasks['avg'], oceanRegionMasksTask, + controlConfig)) + + analyses.append(ocean.TimeSeriesAntarcticMelt(config, oceanTimeSeriesTask, + oceanRegionMasksTask, + controlConfig)) + + analyses.append(ocean.TimeSeriesOceanRegions(config, oceanRegionMasksTask, + controlConfig)) + + analyses.append(ocean.TimeSeriesTemperatureAnomaly(config, + oceanTimeSeriesTask)) + analyses.append(ocean.TimeSeriesSalinityAnomaly(config, + oceanTimeSeriesTask)) + analyses.append(ocean.TimeSeriesOHCAnomaly(config, + oceanTimeSeriesTask, + controlConfig)) + analyses.append(ocean.TimeSeriesSSHAnomaly(config, + oceanTimeSeriesTask, + controlConfig)) + analyses.append(ocean.TimeSeriesSST(config, oceanTimeSeriesTask, + controlConfig)) + analyses.append(ocean.TimeSeriesTransport(config, controlConfig)) + + analyses.append(ocean.OceanHistogram(config, oceanClimatologyTasks['avg'], + oceanRegionMasksTask, + controlConfig)) + 
analyses.append(ocean.MeridionalHeatTransport( + config, oceanClimatologyTasks['avg'], controlConfig)) + + analyses.append(ocean.StreamfunctionMOC(config, + oceanClimatologyTasks['avg'], + controlConfig)) + analyses.append(ocean.IndexNino34(config, oceanIndexTask, controlConfig)) + + analyses.append(ocean.WoceTransects(config, oceanClimatologyTasks['avg'], + controlConfig)) + + analyses.append(ocean.SoseTransects(config, oceanClimatologyTasks['avg'], + controlConfig)) + + analyses.append(ocean.GeojsonTransects(config, + oceanClimatologyTasks['avg'], + controlConfig)) + + oceanRegionalProfiles = ocean.OceanRegionalProfiles( + config, oceanRegionMasksTask, controlConfig) + analyses.append(oceanRegionalProfiles) + + analyses.append(ocean.HovmollerOceanRegions( + config, oceanRegionMasksTask, oceanRegionalProfiles, controlConfig)) + + # Sea Ice Analyses + seaIceClimatologyTask = MpasClimatologyTask(config=config, + componentName='seaIce') + seaIceTimeSeriesTask = MpasTimeSeriesTask(config=config, + componentName='seaIce') + + analyses.append(seaIceClimatologyTask) + analyses.append(sea_ice.ClimatologyMapSeaIceConc( + config=config, mpasClimatologyTask=seaIceClimatologyTask, + hemisphere='NH', controlConfig=controlConfig)) + analyses.append(sea_ice.ClimatologyMapSeaIceThick( + config=config, mpasClimatologyTask=seaIceClimatologyTask, + hemisphere='NH', controlConfig=controlConfig)) + analyses.append(sea_ice.ClimatologyMapSeaIceConc( + config=config, mpasClimatologyTask=seaIceClimatologyTask, + hemisphere='SH', controlConfig=controlConfig)) + analyses.append(sea_ice.ClimatologyMapSeaIceThick( + config=config, mpasClimatologyTask=seaIceClimatologyTask, + hemisphere='SH', controlConfig=controlConfig)) + analyses.append(seaIceTimeSeriesTask) + analyses.append(sea_ice.ClimatologyMapSeaIceProduction( + config=config, mpas_climatology_task=seaIceClimatologyTask, + hemisphere='NH', control_config=controlConfig)) + analyses.append(sea_ice.ClimatologyMapSeaIceProduction( + 
config=config, mpas_climatology_task=seaIceClimatologyTask, + hemisphere='SH', control_config=controlConfig)) + analyses.append(sea_ice.ClimatologyMapSeaIceMelting( + config=config, mpas_climatology_task=seaIceClimatologyTask, + hemisphere='NH', control_config=controlConfig)) + analyses.append(sea_ice.ClimatologyMapSeaIceMelting( + config=config, mpas_climatology_task=seaIceClimatologyTask, + hemisphere='SH', control_config=controlConfig)) + + analyses.append(sea_ice.TimeSeriesSeaIce(config, seaIceTimeSeriesTask, + controlConfig)) + + # Iceberg Analyses + analyses.append(sea_ice.ClimatologyMapIcebergConc( + config=config, mpasClimatologyTask=seaIceClimatologyTask, + hemisphere='SH', controlConfig=controlConfig)) + + # Wave Analyses + analyses.append(ocean.ClimatologyMapWaves( + config, oceanClimatologyTasks['avg'], oceanRegionMasksTask, + controlConfig)) + + check_for_duplicate_names(analyses) + + return analyses
+ + + +def check_for_duplicate_names(analyses): + """ + Check for duplicate taskName and subtaskName in the list of analysis tasks + and their subtasks + + Parameters + ---------- + analyses : list of mpas_analysis.shared.AnalysisTask + A list of all analysis tasks + """ + all_task_names = [] + errors = [] + for analysis in analyses: + mainTaskName = analysis.taskName + assert(analysis.subtaskName is None) + fullName = (mainTaskName, None) + if fullName in all_task_names: + errors.append( + f'A task named {mainTaskName} has been added more than once') + all_task_names.append(fullName) + for subtask in analysis.subtasks: + taskName = subtask.taskName + subtaskName = subtask.subtaskName + if taskName != mainTaskName: + errors.append( + f'A subtask named {taskName}: {subtaskName} has a ' + f'different task name than its parent task: \n' + f' {mainTaskName}') + fullName = (taskName, subtaskName) + if fullName in all_task_names: + errors.append( + f'A subtask named {taskName}: {subtaskName} has been ' + f'added more than once') + all_task_names.append(fullName) + + if len(errors) > 0: + all_errors = '\n '.join(errors) + raise ValueError(f'Analysis tasks failed these checks:\n' + f' {all_errors}') + + +
+[docs] +def determine_analyses_to_generate(analyses, verbose): + """ + Build a list of analysis tasks to run based on the 'generate' config + option (or command-line flag) and prerequisites and subtasks of each + requested task. Each task's ``setup_and_check`` method is called in the + process. + + Parameters + ---------- + analyses : list of ``AnalysisTask`` objects + A list of all analysis tasks + + verbose : bool + Whether to write out a full stack trace when exceptions occur during + ``setup_and_check()`` calls for each task + + Returns + ------- + analysesToGenerate : ``OrderedDict`` of ``AnalysisTask`` objects + A dictionary of analysis tasks to run + """ + # Authors + # ------- + # Xylar Asay-Davis + + totalFailures = 0 + + print('') + + analysesToGenerate = OrderedDict() + # check which analysis we actually want to generate and only keep those + for analysisTask in analyses: + # update the dictionary with this task and perhaps its subtasks + failureCount = add_task_and_subtasks(analysisTask, analysesToGenerate, + verbose) + + totalFailures += failureCount + + if totalFailures > 0: + print('\n{} tasks and subtasks failed during setup.'.format( + totalFailures)) + if not verbose: + print('To find out why these tasks are failing, use the --verbose ' + 'flag') + + print('') + + return analysesToGenerate
+ + + +
+[docs] +def add_task_and_subtasks(analysisTask, analysesToGenerate, verbose, + callCheckGenerate=True): + + """ + If a task has been requested through the generate config option or + if it is a prerequisite of a requested task, add it to the dictionary of + tasks to generate. + + Parameters + ---------- + analysisTask : ``AnalysisTask`` + A task to be added + + analysesToGenerate : ``OrderedDict`` of ``AnalysisTask`` + The list of analysis tasks to be generated, which this call may + update to include this task and its subtasks + + verbose : bool + Whether to write out a full stack trace when exceptions occur during + ``setup_and_check()`` calls for each task + + callCheckGenerate : bool + Whether the ``check_generate`` method should be call for this task to + see if it has been requested. We skip this for subtasks and + prerequisites, since they are needed by another task regardless of + whether the user specifically requested them. + """ + # Authors + # ------- + # Xylar Asay-Davis + + totalFailures = 0 + + key = (analysisTask.taskName, analysisTask.subtaskName) + if key in analysesToGenerate.keys(): + # The task was already added + if analysisTask._setupStatus != 'success': + ValueError("task {} already added but this version was not set up " + "successfully. 
Typically, this indicates two tasks " + "with the same full name".format( + analysisTask.fullTaskName)) + return totalFailures + + # for each analysis task, check if we want to generate this task + # and if the analysis task has a valid configuration + taskTitle = analysisTask.printTaskName + if callCheckGenerate and not analysisTask.check_generate(): + # we don't need to add this task -- it wasn't requested + return totalFailures + + # first, we should try to add the prerequisites of this task and its + # subtasks (if they aren't also subtasks for this task) + prereqs = analysisTask.runAfterTasks + for subtask in analysisTask.subtasks: + for prereq in subtask.runAfterTasks: + if prereq not in analysisTask.subtasks: + prereqs.extend(subtask.runAfterTasks) + + for prereq in prereqs: + failureCount = add_task_and_subtasks(prereq, analysesToGenerate, + verbose, callCheckGenerate=False) + totalFailures += failureCount + if prereq._setupStatus != 'success': + assert(failureCount > 0) + # a prereq failed setup_and_check + print("Warning: prerequisite of {} failed during check, " + "so this task will not be run".format( + taskTitle)) + analysisTask._setupStatus = 'fail' + totalFailures += 1 + return totalFailures + + # make sure all prereqs have been set up successfully before trying to + # set up this task -- this task's setup may depend on setup in the prereqs + try: + analysisTask.setup_and_check() + except (Exception, BaseException): + if verbose: + traceback.print_exc(file=sys.stdout) + print("Warning: {} failed during check and will not be run".format( + taskTitle)) + analysisTask._setupStatus = 'fail' + totalFailures += 1 + return totalFailures + + # next, we should try to add the subtasks. 
This is done after the current + # analysis task has been set up in case subtasks depend on information + # from the parent task + for subtask in analysisTask.subtasks: + failureCount = add_task_and_subtasks(subtask, analysesToGenerate, + verbose, callCheckGenerate=False) + totalFailures += failureCount + if subtask._setupStatus != 'success': + assert(failureCount > 0) + # a subtask failed setup_and_check + print("Warning: subtask of {} failed during check, " + "so this task will not be run".format( + taskTitle)) + analysisTask._setupStatus = 'fail' + totalFailures += 1 + return totalFailures + + analysesToGenerate[key] = analysisTask + analysisTask._setupStatus = 'success' + assert(totalFailures == 0) + return totalFailures
+ + + +
+[docs] +def update_generate(config, generate): + """ + Update the 'generate' config option using a string from the command line. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + contains config options + + generate : str + a comma-separated string of generate flags: either names of analysis + tasks or commands of the form ``all_<tag>`` or ``no_<tag>`` indicating + that analysis with a given tag should be included or excluded). + """ + # Authors + # ------- + # Xylar Asay-Davis + + # overwrite the 'generate' in config with a string that parses to + # a list of string + generateList = generate.split(',') + generateString = ', '.join(["'{}'".format(element) + for element in generateList]) + generateString = '[{}]'.format(generateString) + config.set('output', 'generate', generateString, user=True)
+ + + +
+[docs] +def run_analysis(config, analyses): + """ + Run all the tasks, either in serial or in parallel + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + contains config options + + analyses : OrderedDict of ``AnalysisTask`` objects + A dictionary of analysis tasks to run with (task, subtask) names as + keys + """ + # Authors + # ------- + # Xylar Asay-Davis + + # write the config file the log directory + logsDirectory = build_config_full_path(config, 'output', + 'logsSubdirectory') + + mainRunName = config.get('runs', 'mainRunName') + maxTitleLength = config.getint('plot', 'maxTitleLength') + + if len(mainRunName) > maxTitleLength: + print('Warning: The main run name is quite long and will be ' + 'truncated in some plots: \n{}\n\n'.format(mainRunName)) + + configFileName = '{}/complete.{}.cfg'.format(logsDirectory, mainRunName) + + configFile = open(configFileName, 'w') + config.write(configFile) + configFile.close() + + parallelTaskCount = config.getint('execute', 'parallelTaskCount') + + isParallel = parallelTaskCount > 1 and len(analyses) > 1 + + for analysisTask in analyses.values(): + if not analysisTask.runAfterTasks and not analysisTask.subtasks: + analysisTask._runStatus.value = AnalysisTask.READY + else: + analysisTask._runStatus.value = AnalysisTask.BLOCKED + + tasksWithErrors = [] + runningTasks = {} + + # redirect output to a log file + logsDirectory = build_config_full_path(config, 'output', + 'logsSubdirectory') + + logFileName = '{}/taskProgress.log'.format(logsDirectory) + + logger = logging.getLogger('mpas_analysis') + handler = logging.FileHandler(logFileName) + + formatter = AnalysisFormatter() + handler.setFormatter(formatter) + logger.addHandler(handler) + logger.setLevel(logging.INFO) + logger.propagate = False + + totalTaskCount = len(analyses) + widgets = ['Running tasks: ', progressbar.Percentage(), ' ', + progressbar.Bar(), ' ', progressbar.ETA()] + progress = progressbar.ProgressBar(widgets=widgets, + 
max_value=totalTaskCount).start() + + runningProcessCount = 0 + + # run each analysis task + while True: + # we still have tasks to run + for analysisTask in analyses.values(): + if analysisTask._runStatus.value == AnalysisTask.BLOCKED: + prereqs = analysisTask.runAfterTasks + analysisTask.subtasks + prereqStatus = [prereq._runStatus.value for prereq in prereqs] + if any([runStatus == AnalysisTask.FAIL for runStatus in + prereqStatus]): + # a prerequisite failed so this task cannot succeed + analysisTask._runStatus.value = AnalysisTask.FAIL + if all([runStatus == AnalysisTask.SUCCESS for runStatus in + prereqStatus]): + # no unfinished prerequisites so we can run this task + analysisTask._runStatus.value = AnalysisTask.READY + + unfinishedCount = 0 + for analysisTask in analyses.values(): + if analysisTask._runStatus.value not in [AnalysisTask.SUCCESS, + AnalysisTask.FAIL]: + unfinishedCount += 1 + + progress.update(totalTaskCount - unfinishedCount) + + if unfinishedCount <= 0 and runningProcessCount == 0: + # we're done + break + + # launch new tasks + runDirectly = False + for key, analysisTask in analyses.items(): + if analysisTask._runStatus.value == AnalysisTask.READY: + if isParallel: + newProcessCount = runningProcessCount + \ + analysisTask.subprocessCount + if newProcessCount > parallelTaskCount and \ + runningProcessCount > 0: + # this task should run next but we need to wait for + # more processes to finish + break + + logger.info('Running {}'.format( + analysisTask.printTaskName)) + if analysisTask.runDirectly: + analysisTask.run(writeLogFile=True) + runDirectly = True + break + else: + analysisTask._runStatus.value = AnalysisTask.RUNNING + analysisTask.start() + runningTasks[key] = analysisTask + runningProcessCount = newProcessCount + if runningProcessCount >= parallelTaskCount: + # don't try to run any more tasks + break + else: + analysisTask.run(writeLogFile=False) + break + + if isParallel: + + if not runDirectly: + assert(runningProcessCount > 0) 
+ # wait for a task to finish + analysisTask = wait_for_task(runningTasks) + key = (analysisTask.taskName, analysisTask.subtaskName) + runningTasks.pop(key) + runningProcessCount -= analysisTask.subprocessCount + + taskTitle = analysisTask.printTaskName + + if analysisTask._runStatus.value == AnalysisTask.SUCCESS: + logger.info(" Task {} has finished successfully.".format( + taskTitle)) + elif analysisTask._runStatus.value == AnalysisTask.FAIL: + message = "ERROR in task {}. See log file {} for " \ + "details".format(taskTitle, + analysisTask._logFileName) + logger.error(message) + print(message) + tasksWithErrors.append(taskTitle) + else: + message = "Unexpected status from in task {}. This may be " \ + "a bug.".format(taskTitle) + logger.error(message) + print(message) + else: + if analysisTask._runStatus.value == AnalysisTask.FAIL: + sys.exit(1) + + progress.finish() + + # blank line to make sure remaining output is on a new line + print('') + + handler.close() + logger.handlers = [] + + # raise the last exception so the process exits with an error + errorCount = len(tasksWithErrors) + if errorCount == 1: + print("There were errors in task {}".format(tasksWithErrors[0])) + sys.exit(1) + elif errorCount > 0: + print("There were errors in {} tasks: {}".format( + errorCount, ', '.join(tasksWithErrors))) + print("See log files in {} for details.".format(logsDirectory)) + print("The following commands may be helpful:") + print(" cd {}".format(logsDirectory)) + print(" grep Error *.log") + sys.exit(1) + else: + print('Log files for executed tasks can be found in {}'.format( + logsDirectory))
+ + + +
+[docs] +def wait_for_task(runningTasks, timeout=0.1): + """ + Build a list of analysis modules based on the 'generate' config option. + New tasks should be added here, following the approach used for existing + analysis tasks. + + Parameters + ---------- + runningTasks : dict of ``AnalysisTasks`` + The tasks that are currently running, with task names as keys + + Returns + ------- + analysisTask : ``AnalysisTasks`` + A task that finished + """ + # Authors + # ------- + # Xylar Asay-Davis + + # necessary to have a timeout so we can kill the whole thing + # with a keyboard interrupt + while True: + for analysisTask in runningTasks.values(): + analysisTask.join(timeout=timeout) + if not analysisTask.is_alive(): + return analysisTask
+ + + +def purge_output(config): + outputDirectory = config.get('output', 'baseDirectory') + if not os.path.exists(outputDirectory): + print('Output directory {} does not exist.\n' + 'No purge necessary.'.format(outputDirectory)) + else: + for subdirectory in ['plots', 'logs', 'mpasClimatology', 'mapping', + 'timeSeries', 'html', 'mask', 'profiles', + 'histogram']: + option = '{}Subdirectory'.format(subdirectory) + directory = build_config_full_path( + config=config, section='output', + relativePathOption=option) + if os.path.exists(directory): + print('Deleting contents of {}'.format(directory)) + if os.path.islink(directory): + os.unlink(directory) + else: + shutil.rmtree(directory) + + for component in ['ocean', 'seaIce']: + for subdirectory in ['climatology', 'remappedClim']: + option = '{}Subdirectory'.format(subdirectory) + section = '{}Observations'.format(component) + directory = build_config_full_path( + config=config, section='output', + relativePathOption=option, + relativePathSection=section) + if os.path.exists(directory): + print('Deleting contents of {}'.format(directory)) + if os.path.islink(directory): + os.unlink(directory) + else: + shutil.rmtree(directory) + + +def build_config(user_config_file, shared_configs, machine_info): + """ + Create a config parser from a user config file (either main or control) + and a set of shared config file, also adding the username to the web_portal + section + """ + if not os.path.exists(user_config_file): + raise OSError(f'A config file {user_config_file} was specified but ' + f'the file does not exist') + config = MpasConfigParser() + for config_file in shared_configs: + if config_file.endswith('.py'): + # we'll skip config options set in python files + continue + config.add_from_file(config_file) + config.add_user_config(user_config_file) + + if machine_info is not None: + config.set('web_portal', 'username', machine_info.username) + + return config + + +def symlink_main_run(config, shared_configs, 
machine_info): + """ + Create symlinks to the climatology and time-series directories for the + main run that has already been computed so we don't have to recompute + the analysis. + """ + + def link_dir(section, option): + dest_directory = build_config_full_path(config=config, + section='output', + relativePathOption=option, + relativePathSection=section) + if not os.path.exists(dest_directory): + + source_directory = build_config_full_path( + config=main_config, section='output', + relativePathOption=option, relativePathSection=section) + + if os.path.exists(source_directory): + + dest_base = os.path.split(dest_directory)[0] + + make_directories(dest_base) + + os.symlink(source_directory, dest_directory) + + main_config_file = config.get('runs', 'mainRunConfigFile') + main_config = build_config(main_config_file, shared_configs, machine_info) + + for subdirectory in ['mpasClimatology', 'timeSeries', 'mapping', 'mask', + 'profiles']: + section = 'output' + option = '{}Subdirectory'.format(subdirectory) + link_dir(section=section, option=option) + + for component in ['ocean', 'seaIce']: + for subdirectory in ['climatology', 'remappedClim']: + section = '{}Observations'.format(component) + option = '{}Subdirectory'.format(subdirectory) + link_dir(section=section, option=option) + + +
+[docs] +def main(): + """ + Entry point for the main script ``mpas_analysis`` + """ + + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('-v', '--version', + action='version', + version='mpas_analysis {}'.format( + mpas_analysis.__version__), + help="Show version number and exit") + parser.add_argument("--setup_only", dest="setup_only", action='store_true', + help="If only the setup phase, not the run or HTML " + "generation phases, should be executed.") + parser.add_argument("--html_only", dest="html_only", action='store_true', + help="If only the setup and HTML generation phases, " + "not the run phase, should be executed.") + parser.add_argument("-g", "--generate", dest="generate", + help="A list of analysis modules to generate " + "(nearly identical generate option in config file).", + metavar="ANALYSIS1[,ANALYSIS2,ANALYSIS3,...]") + parser.add_argument("-l", "--list", dest="list", action='store_true', + help="List the available analysis tasks") + parser.add_argument("-p", "--purge", dest="purge", action='store_true', + help="Purge the analysis by deleting the output" + "directory before running") + parser.add_argument("config_file", metavar="CONFIG", type=str, nargs='*', + help="config file") + parser.add_argument("--plot_colormaps", dest="plot_colormaps", + action='store_true', + help="Make a plot displaying all available colormaps") + parser.add_argument("--verbose", dest="verbose", action='store_true', + help="Verbose error reporting during setup-and-check " + "phase") + parser.add_argument("-m", "--machine", dest="machine", + help="The name of the machine for loading machine-" + "related config options", metavar="MACH") + parser.add_argument("--polar_regions", dest="polar_regions", + action='store_true', + help="Include config options for analysis focused on " + "polar regions") + args = parser.parse_args() + + if len(sys.argv) == 1: + parser.print_help() + sys.exit(0) + + config 
= MpasConfigParser() + + # add default.cfg to cover default not included in the config files + # provided on the command line + config.add_from_package('mpas_analysis', 'default.cfg') + + # Add config options for E3SM supported machines from the mache package + machine = args.machine + + if machine is None and 'E3SMU_MACHINE' in os.environ: + machine = os.environ['E3SMU_MACHINE'] + + if machine is None: + machine = discover_machine() + + if machine is not None: + print(f'Detected E3SM supported machine: {machine}') + try: + config.add_from_package('mache.machines', f'{machine}.cfg') + except FileNotFoundError: + + possible_machines = [] + machine_configs = contents('mache.machines') + for config in machine_configs: + if config.endswith('.cfg'): + possible_machines.append(os.path.splitext(config)[0]) + + possible_machines = '\n '.join(sorted(possible_machines)) + raise ValueError( + f'We could not find the machine: {machine}.\n' + f'Possible machines are:\n {possible_machines}') + + try: + config.add_from_package('mpas_analysis.configuration', + f'{machine}.cfg') + except FileNotFoundError: + # we don't have a config file for this machine, so we'll just + # skip it. 
+ print(f'Warning: no MPAS-Analysis config file found for machine:' + f' {machine}') + pass + + if args.polar_regions: + config.add_from_package('mpas_analysis', 'polar_regions.cfg') + + if machine is not None: + # set the username so we can use it in the htmlSubdirectory + machine_info = MachineInfo(machine=machine) + config.set('web_portal', 'username', machine_info.username) + else: + machine_info = None + + shared_configs = config.list_files() + + for user_config in args.config_file: + if not os.path.exists(user_config): + raise OSError(f'Config file {user_config} not found.') + + config.add_user_config(user_config) + + print('Using the following config files:') + for config_file in config.list_files(): + print(f' {config_file}') + + if args.list: + # set this config option so we don't have issues + config.set('diagnostics', 'baseDirectory', '') + analyses = build_analysis_list(config, controlConfig=None) + for analysisTask in analyses: + print('task: {}'.format(analysisTask.taskName)) + print(' component: {}'.format(analysisTask.componentName)), + print(' tags: {}'.format(', '.join(analysisTask.tags))) + sys.exit(0) + + if args.plot_colormaps: + register_custom_colormaps() + _plot_color_gradients() + sys.exit(0) + + if config.has_option('runs', 'controlRunConfigFile'): + control_config_file = config.get('runs', 'controlRunConfigFile') + control_config = build_config(control_config_file, shared_configs, + machine_info) + + # replace the log directory so log files get written to this run's + # log directory, not the control run's + logs_dir = build_config_full_path(config, 'output', 'logsSubdirectory') + + control_config.set('output', 'logsSubdirectory', logs_dir) + + print('Comparing to control run {} rather than observations. 
\n' + 'Make sure that MPAS-Analysis has been run previously with the ' + 'control config file.'.format(control_config.get('runs', + 'mainRunName'))) + else: + control_config = None + + if args.purge: + purge_output(config) + + if config.has_option('runs', 'mainRunConfigFile'): + symlink_main_run(config, shared_configs, machine_info) + + if args.generate: + update_generate(config, args.generate) + + if control_config is not None: + # we want to use the "generate" option from the current run, not + # the control config file + control_config.set('output', 'generate', config.get('output', + 'generate')) + + log_dir = build_config_full_path(config, 'output', 'logsSubdirectory') + make_directories(log_dir) + + update_time_bounds_in_config(config) + + file_cache_maxsize = config.getint('input', 'file_cache_maxsize') + try: + xarray.set_options(file_cache_maxsize=file_cache_maxsize) + except ValueError: + # xarray version doesn't support file_cache_maxsize yet... + pass + + start_time = time.time() + + custom_config_files = list(args.config_file) + for option in ['controlRunConfigFile', 'mainRunConfigFile']: + if config.has_option('runs', option): + custom_config_files.append(config.get('runs', option)) + + html_base_directory = build_config_full_path(config, 'output', + 'htmlSubdirectory') + make_directories(html_base_directory) + for config_filename in custom_config_files: + config_filename = os.path.abspath(config_filename) + print(f'copying {config_filename} to HTML dir.') + basename = os.path.basename(config_filename) + copyfile(config_filename, f'{html_base_directory}/{basename}') + + analyses = build_analysis_list(config, control_config) + analyses = determine_analyses_to_generate(analyses, args.verbose) + + setup_duration = time.time() - start_time + + if not args.setup_only and not args.html_only: + run_analysis(config, analyses) + run_duration = time.time() - start_time + m, s = divmod(setup_duration, 60) + h, m = divmod(int(m), 60) + print('Total setup time: 
{}:{:02d}:{:05.2f}'.format(h, m, s)) + m, s = divmod(run_duration, 60) + h, m = divmod(int(m), 60) + print('Total run time: {}:{:02d}:{:05.2f}'.format(h, m, s)) + + if not args.setup_only: + generate_html(config, analyses, control_config, custom_config_files)
+ + + +if __name__ == "__main__": + main() +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/download_data.html b/1.11.0rc1/_modules/mpas_analysis/download_data.html new file mode 100644 index 000000000..df6f848da --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/download_data.html @@ -0,0 +1,191 @@ + + + + + + mpas_analysis.download_data — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for mpas_analysis.download_data

+#!/usr/bin/env python
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+"""
+Entry points for downloading data for MPAS-Analysis
+"""
+# Authors
+# -------
+# Xylar Asay-Davis, Phillip J. Wolfram, Milena Veneziani
+
+
+import argparse
+import pkg_resources
+import os
+
+from mpas_analysis.shared.io.download import download_files
+
+
+
+[docs] +def download_analysis_data(): + """ + Entry point for downloading the input data set from public repository for + MPAS-Analysis to work. The input data set includes: pre-processed + observations data, MPAS mapping files and MPAS regional mask files + (which are used for the MOC computation), for a subset of MPAS meshes. + """ + + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument("-o", "--outDir", dest="outDir", required=True, + help="Directory where MPAS-Analysis input data will" + "be downloaded") + parser.add_argument("-d", "--dataset", dest="dataset", default='analysis', + help="Directory where MPAS-Analysis input data will" + "be downloaded") + args = parser.parse_args() + + try: + os.makedirs(args.outDir) + except OSError: + pass + + urlBase = 'https://web.lcrc.anl.gov/public/e3sm/diagnostics' + analysisFileList = pkg_resources.resource_string( + 'mpas_analysis', + 'obs/{}_input_files'.format(args.dataset)).decode('utf-8') + + # remove any empty strings from the list + analysisFileList = list(filter(None, analysisFileList.split('\n'))) + download_files(analysisFileList, urlBase, args.outDir, verify=True)
+ + +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_antarctic_melt.html b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_antarctic_melt.html new file mode 100644 index 000000000..229d265be --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_antarctic_melt.html @@ -0,0 +1,843 @@ + + + + + + mpas_analysis.ocean.climatology_map_antarctic_melt — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.climatology_map_antarctic_melt

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import os
+import csv
+
+import numpy as np
+import xarray as xr
+import dask
+from multiprocessing.pool import ThreadPool
+from pyremap import ProjectionGridDescriptor
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.io.utility import build_obs_path, decode_strings, \
+    build_config_full_path
+from mpas_analysis.shared.io import write_netcdf_with_fill
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \
+    RemapObservedClimatologySubtask
+from mpas_analysis.shared.projection import get_pyproj_projection
+from mpas_analysis.shared.climatology.climatology import \
+    get_masked_mpas_climatology_file_name
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+
+from mpas_analysis.shared.constants import constants
+
+
+
+[docs] +class ClimatologyMapAntarcticMelt(AnalysisTask): + """ + An analysis task for comparison of Antarctic melt rates against + observations + """ + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, config, mpasClimatologyTask, regionMasksTask, + controlConfig): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + regionMasksTask : ``ComputeRegionMasks`` + A task for computing region masks + + controlConfig : mpas_tools.config.MpasConfigParser + Configuration options for a control run + """ + # Authors + # ------- + # Xylar Asay-Davis + + fieldName = 'meltRate' + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapAntarcticMelt, self).__init__( + config=config, taskName='climatologyMapAntarcticMelt', + componentName='ocean', + tags=['climatology', 'horizontalMap', fieldName, + 'landIceCavities', 'antarctic']) + + sectionName = self.taskName + + iselValues = None + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError(f'config section {sectionName} does not contain ' + f'valid list of seasons') + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + makeTables = config.getboolean(sectionName, 'makeTables') + + if makeTables: + for season in seasons: + tableSubtask = AntarcticMeltTableSubtask( + parentTask=self, mpasClimatologyTask=mpasClimatologyTask, + controlConfig=controlConfig, + regionMasksTask=regionMasksTask, season=season) + self.add_subtask(tableSubtask) + + if len(comparisonGridNames) == 0: + raise ValueError(f'config section {sectionName} does not contain ' + f'valid list of comparison grids') + + # the variable 'timeMonthly_avg_landIceFreshwaterFlux' will be added to + # mpasClimatologyTask along with the seasons. 
+ remapClimatologySubtask = RemapMpasAntarcticMeltClimatology( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName=fieldName, + variableList=None, + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues) + + if controlConfig is None: + + refTitleLabel = \ + 'Observations (Paolo et al. 2023)' + + observationsDirectory = build_obs_path( + config, 'ocean', 'meltSubdirectory') + + comparison_res = config.getfloat( + 'climatology', 'comparisonAntarcticStereoResolution') + + # the maximum available resolution that is not coarser than the + # comparison + avail_res = np.array([10., 4., 1.]) + valid = avail_res >= comparison_res + if np.count_nonzero(valid) == 0: + res = np.amin(avail_res) + else: + res = np.amax(avail_res[valid]) + + obsFileName = \ + f'{observationsDirectory}/Paolo/Paolo_2023_' \ + f'iceshelf_melt_rates_1992-2017_v1.0_6000x6000km_{res:g}km_' \ + f'Antarctic_stereo.20240220.nc' + + remapObservationsSubtask = RemapObservedAntarcticMeltClimatology( + parentTask=self, seasons=seasons, fileName=obsFileName, + outFilePrefix='meltRate', + comparisonGridNames=comparisonGridNames) + self.add_subtask(remapObservationsSubtask) + diffTitleLabel = 'Model - Observations' + + else: + remapObservationsSubtask = None + controlRunName = controlConfig.get('runs', 'mainRunName') + refTitleLabel = f'Control: {controlRunName}' + diffTitleLabel = 'Main - Control' + + totalFluxVar = 'timeMonthly_avg_landIceFreshwaterFluxTotal' + landIceFluxVar = 'timeMonthly_avg_landIceFreshwaterFlux' + frazilFluxVar = 'timeMonthly_avg_frazilIceFreshwaterFlux' + + mpasFieldName = totalFluxVar + + if controlConfig is None: + refFieldName = 'meltRate' + else: + refFieldName = mpasFieldName + + for comparisonGridName in comparisonGridNames: + for season in seasons: + # make a new subtask for this season and comparison grid + subtaskName = f'plot_total_melt_{season}_{comparisonGridName}' + subtask = PlotAntarcticMeltSubtask( + self, season, 
comparisonGridName, remapClimatologySubtask, + remapObservationsSubtask, controlConfig=controlConfig, + subtaskName=subtaskName) + + subtask.set_plot_info( + outFileLabel='antMeltTotal', + fieldNameInTitle='Total Melt Flux', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'm a$^{-1}$ freshwater equiv.', + imageCaption='Antarctic Total Melt Flux', + galleryGroup='Melt Rate', + groupSubtitle=None, + groupLink='antarctic_melt', + galleryName='Total Melt Flux') + + self.add_subtask(subtask) + + mpasFieldName = landIceFluxVar + + if controlConfig is None: + refFieldName = 'meltRate' + else: + refFieldName = mpasFieldName + + for comparisonGridName in comparisonGridNames: + for season in seasons: + # make a new subtask for this season and comparison grid + subtaskName = \ + f'plot_interface_melt_{season}_{comparisonGridName}' + subtask = PlotAntarcticMeltSubtask( + self, season, comparisonGridName, remapClimatologySubtask, + remapObservationsSubtask, controlConfig=controlConfig, + subtaskName=subtaskName) + + # In PlotAntarcticMeltSubtask, we will remove the obs from + # these plots if totalFluxVar is present so we only compare one + # field with obs + + subtask.set_plot_info( + outFileLabel='antMeltInterface', + fieldNameInTitle='Melt Rate at Interface', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'm a$^{-1}$ freshwater equiv.', + imageCaption='Antarctic Melt Rate at Interface', + galleryGroup='Melt Rate', + groupSubtitle=None, + groupLink='antarctic_melt_int', + galleryName='Melt Rate at the Ice-ocean Interface') + + self.add_subtask(subtask) + + mpasFieldName = frazilFluxVar + + if controlConfig is None: + refTitleLabel = None + refFieldName = None + diffTitleLabel = None + + else: + controlRunName = controlConfig.get('runs', 'mainRunName') + refTitleLabel = f'Control: {controlRunName}' 
+ refFieldName = mpasFieldName + diffTitleLabel = 'Main - Control' + + for comparisonGridName in comparisonGridNames: + for season in seasons: + # make a new subtask for this season and comparison grid + subtaskName = \ + f'plot_interface_frazil_{season}_{comparisonGridName}' + subtask = PlotAntarcticMeltSubtask( + self, season, comparisonGridName, remapClimatologySubtask, + controlConfig=controlConfig, subtaskName=subtaskName) + + subtask.set_plot_info( + outFileLabel='antFrazil', + fieldNameInTitle='Frazil Accretion Rate, neg. upward', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'm a$^{-1}$ freshwater equiv.', + imageCaption='Antarctic Accretion Rate', + galleryGroup='Melt Rate', + groupSubtitle=None, + groupLink='antarctic_frazil_flux', + galleryName='Frazil Accretion Rate') + + self.add_subtask(subtask)
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class + # (AnalysisTask), which will perform some common setup + super(ClimatologyMapAntarcticMelt, self).setup_and_check() + + landIceFluxMode = self.namelist.get('config_land_ice_flux_mode') + if landIceFluxMode not in ['data', 'standalone', 'coupled']: + raise ValueError('*** climatologyMapMeltAntarctic requires ' + 'config_land_ice_flux_mode \n' + ' to be data, standalone or coupled. ' + ' Otherwise, no melt rates are available \n' + ' for plotting.')
+ + + +class RemapMpasAntarcticMeltClimatology(RemapMpasClimatologySubtask): + """ + A subtask for remapping climatologies of Antarctic melt rates and adding + + Attributes + ---------- + landIceMask : xarray.DataArray + A mask indicating where there is land ice on the ocean grid (thus, + where melt rates are valid) + + renameDict : dict + A dictionary use to rename variables in the climatology + """ + # Authors + # ------- + # Xylar Asay-Davis + + def setup_and_check(self): + """ + Figure out which variable(s) to remap + """ + # Authors + # ------- + # Xylar Asay-Davis + + totalFluxVar = 'timeMonthly_avg_landIceFreshwaterFluxTotal' + landIceFluxVar = 'timeMonthly_avg_landIceFreshwaterFlux' + frazilFluxVar = 'timeMonthly_avg_frazilIceFreshwaterFlux' + + if totalFluxVar in self.mpasClimatologyTask.allVariables: + # include the total and constituent fluxes + self.variableList = [totalFluxVar, landIceFluxVar, frazilFluxVar] + else: + # we only have the old name without the frazil accretion rate + self.variableList = [landIceFluxVar] + + super().setup_and_check() + + def run_task(self): + """ + Compute climatologies of melt rates from E3SM/MPAS output + + This function has been overridden to load ``landIceMask`` from a + restart file for later use in masking the melt rate. It then simply + calls the run function from + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, load the land-ice mask from the restart file + dsLandIceMask = xr.open_dataset(self.restartFileName) + dsLandIceMask = dsLandIceMask[['landIceMask']] + dsLandIceMask = dsLandIceMask.isel(Time=0) + self.landIceMask = dsLandIceMask.landIceMask > 0. 
+ + # then, call run from the base class (RemapMpasClimatologySubtask), + # which will perform the main function of the task + super(RemapMpasAntarcticMeltClimatology, self).run_task() + + def customize_masked_climatology(self, climatology, season): + """ + Mask the melt rates using ``landIceMask`` and rescale it to m/yr + + Parameters + ---------- + climatology : ``xarray.Dataset`` object + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : ``xarray.Dataset`` object + the modified climatology data set + """ + # Authors + # ------- + # Xylar Asay-Davis + + for fieldName in self.variableList: + + # scale the field to m/yr from kg/m^2/s and mask out non-land-ice + # areas + climatology[fieldName] = \ + constants.sec_per_year / constants.rho_fw * \ + climatology[fieldName].where(self.landIceMask) + climatology[fieldName].attrs['units'] = 'm yr^-1' + + return climatology + + +class RemapObservedAntarcticMeltClimatology(RemapObservedClimatologySubtask): + + """ + A subtask for reading and remapping Antarctic melt-rate observations + """ + # Authors + # ------- + # Xylar Asay-Davis + + def get_observation_descriptor(self, fileName): + """ + get a MeshDescriptor for the observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + """ + # Authors + # ------- + # Xylar Asay-Davis + + # create a descriptor of the observation grid using the x/y polar + # stereographic coordinates + projection = get_pyproj_projection(comparison_grid_name='antarctic') + obsDescriptor = ProjectionGridDescriptor.read( + projection, fileName=fileName, xVarName='x', yVarName='y') + + # update the mesh name to match the format used elsewhere in + # MPAS-Analysis + x = obsDescriptor.x + y = obsDescriptor.y + width = 1e-3 * (x[-1] - x[0]) + height = 1e-3 * (y[-1] - y[0]) + res = 1e-3 
* (x[1] - x[0]) + obsDescriptor.meshName = f'{width}x{height}km_{res}km_Antarctic_stereo' + + return obsDescriptor + + def build_observational_dataset(self, fileName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Xylar Asay-Davis + + # Load MLD observational data + dsObs = xr.open_dataset(fileName) + + return dsObs + + +class AntarcticMeltTableSubtask(AnalysisTask): + def __init__(self, parentTask, mpasClimatologyTask, controlConfig, + regionMasksTask, season, subtaskName=None): + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``ClimatologyMapAntarcticMelt`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlConfig : mpas_tools.config.MpasConfigParser + Configuration options for a control run (if any) + + regionMasksTask : ``ComputeRegionMasks`` + A task for computing region masks + + season : str + One of the seasons in ``constants.monthDictionary`` + + subtaskName : str, optional + The name of the subtask + """ + # Authors + # ------- + # Xylar Asay-Davis + tags = ['climatology', 'table'] + + if subtaskName is None: + subtaskName = f'table{season}' + + # call the constructor from the base class (AnalysisTask) + super(AntarcticMeltTableSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + subtaskName=subtaskName, + componentName=parentTask.componentName, + tags=tags) + + self.season = season + self.mpasClimatologyTask = mpasClimatologyTask + self.controlConfig = controlConfig + + self.masksSubtask = regionMasksTask.add_mask_subtask( + regionGroup='Ice Shelves') + self.iceShelfMasksFile = 
self.masksSubtask.geojsonFileName + + self.run_after(self.masksSubtask) + self.run_after(mpasClimatologyTask) + + def run_task(self): + """ + Computes and plots table of Antarctic sub-ice-shelf melt rates. + """ + # Authors + # ------- + # Xylar Asay-Davis + + self.logger.info("Computing Antarctic melt rate table...") + config = self.config + + sectionName = self.taskName + iceShelvesInTable = config.getexpression(sectionName, + 'iceShelvesInTable') + if len(iceShelvesInTable) == 0: + return + + iceShelvesInTable = self.masksSubtask.expand_region_names( + iceShelvesInTable) + + meltRateFileName = get_masked_mpas_climatology_file_name( + config, self.season, self.componentName, + climatologyName='antarcticMeltTable') + + if not os.path.exists(meltRateFileName): + with dask.config.set(schedular='threads', + pool=ThreadPool(1)): + + # Load data: + inFileName = \ + self.mpasClimatologyTask.get_file_name(self.season) + mpasFieldName = 'timeMonthly_avg_landIceFreshwaterFlux' + dsIn = xr.open_dataset(inFileName) + freshwaterFlux = dsIn[mpasFieldName] + if 'Time' in freshwaterFlux.dims: + freshwaterFlux = freshwaterFlux.isel(Time=0) + + regionMaskFileName = self.masksSubtask.maskFileName + + dsRegionMask = xr.open_dataset(regionMaskFileName) + + # figure out the indices of the regions to plot + regionNames = decode_strings(dsRegionMask.regionNames) + + regionIndices = [] + for iceShelf in iceShelvesInTable: + for index, regionName in enumerate(regionNames): + if iceShelf == regionName: + regionIndices.append(index) + break + + # select only those regions we want to plot + dsRegionMask = dsRegionMask.isel(nRegions=regionIndices) + cellMasks = \ + dsRegionMask.regionCellMasks.chunk({'nRegions': 10}) + + restartFileName = \ + self.runStreams.readpath('restart')[0] + + dsRestart = xr.open_dataset(restartFileName) + landIceFraction = dsRestart.landIceFraction.isel(Time=0) + areaCell = dsRestart.areaCell + + # convert from kg/s to kg/yr + totalMeltFlux = constants.sec_per_year * 
\ + (cellMasks * areaCell * freshwaterFlux).sum(dim='nCells') + totalMeltFlux.compute() + + totalArea = \ + (landIceFraction * cellMasks * areaCell).sum(dim='nCells') + + # from kg/m^2/yr to m/yr + meltRates = ((1. / constants.rho_fw) * + (totalMeltFlux / totalArea)) + meltRates.compute() + + # convert from kg/yr to GT/yr + totalMeltFlux /= constants.kg_per_GT + + ds = xr.Dataset() + ds['totalMeltFlux'] = totalMeltFlux + ds.totalMeltFlux.attrs['units'] = 'GT a$^{-1}$' + ds.totalMeltFlux.attrs['description'] = \ + 'Total melt flux summed over each ice shelf or region' + ds['meltRates'] = meltRates + ds.meltRates.attrs['units'] = 'm a$^{-1}$' + ds.meltRates.attrs['description'] = \ + 'Melt rate averaged over each ice shelf or region' + + ds['area'] = 1e-6*totalArea + ds.meltRates.attrs['units'] = 'km$^2$' + ds.meltRates.attrs['description'] = \ + 'Region or ice shelf area' + + ds['regionNames'] = dsRegionMask.regionNames + + write_netcdf_with_fill(ds, meltRateFileName) + else: + ds = xr.open_dataset(meltRateFileName) + + mainRunName = config.get('runs', 'mainRunName') + fieldNames = ['Region', 'Area', mainRunName] + + controlConfig = self.controlConfig + if controlConfig is not None: + controlFileName = get_masked_mpas_climatology_file_name( + controlConfig, self.season, self.componentName, + climatologyName='antarcticMeltTable') + dsControl = xr.open_dataset(controlFileName) + controlRunName = controlConfig.get('runs', 'mainRunName') + fieldNames.append(controlRunName) + else: + dsControl = None + controlRunName = None + + regionNames = decode_strings(ds.regionNames) + + tableBase = build_config_full_path(config, 'output', + 'tablesSubdirectory') + outDirectory = f'{tableBase}/antarcticMelt/' + + try: + os.makedirs(outDirectory) + except OSError: + pass + + tableFileName = \ + f'{outDirectory}/antarcticMeltRateTable_{self.season}.csv' + + with open(tableFileName, 'w', newline='') as csvfile: + writer = csv.DictWriter(csvfile, fieldnames=fieldNames) + + 
writer.writeheader() + for index, regionName in enumerate(regionNames): + row = {'Region': regionName, + 'Area': f'{ds.area[index].values}', + mainRunName: f'{ds.meltRates[index].values}'} + if dsControl is not None: + row[controlRunName] = \ + f'{dsControl.meltRates[index].values}' + writer.writerow(row) + + tableFileName = \ + f'{outDirectory}/antarcticMeltFluxTable_{self.season}.csv' + + with open(tableFileName, 'w', newline='') as csvfile: + writer = csv.DictWriter(csvfile, fieldnames=fieldNames) + + writer.writeheader() + for index, regionName in enumerate(regionNames): + row = {'Region': regionName, + 'Area': f'{ds.area[index].values}', + mainRunName: f'{ds.totalMeltFlux[index].values}'} + if dsControl is not None: + row[controlRunName] = \ + f'{dsControl.totalMeltFlux[index].values}' + writer.writerow(row) + + +class PlotAntarcticMeltSubtask(PlotClimatologyMapSubtask): + """ + A subtask for plotting antarctic melt fields if available + + Attributes + ---------- + doPlot : bool + Whether the required variable from the climatology is available so that + a plot should be generated + """ + # Authors + # ------- + # Xylar Asay-Davis + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. 
+ """ + allVariables = \ + self.remapMpasClimatologySubtask.mpasClimatologyTask.allVariables + + totalFluxVar = 'timeMonthly_avg_landIceFreshwaterFluxTotal' + landIceFluxVar = 'timeMonthly_avg_landIceFreshwaterFlux' + plotAll = (totalFluxVar in allVariables) + + if self.mpasFieldName == landIceFluxVar and plotAll and \ + self.controlConfig is None: + # need to remove obs because we only wnat to plot them vs the + # total flux + self.remapObsClimatologySubtask = None + self.refTitleLabel = None + self.refFieldName = None + self.diffTitleLabel = None + + self.doPlot = (self.mpasFieldName == landIceFluxVar or plotAll) + + if self.doPlot: + super().setup_and_check() + else: + # still need to call the base class's method + AnalysisTask.setup_and_check(self=self) + + def run_task(self): + """ + Plot the variable if available + """ + if self.doPlot: + super().run_task() +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_argo.html b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_argo.html new file mode 100644 index 000000000..be9d50671 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_argo.html @@ -0,0 +1,593 @@ + + + + + + mpas_analysis.ocean.climatology_map_argo — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.climatology_map_argo

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Analysis tasks for comparing Global climatology maps against Argo data.
+"""
+# Authors
+# -------
+# Luke Van Roekel
+
+import xarray as xr
+import numpy as np
+
+from pyremap import LatLonGridDescriptor
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.ocean.remap_depth_slices_subtask import \
+    RemapDepthSlicesSubtask
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+
+from mpas_analysis.shared.io.utility import build_obs_path
+
+from mpas_analysis.shared.climatology import RemapObservedClimatologySubtask
+
+
+
+[docs] +class ClimatologyMapArgoTemperature(AnalysisTask): + """ + An analysis task for comparison of potential temperature against Argo + observations + """ + + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis + +
+[docs] + def __init__(self, config, mpasClimatologyTask, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis + + fieldName = 'temperatureArgo' + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapArgoTemperature, self).__init__( + config=config, taskName='climatologyMapArgoTemperature', + componentName='ocean', + tags=['climatology', 'horizontalMap', 'argo', 'temperature', + 'publicObs']) + + sectionName = self.taskName + + mpasFieldName = 'timeMonthly_avg_activeTracers_temperature' + iselValues = None + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(sectionName)) + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of comparison grids'.format(sectionName)) + + depths = config.getexpression(sectionName, 'depths') + + if len(depths) == 0: + raise ValueError('config section {} does not contain valid ' + 'list of depths'.format(sectionName)) + + # the variable 'timeMonthly_avg_landIceFreshwaterFlux' will be added to + # mpasClimatologyTask along with the seasons. 
+ remapClimatologySubtask = RemapDepthSlicesSubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName=fieldName, + variableList=[mpasFieldName], + seasons=seasons, + depths=depths, + comparisonGridNames=comparisonGridNames, + iselValues=iselValues) + + if controlConfig is None: + + refTitleLabel = 'Roemmich-Gilson Argo Climatology: Potential ' \ + 'Temperature' + + observationsDirectory = build_obs_path( + config, 'ocean', 'argoSubdirectory') + + obsFileName = \ + '{}/ArgoClimatology_TS_20180710.nc'.format( + observationsDirectory) + refFieldName = 'theta' + outFileLabel = 'tempArgo' + galleryName = 'Roemmich-Gilson Climatology: Argo' + diffTitleLabel = 'Model - Argo' + + remapObservationsSubtask = RemapArgoClimatology( + parentTask=self, seasons=seasons, fileName=obsFileName, + outFilePrefix='{}Argo'.format(refFieldName), + fieldName=refFieldName, + depths=depths, + comparisonGridNames=comparisonGridNames) + + self.add_subtask(remapObservationsSubtask) + + else: + remapObservationsSubtask = None + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = 'Control: {}'.format(controlRunName) + refTitleLabel = galleryName + + refFieldName = mpasFieldName + outFileLabel = 'temp' + diffTitleLabel = 'Main - Control' + + for comparisonGridName in comparisonGridNames: + for season in seasons: + for depth in depths: + subtask = PlotClimatologyMapSubtask( + parentTask=self, + season=season, + comparisonGridName=comparisonGridName, + remapMpasClimatologySubtask=remapClimatologySubtask, + remapObsClimatologySubtask=remapObservationsSubtask, + controlConfig=controlConfig, + depth=depth) + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle='Potential Temperature', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'$\degree$C', + imageCaption='Model potential temperature compared ' + 'with Argo observations', + 
galleryGroup='Argo Potential Temperature', + groupSubtitle=None, + groupLink='tempArgo', + galleryName=galleryName) + + self.add_subtask(subtask)
+
+ + + +
+[docs] +class ClimatologyMapArgoSalinity(AnalysisTask): + """ + An analysis task for comparison of global salinity against Argo + observations + """ + + # Authors + # ------- + # Xylar Asay-Davis, Luke Van Roekel + +
+[docs] + def __init__(self, config, mpasClimatologyTask, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis, Luke Van Roekel + + fieldName = 'salinityArgo' + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapArgoSalinity, self).__init__( + config=config, taskName='climatologyMapArgoSalinity', + componentName='ocean', + tags=['climatology', 'horizontalMap', 'argo', 'salinity']) + + sectionName = self.taskName + + mpasFieldName = 'timeMonthly_avg_activeTracers_salinity' + iselValues = None + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(sectionName)) + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of comparison grids'.format(sectionName)) + + depths = config.getexpression(sectionName, 'depths') + + if len(depths) == 0: + raise ValueError('config section {} does not contain valid ' + 'list of depths'.format(sectionName)) + + # the variable 'timeMonthly_avg_landIceFreshwaterFlux' will be added to + # mpasClimatologyTask along with the seasons. 
+ remapClimatologySubtask = RemapDepthSlicesSubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName=fieldName, + variableList=[mpasFieldName], + seasons=seasons, + depths=depths, + comparisonGridNames=comparisonGridNames, + iselValues=iselValues) + + if controlConfig is None: + + refTitleLabel = 'Roemmich-Gilson Argo Climatology: Salinity' + + observationsDirectory = build_obs_path( + config, 'ocean', 'argoSubdirectory') + + obsFileName = \ + '{}/ArgoClimatology_TS_20180710.nc'.format( + observationsDirectory) + refFieldName = 'salinity' + outFileLabel = 'salinArgo' + galleryName = 'Roemmich-Gilson Climatology: Argo' + diffTitleLabel = 'Model - Argo' + + remapObservationsSubtask = RemapArgoClimatology( + parentTask=self, seasons=seasons, fileName=obsFileName, + outFilePrefix='{}Argo'.format(refFieldName), + fieldName=refFieldName, + depths=depths, + comparisonGridNames=comparisonGridNames) + + self.add_subtask(remapObservationsSubtask) + + else: + remapObservationsSubtask = None + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = None + refTitleLabel = 'Control: {}'.format(controlRunName) + + refFieldName = mpasFieldName + outFileLabel = 'salin' + diffTitleLabel = 'Main - Control' + + for comparisonGridName in comparisonGridNames: + for season in seasons: + for depth in depths: + subtask = PlotClimatologyMapSubtask( + parentTask=self, + season=season, + comparisonGridName=comparisonGridName, + remapMpasClimatologySubtask=remapClimatologySubtask, + remapObsClimatologySubtask=remapObservationsSubtask, + controlConfig=controlConfig, + depth=depth) + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle='Salinity', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'PSU', + imageCaption='Model Salinity compared with Argo ' + 'observations', + galleryGroup='Argo Salinity', + groupSubtitle=None, + 
groupLink='salinArgo', + galleryName=galleryName) + + self.add_subtask(subtask)
+
+ + + +class RemapArgoClimatology(RemapObservedClimatologySubtask): + """ + A subtask for reading and remapping SOSE fields to the comparison grid + """ + + # Authors + # ------- + # Xylar Asay-Davis, Luke Van Roekel + + def __init__(self, parentTask, seasons, fileName, outFilePrefix, + fieldName, depths, + comparisonGridNames=['latlon'], + subtaskName='remapObservations'): + + """ + Construct one analysis subtask for each plot (i.e. each season and + comparison grid) and a subtask for computing climatologies. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent (main) task for this subtask + + seasons : list of str + A list of seasons (keys in ``constants.monthDictionary``) over + which the climatology should be computed. + + fileName : str + The name of the observation file + + outFilePrefix : str + The prefix in front of output files and mapping files, typically + the name of the field being remapped + + fieldName : str + The name of the 3D field to remap + + depths : list of {None, float, 'top'} + A list of depths at which the climatology will be sliced in the + vertical. + + comparisonGridNames : list of {'latlon', 'antarctic'}, optional + The name(s) of the comparison grid to use for remapping. 
+ + subtaskName : str, optional + The name of the subtask + """ + # Authors + # ------- + # Xylar Asay-Davis, Luke Van Roekel + + self.fieldName = fieldName + self.depths = depths + + # call the constructor from the base class + # (RemapObservedClimatologySubtask) + super(RemapArgoClimatology, self).__init__( + parentTask, seasons, fileName, outFilePrefix, + comparisonGridNames, subtaskName) + + def get_observation_descriptor(self, fileName): + """ + get a MeshDescriptor for the observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + """ + # Authors + # ------- + # Xylar Asay-Davis, Luke Van Roekel + + # Load Argo observational Data + dsObs = self.build_observational_dataset(fileName) + + # create a descriptor of the observation grid using Lat/Lon + # coordinates + obsDescriptor = LatLonGridDescriptor.read(ds=dsObs, + latVarName='latCoord', + lonVarName='lonCoord') + dsObs.close() + return obsDescriptor + + def build_observational_dataset(self, fileName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Xylar Asay-Davis, Luke Van Roekel + + # Load Argo observational data + dsObs = xr.open_dataset(fileName) + + # Rename coordinates to be consistent with other datasets + dsObs = dsObs.rename({'month': 'calmonth', 'LATITUDE': 'latCoord', + 'LONGITUDE': 'lonCoord', 'DEPTH': 'depth'}) + dsObs.coords['LATITUDE'] = dsObs['latCoord'] + dsObs.coords['LONGITUDE'] = dsObs['lonCoord'] + dsObs.coords['DEPTH'] = dsObs['depth'] + dsObs.coords['month'] = ('Time', np.array(dsObs['calmonth'], int)) + + # no meaningful year since this is already a climatology + dsObs.coords['year'] = ('Time', 
np.ones(dsObs.sizes['Time'], int)) + dsObs = dsObs[[self.fieldName, 'month']] + + slices = [] + field = dsObs[self.fieldName] + for depth in self.depths: + if depth == 'top': + slices.append(field.sel(method='nearest', depth=0.).drop_vars( + 'depth')) + else: + slices.append( + field.sel(method='nearest', depth=depth).drop_vars('depth')) + + depthNames = [str(depth) for depth in self.depths] + field = xr.concat(slices, dim='depthSlice') + + dsObs = xr.Dataset(data_vars={self.fieldName: field}, + coords={'depthSlice': depthNames}) + + return dsObs +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_eke.html b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_eke.html new file mode 100644 index 000000000..f647666c8 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_eke.html @@ -0,0 +1,417 @@ + + + + + + mpas_analysis.ocean.climatology_map_eke — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.climatology_map_eke

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import xarray as xr
+from pyremap import LatLonGridDescriptor
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.io.utility import build_obs_path
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \
+    RemapObservedClimatologySubtask
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+
+
+
+[docs] +class ClimatologyMapEKE(AnalysisTask): + """ + An analysis task for comparison of eddy kinetic energy (eke) against + observations + """ + # Authors + # ------- + # Kevin Rosa + +
+[docs] + def __init__(self, config, mpasClimatologyTask, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis, Kevin Rosa + + fieldName = 'eke' + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapEKE, self).__init__( + config=config, taskName='climatologyMapEKE', + componentName='ocean', + tags=['climatology', 'horizontalMap', fieldName, 'publicObs']) + + mpasFieldName = 'eke' + iselValues = {'nVertLevels': 0} + + sectionName = self.taskName + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + # EKE observations are annual climatology so only accept annual + # climatology **should move this to setup_and_check() + if seasons != ['ANN']: + raise ValueError('config section {} does not contain valid list ' + 'of seasons. For EKE, may only request annual ' + 'climatology'.format(sectionName)) + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of comparison grids'.format(sectionName)) + + # the variables in variableList will be added to mpasClimatologyTask + # along with the seasons. 
+ variableList = ['timeMonthly_avg_velocityZonal', + 'timeMonthly_avg_velocityMeridional', + 'timeMonthly_avg_velocityZonalSquared', + 'timeMonthly_avg_velocityMeridionalSquared'] + remapClimatologySubtask = RemapMpasEKEClimatology( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName=fieldName, + variableList=variableList, + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues) + + # to compare to observations: + if controlConfig is None: + + refTitleLabel = \ + 'Observations (Surface EKE from Drifter Data)' + + observationsDirectory = build_obs_path( + config, 'ocean', '{}Subdirectory'.format(fieldName)) + + obsFileName = \ + "{}/drifter_variance_20180804.nc".format( + observationsDirectory) + refFieldName = 'eke' + outFileLabel = 'ekeDRIFTER' + galleryName = 'Observations: EKE from Drifters' + + remapObservationsSubtask = RemapObservedEKEClimatology( + parentTask=self, seasons=seasons, fileName=obsFileName, + outFilePrefix=refFieldName, + comparisonGridNames=comparisonGridNames) + self.add_subtask(remapObservationsSubtask) + diffTitleLabel = 'Model - Observations' + + # compare with previous run: + else: + remapObservationsSubtask = None + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = None + refTitleLabel = 'Control: {}'.format(controlRunName) + + refFieldName = mpasFieldName + outFileLabel = 'eke' + diffTitleLabel = 'Main - Control' + + for comparisonGridName in comparisonGridNames: + for season in seasons: + # make a new subtask for this season and comparison grid + subtask = PlotClimatologyMapSubtask( + self, season, comparisonGridName, remapClimatologySubtask, + remapObservationsSubtask, controlConfig=controlConfig) + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle='EKE', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'cm$^2$/s$^2$', + imageCaption='Mean 
Surface Eddy Kinetic Energy', + galleryGroup='Eddy Kinetic Energy', + groupSubtitle=None, + groupLink='eke', + galleryName=galleryName) + + self.add_subtask(subtask)
+
+ + + +# adds to the functionality of RemapDepthSlicesSubtask +class RemapMpasEKEClimatology(RemapMpasClimatologySubtask): + """ + A subtask for computing climatologies of eddy kinetic energy from means of + velocity and velocity-squared. + """ + # Authors + # ------- + # Kevin Rosa + + def customize_masked_climatology(self, climatology, season): + """ + Construct velocity magnitude as part of the climatology + + Parameters + ---------- + climatology : ``xarray.Dataset`` object + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : ``xarray.Dataset`` object + the modified climatology data set + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the base class's version of this function so we extract + # the desired slices. + climatology = super(RemapMpasEKEClimatology, + self).customize_masked_climatology(climatology, + season) + + # calculate mpas eddy kinetic energy + scaleFactor = 100 * 100 # m2/s2 to cm2/s2 + eke = 0.5 * scaleFactor * \ + (climatology.timeMonthly_avg_velocityZonalSquared - + climatology.timeMonthly_avg_velocityZonal ** 2 + + climatology.timeMonthly_avg_velocityMeridionalSquared - + climatology.timeMonthly_avg_velocityMeridional ** 2) + + # drop unnecessary fields before re-mapping + climatology.drop_vars(['timeMonthly_avg_velocityZonal', + 'timeMonthly_avg_velocityMeridional', + 'timeMonthly_avg_velocityZonalSquared', + 'timeMonthly_avg_velocityMeridionalSquared']) + + # this creates a new variable eke in climatology (like netcdf) + climatology['eke'] = eke + climatology.eke.attrs['units'] = 'cm$^[2]$ s$^{-2}$' + climatology.eke.attrs['description'] = 'eddy kinetic energy' + + return climatology + + +class RemapObservedEKEClimatology(RemapObservedClimatologySubtask): + """ + A subtask for reading and remapping EKE observations + """ + # Authors + # ------- + # Kevin Rosa + + def get_observation_descriptor(self, fileName): + """ + get a MeshDescriptor for the 
observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + """ + # Authors + # ------- + # Kevin Rosa + + # create a descriptor of the observation grid using the lat/lon + # coordinates + obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, + latVarName='Lat', + lonVarName='Lon') + + return obsDescriptor + + def build_observational_dataset(self, fileName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Kevin Rosa, Xylar Asay-Davis + + dsIn = xr.open_dataset(fileName) + + scaleFactor = 100 * 100 # m2/s2 to cm2/s2 + eke = 0.5 * scaleFactor * \ + (dsIn['Up2bar'].values + dsIn['Vp2bar'].values) + + # create a new dataset for the observations. solves transpose issues. + dsObs = xr.Dataset({'eke': (['latitude', 'longitude'], eke.T)}, + coords={'Lat': (['latitude'], dsIn.Lat.values), + 'Lon': (['longitude'], dsIn.Lon.values)} + ) + # update attributes + dsObs.eke.attrs['units'] = 'cm$^2$ s$^{-2}$' + dsObs.eke.attrs['long_name'] = 'Eddy kinetic energy' + + return dsObs +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_mld.html b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_mld.html new file mode 100644 index 000000000..7589394bc --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_mld.html @@ -0,0 +1,376 @@ + + + + + + mpas_analysis.ocean.climatology_map_mld — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.climatology_map_mld

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import xarray as xr
+import numpy as np
+from pyremap import LatLonGridDescriptor
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.io.utility import build_obs_path
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \
+    RemapObservedClimatologySubtask
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+
+
+
+[docs] +class ClimatologyMapMLD(AnalysisTask): + """ + An analysis task for comparison of mixed layer depth (mld) against + observations + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani + +
+[docs] + def __init__(self, config, mpasClimatologyTask, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + fieldName = 'mld' + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapMLD, self).__init__( + config=config, taskName='climatologyMapMLD', + componentName='ocean', + tags=['climatology', 'horizontalMap', fieldName, 'publicObs']) + + sectionName = self.taskName + + mpasFieldName = 'timeMonthly_avg_dThreshMLD' + iselValues = None + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(sectionName)) + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of comparison grids'.format(sectionName)) + + # the variable 'timeMonthly_avg_dThreshMLD' will be added to + # mpasClimatologyTask along with the seasons. 
+ remapClimatologySubtask = RemapMpasClimatologySubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName=fieldName, + variableList=[mpasFieldName], + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues) + + if controlConfig is None: + + observationsDirectory = build_obs_path( + config, 'ocean', '{}Subdirectory'.format(fieldName)) + + obsFileName = "{}/holtetalley_mld_climatology_20180710.nc".format( + observationsDirectory) + + refFieldName = 'mld' + outFileLabel = 'mldHolteTalleyARGO' + + remapObservationsSubtask = RemapObservedMLDClimatology( + parentTask=self, seasons=seasons, fileName=obsFileName, + outFilePrefix=refFieldName, + comparisonGridNames=comparisonGridNames) + self.add_subtask(remapObservationsSubtask) + galleryName = 'Observations: Holte-Talley ARGO' + refTitleLabel = \ + 'Observations (HolteTalley density threshold MLD)' + diffTitleLabel = 'Model - Observations' + + else: + remapObservationsSubtask = None + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = None + refTitleLabel = 'Control: {}'.format(controlRunName) + + refFieldName = mpasFieldName + outFileLabel = 'mld' + diffTitleLabel = 'Main - Control' + + for comparisonGridName in comparisonGridNames: + for season in seasons: + # make a new subtask for this season and comparison grid + subtask = PlotClimatologyMapSubtask( + self, season, comparisonGridName, remapClimatologySubtask, + remapObservationsSubtask, controlConfig=controlConfig) + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle='MLD', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'm', + imageCaption='Mean Mixed-Layer Depth', + galleryGroup='Mixed-Layer Depth', + groupSubtitle=None, + groupLink='mld', + galleryName=galleryName) + + self.add_subtask(subtask)
+ + + def setup_and_check(self): + """ + Check if MLD capability was turned on in the run. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(ClimatologyMapMLD, self).setup_and_check() + + self.check_analysis_enabled( + analysisOptionName='config_am_mixedlayerdepths_enable', + raiseException=True)
+ + + +class RemapObservedMLDClimatology(RemapObservedClimatologySubtask): + """ + A subtask for reading and remapping MLD observations + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani + + def get_observation_descriptor(self, fileName): + """ + get a MeshDescriptor for the observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + """ + # Authors + # ------- + # Xylar Asay-Davis + + # Load MLD observational data + dsObs = self.build_observational_dataset(fileName) + + # create a descriptor of the observation grid using the lat/lon + # coordinates + obsDescriptor = LatLonGridDescriptor.read(ds=dsObs, + latVarName='lat', + lonVarName='lon') + dsObs.close() + return obsDescriptor + + def build_observational_dataset(self, fileName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Xylar Asay-Davis + + # Load MLD observational data + dsObs = xr.open_dataset(fileName) + + # Increment month value to be consistent with the model output + dsObs.assign_coords(iMONTH=dsObs.iMONTH+1) + # Rename the dimensions to be consistent with other obs. 
data sets + dsObs = dsObs.rename({'month': 'calmonth', 'lat': 'latCoord', + 'lon': 'lonCoord', 'mld_dt_mean': 'mld'}) + dsObs = dsObs.rename({'iMONTH': 'Time', 'iLAT': 'lat', 'iLON': 'lon'}) + + # set the coordinates now that the dimensions have the same names + dsObs.coords['lat'] = dsObs['latCoord'] + dsObs.coords['lon'] = dsObs['lonCoord'] + dsObs.coords['Time'] = dsObs['calmonth'] + dsObs.coords['month'] = ('Time', np.array(dsObs['calmonth'], int)) + + # no meaningful year since this is already a climatology + dsObs.coords['year'] = ('Time', np.ones(dsObs.sizes['Time'], int)) + + dsObs = dsObs[['mld', 'month']] + return dsObs +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_mld_min_max.html b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_mld_min_max.html new file mode 100644 index 000000000..50d3f52c4 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_mld_min_max.html @@ -0,0 +1,353 @@ + + + + + + mpas_analysis.ocean.climatology_map_mld_min_max — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.climatology_map_mld_min_max

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+
+
+
+[docs] +class ClimatologyMapMLDMinMax(AnalysisTask): + """ + An analysis task for comparison of mixed layer depth (mld) against + observations + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani + +
+[docs] + def __init__(self, config, mpasClimatologyTasks, controlConfig=None): + + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTasks : dict of ``MpasClimatologyTask`` + The tasks that produced the climatology of monthly min and max to + be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + fieldName = 'mld' + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapMLDMinMax, self).__init__( + config=config, taskName='climatologyMapMLDMinMax', + componentName='ocean', + tags=['climatology', 'horizontalMap', fieldName, 'publicObs', + 'min', 'max']) + + self._add_tasks(config, mpasClimatologyTasks, controlConfig, + title='Density MLD', + mpasVariableSuffix='dThreshMLD', + filePrefix='d_mld', + sectionPrefix='climatologyMapDensityMLD') + self._add_tasks(config, mpasClimatologyTasks, controlConfig, + title='Temperature MLD', + mpasVariableSuffix='tThreshMLD', + filePrefix='t_mld', + sectionPrefix='climatologyMapTemperatureMLD') + self._add_tasks(config, mpasClimatologyTasks, controlConfig, + title='Boundary-Layer Depth', + mpasVariableSuffix='boundaryLayerDepth', + filePrefix='bld', + sectionPrefix='climatologyMapBLD')
+ + + def setup_and_check(self): + """ + Check if MLD capability was turned on in the run. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(ClimatologyMapMLDMinMax, self).setup_and_check() + + self.check_analysis_enabled( + analysisOptionName='config_am_mixedlayerdepths_enable', + raiseException=True) + + self.check_analysis_enabled( + analysisOptionName='config_AM_timeSeriesStatsMonthlyMin_enable', + raiseException=True) + self.check_analysis_enabled( + analysisOptionName='config_AM_timeSeriesStatsMonthlyMax_enable', + raiseException=True) + + def _add_tasks(self, config, mpasClimatologyTasks, controlConfig, + title, mpasVariableSuffix, filePrefix, sectionPrefix): + """ + Add tasks for a given variable + """ + iselValues = None + + # read in what seasons we want to plot + seasons = config.getexpression(self.taskName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(self.taskName)) + + comparisonGridNames = config.getexpression(self.taskName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of comparison grids'.format(self.taskName)) + + remapClimatologySubtasks = {} + mpasFieldNames = {} + for op in ['min', 'max']: + upperOp = op[0].upper() + op[1:] + + mpasFieldNames[op] = 'timeMonthly{}_{}_{}'.format( + upperOp, op, mpasVariableSuffix) + # the variable 'timeMonthly_avg_dThreshMLD' will be added to + # mpasClimatologyTask along with the seasons. 
+ remapClimatologySubtasks[op] = RemapMpasClimatologySubtask( + mpasClimatologyTask=mpasClimatologyTasks[op], + parentTask=self, + climatologyName=filePrefix, + variableList=[mpasFieldNames[op]], + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues, + subtaskName='remap{}_{}'.format(upperOp, filePrefix)) + + if controlConfig is None: + fieldNameInTitle = 'Max {}'.format(title) + refTitleLabel = 'Min {}'.format(title) + diffTitleLabel = 'Max - Min {}'.format(title) + galleryName = title + outFileLabel = '{}_min_max'.format(filePrefix) + sectionName = '{}MinMax'.format(sectionPrefix) + + for comparisonGridName in comparisonGridNames: + for season in seasons: + subtaskName = 'plot_{}_{}_{}'.format(filePrefix, + season, + comparisonGridName) + # make a new subtask for this season and comparison grid + firstTask = remapClimatologySubtasks['max'] + secondTask = remapClimatologySubtasks['min'] + subtask = PlotClimatologyMapSubtask( + self, season, comparisonGridName, + remapMpasClimatologySubtask=firstTask, + secondRemapMpasClimatologySubtask=secondTask, + controlConfig=controlConfig, + subtaskName=subtaskName) + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle=fieldNameInTitle, + mpasFieldName=mpasFieldNames['max'], + refFieldName=mpasFieldNames['min'], + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'm', + imageCaption='Min/Max Mixed-Layer Depth', + galleryGroup='Min/Max Mixed-Layer Depth', + groupSubtitle=None, + groupLink='mld_min_max', + galleryName=galleryName, + configSectionName=sectionName) + + self.add_subtask(subtask) + + else: + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = title + refTitleLabel = 'Control: {}'.format(controlRunName) + + diffTitleLabel = 'Main - Control' + + for comparisonGridName in comparisonGridNames: + for season in seasons: + for op in ['min', 'max']: + upperOp = op[0].upper() + op[1:] + subtaskName = 
'plot{}_{}_{}_{}'.format( + upperOp, filePrefix, season, comparisonGridName) + fieldNameInTitle = '{} {}'.format(upperOp, title) + outFileLabel = '{}_{}'.format(filePrefix, op) + sectionName = '{}{}'.format(sectionPrefix, upperOp) + + # make a new subtask for this season and comparison + # grid + subtask = PlotClimatologyMapSubtask( + self, season, comparisonGridName, + remapClimatologySubtasks[op], + controlConfig=controlConfig, + subtaskName=subtaskName) + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle=fieldNameInTitle, + mpasFieldName=mpasFieldNames[op], + refFieldName=mpasFieldNames[op], + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'm', + imageCaption=fieldNameInTitle, + galleryGroup='Min/Max Mixed-Layer Depth', + groupSubtitle=None, + groupLink='mld_{}'.format(op), + galleryName=galleryName, + configSectionName=sectionName) + + self.add_subtask(subtask)
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_ohc_anomaly.html b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_ohc_anomaly.html new file mode 100644 index 000000000..6588b64a9 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_ohc_anomaly.html @@ -0,0 +1,463 @@ + + + + + + mpas_analysis.ocean.climatology_map_ohc_anomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.climatology_map_ohc_anomaly

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import xarray as xr
+import numpy as np
+
+from mpas_analysis.shared import AnalysisTask
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+from mpas_analysis.ocean.utility import compute_zmid
+
+
+
+[docs] +class ClimatologyMapOHCAnomaly(AnalysisTask): + """ + An analysis task for comparison of the anomaly from a reference year + (typically the start of the simulation) of ocean heat content (OHC) + + Attributes + ---------- + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped and plotted + + ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask + The task that produced the climatology from the first year to be + remapped and then subtracted from the main climatology + """ + +
+[docs] + def __init__(self, config, mpas_climatology_task, + ref_year_climatology_task, control_config=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped and plotted + + ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask + The task that produced the climatology from the first year to be + remapped and then subtracted from the main climatology + + control_config : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + + field_name = 'deltaOHC' + # call the constructor from the base class (AnalysisTask) + super().__init__(config=config, taskName='climatologyMapOHCAnomaly', + componentName='ocean', + tags=['climatology', 'horizontalMap', field_name, + 'publicObs', 'anomaly']) + + self.mpas_climatology_task = mpas_climatology_task + self.ref_year_climatology_task = ref_year_climatology_task + + section_name = self.taskName + + # read in what seasons we want to plot + seasons = config.getexpression(section_name, 'seasons') + + if len(seasons) == 0: + raise ValueError(f'config section {section_name} does not contain ' + f'valid list of seasons') + + comparison_grid_names = config.getexpression(section_name, + 'comparisonGrids') + + if len(comparison_grid_names) == 0: + raise ValueError(f'config section {section_name} does not contain ' + f'valid list of comparison grids') + + depth_ranges = config.getexpression('climatologyMapOHCAnomaly', + 'depthRanges', + use_numpyfunc=True) + + mpas_field_name = 'deltaOHC' + + variable_list = ['timeMonthly_avg_activeTracers_temperature', + 'timeMonthly_avg_layerThickness'] + + for min_depth, max_depth in depth_ranges: + depth_range_string = \ + f'{np.abs(min_depth):g}-{np.abs(max_depth):g}m' + remap_climatology_subtask = 
RemapMpasOHCClimatology( + mpas_climatology_task=mpas_climatology_task, + ref_year_climatology_task=ref_year_climatology_task, + parent_task=self, + climatology_name=f'{field_name}_{depth_range_string}', + variable_list=variable_list, + comparison_grid_names=comparison_grid_names, + seasons=seasons, + min_depth=min_depth, + max_depth=max_depth) + + self.add_subtask(remap_climatology_subtask) + + out_file_label = f'deltaOHC_{depth_range_string}' + remap_observations_subtask = None + if control_config is None: + ref_title_label = None + ref_field_name = None + diff_title_label = 'Model - Observations' + + else: + control_run_name = control_config.get('runs', 'mainRunName') + ref_title_label = f'Control: {control_run_name}' + ref_field_name = mpas_field_name + diff_title_label = 'Main - Control' + + for comparison_grid_name in comparison_grid_names: + for season in seasons: + # make a new subtask for this season and comparison grid + subtask_name = f'plot{season}_{comparison_grid_name}_{depth_range_string}' + + subtask = PlotClimatologyMapSubtask( + self, season, comparison_grid_name, + remap_climatology_subtask, remap_observations_subtask, + controlConfig=control_config, subtaskName=subtask_name) + + subtask.set_plot_info( + outFileLabel=out_file_label, + fieldNameInTitle=f'$\\Delta$OHC over {depth_range_string}', + mpasFieldName=mpas_field_name, + refFieldName=ref_field_name, + refTitleLabel=ref_title_label, + diffTitleLabel=diff_title_label, + unitsLabel=r'GJ m$^{-2}$', + imageCaption=f'Anomaly in Ocean Heat Content over {depth_range_string}', + galleryGroup='OHC Anomaly', + groupSubtitle=None, + groupLink='ohc_anom', + galleryName=None) + + self.add_subtask(subtask)
+ + + def setup_and_check(self): + """ + Checks whether analysis is being performed only on the reference year, + in which case the analysis will not be meaningful. + + Raises + ------ + ValueError: if attempting to analyze only the reference year + """ + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super().setup_and_check() + + start_year, end_year = self.mpas_climatology_task.get_start_and_end() + ref_start_year, ref_end_year = \ + self.ref_year_climatology_task.get_start_and_end() + + if (start_year == ref_start_year) and (end_year == ref_end_year): + raise ValueError('OHC Anomaly is not meaningful and will not work ' + 'when climatology and ref year are the same.')
+ + + +class RemapMpasOHCClimatology(RemapMpasClimatologySubtask): + """ + A subtask for computing climatologies of ocean heat content from + climatologies of temperature + + Attributes + ---------- + ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask + The task that produced the climatology from the first year to be + remapped and then subtracted from the main climatology + + min_depth, max_depth : float + The minimum and maximum depths for integration + """ + + def __init__(self, mpas_climatology_task, ref_year_climatology_task, + parent_task, climatology_name, variable_list, seasons, + comparison_grid_names, min_depth, max_depth): + + """ + Construct the analysis task and adds it as a subtask of the + ``parent_task``. + + Parameters + ---------- + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped + + ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask + The task that produced the climatology from the first year to be + remapped and then subtracted from the main climatology + + parent_task : mpas_analysis.shared.AnalysisTask + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + climatology_name : str + A name that describes the climatology (e.g. a short version of + the important field(s) in the climatology) used to name the + subdirectories for each stage of the climatology + + variable_list : list of str + A list of variable names in ``timeSeriesStatsMonthly`` to be + included in the climatologies + + seasons : list of str, optional + A list of seasons (keys in ``shared.constants.monthDictionary``) + to be computed or ['none'] (not ``None``) if only monthly + climatologies are needed. + + comparison_grid_names : list of {'latlon', 'antarctic'} + The name(s) of the comparison grid to use for remapping. 
+ + min_depth, max_depth : float + The minimum and maximum depths for integration + """ + + depth_range_string = f'{np.abs(min_depth):g}-{np.abs(max_depth):g}m' + subtask_name = f'remapMpasClimatology_{depth_range_string}' + # call the constructor from the base class + # (RemapMpasClimatologySubtask) + super().__init__( + mpas_climatology_task, parent_task, climatology_name, + variable_list, seasons, comparison_grid_names, + subtaskName=subtask_name) + + self.ref_year_climatology_task = ref_year_climatology_task + self.run_after(ref_year_climatology_task) + self.min_depth = min_depth + self.max_depth = max_depth + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + + # first, call setup_and_check from the base class + # (RemapMpasClimatologySubtask), which will set up remappers and add + # variables to mpas_climatology_task + super().setup_and_check() + + # don't add the variables and seasons to mpas_climatology_task until + # we're sure this subtask is supposed to run + self.ref_year_climatology_task.add_variables(self.variableList, + self.seasons) + + def customize_masked_climatology(self, climatology, season): + """ + Compute the ocean heat content (OHC) anomaly from the temperature + and layer thickness fields. 
+ + Parameters + ---------- + climatology : xarray.Dataset + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : xarray.Dataset + the modified climatology data set + """ + + ohc = self._compute_ohc(climatology) + ref_file_name = self.ref_year_climatology_task.get_file_name(season) + ref_year_climo = xr.open_dataset(ref_file_name) + if 'Time' in ref_year_climo.dims: + ref_year_climo = ref_year_climo.isel(Time=0) + ref_ohc = self._compute_ohc(ref_year_climo) + + climatology['deltaOHC'] = ohc - ref_ohc + climatology.deltaOHC.attrs['units'] = 'GJ m^-2' + start_year = self.ref_year_climatology_task.startYear + climatology.deltaOHC.attrs['description'] = \ + f'Anomaly from year {start_year} in ocean heat content' + climatology = climatology.drop_vars(self.variableList) + + return climatology + + def _compute_ohc(self, climatology): + """ + Compute the OHC from the temperature and layer thicknesses in a given + climatology data sets. 
+ """ + ds_restart = xr.open_dataset(self.restartFileName) + ds_restart = ds_restart.isel(Time=0) + + # specific heat [J/(kg*degC)] + cp = self.namelist.getfloat('config_specific_heat_sea_water') + # [kg/m3] + rho = self.namelist.getfloat('config_density0') + + units_scale_factor = 1e-9 + + n_vert_levels = ds_restart.sizes['nVertLevels'] + + z_mid = compute_zmid(ds_restart.bottomDepth, ds_restart.maxLevelCell-1, + ds_restart.layerThickness) + + vert_index = xr.DataArray.from_dict( + {'dims': ('nVertLevels',), 'data': np.arange(n_vert_levels)}) + + temperature = climatology['timeMonthly_avg_activeTracers_temperature'] + layer_thickness = climatology['timeMonthly_avg_layerThickness'] + + masks = [vert_index < ds_restart.maxLevelCell, + z_mid <= self.min_depth, + z_mid >= self.max_depth] + for mask in masks: + temperature = temperature.where(mask) + layer_thickness = layer_thickness.where(mask) + + ohc = units_scale_factor * rho * cp * layer_thickness * temperature + ohc = ohc.sum(dim='nVertLevels') + return ohc +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_sose.html b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_sose.html new file mode 100644 index 000000000..3f4f6fb9d --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_sose.html @@ -0,0 +1,481 @@ + + + + + + mpas_analysis.ocean.climatology_map_sose — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.climatology_map_sose

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Analysis tasks for comparing Antarctic climatology maps against observations
+and reanalysis data.
+"""
+# Authors
+# -------
+# Xylar Asay-Davis
+
+import numpy
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask
+
+from mpas_analysis.ocean.remap_depth_slices_subtask import \
+    RemapDepthSlicesSubtask
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+from mpas_analysis.ocean.remap_sose_climatology import RemapSoseClimatology
+
+from mpas_analysis.shared.io.utility import build_obs_path
+
+
+
+[docs] +class ClimatologyMapSose(AnalysisTask): + """ + An analysis task for comparison of antarctic field against the Southern + Ocean State Estimate + """ + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, config, mpasClimatologyTask, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + fields = \ + [{'prefix': 'temperature', + 'mpas': 'timeMonthly_avg_activeTracers_temperature', + 'units': r'$\degree$C', + 'titleName': 'Potential Temperature', + '3D': True, + 'obsFilePrefix': 'pot_temp', + 'obsFieldName': 'theta', + 'obsBotFieldName': 'botTheta'}, + {'prefix': 'salinity', + 'mpas': 'timeMonthly_avg_activeTracers_salinity', + 'units': r'PSU', + 'titleName': 'Salinity', + '3D': True, + 'obsFilePrefix': 'salinity', + 'obsFieldName': 'salinity', + 'obsBotFieldName': 'botSalinity'}, + {'prefix': 'potentialDensity', + 'mpas': 'timeMonthly_avg_potentialDensity', + 'units': r'kg m$^{-3}$', + 'titleName': 'Potential Density', + '3D': True, + 'obsFilePrefix': 'pot_den', + 'obsFieldName': 'potentialDensity', + 'obsBotFieldName': 'botPotentialDensity'}, + {'prefix': 'mixedLayerDepth', + 'mpas': 'timeMonthly_avg_dThreshMLD', + 'units': r'm', + 'titleName': 'Mixed Layer Depth', + '3D': False, + 'obsFilePrefix': 'mld', + 'obsFieldName': 'mld', + 'obsBotFieldName': None}, + {'prefix': 'zonalVelocity', + 'mpas': 'timeMonthly_avg_velocityZonal', + 'units': r'm s$^{-1}$', + 'titleName': 'Zonal Velocity', + '3D': True, + 'obsFilePrefix': 'zonal_vel', + 'obsFieldName': 'zonalVel', + 'obsBotFieldName': 'botZonalVel'}, + {'prefix': 'meridionalVelocity', + 'mpas': 'timeMonthly_avg_velocityMeridional', + 'units': r'm s$^{-1}$', + 'titleName': 'Meridional Velocity', + '3D': True, + 'obsFilePrefix': 'merid_vel', + 'obsFieldName': 'meridVel', + 'obsBotFieldName': 'botMeridVel'}, 
+ {'prefix': 'velocityMagnitude', + 'mpas': 'velMag', + 'units': r'm s$^{-1}$', + 'titleName': 'Velocity Magnitude', + '3D': True, + 'obsFilePrefix': 'vel_mag', + 'obsFieldName': 'velMag', + 'obsBotFieldName': 'botVelMag'}] + + tags = ['climatology', 'horizontalMap', 'sose', 'publicObs', + 'antarctic'] + [field['prefix'] for field in fields] + + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapSose, self).__init__( + config=config, taskName='climatologyMapSose', + componentName='ocean', + tags=tags) + + sectionName = self.taskName + + fileSuffix = config.get(sectionName, 'fileSuffix') + if fileSuffix.endswith('.nc'): + fileSuffix = fileSuffix.strip('.nc') + + fieldList = config.getexpression(sectionName, 'fieldList') + fields = [field for field in fields if field['prefix'] in fieldList] + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid ' + 'list of seasons'.format(sectionName)) + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid ' + 'list of comparison grids'.format( + sectionName)) + + if not numpy.any([field['3D'] for field in fields]): + depths = None + else: + depths = config.getexpression(sectionName, 'depths') + + if len(depths) == 0: + raise ValueError('config section {} does not contain valid ' + 'list of depths'.format(sectionName)) + + variableList = [field['mpas'] for field in fields + if field['mpas'] != 'velMag'] + + shallowVsDeepColormapDepth = config.getfloat( + sectionName, 'shallowVsDeepColormapDepth') + + shallow = [] + for depth in depths: + if depth == 'top': + shallow.append(True) + elif depth == 'bot': + shallow.append(False) + else: + shallow.append(depth >= shallowVsDeepColormapDepth) + + if depths is None: + remapMpasSubtask = 
RemapMpasClimatologySubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName='SOSE', + variableList=variableList, + seasons=seasons, + comparisonGridNames=comparisonGridNames, + iselValues=None) + else: + remapMpasSubtask = RemapMpasVelMagClimatology( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName='SOSE', + variableList=variableList, + seasons=seasons, + depths=depths, + comparisonGridNames=comparisonGridNames, + iselValues=None) + + for field in fields: + fieldPrefix = field['prefix'] + upperFieldPrefix = fieldPrefix[0].upper() + fieldPrefix[1:] + sectionName = '{}{}'.format(self.taskName, upperFieldPrefix) + + if field['3D']: + fieldDepths = depths + else: + fieldDepths = None + + if controlConfig is None: + + refTitleLabel = 'State Estimate (SOSE)' + + observationsDirectory = build_obs_path( + config, 'ocean', 'soseSubdirectory') + + obsFileName = '{}/SOSE_2005-2010_monthly_{}_{}.nc'.format( + observationsDirectory, field['obsFilePrefix'], fileSuffix) + refFieldName = field['obsFieldName'] + outFileLabel = '{}SOSE'.format(fieldPrefix) + galleryName = 'State Estimate: SOSE' + diffTitleLabel = 'Model - State Estimate' + + remapObsSubtask = RemapSoseClimatology( + parentTask=self, seasons=seasons, fileName=obsFileName, + outFilePrefix='{}SOSE'.format(refFieldName), + fieldName=refFieldName, + botFieldName=field['obsBotFieldName'], + depths=fieldDepths, + comparisonGridNames=comparisonGridNames, + subtaskName='remapObservations{}'.format( + upperFieldPrefix)) + + self.add_subtask(remapObsSubtask) + + else: + remapObsSubtask = None + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = 'Control: {}'.format(controlRunName) + refTitleLabel = galleryName + + refFieldName = field['mpas'] + outFileLabel = '{}SOSE'.format(fieldPrefix) + diffTitleLabel = 'Main - Control' + + if field['3D']: + fieldDepths = depths + else: + fieldDepths = [None] + + for comparisonGridName in 
comparisonGridNames: + for depthIndex, depth in enumerate(fieldDepths): + for season in seasons: + + subtaskName = 'plot{}_{}_{}'.format(upperFieldPrefix, + season, + comparisonGridName) + if depth is not None: + subtaskName = '{}_depth_{}'.format(subtaskName, + depth) + + subtask = PlotClimatologyMapSubtask( + parentTask=self, + season=season, + comparisonGridName=comparisonGridName, + remapMpasClimatologySubtask=remapMpasSubtask, + remapObsClimatologySubtask=remapObsSubtask, + controlConfig=controlConfig, + depth=depth, + subtaskName=subtaskName) + + configSectionName = 'climatologyMapSose{}'.format( + upperFieldPrefix) + + # if available, use a separate color map for shallow + # and deep + if depth is not None: + if shallow[depthIndex]: + suffix = 'Shallow' + else: + suffix = 'Deep' + testSectionName = '{}{}'.format(configSectionName, + suffix) + if config.has_section(testSectionName): + configSectionName = testSectionName + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle=field['titleName'], + mpasFieldName=field['mpas'], + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=field['units'], + imageCaption=field['titleName'], + galleryGroup=field['titleName'], + groupSubtitle=None, + groupLink='{}Sose'.format(fieldPrefix), + galleryName=galleryName, + configSectionName=configSectionName) + + self.add_subtask(subtask)
+
+ + + +class RemapMpasVelMagClimatology(RemapDepthSlicesSubtask): + """ + A subtask for computing climatologies of velocity magnitude from zonal + and meridional components + """ + # Authors + # ------- + # Xylar Asay-Davis + + def customize_masked_climatology(self, climatology, season): + """ + Construct velocity magnitude as part of the climatology + + Parameters + ---------- + climatology : ``xarray.Dataset`` object + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : ``xarray.Dataset`` object + the modified climatology data set + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the base class's version of this function so we extract + # the desired slices. + climatology = super(RemapMpasVelMagClimatology, + self).customize_masked_climatology(climatology, + season) + + if 'timeMonthly_avg_velocityZonal' in climatology and \ + 'timeMonthly_avg_velocityMeridional' in climatology: + zonalVel = climatology.timeMonthly_avg_velocityZonal + meridVel = climatology.timeMonthly_avg_velocityMeridional + climatology['velMag'] = numpy.sqrt(zonalVel**2 + meridVel**2) + climatology.velMag.attrs['units'] = 'm s$^{-1}$' + climatology.velMag.attrs['description'] = 'velocity magnitude' + + return climatology +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_ssh.html b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_ssh.html new file mode 100644 index 000000000..1feb4a2a9 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_ssh.html @@ -0,0 +1,384 @@ + + + + + + mpas_analysis.ocean.climatology_map_ssh — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.climatology_map_ssh

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import xarray as xr
+from pyremap import LatLonGridDescriptor
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.io.utility import build_obs_path
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \
+    RemapObservedClimatologySubtask
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+
+from mpas_analysis.shared.constants import constants
+
+
+
+[docs] +class ClimatologyMapSSH(AnalysisTask): + """ + An analysis task for comparison of sea surface height (ssh) against + observations + """ + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, config, mpasClimatologyTask, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + fieldName = 'ssh' + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapSSH, self).__init__( + config=config, taskName='climatologyMapSSH', + componentName='ocean', + tags=['climatology', 'horizontalMap', fieldName, 'publicObs']) + + mpasFieldName = 'timeMonthly_avg_pressureAdjustedSSH' + + iselValues = None + + sectionName = self.taskName + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(sectionName)) + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of comparison grids'.format(sectionName)) + + # the variable mpasFieldName will be added to mpasClimatologyTask + # along with the seasons. 
+ remapClimatologySubtask = RemapSSHClimatology( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName=fieldName, + variableList=[mpasFieldName], + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues) + + if controlConfig is None: + + refTitleLabel = 'Observations (AVISO Dynamic ' \ + 'Topography, 1993-2010)' + + observationsDirectory = build_obs_path( + config, 'ocean', '{}Subdirectory'.format(fieldName)) + + obsFileName = \ + "{}/zos_AVISO_L4_199210-201012_20180710.nc".format( + observationsDirectory) + refFieldName = 'zos' + outFileLabel = 'sshAVISO' + galleryName = 'Observations: AVISO' + + remapObservationsSubtask = RemapObservedSSHClimatology( + parentTask=self, seasons=seasons, fileName=obsFileName, + outFilePrefix=refFieldName, + comparisonGridNames=comparisonGridNames) + self.add_subtask(remapObservationsSubtask) + diffTitleLabel = 'Model - Observations' + + else: + remapObservationsSubtask = None + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = None + refTitleLabel = 'Control: {}'.format(controlRunName) + + refFieldName = mpasFieldName + outFileLabel = 'ssh' + diffTitleLabel = 'Main - Control' + + for comparisonGridName in comparisonGridNames: + for season in seasons: + # make a new subtask for this season and comparison grid + subtask = PlotClimatologyMapSubtask( + self, season, comparisonGridName, remapClimatologySubtask, + remapObservationsSubtask, controlConfig=controlConfig, + removeMean=True) + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle='Zero-mean SSH', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'cm', + imageCaption='Mean Sea Surface Height', + galleryGroup='Sea Surface Height', + groupSubtitle=None, + groupLink='ssh', + galleryName=galleryName) + + self.add_subtask(subtask)
+
+ + + +class RemapSSHClimatology(RemapMpasClimatologySubtask): + """ + Change units from m to cm + """ + # Authors + # ------- + # Xylar Asay-Davis + + def customize_masked_climatology(self, climatology, season): + """ + Mask the melt rates using ``landIceMask`` and rescale it to m/yr + + Parameters + ---------- + climatology : ``xarray.Dataset``` + The MPAS climatology data set that has had a mask added but has + not yet been remapped + + season : str + The name of the season to be masked + + Returns + ------- + climatology : ``xarray.Dataset`` object + the modified climatology data set + """ + # Authors + # ------- + # Xylar Asay-Davis + + fieldName = self.variableList[0] + + # scale the field to cm from m + climatology[fieldName] = constants.cm_per_m * climatology[fieldName] + + return climatology + + +class RemapObservedSSHClimatology(RemapObservedClimatologySubtask): + """ + A subtask for reading and remapping SSH observations + """ + # Authors + # ------- + # Xylar Asay-Davis + + def get_observation_descriptor(self, fileName): + """ + get a MeshDescriptor for the observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + """ + # Authors + # ------- + # Xylar Asay-Davis + + # create a descriptor of the observation grid using the lat/lon + # coordinates + obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, + latVarName='lat', + lonVarName='lon') + return obsDescriptor + + def build_observational_dataset(self, fileName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Xylar Asay-Davis + + dsObs = xr.open_dataset(fileName) + dsObs = dsObs.rename({'time': 'Time'}) + 
dsObs.coords['month'] = dsObs['Time.month'] + dsObs.coords['year'] = dsObs['Time.year'] + dsObs = dsObs.drop_vars(['Time', 'time_bnds']) + + # scale the field to cm from m + dsObs['zos'] = constants.cm_per_m * dsObs['zos'] + + return dsObs +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_sss.html b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_sss.html new file mode 100644 index 000000000..a7b18e042 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_sss.html @@ -0,0 +1,343 @@ + + + + + + mpas_analysis.ocean.climatology_map_sss — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.climatology_map_sss

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import xarray as xr
+import datetime
+from pyremap import LatLonGridDescriptor
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.io.utility import build_obs_path
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \
+    RemapObservedClimatologySubtask
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+
+
+
+[docs] +class ClimatologyMapSSS(AnalysisTask): + """ + An analysis task for comparison of sea surface salinity (sss) against + observations + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani + +
+[docs] + def __init__(self, config, mpasClimatologyTask, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + fieldName = 'sss' + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapSSS, self).__init__( + config=config, taskName='climatologyMapSSS', + componentName='ocean', + tags=['climatology', 'horizontalMap', fieldName, 'publicObs']) + + mpasFieldName = 'timeMonthly_avg_activeTracers_salinity' + iselValues = {'nVertLevels': 0} + + sectionName = self.taskName + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(sectionName)) + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of comparison grids'.format(sectionName)) + + # the variable self.mpasFieldName will be added to mpasClimatologyTask + # along with the seasons. 
+ remapClimatologySubtask = RemapMpasClimatologySubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName=fieldName, + variableList=[mpasFieldName], + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues) + + if controlConfig is None: + + refTitleLabel = \ + 'Observations (Aquarius, 2011-2014)' + + observationsDirectory = build_obs_path( + config, 'ocean', '{}Subdirectory'.format(fieldName)) + + obsFileName = \ + "{}/Aquarius_V3_SSS_Monthly_20180710.nc".format( + observationsDirectory) + refFieldName = 'sss' + outFileLabel = 'sssAquarius' + galleryName = 'Observations: Aquarius' + + remapObservationsSubtask = RemapObservedSSSClimatology( + parentTask=self, seasons=seasons, fileName=obsFileName, + outFilePrefix=refFieldName, + comparisonGridNames=comparisonGridNames) + self.add_subtask(remapObservationsSubtask) + diffTitleLabel = 'Model - Observations' + + else: + remapObservationsSubtask = None + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = None + refTitleLabel = 'Control: {}'.format(controlRunName) + + refFieldName = mpasFieldName + outFileLabel = 'sss' + diffTitleLabel = 'Main - Control' + + for comparisonGridName in comparisonGridNames: + for season in seasons: + # make a new subtask for this season and comparison grid + subtask = PlotClimatologyMapSubtask( + self, season, comparisonGridName, remapClimatologySubtask, + remapObservationsSubtask, controlConfig=controlConfig) + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle='SSS', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'PSU', + imageCaption='Mean Sea Surface Salinity', + galleryGroup='Sea Surface Salinity', + groupSubtitle=None, + groupLink='sss', + galleryName=galleryName) + + self.add_subtask(subtask)
+
+ + + +class RemapObservedSSSClimatology(RemapObservedClimatologySubtask): + """ + A subtask for reading and remapping SSS observations + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani + + def get_observation_descriptor(self, fileName): + """ + get a MeshDescriptor for the observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + """ + # Authors + # ------- + # Xylar Asay-Davis + + # create a descriptor of the observation grid using the lat/lon + # coordinates + obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, + latVarName='lat', + lonVarName='lon') + return obsDescriptor + + def build_observational_dataset(self, fileName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Xylar Asay-Davis + + timeStart = datetime.datetime(2011, 8, 1) + timeEnd = datetime.datetime(2014, 12, 31) + + dsObs = xr.open_dataset(fileName) + dsObs = dsObs.rename({'time': 'Time', 'SSS': 'sss'}) + dsObs = dsObs.sel(Time=slice(timeStart, timeEnd)) + dsObs.coords['month'] = dsObs['Time.month'] + dsObs.coords['year'] = dsObs['Time.year'] + + return dsObs +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_sst.html b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_sst.html new file mode 100644 index 000000000..74695ac28 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_sst.html @@ -0,0 +1,355 @@ + + + + + + mpas_analysis.ocean.climatology_map_sst — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.climatology_map_sst

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import xarray as xr
+import datetime
+
+from pyremap import LatLonGridDescriptor
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.io.utility import build_obs_path
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \
+    RemapObservedClimatologySubtask
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+
+
+
+[docs] +class ClimatologyMapSST(AnalysisTask): + """ + An analysis task for comparison of sea surface temperature (sst) against + observations + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani + +
+[docs] + def __init__(self, config, mpasClimatologyTask, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + fieldName = 'sst' + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapSST, self).__init__( + config=config, taskName='climatologyMapSST', + componentName='ocean', + tags=['climatology', 'horizontalMap', fieldName, 'publicObs']) + + mpasFieldName = 'timeMonthly_avg_activeTracers_temperature' + iselValues = {'nVertLevels': 0} + + sectionName = self.taskName + + climStartYear = config.getint(sectionName, 'obsStartYear') + climEndYear = config.getint(sectionName, 'obsEndYear') + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(sectionName)) + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of comparison grids'.format(sectionName)) + + # the variable mpasFieldName will be added to mpasClimatologyTask + # along with the seasons. 
+ remapClimatologySubtask = RemapMpasClimatologySubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName=fieldName, + variableList=[mpasFieldName], + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues) + + if controlConfig is None: + if climStartYear < 1925: + period = 'pre-industrial' + else: + period = 'present-day' + + refTitleLabel = \ + 'Observations (Hadley/OI, {} {:04d}-{:04d})'.format( + period, climStartYear, climEndYear) + + observationsDirectory = build_obs_path( + config, 'ocean', '{}Subdirectory'.format(fieldName)) + + obsFileName = \ + "{}/MODEL.SST.HAD187001-198110.OI198111-201203_" \ + "20180710.nc".format(observationsDirectory) + refFieldName = 'sst' + outFileLabel = 'sstHADOI' + galleryName = 'Observations: Hadley-NOAA-OI' + + remapObservationsSubtask = RemapObservedSSTClimatology( + parentTask=self, seasons=seasons, fileName=obsFileName, + outFilePrefix=refFieldName, + comparisonGridNames=comparisonGridNames) + self.add_subtask(remapObservationsSubtask) + diffTitleLabel = 'Model - Observations' + + else: + remapObservationsSubtask = None + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = None + refTitleLabel = 'Control: {}'.format(controlRunName) + + refFieldName = mpasFieldName + outFileLabel = 'sst' + diffTitleLabel = 'Main - Control' + + for comparisonGridName in comparisonGridNames: + for season in seasons: + # make a new subtask for this season and comparison grid + subtask = PlotClimatologyMapSubtask( + self, season, comparisonGridName, remapClimatologySubtask, + remapObservationsSubtask, controlConfig=controlConfig) + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle='SST', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'$^o$C', + imageCaption='Mean Sea Surface Temperature', + galleryGroup='Sea Surface Temperature', + groupSubtitle=None, + 
groupLink='sst', + galleryName=galleryName) + + self.add_subtask(subtask)
+
+ + + +class RemapObservedSSTClimatology(RemapObservedClimatologySubtask): + """ + A subtask for reading and remapping SST observations + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani + + def get_observation_descriptor(self, fileName): + """ + get a MeshDescriptor for the observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + """ + # Authors + # ------- + # Xylar Asay-Davis + + # create a descriptor of the observation grid using the lat/lon + # coordinates + obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, + latVarName='lat', + lonVarName='lon') + return obsDescriptor + + def build_observational_dataset(self, fileName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Xylar Asay-Davis + + sectionName = self.taskName + climStartYear = self.config.getint(sectionName, 'obsStartYear') + climEndYear = self.config.getint(sectionName, 'obsEndYear') + timeStart = datetime.datetime(year=climStartYear, month=1, day=1) + timeEnd = datetime.datetime(year=climEndYear, month=12, day=31) + + dsObs = xr.open_dataset(fileName) + dsObs = dsObs.rename({'time': 'Time', 'SST': 'sst'}) + dsObs = dsObs.sel(Time=slice(timeStart, timeEnd)) + dsObs.coords['month'] = dsObs['Time.month'] + dsObs.coords['year'] = dsObs['Time.year'] + + return dsObs +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_waves.html b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_waves.html new file mode 100644 index 000000000..bc31a4984 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/climatology_map_waves.html @@ -0,0 +1,677 @@ + + + + + + mpas_analysis.ocean.climatology_map_waves — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.climatology_map_waves

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2020 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2020 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2020 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+from __future__ import absolute_import, division, print_function, \
+    unicode_literals
+
+import xarray as xr
+from pyremap import LatLonGridDescriptor
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.io.utility import build_obs_path
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \
+    RemapObservedClimatologySubtask
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+from mpas_analysis.shared.interpolation.utility import add_periodic_lon
+
+import datetime
+
+
+
+[docs] +class ClimatologyMapWaves(AnalysisTask): # {{{ + """ + An analysis task for the computation of wave climatologies + """ + # Authors + # ------- + # Steven Brus + # Xylar Asay-Davis + +
+[docs] + def __init__(self, config, mpasClimatologyTask, regionMasksTask, + controlConfig): # {{{ + """ + Construct the analysis task. + + Parameters + ---------- + config : ``MpasAnalysisConfigParser`` + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + regionMasksTask : ``ComputeRegionMasks`` + A task for computing region masks + + controlConfig : ``MpasAnalysisConfigParser`` + Configuration options for a control run + """ + # Authors + # ------- + # Steven Brus + # Xylar Asay-Davis + + fields = [{'prefix': 'significantWaveHeight', + 'mpas': 'timeMonthly_avg_significantWaveHeight', + 'units': r'm', + 'titleName': 'Significant Wave Height', + 'era5': 'swh', + 'ss_cci': 'swh_mean'}, + {'prefix': 'peakWavePeriod', + 'mpas': 'timeMonthly_avg_peakWaveFrequency', + 'units': r's', + 'titleName': 'Peak Wave Period', + 'era5': 'pp1d'}, + {'prefix': 'iceFraction', + 'mpas': 'timeMonthly_avg_iceFraction', + 'units': r'', + 'titleName': 'Ice Fraction'}, + ] + + # call the constructor from the base class (AnalysisTask) + super().__init__( + config=config, taskName='climatologyMapWaves', + componentName='ocean', + tags=['climatology', 'horizontalMap', 'waves']) + + sectionName = self.taskName + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError(f'config section {sectionName} ' + 'does not contain valid list of seasons') + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError(f'config section {sectionName} does ' + 'not contain valid list of comparison grids') + + fieldList = config.getexpression(sectionName, 'fieldList') + fieldsRequested = [field for field in fields + if field['prefix'] in fieldList] + + variableListMpas = [field['mpas'] for field in fieldsRequested] + variableListEraObs = [field['era5'] for 
field in fieldsRequested] + variableListSscciObs = [field['ss_cci'] for field in fieldsRequested + if 'ss_cci' in field] + + era5ObsStartYear = config.getint(sectionName, 'era5ObsStartYear') + era5ObsEndYear = config.getint(sectionName, 'era5ObsEndYear') + sscciObsStartYear = config.getint(sectionName, 'sscciObsStartYear') + sscciObsEndYear = config.getint(sectionName, 'sscciObsEndYear') + + # the variableList variables will be added to + # mpasClimatologyTask along with the seasons. + remapClimatologySubtask = RemapMpasWavesClimatology( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName='wave', + variableList=variableListMpas, + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=None) + + if controlConfig is None: + observationsDirectory = build_obs_path( + config, 'ocean', 'era5Subdirectory') + + obsFileName = f'{observationsDirectory}/ERA5_Monthly_1959-2021.nc' + remapEraObservationsSubtask = RemapEra5ObservedWaveClimatology( + parentTask=self, seasons=seasons, fileName=obsFileName, + outFilePrefix='waveERA5', + comparisonGridNames=comparisonGridNames, + variableList=variableListEraObs, + subtaskName='remapEraObservations') + + observationsDirectory = build_obs_path( + config, 'ocean', 'sscciSubdirectory') + + obsFileName = f'{observationsDirectory}/SS_CCI_1991_2018.nc' + remapSscciObservationsSubtask = RemapSscciObservedWaveClimatology( + parentTask=self, seasons=seasons, fileName=obsFileName, + outFilePrefix='waveSSCCI', + comparisonGridNames=comparisonGridNames, + variableList=variableListSscciObs, + subtaskName='remapSscciObservations') + + else: + remapObservationsSubtask = None + + for field in fieldsRequested: + + fieldPrefix = field['prefix'] + upperFieldPrefix = fieldPrefix[0].upper() + fieldPrefix[1:] + configSectionName = f'{self.taskName}{upperFieldPrefix}' + + if (field['prefix'] == 'significantWaveHeight') \ + and (controlConfig is None): + obs_names = ['era5', 'ss_cci'] + else: + obs_names = 
['era5'] + + for obs in obs_names: + + if controlConfig is None: + + refFieldName = field[obs] + diffTitleLabel = None + if obs == 'era5': + obs_type = 'Reanalysis' + remapObservationsSubtask = \ + remapEraObservationsSubtask + refTitleLabel = f'{obs_type} ({obs.upper()})' \ + f'{era5ObsStartYear}-{era5ObsEndYear}' + elif obs == 'ss_cci': + obs_type = 'Observations' + remapObservationsSubtask = \ + remapSscciObservationsSubtask + refTitleLabel = f'{obs_type} ({obs.upper()})' \ + f'{sscciObsStartYear}-{sscciObsEndYear}' + + galleryName = f"{field['titleName']} " \ + f"({obs.upper()} {obs_type})" + + else: + controlRunName = controlConfig.get('runs', 'mainRunName') + refTitleLabel = f'{field["titleName"]}' \ + f'(Control: {controlRunName})' + diffTitleLabel = 'Main - Control' + refFieldName = field['prefix'] + galleryName = field['titleName'] + + outFileLabel = f'{fieldPrefix}Wave{obs}' + + for comparisonGridName in comparisonGridNames: + for season in seasons: + + subtaskName = f'plot{upperFieldPrefix}_{season}_' \ + f'{comparisonGridName}_{obs}' + + # make a new subtask for this + # season and comparison grid + subtask = PlotClimatologyMapSubtask( + parentTask=self, + season=season, + comparisonGridName=comparisonGridName, + remapMpasClimatologySubtask=remapClimatologySubtask, + remapObsClimatologySubtask=remapObservationsSubtask, + controlConfig=controlConfig, + subtaskName=subtaskName) + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle=field['titleName'], + mpasFieldName=field['prefix'], + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=field['units'], + imageCaption=field['titleName'], + galleryGroup='Waves', + groupSubtitle=None, + groupLink='waves', + galleryName=galleryName, + configSectionName=configSectionName) + + self.add_subtask(subtask)
+ + # }}} + + def setup_and_check(self): # {{{ + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + # Authors + # ------- + # Steven Brus + # Xylar Asay-Davis + + # first, call setup_and_check from the base class + # (AnalysisTask), which will perform some common setup + super().setup_and_check() + + if not self.namelist.getbool('config_use_active_wave'): + + raise ValueError('*** climatologyMapWaves requires ' + 'config_use_active_wave\n' + ' to be .true. Otherwise, no ' + 'wave data is available \n' + ' for plotting.')
+ + # }}} + # }}} + + +class RemapMpasWavesClimatology(RemapMpasClimatologySubtask): # {{{ + """ + A subtask for reading and remapping wave climatology + """ + # Authors + # ------- + # Steven Brus, Xylar Asay-Davis + + def customize_masked_climatology(self, climatology, season): # {{{ + """ + Convert peak freqency to peak period + + The descriptor for the observation grid + Parameters + ---------- + climatology : ``xarray.Dataset`` object + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : ``xarray.Dataset`` object + the modified climatology data set + """ + # Authors + # ------- + # Steven Brus + # Xylar Asay-Davis + + if 'timeMonthly_avg_peakWaveFrequency' in climatology: + climatology['peakWavePeriod'] = \ + 1.0/climatology['timeMonthly_avg_peakWaveFrequency'] + + if 'timeMonthly_avg_significantWaveHeight' in climatology: + climatology['significantWaveHeight'] = \ + climatology['timeMonthly_avg_significantWaveHeight'] + + return climatology + + # }}} + # }}} + + +class RemapEra5ObservedWaveClimatology(RemapObservedClimatologySubtask): # {{{ + """ + A subtask for reading and remapping ERA5 wave observations + """ + # Authors + # ------- + # Steven Brus, Xylar Asay-Davis + + def __init__(self, parentTask, seasons, fileName, outFilePrefix, + variableList, subtaskName, + comparisonGridNames=['latlon']): + # {{{ + ''' + Construct one analysis subtask for each plot (i.e. each season and + comparison grid) and a subtask for computing climatologies. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent (main) task for this subtask + + seasons : list of str + A list of seasons (keys in ``constants.monthDictionary``) over + which the climatology should be computed. 
+ + fileName : str + The name of the observation file + + outFilePrefix : str + The prefix in front of output files and mapping files, typically + the name of the field being remapped + + variableList : list + List of observational variables to remap + + comparisonGridNames : list of str, + optional + The name(s) of the comparison grid to use for remapping. + + subtaskName : str, optional + The name of the subtask + ''' + # Authors + # ------- + # Steven Brus + + self.variableList = variableList + + # call the constructor from the base class (AnalysisTask) + super().__init__(parentTask, seasons, fileName, outFilePrefix, + comparisonGridNames=comparisonGridNames, + subtaskName=subtaskName) + # }}} + + def get_observation_descriptor(self, fileName): # {{{ + ''' + get a MeshDescriptor for the observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + ''' + # Authors + # ------- + # Steven Brus, Xylar Asay-Davis + + # create a descriptor of the observation grid using the lat/lon + # coordinates + dsObs = self.build_observational_dataset(fileName) + obsDescriptor = LatLonGridDescriptor.read(ds=dsObs, + latVarName='latitude', + lonVarName='longitude') + return obsDescriptor # }}} + + def build_observational_dataset(self, fileName): # {{{ + ''' + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + ''' + # Authors + # ------- + # Steven Brus, Xylar Asay-Davis + + sectionName = self.taskName + climStartYear = self.config.getint(sectionName, 'era5ObsStartYear') + climEndYear = self.config.getint(sectionName, 'era5ObsEndYear') + timeStart = datetime.datetime(year=climStartYear, month=1, day=1) + timeEnd = 
datetime.datetime(year=climEndYear, month=12, day=31) + + dsObs = xr.open_dataset(fileName) + dsObs = dsObs.rename({'time': 'Time'}) + dsObs = dsObs.sel(Time=slice(timeStart, timeEnd)) + dsObs.coords['month'] = dsObs['Time.month'] + dsObs.coords['year'] = dsObs['Time.year'] + + dsObs = dsObs[self.variableList] + + degrees = 'degree' in dsObs.longitude.units + dsObs = add_periodic_lon(ds=dsObs, lonDim='longitude', degrees=degrees) + + return dsObs # }}} + + # }}} + + +class RemapSscciObservedWaveClimatology(RemapObservedClimatologySubtask): # {{{ + """ + A subtask for reading and remapping SS_CCI wave observations + """ + # Authors + # ------- + # Steven Brus, Xylar Asay-Davis + + def __init__(self, parentTask, seasons, fileName, outFilePrefix, + variableList, subtaskName, + comparisonGridNames=['latlon']): + # {{{ + ''' + Construct one analysis subtask for each plot (i.e. each season and + comparison grid) and a subtask for computing climatologies. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent (main) task for this subtask + + seasons : list of str + A list of seasons (keys in ``constants.monthDictionary``) over + which the climatology should be computed. + + fileName : str + The name of the observation file + + outFilePrefix : str + The prefix in front of output files and mapping files, typically + the name of the field being remapped + + variableList : list + List of observational variables to remap + + comparisonGridNames : list of str, + optional + The name(s) of the comparison grid to use for remapping. 
+ + subtaskName : str, optional + The name of the subtask + ''' + # Authors + # ------- + # Steven Brus, Xylar Asay-Davis + + self.variableList = variableList + + # call the constructor from the base class (AnalysisTask) + super().__init__(parentTask, seasons, fileName, outFilePrefix, + comparisonGridNames=comparisonGridNames, + subtaskName=subtaskName) + # }}} + + def get_observation_descriptor(self, fileName): # {{{ + ''' + get a MeshDescriptor for the observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + ''' + # Authors + # ------- + # Steven Brus, Xylar Asay-Davis + + # create a descriptor of the observation grid using the lat/lon + # coordinates + dsObs = self.build_observational_dataset(fileName) + obsDescriptor = LatLonGridDescriptor.read(ds=dsObs, + latVarName='lat', + lonVarName='lon') + return obsDescriptor # }}} + + def build_observational_dataset(self, fileName): # {{{ + ''' + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + ''' + # Authors + # ------- + # Steven Brus, Xylar Asay-Davis + + sectionName = self.taskName + climStartYear = self.config.getint(sectionName, 'sscciObsStartYear') + climEndYear = self.config.getint(sectionName, 'sscciObsEndYear') + timeStart = datetime.datetime(year=climStartYear, month=1, day=1) + timeEnd = datetime.datetime(year=climEndYear, month=12, day=31) + + dsObs = xr.open_dataset(fileName) + dsObs = dsObs.rename({'time': 'Time'}) + dsObs = dsObs.sel(Time=slice(timeStart, timeEnd)) + dsObs.coords['month'] = dsObs['Time.month'] + dsObs.coords['year'] = dsObs['Time.year'] + + dsObs = dsObs[self.variableList] + + degrees = 'degree' in dsObs.lon.units + dsObs = 
add_periodic_lon(ds=dsObs, lonDim='lon', degrees=degrees) + + return dsObs # }}} + + # }}} + +# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/compute_anomaly_subtask.html b/1.11.0rc1/_modules/mpas_analysis/ocean/compute_anomaly_subtask.html new file mode 100644 index 000000000..68a605c5c --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/compute_anomaly_subtask.html @@ -0,0 +1,330 @@ + + + + + + mpas_analysis.ocean.compute_anomaly_subtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.compute_anomaly_subtask

+# -*- coding: utf-8 -*-
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+
+import os
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.io import write_netcdf_with_fill
+
+from mpas_analysis.shared.timekeeping.utility import \
+    get_simulation_start_time, string_to_datetime
+
+from mpas_analysis.shared.timekeeping.MpasRelativeDelta import \
+    MpasRelativeDelta
+
+from mpas_analysis.shared.io.utility import build_config_full_path
+
+from mpas_analysis.shared.time_series import \
+    compute_moving_avg_anomaly_from_start
+
+
+
+[docs] +class ComputeAnomalySubtask(AnalysisTask): + """ + A subtask for computing anomalies of moving averages and writing them out. + + Attributes + ---------- + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + outFileName : str + The file name (usually without full path) where the resulting + data set should be written + + variableList : list of str + Variables to be included in the data set + + movingAveragePoints : int + The number of points (months) used in the moving average used to + smooth the data set + + alter_dataset : function + A function that takes an ``xarray.Dataset`` and returns an + ``xarray.Dataset`` for manipulating the data set (e.g. adding a new + variable computed from others). This operation is performed before + computing moving averages and anomalies, so that these operations are + also performed on any new variables added to the data set. + """ + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, parentTask, mpasTimeSeriesTask, outFileName, + variableList, movingAveragePoints, + subtaskName='computeAnomaly', alter_dataset=None): + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task of which this is a subtask + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + outFileName : str + The file name (usually without full path) where the resulting + data set should be written + + variableList : list of str + Variables to be included in the data set + + movingAveragePoints : int + The number of points (months) used in the moving average used to + smooth the data set + + subtaskName : str, optional + The name of the subtask + + alter_dataset : function + A function that takes an ``xarray.Dataset`` and returns an + ``xarray.Dataset`` for manipulating the data set (e.g. adding a new + variable computed from others). This operation is performed before + computing moving averages and anomalies, so that these operations + are also performed on any new variables added to the data set. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(ComputeAnomalySubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName='ocean', + tags=parentTask.tags, + subtaskName=subtaskName) + + self.mpasTimeSeriesTask = mpasTimeSeriesTask + + self.run_after(mpasTimeSeriesTask) + + self.outFileName = outFileName + self.variableList = variableList + self.movingAveragePoints = movingAveragePoints + + self.alter_dataset = alter_dataset
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(ComputeAnomalySubtask, self).setup_and_check() + + startDate = self.config.get('timeSeries', 'startDate') + endDate = self.config.get('timeSeries', 'endDate') + + delta = MpasRelativeDelta(string_to_datetime(endDate), + string_to_datetime(startDate), + calendar=self.calendar) + + months = delta.months + 12*delta.years + + if months <= self.movingAveragePoints: + raise ValueError('Cannot meaninfully perform a rolling mean ' + 'because the time series is too short.') + + self.mpasTimeSeriesTask.add_variables(variableList=self.variableList) + + self.inputFile = self.mpasTimeSeriesTask.outputFile + + def run_task(self): + """ + Performs analysis of ocean heat content (OHC) from time-series output. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani, Greg Streletz + + self.logger.info("\nComputing anomalies...") + + config = self.config + startDate = config.get('timeSeries', 'startDate') + endDate = config.get('timeSeries', 'endDate') + + if config.has_option('timeSeries', 'anomalyRefYear'): + anomalyYear = config.getint('timeSeries', 'anomalyRefYear') + anomalyRefDate = '{:04d}-01-01_00:00:00'.format(anomalyYear) + anomalyEndDate = '{:04d}-12-31_23:59:59'.format(anomalyYear) + else: + anomalyRefDate = get_simulation_start_time(self.runStreams) + anomalyYear = int(anomalyRefDate[0:4]) + anomalyEndDate = '{:04d}-12-31_23:59:59'.format(anomalyYear) + + ds = compute_moving_avg_anomaly_from_start( + timeSeriesFileName=self.inputFile, + variableList=self.variableList, + anomalyStartTime=anomalyRefDate, + anomalyEndTime=anomalyEndDate, + startDate=startDate, + endDate=endDate, + calendar=self.calendar, + movingAveragePoints=self.movingAveragePoints, + alter_dataset=self.alter_dataset) + + outFileName = self.outFileName + if not os.path.isabs(outFileName): + baseDirectory = build_config_full_path( + config, 'output', 'timeSeriesSubdirectory') + + outFileName = '{}/{}'.format(baseDirectory, + outFileName) + + write_netcdf_with_fill(ds, outFileName)
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/conservation.html b/1.11.0rc1/_modules/mpas_analysis/ocean/conservation.html new file mode 100644 index 000000000..08b0b271b --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/conservation.html @@ -0,0 +1,761 @@ + + + + + + mpas_analysis.ocean.conservation — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.conservation

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+# Author
+# -------
+# Carolyn Begeman
+
+from distutils.spawn import find_executable
+import numpy as np
+import matplotlib.pyplot as plt
+import os
+import subprocess
+import xarray as xr
+
+from mpas_analysis.shared.analysis_task import AnalysisTask
+from mpas_analysis.shared.constants import constants
+from mpas_analysis.shared.html import write_image_xml
+from mpas_analysis.shared.io import open_mpas_dataset
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories, get_files_year_month, decode_strings
+from mpas_analysis.shared.plot import timeseries_analysis_plot, savefig
+from mpas_analysis.shared.timekeeping.utility import date_to_days, \
+    days_to_datetime
+
+
+
+[docs] +class ConservationTask(AnalysisTask): + """ + This task generates time series plots from output from the conservation + analysis member. A number of different plot types are supported, as indicated + in the `plotTypes` config option in the `conservation` section. + + Attributes + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + controlConfig : mpas_tools.config.MpasConfigParser + Contains configuration options for a control run, if provided + + outputFile : str + The path to the output file produced by this analysis + + runDirectory : str + The path to the restart files from the main simulation being analyzed + + historyDirectory : str + The path to the history files from the main simulation being analyzed + + startYear : int + The year to start the analysis + + endYear : int + The year to end the analysis + + inputFiles : list of str + The paths to all conservation AM files + + mainRunName : str + The name of the main run from the config file + + plotTypes : list of str + The plot types requested in the config file + + masterVariableList : dict of key-[list of str] pairs + Keys are the supported plot types. Entries are lists of the variables + that are needed to produce that plot type. + + derivedVariableList : dict of key-[list of str] pairs + Keys are the derived variables. Entries are lists of variables in the + AM output that are needed to derive that variable. + + xmlFileNames : list of str + File names for xml output with full path + + filePrefixes : list of str + File prefixes for xml files + + variableList : dict of key-[list of str] pairs + Keys are the requested plot types. Entries are lists of the variables + in the AM output that are needed to produce that plot type. + """ + + # Authors + # ------- + # Carolyn Begeman + +
+[docs] + def __init__(self, config, controlConfig): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + """ + # Authors + # ------- + # Carolyn Begeman + + super(ConservationTask, self).__init__( + config=config, + taskName='oceanConservation', + componentName='ocean', + tags=['timeSeries', 'conservation']) + + self.controlConfig = controlConfig
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + Also, adds attributes to the task that will be needed later. + + Raises + ------ + ValueError: if plot type is not supported, conservation analysis member + is inactive or input files are missing + """ + # Authors + # ------- + # Carolyn Begeman + + super(ConservationTask, self).setup_and_check() + + # Check that the conservation analysis member is active + self.check_analysis_enabled( + analysisOptionName='config_am_conservationcheck_enable', + raiseException=True) + + # Specify where to put analysis task output + config = self.config + baseDirectory = build_config_full_path( + config, 'output', 'conservationSubdirectory') + make_directories(baseDirectory) + self.outputFile = f'{baseDirectory}/{self.fullTaskName}.nc' + + # get a list of conservationCheck output files from the streams file, + # reading only those that are between the start and end dates + + # the run directory contains the restart files + self.runDirectory = build_config_full_path(self.config, 'input', + 'runSubdirectory') + # if the history directory exists, use it; if not, fall back on + # runDirectory + self.historyDirectory = build_config_full_path( + self.config, 'input', + f'{self.componentName}HistorySubdirectory', + defaultPath=self.runDirectory) + + self.startYear = self.config.getint('timeSeries', 'startYear') + self.endYear = self.config.getint('timeSeries', 'endYear') + self.inputFiles = sorted(self.historyStreams.readpath( + 'conservationCheckOutput', + startDate=f'{self.startYear:04d}-01-01_00:00:00', + endDate=f'{self.endYear:04d}-01-01_00:00:00', + calendar=self.calendar)) + + if len(self.inputFiles) == 0: + raise IOError(f'No files were found matching {self.inputFiles}') + + with xr.open_dataset(self.inputFiles[0]) as ds: + self.allVariables = list(ds.data_vars.keys()) + + self.mainRunName = self.config.get('runs', 'mainRunName') + + self.plotTypes = 
self.config.getexpression('timeSeriesConservation', 'plotTypes') + + self.masterVariableList = {'absolute_energy_error': ['absoluteEnergyError'], + 'total_energy_flux': ['netEnergyFlux'], + 'absolute_salt_error': ['absoluteSaltError'], + 'ice_salt_flux': ['netSaltFlux'], + 'total_mass_flux': ['netMassFlux'], + 'total_mass_change': ['netMassChange'], + 'land_ice_mass_change': ['landIceMassChange'], + 'land_ice_ssh_change': ['landIceSshChange'], + 'land_ice_mass_flux': ['landIceMassFlux'], + 'land_ice_mass_flux_components': ['accumulatedIcebergFlux', + 'accumulatedLandIceFlux', + 'accumulatedRemovedRiverRunoffFlux', + 'accumulatedRemovedIceRunoffFlux']} + + # for each derived variable, which source variables are needed + self.derivedVariableList = {'netMassChange': ['massChange'], + 'landIceMassFlux': ['accumulatedIcebergFlux', + 'accumulatedLandIceFlux', + 'accumulatedRemovedRiverRunoffFlux', + 'accumulatedRemovedIceRunoffFlux'], + 'landIceSshChange': ['accumulatedIcebergFlux', + 'accumulatedLandIceFlux', + 'accumulatedRemovedRiverRunoffFlux', + 'accumulatedRemovedIceRunoffFlux'], + 'landIceMassChange': ['accumulatedIcebergFlux', + 'accumulatedLandIceFlux', + 'accumulatedRemovedRiverRunoffFlux', + 'accumulatedRemovedIceRunoffFlux']} + + # Determine the xml files for each plot and the variables each plot will use + self.xmlFileNames = [] + self.filePrefixes = {} + self.variableList = {} + for plot_type in self.plotTypes: + if plot_type not in self.masterVariableList.keys(): + raise ValueError(f'plot type {plot_type} not supported') + filePrefix = f'conservation_{self.mainRunName}_{plot_type}_' \ + f'years{self.startYear:04d}-{self.endYear:04d}' + self.xmlFileNames.append(f'{self.plotsDirectory}/{filePrefix}.xml') + self.filePrefixes[plot_type] = filePrefix + self.variableList[plot_type] = self._add_variables(self.masterVariableList[plot_type]) + + def run_task(self): + """ + Create an output netCDF file that has all of the requested conservation AM variables + in the 
requested time window. Then generate all requested conservation plot types. + """ + # Authors + # ------- + # Carolyn Begeman + + all_plots_variable_list = [] + for plot_type in self.plotTypes: + for varname in self.variableList[plot_type]: + all_plots_variable_list.append(varname) + self._compute_time_series_with_ncrcat(all_plots_variable_list) + for plot_type in self.plotTypes: + self._make_plot(plot_type) + + def _add_variables(self, target_variable_list): + """ + Add one or more variables to extract as a time series. + + Parameters + ---------- + variableList : list of str + A list of variable names in ``conservationCheck`` to be + included in the time series + + Raises + ------ + ValueError + if this function is called before this task has been set up (so + the list of available variables has not yet been set) or if one + or more of the requested variables is not available in the + ``conservationCheck`` output. + """ + # Authors + # ------- + # Xylar Asay-Davis + + variable_list = [] + if self.allVariables is None: + raise ValueError('add_variables() can only be called after ' + 'setup_and_check() in ConservationTask.\n' + 'Presumably tasks were added in the wrong order ' + 'or add_variables() is being called in the wrong ' + 'place.') + + for variable in target_variable_list: + if variable not in self.allVariables and \ + variable not in self.derivedVariableList.keys(): + raise ValueError( + f'{variable} is not available in conservationCheck' + 'output:\n{self.allVariables}') + + if variable in self.allVariables and variable not in variable_list: + variable_list.append(variable) + # If it's a derived variable, add all of the variables it depends on + if variable in self.derivedVariableList.keys() and \ + variable not in variable_list: + for var in self.derivedVariableList[variable]: + variable_list.append(var) + + return variable_list + + def _make_plot(self, plot_type): + """ + Generate time series plots from conservation AM output. 
+ + Parameters + ---------- + plot_type: str + The type of plot to generate from conservationCheck variables + """ + config = self.config + filePrefix = self.filePrefixes[plot_type] + outFileName = f'{self.plotsDirectory}/{filePrefix}.png' + + titles = {} + titles['total_energy_flux'] = 'Total energy flux' + titles['absolute_energy_error'] = 'Energy error' + titles['ice_salt_flux'] = 'Salt flux related to land ice and sea ice' + titles['absolute_salt_error'] = 'Salt conservation error' + titles['total_mass_flux'] = 'Total mass flux' + titles['total_mass_change'] = 'Total mass anomaly' + titles['land_ice_mass_flux'] = 'Mass flux due to land ice' + titles['land_ice_mass_change'] = 'Mass anomaly due to land ice fluxes' + titles['land_ice_ssh_change'] = 'SSH anomaly due to land ice fluxes' + titles['land_ice_mass_flux_components'] = 'Mass fluxes from land ice' + + y_labels = {} + y_labels['total_energy_flux'] = 'Energy flux (W)' + y_labels['absolute_energy_error'] = 'Energy (J)' + y_labels['ice_salt_flux'] = 'Salt flux (Gt/yr)' + y_labels['absolute_salt_error'] = 'Salt (Gt)' + y_labels['total_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['total_mass_change'] = 'Mass (Gt)' + y_labels['land_ice_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['land_ice_mass_change'] = 'Mass (Gt)' + y_labels['land_ice_ssh_change'] = 'SSH anomaly (mm)' + y_labels['land_ice_mass_flux_components'] = 'Mass flux (Gt/yr)' + + captions = {} + captions['total_energy_flux'] = 'Total energy flux' + captions['absolute_energy_error'] = 'Absolute energy conservation error' + captions['ice_salt_flux'] = 'Salt flux related to land ice and sea ice ' \ + '(sea ice salinity flux, sea ice frazil flux, and land ice frazil flux)' + captions['absolute_salt_error'] = 'Absolute salt conservation error' + captions['total_mass_flux'] = 'Total mass flux' + captions['total_mass_change'] = 'Total mass anomaly' + captions['land_ice_mass_flux'] = 'Mass flux due to land ice' + captions['land_ice_mass_change'] = 'Mass anomaly 
due to land ice fluxes' + captions['land_ice_ssh_change'] = 'SSH anomaly due to land ice fluxes. Assumes a constant ocean area.' + captions['land_ice_mass_flux_components'] = 'Mass flux components from land ice' + + self.logger.info(f' Open conservation file {self.outputFile}...') + ds = open_mpas_dataset(fileName=self.outputFile, + calendar=self.calendar, + variableList=self.variableList[plot_type], + timeVariableNames='xtime', + startDate=f'{self.startYear:04d}-01-01_00:00:00', + endDate=f'{self.endYear:04d}-01-01_00:00:00') + + if self.controlConfig is not None: + baseDirectory = build_config_full_path( + self.controlConfig, 'output', 'timeSeriesSubdirectory') + + controlFileName = f'{baseDirectory}/{self.fullTaskName}.nc' + self.logger.info(' Load in conservation for a control run ' + f'{controlFileName}...') + ds_ref = open_mpas_dataset(fileName=controlFileName, + calendar=self.calendar, + variableList=self.variableList[plot_type], + timeVariableNames='xtime') + controlEndYear = self.controlConfig.getint('timeSeries', 'endYear') + if self.startYear <= controlEndYear: + timeStart = date_to_days(year=self.startYear, month=1, day=1, + calendar=self.calendar) + timeEnd = date_to_days(year=self.endYear, month=12, day=31, + calendar=self.calendar) + ds_ref_slice = \ + ds_ref.sel(Time=slice(timeStart, timeEnd)) + else: + self.logger.warning('Control time series ends before the ' + 'timeSeries startYear and will not be ' + 'plotted.') + self.controlConfig = None + + # make the plot + self.logger.info(' Make conservation plots...') + xLabel = 'Time (years)' + title = titles[plot_type] + yLabel = y_labels[plot_type] + lineStylesBase = ['-', '--', '-.', ':'] + + # gather all the variables for this plot type + fields = [] + legendText = [] + lineColors = [] + lineStyles = [] + for index, varname in enumerate(self.masterVariableList[plot_type]): + variable = self._get_variable(ds, varname) + fields.append(variable) + legend_text = '' + if self.controlConfig is not None: + 
legend_text = self.mainRunName + if len(self.masterVariableList[plot_type]) > 1: + if len(legend_text) > 0: + legend_text = f'{legend_text}, ' + legend_text = f"{legend_text}{varname.replace('accumulated', '').replace('Flux', '')}" + legendText.append(legend_text) + lineColors.append(config.get('timeSeries', 'mainColor')) + lineStyles.append(lineStylesBase[index]) + if self.controlConfig is not None: + variable = self._get_variable(ds_ref, varname) + fields.append(variable) + legend_text = self.controlConfig.get('runs', 'mainRunName') + if len(self.masterVariableList[plot_type]) > 1: + legend_text = f"{legend_text}, {varname.replace('accumulated', '').replace('Flux', '')}" + legendText.append(legend_text) + lineColors.append(config.get('timeSeries', 'controlColor')) + lineStyles.append(lineStylesBase[index]) + + lineWidths = [3 for i in fields] + if config.has_option('timeSeries', 'movingAveragePoints'): + movingAveragePoints = config.getint('timeSeries', + 'movingAveragePoints') + else: + movingAveragePoints = None + + if config.has_option('timeSeries', 'firstYearXTicks'): + firstYearXTicks = config.getint('timeSeries', + 'firstYearXTicks') + else: + firstYearXTicks = None + + if config.has_option('timeSeries', 'yearStrideXTicks'): + yearStrideXTicks = config.getint('timeSeries', + 'yearStrideXTicks') + else: + yearStrideXTicks = None + + timeseries_analysis_plot(config, fields, calendar=self.calendar, + title=title, xlabel=xLabel, ylabel=yLabel, + movingAveragePoints=movingAveragePoints, + lineColors=lineColors, + lineStyles=lineStyles[:len(fields)], + lineWidths=lineWidths, + legendText=legendText, + firstYearXTicks=firstYearXTicks, + yearStrideXTicks=yearStrideXTicks) + + # save the plot to the output file + plt.savefig(outFileName) + + caption = captions[plot_type] + write_image_xml( + config=self.config, + filePrefix=filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup='Time Series', + groupLink='timeseries', + 
gallery='Conservation', + thumbnailDescription=title, + imageDescription=caption, + imageCaption=caption) + + def _get_variable(self, ds, varname, mks=False): + if varname not in self.derivedVariableList: + variable = ds[varname] + else: + # Here we keep the units mks + if varname == 'netMassChange': + variable = self._get_variable(ds, 'massChange', mks=True) + # mass_flux = self._get_variable(ds, 'netMassFlux') + # # Assume that the frequency of output is monthly + # dt = constants.sec_per_month + # # Convert from kg/s to kg + # derived_variable = mass_flux.cumsum(axis=0) * dt + elif varname == 'landIceMassChange': + land_ice_mass_flux = self._get_variable(ds, 'landIceMassFlux', mks=True) + # Assume that the frequency of output is monthly + dt = constants.sec_per_month + # Convert from kg/s to kg/month + land_ice_mass_flux = land_ice_mass_flux * dt + # Convert from kg/month to kg + variable = np.cumsum(land_ice_mass_flux) + + elif varname == 'landIceMassFlux': + variable = self._get_variable(ds, 'accumulatedIcebergFlux', mks=True) + \ + self._get_variable(ds, 'accumulatedLandIceFlux', mks=True) + \ + self._get_variable(ds, 'accumulatedRemovedRiverRunoffFlux', mks=True) + \ + self._get_variable(ds, 'accumulatedRemovedIceRunoffFlux', mks=True) + + elif varname == 'landIceSshChange': + ts_files = sorted(self.historyStreams.readpath( + 'timeSeriesStatsMonthlyOutput', + startDate=f'{self.startYear:04d}-01-01_00:00:00', + endDate=f'{self.endYear:04d}-01-01_00:00:00', + calendar=self.calendar)) + # Note that here we assume that the area of the ocean is constant in time + # to save computational expense because most configurations do not allow + # the area of the ocean to change + ts_file = ts_files[0] + if not os.path.exists(ts_file): + raise ValueError(f'Could not find timeMonthlyStats file {ts_file}') + var = 'timeMonthly_avg_areaCellGlobal' + ds_ts = open_mpas_dataset(fileName=ts_file, + calendar=self.calendar, + variableList=[var]) + A = ds_ts[var].mean() + 
land_ice_mass_change = self._get_variable(ds, 'landIceMassChange', mks=True) + rho = self.namelist.getfloat('config_density0') + # Convert from to kg to m + variable = land_ice_mass_change / (rho * A) + + else: + raise ValueError(f'Attempted to derive non-supported variable {varname}') + + removed_vars = ['accumulatedRemovedRiverRunoffFlux', + 'accumulatedRemovedIceRunoffFlux'] + if varname in removed_vars: + variable = -variable + + if not mks: + # Here we do all the unit conversion from mks into whatever we want + mass_vars = ['initialMass', 'finalMass', 'absoluteMassError', + 'relativeMassError', 'massChange', 'landIceMassChange'] + salt_vars = ['initialSalt', 'finalSalt', 'absoluteSaltError', + 'relativeSaltError'] + mass_flux_vars = ['netMassFlux', 'landIceMassFlux'] + salt_flux_vars = ['netSaltFlux'] + ssh_vars = ['landIceSshChange', 'sshChange'] + if (varname in mass_vars) or (varname in salt_vars): + # Convert from kg to Gt + variable = variable * 1e-12 + if (varname in mass_flux_vars) or (varname in salt_flux_vars): + # Convert from kg/s to Gt/yr + variable = variable * 1e-12 * constants.sec_per_year + if varname in ssh_vars: + # Convert from m to mm + variable = variable * 1e3 + + return variable + + def _compute_time_series_with_ncrcat(self, variable_list): + + """ + Uses ncrcat to extact time series from conservationCheckOutput files + + Raises + ------ + OSError + If ``ncrcat`` is not in the system path. + """ + + if find_executable('ncrcat') is None: + raise OSError('ncrcat not found. 
Make sure the latest nco ' + 'package is installed: \n' + 'conda install nco\n' + 'Note: this presumes use of the conda-forge ' + 'channel.') + + inputFiles = self.inputFiles + append = False + if os.path.exists(self.outputFile): + # make sure all the necessary variables are also present + with xr.open_dataset(self.outputFile) as ds: + if ds.sizes['Time'] == 0: + updateSubset = False + else: + updateSubset = True + for variableName in variable_list: + if variableName not in ds.variables: + updateSubset = False + break + + if updateSubset: + # add only input files with times that aren't already in + # the output file + + append = True + + fileNames = sorted(self.inputFiles) + inYears, inMonths = get_files_year_month( + fileNames, self.historyStreams, + 'conservationCheckOutput') + + inYears = np.array(inYears) + inMonths = np.array(inMonths) + totalMonths = 12 * inYears + inMonths + + dates = decode_strings(ds.xtime) + + lastDate = dates[-1] + + lastYear = int(lastDate[0:4]) + lastMonth = int(lastDate[5:7]) + lastTotalMonths = 12 * lastYear + lastMonth + + inputFiles = [] + for index, inputFile in enumerate(fileNames): + if totalMonths[index] > lastTotalMonths: + inputFiles.append(inputFile) + + if len(inputFiles) == 0: + # nothing to do + return + else: + # there is an output file but it has the wrong variables + # so we need ot delete it. + self.logger.warning('Warning: deleting file {self.outputFile}' + ' because it is empty or some variables' + ' were missing') + os.remove(self.outputFile) + + variableList = variable_list + ['xtime'] + + args = ['ncrcat', '-4', '--no_tmp_fl', + '-v', ','.join(variableList)] + + if append: + args.append('--record_append') + + printCommand = '{} {} ... 
{} {}'.format(' '.join(args), inputFiles[0], + inputFiles[-1], + self.outputFile) + args.extend(inputFiles) + args.append(self.outputFile) + + self.logger.info(f'running: {printCommand}') + for handler in self.logger.handlers: + handler.flush() + + process = subprocess.Popen(args, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + + if stdout: + stdout = stdout.decode('utf-8') + for line in stdout.split('\n'): + self.logger.info(line) + if stderr: + stderr = stderr.decode('utf-8') + for line in stderr.split('\n'): + self.logger.error(line) + + if process.returncode != 0: + raise subprocess.CalledProcessError(process.returncode, + ' '.join(args))
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/histogram.html b/1.11.0rc1/_modules/mpas_analysis/ocean/histogram.html new file mode 100644 index 000000000..4bb81a6cb --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/histogram.html @@ -0,0 +1,733 @@ + + + + + + mpas_analysis.ocean.histogram — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.histogram

+# -*- coding: utf-8 -*-
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+import os
+import xarray
+import numpy
+import matplotlib.pyplot as plt
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf_with_fill
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    build_obs_path, make_directories, decode_strings
+from mpas_analysis.shared.climatology import compute_climatology, \
+    get_unmasked_mpas_climatology_file_name
+
+from mpas_analysis.shared.constants import constants
+from mpas_analysis.shared.plot import histogram_analysis_plot, savefig
+from mpas_analysis.shared.html import write_image_xml
+
+
+
+[docs] +class OceanHistogram(AnalysisTask): + """ + Plots a histogram of a 2-d ocean variable. + + """ + +
+[docs] + def __init__(self, config, mpasClimatologyTask, regionMasksTask, + controlConfig=None): + + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + regionMasksTask : ``ComputeRegionMasks`` + A task for computing region masks + + controlConfig : mpas_tools.config.MpasConfigParser + Configuration options for a control run (if any) + """ + + # first, call the constructor from the base class (AnalysisTask) + super().__init__( + config=config, + taskName='oceanHistogram', + componentName='ocean', + tags=['climatology', 'regions', 'histogram', 'publicObs']) + + self.run_after(mpasClimatologyTask) + self.mpasClimatologyTask = mpasClimatologyTask + + self.controlConfig = controlConfig + mainRunName = config.get('runs', 'mainRunName') + + self.regionGroups = config.getexpression(self.taskName, 'regionGroups') + self.regionNames = config.getexpression(self.taskName, 'regionNames') + self.seasons = config.getexpression(self.taskName, 'seasons') + self.variableList = config.getexpression(self.taskName, 'variableList') + if config.has_option(self.taskName, 'weightList'): + self.weightList = config.getexpression(self.taskName, 'weightList') + if not self.weightList: + self.weightList = None + elif len(self.weightList) != len(self.variableList): + raise ValueError('Histogram weightList is not the same ' + 'length as variableList') + else: + self.weightList = None + + baseDirectory = build_config_full_path( + config, 'output', 'histogramSubdirectory') + if not os.path.exists(baseDirectory): + make_directories(baseDirectory) + + self.obsList = config.getexpression(self.taskName, 'obsList') + obsDicts = { + 'AVISO': { + 'suffix': 'AVISO', + 'gridName': 'Global_1.0x1.0degree', + 'gridFileName': 'SSH/zos_AVISO_L4_199210-201012_20180710.nc', + 'lonVar': 'lon', + 'latVar': 'lat', + 
'sshVar': 'zos', + 'pressureAdjustedSSHVar': 'zos'}} + + for regionGroup in self.regionGroups: + groupObsDicts = {} + mpasMasksSubtask = regionMasksTask.add_mask_subtask( + regionGroup=regionGroup) + regionNames = mpasMasksSubtask.expand_region_names( + self.regionNames) + + regionGroupSuffix = regionGroup.replace(' ', '_') + filePrefix = f'histogram_{regionGroupSuffix}' + + # Add mask subtasks for observations and prep groupObsDicts + # groupObsDicts is a subsetted version of localObsDicts with an + # additional attribute for the maskTask + for obsName in self.obsList: + localObsDict = dict(obsDicts[obsName]) + obsFileName = build_obs_path( + config, component=self.componentName, + relativePath=localObsDict['gridFileName']) + obsMasksSubtask = regionMasksTask.add_mask_subtask( + regionGroup, obsFileName=obsFileName, + lonVar=localObsDict['lonVar'], + latVar=localObsDict['latVar'], + meshName=localObsDict['gridName']) + localObsDict['maskTask'] = obsMasksSubtask + groupObsDicts[obsName] = localObsDict + + for regionName in regionNames: + sectionName = None + + # Compute weights for histogram + if self.weightList is not None: + computeWeightsSubtask = ComputeHistogramWeightsSubtask( + self, regionName, mpasMasksSubtask, filePrefix, + self.variableList, self.weightList) + self.add_subtask(computeWeightsSubtask) + + for season in self.seasons: + + # Generate histogram plots + plotRegionSubtask = PlotRegionHistogramSubtask( + self, regionGroup, regionName, controlConfig, + sectionName, filePrefix, mpasClimatologyTask, + mpasMasksSubtask, obsMasksSubtask, groupObsDicts, + self.variableList, self.weightList, season) + plotRegionSubtask.run_after(mpasMasksSubtask) + plotRegionSubtask.run_after(obsMasksSubtask) + if self.weightList is not None: + plotRegionSubtask.run_after(computeWeightsSubtask) + self.add_subtask(plotRegionSubtask)
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + OSError + If files are not present + """ + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.inDirectory, self.plotsDirectory, self.namelist, self.streams + # self.calendar + super().setup_and_check() + + # Add variables and seasons to climatology task + variableList = [] + for var in self.variableList: + variableList.append(f'timeMonthly_avg_{var}') + + self.mpasClimatologyTask.add_variables(variableList=variableList, + seasons=self.seasons) + + if len(self.obsList) > 1: + raise ValueError('Histogram analysis does not currently support' + 'more than one observational product')
+ + + +class ComputeHistogramWeightsSubtask(AnalysisTask): + """ + Fetches weight variables from MPAS output files for each variable in + variableList. + + """ + def __init__(self, parentTask, regionName, mpasMasksSubtask, fullSuffix, + variableList, weightList): + """ + Initialize weights task + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + regionName : str + Name of the region to plot + + mpasMasksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask MPAS files for each region to plot, used + to get the mask file name + + obsDicts : dict of dicts + Information on the observations to compare agains + + fullSuffix : str + The regionGroup and regionName combined and modified to be + appropriate as a task or file suffix + + variableList: list of str + List of variables which will be weighted + + weightList: list of str + List of variables by which to weight the variables in + variableList, of the same length as variableList + + """ + + super(ComputeHistogramWeightsSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName=f'weights_{fullSuffix}_{regionName}') + + self.mpasMasksSubtask = mpasMasksSubtask + self.regionName = regionName + self.filePrefix = fullSuffix + self.variableList = variableList + self.weightList = weightList + + def run_task(self): + """ + Apply the region mask to each weight variable and save in a file common + to that region. 
+ """ + + config = self.config + base_directory = build_config_full_path( + config, 'output', 'histogramSubdirectory') + + # Get cell mask for the region + region_mask_filename = self.mpasMasksSubtask.maskFileName + ds_region_mask = xarray.open_dataset(region_mask_filename) + mask_region_names = decode_strings(ds_region_mask.regionNames) + region_index = mask_region_names.index(self.regionName) + ds_mask = ds_region_mask.isel(nRegions=region_index) + cell_mask = ds_mask.regionCellMasks == 1 + + # Open the restart file, which contains unmasked weight variables + restart_filename = self.runStreams.readpath('restart')[0] + ds_restart = xarray.open_dataset(restart_filename) + ds_restart = ds_restart.isel(Time=0) + + # Save the cell mask only for the region in its own file, which may be + # referenced by future analysis (i.e., as a control run) + new_region_mask_filename = \ + f'{base_directory}/{self.filePrefix}_{self.regionName}_mask.nc' + write_netcdf_with_fill(ds_mask, new_region_mask_filename) + + if self.weightList is not None: + ds_weights = xarray.Dataset() + # Fetch the weight variables and mask them for each region + for index, var in enumerate(self.variableList): + weight_var_name = self.weightList[index] + if weight_var_name in ds_restart.keys(): + var_name = f'timeMonthly_avg_{var}' + ds_weights[f'{var_name}_weight'] = \ + ds_restart[weight_var_name].where(cell_mask, drop=True) + else: + self.logger.warn(f'Weight variable {weight_var_name} is ' + f'not in the restart file, skipping') + + weights_filename = \ + f'{base_directory}/{self.filePrefix}_{self.regionName}_weights.nc' + write_netcdf_with_fill(ds_weights, weights_filename) + + +class PlotRegionHistogramSubtask(AnalysisTask): + """ + Plots a histogram diagram for a given ocean region + + Attributes + ---------- + regionGroup : str + Name of the collection of region to plot + + regionName : str + Name of the region to plot + + sectionName : str + The section of the config file to get options from + + 
controlConfig : mpas_tools.config.MpasConfigParser + The configuration options for the control run (if any) + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + mpasMasksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask MPAS files for each region to plot, used + to get the mask file name + + obsDicts : dict of dicts + Information on the observations to compare against + + variableList: list of str + list of variables to plot + + season : str + The season to compute the climatology for + """ + + def __init__(self, parentTask, regionGroup, regionName, controlConfig, + sectionName, fullSuffix, mpasClimatologyTask, + mpasMasksSubtask, obsMasksSubtask, obsDicts, variableList, + weightList, season): + + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + regionGroup : str + Name of the collection of region to plot + + regionName : str + Name of the region to plot + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + + sectionName : str + The config section with options for this regionGroup + + fullSuffix : str + The regionGroup and regionName combined and modified to be + appropriate as a task or file suffix + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + mpasMasksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask MPAS files for each region to plot, used + to get the mask file name + + obsDicts : dict of dicts + Information on the observations to compare agains + + season : str + The season to comput the climatogy for + """ + + # first, call the constructor from the base class (AnalysisTask) + super(PlotRegionHistogramSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + 
componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName=f'plot_{fullSuffix}_{regionName}_{season}') + + self.run_after(mpasClimatologyTask) + self.regionGroup = regionGroup + self.regionName = regionName + self.sectionName = sectionName + self.controlConfig = controlConfig + self.mpasClimatologyTask = mpasClimatologyTask + self.mpasMasksSubtask = mpasMasksSubtask + self.obsMasksSubtask = obsMasksSubtask + self.obsDicts = obsDicts + self.variableList = variableList + self.weightList = weightList + self.season = season + self.filePrefix = fullSuffix + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + IOError + If files are not present + """ + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.inDirectory, self.plotsDirectory, self.namelist, self.streams + # self.calendar + super(PlotRegionHistogramSubtask, self).setup_and_check() + + self.xmlFileNames = [] + for var in self.variableList: + self.xmlFileNames.append( + f'{self.plotsDirectory}/{self.filePrefix}_{var}_' + f'{self.regionName}_{self.season}.xml') + + def run_task(self): + """ + Plots histograms of properties in an ocean region. 
+ """ + + self.logger.info(f"\nPlotting {self.season} histograms for " + f"{self.regionName}") + + config = self.config + sectionName = self.sectionName + + calendar = self.calendar + + main_run_name = config.get('runs', 'mainRunName') + + region_mask_filename = self.mpasMasksSubtask.maskFileName + + ds_region_mask = xarray.open_dataset(region_mask_filename) + + mask_region_names = decode_strings(ds_region_mask.regionNames) + region_index = mask_region_names.index(self.regionName) + + ds_mask = ds_region_mask.isel(nRegions=region_index) + cell_mask = ds_mask.regionCellMasks == 1 + + if len(self.obsDicts) > 0: + obs_region_mask_filename = self.obsMasksSubtask.maskFileName + ds_obs_region_mask = xarray.open_dataset(obs_region_mask_filename) + mask_region_names = decode_strings(ds_region_mask.regionNames) + region_index = mask_region_names.index(self.regionName) + + ds_obs_mask = ds_obs_region_mask.isel(nRegions=region_index) + obs_cell_mask = ds_obs_mask.regionMasks == 1 + + in_filename = get_unmasked_mpas_climatology_file_name( + config, self.season, self.componentName, op='avg') + ds = xarray.open_dataset(in_filename) + + base_directory = build_config_full_path( + config, 'output', 'histogramSubdirectory') + + if self.weightList is not None: + weights_filename = \ + f'{base_directory}/{self.filePrefix}_{self.regionName}_' \ + 'weights.nc' + ds_weights = xarray.open_dataset(weights_filename) + + if self.controlConfig is not None: + control_run_name = self.controlConfig.get('runs', 'mainRunName') + control_filename = get_unmasked_mpas_climatology_file_name( + self.controlConfig, self.season, self.componentName, op='avg') + ds_control = xarray.open_dataset(control_filename) + base_directory = build_config_full_path( + self.controlConfig, 'output', 'histogramSubdirectory') + control_region_mask_filename = \ + f'{base_directory}/{self.filePrefix}_{self.regionName}_mask.nc' + ds_control_region_masks = xarray.open_dataset( + control_region_mask_filename) + 
control_cell_mask = ds_control_region_masks.regionCellMasks == 1 + if self.weightList is not None: + control_weights_filename = f'{base_directory}/' \ + f'{self.filePrefix}_{self.regionName}_weights.nc' + ds_control_weights = xarray.open_dataset( + control_weights_filename) + + if config.has_option(self.taskName, 'mainColor'): + mainColor = config.get(self.taskName, 'mainColor') + else: + mainColor = 'C0' + if config.has_option(self.taskName, 'obsColor'): + obsColor = config.get(self.taskName, 'obsColor') + else: + obsColor = 'C1' + if config.has_option(self.taskName, 'controlColor'): + controlColor = config.get(self.taskName, 'controlColor') + else: + controlColor = 'C2' + + if config.has_option(self.taskName, 'lineWidth'): + lineWidth = config.getfloat(self.taskName, 'lineWidth') + else: + lineWidth = None + + if config.has_option(self.taskName, 'titleFontSize'): + titleFontSize = config.getint(self.taskName, + 'titleFontSize') + else: + titleFontSize = None + if config.has_option(self.taskName, 'axisFontSize'): + axisFontSize = config.getint(self.taskName, + 'axisFontSize') + else: + axisFontSize = None + + if config.has_option(self.taskName, 'defaultFontSize'): + defaultFontSize = config.getint(self.taskName, + 'defaultFontSize') + else: + defaultFontSize = None + if config.has_option(self.taskName, 'bins'): + bins = config.getint(self.taskName, 'bins') + else: + bins = None + + yLabel = 'normalized Probability Density Function' + + for index, var in enumerate(self.variableList): + + fields = [] + weights = [] + legendText = [] + lineColors = [] + + var_name = f'timeMonthly_avg_{var}' + + title = f'{self.regionName.replace("_", " ")}, {self.season}' + + caption = f'Normalized probability density function for ' \ + f'{self.season} {var} climatologies in ' \ + f'{self.regionName.replace("_", " ")}' + + # Note: consider modifying this for more professional headings + varTitle = var + + fields.append(ds[var_name].where(cell_mask, drop=True)) + if self.weightList is 
not None: + if f'{var_name}_weight' in ds_weights.keys(): + weights.append(ds_weights[f'{var_name}_weight'].values) + caption = f'{caption} weighted by {self.weightList[index]}' + else: + weights.append(None) + else: + weights.append(None) + + legendText.append(main_run_name) + lineColors.append(mainColor) + + xLabel = f"{ds[var_name].attrs['long_name']} " \ + f"({ds[var_name].attrs['units']})" + + for obs_name in self.obsDicts: + localObsDict = dict(self.obsDicts[obs_name]) + obs_filename = build_obs_path( + config, component=self.componentName, + relativePath=localObsDict['gridFileName']) + if f'{var}Var' not in localObsDict.keys(): + self.logger.warn( + f'{var}Var is not present in {obs_name}, skipping ' + f'{obs_name}') + continue + obs_var_name = localObsDict[f'{var}Var'] + ds_obs = xarray.open_dataset(obs_filename) + ds_obs = ds_obs.where(obs_cell_mask, drop=True) + fields.append(ds_obs[obs_var_name]) + legendText.append(obs_name) + lineColors.append(obsColor) + weights.append(None) + if self.controlConfig is not None: + fields.append(ds_control[var_name].where(control_cell_mask, + drop=True)) + control_run_name = self.controlConfig.get('runs', + 'mainRunName') + legendText.append(control_run_name) + lineColors.append(controlColor) + weights.append(ds_control_weights[f'{var_name}_weight'].values) + + if lineWidth is not None: + lineWidths = [lineWidth for i in fields] + else: + lineWidths = None + + histogram_analysis_plot(config, fields, calendar=calendar, + title=title, xLabel=xLabel, yLabel=yLabel, + bins=bins, weights=weights, + lineColors=lineColors, + lineWidths=lineWidths, + legendText=legendText, + titleFontSize=titleFontSize, + defaultFontSize=defaultFontSize) + + out_filename = f'{self.plotsDirectory}/{self.filePrefix}_{var}_' \ + f'{self.regionName}_{self.season}.png' + savefig(out_filename, config) + + write_image_xml( + config=config, + filePrefix=f'{self.filePrefix}_{var}_{self.regionName}_' + f'{self.season}', + componentName='Ocean', + 
componentSubdirectory='ocean', + galleryGroup=f'{self.regionGroup} Histograms', + groupLink=f'histogram{var}', + gallery=varTitle, + thumbnailDescription=f'{self.regionName.replace("_", " ")} ' + f'{self.season}', + imageDescription=caption, + imageCaption=caption) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/index_nino34.html b/1.11.0rc1/_modules/mpas_analysis/ocean/index_nino34.html new file mode 100644 index 000000000..8f33b7898 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/index_nino34.html @@ -0,0 +1,918 @@ + + + + + + mpas_analysis.ocean.index_nino34 — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.index_nino34

+# -*- coding: utf-8 -*-
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+
+import datetime
+import xarray as xr
+import pandas as pd
+import numpy as np
+from scipy import signal, stats
+from scipy.signal.windows import tukey
+import matplotlib.pyplot as plt
+
+from mpas_analysis.shared.climatology import climatology
+from mpas_analysis.shared.constants import constants
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    build_obs_path
+
+from mpas_analysis.shared.timekeeping.utility import datetime_to_days, \
+    string_to_days_since_date, string_to_datetime
+
+from mpas_analysis.shared.timekeeping.MpasRelativeDelta import \
+    MpasRelativeDelta
+
+from mpas_analysis.shared.io import open_mpas_dataset
+
+from mpas_analysis.shared.plot.ticks import plot_xtick_format
+from mpas_analysis.shared.plot.save import savefig
+
+from mpas_analysis.shared import AnalysisTask
+from mpas_analysis.shared.html import write_image_xml
+
+
+
+[docs] +class IndexNino34(AnalysisTask): + """ + A task for computing and plotting time series and spectra of the El Nino + 3.4 climate index + + Attributes + ---------- + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + controlconfig : mpas_tools.config.MpasConfigParser + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis + +
+[docs] + def __init__(self, config, mpasTimeSeriesTask, controlConfig=None): + + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(IndexNino34, self).__init__( + config=config, + taskName='indexNino34', + componentName='ocean', + tags=['timeSeries', 'index', 'nino', 'publicObs']) + + self.mpasTimeSeriesTask = mpasTimeSeriesTask + self.controlConfig = controlConfig + + self.run_after(mpasTimeSeriesTask)
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(IndexNino34, self).setup_and_check() + + startDate = self.config.get('index', 'startDate') + endDate = self.config.get('index', 'endDate') + + delta = MpasRelativeDelta(string_to_datetime(endDate), + string_to_datetime(startDate), + calendar=self.calendar) + + months = delta.months + 12*delta.years + + if months <= 12: + raise ValueError('Cannot meaninfully analyze El Nino climate ' + 'index because the time series is too short.') + + self.variableList = \ + ['timeMonthly_avg_avgValueWithinOceanRegion_avgSurfaceTemperature'] + self.mpasTimeSeriesTask.add_variables(variableList=self.variableList) + + self.inputFile = self.mpasTimeSeriesTask.outputFile + + mainRunName = self.config.get('runs', 'mainRunName') + + config = self.config + regionToPlot = config.get('indexNino34', 'region') + + if regionToPlot not in ['nino3.4', 'nino3', 'nino4']: + raise ValueError('Unexpectes El Nino Index region {}'.format( + regionToPlot)) + ninoIndexNumber = regionToPlot[4:] + + self.xmlFileNames = [] + for filePrefix in ['nino{}_{}'.format(ninoIndexNumber, mainRunName), + 'nino{}_spectra_{}'.format(ninoIndexNumber, + mainRunName)]: + self.xmlFileNames.append('{}/{}.xml'.format(self.plotsDirectory, + filePrefix)) + + def run_task(self): + """ + Computes NINO34 index and plots the time series and power spectrum with + 95 and 99% confidence bounds + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis + + config = self.config + calendar = self.calendar + + regionToPlot = config.get('indexNino34', 'region') + + ninoIndexNumber = 
regionToPlot[4:] + + self.logger.info("\nPlotting El Nino {} Index time series and power " + "spectrum....".format(ninoIndexNumber)) + + self.logger.info(' Load SST data...') + fieldName = 'nino' + + startDate = self.config.get('index', 'startDate') + endDate = self.config.get('index', 'endDate') + + startYear = self.config.getint('index', 'startYear') + endYear = self.config.getint('index', 'endYear') + + dataSource = config.get('indexNino34', 'observationData') + + observationsDirectory = build_obs_path( + config, 'ocean', '{}Subdirectory'.format(fieldName)) + + # specify obsTitle based on data path + # These are the only data sets supported + if dataSource == 'HADIsst': + dataPath = "{}/HADIsst_nino34_20180710.nc".format( + observationsDirectory) + obsTitle = 'HADSST' + refDate = '1870-01-01' + elif dataSource == 'ERS_SSTv4': + dataPath = "{}/ERS_SSTv4_nino34_20180710.nc".format( + observationsDirectory) + obsTitle = 'ERS SSTv4' + refDate = '1800-01-01' + else: + raise ValueError('Bad value for config option observationData {} ' + 'in [indexNino34] section.'.format(dataSource)) + + mainRunName = config.get('runs', 'mainRunName') + + # regionIndex should correspond to NINO34 in surface weighted Average + # AM + regions = config.getexpression('regions', 'regions') + regionToPlot = config.get('indexNino34', 'region') + regionIndex = regions.index(regionToPlot) + + # Load data: + ds = open_mpas_dataset(fileName=self.inputFile, + calendar=calendar, + variableList=self.variableList, + startDate=startDate, + endDate=endDate) + + # Observations have been processed to the nino34Index prior to reading + dsObs = xr.open_dataset(dataPath, decode_cf=False, decode_times=False) + # add the days between 0001-01-01 and the refDate so we have a new + # reference date of 0001-01-01 (like for the model Time) + dsObs["Time"] = dsObs.Time + \ + string_to_days_since_date(dateString=refDate, calendar=calendar) + nino34Obs = dsObs.sst + + self.logger.info(' Compute El Nino {} 
Index...'.format( + ninoIndexNumber)) + varName = self.variableList[0] + regionSST = ds[varName].isel(nOceanRegions=regionIndex) + nino34Main = self._compute_nino34_index(regionSST, calendar) + + # Compute the observational index over the entire time range + # nino34Obs = compute_nino34_index(dsObs.sst, calendar) + + self.logger.info(' Computing El Nino {} power spectra...'.format( + ninoIndexNumber)) + spectraMain = self._compute_nino34_spectra(nino34Main) + + # Compute the observational spectra over the whole record + spectraObs = self._compute_nino34_spectra(nino34Obs) + + # Compute the observational spectra over the last 30 years for + # comparison. Only saving the spectra + subsetEndYear = 2016 + if self.controlConfig is None: + subsetStartYear = 1976 + else: + # make the subset the same length as the input data set + subsetStartYear = subsetEndYear - (endYear - startYear) + time_start = datetime_to_days(datetime.datetime(subsetStartYear, 1, 1), + calendar=calendar) + time_end = datetime_to_days(datetime.datetime(subsetEndYear, 12, 31), + calendar=calendar) + nino34Subset = nino34Obs.sel(Time=slice(time_start, time_end)) + spectraSubset = self._compute_nino34_spectra(nino34Subset) + + if self.controlConfig is None: + nino34s = [nino34Obs[2:-3], nino34Subset, nino34Main[2:-3]] + titles = ['{} (Full Record)'.format(obsTitle), + '{} ({} - {})'.format(obsTitle, subsetStartYear, + subsetEndYear), + mainRunName] + spectra = [spectraObs, spectraSubset, spectraMain] + else: + baseDirectory = build_config_full_path( + self.controlConfig, 'output', 'timeSeriesSubdirectory') + + refFileName = '{}/{}.nc'.format( + baseDirectory, self.mpasTimeSeriesTask.fullTaskName) + + dsRef = open_mpas_dataset( + fileName=refFileName, + calendar=calendar, + variableList=self.variableList) + + regionSSTRef = dsRef[varName].isel(nOceanRegions=regionIndex) + nino34Ref = self._compute_nino34_index(regionSSTRef, calendar) + + nino34s = [nino34Subset, nino34Main[2:-3], nino34Ref[2:-3]] + 
controlRunName = self.controlConfig.get('runs', 'mainRunName') + + spectraRef = self._compute_nino34_spectra(nino34Ref) + + titles = ['{} ({} - {})'.format(obsTitle, subsetStartYear, + subsetEndYear), + mainRunName, + 'Control: {}'.format(controlRunName)] + spectra = [spectraSubset, spectraMain, spectraRef] + + # Convert frequencies to period in years + for s in spectra: + s['period'] = \ + 1.0 / (constants.eps + s['f'] * constants.sec_per_year) + + self.logger.info(' Plot El Nino {} index and spectra...'.format( + ninoIndexNumber)) + + outFileName = '{}/nino{}_{}.png'.format(self.plotsDirectory, + ninoIndexNumber, mainRunName) + self._nino34_timeseries_plot( + nino34s=nino34s, + title=u'El Niño {} Index'.format(ninoIndexNumber), + panelTitles=titles, + outFileName=outFileName) + + self._write_xml(filePrefix='nino{}_{}'.format(ninoIndexNumber, + mainRunName), + plotType='Time Series', + ninoIndexNumber=ninoIndexNumber) + + outFileName = '{}/nino{}_spectra_{}.png'.format(self.plotsDirectory, + ninoIndexNumber, + mainRunName) + self._nino34_spectra_plot( + spectra=spectra, + title=u'El Niño {} power spectrum'.format(ninoIndexNumber), + panelTitles=titles, + outFileName=outFileName) + + self._write_xml(filePrefix='nino{}_spectra_{}'.format(ninoIndexNumber, + mainRunName), + plotType='Spectra', + ninoIndexNumber=ninoIndexNumber) + + def _compute_nino34_index(self, regionSST, calendar): + """ + Computes nino34 index time series. It follow the standard nino34 + algorithm, i.e., + + 1. Compute monthly average SST in the region + 2. Computes anomalous SST + 3. Performs a 5 month running mean over the anomalies + + This routine requires regionSST to be the SSTs in the nino3.4 region + ONLY. It is defined as lat > -5S and lat < 5N and lon > 190E and + lon < 240E. 
+ + Parameters + ---------- + regionSST : xarray.DataArray object + values of SST in the nino region + + calendar: {'gregorian', 'noleap'} + The name of the calendars used in the MPAS run + + Returns + ------- + xarray.DataArray object containing the nino34index + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis + + if not isinstance(regionSST, xr.core.dataarray.DataArray): + raise ValueError('regionSST should be an xarray DataArray') + + # add 'month' data array so we can group by month below. + regionSST = climatology.add_years_months_days_in_month(regionSST, + calendar) + + # Compute monthly average and anomaly of climatology of SST + monthlyClimatology = \ + climatology.compute_monthly_climatology(regionSST, + maskVaries=False) + + anomaly = regionSST.groupby('month') - monthlyClimatology + + # Remove the long term trend from the anomalies + detrendedAnomal = signal.detrend(anomaly.values) + anomaly.values = detrendedAnomal + + # Compute 5 month running mean + wgts = np.ones(5) / 5. + return self._running_mean(anomaly, wgts) + + def _compute_nino34_spectra(self, nino34Index): + """ + Computes power spectra of Nino34 index. 
+ + nino34Index is the NINO index computed by compute_nino34_index + + The algorithm follows the NCL cvdp package see + http://www.cesm.ucar.edu/working_groups/CVC/cvdp/code.html + + Parameters + ---------- + nino34Index : xarray.DataArray object + nino34Index for analysis + + Returns + ------- + pxxSmooth : xarray.DataArray object + nino34Index power spectra that has been smoothed with a modified + Daniell window (https://www.ncl.ucar.edu/Document/Functions/Built-in/specx_anal.shtml) + + f : numpy.array + array of frequencies corresponding to the center of the spectral + bins resulting from the analysis + + mkov*scale : numpy.array + Red noise fit to pxxSmooth + + mkov*scale*xLow : numpy.array + 95% confidence threshold from chi-squared test + + mkov*scale*xHigh : numpy.array + 99% confidence threshold from chi-squared test + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis + + # Move nino34Index to numpy to allow functionality with scipy routines + ninoIndex = nino34Index.values + window = tukey(len(ninoIndex), alpha=0.1) + f, Pxx = signal.periodogram(window * ninoIndex, + 1.0 / constants.sec_per_month) + + # computes power spectra, smoothed with a weighted running mean + nwts = max(1, int(7 * len(ninoIndex) / 1200)) + # verify window length is odd, if not, add 1 + if nwts % 2 == 0: + nwts += 1 + # Calculate the weights for the running mean + # Weights are from the modified Daniell Window + wgts = np.ones(nwts) + wgts[0] = 0.5 + wgts[-1] = 0.5 + wgts /= sum(wgts) + + pxxSmooth = (self._running_mean(pd.Series(Pxx), wgts) / + constants.sec_per_month) + + # compute 99 and 95% confidence intervals and red-noise process + # Uses Chi squared test + + r = self._autocorr(ninoIndex)[0, 1] + r2 = 2. * r + rsq = r**2 + + # In the temp2 variable, f is converted to give wavenumber, i.e. + # 0,1,2,...,N/2 + temp2 = r2 * np.cos(2. * np.pi * f * constants.sec_per_month) + mkov = 1. / (1. 
+ rsq - temp2) + + sum1 = np.sum(mkov) + sum2 = np.sum(pxxSmooth.values) + scale = sum2 / sum1 + + df = 2. / (constants.tapcoef * sum(wgts**2)) + xLow = stats.chi2.interval(0.95, df)[1] / df + xHigh = stats.chi2.interval(0.99, df)[1] / df + + # return Spectra, 99% confidence level, 95% confidence level, + # and Red-noise fit + spectra = {'f': f, 'spectrum': pxxSmooth, + 'conf99': mkov * scale * xHigh, + 'conf95': mkov * scale * xLow, + 'redNoise': mkov * scale} + return spectra + + def _autocorr(self, x, t=1): + """ + Computes lag one auto-correlation for the NINO34 spectra calculation + + Parameters + ---------- + x : numpy 1-D array + time series array + + Returns + ------- + Single value giving the lag one auto-correlation + If t != 1, this is no longer a lag one auto-correlation + """ + # Authors + # ------- + # Luke Van Roekel + + return np.corrcoef(np.array([x[0:len(x) - t], x[t:len(x)]])) + + def _running_mean(self, inputData, wgts): + """ + Calculates a generic weighted running mean + + Parameters + ---------- + inputData : xr.DataArray + Data to be smoothed + + wgts : numpy.array + array of weights that give the smoothing type + for the nino index this is a 5-point boxcar window + for the nino power spectra this is a modified Daniell window (see + https://www.ncl.ucar.edu/Document/Functions/Built-in/specx_anal.shtml) + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis + + nt = len(inputData) + sp = (len(wgts) - 1) // 2 + runningMean = inputData.copy() + for k in range(sp, nt - (sp + 1)): + runningMean[k] = sum(wgts * inputData[k - sp:k + sp + 1].values) + + return runningMean + + def _nino34_spectra_plot(self, spectra, title, panelTitles, + outFileName, lineWidth=2, xlabel='Period (years)', + ylabel=r'Power ($^o$C / cycles mo$^{-1}$)', + titleFontSize=None, figsize=(9, 21), dpi=None, + periodMin=1., periodMax=10.): + + """ + Plots the nino34 time series and power spectra in an image file + Parameters + ---------- + spectra : list of dict + 
a dictionary for each panel returned from + ``self._compute_nino34_spectra`` including entries + ``period`` (periods to plot on x-axis), ``spectrum`` (nino34 power + spectra), ``conf95`` (95% confidence level based on chi squared + test), ``conf99`` (99% confidence level based on chi squared test) + and ``redNoise`` (red noise fit to ``spectrum``) + + title : str + the title of the plot + + panelTitles : list of str + title of each panel of the plot + + outFileName : str + the file name to be written + + lineWidth : int, optional + control line width + + xLabel, yLabel : str, optional + axis labels + + titleFontSize : int, optional + the size of the title font + + figsize : tuple of float, optional + the size of the figure in inches + + dpi : int, optional + the number of dots per inch of the figure, taken from section + ``plot`` option ``dpi`` in the config file by default + + periodMin, periodMax : float, optional + the maximum and minimum periods (in years) to be plotted + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis + + config = self.config + + if dpi is None: + dpi = config.getint('plot', 'dpi') + fig = plt.figure(figsize=figsize, dpi=dpi) + + if titleFontSize is None: + titleFontSize = config.get('plot', 'titleFontSize') + + axis_font = {'size': config.get('plot', 'axisFontSize')} + title_font = {'size': titleFontSize, + 'color': config.get('plot', 'titleFontColor'), + 'weight': config.get('plot', 'titleFontWeight')} + if title is not None: + fig.suptitle(title, y=0.92, **title_font) + + spectrumNames = ['spectrum', 'redNoise', 'conf95', 'conf99'] + colors = ['k', 'r', 'g', 'b'] + legends = ['Nino34 spectrum', 'Red noise fit', + '95% confidence threshold', '99% confidence threshold'] + + maxYval = -1e20 + for plotIndex in range(3): + x = spectra[plotIndex]['period'] + ys = [spectra[plotIndex][spectrumNames[curveIndex]] for curveIndex + in range(4)] + maxYval = max(maxYval, + self._plot_size_y_axis(x=x, ys=ys, xmin=periodMin, + 
xmax=periodMax)) + + for plotIndex in range(3): + plt.subplot(3, 1, plotIndex + 1) + + period = spectra[plotIndex]['period'] + for curveIndex in range(4): + spectrum = spectra[plotIndex][spectrumNames[curveIndex]] + plt.plot(period[2:-3], spectrum[2:-3], colors[curveIndex], + linewidth=lineWidth, label=legends[curveIndex]) + plt.xlim(10, 1) + + plt.legend(loc='upper right') + plt.ylim(0, 0.9 * maxYval) + + if panelTitles[plotIndex] is not None: + plt.title(panelTitles[plotIndex], **title_font) + if xlabel is not None: + plt.xlabel(xlabel, **axis_font) + if ylabel is not None: + plt.ylabel(ylabel, **axis_font) + + plt.tight_layout(rect=[0, 0.03, 1, 0.90]) + + if outFileName is not None: + savefig(outFileName, config) + + plt.close() + + def _nino34_timeseries_plot(self, nino34s, title, panelTitles, outFileName, + xlabel='Time (years)', ylabel=r'($\degree$C)', + titleFontSize=None, figsize=(9, 21), dpi=None, + maxXTicks=20, lineWidth=2): + + """ + Plots the nino34 time series and power spectra in an image file + + Parameters + ---------- + nino34s : list of xarray.dataArray + nino34 timeseries to plot in each panel + + title : str + the title of the plot + + panelTitles : list of str + title of each panel of the plot + + outFileName : str + the file name to be written + + xLabel, yLabel : str + axis labels + + titleFontSize : int, optional + the size of the title font + + figsize : tuple of float, optional + the size of the figure in inches + + dpi : int, optional + the number of dots per inch of the figure, taken from section + ``plot`` option ``dpi`` in the config file by default + + lineWidth : int, optional + control line width + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis + + config = self.config + calendar = self.calendar + + if dpi is None: + dpi = config.getint('plot', 'dpi') + fig = plt.figure(figsize=figsize, dpi=dpi) + + if titleFontSize is None: + titleFontSize = config.get('plot', 'titleFontSize') + + axis_font = {'size': 
config.get('plot', 'axisFontSize')} + title_font = {'size': titleFontSize, + 'color': config.get('plot', 'titleFontColor'), + 'weight': config.get('plot', 'titleFontWeight')} + if title is not None: + fig.suptitle(title, y=0.92, **title_font) + + for plotIndex in range(3): + plt.subplot(3, 1, plotIndex + 1) + index = nino34s[plotIndex].values + time = nino34s[plotIndex].Time.values + self._plot_nino_timeseries(index, time, xlabel, ylabel, + panelTitles[plotIndex], + title_font, axis_font, lineWidth) + + minDays = time.min() + maxDays = time.max() + + plot_xtick_format(calendar, minDays, maxDays, maxXTicks) + + plt.tight_layout(rect=[0, 0.03, 1, 0.90]) + + if outFileName is not None: + savefig(outFileName, config) + + plt.close() + + def _plot_nino_timeseries(self, ninoIndex, time, xlabel, ylabel, + panelTitle, title_font, axis_font, + lineWidth): + """ + Plot the nino time series on a subplot + + Parameters + ---------- + ninoIndex : numpy.array + nino34 Index values (can be obs or model) + + time : numpy.array + time values for the nino index + + xlabel : string + string for x-axis label + + ylabel : string + string for y-axis label + + panelTitle : string + string to label the subplot with + + lineWidth : list of str + control line width + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis + + plt.title(panelTitle, y=1.06, **title_font) + y1 = ninoIndex + nt = np.size(ninoIndex) + + y2 = np.zeros(nt) + + plt.plot(time, 0.4 * np.ones(nt), '--k', + linewidth=lineWidth) + plt.plot(time, -0.4 * np.ones(nt), '--k', + linewidth=lineWidth) + plt.fill_between(time, y1, y2, where=y1 > y2, + facecolor='red', interpolate=True, linewidth=0) + plt.fill_between(time, y1, y2, where=y1 < y2, + facecolor='blue', interpolate=True, linewidth=0) + + if xlabel is not None: + plt.xlabel(xlabel, **axis_font) + if ylabel is not None: + plt.ylabel(ylabel, **axis_font) + + def _write_xml(self, filePrefix, plotType, ninoIndexNumber): + caption = u'{} of El Niño {} Climate 
Index'.format(plotType, + ninoIndexNumber) + write_image_xml( + config=self.config, + filePrefix=filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup=u'El Niño {} Climate Index'.format(ninoIndexNumber), + groupLink='nino', + thumbnailDescription=plotType, + imageDescription=caption, + imageCaption=caption) + + def _plot_size_y_axis(self, x, ys, xmin, xmax): + """ + Get the maximum y value over the given range of x values + + Parameters + ---------- + x : numpy.array + x values + + ys : list of numpy.array + a list of curves (y values) + + xmin : float + The minimum x value + + xmax : float, optional + The maximum x values + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis + + mask = np.logical_and(x >= xmin, x <= xmax) + + # find maximum value of three curves plotted + maxY = -1E20 + for y in ys: + maxY = max(y[mask].max(), maxY) + # check the function interpolated to the max/min as well + # Note: flipping the axis so x is in increasing order + maxY = max(np.interp(xmin, x[::-1], y[::-1]), maxY) + maxY = max(np.interp(xmax, x[::-1], y[::-1]), maxY) + + return maxY
+ + +# }}} +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/meridional_heat_transport.html b/1.11.0rc1/_modules/mpas_analysis/ocean/meridional_heat_transport.html new file mode 100644 index 000000000..9de31058a --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/meridional_heat_transport.html @@ -0,0 +1,509 @@ + + + + + + mpas_analysis.ocean.meridional_heat_transport — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.meridional_heat_transport

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import xarray as xr
+import numpy as np
+import os
+
+from mpas_analysis.shared.plot import plot_vertical_section, plot_1D, savefig
+
+from mpas_analysis.shared.io.utility import make_directories, build_obs_path
+from mpas_analysis.shared.io import write_netcdf_with_fill
+
+from mpas_analysis.shared import AnalysisTask
+from mpas_analysis.shared.html import write_image_xml
+from mpas_analysis.shared.climatology.climatology import \
+    get_climatology_op_directory
+
+
+
+[docs] +class MeridionalHeatTransport(AnalysisTask): + """ + Plot meridional heat transport from the analysis member output. + + Attributes + ---------- + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser + Configuration options for a control run (if any) + """ + + # Authors + # ------- + # Mark Petersen, Milena Veneziani, Xylar Asay-Davis + +
+[docs] + def __init__(self, config, mpasClimatologyTask, controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(MeridionalHeatTransport, self).__init__( + config=config, + taskName='meridionalHeatTransport', + componentName='ocean', + tags=['climatology', 'publicObs']) + + self.mpasClimatologyTask = mpasClimatologyTask + self.run_after(mpasClimatologyTask) + + self.controlConfig = controlConfig
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + # Authors + # ------- + # Mark Petersen, Milena Veneziani, Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(MeridionalHeatTransport, self).setup_and_check() + + self.startYear = self.mpasClimatologyTask.startYear + self.startDate = self.mpasClimatologyTask.startDate + self.endYear = self.mpasClimatologyTask.endYear + self.endDate = self.mpasClimatologyTask.endDate + + config = self.config + + self.check_analysis_enabled( + analysisOptionName='config_am_meridionalheattransport_enable', + raiseException=True) + + self.sectionName = 'meridionalHeatTransport' + + # Read in obs file information + compareWithObs = config.getboolean(self.sectionName, + 'compareWithObservations') + self.observationsFile = None + if compareWithObs: + observationsDirectory = build_obs_path( + config, 'ocean', 'mhtSubdirectory') + observationsFile = config.get(self.sectionName, 'observationData') + observationsFile = '{}/{}'.format(observationsDirectory, + observationsFile) + if os.path.exists(observationsFile): + self.observationsFile = observationsFile + else: + print('Warning: No MHT observations file found: skip plotting ' + 'obs') + + mainRunName = self.config.get('runs', 'mainRunName') + + variableList = ['timeMonthly_avg_meridionalHeatTransportLat', + 'timeMonthly_avg_meridionalHeatTransportLatZ'] + + self.mpasClimatologyTask.add_variables(variableList=variableList, + seasons=['ANN']) + + self.xmlFileNames = [] + self.filePrefixes = {} + + prefixes = ['mht'] + if config.getboolean(self.sectionName, 'plotVerticalSection'): + prefixes.append('mhtZ') + + for prefix in prefixes: + filePrefix = 
'{}_{}_years{:04d}-{:04d}'.format( + prefix, mainRunName, + self.startYear, self.endYear) + self.xmlFileNames.append('{}/{}.xml'.format(self.plotsDirectory, + filePrefix)) + self.filePrefixes[prefix] = filePrefix + + def run_task(self): + """ + Process MHT analysis member data if available. + Plots MHT as: + 1D function of latitude + 2D function of latitude and depth + """ + # Authors + # ------- + # Mark Petersen, Milena Veneziani, Xylar Asay-Davis + + self.logger.info("\nPlotting meridional heat transport (MHT)...") + + config = self.config + + mainRunName = config.get('runs', 'mainRunName') + + depthLimGlobal = config.getexpression(self.sectionName, + 'depthLimGlobal') + xLimGlobal = config.getexpression(self.sectionName, 'xLimGlobal') + movingAveragePoints = config.getint('meridionalHeatTransport', + 'movingAveragePoints') + + outputDirectory = get_climatology_op_directory(config) + + make_directories(outputDirectory) + + outFileName = \ + '{}/meridionalHeatTransport_years{:04d}-{:04d}.nc'.format( + outputDirectory, self.startYear, self.endYear) + + if os.path.exists(outFileName): + self.logger.info(' Reading results from previous analysis run...') + annualClimatology = xr.open_dataset(outFileName) + refZMid = annualClimatology.refZMid + binBoundaryMerHeatTrans = \ + annualClimatology.binBoundaryMerHeatTrans + else: + + # Read in depth and MHT latitude points + # Latitude is from binBoundaryMerHeatTrans + try: + restartFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-O restart file found: need at least ' + 'one for MHT calcuation') + + with xr.open_dataset(restartFileName) as dsRestart: + refBottomDepth = dsRestart.refBottomDepth + + nVertLevels = refBottomDepth.sizes['nVertLevels'] + refLayerThickness = np.zeros(nVertLevels) + refLayerThickness[0] = refBottomDepth[0] + refLayerThickness[1:nVertLevels] = \ + refBottomDepth[1:nVertLevels] - \ + refBottomDepth[0:nVertLevels - 1] + + refLayerThickness = 
xr.DataArray(dims='nVertLevels', + data=refLayerThickness) + + refZMid = -refBottomDepth + 0.5 * refLayerThickness + + binBoundaryMerHeatTrans = None + # first try timeSeriesStatsMonthly for bin boundaries, then try + # meridionalHeatTransport stream as a backup option + for streamName in ['timeSeriesStatsMonthlyOutput', + 'meridionalHeatTransportOutput']: + try: + inputFile = self.historyStreams.readpath(streamName)[0] + except ValueError: + raise IOError('At least one file from stream {} is needed ' + 'to compute MHT'.format(streamName)) + + with xr.open_dataset(inputFile) as ds: + if 'binBoundaryMerHeatTrans' in ds.data_vars: + binBoundaryMerHeatTrans = \ + ds.binBoundaryMerHeatTrans + break + + if binBoundaryMerHeatTrans is None: + raise ValueError('Could not find binBoundaryMerHeatTrans in ' + 'either timeSeriesStatsMonthlyOutput or ' + 'meridionalHeatTransportOutput streams') + + binBoundaryMerHeatTrans = np.rad2deg(binBoundaryMerHeatTrans) + + ################################################################### + # Mark P Note: Currently only supports global MHT. + # Need to add variables merHeatTransLatRegion and + # merHeatTransLatZRegion + # These are not computed by default in ACME right now. + # Then we will need to add another section for regions with a loop + # over number of regions. 
+ ################################################################### + + self.logger.info('\n Plotting global meridional heat transport') + + self.logger.info(' Load data...') + + climatologyFileName = self.mpasClimatologyTask.get_file_name( + season='ANN') + + variableList = ['timeMonthly_avg_meridionalHeatTransportLat', + 'timeMonthly_avg_meridionalHeatTransportLatZ'] + + annualClimatology = xr.open_dataset(climatologyFileName) + annualClimatology = annualClimatology[variableList] + annualClimatology = annualClimatology.rename( + {'timeMonthly_avg_meridionalHeatTransportLat': + 'meridionalHeatTransportLat', + 'timeMonthly_avg_meridionalHeatTransportLatZ': + 'meridionalHeatTransportLatZ'}) + if 'Time' in annualClimatology.dims: + annualClimatology = annualClimatology.isel(Time=0) + + annualClimatology.coords['refZMid'] = refZMid + annualClimatology.coords['binBoundaryMerHeatTrans'] = \ + binBoundaryMerHeatTrans + + if config.getboolean(self.sectionName, 'plotVerticalSection'): + # normalize 2D MHT by layer thickness + annualClimatology['meridionalHeatTransportLatZ'] /= \ + refLayerThickness + + write_netcdf_with_fill(annualClimatology, outFileName) + + # **** Plot MHT **** + maxTitleLength = 70 + self.logger.info(' Plot global MHT...') + # Plot 1D MHT (zonally averaged, depth integrated) + x = binBoundaryMerHeatTrans + y = annualClimatology.meridionalHeatTransportLat + xLabel = 'latitude (deg)' + yLabel = 'meridional heat transport (PW)' + + title = 'Global MHT (ANN, years {:04d}-{:04d})\n {}'.format( + self.startYear, self.endYear, mainRunName) + filePrefix = self.filePrefixes['mht'] + figureName = '{}/{}.png'.format(self.plotsDirectory, filePrefix) + lineColors = ['k'] + lineWidths = [1.6] + legendText = [mainRunName] + xArrays = [x] + fieldArrays = [y] + errArrays = [None] + if self.observationsFile is not None: + # Load in observations + dsObs = xr.open_dataset(self.observationsFile) + xObs = dsObs.LATITUDE + ncepGlobal = dsObs.GLOBALNCEP_ADJUSTED + 
ncepErrGlobal = dsObs.GLOBALNCEP_ERR + ecmwfGlobal = dsObs.GLOBALECMWF_ADJUSTED + ecmwfErrGlobal = dsObs.GLOBALECMWF_ERR + + lineColors.extend(['b', 'g']) + lineWidths.extend([1.2, 1.2]) + legendText.extend(['Trenberth and Caron - NCEP', + 'Trenberth and Caron - ECMWF']) + xArrays.extend([xObs, xObs]) + fieldArrays.extend([ncepGlobal, ecmwfGlobal]) + errArrays.extend([ncepErrGlobal, ecmwfErrGlobal]) + + if self.controlConfig is not None: + controlStartYear = self.controlConfig.getint('climatology', + 'startYear') + controlEndYear = self.controlConfig.getint('climatology', + 'endYear') + controlDirectory = get_climatology_op_directory(self.controlConfig) + + controlFileName = \ + '{}/meridionalHeatTransport_years{:04d}-{:04d}.nc'.format( + controlDirectory, controlStartYear, controlEndYear) + + dsControl = xr.open_dataset(controlFileName) + controlRunName = self.controlConfig.get('runs', 'mainRunName') + + lineColors.append('r') + lineWidths.append(1.2) + legendText.append(controlRunName) + xArrays.append(dsControl.binBoundaryMerHeatTrans) + fieldArrays.append(dsControl.meridionalHeatTransportLat) + errArrays.append(None) + + if len(legendText) == 1: + # no need for a legend + legendText = [None] + + plot_1D(config, xArrays, fieldArrays, errArrays, + lineColors=lineColors, lineWidths=lineWidths, + legendText=legendText, title=title, xlabel=xLabel, + ylabel=yLabel, fileout=figureName, xLim=xLimGlobal, + maxTitleLength=maxTitleLength) + + self._write_xml(filePrefix) + + if config.getboolean(self.sectionName, 'plotVerticalSection'): + # Plot 2D MHT (zonally integrated) + + x = binBoundaryMerHeatTrans + y = refZMid + z = annualClimatology.meridionalHeatTransportLatZ + xLabel = 'latitude (deg)' + yLabel = 'depth (m)' + title = 'Global MHT (ANN, years {:04d}-{:04d})\n {}'.format( + self.startYear, self.endYear, mainRunName) + filePrefix = self.filePrefixes['mhtZ'] + outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix) + colorbarLabel = '(PW/m)' + 
plot_vertical_section(config, z, self.sectionName, xCoords=x, + zCoord=y, suffix='', + colorbarLabel=colorbarLabel, + title=title, xlabels=xLabel, ylabel=yLabel, + xLim=xLimGlobal, + yLim=depthLimGlobal, invertYAxis=False, + movingAveragePoints=movingAveragePoints, + maxTitleLength=maxTitleLength) + + savefig(outFileName, config) + + self._write_xml(filePrefix) + + def _write_xml(self, filePrefix): + caption = 'Meridional Heat Transport' + write_image_xml( + config=self.config, + filePrefix=filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup='Meridional Heat Transport', + groupLink='mht', + imageDescription=caption, + imageCaption=caption)
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/plot_depth_integrated_time_series_subtask.html b/1.11.0rc1/_modules/mpas_analysis/ocean/plot_depth_integrated_time_series_subtask.html new file mode 100644 index 000000000..17246d887 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/plot_depth_integrated_time_series_subtask.html @@ -0,0 +1,635 @@ + + + + + + mpas_analysis.ocean.plot_depth_integrated_time_series_subtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.plot_depth_integrated_time_series_subtask

+# -*- coding: utf-8 -*-
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+
+import os
+import xarray
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.plot import timeseries_analysis_plot, savefig
+
+from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf_with_fill
+
+from mpas_analysis.shared.timekeeping.utility import date_to_days, \
+    days_to_datetime
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories
+
+from mpas_analysis.shared.html import write_image_xml
+
+from mpas_analysis.shared.time_series import compute_moving_avg, \
+    combine_time_series_with_ncrcat
+
+
+
+[docs] +class PlotDepthIntegratedTimeSeriesSubtask(AnalysisTask): + """ + Plots a time series, summed or averaged over various depth ranges + + Attributes + ---------- + + regionName : str + The name of the region to plot + + inFileName : str + The file containing the time-depth data set to plot + + outFileLabel : str + The prefix on each plot and associated XML file + + fieldNameInTitle : str + The name of the field being plotted, as used in the plot title + + mpasFieldName : str + The name of the variable in the MPAS timeSeriesStatsMonthly output + + yAxisLabel : str + the y-axis label of the plotted field (including units) + + sectionName : str + A section in the config file where the colormap and contour values + are defined + + thumbnailSuffix : str + The text to be displayed under the thumbnail image, to which the + region name will be prepended + + imageCaption : str + The caption when mousing over the plot or displaying it full + screen + + galleryGroup : str + The name of the group of galleries in which this plot belongs + + groupSubtitle : str + The subtitle of the group in which this plot belongs (or blank + if none) + + groupLink : str + A short name (with no spaces) for the link to the gallery group + + galleryName : str + The name of the gallery in which this plot belongs + + controlConfig : mpas_tools.config.MpasConfigParser + The configuration options for the control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani, Greg Streletz + +
+[docs] + def __init__(self, parentTask, regionName, inFileName, outFileLabel, + fieldNameInTitle, mpasFieldName, yAxisLabel, sectionName, + thumbnailSuffix, imageCaption, galleryGroup, groupSubtitle, + groupLink, galleryName, subtaskName=None, controlConfig=None): + + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task of which this is a subtask + + regionName : str + The name of the region to plot + + inFileName : str + The file containing the time-depth data set to plot + + outFileLabel : str + The prefix on each plot and associated XML file + + fieldNameInTitle : str + The name of the field being plotted, as used in the plot title + + mpasFieldName : str + The name of the variable in the MPAS timeSeriesStatsMonthly output + + yAxisLabel : str + the y-axis label of the plotted field + + sectionName : str + a section in the config file where the colormap and contour values + are defined + + thumbnailSuffix : str + The text to be displayed under the thumbnail image, to which the + region name will be prepended + + imageCaption : str + the caption when mousing over the plot or displaying it full + screen + + galleryGroup : str + the name of the group of galleries in which this plot belongs + + groupSubtitle : str + the subtitle of the group in which this plot belongs (or blank + if none) + + groupLink : str + a short name (with no spaces) for the link to the gallery group + + galleryName : str + the name of the gallery in which this plot belongs + + subtaskName : str, optional + The name of the subtask (``plotTimeSeries<RegionName>`` by default) + + controlConfig : mpas_tools.config.MpasConfigParser, optional + The configuration options for the control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + if subtaskName is None: + suffix = regionName[0].upper() + regionName[1:] + subtaskName = 'plotDepthIntegratedTimeSeries{}'.format(suffix) + + # first, call the constructor from the base 
class (AnalysisTask) + super(PlotDepthIntegratedTimeSeriesSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName='ocean', + tags=parentTask.tags, + subtaskName=subtaskName) + + self.regionName = regionName + self.inFileName = inFileName + self.outFileLabel = outFileLabel + self.fieldNameInTitle = fieldNameInTitle + self.mpasFieldName = mpasFieldName + self.yAxisLabel = yAxisLabel + self.sectionName = sectionName + + self.controlConfig = controlConfig + + # xml/html related variables + self.thumbnailSuffix = thumbnailSuffix + self.imageCaption = imageCaption + self.galleryGroup = galleryGroup + self.groupSubtitle = groupSubtitle + self.groupLink = groupLink + self.galleryName = galleryName
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + # Authors + # ------- + # Xylar Asay-Davis, Greg Streletz + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(PlotDepthIntegratedTimeSeriesSubtask, self).setup_and_check() + + config = self.config + + if self.controlConfig is not None: + # we need to know what file to read from the control run so + # an absolute path won't work + assert(not os.path.isabs(self.inFileName)) + + baseDirectory = build_config_full_path( + self.controlConfig, 'output', 'timeSeriesSubdirectory') + + self.refFileName = '{}/{}'.format(baseDirectory, + self.inFileName) + + preprocessedReferenceRunName = config.get( + 'runs', 'preprocessedReferenceRunName') + if preprocessedReferenceRunName != 'None': + + assert(not os.path.isabs(self.inFileName)) + + baseDirectory = build_config_full_path( + config, 'output', 'timeSeriesSubdirectory') + + make_directories('{}/preprocessed'.format(baseDirectory)) + + self.preprocessedIntermediateFileName = \ + '{}/preprocessed/intermediate_{}'.format(baseDirectory, + self.inFileName) + self.preprocessedFileName = '{}/preprocessed/{}'.format( + baseDirectory, self.inFileName) + + if not os.path.isabs(self.inFileName): + baseDirectory = build_config_full_path( + config, 'output', 'timeSeriesSubdirectory') + + self.inFileName = '{}/{}'.format(baseDirectory, + self.inFileName) + + mainRunName = self.config.get('runs', 'mainRunName') + + self.filePrefix = '{}_{}_{}'.format(self.outFileLabel, + self.regionName, + mainRunName) + self.xmlFileNames = ['{}/{}.xml'.format( + self.plotsDirectory, self.filePrefix)] + + return + + def run_task(self): + """ + Compute vertical aggregates of the data and plot the time series + """ 
+ # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani, Greg Streletz + + self.logger.info("\nPlotting depth-integrated time series of " + "{}...".format(self.fieldNameInTitle)) + + config = self.config + calendar = self.calendar + + mainRunName = config.get('runs', 'mainRunName') + + plotTitles = config.getexpression('regions', 'plotTitles') + allRegionNames = config.getexpression('regions', 'regions') + regionIndex = allRegionNames.index(self.regionName) + regionNameInTitle = plotTitles[regionIndex] + + startDate = config.get('timeSeries', 'startDate') + endDate = config.get('timeSeries', 'endDate') + + # Load data + self.logger.info(' Load ocean data...') + ds = open_mpas_dataset(fileName=self.inFileName, + calendar=calendar, + variableList=[self.mpasFieldName, 'depth'], + timeVariableNames=None, + startDate=startDate, + endDate=endDate) + ds = ds.isel(nOceanRegionsTmp=regionIndex) + + depths = ds.depth.values + + divisionDepths = config.getexpression(self.sectionName, 'depths') + + # for each depth interval to plot, determine the top and bottom depth + topDepths = [0, 0] + divisionDepths + bottomDepths = [depths[-1]] + divisionDepths + [depths[-1]] + + legends = [] + for top, bottom in zip(topDepths, bottomDepths): + if bottom == depths[-1]: + legends.append('{}m-bottom'.format(top)) + else: + legends.append('{}m-{}m'.format(top, bottom)) + + # more possible symbols than we typically use + lines = ['-', '-', '--', None, None, None, None] + markers = [None, None, None, '+', 'o', '^', 'v'] + widths = [5, 3, 3, 3, 3, 3, 3] + points = [None, None, None, 300, 300, 300, 300] + + color = config.get('timeSeries', 'mainColor') + + xLabel = 'Time [years]' + yLabel = self.yAxisLabel + + title = '{}, {} \n {} (black)'.format(self.fieldNameInTitle, + regionNameInTitle, + mainRunName) + + outFileName = '{}/{}.png'.format(self.plotsDirectory, self.filePrefix) + + timeSeries = [] + lineColors = [] + lineStyles = [] + lineMarkers = [] + lineWidths = [] + maxPoints = [] 
+ legendText = [] + + for rangeIndex in range(len(topDepths)): + top = topDepths[rangeIndex] + bottom = bottomDepths[rangeIndex] + field = ds[self.mpasFieldName].where(ds.depth > top) + field = field.where(ds.depth <= bottom) + timeSeries.append(field.sum('nVertLevels')) + + lineColors.append(color) + lineStyles.append(lines[rangeIndex]) + lineMarkers.append(markers[rangeIndex]) + lineWidths.append(widths[rangeIndex]) + maxPoints.append(points[rangeIndex]) + legendText.append(legends[rangeIndex]) + + preprocessedReferenceRunName = config.get( + 'runs', 'preprocessedReferenceRunName') + if preprocessedReferenceRunName != 'None': + preprocessedInputDirectory = config.get( + 'oceanPreprocessedReference', 'baseDirectory') + + self.logger.info(' Load in preprocessed reference data...') + preprocessedFilePrefix = config.get(self.sectionName, + 'preprocessedFilePrefix') + inFilesPreprocessed = '{}/{}.{}.year*.nc'.format( + preprocessedInputDirectory, preprocessedFilePrefix, + preprocessedReferenceRunName) + + combine_time_series_with_ncrcat( + inFilesPreprocessed, self.preprocessedIntermediateFileName, + logger=self.logger) + dsPreprocessed = open_mpas_dataset( + fileName=self.preprocessedIntermediateFileName, + calendar=calendar, + timeVariableNames='xtime') + + yearStart = days_to_datetime(ds.Time.min(), calendar=calendar).year + yearEnd = days_to_datetime(ds.Time.max(), calendar=calendar).year + timeStart = date_to_days(year=yearStart, month=1, day=1, + calendar=calendar) + timeEnd = date_to_days(year=yearEnd, month=12, day=31, + calendar=calendar) + + yearEndPreprocessed = days_to_datetime(dsPreprocessed.Time.max(), + calendar=calendar).year + if yearStart <= yearEndPreprocessed: + dsPreprocessed = dsPreprocessed.sel(Time=slice(timeStart, + timeEnd)) + else: + self.logger.warning('Warning: Preprocessed time series ends ' + 'before the timeSeries startYear and will ' + 'not be plotted.') + preprocessedReferenceRunName = 'None' + + # rolling mean seems to have trouble 
with dask data sets so we + # write out the data set and read it back as a single-file data set + # (without dask) + dsPreprocessed = dsPreprocessed.drop_vars('xtime') + write_netcdf_with_fill(dsPreprocessed, self.preprocessedFileName) + dsPreprocessed = xarray.open_dataset(self.preprocessedFileName) + + if preprocessedReferenceRunName != 'None': + color = 'purple' + title = '{} \n {} (purple)'.format(title, + preprocessedReferenceRunName) + + preprocessedFieldPrefix = config.get(self.sectionName, + 'preprocessedFieldPrefix') + + movingAveragePoints = config.getint(self.sectionName, + 'movingAveragePoints') + + suffixes = ['tot'] + ['{}m'.format(depth) for depth in + divisionDepths] + ['btm'] + + # these preprocessed data are already anomalies + dsPreprocessed = compute_moving_avg(dsPreprocessed, + movingAveragePoints) + for rangeIndex in range(len(suffixes)): + variableName = '{}_{}'.format(preprocessedFieldPrefix, + suffixes[rangeIndex]) + if variableName in list(dsPreprocessed.data_vars.keys()): + timeSeries.append(dsPreprocessed[variableName]) + else: + self.logger.warning('Warning: Preprocessed variable {} ' + 'not found. 
Skipping.'.format( + variableName)) + timeSeries.extend(None) + + lineColors.append(color) + lineStyles.append(lines[rangeIndex]) + lineMarkers.append(markers[rangeIndex]) + lineWidths.append(widths[rangeIndex]) + maxPoints.append(points[rangeIndex]) + legendText.append(None) + + if self.controlConfig is not None: + + controlRunName = self.controlConfig.get('runs', 'mainRunName') + + title = '{} \n {} (red)'.format(title, controlRunName) + + self.logger.info(' Load ocean data from control run...') + controlStartYear = self.controlConfig.getint('timeSeries', + 'startYear') + controlEndYear = self.controlConfig.getint('timeSeries', + 'endYear') + controlStartDate = '{:04d}-01-01_00:00:00'.format(controlStartYear) + controlEndDate = '{:04d}-12-31_23:59:59'.format(controlEndYear) + dsRef = open_mpas_dataset(fileName=self.refFileName, + calendar=calendar, + variableList=[self.mpasFieldName, + 'depth'], + timeVariableNames=None, + startDate=controlStartDate, + endDate=controlEndDate) + dsRef = dsRef.isel(nOceanRegionsTmp=regionIndex) + + color = config.get('timeSeries', 'controlColor') + + for rangeIndex in range(len(topDepths)): + top = topDepths[rangeIndex] + bottom = bottomDepths[rangeIndex] + field = dsRef[self.mpasFieldName].where(dsRef.depth > top) + field = field.where(dsRef.depth <= bottom) + timeSeries.append(field.sum('nVertLevels')) + + lineColors.append(color) + lineStyles.append(lines[rangeIndex]) + lineMarkers.append(markers[rangeIndex]) + lineWidths.append(widths[rangeIndex]) + maxPoints.append(points[rangeIndex]) + legendText.append(None) + + if config.has_option(self.taskName, 'firstYearXTicks'): + firstYearXTicks = config.getint(self.taskName, + 'firstYearXTicks') + else: + firstYearXTicks = None + + if config.has_option(self.taskName, 'yearStrideXTicks'): + yearStrideXTicks = config.getint(self.taskName, + 'yearStrideXTicks') + else: + yearStrideXTicks = None + + fig = timeseries_analysis_plot( + config=config, dsvalues=timeSeries, calendar=calendar, + 
title=title, xlabel=xLabel, ylabel=yLabel, movingAveragePoints=None, + lineColors=lineColors, lineStyles=lineStyles, markers=lineMarkers, + lineWidths=lineWidths, legendText=legendText, maxPoints=maxPoints, + firstYearXTicks=firstYearXTicks, yearStrideXTicks=yearStrideXTicks) + + self.customize_fig(fig) + + savefig(outFileName, config) + + write_image_xml( + config=config, + filePrefix=self.filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup=self.galleryGroup, + groupLink=self.groupLink, + gallery=self.galleryName, + thumbnailDescription='{} {}'.format(self.regionName, + self.thumbnailSuffix), + imageDescription=self.imageCaption, + imageCaption=self.imageCaption) + + def customize_fig(self, fig): + """ + A function to override to customize the figure. + + fig : matplotlib.pyplot.Figure + The figure + """ + pass
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/plot_hovmoller_subtask.html b/1.11.0rc1/_modules/mpas_analysis/ocean/plot_hovmoller_subtask.html new file mode 100644 index 000000000..73a4c87da --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/plot_hovmoller_subtask.html @@ -0,0 +1,561 @@ + + + + + + mpas_analysis.ocean.plot_hovmoller_subtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.plot_hovmoller_subtask

+# -*- coding: utf-8 -*-
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+
+import xarray as xr
+import numpy as np
+import os
+import matplotlib.pyplot as plt
+from geometric_features import FeatureCollection, read_feature_collection
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.plot import plot_vertical_section_comparison, \
+    savefig, add_inset
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    decode_strings
+
+from mpas_analysis.shared.html import write_image_xml
+
+
+
+[docs] +class PlotHovmollerSubtask(AnalysisTask): + """ + Plots a time series vs. depth + + Attributes + ---------- + + controlconfig : mpas_tools.config.MpasConfigParser + Configuration options for a control run (if any) + + regionName : str + The name of the region to plot + + inFileName : str + The file containing the time-depth data set to plot + + outFileLabel : str + The prefix on each plot and associated XML file + + fieldNameInTitle : str + The name of the field being plotted, as used in the plot title + + mpasFieldName : str + The name of the variable in the MPAS timeSeriesStatsMonthly output + + unitsLabel : str + The units of the plotted field, to be displayed on color bars + + sectionName : str + A section in the config file where the colormap and contour values + are defined + + regionMaskFile : str + A geojson file with regions (including one corresponding to + ``regionName``) that will be used to make an inset + + thumbnailSuffix : str + The text to be displayed under the thumbnail image, to which the + region name will be prepended + + imageCaption : str + The caption when mousing over the plot or displaying it full + screen + + galleryGroup : str + The name of the group of galleries in which this plot belongs + + groupSubtitle : str + The subtitle of the group in which this plot belongs (or blank + if none) + + groupLink : str + A short name (with no spaces) for the link to the gallery group + + galleryName : str + The name of the gallery in which this plot belongs + + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani, Greg Streletz + +
+[docs] + def __init__(self, parentTask, regionName, inFileName, outFileLabel, + fieldNameInTitle, mpasFieldName, unitsLabel, sectionName, + thumbnailSuffix, imageCaption, galleryGroup, groupSubtitle, + groupLink, galleryName, subtaskName=None, + controlConfig=None, regionMaskFile=None): + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task of which this is a subtask + + regionName : str + The name of the region to plot + + inFileName : str + The file containing the time-depth data set to plot + + outFileLabel : str + The prefix on each plot and associated XML file + + fieldNameInTitle : str + The name of the field being plotted, as used in the plot title + + mpasFieldName : str + The name of the variable in the MPAS timeSeriesStatsMonthly output + + unitsLabel : str + the units of the plotted field, to be displayed on color bars + + sectionName : str + a section in the config file where the colormap and contour values + are defined + + thumbnailSuffix : str + The text to be displayed under the thumbnail image, to which the + region name will be prepended + + imageCaption : str + the caption when mousing over the plot or displaying it full + screen + + galleryGroup : str + the name of the group of galleries in which this plot belongs + + groupSubtitle : str + the subtitle of the group in which this plot belongs (or blank + if none) + + groupLink : str + a short name (with no spaces) for the link to the gallery group + + galleryName : str + the name of the gallery in which this plot belongs + + subtaskName : str, optional + The name of the subtask (``plotHovmoller<RegionName>`` by default) + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + + regionMaskFile : str, optional + A geojson file with regions (including one corresponding to + ``regionName``) that will be used to make an inset + """ + # Authors + # ------- + # Xylar Asay-Davis + + 
if subtaskName is None: + suffix = regionName[0].upper() + regionName[1:] + subtaskName = 'plotHovmoller{}'.format(suffix) + + # first, call the constructor from the base class (AnalysisTask) + super(PlotHovmollerSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName='ocean', + tags=parentTask.tags, + subtaskName=subtaskName) + + self.controlConfig = controlConfig + + self.regionName = regionName + self.inFileName = inFileName + self.outFileLabel = outFileLabel + self.fieldNameInTitle = fieldNameInTitle + self.mpasFieldName = mpasFieldName + self.unitsLabel = unitsLabel + self.sectionName = sectionName + self.regionMaskFile = regionMaskFile + + # xml/html related variables + self.thumbnailSuffix = thumbnailSuffix + self.imageCaption = imageCaption + self.galleryGroup = galleryGroup + self.groupSubtitle = groupSubtitle + self.groupLink = groupLink + self.galleryName = galleryName
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + # Authors + # ------- + # Xylar Asay-Davis, Greg Streletz + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(PlotHovmollerSubtask, self).setup_and_check() + + config = self.config + + if self.controlConfig is not None: + assert(not os.path.isabs(self.inFileName)) + baseDirectory = build_config_full_path( + self.controlConfig, 'output', 'timeSeriesSubdirectory') + + self.controlFileName = '{}/{}'.format(baseDirectory, + self.inFileName) + else: + self.controlFileName = None + + if not os.path.isabs(self.inFileName): + baseDirectory = build_config_full_path( + config, 'output', 'timeSeriesSubdirectory') + + self.inFileName = '{}/{}'.format(baseDirectory, + self.inFileName) + + mainRunName = self.config.get('runs', 'mainRunName') + + self.filePrefix = '{}_{}_{}'.format(self.outFileLabel, + self.regionName, + mainRunName) + self.xmlFileNames = ['{}/{}.xml'.format( + self.plotsDirectory, self.filePrefix)] + + return + + def run_task(self): + """ + Make the Hovmoller plot from the time series. + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani, Greg Streletz + + self.logger.info("\nPlotting {} time series vs. 
depth...".format( + self.fieldNameInTitle)) + + config = self.config + + mainRunName = config.get('runs', 'mainRunName') + + self.logger.info(' Load ocean data...') + ds = xr.open_dataset(self.inFileName) + + if 'regionNames' in ds.coords: + + allRegionNames = decode_strings(ds.regionNames) + regionIndex = allRegionNames.index(self.regionName) + regionNameInTitle = self.regionName.replace('_', ' ') + regionDim = ds.regionNames.dims[0] + else: + plotTitles = config.getexpression('regions', 'plotTitles') + allRegionNames = config.getexpression('regions', 'regions') + regionIndex = allRegionNames.index(self.regionName) + regionNameInTitle = plotTitles[regionIndex] + regionDim = 'nOceanRegionsTmp' + + ds = ds.isel(**{regionDim: regionIndex}) + + # Note: restart file, not a mesh file because we need refBottomDepth, + # not in a mesh file + try: + restartFile = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-O restart file found: need at least one ' + 'restart file for plotting time series vs. 
depth') + + # Define/read in general variables + self.logger.info(' Read in depth...') + with xr.open_dataset(restartFile) as dsRestart: + # reference depth [m] + depths = dsRestart.refBottomDepth.values + z = np.zeros(depths.shape) + z[0] = -0.5 * depths[0] + z[1:] = -0.5 * (depths[0:-1] + depths[1:]) + z = xr.DataArray(dims='nVertLevels', data=z) + + Time = ds.Time + field = ds[self.mpasFieldName] + + # drop any NaN values, because this causes issues with rolling averages + mask = field.notnull().all(dim='Time') + + xLabel = 'Time (years)' + yLabel = 'Depth (m)' + + title = '{}\n{}'.format(self.fieldNameInTitle, regionNameInTitle) + + outFileName = '{}/{}.png'.format(self.plotsDirectory, self.filePrefix) + + sectionName = self.sectionName + if config.has_option(sectionName, 'firstYearXTicks'): + firstYearXTicks = config.getint(sectionName, 'firstYearXTicks') + else: + firstYearXTicks = None + + if config.has_option(sectionName, 'yearStrideXTicks'): + yearStrideXTicks = config.getint(sectionName, 'yearStrideXTicks') + else: + yearStrideXTicks = None + + movingAveragePoints = config.getint( + sectionName, 'movingAveragePoints') + + if config.has_option(sectionName, 'yLim'): + yLim = config.getexpression(sectionName, 'yLim') + else: + yLim = None + + if self.controlConfig is None: + refField = None + diff = None + refTitle = None + diffTitle = None + z = z.where(mask, drop=True) + field = field.where(mask, drop=True) + else: + controlConfig = self.controlConfig + dsRef = xr.open_dataset(self.controlFileName) + + if 'regionNames' in dsRef.coords: + allRegionNames = decode_strings(dsRef.regionNames) + regionIndex = allRegionNames.index(self.regionName) + regionNameInTitle = self.regionName.replace('_', ' ') + regionDim = dsRef.regionNames.dims[0] + else: + plotTitles = controlConfig.getexpression('regions', + 'plotTitles') + allRegionNames = controlConfig.getexpression('regions', + 'regions') + regionIndex = allRegionNames.index(self.regionName) + regionNameInTitle = 
plotTitles[regionIndex] + regionDim = 'nOceanRegionsTmp' + + dsRef = dsRef.isel(**{regionDim: regionIndex}) + refField = dsRef[self.mpasFieldName] + # drop any NaN values, because this causes issues with rolling + # averages + refMask = refField.notnull().all(dim='Time') + # if the masks differ, we want only locations where both are valid + mask = np.logical_and(mask, refMask) + z = z.where(mask, drop=True) + field = field.where(mask, drop=True) + refField = refField.where(mask, drop=True) + assert (field.shape == refField.shape) + # make sure the start and end time sare the same + assert(int(field.Time.values[0]) == int(refField.Time.values[0])) + assert(int(field.Time.values[-1]) == int(refField.Time.values[-1])) + # we're seeing issues with slightly different times between runs + # so let's copy them + refField['Time'] = field.Time + diff = field - refField + assert (field.shape == diff.shape) + refTitle = self.controlConfig.get('runs', 'mainRunName') + diffTitle = 'Main - Control' + + if config.has_option(sectionName, 'titleFontSize'): + titleFontSize = config.getint(sectionName, 'titleFontSize') + else: + titleFontSize = None + + if config.has_option(sectionName, 'axisFontSize'): + axisFontSize = config.getint(sectionName, 'axisFontSize') + else: + axisFontSize = None + + if config.has_option(sectionName, 'defaultFontSize'): + defaultFontSize = config.getint(sectionName, 'defaultFontSize') + else: + defaultFontSize = None + + fig, _, suptitle = plot_vertical_section_comparison( + config, field, refField, diff, self.sectionName, xCoords=Time, + zCoord=z, colorbarLabel=self.unitsLabel, title=title, + modelTitle=mainRunName, refTitle=refTitle, diffTitle=diffTitle, + xlabels=xLabel, ylabel=yLabel, lineWidth=1, xCoordIsTime=True, + movingAveragePoints=movingAveragePoints, calendar=self.calendar, + firstYearXTicks=firstYearXTicks, yearStrideXTicks=yearStrideXTicks, + yLim=yLim, invertYAxis=False, titleFontSize=titleFontSize, + axisFontSize=axisFontSize, 
defaultFontSize=defaultFontSize) + + if self.regionMaskFile is not None: + + # shift the super-title a little to the left to make room for the + # inset + pos = suptitle.get_position() + suptitle.set_position((pos[0] - 0.05, pos[1])) + + fcAll = read_feature_collection(self.regionMaskFile) + + fc = FeatureCollection() + for feature in fcAll.features: + if feature['properties']['name'] == self.regionName: + fc.add_feature(feature) + break + + add_inset(fig, fc, width=1.0, height=1.0, xbuffer=0.1, ybuffer=0.1) + + savefig(outFileName, config, tight=False) + + else: + savefig(outFileName, config) + + write_image_xml( + config=config, + filePrefix=self.filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup=self.galleryGroup, + groupSubtitle=self.groupSubtitle, + groupLink=self.groupLink, + gallery=self.galleryName, + thumbnailDescription='{} {}'.format( + regionNameInTitle, self.thumbnailSuffix), + imageDescription=self.imageCaption, + imageCaption=self.imageCaption)
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/streamfunction_moc.html b/1.11.0rc1/_modules/mpas_analysis/ocean/streamfunction_moc.html new file mode 100644 index 000000000..28437495d --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/streamfunction_moc.html @@ -0,0 +1,1837 @@ + + + + + + mpas_analysis.ocean.streamfunction_moc — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.streamfunction_moc

+# -*- coding: utf-8 -*-
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+
+import xarray as xr
+import numpy as np
+import netCDF4
+import os
+from mpas_tools.ocean.moc import add_moc_southern_boundary_transects
+from mpas_tools.io import write_netcdf
+
+from mpas_analysis.shared.constants.constants import m3ps_to_Sv
+from mpas_analysis.shared.plot import plot_vertical_section_comparison, \
+    timeseries_analysis_plot, savefig
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories, get_files_year_month, get_region_mask
+
+from mpas_analysis.shared.io import open_mpas_dataset
+
+from mpas_analysis.shared.timekeeping.utility import days_to_datetime
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.html import write_image_xml
+from mpas_analysis.shared.climatology.climatology import \
+    get_climatology_op_directory
+
+from mpas_analysis.shared.regions import ComputeRegionMasksSubtask
+
+
+
+[docs] +class StreamfunctionMOC(AnalysisTask): + """ + Computation and plotting of model meridional overturning circulation. + Will eventually support: + + * MOC streamfunction, post-processed + * MOC streamfunction, from MOC analysis member + * MOC time series (max value at 24.5N), post-processed + * MOC time series (max value at 24.5N), from MOC analysis member + """ + # Authors + # ------- + # Milena Veneziani, Mark Petersen, Phillip Wolfram, Xylar Asay-Davis + +
+[docs] + def __init__(self, config, mpasClimatologyTask, controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(StreamfunctionMOC, self).__init__( + config=config, + taskName='streamfunctionMOC', + componentName='ocean', + tags=['streamfunction', 'moc', 'climatology', 'timeSeries', + 'publicObs']) + + maskSubtask = ComputeMOCMasksSubtask(self) + self.add_subtask(maskSubtask) + + computeClimSubtask = ComputeMOCClimatologySubtask( + self, mpasClimatologyTask, maskSubtask) + plotClimSubtask = PlotMOCClimatologySubtask(self, controlConfig) + plotClimSubtask.run_after(computeClimSubtask) + + startYear = config.getint('timeSeries', 'startYear') + endYear = config.getint('timeSeries', 'endYear') + + years = range(startYear, endYear + 1) + + # in the end, we'll combine all the time series into one, but we create + # this task first so it's easier to tell it to run after all the + # compute tasks + combineTimeSeriesSubtask = CombineMOCTimeSeriesSubtask( + self, startYears=years, endYears=years) + + # run one subtask per year + for year in years: + computeTimeSeriesSubtask = ComputeMOCTimeSeriesSubtask( + self, startYear=year, endYear=year, maskSubtask=maskSubtask) + combineTimeSeriesSubtask.run_after(computeTimeSeriesSubtask) + + plotTimeSeriesSubtask = PlotMOCTimeSeriesSubtask(self, controlConfig) + plotTimeSeriesSubtask.run_after(combineTimeSeriesSubtask)
+
+ + + +class ComputeMOCMasksSubtask(ComputeRegionMasksSubtask): + """ + An analysis subtasks for computing cell masks and southern transects for + MOC regions + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask): + + """ + Construct the analysis task and adds it as a subtask of the + ``parentTask``. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + """ + # Authors + # ------- + # Xylar Asay-Davis + + config = parentTask.config + meshName = config.get('input', 'mpasMeshName') + regionGroup = 'MOC Basins' + + subprocessCount = config.getint('execute', 'parallelTaskCount') + + # call the constructor from the base class (ComputeRegionMasksSubtask) + super().__init__( + parentTask, regionGroup=regionGroup, meshName=meshName, + subprocessCount=subprocessCount, + useMpasMaskCreator=False) + + self.maskAndTransectFileName = None + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + IOError : + If a restart file is not available from which to read mesh + information or if no history files are available from which to + compute the climatology in the desired time range. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the parent class + super().setup_and_check() + + self.maskAndTransectFileName = get_region_mask( + self.config, '{}_mocBasinsAndTransects{}.nc'.format( + self.meshName, self.date)) + + def run_task(self): + """ + Compute the requested climatologies + """ + # Authors + # ------- + # Xylar Asay-Davis + + if os.path.exists(self.maskAndTransectFileName): + return + + # call ComputeRegionMasksSubtask.run_task() first + super().run_task() + + config = self.config + + dsMesh = xr.open_dataset(self.obsFileName) + dsMask = xr.open_dataset(self.maskFileName) + + dsMasksAndTransects = add_moc_southern_boundary_transects( + dsMask, dsMesh, logger=self.logger) + + write_netcdf(dsMasksAndTransects, self.maskAndTransectFileName, + char_dim_name='StrLen') +# }}} + + +class ComputeMOCClimatologySubtask(AnalysisTask): + """ + Computation of a climatology of the model meridional overturning + circulation. + + Attributes + ---------- + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + """ + # Authors + # ------- + # Milena Veneziani, Mark Petersen, Phillip Wolfram, Xylar Asay-Davis + + def __init__(self, parentTask, mpasClimatologyTask, maskSubtask): + """ + Construct the analysis task. 
+ + Parameters + ---------- + parentTask : ``StreamfunctionMOC`` + The main task of which this is a subtask + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + maskSubtask : mpas_analysis.ocean.streamfunction_moc.ComputeMOCMasksSubtask + The subtask for computing MOC region masks that runs before this + subtask + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(ComputeMOCClimatologySubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='computeMOCClimatology') + + self.mpasClimatologyTask = mpasClimatologyTask + self.run_after(mpasClimatologyTask) + self.maskSubtask = maskSubtask + self.run_after(maskSubtask) + + parentTask.add_subtask(self) + + self.includeBolus = None + self.includeSubmesoscale = None + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. 
+ + Raises + ------ + ValueError + if timeSeriesStatsMonthly is not enabled in the MPAS run + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(ComputeMOCClimatologySubtask, self).setup_and_check() + + self.startYear = self.mpasClimatologyTask.startYear + self.startDate = self.mpasClimatologyTask.startDate + self.endYear = self.mpasClimatologyTask.endYear + self.endDate = self.mpasClimatologyTask.endDate + + config = self.config + + self.mocAnalysisMemberEnabled = self.check_analysis_enabled( + analysisOptionName='config_am_mocstreamfunction_enable', + raiseException=False) + + self.sectionName = 'streamfunctionMOC' + + self.usePostprocessing = config.getexpression( + self.sectionName, 'usePostprocessingScript') + + if not self.usePostprocessing and self.mocAnalysisMemberEnabled: + variableList = \ + ['timeMonthly_avg_mocStreamvalLatAndDepth', + 'timeMonthly_avg_mocStreamvalLatAndDepthRegion'] + else: + variableList = ['timeMonthly_avg_normalVelocity', + 'timeMonthly_avg_vertVelocityTop', + 'timeMonthly_avg_layerThickness'] + + # Add the bolus velocity if GM is enabled + try: + # the new name + self.includeBolus = self.namelist.getbool('config_use_gm') + except KeyError: + # the old name + self.includeBolus = self.namelist.getbool( + 'config_use_standardgm') + try: + self.includeSubmesoscale = \ + self.namelist.getbool('config_submesoscale_enable') + except KeyError: + # an old run without submesoscale + self.includeSubmesoscale = False + + if self.includeBolus: + variableList.extend( + ['timeMonthly_avg_normalGMBolusVelocity', + 'timeMonthly_avg_vertGMBolusVelocityTop']) + + if self.includeSubmesoscale: + variableList.extend( + ['timeMonthly_avg_normalMLEvelocity', + 
'timeMonthly_avg_vertMLEBolusVelocityTop']) + + + self.mpasClimatologyTask.add_variables(variableList=variableList, + seasons=['ANN']) + + def run_task(self): + """ + Process MOC analysis member data if available, or compute MOC at + post-processing if not. + """ + # Authors + # ------- + # Milena Veneziani, Mark Petersen, Phillip J. Wolfram, Xylar Asay-Davis + + self.logger.info("Computing climatology of Meridional Overturning " + "Circulation (MOC)...") + + # **** Compute MOC **** + if not self.usePostprocessing and self.mocAnalysisMemberEnabled: + self._compute_moc_climo_analysismember() + else: + self._compute_moc_climo_postprocess() + + def _compute_moc_climo_analysismember(self): + """compute mean MOC streamfunction from analysis member""" + + config = self.config + + outputDirectory = get_climatology_op_directory(config) + + make_directories(outputDirectory) + + outputFileName = '{}/mocStreamfunction_years{:04d}-{:04d}.nc'.format( + outputDirectory, self.startYear, + self.endYear) + + if os.path.exists(outputFileName): + return + + regionNames = config.getexpression(self.sectionName, 'regionNames') + regionNames.append('Global') + + # Read in depth and bin latitudes + try: + restartFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-O restart file found: need at least ' + 'one for MHT calcuation') + + with xr.open_dataset(restartFileName) as dsRestart: + refBottomDepth = dsRestart.refBottomDepth.values + + nVertLevels = len(refBottomDepth) + refLayerThickness = np.zeros(nVertLevels) + refLayerThickness[0] = refBottomDepth[0] + refLayerThickness[1:nVertLevels] = \ + refBottomDepth[1:nVertLevels] - refBottomDepth[0:nVertLevels - 1] + + refZMid = refBottomDepth - 0.5 * refLayerThickness + + binBoundaryMocStreamfunction = None + # first try timeSeriesStatsMonthly for bin boundaries, then try + # mocStreamfunctionOutput stream as a backup option + for streamName in ['timeSeriesStatsMonthlyOutput', + 
'mocStreamfunctionOutput']: + try: + inputFileName = self.historyStreams.readpath(streamName)[0] + except ValueError: + raise IOError('At least one file from stream {} is needed ' + 'to compute MOC'.format(streamName)) + + with xr.open_dataset(inputFileName) as ds: + if 'binBoundaryMocStreamfunction' in ds.data_vars: + binBoundaryMocStreamfunction = \ + ds.binBoundaryMocStreamfunction.values + break + + if binBoundaryMocStreamfunction is None: + raise ValueError('Could not find binBoundaryMocStreamfunction in ' + 'either timeSeriesStatsMonthlyOutput or ' + 'mocStreamfunctionOutput streams') + + binBoundaryMocStreamfunction = np.rad2deg(binBoundaryMocStreamfunction) + + # Compute and plot annual climatology of MOC streamfunction + self.logger.info('\n Compute climatology of MOC streamfunction...') + self.logger.info(' Load data...') + + climatologyFileName = self.mpasClimatologyTask.get_file_name( + season='ANN') + annualClimatology = xr.open_dataset(climatologyFileName) + if 'Time' in annualClimatology.dims: + annualClimatology = annualClimatology.isel(Time=0) + + # rename some variables for convenience + annualClimatology = annualClimatology.rename( + {'timeMonthly_avg_mocStreamvalLatAndDepth': + 'avgMocStreamfunGlobal', + 'timeMonthly_avg_mocStreamvalLatAndDepthRegion': + 'avgMocStreamfunRegional'}) + + dsMask = xr.open_dataset(self.maskSubtask.maskAndTransectFileName) + regionIndices = {} + for iRegion in range(dsMask.sizes['nRegions']): + regionInFile = str(dsMask.regionNames[iRegion].values.astype('U')) + region = regionInFile.replace('_MOC', '') + regionIndices[region] = iRegion + + # Create dictionary for MOC climatology (NB: need this form + # in order to convert it to xarray dataset later in the script) + depth = refZMid + lat = {} + moc = {} + for region in regionNames: + self.logger.info(' Compute {} MOC...'.format(region)) + if region == 'Global': + mocTop = annualClimatology.avgMocStreamfunGlobal.values + else: + indRegion = regionIndices[region] + 
mocVar = annualClimatology.avgMocStreamfunRegional + mocTop = mocVar.isel(nRegions=indRegion).values + # Store computed MOC to dictionary + lat[region] = binBoundaryMocStreamfunction + moc[region] = mocTop + + # Save to file + self.logger.info(' Save global and regional MOC to file...') + ncFile = netCDF4.Dataset(outputFileName, mode='w') + # create dimensions + ncFile.createDimension('nz', nVertLevels) + for region in regionNames: + latBins = lat[region] + mocTop = moc[region] + ncFile.createDimension('nx{}'.format(region), len(latBins)) + # create variables + x = ncFile.createVariable('lat{}'.format(region), 'f4', + ('nx{}'.format(region),)) + x.description = 'latitude bins for MOC {}'\ + ' streamfunction'.format(region) + x.units = 'degrees (-90 to 90)' + y = ncFile.createVariable('moc{}'.format(region), 'f4', + ('nz', 'nx{}'.format(region))) + y.description = 'MOC {} streamfunction, annual'\ + ' climatology'.format(region) + y.units = 'Sv (10^6 m^3/s)' + # save variables + x[:] = latBins + y[:, :] = mocTop + depthVar = ncFile.createVariable('depth', 'f4', ('nz',)) + depthVar.description = 'depth' + depthVar.units = 'meters' + depthVar[:] = depth + ncFile.close() + + def _compute_moc_climo_postprocess(self): + """compute mean MOC streamfunction as a post-process""" + + config = self.config + outputDirectory = get_climatology_op_directory(config) + + make_directories(outputDirectory) + + outputFileName = '{}/mocStreamfunction_years{:04d}-{:04d}.nc'.format( + outputDirectory, self.startYear, + self.endYear) + + if os.path.exists(outputFileName): + return + + dvEdge, areaCell, refBottomDepth, latCell, nVertLevels, \ + refTopDepth, refLayerThickness, cellsOnEdge = \ + _load_mesh(self.runStreams) + + regionNames = config.getexpression(self.sectionName, 'regionNames') + + # Load basin region related variables and save them to dictionary + mpasMeshName = config.get('input', 'mpasMeshName') + + masksFileName = self.maskSubtask.maskAndTransectFileName + dictRegion = 
_build_region_mask_dict( + masksFileName, regionNames, mpasMeshName, self.logger) + + # Add Global regionCellMask=1 everywhere to make the algorithm + # for the global moc similar to that of the regional moc + dictRegion['Global'] = { + 'cellMask': np.ones(np.size(latCell))} + regionNames.append('Global') + + # Compute and plot annual climatology of MOC streamfunction + self.logger.info('\n Compute post-processed climatological of MOC ' + 'streamfunction...') + + self.logger.info(' Load data...') + + climatologyFileName = self.mpasClimatologyTask.get_file_name( + season='ANN') + annualClimatology = xr.open_dataset(climatologyFileName) + if 'Time' in annualClimatology.dims: + annualClimatology = annualClimatology.isel(Time=0) + + # rename some variables for convenience + annualClimatology = annualClimatology.rename( + {'timeMonthly_avg_normalVelocity': 'avgNormalVelocity', + 'timeMonthly_avg_vertVelocityTop': 'avgVertVelocityTop', + 'timeMonthly_avg_layerThickness': 'layerThickness'}) + + if self.includeBolus: + annualClimatology['avgNormalVelocity'] = \ + annualClimatology['avgNormalVelocity'] + \ + annualClimatology['timeMonthly_avg_normalGMBolusVelocity'] + + annualClimatology['avgVertVelocityTop'] = \ + annualClimatology['avgVertVelocityTop'] + \ + annualClimatology['timeMonthly_avg_vertGMBolusVelocityTop'] + + if self.includeSubmesoscale: + annualClimatology['avgNormalVelocity'] = \ + annualClimatology['avgNormalVelocity'] + \ + annualClimatology['timeMonthly_avg_normalMLEvelocity'] + + annualClimatology['avgVertVelocityTop'] = \ + annualClimatology['avgVertVelocityTop'] + \ + annualClimatology['timeMonthly_avg_vertMLEBolusVelocityTop'] + + # Convert to numpy arrays + # (can result in a memory error for large array size) + horizontalVel = annualClimatology.avgNormalVelocity.values + verticalVel = annualClimatology.avgVertVelocityTop.values + velArea = verticalVel * areaCell[:, np.newaxis] + layerThickness = annualClimatology.layerThickness.values + + # Create 
dictionary for MOC climatology (NB: need this form + # in order to convert it to xarray dataset later in the script) + depth = refTopDepth + lat = {} + moc = {} + for region in regionNames: + self.logger.info(' Compute {} MOC...'.format(region)) + self.logger.info(' Compute transport through region ' + 'southern transect...') + if region == 'Global': + transportZ = np.zeros(nVertLevels) + else: + maxEdgesInTransect = \ + dictRegion[region]['maxEdgesInTransect'] + transectEdgeGlobalIDs = \ + dictRegion[region]['transectEdgeGlobalIDs'] + transectEdgeMaskSigns = \ + dictRegion[region]['transectEdgeMaskSigns'] + transportZ = _compute_transport(maxEdgesInTransect, + transectEdgeGlobalIDs, + transectEdgeMaskSigns, + nVertLevels, dvEdge, + horizontalVel, + layerThickness, + cellsOnEdge) + + regionCellMask = dictRegion[region]['cellMask'] + latBinSize = \ + config.getfloat('streamfunctionMOC{}'.format(region), + 'latBinSize') + if region == 'Global': + latBins = np.arange(-90.0, 90.1, latBinSize) + else: + indRegion = dictRegion[region]['indices'] + latBins = latCell[indRegion] + latBins = np.arange(np.amin(latBins), + np.amax(latBins) + latBinSize, + latBinSize) + mocTop = _compute_moc(latBins, nVertLevels, latCell, + regionCellMask, transportZ, velArea) + + # Store computed MOC to dictionary + lat[region] = latBins + moc[region] = mocTop + + # Save to file + self.logger.info(' Save global and regional MOC to file...') + ncFile = netCDF4.Dataset(outputFileName, mode='w') + # create dimensions + ncFile.createDimension('nz', len(refTopDepth)) + for region in regionNames: + latBins = lat[region] + mocTop = moc[region] + ncFile.createDimension('nx{}'.format(region), len(latBins)) + # create variables + x = ncFile.createVariable('lat{}'.format(region), 'f4', + ('nx{}'.format(region),)) + x.description = 'latitude bins for MOC {}'\ + ' streamfunction'.format(region) + x.units = 'degrees (-90 to 90)' + y = ncFile.createVariable('moc{}'.format(region), 'f4', + ('nz', 
'nx{}'.format(region))) + y.description = 'MOC {} streamfunction, annual'\ + ' climatology'.format(region) + y.units = 'Sv (10^6 m^3/s)' + # save variables + x[:] = latBins + y[:, :] = mocTop + depthVar = ncFile.createVariable('depth', 'f4', ('nz',)) + depthVar.description = 'depth' + depthVar.units = 'meters' + depthVar[:] = depth + ncFile.close() + + +class PlotMOCClimatologySubtask(AnalysisTask): + """ + Computation of a climatology of the model meridional overturning + circulation. + """ + # Authors + # ------- + # Milena Veneziani, Mark Petersen, Phillip Wolfram, Xylar Asay-Davis + + def __init__(self, parentTask, controlConfig): + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``StreamfunctionMOC`` + The main task of which this is a subtask + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(PlotMOCClimatologySubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='plotMOCClimatology') + + parentTask.add_subtask(self) + + self.controlConfig = controlConfig + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. 
+ + Raises + ------ + ValueError + if timeSeriesStatsMonthly is not enabled in the MPAS run + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(PlotMOCClimatologySubtask, self).setup_and_check() + + config = self.config + + self.startYear = config.getint('climatology', 'startYear') + self.endYear = config.getint('climatology', 'endYear') + + self.sectionName = 'streamfunctionMOC' + + self.xmlFileNames = [] + self.filePrefixes = {} + + mainRunName = config.get('runs', 'mainRunName') + + self.regionNames = ['Global'] + config.getexpression(self.sectionName, + 'regionNames') + + for region in self.regionNames: + filePrefix = 'moc{}_{}_years{:04d}-{:04d}'.format( + region, mainRunName, + self.startYear, self.endYear) + + self.xmlFileNames.append('{}/{}.xml'.format(self.plotsDirectory, + filePrefix)) + self.filePrefixes[region] = filePrefix + + def run_task(self): + """ + Plot the MOC climatology + """ + # Authors + # ------- + # Milena Veneziani, Mark Petersen, Phillip J. 
Wolfram, Xylar Asay-Davis + + self.logger.info("\nPlotting streamfunction of Meridional Overturning " + "Circulation (MOC)...") + + config = self.config + + depth, lat, moc = self._load_moc(config) + + if self.controlConfig is None: + refTitle = None + diffTitle = None + else: + refDepth, refLat, refMOC = self._load_moc(self.controlConfig) + refTitle = self.controlConfig.get('runs', 'mainRunName') + diffTitle = 'Main - Control' + + # **** Plot MOC **** + # Define plotting variables + mainRunName = config.get('runs', 'mainRunName') + movingAveragePointsClimatological = config.getint( + self.sectionName, 'movingAveragePointsClimatological') + colorbarLabel = '[Sv]' + xLabel = 'latitude [deg]' + yLabel = 'depth [m]' + + for region in self.regionNames: + self.logger.info(' Plot climatological {} MOC...'.format(region)) + title = '{} MOC (ANN, years {:04d}-{:04d})'.format( + region, self.startYear, + self.endYear) + filePrefix = self.filePrefixes[region] + outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix) + + x = lat[region] + z = depth + regionMOC = moc[region] + # Subset lat range + minLat = config.getexpression('streamfunctionMOC{}'.format(region), + 'latBinMin') + maxLat = config.getexpression('streamfunctionMOC{}'.format(region), + 'latBinMax') + indLat = np.logical_and(x >= minLat, x <= maxLat) + x = x.where(indLat, drop=True) + regionMOC = regionMOC.where(indLat, drop=True) + if self.controlConfig is None: + refRegionMOC = None + diff = None + else: + # the coords of the ref MOC won't necessarily match this MOC + # so we need to interpolate + refRegionMOC = _interp_moc(x, z, regionMOC, refLat[region], + refDepth, refMOC[region]) + + diff = regionMOC - refRegionMOC + + plot_vertical_section_comparison( + config, regionMOC, refRegionMOC, diff, xCoords=x, zCoord=z, + colorMapSectionName='streamfunctionMOC{}'.format(region), + colorbarLabel=colorbarLabel, + title=title, + modelTitle=mainRunName, + refTitle=refTitle, + diffTitle=diffTitle, + 
xlabels=xLabel, + ylabel=yLabel, + movingAveragePoints=movingAveragePointsClimatological, + maxTitleLength=70) + + savefig(outFileName, config) + + caption = '{} Meridional Overturning Streamfunction'.format(region) + write_image_xml( + config=config, + filePrefix=filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup='Meridional Overturning Streamfunction', + groupLink='moc', + thumbnailDescription=region, + imageDescription=caption, + imageCaption=caption) + + def _load_moc(self, config): + """compute mean MOC streamfunction from analysis member""" + + startYear = config.getint('climatology', 'startYear') + endYear = config.getint('climatology', 'endYear') + + outputDirectory = get_climatology_op_directory(config) + + make_directories(outputDirectory) + + inputFileName = '{}/mocStreamfunction_years{:04d}-{:04d}.nc'.format( + outputDirectory, startYear, + endYear) + + # Read from file + ds = xr.open_dataset(inputFileName) + depth = ds['depth'] + lat = {} + moc = {} + for region in self.regionNames: + lat[region] = ds['lat{}'.format(region)] + moc[region] = ds['moc{}'.format(region)] + return depth, lat, moc + + +class ComputeMOCTimeSeriesSubtask(AnalysisTask): + """ + Computation of a time series of max Atlantic MOC at 26.5N. + """ + # Authors + # ------- + # Milena Veneziani, Mark Petersen, Phillip Wolfram, Xylar Asay-Davis + + def __init__(self, parentTask, startYear, endYear, maskSubtask): + """ + Construct the analysis task. 
+ + Parameters + ---------- + parentTask : mpas_analysis.ocean.streamfunction_moc.StreamfunctionMOC + The main task of which this is a subtask + + startYear : int + The start year of the time series + + endYear : int + The end year of the time series + + maskSubtask : mpas_analysis.ocean.streamfunction_moc.ComputeMOCMasksSubtask + The subtask for computing MOC region masks that runs before this + subtask + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(ComputeMOCTimeSeriesSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='computeMOCTimeSeries_{:04d}-{:04d}'.format( + startYear, endYear)) + + self.maskSubtask = maskSubtask + self.run_after(maskSubtask) + + parentTask.add_subtask(self) + self.startYear = startYear + self.endYear = endYear + + self.includeBolus = None + self.includeSubmesoscale = None + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. 
+ + Raises + ------ + ValueError + if timeSeriesStatsMonthly is not enabled in the MPAS run + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(ComputeMOCTimeSeriesSubtask, self).setup_and_check() + + config = self.config + + self.mocAnalysisMemberEnabled = self.check_analysis_enabled( + analysisOptionName='config_am_mocstreamfunction_enable', + raiseException=False) + + self.sectionName = 'streamfunctionMOC' + + self.usePostprocessing = config.getexpression( + self.sectionName, 'usePostprocessingScript') + + if not self.usePostprocessing and self.mocAnalysisMemberEnabled: + self.variableList = \ + ['timeMonthly_avg_mocStreamvalLatAndDepth', + 'timeMonthly_avg_mocStreamvalLatAndDepthRegion'] + else: + self.variableList = ['timeMonthly_avg_normalVelocity', + 'timeMonthly_avg_vertVelocityTop', + 'timeMonthly_avg_layerThickness'] + + # Add the bolus velocity if GM is enabled + try: + # the new name + self.includeBolus = self.namelist.getbool('config_use_gm') + except KeyError: + # the old name + self.includeBolus = self.namelist.getbool( + 'config_use_standardgm') + + try: + self.includeSubmesoscale = \ + self.namelist.getbool('config_submesoscale_enable') + except KeyError: + # an old run without submesoscale + self.includeSubmesoscale = False + + if self.includeBolus: + self.variableList.extend( + ['timeMonthly_avg_normalGMBolusVelocity', + 'timeMonthly_avg_vertGMBolusVelocityTop']) + + if self.includeSubmesoscale: + self.variableList.extend( + ['timeMonthly_avg_normalMLEvelocity', + 'timeMonthly_avg_vertMLEBolusVelocityTop']) + + def run_task(self): + """ + Process MOC analysis member data if available, or compute MOC at + post-processing if not. 
+ """ + # Authors + # ------- + # Milena Veneziani, Mark Petersen, Phillip J. Wolfram, Xylar Asay-Davis + + self.logger.info("\nCompute time series of Meridional Overturning " + "Circulation (MOC)...") + + self.startDate = '{:04d}-01-01_00:00:00'.format(self.startYear) + self.endDate = '{:04d}-12-31_23:59:59'.format(self.endYear) + + # **** Compute MOC **** + if not self.usePostprocessing and self.mocAnalysisMemberEnabled: + self._compute_moc_time_series_analysismember() + else: + self._compute_moc_time_series_postprocess() + + def _compute_moc_time_series_analysismember(self): + """compute MOC time series from analysis member""" + + # Compute and plot time series of Atlantic MOC at 26.5N (RAPID array) + self.logger.info('\n Compute Atlantic MOC time series from analysis ' + 'member...') + self.logger.info(' Load data...') + + outputDirectory = '{}/moc/'.format( + build_config_full_path(self.config, 'output', + 'timeseriesSubdirectory')) + try: + os.makedirs(outputDirectory) + except OSError: + pass + + outputFileName = '{}/mocTimeSeries_{:04d}-{:04d}.nc'.format( + outputDirectory, self.startYear, self.endYear) + + # Get bin latitudes and index of 26.5N + binBoundaryMocStreamfunction = None + # first try timeSeriesStatsMonthly for bin boundaries, then try + # mocStreamfunctionOutput stream as a backup option + for streamName in ['timeSeriesStatsMonthlyOutput', + 'mocStreamfunctionOutput']: + try: + inputFileName = self.historyStreams.readpath(streamName)[0] + except ValueError: + raise IOError('At least one file from stream {} is needed ' + 'to compute MOC'.format(streamName)) + + with xr.open_dataset(inputFileName) as ds: + if 'binBoundaryMocStreamfunction' in ds.data_vars: + binBoundaryMocStreamfunction = \ + ds.binBoundaryMocStreamfunction.values + break + + if binBoundaryMocStreamfunction is None: + raise ValueError('Could not find binBoundaryMocStreamfunction in ' + 'either timeSeriesStatsMonthlyOutput or ' + 'mocStreamfunctionOutput streams') + + 
binBoundaryMocStreamfunction = np.rad2deg(binBoundaryMocStreamfunction) + dLat = binBoundaryMocStreamfunction - 26.5 + indlat26 = np.where(np.abs(dLat) == np.amin(np.abs(dLat))) + + inputFiles = sorted(self.historyStreams.readpath( + streamName, startDate=self.startDate, + endDate=self.endDate, calendar=self.calendar)) + + years, months = get_files_year_month(inputFiles, + self.historyStreams, + 'timeSeriesStatsMonthlyOutput') + + mocRegion = np.zeros(len(inputFiles)) + moc = None + refTopDepth = None + times = np.zeros(len(inputFiles)) + computed = np.zeros(len(inputFiles), bool) + + continueOutput = os.path.exists(outputFileName) + if continueOutput: + self.logger.info(' Read in previously computed MOC time series') + with open_mpas_dataset(fileName=outputFileName, + calendar=self.calendar, + timeVariableNames=None, + variableList=['mocAtlantic26', + 'mocAtlantic'], + startDate=self.startDate, + endDate=self.endDate) as dsMOCIn: + + dsMOCIn.load() + + if moc is None: + sizes = dsMOCIn.sizes + moc = np.zeros((len(inputFiles), sizes['depth'], + sizes['lat'])) + refTopDepth = dsMOCIn.depth.values + + # first, copy all computed data + for inIndex in range(dsMOCIn.sizes['Time']): + + mask = np.logical_and( + dsMOCIn.year[inIndex].values == years, + dsMOCIn.month[inIndex].values == months) + + outIndex = np.where(mask)[0][0] + + mocRegion[outIndex] = dsMOCIn.mocAtlantic26[inIndex] + moc[outIndex, :, :] = dsMOCIn.mocAtlantic[inIndex, :, :] + times[outIndex] = dsMOCIn.Time[inIndex] + computed[outIndex] = True + + if np.all(computed): + # no need to waste time writing out the data set again + return dsMOCIn + + for timeIndex, fileName in enumerate(inputFiles): + if computed[timeIndex]: + continue + + dsLocal = open_mpas_dataset( + fileName=fileName, + calendar=self.calendar, + variableList=self.variableList, + startDate=self.startDate, + endDate=self.endDate) + dsLocal = dsLocal.isel(Time=0) + time = dsLocal.Time.values + times[timeIndex] = time + date = 
days_to_datetime(time, calendar=self.calendar) + + self.logger.info(' date: {:04d}-{:02d}'.format(date.year, + date.month)) + + # hard-wire region=0 (Atlantic) for now + indRegion = 0 + mocVar = dsLocal.timeMonthly_avg_mocStreamvalLatAndDepthRegion + mocTop = mocVar[indRegion, :, :].values + mocRegion[timeIndex] = np.amax(mocTop[:, indlat26]) + + if moc is None: + sizes = dsLocal.sizes + moc = np.zeros((len(inputFiles), sizes['nVertLevels']+1, + len(binBoundaryMocStreamfunction))) + try: + restartFile = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-O restart file found: need at ' + 'least one restart file for MOC calculation') + with xr.open_dataset(restartFile) as dsRestart: + refBottomDepth = dsRestart.refBottomDepth.values + nVertLevels = len(refBottomDepth) + refTopDepth = np.zeros(nVertLevels + 1) + refTopDepth[1:nVertLevels + 1] = refBottomDepth[0:nVertLevels] + + moc[timeIndex, 0:-1, :] = mocTop + + description = 'Max MOC Atlantic streamfunction nearest to RAPID ' \ + 'Array latitude (26.5N)' + + descriptionAtl = 'Atlantic MOC streamfunction' + + dictionary = { + 'dims': ['Time', 'depth', 'lat'], + 'coords': { + 'Time': { + 'dims': ('Time',), + 'data': times, + 'attrs': {'units': 'days since 0001-01-01'}}, + 'year': { + 'dims': ('Time',), + 'data': years, + 'attrs': {'units': 'year'}}, + 'month': { + 'dims': ('Time',), + 'data': months, + 'attrs': {'units': 'month'}}, + 'lat': { + 'dims': ('lat',), + 'data': binBoundaryMocStreamfunction, + 'attrs': {'units': 'degrees north'}}, + 'depth': { + 'dims': ('depth',), + 'data': refTopDepth, + 'attrs': {'units': 'meters'}}}, + 'data_vars': { + 'mocAtlantic26': { + 'dims': ('Time',), + 'data': mocRegion, + 'attrs': {'units': 'Sv (10^6 m^3/s)', + 'description': description}}, + 'mocAtlantic': { + 'dims': ('Time', 'depth', 'lat'), + 'data': moc, + 'attrs': {'units': 'Sv (10^6 m^3/s)', + 'description': descriptionAtl}}}} + dsMOCTimeSeries = xr.Dataset.from_dict(dictionary) + 
write_netcdf(dsMOCTimeSeries, outputFileName) + + def _compute_moc_time_series_postprocess(self): + """compute MOC time series as a post-process""" + + config = self.config + + # Compute and plot time series of Atlantic MOC at 26.5N (RAPID array) + self.logger.info('\n Compute and/or plot post-processed Atlantic MOC ' + 'time series...') + self.logger.info(' Load data...') + + outputDirectory = '{}/moc/'.format( + build_config_full_path(self.config, 'output', + 'timeseriesSubdirectory')) + try: + os.makedirs(outputDirectory) + except OSError: + pass + + outputFileName = '{}/mocTimeSeries_{:04d}-{:04d}.nc'.format( + outputDirectory, self.startYear, self.endYear) + + dvEdge, areaCell, refBottomDepth, latCell, nVertLevels, \ + refTopDepth, refLayerThickness, cellsOnEdge = \ + _load_mesh(self.runStreams) + + mpasMeshName = config.get('input', 'mpasMeshName') + + masksFileName = self.maskSubtask.maskAndTransectFileName + dictRegion = _build_region_mask_dict( + masksFileName, ['Atlantic'], mpasMeshName, self.logger) + dictRegion = dictRegion['Atlantic'] + + latBinSize = config.getfloat('streamfunctionMOCAtlantic', + 'latBinSize') + indRegion = dictRegion['indices'] + latBins = latCell[indRegion] + latBins = np.arange(np.amin(latBins), + np.amax(latBins) + latBinSize, + latBinSize) + latAtlantic = latBins + dLat = latAtlantic - 26.5 + indlat26 = np.where(np.abs(dLat) == np.amin(np.abs(dLat))) + + maxEdgesInTransect = dictRegion['maxEdgesInTransect'] + transectEdgeGlobalIDs = dictRegion['transectEdgeGlobalIDs'] + transectEdgeMaskSigns = dictRegion['transectEdgeMaskSigns'] + regionCellMask = dictRegion['cellMask'] + + streamName = 'timeSeriesStatsMonthlyOutput' + inputFiles = sorted(self.historyStreams.readpath( + streamName, startDate=self.startDate, + endDate=self.endDate, calendar=self.calendar)) + + years, months = get_files_year_month(inputFiles, + self.historyStreams, + 'timeSeriesStatsMonthlyOutput') + + mocRegion = np.zeros(len(inputFiles)) + moc = 
np.zeros((len(inputFiles), nVertLevels+1, len(latBins))) + times = np.zeros(len(inputFiles)) + computed = np.zeros(len(inputFiles), bool) + + continueOutput = os.path.exists(outputFileName) + if continueOutput: + self.logger.info(' Read in previously computed MOC time series') + with open_mpas_dataset(fileName=outputFileName, + calendar=self.calendar, + timeVariableNames=None, + variableList=['mocAtlantic26', + 'mocAtlantic'], + startDate=self.startDate, + endDate=self.endDate) as dsMOCIn: + + dsMOCIn.load() + + # first, copy all computed data + for inIndex in range(dsMOCIn.sizes['Time']): + + mask = np.logical_and( + dsMOCIn.year[inIndex].values == years, + dsMOCIn.month[inIndex].values == months) + + outIndex = np.where(mask)[0][0] + + mocRegion[outIndex] = dsMOCIn.mocAtlantic26[inIndex] + moc[outIndex, :, :] = dsMOCIn.mocAtlantic[inIndex, :, :] + times[outIndex] = dsMOCIn.Time[inIndex] + computed[outIndex] = True + + if np.all(computed): + # no need to waste time writing out the data set again + return dsMOCIn + + for timeIndex, fileName in enumerate(inputFiles): + if computed[timeIndex]: + continue + + dsLocal = open_mpas_dataset( + fileName=fileName, + calendar=self.calendar, + variableList=self.variableList, + startDate=self.startDate, + endDate=self.endDate) + dsLocal = dsLocal.isel(Time=0) + time = dsLocal.Time.values + times[timeIndex] = time + date = days_to_datetime(time, calendar=self.calendar) + + self.logger.info(' date: {:04d}-{:02d}'.format(date.year, + date.month)) + + # rename some variables for convenience + dsLocal = dsLocal.rename( + {'timeMonthly_avg_normalVelocity': 'avgNormalVelocity', + 'timeMonthly_avg_vertVelocityTop': 'avgVertVelocityTop', + 'timeMonthly_avg_layerThickness': 'layerThickness'}) + + if self.includeBolus: + dsLocal['avgNormalVelocity'] = \ + dsLocal['avgNormalVelocity'] + \ + dsLocal['timeMonthly_avg_normalGMBolusVelocity'] + + dsLocal['avgVertVelocityTop'] = \ + dsLocal['avgVertVelocityTop'] + \ + 
dsLocal['timeMonthly_avg_vertGMBolusVelocityTop'] + + if self.includeSubmesoscale: + dsLocal['avgNormalVelocity'] = \ + dsLocal['avgNormalVelocity'] + \ + dsLocal['timeMonthly_avg_normalMLEvelocity'] + + dsLocal['avgVertVelocityTop'] = \ + dsLocal['avgVertVelocityTop'] + \ + dsLocal['timeMonthly_avg_vertMLEBolusVelocityTop'] + + horizontalVel = dsLocal.avgNormalVelocity.values + verticalVel = dsLocal.avgVertVelocityTop.values + velArea = verticalVel * areaCell[:, np.newaxis] + layerThickness = dsLocal.layerThickness.values + + transportZ = _compute_transport(maxEdgesInTransect, + transectEdgeGlobalIDs, + transectEdgeMaskSigns, + nVertLevels, dvEdge, + horizontalVel, + layerThickness, + cellsOnEdge) + mocTop = _compute_moc(latAtlantic, nVertLevels, latCell, + regionCellMask, transportZ, velArea) + moc[timeIndex, :, :] = mocTop + mocRegion[timeIndex] = np.amax(mocTop[:, indlat26]) + + description = 'Max MOC Atlantic streamfunction nearest to RAPID ' \ + 'Array latitude (26.5N)' + + descriptionAtl = 'Atlantic MOC streamfunction' + + dictionary = { + 'dims': ['Time', 'depth', 'lat'], + 'coords': { + 'Time': { + 'dims': ('Time',), + 'data': times, + 'attrs': {'units': 'days since 0001-01-01'}}, + 'year': { + 'dims': ('Time',), + 'data': years, + 'attrs': {'units': 'year'}}, + 'month': { + 'dims': ('Time',), + 'data': months, + 'attrs': {'units': 'month'}}, + 'lat': { + 'dims': ('lat',), + 'data': latAtlantic, + 'attrs': {'units': 'degrees north'}}, + 'depth': { + 'dims': ('depth',), + 'data': refTopDepth, + 'attrs': {'units': 'meters'}}}, + 'data_vars': { + 'mocAtlantic26': { + 'dims': ('Time',), + 'data': mocRegion, + 'attrs': {'units': 'Sv (10^6 m^3/s)', + 'description': description}}, + 'mocAtlantic': { + 'dims': ('Time', 'depth', 'lat'), + 'data': moc, + 'attrs': {'units': 'Sv (10^6 m^3/s)', + 'description': descriptionAtl}}}} + dsMOCTimeSeries = xr.Dataset.from_dict(dictionary) + write_netcdf(dsMOCTimeSeries, outputFileName) + + +class 
CombineMOCTimeSeriesSubtask(AnalysisTask): + """ + Combine individual time series of max Atlantic MOC at 26.5N into a single + data set + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, startYears, endYears): + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``StreamfunctionMOC`` + The main task of which this is a subtask + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(CombineMOCTimeSeriesSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='combineMOCTimeSeries') + + parentTask.add_subtask(self) + self.startYears = startYears + self.endYears = endYears + + def run_task(self): + """ + Plot the MOC time series + """ + # Authors + # ------- + # Xylar Asay-Davis + outputDirectory = '{}/moc/'.format( + build_config_full_path(self.config, 'output', + 'timeseriesSubdirectory')) + try: + os.makedirs(outputDirectory) + except OSError: + pass + + outputFileNames = [] + for startYear, endYear in zip(self.startYears, self.endYears): + outputFileName = '{}/mocTimeSeries_{:04d}-{:04d}.nc'.format( + outputDirectory, startYear, endYear) + outputFileNames.append(outputFileName) + + outputFileName = '{}/mocTimeSeries_{:04d}-{:04d}.nc'.format( + outputDirectory, self.startYears[0], self.endYears[-1]) + + if outputFileName in outputFileNames: + # don't try to write to read from and write to the same file + return + + ds = xr.open_mfdataset(outputFileNames, concat_dim='Time', + combine='nested', decode_times=False) + + ds.load() + + write_netcdf(ds, outputFileName) + + +class PlotMOCTimeSeriesSubtask(AnalysisTask): + """ + Plots a time series of max Atlantic MOC at 26.5N. 
+ """ + # Authors + # ------- + # Milena Veneziani, Mark Petersen, Phillip Wolfram, Xylar Asay-Davis + + def __init__(self, parentTask, controlConfig): + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``StreamfunctionMOC`` + The main task of which this is a subtask + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(PlotMOCTimeSeriesSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='plotMOCTimeSeries') + + parentTask.add_subtask(self) + + self.controlConfig = controlConfig + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + ValueError + if timeSeriesStatsMonthly is not enabled in the MPAS run + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(PlotMOCTimeSeriesSubtask, self).setup_and_check() + + config = self.config + + self.sectionName = 'streamfunctionMOC' + + mainRunName = config.get('runs', 'mainRunName') + + filePrefix = 'mocTimeseries_{}'.format(mainRunName) + self.xmlFileNames = ['{}/{}.xml'.format(self.plotsDirectory, + filePrefix)] + self.filePrefix = filePrefix + + def run_task(self): + """ + Plot the MOC time series + """ + # Authors + # ------- + # Milena Veneziani, Mark Petersen, Phillip J. 
Wolfram, Xylar Asay-Davis + + self.logger.info("\nPlotting time series of Meridional Overturning " + "Circulation (MOC)...") + + config = self.config + + dsMOCTimeSeries = self._load_moc(config) + + # **** Plot MOC **** + # Define plotting variables + mainRunName = config.get('runs', 'mainRunName') + movingAveragePoints = config.getint(self.sectionName, + 'movingAveragePoints') + + # Plot time series + self.logger.info(' Plot time series of max Atlantic MOC at 26.5N...') + xLabel = 'Time [years]' + yLabel = '[Sv]' + title = '{}\n{}'.format(r'Max Atlantic MOC at $26.5\degree$N', + mainRunName) + filePrefix = self.filePrefix + + outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix) + + if config.has_option(self.taskName, 'firstYearXTicks'): + firstYearXTicks = config.getint(self.taskName, + 'firstYearXTicks') + else: + firstYearXTicks = None + + if config.has_option(self.taskName, 'yearStrideXTicks'): + yearStrideXTicks = config.getint(self.taskName, + 'yearStrideXTicks') + else: + yearStrideXTicks = None + + fields = [dsMOCTimeSeries.mocAtlantic26] + lineColors = [config.get('timeSeries', 'mainColor')] + + lineWidths = [2] + legendText = [mainRunName] + + if self.controlConfig is not None: + + dsRefMOC = self._load_moc(self.controlConfig) + fields.append(dsRefMOC.mocAtlantic26) + lineColors.append(config.get('timeSeries', 'controlColor')) + lineWidths.append(2) + controlRunName = self.controlConfig.get('runs', 'mainRunName') + legendText.append(controlRunName) + + timeseries_analysis_plot(config, fields, calendar=self.calendar, + title=title, xlabel=xLabel, ylabel=yLabel, + movingAveragePoints=movingAveragePoints, + lineColors=lineColors, lineWidths=lineWidths, + legendText=legendText, + firstYearXTicks=firstYearXTicks, + yearStrideXTicks=yearStrideXTicks, + maxTitleLength=90) + + savefig(outFileName, config) + + caption = u'Time Series of maximum Meridional Overturning ' \ + u'Circulation at 26.5°N' + write_image_xml( + config=config, + 
filePrefix=filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup='Meridional Overturning Streamfunction', + groupLink='moc', + thumbnailDescription='Time Series', + imageDescription=caption, + imageCaption=caption) + + def _load_moc(self, config): + """compute mean MOC streamfunction from analysis member""" + + outputDirectory = build_config_full_path(config, 'output', + 'timeseriesSubdirectory') + + startYear = config.getint('timeSeries', 'startYear') + endYear = config.getint('timeSeries', 'endYear') + + inputFileName = '{}/moc/mocTimeSeries_{:04d}-{:04d}.nc'.format( + outputDirectory, startYear, endYear) + + dsMOCTimeSeries = xr.open_dataset(inputFileName, decode_times=False) + return dsMOCTimeSeries + + +def _load_mesh(runStreams): + # Load mesh related variables + try: + restartFile = runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-O restart file found: need at least one ' + 'restart file for MOC calculation') + ncFile = netCDF4.Dataset(restartFile, mode='r') + dvEdge = ncFile.variables['dvEdge'][:] + areaCell = ncFile.variables['areaCell'][:] + refBottomDepth = ncFile.variables['refBottomDepth'][:] + latCell = np.rad2deg(ncFile.variables['latCell'][:]) + cellsOnEdge = ncFile.variables['cellsOnEdge'][:] - 1 + ncFile.close() + nVertLevels = len(refBottomDepth) + refTopDepth = np.zeros(nVertLevels + 1) + refTopDepth[1:nVertLevels + 1] = refBottomDepth[0:nVertLevels] + refLayerThickness = np.zeros(nVertLevels) + refLayerThickness[0] = refBottomDepth[0] + refLayerThickness[1:nVertLevels] = \ + (refBottomDepth[1:nVertLevels] - + refBottomDepth[0:nVertLevels - 1]) + + return dvEdge, areaCell, refBottomDepth, latCell, nVertLevels, \ + refTopDepth, refLayerThickness, cellsOnEdge + + +def _build_region_mask_dict(regionMaskFile, regionNames, mpasMeshName, logger): + if not os.path.exists(regionMaskFile): + raise IOError('Regional masking file {} for MOC calculation ' + 'does not 
exist'.format(regionMaskFile)) + + dsMask = xr.open_dataset(regionMaskFile) + dsMask.load() + + regionIndices = {} + for iRegion in range(dsMask.sizes['nRegions']): + regionInFile = str(dsMask.regionNames[iRegion].values.astype('U')) + region = regionInFile.replace('_MOC', '') + regionIndices[region] = iRegion + + dictRegion = {} + for region in regionNames: + logger.info('\n Reading region and transect mask for ' + '{}...'.format(region)) + iRegion = regionIndices[region] + maxEdgesInTransect = dsMask.sizes['maxEdgesInTransect'] + transectEdgeMaskSigns = \ + dsMask.transectEdgeMaskSigns.isel(nTransects=iRegion).values + transectEdgeGlobalIDs = \ + dsMask.transectEdgeGlobalIDs.isel(nTransects=iRegion).values + regionCellMask = \ + dsMask.regionCellMasks.isel(nRegions=iRegion).values + + indRegion = np.where(regionCellMask == 1) + dictRegion[region] = { + 'indices': indRegion, + 'cellMask': regionCellMask, + 'maxEdgesInTransect': maxEdgesInTransect, + 'transectEdgeMaskSigns': transectEdgeMaskSigns, + 'transectEdgeGlobalIDs': transectEdgeGlobalIDs} + + return dictRegion + + +def _compute_transport(maxEdgesInTransect, transectEdgeGlobalIDs, + transectEdgeMaskSigns, nz, dvEdge, + horizontalVel, layerThickness, cellsOnEdge): + """compute mass transport across southern transect of ocean basin""" + + transportZEdge = np.zeros([nz, maxEdgesInTransect]) + for i in range(maxEdgesInTransect): + if transectEdgeGlobalIDs[i] == 0: + break + # subtract 1 because of python 0-indexing + iEdge = transectEdgeGlobalIDs[i] - 1 + coe0 = cellsOnEdge[iEdge, 0] + coe1 = cellsOnEdge[iEdge, 1] + layerThicknessEdge = 0.5*(layerThickness[coe0, :] + + layerThickness[coe1, :]) + transportZEdge[:, i] = horizontalVel[iEdge, :] * \ + transectEdgeMaskSigns[iEdge, np.newaxis] * \ + dvEdge[iEdge, np.newaxis] * \ + layerThicknessEdge[np.newaxis, :] + transportZ = np.nansum(transportZEdge, axis=1) + return transportZ + + +def _compute_moc(latBins, nz, latCell, regionCellMask, transportZ, + velArea): + 
"""compute meridionally integrated MOC streamfunction""" + + mocTop = np.zeros([np.size(latBins), nz + 1]) + mocSouthBottomUp = - transportZ[::-1].cumsum() + mocTop[0, 0:nz] = mocSouthBottomUp[::-1] + for iLat in range(1, np.size(latBins)): + indlat = np.logical_and(np.logical_and( + regionCellMask == 1, latCell >= latBins[iLat - 1]), + latCell < latBins[iLat]) + mocTop[iLat, :] = mocTop[iLat - 1, :] + \ + np.nansum(velArea[indlat, :], axis=0) + # convert m^3/s to Sverdrup + mocTop = mocTop * m3ps_to_Sv + mocTop = mocTop.T + return mocTop + + +def _interp_moc(x, z, regionMOC, refX, refZ, refMOC): + x = x.values + z = z.values + dims = regionMOC.dims + regionMOC = regionMOC.values + refX = refX.values + refZ = refZ.values + refMOC = refMOC.values + + nz, nx = regionMOC.shape + refNz, refNx = refMOC.shape + temp = np.zeros((refNz, nx)) + for zIndex in range(refNz): + temp[zIndex, :] = np.interp( + x, refX, refMOC[zIndex, :], + left=np.nan, right=np.nan) + refRegionMOC = np.zeros((nz, nx)) + for xIndex in range(nx): + refRegionMOC[:, xIndex] = np.interp( + z, refZ, temp[:, xIndex], + left=np.nan, right=np.nan) + + return xr.DataArray(dims=dims, data=refRegionMOC) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_antarctic_melt.html b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_antarctic_melt.html new file mode 100644 index 000000000..08d4ced40 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_antarctic_melt.html @@ -0,0 +1,975 @@ + + + + + + mpas_analysis.ocean.time_series_antarctic_melt — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.time_series_antarctic_melt

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import os
+import xarray
+import numpy
+import csv
+import matplotlib.pyplot as plt
+
+from geometric_features import FeatureCollection, read_feature_collection
+from geometric_features.aggregation import get_aggregator_by_name
+from mpas_tools.cime.constants import constants as cime_constants
+
+from mpas_analysis.shared.analysis_task import AnalysisTask
+
+from mpas_analysis.shared.constants import constants
+
+from mpas_analysis.shared.plot import timeseries_analysis_plot, savefig, \
+    add_inset
+
+from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf_with_fill
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories, build_obs_path, decode_strings
+
+from mpas_analysis.shared.html import write_image_xml
+
+
+
+[docs] +class TimeSeriesAntarcticMelt(AnalysisTask): + """ + Performs analysis of the time-series output of Antarctic sub-ice-shelf + melt rates. + """ + # Authors + # ------- + # Xylar Asay-Davis, Stephen Price + +
+[docs] + def __init__(self, config, mpasTimeSeriesTask, regionMasksTask, + controlConfig=None): + + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + regionMasksTask : ``ComputeRegionMasks`` + A task for computing region masks + + controlConfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(TimeSeriesAntarcticMelt, self).__init__( + config=config, + taskName='timeSeriesAntarcticMelt', + componentName='ocean', + tags=['timeSeries', 'melt', 'landIceCavities', 'antarctic']) + + regionGroup = 'Ice Shelves' + iceShelvesToPlot = config.getexpression('timeSeriesAntarcticMelt', + 'iceShelvesToPlot') + if len(iceShelvesToPlot) == 0: + # nothing else to do + return + + masksSubtask = \ + regionMasksTask.add_mask_subtask(regionGroup=regionGroup) + self.iceShelfMasksFile = masksSubtask.geojsonFileName + + iceShelvesToPlot = masksSubtask.expand_region_names(iceShelvesToPlot) + + startYear = config.getint('timeSeries', 'startYear') + endYear = config.getint('timeSeries', 'endYear') + + years = list(range(startYear, endYear + 1)) + + # in the end, we'll combine all the time series into one, but we + # create this task first so it's easier to tell it to run after all + # the compute tasks + combineSubtask = CombineMeltSubtask( + self, startYears=years, endYears=years) + + # run one subtask per year + for year in years: + computeSubtask = ComputeMeltSubtask( + self, startYear=year, endYear=year, + mpasTimeSeriesTask=mpasTimeSeriesTask, + masksSubtask=masksSubtask, + iceShelvesToPlot=iceShelvesToPlot) + self.add_subtask(computeSubtask) + computeSubtask.run_after(masksSubtask) + 
combineSubtask.run_after(computeSubtask) + + self.add_subtask(combineSubtask) + + for index, iceShelf in enumerate(iceShelvesToPlot): + plotMeltSubtask = PlotMeltSubtask(self, iceShelf, index, + controlConfig) + plotMeltSubtask.run_after(combineSubtask) + self.add_subtask(plotMeltSubtask)
+
+ + + +class ComputeMeltSubtask(AnalysisTask): + """ + Computes time-series of Antarctic sub-ice-shelf melt rates. + + Attributes + ---------- + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + masksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask files for each ice shelf to plot + + iceShelvesToPlot : list of str + A list of ice shelves to plot + """ + # Authors + # ------- + # Xylar Asay-Davis, Stephen Price + + def __init__(self, parentTask, startYear, endYear, mpasTimeSeriesTask, + masksSubtask, iceShelvesToPlot): + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : TimeSeriesAntarcticMelt + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + masksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask files for each ice shelf to plot + + iceShelvesToPlot : list of str + A list of ice shelves to plot + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(ComputeMeltSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName=f'computeMeltRates_{startYear:04d}-{endYear:04d}') + + self.mpasTimeSeriesTask = mpasTimeSeriesTask + self.run_after(mpasTimeSeriesTask) + + self.masksSubtask = masksSubtask + self.run_after(masksSubtask) + + self.iceShelvesToPlot = iceShelvesToPlot + self.restartFileName = None + self.startYear = startYear + self.endYear = endYear + self.startDate = f'{self.startYear:04d}-01-01_00:00:00' + self.endDate = f'{self.endYear:04d}-12-31_23:59:59' + self.variableList = None + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. 
+ + Raises + ------ + IOError + If a restart file is not present + + ValueError + If ``config_land_ice_flux_mode`` is not one of ``standalone`` or + ``coupled`` + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.inDirectory, self.plotsDirectory, self.namelist, self.streams + # self.calendar + super(ComputeMeltSubtask, self).setup_and_check() + + self.check_analysis_enabled( + analysisOptionName='config_am_timeseriesstatsmonthly_enable', + raiseException=True) + + landIceFluxMode = self.namelist.get('config_land_ice_flux_mode') + if landIceFluxMode not in ['data', 'standalone', 'coupled']: + raise ValueError('*** timeSeriesAntarcticMelt requires ' + 'config_land_ice_flux_mode \n' + ' to be data, standalone or coupled. ' + ' Otherwise, no melt rates are available \n' + ' for plotting.') + + # Load mesh related variables + try: + self.restartFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-O restart file found: need at least one ' + 'restart file for Antarctic melt calculations') + + totalFluxVar = 'timeMonthly_avg_landIceFreshwaterFluxTotal' + landIceFluxVar = 'timeMonthly_avg_landIceFreshwaterFlux' + if totalFluxVar in self.mpasTimeSeriesTask.allVariables: + self.variableList = [totalFluxVar] + else: + self.variableList = [landIceFluxVar] + + self.mpasTimeSeriesTask.add_variables(variableList=self.variableList) + + return + + def run_task(self): + """ + Computes time-series of Antarctic sub-ice-shelf melt rates. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis, Stephen Price + + self.logger.info("Computing Antarctic melt rate time series...") + + mpasTimeSeriesTask = self.mpasTimeSeriesTask + config = self.config + + timeSeriesBase = build_config_full_path(config, 'output', + 'timeseriesSubdirectory') + outputDirectory = f'{timeSeriesBase}/iceShelfFluxes/' + + try: + os.makedirs(outputDirectory) + except OSError: + pass + + outFileName = f'{outputDirectory}/iceShelfFluxes_' \ + f'{self.startYear:04d}-{self.endYear:04d}.nc' + + # Load data: + inputFile = mpasTimeSeriesTask.outputFile + dsIn = open_mpas_dataset(fileName=inputFile, + calendar=self.calendar, + variableList=self.variableList, + startDate=self.startDate, + endDate=self.endDate) + try: + if os.path.exists(outFileName): + # The file already exists so load it + dsOut = xarray.open_dataset(outFileName) + if numpy.all(dsOut.Time.values == dsIn.Time.values): + return + else: + self.logger.warning(f'File {outFileName} is incomplete. ' + f'Deleting it.') + os.remove(outFileName) + except OSError: + # something is potentially wrong with the file, so let's delete + # it and try again + self.logger.warning(f'Problems reading file {outFileName}. 
' + f'Deleting it.') + os.remove(outFileName) + + restartFileName = \ + mpasTimeSeriesTask.runStreams.readpath('restart')[0] + + dsRestart = xarray.open_dataset(restartFileName) + landIceFraction = dsRestart.landIceFraction.isel(Time=0) + areaCell = dsRestart.areaCell + + regionMaskFileName = self.masksSubtask.maskFileName + + dsRegionMask = xarray.open_dataset(regionMaskFileName) + + # figure out the indices of the regions to plot + regionNames = decode_strings(dsRegionMask.regionNames) + + regionIndices = [] + for iceShelf in self.iceShelvesToPlot: + for index, regionName in enumerate(regionNames): + if iceShelf == regionName: + regionIndices.append(index) + break + + # select only those regions we want to plot + dsRegionMask = dsRegionMask.isel(nRegions=regionIndices) + + regionNames = decode_strings(dsRegionMask.regionNames) + + fluxVar = self.variableList[0] + + datasets = [] + nTime = dsIn.sizes['Time'] + for tIndex in range(nTime): + self.logger.info(f' {tIndex + 1}/{nTime}') + + freshwaterFlux = dsIn[fluxVar].isel(Time=tIndex) + + nRegions = dsRegionMask.sizes['nRegions'] + meltRates = numpy.zeros((nRegions,)) + integratedMeltFluxes = numpy.zeros((nRegions,)) + + for regionIndex in range(nRegions): + self.logger.info(f' {regionNames[regionIndex]}') + cellMask = \ + dsRegionMask.regionCellMasks.isel(nRegions=regionIndex) + + # convert from kg/s to kg/yr + integratedMeltFlux = constants.sec_per_year * \ + (cellMask * areaCell * freshwaterFlux).sum(dim='nCells') + + totalArea = \ + (landIceFraction * cellMask * areaCell).sum(dim='nCells') + + # from kg/m^2/yr to m/yr + meltRates[regionIndex] = ((1. 
/ constants.rho_fw) * + (integratedMeltFlux / totalArea)) + + # convert from kg/yr to GT/yr + integratedMeltFlux /= constants.kg_per_GT + integratedMeltFluxes[regionIndex] = integratedMeltFlux + + dsOut = xarray.Dataset() + dsOut.coords['Time'] = dsIn.Time.isel(Time=tIndex) + dsOut['integratedMeltFlux'] = (('nRegions',), integratedMeltFluxes) + dsOut['meltRates'] = (('nRegions',), meltRates) + datasets.append(dsOut) + + dsOut = xarray.concat(objs=datasets, dim='Time') + dsOut['regionNames'] = dsRegionMask.regionNames + dsOut.integratedMeltFlux.attrs['units'] = 'GT a$^{-1}$' + dsOut.integratedMeltFlux.attrs['description'] = \ + 'Integrated melt flux summed over each ice shelf or region' + dsOut.meltRates.attrs['units'] = 'm a$^{-1}$' + dsOut.meltRates.attrs['description'] = \ + 'Melt rate averaged over each ice shelf or region' + + write_netcdf_with_fill(dsOut, outFileName) + + +class CombineMeltSubtask(AnalysisTask): + """ + Combine individual time series into a single data set + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, startYears, endYears): + """ + Construct the analysis task. 
+ + Parameters + ---------- + parentTask : TimeSeriesAntarcticMelt + The main task of which this is a subtask + + startYears, endYears : list + The beginning and end of each time series to combine + """ + # Authors + # ------- + # Xylar Asay-Davis + + subtaskName = 'combineAntarcticMeltTimeSeries' + + # first, call the constructor from the base class (AnalysisTask) + super(CombineMeltSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName=subtaskName) + + self.startYears = startYears + self.endYears = endYears + + def run_task(self): + """ + Combine the time series + """ + # Authors + # ------- + # Xylar Asay-Davis + + timeSeriesBase = build_config_full_path(self.config, 'output', + 'timeseriesSubdirectory') + outputDirectory = f'{timeSeriesBase}/iceShelfFluxes/' + + outFileName = f'{outputDirectory}/iceShelfFluxes_' \ + f'{self.startYears[0]:04d}-{self.endYears[-1]:04d}.nc' + + if not os.path.exists(outFileName): + inFileNames = [] + for startYear, endYear in zip(self.startYears, self.endYears): + inFileName = f'{outputDirectory}/iceShelfFluxes_' \ + f'{startYear:04d}-{endYear:04d}.nc' + inFileNames.append(inFileName) + + ds = xarray.open_mfdataset(inFileNames, combine='nested', + concat_dim='Time', decode_times=False) + + ds.load() + + write_netcdf_with_fill(ds, outFileName) + + +class PlotMeltSubtask(AnalysisTask): + """ + Plots time-series output of Antarctic sub-ice-shelf melt rates. + + Attributes + ---------- + iceShelf : str + Name of the ice shelf to plot + + regionIndex : int + The index into the dimension ``nRegions`` of the ice shelf to plot + + controlConfig : mpas_tools.config.MpasConfigParser + The configuration options for the control run (if any) + + """ + # Authors + # ------- + # Xylar Asay-Davis, Stephen Price + + def __init__(self, parentTask, iceShelf, regionIndex, controlConfig): + + """ + Construct the analysis task. 
+ + Parameters + ---------- + parentTask : TimeSeriesAntarcticMelt + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + iceShelf : str + Name of the ice shelf to plot + + regionIndex : int + The index into the dimension ``nRegions`` of the ice shelf to plot + + controlConfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(PlotMeltSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName=f'plotMeltRates_{iceShelf.replace(" ", "_")}') + + self.iceShelfMasksFile = parentTask.iceShelfMasksFile + self.iceShelf = iceShelf + self.regionIndex = regionIndex + self.controlConfig = controlConfig + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + IOError + If files are not present + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.inDirectory, self.plotsDirectory, self.namelist, self.streams + # self.calendar + super(PlotMeltSubtask, self).setup_and_check() + + self.xmlFileNames = [] + + for prefix in ['melt_flux', 'melt_rate']: + iceShelfSuffix = self.iceShelf.replace(" ", "_") + self.xmlFileNames.append( + f'{self.plotsDirectory}/{prefix}_{iceShelfSuffix}.xml') + return + + def run_task(self): + """ + Plots time-series output of Antarctic sub-ice-shelf melt rates. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis, Stephen Price + + self.logger.info(f'\nPlotting Antarctic melt rate time series for ' + f'{self.iceShelf}...') + + self.logger.info(' Load melt rate data...') + + config = self.config + calendar = self.calendar + + iceShelfMasksFile = self.iceShelfMasksFile + + fcAll = read_feature_collection(iceShelfMasksFile) + + fc = FeatureCollection() + for feature in fcAll.features: + if feature['properties']['name'] == self.iceShelf: + fc.add_feature(feature) + break + + integratedMeltFlux, meltRates = self._load_ice_shelf_fluxes(config) + + plotControl = self.controlConfig is not None + if plotControl: + controlRunName = self.controlConfig.get('runs', 'mainRunName') + + refintegratedMeltFlux, refMeltRates = \ + self._load_ice_shelf_fluxes(self.controlConfig) + else: + controlRunName = None + refintegratedMeltFlux = None + refMeltRates = None + + # Load observations from multiple files and put in dictionary based + # on shelf key name + observationsDirectory = build_obs_path(config, 'ocean', + 'meltSubdirectory') + obsFileNameDict = {'Rignot et al. (2013)': + 'Rignot_2013_melt_rates_20201117.csv', + 'Rignot et al. (2013) SS': + 'Rignot_2013_melt_rates_SS_20201117.csv'} + + obsDict = {} # dict for storing dict of obs data + for obsName in obsFileNameDict: + obsFileName = f'{observationsDirectory}/{obsFileNameDict[obsName]}' + obsDict[obsName] = {} + obsFile = csv.reader(open(obsFileName, 'r')) + next(obsFile, None) # skip the header line + for line in obsFile: # some later useful values commented out + shelfName = line[0] + if shelfName != self.iceShelf: + continue + + # surveyArea = line[1] + meltFlux = float(line[2]) + meltFluxUncertainty = float(line[3]) + meltRate = float(line[4]) + meltRateUncertainty = float(line[5]) + # actualArea = float( line[6] ) # actual area here is in sq km + + # build dict of obs. 
keyed to filename description + # (which will be used for plotting) + obsDict[obsName] = { + 'meltFlux': meltFlux, + 'meltFluxUncertainty': meltFluxUncertainty, + 'meltRate': meltRate, + 'meltRateUncertainty': meltRateUncertainty} + break + regionGroup = 'Ice Shelves' + _, prefix, date = get_aggregator_by_name(regionGroup) + + obsFileName = f'{observationsDirectory}/Adusumilli/Adusumilli_2020_' \ + f'iceshelf_melt_rates_2010-2018_v0.20230504.' \ + f'{prefix}{date}.nc' + with xarray.open_dataset(obsFileName) as ds_adusumilli: + region_names = [name.values for name in ds_adusumilli.regionNames] + index = region_names.index(self.iceShelf) + ds_shelf = ds_adusumilli.isel(nRegions=index) + obsDict['Adusumilli et al. (2020)'] = { + 'meltFlux': ds_shelf.totalMeltFlux.values, + 'meltFluxUncertainty': ds_shelf.meltFluxUncertainty.values, + 'meltRate': ds_shelf.meanMeltRate.values, + 'meltRateUncertainty': ds_shelf.meltRateUncertainty.values} + + rho_fw = cime_constants['SHR_CONST_RHOFW'] + kg_per_gt = constants.kg_per_GT + gt_per_m3 = rho_fw / kg_per_gt + + obsFileName = f'{observationsDirectory}/Paolo/' \ + f'Paolo_2023_melt_rates.20240220.csv' + obsName = 'Paolo et al. (2023)' + obsDict[obsName] = {} + obsFile = csv.reader(open(obsFileName, 'r')) + next(obsFile, None) # skip the header line + for line in obsFile: # some later useful values commented out + shelfName = line[0] + if shelfName != self.iceShelf: + continue + + # km^2 --> m^2 + area = 1e6 * float(line[1]) + meltRate = float(line[2]) + meltRateUncertainty = float(line[3]) + meltFlux = gt_per_m3 * area * meltRate + meltFluxUncertainty = gt_per_m3 * area * meltRateUncertainty + + # build dict of obs. 
keyed to filename description + # (which will be used for plotting) + obsDict[obsName] = { + 'meltFlux': meltFlux, + 'meltFluxUncertainty': meltFluxUncertainty, + 'meltRate': meltRate, + 'meltRateUncertainty': meltRateUncertainty} + + mainRunName = config.get('runs', 'mainRunName') + movingAveragePoints = config.getint('timeSeriesAntarcticMelt', + 'movingAveragePoints') + + outputDirectory = build_config_full_path(config, 'output', + 'timeseriesSubdirectory') + + make_directories(outputDirectory) + + self.logger.info(' Make plots...') + + # get obs melt flux and unc. for shelf (similar for rates) + obsMeltFlux = [] + obsMeltFluxUnc = [] + obsMeltRate = [] + obsMeltRateUnc = [] + for obsName in obsDict: + if len(obsDict[obsName]) > 0: + obsMeltFlux.append( + obsDict[obsName]['meltFlux']) + obsMeltFluxUnc.append( + obsDict[obsName]['meltFluxUncertainty']) + obsMeltRate.append( + obsDict[obsName]['meltRate']) + obsMeltRateUnc.append( + obsDict[obsName]['meltRateUncertainty']) + else: + # append NaN so this particular obs won't plot + self.logger.warning(f'{obsName} observations not available ' + f'for {self.iceShelf}') + obsMeltFlux.append(None) + obsMeltFluxUnc.append(None) + obsMeltRate.append(None) + obsMeltRateUnc.append(None) + + title = self.iceShelf.replace('_', ' ') + suffix = self.iceShelf.replace(' ', '_') + + xLabel = 'Time (yr)' + yLabel = 'Melt Flux (GT/yr)' + + timeSeries = integratedMeltFlux.isel(nRegions=self.regionIndex) + + filePrefix = f'melt_flux_{suffix}' + outFileName = f'{self.plotsDirectory}/{filePrefix}.png' + + fields = [timeSeries] + lineColors = [config.get('timeSeries', 'mainColor')] + lineWidths = [2.5] + legendText = [mainRunName] + if plotControl: + fields.append(refintegratedMeltFlux.isel(nRegions=self.regionIndex)) + lineColors.append(config.get('timeSeries', 'controlColor')) + lineWidths.append(1.2) + legendText.append(controlRunName) + + if config.has_option('timeSeriesAntarcticMelt', 'firstYearXTicks'): + firstYearXTicks = 
config.getint('timeSeriesAntarcticMelt', + 'firstYearXTicks') + else: + firstYearXTicks = None + + if config.has_option('timeSeriesAntarcticMelt', 'yearStrideXTicks'): + yearStrideXTicks = config.getint('timeSeriesAntarcticMelt', + 'yearStrideXTicks') + else: + yearStrideXTicks = None + + if config.has_option('timeSeriesAntarcticMelt', 'titleFontSize'): + titleFontSize = config.getint('timeSeriesAntarcticMelt', + 'titleFontSize') + else: + titleFontSize = None + + if config.has_option('timeSeriesAntarcticMelt', 'defaultFontSize'): + defaultFontSize = config.getint('timeSeriesAntarcticMelt', + 'defaultFontSize') + else: + defaultFontSize = None + + fig = timeseries_analysis_plot(config, fields, calendar=calendar, + title=title, xlabel=xLabel, + ylabel=yLabel, + movingAveragePoints=movingAveragePoints, + lineColors=lineColors, + lineWidths=lineWidths, + legendText=legendText, + legendLocation='upper left', + titleFontSize=titleFontSize, + defaultFontSize=defaultFontSize, + obsMean=obsMeltFlux, + obsUncertainty=obsMeltFluxUnc, + obsLegend=list(obsDict.keys()), + firstYearXTicks=firstYearXTicks, + yearStrideXTicks=yearStrideXTicks) + + # do this before the inset because otherwise it moves the inset + # and cartopy doesn't play too well with tight_layout anyway + plt.tight_layout() + + add_inset(fig, fc, width=2.0, height=2.0) + + savefig(outFileName, config) + + caption = f'Running Mean of Integrated Melt Flux under Ice Shelves in ' \ + f'the {title} Region' + write_image_xml( + config=config, + filePrefix=filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup='Antarctic Melt Time Series', + groupLink='antmelttime', + gallery='Integrated Melt Flux', + thumbnailDescription=title, + imageDescription=caption, + imageCaption=caption) + + xLabel = 'Time (yr)' + yLabel = 'Melt Rate (m/yr) freshwater equiv.' 
+ + timeSeries = meltRates.isel(nRegions=self.regionIndex) + + filePrefix = f'melt_rate_{suffix}' + outFileName = f'{self.plotsDirectory}/{filePrefix}.png' + + fields = [timeSeries] + lineColors = [config.get('timeSeries', 'mainColor')] + lineWidths = [2.5] + legendText = [mainRunName] + if plotControl: + fields.append(refMeltRates.isel(nRegions=self.regionIndex)) + lineColors.append(config.get('timeSeries', 'controlColor')) + lineWidths.append(1.2) + legendText.append(controlRunName) + + if config.has_option(self.taskName, 'firstYearXTicks'): + firstYearXTicks = config.getint(self.taskName, + 'firstYearXTicks') + else: + firstYearXTicks = None + + if config.has_option(self.taskName, 'yearStrideXTicks'): + yearStrideXTicks = config.getint(self.taskName, + 'yearStrideXTicks') + else: + yearStrideXTicks = None + + fig = timeseries_analysis_plot(config, fields, calendar=calendar, + title=title, xlabel=xLabel, + ylabel=yLabel, + movingAveragePoints=movingAveragePoints, + lineColors=lineColors, + lineWidths=lineWidths, + legendText=legendText, + firstYearXTicks=firstYearXTicks, + yearStrideXTicks=yearStrideXTicks, + obsMean=obsMeltRate, + obsUncertainty=obsMeltRateUnc, + obsLegend=list(obsDict.keys())) + + # do this before the inset because otherwise it moves the inset + # and cartopy doesn't play too well with tight_layout anyway + plt.tight_layout() + + add_inset(fig, fc, width=2.0, height=2.0) + + savefig(outFileName, config) + + caption = f'Running Mean of Area-averaged Melt Rate under Ice ' \ + f'Shelves in the {title} Region' + write_image_xml( + config=config, + filePrefix=filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup='Antarctic Melt Time Series', + groupLink='antmelttime', + gallery='Area-averaged Melt Rate', + thumbnailDescription=title, + imageDescription=caption, + imageCaption=caption) + + @staticmethod + def _load_ice_shelf_fluxes(config): + """ + Reads melt flux time series and computes regional integrated melt flux + 
and mean melt rate. + """ + # Authors + # ------- + # Xylar Asay-Davis + + timeSeriesBase = build_config_full_path(config, 'output', + 'timeseriesSubdirectory') + + outputDirectory = f'{timeSeriesBase}/iceShelfFluxes/' + + startYear = config.getint('timeSeries', 'startYear') + endYear = config.getint('timeSeries', 'endYear') + + outFileName = f'{outputDirectory}/iceShelfFluxes_' \ + f'{startYear:04d}-{endYear:04d}.nc' + + dsOut = xarray.open_dataset(outFileName) + return dsOut.integratedMeltFlux, dsOut.meltRates +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_ocean_regions.html b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_ocean_regions.html new file mode 100644 index 000000000..0a9ebc29f --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_ocean_regions.html @@ -0,0 +1,1418 @@ + + + + + + mpas_analysis.ocean.time_series_ocean_regions — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.time_series_ocean_regions

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import os
+import xarray
+import numpy
+import matplotlib.pyplot as plt
+
+from geometric_features import FeatureCollection, read_feature_collection
+from mpas_tools.cime.constants import constants as cime_constants
+
+from mpas_analysis.shared.analysis_task import AnalysisTask
+
+from mpas_analysis.shared.plot import timeseries_analysis_plot, savefig, \
+    add_inset
+
+from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf_with_fill
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    build_obs_path, get_files_year_month, decode_strings, get_region_mask
+
+from mpas_analysis.shared.html import write_image_xml
+
+from mpas_analysis.ocean.utility import compute_zmid
+
+from mpas_analysis.shared.constants import constants
+
+
+
+[docs] +class TimeSeriesOceanRegions(AnalysisTask): + """ + Performs analysis of the time-series output of regionoal mean temperature, + salinity, etc. + """ + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, config, regionMasksTask, controlConfig=None): + + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + regionMasksTask : ``ComputeRegionMasks`` + A task for computing region masks + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(TimeSeriesOceanRegions, self).__init__( + config=config, + taskName='timeSeriesOceanRegions', + componentName='ocean', + tags=['timeSeries', 'regions', 'antarctic']) + + startYear = config.getint('timeSeries', 'startYear') + endYear = config.getint('timeSeries', 'endYear') + + regionGroups = config.getexpression(self.taskName, 'regionGroups') + + obsDicts = { + 'SOSE': { + 'suffix': 'SOSE', + 'gridName': 'SouthernOcean_0.167x0.167degree', + 'gridFileName': 'SOSE/SOSE_2005-2010_monthly_pot_temp_' + 'SouthernOcean_0.167x0.167degree_20180710.nc', + 'TFileName': 'SOSE/SOSE_2005-2010_monthly_pot_temp_' + 'SouthernOcean_0.167x0.167degree_20180710.nc', + 'SFileName': 'SOSE/SOSE_2005-2010_monthly_salinity_' + 'SouthernOcean_0.167x0.167degree_20180710.nc', + 'volFileName': 'SOSE/SOSE_volume_' + 'SouthernOcean_0.167x0.167degree_20190815.nc', + 'lonVar': 'lon', + 'latVar': 'lat', + 'TVar': 'theta', + 'SVar': 'salinity', + 'volVar': 'volume', + 'zVar': 'z', + 'tDim': 'Time', + 'legend': 'SOSE 2005-2010 ANN mean'}, + 'WOA18': { + 'suffix': 'WOA18', + 'gridName': 'Global_0.25x0.25degree', + 'gridFileName': 'WOA18/woa18_decav_04_TS_mon_20190829.nc', + 'TFileName': 'WOA18/woa18_decav_04_TS_mon_20190829.nc', + 'SFileName': 'WOA18/woa18_decav_04_TS_mon_20190829.nc', + 'volFileName': None, + 'lonVar': 'lon', + 'latVar': 'lat', + 'TVar': 't_an', + 'SVar': 's_an', + 'volVar': 'volume', + 'zVar': 'depth', + 'tDim': 'month', + 'legend': 'WOA18 1955-2017 ANN 
mean'}} + + for regionGroup in regionGroups: + sectionSuffix = regionGroup[0].upper() + \ + regionGroup[1:].replace(' ', '') + sectionName = 'timeSeries{}'.format(sectionSuffix) + + regionNames = config.getexpression(sectionName, 'regionNames') + if len(regionNames) == 0: + # no regions in this group were requested + continue + + masksSubtask = regionMasksTask.add_mask_subtask( + regionGroup=regionGroup) + + try: + regionNames = masksSubtask.expand_region_names(regionNames) + except FileNotFoundError: + # this may happen if we can't create the geojson file to expand + # its contents, e.g. if we're just doing mpas_analysis --list + regionNames = [] + + years = list(range(startYear, endYear + 1)) + + obsList = config.getexpression(sectionName, 'obs') + groupObsDicts = {} + + for obsName in obsList: + localObsDict = dict(obsDicts[obsName]) + obsFileName = build_obs_path( + config, component=self.componentName, + relativePath=localObsDict['gridFileName']) + obsMasksSubtask = regionMasksTask.add_mask_subtask( + regionGroup=regionGroup, obsFileName=obsFileName, + lonVar=localObsDict['lonVar'], + latVar=localObsDict['latVar'], + meshName=localObsDict['gridName']) + + obsDicts[obsName]['maskTask'] = obsMasksSubtask + + localObsDict['maskTask'] = obsMasksSubtask + groupObsDicts[obsName] = localObsDict + + # in the end, we'll combine all the time series into one, but we + # create this task first so it's easier to tell it to run after all + # the compute tasks + combineSubtask = CombineRegionalProfileTimeSeriesSubtask( + self, startYears=years, endYears=years, + regionGroup=regionGroup) + + depthMasksSubtask = ComputeRegionDepthMasksSubtask( + self, masksSubtask=masksSubtask, regionGroup=regionGroup, + regionNames=regionNames) + depthMasksSubtask.run_after(masksSubtask) + + # run one subtask per year + for year in years: + computeSubtask = ComputeRegionTimeSeriesSubtask( + self, startYear=year, endYear=year, + masksSubtask=masksSubtask, regionGroup=regionGroup, + 
regionNames=regionNames) + self.add_subtask(computeSubtask) + computeSubtask.run_after(depthMasksSubtask) + computeSubtask.run_after(masksSubtask) + combineSubtask.run_after(computeSubtask) + + self.add_subtask(combineSubtask) + + for index, regionName in enumerate(regionNames): + + fullSuffix = sectionSuffix + '_' + regionName.replace(' ', '') + + obsSubtasks = {} + for obsName in obsList: + localObsDict = dict(groupObsDicts[obsName]) + + obsSubtask = ComputeObsRegionalTimeSeriesSubtask( + self, regionGroup, regionName, fullSuffix, localObsDict) + obsSubtasks[obsName] = obsSubtask + + plotRegionSubtask = PlotRegionTimeSeriesSubtask( + self, regionGroup, regionName, index, controlConfig, + sectionName, fullSuffix, obsSubtasks, + masksSubtask.geojsonFileName) + plotRegionSubtask.run_after(combineSubtask) + self.add_subtask(plotRegionSubtask)
+
+ + + +class ComputeRegionDepthMasksSubtask(AnalysisTask): + """ + Compute masks for regional and depth mean + + Attributes + ---------- + masksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask files for each region to plot + + regionGroup : str + The name of the region group being computed (e.g. "Antarctic Basins") + + regionNames : list of str + The names of the regions to compute + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, masksSubtask, regionGroup, regionNames): + + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``TimeSeriesOceanRegions`` + The main task of which this is a subtask + + masksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask files for each region to plot + + regionGroup : str + The name of the region group being computed (e.g. "Antarctic + Basins") + + regionNames : list of str + The names of the regions to compute + """ + # Authors + # ------- + # Xylar Asay-Davis + + suffix = regionGroup[0].upper() + regionGroup[1:].replace(' ', '') + + # first, call the constructor from the base class (AnalysisTask) + super(ComputeRegionDepthMasksSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='computeDepthMask{}'.format(suffix)) + + parentTask.add_subtask(self) + self.masksSubtask = masksSubtask + self.regionGroup = regionGroup + self.regionNames = regionNames + + def run_task(self): + """ + Compute the regional-mean time series + """ + # Authors + # ------- + # Xylar Asay-Davis + + config = self.config + + self.logger.info("\nCompute depth mask for regional means...") + + regionGroup = self.regionGroup + sectionSuffix = regionGroup[0].upper() + \ + regionGroup[1:].replace(' ', '') + timeSeriesName = sectionSuffix + sectionName = 'timeSeries{}'.format(sectionSuffix) + + outputDirectory = '{}/{}/'.format( + build_config_full_path(config, 
'output', 'timeseriesSubdirectory'), + timeSeriesName) + try: + os.makedirs(outputDirectory) + except OSError: + pass + + outFileName = '{}/depthMasks_{}.nc'.format(outputDirectory, + timeSeriesName) + + if os.path.exists(outFileName): + self.logger.info(' Mask file exists -- Done.') + return + + # Load mesh related variables + try: + restartFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-O restart file found: need at least one ' + 'restart file for ocean region time series') + + if config.has_option(sectionName, 'zmin'): + config_zmin = config.getfloat(sectionName, 'zmin') + else: + config_zmin = None + + if config.has_option(sectionName, 'zmax'): + config_zmax = config.getfloat(sectionName, 'zmax') + else: + config_zmax = None + + dsRestart = xarray.open_dataset(restartFileName).isel(Time=0) + zMid = compute_zmid(dsRestart.bottomDepth, dsRestart.maxLevelCell-1, + dsRestart.layerThickness) + areaCell = dsRestart.areaCell + if 'landIceMask' in dsRestart: + # only the region outside of ice-shelf cavities + openOceanMask = dsRestart.landIceMask == 0 + else: + openOceanMask = None + + regionMaskFileName = self.masksSubtask.maskFileName + dsRegionMask = xarray.open_dataset(regionMaskFileName) + maskRegionNames = decode_strings(dsRegionMask.regionNames) + + regionIndices = [] + for regionName in self.regionNames: + for index, otherName in enumerate(maskRegionNames): + if regionName == otherName: + regionIndices.append(index) + break + + # select only those regions we want to plot + dsRegionMask = dsRegionMask.isel(nRegions=regionIndices) + + nRegions = dsRegionMask.sizes['nRegions'] + + datasets = [] + for regionIndex in range(nRegions): + self.logger.info(' region: {}'.format( + self.regionNames[regionIndex])) + dsRegion = dsRegionMask.isel(nRegions=regionIndex) + cellMask = dsRegion.regionCellMasks == 1 + + if openOceanMask is not None: + cellMask = numpy.logical_and(cellMask, openOceanMask) + + totalArea = 
areaCell.where(cellMask).sum() + self.logger.info(' totalArea: {} mil. km^2'.format( + 1e-12 * totalArea.values)) + + if config_zmin is None: + if 'zminRegions' in dsRegion: + zmin = dsRegion.zminRegions.values + else: + zmin = dsRegion.zmin.values + else: + zmin = config_zmin + + if config_zmax is None: + if 'zmaxRegions' in dsRegion: + zmax = dsRegion.zmaxRegions.values + else: + zmax = dsRegion.zmax.values + else: + zmax = config_zmax + depthMask = numpy.logical_and(zMid >= zmin, zMid <= zmax) + dsOut = xarray.Dataset() + dsOut['zmin'] = ('nRegions', [zmin]) + dsOut['zmax'] = ('nRegions', [zmax]) + dsOut['totalArea'] = totalArea + dsOut['cellMask'] = cellMask + dsOut['depthMask'] = depthMask + datasets.append(dsOut) + + dsOut = xarray.concat(objs=datasets, dim='nRegions') + zbounds = numpy.zeros((nRegions, 2)) + zbounds[:, 0] = dsOut.zmin.values + zbounds[:, 1] = dsOut.zmax.values + dsOut['zbounds'] = (('nRegions', 'nbounds'), zbounds) + dsOut['areaCell'] = areaCell + dsOut['regionNames'] = dsRegionMask.regionNames + write_netcdf_with_fill(dsOut, outFileName) + + +class ComputeRegionTimeSeriesSubtask(AnalysisTask): + """ + Compute regional and depth mean at a function of time for a set of MPAS + fields + + Attributes + ---------- + startYear, endYear : int + The beginning and end of the time series to compute + + masksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask files for each region to plot + + regionGroup : str + The name of the region group being computed (e.g. "Antarctic Basins") + + regionNames : list of str + The names of the regions to compute + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, startYear, endYear, masksSubtask, + regionGroup, regionNames): + """ + Construct the analysis task. 
+ + Parameters + ---------- + parentTask : TimeSeriesOceanRegions + The main task of which this is a subtask + + startYear, endYear : int + The beginning and end of the time series to compute + + masksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask files for each region to plot + + regionGroup : str + The name of the region group being computed (e.g. "Antarctic + Basins") + + regionNames : list of str + The names of the regions to compute + """ + # Authors + # ------- + # Xylar Asay-Davis + + suffix = regionGroup[0].upper() + regionGroup[1:].replace(' ', '') + + # first, call the constructor from the base class (AnalysisTask) + super(ComputeRegionTimeSeriesSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='compute{}_{:04d}-{:04d}'.format(suffix, startYear, + endYear)) + + parentTask.add_subtask(self) + self.startYear = startYear + self.endYear = endYear + self.masksSubtask = masksSubtask + self.regionGroup = regionGroup + self.regionNames = regionNames + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. 
+ + Raises + ------ + ValueError + if timeSeriesStatsMonthly is not enabled in the MPAS run + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(ComputeRegionTimeSeriesSubtask, self).setup_and_check() + + self.check_analysis_enabled( + analysisOptionName='config_am_timeseriesstatsmonthly_enable', + raiseException=True) + + def run_task(self): + """ + Compute the regional-mean time series + """ + # Authors + # ------- + # Xylar Asay-Davis + + config = self.config + + self.logger.info("\nCompute time series of regional means...") + + startDate = '{:04d}-01-01_00:00:00'.format(self.startYear) + endDate = '{:04d}-12-31_23:59:59'.format(self.endYear) + + regionGroup = self.regionGroup + sectionSuffix = regionGroup[0].upper() + \ + regionGroup[1:].replace(' ', '') + timeSeriesName = sectionSuffix + sectionName = 'timeSeries{}'.format(sectionSuffix) + + outputDirectory = '{}/{}/'.format( + build_config_full_path(config, 'output', 'timeseriesSubdirectory'), + timeSeriesName) + try: + os.makedirs(outputDirectory) + except OSError: + pass + + outFileName = '{}/{}_{:04d}-{:04d}.nc'.format( + outputDirectory, timeSeriesName, self.startYear, self.endYear) + + inputFiles = sorted(self.historyStreams.readpath( + 'timeSeriesStatsMonthlyOutput', startDate=startDate, + endDate=endDate, calendar=self.calendar)) + + years, months = get_files_year_month(inputFiles, + self.historyStreams, + 'timeSeriesStatsMonthlyOutput') + + variables = config.getexpression(sectionName, 'variables') + + variableList = {'timeMonthly_avg_layerThickness'} + + for var in variables: + mpas_var = var['mpas'] + if mpas_var == 'none': + continue + if isinstance(mpas_var, (list, tuple)): + for v in mpas_var: + variableList.add(v) + 
else: + variableList.add(mpas_var) + + outputExists = os.path.exists(outFileName) + outputValid = outputExists + if outputExists: + with open_mpas_dataset(fileName=outFileName, + calendar=self.calendar, + timeVariableNames=None, + variableList=None, + startDate=startDate, + endDate=endDate) as dsOut: + + for inIndex in range(dsOut.sizes['Time']): + + mask = numpy.logical_and( + dsOut.year[inIndex].values == years, + dsOut.month[inIndex].values == months) + if numpy.count_nonzero(mask) == 0: + outputValid = False + break + + if outputValid: + self.logger.info(' Time series exists -- Done.') + return + + regionMaskFileName = '{}/depthMasks_{}.nc'.format(outputDirectory, + timeSeriesName) + dsRegionMask = xarray.open_dataset(regionMaskFileName) + nRegions = dsRegionMask.sizes['nRegions'] + areaCell = dsRegionMask.areaCell + + datasets = [] + nTime = len(inputFiles) + for tIndex in range(nTime): + self.logger.info(' {}/{}'.format(tIndex + 1, nTime)) + + dsIn = open_mpas_dataset( + fileName=inputFiles[tIndex], + calendar=self.calendar, + variableList=variableList, + startDate=startDate, + endDate=endDate).isel(Time=0) + dsIn.load() + + layerThickness = dsIn.timeMonthly_avg_layerThickness + + innerDatasets = [] + for regionIndex in range(nRegions): + self.logger.info(' region: {}'.format( + self.regionNames[regionIndex])) + dsRegion = dsRegionMask.isel(nRegions=regionIndex) + dsRegion.load() + cellMask = dsRegion.cellMask + totalArea = dsRegion.totalArea + depthMask = dsRegion.depthMask.where(cellMask, drop=True) + localArea = areaCell.where(cellMask, drop=True) + localThickness = layerThickness.where(cellMask, drop=True) + + volCell = (localArea*localThickness).where(depthMask) + volCell = volCell.transpose('nCells', 'nVertLevels') + totalVol = volCell.sum(dim='nVertLevels').sum(dim='nCells') + self.logger.info(' totalVol (mil. 
km^3): {}'.format( + 1e-15*totalVol.values)) + + dsOut = xarray.Dataset() + dsOut['totalVol'] = totalVol + dsOut.totalVol.attrs['units'] = 'm^3' + + for var in variables: + outName = var['name'] + self.logger.info(' {}'.format(outName)) + if outName == 'thermalForcing': + timeSeries = self._add_thermal_forcing(dsIn, cellMask) + units = 'degrees Celsius' + description = 'potential temperature minus the ' \ + 'potential freezing temperature' + else: + mpasVarName = var['mpas'] + timeSeries = \ + dsIn[mpasVarName].where(cellMask, drop=True) + units = timeSeries.units + description = timeSeries.long_name + + is3d = 'nVertLevels' in timeSeries.dims + if is3d: + timeSeries = \ + (volCell*timeSeries.where(depthMask)).sum( + dim='nVertLevels').sum(dim='nCells') / totalVol + else: + timeSeries = \ + (localArea*timeSeries).sum( + dim='nCells') / totalArea + + dsOut[outName] = timeSeries + dsOut[outName].attrs['units'] = units + dsOut[outName].attrs['description'] = description + dsOut[outName].attrs['is3d'] = str(is3d) + + innerDatasets.append(dsOut) + + datasets.append(innerDatasets) + + # combine data sets into a single data set + dsOut = xarray.combine_nested(datasets, ['Time', 'nRegions'], + combine_attrs='identical') + + dsOut['totalArea'] = dsRegionMask.totalArea + dsOut.totalArea.attrs['units'] = 'm^2' + dsOut['zbounds'] = dsRegionMask.zbounds + dsOut.zbounds.attrs['units'] = 'm' + dsOut.coords['regionNames'] = dsRegionMask.regionNames + dsOut.coords['year'] = (('Time',), years) + dsOut['year'].attrs['units'] = 'years' + dsOut.coords['month'] = (('Time',), months) + dsOut['month'].attrs['units'] = 'months' + + write_netcdf_with_fill(dsOut, outFileName) + + def _add_thermal_forcing(self, dsIn, cellMask): + """ compute the thermal forcing """ + + c0 = self.namelist.getfloat( + 'config_land_ice_cavity_freezing_temperature_coeff_0') + cs = self.namelist.getfloat( + 'config_land_ice_cavity_freezing_temperature_coeff_S') + cp = self.namelist.getfloat( + 
'config_land_ice_cavity_freezing_temperature_coeff_p') + cps = self.namelist.getfloat( + 'config_land_ice_cavity_freezing_temperature_coeff_pS') + + vars = ['timeMonthly_avg_activeTracers_temperature', + 'timeMonthly_avg_activeTracers_salinity', + 'timeMonthly_avg_density', + 'timeMonthly_avg_layerThickness'] + ds = dsIn[vars].where(cellMask, drop=True) + + temp = ds.timeMonthly_avg_activeTracers_temperature + salin = ds.timeMonthly_avg_activeTracers_salinity + dens = ds.timeMonthly_avg_density + thick = ds.timeMonthly_avg_layerThickness + + dp = cime_constants['SHR_CONST_G']*dens*thick + press = dp.cumsum(dim='nVertLevels') - 0.5*dp + + tempFreeze = c0 + cs*salin + cp*press + cps*press*salin + + timeSeries = temp - tempFreeze + + return timeSeries + + +class CombineRegionalProfileTimeSeriesSubtask(AnalysisTask): + """ + Combine individual time series into a single data set + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, startYears, endYears, regionGroup): + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : TimeSeriesOceanRegions + The main task of which this is a subtask + + startYears, endYears : list of int + The beginning and end of each time series to combine + + regionGroup : str + The name of the region group being computed (e.g. 
"Antarctic + Basins") + + """ + # Authors + # ------- + # Xylar Asay-Davis + + taskSuffix = regionGroup[0].upper() + regionGroup[1:].replace(' ', '') + subtaskName = 'combine{}TimeSeries'.format(taskSuffix) + + # first, call the constructor from the base class (AnalysisTask) + super(CombineRegionalProfileTimeSeriesSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName=subtaskName) + + self.startYears = startYears + self.endYears = endYears + self.regionGroup = regionGroup + + def run_task(self): + """ + Combine the time series + """ + # Authors + # ------- + # Xylar Asay-Davis + + regionGroup = self.regionGroup + timeSeriesName = regionGroup.replace(' ', '') + + outputDirectory = '{}/{}/'.format( + build_config_full_path(self.config, 'output', + 'timeseriesSubdirectory'), + timeSeriesName) + + outFileName = '{}/{}_{:04d}-{:04d}.nc'.format( + outputDirectory, timeSeriesName, self.startYears[0], + self.endYears[-1]) + + if not os.path.exists(outFileName): + inFileNames = [] + for startYear, endYear in zip(self.startYears, self.endYears): + inFileName = '{}/{}_{:04d}-{:04d}.nc'.format( + outputDirectory, timeSeriesName, startYear, endYear) + inFileNames.append(inFileName) + + ds = xarray.open_mfdataset(inFileNames, combine='nested', + concat_dim='Time', decode_times=False) + + ds.load() + + # a few variables have become time dependent and shouldn't be + for var in ['totalArea', 'zbounds']: + ds[var] = ds[var].isel(Time=0, drop=True) + + write_netcdf_with_fill(ds, outFileName) + + +class ComputeObsRegionalTimeSeriesSubtask(AnalysisTask): + """ + Compute the regional mean of the obs climatology + + Attributes + ---------- + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, regionGroup, regionName, fullSuffix, + obsDict): + + """ + Construct the analysis task. 
+ + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + regionGroup : str + Name of the collection of region to plot + + regionName : str + Name of the region to plot + + fullSuffix : str + The regionGroup and regionName combined and modified to be + appropriate as a task or file suffix + + obsDict : dict + Information on the observations to compare against + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(ComputeObsRegionalTimeSeriesSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='compute{}_{}'.format(fullSuffix, obsDict['suffix'])) + + self.regionGroup = regionGroup + self.regionName = regionName + self.obsDict = obsDict + self.prefix = fullSuffix + + timeSeriesName = regionGroup.replace(' ', '') + outputDirectory = '{}/{}/'.format( + build_config_full_path(self.config, 'output', + 'timeseriesSubdirectory'), + timeSeriesName) + + self.outFileName = '{}/TS_{}_{}.nc'.format( + outputDirectory, obsDict['suffix'], self.prefix) + + self.run_after(obsDict['maskTask']) + + def run_task(self): + """ + Compute time-series output of properties in an ocean region. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + self.logger.info("\nAveraging T and S for {}...".format( + self.regionName)) + + obsDict = self.obsDict + config = self.config + + regionGroup = self.regionGroup + timeSeriesName = regionGroup.replace(' ', '') + + sectionSuffix = regionGroup[0].upper() + \ + regionGroup[1:].replace(' ', '') + sectionName = 'timeSeries{}'.format(sectionSuffix) + + outputDirectory = '{}/{}/'.format( + build_config_full_path(self.config, 'output', + 'timeseriesSubdirectory'), + timeSeriesName) + + try: + os.makedirs(outputDirectory) + except OSError: + pass + + outFileName = '{}/TS_{}_{}.nc'.format( + outputDirectory, obsDict['suffix'], self.prefix) + + if os.path.exists(outFileName): + return + + regionMaskFileName = obsDict['maskTask'].maskFileName + + print(regionMaskFileName) + print(xarray.open_dataset(regionMaskFileName)) + + dsRegionMask = \ + xarray.open_dataset(regionMaskFileName).stack( + nCells=(obsDict['latVar'], obsDict['lonVar'])) + dsRegionMask = dsRegionMask.reset_index('nCells').drop_vars( + [obsDict['latVar'], obsDict['lonVar']]) + if 'nCells' in dsRegionMask.data_vars: + dsRegionMaks = dsRegionMask.drop_vars(['nCells']) + + maskRegionNames = decode_strings(dsRegionMask.regionNames) + regionIndex = maskRegionNames.index(self.regionName) + + dsMask = dsRegionMask.isel(nRegions=regionIndex) + + if 'regionMasks' in dsMask: + # this is the name used by the mask creation tool in mpas_tools + maskVar = 'regionMasks' + elif 'regionCellMasks' in dsMask: + # this is the name used in the old mask creation tool in + # mpas-analysis + maskVar = 'regionCellMasks' + else: + raise ValueError(f'The file {regionMaskFileName} doesn\'t ' + f'contain a mask variable: regionMasks or ' + f'regionCellMasks') + + cellMask = dsMask[maskVar] == 1 + + if config.has_option(sectionName, 'zmin'): + zmin = config.getfloat(sectionName, 'zmin') + elif 'zminRegions' in dsMask: + zmin = dsMask.zminRegions.values + else: + zmin = dsMask.zmin.values 
+ + if config.has_option(sectionName, 'zmax'): + zmax = config.getfloat(sectionName, 'zmax') + elif 'zmaxRegions' in dsMask: + zmax = dsMask.zmaxRegions.values + else: + zmax = dsMask.zmax.values + + TVarName = obsDict['TVar'] + SVarName = obsDict['SVar'] + zVarName = obsDict['zVar'] + lonVarName = obsDict['lonVar'] + latVarName = obsDict['latVar'] + volVarName = obsDict['volVar'] + tDim = obsDict['tDim'] + + obsFileName = build_obs_path( + config, component=self.componentName, + relativePath=obsDict['TFileName']) + self.logger.info(' Reading from {}...'.format(obsFileName)) + + ds = xarray.open_dataset(obsFileName) + if obsDict['SFileName'] != obsDict['TFileName']: + obsFileName = build_obs_path( + config, component=self.componentName, + relativePath=obsDict['SFileName']) + self.logger.info(' Reading from {}...'.format(obsFileName)) + dsS = xarray.open_dataset(obsFileName) + ds[SVarName] = dsS[SVarName] + + if obsDict['volFileName'] is None: + # compute volume from lat, lon, depth bounds + self.logger.info(' Computing volume...'.format(obsFileName)) + latBndsName = ds[latVarName].attrs['bounds'] + lonBndsName = ds[lonVarName].attrs['bounds'] + zBndsName = ds[zVarName].attrs['bounds'] + latBnds = ds[latBndsName] + lonBnds = ds[lonBndsName] + zBnds = ds[zBndsName] + dLat = numpy.deg2rad(latBnds[:, 1] - latBnds[:, 0]) + dLon = numpy.deg2rad(lonBnds[:, 1] - lonBnds[:, 0]) + lat = numpy.deg2rad(ds[latVarName]) + dz = zBnds[:, 1] - zBnds[:, 0] + radius = 6378137.0 + area = radius**2*numpy.cos(lat)*dLat*dLon + volume = dz*area + ds[volVarName] = volume + + elif obsDict['volFileName'] != obsDict['TFileName']: + obsFileName = build_obs_path( + config, component=self.componentName, + relativePath=obsDict['volFileName']) + self.logger.info(' Reading from {}...'.format(obsFileName)) + dsVol = xarray.open_dataset(obsFileName) + ds[volVarName] = dsVol[volVarName] + + if 'positive' in ds[zVarName].attrs and \ + ds[zVarName].attrs['positive'] == 'down': + attrs = 
ds[zVarName].attrs + ds[zVarName] = -ds[zVarName] + ds[zVarName].attrs = attrs + ds[zVarName].attrs['positive'] = 'up' + + TMean = numpy.zeros(ds.sizes[tDim]) + SMean = numpy.zeros(ds.sizes[tDim]) + + depthMask = numpy.logical_and(ds[zVarName] >= zmin, + ds[zVarName] <= zmax) + + for tIndex in range(ds.sizes[tDim]): + dsMonth = ds.isel({tDim: tIndex}) + dsMonth = dsMonth.stack(nCells=(obsDict['latVar'], + obsDict['lonVar'])) + dsMonth = dsMonth.reset_index('nCells').drop_vars( + [obsDict['latVar'], obsDict['lonVar']]) + if 'nCells' in dsMonth.data_vars: + dsMonth = dsMonth.drop_vars(['nCells']) + + dsMonth = dsMonth.where(cellMask, drop=True) + + dsMonth = dsMonth.where(depthMask) + + mask = dsMonth[TVarName].notnull() + TSum = (dsMonth[TVarName]*dsMonth[volVarName]).sum(dim=('nCells', + zVarName)) + volSum = (mask*dsMonth[volVarName]).sum(dim=('nCells', zVarName)) + TMean[tIndex] = TSum/volSum + + mask = dsMonth[SVarName].notnull() + SSum = (dsMonth[SVarName]*dsMonth[volVarName]).sum(dim=('nCells', + zVarName)) + volSum = (mask*dsMonth[volVarName]).sum(dim=('nCells', zVarName)) + SMean[tIndex] = SSum/volSum + + dsOut = xarray.Dataset() + dsOut['temperature'] = ('Time', TMean) + dsOut['salinity'] = ('Time', SMean) + dsOut['zbounds'] = ('nBounds', [zmin, zmax]) + dsOut['month'] = ('Time', numpy.array(ds.month.values, dtype=float)) + dsOut['year'] = ('Time', numpy.ones(ds.sizes[tDim])) + write_netcdf_with_fill(dsOut, outFileName) + + +class PlotRegionTimeSeriesSubtask(AnalysisTask): + """ + Plots time-series output of properties in an ocean region. 
+ + Attributes + ---------- + regionGroup : str + Name of the collection of region to plot + + regionName : str + Name of the region to plot + + regionIndex : int + The index into the dimension ``nRegions`` of the region to plot + + sectionName : str + The section of the config file to get options from + + controlConfig : mpas_tools.config.MpasConfigParser + The configuration options for the control run (if any) + + """ + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, regionGroup, regionName, regionIndex, + controlConfig, sectionName, fullSuffix, obsSubtasks, + geojsonFileName): + + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : TimeSeriesOceanRegions + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + regionGroup : str + Name of the collection of region to plot + + regionName : str + Name of the region to plot + + regionIndex : int + The index into the dimension ``nRegions`` of the region to plot + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + + sectionName : str + The config section with options for this regionGroup + + fullSuffix : str + The regionGroup and regionName combined and modified to be + appropriate as a task or file suffix + + obsSubtasks : dict of ``AnalysisTasks`` + Subtasks for computing the mean observed T and S in the region + + geojsonFileName : str + The geojson file including the feature to plot + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(PlotRegionTimeSeriesSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='plot{}'.format(fullSuffix)) + + self.regionGroup = regionGroup + self.regionName = regionName + self.regionIndex = regionIndex + self.sectionName = sectionName + 
self.controlConfig = controlConfig + self.prefix = fullSuffix + self.obsSubtasks = obsSubtasks + self.geojsonFileName = geojsonFileName + + for obsName in obsSubtasks: + self.run_after(obsSubtasks[obsName]) + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + IOError + If files are not present + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.inDirectory, self.plotsDirectory, self.namelist, self.streams + # self.calendar + super(PlotRegionTimeSeriesSubtask, self).setup_and_check() + + self.variables = self.config.getexpression(self.sectionName, + 'variables') + + self.xmlFileNames = [] + for var in self.variables: + self.xmlFileNames.append('{}/{}_{}.xml'.format( + self.plotsDirectory, self.prefix, var['name'])) + return + + def run_task(self): + """ + Plots time-series output of properties in an ocean region. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + self.logger.info("\nPlotting time series of ocean properties of {}" + "...".format(self.regionName)) + + self.logger.info(' Load time series...') + + config = self.config + calendar = self.calendar + + fcAll = read_feature_collection(self.geojsonFileName) + + fc = FeatureCollection() + for feature in fcAll.features: + if feature['properties']['name'] == self.regionName: + fc.add_feature(feature) + break + + baseDirectory = build_config_full_path( + config, 'output', 'timeSeriesSubdirectory') + + startYear = config.getint('timeSeries', 'startYear') + endYear = config.getint('timeSeries', 'endYear') + regionGroup = self.regionGroup + timeSeriesName = regionGroup.replace(' ', '') + + inFileName = '{}/{}/{}_{:04d}-{:04d}.nc'.format( + baseDirectory, timeSeriesName, timeSeriesName, startYear, endYear) + + dsIn = xarray.open_dataset(inFileName).isel(nRegions=self.regionIndex) + + zbounds = dsIn.zbounds.values + + controlConfig = self.controlConfig + plotControl = controlConfig is not None + if plotControl: + controlRunName = controlConfig.get('runs', 'mainRunName') + baseDirectory = build_config_full_path( + controlConfig, 'output', 'timeSeriesSubdirectory') + + startYear = controlConfig.getint('timeSeries', 'startYear') + endYear = controlConfig.getint('timeSeries', 'endYear') + + inFileName = '{}/{}/{}_{:04d}-{:04d}.nc'.format( + baseDirectory, timeSeriesName, timeSeriesName, startYear, + endYear) + dsRef = xarray.open_dataset(inFileName).isel( + nRegions=self.regionIndex) + + zboundsRef = dsRef.zbounds.values + + mainRunName = config.get('runs', 'mainRunName') + movingAveragePoints = 1 + + self.logger.info(' Make plots...') + + groupLink = self.regionGroup.replace(' ', '') + + for var in self.variables: + varName = var['name'] + mainArray = dsIn[varName] + is3d = mainArray.attrs['is3d'] == 'True' + if is3d: + title = 'Volume-Mean {} in {}'.format( + var['title'], self.regionName) + else: + title = 'Area-Mean {} in 
{}'.format(var['title'], + self.regionName) + + if plotControl: + refArray = dsRef[varName] + xLabel = 'Time (yr)' + yLabel = '{} ({})'.format(var['title'], var['units']) + + filePrefix = '{}_{}'.format(self.prefix, varName) + outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix) + + fields = [mainArray] + lineColors = [config.get('timeSeries', 'mainColor')] + lineWidths = [2.5] + legendText = [mainRunName] + if plotControl: + fields.append(refArray) + lineColors.append(config.get('timeSeries', 'controlColor')) + lineWidths.append(1.2) + legendText.append(controlRunName) + + if varName in ['temperature', 'salinity']: + obsColors = [ + config.get('timeSeries', 'obsColor{}'.format(index + 1)) + for index in range(5)] + daysInMonth = constants.daysInMonth + for obsName in self.obsSubtasks: + obsFileName = self.obsSubtasks[obsName].outFileName + obsDict = self.obsSubtasks[obsName].obsDict + dsObs = xarray.open_dataset(obsFileName) + endMonthDays = numpy.cumsum(daysInMonth) + midMonthDays = endMonthDays - 0.5*daysInMonth + + obsTime = [] + fieldMean = \ + numpy.sum(dsObs[varName].values*daysInMonth)/365. 
+ for year in range(startYear, endYear+1): + obsTime.append(midMonthDays + 365.*(year-1.)) + obsTime = numpy.array(obsTime).ravel() + obsField = fieldMean*numpy.ones(obsTime.shape) + da = xarray.DataArray(data=obsField, dims='Time', + coords=[('Time', obsTime)]) + fields.append(da) + lineColors.append(obsColors.pop(0)) + lineWidths.append(1.2) + legendText.append(obsDict['legend']) + + if is3d: + if not plotControl or numpy.all(zbounds == zboundsRef): + title = '{} ({} < z < {} m)'.format(title, zbounds[0], + zbounds[1]) + else: + legendText[0] = '{} ({} < z < {} m)'.format( + legendText[0], zbounds[0], zbounds[1]) + legendText[1] = '{} ({} < z < {} m)'.format( + legendText[1], zboundsRef[0], zboundsRef[1]) + + sectionName = self.sectionName + if config.has_option(sectionName, 'titleFontSize'): + titleFontSize = config.getint(sectionName, 'titleFontSize') + else: + titleFontSize = None + + if config.has_option(sectionName, 'defaultFontSize'): + defaultFontSize = config.getint(sectionName, 'defaultFontSize') + else: + defaultFontSize = None + + fig = timeseries_analysis_plot( + config, fields, calendar=calendar, title=title, xlabel=xLabel, + ylabel=yLabel, movingAveragePoints=movingAveragePoints, + lineColors=lineColors, lineWidths=lineWidths, + legendText=legendText, titleFontSize=titleFontSize, + defaultFontSize=defaultFontSize) + + # do this before the inset because otherwise it moves the inset + # and cartopy doesn't play too well with tight_layout anyway + plt.tight_layout() + + add_inset(fig, fc, width=2.0, height=2.0) + + savefig(outFileName, config, tight=False) + + caption = 'Regional mean of {}'.format(title) + write_image_xml( + config=config, + filePrefix=filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup='{} Time Series'.format(self.regionGroup), + groupLink=groupLink, + gallery=var['title'], + thumbnailDescription=self.regionName, + imageDescription=caption, + imageCaption=caption) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_ohc_anomaly.html b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_ohc_anomaly.html new file mode 100644 index 000000000..1813088db --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_ohc_anomaly.html @@ -0,0 +1,346 @@ + + + + + + mpas_analysis.ocean.time_series_ohc_anomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.time_series_ohc_anomaly

+# -*- coding: utf-8 -*-
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+
+import xarray as xr
+import numpy
+import matplotlib.pyplot as plt
+
+from mpas_tools.cime.constants import constants as cime_constants
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.ocean.compute_anomaly_subtask import ComputeAnomalySubtask
+from mpas_analysis.ocean.plot_hovmoller_subtask import PlotHovmollerSubtask
+from mpas_analysis.ocean.plot_depth_integrated_time_series_subtask import \
+    PlotDepthIntegratedTimeSeriesSubtask
+
+from mpas_analysis.shared.constants import constants as mpas_constants
+
+
+
+[docs] +class TimeSeriesOHCAnomaly(AnalysisTask): + """ + Performs analysis of ocean heat content (OHC) from time-series output. + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani, Greg Streletz + +
+[docs] + def __init__(self, config, mpasTimeSeriesTask, controlConfig=None): + + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(TimeSeriesOHCAnomaly, self).__init__( + config=config, + taskName='timeSeriesOHCAnomaly', + componentName='ocean', + tags=['timeSeries', 'ohc', 'publicObs', 'anomaly']) + + sectionName = 'timeSeriesOHCAnomaly' + regionNames = config.getexpression(sectionName, 'regions') + movingAveragePoints = config.getint(sectionName, 'movingAveragePoints') + + self.variableDict = {} + for suffix in ['avgLayerTemperature', 'sumLayerMaskValue', + 'avgLayerArea', 'avgLayerThickness']: + key = 'timeMonthly_avg_avgValueWithinOceanLayerRegion_' + suffix + self.variableDict[key] = suffix + + mpasFieldName = 'ohc' + + timeSeriesFileName = 'regionAveragedOHCAnomaly.nc' + + anomalyTask = ComputeAnomalySubtask( + parentTask=self, + mpasTimeSeriesTask=mpasTimeSeriesTask, + outFileName=timeSeriesFileName, + variableList=list(self.variableDict.keys()), + movingAveragePoints=movingAveragePoints, + alter_dataset=self._compute_ohc) + self.add_subtask(anomalyTask) + + for regionName in regionNames: + caption = 'Trend of {} OHC Anomaly vs depth'.format( + regionName) + plotTask = PlotHovmollerSubtask( + parentTask=self, + regionName=regionName, + inFileName=timeSeriesFileName, + outFileLabel='ohcAnomalyZ', + fieldNameInTitle='OHC Anomaly', + mpasFieldName=mpasFieldName, + unitsLabel=r'[$\times 10^{22}$ J]', + sectionName='hovmollerOHCAnomaly', + thumbnailSuffix=u'ΔOHC', + imageCaption=caption, + galleryGroup='Trends vs Depth', + 
groupSubtitle=None, + groupLink='trendsvsdepth', + galleryName=None) + + plotTask.run_after(anomalyTask) + self.add_subtask(plotTask) + + caption = 'Running Mean of the Anomaly in {} Ocean Heat ' \ + 'Content'.format(regionName) + plotTask = PlotOHCAnomaly( + parentTask=self, + regionName=regionName, + inFileName=timeSeriesFileName, + outFileLabel='ohcAnomaly', + fieldNameInTitle='OHC Anomaly', + mpasFieldName=mpasFieldName, + yAxisLabel=r'$\Delta$OHC [$\times 10^{22}$ J]', + sectionName='timeSeriesOHCAnomaly', + thumbnailSuffix=u'ΔOHC', + imageCaption=caption, + galleryGroup='Time Series', + groupSubtitle=None, + groupLink='timeseries', + galleryName=None, + controlConfig=controlConfig) + + plotTask.run_after(anomalyTask) + self.add_subtask(plotTask)
+ + + def _compute_ohc(self, ds): + """ + Compute the OHC time series. + """ + + # regionNames = self.config.getexpression('regions', 'regions') + # ds['regionNames'] = ('nOceanRegionsTmp', regionNames) + + # for convenience, rename the variables to simpler, shorter names + ds = ds.rename(self.variableDict) + + # specific heat [J/(kg*degC)] + cp = self.namelist.getfloat('config_specific_heat_sea_water') + # [kg/m3] + rho = self.namelist.getfloat('config_density0') + + unitsScalefactor = 1e-22 + + ds['ohc'] = unitsScalefactor * rho * cp * ds['sumLayerMaskValue'] * \ + ds['avgLayerArea'] * ds['avgLayerThickness'] * \ + ds['avgLayerTemperature'] + ds.ohc.attrs['units'] = '$10^{22}$ J' + ds.ohc.attrs['description'] = 'Ocean heat content in each region' + + # Note: restart file, not a mesh file because we need refBottomDepth, + # not in a mesh file + try: + restartFile = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-O restart file found: need at least one ' + 'restart file for OHC calculation') + + # Define/read in general variables + with xr.open_dataset(restartFile) as dsRestart: + # reference depth [m] + # add depths as a coordinate to the data set + ds.coords['depth'] = (('nVertLevels',), + dsRestart.refBottomDepth.values) + + return ds
+ + + +class PlotOHCAnomaly(PlotDepthIntegratedTimeSeriesSubtask): + def customize_fig(self, fig): + """ + A function to override to customize the figure. + + fig : matplotlib.pyplot.Figure + The figure + """ + def joules_to_watts_m2(joules): + watts_m2 = joules/factor + return watts_m2 + + def watts_m2_to_joules(watts_m2): + joules = factor*watts_m2 + return joules + + # add an axis on the right-hand side + color = 'tab:blue' + ax = plt.gca() + xlim = ax.get_xlim() + + earth_surface_area = (4. * numpy.pi * + cime_constants['SHR_CONST_REARTH']**2) + + max_time = xlim[-1]*mpas_constants.sec_per_day + + factor = earth_surface_area*max_time/10**22 + + secaxy = ax.secondary_yaxis( + 'right', functions=(joules_to_watts_m2, watts_m2_to_joules)) + secaxy.set_ylabel(r'W/m$^2$', color=color) + secaxy.tick_params(axis='y', colors=color) + ax.spines['right'].set_color(color) + plt.draw() + yticks = secaxy.get_yticks() + for ytick in yticks: + plt.plot(xlim, [0, watts_m2_to_joules(ytick)], color=color, + linewidth=0.5) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_salinity_anomaly.html b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_salinity_anomaly.html new file mode 100644 index 000000000..de0bd3938 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_salinity_anomaly.html @@ -0,0 +1,224 @@ + + + + + + mpas_analysis.ocean.time_series_salinity_anomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.time_series_salinity_anomaly

+# -*- coding: utf-8 -*-
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.ocean.compute_anomaly_subtask import ComputeAnomalySubtask
+from mpas_analysis.ocean.plot_hovmoller_subtask import PlotHovmollerSubtask
+
+
+
+[docs] +class TimeSeriesSalinityAnomaly(AnalysisTask): + """ + Performs analysis of time series of salinity anomalies from the first + simulation year as a function of depth. + """ + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, config, mpasTimeSeriesTask): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(TimeSeriesSalinityAnomaly, self).__init__( + config=config, + taskName='timeSeriesSalinityAnomaly', + componentName='ocean', + tags=['timeSeries', 'salinity', 'publicObs', 'anomaly']) + + sectionName = 'hovmollerSalinityAnomaly' + regionNames = config.getexpression(sectionName, 'regions') + movingAveragePoints = config.getint(sectionName, 'movingAveragePoints') + + mpasFieldName = 'timeMonthly_avg_avgValueWithinOceanLayerRegion_' \ + 'avgLayerSalinity' + + timeSeriesFileName = 'regionAveragedSalinityAnomaly.nc' + + anomalyTask = ComputeAnomalySubtask( + parentTask=self, + mpasTimeSeriesTask=mpasTimeSeriesTask, + outFileName=timeSeriesFileName, + variableList=[mpasFieldName], + movingAveragePoints=movingAveragePoints) + self.add_subtask(anomalyTask) + + for regionName in regionNames: + caption = 'Trend of {} Salinity Anomaly vs depth'.format( + regionName) + plotTask = PlotHovmollerSubtask( + parentTask=self, + regionName=regionName, + inFileName=timeSeriesFileName, + outFileLabel='SAnomalyZ', + fieldNameInTitle='Salinity Anomaly', + mpasFieldName=mpasFieldName, + unitsLabel='[PSU]', + sectionName=sectionName, + thumbnailSuffix=u'ΔS', + imageCaption=caption, + galleryGroup='Trends vs Depth', + groupSubtitle=None, + groupLink='trendsvsdepth', + galleryName=None) + + plotTask.run_after(anomalyTask) + self.add_subtask(plotTask)
+
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_sst.html b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_sst.html new file mode 100644 index 000000000..1b82b5d59 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_sst.html @@ -0,0 +1,431 @@ + + + + + + mpas_analysis.ocean.time_series_sst — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.time_series_sst

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.plot import timeseries_analysis_plot, savefig
+
+from mpas_analysis.shared.time_series import combine_time_series_with_ncrcat
+from mpas_analysis.shared.io import open_mpas_dataset
+
+from mpas_analysis.shared.timekeeping.utility import date_to_days, \
+    days_to_datetime
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories, check_path_exists
+from mpas_analysis.shared.html import write_image_xml
+
+
+
+[docs] +class TimeSeriesSST(AnalysisTask): + """ + Performs analysis of the time-series output of sea-surface temperature + (SST). + + Attributes + ---------- + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + controlconfig : mpas_tools.config.MpasConfigParser + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani + +
+[docs] + def __init__(self, config, mpasTimeSeriesTask, controlConfig=None): + + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(TimeSeriesSST, self).__init__( + config=config, + taskName='timeSeriesSST', + componentName='ocean', + tags=['timeSeries', 'sst', 'publicObs']) + + self.mpasTimeSeriesTask = mpasTimeSeriesTask + self.controlConfig = controlConfig + + self.run_after(mpasTimeSeriesTask)
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + OSError + If files are not present + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.inDirectory, self.plotsDirectory, self.namelist, self.streams + # self.calendar + super(TimeSeriesSST, self).setup_and_check() + + config = self.config + + self.startDate = self.config.get('timeSeries', 'startDate') + self.endDate = self.config.get('timeSeries', 'endDate') + + self.variableList = \ + ['timeMonthly_avg_avgValueWithinOceanRegion_avgSurfaceTemperature'] + self.mpasTimeSeriesTask.add_variables(variableList=self.variableList) + + if config.get('runs', 'preprocessedReferenceRunName') != 'None': + check_path_exists(config.get('oceanPreprocessedReference', + 'baseDirectory')) + + self.inputFile = self.mpasTimeSeriesTask.outputFile + + mainRunName = config.get('runs', 'mainRunName') + regions = config.getexpression('timeSeriesSST', 'regions') + + self.xmlFileNames = [] + self.filePrefixes = {} + + for region in regions: + filePrefix = 'sst_{}_{}'.format(region, mainRunName) + self.xmlFileNames.append('{}/{}.xml'.format(self.plotsDirectory, + filePrefix)) + self.filePrefixes[region] = filePrefix + + return + + def run_task(self): + """ + Performs analysis of the time-series output of sea-surface temperature + (SST). 
+ """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani + + self.logger.info("\nPlotting SST time series...") + + self.logger.info(' Load SST data...') + + config = self.config + calendar = self.calendar + + mainRunName = config.get('runs', 'mainRunName') + preprocessedReferenceRunName = \ + config.get('runs', 'preprocessedReferenceRunName') + preprocessedInputDirectory = config.get('oceanPreprocessedReference', + 'baseDirectory') + + movingAveragePoints = config.getint('timeSeriesSST', + 'movingAveragePoints') + + regions = config.getexpression('regions', 'regions') + plotTitles = config.getexpression('regions', 'plotTitles') + regionsToPlot = config.getexpression('timeSeriesSST', 'regions') + + regionIndicesToPlot = [regions.index(region) for region in + regionsToPlot] + + outputDirectory = build_config_full_path(config, 'output', + 'timeseriesSubdirectory') + + make_directories(outputDirectory) + + dsSST = open_mpas_dataset(fileName=self.inputFile, + calendar=calendar, + variableList=self.variableList, + startDate=self.startDate, + endDate=self.endDate) + + yearStart = days_to_datetime(dsSST.Time.min(), calendar=calendar).year + yearEnd = days_to_datetime(dsSST.Time.max(), calendar=calendar).year + timeStart = date_to_days(year=yearStart, month=1, day=1, + calendar=calendar) + timeEnd = date_to_days(year=yearEnd, month=12, day=31, + calendar=calendar) + + if self.controlConfig is not None: + baseDirectory = build_config_full_path( + self.controlConfig, 'output', 'timeSeriesSubdirectory') + + controlFileName = '{}/{}.nc'.format( + baseDirectory, self.mpasTimeSeriesTask.fullTaskName) + + controlStartYear = self.controlConfig.getint( + 'timeSeries', 'startYear') + controlEndYear = self.controlConfig.getint('timeSeries', 'endYear') + controlStartDate = '{:04d}-01-01_00:00:00'.format(controlStartYear) + controlEndDate = '{:04d}-12-31_23:59:59'.format(controlEndYear) + + dsRefSST = open_mpas_dataset( + fileName=controlFileName, + calendar=calendar, + 
variableList=self.variableList, + startDate=controlStartDate, + endDate=controlEndDate) + else: + dsRefSST = None + + if preprocessedReferenceRunName != 'None': + self.logger.info(' Load in SST for a preprocesses reference ' + 'run...') + inFilesPreprocessed = '{}/SST.{}.year*.nc'.format( + preprocessedInputDirectory, preprocessedReferenceRunName) + + outFolder = '{}/preprocessed'.format(outputDirectory) + make_directories(outFolder) + outFileName = '{}/sst.nc'.format(outFolder) + + combine_time_series_with_ncrcat(inFilesPreprocessed, + outFileName, logger=self.logger) + dsPreprocessed = open_mpas_dataset(fileName=outFileName, + calendar=calendar, + timeVariableNames='xtime') + yearEndPreprocessed = days_to_datetime(dsPreprocessed.Time.max(), + calendar=calendar).year + if yearStart <= yearEndPreprocessed: + dsPreprocessedTimeSlice = \ + dsPreprocessed.sel(Time=slice(timeStart, timeEnd)) + else: + self.logger.warning('Preprocessed time series ends before the ' + 'timeSeries startYear and will not be ' + 'plotted.') + preprocessedReferenceRunName = 'None' + + self.logger.info(' Make plots...') + for regionIndex in regionIndicesToPlot: + region = regions[regionIndex] + + title = '{} SST'.format(plotTitles[regionIndex]) + xLabel = 'Time [years]' + yLabel = r'[$\degree$C]' + + varName = self.variableList[0] + SST = dsSST[varName].isel(nOceanRegions=regionIndex) + + filePrefix = self.filePrefixes[region] + + outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix) + + lineColors = [config.get('timeSeries', 'mainColor')] + lineWidths = [3] + + fields = [SST] + legendText = [mainRunName] + + if dsRefSST is not None: + refSST = dsRefSST[varName].isel(nOceanRegions=regionIndex) + fields.append(refSST) + lineColors.append(config.get('timeSeries', 'controlColor')) + lineWidths.append(1.5) + controlRunName = self.controlConfig.get('runs', 'mainRunName') + legendText.append(controlRunName) + + if preprocessedReferenceRunName != 'None': + SST_v0 = 
dsPreprocessedTimeSlice.SST + fields.append(SST_v0) + lineColors.append('purple') + lineWidths.append(1.5) + legendText.append(preprocessedReferenceRunName) + + if config.has_option(self.taskName, 'firstYearXTicks'): + firstYearXTicks = config.getint(self.taskName, + 'firstYearXTicks') + else: + firstYearXTicks = None + + if config.has_option(self.taskName, 'yearStrideXTicks'): + yearStrideXTicks = config.getint(self.taskName, + 'yearStrideXTicks') + else: + yearStrideXTicks = None + + timeseries_analysis_plot(config, fields, calendar=calendar, + title=title, xlabel=xLabel, ylabel=yLabel, + movingAveragePoints=movingAveragePoints, + lineColors=lineColors, + lineWidths=lineWidths, + legendText=legendText, + firstYearXTicks=firstYearXTicks, + yearStrideXTicks=yearStrideXTicks) + + savefig(outFileName, config) + + caption = 'Running Mean of {} Sea Surface Temperature'.format( + region) + write_image_xml( + config=config, + filePrefix=filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup='Time Series', + groupLink='timeseries', + thumbnailDescription='{} SST'.format(region), + imageDescription=caption, + imageCaption=caption)
+ + + +# }}} +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_temperature_anomaly.html b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_temperature_anomaly.html new file mode 100644 index 000000000..019cb6835 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_temperature_anomaly.html @@ -0,0 +1,224 @@ + + + + + + mpas_analysis.ocean.time_series_temperature_anomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.time_series_temperature_anomaly

+# -*- coding: utf-8 -*-
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.ocean.compute_anomaly_subtask import ComputeAnomalySubtask
+from mpas_analysis.ocean.plot_hovmoller_subtask import PlotHovmollerSubtask
+
+
+
+[docs] +class TimeSeriesTemperatureAnomaly(AnalysisTask): + """ + Performs analysis of time series of potential temperature anomalies from + a reference simulation year as a function of depth. + """ + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, config, mpasTimeSeriesTask): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(TimeSeriesTemperatureAnomaly, self).__init__( + config=config, + taskName='timeSeriesTemperatureAnomaly', + componentName='ocean', + tags=['timeSeries', 'temperature', 'publicObs', 'anomaly']) + + sectionName = 'hovmollerTemperatureAnomaly' + regionNames = config.getexpression(sectionName, 'regions') + movingAveragePoints = config.getint(sectionName, 'movingAveragePoints') + + mpasFieldName = 'timeMonthly_avg_avgValueWithinOceanLayerRegion_' \ + 'avgLayerTemperature' + + timeSeriesFileName = 'regionAveragedTemperatureAnomaly.nc' + + anomalyTask = ComputeAnomalySubtask( + parentTask=self, + mpasTimeSeriesTask=mpasTimeSeriesTask, + outFileName=timeSeriesFileName, + variableList=[mpasFieldName], + movingAveragePoints=movingAveragePoints) + self.add_subtask(anomalyTask) + + for regionName in regionNames: + caption = 'Trend of {} Potential Temperature Anomaly vs ' \ + 'Depth'.format(regionName) + plotTask = PlotHovmollerSubtask( + parentTask=self, + regionName=regionName, + inFileName=timeSeriesFileName, + outFileLabel='TAnomalyZ', + fieldNameInTitle='Potential Temperature Anomaly', + mpasFieldName=mpasFieldName, + unitsLabel=r'[$\degree$C]', + sectionName=sectionName, + thumbnailSuffix=u'Δϴ', + imageCaption=caption, + galleryGroup='Trends vs Depth', + groupSubtitle=None, + groupLink='trendsvsdepth', + galleryName=None) + + plotTask.run_after(anomalyTask) + self.add_subtask(plotTask)
+
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_transport.html b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_transport.html new file mode 100644 index 000000000..8a376fd92 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/ocean/time_series_transport.html @@ -0,0 +1,822 @@ + + + + + + mpas_analysis.ocean.time_series_transport — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.ocean.time_series_transport

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import os
+import xarray
+import numpy
+import matplotlib.pyplot as plt
+
+from geometric_features import FeatureCollection, read_feature_collection
+
+from mpas_analysis.shared.analysis_task import AnalysisTask
+
+from mpas_analysis.shared.constants import constants
+
+from mpas_analysis.shared.plot import timeseries_analysis_plot, savefig, \
+    add_inset
+
+from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf_with_fill
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    get_files_year_month, decode_strings
+
+from mpas_analysis.shared.html import write_image_xml
+
+from mpas_analysis.shared.transects import ComputeTransectMasksSubtask
+
+
+
+[docs] +class TimeSeriesTransport(AnalysisTask): + """ + Extract and plot time series of transport through transects on the MPAS + mesh. + """ + + # Authors + # ------- + # Xylar Asay-Davis, Stephen Price + +
+[docs] + def __init__(self, config, controlConfig=None): + + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(TimeSeriesTransport, self).__init__( + config=config, + taskName='timeSeriesTransport', + componentName='ocean', + tags=['timeSeries', 'transport']) + + startYear = config.getint('timeSeries', 'startYear') + endYear = config.getint('timeSeries', 'endYear') + + years = [year for year in range(startYear, endYear + 1)] + + transectsToPlot = config.getexpression('timeSeriesTransport', + 'transectsToPlot') + if len(transectsToPlot) == 0: + return + + masksSubtask = ComputeTransectMasksSubtask( + parentTask=self, transectGroup='Transport Transects') + + transectsToPlot = masksSubtask.expand_transect_names(transectsToPlot) + transportTransectFileName = masksSubtask.geojsonFileName + + self.add_subtask(masksSubtask) + + # in the end, we'll combine all the time series into one, but we + # create this task first so it's easier to tell it to run after all + # the compute tasks + combineSubtask = CombineTransportSubtask( + self, startYears=years, endYears=years) + + # run one subtask per year + for year in years: + computeSubtask = ComputeTransportSubtask( + self, startYear=year, endYear=year, masksSubtask=masksSubtask, + transectsToPlot=transectsToPlot) + self.add_subtask(computeSubtask) + computeSubtask.run_after(masksSubtask) + combineSubtask.run_after(computeSubtask) + + for index, transect in enumerate(transectsToPlot): + plotTransportSubtask = PlotTransportSubtask( + self, transect, index, controlConfig, transportTransectFileName) + plotTransportSubtask.run_after(combineSubtask) + self.add_subtask(plotTransportSubtask)
+
+ + + +class ComputeTransportSubtask(AnalysisTask): + """ + Computes time-series of transport through transects. + + Attributes + ---------- + startYear, endYear : int + The beginning and end of the time series to compute + + masksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask files for each ice shelf to plot + + transectsToPlot : list of str + A list of transects to plot + """ + + # Authors + # ------- + # Xylar Asay-Davis, Stephen Price + + def __init__(self, parentTask, startYear, endYear, + masksSubtask, transectsToPlot): + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + startYear, endYear : int + The beginning and end of the time series to compute + + masksSubtask : ``ComputeRegionMasksSubtask`` + A task for creating mask files for each ice shelf to plot + + transectsToPlot : list of str + A list of transects to plot + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(ComputeTransportSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='computeTransport_{:04d}-{:04d}'.format(startYear, + endYear)) + + self.subprocessCount = self.config.getint('timeSeriesTransport', + 'subprocessCount') + self.startYear = startYear + self.endYear = endYear + + self.masksSubtask = masksSubtask + self.run_after(masksSubtask) + + self.transectsToPlot = transectsToPlot + self.restartFileName = None + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. 
+ + Raises + ------ + IOError + If a restart file is not present + + ValueError + If ``config_land_ice_flux_mode`` is not one of ``standalone`` or + ``coupled`` + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.inDirectory, self.plotsDirectory, self.namelist, self.streams + # self.calendar + super(ComputeTransportSubtask, self).setup_and_check() + + self.check_analysis_enabled( + analysisOptionName='config_am_timeseriesstatsmonthly_enable', + raiseException=True) + + # Load mesh related variables + try: + self.restartFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-O restart file found: need at least one ' + 'restart file for transport calculations') + + def run_task(self): + """ + Computes time-series of transport through transects. + """ + # Authors + # ------- + # Xylar Asay-Davis, Stephen Price + + self.logger.info("Computing time series of transport through " + "transects...") + + config = self.config + + startDate = '{:04d}-01-01_00:00:00'.format(self.startYear) + endDate = '{:04d}-12-31_23:59:59'.format(self.endYear) + + outputDirectory = '{}/transport/'.format( + build_config_full_path(config, 'output', 'timeseriesSubdirectory')) + try: + os.makedirs(outputDirectory) + except OSError: + pass + + outFileName = '{}/transport_{:04d}-{:04d}.nc'.format( + outputDirectory, self.startYear, self.endYear) + + inputFiles = sorted(self.historyStreams.readpath( + 'timeSeriesStatsMonthlyOutput', startDate=startDate, + endDate=endDate, calendar=self.calendar)) + + years, months = get_files_year_month(inputFiles, + self.historyStreams, + 'timeSeriesStatsMonthlyOutput') + + variableList = ['timeMonthly_avg_layerThickness'] + with open_mpas_dataset(fileName=inputFiles[0], + calendar=self.calendar, + startDate=startDate, + endDate=endDate) as dsIn: + if 
'timeMonthly_avg_normalTransportVelocity' in dsIn: + variableList.append('timeMonthly_avg_normalTransportVelocity') + elif 'timeMonthly_avg_normalGMBolusVelocity' in dsIn: + variableList = variableList + \ + ['timeMonthly_avg_normalVelocity', + 'timeMonthly_avg_normalGMBolusVelocity'] + else: + self.logger.warning('Cannot compute transport velocity. ' + 'Using advection velocity.') + variableList.append('timeMonthly_avg_normalVelocity') + + outputExists = os.path.exists(outFileName) + outputValid = outputExists + if outputExists: + with open_mpas_dataset(fileName=outFileName, + calendar=self.calendar, + timeVariableNames=None, + variableList=None, + startDate=startDate, + endDate=endDate) as dsOut: + + for inIndex in range(dsOut.sizes['Time']): + + mask = numpy.logical_and( + dsOut.year[inIndex].values == years, + dsOut.month[inIndex].values == months) + if numpy.count_nonzero(mask) == 0: + outputValid = False + break + + if outputValid: + self.logger.info(' Time series exists -- Done.') + return + + transectMaskFileName = self.masksSubtask.maskFileName + + dsTransectMask = xarray.open_dataset(transectMaskFileName) + + # figure out the indices of the transects to plot + maskTransectNames = decode_strings(dsTransectMask.transectNames) + + dsMesh = xarray.open_dataset(self.restartFileName) + dsMesh = dsMesh[['dvEdge', 'cellsOnEdge']] + dsMesh.load() + dvEdge = dsMesh.dvEdge + cellsOnEdge = dsMesh.cellsOnEdge - 1 + + transectIndices = [] + transectData = [] + self.logger.info(' Caching transect data...') + for transect in self.transectsToPlot: + self.logger.info(' transect: {}'.format(transect)) + try: + transectIndex = maskTransectNames.index(transect) + except ValueError: + self.logger.warning(' Not found in masks. 
Skipping.') + continue + transectIndices.append(transectIndex) + + # select the current transect + dsMask = dsTransectMask.isel(nTransects=[transectIndex]) + dsMask.load() + edgeIndices = numpy.flatnonzero(dsMask.transectEdgeMasks.values) + edgeIndices = edgeIndices[edgeIndices >= 0].astype(int) + edgeSign = dsMask.transectEdgeMaskSigns.isel( + nEdges=edgeIndices) + + dv = dvEdge.isel(nEdges=edgeIndices) + coe = cellsOnEdge.isel(nEdges=edgeIndices) + transectData.append({'edgeIndices': edgeIndices, + 'edgeSign': edgeSign, + 'dv': dv, + 'coe': coe, + 'transect': transect}) + + timeDatasets = [] + self.logger.info(' Computing transport...') + for fileName in inputFiles: + self.logger.info(' input file: {}'.format(fileName)) + dsTimeSlice = open_mpas_dataset( + fileName=fileName, + calendar=self.calendar, + variableList=variableList, + startDate=startDate, + endDate=endDate) + dsTimeSlice.load() + + transectDatasets = [] + for data in transectData: + self.logger.info(' transect: {}'.format(data['transect'])) + + edgeIndices = data['edgeIndices'] + coe = data['coe'] + edgeSign = data['edgeSign'] + dv = data['dv'] + dsIn = dsTimeSlice.isel(nEdges=edgeIndices) + + # work on data from simulations + if 'timeMonthly_avg_normalTransportVelocity' in dsIn: + vel = dsIn.timeMonthly_avg_normalTransportVelocity + elif 'timeMonthly_avg_normalGMBolusVelocity' in dsIn: + vel = (dsIn.timeMonthly_avg_normalVelocity + + dsIn.timeMonthly_avg_normalGMBolusVelocity) + else: + vel = dsIn.timeMonthly_avg_normalVelocity + + # get layer thickness on edges by averaging adjacent cells + h = 0.5 * dsIn.timeMonthly_avg_layerThickness.isel( + nCells=coe).sum(dim='TWO') + + edgeTransport = edgeSign * vel * h * dv + + # convert from m^3/s to Sv + transport = (constants.m3ps_to_Sv * edgeTransport.sum( + dim=['nEdges', 'nVertLevels'])) + + dsOut = xarray.Dataset() + dsOut['transport'] = transport + dsOut.transport.attrs['units'] = 'Sv' + dsOut.transport.attrs['description'] = \ + 'Transport through 
transects' + transectDatasets.append(dsOut) + + dsOut = xarray.concat(transectDatasets, 'nTransects') + timeDatasets.append(dsOut) + + # combine data sets into a single data set + dsOut = xarray.concat(timeDatasets, 'Time') + dsOut.coords['transectNames'] = dsTransectMask.transectNames.isel( + nTransects=transectIndices) + dsOut.coords['year'] = (('Time',), years) + dsOut['year'].attrs['units'] = 'years' + dsOut.coords['month'] = (('Time',), months) + dsOut['month'].attrs['units'] = 'months' + write_netcdf_with_fill(dsOut, outFileName) + + +class CombineTransportSubtask(AnalysisTask): + """ + Combine individual time series into a single data set + """ + + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, startYears, endYears): + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``TimeSeriesOceanRegions`` + The main task of which this is a subtask + + startYears, endYears : list of int + The beginning and end of each time series to combine + + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(CombineTransportSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='combineTimeSeries') + + self.startYears = startYears + self.endYears = endYears + + def run_task(self): + """ + Combine the time series + """ + # Authors + # ------- + # Xylar Asay-Davis + + outputDirectory = '{}/transport/'.format( + build_config_full_path(self.config, 'output', + 'timeseriesSubdirectory')) + + outFileName = '{}/transport_{:04d}-{:04d}.nc'.format( + outputDirectory, self.startYears[0], self.endYears[-1]) + + if not os.path.exists(outFileName): + inFileNames = [] + for startYear, endYear in zip(self.startYears, self.endYears): + inFileName = '{}/transport_{:04d}-{:04d}.nc'.format( + outputDirectory, startYear, endYear) + inFileNames.append(inFileName) + 
+ ds = xarray.open_mfdataset(inFileNames, combine='nested', + concat_dim='Time', decode_times=False) + ds.load() + write_netcdf_with_fill(ds, outFileName) + + +class PlotTransportSubtask(AnalysisTask): + """ + Plots time-series output of transport through transects. + + Attributes + ---------- + transect : str + Name of the transect to plot + + transectIndex : int + The index into the dimension ``nTransects`` of the transect to plot + + controlConfig : mpas_tools.config.MpasConfigParser + The configuration options for the control run (if any) + + """ + + # Authors + # ------- + # Xylar Asay-Davis, Stephen Price + + def __init__(self, parentTask, transect, transectIndex, controlConfig, + transportTransectFileName): + + """ + Construct the analysis task. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + transect : str + Name of the transect to plot + + transectIndex : int + The index into the dimension ``nTransects`` of the transect to plot + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(PlotTransportSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName='plotTransport_{}'.format(transect.replace(' ', '_'))) + + self.transportTransectFileName = transportTransectFileName + self.transect = transect + self.transectIndex = transectIndex + self.controlConfig = controlConfig + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. 
+ + Raises + ------ + IOError + If files are not present + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.inDirectory, self.plotsDirectory, self.namelist, self.streams + # self.calendar + super(PlotTransportSubtask, self).setup_and_check() + + self.xmlFileNames = ['{}/transport_{}.xml'.format( + self.plotsDirectory, self.transect.replace(' ', '_'))] + + def run_task(self): + """ + Plots time-series output of transport through transects. + """ + # Authors + # ------- + # Xylar Asay-Davis, Stephen Price + + self.logger.info("\nPlotting time series of transport through " + "{}...".format(self.transect)) + + self.logger.info(' Load transport data...') + + obsDict = {'Drake Passage': [120, 175], + 'Tasmania-Ant': [147, 167], + 'Africa-Ant': None, + 'Antilles Inflow': [-23.1, -13.7], + 'Mona Passage': [-3.8, -1.4], + 'Windward Passage': [-7.2, -6.8], + 'Florida-Cuba': [30, 33], + 'Florida-Bahamas': [30, 33], + 'Indonesian Throughflow': [-21, -11], + 'Agulhas': [-90, -50], + 'Mozambique Channel': [-20, -8], + 'Bering Strait': [0.6, 1.0], + 'Lancaster Sound': [-1.0, -0.5], + 'Fram Strait': [-4.7, 0.7], + 'Davis Strait': [-1.6, -3.6], + 'Barents Sea Opening': [1.4, 2.6], + 'Nares Strait': [-1.8, 0.2], + 'Denmark Strait': None, + 'Iceland-Faroe-Scotland': None} + + config = self.config + calendar = self.calendar + + fcAll = read_feature_collection(self.transportTransectFileName) + + fc = FeatureCollection() + for feature in fcAll.features: + if feature['properties']['name'] == self.transect: + fc.add_feature(feature) + break + + transport, trans_mean, trans_std = self._load_transport(config) + + if self.transect in obsDict: + bounds = obsDict[self.transect] + else: + bounds = None + + plotControl = self.controlConfig is not None + + mainRunName = config.get('runs', 'mainRunName') + movingAveragePoints = 
config.getint('timeSeriesTransport', + 'movingAveragePoints') + + self.logger.info(' Plotting...') + + transectName = self.transect.replace('_', ' ') + title = transectName + thumbnailDescription = transectName + + xLabel = 'Time (yr)' + yLabel = 'Transport (Sv)' + + filePrefix = 'transport_{}'.format(self.transect.replace(' ', '_')) + outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix) + + fields = [transport] + lineColors = [config.get('timeSeries', 'mainColor')] + lineWidths = [2.5] + meanString = 'mean={:.2f} $\\pm$ {:.2f}'.format(trans_mean, trans_std) + if plotControl: + controlRunName = self.controlConfig.get('runs', 'mainRunName') + ref_transport, ref_mean, ref_std = \ + self._load_transport(self.controlConfig) + refMeanString = f'mean={ref_mean:.2f} $\\pm$ {ref_std:.2f}' + fields.append(ref_transport) + lineColors.append(config.get('timeSeries', 'controlColor')) + lineWidths.append(1.2) + legendText = ['{} ({})'.format(mainRunName, meanString), + '{} ({})'.format(controlRunName, refMeanString)] + + else: + legendText = [mainRunName] + title = '{} ({})'.format(title, meanString) + + if config.has_option(self.taskName, 'firstYearXTicks'): + firstYearXTicks = config.getint(self.taskName, + 'firstYearXTicks') + else: + firstYearXTicks = None + + if config.has_option(self.taskName, 'yearStrideXTicks'): + yearStrideXTicks = config.getint(self.taskName, + 'yearStrideXTicks') + else: + yearStrideXTicks = None + + fig = timeseries_analysis_plot(config, fields, calendar=calendar, + title=title, xlabel=xLabel, + ylabel=yLabel, + movingAveragePoints=movingAveragePoints, + lineColors=lineColors, + lineWidths=lineWidths, + legendText=legendText, + firstYearXTicks=firstYearXTicks, + yearStrideXTicks=yearStrideXTicks) + + if bounds is not None: + t = transport.Time.values + plt.gca().fill_between(t, bounds[0] * numpy.ones_like(t), + bounds[1] * numpy.ones_like(t), alpha=0.3, + label='observations') + plt.legend(loc='lower left') + + # do this before the 
inset because otherwise it moves the inset + # and cartopy doesn't play too well with tight_layout anyway + plt.tight_layout() + + add_inset(fig, fc, width=2.0, height=2.0) + + savefig(outFileName, config) + + caption = 'Transport through the {} Transect'.format(transectName) + write_image_xml( + config=config, + filePrefix=filePrefix, + componentName='Ocean', + componentSubdirectory='ocean', + galleryGroup='Transport Time Series', + groupLink='transporttime', + thumbnailDescription=thumbnailDescription, + imageDescription=caption, + imageCaption=caption) + + def _load_transport(self, config): + """ + Reads transport time series for this transect + """ + # Authors + # ------- + # Xylar Asay-Davis + + baseDirectory = build_config_full_path( + config, 'output', 'timeSeriesSubdirectory') + + startYear = config.getint('timeSeries', 'startYear') + endYear = config.getint('timeSeries', 'endYear') + + inFileName = '{}/transport/transport_{:04d}-{:04d}.nc'.format( + baseDirectory, startYear, endYear) + + dsIn = xarray.open_dataset(inFileName) + transport = dsIn.transport.isel(nTransects=self.transectIndex) + mean = transport.mean().values + std = transport.std().values + return transport, mean, std +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/sea_ice/climatology_map_berg_conc.html b/1.11.0rc1/_modules/mpas_analysis/sea_ice/climatology_map_berg_conc.html new file mode 100644 index 000000000..ae392bf78 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/sea_ice/climatology_map_berg_conc.html @@ -0,0 +1,360 @@ + + + + + + mpas_analysis.sea_ice.climatology_map_berg_conc — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.sea_ice.climatology_map_berg_conc

+# Copyright (c) 2017,  Los Alamos National Security, LLC (LANS)
+# and the University Corporation for Atmospheric Research (UCAR).
+#
+# Unless noted otherwise source code is licensed under the BSD license.
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at http://mpas-dev.github.com/license.html
+#
+
+import xarray as xr
+from pyremap import LatLonGridDescriptor
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \
+    RemapObservedClimatologySubtask
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+
+from mpas_analysis.shared.io.utility import build_obs_path
+
+
+
+[docs] +class ClimatologyMapIcebergConc(AnalysisTask): + """ + An analysis task for comparison of iceberg concentration against + observations + """ + # Authors + # ------- + # Darin Comeau, Xylar Asay-Davis + +
+[docs] + def __init__(self, config, mpasClimatologyTask, hemisphere, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + hemisphere : {'NH', 'SH'} + The hemisphere to plot + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Darin Comeau, Xylar Asay-Davis + + taskName = 'climatologyMapIcebergConc{}'.format(hemisphere) + + fieldName = 'IcebergConc' + + tags = ['icebergs', 'climatology', 'horizontalMap', fieldName] + if hemisphere == 'NH': + tags = tags + ['arctic'] + else: + tags = tags + ['antarctic'] + + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapIcebergConc, self).__init__( + config=config, taskName=taskName, + componentName='seaIce', + tags=tags) + + mpasFieldName = 'timeMonthly_avg_bergAreaCell' + iselValues = None + + sectionName = taskName + + if hemisphere == 'NH': + hemisphereLong = 'Northern' + else: + hemisphereLong = 'Southern' + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(sectionName)) + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of comparison grids'.format(sectionName)) + + # the variable self.mpasFieldName will be added to mpasClimatologyTask + # along with the seasons. 
+ remapClimatologySubtask = RemapMpasClimatologySubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName='{}{}'.format(fieldName, hemisphere), + variableList=[mpasFieldName], + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues) + + if controlConfig is None: + refTitleLabel = 'Observations (Altiberg)' + galleryName = 'Observations: Altiberg' + diffTitleLabel = 'Model - Observations' + refFieldName = 'icebergConc' + obsFileName = build_obs_path( + config, 'iceberg', + 'concentrationAltiberg{}'.format(hemisphere)) + + remapObservationsSubtask = RemapAltibergConcClimatology( + parentTask=self, seasons=seasons, + fileName=obsFileName, + outFilePrefix='{}{}'.format(refFieldName, + hemisphere), + comparisonGridNames=comparisonGridNames) + self.add_subtask(remapObservationsSubtask) + + else: + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = None + refTitleLabel = 'Control: {}'.format(controlRunName) + refFieldName = mpasFieldName + diffTitleLabel = 'Main - Control' + + remapObservationsSubtask = None + + for season in seasons: + for comparisonGridName in comparisonGridNames: + + imageCaption = \ + '{} Climatology Map of {}-Hemisphere Iceberg ' \ + 'Concentration.'.format(season, hemisphereLong) + galleryGroup = \ + '{}-Hemisphere Iceberg Concentration'.format( + hemisphereLong) + # make a new subtask for this season and comparison grid + subtask = PlotClimatologyMapSubtask( + parentTask=self, season=season, + comparisonGridName=comparisonGridName, + remapMpasClimatologySubtask=remapClimatologySubtask, + remapObsClimatologySubtask=remapObservationsSubtask, + controlConfig=controlConfig) + + subtask.set_plot_info( + outFileLabel='bergconc{}'.format(hemisphere), + fieldNameInTitle='Iceberg concentration', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'fraction', + imageCaption=imageCaption, + 
galleryGroup=galleryGroup, + groupSubtitle=None, + groupLink='{}_conc'.format(hemisphere.lower()), + galleryName=galleryName, + extend='neither') + + self.add_subtask(subtask)
+
+ + + +class RemapAltibergConcClimatology(RemapObservedClimatologySubtask): + """ + A subtask for reading and remapping iceberg concentration from Altiberg + observations + """ + # Authors + # ------- + # Darin Comeau, Xylar Asay-Davis + + def get_observation_descriptor(self, fileName): + """ + get a MeshDescriptor for the observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + """ + # Authors + # ------- + # Darin Comeau, Xylar Asay-Davis + + # create a descriptor of the observation grid using the lat/lon + # coordinates + obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, + latVarName='latitude', + lonVarName='longitude') + return obsDescriptor + + def build_observational_dataset(self, fileName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Darin Comeau, Xylar Asay-Davis + + dsObs = xr.open_dataset(fileName) + dsObs = dsObs.rename({'probability': 'icebergConc', 'time': 'Time'}) + dsObs.coords['month'] = dsObs['Time.month'] + dsObs.coords['year'] = dsObs['Time.year'] + dsObs = dsObs.transpose('Time', 'latitude', 'longitude') + + return dsObs +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/sea_ice/climatology_map_sea_ice_conc.html b/1.11.0rc1/_modules/mpas_analysis/sea_ice/climatology_map_sea_ice_conc.html new file mode 100644 index 000000000..96d1a7396 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/sea_ice/climatology_map_sea_ice_conc.html @@ -0,0 +1,427 @@ + + + + + + mpas_analysis.sea_ice.climatology_map_sea_ice_conc — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.sea_ice.climatology_map_sea_ice_conc

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import xarray as xr
+from pyremap import LatLonGridDescriptor
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \
+    RemapObservedClimatologySubtask
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+
+from mpas_analysis.shared.io.utility import build_obs_path
+
+
+
+[docs] +class ClimatologyMapSeaIceConc(AnalysisTask): + """ + An analysis task for comparison of sea ice concentration against + observations + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani + +
+[docs] + def __init__(self, config, mpasClimatologyTask, hemisphere, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + hemisphere : {'NH', 'SH'} + The hemisphere to plot + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + taskName = 'climatologyMapSeaIceConc{}'.format(hemisphere) + + fieldName = 'seaIceConc' + + tags = ['climatology', 'horizontalMap', fieldName, 'publicObs'] + if hemisphere == 'NH': + tags = tags + ['arctic'] + else: + tags = tags + ['antarctic'] + + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapSeaIceConc, self).__init__( + config=config, taskName=taskName, + componentName='seaIce', + tags=tags) + + mpasFieldName = 'timeMonthly_avg_iceAreaCell' + iselValues = None + + sectionName = taskName + + if hemisphere == 'NH': + hemisphereLong = 'Northern' + else: + hemisphereLong = 'Southern' + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of seasons'.format(sectionName)) + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError('config section {} does not contain valid list ' + 'of comparison grids'.format(sectionName)) + + # the variable self.mpasFieldName will be added to mpasClimatologyTask + # along with the seasons. 
+ remapClimatologySubtask = RemapMpasClimatologySubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName='{}{}'.format(fieldName, hemisphere), + variableList=[mpasFieldName], + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues) + + if controlConfig is None: + self._add_obs_tasks(seasons, comparisonGridNames, hemisphere, + hemisphereLong, remapClimatologySubtask, + mpasFieldName) + else: + self._add_ref_tasks(seasons, comparisonGridNames, hemisphere, + hemisphereLong, remapClimatologySubtask, + controlConfig, mpasFieldName)
+ + + def _add_obs_tasks(self, seasons, comparisonGridNames, hemisphere, + hemisphereLong, remapClimatologySubtask, + mpasFieldName): + config = self.config + obsFieldName = 'seaIceConc' + sectionName = self.taskName + + minConcentration = config.getfloat(self.taskName, 'minConcentration') + + observationPrefixes = config.getexpression(sectionName, + 'observationPrefixes') + for prefix in observationPrefixes: + for season in seasons: + observationTitleLabel = \ + 'Observations (SSM/I {})'.format(prefix) + + obsFileName = build_obs_path( + config, 'seaIce', + relativePathOption='concentration{}{}_{}'.format( + prefix, hemisphere, season), + relativePathSection=sectionName) + + remapObservationsSubtask = RemapObservedConcClimatology( + parentTask=self, seasons=[season], + fileName=obsFileName, + outFilePrefix='{}{}{}_{}'.format( + obsFieldName, prefix, hemisphere, season), + comparisonGridNames=comparisonGridNames, + subtaskName='remapObservations_{}{}'.format( + prefix, season)) + self.add_subtask(remapObservationsSubtask) + for comparisonGridName in comparisonGridNames: + + imageDescription = \ + 'Climatology Map of {}-Hemisphere Sea-Ice ' \ + 'Concentration'.format(hemisphereLong) + imageCaption = \ + '{}. 
<br> Observations: SSM/I {}'.format( + imageDescription, prefix) + galleryGroup = \ + '{}-Hemisphere Sea-Ice Concentration'.format( + hemisphereLong) + # make a new subtask for this season and comparison + # grid + + subtaskName = f'plot{season}_{comparisonGridName}_{prefix}' + + subtask = PlotClimatologyMapSubtask( + parentTask=self, season=season, + comparisonGridName=comparisonGridName, + remapMpasClimatologySubtask=remapClimatologySubtask, + remapObsClimatologySubtask=remapObservationsSubtask, + subtaskName=subtaskName) + + subtask.set_plot_info( + outFileLabel='iceconc{}{}'.format(prefix, + hemisphere), + fieldNameInTitle='Sea ice concentration', + mpasFieldName=mpasFieldName, + refFieldName=obsFieldName, + refTitleLabel=observationTitleLabel, + diffTitleLabel='Model - Observations', + unitsLabel=r'fraction', + imageCaption=imageCaption, + galleryGroup=galleryGroup, + groupSubtitle=None, + groupLink='{}_conc'.format(hemisphere.lower()), + galleryName='Observations: SSM/I {}'.format( + prefix), + maskMinThreshold=minConcentration, + extend='neither') + + self.add_subtask(subtask) + + def _add_ref_tasks(self, seasons, comparisonGridNames, hemisphere, + hemisphereLong, remapClimatologySubtask, + controlConfig, mpasFieldName): + + minConcentration = self.config.getfloat(self.taskName, + 'minConcentration') + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = None + refTitleLabel = 'Control: {}'.format(controlRunName) + + for season in seasons: + for comparisonGridName in comparisonGridNames: + + imageDescription = \ + '{} Climatology Map of {}-Hemisphere Sea-Ice ' \ + 'Concentration'.format(season, hemisphereLong) + imageCaption = imageDescription + galleryGroup = \ + '{}-Hemisphere Sea-Ice Concentration'.format( + hemisphereLong) + # make a new subtask for this season and comparison + # grid + subtask = PlotClimatologyMapSubtask( + parentTask=self, season=season, + comparisonGridName=comparisonGridName, + 
remapMpasClimatologySubtask=remapClimatologySubtask, + controlConfig=controlConfig) + + subtask.set_plot_info( + outFileLabel='iceconc{}'.format(hemisphere), + fieldNameInTitle='Sea ice concentration', + mpasFieldName=mpasFieldName, + refFieldName=mpasFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel='Main - Control', + unitsLabel=r'fraction', + imageCaption=imageCaption, + galleryGroup=galleryGroup, + groupSubtitle=None, + groupLink='{}_conc'.format(hemisphere.lower()), + galleryName=galleryName, + maskMinThreshold=minConcentration, + extend='neither') + + self.add_subtask(subtask)
+ + + +class RemapObservedConcClimatology(RemapObservedClimatologySubtask): + """ + A subtask for reading and remapping sea ice concentration observations + """ + # Authors + # ------- + # Xylar Asay-Davis + + def get_observation_descriptor(self, fileName): + """ + get a MeshDescriptor for the observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + """ + # Authors + # ------- + # Xylar Asay-Davis + + # create a descriptor of the observation grid using the lat/lon + # coordinates + obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, + latVarName='t_lat', + lonVarName='t_lon') + return obsDescriptor + + def build_observational_dataset(self, fileName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Xylar Asay-Davis + + dsObs = xr.open_dataset(fileName) + dsObs = dsObs.rename({'AICE': 'seaIceConc'}) + return dsObs +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/sea_ice/climatology_map_sea_ice_thick.html b/1.11.0rc1/_modules/mpas_analysis/sea_ice/climatology_map_sea_ice_thick.html new file mode 100644 index 000000000..8ba81788a --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/sea_ice/climatology_map_sea_ice_thick.html @@ -0,0 +1,363 @@ + + + + + + mpas_analysis.sea_ice.climatology_map_sea_ice_thick — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.sea_ice.climatology_map_sea_ice_thick

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import xarray as xr
+from pyremap import LatLonGridDescriptor
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask, \
+    RemapObservedClimatologySubtask
+
+from mpas_analysis.shared.plot import PlotClimatologyMapSubtask
+
+from mpas_analysis.shared.io.utility import build_obs_path
+
+
+
+[docs] +class ClimatologyMapSeaIceThick(AnalysisTask): + """ + An analysis task for comparison of sea ice thickness against + observations + """ + # Authors + # ------- + # Luke Van Roekel, Xylar Asay-Davis, Milena Veneziani + +
+[docs] + def __init__(self, config, mpasClimatologyTask, hemisphere, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + hemisphere : {'NH', 'SH'} + The hemisphere to plot + + controlConfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + taskName = f'climatologyMapSeaIceThick{hemisphere}' + + fieldName = 'seaIceThick' + + tags = ['climatology', 'horizontalMap', fieldName, 'publicObs'] + if hemisphere == 'NH': + tags = tags + ['arctic'] + else: + tags = tags + ['antarctic'] + + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapSeaIceThick, self).__init__( + config=config, taskName=taskName, + componentName='seaIce', + tags=tags) + + mpasFieldName = 'timeMonthly_avg_iceVolumeCell' + iselValues = None + + sectionName = taskName + + if hemisphere == 'NH': + hemisphereLong = 'Northern' + else: + hemisphereLong = 'Southern' + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError(f'config section {sectionName} does not contain ' + f'a valid list of seasons') + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError(f'config section {sectionName} does not contain ' + f'a valid list of comparison grids') + + # the variable self.mpasFieldName will be added to mpasClimatologyTask + # along with the seasons. 
+ remapClimatologySubtask = RemapMpasClimatologySubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName=f'{fieldName}{hemisphere}', + variableList=[mpasFieldName], + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues) + + if controlConfig is None: + refTitleLabel = 'Observations (ICESat)' + galleryName = 'Observations: ICESat' + diffTitleLabel = 'Model - Observations' + refFieldName = 'seaIceThick' + else: + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = None + refTitleLabel = f'Control: {controlRunName}' + refFieldName = mpasFieldName + diffTitleLabel = 'Main - Control' + + for season in seasons: + if controlConfig is None: + obsFileName = build_obs_path( + config, 'seaIce', + relativePathOption=f'thickness{hemisphere}_{season}', + relativePathSection=sectionName) + + remapObservationsSubtask = RemapObservedThickClimatology( + parentTask=self, seasons=[season], + fileName=obsFileName, + outFilePrefix=f'{refFieldName}{hemisphere}_{season}', + comparisonGridNames=comparisonGridNames, + subtaskName=f'remapObservations{season}') + self.add_subtask(remapObservationsSubtask) + else: + remapObservationsSubtask = None + + for comparisonGridName in comparisonGridNames: + + imageCaption = \ + f'Climatology Map of {hemisphereLong}-Hemisphere ' \ + f'Sea-Ice Thickness.' 
+ galleryGroup = \ + f'{hemisphereLong}-Hemisphere Sea-Ice Thickness' + # make a new subtask for this season and comparison grid + subtaskName = f'plot{season}_{comparisonGridName}' + + subtask = PlotClimatologyMapSubtask( + parentTask=self, season=season, + comparisonGridName=comparisonGridName, + remapMpasClimatologySubtask=remapClimatologySubtask, + remapObsClimatologySubtask=remapObservationsSubtask, + subtaskName=subtaskName, + controlConfig=controlConfig) + + subtask.set_plot_info( + outFileLabel=f'icethick{hemisphere}', + fieldNameInTitle='Sea ice thickness', + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=r'm', + imageCaption=imageCaption, + galleryGroup=galleryGroup, + groupSubtitle=None, + groupLink=f'{hemisphere.lower()}_thick', + galleryName=galleryName, + maskMinThreshold=0, + extend='neither') + + self.add_subtask(subtask)
+
+ + + +class RemapObservedThickClimatology(RemapObservedClimatologySubtask): + """ + A subtask for reading and remapping sea ice thickness observations + """ + # Authors + # ------- + # Xylar Asay-Davis + + def get_observation_descriptor(self, fileName): + """ + get a MeshDescriptor for the observation grid + + Parameters + ---------- + fileName : str + observation file name describing the source grid + + Returns + ------- + obsDescriptor : ``MeshDescriptor`` + The descriptor for the observation grid + """ + # Authors + # ------- + # Xylar Asay-Davis + + # create a descriptor of the observation grid using the lat/lon + # coordinates + obsDescriptor = LatLonGridDescriptor.read(fileName=fileName, + latVarName='t_lat', + lonVarName='t_lon') + return obsDescriptor + + def build_observational_dataset(self, fileName): + """ + read in the data sets for observations, and possibly rename some + variables and dimensions + + Parameters + ---------- + fileName : str + observation file name + + Returns + ------- + dsObs : ``xarray.Dataset`` + The observational dataset + """ + # Authors + # ------- + # Xylar Asay-Davis + + dsObs = xr.open_dataset(fileName) + dsObs = dsObs.rename({'HI': 'seaIceThick'}) + return dsObs +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/sea_ice/time_series.html b/1.11.0rc1/_modules/mpas_analysis/sea_ice/time_series.html new file mode 100644 index 000000000..5619cc27f --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/sea_ice/time_series.html @@ -0,0 +1,800 @@ + + + + + + mpas_analysis.sea_ice.time_series — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.sea_ice.time_series

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import numpy as np
+import xarray as xr
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.plot import timeseries_analysis_plot, \
+    timeseries_analysis_plot_polar, savefig
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    check_path_exists, make_directories, build_obs_path
+
+from mpas_analysis.shared.timekeeping.utility import date_to_days, \
+    days_to_datetime, datetime_to_days, get_simulation_start_time
+from mpas_analysis.shared.timekeeping.MpasRelativeDelta import \
+    MpasRelativeDelta
+
+from mpas_analysis.shared.time_series import combine_time_series_with_ncrcat
+from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf_with_fill
+
+from mpas_analysis.shared.html import write_image_xml
+
+
+
+[docs] +class TimeSeriesSeaIce(AnalysisTask): + """ + Performs analysis of time series of sea-ice properties. + + Attributes + ---------- + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + controlconfig : mpas_tools.config.MpasConfigParser + Configuration options for a control run (if any) + + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani + +
+[docs] + def __init__(self, config, mpasTimeSeriesTask, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasTimeSeriesTask : ``MpasTimeSeriesTask`` + The task that extracts the time series from MPAS monthly output + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(TimeSeriesSeaIce, self).__init__( + config=config, + taskName='timeSeriesSeaIceAreaVol', + componentName='seaIce', + tags=['timeSeries', 'publicObs', 'arctic', 'antarctic']) + + self.mpasTimeSeriesTask = mpasTimeSeriesTask + self.controlConfig = controlConfig + + self.run_after(mpasTimeSeriesTask)
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + OSError + If files are not present + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(TimeSeriesSeaIce, self).setup_and_check() + + config = self.config + + self.startDate = self.config.get('timeSeries', 'startDate') + self.endDate = self.config.get('timeSeries', 'endDate') + + self.variableList = ['timeMonthly_avg_iceAreaCell', + 'timeMonthly_avg_iceVolumeCell'] + self.mpasTimeSeriesTask.add_variables(variableList=self.variableList) + + self.inputFile = self.mpasTimeSeriesTask.outputFile + + if config.get('runs', 'preprocessedReferenceRunName') != 'None': + check_path_exists(config.get('seaIcePreprocessedReference', + 'baseDirectory')) + + # get a list of timeSeriesStatsMonthly output files from the streams + # file, reading only those that are between the start and end dates + streamName = 'timeSeriesStatsMonthlyOutput' + self.startDate = config.get('timeSeries', 'startDate') + self.endDate = config.get('timeSeries', 'endDate') + self.inputFiles = \ + self.historyStreams.readpath(streamName, + startDate=self.startDate, + endDate=self.endDate, + calendar=self.calendar) + + if len(self.inputFiles) == 0: + raise IOError('No files were found in stream {} between {} and ' + '{}.'.format(streamName, self.startDate, + self.endDate)) + + self.simulationStartTime = get_simulation_start_time(self.runStreams) + + try: + self.restartFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS-SeaIce restart file found: need at least ' + 'one restart file to perform remapping of ' + 'climatologies.') + + # these are redundant for 
now. Later cleanup is needed where these + # file names are reused in run() + self.xmlFileNames = [] + + polarPlot = config.getboolean('timeSeriesSeaIceAreaVol', 'polarPlot') + mainRunName = config.get('runs', 'mainRunName') + preprocessedReferenceRunName = \ + config.get('runs', 'preprocessedReferenceRunName') + compareWithObservations = config.getboolean('timeSeriesSeaIceAreaVol', + 'compareWithObservations') + + polarXMLFileNames = [] + + if (not compareWithObservations and + preprocessedReferenceRunName == 'None'): + for variableName in ['iceArea', 'iceVolume']: + filePrefix = '{}.{}'.format(mainRunName, + variableName) + + self.xmlFileNames.append('{}/{}.xml'.format( + self.plotsDirectory, filePrefix)) + polarXMLFileNames.append('{}/{}_polar.xml'.format( + self.plotsDirectory, filePrefix)) + else: + + for hemisphere in ['NH', 'SH']: + for variableName in ['iceArea', 'iceVolume']: + filePrefix = '{}{}_{}'.format(variableName, + hemisphere, + mainRunName) + + self.xmlFileNames.append('{}/{}.xml'.format( + self.plotsDirectory, filePrefix)) + polarXMLFileNames.append('{}/{}_polar.xml'.format( + self.plotsDirectory, filePrefix)) + + if polarPlot: + self.xmlFileNames.extend(polarXMLFileNames) + return + + def run_task(self): + """ + Performs analysis of time series of sea-ice properties. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani + + self.logger.info("\nPlotting sea-ice area and volume time series...") + + config = self.config + calendar = self.calendar + + sectionName = self.taskName + + plotTitles = {'iceArea': 'Sea-ice area', + 'iceVolume': 'Sea-ice volume', + 'iceThickness': 'Sea-ice mean thickness'} + + units = {'iceArea': '[km$^2$]', + 'iceVolume': '[10$^3$ km$^3$]', + 'iceThickness': '[m]'} + + obsFileNames = { + 'iceArea': {'NH': build_obs_path( + config, 'seaIce', + relativePathOption='areaNH', + relativePathSection=sectionName), + 'SH': build_obs_path( + config, 'seaIce', + relativePathOption='areaSH', + relativePathSection=sectionName)}, + 'iceVolume': {'NH': build_obs_path( + config, 'seaIce', + relativePathOption='volNH', + relativePathSection=sectionName), + 'SH': build_obs_path( + config, 'seaIce', + relativePathOption='volSH', + relativePathSection=sectionName)}} + + # Some plotting rules + titleFontSize = config.get('timeSeriesSeaIceAreaVol', 'titleFontSize') + + mainRunName = config.get('runs', 'mainRunName') + preprocessedReferenceRunName = \ + config.get('runs', 'preprocessedReferenceRunName') + preprocessedReferenceDirectory = \ + config.get('seaIcePreprocessedReference', 'baseDirectory') + + compareWithObservations = config.getboolean('timeSeriesSeaIceAreaVol', + 'compareWithObservations') + + movingAveragePoints = config.getint('timeSeriesSeaIceAreaVol', + 'movingAveragePoints') + + polarPlot = config.getboolean('timeSeriesSeaIceAreaVol', 'polarPlot') + + outputDirectory = build_config_full_path(config, 'output', + 'timeseriesSubdirectory') + + make_directories(outputDirectory) + + self.logger.info(' Load sea-ice data...') + # Load mesh + + dsTimeSeries = self._compute_area_vol() + + yearStart = days_to_datetime(dsTimeSeries['NH'].Time.min(), + calendar=calendar).year + yearEnd = days_to_datetime(dsTimeSeries['NH'].Time.max(), + calendar=calendar).year + timeStart = date_to_days(year=yearStart, 
month=1, day=1, + calendar=calendar) + timeEnd = date_to_days(year=yearEnd, month=12, day=31, + calendar=calendar) + + if preprocessedReferenceRunName != 'None': + # determine if we're beyond the end of the preprocessed data + # (and go ahead and cache the data set while we're checking) + outFolder = '{}/preprocessed'.format(outputDirectory) + make_directories(outFolder) + inFilesPreprocessed = '{}/icevol.{}.year*.nc'.format( + preprocessedReferenceDirectory, preprocessedReferenceRunName) + outFileName = '{}/iceVolume.nc'.format(outFolder) + + combine_time_series_with_ncrcat(inFilesPreprocessed, + outFileName, + logger=self.logger) + dsPreprocessed = open_mpas_dataset(fileName=outFileName, + calendar=calendar, + timeVariableNames='xtime') + preprocessedYearEnd = days_to_datetime(dsPreprocessed.Time.max(), + calendar=calendar).year + if yearStart <= preprocessedYearEnd: + dsPreprocessedTimeSlice = \ + dsPreprocessed.sel(Time=slice(timeStart, timeEnd)) + else: + self.logger.warning('Preprocessed time series ends before the ' + 'timeSeries startYear and will not be ' + 'plotted.') + preprocessedReferenceRunName = 'None' + + if self.controlConfig is not None: + + dsTimeSeriesRef = {} + baseDirectory = build_config_full_path( + self.controlConfig, 'output', 'timeSeriesSubdirectory') + + controlRunName = self.controlConfig.get('runs', 'mainRunName') + + for hemisphere in ['NH', 'SH']: + inFileName = '{}/seaIceAreaVol{}.nc'.format(baseDirectory, + hemisphere) + + dsTimeSeriesRef[hemisphere] = xr.open_dataset(inFileName) + + norm = {'iceArea': 1e-6, # m^2 to km^2 + 'iceVolume': 1e-12, # m^3 to 10^3 km^3 + 'iceThickness': 1.} + + xLabel = 'Time [years]' + + galleryGroup = 'Time Series' + groupLink = 'timeseries' + + obs = {} + preprocessed = {} + figureNameStd = {} + figureNamePolar = {} + title = {} + plotVars = {} + obsLegend = {} + plotVarsRef = {} + + for hemisphere in ['NH', 'SH']: + + self.logger.info(' Make {} plots...'.format(hemisphere)) + + for variableName in 
['iceArea', 'iceVolume']: + key = (hemisphere, variableName) + + # apply the norm to each variable + plotVars[key] = (norm[variableName] * + dsTimeSeries[hemisphere][variableName]) + + if self.controlConfig is not None: + plotVarsRef[key] = norm[variableName] * \ + dsTimeSeriesRef[hemisphere][variableName] + + prefix = '{}/{}{}_{}'.format(self.plotsDirectory, + variableName, + hemisphere, + mainRunName) + + figureNameStd[key] = '{}.png'.format(prefix) + figureNamePolar[key] = '{}_polar.png'.format(prefix) + + title[key] = '{} ({})'.format(plotTitles[variableName], + hemisphere) + + if compareWithObservations: + key = (hemisphere, 'iceArea') + obsLegend[key] = 'SSM/I observations, annual cycle ' + if hemisphere == 'NH': + key = (hemisphere, 'iceVolume') + obsLegend[key] = 'PIOMAS, annual cycle (blue)' + + if preprocessedReferenceRunName != 'None': + for variableName in ['iceArea', 'iceVolume']: + key = (hemisphere, variableName) + + if compareWithObservations: + + outFolder = '{}/obs'.format(outputDirectory) + make_directories(outFolder) + outFileName = '{}/iceArea{}.nc'.format(outFolder, hemisphere) + + combine_time_series_with_ncrcat( + obsFileNames['iceArea'][hemisphere], + outFileName, logger=self.logger) + dsObs = open_mpas_dataset(fileName=outFileName, + calendar=calendar, + timeVariableNames='xtime') + key = (hemisphere, 'iceArea') + obs[key] = self._replicate_cycle(plotVars[key], dsObs.IceArea, + calendar) + + key = (hemisphere, 'iceVolume') + if hemisphere == 'NH': + outFileName = '{}/iceVolume{}.nc'.format(outFolder, + hemisphere) + combine_time_series_with_ncrcat( + obsFileNames['iceVolume'][hemisphere], + outFileName, logger=self.logger) + dsObs = open_mpas_dataset(fileName=outFileName, + calendar=calendar, + timeVariableNames='xtime') + obs[key] = self._replicate_cycle(plotVars[key], + dsObs.IceVol, + calendar) + else: + obs[key] = None + + if preprocessedReferenceRunName != 'None': + outFolder = '{}/preprocessed'.format(outputDirectory) + 
inFilesPreprocessed = '{}/icearea.{}.year*.nc'.format( + preprocessedReferenceDirectory, + preprocessedReferenceRunName) + + outFileName = '{}/iceArea.nc'.format(outFolder) + + combine_time_series_with_ncrcat(inFilesPreprocessed, + outFileName, + logger=self.logger) + dsPreprocessed = open_mpas_dataset(fileName=outFileName, + calendar=calendar, + timeVariableNames='xtime') + dsPreprocessedTimeSlice = dsPreprocessed.sel( + Time=slice(timeStart, timeEnd)) + key = (hemisphere, 'iceArea') + preprocessed[key] = dsPreprocessedTimeSlice[ + 'icearea_{}'.format(hemisphere.lower())] + + inFilesPreprocessed = '{}/icevol.{}.year*.nc'.format( + preprocessedReferenceDirectory, + preprocessedReferenceRunName) + outFileName = '{}/iceVolume.nc'.format(outFolder) + + combine_time_series_with_ncrcat(inFilesPreprocessed, + outFileName, + logger=self.logger) + dsPreprocessed = open_mpas_dataset(fileName=outFileName, + calendar=calendar, + timeVariableNames='xtime') + dsPreprocessedTimeSlice = dsPreprocessed.sel( + Time=slice(timeStart, timeEnd)) + key = (hemisphere, 'iceVolume') + preprocessed[key] = dsPreprocessedTimeSlice[ + 'icevolume_{}'.format(hemisphere.lower())] + + for variableName in ['iceArea', 'iceVolume']: + key = (hemisphere, variableName) + dsvalues = [plotVars[key]] + legendText = [mainRunName] + lineColors = [config.get('timeSeries', 'mainColor')] + lineWidths = [3] + if compareWithObservations and key in obsLegend.keys(): + dsvalues.append(obs[key]) + legendText.append(obsLegend[key]) + lineColors.append(config.get('timeSeries', 'obsColor1')) + lineWidths.append(1.2) + if preprocessedReferenceRunName != 'None': + dsvalues.append(preprocessed[key]) + legendText.append(preprocessedReferenceRunName) + lineColors.append('purple') + lineWidths.append(1.2) + + if self.controlConfig is not None: + dsvalues.append(plotVarsRef[key]) + legendText.append(controlRunName) + lineColors.append(config.get('timeSeries', + 'controlColor')) + lineWidths.append(1.2) + + if 
config.has_option(sectionName, 'firstYearXTicks'): + firstYearXTicks = config.getint(sectionName, + 'firstYearXTicks') + else: + firstYearXTicks = None + + if config.has_option(sectionName, 'yearStrideXTicks'): + yearStrideXTicks = config.getint(sectionName, + 'yearStrideXTicks') + else: + yearStrideXTicks = None + + # separate plots for nothern and southern hemispheres + timeseries_analysis_plot( + config, dsvalues, calendar=calendar, title=title[key], + xlabel=xLabel, ylabel=units[variableName], + movingAveragePoints=movingAveragePoints, + lineColors=lineColors, lineWidths=lineWidths, + legendText=legendText, titleFontSize=titleFontSize, + firstYearXTicks=firstYearXTicks, + yearStrideXTicks=yearStrideXTicks) + + savefig(figureNameStd[key], config) + + filePrefix = '{}{}_{}'.format(variableName, + hemisphere, + mainRunName) + thumbnailDescription = '{} {}'.format( + hemisphere, plotTitles[variableName]) + caption = 'Running mean of {}'.format( + thumbnailDescription) + write_image_xml( + config, + filePrefix, + componentName='Sea Ice', + componentSubdirectory='sea_ice', + galleryGroup=galleryGroup, + groupLink=groupLink, + thumbnailDescription=thumbnailDescription, + imageDescription=caption, + imageCaption=caption) + + if (polarPlot): + timeseries_analysis_plot_polar(config, dsvalues, title[key], + movingAveragePoints, + lineColors=lineColors, + lineWidths=lineWidths, + legendText=legendText, + titleFontSize=titleFontSize) + + savefig(figureNamePolar[key], config) + + filePrefix = '{}{}_{}_polar'.format(variableName, + hemisphere, + mainRunName) + write_image_xml( + config, + filePrefix, + componentName='Sea Ice', + componentSubdirectory='sea_ice', + galleryGroup=galleryGroup, + groupLink=groupLink, + thumbnailDescription=thumbnailDescription, + imageDescription=caption, + imageCaption=caption) + + def _replicate_cycle(self, ds, dsToReplicate, calendar): + """ + Replicates a periodic time series `dsToReplicate` to cover the + timeframe of the dataset `ds`. 
+ + Parameters + ---------- + ds : dataset used to find the start and end time of the replicated + cycle + + dsToReplicate : dataset to replicate. The period of the cycle is the + length of dsToReplicate plus the time between the first two time + values (typically one year total). + + calendar : {'gregorian', 'noleap'} + The name of one of the calendars supported by MPAS cores + + Returns: + -------- + dsShift : a cyclicly repeated version of `dsToReplicte` covering the + range of time of `ds`. + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani + + dsStartTime = days_to_datetime(ds.Time.min(), calendar=calendar) + dsEndTime = days_to_datetime(ds.Time.max(), calendar=calendar) + repStartTime = days_to_datetime(dsToReplicate.Time.min(), + calendar=calendar) + repEndTime = days_to_datetime(dsToReplicate.Time.max(), + calendar=calendar) + + repSecondTime = days_to_datetime(dsToReplicate.Time.isel(Time=1), + calendar=calendar) + + period = (MpasRelativeDelta(repEndTime, repStartTime) + + MpasRelativeDelta(repSecondTime, repStartTime)) + + startIndex = 0 + while(dsStartTime > repStartTime + (startIndex + 1) * period): + startIndex += 1 + + endIndex = 0 + while(dsEndTime > repEndTime + endIndex * period): + endIndex += 1 + + dsShift = dsToReplicate.copy() + + times = days_to_datetime(dsShift.Time, calendar=calendar) + dsShift.coords['Time'] = ('Time', + datetime_to_days(times + startIndex * period, + calendar=calendar)) + # replicate cycle: + for cycleIndex in range(startIndex, endIndex): + dsNew = dsToReplicate.copy() + dsNew.coords['Time'] = \ + ('Time', datetime_to_days(times + (cycleIndex + 1) * period, + calendar=calendar)) + dsShift = xr.concat([dsShift, dsNew], dim='Time') + + # clip dsShift to the range of ds + dsStartTime = dsShift.Time.sel(Time=ds.Time.min(), + method=str('nearest')).values + dsEndTime = dsShift.Time.sel(Time=ds.Time.max(), + method=str('nearest')).values + dsShift = dsShift.sel(Time=slice(dsStartTime, dsEndTime)) + + return 
dsShift + + def _compute_area_vol(self): + """ + Compute part of the time series of sea ice volume and area, given time + indices to process. + """ + + config = self.config + chunkYears = config.getint('timeSeriesSeaIceAreaVol', 'chunkYears') + + maxAllowedSeaIceThickness = config.get( + 'timeSeriesSeaIceAreaVol', 'maxAllowedSeaIceThickness') + + if maxAllowedSeaIceThickness == 'None': + maxAllowedSeaIceThickness = None + else: + maxAllowedSeaIceThickness = float(maxAllowedSeaIceThickness) + + outFileNames = {} + for hemisphere in ['NH', 'SH']: + baseDirectory = build_config_full_path( + config, 'output', 'timeSeriesSubdirectory') + + make_directories(baseDirectory) + + outFileName = '{}/seaIceAreaVol{}.nc'.format(baseDirectory, + hemisphere) + outFileNames[hemisphere] = outFileName + + dsTimeSeries = {} + dsMesh = xr.open_dataset(self.restartFileName) + dsMesh = dsMesh[['latCell', 'areaCell']] + # Load data + ds = open_mpas_dataset( + fileName=self.inputFile, + calendar=self.calendar, + variableList=self.variableList, + startDate=self.startDate, + endDate=self.endDate) + + ds = ds.rename( + {'timeMonthly_avg_iceAreaCell': 'iceConc', + 'timeMonthly_avg_iceVolumeCell': 'iceThick'}) + + nTime = ds.sizes['Time'] + # chunk into 10-year seguments so we don't run out of memory + if nTime > 12 * chunkYears: + ds = ds.chunk({'Time': 12 * chunkYears}) + + for hemisphere in ['NH', 'SH']: + + if hemisphere == 'NH': + mask = dsMesh.latCell > 0 + else: + mask = dsMesh.latCell < 0 + + if maxAllowedSeaIceThickness is not None: + mask = np.logical_and(mask, + ds.iceThick <= maxAllowedSeaIceThickness) + + dsAreaSum = (ds.where(mask) * dsMesh.areaCell).sum('nCells') + dsAreaSum = dsAreaSum.rename( + {'iceConc': 'iceArea', + 'iceThick': 'iceVolume'}) + dsAreaSum['iceThickness'] = (dsAreaSum.iceVolume / + dsMesh.areaCell.sum('nCells')) + + dsAreaSum['iceArea'].attrs['units'] = 'm$^2$' + dsAreaSum['iceArea'].attrs['description'] = \ + f'Total {hemisphere} sea ice area' + 
dsAreaSum['iceVolume'].attrs['units'] = 'm$^3$' + dsAreaSum['iceVolume'].attrs['description'] = \ + f'Total {hemisphere} sea ice volume' + dsAreaSum['iceThickness'].attrs['units'] = 'm' + dsAreaSum['iceThickness'].attrs['description'] = \ + f'Mean {hemisphere} sea ice volume' + + dsTimeSeries[hemisphere] = dsAreaSum + + write_netcdf_with_fill(dsAreaSum, outFileNames[hemisphere]) + + return dsTimeSeries
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/analysis_task.html b/1.11.0rc1/_modules/mpas_analysis/shared/analysis_task.html new file mode 100644 index 000000000..dcfc5e01f --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/analysis_task.html @@ -0,0 +1,818 @@ + + + + + + mpas_analysis.shared.analysis_task — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.analysis_task

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Defines the base class for analysis tasks.
+"""
+# Authors
+# -------
+# Xylar Asay-Davis
+
+from multiprocessing import Process, Value
+import time
+import traceback
+import logging
+import sys
+
+from mpas_analysis.shared.io import NameList, StreamsFile
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories, get_files_year_month
+
+
+
+[docs] +class AnalysisTask(Process): + """ + The base class for analysis tasks. + + Attributes + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + taskName : str + The name of the task, typically the same as the class name except + starting with lowercase (e.g. 'myTask' for class 'MyTask') + + componentName : {'ocean', 'seaIce'} + The name of the component (same as the folder where the task + resides) + + tags : list of str + Tags used to describe the task (e.g. 'timeSeries', 'climatology', + horizontalMap', 'index', 'transect'). These are used to determine + which tasks are generated (e.g. 'all_transect' or 'no_climatology' + in the 'generate' flags) + + runDirectory : str + The base input directory for namelists, streams files and restart files + + historyDirectory : str + The base input directory for history files + + plotsDirectory : str + The directory for writing plots (which is also created if it doesn't + exist) + + namelist : ``shared.io.NameList`` + the namelist reader + + runStreams : ``shared.io.StreamsFile`` + the streams file reader for streams in the run directory (e.g. restart + files) + + historyStreams : ``shared.io.StreamsFile`` + the streams file reader for streams in the history directory (most + streams other than restart files) + + calendar : {'gregorian', 'gregoraian_noleap'} + The calendar used in the MPAS run + + runAfterTasks : list of ``AnalysisTasks`` + tasks that must be complete before this task can run + + subtasks : list of mpas_analysis.shared.AnalysisTask + Subtasks of this task + + xmlFileNames : list of strings + The XML file associated with each plot produced by this analysis, empty + if no plots were produced + + logger : ``logging.Logger`` + A logger for output during the run phase of an analysis task + """ + # Authors + # ------- + # Xylar Asay-Davis + + # flags for run status + UNSET = 0 + READY = 1 + BLOCKED = 2 + RUNNING = 3 + SUCCESS = 4 + FAIL = 5 + +
+[docs] + def __init__(self, config, taskName, componentName, tags=[], + subtaskName=None): + """ + Construct the analysis task. + + Individual tasks (children classes of this base class) should first + call this method to perform basic initialization, then, define the + ``taskName``, ``componentName`` and list of ``tags`` for the task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + taskName : str + The name of the task, typically the same as the class name except + starting with lowercase (e.g. 'myTask' for class 'MyTask') + + componentName : {'ocean', 'seaIce'} + The name of the component (same as the folder where the task + resides) + + tags : list of str, optional + Tags used to describe the task (e.g. 'timeSeries', 'climatology', + horizontalMap', 'index', 'transect'). These are used to determine + which tasks are generated (e.g. 'all_transect' or 'no_climatology' + in the 'generate' flags) + + subtaskName : str, optional + If this is a subtask of ``taskName``, the name of the subtask + """ + # Authors + # ------- + # Xylar Asay-Davis + + if subtaskName is None: + self.fullTaskName = taskName + self.printTaskName = taskName + else: + self.fullTaskName = '{}_{}'.format(taskName, subtaskName) + self.printTaskName = '{}: {}'.format(taskName, subtaskName) + + # call the constructor from the base class (Process) + super(AnalysisTask, self).__init__(name=self.fullTaskName) + + self.config = config + self.taskName = taskName + self.subtaskName = subtaskName + self.componentName = componentName + self.tags = tags + self.subtasks = [] + self.logger = None + self.runAfterTasks = [] + self.xmlFileNames = [] + + # non-public attributes related to multiprocessing and logging + self.daemon = True + self._setupStatus = None + self._runStatus = Value('i', AnalysisTask.UNSET) + self._stackTrace = None + self._logFileName = None + + # the number of subprocesses run by this process, typically 1 but + # could be 12 
for ncclimo in bck or mpi mode + self.subprocessCount = 1 + + # run the task directly as opposed to launching it as a new process + # even in parallel because it has subprocesses such as Pools + self.runDirectly = False
+ + +
+[docs] + def setup_and_check(self): + """ + Perform steps to set up the analysis (e.g. reading namelists and + streams files). + + After this call, the following attributes are set (see documentation + for the class): + runDirectory, historyDirectory, plotsDirectory, namelist, runStreams, + historyStreams, calendar + + Individual tasks (children classes of this base class) should first + call this method to perform basic setup, then, check whether the + configuration is correct for a given analysis and perform additional, + analysis-specific setup. For example, this function could check if + necessary observations and other data files are found, then, determine + the list of files to be read when the analysis is run. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # read parameters from config file + # the run directory contains the restart files + self.runDirectory = build_config_full_path(self.config, 'input', + 'runSubdirectory') + # if the history directory exists, use it; if not, fall back on + # runDirectory + self.historyDirectory = build_config_full_path( + self.config, 'input', + '{}HistorySubdirectory'.format(self.componentName), + defaultPath=self.runDirectory) + + self.plotsDirectory = build_config_full_path(self.config, 'output', + 'plotsSubdirectory') + namelistFileName = build_config_full_path( + self.config, 'input', + '{}NamelistFileName'.format(self.componentName)) + self.namelist = NameList(namelistFileName) + + streamsFileName = build_config_full_path( + self.config, 'input', + '{}StreamsFileName'.format(self.componentName)) + self.runStreams = StreamsFile(streamsFileName, + streamsdir=self.runDirectory) + self.historyStreams = StreamsFile(streamsFileName, + streamsdir=self.historyDirectory) + + self.calendar = self.namelist.get('config_calendar_type') + + make_directories(self.plotsDirectory) + + # set the start and end dates for each type of analysis + for tag in ['climatology', 'timeSeries', 'index']: + if tag in self.tags: + 
self.set_start_end_date(section=tag) + + # redirect output to a log file + logsDirectory = build_config_full_path(self.config, 'output', + 'logsSubdirectory') + + self._logFileName = '{}/{}.log'.format(logsDirectory, + self.fullTaskName)
+ + +
+[docs] + def run_task(self): + """ + Run the analysis. Each task should override this function to do the + work of computing and/or plotting analysis + """ + # Authors + # ------- + # Xylar Asay-Davis + + return
+ + +
+[docs] + def run_after(self, task): + """ + Only run this task after the given task has completed. This allows a + task to be constructed of multiple subtasks, some of which may block + later tasks, while allowing some subtasks to run in parallel. It also + allows for tasks to depend on other tasks (e.g. for computing + climatologies or extracting time series for many variables at once). + + Parameters + ---------- + task : ``AnalysisTask`` + The task that should finish before this one begins + """ + # Authors + # ------- + # Xylar Asay-Davis + + if task not in self.runAfterTasks: + self.runAfterTasks.append(task)
+ + +
+[docs] + def add_subtask(self, subtask): + """ + Add a subtask to this tasks. This task always runs after the subtask + has finished. However, this task gets set up *before* the subtask, + so the setup of the subtask can depend on fields defined during the + setup of this task (the parent). + + Parameters + ---------- + subtask : ``AnalysisTask`` + The subtask to run as part of this task + """ + # Authors + # ------- + # Xylar Asay-Davis + + if subtask not in self.subtasks: + self.subtasks.append(subtask)
+ + +
+[docs] + def run(self, writeLogFile=True): + """ + Sets up logging and then runs the analysis task. + + Parameters + ---------- + writeLogFile : bool, optional + If ``True``, output to stderr and stdout get written to a log file. + Otherwise, the internal logger ``self.logger`` points to stdout + and no log file is created. The intention is for logging to take + place in parallel mode but not in serial mode. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # redirect output to a log file + if writeLogFile: + self.logger = logging.getLogger(self.fullTaskName) + handler = logging.FileHandler(self._logFileName) + else: + self.logger = logging.getLogger() + handler = logging.StreamHandler(sys.stdout) + + formatter = AnalysisFormatter() + handler.setFormatter(formatter) + self.logger.addHandler(handler) + self.logger.setLevel(logging.INFO) + self.logger.propagate = False + + if writeLogFile: + oldStdout = sys.stdout + oldStderr = sys.stderr + sys.stdout = StreamToLogger(self.logger, logging.INFO) + sys.stderr = StreamToLogger(self.logger, logging.ERROR) + + startTime = time.time() + try: + self.run_task() + self._runStatus.value = AnalysisTask.SUCCESS + except (Exception, BaseException) as e: + if isinstance(e, KeyboardInterrupt): + raise e + self._stackTrace = traceback.format_exc() + self.logger.error("analysis task {} failed during run \n" + "{}".format(self.fullTaskName, self._stackTrace)) + self._runStatus.value = AnalysisTask.FAIL + + runDuration = time.time() - startTime + m, s = divmod(runDuration, 60) + h, m = divmod(int(m), 60) + self.logger.info('Execution time: {}:{:02d}:{:05.2f}'.format(h, m, s)) + + if writeLogFile: + handler.close() + # restore stdout and stderr + sys.stdout = oldStdout + sys.stderr = oldStderr + + # remove the handlers from the logger (probably only necessary if + # writeLogFile==False) + self.logger.handlers = []
+ + +
+[docs] + def check_generate(self): + + """ + Determines if this analysis should be generated, based on the + ``generate`` config option and ``taskName``, ``componentName`` and + ``tags``. + + Individual tasks do not need to create their own versions of this + function. + + Returns + ------- + generate : bool + Whether or not this task should be run. + + Raises + ------ + ValueError : If one of ``self.taskName``, ``self.componentName`` + or ``self.tags`` has not been set. + """ + # Authors + # ------- + # Xylar Asay-Davis + + for memberName in ['taskName', 'componentName', 'tags']: + if not hasattr(self, memberName): + raise ValueError('Analysis tasks must define self.{} in their ' + '__init__ method.'.format(memberName)) + + if (not isinstance(self.tags, list) and + self.tags is not None): + raise ValueError('Analysis tasks\'s member self.tags ' + 'must be None or a list of strings.') + + config = self.config + generateList = config.getexpression('output', 'generate') + if len(generateList) > 0 and generateList[0][0:5] == 'only_': + # add 'all' if the first item in the list has the 'only' prefix. + # Otherwise, we would not run any tasks + generateList = ['all'] + generateList + generate = False + for element in generateList: + if '_' in element: + (prefix, suffix) = element.split('_', 1) + else: + prefix = element + suffix = None + + allSuffixes = [self.componentName] + if self.tags is not None: + allSuffixes = allSuffixes + self.tags + noSuffixes = [self.taskName] + allSuffixes + if prefix == 'all': + if (suffix in allSuffixes) or (suffix is None): + generate = True + elif prefix == 'no': + if suffix in noSuffixes: + generate = False + if prefix == 'only': + if suffix not in allSuffixes: + generate = False + elif element == self.taskName: + generate = True + + return generate
+ + +
+[docs] + def check_analysis_enabled(self, analysisOptionName, default=False, + raiseException=True): + """ + Check to make sure a given analysis is turned on, issuing a warning or + raising an exception if not. + + Parameters + ---------- + analysisOptionName : str + The name of a boolean namelist option indicating whether the given + analysis member is enabled + + default : bool, optional + If no analysis option with the given name can be found, indicates + whether the given analysis is assumed to be enabled by default. + + raiseException : bool, optional + Whether + + Returns + ------- + enabled : bool + Whether the given analysis is enabled + + Raises + ------ + RuntimeError + If the given analysis option is not found and ``default`` is not + ``True`` or if the analysis option is found and is ``False``. The + exception is only raised if ``raiseException = True``. + """ + # Authors + # ------- + # Xylar Asay-Davis + + try: + optionName = analysisOptionName + enabled = self.namelist.getbool(optionName) + except ValueError: + enabled = default + if default: + print(f'Warning: namelist option {analysisOptionName} not ' + f'found.\n' + f'This likely indicates that the simulation you ' + f'are analyzing was run with an\n' + f'older version of MPAS-O that did not support ' + f'this flag. Assuming enabled.') + + if not enabled and raiseException: + raise RuntimeError('*** MPAS-Analysis relies on {} = .true.\n' + '*** Make sure to enable this analysis ' + 'member.'.format(analysisOptionName)) + + return enabled
+ + +
+[docs] + def set_start_end_date(self, section): + """ + Set the start and end dates in the ``config`` correspond to the start + and end years in a given category of analysis + + Parameters + ---------- + section : str + The name of a section in the config file containing ``startYear`` + and ``endYear`` options. ``section`` is typically one of + ``climatology``, ``timeSeries`` or ``index`` + """ + # Authors + # ------- + # Xylar Asay-Davis + + if not self.config.has_option(section, 'startDate'): + startDate = '{:04d}-01-01_00:00:00'.format( + self.config.getint(section, 'startYear')) + self.config.set(section, 'startDate', startDate) + if not self.config.has_option(section, 'endDate'): + endDate = '{:04d}-12-31_23:59:59'.format( + self.config.getint(section, 'endYear')) + self.config.set(section, 'endDate', endDate)
+
+ + + +# }}} + + +class AnalysisFormatter(logging.Formatter): + """ + A custom formatter for logging + + Modified from: + https://stackoverflow.com/a/8349076/7728169 + """ + # Authors + # ------- + # Xylar Asay-Davis + + # printing error messages without a prefix because they are sometimes + # errors and sometimes only warnings sent to stderr + dbg_fmt = "DEBUG: %(module)s: %(lineno)d: %(msg)s" + info_fmt = "%(msg)s" + err_fmt = info_fmt + + def __init__(self, fmt=info_fmt): + logging.Formatter.__init__(self, fmt) + + def format(self, record): + + # Save the original format configured by the user + # when the logger formatter was instantiated + format_orig = self._fmt + + # Replace the original format with one customized by logging level + if record.levelno == logging.DEBUG: + self._fmt = AnalysisFormatter.dbg_fmt + + elif record.levelno == logging.INFO: + self._fmt = AnalysisFormatter.info_fmt + + elif record.levelno == logging.ERROR: + self._fmt = AnalysisFormatter.err_fmt + + # Call the original formatter class to do the grunt work + result = logging.Formatter.format(self, record) + + # Restore the original format configured by the user + self._fmt = format_orig + + return result + + +# }}} + + +class StreamToLogger(object): + """ + Modified based on code by: + https://www.electricmonk.nl/log/2011/08/14/redirect-stdout-and-stderr-to-a-logger-in-python/ + + Copyright (C) 2011 Ferry Boender + + License: "available under the GPL" (the author does not provide more + details) + + Fake file-like stream object that redirects writes to a logger instance. 
+ """ + + def __init__(self, logger, log_level=logging.INFO): + self.logger = logger + self.log_level = log_level + self.linebuf = '' + + def write(self, buf): + for line in buf.rstrip().splitlines(): + self.logger.log(self.log_level, str(line.rstrip())) + + def flush(self): + pass + + +def update_time_bounds_from_file_names(config, section, componentName): + """ + Update the start and end years and dates for time series, climatologies or + climate indices based on the years actually available in the list of files. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # read parameters from config file + # the run directory contains the restart files + runDirectory = build_config_full_path(config, 'input', 'runSubdirectory') + # if the history directory exists, use it; if not, fall back on + # runDirectory + historyDirectory = build_config_full_path( + config, 'input', + '{}HistorySubdirectory'.format(componentName), + defaultPath=runDirectory) + + namelistFileName = build_config_full_path( + config, 'input', + '{}NamelistFileName'.format(componentName)) + try: + namelist = NameList(namelistFileName) + except (OSError, IOError): + # this component likely doesn't have output in this run + return + + streamsFileName = build_config_full_path( + config, 'input', + '{}StreamsFileName'.format(componentName)) + try: + historyStreams = StreamsFile(streamsFileName, + streamsdir=historyDirectory) + except (OSError, IOError): + # this component likely doesn't have output in this run + return + + calendar = namelist.get('config_calendar_type') + + requestedStartYear = config.getint(section, 'startYear') + requestedEndYear = config.getint(section, 'endYear') + + startDate = '{:04d}-01-01_00:00:00'.format(requestedStartYear) + endDate = '{:04d}-12-31_23:59:59'.format(requestedEndYear) + + streamName = 'timeSeriesStatsMonthlyOutput' + try: + inputFiles = historyStreams.readpath( + streamName, startDate=startDate, endDate=endDate, + calendar=calendar) + except ValueError: + # 
this component likely doesn't have output in this run + return + + if len(inputFiles) == 0: + raise ValueError('No input files found for stream {} in {} between ' + '{} and {}'.format(streamName, componentName, + requestedStartYear, + requestedEndYear)) + + years, months = get_files_year_month(sorted(inputFiles), + historyStreams, + streamName) + + # search for the start of the first full year + firstIndex = 0 + while (firstIndex < len(years) and months[firstIndex] != 1): + firstIndex += 1 + startYear = years[firstIndex] + + # search for the end of the last full year + lastIndex = len(years) - 1 + while (lastIndex >= 0 and months[lastIndex] != 12): + lastIndex -= 1 + endYear = years[lastIndex] + + if startYear != requestedStartYear or endYear != requestedEndYear: + raise ValueError( + "{} start and/or end year different from requested\n" + "requested: {:04d}-{:04d}\n" + "actual: {:04d}-{:04d}\n".format( + section, requestedStartYear, requestedEndYear, startYear, + endYear)) + + startDate = '{:04d}-01-01_00:00:00'.format(startYear) + config.set(section, 'startDate', startDate) + endDate = '{:04d}-12-31_23:59:59'.format(endYear) + config.set(section, 'endDate', endDate) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/climatology/climatology.html b/1.11.0rc1/_modules/mpas_analysis/shared/climatology/climatology.html new file mode 100644 index 000000000..2b9f21e1d --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/climatology/climatology.html @@ -0,0 +1,1041 @@ + + + + + + mpas_analysis.shared.climatology.climatology — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.climatology.climatology

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Functions for creating climatologies from monthly time series data
+"""
+# Authors
+# -------
+# Xylar Asay-Davis
+
+import xarray as xr
+import os
+import numpy
+from tempfile import TemporaryDirectory
+
+from mpas_tools.io import write_netcdf
+from pyremap import Remapper, LatLonGridDescriptor, ProjectionGridDescriptor
+
+from mpas_analysis.shared.constants import constants
+
+from mpas_analysis.shared.timekeeping.utility import days_to_datetime
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories, fingerprint_generator
+from mpas_analysis.shared.io import write_netcdf_with_fill
+
+from mpas_analysis.shared.climatology.comparison_descriptors import \
+    get_comparison_descriptor, known_comparison_grids
+
+
+
+[docs] +def get_remapper(config, sourceDescriptor, comparisonDescriptor, + mappingFilePrefix, method, logger=None, vertices=False): + """ + Given config options and descriptions of the source and comparison grids, + returns a ``pyremap.Remapper`` object that can be used to remap from source + files or data sets to corresponding data sets on the comparison grid. + + If necessary, creates the mapping file containing weights and indices + needed to perform remapping. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + sourceDescriptor : pyremap.MeshDescriptor + A description of the source mesh or grid + + comparisonDescriptor : pyremap.MeshDescriptor + A description of the comparison grid + + mappingFilePrefix : str + A prefix to be prepended to the mapping file name + + method : {'bilinear', 'neareststod', 'conserve'} + The method of interpolation used. + + logger : logging.Logger, optional + A logger to which ncclimo output should be redirected + + vertices : bool, optional + Whether to remap from vertices, rather than cells + + Returns + ------- + remapper : pyremap.Remapper + A remapper that can be used to remap files or data sets from the source + grid or mesh to the comparison grid. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + mappingFileName = None + + if not _matches_comparison(sourceDescriptor, comparisonDescriptor): + # we need to remap because the grids don't match + + if vertices: + srcMeshName = f'{sourceDescriptor.meshName}_vertices' + else: + srcMeshName = sourceDescriptor.meshName + destMeshName = comparisonDescriptor.meshName + + mappingBaseName = \ + f'{mappingFilePrefix}_{srcMeshName}_to_{destMeshName}_{method}.nc' + + tryCustom = config.get('diagnostics', 'customDirectory') != 'none' + if tryCustom: + # first see if mapping files are in the custom directory + mappingSubdirectory = build_config_full_path( + config, 'diagnostics', 'mappingSubdirectory', + baseDirectoryOption='customDirectory') + + mappingFileName = f'{mappingSubdirectory}/{mappingBaseName}' + + if not tryCustom or not os.path.exists(mappingFileName): + # second see if mapping files are in the base directory + + mappingSubdirectory = build_config_full_path( + config, 'diagnostics', 'mappingSubdirectory', + baseDirectoryOption='base_path') + + mappingFileName = f'{mappingSubdirectory}/{mappingBaseName}' + + if not os.path.exists(mappingFileName): + # we don't have a mapping file yet, so get ready to create one + # in the output subfolder if needed + mappingSubdirectory = \ + build_config_full_path(config, 'output', + 'mappingSubdirectory') + make_directories(mappingSubdirectory) + mappingFileName = f'{mappingSubdirectory}/{mappingBaseName}' + + remapper = Remapper(sourceDescriptor, comparisonDescriptor, + mappingFileName) + + mpiTasks = config.getint('execute', 'mapMpiTasks') + esmf_parallel_exec = config.get('execute', 'mapParallelExec') + if esmf_parallel_exec == 'None': + esmf_parallel_exec = None + + mappingSubdirectory = \ + build_config_full_path(config, 'output', + 'mappingSubdirectory') + make_directories(mappingSubdirectory) + with TemporaryDirectory(dir=mappingSubdirectory) as tempdir: + remapper.build_mapping_file(method=method, logger=logger, + 
mpiTasks=mpiTasks, tempdir=tempdir, + esmf_parallel_exec=esmf_parallel_exec) + + return remapper
+ + + +
+[docs] +def compute_monthly_climatology(ds, calendar=None, maskVaries=True): + """ + Compute monthly climatologies from a data set. The mean is weighted but + the number of days in each month of the data set, ignoring values masked + out with NaNs. If the month coordinate is not present, a data array + ``month`` will be added based on ``Time`` and the provided calendar. + + Parameters + ---------- + ds : xarray.Dataset or xarray.DataArray + A data set with a ``Time`` coordinate expressed as days since + 0001-01-01 or ``month`` coordinate + + calendar : {'gregorian', 'noleap'}, optional + The name of one of the calendars supported by MPAS cores, used to + determine ``month`` from ``Time`` coordinate, so must be supplied if + ``ds`` does not already have a ``month`` coordinate or data array + + maskVaries : bool, optional + If the mask (where variables in ``ds`` are ``NaN``) varies with time. + If not, the weighted average does not need make extra effort to account + for the mask. Most MPAS fields will have masks that don't vary in + time, whereas observations may sometimes be present only at some + times and not at others, requiring ``maskVaries = True``. + + Returns + ------- + climatology : object of same type as ``ds`` + A data set without the ``'Time'`` coordinate containing the mean + of ds over all months in monthValues, weighted by the number of days + in each month. + """ + + # Authors + # ------- + # Xylar Asay-Davis + + def compute_one_month_climatology(ds): + monthValues = list(ds.month.values) + return compute_climatology(ds, monthValues, calendar, maskVaries) + + ds = add_years_months_days_in_month(ds, calendar) + + monthlyClimatology = \ + ds.groupby('month').map(compute_one_month_climatology) + + return monthlyClimatology
+ + + +
+[docs] +def compute_climatology(ds, monthValues, calendar=None, + maskVaries=True): + """ + Compute a monthly, seasonal or annual climatology data set from a data + set. The mean is weighted but the number of days in each month of + the data set, ignoring values masked out with NaNs. If the month + coordinate is not present, a data array ``month`` will be added based + on ``Time`` and the provided calendar. + + Parameters + ---------- + ds : xarray.Dataset or xarray.DataArray + A data set with a ``Time`` coordinate expressed as days since + 0001-01-01 or ``month`` coordinate + + monthValues : int or array-like of ints + A single month or an array of months to be averaged together + + calendar : {'gregorian', 'noleap'}, optional + The name of one of the calendars supported by MPAS cores, used to + determine ``month`` from ``Time`` coordinate, so must be supplied if + ``ds`` does not already have a ``month`` coordinate or data array + + maskVaries : bool, optional + If the mask (where variables in ``ds`` are ``NaN``) varies with time. + If not, the weighted average does not need make extra effort to account + for the mask. Most MPAS fields will have masks that don't vary in + time, whereas observations may sometimes be present only at some + times and not at others, requiring ``maskVaries = True``. + + Returns + ------- + climatology : object of same type as ``ds`` + A data set without the ``'Time'`` coordinate containing the mean + of ds over all months in monthValues, weighted by the number of days + in each month. + """ + # Authors + # ------- + # Xylar Asay-Davis + + ds = add_years_months_days_in_month(ds, calendar) + + mask = xr.zeros_like(ds.month, bool) + + for month in monthValues: + mask = numpy.logical_or(mask, ds.month == month) + + climatologyMonths = ds.where(mask, drop=True) + + climatology = _compute_masked_mean(climatologyMonths, maskVaries) + + return climatology
+ + + +
+[docs] +def add_years_months_days_in_month(ds, calendar=None): + """ + Add ``year``, ``month`` and ``daysInMonth`` as data arrays in ``ds``. + The number of days in each month of ``ds`` is computed either using the + ``startTime`` and ``endTime`` if available or assuming ``noleap`` + calendar and ignoring leap years. ``year`` and ``month`` are computed + accounting correctly for the the calendar. + + Parameters + ---------- + ds : ``xarray.Dataset`` or ``xarray.DataArray`` object + A data set with a ``Time`` coordinate expressed as days since + 0001-01-01 + + calendar : {'gregorian', 'noleap'}, optional + The name of one of the calendars supported by MPAS cores, used to + determine ``year`` and ``month`` from ``Time`` coordinate + + Returns + ------- + ds : object of same type as ``ds`` + The data set with ``year``, ``month`` and ``daysInMonth`` data arrays + added (if not already present) + """ + # Authors + # ------- + # Xylar Asay-Davis + + if ('year' in ds.coords and 'month' in ds.coords and + 'daysInMonth' in ds.coords): + return ds + + ds = ds.copy() + + if 'year' not in ds.coords or 'month' not in ds.coords: + if calendar is None: + raise ValueError('calendar must be provided if month and year ' + 'coordinate is not in ds.') + datetimes = days_to_datetime(ds.Time, calendar=calendar) + + if 'year' not in ds.coords: + ds.coords['year'] = ('Time', [date.year for date in datetimes]) + + if 'month' not in ds.coords: + ds.coords['month'] = ('Time', [date.month for date in datetimes]) + + if 'daysInMonth' not in ds.coords: + if 'startTime' in ds.coords and 'endTime' in ds.coords: + ds.coords['daysInMonth'] = ds.endTime - ds.startTime + else: + if calendar == 'gregorian': + print('Warning: The MPAS run used the Gregorian calendar ' + 'but does not appear to have\n' + 'supplied start and end times. 
Climatologies ' + 'will be computed with\n' + 'month durations ignoring leap years.') + + daysInMonth = numpy.array( + [constants.daysInMonth[int(month) - 1] for + month in ds.month.values], float) + ds.coords['daysInMonth'] = ('Time', daysInMonth) + + return ds
+ + + +def remap_and_write_climatology(config, climatologyDataSet, + climatologyFileName, remappedFileName, + remapper, logger=None): + """ + Given a field in a climatology data set, use the ``remapper`` to remap + horizontal dimensions of all fields, write the results to an output file, + and return the remapped data set. + + Note that ``climatologyFileName`` and ``remappedFileName`` will be + overwritten if they exist, so if this behavior is not desired, the calling + code should skip this call if the files exist and simply load the contents + of ``remappedFileName``. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + climatologyDataSet : ``xarray.DataSet`` or ``xarray.DataArray`` object + A data set containing a climatology + + fieldName : str + A field within the climatology to be remapped + + climatologyFileName : str + The name of the output file to which the data set should be written + before remapping (if using ncremap). + + remappedFileName : str + The name of the output file to which the remapped data set should + be written. + + remapper : ``pyremap.Remapper`` object + A remapper that can be used to remap files or data sets to a + comparison grid. 
+ + logger : ``logging.Logger``, optional + A logger to which ncclimo output should be redirected + + Returns + ------- + remappedClimatology : ``xarray.DataSet`` or ``xarray.DataArray`` object + A data set containing the remapped climatology + """ + # Authors + # ------- + # Xylar Asay-Davis + + useNcremap = config.getboolean('climatology', 'useNcremap') + + if remapper.mappingFileName is None: + # no remapping is needed + remappedClimatology = climatologyDataSet + else: + renormalizationThreshold = config.getfloat( + 'climatology', 'renormalizationThreshold') + parallel_exec = config.get( + 'execute', 'ncremapParallelExec') + if parallel_exec == 'None': + parallel_exec = None + + if useNcremap: + if not os.path.exists(climatologyFileName): + write_netcdf(climatologyDataSet, climatologyFileName) + remapper.remap_file(inFileName=climatologyFileName, + outFileName=remappedFileName, + overwrite=True, + renormalize=renormalizationThreshold, + logger=logger, + parallel_exec=parallel_exec) + remappedClimatology = xr.open_dataset(remappedFileName) + else: + + remappedClimatology = remapper.remap(climatologyDataSet, + renormalizationThreshold) + write_netcdf_with_fill(remappedClimatology, remappedFileName) + return remappedClimatology + + +
+[docs] +def get_unmasked_mpas_climatology_directory(config, op='avg'): + """ + Get the directory for an unmasked MPAS climatology produced by ncclimo, + making the directory if it doesn't already exist + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + configuration options + + op : {'avg', 'min', 'max'} + operator for monthly stats + """ + # Authors + # ------- + # Xylar Asay-Davis + + climatologyOpDirectory = get_climatology_op_directory(config, op) + + mpasMeshName = config.get('input', 'mpasMeshName') + + directory = '{}/unmasked_{}'.format(climatologyOpDirectory, + mpasMeshName) + + make_directories(directory) + return directory
+ + + +
+[docs] +def get_unmasked_mpas_climatology_file_name(config, season, componentName, + op='avg'): + """ + Get the file name for an unmasked MPAS climatology produced by ncclimo + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + configuration options + + season : str + One of the seasons in ``constants.monthDictionary`` + + componentName : {'ocean', 'seaIce'} + The MPAS component for which the climatology is being computed + + op : {'avg', 'min', 'max'} + operator for monthly stats + """ + # Authors + # ------- + # Xylar Asay-Davis + + startYear = config.getint('climatology', 'startYear') + endYear = config.getint('climatology', 'endYear') + + if componentName == 'ocean': + ncclimoModel = 'mpaso' + elif componentName == 'seaIce': + ncclimoModel = 'mpascice' + else: + raise ValueError('component {} is not supported by ncclimo.\n' + 'Check with Charlie Zender and Xylar Asay-Davis\n' + 'about getting it added'.format(componentName)) + + directory = get_unmasked_mpas_climatology_directory(config, op) + + make_directories(directory) + monthValues = sorted(constants.monthDictionary[season]) + startMonth = monthValues[0] + endMonth = monthValues[-1] + + suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format( + startYear, startMonth, endYear, endMonth) + + if season in constants.abrevMonthNames: + season = '{:02d}'.format(monthValues[0]) + fileName = '{}/{}_{}_{}.nc'.format(directory, ncclimoModel, + season, suffix) + return fileName
+ + + +
+[docs] +def get_masked_mpas_climatology_file_name(config, season, componentName, + climatologyName, op='avg'): + """ + Get the file name for a masked MPAS climatology + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + season : str + One of the seasons in ``constants.monthDictionary`` + + componentName : {'ocean', 'seaIce'} + The MPAS component for which the climatology is being computed + + climatologyName : str + The name of the climatology (typically the name of a field to mask + and later remap) + + op : {'avg', 'min', 'max'} + operator for monthly stats + """ + # Authors + # ------- + # Xylar Asay-Davis + + startYear = config.getint('climatology', 'startYear') + endYear = config.getint('climatology', 'endYear') + mpasMeshName = config.get('input', 'mpasMeshName') + + if componentName == 'ocean': + ncclimoModel = 'mpaso' + elif componentName == 'seaIce': + ncclimoModel = 'mpascice' + else: + raise ValueError('component {} is not supported by ncclimo.\n' + 'Check with Charlie Zender and Xylar Asay-Davis\n' + 'about getting it added'.format(componentName)) + + climatologyOpDirectory = get_climatology_op_directory(config, op) + + stageDirectory = '{}/masked'.format(climatologyOpDirectory) + + directory = '{}/{}_{}'.format( + stageDirectory, climatologyName, + mpasMeshName) + + make_directories(directory) + + monthValues = sorted(constants.monthDictionary[season]) + startMonth = monthValues[0] + endMonth = monthValues[-1] + + suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format( + startYear, startMonth, endYear, endMonth) + + if season in constants.abrevMonthNames: + season = '{:02d}'.format(monthValues[0]) + fileName = '{}/{}_{}_{}.nc'.format( + directory, ncclimoModel, season, suffix) + + return fileName
+ + + +
+[docs] +def get_remapped_mpas_climatology_file_name(config, season, componentName, + climatologyName, + comparisonGridName, + op='avg'): + """ + Get the file name for a masked MPAS climatology + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + season : str + One of the seasons in ``constants.monthDictionary`` + + componentName : {'ocean', 'seaIce'} + The MPAS component for which the climatology is being computed + + climatologyName : str + The name of the climatology (typically the name of a field to mask + and later remap) + + comparisonGridName : str + The name of the comparison grid to use for remapping. If it is one + of the known comparison grid names, the full grid name is looked up via + :py:func:`mpas_analysis.shared.climatology.get_comparison_descriptor()` + + op : {'avg', 'min', 'max'} + operator for monthly stats + """ + # Authors + # ------- + # Xylar Asay-Davis + + startYear = config.getint('climatology', 'startYear') + endYear = config.getint('climatology', 'endYear') + mpasMeshName = config.get('input', 'mpasMeshName') + + if componentName == 'ocean': + ncclimoModel = 'mpaso' + elif componentName == 'seaIce': + ncclimoModel = 'mpascice' + else: + raise ValueError('component {} is not supported by ncclimo.\n' + 'Check with Charlie Zender and Xylar Asay-Davis\n' + 'about getting it added'.format(componentName)) + + climatologyOpDirectory = get_climatology_op_directory(config, op) + + if comparisonGridName in known_comparison_grids: + comparisonDescriptor = get_comparison_descriptor(config, + comparisonGridName) + comparisonFullMeshName = comparisonDescriptor.meshName + else: + comparisonFullMeshName = comparisonGridName.replace(' ', '_') + + stageDirectory = '{}/remapped'.format(climatologyOpDirectory) + + directory = '{}/{}_{}_to_{}'.format(stageDirectory, climatologyName, + mpasMeshName, comparisonFullMeshName) + + make_directories(directory) + + monthValues = 
sorted(constants.monthDictionary[season]) + startMonth = monthValues[0] + endMonth = monthValues[-1] + + suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format( + startYear, startMonth, endYear, endMonth) + + if season in constants.abrevMonthNames: + season = '{:02d}'.format(monthValues[0]) + fileName = '{}/{}_{}_{}.nc'.format( + directory, ncclimoModel, season, suffix) + + return fileName
+ + + +def get_climatology_op_directory(config, op='avg'): + """ + Get the output directory for MPAS climatologies from output with the given + monthly operator: avg, min or max + """ + climatologyBaseDirectory = build_config_full_path( + config, 'output', 'mpasClimatologySubdirectory') + + return '{}/{}'.format(climatologyBaseDirectory, op) + + +def _compute_masked_mean(ds, maskVaries): + """ + Compute the time average of data set, masked out where the variables in ds + are NaN and, if ``maskVaries == True``, weighting by the number of days + used to compute each monthly mean time in ds. + """ + + # Authors + # ------- + # Xylar Asay-Davis + + def ds_to_weights(ds): + # make an identical data set to ds but replacing all data arrays with + # nonnull applied to that data array + weights = ds.copy(deep=True) + if isinstance(ds, xr.core.dataarray.DataArray): + weights = ds.notnull() + elif isinstance(ds, xr.core.dataset.Dataset): + for var in ds.data_vars: + weights[var] = ds[var].notnull() + else: + raise TypeError('ds must be an instance of either xarray.Dataset ' + 'or xarray.DataArray.') + + return weights + + if maskVaries: + dsWeightedSum = (ds * ds.daysInMonth).sum(dim='Time', keep_attrs=True) + + weights = ds_to_weights(ds) + + weightSum = (weights * ds.daysInMonth).sum(dim='Time') + + timeMean = dsWeightedSum / weightSum.where(weightSum > 0.) + else: + days = ds.daysInMonth.sum(dim='Time') + + dsWeightedSum = (ds * ds.daysInMonth).sum(dim='Time', keep_attrs=True) + + timeMean = dsWeightedSum / days.where(days > 0.) 
+ + return timeMean + + +def _matches_comparison(obsDescriptor, comparisonDescriptor): + """ + Determine if the two meshes are the same + """ + # Authors + # ------- + # Xylar Asay-Davis + + if isinstance(obsDescriptor, ProjectionGridDescriptor) and \ + isinstance(comparisonDescriptor, ProjectionGridDescriptor): + # pretty hard to determine if projections are the same, so we'll rely + # on the grid names + match = obsDescriptor.meshName == comparisonDescriptor.meshName and \ + len(obsDescriptor.x) == len(comparisonDescriptor.x) and \ + len(obsDescriptor.y) == len(comparisonDescriptor.y) and \ + numpy.all(numpy.isclose(obsDescriptor.x, + comparisonDescriptor.x)) and \ + numpy.all(numpy.isclose(obsDescriptor.y, + comparisonDescriptor.y)) + elif isinstance(obsDescriptor, LatLonGridDescriptor) and \ + isinstance(comparisonDescriptor, LatLonGridDescriptor): + match = ((('degree' in obsDescriptor.units and + 'degree' in comparisonDescriptor.units) or + ('radian' in obsDescriptor.units and + 'radian' in comparisonDescriptor.units)) and + len(obsDescriptor.lat) == len(comparisonDescriptor.lat) and + len(obsDescriptor.lon) == len(comparisonDescriptor.lon) and + numpy.all(numpy.isclose(obsDescriptor.lat, + comparisonDescriptor.lat)) and + numpy.all(numpy.isclose(obsDescriptor.lon, + comparisonDescriptor.lon))) + else: + match = False + + return match + + +def _setup_climatology_caching(ds, startYearClimo, endYearClimo, + yearsPerCacheFile, cachePrefix, + monthValues): + """ + Determine which cache files already exist, which are incomplete and which + years are present in each cache file (whether existing or to be created). 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + cacheInfo = [] + + cacheIndices = -1 * numpy.ones(ds.sizes['Time'], int) + monthsInDs = ds.month.values + yearsInDs = ds.year.values + + # figure out which files to load and which years go in each file + for firstYear in range(startYearClimo, endYearClimo + 1, + yearsPerCacheFile): + years = range(firstYear, firstYear + yearsPerCacheFile) + + yearString, fileSuffix = _get_year_string(years[0], years[-1]) + outputFileClimo = '{}_{}.nc'.format(cachePrefix, fileSuffix) + + done = False + if os.path.exists(outputFileClimo): + # already cached + dsCached = None + try: + dsCached = xr.open_dataset(outputFileClimo) + except IOError: + # assuming the cache file is corrupt, so deleting it. + print('Warning: Deleting cache file {}, which appears to ' + 'have been corrupted.'.format(outputFileClimo)) + + os.remove(outputFileClimo) + + monthsIfDone = len(monthValues) * len(years) + if ((dsCached is not None) and + (dsCached.attrs['totalMonths'] == monthsIfDone)): + # also complete, so we can move on + done = True + if dsCached is not None: + dsCached.close() + + cacheIndex = len(cacheInfo) + for year in years: + for month in monthValues: + mask = numpy.logical_and(yearsInDs == year, + monthsInDs == month) + cacheIndices[mask] = cacheIndex + + if numpy.count_nonzero(cacheIndices == cacheIndex) == 0: + continue + + cacheInfo.append((outputFileClimo, done, yearString)) + + ds = ds.copy() + ds.coords['cacheIndices'] = ('Time', cacheIndices) + + return cacheInfo, cacheIndices + + +def _cache_individual_climatologies(ds, cacheInfo, printProgress, + yearsPerCacheFile, monthValues, + calendar): + """ + Cache individual climatologies for later aggregation. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + for cacheIndex, info in enumerate(cacheInfo): + outputFileClimo, done, yearString = info + if done: + continue + dsYear = ds.where(ds.cacheIndices == cacheIndex, drop=True) + + if printProgress: + print(' {}'.format(yearString)) + + totalDays = dsYear.daysInMonth.sum(dim='Time').values + + monthCount = dsYear.sizes['Time'] + + climatology = compute_climatology(dsYear, monthValues, calendar, + maskVaries=False) + + climatology.attrs['totalDays'] = totalDays + climatology.attrs['totalMonths'] = monthCount + climatology.attrs['fingerprintClimo'] = fingerprint_generator() + + write_netcdf_with_fill(climatology, outputFileClimo) + climatology.close() + + +def _cache_aggregated_climatology(startYearClimo, endYearClimo, cachePrefix, + printProgress, monthValues, + cacheInfo): + """ + Cache aggregated climatology from individual climatologies. + """ + # Authors + # ------- + # Xylar Asay-Davis + + yearString, fileSuffix = _get_year_string(startYearClimo, endYearClimo) + outputFileClimo = '{}_{}.nc'.format(cachePrefix, fileSuffix) + + done = False + if len(cacheInfo) == 0: + climatology = None + done = True + + if os.path.exists(outputFileClimo): + # already cached + climatology = None + try: + climatology = xr.open_dataset(outputFileClimo) + + except IOError: + # assuming the cache file is corrupt, so deleting it. 
+ print('Warning: Deleting cache file {}, which appears to have ' + 'been corrupted.'.format(outputFileClimo)) + os.remove(outputFileClimo) + + if len(cacheInfo) == 1 and outputFileClimo == cacheInfo[0][0]: + # theres only one cache file and it already has the same name + # as the aggregated file so no need to aggregate + done = True + + elif climatology is not None: + monthsIfDone = ((endYearClimo - startYearClimo + 1) + * len(monthValues)) + if climatology.attrs['totalMonths'] == monthsIfDone: + # also complete, so we can move on + done = True + else: + climatology.close() + + if not done: + if printProgress: + print(' Computing aggregated climatology ' + '{}...'.format(yearString)) + + first = True + for cacheIndex, info in enumerate(cacheInfo): + inFileClimo = info[0] + ds = xr.open_dataset(inFileClimo) + days = ds.attrs['totalDays'] + months = ds.attrs['totalMonths'] + if first: + totalDays = days + totalMonths = months + climatology = ds * days + first = False + else: + totalDays += days + totalMonths += months + climatology = climatology + ds * days + + ds.close() + climatology = climatology / totalDays + + climatology.attrs['totalDays'] = totalDays + climatology.attrs['totalMonths'] = totalMonths + climatology.attrs['fingerprintClimo'] = fingerprint_generator() + + write_netcdf_with_fill(climatology, outputFileClimo) + + return climatology + + +def _get_year_string(startYear, endYear): + if startYear == endYear: + yearString = '{:04d}'.format(startYear) + fileSuffix = 'year{}'.format(yearString) + else: + yearString = '{:04d}-{:04d}'.format(startYear, endYear) + fileSuffix = 'years{}'.format(yearString) + + return yearString, fileSuffix +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/climatology/comparison_descriptors.html b/1.11.0rc1/_modules/mpas_analysis/shared/climatology/comparison_descriptors.html new file mode 100644 index 000000000..51c513314 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/climatology/comparison_descriptors.html @@ -0,0 +1,299 @@ + + + + + + mpas_analysis.shared.climatology.comparison_descriptors — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.climatology.comparison_descriptors

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Functions for creating climatologies from monthly time series data
+"""
+# Authors
+# -------
+# Xylar Asay-Davis
+
+import numpy
+
+from mpas_analysis.shared.constants import constants
+from mpas_analysis.shared.projection import known_comparison_grids, \
+    get_pyproj_projection
+
+from pyremap import LatLonGridDescriptor, ProjectionGridDescriptor
+
+
+
+[docs] +def get_comparison_descriptor(config, comparison_grid_name): + """ + Get the comparison grid descriptor from the comparison_grid_name. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + comparison_grid_name : {'latlon', 'antarctic', 'arctic', 'north_atlantic', + 'north_pacific', 'subpolar_north_atlantic'} + The name of the comparison grid to use for remapping. + + Raises + ------ + ValueError + If comparison_grid_name does not describe a known comparison grid + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani + + if comparison_grid_name not in known_comparison_grids: + raise ValueError( + f'Unknown comparison grid type {comparison_grid_name}') + + if comparison_grid_name == 'latlon': + comparison_descriptor = \ + _get_lat_lon_comparison_descriptor(config) + else: + comparison_descriptor = \ + _get_projection_comparison_descriptor(config, comparison_grid_name) + + return comparison_descriptor
+ + + +def _get_lat_lon_comparison_descriptor(config): + """ + Get a descriptor of the lat/lon comparison grid, used for remapping and + determining the grid name + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + Returns + ------- + descriptor : LatLonGridDescriptor + A descriptor of the lat/lon grid + """ + # Authors + # ------- + # Xylar Asay-Davis + + section = 'climatology' + + lat_res = config.getfloat(section, 'comparisonLatResolution') + lon_res = config.getfloat(section, 'comparisonLatResolution') + + nlat = int((constants.latmax - constants.latmin) / lat_res) + 1 + nlon = int((constants.lonmax - constants.lonmin) / lon_res) + 1 + lat = numpy.linspace(constants.latmin, constants.latmax, nlat) + lon = numpy.linspace(constants.lonmin, constants.lonmax, nlon) + + descriptor = LatLonGridDescriptor.create(lat, lon, units='degrees') + + return descriptor + + +def _get_projection_comparison_descriptor(config, comparison_grid_name): + """ + Get a descriptor of any comparison grid base on a projection, used for + remapping and determining the grid name + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + comparison_grid_name : str + One of the projections + + Returns + ------- + descriptor : pyremap.ProjectionGridDescriptor + A descriptor of the comparison grid + (eg. 
- Arctic, North Atlantic) + """ + # Authors + # ------- + # Xylar Asay-Davis + + section = 'climatology' + + option_suffixes = {'antarctic': 'AntarcticStereo', + 'arctic': 'ArcticStereo', + 'antarctic_extended': 'AntarcticExtended', + 'arctic_extended': 'ArcticExtended', + 'north_atlantic': 'NorthAtlantic', + 'north_pacific': 'NorthPacific', + 'subpolar_north_atlantic': 'SubpolarNorthAtlantic'} + + grid_suffixes = {'antarctic': 'Antarctic_stereo', + 'arctic': 'Arctic_stereo', + 'antarctic_extended': 'Antarctic_stereo', + 'arctic_extended': 'Arctic_stereo', + 'north_atlantic': 'North_Atlantic', + 'north_pacific': 'North_Pacific', + 'subpolar_north_atlantic': 'Subpolar_North_Atlantic'} + + if comparison_grid_name not in option_suffixes: + raise ValueError(f'{comparison_grid_name} is not one of the supported ' + f'projection grids') + + projection = get_pyproj_projection(comparison_grid_name) + + option_suffix = option_suffixes[comparison_grid_name] + grid_suffix = grid_suffixes[comparison_grid_name] + width = config.getfloat( + section, f'comparison{option_suffix}Width') + option = f'comparison{option_suffix}Height' + if config.has_option(section, option): + height = config.getfloat(section, option) + else: + height = width + res = config.getfloat( + section, f'comparison{option_suffix}Resolution') + + xmax = 0.5 * width * 1e3 + nx = int(width / res) + 1 + x = numpy.linspace(-xmax, xmax, nx) + + ymax = 0.5 * height * 1e3 + ny = int(height / res) + 1 + y = numpy.linspace(-ymax, ymax, ny) + + mesh_name = f'{width}x{height}km_{res}km_{grid_suffix}' + descriptor = ProjectionGridDescriptor.create(projection, x, y, mesh_name) + + return descriptor +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/climatology/mpas_climatology_task.html b/1.11.0rc1/_modules/mpas_analysis/shared/climatology/mpas_climatology_task.html new file mode 100644 index 000000000..36a3af6a9 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/climatology/mpas_climatology_task.html @@ -0,0 +1,873 @@ + + + + + + mpas_analysis.shared.climatology.mpas_climatology_task — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.climatology.mpas_climatology_task

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import xarray
+import os
+import subprocess
+from distutils.spawn import find_executable
+import dask
+import multiprocessing
+from multiprocessing.pool import ThreadPool
+import glob
+
+from mpas_tools.io import write_netcdf
+
+from mpas_analysis.shared.analysis_task import AnalysisTask
+
+from mpas_analysis.shared.climatology.climatology import \
+    get_unmasked_mpas_climatology_directory, \
+    get_unmasked_mpas_climatology_file_name, \
+    get_climatology_op_directory
+
+from mpas_analysis.shared.io.utility import make_directories, \
+    get_files_year_month
+
+from mpas_analysis.shared.constants import constants
+
+
+
+[docs] +class MpasClimatologyTask(AnalysisTask): + """ + An analysis tasks for computing climatologies from output from the + ``timeSeriesStatsMonthly*`` analysis members. + + Attributes + ---------- + + variableList : dict of lists + A dictionary with seasons as keys and a list of variable names in + the stream to be included in the climatologies for each season in the + values. + + allVariables : list of str + A list of all available variable names in the stream used to raise an + exception when an unavailable variable is requested + + inputFiles : list of str + A list of input files used to compute the climatologies. + + ncclimoModel : {'mpaso', 'mpascice'} + The name of the component expected by ``ncclimo`` + + startDate, endDate : str + The start and end dates of the climatology as strings + + startYear, endYear : int + The start and end years of the climatology + + seasonSubtasks : dict + If using xarray to compute climatologies, a dictionary of subtasks, one + for each possible season + + op : {'avg', 'min', 'max'} + operator for monthly stats + + streamName : str + The name of the stream to read from, one of + ``timeSeriesStatsMonthlyOutput``, + ``timeSeriesStatsMonthlyMinOutput``, + ``timeSeriesStatsMonthlyMaxOutput`` + """ + + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, config, componentName, taskName=None, op='avg'): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + componentName : {'ocean', 'seaIce'} + The name of the component (same as the folder where the task + resides) + + op : {'avg', 'min', 'max'}, optioinal + operator for monthly stats + + taskName : str, optional + the name of the task, defaults to + mpasClimatology<ComponentName><Op> + """ + # Authors + # ------- + # Xylar Asay-Davis + + self.variableList = {} + + self.op = op + if op == 'avg': + self.streamName = 'timeSeriesStatsMonthlyOutput' + elif op == 'min': + self.streamName = 'timeSeriesStatsMonthlyMinOutput' + elif op == 'max': + self.streamName = 'timeSeriesStatsMonthlyMaxOutput' + else: + raise ValueError('Unexpected monthly stats operator {}'.format(op)) + + tags = ['climatology', op] + + if componentName == 'ocean': + self.ncclimoModel = 'mpaso' + elif componentName == 'seaIce': + self.ncclimoModel = 'mpascice' + else: + raise ValueError('component {} is not supported by ncclimo.\n' + 'Check with Charlie Zender and Xylar Asay-Davis\n' + 'about getting it added'.format(componentName)) + + if taskName is None: + suffix = componentName[0].upper() + componentName[1:] + \ + op[0].upper() + op[1:] + taskName = 'mpasClimatology{}'.format(suffix) + + self.allVariables = None + self.useNcclimo = config.getboolean('climatology', 'useNcclimo') + + # call the constructor from the base class (AnalysisTask) + super(MpasClimatologyTask, self).__init__( + config=config, + taskName=taskName, + componentName=componentName, + tags=tags) + + ncclimoParallelMode = config.get('execute', 'ncclimoParallelMode') + if self.useNcclimo: + if ncclimoParallelMode in ['bck', 'mpi']: + ncclimoThreads = config.getint('execute', 'ncclimoThreads') + self.subprocessCount = ncclimoThreads + else: + # running in serial + self.subprocessCount = 1 + + self.seasonSubtasks 
= {} + + if not self.useNcclimo: + # this process doesn't do anything on its own, so no need to + # block other tasks + self.subprocessCount = 1 + + # setup one subtask for each possible season that could be added + for season in constants.monthDictionary: + self.seasonSubtasks[season] = MpasClimatologySeasonSubtask( + self, season) + self.add_subtask(self.seasonSubtasks[season]) + + # make sure each season runs after the months that make up that + # season + for season in constants.monthDictionary: + if season in constants.abrevMonthNames: + continue + monthValues = constants.monthDictionary[season] + monthNames = [constants.abrevMonthNames[month - 1] for month in + monthValues] + for monthName in monthNames: + self.seasonSubtasks[season].run_after( + self.seasonSubtasks[monthName])
+ + +
+[docs] + def add_variables(self, variableList, seasons=None): + """ + Add one or more variables and optionally one or more seasons for which + to compute climatologies. + + Parameters + ---------- + variableList : list of str + A list of variable names in the stream to be included in the + climatologies + + seasons : list of str, optional + A list of seasons (keys in ``shared.constants.monthDictionary``) + to be computed or ``None`` if only monthly + climatologies are needed. + + Raises + ------ + ValueError + if this funciton is called before this task has been set up (so + the list of available variables has not yet been set) or if one + or more of the requested variables is not available in the stream. + """ + # Authors + # ------- + # Xylar Asay-Davis + + if self.allVariables is None: + raise ValueError('add_variables() can only be called after ' + 'setup_and_check() in MpasClimatologyTask.\n' + 'Presumably tasks were added in the wrong order ' + 'or add_variables() is being called in the wrong ' + 'place.') + + if seasons is None: + seasons = list(constants.abrevMonthNames) + + for variable in variableList: + if variable not in self.allVariables: + raise ValueError( + '{} is not available in {} output:\n{}'.format( + variable, self.streamName, self.allVariables)) + + for season in seasons: + if season not in self.variableList: + self.variableList[season] = [] + if variable not in self.variableList[season]: + self.variableList[season].append(variable) + + # add variables to individual months as well, since those will + # be computed first + for season in seasons: + if season not in constants.abrevMonthNames: + monthValues = constants.monthDictionary[season] + monthNames = [constants.abrevMonthNames[month - 1] for month in + monthValues] + self.add_variables(variableList, seasons=monthNames)
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + IOError : + If a restart file is not available from which to read mesh + information or if no history files are available from which to + compute the climatology in the desired time range. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(MpasClimatologyTask, self).setup_and_check() + + if self.op == 'avg': + self.check_analysis_enabled( + analysisOptionName='config_am_timeseriesstatsmonthly_enable', + raiseException=True) + elif self.op == 'min': + self.check_analysis_enabled( + analysisOptionName='config_AM_timeSeriesStatsMonthlyMin_enable', + raiseException=True) + elif self.op == 'max': + self.check_analysis_enabled( + analysisOptionName='config_AM_timeSeriesStatsMonthlyMax_enable', + raiseException=True) + + self.startYear, self.endYear = self.get_start_and_end() + + self.startDate = '{:04d}-01-01_00:00:00'.format(self.startYear) + self.endDate = '{:04d}-12-31_23:59:59'.format(self.endYear) + + # get a list of timeSeriesSta output files from the streams file, + # reading only those that are between the start and end dates + self.inputFiles = self.historyStreams.readpath( + self.streamName, startDate=self.startDate, endDate=self.endDate, + calendar=self.calendar) + + if len(self.inputFiles) == 0: + raise IOError('No files were found in stream {} between {} and ' + '{}.'.format(self.streamName, self.startDate, + self.endDate)) + + self.symlinkDirectory = self._create_symlinks() + + with xarray.open_dataset(self.inputFiles[0]) as ds: + self.allVariables = list(ds.data_vars.keys()) + + def run_task(self): + """ + Compute the requested climatologies + """ 
+ # Authors + # ------- + # Xylar Asay-Davis + + if len(self.variableList.keys()) == 0: + # nothing to do + return + + if not self.useNcclimo: + # subtasks will take care of it, so nothing to do + return + + self.logger.info( + f'\nComputing MPAS climatologies from files:\n' + f' {os.path.basename(self.inputFiles[0])} through\n' + f' {os.path.basename(self.inputFiles[-1])}') + + seasonsToCheck = list(constants.abrevMonthNames) + + for season in self.variableList: + if season not in seasonsToCheck: + seasonsToCheck.append(season) + + allExist = True + for season in seasonsToCheck: + + climatologyFileName = self.get_file_name(season) + climatologyDirectory = get_unmasked_mpas_climatology_directory( + self.config, self.op) + + if not os.path.exists(climatologyFileName): + allExist = False + break + + if allExist: + for season in seasonsToCheck: + if season not in self.variableList: + continue + # make sure all the necessary variables are also present + with xarray.open_dataset(self.get_file_name(season)) as ds: + for variableName in self.variableList[season]: + if variableName not in ds.variables: + allExist = False + break + + if not allExist: + self._compute_climatologies_with_ncclimo( + inDirectory=self.symlinkDirectory, + outDirectory=climatologyDirectory) + + def get_start_and_end(self): + """ + Get the start and end years and dates for the climatology. This + function is provided to allow a custom method for setting the start + and end years of the climatology. By default, they are read from the + climatology section of the config file + + Returns + ------- + startYear, endYear : int + The start and end years of the climatology + + """ + # Authors + # ------- + # Xylar Asay-Davis + + startYear = self.config.getint('climatology', 'startYear') + endYear = self.config.getint('climatology', 'endYear') + + return startYear, endYear + +
+[docs] + def get_file_name(self, season): + """ + + Returns the full path for MPAS climatology file produced by ncclimo. + + Parameters + ---------- + season : str + One of the seasons in ``constants.monthDictionary`` + + Returns + ------- + fileName : str + The path to the climatology file for the specified season. + """ + # Authors + # ------- + # Xylar Asay-Davis + + return get_unmasked_mpas_climatology_file_name(self.config, season, + self.componentName, + self.op)
+ + + def _create_symlinks(self): + """ + Create symlinks to monthly mean files so they have the expected file + naming convention for ncclimo. + + Returns + ------- + symlinkDirectory : str + The path to the symlinks created for each timeSeriesStatsMonthly + input file + """ + # Authors + # ------- + # Xylar Asay-Davis + + config = self.config + + fileNames = sorted(self.inputFiles) + years, months = get_files_year_month(fileNames, + self.historyStreams, + self.streamName) + + climatologyOpDirectory = get_climatology_op_directory(config, self.op) + + symlinkDirectory = '{}/source_symlinks'.format( + climatologyOpDirectory) + + make_directories(symlinkDirectory) + + for inFileName, year, month in zip(fileNames, years, months): + outFileName = \ + f'{symlinkDirectory}/{self.ncclimoModel}.hist.am.' \ + f'timeSeriesStatsMonthly.{year:04d}-{month:02d}-01.nc' + + try: + os.symlink(inFileName, outFileName) + except OSError: + pass + + return symlinkDirectory + + def _compute_climatologies_with_ncclimo(self, inDirectory, outDirectory, + remapper=None, + remappedDirectory=None): + """ + Uses ncclimo to compute monthly, seasonal and/or annual climatologies. + + Parameters + ---------- + inDirectory : str + The run directory containing timeSeriesStatsMonthly output + + outDirectory : str + The output directory where climatologies will be written + + remapper : ``pyremap.Remapper`` object, optional + If present, a remapper that defines the source and desitnation + grids for remapping the climatologies. + + remappedDirectory : str, optional + If present, the path where remapped climatologies should be + written. By default, remapped files are stored in the same + directory as the climatologies on the source grid. Has no effect + if ``remapper`` is ``None``. + + Raises + ------ + OSError + If ``ncclimo`` is not in the system path. + """ + # Authors + # ------- + # Xylar Asay-Davis + + if find_executable('ncclimo') is None: + raise OSError('ncclimo not found. 
Make sure the latest nco ' + 'package is installed: \n' + 'conda install nco\n' + 'Note: this presumes use of the conda-forge ' + 'channel.') + + parallelMode = self.config.get('execute', 'ncclimoParallelMode') + + seasons = [season for season in self.variableList + if season not in constants.abrevMonthNames] + + variableList = [] + for season in self.variableList: + variableList.extend(self.variableList[season]) + + # include each variable only once + variableList = sorted(list(set(variableList))) + + if len(seasons) == 0: + seasons = ['none'] + + workDir = os.getcwd() + os.chdir(inDirectory) + + inFiles = sorted(glob.glob(f'{self.ncclimoModel}*')) + + args = ['ncclimo', + '--no_stdin', + '-4', + '--clm_md=mth', + '-a', 'sdd', + '-P', self.ncclimoModel, + '-p', parallelMode, + '-j', '{}'.format(self.subprocessCount), + '-v', ','.join(variableList), + '--seasons={}'.format(','.join(seasons)), + '-s', '{:04d}'.format(self.startYear), + '-e', '{:04d}'.format(self.endYear), + '-o', outDirectory] + inFiles + + if remapper is not None: + args.extend(['-r', remapper.mappingFileName]) + if remappedDirectory is not None: + args.extend(['-O', remappedDirectory]) + + self.logger.info('running: {}'.format(' '.join(args))) + for handler in self.logger.handlers: + handler.flush() + + # set an environment variable to make sure we're not using czender's + # local version of NCO instead of one we have intentionally loaded + env = os.environ.copy() + env['NCO_PATH_OVERRIDE'] = 'No' + + process = subprocess.Popen(args, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, env=env) + stdout, stderr = process.communicate() + + if stdout: + stdout = stdout.decode('utf-8') + for line in stdout.split('\n'): + self.logger.info(line) + if stderr: + stderr = stderr.decode('utf-8') + for line in stderr.split('\n'): + self.logger.error(line) + + if process.returncode != 0: + raise subprocess.CalledProcessError(process.returncode, + ' '.join(args)) + + os.chdir(workDir)
+ + + +class MpasClimatologySeasonSubtask(AnalysisTask): + """ + An analysis subtasks for computing climatologies from output from the + ``timeSeriesStatsMonthly`` analysis member for a single month or season. + + Attributes + ---------- + + season : str + The season of the climatology + + parentTask : ``MpasClimatologyTask`` + The task that this subtask belongs to. + """ + + # Authors + # ------- + # Xylar Asay-Davis + + def __init__(self, parentTask, season, subtaskName=None): + """ + Construct the analysis task and adds it as a subtask of the + ``parentTask``. + + Parameters + ---------- + parentTask : ``MpasClimatologyTask`` + The task that this subtask belongs to. + + season : str + A keys in ``shared.constants.monthDictionary`` + + subtaskName : str, optional + the name of the subtask, defaults to season + + """ + # Authors + # ------- + # Xylar Asay-Davis + + self.season = season + + if subtaskName is None: + subtaskName = season + + # call the constructor from the base class (AnalysisTask) + super(MpasClimatologySeasonSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + componentName=parentTask.componentName, + tags=parentTask.tags, + subtaskName=subtaskName) + + self.parentTask = parentTask + + parallelTaskCount = self.config.getint('execute', 'parallelTaskCount') + self.subprocessCount = min(parallelTaskCount, + self.config.getint('climatology', + 'subprocessCount')) + self.daskThreads = min( + multiprocessing.cpu_count(), + self.config.getint('climatology', 'daskThreads')) + + def run_task(self): + """ + Compute the requested climatologies + """ + # Authors + # ------- + # Xylar Asay-Davis + + season = self.season + parentTask = self.parentTask + if season not in parentTask.variableList: + # nothing to do + return + + variableList = parentTask.variableList[season] + + if len(variableList) == 0: + # nothing to do + return + + self.logger.info( + f'\nComputing MPAS climatologies from files:\n' + f' 
{os.path.basename(parentTask.inputFiles[0])} through\n' + f' {os.path.basename(parentTask.inputFiles[-1])}') + + climatologyFileName = parentTask.get_file_name(season) + climatologyDirectory = get_unmasked_mpas_climatology_directory( + self.config) + + allExist = False + if os.path.exists(climatologyFileName): + allExist = True + # make sure all the necessary variables are also present + with xarray.open_dataset(climatologyFileName) as ds: + for variableName in variableList: + if variableName not in ds.variables: + allExist = False + break + + if not allExist: + with dask.config.set(schedular='threads', + pool=ThreadPool(self.daskThreads)): + self._compute_climatologies_with_xarray( + inDirectory=parentTask.symlinkDirectory, + outDirectory=climatologyDirectory) + + def _compute_climatologies_with_xarray(self, inDirectory, outDirectory): + + """ + Uses xarray to compute seasonal and/or annual climatologies. + + Parameters + ---------- + inDirectory : str + The run directory containing timeSeriesStatsMonthly output + + outDirectory : str + The output directory where climatologies will be written + """ + + # Authors + # ------- + # Xylar Asay-Davis + + def _preprocess(ds): + # drop unused variables during preprocessing because only the + # variables we want are guaranteed to be in all the files + return ds[variableList] + + season = self.season + parentTask = self.parentTask + variableList = parentTask.variableList[season] + + chunkSize = self.config.getint('input', 'maxChunkSize') + + if season in constants.abrevMonthNames: + # this is an individual month, so create a climatology from + # timeSeriesStatsMonthlyOutput + + fileNames = sorted(parentTask.inputFiles) + years, months = get_files_year_month( + fileNames, self.historyStreams, + parentTask.streamName) + + with xarray.open_mfdataset(parentTask.inputFiles, + combine='nested', + concat_dim='Time', + chunks={'nCells': chunkSize}, + decode_times=False, + preprocess=_preprocess) as ds: + + ds.coords['year'] = 
('Time', years) + ds.coords['month'] = ('Time', months) + month = constants.abrevMonthNames.index(season) + 1 + climatologyFileName = parentTask.get_file_name(season) + self.logger.info('computing climatology {}'.format( + os.path.basename(climatologyFileName))) + + ds = ds.where(ds.month == month, drop=True) + ds = ds.mean(dim='Time', keep_attrs=True) + ds.compute(num_workers=self.subprocessCount) + write_netcdf(ds, climatologyFileName) + else: + outFileName = parentTask.get_file_name(season=season) + self.logger.info('computing climatology {}'.format( + os.path.basename(outFileName))) + fileNames = [] + weights = [] + for month in constants.monthDictionary[season]: + monthName = constants.abrevMonthNames[month - 1] + fileNames.append(parentTask.get_file_name(season=monthName)) + weights.append(constants.daysInMonth[month - 1]) + + with xarray.open_mfdataset(fileNames, concat_dim='weight', + combine='nested', + chunks={'nCells': chunkSize}, + decode_times=False, + preprocess=_preprocess) as ds: + ds.coords['weight'] = ('weight', weights) + dsNew = ((ds.weight * ds).sum(dim='weight') / + ds.weight.sum(dim='weight')) + for varName in ds.data_vars: + attrs = ds[varName].attrs + # _FillValue causes trouble + attrs.pop('_FillValue', None) + dsNew[varName].attrs = attrs + + dsNew.compute(num_workers=self.subprocessCount) + write_netcdf(dsNew, outFileName) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.html b/1.11.0rc1/_modules/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.html new file mode 100644 index 000000000..94f4946f6 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.html @@ -0,0 +1,780 @@ + + + + + + mpas_analysis.shared.climatology.remap_mpas_climatology_subtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.climatology.remap_mpas_climatology_subtask

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import xarray as xr
+import numpy
+import os
+
+from mpas_tools.io import write_netcdf
+from pyremap import MpasCellMeshDescriptor, MpasVertexMeshDescriptor
+
+from mpas_analysis.shared.analysis_task import AnalysisTask
+
+from mpas_analysis.shared.constants import constants
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories
+from mpas_analysis.shared.io import write_netcdf_with_fill
+
+from mpas_analysis.shared.climatology.climatology import get_remapper, \
+    get_masked_mpas_climatology_file_name, \
+    get_remapped_mpas_climatology_file_name, \
+    get_climatology_op_directory
+from mpas_analysis.shared.climatology.comparison_descriptors import \
+    get_comparison_descriptor
+
+
+
+[docs] +class RemapMpasClimatologySubtask(AnalysisTask): + """ + An analysis tasks for computing climatologies from output from the + ``timeSeriesStatsMonthly`` analysis member. + + Attributes + ---------- + + climatologyName : str + A name that describes the climatology (e.g. a short version of + the important field(s) in the climatology) used to name the + subdirectories for each stage of the climatology + + variableList : list of str + A list of variable names in ``timeSeriesStatsMonthly`` to be + included in the climatologies + + iselValues : dict + A dictionary of dimensions and indices (or ``None``) used to extract + a slice of the MPAS field. + + seasons : list of str + A list of seasons (keys in ``shared.constants.monthDictionary``) + over which the climatology should be computed or ['none'] if only + monthly climatologies are needed. + + comparisonDescriptors : dict of ``MeshDescriptor`` + Descriptors of the comparison grids to use for remapping, with + grid names as the keys. + + restartFileName : str + If ``comparisonGridName`` is not ``None``, the name of a restart + file from which the MPAS mesh can be read. + + useNcremap : bool, optional + Whether to use ncremap to do the remapping (the other option being + an internal python code that handles more grid types and extra + dimensions) + + op : {'avg', 'min', 'max'} + operator for monthly stats + + vertices : bool + Whether to remap from vertices, rather than cells + """ + + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, mpasClimatologyTask, parentTask, climatologyName, + variableList, seasons, comparisonGridNames=None, + iselValues=None, subtaskName='remapMpasClimatology', + useNcremap=None, vertices=False): + + """ + Construct the analysis task and adds it as a subtask of the + ``parentTask``. + + Parameters + ---------- + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped + + parentTask : ``AnalysisTask`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + climatologyName : str + A name that describes the climatology (e.g. a short version of + the important field(s) in the climatology) used to name the + subdirectories for each stage of the climatology + + variableList : list of str + A list of variable names in ``timeSeriesStatsMonthly`` to be + included in the climatologies + + seasons : list of str + A list of seasons (keys in ``shared.constants.monthDictionary``) + to be computed or ['none'] (not ``None``) if only monthly + climatologies are needed. + + comparisonGridNames : list of str + optional + The name(s) of the comparison grid to use for remapping. If none + is supplied, `add_comparison_descriptor()` must be called to add + one or more comparison grids. + + iselValues : dict, optional + A dictionary of dimensions and indices (or ``None``) used to + extract a slice of the MPAS field(s). + + subtaskName : str, optional + The name of the subtask + + useNcremap : bool, optional + Whether to use ncremap to do the remapping (the other option being + an internal python code that handles more grid types and extra + dimensions). This defaults to the config option ``useNcremap`` + if it is not explicitly given. If a comparison grid other than + ``latlon`` is given, ncremap is not supported so this flag is set + to ``False``. 
+ + vertices : bool, optional + Whether to remap from vertices, rather than cells + """ + # Authors + # ------- + # Xylar Asay-Davis + + tags = ['climatology'] + + # call the constructor from the base class (AnalysisTask) + super(RemapMpasClimatologySubtask, self).__init__( + config=mpasClimatologyTask.config, + taskName=parentTask.taskName, + subtaskName=subtaskName, + componentName=parentTask.componentName, + tags=tags) + + self.variableList = variableList + self.seasons = seasons + self.comparisonDescriptors = {} + if comparisonGridNames is not None: + for comparisonGridName in comparisonGridNames: + comparisonDescriptor = get_comparison_descriptor( + self.config, comparisonGridName) + self.comparisonDescriptors[comparisonGridName] = \ + comparisonDescriptor + + self.iselValues = iselValues + self.climatologyName = climatologyName + self.mpasClimatologyTask = mpasClimatologyTask + self.op = mpasClimatologyTask.op + + self.run_after(mpasClimatologyTask) + + parentTask.add_subtask(self) + + # this is a stopgap until MPAS implements the _FillValue attribute + # correctly + self._fillValue = -9.99999979021476795361e+33 + + if useNcremap is None: + self.useNcremap = self.config.getboolean('climatology', + 'useNcremap') + else: + self.useNcremap = useNcremap + + self.vertices = vertices
+ + +
+[docs] + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + IOError : + If a restart file is not available from which to read mesh + information or if no history files are available from which to + compute the climatology in the desired time range. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(RemapMpasClimatologySubtask, self).setup_and_check() + + try: + self.restartFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS restart file found: need at least one ' + 'restart file to perform remapping of ' + 'climatologies.') + + # we set up the remapper here because ESFM_RegridWeightGen seems to + # have trouble if it runs in another process (or in several at once) + self._setup_remappers() + + # don't add the variables and seasons to mpasClimatologyTask until + # we're sure this subtask is supposed to run + self.mpasClimatologyTask.add_variables(self.variableList, self.seasons) + + # make the mapping directory, because doing so within each process + # seems to be giving ESMF_RegridWeightGen some trouble + mappingSubdirectory = build_config_full_path(self.config, 'output', + 'mappingSubdirectory') + make_directories(mappingSubdirectory)
+ + +
+[docs] + def run_task(self): + """ + Compute the requested climatologies + """ + # Authors + # ------- + # Xylar Asay-Davis + + self.logger.info('\nRemapping climatology {}'.format( + self.climatologyName)) + + dsMask = xr.open_dataset(self.mpasClimatologyTask.inputFiles[0]) + dsMask = dsMask[self.variableList] + iselValues = {'Time': 0} + if self.iselValues is not None: + iselValues.update(self.iselValues) + # select only Time=0 and possibly only the desired vertical + # slice + dsMask = dsMask.isel(**iselValues) + + for season in self.seasons: + self._mask_climatologies(season, dsMask) + + for comparisonGridName in self.comparisonDescriptors: + + for season in self.seasons: + + maskedClimatologyFileName = self.get_masked_file_name( + season) + + remappedFileName = self.get_remapped_file_name( + season, comparisonGridName) + + if not os.path.exists(remappedFileName): + self._remap(inFileName=maskedClimatologyFileName, + outFileName=remappedFileName, + remapper=self.remappers[comparisonGridName], + comparisonGridName=comparisonGridName, + season=season)
+ + +
+[docs] + def add_comparison_grid_descriptor(self, comparisonGridName, + comparisonDescriptor): + """ + Add a custom grid descriptor (something other than 'latlon', + 'antarctic', 'arctic', 'north_atlantic', or 'north_pacific', + or 'subpolar_north_atlantic'). + + Parameters + ---------- + comparisonGridName : str + The name of the comparison grid + + comparisonDescriptor : ``MeshDescriptor`` + A descriptor of the comparison grid to use for + remapping + + """ + self.comparisonDescriptors[comparisonGridName] = \ + comparisonDescriptor
+ + +
+[docs] + def get_masked_file_name(self, season): + """ + Given config options, the name of a field and a string identifying the + months in a seasonal climatology, returns the full path for MPAS + climatology files before and after remapping. + + Parameters + ---------- + season : str + One of the seasons in ``constants.monthDictionary`` + + Returns + ------- + fileName : str + The path to the climatology file for the specified season. + """ + # Authors + # ------- + # Xylar Asay-Davis + + fileName = get_masked_mpas_climatology_file_name(self.config, + season, + self.componentName, + self.climatologyName, + self.op) + + return fileName
+ + +
+[docs] + def get_remapped_file_name(self, season, comparisonGridName): + """ + Given config options, the name of a field and a string identifying the + months in a seasonal climatology, returns the full path for MPAS + climatology files before and after remapping. + + Parameters + ---------- + season : str + One of the seasons in ``constants.monthDictionary`` + + comparisonGridName : str + The name of the comparison grid to use for remapping. + + Returns + ------- + fileName : str + The path to the climatology file for the specified season. + """ + # Authors + # ------- + # Xylar Asay-Davis + + fileName = get_remapped_mpas_climatology_file_name( + self.config, season, self.componentName, self.climatologyName, + comparisonGridName, self.op) + + return fileName
+ + +
+[docs] + def customize_masked_climatology(self, climatology, season): + """ + Override this function to customize the climatology during the masking + phase (before remapping) + + Parameters + ---------- + climatology : xarray.Dataset + The MPAS climatology data set that has had a mask added but has + not yet been remapped + + season : str + The name of the season to be masked + + Returns + ------- + climatology : xarray.Dataset + The same data set with any custom fields added or modifications + made + """ + # Authors + # ------- + # Xylar Asay-Davis + + return climatology
+ + +
+[docs] + def customize_remapped_climatology(self, climatology, comparisonGridNames, + season): + """ + Override this function to customize the climatology after remapping + + Parameters + ---------- + climatology : xarray.Dataset + The MPAS climatology data set that has been remapped + + comparisonGridNames : str + The name of the comparison grid to use for remapping. + + season : str + The name of the season to be remapped + + Returns + ------- + climatology : xarray.Dataset + The same data set with any custom fields added or modifications + made + """ + # Authors + # ------- + # Xylar Asay-Davis + + return climatology
+ + + def _setup_remappers(self): + """ + Set up the remappers for remapping from the MPAS to the comparison + grids. + """ + # Authors + # ------- + # Xylar Asay-Davis + + config = self.config + + # make reamppers + mappingFilePrefix = 'map' + self.remappers = {} + for comparisonGridName in self.comparisonDescriptors: + comparisonDescriptor = \ + self.comparisonDescriptors[comparisonGridName] + self.comparisonGridName = comparisonDescriptor.meshName + meshName = config.get('input', 'mpasMeshName') + if self.vertices: + mpasDescriptor = MpasVertexMeshDescriptor( + self.restartFileName, meshName=meshName) + else: + mpasDescriptor = MpasCellMeshDescriptor( + self.restartFileName, meshName=meshName) + self.mpasMeshName = mpasDescriptor.meshName + + self.remappers[comparisonGridName] = get_remapper( + config=config, sourceDescriptor=mpasDescriptor, + comparisonDescriptor=comparisonDescriptor, + mappingFilePrefix=mappingFilePrefix, + method=config.get('climatology', 'mpasInterpolationMethod'), + logger=self.logger, vertices=self.vertices) + + def _setup_file_names(self): + """ + Create a dictionary of file names and directories for this climatology + """ + # Authors + # ------- + # Xylar Asay-Davis + + config = self.config + climatologyBaseDirectory = get_climatology_op_directory(config, + self.op) + + mpasMeshName = config.get('input', 'mpasMeshName') + + comparisonFullMeshNames = {} + for comparisonGridName in self.comparisonDescriptors: + comparisonDescriptor = \ + self.comparisonDescriptors[comparisonGridName] + comparisonFullMeshNames[comparisonGridName] = \ + comparisonDescriptor.meshName + + keys = [] + for season in self.seasons: + stage = 'masked' + keys.append((season, stage)) + stage = 'remapped' + for comparisonGridName in self.comparisonDescriptors: + keys.append((season, stage, comparisonGridName)) + + self._outputDirs = {} + self._outputFiles = {} + + for key in keys: + season = key[0] + stage = key[1] + if stage == 'remapped': + comparisonGridName = 
key[2] + + stageDirectory = '{}/{}'.format(climatologyBaseDirectory, stage) + + if stage == 'masked': + directory = '{}/{}_{}'.format( + stageDirectory, self.climatologyName, + mpasMeshName) + elif stage == 'remapped': + directory = '{}/{}_{}_to_{}'.format( + stageDirectory, + self.climatologyName, + mpasMeshName, + comparisonFullMeshNames[comparisonGridName]) + + make_directories(directory) + + monthValues = sorted(constants.monthDictionary[season]) + startMonth = monthValues[0] + endMonth = monthValues[-1] + + suffix = '{:04d}{:02d}_{:04d}{:02d}_climo'.format( + self.mpasClimatologyTask.startYear, startMonth, + self.mpasClimatologyTask.endYear, endMonth) + + if season in constants.abrevMonthNames: + season = '{:02d}'.format(monthValues[0]) + fileName = '{}/{}_{}_{}.nc'.format( + directory, self.mpasClimatologyTask.ncclimoModel, season, + suffix) + + self._outputDirs[key] = directory + self._outputFiles[key] = fileName + + def _mask_climatologies(self, season, dsMask): + """ + For each season, creates a masked version of the climatology + + Parameters + ---------- + season : str + The name of the season to be masked + + dsMask : ``xarray.Dataset`` object + A data set (from the first input file) that can be used to + determine the mask in MPAS output files. 
+ + Author + ------ + Xylar Asay-Davis + """ + + climatologyFileName = self.mpasClimatologyTask.get_file_name(season) + + maskedClimatologyFileName = self.get_masked_file_name(season) + + if not os.path.exists(maskedClimatologyFileName): + # slice and mask the data set + climatology = xr.open_dataset(climatologyFileName) + climatology = climatology[self.variableList] + iselValues = {} + if 'Time' in climatology.dims: + iselValues['Time'] = 0 + if self.iselValues is not None: + iselValues.update(self.iselValues) + # select only Time=0 and possibly only the desired vertical + # slice + if len(iselValues.keys()) > 0: + climatology = climatology.isel(**iselValues) + + # mask the data set + for variableName in self.variableList: + climatology[variableName] = \ + climatology[variableName].where( + dsMask[variableName] != self._fillValue) + + # customize (if this function has been overridden) + climatology = self.customize_masked_climatology(climatology, + season) + + if self.vertices: + dim = 'nVertices' + else: + dim = 'nCells' + # add valid mask as a variable, useful for remapping later + climatology['validMask'] = \ + xr.DataArray(numpy.ones(climatology.sizes[dim]), + dims=[dim]) + + write_netcdf(climatology, maskedClimatologyFileName) + + def _remap(self, inFileName, outFileName, remapper, comparisonGridName, + season): + """ + Performs remapping either using ``ncremap`` or the native python code, + depending on the requested setting and the comparison grid + + Parameters + ---------- + inFileName : str + The name of the input file to be remapped. + + outFileName : str + The name of the output file to which the remapped data set should + be written. + + remapper : ``pyremap.Remapper`` object + A remapper that can be used to remap files or data sets to a + comparison grid. + + comparisonGridNames : str + The name of the comparison grid to use for remapping. 
+ + season : str + The name of the season to be remapped + """ + # Authors + # ------- + # Xylar Asay-Davis + + if remapper.mappingFileName is None: + # no remapping is needed + return + + renormalizationThreshold = self.config.getfloat( + 'climatology', 'renormalizationThreshold') + + parallel_exec = self.config.get( + 'execute', 'ncremapParallelExec') + if parallel_exec == 'None': + parallel_exec = None + + if self.useNcremap: + remapper.remap_file(inFileName=inFileName, + outFileName=outFileName, + overwrite=True, + renormalize=renormalizationThreshold, + logger=self.logger, + parallel_exec=parallel_exec) + + remappedClimatology = xr.open_dataset(outFileName) + remappedClimatology.load() + remappedClimatology.close() + else: + + climatologyDataSet = xr.open_dataset(inFileName) + + remappedClimatology = remapper.remap(climatologyDataSet, + renormalizationThreshold) + + # customize (if this function has been overridden) + remappedClimatology = self.customize_remapped_climatology( + remappedClimatology, comparisonGridName, season) + + write_netcdf_with_fill(remappedClimatology, outFileName)
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/climatology/remap_observed_climatology_subtask.html b/1.11.0rc1/_modules/mpas_analysis/shared/climatology/remap_observed_climatology_subtask.html new file mode 100644 index 000000000..3e88d6e42 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/climatology/remap_observed_climatology_subtask.html @@ -0,0 +1,483 @@ + + + + + + mpas_analysis.shared.climatology.remap_observed_climatology_subtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.climatology.remap_observed_climatology_subtask

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import os
+import os.path
+import xarray as xr
+
+from mpas_analysis.shared.analysis_task import AnalysisTask
+
+from mpas_analysis.shared.constants import constants
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories
+from mpas_analysis.shared.io import write_netcdf_with_fill
+
+from mpas_analysis.shared.climatology.climatology import get_remapper, \
+    remap_and_write_climatology, compute_climatology
+
+from mpas_analysis.shared.climatology.comparison_descriptors import \
+    get_comparison_descriptor
+
+
+
+[docs] +class RemapObservedClimatologySubtask(AnalysisTask): + """ + An analysis task for comparison of 2D model fields against observations. + + Attributes + ---------- + seasons : list of str + A list of seasons (keys in ``constants.monthDictionary``) over + which the climatology should be computed. + + fileName : str + The name of the observation file + + outFilePrefix : str + The prefix in front of output files and mapping files, typically the + name of the field being remapped + + comparisonGridNames : list of str + The name(s) of the comparison grid to use for remapping. + """ + + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, parentTask, seasons, fileName, outFilePrefix, + comparisonGridNames=['latlon'], + subtaskName='remapObservations'): + + """ + Construct one analysis subtask for each plot (i.e. each season and + comparison grid) and a subtask for computing climatologies. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent (main) task for this subtask + + seasons : list of str + A list of seasons (keys in ``constants.monthDictionary``) over + which the climatology should be computed. + + fileName : str + The name of the observation file + + outFilePrefix : str + The prefix in front of output files and mapping files, typically + the name of the field being remapped + + comparisonGridNames : list of str + optional + The name(s) of the comparison grid to use for remapping. + + subtaskName : str, optional + The name of the subtask + """ + # Authors + # ------- + # Xylar Asay-Davis + + self.seasons = seasons + self.fileName = fileName + self.outFilePrefix = outFilePrefix + self.comparisonGridNames = comparisonGridNames + + config = parentTask.config + taskName = parentTask.taskName + tags = parentTask.tags + componentName = parentTask.componentName + + # call the constructor from the base class (AnalysisTask) + super(RemapObservedClimatologySubtask, self).__init__( + config=config, taskName=taskName, subtaskName=subtaskName, + componentName=componentName, tags=tags)
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + # Authors + # ------- + # Xylar Asay-Davis + + # call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(RemapObservedClimatologySubtask, self).setup_and_check() + + # we set up the remappers here because ESFM_RegridWeightGen seems to + # have trouble if it runs in another process (or in several at once) + self._setup_remappers(self.fileName) + + # build the observational data set and write it out to a file, to + # be read back in during the run_task() phase + obsFileName = self.get_file_name(stage='original') + if not os.path.exists(obsFileName): + ds = self.build_observational_dataset(self.fileName) + write_netcdf_with_fill(ds, obsFileName) + + def run_task(self): + """ + Performs remapping of obsrevations to the comparsion grid + """ + # Authors + # ------- + # Xylar Asay-Davis + + config = self.config + + obsFileName = self.get_file_name(stage='original') + if not os.path.isfile(obsFileName): + raise OSError('Obs file {} not found.'.format( + obsFileName)) + + for comparisonGridName in self.comparisonGridNames: + for season in self.seasons: + + remappedFileName = self.get_file_name( + stage='remapped', + season=season, + comparisonGridName=comparisonGridName) + + if not os.path.exists(remappedFileName): + + ds = xr.open_dataset(obsFileName) + + climatologyFileName = self.get_file_name( + stage='climatology', + season=season, + comparisonGridName=comparisonGridName) + if 'month' in ds.variables.keys() and \ + 'year' in ds.variables.keys(): + # this data set is not yet a climatology, so compute + # the climatology + monthValues = constants.monthDictionary[season] + seasonalClimatology = compute_climatology( + ds, monthValues, 
maskVaries=True) + else: + # We don't have month or year arrays to compute a + # climatology so assume this already is one + seasonalClimatology = ds + + write_netcdf_with_fill(seasonalClimatology, climatologyFileName) + + remapper = self.remappers[comparisonGridName] + + if remapper.mappingFileName is None: + # no need to remap because the observations are on the + # comparison grid already + os.symlink(climatologyFileName, remappedFileName) + else: + remap_and_write_climatology( + config, seasonalClimatology, + climatologyFileName, + remappedFileName, remapper, + logger=self.logger) + +
def get_observation_descriptor(self, fileName):
    """
    Get a ``MeshDescriptor`` for the observation grid.  This base-class
    implementation is a placeholder; a subclass derived from this class
    must override this method to create the appropriate descriptor.

    Parameters
    ----------
    fileName : str
        observation file name describing the source grid

    Returns
    -------
    obsDescriptor : ``MeshDescriptor``
        The descriptor for the observation grid (``None`` here; supplied
        by subclasses)
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    # intentionally a no-op in the base class
    return None
+ + +
def build_observational_dataset(self, fileName):
    """
    Read in the data sets for observations, and possibly rename some
    variables and dimensions.  This base-class implementation is a
    placeholder; a subclass derived from this class must override this
    method to create the appropriate data set.

    Parameters
    ----------
    fileName : str
        observation file name

    Returns
    -------
    dsObs : ``xarray.Dataset``
        The observational dataset (``None`` here; supplied by subclasses)
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    # intentionally a no-op in the base class
    return None
+ + +
def get_file_name(self, stage, season=None, comparisonGridName=None):
    """
    Given config options, the name of a field and a string identifying the
    months in a seasonal climatology, returns the full path for MPAS
    climatology files before and after remapping.

    Parameters
    ----------
    stage : {'original', 'climatology', 'remapped'}
        The stage of the masking and remapping process

    season : str, optional
        One of the seasons in ``constants.monthDictionary``

    comparisonGridName : str, optional
        The name of the comparison grid to use for remapping.

    Returns
    -------
    fileName : str
        The path to the climatology file for the specified season.

    Raises
    ------
    ValueError
        If ``stage`` is not one of the supported stages
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    if stage not in ('original', 'climatology', 'remapped'):
        raise ValueError('Unknown stage {}'.format(stage))

    config = self.config
    obsSection = '{}Observations'.format(self.componentName)

    if comparisonGridName is None:
        # any remapper will do, since we only need the obs. grid name
        remapper = self.remappers[self.comparisonGridNames[0]]
    else:
        remapper = self.remappers[comparisonGridName]

    obsGridName = remapper.sourceDescriptor.meshName
    outFilePrefix = self.outFilePrefix

    if stage == 'remapped':
        remappedDirectory = build_config_full_path(
            config=config, section='output',
            relativePathOption='remappedClimSubdirectory',
            relativePathSection=obsSection)

        make_directories(remappedDirectory)

        comparisonGridName = remapper.destinationDescriptor.meshName
        fileName = '{}/{}_{}_to_{}_{}.nc'.format(
            remappedDirectory, outFilePrefix, obsGridName,
            comparisonGridName, season)
    else:
        # both 'original' and 'climatology' files live in the
        # climatology directory
        climatologyDirectory = build_config_full_path(
            config=config, section='output',
            relativePathOption='climatologySubdirectory',
            relativePathSection=obsSection)

        make_directories(climatologyDirectory)

        if stage == 'original':
            fileName = '{}/{}_{}.nc'.format(
                climatologyDirectory, outFilePrefix, obsGridName)
        else:
            fileName = '{}/{}_{}_{}.nc'.format(
                climatologyDirectory, outFilePrefix, obsGridName, season)

    return fileName
def _setup_remappers(self, fileName):
    """
    Set up the remappers for remapping from observations to the comparison
    grids, storing them in ``self.remappers`` keyed by comparison grid
    name.

    Parameters
    ----------
    fileName : str
        The name of the observation file used to determine the source grid
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    config = self.config
    sectionName = '{}Observations'.format(self.componentName)

    # the source grid comes from the (subclass-provided) observation
    # descriptor
    obsDescriptor = self.get_observation_descriptor(fileName)
    outFilePrefix = self.outFilePrefix
    interpMethod = config.get(sectionName, 'interpolationMethod')

    self.remappers = {}
    for gridName in self.comparisonGridNames:
        comparisonDescriptor = get_comparison_descriptor(
            config, comparison_grid_name=gridName)

        self.remappers[gridName] = get_remapper(
            config=config,
            sourceDescriptor=obsDescriptor,
            comparisonDescriptor=comparisonDescriptor,
            mappingFilePrefix='map_obs_{}'.format(outFilePrefix),
            method=interpMethod,
            logger=self.logger)
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/generalized_reader/generalized_reader.html b/1.11.0rc1/_modules/mpas_analysis/shared/generalized_reader/generalized_reader.html new file mode 100644 index 000000000..830266405 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/generalized_reader/generalized_reader.html @@ -0,0 +1,513 @@ + + + + + + mpas_analysis.shared.generalized_reader.generalized_reader — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.generalized_reader.generalized_reader

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Utility functions for importing MPAS files into xarray. These functions extend
+the capabilities of mpas_xarray to include mapping variable names from MPAS
+names to MPAS-Analysis generalized names and support for slicing to given
+start and end dates.
+
+open_multifile_dataset : opens a data set, maps variable names, preprocess
+    the data set removes repeated time indices, and slices the time coordinate
+    to lie between desired start and end dates.
+"""
+# Authors
+# -------
+# Xylar Asay-Davis
+
+import xarray
+from functools import partial
+import resource
+
+from mpas_analysis.shared.mpas_xarray import mpas_xarray
+from mpas_analysis.shared.timekeeping.utility import \
+    string_to_days_since_date, days_to_datetime
+
+
+
def open_multifile_dataset(fileNames, calendar, config,
                           simulationStartTime=None,
                           timeVariableName='Time',
                           variableList=None, selValues=None,
                           iselValues=None, variableMap=None,
                           startDate=None, endDate=None,
                           chunking=None):
    """
    Opens and returns an xarray data set given file name(s) and the MPAS
    calendar name.

    Parameters
    ----------
    fileNames : list of strings
        A list of file paths to read

    calendar : {``'gregorian'``, ``'noleap'``}, optional
        The name of one of the calendars supported by MPAS cores

    config : mpas_tools.config.MpasConfigParser
        Contains configuration options

    simulationStartTime : string, optional
        The start date of the simulation, used to convert from time
        variables expressed as days since the start of the simulation to
        days since the reference date. ``simulationStartTime`` takes one
        of the following forms::

            0001-01-01
            0001-01-01 00:00:00

        ``simulationStartTime`` is only required if the MPAS time variable
        (identified by ``timeVariableName``) is a number of days since the
        start of the simulation.

    timeVariableName : string, optional
        The name of the time variable (typically ``'Time'`` if using a
        ``variableMap`` or ``'xtime'`` if not using a ``variableMap``)

    variableList : list of strings, optional
        If present, a list of variables to be included in the data set

    selValues : dict, optional
        A dictionary of coordinate names (keys) and values or arrays of
        values used to slice the variables in the data set. See
        ``xarray.DataSet.sel()`` for details on how this dictionary is
        used.  An example::

            selValues = {'cellLon': 180.0}

    iselValues : dict, optional
        A dictionary of coordinate names (keys) and indices, slices or
        arrays of indices used to slice the variables in the data set. See
        ``xarray.DataSet.isel()`` for details on how this dictionary is
        used.  An example::

            iselValues = {'nVertLevels': slice(0, 3),
                          'nCells': cellIDs}

    variableMap : dict, optional
        A dictionary with keys that are variable names used by
        MPAS-Analysis and values that are lists of possible names for the
        same variable in the MPAS dycore that produced the data set (which
        may differ between versions).

    startDate, endDate : string or datetime.datetime, optional
        If present, the first and last dates to be used in the data set.
        The time variable is sliced to only include dates within this
        range.

    chunking : None, int, True, dict, optional
        If an integer is present, it is used as the maximum chunk size;
        if None, do not perform chunking.  If True, use automated chunking
        with the config option ``maxChunkSize`` as the maximum.  If
        chunking is a dict, use the dictionary values for chunking.

    Returns
    -------
    ds : ``xarray.Dataset``

    Raises
    ------
    TypeError
        If the time variable has an unsupported type (not a date string,
        a floating-point number of days since the start of the simulation
        or a ``numpy.datetime64`` object).

    ValueError
        If the time variable is not found in the data set or if the time
        variable is a number of days since the start of the simulation but
        simulationStartTime is None.
    """
    # Authors
    # -------
    # Xylar Asay-Davis, Phillip J. Wolfram

    # bind all the slicing/renaming options so xarray can apply the
    # preprocessing to each file as it is opened
    preprocess_partial = partial(_preprocess,
                                 calendar=calendar,
                                 simulationStartTime=simulationStartTime,
                                 timeVariableName=timeVariableName,
                                 variableList=variableList,
                                 selValues=selValues,
                                 iselValues=iselValues,
                                 variableMap=variableMap,
                                 startDate=startDate,
                                 endDate=endDate)

    ds = xarray.open_mfdataset(fileNames,
                               preprocess=preprocess_partial,
                               combine='nested',
                               concat_dim='Time',
                               decode_times=False)

    ds = mpas_xarray.remove_repeated_time_index(ds)

    if startDate is not None and endDate is not None:
        if isinstance(startDate, str):
            startDate = string_to_days_since_date(dateString=startDate,
                                                  calendar=calendar)
        if isinstance(endDate, str):
            endDate = string_to_days_since_date(dateString=endDate,
                                                calendar=calendar)

        # select only the data in the specified range of dates
        ds = ds.sel(Time=slice(startDate, endDate))

        if ds.sizes['Time'] == 0:
            raise ValueError('The data set contains no Time entries between '
                             'dates {} and {}.'.format(
                                 days_to_datetime(startDate, calendar=calendar),
                                 days_to_datetime(endDate, calendar=calendar)))
    # process chunking
    if chunking is True:
        # limit chunk size to prevent memory error
        chunking = config.getint('input', 'maxChunkSize')

    ds = mpas_xarray.process_chunking(ds, chunking)

    return ds
def _preprocess(ds, calendar, simulationStartTime, timeVariableName,
                variableList, selValues, iselValues, variableMap,
                startDate, endDate):
    """
    Performs variable remapping, then calls mpas_xarray.preprocess, to
    perform the remainder of preprocessing.

    Parameters
    ----------
    ds : xarray.DataSet object
        The data set containing an MPAS time variable to be used to build
        an xarray time coordinate and with variable names to be
        substituted.

    calendar : {'gregorian', 'noleap'}
        The name of one of the calendars supported by MPAS cores

    simulationStartTime : string
        The start date of the simulation, used to convert from time
        variables expressed as days since the start of the simulation to
        days since the reference date. `simulationStartTime` takes one of
        the following forms::

            0001-01-01
            0001-01-01 00:00:00

        simulationStartTime is only required if the MPAS time variable
        (identified by timeVariableName) is a number of days since the
        start of the simulation.

    timeVariableName : string
        The name of the time variable (typically 'Time' if using a
        variableMap or 'xtime' if not using a variableMap)

    variableList : list of strings
        If present, a list of variables to be included in the data set

    selValues : dict
        A dictionary of coordinate names (keys) and values or arrays of
        values used to slice the variables in the data set. See
        xarray.DataSet.sel() for details on how this dictionary is used.
        An example::

            selValues = {'cellLon': 180.0}

    iselValues : dict
        A dictionary of coordinate names (keys) and indices, slices or
        arrays of indices used to slice the variables in the data set. See
        xarray.DataSet.isel() for details on how this dictionary is used.
        An example::

            iselValues = {'nVertLevels': slice(0, 3),
                          'nCells': cellIDs}

    variableMap : dict
        A dictionary with keys that are variable names used by
        MPAS-Analysis and values that are lists of possible names for the
        same variable in the MPAS dycore that produced the data set (which
        may differ between versions).

    startDate, endDate : string or datetime.datetime
        If present, the first and last dates to be used in the data set.
        The time variable is sliced to only include dates within this
        range.

    Returns
    -------
    ds : xarray.DataSet object
        A copy of the data set with the time coordinate set and which
        has been sliced.
    """
    # Authors
    # -------
    # Xylar Asay-Davis, Phillip J. Wolfram

    submap = variableMap

    # the time variable is a special case so we take it out of the map
    # and handle it manually (adding a new variable rather than renaming
    # an existing one)
    if variableMap is not None and timeVariableName in variableMap:
        # make a copy of variableMap and remove timeVariableName
        submap = variableMap.copy()
        submap.pop(timeVariableName, None)
        # temporarily change the time variable name
        timeVariableName = \
            _map_variable_name(timeVariableName,
                               ds,
                               variableMap)

    if submap is not None:
        ds = _rename_variables(ds, submap)

    # now that the variables are mapped, do the normal preprocessing in
    # mpas_xarray
    ds = mpas_xarray.preprocess(ds,
                                calendar=calendar,
                                simulationStartTime=simulationStartTime,
                                timeVariableName=timeVariableName,
                                variableList=variableList,
                                selValues=selValues,
                                iselValues=iselValues)

    return ds


def _map_variable_name(variableName, ds, variableMap):
    """
    Given a `variableName` in a `variableMap` and an xarray `ds`,
    return the name of the first variable in `variableMap[variableName]`
    that is found in ds.

    variableMap is a dictionary with keys that are variable names used by
    MPAS-Analysis and values that are lists of possible names for the same
    variable in the MPAS dycore that produced the data set (which may
    differ between versions).

    Parameters
    ----------
    variableName : string
        Name of a variable in `variableMap`

    ds : `xarray.DataSet` object
        A data set in which the mapped variable name should be found

    variableMap : dict
        A dictionary with keys that are variable names used by
        MPAS-Analysis and values that are lists of possible names for the
        same variable in the MPAS dycore that produced the data set (which
        may differ between versions).

    Returns
    -------
    mappedVariableName : The corresponding variable name to `variableName`
        found in `ds`.

    Raises
    ------
    ValueError
        If none of the possible variable names in
        `variableMap[variableName]` can be found in `ds`.
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    possibleVariables = variableMap[variableName]
    for variable in possibleVariables:
        if isinstance(variable, (list, tuple)):
            # a list/tuple entry matches only if *all* of its members are
            # present in the data set
            allFound = True
            for subvariable in variable:
                if subvariable not in ds.data_vars.keys():
                    allFound = False
                    break
            if allFound:
                return variable

        elif variable in ds.data_vars.keys():
            return variable

    raise ValueError('Variable {} could not be mapped. None of the '
                     'possible mapping variables {}\n match any of the '
                     'variables in {}.'.format(
                         variableName, possibleVariables,
                         ds.data_vars.keys()))


def _rename_variables(ds, variableMap):
    """
    Given an `xarray.DataSet` object `ds` and a dictionary mapping
    variable names `variableMap`, returns a new data set in which variables
    from `ds` with names equal to values in `variableMap` are renamed
    to the corresponding key in `variableMap`.

    Parameters
    ----------
    ds : `xarray.DataSet` object
        A data set in which the mapped variable names should be renamed

    variableMap : dict
        A dictionary with keys that are variable names used by
        MPAS-Analysis and values that are lists of possible names for the
        same variable in the MPAS dycore that produced the data set (which
        may differ between versions).

    Returns
    -------
    outDataSet : A new `xarray.DataSet` object with the variables renamed.
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    renameDict = {}
    for datasetVariable in ds.data_vars:
        for mapVariable in variableMap:
            renameList = variableMap[mapVariable]
            if datasetVariable in renameList:
                renameDict[datasetVariable] = mapVariable
                break

    return ds.rename(renameDict)
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/io/mpas_reader.html b/1.11.0rc1/_modules/mpas_analysis/shared/io/mpas_reader.html new file mode 100644 index 000000000..af5874cc1 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/io/mpas_reader.html @@ -0,0 +1,339 @@ + + + + + + mpas_analysis.shared.io.mpas_reader — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.io.mpas_reader

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Utility functions for reading a single MPAS file into xarray and for removing
+all but a given list of variables from a data set.
+"""
+# Authors
+# -------
+# Xylar Asay-Davis
+
+import xarray
+
+from mpas_analysis.shared.timekeeping.utility import \
+    string_to_days_since_date, days_to_datetime
+
+
+
def open_mpas_dataset(fileName, calendar,
                      timeVariableNames=('xtime_startMonthly',
                                         'xtime_endMonthly'),
                      variableList=None, startDate=None, endDate=None):
    """
    Opens and returns an xarray data set given file name(s) and the MPAS
    calendar name.

    Parameters
    ----------
    fileName : str
        File path to read

    calendar : {``'gregorian'``, ``'noleap'``}, optional
        The name of one of the calendars supported by MPAS cores

    timeVariableNames : str or list of 2 str, optional
        The name of the time variable (typically ``'xtime'``
        or ``['xtime_startMonthly', 'xtime_endMonthly']``), or ``None`` if
        time does not need to be parsed (and is already in the ``Time``
        variable).  The default is a tuple rather than a list to avoid a
        mutable default argument; callers may pass a list as before.

    variableList : list of strings, optional
        If present, a list of variables to be included in the data set

    startDate, endDate : string or datetime.datetime, optional
        If present, the first and last dates to be used in the data set.
        The time variable is sliced to only include dates within this
        range.

    Returns
    -------
    ds : ``xarray.Dataset``

    Raises
    ------
    TypeError
        If the time variable has an unsupported type (not a date string).

    ValueError
        If the time variable is not found in the data set
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    ds = xarray.open_dataset(fileName, decode_cf=True, decode_times=False,
                             lock=False)

    if timeVariableNames is not None:
        ds = _parse_dataset_time(ds, timeVariableNames, calendar)

    if startDate is not None and endDate is not None:
        if isinstance(startDate, str):
            startDate = string_to_days_since_date(dateString=startDate,
                                                  calendar=calendar)
        if isinstance(endDate, str):
            endDate = string_to_days_since_date(dateString=endDate,
                                                calendar=calendar)

        # select only the data in the specified range of dates
        ds = ds.sel(Time=slice(startDate, endDate))

        if ds.sizes['Time'] == 0:
            raise ValueError('The data set contains no Time entries between '
                             'dates {} and {}.'.format(
                                 days_to_datetime(startDate, calendar=calendar),
                                 days_to_datetime(endDate, calendar=calendar)))
    if variableList is not None:
        ds = ds[variableList]

    return ds


def _parse_dataset_time(ds, inTimeVariableName, calendar,
                        outTimeVariableName='Time',
                        referenceDate='0001-01-01'):
    """
    A helper function for computing a time coordinate from an MPAS time
    variable.  Given a data set and a time variable name (or list of 2
    time names), returns a new data set with time coordinate
    `outTimeVariableName` filled with days since `referenceDate`

    Parameters
    ----------
    ds : ``xarray.DataSet``
        The data set containing an MPAS time variable to be used to build
        an xarray time coordinate.

    inTimeVariableName : str or tuple or list of str
        The name of the time variable in the MPAS data set that will be
        used to build the 'Time' coordinate.  The array(s) named by
        inTimeVariableName should contain date strings.  Typically,
        inTimeVariableName is ``'xtime'``.  If a list of two variable
        names is provided, times from the two are averaged together to
        determine the value of the time coordinate.  In such cases,
        inTimeVariableName is typically
        ``['xtime_startMonthly', 'xtime_endMonthly']``.

    calendar : {'gregorian', 'noleap'}
        The name of one of the calendars supported by MPAS cores

    outTimeVariableName : str
        The name of the coordinate to assign times to, typically 'Time'.

    referenceDate : str, optional
        The reference date for the time variable, typically '0001-01-01',
        taking one of the following forms::

            0001-01-01
            0001-01-01 00:00:00

    Returns
    -------
    dsOut : ``xarray.DataSet``
        A copy of the input data set with the `outTimeVariableName`
        coordinate containing the time coordinate parsed from
        `inTimeVariableName`.

    Raises
    ------
    TypeError
        If the time variable has an unsupported type (not a date string
        or a floating-point number of days since the start of the
        simulation).
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    if isinstance(inTimeVariableName, (tuple, list)):
        # we want to average the two
        assert len(inTimeVariableName) == 2

        dsStart = _parse_dataset_time(
            ds=ds,
            inTimeVariableName=inTimeVariableName[0],
            calendar=calendar,
            outTimeVariableName=outTimeVariableName,
            referenceDate=referenceDate)
        dsEnd = _parse_dataset_time(
            ds=ds,
            inTimeVariableName=inTimeVariableName[1],
            calendar=calendar,
            outTimeVariableName=outTimeVariableName,
            referenceDate=referenceDate)
        starts = dsStart[outTimeVariableName].values
        ends = dsEnd[outTimeVariableName].values

        # replace the time in starts with the mean of starts and ends
        dsOut = dsStart.copy()

        dsOut.coords['startTime'] = (outTimeVariableName, starts)
        dsOut.coords['endTime'] = (outTimeVariableName, ends)

        dsOut.coords[outTimeVariableName] = (outTimeVariableName,
                                             [starts[i] +
                                              (ends[i] - starts[i]) / 2
                                              for i in range(len(starts))])

    else:
        # there is just one time variable (either because we're recursively
        # calling the function or because we're not averaging).

        timeVar = ds[inTimeVariableName]

        # xtime is expected to be stored as fixed-width byte strings
        # (dtype '|S64')
        if timeVar.dtype != '|S64':
            raise TypeError("timeVar of unsupported type {}. String variable "
                            "expected.".format(timeVar.dtype))

        # this is an array of date strings like 'xtime'
        # convert to string
        timeStrings = [''.join(xtime.astype('U')).strip()
                       for xtime in timeVar.values]
        days = string_to_days_since_date(dateString=timeStrings,
                                         referenceDate=referenceDate,
                                         calendar=calendar)

        dsOut = ds.copy()
        dsOut.coords[outTimeVariableName] = (outTimeVariableName, days)

    return dsOut
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/io/namelist_streams_interface.html b/1.11.0rc1/_modules/mpas_analysis/shared/io/namelist_streams_interface.html new file mode 100644 index 000000000..dbde59da2 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/io/namelist_streams_interface.html @@ -0,0 +1,704 @@ + + + + + + mpas_analysis.shared.io.namelist_streams_interface — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.io.namelist_streams_interface

+#!/usr/bin/env python
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Module of classes/routines to manipulate fortran namelist and streams
+files.
+"""
+# Authors
+# -------
+# Phillip Wolfram, Xylar Asay-Davis
+
+from lxml import etree
+import re
+import os.path
+import f90nml
+import json
+
+from mpas_analysis.shared.containers import ReadOnlyDict
+from mpas_analysis.shared.io.utility import paths
+from mpas_analysis.shared.timekeeping.utility import string_to_datetime
+
+
+
def convert_namelist_to_dict(fname, readonly=True):
    """
    Converts a namelist file to key-value pairs in a dictionary.

    Parameters
    ----------
    fname : str
        The file name of the namelist

    readonly : bool, optional
        Should the resulting dictionary be read-only?

    Returns
    -------
    nml : dict
        A dictionary where keys are namelist options and values are
        namelist values
    """
    # parse the fortran namelist, then round-trip through JSON to turn
    # nested ordered dicts into plain dicts (Python 3 dicts are ordered)
    # https://stackoverflow.com/a/27373027/7728169
    parsed = json.loads(json.dumps(f90nml.read(fname).todict()))

    # flatten away the section structure; option names are stored
    # lower-case for case-insensitive lookup
    flattened = {key.lower(): value
                 for section in parsed.values()
                 for key, value in section.items()}

    if readonly:
        return ReadOnlyDict(flattened)

    return flattened
+ + + +class NameList: + """ + Class for fortran manipulation of namelist files, provides + read and write functionality + """ + + # Authors + # ------- + # Phillip Wolfram, Xylar Asay-Davis + + # constructor +
def __init__(self, fname, path=None):
    """
    Parse the namelist file

    Parameters
    ----------
    fname : str
        The file name of the namelist file

    path : str, optional
        If ``fname`` contains a relative path, ``fname`` is
        relative to ``path``, rather than the current working directory
    """
    # Authors
    # -------
    # Phillip Wolfram, Xylar Asay-Davis

    # a relative file name together with an explicit path means the
    # namelist file is assumed to live under that path
    if path is not None and not os.path.isabs(fname):
        fname = '{}/{}'.format(path, fname)

    # input file name
    self.fname = fname
    # parsed namelist options as a flat (possibly read-only) dict
    self.nml = convert_namelist_to_dict(fname)
+ + + # note following accessors do not do type casting +
def __getattr__(self, key):
    """
    Accessor for dot notation, e.g., nml.field

    Parameters
    ----------
    key : str
        The key to get a value for (looked up case-insensitively)

    Returns
    -------
    value : str
        The value associated with ``key``
    """
    # Authors
    # -------
    # Phillip Wolfram, Xylar Asay-Davis

    # namelist options are stored lower-case, so normalize the lookup
    lowered = key.lower()
    return self.nml[lowered]
+ + + # provide accessor for dictionary notation (returns string) +
def __getitem__(self, key):
    """
    Accessor for bracket notation, e.g., nml['field']

    Parameters
    ----------
    key : str
        The key to get a value for (looked up case-insensitively)

    Returns
    -------
    value : Any
        The value associated with ``key``
    """
    # Authors
    # -------
    # Phillip Wolfram, Xylar Asay-Davis

    # namelist options are stored lower-case, so normalize the lookup
    lowered = key.lower()
    return self.nml[lowered]
+ + + # provide accessors for get, getint, getfloat, getbool with appropriate + # casting for comparable behavior with config files #{{{ +
def get(self, key):
    """
    Get the value associated with a given key, without type casting.

    Parameters
    ----------
    key : str
        The key to get a value for (looked up case-insensitively)

    Returns
    -------
    value : Any
        The value associated with ``key``
    """
    # Authors
    # -------
    # Phillip Wolfram, Xylar Asay-Davis

    # namelist options are stored lower-case, so normalize the lookup
    lowered = key.lower()
    return self.nml[lowered]
+ + +
def getint(self, key):
    """
    Get the integer value associated with a given key

    Parameters
    ----------
    key : str
        The key to get a value for (looked up case-insensitively)

    Returns
    -------
    value : int
        The value associated with ``key``, cast to ``int``
    """
    # Authors
    # -------
    # Phillip Wolfram, Xylar Asay-Davis

    # look up case-insensitively, then cast for comparable behavior
    # with config files
    value = self.nml[key.lower()]
    return int(value)
+ + +
def getfloat(self, key):
    """
    Get the float value associated with a given key

    Parameters
    ----------
    key : str
        The key to get a value for (looked up case-insensitively)

    Returns
    -------
    value : float
        The value associated with ``key``, cast to ``float``
    """
    # Authors
    # -------
    # Phillip Wolfram, Xylar Asay-Davis

    # look up case-insensitively, then cast for comparable behavior
    # with config files
    value = self.nml[key.lower()]
    return float(value)
+ + +
def getbool(self, key):
    """
    Get the boolean value associated with a given key

    Parameters
    ----------
    key : str
        The key to get a value for (looked up case-insensitively)

    Returns
    -------
    value : bool
        The value associated with ``key``

    Raises
    ------
    AssertionError
        If the value stored for ``key`` is not a boolean
    """
    # Authors
    # -------
    # Phillip Wolfram, Xylar Asay-Davis

    value = self.nml[key.lower()]
    # use the idiomatic isinstance check rather than an exact type
    # comparison; f90nml parses fortran logicals directly to bool, so
    # anything else indicates the option is not a logical
    assert isinstance(value, bool)
    return value
def find_option(self, possibleOptions):
    """
    If one (or more) of the names in ``possibleOptions`` is an option in
    this namelist file, returns the first match.

    Parameters
    ----------
    possibleOptions: list of str
        A list of options to search for

    Returns
    -------
    optionName : str
        The name of an option from possibleOptions occurring in the
        namelist file

    Raises
    ------
    ValueError
        If no match is found.
    """
    # Authors
    # -------
    # Xylar Asay-Davis

    # take the first candidate that actually appears in the namelist
    candidates = (name for name in possibleOptions
                  if name in self.nml.keys())
    found = next(candidates, None)
    if found is not None:
        return found

    raise ValueError('None of the possible options {} found in namelist '
                     'file {}.'.format(possibleOptions, self.fname))


class StreamsFile:
    """
    Class to read in a streams configuration file; provides read and
    write functionality
    """

    # Authors
    # -------
    # Phillip Wolfram, Xylar Asay-Davis
def __init__(self, fname, streamsdir=None):
    """
    Parse the streams file.

    Parameters
    ----------
    fname : str
        The file name of the streams file

    streamsdir : str, optional
        The base path to both the output streams data and the streams
        file (the latter only if ``fname`` is a relative path).
    """
    # Authors
    # -------
    # Phillip Wolfram, Xylar Asay-Davis

    # a relative file name together with an explicit streamsdir means
    # the streams file is assumed to live under streamsdir
    if streamsdir is not None and not os.path.isabs(fname):
        fname = '{}/{}'.format(streamsdir, fname)

    self.fname = fname
    self.xmlfile = etree.parse(fname)
    self.root = self.xmlfile.getroot()

    if streamsdir is not None:
        self.streamsdir = streamsdir
    else:
        # fall back to the absolute path of the directory where the
        # streams file resides (used to resolve relative file names
        # referred to in streams)
        self.streamsdir = os.path.dirname(os.path.abspath(fname))
+ + +
def read(self, streamname, attribname):
    """
    Get the value of the given attribute in the given stream

    Parameters
    ----------
    streamname : str
        The name of the stream

    attribname : str
        The name of the attribute within the stream

    Returns
    -------
    value : str
        The value associated with the attribute, or ``None`` if the
        attribute was not found
    """
    # Authors
    # -------
    # Phillip Wolfram, Xylar Asay-Davis

    # stream names are assumed to be unique in the XML, so the first
    # match wins; None if no stream has the requested name
    return next((stream.get(attribname) for stream in self.root
                 if stream.get('name') == streamname), None)
+ + + def read_datetime_template(self, streamname): + """ + Get the value of the given attribute in the given stream + + Parameters + ---------- + streamname : str + The name of the stream + + Returns + ------- + value : str + The template for file names from this stream in a format accepted + by ``datetime.strptime``. This is useful for parsing the date + from a given file name. + """ + # Authors + # ------- + # Xylar Asay-Davis + + template = self.read(streamname, 'filename_template') + replacements = {'$Y': '%Y', + '$M': '%m', + '$D': '%d', + '$S': '00000', # datetime doesn't handle seconds alone + '$h': '%H', + '$m': '%M', + '$s': '%S'} + + for old in replacements: + template = template.replace(old, replacements[old]) + + return template + +
+[docs] + def readpath(self, streamName, startDate=None, endDate=None, + calendar=None): + """ + Given the name of a stream and optionally start and end dates and a + calendar type, returns a list of files that match the file template in + the stream. + + Parameters + ---------- + streamName : string + The name of a stream that produced the files + + startDate, endDate : string or datetime.datetime, optional + String or datetime.datetime objects identifying the beginning + and end dates to be found. + + Note: a buffer of one output interval is subtracted from startDate + and added to endDate because the file date might be the first + or last date contained in the file (or anything in between). + + calendar : {'gregorian', 'noleap'}, optional + The name of one of the calendars supported by MPAS cores, and is + required if startDate and/or endDate are supplied + + Returns + ------- + fileList : list + A list of file names produced by the stream that fall between + the startDate and endDate (if supplied) + + Raises + ------ + ValueError + If no files from the stream are found. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + template = self.read(streamName, 'filename_template') + if template is None: + raise ValueError('Stream {} not found in streams file {}.'.format( + streamName, self.fname)) + replacements = {'$Y': '[0-9][0-9][0-9][0-9]', + '$M': '[0-9][0-9]', + '$D': '[0-9][0-9]', + '$S': '[0-9][0-9][0-9][0-9][0-9]', + '$h': '[0-9][0-9]', + '$m': '[0-9][0-9]', + '$s': '[0-9][0-9]'} + + path = template + for old in replacements: + path = path.replace(old, replacements[old]) + + if not os.path.isabs(path): + # this is not an absolute path, so make it an absolute path + path = '{}/{}'.format(self.streamsdir, path) + + fileList = paths(path) + + if len(fileList) == 0: + raise ValueError( + "Path {} in streams file {} for '{}' not found.".format( + path, self.fname, streamName)) + + if (startDate is None) and (endDate is None): + return fileList + + if startDate is not None: + # read one extra file before the start date to be on the safe side + if isinstance(startDate, str): + startDate = string_to_datetime(startDate) + + if endDate is not None: + # read one extra file after the end date to be on the safe side + if isinstance(endDate, str): + endDate = string_to_datetime(endDate) + + # remove any path that's part of the template + template = os.path.basename(template) + dateStartIndex = template.find('$') + if dateStartIndex == -1: + # there is no date in the template, so we can't exclude any files + # based on date + return fileList + dateEndOffset = len(template) - (template.rfind('$') + 2) + + outFileList = [] + for fileName in fileList: + # get just the + baseName = os.path.basename(fileName) + dateEndIndex = len(baseName) - dateEndOffset + fileDateString = baseName[dateStartIndex:dateEndIndex] + fileDate = string_to_datetime(fileDateString) + add = True + if startDate is not None and startDate > fileDate: + add = False + if endDate is not None and endDate < fileDate: + add = False + if add: + outFileList.append(fileName) + + 
return outFileList
+ + +
+[docs] + def has_stream(self, streamName): + """ + Does the stream file have the given stream? + + Returns True if the streams file has a stream with the given + streamName, otherwise returns False. + + Parameters + ---------- + streamName : str + The name of the stream + + Returns + ------- + streamFound : bool + ``True`` if the stream was found in the stream file, ``False`` + otherwise + """ + # Authors + # ------- + # Xylar Asay-Davis + + for stream in self.root: + # assumes streamname is unique in XML + if stream.get('name') == streamName: + return True + return False
+ + +
+[docs] + def find_stream(self, possibleStreams): + """ + If one (or more) of the names in ``possibleStreams`` is an stream in + this streams file, returns the first match. + + Parameters + ---------- + possibleStreams : list of str + A list of streams to search for + + Returns + ------- + streamName : str + The name of an stream from possibleOptions occurring in the + streams file + + Raises + ------ + ValueError + If no match is found. + """ + # Authors + # ------- + # Xylar Asay-Davis + + for streamName in possibleStreams: + if self.has_stream(streamName): + return streamName + + raise ValueError('None of the possible streams {} found in streams ' + 'file {}.'.format(possibleStreams, self.fname))
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/io/utility.html b/1.11.0rc1/_modules/mpas_analysis/shared/io/utility.html new file mode 100644 index 000000000..14615dd12 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/io/utility.html @@ -0,0 +1,514 @@ + + + + + + mpas_analysis.shared.io.utility — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.io.utility

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+IO utility functions
+
+Phillip J. Wolfram, Xylar Asay-Davis
+"""
+
+import glob
+import os
+import random
+import string
+from datetime import datetime
+import numpy
+import shutil
+import warnings
+
+
+
+[docs] +def paths(*args): + """ + Returns glob'd paths in list for arbitrary number of function arguments. + Note, each expanded set of paths is sorted. + + Parameters + ---------- + *args : list + A list of arguments to pass to ``glob.glob`` + + Returns + ------- + paths : list of str + A list of file paths + """ + # Authors + # ------- + # Phillip J. Wolfram + + paths = [] + for aargs in args: + paths += sorted(glob.glob(aargs)) + return paths
+ + + +def fingerprint_generator(size=12, + chars=string.ascii_uppercase + string.digits): + """ + Returns a random string that can be used as a unique fingerprint + + Parameters + ---------- + size : int, optional + The number of characters in the fingerprint + + chars : list of char, optional + The fingerprint + + Returns + ------- + fingerprint : str + A random string + + Reference + --------- + http://stackoverflow.com/questions/2257441/random-string-generation-with-upper-case-letters-and-digits-in-python + """ + # Authors + # ------- + # Phillip J. Wolfram + + return ''.join(random.choice(chars) for _ in range(size)) + + +
+[docs] +def make_directories(path): + """ + Make the given path if it does not already exist. + + Parameters + ---------- + path : str + the path to make + + Returns + ------- + path : str + the path unchanged + """ + # Authors + # ------- + # Xylar Asay-Davis + + try: + os.makedirs(path) + except OSError: + pass + return path
+ + + +
+[docs] +def build_config_full_path(config, section, relativePathOption, + relativePathSection=None, + defaultPath=None, + baseDirectoryOption='baseDirectory'): + """ + Get a full path from a base directory and a relative path + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + configuration from which to read the path + + section : str + the name of a section in `config`, which must have an option + ``baseDirectory`` + + relativePathOption : str + the name of an option in ``section`` of the relative path within + ``baseDirectory`` (or possibly an absolute path) + + relativePathSection : str, optional + the name of a section for ``relativePathOption`` if not ``section`` + + defaultPath : str, optional + the name of a path to return if the resulting path doesn't exist. + + baseDirectoryOption : str, optional + the name of the option in ``section`` for the base directorys + + Returns + ------- + fullPath : str + The full path to the given relative path within the given + ``baseDirectory`` + """ + # Authors + # ------- + # Xylar Asay-Davis + + if relativePathSection is None: + relativePathSection = section + + subDirectory = config.get(relativePathSection, relativePathOption) + if os.path.isabs(subDirectory): + fullPath = subDirectory + else: + fullPath = '{}/{}'.format(config.get(section, baseDirectoryOption), + subDirectory) + + if defaultPath is not None and not os.path.exists(fullPath): + fullPath = defaultPath + return fullPath
+ + + +def get_region_mask(config, regionMaskFile): + """ + Get the full path for a region mask with a given file name + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + configuration from which to read the path + + regionMaskFile : str + the file name of the region mask, typically a relative path + + Returns + ------- + fullFileName : str + The absolute path to the given fileName within the custom or base + diagnostics directories + """ + # Authors + # ------- + # Xylar Asay-Davis + + if os.path.isabs(regionMaskFile): + fullFileName = regionMaskFile + else: + tryCustom = config.get('diagnostics', 'customDirectory') != 'none' + found = False + fullFileName = None + if tryCustom: + # first see if region mask file is in the custom directory + regionMaskDirectory = build_config_full_path( + config, 'diagnostics', 'regionMaskSubdirectory', + baseDirectoryOption='customDirectory') + + fullFileName = '{}/{}'.format(regionMaskDirectory, + regionMaskFile) + found = os.path.exists(fullFileName) + + if not found: + # no, so second see if mapping files are in the base directory + regionMaskDirectory = build_config_full_path( + config, 'diagnostics', 'regionMaskSubdirectory', + baseDirectoryOption='base_path') + + fullFileName = '{}/{}'.format(regionMaskDirectory, + regionMaskFile) + found = os.path.exists(fullFileName) + + if not found: + # still not found, point to a local mask directory + maskSubdirectory = build_config_full_path(config, 'output', + 'maskSubdirectory') + make_directories(maskSubdirectory) + + fullFileName = '{}/{}'.format(maskSubdirectory, + regionMaskFile) + + return fullFileName + + +def build_obs_path(config, component, relativePathOption=None, + relativePathSection=None, relativePath=None): + """ + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + configuration from which to read the path + + component : {'ocean', 'seaIce', 'iceberg'} + the prefix on the ``*Observations`` section in ``config``, which must 
+ have an option ``obsSubdirectory`` + + relativePathOption : str, optional + the name of an option in `section` of the relative path within + ``obsSubdirectory`` (or possibly an absolute path) + + relativePathSection : str, optional + the name of a section for ``relativePathOption`` if not + ``<component>Observations`` + + relativePath : str, optional + As an alternative to giving the option (and possibly section) of the + relative path, it can be supplied directly + + Returns + ------- + fullPath : str + The full path to the given relative path within the observations + directory for the given component + """ + # Authors + # ------- + # Xylar Asay-Davis + + obsSection = '{}Observations'.format(component) + + if relativePath is None: + if relativePathSection is None: + relativePathSection = obsSection + relativePath = config.get(relativePathSection, relativePathOption) + + if os.path.isabs(relativePath): + fullPath = relativePath + else: + obsSubdirectory = config.get(obsSection, 'obsSubdirectory') + if os.path.isabs(obsSubdirectory): + fullPath = '{}/{}'.format(obsSubdirectory, relativePath) + else: + basePath = config.get('diagnostics', 'customDirectory') + fullPath = '{}/{}/{}'.format(basePath, obsSubdirectory, + relativePath) + if basePath == 'none' or not os.path.exists(fullPath): + basePath = config.get('diagnostics', 'base_path') + fullPath = '{}/{}/{}'.format(basePath, obsSubdirectory, + relativePath) + + return fullPath + + +
+[docs] +def check_path_exists(path): + """ + Raise an exception if the given path does not exist. + + Parameters + ---------- + path : str + Absolute path + + Raises + ------ + OSError + If the path does not exist + """ + # Authors + # ------- + # Xylar Asay-Davis + + if not (os.path.isdir(path) or os.path.isfile(path)): + raise OSError('Path {} not found'.format(path))
+ + + +def get_files_year_month(fileNames, streamsFile, streamName): + """ + Extract the year and month from file names associated with a stream + + Parameters + ---------- + fileNames : list of str + The names of files with a year and month in their names. + + streamsFile : ``StreamsFile`` + The parsed streams file, used to get a template for the + + streamName : str + The name of the stream with a file-name template for ``fileNames`` + + Returns + ------- + years, months : list of int + The years and months for each file in ``fileNames`` + """ + # Authors + # ------- + # Xylar Asay-Davis + + template = streamsFile.read_datetime_template(streamName) + template = os.path.basename(template) + dts = [datetime.strptime(os.path.basename(fileName), template) for + fileName in fileNames] + + years = [dt.year for dt in dts] + months = [dt.month for dt in dts] + + return years, months + + +def decode_strings(da): + """ + Decode to unicode strings an array that might either be char or string type + in the NetCDF file. + + Parameters + ---------- + da : ``xarray.DataArray`` + the data array of strings to decode + + Returns + ------- + strings : list + The data array as a list of unicode strings + """ + # Authors + # ------- + # Xylar Asay-Davis + + if da.dtype.type is numpy.string_: + strings = [bytes.decode(name) for name in da.values] + else: + strings = [name for name in da.values] + + return strings + + +def copyfile(src, dst): + """ Copy a file, retrying if temporarily unavailable """ + + try: + shutil.copyfile(src, dst) + except BlockingIOError: + # this is an occasional problem on Chrysalis. Try a slow copy + warnings.warn('Making a slow copy from {} to {}'.format(src, dst)) + with open(src, 'rb') as fsrc, open(dst, 'wb') as fdst: + shutil.copyfileobj(fsrc, fdst) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/mpas_xarray/mpas_xarray.html b/1.11.0rc1/_modules/mpas_analysis/shared/mpas_xarray/mpas_xarray.html new file mode 100644 index 000000000..d11fb6362 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/mpas_xarray/mpas_xarray.html @@ -0,0 +1,705 @@ + + + + + + mpas_analysis.shared.mpas_xarray.mpas_xarray — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.mpas_xarray.mpas_xarray

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import numpy as np
+import xarray
+from functools import partial
+
+from mpas_analysis.shared.timekeeping.utility import \
+    string_to_days_since_date, string_to_datetime, days_to_datetime, \
+    datetime_to_days
+
+"""
+Utility functions for importing MPAS files into xarray.
+
+open_multifile_dataset : open an xarray data set from MPAS data files
+subset_variables : Keep only a subset of variables in a dataset
+preprocess : preprocess a single file of an xarray dataset
+remove_repeated_time_index : remove redundant indices in the 'Time' coordinate
+"""
+# Authors
+# -------
+# Phillip J. Wolfram, Xylar Asay-Davis
+
+
+
+[docs] +def open_multifile_dataset(fileNames, calendar, + simulationStartTime=None, + timeVariableName='xtime', + variableList=None, selValues=None, + iselValues=None): + """ + Opens and returns an xarray data set given file name(s) and the MPAS + calendar name. + + Parameters + ---------- + fileNames : list of strings + A lsit of file paths to read + + calendar : {'gregorian', 'noleap'}, optional + The name of one of the calendars supported by MPAS cores + + simulationStartTime : string, optional + The start date of the simulation, used to convert from time variables + expressed as days since the start of the simulation to days since the + reference date. ``simulationStartTime`` takes one of the following + forms:: + + 0001-01-01 + 0001-01-01 00:00:00 + + ``simulationStartTime`` is only required if the MPAS time variable + (identified by ``timeVariableName``) is a number of days since the + start of the simulation. + + timeVariableName : string, optional + The name of the time variable (typically ``'xtime'`` or ``'Time'``). + + variableList : list of strings, optional + If present, a list of variables to be included in the data set + + selectCorrdValues : dict, optional + A dictionary of coordinate names (keys) and values or arrays of + values used to slice the variales in the data set. See + ``xarray.dataset.sel()`` for details on how this dictonary is used. + An example:: + + selectCorrdValues = {'cellLon': 180.0} + + iselValues : dict, optional + A dictionary of coordinate names (keys) and indices, slices or + arrays of indices used to slice the variales in the data set. See + ``xarray.dataset.isel()`` for details on how this dictonary is used. + An example:: + + iselValues = {'nVertLevels': slice(0, 3), + 'nCells': cellIDs} + + Returns + ------- + ds : ``xarray.Dataset`` + + Raises + ------ + TypeError + If the time variable has an unsupported type (not a date string or + a floating-pont number of days since the start of the simulation). 
+ + ValueError + If the time variable is not found in the data set or if the time + variable is a number of days since the start of the simulation but + simulationStartTime is None. + """ + # Authors + # ------- + # Xylar Asay-Davis + + preprocess_partial = partial(preprocess, + calendar=calendar, + simulationStartTime=simulationStartTime, + timeVariableName=timeVariableName, + variableList=variableList, + selValues=selValues, + iselValues=iselValues) + + ds = xarray.open_mfdataset(fileNames, + preprocess=preprocess_partial, + combine='nested', concat_dim='Time', + decode_times=False) + + ds = remove_repeated_time_index(ds) + + return ds
+ + + +
+[docs] +def subset_variables(ds, variableList): + """ + Given a data set and a list of variable names, returns a new data set that + contains only variables with those names. + + Parameters + ---------- + ds : ``xarray.DataSet`` object + The data set from which a subset of variables is to be extracted. + + variableList : string or list of strings + The names of the variables to be extracted. + + Returns + ------- + ds : ``xarray.DataSet`` object + A copy of the original data set with only the variables in + variableList. + + Raises + ------ + ValueError + If the resulting data set is empty. + """ + # Authors + # ------- + # Phillip J. Wolfram, Xylar Asay-Davis + + allvars = ds.data_vars.keys() + + # get set of variables to drop (all ds variables not in vlist) + dropvars = set(allvars) - set(variableList) + + # drop variables not requested and coordinates that are no longer needed + ds = ds.drop_vars(dropvars) + + if len(ds.data_vars.keys()) == 0: + raise ValueError( + 'Empty dataset is returned.\n' + 'Variables {}\n' + 'are not found within the dataset ' + 'variables: {}.'.format(variableList, allvars)) + + return ds
+ + + +
+[docs] +def preprocess(ds, calendar, simulationStartTime, timeVariableName, + variableList, selValues, iselValues): + """ + Builds correct time specification for MPAS, allowing a date offset + because the time must be between 1678 and 2262 based on the xarray + library. Also, if slicing information (`selValues` and/or + `iselValues`) was provided in `openMultifileDataSet`, this + function performs the appropriate slicing on the data set. + + Parameters + ---------- + ds : ``xarray.DataSet`` object + The data set containing an MPAS time variable to be used to build + an xarray time coordinate. + + calendar : {'gregorian', 'noleap'} + The name of one of the calendars supported by MPAS cores + + simulationStartTime : string, optinal + The start date of the simulation, used to convert from time + variables expressed as days since the start of the simulation to + days since the reference date. ``simulationStartTime`` takes one + of the following forms:: + + 0001-01-01 + 0001-01-01 00:00:00 + + ``simulationStartTime`` is only required if the MPAS time variable + (identified by ``timeVariableName``) is a number of days since the + start of the simulation. + + timeVariableName : string, optional + The name of the time variable (typically ``'xtime'`` or ``'Time'``). + + variableList : list of strings + If present, a list of variables to be included in the data set + + selectCorrdValues : dict + A dictionary of coordinate names (keys) and values or arrays of + values used to slice the variales in the data set. See + ``xarray.DataSet.sel()`` for details on how this dictonary is used. + An example:: + + selectCorrdValues = {'cellLon': 180.0} + + iselValues : dict + A dictionary of coordinate names (keys) and indices, slices or + arrays of indices used to slice the variales in the data set. See + ``xarray.DataSet.isel()`` for details on how this dictonary is used. 
+ An example:: + + iselValues = {'nVertLevels': slice(0, 3), + 'nCells': cellIDs} + + Returns + ------- + ds : ``xarray.DataSet`` object + A copy of the data set with the time coordinate set and which + has been sliced. + """ + # Authors + # ------- + # Phillip J. Wolfram, Milena Veneziani, Luke van Roekel + # and Xylar Asay-Davis + + # following a suggestion by @rabernat + # https://github.com/pydata/xarray/issues/2064#issuecomment-381717472 + concat_dim = 'Time' + coord_vars = [v for v in ds.data_vars if concat_dim not in ds[v].dims] + ds = ds.set_coords(coord_vars) + + ds = _parse_dataset_time(ds=ds, + inTimeVariableName=timeVariableName, + calendar=calendar, + simulationStartTime=simulationStartTime, + outTimeVariableName='Time', + referenceDate='0001-01-01') + + if variableList is not None: + ds = subset_variables(ds, + _ensure_list(variableList)) + + _assert_valid_selections(ds, selValues, + iselValues) + + if selValues is not None: + ds = ds.sel(**selValues) + + if iselValues is not None: + ds = ds.isel(**iselValues) + + return ds
+ + + +
+[docs] +def remove_repeated_time_index(ds): + """ + Remove repeated times from xarray dataset. + + Parameters + ---------- + ds : ``xarray.DataSet`` object + The data set potentially containing repeated time indices. + + Returns + ------- + ds : ``xarray.DataSet`` object + A copy of the original data set with any repeated time indices removed. + """ + # Authors + # ------- + # Phillip J. Wolfram, Xylar Asay-Davis + + # get repeated indices + times = ds.Time.values + indices = list(range(len(times))) + uniqueTimes = set() + remove = [] + for timeIndex, time in enumerate(times): + if time not in uniqueTimes: + uniqueTimes.add(time) + else: + remove.append(timeIndex) + + # remove repeaded indices, working backwards from the last + remove.reverse() + for timeIndex in remove: + indices.pop(timeIndex) + + # remove repeated indices + ds = ds.isel(Time=indices) + + return ds
+ + + +def _assert_valid_selections(ds, selvals, iselvals): + """ + Ensure that dataset selections are compatable. + + It is possible selVals and iselVals may conflict, e.g., selVals + restricts the dataset to a point where iselvals is unable to be + satisfied, hence a check is needed to make sure that keys in selvals + and iselvals are unique. Additionally, keys for selvals and iselvals + are tested to make sure they are dataset dimensions that can be used + for selection. + """ + # Authors + # ------- + # Phillip J. Wolfram, Xylar Asay-Davis + + def test_vals_in_ds(vals, dims): + if vals is not None: + for val in vals.keys(): + assert val in dims, \ + '{} is not a dimension in the dataset ' \ + 'that can be used for selection.'.format(val) + + if (selvals is not None) and (iselvals is not None): + duplicatedkeys = len(np.intersect1d(selvals.keys(), + iselvals.keys())) + assert len(duplicatedkeys) == 0, \ + 'Duplicated selection of variables {} was found! ' \ + 'Selection is ambiguous.'.format(duplicatedkeys) + + test_vals_in_ds(selvals, ds.dims) + test_vals_in_ds(iselvals, ds.dims) + + return + + +def _ensure_list(alist): + """ + Ensure that variables used as a list are actually lists. + """ + # Authors + # ------- + # Phillip J. Wolfram, Xylar Asay-Davis + + if isinstance(alist, str): + # print 'Warning, converting %s to a list'%(alist) + alist = [alist] + + return alist + + +def _parse_dataset_time(ds, inTimeVariableName, calendar, + simulationStartTime, outTimeVariableName, + referenceDate): + """ + A helper function for computing a time coordinate from an MPAS time + variable. Given a data set and a time variable name (or tuple of 2 + time names), returns a new data set with time coordinate + `outTimeVariableName` filled with days since `referenceDate` + + Parameters + ---------- + ds : xarray.DataSet object + The data set containing an MPAS time variable to be used to build + an xarray time coordinate. 
+ + inTimeVariableName : string or tuple or list of strings + The name of the time variable in the MPAS data set that will be + used to build the 'Time' coordinate. The array(s) named by + inTimeVariableName should contain date strings or the number of + days since the start of the simulation. Typically, + inTimeVariableName is one of {'daysSinceStartOfSim','xtime'}. + If a list of two variable + names is provided, times from the two are averaged together to + determine the value of the time coordinate. In such cases, + inTimeVariableName is typically {['xtime_start', 'xtime_end']}. + + calendar : {'gregorian', 'noleap'} + The name of one of the calendars supported by MPAS cores + + + simulationStartTime : string + The start date of the simulation, used to convert from time variables + expressed as days since the start of the simulation to days since the + reference date. `simulationStartTime` takes one of the following + forms:: + + 0001-01-01 + 0001-01-01 00:00:00 + + simulationStartTime is only required if the MPAS time variable + (identified by timeVariableName) is a number of days since the + start of the simulation. + + outTimeVariableName : string + The name of the coordinate to assign times to, typically 'Time'. + + referenceDate : string + The reference date for the time variable, typically '0001-01-01', + taking one of the following forms:: + + 0001-01-01 + 0001-01-01 00:00:00 + + Returns + ------- + dataset : xarray.dataset object + A copy of the input data set with the `outTimeVariableName` + coordinate containing the time coordinate parsed from + `inTimeVariableName`. + + Raises + ------ + TypeError + If the time variable has an unsupported type (not a date string + or a floating-pont number of days since the start of the simulatio). + ValueError + If the time variable is a number of days since the start of the + simulation but simulationStartTime is None. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + if isinstance(inTimeVariableName, (tuple, list)): + # we want to average the two + assert(len(inTimeVariableName) == 2) + + dsStart = _parse_dataset_time( + ds=ds, + inTimeVariableName=inTimeVariableName[0], + calendar=calendar, + simulationStartTime=simulationStartTime, + outTimeVariableName=outTimeVariableName, + referenceDate=referenceDate) + dsEnd = _parse_dataset_time( + ds=ds, + inTimeVariableName=inTimeVariableName[1], + calendar=calendar, + simulationStartTime=simulationStartTime, + outTimeVariableName=outTimeVariableName, + referenceDate=referenceDate) + starts = dsStart[outTimeVariableName].values + ends = dsEnd[outTimeVariableName].values + + # replace the time in starts with the mean of starts and ends + dsOut = dsStart.copy() + + dsOut.coords['startTime'] = (outTimeVariableName, starts) + dsOut.coords['endTime'] = (outTimeVariableName, ends) + + dsOut.coords[outTimeVariableName] = (outTimeVariableName, + [starts[i] + + (ends[i] - starts[i]) / 2 + for i in range(len(starts))]) + + else: + + # there is just one time variable (either because we're recursively + # calling the function or because we're not averaging). + + # The contents of the time variable is expected to be either a string + # (|S64) or a float (meaning days since start of the simulation). 
+ + timeVar = ds[inTimeVariableName] + + if timeVar.dtype == '|S64': + # this is an array of date strings like 'xtime' + # convert to string + timeStrings = [''.join(str(xtime.astype('U'))).strip() + for xtime in timeVar.values] + days = string_to_days_since_date(dateString=timeStrings, + referenceDate=referenceDate, + calendar=calendar) + + elif timeVar.dtype == 'float64': + # this array contains floating-point days like + # 'daysSinceStartOfSim' + + if simulationStartTime is None: + raise ValueError('MPAS time variable {} appears to be a ' + 'number of days since start \n' + 'of sim but simulationStartTime was not' + ' supplied.'.format(inTimeVariableName)) + + if (string_to_datetime(referenceDate) == + string_to_datetime(simulationStartTime)): + days = timeVar.values + else: + # a conversion may be required + dates = days_to_datetime(days=timeVar.values, + referenceDate=simulationStartTime, + calendar=calendar) + days = datetime_to_days(dates=dates, + referenceDate=referenceDate, + calendar=calendar) + + elif timeVar.dtype == 'timedelta64[ns]': + raise TypeError('timeVar of unsupported type {}. This is likely ' + 'because xarray.open_dataset \n' + 'was called with decode_times=True, which can ' + 'mangle MPAS times.'.format(timeVar.dtype)) + else: + raise TypeError("timeVar of unsupported type {}".format( + timeVar.dtype)) + + dsOut = ds.copy() + dsOut.coords[outTimeVariableName] = (outTimeVariableName, days) + + return dsOut + + +def process_chunking(ds, chunking): + """ + Computes chunking for a dataset. + + Parameters + ---------- + ds : ``xarray.Dataset`` + Input dataset to be chunked. + + chunking : None, int, dict + If chunking is an integer it specifies the maximum chunking rule, + otherwise if None do not perform chunking. If a chunking is a dict use + dictionary values for chunking. + + Returns + ------- + ds : ``xarray.Dataset`` + + Raises + ------ + + ValueError + If chunking value used is not an acceptable value. 
+ """ + # Authors + # ------- + # Phillip J. Wolfram + + if isinstance(chunking, int): + chunks = {} + for name in ds.chunks.keys(): + chunklim = np.asarray(ds.chunks[name]).max() + chunks[name] = np.minimum(chunking, chunklim) + + ds = ds.chunk(chunks) + + elif isinstance(chunking, dict): + ds = ds.chunk(chunking) + + # if chunking is None don't do any chunking + elif chunking is None: + pass + + else: + raise ValueError( + 'Chunking parameter choice is not understood ' + 'for {} of type {}\n'.format(chunking, type(chunking))) + + return ds + +# vim: ai ts=4 sts=4 et sw=4 ft=python +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/plot/climatology_map.html b/1.11.0rc1/_modules/mpas_analysis/shared/plot/climatology_map.html new file mode 100644 index 000000000..cedfbad4f --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/plot/climatology_map.html @@ -0,0 +1,933 @@ + + + + + + mpas_analysis.shared.plot.climatology_map — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.plot.climatology_map

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Functions for plotting remapped horizontal fields (and comparing with reference
+ data sets)
+"""
+# Authors
+# -------
+# Xylar Asay-Davis, Milena Veneziani, Luke Van Roekel, Greg Streletz
+
+import matplotlib
+import matplotlib.pyplot as plt
+import matplotlib.colors as cols
+import matplotlib.ticker as mticker
+import matplotlib.patches as mpatches
+import numpy as np
+from mpl_toolkits.axes_grid1 import make_axes_locatable
+from mpl_toolkits.axes_grid1.inset_locator import inset_axes
+import cartopy
+from cartopy.util import add_cyclic_point
+
+from mpas_analysis.shared.plot.colormap import setup_colormap
+from mpas_analysis.shared.plot.title import limit_title
+from mpas_analysis.shared.plot.save import savefig
+from mpas_analysis.shared.projection import get_cartopy_projection
+
+
+
+[docs] +def plot_polar_comparison( + config, + lon, + lat, + modelArray, + refArray, + diffArray, + colorMapSectionName, + fileout, + title=None, + plotProjection='npstere', + latmin=50.0, + lon0=0, + modelTitle='Model', + refTitle='Observations', + diffTitle='Model-Observations', + cbarlabel='units', + titleFontSize=None, + defaultFontSize=None, + figsize=None, + dpi=None, + vertical=False, + maxTitleLength=None): + """ + Plots a data set around either the north or south pole. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + the configuration, containing a [plot] section with options that + control plotting + + lon, lat : float arrays + longitude and latitude arrays + + modelArray, refArray : numpy.ndarray + model and observational or control run data sets + + diffArray : float array + difference between modelArray and refArray + + colorMapSectionName : str + section name in ``config`` where color map info can be found. + + fileout : str + the file name to be written + + title : str, optional + the subtitle of the plot + + plotProjection : {'npstere', 'spstere'}, optional + projection for the plot (north or south pole) + + modelTitle : str, optional + title of the model panel + + refTitle : str, optional + title of the observations or control run panel + + diffTitle : str, optional + title of the difference (bias) panel + + cbarlabel : str, optional + label on the colorbar + + titleFontSize : int, optional + size of the title font + + defaultFontSize : int, optional + the size of text other than the title + + figsize : tuple of float, optional + the size of the figure in inches. If ``None``, the figure size is + ``(8, 22)`` if ``vertical == True`` and ``(22, 8)`` otherwise. 
+ + dpi : int, optional + the number of dots per inch of the figure, taken from section ``plot`` + option ``dpi`` in the config file by default + + vertical : bool, optional + whether the subplots should be stacked vertically rather than + horizontally + + maxTitleLength : int or None, optional + the maximum number of characters in the title, beyond which it is + truncated with a trailing ellipsis. The default is from the + ``maxTitleLength`` config option. + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani + + def do_subplot(ax, field, title, colormap, norm, levels, ticks, contours, + lineWidth, lineColor, arrows): + """ + Make a subplot within the figure. + """ + + data_crs = cartopy.crs.PlateCarree() + ax.set_extent(extent, crs=data_crs) + + title = limit_title(title, maxTitleLength) + ax.set_title(title, y=1.06, **plottitle_font) + + gl = ax.gridlines(crs=data_crs, color='k', linestyle=':', zorder=5, + draw_labels=True) + gl.xlocator = mticker.FixedLocator(np.arange(-180., 181., 20.)) + gl.ylocator = mticker.FixedLocator(np.arange(-80., 81., 10.)) + gl.n_steps = 100 + gl.right_labels = False + gl.xformatter = cartopy.mpl.gridliner.LONGITUDE_FORMATTER + gl.yformatter = cartopy.mpl.gridliner.LATITUDE_FORMATTER + gl.rotate_labels = False + + fieldPeriodic, lonPeriodic = add_cyclic_point(field, lon) + + LonsPeriodic, LatsPeriodic = np.meshgrid(lonPeriodic, lat) + + if levels is None: + plotHandle = ax.pcolormesh(LonsPeriodic, LatsPeriodic, + fieldPeriodic, cmap=colormap, + norm=norm, transform=data_crs, + zorder=1, rasterized=True) + else: + plotHandle = ax.contourf(LonsPeriodic, LatsPeriodic, + fieldPeriodic, cmap=colormap, + norm=norm, levels=levels, + transform=data_crs, + zorder=1) + + _add_land_lakes_coastline(ax) + + if contours is not None: + matplotlib.rcParams['contour.negative_linestyle'] = 'solid' + ax.contour(LonsPeriodic, LatsPeriodic, fieldPeriodic, + levels=contours, colors=lineColor, + linewidths=lineWidth, transform=data_crs) + + 
divider = make_axes_locatable(ax) + cax = divider.append_axes("right", size="5%", pad=0.1, + axes_class=plt.Axes) + cbar = plt.colorbar(plotHandle, cax=cax) + cbar.set_label(cbarlabel) + if ticks is not None: + cbar.set_ticks(ticks) + cbar.set_ticklabels(['{}'.format(tick) for tick in ticks]) + + if maxTitleLength is None: + maxTitleLength = config.getint('plot', 'maxTitleLength') + + if defaultFontSize is None: + defaultFontSize = config.getint('plot', 'defaultFontSize') + matplotlib.rc('font', size=defaultFontSize) + + if dpi is None: + dpi = config.getint('plot', 'dpi') + + dictModelRef = setup_colormap(config, colorMapSectionName, suffix='Result') + dictDiff = setup_colormap(config, colorMapSectionName, suffix='Difference') + + if refArray is None: + if figsize is None: + figsize = (8, 8.5) + subplots = [111] + elif vertical: + if figsize is None: + figsize = (8, 22) + subplots = [311, 312, 313] + else: + if figsize is None: + figsize = (22, 7.5) + subplots = [131, 132, 133] + + fig = plt.figure(figsize=figsize, dpi=dpi) + + if (title is not None): + if titleFontSize is None: + titleFontSize = config.get('plot', 'titleFontSize') + title_font = {'size': titleFontSize, + 'color': config.get('plot', 'titleFontColor'), + 'weight': config.get('plot', 'titleFontWeight')} + fig.suptitle(title, y=0.95, **title_font) + + plottitle_font = {'size': config.get('plot', + 'threePanelPlotTitleFontSize')} + + if plotProjection == 'npstere': + projection = cartopy.crs.NorthPolarStereo() + extent = [-180, 180, latmin, 90] + elif plotProjection == 'spstere': + projection = cartopy.crs.SouthPolarStereo() + extent = [-180, 180, -90, latmin] + else: + raise ValueError('Unexpected plot projection {}'.format( + plotProjection)) + + ax = plt.subplot(subplots[0], projection=projection) + do_subplot(ax=ax, field=modelArray, title=modelTitle, **dictModelRef) + + if refArray is not None: + ax = plt.subplot(subplots[1], projection=projection) + do_subplot(ax=ax, field=refArray, 
title=refTitle, **dictModelRef) + + ax = plt.subplot(subplots[2], projection=projection) + do_subplot(ax=ax, field=diffArray, title=diffTitle, **dictDiff) + + fig.canvas.draw() + plt.tight_layout(pad=4.) + if vertical: + plt.subplots_adjust(top=0.9) + + if fileout is not None: + savefig(fileout, config) + + plt.close()
+ + + +
+[docs] +def plot_global_comparison( + config, + Lons, + Lats, + modelArray, + refArray, + diffArray, + colorMapSectionName, + fileout, + title=None, + modelTitle='Model', + refTitle='Observations', + diffTitle='Model-Observations', + cbarlabel='units', + titleFontSize=None, + defaultFontSize=None, + figsize=None, + dpi=None, + lineWidth=1, + lineColor='black', + maxTitleLength=None, + extend='both'): + """ + Plots a data set as a longitude/latitude map. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + the configuration, containing a [plot] section with options that + control plotting + + Lons, Lats : numpy.ndarray + longitude and latitude arrays + + modelArray, refArray : numpy.ndarray + model and observational or control run data sets + + diffArray : float array + difference between modelArray and refArray + + colorMapSectionName : str + section name in ``config`` where color map info can be found. + + fileout : str + the file name to be written + + title : str, optional + the subtitle of the plot + + modelTitle : str, optional + title of the model panel + + refTitle : str, optional + title of the observations or control run panel + + diffTitle : str, optional + title of the difference (bias) panel + + cbarlabel : str, optional + label on the colorbar + + titleFontSize : int, optional + size of the title font + + defaultFontSize : int, optional + the size of text other than the title + + figsize : tuple of float, optional + the size of the figure in inches + + dpi : int, optional + the number of dots per inch of the figure, taken from section ``plot`` + option ``dpi`` in the config file by default + + lineWidth : int, optional + the line width of contour lines (if specified) + + lineColor : str, optional + the color of contour lines (if specified) + + maxTitleLength : int or None, optional + the maximum number of characters in the title, beyond which it is + truncated with a trailing ellipsis. 
The default is from the + ``maxTitleLength`` config option. + + extend : {'neither', 'both', 'min', 'max'}, optional + Determines the ``contourf``-coloring of values that are outside the + range of the levels provided if using an indexed colormap. + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani + + def plot_panel(ax, title, array, colormap, norm, levels, ticks, contours, + lineWidth, lineColor, arrows): + + ax.set_extent(extent, crs=projection) + + title = limit_title(title, maxTitleLength) + ax.set_title(title, y=1.02, **plottitle_font) + + gl = ax.gridlines(crs=projection, color='k', linestyle=':', zorder=5, + draw_labels=True) + gl.right_labels = False + gl.top_labels = False + gl.xlocator = mticker.FixedLocator(np.arange(-180., 181., 60.)) + gl.ylocator = mticker.FixedLocator(np.arange(-80., 81., 20.)) + gl.xformatter = cartopy.mpl.gridliner.LONGITUDE_FORMATTER + gl.yformatter = cartopy.mpl.gridliner.LATITUDE_FORMATTER + + if levels is None: + plotHandle = ax.pcolormesh(Lons, Lats, array, cmap=colormap, + norm=norm, transform=projection, + zorder=1, rasterized=True) + else: + plotHandle = ax.contourf(Lons, Lats, array, cmap=colormap, + norm=norm, levels=levels, extend=extend, + transform=projection, zorder=1) + + _add_land_lakes_coastline(ax) + + if contours is not None: + matplotlib.rcParams['contour.negative_linestyle'] = 'solid' + ax.contour(Lons, Lats, array, levels=contours, colors=lineColor, + linewidths=lineWidth, transform=projection) + + cax = inset_axes(ax, width='5%', height='60%', loc='center right', + bbox_to_anchor=(0.08, 0., 1, 1), + bbox_transform=ax.transAxes, borderpad=0) + + cbar = plt.colorbar(plotHandle, cax=cax) + cbar.set_label(cbarlabel) + if ticks is not None: + cbar.set_ticks(ticks) + cbar.set_ticklabels(['{}'.format(tick) for tick in ticks]) + + if maxTitleLength is None: + maxTitleLength = config.getint('plot', 'maxTitleLength') + + if defaultFontSize is None: + defaultFontSize = config.getint('plot', 
'defaultFontSize') + matplotlib.rc('font', size=defaultFontSize) + + # set up figure + if dpi is None: + dpi = config.getint('plot', 'dpi') + if figsize is None: + # set the defaults, depending on if we have 1 or 3 panels + if refArray is None: + figsize = (8, 5) + else: + figsize = (8, 13) + fig = plt.figure(figsize=figsize, dpi=dpi) + if (title is not None): + if titleFontSize is None: + titleFontSize = config.get('plot', 'titleFontSize') + title_font = {'size': titleFontSize, + 'color': config.get('plot', 'titleFontColor'), + 'weight': config.get('plot', 'titleFontWeight')} + fig.suptitle(title, y=0.935, **title_font) + + plottitle_font = {'size': config.get('plot', + 'threePanelPlotTitleFontSize')} + + if refArray is None: + subplots = [111] + else: + subplots = [311, 312, 313] + + projection = cartopy.crs.PlateCarree() + + extent = [-180, 180, -85, 85] + + dictModelRef = setup_colormap(config, colorMapSectionName, suffix='Result') + dictDiff = setup_colormap(config, colorMapSectionName, suffix='Difference') + + axes = [] + ax = plt.subplot(subplots[0], projection=projection) + plot_panel(ax, modelTitle, modelArray, **dictModelRef) + axes.append(ax) + + if refArray is not None: + ax = plt.subplot(subplots[1], projection=projection) + plot_panel(ax, refTitle, refArray, **dictModelRef) + axes.append(ax) + + ax = plt.subplot(subplots[2], projection=projection) + plot_panel(ax, diffTitle, diffArray, **dictDiff) + axes.append(ax) + + _add_stats(modelArray, refArray, diffArray, Lats, axes) + + if fileout is not None: + savefig(fileout, config, pad_inches=0.2) + + plt.close()
+ + + +def plot_projection_comparison( + config, + x, + y, + landMask, + modelArray, + refArray, + diffArray, + fileout, + colorMapSectionName, + projectionName, + title=None, + modelTitle='Model', + refTitle='Observations', + diffTitle='Model-Observations', + cbarlabel='units', + titleFontSize=None, + cartopyGridFontSize=None, + defaultFontSize=None, + vertical=False, + maxTitleLength=None, + extend='both'): + """ + Plots a data set as a projection map. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + the configuration, containing a [plot] section with options that + control plotting + + x, y : numpy.ndarrays + 1D x and y arrays of the corners of grid cells on the projection grid + + landMask : numpy.ndarrays + model and observational or control run data sets + + modelArray, refArray : numpy.ndarrays + model and observational or control run data sets + + diffArray : float array + difference between modelArray and refArray + + fileout : str + the file name to be written + + colorMapSectionName : str + section name in ``config`` where color map info can be found. + + title : str, optional + the subtitle of the plot + + modelTitle : str, optional + title of the model panel + + refTitle : str, optional + title of the observations or control run panel + + diffTitle : str, optional + title of the difference (bias) panel + + cbarlabel : str, optional + label on the colorbar + + titleFontSize : int, optional + size of the title font + + cartopyGridFontSize : int, optional + the size of text used by cartopy to label lon and lat + + defaultFontSize : int, optional + the size of text other than the title + + vertical : bool, optional + whether the subplots should be stacked vertically rather than + horizontally + + projectionName : str, optional + the name of projection that the data is on, one of the projections + available via + :py:func:`mpas_analysis.shared.projection.get_cartopy_projection()`. 
+ + maxTitleLength : int or None, optional + the maximum number of characters in the title, beyond which it is + truncated with a trailing ellipsis. The default is from the + ``maxTitleLength`` config option. + + extend : {'neither', 'both', 'min', 'max'}, optional + Determines the ``contourf``-coloring of values that are outside the + range of the levels provided if using an indexed colormap. + """ + # Authors + # ------- + # Xylar Asay-Davis + + def add_arrow_to_line_2d(ax, poly, arrow_spacing=8e5, arrow_width=1.5e4): + """ + https://stackoverflow.com/a/27637925/7728169 + Add arrows to a matplotlib.lines.Line2D at selected locations. + + Polygons instead of paths, following + https://gis.stackexchange.com/a/246861/143986 + """ + x = poly[:, 0] + y = poly[:, 1] + arrows = [] + delta = np.sqrt(np.diff(x) ** 2 + np.diff(y) ** 2) + s = np.cumsum(delta) + indices = np.searchsorted( + s, arrow_spacing*np.arange(1, int(s[-1]/arrow_spacing))) + for n in indices: + dx = np.mean(x[n-2:n]) - x[n] + dy = np.mean(y[n-2:n]) - y[n] + p = mpatches.FancyArrow( + x[n], y[n], dx, dy, length_includes_head=False, + width=arrow_width, facecolor='k') + ax.add_patch(p) + arrows.append(p) + return arrows + + def plot_panel(ax, title, array, colormap, norm, levels, ticks, contours, + lineWidth, lineColor, arrows): + + title = limit_title(title, maxTitleLength) + ax.set_title(title, **plottitle_font) + + ax.set_extent(extent, crs=projection) + + gl = ax.gridlines(crs=cartopy.crs.PlateCarree(), color='k', + linestyle=':', zorder=5, draw_labels=True) + gl.xlocator = mticker.FixedLocator(lonLines) + gl.ylocator = mticker.FixedLocator(latLines) + gl.n_steps = 100 + gl.right_labels = False + gl.left_labels = left_labels + gl.xformatter = cartopy.mpl.gridliner.LONGITUDE_FORMATTER + gl.yformatter = cartopy.mpl.gridliner.LATITUDE_FORMATTER + gl.xlabel_style['size'] = cartopyGridFontSize + gl.ylabel_style['size'] = cartopyGridFontSize + gl.rotate_labels = False + + if levels is None: + plotHandle = 
ax.pcolormesh(x, y, array, cmap=colormap, norm=norm, + rasterized=True) + else: + plotHandle = ax.contourf(xCenter, yCenter, array, cmap=colormap, + norm=norm, levels=levels, extend=extend) + + if useCartopyCoastline: + _add_land_lakes_coastline(ax, ice_shelves=False) + else: + # add the model coastline + plt.pcolormesh(x, y, landMask, cmap=landColorMap) + plt.contour(xCenter, yCenter, landMask.mask, (0.5,), colors='k', + linewidths=0.5) + + if contours is not None: + matplotlib.rcParams['contour.negative_linestyle'] = 'solid' + x_center = 0.5*(x[0:-1] + x[1:]) + y_center = 0.5*(y[0:-1] + y[1:]) + cs = ax.contour(x_center, y_center, array, levels=contours, + colors=lineColor, linewidths=lineWidth) + # add arrows to streamlines + if arrows is not None: + for collection in cs.collections: + for path in collection.get_paths(): + for poly in path.to_polygons(): + add_arrow_to_line_2d(ax, poly) + # create an axes on the right side of ax. The width of cax will be 5% + # of ax and the padding between cax and ax will be fixed at 0.05 inch. 
+ divider = make_axes_locatable(ax) + cax = divider.append_axes("right", size="5%", pad=0.05, + axes_class=plt.Axes) + + cbar = plt.colorbar(plotHandle, cax=cax) + cbar.set_label(cbarlabel) + if ticks is not None: + cbar.set_ticks(ticks) + cbar.set_ticklabels(['{}'.format(tick) for tick in ticks]) + + if maxTitleLength is None: + maxTitleLength = config.getint('plot', 'maxTitleLength') + + if defaultFontSize is None: + defaultFontSize = config.getint('plot', 'defaultFontSize') + matplotlib.rc('font', size=defaultFontSize) + + if cartopyGridFontSize is None: + cartopyGridFontSize = config.getint('plot', 'cartopyGridFontSize') + + # set up figure + dpi = config.getint('plot', 'dpi') + + section = f'plot_{projectionName}' + useCartopyCoastline = config.getboolean(section, 'useCartopyCoastline') + + if refArray is None: + figsize = config.getexpression(section, 'onePanelFigSize') + subplots = [111] + elif vertical: + figsize = config.getexpression(section, 'threePanelVertFigSize') + subplots = [311, 312, 313] + else: + figsize = config.getexpression(section, 'threePanelHorizFigSize') + subplots = [131, 132, 133] + latLines = config.getexpression(section, 'latLines', use_numpyfunc=True) + lonLines = config.getexpression(section, 'lonLines', use_numpyfunc=True) + + # put latitude labels on the left unless we're in a polar projection + left_labels = projectionName not in ['arctic', 'antarctic'] + + dictModelRef = setup_colormap(config, colorMapSectionName, suffix='Result') + dictDiff = setup_colormap(config, colorMapSectionName, suffix='Difference') + + fig = plt.figure(figsize=figsize, dpi=dpi) + + if title is not None: + if titleFontSize is None: + titleFontSize = config.get('plot', 'titleFontSize') + title_font = {'size': titleFontSize, + 'color': config.get('plot', 'titleFontColor'), + 'weight': config.get('plot', 'titleFontWeight')} + fig.suptitle(title, y=0.95, **title_font) + + plottitle_font = {'size': config.get('plot', + 'threePanelPlotTitleFontSize')} + + # set 
up land colormap + if not useCartopyCoastline: + colorList = [(0.8, 0.8, 0.8), (0.8, 0.8, 0.8)] + landColorMap = cols.LinearSegmentedColormap.from_list('land', colorList) + + # locations of centers for contour plots + xCenter = 0.5 * (x[1:] + x[0:-1]) + yCenter = 0.5 * (y[1:] + y[0:-1]) + + projection = get_cartopy_projection(projectionName) + + extent = [x[0], x[-1], y[0], y[-1]] + + ax = plt.subplot(subplots[0], projection=projection) + plot_panel(ax, modelTitle, modelArray, **dictModelRef) + + if refArray is not None: + ax = plt.subplot(subplots[1], projection=projection) + plot_panel(ax, refTitle, refArray, **dictModelRef) + + ax = plt.subplot(subplots[2], projection=projection) + plot_panel(ax, diffTitle, diffArray, **dictDiff) + + if fileout is not None: + savefig(fileout, config) + + plt.close() + + +def _add_stats(modelArray, refArray, diffArray, Lats, axes): + """ compute the means, std devs. and Pearson correlation """ + weights = np.cos(np.deg2rad(Lats)) + + model_weights = weights + model_mask = None + if isinstance(modelArray, np.ma.MaskedArray): + # make sure we're using the MPAS land mask for all 3 sets of stats + model_mask = modelArray.mask + model_weights = np.ma.array(weights, mask=model_mask) + + modelMean = np.average(modelArray, weights=model_weights) + + _add_stats_text( + names=['Min', 'Mean', 'Max'], + values=[np.amin(modelArray), modelMean, np.amax(modelArray)], + ax=axes[0], loc='upper') + + if refArray is not None: + ref_weights = weights + ref_mask = None + if isinstance(modelArray, np.ma.MaskedArray): + # make sure we're using the MPAS land mask for all 3 sets of stats + if isinstance(refArray, np.ma.MaskedArray): + # mask invalid where either model or ref array is invalid + ref_mask = np.logical_or(model_mask, refArray.mask) + ref_weights = np.ma.array(weights, mask=ref_mask) + refArray = np.ma.array(refArray, mask=ref_mask) + modelAnom = modelArray - modelMean + modelVar = np.average(modelAnom ** 2, weights=ref_weights) + refMean = 
np.average(refArray, weights=ref_weights) + refAnom = refArray - refMean + refVar = np.average(refAnom**2, weights=ref_weights) + + _add_stats_text( + names=['Min', 'Mean', 'Max'], + values=[np.amin(refArray), refMean, np.amax(refArray)], + ax=axes[1], loc='upper') + + diffMean = np.average(diffArray, weights=ref_weights) + diffVar = np.average((diffArray - diffMean)**2, weights=ref_weights) + diffRMSE = np.sqrt(diffVar) + + _add_stats_text( + names=['Min', 'Mean', 'Max'], + values=[np.amin(diffArray), diffMean, np.amax(diffArray)], + ax=axes[2], loc='upper') + + covar = np.average(modelAnom*refAnom, weights=ref_weights) + + corr = covar/np.sqrt(modelVar*refVar) + + _add_stats_text( + names=['RMSE', 'Corr'], + values=[diffRMSE, corr], + ax=axes[2], loc='lower') + + +def _add_stats_text(names, values, ax, loc): + if loc == 'upper': + text_ax = inset_axes(ax, width='17%', height='20%', loc='upper right', + bbox_to_anchor=(0.2, 0.1, 1., 1.), + bbox_transform=ax.transAxes, borderpad=0) + else: + text_ax = inset_axes(ax, width='17%', height='20%', loc='lower right', + bbox_to_anchor=(0.2, 0.03, 1., 1.), + bbox_transform=ax.transAxes, borderpad=0) + + text = '\n'.join(names) + text_ax.text(0., 0., text, fontsize=10, horizontalalignment='left') + + text = '\n'.join(['{:6.4g}'.format(val) for val in values]) + + text_ax.text(1., 0., text, fontsize=10, horizontalalignment='right') + text_ax.axis('off') + + +def _add_land_lakes_coastline(ax, ice_shelves=True): + land_50m = cartopy.feature.NaturalEarthFeature( + 'physical', 'land', '50m', edgecolor='k', + facecolor='#cccccc', linewidth=0.5) + lakes_50m = cartopy.feature.NaturalEarthFeature( + 'physical', 'lakes', '50m', edgecolor='k', + facecolor='white', + linewidth=0.5) + ax.add_feature(land_50m, zorder=2) + if ice_shelves: + ice_50m = cartopy.feature.NaturalEarthFeature( + 'physical', 'antarctic_ice_shelves_polys', '50m', edgecolor='k', + facecolor='lightgray', linewidth=0.5) + ax.add_feature(ice_50m, zorder=3) + 
ax.add_feature(lakes_50m, zorder=4) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/plot/colormap.html b/1.11.0rc1/_modules/mpas_analysis/shared/plot/colormap.html new file mode 100644 index 000000000..11e9524a5 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/plot/colormap.html @@ -0,0 +1,635 @@ + + + + + + mpas_analysis.shared.plot.colormap — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.plot.colormap

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Utilities for handling color maps and color bars
+"""
+# Authors
+# -------
+# Xylar Asay-Davis, Milena Veneziani, Luke Van Roekel, Greg Streletz
+
+import matplotlib.pyplot as plt
+import matplotlib.colors as cols
+import numpy as np
+from matplotlib.colors import LinearSegmentedColormap
+import matplotlib
+from xml.etree import ElementTree
+import configparser
+import cmocean
+import pkg_resources
+
+
+
+[docs] +def setup_colormap(config, configSectionName, suffix=''): + """ + Set up a colormap from the registry + + Parameters + ---------- + config : instance of ConfigParser + the configuration, containing a [plot] section with options that + control plotting + + configSectionName : str + name of config section + + suffix: str, optional + suffix of colormap related options + + Returns + ------- + colormapDict : dict + A dictionary of colormap information. + + 'colormap' specifies the name of the new colormap + + 'norm' is a matplotlib norm object used to normalize the colormap + + 'levels' is an array of contour levels or ``None`` if not using indexed + color map + + 'ticks' is an array of values where ticks should be placed + + 'contours' is an array of contour values to plot or ``None`` if none + have been specified + + 'lineWidth' is the width of contour lines or ``None`` if not specified + + 'lineColor' is the color of contour lines or ``None`` if not specified + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani, Greg Streletz + + register_custom_colormaps() + + option = 'colormapType{}'.format(suffix) + if config.has_option(configSectionName, option): + colormapType = config.get(configSectionName, option) + if colormapType == 'indexed': + (colormap, norm, levels, ticks) = _setup_indexed_colormap( + config, configSectionName, suffix=suffix) + elif colormapType == 'continuous': + (colormap, norm, ticks) = _setup_colormap_and_norm( + config, configSectionName, suffix=suffix) + levels = None + else: + raise ValueError(f'config section {configSectionName} option ' + f'{option} is not "indexed" or "continuous"') + else: + colormap = None + norm = None + levels = None + ticks = None + + option = 'contourLevels{}'.format(suffix) + if config.has_option(configSectionName, option): + contours = config.getexpression(configSectionName, + option, + use_numpyfunc=True) + if isinstance(contours, str) and contours == 'none': + contours = None + else: + 
contours = None + + option = 'contourThickness{}'.format(suffix) + if config.has_option(configSectionName, option): + lineWidth = config.getfloat(configSectionName, option) + else: + lineWidth = None + + option = 'contourColor{}'.format(suffix) + if config.has_option(configSectionName, option): + lineColor = config.get(configSectionName, option) + else: + lineColor = None + option = 'arrowsOnContour{}'.format(suffix) + if config.has_option(configSectionName, option): + arrows = config.getboolean(configSectionName, option) + else: + arrows = None + return {'colormap': colormap, 'norm': norm, 'levels': levels, + 'ticks': ticks, 'contours': contours, 'lineWidth': lineWidth, + 'lineColor': lineColor, 'arrows': arrows}
+ + + +def register_custom_colormaps(): + name = 'ferret' + backgroundColor = (0.9, 0.9, 0.9) + + red = np.array([[0, 0.6], + [0.15, 1], + [0.35, 1], + [0.65, 0], + [0.8, 0], + [1, 0.75]]) + + green = np.array([[0, 0], + [0.1, 0], + [0.35, 1], + [1, 0]]) + + blue = np.array([[0, 0], + [0.5, 0], + [0.9, 0.9], + [1, 0.9]]) + + colorCount = 21 + colorList = np.ones((colorCount, 4), float) + colorList[:, 0] = np.interp(np.linspace(0, 1, colorCount), + red[:, 0], red[:, 1]) + colorList[:, 1] = np.interp(np.linspace(0, 1, colorCount), + green[:, 0], green[:, 1]) + colorList[:, 2] = np.interp(np.linspace(0, 1, colorCount), + blue[:, 0], blue[:, 1]) + colorList = colorList[::-1, :] + + colorMap = cols.LinearSegmentedColormap.from_list( + name, colorList, N=255) + + colorMap.set_bad(backgroundColor) + _register_colormap_and_reverse(name, colorMap) + + name = 'erdc_iceFire_H' + + colorArray = np.array([ + [-1, 4.05432e-07, 0, 5.90122e-06], + [-0.87451, 0, 0.120401, 0.302675], + [-0.74902, 0, 0.216583, 0.524574], + [-0.623529, 0.0552475, 0.345025, 0.6595], + [-0.498039, 0.128047, 0.492588, 0.720288], + [-0.372549, 0.188955, 0.641309, 0.792092], + [-0.247059, 0.327673, 0.784935, 0.873434], + [-0.121569, 0.60824, 0.892164, 0.935547], + [0.00392157, 0.881371, 0.912178, 0.818099], + [0.129412, 0.951407, 0.835621, 0.449279], + [0.254902, 0.904481, 0.690489, 0], + [0.380392, 0.85407, 0.510864, 0], + [0.505882, 0.777093, 0.33018, 0.00088199], + [0.631373, 0.672862, 0.139087, 0.00269398], + [0.756863, 0.508815, 0, 0], + [0.882353, 0.299417, 0.000366289, 0.000547829], + [1, 0.0157519, 0.00332021, 4.55569e-08]], float) + + colorCount = 255 + colorList = np.ones((colorCount, 4), float) + x = colorArray[:, 0] + for cIndex in range(3): + colorList[:, cIndex] = np.interp( + np.linspace(-1., 1., colorCount), + x, colorArray[:, cIndex + 1]) + + colorMap = cols.LinearSegmentedColormap.from_list( + name, colorList, N=255) + + _register_colormap_and_reverse(name, colorMap) + + name = 
'erdc_iceFire_L' + + colorArray = np.array([ + [-1, 0.870485, 0.913768, 0.832905], + [-0.87451, 0.586919, 0.887865, 0.934003], + [-0.74902, 0.31583, 0.776442, 0.867858], + [-0.623529, 0.18302, 0.632034, 0.787722], + [-0.498039, 0.117909, 0.484134, 0.713825], + [-0.372549, 0.0507239, 0.335979, 0.654741], + [-0.247059, 0, 0.209874, 0.511832], + [-0.121569, 0, 0.114689, 0.28935], + [0.00392157, 0.0157519, 0.00332021, 4.55569e-08], + [0.129412, 0.312914, 0, 0], + [0.254902, 0.520865, 0, 0], + [0.380392, 0.680105, 0.15255, 0.0025996], + [0.505882, 0.785109, 0.339479, 0.000797922], + [0.631373, 0.857354, 0.522494, 0], + [0.756863, 0.910974, 0.699774, 0], + [0.882353, 0.951921, 0.842817, 0.478545], + [1, 0.881371, 0.912178, 0.818099]], float) + + colorCount = 255 + colorList = np.ones((colorCount, 4), float) + x = colorArray[:, 0] + for cIndex in range(3): + colorList[:, cIndex] = np.interp( + np.linspace(-1., 1., colorCount), + x, colorArray[:, cIndex + 1]) + + colorMap = cols.LinearSegmentedColormap.from_list( + name, colorList, N=255) + + _register_colormap_and_reverse(name, colorMap) + + name = 'BuOr' + colors1 = plt.cm.PuOr(np.linspace(0., 1, 256)) + colors2 = plt.cm.RdBu(np.linspace(0, 1, 256)) + + # combine them and build a new colormap, just the orange from the first + # and the blue from the second + colorList = np.vstack((colors1[0:128, :], colors2[128:256, :])) + # reverse the order + colorList = colorList[::-1, :] + colorMap = cols.LinearSegmentedColormap.from_list(name, colorList) + + _register_colormap_and_reverse(name, colorMap) + + name = 'Maximenko' + colorArray = np.array([ + [-1, 0., 0.45882352941, 0.76470588235], + [-0.666667, 0., 0.70196078431, 0.90588235294], + [-0.333333, 0.3294117647, 0.87058823529, 1.], + [0., 0.76470588235, 0.94509803921, 0.98039215686], + [0.333333, 1., 1., 0.], + [0.666667, 1., 0.29411764705, 0.], + [1, 1., 0., 0.]], float) + + colorCount = 255 + colorList = np.ones((colorCount, 4), float) + x = colorArray[:, 0] + for cIndex in 
range(3): + colorList[:, cIndex] = np.interp( + np.linspace(-1., 1., colorCount), + x, colorArray[:, cIndex + 1]) + + colorMap = cols.LinearSegmentedColormap.from_list( + name, colorList, N=255) + + _register_colormap_and_reverse(name, colorMap) + + # add the cmocean color maps + map_names = list(cmocean.cm.cmapnames) + # don't bother with gray (already exists, I think) + map_names.pop(map_names.index('gray')) + for map_name in map_names: + _register_colormap_and_reverse(map_name, getattr(cmocean.cm, map_name)) + + # add ScientificColourMaps7 from + # http://www.fabiocrameri.ch/colourmaps.php + # https://doi.org/10.5281/zenodo.5501399 + for map_name in ['acton', 'bam', 'bamako', 'bamO', 'batlow', 'batlowK', + 'batlowW', 'berlin', 'bilbao', 'broc', 'brocO', 'buda', + 'bukavu', 'cork', 'corkO', 'davos', 'devon', 'fes', + 'grayC', 'hawaii', 'imola', 'lajolla', 'lapaz', 'lisbon', + 'nuuk', 'oleron', 'oslo', 'roma', 'romaO', 'tofino', + 'tokyo', 'turku', 'vanimo', 'vik', 'vikO']: + xml_file = f'ScientificColourMaps7/{map_name}/{map_name}_PARAVIEW.xml' + xml_file = pkg_resources.resource_filename(__name__, xml_file) + _read_xml_colormap(xml_file, map_name) + + # add SciVisColor colormaps from + # https://sciviscolor.org/home/colormaps/ + for map_name in ['3wave-yellow-grey-blue', '3Wbgy5', + '4wave-grey-red-green-mgreen', '5wave-yellow-brown-blue', + 'blue-1', 'blue-3', 'blue-6', 'blue-8', 'blue-orange-div', + 'brown-2', 'brown-5', 'brown-8', 'green-1', 'green-4', + 'green-7', 'green-8', 'orange-5', 'orange-6', + 'orange-green-blue-gray', 'purple-7', 'purple-8', 'red-1', + 'red-3', 'red-4', 'yellow-1', 'yellow-7']: + + xml_file = f'SciVisColorColormaps/{map_name}.xml' + xml_file = pkg_resources.resource_filename(__name__, xml_file) + _read_xml_colormap(xml_file, map_name) + + name = 'white_cmo_deep' + # modify cmo.deep to start at white + colors2 = plt.cm.get_cmap('cmo.deep')(np.linspace(0, 1, 224)) + colorCount = 32 + colors1 = np.ones((colorCount, 4), float) + x = 
np.linspace(0., 1., colorCount+1)[0:-1] + white = [1., 1., 1., 1.] + for cIndex in range(4): + colors1[:, cIndex] = np.interp(x, [0., 1.], + [white[cIndex], colors2[0, cIndex]]) + + colors = np.vstack((colors1, colors2)) + + # generating a smoothly-varying LinearSegmentedColormap + cmap = LinearSegmentedColormap.from_list(name, colors) + _register_colormap_and_reverse(name, cmap) + + +def _setup_colormap_and_norm(config, configSectionName, suffix=''): + """ + Set up a colormap from the registry + + Parameters + ---------- + config : instance of ConfigParser + the configuration, containing a [plot] section with options that + control plotting + + configSectionName : str + name of config section + + suffix: str, optional + suffix of colormap related options + + Returns + ------- + colormap : srt + new colormap + + norm : matplotlib.colors.Normalize + the norm used to normalize the colormap + + ticks : array of float + the tick marks on the colormap + """ + # Authors + # ------- + # Xylar Asay-Davis + + register_custom_colormaps() + + colormap = plt.get_cmap(config.get(configSectionName, + 'colormapName{}'.format(suffix))) + + normType = config.get(configSectionName, 'normType{}'.format(suffix)) + + kwargs = config.getexpression(configSectionName, + 'normArgs{}'.format(suffix)) + + if normType == 'symLog': + norm = cols.SymLogNorm(**kwargs) + elif normType == 'log': + norm = cols.LogNorm(**kwargs) + elif normType == 'linear': + norm = cols.Normalize(**kwargs) + else: + raise ValueError('Unsupported norm type {} in section {}'.format( + normType, configSectionName)) + + try: + ticks = config.getexpression( + configSectionName, 'colorbarTicks{}'.format(suffix), + use_numpyfunc=True) + except configparser.NoOptionError: + ticks = None + + return colormap, norm, ticks + + +def _setup_indexed_colormap(config, configSectionName, suffix=''): + """ + Set up a colormap from the registry + + Parameters + ---------- + config : instance of ConfigParser + the configuration, 
containing a [plot] section with options that + control plotting + + configSectionName : str + name of config section + + suffix: str, optional + suffix of colormap related options + + Returns + ------- + colormap : srt + new colormap + + norm : matplotlib.colors.Normalize + the norm used to normalize the colormap + + ticks : array of float + the tick marks on the colormap + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani, Greg Streletz + + colormap = plt.get_cmap(config.get(configSectionName, + 'colormapName{}'.format(suffix))) + + indices = config.getexpression(configSectionName, + 'colormapIndices{}'.format(suffix), + use_numpyfunc=True) + + try: + levels = config.getexpression( + configSectionName, 'colorbarLevels{}'.format(suffix), + use_numpyfunc=True) + except configparser.NoOptionError: + levels = None + + if levels is not None: + # set under/over values based on the first/last indices in the colormap + underColor = colormap(indices[0]) + overColor = colormap(indices[-1]) + if len(levels) + 1 == len(indices): + # we have 2 extra values for the under/over so make the colormap + # without these values + indices = indices[1:-1] + elif len(levels) - 1 != len(indices): + # indices list must be either one element shorter + # or one element longer than colorbarLevels list + raise ValueError('length mismatch between indices and ' + 'colorbarLevels') + colormap = cols.ListedColormap(colormap(indices), + 'colormapName{}'.format(suffix)) + colormap.set_under(underColor) + colormap.set_over(overColor) + + norm = cols.BoundaryNorm(levels, colormap.N) + + try: + ticks = config.getexpression( + configSectionName, 'colorbarTicks{}'.format(suffix), + use_numpyfunc=True) + except configparser.NoOptionError: + ticks = levels + + return colormap, norm, levels, ticks + + +def _read_xml_colormap(xmlFile, map_name): + """Read in an XML colormap""" + + xml = ElementTree.parse(xmlFile) + + root = xml.getroot() + colormap = root.findall('ColorMap') + if 
len(colormap) > 0: + colormap = colormap[0] + colorDict = {'red': [], 'green': [], 'blue': []} + for point in colormap.findall('Point'): + x = float(point.get('x')) + color = [float(point.get('r')), float(point.get('g')), + float(point.get('b'))] + colorDict['red'].append((x, color[0], color[0])) + colorDict['green'].append((x, color[1], color[1])) + colorDict['blue'].append((x, color[2], color[2])) + cmap = LinearSegmentedColormap(map_name, colorDict, 256) + + _register_colormap_and_reverse(map_name, cmap) + + +def _register_colormap_and_reverse(map_name, cmap): + if map_name not in matplotlib.colormaps: + matplotlib.colormaps.register(cmap, name=map_name) + matplotlib.colormaps.register(cmap.reversed(), name=f'{map_name}_r') + + +def _plot_color_gradients(): + """from https://matplotlib.org/tutorials/colors/colormaps.html""" + + cmap_list = [m for m in plt.colormaps() if not m.endswith("_r")] + + gradient = np.linspace(0, 1, 256) + gradient = np.vstack((gradient, gradient)) + + nrows = len(cmap_list) + + fig, axes = plt.subplots(figsize=(7.2, 0.25 * nrows), nrows=nrows) + fig.subplots_adjust(top=0.99, bottom=0.01, left=0.35, right=0.99) + + for ax, name in zip(axes, cmap_list): + ax.imshow(gradient, aspect='auto', cmap=plt.get_cmap(name)) + pos = list(ax.get_position().bounds) + x_text = pos[0] - 0.01 + y_text = pos[1] + pos[3] / 2. + fig.text(x_text, y_text, name, va='center', ha='right', fontsize=10) + + # Turn off *all* ticks & spines, not just the ones with colormaps. + for ax in axes: + ax.set_axis_off() + + plt.savefig('colormaps.png', dpi=100) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/plot/inset.html b/1.11.0rc1/_modules/mpas_analysis/shared/plot/inset.html new file mode 100644 index 000000000..231a7b31f --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/plot/inset.html @@ -0,0 +1,324 @@ + + + + + + mpas_analysis.shared.plot.inset — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.plot.inset

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Functions for plotting inset maps in plots (e.g. for transects)
+"""
+# Authors
+# -------
+# Xylar Asay-Davis
+
+import matplotlib.path
+import matplotlib.ticker as mticker
+import cartopy
+import cartopy.crs as ccrs
+import numpy
+import shapely.geometry
+
+from geometric_features.plot import subdivide_geom
+
+
+
+[docs] +def add_inset(fig, fc, latlonbuffer=45., polarbuffer=5., width=1.0, + height=1.0, lowerleft=None, xbuffer=None, ybuffer=None, + maxlength=1.): + """ + Plots an inset map showing the location of a transect or polygon. Shapes + are plotted on a polar grid if they are entirely poleward of +/-50 deg. + latitude and with a lat/lon grid if not. + + Parameters + ---------- + fig : ``matplotlib.figure.Figure`` + A matplotlib figure to add the inset to + + fc : ``geometric_features.FeatureCollection`` + A collection of regions, transects and/or points to plot in the inset + + latlonbuffer : float, optional + The number of degrees lat/lon to use as a buffer around the shape(s) + to plot if a lat/lon plot is used. + + polarbuffer : float, optional + The number of degrees latitude to use as a buffer equatorward of the + shape(s) in polar plots + + width, height : float, optional + width and height in inches of the inset + + lowerleft : pair of floats, optional + the location of the lower left corner of the axis in inches, default + puts the inset in the upper right corner of ``fig``. + + xbuffer, ybuffer : float, optional + right and top buffers from the top-right corner (in inches) if + lowerleft is ``None``. + + maxlength : float or ``None``, optional + Any segments longer than maxlength will be subdivided in the plot to + ensure curvature. If ``None``, no subdivision is performed. 
+ + Returns + ------- + inset : ``matplotlib.axes.Axes`` + The new inset axis + """ + # Authors + # ------- + # Xylar Asay-Davis + + minLon, minLat, maxLon, maxLat = _get_bounds(fc) + + figsize = fig.get_size_inches() + width /= figsize[0] + height /= figsize[1] + if lowerleft is None: + if xbuffer is None: + xbuffer = 0.1*width + else: + xbuffer /= figsize[0] + if ybuffer is None: + ybuffer = xbuffer*figsize[0]/figsize[1] + else: + ybuffer /= figsize[1] + lowerleft = [1.0 - width - xbuffer, 1.0 - height - ybuffer] + else: + lowerleft = [lowerleft[0]/figsize[0], lowerleft[1]/figsize[1]] + bounds = [lowerleft[0], lowerleft[1], width, height] + + if maxLat <= -50: + # an Antarctic-focused map makes the most sense + inset = fig.add_axes(bounds, + projection=ccrs.SouthPolarStereo()) + extent = [-180., 180., -90., max(-65., maxLat+polarbuffer)] + _set_circular_boundary(inset) + xlocator = mticker.FixedLocator(numpy.linspace(-180., 180., 9)) + ylocator = mticker.FixedLocator(numpy.linspace(-90., -50., 9)) + elif minLat >= 50: + # an Arctic-focused map makes the most sense + inset = fig.add_axes(bounds, + projection=ccrs.NorthPolarStereo()) + extent = [-180, 180, min(65., minLat-polarbuffer), 90] + _set_circular_boundary(inset) + xlocator = mticker.FixedLocator(numpy.linspace(-180., 180., 9)) + ylocator = mticker.FixedLocator(numpy.linspace(50., 90., 9)) + else: + inset = fig.add_axes(bounds, + projection=ccrs.PlateCarree()) + extent = [max(-180., minLon-latlonbuffer), + min(180., maxLon+latlonbuffer), + max(-90., minLat-latlonbuffer), + min(90., maxLat+latlonbuffer)] + xlocator = None + ylocator = None + + # kind of like "top" justified -- graphics are toward the "north" end of + # the subplot + inset.set_anchor('N') + + inset.set_extent(extent, ccrs.PlateCarree()) + inset.add_feature(cartopy.feature.LAND, zorder=1) + inset.add_feature(cartopy.feature.OCEAN, zorder=0) + + gl = inset.gridlines(crs=ccrs.PlateCarree(), draw_labels=False, + linewidth=0.5, color='gray', 
alpha=0.5, + linestyle='--') + + if xlocator is not None: + gl.xlocator = xlocator + + if ylocator is not None: + gl.ylocator = ylocator + + for feature in fc.features: + geomtype = feature['geometry']['type'] + shape = shapely.geometry.shape(feature['geometry']) + if maxlength is not None: + shape = subdivide_geom(shape, shape.geom_type, maxlength) + if geomtype in ['Polygon', 'MultiPolygon']: + inset.add_geometries((shape,), crs=ccrs.PlateCarree(), + edgecolor='blue', facecolor='blue', alpha=0.4, + linewidth=1.) + elif geomtype in ['Point', 'MultiPoint']: + point_x, point_y = shape.xy + inset.scatter(point_x, point_y, s=9, color='k', + transform=ccrs.PlateCarree(), edgecolors='face') + else: + inset.add_geometries((shape,), crs=ccrs.PlateCarree(), + edgecolor='k', facecolor='none', alpha=1., + linewidth=1.) + # put a red point at the beginning and a blue point at the end + # of the transect to help show the orientation + begin = shape.coords[0] + end = shape.coords[-1] + inset.plot(begin[0], begin[1], color='r', marker='o', + markersize=3., transform=ccrs.PlateCarree()) + inset.plot(end[0], end[1], color='g', marker='o', + markersize=3., transform=ccrs.PlateCarree()) + + return inset
+ + + +def _set_circular_boundary(ax): + """Set the boundary of the given axis to be circular (for a polar plot)""" + + # Compute a circle in axes coordinates, which we can use as a boundary + # for the map. We can pan/zoom as much as we like - the boundary will be + # permanently circular. + theta = numpy.linspace(0, 2*numpy.pi, 100) + center = numpy.array([0.5, 0.5]) + radius = 0.5 + verts = numpy.vstack([numpy.sin(theta), numpy.cos(theta)]).T + circle = matplotlib.path.Path(verts * radius + center) + + ax.set_boundary(circle, transform=ax.transAxes) + + +def _get_bounds(fc): + """Compute the lon/lat bounding box for all transects and regions""" + + bounds = shapely.geometry.GeometryCollection() + for feature in fc.features: + shape = shapely.geometry.shape(feature['geometry']) + shape_bounds = shapely.geometry.box(*shape.bounds) + bounds = shapely.geometry.box(*bounds.union(shape_bounds).bounds) + + return bounds.bounds +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/plot/oned.html b/1.11.0rc1/_modules/mpas_analysis/shared/plot/oned.html new file mode 100644 index 000000000..00d59c074 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/plot/oned.html @@ -0,0 +1,325 @@ + + + + + + mpas_analysis.shared.plot.oned — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.plot.oned

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Plotting utilities, including routines for plotting:
+    * time series (and comparing with reference data sets)
+    * remapped horizontal fields (and comparing with reference data sets)
+    * vertical sections on native grid
+    * NINO34 time series and spectra
+"""
+# Authors
+# -------
+# Xylar Asay-Davis, Milena Veneziani, Luke Van Roekel, Greg Streletz
+
+import matplotlib
+import matplotlib.pyplot as plt
+
+from mpas_analysis.shared.plot.title import limit_title
+from mpas_analysis.shared.plot.save import savefig
+
+
+
+[docs] +def plot_1D(config, xArrays, fieldArrays, errArrays, + lineColors=None, lineStyles=None, markers=None, lineWidths=None, + legendText=None, title=None, xlabel=None, ylabel=None, + fileout='plot_1D.png', + figsize=(10, 4), dpi=None, + xLim=None, + yLim=None, + invertYAxis=False, + maxTitleLength=None, + titleFontSize=None, + axisFontSize=None, + defaultFontSize=None): + """ + Plots a 1D line plot with error bars if available. + + Parameters + ---------- + config : instance of ConfigParser + the configuration, containing a [plot] section with options that + control plotting + + xArrays : list of float arrays + x array (latitude, or any other x axis except time) + + fieldArrays : list of float arrays + y array (any field as function of x) + + errArrays : list of float arrays + error array (y errors) + + lineColors, lineStyles, markers, legendText : list of str, optional + control line color, style, marker, and corresponding legend + text. Default is black, solid line with no marker, and no legend. + + lineWidths : list of float, optional + control line width. Default is 1.0. + + title : str, optional + title of plot + + xlabel, ylabel : str, optional + label of x- and y-axis + + fileout : str, optional + the file name to be written + + figsize : tuple of float, optional + size of the figure in inches + + dpi : int, optional + the number of dots per inch of the figure, taken from section ``plot`` + option ``dpi`` in the config file by default + + xLim : float array, optional + x range of plot + + yLim : float array, optional + y range of plot + + invertYAxis : logical, optional + if True, invert Y axis + + maxTitleLength : int or None, optional + the maximum number of characters in the title, beyond which it is + truncated with a trailing ellipsis. The default is from the + ``maxTitleLength`` config option. 
+ + titleFontSize : int, optional + size of the title font + + axisFontSize : int, optional + size of the title font + + defaultFontSize : int, optional + the size of text other than the title + """ + # Authors + # ------- + # Mark Petersen, Milena Veneziani, Xylar Asay-Davis + + if maxTitleLength is None: + maxTitleLength = config.getint('plot', 'maxTitleLength') + + if defaultFontSize is None: + defaultFontSize = config.getint('plot', 'defaultFontSize') + matplotlib.rc('font', size=defaultFontSize) + + # set up figure + if dpi is None: + dpi = config.getint('plot', 'dpi') + plt.figure(figsize=figsize, dpi=dpi) + + plotLegend = False + for dsIndex in range(len(xArrays)): + xArray = xArrays[dsIndex] + fieldArray = fieldArrays[dsIndex] + errArray = errArrays[dsIndex] + if xArray is None: + continue + + if legendText is None: + label = None + else: + label = legendText[dsIndex] + if label is not None: + label = limit_title(label, maxTitleLength) + plotLegend = True + if lineColors is None: + color = 'k' + else: + color = lineColors[dsIndex] + if markers is None: + marker = None + else: + marker = markers[dsIndex] + if lineStyles is None: + linestyle = '-' + else: + linestyle = lineStyles[dsIndex] + if lineWidths is None: + linewidth = 1. 
+ else: + linewidth = lineWidths[dsIndex] + + plt.plot(xArray, fieldArray, color=color, linestyle=linestyle, + marker=marker, linewidth=linewidth, label=label) + if errArray is not None: + plt.fill_between(xArray, fieldArray, fieldArray + errArray, + facecolor=color, alpha=0.2) + plt.fill_between(xArray, fieldArray, fieldArray - errArray, + facecolor=color, alpha=0.2) + plt.grid() + plt.axhline(0.0, linestyle='-', color='k') # horizontal lines + if plotLegend and len(xArrays) > 1: + plt.legend() + + if titleFontSize is None: + titleFontSize = config.get('plot', 'titleFontSize') + if axisFontSize is None: + axisFontSize = config.get('plot', 'axisFontSize') + axis_font = {'size': axisFontSize} + title_font = {'size': titleFontSize, + 'color': config.get('plot', 'titleFontColor'), + 'weight': config.get('plot', 'titleFontWeight')} + if title is not None: + title = limit_title(title, max_title_length=maxTitleLength) + plt.title(title, **title_font) + if xlabel is not None: + plt.xlabel(xlabel, **axis_font) + if ylabel is not None: + plt.ylabel(ylabel, **axis_font) + + if invertYAxis: + plt.gca().invert_yaxis() + + if xLim: + plt.xlim(xLim) + if yLim: + plt.ylim(yLim) + + if fileout is not None: + savefig(fileout, config) + + plt.close()
+ + +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/plot/plot_climatology_map_subtask.html b/1.11.0rc1/_modules/mpas_analysis/shared/plot/plot_climatology_map_subtask.html new file mode 100644 index 000000000..02a749158 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/plot/plot_climatology_map_subtask.html @@ -0,0 +1,822 @@ + + + + + + mpas_analysis.shared.plot.plot_climatology_map_subtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.plot.plot_climatology_map_subtask

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import xarray as xr
+import numpy as np
+from string import capwords
+
+from mpas_analysis.shared import AnalysisTask
+
+from mpas_analysis.shared.plot import plot_global_comparison, \
+    plot_projection_comparison
+
+from mpas_analysis.shared.html import write_image_xml
+
+from mpas_analysis.shared.climatology import \
+    get_remapped_mpas_climatology_file_name
+from mpas_analysis.shared.climatology.comparison_descriptors import \
+    get_comparison_descriptor
+
+
+
+[docs] +class PlotClimatologyMapSubtask(AnalysisTask): + """ + An analysis task for plotting 2D model fields against observations. + + Attributes + ---------- + season : str + A season (key in ``shared.constants.monthDictionary``) to be + plotted. + + comparisonGridName : str + The name of the comparison grid to plot. + + remapMpasClimatologySubtask : mpas_analysis.shared.climatology.RemapMpasClimatologySubtask + The subtask for remapping the MPAS climatology that this subtask + will plot + + remapObsClimatologySubtask : mpas_analysis.shared.climatology.RemapObservedClimatologySubtask + The subtask for remapping the observational climatology that this + subtask will plot + + secondRemapMpasClimatologySubtask : mpas_analysis.shared.climatology.RemapMpasClimatologySubtask + A second subtask for remapping another MPAS climatology to plot + in the second panel and compare with in the third panel + + removeMean : bool, optional + If True, a common mask for the model and reference data sets is + computed (where both are valid) and the mean over that mask is + subtracted from both the model and reference results. This is + useful for data sets where the desire is to compare the spatial + pattern but the mean offset is not meaningful (e.g. 
SSH) + + outFileLabel : str + The prefix on each plot and associated XML file + + fieldNameInTitle : str + The name of the field being plotted, as used in the plot title + + mpasFieldName : str + The name of the variable in the MPAS timeSeriesStatsMonthly output + + diffTitleLabel : str, optional + the title of the difference subplot + + unitsLabel : str + the units of the plotted field, to be displayed on color bars + + imageCaption : str + the caption when mousing over the plot or displaying it full + screen + + galleryGroup : str + the name of the group of galleries in which this plot belongs + + groupSubtitle : str or None + the subtitle of the group in which this plot belongs (or blank + if none) + + groupLink : str + a short name (with no spaces) for the link to the gallery group + + galleryName : str or None + the name of the gallery in which this plot belongs + + depth : {None, float, 'top', 'bot'} + Depth at which to perform the comparison, 'top' for the surface + 'bot' for the base + + configSectionName : str + the name of the section where the color map and range is defined + + maskMinThreshold : float or None + a value below which the field is mask out in plots + + maskMaxThreshold : float or None + a value above which the field is mask out in plots + + extend : {'neither', 'both', 'min', 'max'} + Determines the ``contourf``-coloring of values that are outside the + range of the levels provided if using an indexed colormap. + """ + +
+[docs] + def __init__(self, parentTask, season, comparisonGridName, + remapMpasClimatologySubtask, remapObsClimatologySubtask=None, + secondRemapMpasClimatologySubtask=None, controlConfig=None, + depth=None, removeMean=False, subtaskName=None): + """ + Construct one analysis subtask for each plot (i.e. each season and + comparison grid) and a subtask for computing climatologies. + + Parameters + ---------- + parentTask : mpas_analysis.shared.AnalysisTask + The parent (main) task for this subtask + + season : str + A season (key in ``shared.constants.monthDictionary``) to be + plotted. + + comparisonGridName : str + The name of the comparison grid to plot. + + remapMpasClimatologySubtask : mpas_analysis.shared.climatology.RemapMpasClimatologySubtask + The subtask for remapping the MPAS climatology that this subtask + will plot + + remapObsClimatologySubtask : mpas_analysis.shared.climatology.RemapObservedClimatologySubtask, optional + The subtask for remapping the observational climatology that this + subtask will plot + + secondRemapMpasClimatologySubtask : mpas_analysis.shared.climatology.RemapMpasClimatologySubtask, optional + A second subtask for remapping another MPAS climatology to plot + in the second panel and compare with in the third panel + + controlConfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + + depth : {float, 'top', 'bot'}, optional + Depth the data is being plotted, 'top' for the sea surface + 'bot' for the sea floor + + removeMean : bool, optional + If True, a common mask for the model and reference data sets is + computed (where both are valid) and the mean over that mask is + subtracted from both the model and reference results. This is + useful for data sets where the desire is to compare the spatial + pattern but the mean offset is not meaningful (e.g. SSH) + + subtaskName : str, optional + The name of the subtask. 
If not specified, it is + ``plot<season>_<comparisonGridName>`` with a suffix indicating the + depth being sliced (if any) + """ + + self.season = season + self.depth = depth + self.comparisonGridName = comparisonGridName + self.remapMpasClimatologySubtask = remapMpasClimatologySubtask + self.remapObsClimatologySubtask = remapObsClimatologySubtask + self.secondRemapMpasClimatologySubtask = \ + secondRemapMpasClimatologySubtask + self.controlConfig = controlConfig + self.removeMean = removeMean + + if depth is None: + self.depthSuffix = '' + else: + self.depthSuffix = f'depth_{depth}' + + if subtaskName is None: + subtaskName = f'plot{season}_{comparisonGridName}' + if depth is not None: + subtaskName = f'{subtaskName}_{self.depthSuffix}' + + config = parentTask.config + taskName = parentTask.taskName + tags = parentTask.tags + + # call the constructor from the base class (AnalysisTask) + super(PlotClimatologyMapSubtask, self).__init__( + config=config, taskName=taskName, subtaskName=subtaskName, + componentName=parentTask.componentName, tags=tags) + + # this task should not run until the remapping subtasks are done, since + # it relies on data from those subtasks + self.run_after(remapMpasClimatologySubtask) + if remapObsClimatologySubtask is not None: + self.run_after(remapObsClimatologySubtask) + if secondRemapMpasClimatologySubtask is not None: + self.run_after(secondRemapMpasClimatologySubtask) + + self.outFileLabel = None + self.fieldNameInTitle = None + self.mpasFieldName = None + self.refFieldName = None + self.refTitleLabel = None + self.diffTitleLabel = None + self.unitsLabel = None + self.imageCaption = None + self.galleryGroup = None + self.groupSubtitle = None + self.groupLink = None + self.galleryName = None + self.configSectionName = None + self.thumbnailDescription = None + self.startYear = None + self.endYear = None + self.startDate = None + self.endDate = None + self.filePrefix = None + self.maskMinThreshold = None + self.maskMaxThreshold = None + 
self.extend = 'both'
+ + +
+[docs] + def set_plot_info(self, outFileLabel, fieldNameInTitle, mpasFieldName, + refFieldName, refTitleLabel, unitsLabel, + imageCaption, galleryGroup, groupSubtitle, groupLink, + galleryName, diffTitleLabel='Model - Observations', + configSectionName=None, maskMinThreshold=None, + maskMaxThreshold=None, extend=None): + """ + Store attributes related to plots, plot file names and HTML output. + + Parameters + ---------- + outFileLabel : str + The prefix on each plot and associated XML file + + fieldNameInTitle : str + The name of the field being plotted, as used in the plot title + + mpasFieldName : str + The name of the variable in the MPAS timeSeriesStatsMonthly output + + refFieldName : str + The name of the variable to use from the observations or reference + file + + refTitleLabel : str + the title of the observations or reference subplot + + unitsLabel : str + the units of the plotted field, to be displayed on color bars + + imageCaption : str + the caption when mousing over the plot or displaying it full + screen + + galleryGroup : str + the name of the group of galleries in which this plot belongs + + groupSubtitle : str or None + the subtitle of the group in which this plot belongs (or blank + if none) + + groupLink : str + a short name (with no spaces) for the link to the gallery group + + galleryName : str or None + the name of the gallery in which this plot belongs + + diffTitleLabel : str, optional + the title of the difference subplot + + configSectionName : str or None, optional + the name of the section where the color map and range is defined, + default is the name of the task + + maskMinThreshold : float or None, optional + a value below which the field is mask out in plots + + maskMaxThreshold : float or None, optional + a value above which the field is mask out in plots + + extend : {'neither', 'both', 'min', 'max'}, optional + Determines the ``contourf``-coloring of values that are outside the + range of the levels provided if using an 
indexed colormap. + """ + + self.outFileLabel = outFileLabel + self.fieldNameInTitle = fieldNameInTitle + self.mpasFieldName = mpasFieldName + self.refFieldName = refFieldName + self.refTitleLabel = refTitleLabel + self.diffTitleLabel = diffTitleLabel + self.unitsLabel = unitsLabel + + # xml/html related variables + self.imageCaption = imageCaption + self.galleryGroup = galleryGroup + self.groupSubtitle = groupSubtitle + self.groupLink = groupLink + self.galleryName = galleryName + self.maskMinThreshold = maskMinThreshold + self.maskMaxThreshold = maskMaxThreshold + + if configSectionName is None: + self.configSectionName = self.taskName + else: + self.configSectionName = configSectionName + + season = self.season + depth = self.depth + if depth is None: + self.fieldNameInTitle = fieldNameInTitle + self.thumbnailDescription = season + elif depth == 'top': + self.fieldNameInTitle = f'Sea Surface {fieldNameInTitle}' + self.thumbnailDescription = f'{season} surface' + elif depth == 'bot': + self.fieldNameInTitle = f'Sea Floor {fieldNameInTitle}' + self.thumbnailDescription = f'{season} floor' + else: + self.fieldNameInTitle = f'{fieldNameInTitle} at z={depth} m' + self.thumbnailDescription = f'{season} z={depth} m' + + if extend is not None: + self.extend = extend
+ + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(PlotClimatologyMapSubtask, self).setup_and_check() + + config = self.config + self.startYear = config.getint('climatology', 'startYear') + self.endYear = config.getint('climatology', 'endYear') + self.startDate = config.get('climatology', 'startDate') + self.endDate = config.get('climatology', 'endDate') + + mainRunName = config.get('runs', 'mainRunName') + + self.xmlFileNames = [] + + prefixPieces = [self.outFileLabel] + if self.comparisonGridName != 'latlon': + prefixPieces.append(self.comparisonGridName) + prefixPieces.append(mainRunName) + if self.depth is not None: + prefixPieces.append(self.depthSuffix) + years = f'years{self.startYear:04d}-{self.endYear:04d}' + prefixPieces.extend([self.season, years]) + + self.filePrefix = '_'.join(prefixPieces) + + self.xmlFileNames.append( + f'{self.plotsDirectory}/{self.filePrefix}.xml') + + def run_task(self): + """ + Plots a comparison of E3SM/MPAS output to SST/TEMP, SSS/SALT or MLD + observations or a control run + """ + + season = self.season + depth = self.depth + comparisonGridName = self.comparisonGridName + self.logger.info( + f"\nPlotting 2-d maps of {self.fieldNameInTitle} climatologies " + f"for {season} on the {comparisonGridName} grid...") + + # first read the model climatology + remappedFileName = \ + self.remapMpasClimatologySubtask.get_remapped_file_name( + season=season, comparisonGridName=comparisonGridName) + + remappedModelClimatology = xr.open_dataset(remappedFileName) + + if depth is not None: + if str(depth) not in remappedModelClimatology.depthSlice.values: + raise KeyError(f'The climatology you are 
attempting to ' + f'perform depth slices of was originally ' + f'created without depth {depth}. You will need ' + f'to delete and regenerate the climatology') + + remappedModelClimatology = remappedModelClimatology.sel( + depthSlice=str(depth), drop=True) + + # now the observations or control run + if self.remapObsClimatologySubtask is not None: + remappedFileName = self.remapObsClimatologySubtask.get_file_name( + stage='remapped', season=season, + comparisonGridName=comparisonGridName) + + remappedRefClimatology = xr.open_dataset(remappedFileName) + elif self.secondRemapMpasClimatologySubtask is not None: + remappedFileName = \ + self.secondRemapMpasClimatologySubtask.get_remapped_file_name( + season=season, comparisonGridName=comparisonGridName) + remappedRefClimatology = xr.open_dataset(remappedFileName) + elif self.controlConfig is not None: + climatologyName = self.remapMpasClimatologySubtask.climatologyName + remappedFileName = \ + get_remapped_mpas_climatology_file_name( + self.controlConfig, season=season, + componentName=self.componentName, + climatologyName=climatologyName, + comparisonGridName=comparisonGridName, + op=self.remapMpasClimatologySubtask.op) + remappedRefClimatology = xr.open_dataset(remappedFileName) + controlStartYear = self.controlConfig.getint('climatology', + 'startYear') + controlEndYear = self.controlConfig.getint('climatology', + 'endYear') + if controlStartYear != self.startYear or \ + controlEndYear != self.endYear: + self.refTitleLabel = \ + f'{self.refTitleLabel}\n' \ + f'(years {controlStartYear:04d}-{controlEndYear:04d})' + + else: + remappedRefClimatology = None + + if remappedRefClimatology is not None and depth is not None: + depthIndex = -1 + for index, depthSlice in enumerate( + remappedRefClimatology.depthSlice.values): + try: + depthSlice = depthSlice.decode("utf-8") + except AttributeError: + pass + if depthSlice == str(depth): + depthIndex = index + if depthIndex == -1: + raise KeyError(f'The climatology you are 
attempting to ' + f'perform depth slices of was originally ' + f'created without depth {depth}. You will need ' + f'to delete and regenerate the climatology') + + remappedRefClimatology = remappedRefClimatology.isel( + depthSlice=depthIndex, drop=True) + + if self.removeMean: + if remappedRefClimatology is None: + remappedModelClimatology[self.mpasFieldName] = \ + remappedModelClimatology[self.mpasFieldName] - \ + remappedModelClimatology[self.mpasFieldName].mean() + else: + masked = remappedModelClimatology[self.mpasFieldName].where( + remappedRefClimatology[self.refFieldName].notnull()) + remappedModelClimatology[self.mpasFieldName] = \ + remappedModelClimatology[self.mpasFieldName] - \ + masked.mean() + + masked = remappedRefClimatology[self.refFieldName].where( + remappedModelClimatology[self.mpasFieldName].notnull()) + remappedRefClimatology[self.refFieldName] = \ + remappedRefClimatology[self.refFieldName] - \ + masked.mean() + + if self.componentName == 'ocean': + componentName = 'Ocean' + componentSubdirectory = 'ocean' + elif self.componentName == 'seaIce': + componentName = 'Sea Ice' + componentSubdirectory = 'sea_ice' + else: + raise ValueError(f'Unexpected component: {self.componentName}') + + if self.comparisonGridName == 'latlon': + self._plot_latlon(remappedModelClimatology, remappedRefClimatology, + componentName, componentSubdirectory) + else: + self._plot_projection(remappedModelClimatology, + remappedRefClimatology, + componentName, componentSubdirectory) + + def _plot_latlon(self, remappedModelClimatology, remappedRefClimatology, + componentName, componentSubdirectory): + """ plotting a global lat-lon data set """ + + season = self.season + config = self.config + configSectionName = self.configSectionName + + mainRunName = config.get('runs', 'mainRunName') + + modelOutput = _nans_to_numpy_mask( + remappedModelClimatology[self.mpasFieldName].values) + + lon = remappedModelClimatology['lon'].values + lat = remappedModelClimatology['lat'].values + 
+ lonTarg, latTarg = np.meshgrid(lon, lat) + + if remappedRefClimatology is None: + refOutput = None + bias = None + else: + refOutput = _nans_to_numpy_mask( + remappedRefClimatology[self.refFieldName].values) + + bias = modelOutput - refOutput + + # mask with thresholds only after taking the diff + refOutput = self._mask_with_thresholds(refOutput) + + modelOutput = self._mask_with_thresholds(modelOutput) + + if config.has_option(configSectionName, 'titleFontSize'): + titleFontSize = config.getint(configSectionName, 'titleFontSize') + else: + titleFontSize = None + + if config.has_option(configSectionName, 'defaultFontSize'): + defaultFontSize = config.getint(configSectionName, + 'defaultFontSize') + else: + defaultFontSize = None + + filePrefix = self.filePrefix + outFileName = f'{self.plotsDirectory}/{filePrefix}.png' + title = f'{self.fieldNameInTitle} ({season}, years ' \ + f'{self.startYear:04d}-{self.endYear:04d})' + plot_global_comparison(config, + lonTarg, + latTarg, + modelOutput, + refOutput, + bias, + configSectionName, + fileout=outFileName, + title=title, + modelTitle=mainRunName, + refTitle=self.refTitleLabel, + diffTitle=self.diffTitleLabel, + cbarlabel=self.unitsLabel, + titleFontSize=titleFontSize, + defaultFontSize=defaultFontSize, + extend=self.extend) + + caption = f'{season} {self.imageCaption}' + write_image_xml( + config, + filePrefix, + componentName=componentName, + componentSubdirectory=componentSubdirectory, + galleryGroup=f'Global {self.galleryGroup}', + groupSubtitle=self.groupSubtitle, + groupLink=f'global_{self.groupLink}', + gallery=self.galleryName, + thumbnailDescription=self.thumbnailDescription, + imageDescription=caption, + imageCaption=caption) + + def _plot_projection(self, remappedModelClimatology, + remappedRefClimatology, + componentName, componentSubdirectory): + """ plotting a dataset on a projection grid """ + + season = self.season + comparisonGridName = self.comparisonGridName + config = self.config + configSectionName 
= self.configSectionName + + mainRunName = config.get('runs', 'mainRunName') + + validMask = remappedModelClimatology['validMask'].values + landMask = np.ma.masked_array( + np.ones(validMask.shape), + mask=np.logical_not(np.isnan(validMask))) + + modelOutput = _nans_to_numpy_mask( + remappedModelClimatology[self.mpasFieldName].values) + + if remappedRefClimatology is None: + refOutput = None + bias = None + else: + refOutput = _nans_to_numpy_mask( + remappedRefClimatology[self.refFieldName].values) + + bias = modelOutput - refOutput + + # mask with maskValue only after taking the diff + refOutput = self._mask_with_thresholds(refOutput) + + modelOutput = self._mask_with_thresholds(modelOutput) + + comparisonDescriptor = get_comparison_descriptor( + config, comparisonGridName) + x = comparisonDescriptor.xCorner + y = comparisonDescriptor.yCorner + + aspectRatio = (x[-1] - x[0])/(y[-1] - y[0]) + + # if the plots are even a bit wider than they are tall, make them + # vertical + vertical = aspectRatio > 1.2 + + filePrefix = self.filePrefix + outFileName = f'{self.plotsDirectory}/{filePrefix}.png' + title = f'{self.fieldNameInTitle} ({season}, years ' \ + f'{self.startYear:04d}-{self.endYear:04d})' + + if config.has_option(configSectionName, 'titleFontSize'): + titleFontSize = config.getint(configSectionName, 'titleFontSize') + else: + titleFontSize = None + + if config.has_option(configSectionName, 'defaultFontSize'): + defaultFontSize = config.getint(configSectionName, + 'defaultFontSize') + else: + defaultFontSize = None + + if config.has_option(configSectionName, 'cartopyGridFontSize'): + cartopyGridFontSize = config.getint(configSectionName, + 'cartopyGridFontSize') + else: + cartopyGridFontSize = None + + plot_projection_comparison( + config, + x, + y, + landMask, + modelOutput, + refOutput, + bias, + fileout=outFileName, + colorMapSectionName=configSectionName, + projectionName=comparisonGridName, + title=title, + modelTitle=mainRunName, + 
refTitle=self.refTitleLabel, + diffTitle=self.diffTitleLabel, + cbarlabel=self.unitsLabel, + titleFontSize=titleFontSize, + cartopyGridFontSize=cartopyGridFontSize, + defaultFontSize=defaultFontSize, + vertical=vertical, + extend=self.extend) + + upperGridName = capwords(comparisonGridName.replace('_', ' ')) + caption = f'{season} {self.imageCaption}' + write_image_xml( + config, + filePrefix, + componentName=componentName, + componentSubdirectory=componentSubdirectory, + galleryGroup=f'{upperGridName} {self.galleryGroup}', + groupSubtitle=self.groupSubtitle, + groupLink=f'{comparisonGridName}_{self.groupLink}', + gallery=self.galleryName, + thumbnailDescription=self.thumbnailDescription, + imageDescription=caption, + imageCaption=caption) + + def _mask_with_thresholds(self, field): + if self.maskMinThreshold is not None or \ + self.maskMaxThreshold is not None: + mask = field.mask + if self.maskMinThreshold is not None: + mask = np.logical_or(mask, field <= self.maskMinThreshold) + if self.maskMaxThreshold is not None: + mask = np.logical_or(mask, field >= self.maskMaxThreshold) + field = np.ma.masked_array(field, mask) + + return field
+ + + +def _nans_to_numpy_mask(field): + """ + Convert a numpy array with NaNs to a masked numpy array + """ + field = np.ma.masked_array( + field, np.isnan(field)) + return field +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/plot/ticks.html b/1.11.0rc1/_modules/mpas_analysis/shared/plot/ticks.html new file mode 100644 index 000000000..17ec8292a --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/plot/ticks.html @@ -0,0 +1,222 @@ + + + + + + mpas_analysis.shared.plot.ticks — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.plot.ticks

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+# Authors
+# -------
+# Xylar Asay-Davis, Milena Veneziani, Luke Van Roekel, Greg Streletz
+
+import matplotlib.pyplot as plt
+from matplotlib.ticker import FuncFormatter, FixedLocator
+import numpy as np
+from functools import partial
+
+from mpas_analysis.shared.timekeeping.utility import days_to_datetime, \
+    date_to_days
+
+
+
+[docs] +def plot_xtick_format(calendar, minDays, maxDays, maxXTicks, yearStride=None): + """ + Formats tick labels and positions along the x-axis for time series + / index plots + + Parameters + ---------- + calendar : str + the calendar to use for formatting the time axis + + minDays : float + start time for labels + + maxDays : float + end time for labels + + maxXTicks : int + the maximum number of tick marks to display, used to sub-sample ticks + if there are too many + + yearStride : int, optional + the number of years to skip over between ticks + """ + # Authors + # ------- + # Xylar Asay-Davis + + ax = plt.gca() + + start = days_to_datetime(np.amin(minDays), calendar=calendar) + end = days_to_datetime(np.amax(maxDays), calendar=calendar) + + if yearStride is not None or end.year - start.year > maxXTicks / 2: + if yearStride is None: + yearStride = 1 + else: + maxXTicks = None + major = [date_to_days(year=year, calendar=calendar) + for year in np.arange(start.year, end.year + 1, yearStride)] + formatterFun = partial(_date_tick, calendar=calendar, + includeMonth=False) + else: + # add ticks for months + major = [] + for year in range(start.year, end.year + 1): + for month in range(1, 13): + major.append(date_to_days(year=year, month=month, + calendar=calendar)) + formatterFun = partial(_date_tick, calendar=calendar, + includeMonth=True) + + ax.xaxis.set_major_locator(FixedLocator(major, maxXTicks)) + ax.xaxis.set_major_formatter(FuncFormatter(formatterFun)) + + plt.setp(ax.get_xticklabels(), rotation=30) + + plt.autoscale(enable=True, axis='x', tight=True)
+ + + +def _date_tick(days, pos, calendar='gregorian', includeMonth=True): + days = np.maximum(days, 0.) + date = days_to_datetime(days, calendar) + if includeMonth: + return '{:04d}-{:02d}'.format(date.year, date.month) + else: + return '{:04d}'.format(date.year) + +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/plot/time_series.html b/1.11.0rc1/_modules/mpas_analysis/shared/plot/time_series.html new file mode 100644 index 000000000..b7fdee2ee --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/plot/time_series.html @@ -0,0 +1,540 @@ + + + + + + mpas_analysis.shared.plot.time_series — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.plot.time_series

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Functions for plotting time series (and comparing with reference data sets)
+"""
+# Authors
+# -------
+# Xylar Asay-Davis, Milena Veneziani, Luke Van Roekel, Greg Streletz
+
+import matplotlib
+import matplotlib.pyplot as plt
+import xarray as xr
+import pandas as pd
+import numpy as np
+
+from mpas_analysis.shared.timekeeping.utility import date_to_days
+
+from mpas_analysis.shared.constants import constants
+
+from mpas_analysis.shared.plot.ticks import plot_xtick_format
+from mpas_analysis.shared.plot.title import limit_title
+
+
+
+[docs] +def timeseries_analysis_plot(config, dsvalues, calendar, title, xlabel, ylabel, + movingAveragePoints=None, lineColors=None, + lineStyles=None, markers=None, lineWidths=None, + legendText=None, maxPoints=None, + titleFontSize=None, defaultFontSize=None, + figsize=(12, 6), dpi=None, + firstYearXTicks=None, yearStrideXTicks=None, + maxXTicks=20, obsMean=None, obsUncertainty=None, + obsLegend=None, legendLocation='lower left', + maxTitleLength=None): + """ + Plots the list of time series data sets. + + Parameters + ---------- + config : instance of ConfigParser + the configuration, containing a [plot] section with options that + control plotting + + dsvalues : list of xarray DataSets + the data set(s) to be plotted + + title : str + the title of the plot + + xlabel, ylabel : str + axis labels + + calendar : str + the calendar to use for formatting the time axis + + movingAveragePoints : int, optional + the number of time points over which to perform a moving average + + lineColors, lineStyles, markers, legendText : list of str, optional + control line color, style, marker, and corresponding legend + text. Default is black, solid line with no marker, and no legend. + + lineWidths : list of float, optional + control line width. Default is 1.0. + + maxPoints : list of {None, int}, optional + the approximate maximum number of time points to use in a time series. + This can be helpful for reducing the number of symbols plotted if + plotting with markers. Otherwise the markers become indistinguishable + from each other. + + titleFontSize : int, optional + the size of the title font + + defaultFontSize : int, optional + the size of text other than the title + + figsize : tuple of float, optional + the size of the figure in inches + + dpi : int, optional + the number of dots per inch of the figure, taken from section ``plot`` + option ``dpi`` in the config file by default + + firstYearXTicks : int, optional + The year of the first tick on the x axis. 
By default, the first time + entry is the first tick. + + yearStrideXTicks : int, optional + The number of years between x ticks. By default, the stride is chosen + automatically to have ``maxXTicks`` tick marks or fewer. + + maxXTicks : int, optional + the maximum number of tick marks that will be allowed along the x axis. + This may need to be adjusted depending on the figure size and aspect + ratio. + + obsMean, obsUncertainty : list of float, optional + Mean values and uncertainties for observations to be plotted as error + bars. The two lists must have the same number of elements. + + obsLegend : list of str, optional + The label in the legend for each element in ``obsMean`` (and + ``obsUncertainty``) + + legendLocation : str, optional + The location of the legend (see ``pyplot.legend()`` for details) + + maxTitleLength : int or None, optional + the maximum number of characters in the title, beyond which it is + truncated with a trailing ellipsis. The default is from the + ``maxTitleLength`` config option. 
+ + Returns + ------- + fig : ``matplotlib.figure.Figure`` + The resulting figure + """ + # Authors + # ------- + # Xylar Asay-Davis, Milena Veneziani, Stephen Price + + if maxTitleLength is None: + maxTitleLength = config.getint('plot', 'maxTitleLength') + + if defaultFontSize is None: + defaultFontSize = config.getint('plot', 'defaultFontSize') + matplotlib.rc('font', size=defaultFontSize) + + if dpi is None: + dpi = config.getint('plot', 'dpi') + fig = plt.figure(figsize=figsize, dpi=dpi) + + minDays = [] + maxDays = [] + labelCount = 0 + for dsIndex in range(len(dsvalues)): + dsvalue = dsvalues[dsIndex] + if dsvalue is None: + continue + if movingAveragePoints == 1 or movingAveragePoints is None: + mean = dsvalue + else: + mean = pd.Series.rolling(dsvalue.to_pandas(), movingAveragePoints, + center=True).mean() + mean = xr.DataArray.from_series(mean) + minDays.append(mean.Time.min()) + maxDays.append(mean.Time.max()) + + if maxPoints is not None and maxPoints[dsIndex] is not None: + nTime = mean.sizes['Time'] + if maxPoints[dsIndex] < nTime: + stride = int(round(nTime / float(maxPoints[dsIndex]))) + mean = mean.isel(Time=slice(0, None, stride)) + + if legendText is None: + label = None + else: + label = legendText[dsIndex] + if label is not None: + label = limit_title(label, maxTitleLength) + labelCount += 1 + if lineColors is None: + color = 'k' + else: + color = lineColors[dsIndex] + if lineStyles is None: + linestyle = '-' + else: + linestyle = lineStyles[dsIndex] + if markers is None: + marker = None + else: + marker = markers[dsIndex] + if lineWidths is None: + linewidth = 1. 
+ else: + linewidth = lineWidths[dsIndex] + + plt.plot(mean['Time'].values, mean.values, color=color, + linestyle=linestyle, marker=marker, linewidth=linewidth, + label=label) + + if obsMean is not None: + obsCount = len(obsMean) + assert(len(obsUncertainty) == obsCount) + + # space the observations along the time line, leaving gaps at either + # end + start = np.amin(minDays) + end = np.amax(maxDays) + obsTimes = np.linspace(start, end, obsCount + 2)[1:-1] + obsSymbols = ['o', '^', 's', 'D', '*'] + obsColors = [config.get('timeSeries', 'obsColor{}'.format(index+1)) + for index in range(5)] + for iObs in range(obsCount): + if obsMean[iObs] is not None: + symbol = obsSymbols[np.mod(iObs, len(obsSymbols))] + color = obsColors[np.mod(iObs, len(obsColors))] + plt.errorbar(obsTimes[iObs], + obsMean[iObs], + yerr=obsUncertainty[iObs], + fmt=symbol, + color=color, + ecolor=color, + capsize=0, + label=obsLegend[iObs]) + # plot a box around the error bar to make it more visible + boxHalfWidth = 0.01 * (end - start) + boxHalfHeight = obsUncertainty[iObs] + boxX = obsTimes[iObs] + \ + boxHalfWidth * np.array([-1, 1, 1, -1, -1]) + boxY = obsMean[iObs] + \ + boxHalfHeight * np.array([-1, -1, 1, 1, -1]) + + plt.plot(boxX, boxY, '-', color=color, linewidth=3) + labelCount += 1 + + if labelCount > 1: + plt.legend(loc=legendLocation) + + ax = plt.gca() + + if titleFontSize is None: + titleFontSize = config.get('plot', 'titleFontSize') + axis_font = {'size': config.get('plot', 'axisFontSize')} + title_font = {'size': titleFontSize, + 'color': config.get('plot', 'titleFontColor'), + 'weight': config.get('plot', 'titleFontWeight')} + + if firstYearXTicks is not None: + minDays = date_to_days(year=firstYearXTicks, calendar=calendar) + + plot_xtick_format(calendar, minDays, maxDays, maxXTicks, + yearStride=yearStrideXTicks) + + # Add a y=0 line if y ranges between positive and negative values + yaxLimits = ax.get_ylim() + if yaxLimits[0] * yaxLimits[1] < 0: + x = ax.get_xlim() + 
plt.plot(x, np.zeros(np.size(x)), 'k-', linewidth=1.2, zorder=1) + + if title is not None: + title = limit_title(title, maxTitleLength) + plt.title(title, **title_font) + if xlabel is not None: + plt.xlabel(xlabel, **axis_font) + if ylabel is not None: + plt.ylabel(ylabel, **axis_font) + + return fig
+ + + +
+[docs] +def timeseries_analysis_plot_polar(config, dsvalues, title, + movingAveragePoints=None, lineColors=None, + lineStyles=None, markers=None, + lineWidths=None, legendText=None, + titleFontSize=None, defaultFontSize=None, + figsize=(15, 6), dpi=None, + maxTitleLength=None): + """ + Plots the list of time series data sets on a polar plot. + + Parameters + ---------- + config : instance of ConfigParser + the configuration, containing a [plot] section with options that + control plotting + + dsvalues : list of xarray DataSets + the data set(s) to be plotted + + movingAveragePoints : int + the numer of time points over which to perform a moving average + + title : str + the title of the plot + + lineColors, lineStyles, markers, legendText : list of str, optional + control line color, style, marker, and corresponding legend + text. Default is black, solid line with no marker, and no legend. + + lineWidths : list of float, optional + control line width. Default is 1.0. + + titleFontSize : int, optional + the size of the title font + + defaultFontSize : int, optional + the size of text other than the title + + figsize : tuple of float, optional + the size of the figure in inches + + dpi : int, optional + the number of dots per inch of the figure, taken from section ``plot`` + option ``dpi`` in the config file by default + + maxTitleLength : int or None, optional + the maximum number of characters in the title, beyond which it is + truncated with a trailing ellipsis. The default is from the + ``maxTitleLength`` config option. + + Returns + ------- + fig : ``matplotlib.figure.Figure`` + The resulting figure + """ + # Authors + # ------- + # Adrian K. 
Turner, Xylar Asay-Davis + + if maxTitleLength is None: + maxTitleLength = config.getint('plot', 'maxTitleLength') + + if defaultFontSize is None: + defaultFontSize = config.getint('plot', 'defaultFontSize') + matplotlib.rc('font', size=defaultFontSize) + + if dpi is None: + dpi = config.getint('plot', 'dpi') + fig = plt.figure(figsize=figsize, dpi=dpi) + + minDays = [] + maxDays = [] + labelCount = 0 + for dsIndex in range(len(dsvalues)): + dsvalue = dsvalues[dsIndex] + if dsvalue is None: + continue + mean = pd.Series.rolling(dsvalue.to_pandas(), movingAveragePoints, + center=True).mean() + mean = xr.DataArray.from_series(mean) + minDays.append(mean.Time.min()) + maxDays.append(mean.Time.max()) + + if legendText is None: + label = None + else: + label = legendText[dsIndex] + if label is not None: + label = limit_title(label, maxTitleLength) + labelCount += 1 + if lineColors is None: + color = 'k' + else: + color = lineColors[dsIndex] + if lineStyles is None: + linestyle = '-' + else: + linestyle = lineStyles[dsIndex] + if markers is None: + marker = None + else: + marker = markers[dsIndex] + if lineWidths is None: + linewidth = 1. 
+ else: + linewidth = lineWidths[dsIndex] + + plt.polar((mean['Time'] / 365.0) * np.pi * 2.0, mean, color=color, + linestyle=linestyle, marker=marker, linewidth=linewidth, + label=label) + + if labelCount > 1: + plt.legend(loc='lower left') + + ax = plt.gca() + + # set azimuthal axis formatting + majorTickLocs = np.zeros(12) + minorTickLocs = np.zeros(12) + majorTickLocs[0] = 0.0 + minorTickLocs[0] = (constants.daysInMonth[0] * np.pi) / 365.0 + for month in range(1, 12): + majorTickLocs[month] = majorTickLocs[month - 1] + \ + ((constants.daysInMonth[month - 1] * np.pi * 2.0) / 365.0) + minorTickLocs[month] = minorTickLocs[month - 1] + \ + (((constants.daysInMonth[month - 1] + + constants.daysInMonth[month]) * np.pi) / 365.0) + + ax.set_xticks(majorTickLocs) + ax.set_xticklabels([]) + + ax.set_xticks(minorTickLocs, minor=True) + ax.set_xticklabels(constants.abrevMonthNames, minor=True) + + if titleFontSize is None: + title = limit_title(title, maxTitleLength) + titleFontSize = config.get('plot', 'titleFontSize') + + title_font = {'size': titleFontSize, + 'color': config.get('plot', 'titleFontColor'), + 'weight': config.get('plot', 'titleFontWeight')} + if title is not None: + plt.title(title, **title_font) + + return fig
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/plot/vertical_section.html b/1.11.0rc1/_modules/mpas_analysis/shared/plot/vertical_section.html new file mode 100644 index 000000000..f2334d653 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/plot/vertical_section.html @@ -0,0 +1,1290 @@ + + + + + + mpas_analysis.shared.plot.vertical_section — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.plot.vertical_section

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Funcitons for plotting vertical sections, both alone and as comparisons between
+runs or with observations
+"""
+# Authors
+# -------
+# Xylar Asay-Davis, Milena Veneziani, Luke Van Roekel, Greg Streletz
+
+import matplotlib
+import matplotlib.pyplot as plt
+from matplotlib.tri import Triangulation
+from mpl_toolkits.axes_grid1 import make_axes_locatable
+import xarray as xr
+import numpy as np
+
+from mpas_analysis.shared.timekeeping.utility import date_to_days
+
+from mpas_analysis.shared.plot.colormap import setup_colormap
+from mpas_analysis.shared.plot.ticks import plot_xtick_format
+from mpas_analysis.shared.plot.title import limit_title
+
+
+
+[docs] +def plot_vertical_section_comparison( + config, + modelArray, + refArray, + diffArray, + colorMapSectionName, + xCoords=None, + zCoord=None, + triangulation_args=None, + xOutlineModel=None, + zOutlineModel=None, + xOutlineRef=None, + zOutlineRef=None, + xOutlineDiff=None, + zOutlineDiff=None, + colorbarLabel=None, + xlabels=None, + ylabel=None, + title=None, + modelTitle='Model', + refTitle='Observations', + diffTitle='Model-Observations', + titleFontSize=None, + defaultFontSize=None, + plotTitleFontSize=None, + axisFontSize=None, + figsize=None, + dpi=None, + lineWidth=2, + lineStyle='solid', + lineColor='black', + contourColormap=None, + backgroundColor='grey', + invalidColor='white', + outlineValid=True, + xLim=None, + yLim=None, + numUpperTicks=None, + upperXAxisTickLabelPrecision=None, + invertYAxis=True, + xCoordIsTime=False, + movingAveragePoints=None, + firstYearXTicks=None, + yearStrideXTicks=None, + maxXTicks=20, + calendar='gregorian', + compareAsContours=False, + comparisonContourLineWidth=None, + comparisonContourLineStyle=None, + comparisonContourLineColor=None, + labelContours=False, + contourLabelPrecision=1, + resultSuffix='Result', + diffSuffix='Difference', + maxTitleLength=None): + """ + Plots vertical section plots in a three-panel format, comparing model data + (in modelArray) to some reference dataset (in refArray), which can be + either observations or an alternative model, and also presenting the + difference plot of the two. If refArray is None, then only one panel + is plotted, displaying the model data. + + If compareAsContours is true, the contours of modelArray and refArray are + plotted on a single plot. 
+ + Parameters + ---------- + config : instance of ConfigParser + the configuration, containing a [plot] section with options that + control plotting + + modelArray, refArray : xarray.DataArray + model and observational or control run data sets + + diffArray : float array + difference between modelArray and refArray + + xCoords : xarray.DataArray or list of xarray.DataArray, optional + The x coordinate(s) for the model, ref and diff arrays. Optional second + and third entries will be used for a second and third x axis above the + plot. The typical use for the second and third axis is for transects, + for which the primary x axis represents distance along a transect, and + the second and third x axes are used to display the corresponding + latitudes and longitudes. + + zCoord : xarray.DataArray, optional + The z coordinates for the model, ref and diff arrays + + triangulation_args : dict, optional + A dict of arguments to create a matplotlib.tri.Triangulation of the + transect that does not rely on it being on a logically rectangular grid. + The arguments rather than the triangulation itself are passed because + multiple triangulations with different masks are needed internally and + there is not an obvious mechanism for copying an existing triangulation. + If this option is provided, ``xCoords`` is only used for tick marks if + more than one x axis is requested, and ``zCoord`` will be ignored. 
+ + xOutlineModel, zOutlineModel : numpy.ndarray, optional + pairs of points defining line segments that are used to outline the + valid region of the mesh for the model panel if ``outlineValid = True`` + and ``triangulation_args`` is not ``None`` + + xOutlineRef, zOutlineRef : numpy.ndarray, optional + Same as ``xOutlineModel`` and ``zOutlineModel`` but for the reference + panel + + xOutlineDiff, zOutlineDiff : numpy.ndarray, optional + Same as ``xOutlineModel`` and ``zOutlineModel`` but for the difference + panel + + colorMapSectionName : str + section name in ``config`` where color map info can be found. + + colorbarLabel : str, optional + the label for the colorbar. If compareAsContours and labelContours are + both True, colorbarLabel is used as follows (typically in order to + indicate the units that are associated with the contour labels): + if refArray is None, the colorbarLabel string is parenthetically + appended to the plot title; if refArray is not None, it is + parenthetically appended to the legend entries of the contour + comparison plot. + + xlabels : str or list of str, optional + labels of x-axes. Labels correspond to entries in ``xCoords``. 
+ + ylabel : str, optional + label of y-axis + + title : str, optional + the subtitle of the plot + + modelTitle : str, optional + title of the model panel + + refTitle : str, optional + title of the observations or control run panel + + diffTitle : str, optional + title of the difference (bias) panel + + titleFontSize : int, optional + size of the title font + + defaultFontSize : int, optional + the size of text other than the title + + plotTitleFontSize : int, optional + size of the title font for the individual plots + + axisFontSize : int, optional + size of the axis font + + figsize : tuple of float, optional + the size of the figure in inches + + dpi : int, optional + the number of dots per inch of the figure, taken from section ``plot`` + option ``dpi`` in the config file by default + + lineWidth : float, optional + the line width of contour lines (if specified) + + lineStyle : str, optional + the line style of contour lines (if specified); this applies to the + contour lines on heatmaps and to the contour lines of the model field + on contour comparison plots (the line style of the contour lines of + the reference field on contour comparison plots is set using the + contourComparisonLineStyle argument). + + lineColor : str, optional + the color of contour lines (if specified); this applies to the + contour lines on heatmaps and to the contour lines of the model field + on contour comparison plots (the line color of the contour lines of + the reference field on contour comparison plots is set using the + contourComparisonLineColor argument). + + backgroundColor : str, optional + the background color for the plot outside the limits of ``xCoord`` and + ``zCoord``. 
+ + invalidColor : str, optional + the color for invalid values (NaNs and masked areas will be + shown in this color) + + outlineValid : bool, optional + whether to outline the boundary between the valid an invalid regions + with a black contour + + xLim : float array, optional + x range of plot + + yLim : float array, optional + y range of plot + + numUpperTicks : the approximate number of ticks to use on the upper x axis + or axes (these are the second and third x axes, which are placed above + the plot if they have been requested by specifying the secondXAxisData + or thirdXAxisData arrays above) + + upperXAxisTickLabelPrecision : the number of decimal places (to the right + of the decimal point) to use for values at upper axis ticks. This + value can be adjusted (in concert with numUpperTicks) to avoid problems + with overlapping numbers along the upper axis. + + invertYAxis : logical, optional + if True, invert Y axis + + xCoordIsTime : logical, optional + if True, format the x axis for time (this applies only to the primary + x axis, not to the optional second or third x axes) + + movingAveragePoints : int, optional + the number of points over which to perform a moving average + NOTE: this option is mostly intended for use when xCoordIsTime is True, + although it will work with other data as well. Also, the moving + average calculation is based on number of points, not actual x axis + values, so for best results, the values in the xArray should be equally + spaced. + + firstYearXTicks : int, optional + The year of the first tick on the x axis. By default, the first time + entry is the first tick. + + yearStrideXTicks : int, optional + The number of years between x ticks. By default, the stride is chosen + automatically to have ``maxXTicks`` tick marks or fewer. + + maxXTicks : int, optional + the maximum number of tick marks that will be allowed along the primary + x axis. This may need to be adjusted depending on the figure size and + aspect ratio. 
NOTE: maxXTicks is only used if xCoordIsTime is True + + calendar : str, optional + the calendar to use for formatting the time axis + NOTE: calendar is only used if xCoordIsTime is True + + compareAsContours : bool, optional + if compareAsContours is True, instead of creating a three panel plot + showing modelArray, refArray, and their difference, the function will + plot the contours of modelArray and refArray on a single plot (unless + refArray is None, in which case only the contours of modelArray will be + plotted on the single panel plot). + + comparisonContourLineWidth : float, optional + the line width of contour lines of the comparisonFieldName field on + a contour comparison plot + + comparisonContourLineStyle : str, optional + the line style of contour lines of the reference field on a contour + comparison plot + + comparisonContourLineColor : str, optional + the line color of contour lines of the reference field on a contour + comparison plot + + labelContours : bool, optional + whether or not to label contour lines (if specified) with their values + + contourLabelPrecision : int, optional + the precision (in terms of number of figures to the right of the + decimal point) of contour labels + + resultSuffix : str, optional + a suffix added to the config options related to colormap information + for the main and control fields + + diffSuffix : str, optional + a suffix added to the config options related to colormap information + for the difference field + + maxTitleLength : int or None, optional + the maximum number of characters in the title, beyond which it is + truncated with a trailing ellipsis. The default is from the + ``maxTitleLength`` config option. 
+ + Returns + ------- + fig : ``matplotlib.figure.Figure`` + The figure that was plotted + + axes : list of ``matplotlib.axes.Axes`` + The subplots + + suptitle : ``matplotlib.text.Text`` + The super-title + """ + # Authors + # ------- + # Greg Streletz, Xylar Asay-Davis, Milena Veneziani + + if maxTitleLength is None: + maxTitleLength = config.getint('plot', 'maxTitleLength') + + if defaultFontSize is None: + defaultFontSize = config.getint('plot', 'defaultFontSize') + matplotlib.rc('font', size=defaultFontSize) + if not isinstance(xCoords, list): + xCoords = [xCoords] + + if not isinstance(xlabels, list): + xlabels = [xlabels] + + if refArray is None or compareAsContours: + singlePanel = True + else: + singlePanel = False + + # set up figure + if dpi is None: + dpi = config.getint('plot', 'dpi') + if figsize is None: + # set the defaults, depending on if we have 1 or 3 panels, and + # depending on how many x axes are to be displayed on the plots + if singlePanel: + if compareAsContours and refArray is not None and \ + contourColormap is None: + # no color bar but there is a legend at the bottom + if len(xCoords) == 3: + figsize = (8, 8) + else: + figsize = (8, 7) + else: + # color bar and legend + figsize = (8, 7) + elif len(xCoords) == 3: + figsize = (8, 17) + else: + figsize = (8, 13) + + fig = plt.figure(figsize=figsize, dpi=dpi) + + if title is not None: + if titleFontSize is None: + titleFontSize = config.get('plot', 'threePanelTitleFontSize') + title_font = {'size': titleFontSize, + 'color': config.get('plot', 'threePanelTitleFontColor'), + 'weight': config.get('plot', + 'threePanelTitleFontWeight')} + suptitle = fig.suptitle(title, y=0.99, **title_font) + else: + suptitle = None + + if plotTitleFontSize is None: + plotTitleFontSize = config.get('plot', 'threePanelPlotTitleFontSize') + + if len(xCoords) == 3: + if singlePanel: + titleY = 1.64 + else: + titleY = 1.34 + elif len(xCoords) >= 2: + titleY = 1.20 + else: + titleY = 1.06 + + if axisFontSize is 
None: + axisFontSize = config.get('plot', 'threePanelAxisFontSize') + + if not singlePanel: + plt.subplot(3, 1, 1) + + if not compareAsContours or refArray is None: + title = modelTitle + contourComparisonField = None + comparisonFieldName = None + originalFieldName = None + else: + title = None + contourComparisonField = refArray + comparisonFieldName = refTitle + originalFieldName = modelTitle + + axes = [] + + _, ax = plot_vertical_section( + config, + modelArray, + colorMapSectionName, + xCoords=xCoords, + zCoord=zCoord, + triangulation_args=triangulation_args, + xOutline=xOutlineModel, + zOutline=zOutlineModel, + suffix=resultSuffix, + colorbarLabel=colorbarLabel, + title=title, + xlabels=xlabels, + ylabel=ylabel, + figsize=None, + titleFontSize=plotTitleFontSize, + defaultFontSize=defaultFontSize, + titleY=titleY, + axisFontSize=axisFontSize, + xLim=xLim, + yLim=yLim, + lineWidth=lineWidth, + lineStyle=lineStyle, + lineColor=lineColor, + contourColormap=contourColormap, + numUpperTicks=numUpperTicks, + upperXAxisTickLabelPrecision=upperXAxisTickLabelPrecision, + invertYAxis=invertYAxis, + xCoordIsTime=xCoordIsTime, + movingAveragePoints=movingAveragePoints, + firstYearXTicks=firstYearXTicks, + yearStrideXTicks=yearStrideXTicks, + maxXTicks=maxXTicks, calendar=calendar, + backgroundColor=backgroundColor, + invalidColor=invalidColor, + outlineValid=outlineValid, + plotAsContours=compareAsContours, + contourComparisonField=contourComparisonField, + comparisonFieldName=comparisonFieldName, + originalFieldName=originalFieldName, + comparisonContourLineWidth=comparisonContourLineWidth, + comparisonContourLineStyle=comparisonContourLineStyle, + comparisonContourLineColor=comparisonContourLineColor, + labelContours=labelContours, + contourLabelPrecision=contourLabelPrecision, + maxTitleLength=maxTitleLength) + + axes.append(ax) + + if not singlePanel: + plt.subplot(3, 1, 2) + _, ax = plot_vertical_section( + config, + refArray, + colorMapSectionName, + 
xCoords=xCoords, + zCoord=zCoord, + triangulation_args=triangulation_args, + xOutline=xOutlineRef, + zOutline=zOutlineRef, + suffix=resultSuffix, + colorbarLabel=colorbarLabel, + title=refTitle, + xlabels=xlabels, + ylabel=ylabel, + figsize=None, + titleFontSize=plotTitleFontSize, + defaultFontSize=defaultFontSize, + titleY=titleY, + axisFontSize=axisFontSize, + xLim=xLim, + yLim=yLim, + lineWidth=lineWidth, + lineStyle=lineStyle, + lineColor=lineColor, + upperXAxisTickLabelPrecision=upperXAxisTickLabelPrecision, + numUpperTicks=numUpperTicks, + invertYAxis=invertYAxis, + xCoordIsTime=xCoordIsTime, + movingAveragePoints=movingAveragePoints, + firstYearXTicks=firstYearXTicks, + yearStrideXTicks=yearStrideXTicks, + maxXTicks=maxXTicks, + calendar=calendar, + backgroundColor=backgroundColor, + invalidColor=invalidColor, + outlineValid=outlineValid, + labelContours=labelContours, + contourLabelPrecision=contourLabelPrecision, + maxTitleLength=maxTitleLength) + + axes.append(ax) + + plt.subplot(3, 1, 3) + _, ax = plot_vertical_section( + config, + diffArray, + colorMapSectionName, + xCoords=xCoords, + zCoord=zCoord, + triangulation_args=triangulation_args, + xOutline=xOutlineDiff, + zOutline=zOutlineDiff, + suffix=diffSuffix, + colorbarLabel=colorbarLabel, + title=diffTitle, + xlabels=xlabels, + ylabel=ylabel, + figsize=None, + titleFontSize=plotTitleFontSize, + defaultFontSize=defaultFontSize, + titleY=titleY, + axisFontSize=axisFontSize, + xLim=xLim, + yLim=yLim, + lineWidth=lineWidth, + lineStyle=lineStyle, + lineColor=lineColor, + upperXAxisTickLabelPrecision=upperXAxisTickLabelPrecision, + numUpperTicks=numUpperTicks, + invertYAxis=invertYAxis, + xCoordIsTime=xCoordIsTime, + movingAveragePoints=movingAveragePoints, + firstYearXTicks=firstYearXTicks, + yearStrideXTicks=yearStrideXTicks, + maxXTicks=maxXTicks, + calendar=calendar, + backgroundColor=backgroundColor, + invalidColor=invalidColor, + outlineValid=outlineValid, + labelContours=labelContours, + 
contourLabelPrecision=contourLabelPrecision, + maxTitleLength=maxTitleLength) + + axes.append(ax) + + if singlePanel: + if len(xCoords) == 3 and refArray is None: + plt.tight_layout(pad=0.0, h_pad=2.0, rect=[0.0, 0.0, 1.0, 0.98]) + else: + plt.tight_layout(pad=0.0, h_pad=2.0, rect=[0.0, 0.0, 1.0, 0.95]) + else: + plt.tight_layout(pad=0.0, h_pad=2.0, rect=[0.01, 0.0, 1.0, 0.97]) + + return fig, axes, suptitle
+ + + +
+[docs] +def plot_vertical_section( + config, + field, + colorMapSectionName, + xCoords=None, + zCoord=None, + triangulation_args=None, + xOutline=None, + zOutline=None, + suffix='', + colorbarLabel=None, + title=None, + xlabels=None, + ylabel=None, + figsize=(10, 4), + dpi=None, + titleFontSize=None, + defaultFontSize=None, + titleY=None, + axisFontSize=None, + xLim=None, + yLim=None, + lineWidth=2, + lineStyle='solid', + lineColor='black', + contourColormap=None, + backgroundColor='grey', + invalidColor='white', + outlineValid=True, + numUpperTicks=None, + upperXAxisTickLabelPrecision=None, + invertYAxis=True, + xCoordIsTime=False, + movingAveragePoints=None, + firstYearXTicks=None, + yearStrideXTicks=None, + maxXTicks=20, + calendar='gregorian', + plotAsContours=False, + contourComparisonField=None, + comparisonFieldName=None, + originalFieldName=None, + comparisonContourLineWidth=None, + comparisonContourLineStyle=None, + comparisonContourLineColor=None, + labelContours=False, + contourLabelPrecision=1, + maxTitleLength=None): + """ + Plots a data set as a x distance (latitude, longitude, + or spherical distance) vs depth map (vertical section). + + Or, if xCoordIsTime is True, plots data set on a vertical + Hovmoller plot (depth vs. time). + + Typically, the ``field`` data are plotted using a heatmap, but if + ``contourComparisonField`` is not None, then contours of both + ``field`` and ``contourComparisonField`` are plotted instead. + + Parameters + ---------- + config : instance of ConfigParser + the configuration, containing a [plot] section with options that + control plotting + + field : xarray.DataArray + field array to plot. For contour plots, ``xCoords`` and ``zCoords`` + should broadcast to the same shape as ``field``. For heatmap plots, + ``xCoords`` and ``zCoords`` are the corners of the plot. If they + broadcast to the same shape as ``field``, ``field`` will be bilinearly + interpolated to center values for each plot cell. 
If the coordinates + have one extra element in each direction than ``field``, ``field`` is + assumed to contain cell values and no interpolation is performed. + + colorMapSectionName : str + section name in ``config`` where color map info can be found. + + xCoords : xarray.DataArray or list of xarray.DataArray, optional + The x coordinate(s) for the ``field``. Optional second + and third entries will be used for a second and third x axis above the + plot. The typical use for the second and third axis is for transects, + for which the primary x axis represents distance along a transect, and + the second and third x axes are used to display the corresponding + latitudes and longitudes. + + zCoord : xarray.DataArray, optional + The z coordinates for the ``field`` + + triangulation_args : dict, optional + A dict of arguments to create a matplotlib.tri.Triangulation of the + transect that does not rely on it being on a logically rectangular grid. + The arguments rather than the triangulation itself are passed because + multiple triangulations with different masks are needed internally and + there is not an obvious mechanism for copying an existing triangulation. + If this option is provided, ``xCoords`` is only used for tick marks if + more than one x axis is requested, and ``zCoord`` will be ignored. + + xOutline, zOutline : numpy.ndarray, optional + pairs of points defining line segments that are used to outline the + valid region of the mesh if ``outlineValid = True`` and + ``triangulation_args`` is not ``None`` + + + + suffix : str, optional + the suffix used for colorbar config options + + colorbarLabel : str, optional + the label for the colorbar. 
If plotAsContours and labelContours are + both True, colorbarLabel is used as follows (typically in order to + indicate the units that are associated with the contour labels): + if ``contourComparisonField`` is None, the ``colorbarLabel`` string is + parenthetically appended to the plot title; if + ``contourComparisonField`` is not None, it is parenthetically appended + to the legend entries of the contour comparison plot. + + title : str, optional + title of plot + + xlabels : str or list of str, optional + labels of x-axes. Labels correspond to entries in ``xCoords``. + + ylabel : str, optional + label of y-axis + + figsize : tuple of float, optional + size of the figure in inches, or None if the current figure should + be used (e.g. if this is a subplot) + + dpi : int, optional + the number of dots per inch of the figure, taken from section ``plot`` + option ``dpi`` in the config file by default + + titleFontSize : int, optional + size of the title font + + defaultFontSize : int, optional + the size of text other than the title + + titleY : float, optional + the y value to use for placing the plot title + + axisFontSize : int, optional + size of the axis font + + xLim : float array, optional + x range of plot + + yLim : float array, optional + y range of plot + + lineWidth : float, optional + the line width of contour lines (if specified) + + lineStyle : str, optional + the line style of contour lines (if specified); this applies to the + style of contour lines of fieldArray (the style of the contour lines + of contourComparisonField is set using + contourComparisonLineStyle). + + lineColor : str, optional + the color of contour lines (if specified); this applies to the + contour lines of fieldArray (the color of the contour lines of + contourComparisonField is set using contourComparisonLineColor + + backgroundColor : str, optional + the background color for the plot outside the limits of ``xCoord`` and + ``zCoord``. 
+ + invalidColor : str, optional + the color for invalid values (NaNs and masked areas will be + shown in this color) + + outlineValid : bool, optional + whether to outline the boundary between the valid an invalid regions + with a black contour + + numUpperTicks : int, optional + the approximate number of ticks to use on the upper x axis + or axes (these are the second and third x axes, which are placed above + the plot if they have been requested by specifying the secondXAxisData + or thirdXAxisData arrays above) + + upperXAxisTickLabelPrecision : int, optional + the number of decimal places (to the right + of the decimal point) to use for values at upper axis ticks. This + value can be adjusted (in concert with numUpperTicks) to avoid problems + with overlapping numbers along the upper axis. + + invertYAxis : logical, optional + if True, invert Y axis + + xCoordIsTime : logical, optional + if True, format the x axis for time (this applies only to the primary + x axis, not to the optional second or third x axes) + + movingAveragePoints : int, optional + the number of points over which to perform a moving average + NOTE: this option is mostly intended for use when ``xCoordIsTime`` is + True, although it will work with other data as well. Also, the moving + average calculation is based on number of points, not actual x axis + values, so for best results, the values in the first entry in + ``xCoords`` should be equally spaced. + + firstYearXTicks : int, optional + The year of the first tick on the x axis. By default, the first time + entry is the first tick. + + yearStrideXTicks : int, optional + The number of years between x ticks. By default, the stride is chosen + automatically to have ``maxXTicks`` tick marks or fewer. + + maxXTicks : int, optional + the maximum number of tick marks that will be allowed along the primary + x axis. This may need to be adjusted depending on the figure size and + aspect ratio. 
NOTE: maxXTicks is only used if xCoordIsTime is True + + calendar : str, optional + the calendar to use for formatting the time axis + NOTE: calendar is only used if xCoordIsTime is True + + plotAsContours : bool, optional + if plotAsContours is True, instead of plotting ``field`` as a + heatmap, the function will plot only the contours of ``field``. In + addition, if contourComparisonField is not None, the contours + of this field will be plotted on the same plot. The selection of + contour levels is still determined as for the contours on the heatmap + plots, via the 'contours' entry in ``colorMapSectionName``. + + contourComparisonField : float array, optional + a comparison ``field`` array (typically observational data or results + from another simulation run), assumed to be of the same shape as + ``field``. If ``plotAsContours`` is ``True`` and + ``countourComparisonFieldArray`` is not ``None``, then contours of both + ``field`` and ``contourComparisonField`` will be plotted in order to + enable a comparison of the two fields on the same plot. + + comparisonFieldName : str, optional + the name for the comparison field. If contourComparisonField is + None, this parameter is ignored. + + originalFieldName : str, optional + the name for the ``field`` field (for the purposes of labeling the + contours on a contour comparison plot). If contourComparisonField + is None, this parameter is ignored. 
+ + comparisonContourLineWidth : float, optional + the line width of contour lines of the comparisonFieldName field on + a contour comparison plot + + comparisonContourLineStyle : str, optional + the line style of contour lines of the comparisonFieldName field on + a contour comparison plot + + comparisonContourLineColor : str, optional + the line color of contour lines of the comparisonFieldName field on + a contour comparison plot + + labelContours : bool, optional + whether or not to label contour lines (if specified) with their values + + contourLabelPrecision : int, optional + the precision (in terms of number of figures to the right of the + decimal point) of contour labels + + maxTitleLength : int or None, optional + the maximum number of characters in the title, beyond which it is + truncated with a trailing ellipsis. The default is from the + ``maxTitleLength`` config option. + + Returns + ------- + fig : ``matplotlib.figure.Figure`` + The figure that was plotted + + ax : ``matplotlib.axes.Axes`` + The subplot + """ + # Authors + # ------- + # Milena Veneziani, Mark Petersen, Xylar Asay-Davis, Greg Streletz + + if maxTitleLength is None: + maxTitleLength = config.getint('plot', 'maxTitleLength') + + if defaultFontSize is None: + defaultFontSize = config.getint('plot', 'defaultFontSize') + matplotlib.rc('font', size=defaultFontSize) + if xCoords is not None: + if not isinstance(xCoords, list): + xCoords = [xCoords] + + if not isinstance(xlabels, list): + xlabels = [xlabels] + + if len(xCoords) != len(xlabels): + raise ValueError('Expected the same number of xCoords and xlabels') + + if triangulation_args is None: + + x, y = xr.broadcast(xCoords[0], zCoord) + dims_in_field = all([dim in field.dims for dim in x.dims]) + + if dims_in_field: + x = x.transpose(*field.dims) + y = y.transpose(*field.dims) + else: + xsize = list(x.sizes.values()) + fieldsize = list(field.sizes.values()) + if xsize[0] == fieldsize[0] + 1 and xsize[1] == fieldsize[1] + 1: + pass + 
elif xsize[0] == fieldsize[1] + 1 and xsize[1] == fieldsize[0] + 1: + x = x.transpose(x.dims[1], x.dims[0]) + y = y.transpose(y.dims[1], y.dims[0]) + else: + raise ValueError('Sizes of coords {}x{} and field {}x{} not ' + 'compatible.'.format(xsize[0], xsize[1], + fieldsize[0], + fieldsize[1])) + + # compute moving averages with respect to the x dimension + if movingAveragePoints is not None and movingAveragePoints != 1: + dim = field.dims[0] + field = field.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim, how='all') + x = x.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim, how='all') + y = y.rolling(dim={dim: movingAveragePoints}, + center=True).mean().dropna(dim, how='all') + + mask = field.notnull() + maskedTriangulation, unmaskedTriangulation = _get_triangulation( + x, y, mask) + if contourComparisonField is not None: + mask = field.notnull() + maskedComparisonTriangulation, _ = _get_triangulation(x, y, mask) + else: + maskedComparisonTriangulation = None + else: + mask = field.notnull() + triMask = np.logical_not(mask.values) + # if any node of a triangle is masked, the triangle is masked + triMask = np.amax(triMask, axis=1) + unmaskedTriangulation = Triangulation(**triangulation_args) + mask_args = dict(triangulation_args) + mask_args['mask'] = triMask + maskedTriangulation = Triangulation(**mask_args) + if contourComparisonField is not None: + mask = contourComparisonField.notnull() + triMask = np.logical_not(mask.values) + triMask = np.amax(triMask, axis=1) + mask_args = dict(triangulation_args) + mask_args['mask'] = triMask + maskedComparisonTriangulation = Triangulation(**mask_args) + else: + maskedComparisonTriangulation = None + + # set up figure + if dpi is None: + dpi = config.getint('plot', 'dpi') + if figsize is not None: + fig = plt.figure(figsize=figsize, dpi=dpi) + else: + fig = plt.gcf() + + colormapDict = setup_colormap(config, colorMapSectionName, + suffix=suffix) + + # fill the unmasked region 
with the invalid color so it will show through + # any masked regions + zeroArray = xr.zeros_like(field) + plt.tricontourf(unmaskedTriangulation, zeroArray.values.ravel(), + colors=invalidColor) + + if not plotAsContours: + # display a heatmap of fieldArray + fieldMasked = field.where(mask, 0.0).values.ravel() + + if colormapDict['levels'] is None: + + plotHandle = plt.tripcolor(maskedTriangulation, fieldMasked, + cmap=colormapDict['colormap'], + norm=colormapDict['norm'], + rasterized=True, shading='gouraud') + else: + plotHandle = plt.tricontourf(maskedTriangulation, fieldMasked, + cmap=colormapDict['colormap'], + norm=colormapDict['norm'], + levels=colormapDict['levels'], + extend='both') + + cbar = plt.colorbar(plotHandle, + orientation='vertical', + spacing='uniform', + aspect=9, + ticks=colormapDict['ticks']) + + if colorbarLabel is not None: + cbar.set_label(colorbarLabel) + + else: + # display a white heatmap to get a white background for non-land + zeroArray = xr.zeros_like(field) + plt.tricontourf(maskedTriangulation, zeroArray.values.ravel(), + colors='white') + + ax = plt.gca() + ax.set_facecolor(backgroundColor) + if outlineValid: + if xOutline is not None and zOutline is not None: + # also outline the domain if provided + plt.plot(xOutline, zOutline, color='black', linewidth=1) + else: + # do a contour to outline the boundary between valid and invalid + # values + landMask = np.isnan(field.values).ravel() + plt.tricontour(unmaskedTriangulation, landMask, levels=[0.0001], + colors='black', linewidths=1) + + # plot contours, if they were requested + contourLevels = colormapDict['contours'] + fmt_string = None + cs1 = None + cs2 = None + + if contourLevels is not None: + if len(contourLevels) == 0: + # automatic calculation of contour levels + contourLevels = None + mask = field.notnull() + fieldMasked = field.where(mask, 0.0).values.ravel() + + cs1 = plt.tricontour(maskedTriangulation, fieldMasked, + levels=contourLevels, + colors=lineColor, + 
linestyles=lineStyle, + linewidths=lineWidth, + cmap=contourColormap) + if labelContours: + fmt_string = "%%1.%df" % int(contourLabelPrecision) + plt.clabel(cs1, fmt=fmt_string) + + if plotAsContours and contourComparisonField is not None: + if comparisonContourLineWidth is None: + comparisonContourLineWidth = lineWidth + mask = contourComparisonField.notnull() + fieldMasked = contourComparisonField.where(mask, 0.0).values.ravel() + cs2 = plt.tricontour(maskedComparisonTriangulation, + fieldMasked, + levels=contourLevels, + colors=comparisonContourLineColor, + linestyles=comparisonContourLineStyle, + linewidths=comparisonContourLineWidth, + cmap=contourColormap) + + if labelContours: + plt.clabel(cs2, fmt=fmt_string) + + plotLegend = (((lineColor is not None and + comparisonContourLineColor is not None) or + (lineWidth is not None and + comparisonContourLineWidth is not None)) and + (plotAsContours and contourComparisonField is not None)) + + if plotLegend: + h1, _ = cs1.legend_elements() + h2, _ = cs2.legend_elements() + if labelContours: + originalFieldName = originalFieldName + " (" + colorbarLabel + ")" + comparisonFieldName = (comparisonFieldName + " (" + + colorbarLabel + ")") + ax.legend([h1[0], h2[0]], [originalFieldName, comparisonFieldName], + loc='upper center', bbox_to_anchor=(0.5, -0.15), ncol=1) + + if title is not None: + if plotAsContours and labelContours \ + and contourComparisonField is None: + title = limit_title(title, + maxTitleLength - (3 + len(colorbarLabel))) + title = title + " (" + colorbarLabel + ")" + else: + title = limit_title(title, maxTitleLength) + if titleFontSize is None: + titleFontSize = config.get('plot', 'titleFontSize') + title_font = {'size': titleFontSize, + 'color': config.get('plot', 'titleFontColor'), + 'weight': config.get('plot', 'titleFontWeight')} + if titleY is not None: + plt.title(title, y=titleY, **title_font) + else: + plt.title(title, **title_font) + + if axisFontSize is None: + axisFontSize = 
config.get('plot', 'axisFontSize') + axis_font = {'size': axisFontSize} + + if xlabels is not None: + plt.xlabel(xlabels[0], **axis_font) + if ylabel is not None: + plt.ylabel(ylabel, **axis_font) + + if invertYAxis: + ax.invert_yaxis() + + if xLim: + ax.set_xlim(xLim) + if yLim: + ax.set_ylim(yLim) + + if xCoords is not None and xCoordIsTime: + if firstYearXTicks is None: + minDays = xCoords[0][0].values + else: + minDays = date_to_days(year=firstYearXTicks, calendar=calendar) + maxDays = xCoords[0][-1].values + + plot_xtick_format(calendar, minDays, maxDays, maxXTicks, + yearStride=yearStrideXTicks) + + if contourLevels is not None: + if contourColormap is not None: + cbar1 = fig.colorbar(cs1, ax=ax, fraction=.05, + orientation='vertical', + spacing='proportional') + + if colorbarLabel is not None: + cbar1.set_label(colorbarLabel) + + # add a second x-axis scale, if it was requested + if xCoords is not None and len(xCoords) >= 2: + ax2 = ax.twiny() + ax2.set_facecolor(backgroundColor) + if xlabels[1] is not None: + ax2.set_xlabel(xlabels[1], **axis_font) + xlimits = ax.get_xlim() + ax2.set_xlim(xlimits) + formatString = None + xticks = None + if numUpperTicks is not None: + xticks = np.linspace(xlimits[0], xlimits[1], numUpperTicks) + tickValues = np.interp(xticks, xCoords[0].values, xCoords[1].values) + ax2.set_xticks(xticks) + formatString = "{{0:.{:d}f}}{}".format( + upperXAxisTickLabelPrecision, r'$\degree$') + ax2.set_xticklabels([formatString.format(member) + for member in tickValues]) + + # add a third x-axis scale, if it was requested + if len(xCoords) == 3: + ax3 = ax.twiny() + ax3.set_facecolor(backgroundColor) + ax3.set_xlabel(xlabels[2], **axis_font) + ax3.set_xlim(xlimits) + ax3.set_xticks(xticks) + if numUpperTicks is not None: + tickValues = np.interp(xticks, xCoords[0].values, + xCoords[2].values) + ax3.set_xticklabels([formatString.format(member) + for member in tickValues]) + ax3.spines['top'].set_position(('outward', 36)) + + return fig, ax
+ + + +def _get_triangulation(x, y, mask): + """divide each quad in the x/y mesh into 2 triangles""" + + nx = x.sizes[x.dims[0]] - 1 + ny = x.sizes[x.dims[1]] - 1 + nTriangles = 2 * nx * ny + + mask = mask.values + mask = np.logical_and(np.logical_and(mask[0:-1, 0:-1], mask[1:, 0:-1]), + np.logical_and(mask[0:-1, 1:], mask[1:, 1:])) + triMask = np.zeros((nx, ny, 2), bool) + triMask[:, :, 0] = np.logical_not(mask) + triMask[:, :, 1] = triMask[:, :, 0] + + triMask = triMask.ravel() + + xIndices, yIndices = np.meshgrid(np.arange(nx), np.arange(ny), + indexing='ij') + + tris = np.zeros((nx, ny, 2, 3), int) + # upper triangles: + tris[:, :, 0, 0] = (ny + 1) * xIndices + yIndices + tris[:, :, 0, 1] = (ny + 1) * (xIndices + 1) + yIndices + tris[:, :, 0, 2] = (ny + 1) * xIndices + yIndices + 1 + # lower triangle + tris[:, :, 1, 0] = (ny + 1) * xIndices + yIndices + 1 + tris[:, :, 1, 1] = (ny + 1) * (xIndices + 1) + yIndices + tris[:, :, 1, 2] = (ny + 1) * (xIndices + 1) + yIndices + 1 + + tris = tris.reshape((nTriangles, 3)) + + x = x.values.ravel() + y = y.values.ravel() + + maskedTriangulation = Triangulation(x=x, y=y, triangles=tris, mask=triMask) + unmaskedTriangulation = Triangulation(x=x, y=y, triangles=tris) + + return maskedTriangulation, unmaskedTriangulation +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/projection.html b/1.11.0rc1/_modules/mpas_analysis/shared/projection.html new file mode 100644 index 000000000..3d75efab9 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/projection.html @@ -0,0 +1,267 @@ + + + + + + mpas_analysis.shared.projection — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.projection

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import pyproj
+import cartopy
+
+
+known_comparison_grids = ['latlon', 'antarctic', 'antarctic_extended',
+                          'arctic', 'arctic_extended', 'north_atlantic',
+                          'north_pacific', 'subpolar_north_atlantic']
+
+
+
+[docs] +def get_pyproj_projection(comparison_grid_name): + """ + Get the projection from the comparison_grid_name. + + Parameters + ---------- + comparison_grid_name : str + The name of the projection comparison grid to use for remapping + + Returns + ------- + projection : pyproj.Proj + The projection + + Raises + ------ + ValueError + If comparison_grid_name does not describe a known comparison grid + """ + # Authors + # ------- + # Xylar Asay-Davis + # Milena Veneziani + # Yohei Takano + + if comparison_grid_name not in known_comparison_grids: + raise ValueError( + f'Unknown comparison grid type {comparison_grid_name}') + + if comparison_grid_name == 'latlon': + raise ValueError('latlon is not a projection grid.') + elif comparison_grid_name in ['antarctic', 'antarctic_extended']: + projection = pyproj.Proj( + '+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 +k_0=1.0 ' + '+x_0=0.0 +y_0=0.0 +ellps=WGS84') + elif comparison_grid_name in ['arctic', 'arctic_extended']: + projection = pyproj.Proj( + '+proj=stere +lat_ts=75.0 +lat_0=90 +lon_0=0.0 +k_0=1.0 ' + '+x_0=0.0 +y_0=0.0 +ellps=WGS84') + elif comparison_grid_name == 'north_atlantic': + projection = pyproj.Proj('+proj=lcc +lon_0=-45 +lat_0=45 +lat_1=39 ' + '+lat_2=51 +x_0=0.0 +y_0=0.0 +ellps=WGS84') + elif comparison_grid_name == 'north_pacific': + projection = pyproj.Proj('+proj=lcc +lon_0=180 +lat_0=40 +lat_1=34 ' + '+lat_2=46 +x_0=0.0 +y_0=0.0 +ellps=WGS84') + elif comparison_grid_name == 'subpolar_north_atlantic': + projection = pyproj.Proj('+proj=lcc +lon_0=-40 +lat_0=54 +lat_1=40 ' + '+lat_2=68 +x_0=0.0 +y_0=0.0 +ellps=WGS84') + else: + raise ValueError(f'We missed one of the known comparison grids: ' + f'{comparison_grid_name}') + + return projection
+ + + +
+[docs] +def get_cartopy_projection(comparison_grid_name): + """ + Get the projection from the comparison_grid_name. + + Parameters + ---------- + comparison_grid_name : str + The name of the projection comparison grid to use for remapping + + Returns + ------- + projection : cartopy.crs.Projection + The projection + + Raises + ------ + ValueError + If comparison_grid_name does not describe a known comparison grid + """ + # Authors + # ------- + # Xylar Asay-Davis + # Milena Veneziani + # Yohei Takano + + if comparison_grid_name not in known_comparison_grids: + raise ValueError( + f'Unknown comparison grid type {comparison_grid_name}') + + if comparison_grid_name == 'latlon': + raise ValueError('latlon is not a projection grid.') + + elif comparison_grid_name in ['antarctic', 'antarctic_extended']: + projection = cartopy.crs.Stereographic( + central_latitude=-90., central_longitude=0.0, + true_scale_latitude=-71.0) + elif comparison_grid_name in ['arctic', 'arctic_extended']: + projection = cartopy.crs.Stereographic( + central_latitude=90., central_longitude=0.0, + true_scale_latitude=75.0) + elif comparison_grid_name == 'north_atlantic': + projection = cartopy.crs.LambertConformal( + central_latitude=45., central_longitude=-45., + standard_parallels=(39., 51.)) + elif comparison_grid_name == 'north_pacific': + projection = cartopy.crs.LambertConformal( + central_latitude=40., central_longitude=180., + standard_parallels=(34., 46.)) + elif comparison_grid_name == 'subpolar_north_atlantic': + projection = cartopy.crs.LambertConformal( + central_latitude=54., central_longitude=-40., + standard_parallels=(40., 68.)) + else: + raise ValueError(f'We missed one of the known comparison grids: ' + f'{comparison_grid_name}') + + return projection
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/regions/compute_region_masks.html b/1.11.0rc1/_modules/mpas_analysis/shared/regions/compute_region_masks.html new file mode 100644 index 000000000..0ee655e14 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/regions/compute_region_masks.html @@ -0,0 +1,248 @@ + + + + + + mpas_analysis.shared.regions.compute_region_masks — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.regions.compute_region_masks

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+from mpas_analysis.shared.analysis_task import AnalysisTask
+from mpas_analysis.shared.regions.compute_region_masks_subtask \
+    import ComputeRegionMasksSubtask
+
+
+
+[docs] +class ComputeRegionMasks(AnalysisTask): + """ + An analysis tasks for computing cell masks for regions defined by geojson + features + + Attributes + ---------- + regionMaskSubtasks : dict of ``ComputeRegionMasksSubtask`` objects + The subtasks of this task with file names as keys + """ + +
+[docs] + def __init__(self, config, conponentName): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + conponentName : str + The component to make mapping files for + """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call the constructor from the base class (AnalysisTask) + super(ComputeRegionMasks, self).__init__( + config=config, + taskName='computeRegionMasks', + componentName=conponentName, + tags=[]) + + self.regionMaskSubtasks = {}
+ + + def add_mask_subtask(self, regionGroup, obsFileName=None, lonVar='lon', + latVar='lat', meshName=None, useMpasMaskCreator=False): + """ + Construct the analysis task and adds it as a subtask of the + ``parentTask``. + + Parameters + ---------- + regionGroup : str + The name of one of the supported region groups (see + :py:func:`mpas_analysis.shared.regions.get_region_mask()`) + + obsFileName : str, optional + The name of an observations file to create masks for. But default, + lon/lat are taken from an MPAS restart file + + lonVar, latVar : str, optional + The name of the longitude and latitude variables in ``obsFileName`` + + meshName : str, optional + The name of the mesh or grid, used as part of the mask file name. + Default is the MPAS mesh name + + useMpasMaskCreator : bool, optional + If ``True``, the mask creator from ``mpas_tools`` will be used + to create the mask. Otherwise, python code is used. Since + masks for observations can only be produced with the python code, + this option is ignored if obsFileName is not ``None``. + """ + # Authors + # ------- + # Xylar Asay-Davis + + config = self.config + + if meshName is None: + meshName = config.get('input', 'mpasMeshName') + + key = '{} {}'.format(meshName, regionGroup) + + if key not in self.regionMaskSubtasks: + + subprocessCount = config.getint('execute', 'parallelTaskCount') + + if obsFileName is not None: + useMpasMaskCreator = False + + if useMpasMaskCreator: + subprocessCount = 1 + + maskSubtask = ComputeRegionMasksSubtask( + self, regionGroup=regionGroup, meshName=meshName, + subprocessCount=subprocessCount, obsFileName=obsFileName, + lonVar=lonVar, latVar=latVar, + useMpasMaskCreator=useMpasMaskCreator) + + self.add_subtask(maskSubtask) + + self.regionMaskSubtasks[key] = maskSubtask + + return self.regionMaskSubtasks[key]
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/regions/compute_region_masks_subtask.html b/1.11.0rc1/_modules/mpas_analysis/shared/regions/compute_region_masks_subtask.html new file mode 100644 index 000000000..64181778f --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/regions/compute_region_masks_subtask.html @@ -0,0 +1,470 @@ + + + + + + mpas_analysis.shared.regions.compute_region_masks_subtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.regions.compute_region_masks_subtask

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import os
+import xarray as xr
+import json
+
+from geometric_features import read_feature_collection, GeometricFeatures
+from geometric_features.aggregation import get_aggregator_by_name
+import mpas_tools.conversion
+from mpas_tools.logging import check_call
+
+from mpas_analysis.shared.analysis_task import AnalysisTask
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories, get_region_mask
+from mpas_analysis.shared.io import write_netcdf_with_fill
+
+
+
+[docs] +def get_feature_list(geojsonFileName): + """ + Builds a list of features found in the geojson file + """ + # Authors + # ------- + # Xylar Asay-Davis + featureList = [] + with open(geojsonFileName) as f: + featureData = json.load(f) + + for feature in featureData['features']: + name = feature['properties']['name'] + featureList.append(name) + return featureList
+ + + +def compute_mpas_region_masks(geojsonFileName, meshFileName, maskFileName, + logger=None, processCount=1, + multiprocessingMethod='spawn', chunkSize=1000, + useMpasMaskCreator=False, dir=None): + """ + Build a region mask file from the given MPAS mesh and geojson file defining + a set of regions. + """ + if os.path.exists(maskFileName): + return + + if useMpasMaskCreator: + dsMesh = xr.open_dataset(meshFileName) + fcMask = read_feature_collection(geojsonFileName) + dsMasks = mpas_tools.conversion.mask(dsMesh=dsMesh, fcMask=fcMask, + logger=logger, dir=dir) + write_netcdf_with_fill(dsMasks, maskFileName) + + else: + args = ['compute_mpas_region_masks', + '-m', meshFileName, + '-g', geojsonFileName, + '-o', maskFileName, + '-t', 'cell', + '--chunk_size', '{}'.format(chunkSize), + '--process_count', '{}'.format(processCount), + '--multiprocessing_method', '{}'.format(multiprocessingMethod)] + check_call(args=args, logger=logger) + + +def compute_lon_lat_region_masks(gridFileName, lonVar, latVar, geojsonFileName, + maskFileName, logger=None, processCount=1, + multiprocessingMethod='spawn', chunkSize=1000): + """ + Build a region mask file from the given lon, lat and geojson file defining + a set of regions. + """ + if os.path.exists(maskFileName): + return + + args = ['compute_lon_lat_region_masks', + '-i', gridFileName, + '--lon', lonVar, + '--lat', latVar, + '-g', geojsonFileName, + '-o', maskFileName, + '--chunk_size', '{}'.format(chunkSize), + '--process_count', '{}'.format(processCount), + '--multiprocessing_method', '{}'.format(multiprocessingMethod)] + check_call(args=args, logger=logger) + + +
+[docs] +class ComputeRegionMasksSubtask(AnalysisTask): + """ + An analysis tasks for computing cell masks for regions defined by geojson + features + + Attributes + ---------- + regionGroup : str + The name of one of the supported region groups (see + :py:func:`geometric_features.aggregation.get_region_by_name()`) + + aggregationFunction : callable + An aggregation function returned by + :py:func:`geometric_features.aggregation.get_region_by_name()` + + geojsonFileName : str + A geojson file, typically from the MPAS ``geometric_features`` + repository, defining the shapes to be masked + + outFileSuffix : str + The suffix for the resulting mask file + + maskFileName : str + The name of the output mask file + + obsFileName : str + The name of an observations file to create masks for. But default, + lon/lat are taken from an MPAS restart file + + lonVar, latVar : str + The name of the longitude and latitude variables in ``obsFileName`` + + meshName : str + The name of the mesh or grid, used as part of the mask file name. + Default is the MPAS mesh name + """ + + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, parentTask, regionGroup, meshName, subprocessCount=1, + obsFileName=None, lonVar='lon', latVar='lat', + useMpasMaskCreator=False): + + """ + Construct the analysis task and adds it as a subtask of the + ``parentTask``. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + regionGroup : str + The name of one of the supported region groups (see + :py:func:`geometric_features.aggregation.get_region_by_name()`) + + meshName : str + The name of the mesh or grid, used as part of the mask file name. + Default is the MPAS mesh name + + + subprocessCount : int, optional + The number of processes that can be used to make the mask + + obsFileName : str, optional + The name of an observations file to create masks for. But default, + lon/lat are taken from an MPAS restart file + + lonVar, latVar : str, optional + The name of the longitude and latitude variables in ``obsFileName`` + + useMpasMaskCreator : bool, optional + If ``True``, the mask creator from ``mpas_tools`` will be used + to create the mask. Otherwise, python code is used. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + suffix = regionGroup.replace(' ', '') + subtaskName = '{}_{}'.format(meshName, suffix) + + # call the constructor from the base class (AnalysisTask) + super(ComputeRegionMasksSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + subtaskName=subtaskName, + componentName=parentTask.componentName, + tags=[]) + + self.regionGroup = regionGroup + self.subprocessCount = subprocessCount + + self.obsFileName = obsFileName + self.lonVar = lonVar + self.latVar = latVar + self.meshName = meshName + self.useMpasMaskCreator = useMpasMaskCreator + self.useMpasMesh = self.obsFileName is None + self.maskFileName = None + + self.aggregationFunction, prefix, date = get_aggregator_by_name( + self.regionGroup) + self.date = date + self.outFileSuffix = '{}{}'.format(prefix, date) + self.geojsonFileName = \ + get_region_mask(self.config, + '{}.geojson'.format(self.outFileSuffix)) + + if not self.useMpasMaskCreator: + # because this uses a Pool, it cannot be launched as a separate + # process + self.runDirectly = True + + parentTask.add_subtask(self)
+ + + def make_region_mask(self): + """ + If the geojson mask file has not already been cached in the diagnostics + or custom diagnostic directories, it will be created in the analysis + output's masks directory. + """ + function = self.aggregationFunction + filename = self.geojsonFileName + if not os.path.exists(filename): + gf = GeometricFeatures() + fc = function(gf) + fc.to_geojson(filename) + + def expand_region_names(self, regionNames): + """ + If ``regionNames`` contains ``'all'``, make sure the geojson file exists + and then return all the region names found in the file. + + Parameters + ---------- + regionNames : list + A list of region names + + Returns + ------- + regionNames : list + A list of region names + """ + if 'all' in regionNames: + self.make_region_mask() + regionNames = get_feature_list(self.geojsonFileName) + return regionNames + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + IOError : + If a restart file is not available from which to read mesh + information or if no history files are available from which to + compute the climatology in the desired time range. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(ComputeRegionMasksSubtask, self).setup_and_check() + + if self.useMpasMesh: + try: + self.obsFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS restart file found: need at least one ' + 'restart file to perform region masking.') + + maskSubdirectory = build_config_full_path(self.config, 'output', + 'maskSubdirectory') + make_directories(maskSubdirectory) + + self.maskFileName = get_region_mask( + self.config, '{}_{}.nc'.format(self.meshName, self.outFileSuffix)) + + if not os.path.exists(self.maskFileName): + # no cached mask file, so let's see if there's already one in the + # masks subdirectory of the output directory + + maskSubdirectory = build_config_full_path(self.config, 'output', + 'maskSubdirectory') + self.maskFileName = '{}/{}_{}.nc'.format(maskSubdirectory, + self.meshName, + self.outFileSuffix) + + if os.path.exists(self.maskFileName): + # nothing to do so don't block a bunch of other processes + self.subprocessCount = 1 + + def run_task(self): + """ + Compute the requested climatologies + """ + # Authors + # ------- + # Xylar Asay-Davis + + if os.path.exists(self.maskFileName): + return + + # make the geojson file if it doesn't exist + self.make_region_mask() + + multiprocessingMethod = self.config.get('execute', + 'multiprocessingMethod') + + if self.useMpasMesh: + + maskSubdirectory = build_config_full_path(self.config, 'output', + 'maskSubdirectory') + make_directories(maskSubdirectory) + + compute_mpas_region_masks( + self.geojsonFileName, self.obsFileName, self.maskFileName, + self.logger, self.subprocessCount, + multiprocessingMethod=multiprocessingMethod, + 
useMpasMaskCreator=self.useMpasMaskCreator, + dir=maskSubdirectory) + else: + compute_lon_lat_region_masks( + self.obsFileName, self.lonVar, self.latVar, + self.geojsonFileName, self.maskFileName, self.logger, + self.subprocessCount, + multiprocessingMethod=multiprocessingMethod)
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/time_series/anomaly.html b/1.11.0rc1/_modules/mpas_analysis/shared/time_series/anomaly.html new file mode 100644 index 000000000..a6346a048 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/time_series/anomaly.html @@ -0,0 +1,222 @@ + + + + + + mpas_analysis.shared.time_series.anomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.time_series.anomaly

+# -*- coding: utf-8 -*-
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+
+from mpas_analysis.shared.io import open_mpas_dataset
+from mpas_analysis.shared.time_series.moving_average import compute_moving_avg
+
+
+
+[docs] +def compute_moving_avg_anomaly_from_start(timeSeriesFileName, variableList, + anomalyStartTime, anomalyEndTime, + startDate, endDate, calendar, + movingAveragePoints=12, + alter_dataset=None): + """ + Compute the rolling mean of the anomaly of a quantity from the beginning + of the simulation (such that the rolling mean starts at zero by definition) + + Parameters + ---------- + timeSeriesFileName : str + a file produced by ``MpasTimeSeriesTask`` containing variables, the + anomaly and rolling mean of which is to be computed + + variableList : list of str + variable names to include in the resulting data set + + anomalyStartTime, anomalyEndTime : str + the start and end times of the reference point for the anomaly + + startDate, endDate : str + the start and end dates of the time series + + calendar : {'gregorian', 'gregoraian_noleap'} + The calendar used in the MPAS run + + movingAveragePoints : int, optional + The number of points (months) over which to perform the rolling average + of the data set + + alter_dataset : function + A function for manipulating the data set (e.g. 
computing new + variables), taking an ``xarray.Dataset`` as input argument and + returning an ``xarray.Dataset`` + + Returns + ------- + ds : ``xarray.Dataset`` + The anomaly of the rolling time mean from the start of the simulation + """ + # Authors + # ------- + # Xylar Asay-Davis + + ds = open_mpas_dataset(fileName=timeSeriesFileName, + calendar=calendar, + variableList=variableList, + startDate=startDate, + endDate=endDate) + + if alter_dataset is not None: + ds = alter_dataset(ds) + + dsStart = open_mpas_dataset( + fileName=timeSeriesFileName, + calendar=calendar, + variableList=variableList, + startDate=anomalyStartTime, + endDate=anomalyEndTime) + + if alter_dataset is not None: + dsStart = alter_dataset(dsStart) + + dsStart = dsStart.isel(Time=slice(0, movingAveragePoints)).mean('Time') + + for variable in ds.data_vars: + ds[variable] = ds[variable] - dsStart[variable] + + ds = compute_moving_avg(ds, movingAveragePoints=movingAveragePoints) + + return ds
+ + +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/time_series/moving_average.html b/1.11.0rc1/_modules/mpas_analysis/shared/time_series/moving_average.html new file mode 100644 index 000000000..2497aac96 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/time_series/moving_average.html @@ -0,0 +1,173 @@ + + + + + + mpas_analysis.shared.time_series.moving_average — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.time_series.moving_average

+# -*- coding: utf-8 -*-
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+#
+
+
+
+[docs] +def compute_moving_avg(ds, movingAveragePoints=12): + """ + Compute the rolling mean of a data set + + Parameters + ---------- + ds : ``xarray.Dataset`` + a dataset to be averaged + + movingAveragePoints : int, optional + The number of points (months) over which to perform the rolling average + of the data set + + Returns + ------- + ds : ``xarray.Dataset`` + The anomaly of the rolling time mean from the start of the simulation + """ + # Authors + # ------- + # Xylar Asay-Davis + + ds = ds.rolling(Time=movingAveragePoints, + center=True).mean().dropna(dim='Time', how='all') + + return ds
+ + +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/time_series/mpas_time_series_task.html b/1.11.0rc1/_modules/mpas_analysis/shared/time_series/mpas_time_series_task.html new file mode 100644 index 000000000..46c6ca5b6 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/time_series/mpas_time_series_task.html @@ -0,0 +1,469 @@ + + + + + + mpas_analysis.shared.time_series.mpas_time_series_task — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.time_series.mpas_time_series_task

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import os
+import subprocess
+from distutils.spawn import find_executable
+import xarray as xr
+import numpy
+
+from mpas_analysis.shared.analysis_task import AnalysisTask
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories, get_files_year_month, decode_strings
+from mpas_analysis.shared.timekeeping.utility import get_simulation_start_time
+
+
+
+[docs] +class MpasTimeSeriesTask(AnalysisTask): + """ + An analysis tasks for computing time series from output from the + ``timeSeriesStatsMonthly`` analysis member. + + Attributes + ---------- + + variableList : list of str + A list of variable names in ``timeSeriesStatsMonthly`` to be + included in the time series + + allVariables : list of str + A list of all available variable names in ``timeSeriesStatsMonthly`` + used to raise an exception when an unavailable variable is requested + + inputFiles : list of str + A list of input files from which to extract the time series. + + startDate, endDate : str + The start and end dates of the time series as strings + + startYear, endYear : int + The start and end years of the time series + """ + + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, config, componentName, taskName=None, + subtaskName=None, section='timeSeries'): + """ + Construct the analysis task for extracting time series. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Contains configuration options + + componentName : {'ocean', 'seaIce'} + The name of the component (same as the folder where the task + resides) + + taskName : str, optional + The name of the task, 'mpasTimeSeriesOcean' or + 'mpasTimeSeriesSeaIce' by default (depending on ``componentName``) + + subtaskName : str, optional + The name of the subtask (if any) + + section : str, optional + The section of the config file from which to read the start and + end times for the time series, also added as a tag + """ + # Authors + # ------- + # Xylar Asay-Davis + + self.variableList = [] + self.section = section + tags = [section] + + self.allVariables = None + + if taskName is None: + suffix = section[0].upper() + section[1:] + \ + componentName[0].upper() + componentName[1:] + taskName = 'mpas{}'.format(suffix) + + # call the constructor from the base class (AnalysisTask) + super(MpasTimeSeriesTask, self).__init__( + config=config, + taskName=taskName, + subtaskName=subtaskName, + componentName=componentName, + tags=tags)
+ + + def add_variables(self, variableList): + """ + Add one or more variables to extract as a time series. + + Parameters + ---------- + variableList : list of str + A list of variable names in ``timeSeriesStatsMonthly`` to be + included in the time series + + Raises + ------ + ValueError + if this funciton is called before this task has been set up (so + the list of available variables has not yet been set) or if one + or more of the requested variables is not available in the + ``timeSeriesStatsMonthly`` output. + """ + # Authors + # ------- + # Xylar Asay-Davis + + if self.allVariables is None: + raise ValueError('add_variables() can only be called after ' + 'setup_and_check() in MpasTimeSeriesTask.\n' + 'Presumably tasks were added in the wrong order ' + 'or add_variables() is being called in the wrong ' + 'place.') + + for variable in variableList: + if variable not in self.allVariables: + raise ValueError( + '{} is not available in timeSeriesStatsMonthly ' + 'output:\n{}'.format(variable, self.allVariables)) + + if variable not in self.variableList: + self.variableList.append(variable) + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(MpasTimeSeriesTask, self).setup_and_check() + + config = self.config + baseDirectory = build_config_full_path( + config, 'output', 'timeSeriesSubdirectory') + + make_directories(baseDirectory) + + self.outputFile = '{}/{}.nc'.format(baseDirectory, + self.fullTaskName) + + self.check_analysis_enabled( + analysisOptionName='config_am_timeseriesstatsmonthly_enable', + raiseException=True) + + # get a list of timeSeriesStats output files from the streams file, + # reading only those that are between the start and end dates + startDate = config.get(self.section, 'startDate') + endDate = config.get(self.section, 'endDate') + streamName = 'timeSeriesStatsMonthlyOutput' + self.inputFiles = self.historyStreams.readpath( + streamName, startDate=startDate, endDate=endDate, + calendar=self.calendar) + + if len(self.inputFiles) == 0: + raise IOError('No files were found in stream {} between {} and ' + '{}.'.format(streamName, startDate, endDate)) + + self.runMessage = \ + f'\nComputing MPAS time series from first year plus files:\n' \ + f' {os.path.basename(self.inputFiles[0])} through\n' \ + f' {os.path.basename(self.inputFiles[-1])}' + + # Make sure first year of data is included for computing anomalies + if config.has_option('timeSeries', 'anomalyRefYear'): + anomalyYear = config.getint('timeSeries', 'anomalyRefYear') + anomalyStartDate = '{:04d}-01-01_00:00:00'.format(anomalyYear) + else: + anomalyStartDate = get_simulation_start_time(self.runStreams) + anomalyYear = int(anomalyStartDate[0:4]) + + anomalyEndDate = '{:04d}-12-31_23:59:59'.format(anomalyYear) + firstYearInputFiles = self.historyStreams.readpath( + streamName, 
startDate=anomalyStartDate, + endDate=anomalyEndDate, + calendar=self.calendar) + for fileName in firstYearInputFiles: + if fileName not in self.inputFiles: + self.inputFiles.append(fileName) + + self.inputFiles = sorted(self.inputFiles) + + with xr.open_dataset(self.inputFiles[0]) as ds: + self.allVariables = list(ds.data_vars.keys()) + + def run_task(self): + """ + Compute the requested time series + """ + # Authors + # ------- + # Xylar Asay-Davis + + if len(self.variableList) == 0: + # nothing to do + return + + self.logger.info(self.runMessage) + + self._compute_time_series_with_ncrcat() + + def _compute_time_series_with_ncrcat(self): + + """ + Uses ncrcat to extact time series from timeSeriesMonthlyOutput files + + Raises + ------ + OSError + If ``ncrcat`` is not in the system path. + + Author + ------ + Xylar Asay-Davis + """ + + if find_executable('ncrcat') is None: + raise OSError('ncrcat not found. Make sure the latest nco ' + 'package is installed: \n' + 'conda install nco\n' + 'Note: this presumes use of the conda-forge ' + 'channel.') + + inputFiles = self.inputFiles + append = False + if os.path.exists(self.outputFile): + # make sure all the necessary variables are also present + with xr.open_dataset(self.outputFile) as ds: + if ds.sizes['Time'] == 0: + updateSubset = False + else: + updateSubset = True + for variableName in self.variableList: + if variableName not in ds.variables: + updateSubset = False + break + + if updateSubset: + # add only input files wiht times that aren't already in + # the output file + + append = True + + fileNames = sorted(self.inputFiles) + inYears, inMonths = get_files_year_month( + fileNames, self.historyStreams, + 'timeSeriesStatsMonthlyOutput') + + inYears = numpy.array(inYears) + inMonths = numpy.array(inMonths) + totalMonths = 12 * inYears + inMonths + + dates = decode_strings(ds.xtime_startMonthly) + + lastDate = dates[-1] + + lastYear = int(lastDate[0:4]) + lastMonth = int(lastDate[5:7]) + lastTotalMonths = 12 * 
lastYear + lastMonth + + inputFiles = [] + for index, inputFile in enumerate(fileNames): + if totalMonths[index] > lastTotalMonths: + inputFiles.append(inputFile) + + if len(inputFiles) == 0: + # nothing to do + return + else: + # there is an output file but it has the wrong variables + # so we need ot delete it. + self.logger.warning('Warning: deleting file {} because ' + 'it is empty or some variables were ' + 'missing'.format(self.outputFile)) + os.remove(self.outputFile) + + variableList = self.variableList + ['xtime_startMonthly', + 'xtime_endMonthly'] + + args = ['ncrcat', '-4', '--no_tmp_fl', + '-v', ','.join(variableList)] + + if append: + args.append('--record_append') + + printCommand = '{} {} ... {} {}'.format(' '.join(args), inputFiles[0], + inputFiles[-1], + self.outputFile) + args.extend(inputFiles) + args.append(self.outputFile) + + self.logger.info('running: {}'.format(printCommand)) + for handler in self.logger.handlers: + handler.flush() + + process = subprocess.Popen(args, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + + if stdout: + stdout = stdout.decode('utf-8') + for line in stdout.split('\n'): + self.logger.info(line) + if stderr: + stderr = stderr.decode('utf-8') + for line in stderr.split('\n'): + self.logger.error(line) + + if process.returncode != 0: + raise subprocess.CalledProcessError(process.returncode, + ' '.join(args))
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/time_series/time_series.html b/1.11.0rc1/_modules/mpas_analysis/shared/time_series/time_series.html new file mode 100644 index 000000000..7e0e4edb4 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/time_series/time_series.html @@ -0,0 +1,366 @@ + + + + + + mpas_analysis.shared.time_series.time_series — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.time_series.time_series

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Utility functions related to time-series data sets
+"""
+# Authors
+# -------
+# Xylar Asay-Davis
+
+import xarray as xr
+import numpy
+import os
+from distutils.spawn import find_executable
+import glob
+import subprocess
+
+from mpas_analysis.shared.timekeeping.utility import days_to_datetime
+
+
+def combine_time_series_with_ncrcat(inFileNames, outFileName,
+                                    variableList=None, logger=None):
+    """
+    Uses ncrcat to extact time series from a series of files
+
+    inFileNames : str or list of str
+        A file name with wildcard(s) or a list of input files from which to
+        extract the time series.
+
+    outFileName : str
+        The output NetCDF file where the time series should be written.
+
+    variableList : list of str, optional
+        A list of varibles to include.  All variables are included by default
+
+    logger : `logging.Logger``, optional
+        A logger to which ncclimo output should be redirected
+
+    Raises
+    ------
+    OSError
+        If ``ncrcat`` is not in the system path.
+
+    Author
+    ------
+    Xylar Asay-Davis
+    """
+
+    if find_executable('ncrcat') is None:
+        raise OSError('ncrcat not found. Make sure the latest nco '
+                      'package is installed: \n'
+                      'conda install nco\n'
+                      'Note: this presumes use of the conda-forge '
+                      'channel.')
+
+    if os.path.exists(outFileName):
+        return
+
+    if isinstance(inFileNames, str):
+        inFileNames = sorted(glob.glob(inFileNames))
+
+    args = ['ncrcat', '-4', '--record_append', '--no_tmp_fl']
+
+    if variableList is not None:
+        args.extend(['-v', ','.join(variableList)])
+
+    printCommand = '{} {} ... {} {}'.format(' '.join(args), inFileNames[0],
+                                            inFileNames[-1],
+                                            outFileName)
+    args.extend(inFileNames)
+    args.append(outFileName)
+
+    if logger is None:
+        print('running: {}'.format(printCommand))
+    else:
+        logger.info('running: {}'.format(printCommand))
+        for handler in logger.handlers:
+            handler.flush()
+    process = subprocess.Popen(args, stdout=subprocess.PIPE,
+                               stderr=subprocess.PIPE)
+    stdout, stderr = process.communicate()
+
+    if stdout:
+        stdout = stdout.decode('utf-8')
+        for line in stdout.split('\n'):
+            if logger is None:
+                print(line)
+            else:
+                logger.info(line)
+    if stderr:
+        stderr = stderr.decode('utf-8')
+        for line in stderr.split('\n'):
+            if logger is None:
+                print(line)
+            else:
+                logger.error(line)
+
+    if process.returncode != 0:
+        raise subprocess.CalledProcessError(process.returncode,
+                                            ' '.join(args))
+
+
+
+[docs] +def cache_time_series(timesInDataSet, timeSeriesCalcFunction, cacheFileName, + calendar, yearsPerCacheUpdate=1, + logger=None): + """ + Create or update a NetCDF file ``cacheFileName`` containing the given time + series, calculated with ``timeSeriesCalcFunction`` over the given times, + start and end year, and time frequency with which results are cached. + + Note: only works with climatologies where the mask (locations of ``NaN`` + values) doesn't vary with time. + + Parameters + ---------- + timesInDataSet : array-like + Times at which the time series is to be calculated, typically taken + from ``ds.Times.values`` for a data set from which the time series + will be extracted or computed. + + timeSeriesCalcFunction : function + A function with arguments ``timeIndices``, indicating the entries in + ``timesInDataSet`` to be computed, and ``firstCall``, indicating + whether this is the first call to the funciton (useful for printing + progress information). + + cacheFileName : str + The absolute path to the cache file where the times series will be + stored + + calendar : {'gregorian', 'noleap'} + The name of one of the calendars supported by MPAS cores, used to + determine ``year`` and ``month`` from ``Time`` coordinate + + yearsPerCacheUpdate : int, optional + The frequency with which the cache file is updated as the computation + progresses. If the computation is expensive, it may be useful to + output the file frequently. If not, there will be needless overhead + in caching the file too frequently. + + logger : ``logging.Logger``, optional + A logger to which to write output as the time series is computed + + Returns + ------- + climatology : object of same type as ``ds`` + A data set without the ``'Time'`` coordinate containing the mean + of ds over all months in monthValues, weighted by the number of days + in each month. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + timesProcessed = numpy.zeros(len(timesInDataSet), bool) + # figure out which files to load and which years go in each file + continueOutput = os.path.exists(cacheFileName) + cacheDataSetExists = False + if continueOutput: + if logger is not None: + logger.info(' Read in previously computed time series') + # read in what we have so far + + try: + dsCache = xr.open_dataset(cacheFileName, decode_times=False) + cacheDataSetExists = True + except IOError: + # assuming the cache file is corrupt, so deleting it. + message = 'Deleting cache file {}, which appears to have ' \ + 'been corrupted.'.format(cacheFileName) + if logger is None: + print('Warning: {}'.format(message)) + else: + logger.warning(message) + os.remove(cacheFileName) + + if cacheDataSetExists: + # force loading and then close so we can overwrite the file later + dsCache.load() + dsCache.close() + for time in dsCache.Time.values: + timesProcessed[timesInDataSet == time] = True + + datetimes = days_to_datetime(timesInDataSet, calendar=calendar) + yearsInDataSet = numpy.array([date.year for date in datetimes]) + + startYear = yearsInDataSet[0] + endYear = yearsInDataSet[-1] + + firstProcessed = True + for firstYear in range(startYear, endYear + 1, yearsPerCacheUpdate): + years = range(firstYear, numpy.minimum(endYear + 1, + firstYear + yearsPerCacheUpdate)) + + mask = numpy.zeros(len(yearsInDataSet), bool) + for year in years: + mask = numpy.logical_or(mask, yearsInDataSet == year) + mask = numpy.logical_and(mask, numpy.logical_not(timesProcessed)) + + timeIndices = numpy.nonzero(mask)[0] + + if len(timeIndices) == 0: + # no unprocessed time entries in this data range + continue + + if logger is not None: + if firstProcessed: + logger.info(' Process and save time series') + if yearsPerCacheUpdate == 1: + logger.info(' {:04d}'.format(years[0])) + else: + logger.info(' {:04d}-{:04d}'.format(years[0], years[-1])) + + ds = timeSeriesCalcFunction(timeIndices, 
firstProcessed) + firstProcessed = False + + if cacheDataSetExists: + dsCache = xr.concat([dsCache, ds], dim='Time') + # now sort the Time dimension: + dsCache = dsCache.loc[{'Time': sorted(dsCache.Time.values)}] + else: + dsCache = ds + cacheDataSetExists = True + + dsCache.to_netcdf(cacheFileName) + + return dsCache.sel(Time=slice(timesInDataSet[0], timesInDataSet[-1]))
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/timekeeping/MpasRelativeDelta.html b/1.11.0rc1/_modules/mpas_analysis/shared/timekeeping/MpasRelativeDelta.html new file mode 100644 index 000000000..880d09e5f --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/timekeeping/MpasRelativeDelta.html @@ -0,0 +1,295 @@ + + + + + + mpas_analysis.shared.timekeeping.MpasRelativeDelta — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.timekeeping.MpasRelativeDelta

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import datetime
+from dateutil.relativedelta import relativedelta
+from calendar import monthrange, isleap
+
+
+
+[docs] +class MpasRelativeDelta(relativedelta): + """ + ``MpasRelativeDelta`` is a subclass of ``dateutil.relativedelta`` for + relative time intervals with different MPAS calendars. + + Only relative intervals (years, months, etc.) are supported and not the + absolute date specifications (year, month, etc.). Addition/subtraction + of ``datetime.datetime`` objects or other ``MpasRelativeDelta`` (but + currently not ``datetime.date``, ``datetime.timedelta`` or other related + objects) is supported. + """ + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, dt1=None, dt2=None, years=0, months=0, days=0, + hours=0, minutes=0, seconds=0, calendar='gregorian'): + if calendar not in ['gregorian', 'noleap', 'gregorian_noleap']: + raise ValueError('Unsupported MPAs calendar {}'.format(calendar)) + self.calendar = calendar + super(MpasRelativeDelta, self).__init__(dt1=dt1, dt2=dt2, years=years, + months=months, days=days, + hours=hours, minutes=minutes, + seconds=seconds)
+ + + def __add__(self, other): + if not isinstance(other, (datetime.datetime, MpasRelativeDelta)): + return NotImplemented + + if isinstance(other, MpasRelativeDelta): + if self.calendar != other.calendar: + raise ValueError('MpasRelativeDelta objects can only be added ' + 'if their calendars match.') + years = self.years + other.years + months = self.months + other.months + if months > 12: + years += 1 + months -= 12 + elif months < 1: + years -= 1 + months += 12 + + return self.__class__(years=years, + months=months, + days=self.days + other.days, + hours=self.hours + other.hours, + minutes=self.minutes + other.minutes, + seconds=self.seconds + other.seconds, + calendar=self.calendar) + + year = other.year + self.years + + month = other.month + if self.months != 0: + assert 1 <= abs(self.months) <= 12 + month += self.months + if month > 12: + year += 1 + month -= 12 + elif month < 1: + year -= 1 + month += 12 + + if self.calendar == 'gregorian': + daysInMonth = monthrange(year, month)[1] + elif self.calendar in ['noleap', 'gregorian_noleap']: + # use year 0001, which is not a leapyear + daysInMonth = monthrange(1, month)[1] + + day = min(daysInMonth, other.day) + repl = {"year": year, "month": month, "day": day} + + days = self.days + if self.calendar in ['noleap', 'gregorian_noleap'] and isleap(year): + if month == 2 and day + days >= 29: + # skip forward over the leap day + days += 1 + elif month == 3 and day + days <= 0: + # skip backward over the leap day + days -= 1 + + return (other.replace(**repl) + + datetime.timedelta(days=days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds)) + + def __radd__(self, other): + return self.__add__(other) + + def __rsub__(self, other): + return self.__neg__().__add__(other) + + def __sub__(self, other): + if not isinstance(other, MpasRelativeDelta): + return NotImplemented + return self.__add__(other.__neg__()) + + def __neg__(self): + return self.__class__(years=-self.years, + months=-self.months, + 
days=-self.days, + hours=-self.hours, + minutes=-self.minutes, + seconds=-self.seconds, + calendar=self.calendar) + + def __mul__(self, other): + try: + f = float(other) + except TypeError: + return NotImplemented + + return self.__class__(years=int(self.years * f), + months=int(self.months * f), + days=int(self.days * f), + hours=int(self.hours * f), + minutes=int(self.minutes * f), + seconds=int(self.seconds * f), + calendar=self.calendar) + + __rmul__ = __mul__ + + def __div__(self, other): + try: + reciprocal = 1 / float(other) + except TypeError: + return NotImplemented + + return self.__mul__(reciprocal) + + __truediv__ = __div__ + + def __repr__(self): + outList = [] + for attr in ["years", "months", "days", "leapdays", + "hours", "minutes", "seconds", "microseconds"]: + value = getattr(self, attr) + if value: + outList.append("{attr}={value:+g}".format(attr=attr, + value=value)) + outList.append("calendar='{}'".format(self.calendar)) + return "{classname}({attrs})".format(classname=self.__class__.__name__, + attrs=", ".join(outList))
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/timekeeping/utility.html b/1.11.0rc1/_modules/mpas_analysis/shared/timekeeping/utility.html new file mode 100644 index 000000000..b0747b585 --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/timekeeping/utility.html @@ -0,0 +1,641 @@ + + + + + + mpas_analysis.shared.timekeeping.utility — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.timekeeping.utility

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Time keeping utility functions
+"""
+# Authors
+# -------
+# Xylar Asay-Davis
+
+import datetime
+import netCDF4
+import xarray
+import numpy
+
+from mpas_analysis.shared.timekeeping.MpasRelativeDelta import \
+    MpasRelativeDelta
+from mpas_analysis.shared.io.utility import decode_strings
+
+
+
+[docs] +def get_simulation_start_time(streams): + """ + Given a ``StreamsFile`` object, returns the simulation start time parsed + from a restart file. + + Parameters + ---------- + steams : ``StreamsFile`` object + For parsing an MPAS streams file + + Returns + ------- + simulation_start_time : str + The start date of the simulation parsed from a restart file identified + by the contents of ``streams``. + + Raises + ------ + IOError + If no restart file can be found. + """ + # Authors + # ------- + # Xylar Asay-Davis + + try: + restartFile = streams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS restart file found: need at least one ' + 'restart file for analysis to work correctly') + + ds = xarray.open_dataset(restartFile) + da = ds.simulationStartTime + if da.dtype.type is numpy.string_: + simulationStartTime = bytes.decode(da.values.tobytes()) + else: + simulationStartTime = da.values.tobytes() + # replace underscores so it works as a CF-compliant reference date + simulationStartTime = simulationStartTime.rstrip('\x00').replace('_', ' ') + + return simulationStartTime
+ + + +
+[docs] +def string_to_datetime(dateString): + """ + Given a date string and a calendar, returns a ``datetime.datetime`` + + Parameters + ---------- + dateString : string + A date and time in one of the following formats:: + + YYYY-MM-DD hh:mm:ss + YYYY-MM-DD hh.mm.ss + YYYY-MM-DD SSSSS + DDD hh:mm:ss + DDD hh.mm.ss + DDD SSSSS + hh.mm.ss + hh:mm:ss + YYYY-MM-DD + YYYY-MM + SSSSS + + Note: either underscores or spaces can be used to separate the date + from the time portion of the string. + + Returns + ------- + datetime : A ``datetime.datetime`` object + + Raises + ------ + ValueError + If an invalid ``dateString`` is supplied. + """ + # Authors + # ------- + # Xylar Asay-Davis + + (year, month, day, hour, minute, second) = \ + _parse_date_string(dateString, isInterval=False) + + return datetime.datetime(year=year, month=month, day=day, hour=hour, + minute=minute, second=second)
+ + + +
+[docs] +def string_to_relative_delta(dateString, calendar='gregorian'): + """ + Given a date string and a calendar, returns an instance of + ``MpasRelativeDelta`` + + Parameters + ---------- + dateString : str + A date and time in one of the following formats:: + + YYYY-MM-DD hh:mm:ss + YYYY-MM-DD hh.mm.ss + YYYY-MM-DD SSSSS + DDD hh:mm:ss + DDD hh.mm.ss + DDD SSSSS + hh.mm.ss + hh:mm:ss + YYYY-MM-DD + YYYY-MM + SSSSS + + Note: either underscores or spaces can be used to separate the date + from the time portion of the string. + + calendar: {'gregorian', 'noleap'}, optional + The name of one of the calendars supported by MPAS cores + + Returns + ------- + relativedelta : An ``MpasRelativeDelta`` object + + Raises + ------ + ValueError + If an invalid ``dateString`` is supplied. + """ + # Authors + # ------- + # Xylar Asay-Davis + + (years, months, days, hours, minutes, seconds) = \ + _parse_date_string(dateString, isInterval=True) + + return MpasRelativeDelta(years=years, months=months, days=days, + hours=hours, minutes=minutes, seconds=seconds, + calendar=calendar)
+ + + +
+[docs] +def string_to_days_since_date(dateString, calendar='gregorian', + referenceDate='0001-01-01'): + """ + Given a date string or an array-like of date strings, a reference date + string, and a calendar, returns the number of days (as a float or + numpy.array of floats) since the reference date + + Parameters + ---------- + dateStrings : str or array-like of str + A date and time (or array of date/times) in one of the following + formats:: + + YYYY-MM-DD hh:mm:ss + YYYY-MM-DD hh.mm.ss + YYYY-MM-DD SSSSS + DDD hh:mm:ss + DDD hh.mm.ss + DDD SSSSS + hh.mm.ss + hh:mm:ss + YYYY-MM-DD + YYYY-MM + SSSSS + + Note: either underscores or spaces can be used to separate the date + from the time portion of the string. + + calendar: {'gregorian', 'noleap'}, optional + The name of one of the calendars supported by MPAS cores + + referenceDate : str, optional + A reference date of the form:: + + 0001-01-01 + 0001-01-01 00:00:00 + + Returns + ------- + days : float or numpy.array of floats + The number of days since ``referenceDate`` for each date in + ``dateString`` + + Raises + ------ + ValueError + If an invalid ``dateString`` or ``calendar`` is supplied. + """ + # Authors + # ------- + # Xylar Asay-Davis + + isSingleString = isinstance(dateString, str) + + if isSingleString: + dateString = [dateString] + + dates = [string_to_datetime(string) for string in dateString] + days = datetime_to_days(dates, calendar=calendar, + referenceDate=referenceDate) + + if isSingleString: + days = days[0] + else: + days = numpy.array(days) + return days
+ + + +
+[docs] +def days_to_datetime(days, calendar='gregorian', referenceDate='0001-01-01'): + """ + Covert days to ``datetime.datetime`` objects given a reference date and an + MPAS calendar (either 'gregorian' or 'noleap'). + + Parameters + ---------- + days : float or array-like of floats + The number of days since the reference date. + + calendar : {'gregorian', 'noleap'}, optional + A calendar to be used to convert days to a ``datetime.datetime`` + object. + + referenceDate : str, optional + A reference date of the form:: + + 0001-01-01 + 0001-01-01 00:00:00 + + Returns + ------- + datetime : `datetime.datetime` (or array-like of datetimes) + The days since ``referenceDate`` on the given ``calendar``. + + Raises + ------ + ValueError + If an invalid ``days``, ``referenceDate`` or ``calendar`` is supplied. + """ + # Authors + # ------- + # Xylar Asay-Davis + + datetimes = netCDF4.num2date(days, + 'days since {}'.format(referenceDate), + calendar=_mpas_to_netcdf_calendar(calendar)) + + # convert to datetime.datetime + if isinstance(datetimes, numpy.ndarray): + newDateTimes = [] + for date in datetimes.flat: + newDateTimes.append(_round_datetime(date)) + if len(newDateTimes) > 0: + datetimes = numpy.reshape(numpy.array(newDateTimes), + datetimes.shape) + + else: + datetimes = _round_datetime(datetimes) + + return datetimes
+ + + +
+[docs] +def datetime_to_days(dates, calendar='gregorian', referenceDate='0001-01-01'): + """ + Given date(s), a calendar and a reference date, returns the days since + the reference date, either as a single float or an array of floats. + + Parameters + ---------- + datetime : instance or array-like of datetime.datetime + The date(s) to be converted to days since ``referenceDate`` on the + given ``calendar``. + + calendar : {'gregorian', 'noleap'}, optional + A calendar to be used to convert days to a ``datetime.datetime`` object. + + referenceDate : str, optional + A reference date of the form:: + + 0001-01-01 + 0001-01-01 00:00:00 + + Returns + ------- + days : float or array of floats + The days since ``referenceDate`` on the given ``calendar``. + + Raises + ------ + ValueError + If an invalid ``datetimes``, ``referenceDate`` or ``calendar`` is + supplied. + """ + # Authors + # ------- + # Xylar Asay-Davis + + isSingleDate = False + if isinstance(dates, datetime.datetime): + dates = [dates] + isSingleDate = True + + days = netCDF4.date2num(dates, 'days since {}'.format(referenceDate), + calendar=_mpas_to_netcdf_calendar(calendar)) + + if isSingleDate: + days = days[0] + + return days
+ + + +
+[docs] +def date_to_days(year=1, month=1, day=1, hour=0, minute=0, second=0, + calendar='gregorian', referenceDate='0001-01-01'): + """ + Convert a date to days since the reference date. + + Parameters + ---------- + year, month, day, hour, minute, second : int, optional + The date to be converted to days since ``referenceDate`` on the + given ``calendar``. + + calendar : {'gregorian', 'noleap'}, optional + A calendar to be used to convert days to a ``datetime.datetime`` + object. + + referenceDate : str, optional + A reference date of the form:: + + 0001-01-01 + 0001-01-01 00:00:00 + + Returns + ------- + days : float + The days since ``referenceDate`` on the given ``calendar``. + + Raises + ------ + ValueError + If an invalid ``referenceDate`` or ``calendar`` is supplied. + """ + # Authors + # ------- + # Xylar Asay-Davis + + calendar = _mpas_to_netcdf_calendar(calendar) + + date = datetime.datetime(year, month, day, hour, minute, second) + + return netCDF4.date2num(date, 'days since {}'.format(referenceDate), + calendar=calendar)
+ + + +def _parse_date_string(dateString, isInterval=False): + """ + Given a string containing a date, returns a tuple defining a date of the + form (year, month, day, hour, minute, second) appropriate for constructing + a datetime or timedelta + + Parameters + ---------- + dateString : string + A date and time in one of the followingformats:: + + YYYY-MM-DD hh:mm:ss + YYYY-MM-DD hh.mm.ss + YYYY-MM-DD SSSSS + DDD hh:mm:ss + DDD hh.mm.ss + DDD SSSSS + hh.mm.ss + hh:mm:ss + YYYY-MM-DD + YYYY-MM + SSSSS + + Note: either underscores or spaces can be used to separate the date + from the time portion of the string. + + isInterval : bool, optional + If ``isInterval=True``, the result is appropriate for constructing + a `datetime.timedelta` object rather than a `datetime`. + + Returns + ------- + date : A tuple of (year, month, day, hour, minute, second) + + Raises + ------ + ValueError + If an invalid ``dateString`` is supplied. + """ + # Authors + # ------- + # Xylar Asay-Davis + + if isInterval: + offset = 0 + else: + offset = 1 + + # change underscores to spaces so both can be supported + dateString = dateString.rstrip('\x00').replace('_', ' ').strip() + if ' ' in dateString: + ymd, hms = dateString.split(' ') + else: + if '-' in dateString: + ymd = dateString + # error can result if dateString = '1990-01' + # assume this means '1990-01-01' + if len(ymd.split('-')) == 2: + ymd += '-01' + hms = '00:00:00' + else: + if isInterval: + ymd = '0000-00-00' + else: + ymd = '0001-01-01' + hms = dateString + + if '.' in hms: + hms = hms.replace('.', ':') + + if '-' in ymd: + (year, month, day) \ + = [int(sub) for sub in ymd.split('-')] + else: + day = int(ymd) + year = 0 + month = offset + + if ':' in hms: + (hour, minute, second) \ + = [int(sub) for sub in hms.split(':')] + else: + second = int(hms) + minute = 0 + hour = 0 + return (year, month, day, hour, minute, second) + + +def _mpas_to_netcdf_calendar(calendar): + """ + Convert from MPAS calendar to NetCDF4 calendar names. 
+ """ + + if calendar == 'gregorian_noleap': + calendar = 'noleap' + if calendar not in ['gregorian', 'noleap']: + raise ValueError('Unsupported calendar {}'.format(calendar)) + return calendar + + +def _round_datetime(date): + """Round a datetime object to nearest second + date : datetime.datetime or similar objet object. + """ + (year, month, day, hour, minute, second, microsecond) = \ + (date.year, date.month, date.day, date.hour, date.minute, date.second, + date.microsecond) + + date = datetime.datetime(year=year, month=month, day=day, + hour=hour, minute=minute, + second=second) + + add_seconds = int(1e-6 * microsecond + 0.5) + + return date + datetime.timedelta(0, add_seconds) +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_modules/mpas_analysis/shared/transects/compute_transect_masks_subtask.html b/1.11.0rc1/_modules/mpas_analysis/shared/transects/compute_transect_masks_subtask.html new file mode 100644 index 000000000..92875206b --- /dev/null +++ b/1.11.0rc1/_modules/mpas_analysis/shared/transects/compute_transect_masks_subtask.html @@ -0,0 +1,382 @@ + + + + + + mpas_analysis.shared.transects.compute_transect_masks_subtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + + +
  • +
  • +
+
+
+
+
+ +

Source code for mpas_analysis.shared.transects.compute_transect_masks_subtask

+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+
+import os
+import xarray as xr
+
+from geometric_features import read_feature_collection, GeometricFeatures
+from geometric_features.aggregation import get_aggregator_by_name
+import mpas_tools.conversion
+from mpas_tools.logging import check_call
+
+from mpas_analysis.shared.analysis_task import AnalysisTask
+
+from mpas_analysis.shared.io.utility import build_config_full_path, \
+    make_directories, get_region_mask
+from mpas_analysis.shared.io import write_netcdf_with_fill
+
+from mpas_analysis.shared.regions import get_feature_list
+
+
+
+[docs] +def compute_mpas_transect_masks(geojsonFileName, meshFileName, maskFileName, + logger=None, processCount=1, chunkSize=1000, + subdivisionThreshold=10e3, + useMpasMaskCreator=False, + dir=None): + """ + Build a transect mask file from the given MPAS mesh and geojson file \ + defining a set of transects. + """ + if os.path.exists(maskFileName): + return + + # For now, we need to use mpas_tools.conversion.mask() because + # compute_mpas_transect_masks doesn't produce edge sign, needed for + # transport transects + if useMpasMaskCreator: + dsMesh = xr.open_dataset(meshFileName) + fcMask = read_feature_collection(geojsonFileName) + dsMask = mpas_tools.conversion.mask(dsMesh=dsMesh, fcMask=fcMask, + logger=logger, dir=dir) + + write_netcdf_with_fill(dsMask, maskFileName) + else: + args = ['compute_mpas_transect_masks', + '-m', meshFileName, + '-g', geojsonFileName, + '-o', maskFileName, + '-t', 'edge', + '-s', '{}'.format(subdivisionThreshold), + '--chunk_size', '{}'.format(chunkSize), + '--process_count', '{}'.format(processCount), + '--add_edge_sign'] + check_call(args, logger=logger)
+ + + +
+[docs] +class ComputeTransectMasksSubtask(AnalysisTask): + """ + An analysis tasks for computing cell masks for transects defined by geojson + features + + Attributes + ---------- + aggregationFunction : callable + An aggregation function returned by + :py:func:`geometric_features.aggregation.get_region_by_name()` + + geojsonFileName : str + A geojson file, typically from the MPAS ``geometric_features`` + repository, defining the shapes to be masked + + outFileSuffix : str + The suffix for the resulting mask file + + maskFileName : str + The name of the output mask file + """ + # Authors + # ------- + # Xylar Asay-Davis + +
+[docs] + def __init__(self, parentTask, transectGroup, subprocessCount=None): + + """ + Construct the analysis task and adds it as a subtask of the + ``parentTask``. + + Parameters + ---------- + parentTask : ``AnalysisTask`` + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + transectGroup : str + The name of a transect group, see + :py:func:`mpas_analysis.shared.transects.get_transect_info()` + + subprocessCount : int, optional + The number of processes that can be used to make the mask, default + is as many processes as allowed + """ + # Authors + # ------- + # Xylar Asay-Davis + + subtaskName = transectGroup.replace(' ', '') + + # call the constructor from the base class (AnalysisTask) + super(ComputeTransectMasksSubtask, self).__init__( + config=parentTask.config, + taskName=parentTask.taskName, + subtaskName=subtaskName, + componentName=parentTask.componentName, + tags=[]) + + if subprocessCount is None: + self.subprocessCount = self.config.getint( + 'execute', 'parallelTaskCount') + else: + self.subprocessCount = subprocessCount + + self.obsFileName = None + self.maskSubdirectory = None + self.maskFileName = None + self.transectGroup = transectGroup + + self.aggregationFunction, prefix, date = \ + get_aggregator_by_name(self.transectGroup) + self.outFileSuffix = '{}{}'.format(prefix, date) + self.geojsonFileName = \ + get_region_mask(self.config, + '{}.geojson'.format(self.outFileSuffix))
+ + + def make_transect_mask(self): + """ + If the geojson mask file has not already been cached in the diagnostics + or custom diagnostic directories, it will be created in the analysis + output's masks directory. + """ + function = self.aggregationFunction + filename = self.geojsonFileName + if not os.path.exists(filename): + gf = GeometricFeatures() + fc = function(gf) + fc.to_geojson(filename) + + def expand_transect_names(self, transectNames): + """ + If ``transectNames`` contains ``'all'``, make sure the geojson file + exists and then return all the transect names found in the file. + + Parameters + ---------- + transectNames : list + A list of transect names + + Returns + ------- + transectNames : list + A list of transect names + """ + if 'all' in transectNames: + self.make_transect_mask() + transectNames = get_feature_list(self.geojsonFileName) + return transectNames + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + IOError : + If a restart file is not available from which to read mesh + information or if no history files are available from which to + compute the climatology in the desired time range. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super(ComputeTransectMasksSubtask, self).setup_and_check() + + try: + self.obsFileName = self.runStreams.readpath('restart')[0] + except ValueError: + raise IOError('No MPAS restart file found: need at least one ' + 'restart file to perform region masking.') + + self.maskSubdirectory = build_config_full_path(self.config, 'output', + 'maskSubdirectory') + make_directories(self.maskSubdirectory) + + # first, see if we have cached a mask file name in the region masks + # directory + + meshName = self.config.get('input', 'mpasMeshName') + + self.maskFileName = get_region_mask( + self.config, '{}_{}.nc'.format(meshName, self.outFileSuffix)) + + if not os.path.exists(self.maskFileName): + # no cached mask file, so let's see if there's already one in the + # masks subfolder of the output directory + + self.maskFileName = '{}/{}_{}.nc'.format(self.maskSubdirectory, + meshName, + self.outFileSuffix) + + if os.path.exists(self.maskFileName): + # nothing to do so don't block a bunch of other processes + self.subprocessCount = 1 + + def run_task(self): + """ + Compute the requested climatologies + """ + # Authors + # ------- + # Xylar Asay-Davis + + if os.path.exists(self.maskFileName): + return + + # make the geojson file if it doesn't exist + self.make_transect_mask() + + compute_mpas_transect_masks( + self.geojsonFileName, self.obsFileName, self.maskFileName, + logger=self.logger, processCount=self.subprocessCount, + dir=self.maskSubdirectory)
+ + +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/authors.rst.txt b/1.11.0rc1/_sources/authors.rst.txt new file mode 100644 index 000000000..6bc59ac82 --- /dev/null +++ b/1.11.0rc1/_sources/authors.rst.txt @@ -0,0 +1,24 @@ +Main Authors +============ +* Xylar Asay-Davis +* Milena Veneziani +* Phillip J. Wolfram + +Contributors +============ +* Sterling Baldwin +* Riley X. Brady +* Darin Comeau +* Charles Doutriaux +* Jeremy Fyke +* Matthew Hoffman +* Joseph Kennedy +* Mark Petersen +* Stephen Price +* Kevin Rosa +* Greg Streletz +* Adrian Turner +* Luke Van Roekel + +For a list of all the contributions: +https://github.com/MPAS-Dev/MPAS-Analysis/graphs/contributors diff --git a/1.11.0rc1/_sources/design_docs/analysis_task_template.rst.txt b/1.11.0rc1/_sources/design_docs/analysis_task_template.rst.txt new file mode 100644 index 000000000..9e2e00a3d --- /dev/null +++ b/1.11.0rc1/_sources/design_docs/analysis_task_template.rst.txt @@ -0,0 +1,844 @@ +.. role:: raw-html-m2r(raw) + :format: html + + +Analysis Task Template +====================== + +:raw-html-m2r:`

+Xylar Asay-Davis
+date: 2017/03/08
+

` + + +.. raw:: html + +

Summary

+ + +A new template python file for analysis tasks will be added to the repository. +The template will include a list of functions that each analysis task *must* +implement and example syntax for docstrings used to commend both the full +analysis task and the individual functions. + +The existing analysis tasks will be updated to be consistent with this template. +The ``run_analysis.py`` driver script will also be updated to work with the template. + +The template is needed to: + + +#. serve as a starting point for writing new analysis tasks +#. ensure that tasks implement a standard set of + functions, making it easier to perform actions (such as checking whether + the task should be run, checking for required model and observations files, + purging files from a previous analysis run, and running the analysis) on + each analysis task in sequence (and, in the future, in parallel) +#. demonstrate the syntax and style of docstrings required to comment/document + each task and each function + + +.. raw:: html + +

Requirements

+ + + +.. raw:: html + +

Requirement: Template for Analysis Tasks
+ Date last modified: 2017/03/08
+ Contributors: Xylar Asay-Davis +

+ + +The template should include each function that each analysis task *must* implement +and example docstring both for the task as a whole and for each function. + + +.. raw:: html + +

Requirement: Validation within Analysis Tasks
+ Date last modified: 2017/03/08
+ Contributors: Xylar Asay-Davis +

+ + +Validation, such as checking config options or adding new ones if they are missing, +or checking if required data files are present, should be performed within a +function in each task (rather than in ``run_analysis.py``\ , as is sometimes the current +case). + + +.. raw:: html + +

Requirement: Analysis Continues even when Analysis Task Fails
+ Date last modified: 2017/03/08
+ Contributors: Xylar Asay-Davis +

+ + +If validation fails, an error message should be printed but other analysis +tasks should be allowed to run. Similarly, if a given analysis task raises +an exception, the error and stack trace should be printed but other analysis +tasks should still be run. + + +.. raw:: html + +

Requirement: List of Tasks to Perform
+ Date last modified: 2017/03/16
+ Contributors: Xylar Asay-Davis +

+ + +There should be a single place where new tasks are added to ``run_analysis.py``\ , as +is presently the case. Yet, there should be a way to create a list of tasks to be +performed and later determine whether, when and how those tasks are to be run. +This capability also allows for operations like purging files from a previous run +to be added in the future. The capability is also required to allow for later task +parallelism. Currently, a task module is imported, there is a check to see if that +task should be run, and the task is performed in immediate sequence. + + +.. raw:: html + +

Algorithmic Formulations (optional)

+ + + +.. raw:: html + +

Design solution: Template for Analysis Tasks
+ Date last modified: 2017/03/16
+ Contributors: Xylar Asay-Davis +

+ + +A base class, ``AnalysisTask`` will be added under ``shared/analysis_task.py``. +This class will include methods: + + +* ``__init__``\ : construct the task, including assigning variable and streams maps + (optional). +* ``setup_and_check``\ : performs common tasks to all analysis, such as reading + namelist and streams files +* ``run``\ : the base class version does nothing + +The template will show how to set up a child class that decends from ``AnalysisTask``. +It will show examples of: + + +* ``__init__``\ : construct the task, including assigning the ``taskName``\ , ``componentName`` + and ``categories`` of the analysis, and calling the base class's constructor. +* ``setup_and_check``\ : first, calls the base class' version of ``setup_and_check``\ , then, + determines if the configuration is valid for running this task (e.g. if + necessary files and config options are present) +* ``run``\ : runs the analysis task + +The template will be located at: + +.. code-block:: bash + + mpas_analysis/ + - analysis_task_template.py + +That is, it is the only file (other than ``__init__.py``\ ) in the base of the +``mpas_analysis`` directory, making it easy to find. This way, it will be the first +file most developers see when they look in ``mpas_analysis`` itself. + +A reference to the template as the starting point for new developers will be added +to the readme. + + +.. raw:: html + +

Design solution: Validation within Analysis Tasks
+ Date last modified: 2017/03/16
+ Contributors: Xylar Asay-Davis +

+ + +The ``setup_and_check`` method within each analysis task can be used to determine if +necessary input files are present and/or if config options are set as expected. +The template will provide examples of doing this. + +Existing checks for missing observations files in ``run_analysis.py`` will be +moved to individual analyses. This will make clearer which checks correspond +with which analysis tasks and will make clearer where such checks should be added +within future analysis tasks. Similarly, the addition of the ``startDate`` and +``endDate`` config options will be moved to the corresponding analysis tasks. + + +.. raw:: html + +

Design solution: Analysis Continues even when Analysis Task Fails
+ Date last modified: 2017/03/08
+ Contributors: Xylar Asay-Davis +

+ + +A try/except will be used around both ``setup_and_check`` and ``run`` calls to make sure +an error message and stack trace are printed, but execution will continue +for other tasks. + + +.. raw:: html + +

Design solution: List of Tasks to Perform
+ Date last modified: 2017/03/16
+ Contributors: Xylar Asay-Davis +

+ + +By having a common base class for all analysis tasks, +each task can be checked to see if it should be run based on +the ``generate`` command-line or config option. If so, its ``setup_and_check`` +function will be run to make sure the configuration is right (and will +print a warning if not). If ``setup_and_check`` passes, the analysis can be added +to a list of functions to be run. Later, a loop through the list +can be used to run each analysis. + +Some analysis tasks require extra arguments (e.g. the field to be +analyzed in the case of ``ocean.modelvsobs`` and the streams and variable +maps for all analysis tasks). These arguments will be passed to ``__init__`` +and stored as member variables that can later be accessed via ``self.``. + + +.. raw:: html + +

Design and Implementation

+ + +Implementation is in the branch: https://github.com/xylar/MPAS-Analysis/tree/analysis_task_template + + +.. raw:: html + +

Implementation: Template for Analysis Tasks
+ Date last modified: 2017/03/16
+ Contributors: Xylar Asay-Davis +

+ + +Here is the suggested base class ``AnalysisTask`` in full, intended to make discussion +of individual lines easier: + +.. code-block:: python + + """ + Defines the base class for analysis tasks. + + Authors + ------- + Xylar Asay-Davis + + Last Modified + ------------- + 03/16/2017 + """ + + from ..shared.io import NameList, StreamsFile + from ..shared.io.utility import build_config_full_path, make_directories + + + class AnalysisTask(object): # {{{ + """ + The base class for analysis tasks. + + Authors + ------- + Xylar Asay-Davis + + Last Modified + ------------- + 03/16/2017 + """ + def __init__(self, config, streamMap=None, variableMap=None): # {{{ + """ + Construct the analysis task. + + Individual tasks (children classes of this base class) should first + call this method to perform basic initialization, then, define the + `taskName`, `componentName` and list of `categories` for the task. + + Parameters + ---------- + config : instance of MpasAnalysisConfigParser + Contains configuration options + + streamMap : dict, optional + A dictionary of MPAS-O stream names that map to their mpas_analysis + counterparts. + + variableMap : dict, optional + A dictionary of MPAS-O variable names that map to their + mpas_analysis counterparts. + + Authors + ------- + Xylar Asay-Davis + + Last Modified + ------------- + 03/16/2017 + """ + self.config = config + self.streamMap = streamMap + self.variableMap = variableMap # }}} + + def setup_and_check(self): # {{{ + """ + Perform steps to set up the analysis (e.g. reading namelists and + streams files). 
+ + After this call, the following member variables are set: + self.inDirectory : the base input directory + self.plotsDirectory : the directory for writing plots (which is + also created if it doesn't exist) + self.namelist : the namelist reader + self.streams : the streams file reader + self.calendar : the name of the calendar ('gregorian' or + 'gregoraian_noleap') + + Individual tasks (children classes of this base class) should first + call this method to perform basic setup, then, check whether the + configuration is correct for a given analysis and perform additional, + analysis-specific setup. For example, this function could check if + necessary observations and other data files are found, then, determine + the list of files to be read when the analysis is run. + + Authors + ------- + Xylar Asay-Davis + + Last Modified + ------------- + 03/16/2017 + """ + # read parameters from config file + self.inDirectory = self.config.get('input', 'baseDirectory') + self.plotsDirectory = build_config_full_path(self.config, 'output', + 'plotsSubdirectory') + namelistFileName = self.config.get('input', 'oceanNamelistFileName') + self.namelist = NameList(namelistFileName, path=self.inDirectory) + + streamsFileName = self.config.get('input', 'oceanStreamsFileName') + self.streams = StreamsFile(streamsFileName, + streamsdir=self.inDirectory) + + self.calendar = self.namelist.get('config_calendar_type') + + make_directories(self.plotsDirectory) + # }}} + + def run(self): # {{{ + """ + Runs the analysis task. + + Individual tasks (children classes of this base class) should first + call this method to perform any common steps in an analysis task, + then, perform the steps required to run the analysis task. 
+ + Authors + ------- + Xylar Asay-Davis + + Last Modified + ------------- + 03/16/2017 + """ + return # }}} + + def check_generate(self): + # {{{ + """ + Determines if this analysis should be generated, based on the + `generate` config option and `taskName`, `componentName` and + `categories`. + + Individual tasks do not need to create their own versions of this + function. + + Returns + ------- + generate : bool + Whether or not this task should be run. + + Raises + ------ + ValueError : If one of `self.taskName`, `self.componentName` + or `self.categories` has not been set. + + Authors + ------- + Xylar Asay-Davis + + Last Modified + ------------- + 03/16/2017s + """ + + for memberName in ['taskName', 'componentName', 'categories']: + if not hasattr(self, memberName): + raise ValueError('Analysis tasks must define self.{} in their ' + '__init__ method.'.format(memberName)) + + if (not isinstance(self.categories, list) and + self.categories is not None): + raise ValueError('Analysis tasks\'s member self.categories ' + 'must be NOne or a list of strings.') + + config = self.config + generateList = config.getExpression('output', 'generate') + generate = False + for element in generateList: + if '_' in element: + (prefix, suffix) = element.split('_', 1) + else: + prefix = element + suffix = None + + allSuffixes = [self.componentName] + if self.categories is not None: + allSuffixes = allSuffixes + self.categories + noSuffixes = [self.taskName] + allSuffixes + if prefix == 'all': + if (suffix in allSuffixes) or (suffix is None): + generate = True + elif prefix == 'no': + if suffix in noSuffixes: + generate = False + elif element == self.taskName: + generate = True + + return generate # }}} + # }}} + + # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python + +And here is the suggested template in full: + +.. code-block:: python + + """ + This is an example analysis task to be used as a template for new tasks. 
+ It should be copied into one of the component folders (`ocean`, `sea_ice`, + `land_ice`, etc.) and modified as needed. + + Don't forget to remove this docstring. (It's not needed.) + + Authors + ------- + Xylar Asay-Davis + + Last Modified + ------------- + 03/16/2017 + """ + + # import python modules here + + # import mpas_analysis module here (those with relative paths starting with + # dots) + from ..shared.analysis_task import AnalysisTask + + + class MyTask(AnalysisTask): # {{{ + """ + + + Authors + ------- + + + Last Modified + ------------- + + """ + def __init__(self, config, streamMap=None, variableMap=None, + myArg='myDefaultValue'): # {{{ + """ + Construct the analysis task. + + Parameters + ---------- + config : instance of MpasAnalysisConfigParser + Contains configuration options + + streamMap : dict, optional + A dictionary of MPAS-O stream names that map to their mpas_analysis + counterparts. + + variableMap : dict, optional + A dictionary of MPAS-O variable names that map to their + mpas_analysis counterparts. + + myNewArg : str, optional + + + Authors + ------- + + + Last Modified + ------------- + + """ + # first, call the constructor from the base class (AnalysisTask) + super(MyTask, self).__init__(config, streamMap, variableMap).__init__(config, streamMap, variableMap) + + # next, name the task, the component (ocean, sea_ice, etc.) and the + # categories (if any) of the component ('timeSeries', 'climatologyMap' + # etc.) + self.taskName = 'myTask' + self.componentName = 'component' + self.categories = ['category1', 'category2'] + + # then, store any additional arguments for use later on. These would + # likely include things like the name of a field, region, month, + # season, etc. 
to be analyzed so that the same subclass of AnalysisTask + # can perform several different tasks (potentially in parallel) + self.myArg = myArg + # }}} + + def setup_and_check(self): # {{{ + """ + Perform steps to set up the analysis and check for errors in the setup. + + Raises + ------ + ValueError: if myArg has an invalid value + + Authors + ------- + + + Last Modified + ------------- + + """ + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.inDirectory, self.plotsDirectory, self.namelist, self.streams + # self.calendar + super(MyTask, self).__init__(config, streamMap, variableMap).setup_and_check() + + # then, perform additional checks specific to this analysis + possibleArgs = ['blah', 'thing', 'stuff'] + if self.myArg not in possibleArgs: + # Note: we're going to allow a long line in this case because it + # would be confusing to break up the string (even though it + # violates the PEP8 standard) + raise ValueError('MyTask must be constructed with argument myArg having one of the values\n' + '{}.'.format(possibleArgs)) + + section = 'MyTask' + startDate = '{:04d}-01-01_00:00:00'.format( + self.config.getint(section, 'startYear')) + if not self.config.has_option(section, 'startDate'): + self.config.set(section, 'startDate', startDate) + endDate = '{:04d}-12-31_23:59:59'.format( + self.config.getint(section, 'endYear')) + if not self.config.has_option(section, 'endDate'): + self.config.set(section, 'endDate', endDate) + + # }}} + + def run(self): # {{{ + """ + Runs the analysis task. + + Individual tasks (children classes of this base class) should first + call this method to perform any common steps in an analysis task, + then, perform the steps required to run the analysis task. 
+ + Authors + ------- + + + Last Modified + ------------- + + """ + + # here is where the main "meat" of the analysis task goes + + self._my_sub_task('someText', arg2='differentText') + return + # }}} + + # here is where you add helper methods that are meant to be non-public + # (they start with an underscore), meaning you don't expect anyone to + # access them outside of this file. Typically you won't put as much in + # the docstring as you would for a public function or method. + # + # you can either pass arguments (with or without defaults) or you can + # "save" arguments as member variables of `self` and then get them back + # (like `self.myArg` here). + def _my_sub_task(self, arg1, arg2=None): # {{{ + """ + + """ + + # perform the task + print 'myArg:', self.myArg + print 'arg1:', arg1 + if arg2 is not None: + print 'arg2:', arg2 + # }}} + + # }}} + + # vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python + + +.. raw:: html + +

Implementation: Validation within Analysis Tasks
+ Date last modified: 2017/03/08
+ Contributors: Xylar Asay-Davis +

+ + +Here is an example (from ``ocean.climatology_map.ClimatologyMap``\ ) of what +the new ``__init__`` and ``setup_and_check`` methods : + +.. code-block:: python + + def __init__(self, config, streamMap=None, variableMap=None, + fieldName=None): # {{{ + """ + Construct the analysis task. + + Parameters + ---------- + config : instance of MpasAnalysisConfigParser + Contains configuration options + + streamMap : dict, optional + A dictionary of MPAS-O stream names that map to their mpas_analysis + counterparts. + + variableMap : dict, optional + A dictionary of MPAS-O variable names that map to their + mpas_analysis counterparts. + + fieldName : {'sst', 'mld', 'sss'} + The name of the field to be analyzed + + Raises + ------ + ValueError : if `fieldName` is not provided or is not one of the + supported values + + Authors + ------- + Xylar Asay-Davis + + Last Modified + ------------- + 03/16/2017 + """ + # first, call the constructor from the base class (AnalysisTask) + AnalysisTask.__init__(config, streamMap, variableMap) + + upperFieldNames = {'sst': 'SST', + 'mld': 'MLD', + 'sss': 'SSS' + # 'nino34': 'Nino34', + # 'mht': 'MHT' + # 'moc': 'MOC' + } + + if fieldName is None: + raise ValueError('fieldName must be supplied.') + if fieldName not in upperFieldNames.keys(): + raise ValueError('fieldName must be one of {}.'.format( + upperFieldNames.keys())) + + self.fieldName = fieldName + self.upperFieldName = upperFieldNames[fieldName] + + # name the task, component and category + self.taskName = 'climatologyMap{}'.format(self.upperFieldName) + self.componentName = 'ocean' + self.categories = ['climatologyMap', fieldName] + + # }}} + + def setup_and_check(self): # {{{ + """ + Perform steps to set up the analysis and check for errors in the setup. 
+ + Raises + ------ + OSError + If files are not present + + Authors + ------- + Xylar Asay-Davis + + Last Modified + ------------- + 03/16/2017 + """ + config = self.config + section = 'climatology' + startDate = '{:04d}-01-01_00:00:00'.format( + config.getint(section, 'startYear')) + if not config.has_option(section, 'startDate'): + config.set(section, 'startDate', startDate) + endDate = '{:04d}-12-31_23:59:59'.format( + config.getint(section, 'endYear')) + if not config.has_option(section, 'endDate'): + config.set(section, 'endDate', endDate) + + return # }}} + +Much of this code has been taken out of ``run_analysis.py``\ , simplifying and clarifying +the code. + + +.. raw:: html + +

Implementation: Analysis Continues even when Analysis Task Fails
+ Date last modified: 2017/03/16
+ Contributors: Xylar Asay-Davis +

+ + +Calls to ``check`` and ``run`` methods in ``run_analysis.py`` are inside of +``try/except`` blocks, which catch the exceptions and print the stack trace +but don't cause the code to exit. + +.. code-block:: python + + try: + analysisTask.check() + ... + except: + traceback.print_exc(file=sys.stdout) + print "ERROR: analysis module {} failed during check and " \ + "will not be run".format(analysisTask.taskName) + + ... + + try: + analysisModule.run() + except: + traceback.print_exc(file=sys.stdout) + print "ERROR: analysis module {} failed during run".format( + analysisTask.taskName) + + +.. raw:: html + +

Implementation: List of Tasks to Perform
+ Date last modified: 2017/03/16
+ Contributors: Xylar Asay-Davis +

+ + +The tasks are imported and added to an anaysis list as follows: + +.. code-block:: python + + analyses = [] + + # Ocean Analyses + from mpas_analysis.ocean.time_series_ohc import TimeSeriesOHC + analyses.append(TimeSeriesOHC(config, streamMap=oceanStreamMap, + variableMap=oceanVariableMap)) + from mpas_analysis.ocean.time_series_sst import TimeSeriesSST + analyses.append(TimeSeriesSST(config, streamMap=oceanStreamMap, + variableMap=oceanVariableMap)) + + from mpas_analysis.ocean.climatology_map import ClimatologyMap \ + as ClimatologyMapOcean + for fieldName in ['sst', 'mld', 'sss']: + analyses.append(ClimatologyMapOcean(config, streamMap=oceanStreamMap, + variableMap=oceanVariableMap, + fieldName=fieldName)) + + # Sea Ice Analyses + from mpas_analysis.sea_ice.timeseries import TimeSeries as TimeSeriesSeaIce + analyses.append(TimeSeriesSeaIce(config, streamMap=seaIceStreamMap, + variableMap=seaIceVariableMap)) + from mpas_analysis.sea_ice.climatology_map import ClimatologyMap \ + as ClimatologyMapSeaIce + analyses.append(ClimatologyMapSeaIce(config, streamMap=seaIceStreamMap, + variableMap=seaIceVariableMap)) + +The ``analyses`` list is a list of instances of subclasses of ``AnalysisTask``. + +Subsequent calls to analysis functions can loop over analyses, as in the following +example for calling ``run``\ : + +.. code-block:: python + + # run each analysis task + for analysisTask in analyses: + try: + analysisTask.run() + except: + traceback.print_exc(file=sys.stdout) + print "ERROR: analysis module {} failed during run".format( + analysisTask.taskName) + +:raw-html-m2r:`

Testing

` + + +.. raw:: html + +

Testing and Validation: Template for Analysis Tasks
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +Ideally, the test here would be having another developer create an analysis task based on this +template. Realistically, this won't happen before the template gets merged into the repository, +so I'm counting on feedback from other developers to "test" the template before it gets merged, +and there will probably need to be subsequent PRs to make changes as issues arise. + + +.. raw:: html + +

Testing and Validation: Validation within Analysis Tasks
+ Date last modified: 2017/03/16
+ Contributors: Xylar Asay-Davis +

+ + +I have added ``setup_and_check`` functions within each analysis task. So far, these check for only a subset of +the necessary configuration and input files, and could (and should) be expanded in the future. + +I have verified that all ``setup_and_check`` routines fail when the path to their respective observations and/or +preprocessed reference run is not found. + + +.. raw:: html + +

Testing and Validation: Analysis Continues even when Analysis Task Fails
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +I have verified using the ``GMPAS_QU240`` test case and by deliberately introducing errors in the file +paths that an error in a given analysis task (either during ``setup_and_check`` or ``run``\ ) causes that task to +print a stack trace and an error message but does not prevent other tasks from running. + + +.. raw:: html + +

Testing and Validation: List of Tasks to Perform
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +As stated in implementation, there is a single place in ``run_analysis.py`` where a developer would add +her or his task to the analysis. I think this requirement has been satisfied without requiring testing. diff --git a/1.11.0rc1/_sources/design_docs/config_file_reorganization.rst.txt b/1.11.0rc1/_sources/design_docs/config_file_reorganization.rst.txt new file mode 100644 index 000000000..a2fe0c39a --- /dev/null +++ b/1.11.0rc1/_sources/design_docs/config_file_reorganization.rst.txt @@ -0,0 +1,200 @@ + +Config File Reorganization +========================== + + +.. raw:: html + +

+ Xylar Asay-Davis
+ date: 2017/01/29
+

+

Summary

+ + This document describes various efforts to clean up the structure of the MPAS-Analysis config file. The idea is to create a template config file that will replace `config.analysis` as well as a number of example config files designed to make use of various MPAS and ACME runs on various machines. The reorganization should make the analysis easier for users to modify and run. + +

Requirements

+ + + +.. raw:: html + +

Requirement: a simple way of turning on and off individual analysis modules
+ Date last modified: 2017/01/29
+ Contributors: Xylar Asay-Davis +

+ + +There should be a simple, intuitive method for turning on and off individual analysis modules (e.g. ``ocean/ohc_timeseries``\ ). This should replace the current approach of having a boolean ``generate`` flag for each analysis module in a separate config section. Preferably, there should be an equivalent method for turning on and off analysis modules from the command line that overrides that in the config file. + + +.. raw:: html + +

Requirement: there should be a simplified template for config files
+ Date last modified: 2017/02/01
+ Contributors: Xylar Asay-Davis +

+ + +The current example config file should be made into a general template. Simplifications should be made to the template so that it can more easily and intuitively be modified for several analyses. Example config files should also be added for analyzing several existing runs on several different machines. + + +.. raw:: html + +

Requirement: removal of ACME specific config options
+ Date last modified: 2017/02/01
+ Contributors: Xylar Asay-Davis +

+ + +To the extent possible, ACME-specific config options such as ``casename`` and ``ref_casename_v0`` should be generalized in a way that is also appropriate not just ACME runs but also any other runs involving the MPAS components we support. + + +.. raw:: html + +

Requirement: consistent section and option names
+ Date last modified: 2017/02/01
+ Contributors: Xylar Asay-Davis +

+ + +A consistent convention of capitalization and underscores should be used throughout the config file. + + +.. raw:: html + +

Design and Implementation

+ + + +.. raw:: html + +

Implementation: a simple way of turning on and off individual analysis modules
+ Date last modified: 2017/02/02
+ Contributors: Xylar Asay-Davis +

+ + +Implementation of the ``config.template`` file can be found `here `_. + +The following comment describes the planned implementation in the config file. + +.. code-block:: ini + + # a list of analyses to generate. Valid names are: + # 'timeSeriesOHC', 'timeSeriesSST', 'regriddedSST', + # 'regriddedSSS', 'regriddedMLD', 'timeSeriesSeaIceAreaVol', + # 'regriddedSeaIceConcThick' + # the following shortcuts exist: + # 'all' -- all analyses will be run + # 'all_timeSeries' -- all time-series analyses will be run + # 'all_regriddedHorizontal' -- all analyses involving regridded horizontal + # fields will be run + # 'all_ocean' -- all ocean analyses will be run + # 'all_seaIce' -- all sea-ice analyses will be run + # 'no_timeSeriesOHC' -- skip 'timeSeriesOHC' (and similarly with the + # other analyses). + # 'no_ocean', 'no_timeSeries', etc. -- in analogy to 'all_*', skip the + # given category of analysis + # an equivalent syntax can be used on the command line to override this + # option: + # ./run_analysis.py config.analysis --generate \ + # all,no_ocean,all_timeSeries + generate = ['all'] + +Where there are conflicts between items in the ``generate`` list, successive items will override earlier items. For example, ``generate = ['all', 'no_timeSeriesOHC']`` will generate all analyses except ``timeSeriesOHC``. As another example, ``generate = ['all', 'no_ocean', 'all_timeSeries']`` would generate all diagnostics except those comparing ocean model results with observations (and previous model results). (Note that a more efficient and intuitive way to do the same would be ``generate = ['all_seaIce', 'all_timeSeries']``.) + +An analogous approach has also been added at the command line, for example: + +.. code-block:: bash + + ./run_analysis.py config.analysis --generate all,no_ocean,all_timeSeries + +If the ``--generate`` flag is used on the command line, it will replace the generate option in the config file. 
+ +As an aside, I note that it is not clear if future analysis modules will fit neatly into categories like "time series" and "regridded horizontal" fields, and these categories are not meant to be all-encompassing. + + +.. raw:: html + +

Implementation: there should be a simplified template for config files
+ Date last modified: 2017/01/29
+ Contributors: Xylar Asay-Davis +

+ + +The required ``config.template`` has been implemented in #86, specifically `here `_. A subdirectory ``configs`` will be added with several examples from runs on LANL IC and on Edison at NERSC. Other examples can be added as appropriate and useful. + + +.. raw:: html + +

Implementation: removal of ACME specific config options
+ Date last modified: 2017/02/01
+ Contributors: Xylar Asay-Davis +

+ + +``casename`` has been renamed ``mainRunName``\ , ``referenceRunName`` has been added for comparison with reference runs that have not been preprocessed (not yet supported), and ``ref_casename_v0`` has been renamed ``preprocessedReferenceRunName``. + +See #86, specifically `config.template `_. + + +.. raw:: html + +

Implementation: consistent section and option names
+ Date last modified: 2017/02/01
+ Contributors: Xylar Asay-Davis +

+ + +In `config.template `_ in #86, "\ `CamelCase `_\ " has been used for all sections and options. The first word is lowercase and subsequent words begin with an uppercase letter. Underscores have been removed (except in the syntax used to turn on and off options, where underscores in prefixes ``all_`` and ``no_`` make splitting and comparison simpler in the implementation). + + +.. raw:: html + +

Testing

+ + + +.. raw:: html + +

Testing and Validation: a simple way of turning on and off individual analysis modules
+ Date last modified: 2017/02/01
+ Contributors: Xylar Asay-Davis +

+ + +CI will be added to make sure that the function to parse the generate list (\ ``run_analysis.check_generate``\ ) behaves as expected. + + +.. raw:: html + +

Testing and Validation: there should be a simplified template for config files
+ Date last modified: 2017/01/29
+ Contributors: Xylar Asay-Davis +

+ + +There is not a way to test the template in the usual sense. Instead, the test will be asking other developers and users to adapt the template for new runs to make sure it is intuitive. + + +.. raw:: html + +

Testing and Validation: removal of ACME specific config options
+ Date last modified: 2017/01/29
+ Contributors: Xylar Asay-Davis +

+ + +For now, the plan is just to rename the appropriate config options, so the test is simply to ensure that analysis runs correctly and produces bit-for-bit identical images to those produced by the current ``MPAS-Analysis/develop``. + + +.. raw:: html + +

Testing and Validation: consistent section and option names
+ Date last modified: 2017/02/01
+ Contributors: Xylar Asay-Davis +

+ + +As above, the test is simply to ensure that analysis runs correctly and produces bit-for-bit identical images to those produced by the current ``MPAS-Analysis/develop``. diff --git a/1.11.0rc1/_sources/design_docs/eddykineticenergy.rst.txt b/1.11.0rc1/_sources/design_docs/eddykineticenergy.rst.txt new file mode 100644 index 000000000..4f59311a6 --- /dev/null +++ b/1.11.0rc1/_sources/design_docs/eddykineticenergy.rst.txt @@ -0,0 +1,119 @@ +.. role:: raw-html-m2r(raw) + :format: html + + +Eddy Kinetic Energy Climatology Mapping +======================================= + + +.. raw:: html + +

+ Kevin Rosa
+ date: 2018/06/18
+

+ + +Summary +------- + +The document describes a new feature which will be added to the MPAS-Analysis +tools package: visualization of surface Eddy Kinetic Energy (EKE). +The EKE climatology map will function very similarly to other climatological fields (e.g. SSH, SST, etc.). +The output file will contain three images: the modeled EKE climatology, the observed EKE climatology, and the difference. +Plotting EKE is particularly important for MPAS-O because one can configure meshes with eddy-permitting regions and would then want to compare the EKE in these regions against observations. + +Requirements +------------ + + +#. Model output must contain the meridional and zonal components of both ``timeMonthly_avg_velocity*`` and ``timeMonthly_avg_velocity*Squared``. +#. User can download the EKE observations data, via 1 of 2 methods: + + * Run ``./download_analysis_data.py -o /path/to/output/directory`` if they wish to download all observations data. + *or* + * Download only the EKE dataset at `https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/EKE/drifter_variance.nc `_ + +#. In config file... + + #. Specify ``ekeSubdirectory`` with location of EKE observations file. + #. Under ``[climatologyMapEKE]``\ , leave ``seasons = ['ANN']``. *Only annual observations are available currently.* + #. When setting ``generate``\ , task ``climatologyMapEKE`` has tags: ``climatology, horizontalMap, eke`` + +Physics +------- + +In the ocean, it is convenient to separate the horizontal current, *u*\ , +into its mean and eddy components: +(1) :raw-html-m2r:`` + +This approach separates the total kinetic energy into mean kinetic energy +(MKE) and eddy kinetic energy (EKE). + +The EKE over much of the ocean is at least an order of magnitude greater than +the MKE (Wyrtki, 1976). +This eddy energy is important for transporting momentum, heat, mass, and chemical +constituents of seawater (Robinson, 1983). 
+ +Algorithms +---------- + +Time mean of equation 1: :raw-html-m2r:`` + +The model outputs :raw-html-m2r:`` +and :raw-html-m2r:`` +while the observational dataset provides :raw-html-m2r:`` +so two different EKE equations must be used: + +(2) :raw-html-m2r:`` + +(3) :raw-html-m2r:`` + +Design and Implementation +------------------------- + +The primary design consideration for this feature is that it integrate +seamlessly with the rest of the analysis tools. +To this end, the sea surface temperature (SST) plotting tools will be used as a +template. + +Files to create: + + +* ``mpas_analysis/ocean/climatology_map_eke.py`` +* ``docs/tasks/climatologyMapEKE.rst`` +* ``README.md`` for ``drifter_variance.nc`` dataset + +Files to edit: + + +* ``mpas_analysis/ocean/__init__.py`` +* ``docs/analysis_tasks.rst`` +* ``docs/api.rst`` +* ``mpas_analysis/config.default`` +* ``mpas_analysis/obs/analysis_input_files`` + +The main challenge for plotting EKE is that EKE is a function of several model variables and is not itself a variable that is directly written by the model. +Because of this, the climatology mapping functions for SSH, SST, SSS, and MLD will not serve as a direct template for the EKE formulation in ``mpas_analysis/ocean/climatology_map_eke.py``. +I will try to follow the structure of ``mpas_analysis/ocean/compute_transects_with_vel_mag.py`` as much as possible. + +It appears that there is a method for plotting velocity magnitudes on the antarctic grid. Look into 'climatology_map_sose.py'... + +Testing +------- + +I will test runs of varying durations and resolutions to make sure the EKE plotting is working. I will also ensure that the following jobs fail: + + +#. Input model results files missing at least one of the 4 necessary velocity variables. +#. Request seasonal plots. +#. Test that ``./download_analysis_data.py`` downloads EKE data. + +Bibliography +------------ + + +* https://latex.codecogs.com/eqneditor/editor.php +* Chelton, D. B., Schlax, M. 
G., Samelson, R. M. & Szoeke, R. A. de. Global observations of large oceanic eddies. Geophysical Research Letters 34, (2007). +* Laurindo, L. C., Mariano, A. J. & Lumpkin, R. An improved near-surface velocity climatology for the global ocean from drifter observations. Deep Sea Research Part I: Oceanographic Research Papers 124, 73–92 (2017). +* Wyrtki, K., Magaard, L. & Hager, James. Eddy energy in the oceans. Journal of Geophysical Research 81, 2641–2646 diff --git a/1.11.0rc1/_sources/design_docs/generalize_calendar.rst.txt b/1.11.0rc1/_sources/design_docs/generalize_calendar.rst.txt new file mode 100644 index 000000000..bd23e6b3e --- /dev/null +++ b/1.11.0rc1/_sources/design_docs/generalize_calendar.rst.txt @@ -0,0 +1,356 @@ + +Generalize Calendar supported by Analysis +========================================= + + +.. raw:: html + +

+ Xylar Asay-Davis
+ date: 2017/02/09
+

+

Summary

+ + Currently, the time variable in `xarray` data sets within MPAS-Analysis has two + major shortcomings, inherited from `xarray` (through `pandas` and `numpy.datetime64`). + First, only the Gregorian calendar is supported. Second, there is no support + for dates outside the years 1678 to 2262. The analysis needs to support both + the Gregorian ('gregorian') and the 365-day ('gregorian_noleap') calendars. It also needs to + support, at a minimum, years between 0001 and 9999, and preferably arbitrary + years both positive and negative. + + A major challenge is that it seems that xarray cannot easily be forced to + use an alternative representation of dates to the troublesome + `numpy.datetime64` type (see, for example, + [pydata/xarray#1084](https://github.com/pydata/xarray/issues/1084)). + The most obvious alternative, `datetime.datetime`, + seemingly cannot be used directly in `xarray` because objects of this type + are converted to `numpy.datetime64` objects at various stages when using + features from pandas, raising errors when dates are out of range. While an + alternative date class (e.g. `netcdftime.DatetimeNoLeap`) might be used to + represent dates on the 'gregorian_noleap' calendar, there is no such + preexisting alternative for the 'gregorian' calendar. + + The solution proposed herein is to store time as floating-point days since the + reference date 0001-01-01 and to convert dates in this format to + `datetime.datetime` and `MpasRelativeDelta` objects whenever mathematical + manipulation of dates is required. + + A successful implementation would produce essentially identical analysis to + what is currently produced, but making use of the dates from the MPAS calendar + (whether Gregorian or 365-day) without the need for artificial offsets (e.g. + `yearOffset` used in the current code). Plots of horizontal fields would remain + unchanged while plots of time series would have a time axis with the simulation + date instead of the offset date. + + +

Requirements

+ + + +.. raw:: html + +

Requirement: The 'Time' coordinate of xarray data sets must be consistent + with the MPAS calendar
+ Date last modified: 2017/02/09
+ Contributors: Xylar Asay-Davis +

+ + +For all data sets used in the analysis, the 'Time' coordinate must represent dates +on the appropriate MPAS calendar, either 'gregorian' or 'gregorian_noleap', depending +on the namelist option 'config_calendar_type'. There must be ways of mathematically +manipulating times (e.g. adding/subtracting offsets and figuring out the amount of time +between two dates) and of making plots that are consistent with these calendars. + + +.. raw:: html + +

Requirement: The 'Time' coordinate of xarray data sets must support at least years + 0001 and 9999, and preferably any conceivable value
+ Date last modified: 2017/02/16
+ Contributors: Xylar Asay-Davis +

+ + +For all data sets used in the analysis, the 'Time' coordinate must, at a minimum, +support years between 0001 and 9999 (the range of ``datetime.datetime``\ ) and preferably +a broader range. + + +.. raw:: html + +

Algorithmic Formulations (optional)

+ + + +.. raw:: html + +

Design solution: The 'Time' coordinate of xarray data sets must be consistent + with the MPAS calendar
+ Date last modified: 2017/02/11
+ Contributors: Xylar Asay-Davis, Phillip J. Wolfram +

+ + +The proposed solution represents time in ``xarray.DataSet`` objects as the number of +days since the reference date 0001-01-01. +This is reasonable because the smallest unit of time output in MPAS components is +seconds (and unlikely to ever be shorter than ms). We note that a date specified +as a 64-bit float has a precision high enough to represent seconds for dates up +to +/- 100 million years: + +.. code-block:: python + + >>> import sys + >>> 1./(sys.float_info.epsilon*365*24*60*60) + 142808207.36207813 + +We should have no trouble representing any number we might want (including paleo +timescales) with this system. + +For purposes of performing mathematical operations and plotting dates, these +values will be converted to ``datetime.datetime`` objects (via the proposed +``days_to_datetime`` utility function) and back (via the proposed +``datetime_to_days``\ ). + +The conversion operations within ``datetime_to_days`` and ``days_to_datetime`` will be +performed with the calendar-aware functions ``netCDF4.date2num`` and +``netCDF4.num2date``\ , respectively. Both functions will support lists/arrays of dates +(for efficiency and simplicity of calling code) in addition to single values. + +Curve plotting can be supported with ``matplotlib.pyplot.plot_date``\ , which takes a date +of exactly the format used here (days since 0001-01-01). The compatibility with ``plot_date`` +was part of the reason for choosing this format for the date. + + +.. raw:: html + +

Design solution: The 'Time' coordinate of xarray data sets must support at least years + 0001 and 9999, and preferably any conceivable value
+ Date last modified: 2017/02/09
+ Contributors: Xylar Asay-Davis +

+ + +Same as above. In theory, the use of days since 0001-01-01 would allow any year +to be supported, not just the range from 0001 to 9999. However, the conversions +to ``datetime.datetime`` objects for mathematical manipulation will constrain +the dates to be between ``datetime.min`` (0001-01-01) and ``datetime.max`` (9999-12-31). + + +.. raw:: html + +

Design and Implementation

+ + + +.. raw:: html + +

Implementation: The 'Time' coordinate of xarray data sets must be consistent + with the MPAS calendar
+ Date last modified: 2017/02/16
+ Contributors: Xylar Asay-Davis +

+ + +The proposed implementation is on the branch +`xylar/generalize_calendar `_ + +A helper function, ``mpas_xarray._parse_dataset_time``\ , computes times as days since +0001-01-01, and serves as a replacement for ``mpas_xarray._get_datetimes``. + +**Note: the current implementation breaks the convention that ``mpas_xarray`` remains +separate from the rest of MPAS-Analysis by using 3 functions from ``timekeeping.utility`` +in ``mpas_xarray``\ :** + +.. code-block:: python + + from ..timekeeping.utility import string_to_days_since_date, \ + days_to_datetime, datetime_to_days + +**This violates the first requirement in the +`Design Document: Moving variable mapping out of mpas_xarray `_. +I am open to alternative solutions for keeping ``mpas_xarray`` separate from the rest +of analysis but these 3 functions do not conceptually belong in ``mpas_xarray``. The +problem is exacerbated by the fact that there are analysis-specific functions in +``timekeeping``\ , meaning that this cannot easily be made a submodule of ``mpas_xarray`` +(nor would this make very much logical sense). Having 2 ``timekeeping`` modules, one +for ``mpas_xarray`` and one for MPAS-Analysis, seems unnecessarily confusing.** + +The functions ``generalized_reader.open_multifile_dataset`` and +``mpas_xarray.open_multifile_dataset`` have been updated to use this method for parsing +times. This involves removing the ``year_offset`` argument and adding an optional +``simulation_start_time`` argument for supplying a date to use to convert variables +like ``daysSinceStartOfSim`` to days since 0001-01-01. + +An example of opening a data set and manipulating times with the new approach in +the OHC script is: + +.. code-block:: python + + from ..shared.timekeeping.utility import get_simulation_start_time, \ + date_to_days, days_to_datetime, string_to_datetime + ... + def ohc_timeseries(config, streamMap=None, variableMap=None): + ... + simulationStartTime = get_simulation_start_time(streams) + ... 
+ ds = open_multifile_dataset(file_names=file_names, + calendar=calendar, + simulation_start_time=simulation_start_time, + time_variable_name='Time', + variable_list=variable_list, + variable_map=variableMap, + start_date=startDate, + end_date=endDate) + + timeStart = string_to_datetime(startDate) + timeEnd = string_to_datetime(endDate) + + # Select year-1 data and average it (for later computing anomalies) + timeStartFirstYear = string_to_datetime(simulation_start_time) + if timeStartFirstYear < timeStart: + startDateFirstYear = simulation_start_time + firstYear = int(startDateFirstYear[0:4]) + endDateFirstYear = '{:04d}-12-31_23:59:59'.format(firstYear) + filesFirstYear = streams.readpath(streamName, + startDate=startDateFirstYear, + endDate=endDateFirstYear, + calendar=calendar) + dsFirstYear = open_multifile_dataset( + file_names=filesFirstYear, + calendar=calendar, + simulation_start_time=simulation_start_time, + time_variable_name='Time', + variable_list=variable_list, + variable_map=variableMap, + start_date=startDateFirstYear, + end_date=endDateFirstYear) + else: + dsFirstYear = ds + firstYear = timeStart.year + + timeStartFirstYear = date_to_days(year=firstYear, month=1, day=1, + calendar=calendar) + timeEndFirstYear = date_to_days(year=firstYear, month=12, day=31, + hour=23, minute=59, second=59, + calendar=calendar) + + dsFirstYear = dsFirstYear.sel(Time=slice(timeStartFirstYear, + timeEndFirstYear)) + + meanFirstYear = dsFirstYear.mean('Time') + ... + yearStart = days_to_datetime(ds.Time.min()).year + yearEnd = days_to_datetime(ds.Time.max()).year + timeStart = date_to_days(year=yearStart, month=1, day=1, + calendar=calendar) + timeEnd = date_to_days(year=yearEnd, month=12, day=31, + calendar=calendar) + + if preprocessedReferenceRunName != 'None': + print ' Load in OHC from preprocessed reference run...' 
+ inFilesPreprocessed = '{}/OHC.{}.year*.nc'.format( + preprocessedInputDirectory, preprocessedReferenceRunName) + dsPreprocessed = open_multifile_dataset( + file_names=inFilesPreprocessed, + calendar=calendar, + simulation_start_time=simulation_start_time, + time_variable_name='xtime') + yearEndPreprocessed = days_to_datetime(dsPreprocessed.Time.max()).year + ... + +The ``replicate_cycles`` function in ``sea_ice.timeseries`` has been a particular +challenge with the existing calendar. Here is that function with the new 'Time' +coordinate: + +.. code-block:: python + + def replicate_cycle(ds, dsToReplicate, calendar): + dsStartTime = days_to_datetime(ds.Time.min(), calendar=calendar) + dsEndTime = days_to_datetime(ds.Time.max(), calendar=calendar) + repStartTime = days_to_datetime(dsToReplicate.Time.min(), + calendar=calendar) + repEndTime = days_to_datetime(dsToReplicate.Time.max(), + calendar=calendar) + + repSecondTime = days_to_datetime(dsToReplicate.Time.isel(Time=1), + calendar=calendar) + + period = (MpasRelativeDelta(repEndTime, repStartTime) + + MpasRelativeDelta(repSecondTime, repStartTime)) + + startIndex = 0 + while(dsStartTime > repStartTime + (startIndex+1)*period): + startIndex += 1 + + endIndex = 0 + while(dsEndTime > repEndTime + (endIndex+1)*period): + endIndex += 1 + + dsShift = dsToReplicate.copy() + + times = days_to_datetime(dsShift.Time, calendar=calendar) + dsShift.coords['Time'] = ('Time', + datetime_to_days(times + startIndex*period, + calendar=calendar)) + # replicate cycle: + for cycleIndex in range(startIndex, endIndex): + dsNew = dsToReplicate.copy() + dsNew.coords['Time'] = ('Time', + datetime_to_days(times + (cycleIndex+1)*period, + calendar=calendar)) + dsShift = xr.concat([dsShift, dsNew], dim='Time') + + return dsShift + + +.. raw:: html + +

Implementation: The 'Time' coordinate of xarray data sets must support at least years + 0001 and 9999, and preferably any conceivable value
+ Date last modified: 2017/02/09
+ Contributors: Xylar Asay-Davis +

+ + +Same as above. + + +.. raw:: html + +

Testing

+ + + +.. raw:: html + +

Testing and Validation: The 'Time' coordinate of xarray data sets must be consistent + with the MPAS calendar
+ Date last modified: 2017/02/11
+ Contributors: Xylar Asay-Davis +

+ In [xylar/generalize_calendar](https://github.com/xylar/MPAS-Analysis/tree/generalize_calendar), + unit testing has been added for `timekeeping` and `mpas_xarray` that checks both the `gregorian` + and `gregorian_noleap` calendars under simple test conditions. However, we have no data sets + that test `gregorian`, so we have a somewhat limited ability to test this calendar option. + Fortunately, there are also no immediate plans to run with `gregorian`. + + I will make sure all tests with config files in the `configs/lanl` and `configs/edison` + directories produce bit-for-bit results with the current `develop`. + +

Testing and Validation: The 'Time' coordinate of xarray data sets must support at least years + 0001 and 9999, and preferably any conceivable value
+ Date last modified: 2017/02/11
+ Contributors: Xylar Asay-Davis +

+ + +Unit tests have been added to ensure that dates both close to 0001-01-01 and typical +calendar dates (e.g. 2017-01-01) function as expected. + +@akturner's MPAS-SeaIce run with real dates (mentioned in +`#81 `_\ ) has been successfully +run with the proposed approach. This run started in 1958, and had presented a problem +for MPAS-Analysis with the previous calendar. diff --git a/1.11.0rc1/_sources/design_docs/generalized_horizontal_interpolation.rst.txt b/1.11.0rc1/_sources/design_docs/generalized_horizontal_interpolation.rst.txt new file mode 100644 index 000000000..84244034b --- /dev/null +++ b/1.11.0rc1/_sources/design_docs/generalized_horizontal_interpolation.rst.txt @@ -0,0 +1,230 @@ +.. role:: raw-html-m2r(raw) + :format: html + + +Generalized Horizontal Interpolation in MPAS-Analysis +===================================================== + + +.. raw:: html + +

+ Xylar Asay-Davis
+ date: 2017/01/19
+

+ + +Horizontal interpolation and related utilities for remapping are now in the `pyremap repo `_. + + +.. raw:: html + +

Summary

+ + +Currently, MPAS-Analysis uses various methods to perform horizontal interpolation. For constructing ocean climatologies, nearest-neighbor interpolation is used, while for sea-ice climatologies, ``ncremap`` is used with the requirement that a mapping file for the appropriate source and destination grids is provided through the config file. This project intends to move MPAS-Analysis to a unified approach to horizontal interpolation that does not require pre-generated mapping files (though it should support caching mapping files for faster execution). + +Many types of analysis in MPAS will require fields that are interpolated from MPAS grids to arbitrary points, not just to points on a lat/lon grid. This project will not attempt to address that case completely but will take that need into consideration in designing a solution that can be extended to interpolation at arbitrary points in the future. + + +.. raw:: html + +

Requirements

+

Requirement: Higher-order interpolation
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis

+ + The option to interpolate smoothly (e.g. linearly or with barycentric coordinates) between cell-centered values should be added. The calling code should easily be able to select among various orders of interpolation with a flag. + +

Consideration: Interpolation should handle periodic boundaries
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis

+ + If and when MPAS-Analysis supports planar test cases with periodic boundaries, interpolation should be extended to handle periodic boundaries + +

Consideration: Interpolation should handle Cartesian meshes
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis

+ + If and when MPAS-Analysis supports planar test cases with purely Cartesian meshes (e.g. where `latCell` and `lonCell` do not vary), interpolation should be extended to handle Cartesian Coordinates + +

Consideration: Support for arbitrary output interpolation points
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis +

+ + The calling code should be able to supply any desired interpolation points, not just a regular latitude-longitude grid. + +

Consideration: Support caching results from any costly, one-time geometric computations
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis

+ + For many potential algorithms used to perform interpolation, there is likely to be a relatively costly step of computing fields such as indices into input data fields and interpolation weights that 1) only need to be computed once for a given input mesh and set of output points and 2) are independent of the data in the field being interpolated. If this data were cached, it could mean that rerunning the analysis (which might be very desirable, e.g., while monitoring the progress of a run) would be much cheaper than the initial run. Also, a cached weight file from a previous analysis run could be used when analyzing a subsequent run with identical source meshes. + + + +

Algorithmic Formulations

+ + + +.. raw:: html + +

Design solution: Higher-order interpolation
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis

+ + +The approach will be to create SCRIP files (or, in the future for greater flexibility perhaps ESMF grid/mesh files) for the source and destination grids, then to use ``ESMF_RegridWeightGen`` to generate a mapping file. ``ESMF_RegridWeightGen`` supports 5 interpolation methods---bilinear, patch, nearestdtos, neareststod, and conserve---and we would likely support at least bilinear, neareststod and conserve, and perhaps all 5. The destination grid will be specified either by reading values from ``lat`` and ``lon`` coordinates of a NetCDF file or through config file options ``lat`` and ``lon`` that are typically expressions involving ``numpy.arange`` or ``numpy.linspace``. + +Then, ``ncremap`` will be used to remap the desired list of variables from an MPAS NetCDF file to the desired destination grid. + + +.. raw:: html + +

Design solution: Interpolation should handle periodic boundaries
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis

+ + +For now, periodic boundaries (except for the obvious one at +/- 180 longitude) will not be supported. It appears that ESMF grid files do include support for periodic boundaries so the current solution should be relatively easy to extend to periodic boundaries in the future. + + +.. raw:: html + +

Design solution: Interpolation should handle Cartesian meshes
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis

+ + +ESMF unstructured mesh files seem to support Cartesian coordinates. This will be investigated if and when MPAS-Analysis can accommodate a test case with Cartesian coordinates. + + +.. raw:: html + +

Design solution: Support for arbitrary output interpolation points
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis +

+ + +I do not intend to address this consideration in this project. It may be that ``ESMF_RegridWeightGen`` can also be used to perform interpolation to arbitrary points (in particular, a set of points that are not cell centers or vertices of a mesh), but this is not yet clear to me. If not, an alternative solution for arbitrary destination points will be needed. + + +.. raw:: html + +

Design solution: Support caching results from any costly, one-time geometric computations
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis

+ + +This should be relatively easy to accommodate with ``ESMF_RegridWeightGen`` and ``ncremap``. The default behavior of the function for generating interpolation weights will be to do nothing if the mapping file already exists. Further, we can support an optional config option that will point to an existing mapping file if one has already been generated and cached somewhere (e.g. in a shared directory). Eventually, we will probably want to systematically store these mapping files for typical MPAS meshes and typical output grids, particularly for those that are expensive to generate. + +:raw-html-m2r:`

Design and Implementation

` + + +.. raw:: html + +

Implementation: Higher-order interpolation
+ Date last modified: 2017/03/04
+ Contributors: Xylar Asay-Davis

+ + +Implementation is in the branch https://github.com/xylar/MPAS-Analysis/tree/horiz_interp. + +``ESMF_RegridWeightGen`` is used to compute regridding weights that are 'bilinear', 'neareststod' (nearest neighbor) or 'conserve' (conservative). The order of regridding can be chosen separately for MPAS model results, ocean observations and sea-ice observations via ``mpasInterpolationMethod`` and ``interpolationMethod`` flags (see the template: https://github.com/xylar/MPAS-Analysis/blob/horiz_interp/config.template). + + +.. raw:: html + +

Implementation: Interpolation should handle periodic boundaries
+ Date last modified: 2017/03/04
+ Contributors: Xylar Asay-Davis

+ + +Not yet supported. + + +.. raw:: html + +

Implementation: Interpolation should handle Cartesian meshes
+ Date last modified: 2017/03/04
+ Contributors: Xylar Asay-Davis

+ + +Not yet supported. + + +.. raw:: html + +

Implementation: Support for arbitrary output interpolation points
+ Date last modified: 2017/03/04
+ Contributors: Xylar Asay-Davis +

+ + +Not yet supported. + + +.. raw:: html + +

Implementation: Support caching results from any costly, one-time geometric computations
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis

+ + +Mapping files, climatologies and remapped climatologies are cached when they are created. Both mapping files and the directory containing the remapped climatologies from observations can be supplied via the config file, saving the time of computing them. + +:raw-html-m2r:`

Testing

` + + +.. raw:: html + +

Testing and Validation: Higher-order interpolation
+ Date last modified: 2017/03/04
+ Contributors: Xylar Asay-Davis

+ + +Testing of each of the flags ('bilinear', 'neareststod' and 'conserve') has been performed with the ``GMPAS-QU240`` run, all of which produce plots that look acceptable. Bilinear and conserve methods leave halos of invalid cells around land at coarse resolution, which is consistent with the coarse resolution of this test mesh. + +An alpha8 and a beta0 run were also tested on Edison. They ran successfully but I have not had a chance to examine the output. + + +.. raw:: html + +

Testing and Validation: Interpolation should handle periodic boundaries
+ Date last modified: 2017/03/04
+ Contributors: Xylar Asay-Davis

+ + +Not yet supported. + + +.. raw:: html + +

Testing and Validation: Interpolation should handle Cartesian meshes
+ Date last modified: 2017/03/04
+ Contributors: Xylar Asay-Davis

+ + +Not yet supported. + + +.. raw:: html + +

Testing and Validation: Support for arbitrary output interpolation points
+ Date last modified: 2017/03/04
+ Contributors: Xylar Asay-Davis +

+ + +Not yet supported. + + +.. raw:: html + +

Testing and Validation: Support caching results from any costly, one-time geometric computations
+ Date last modified: 2017/02/25
+ Contributors: Xylar Asay-Davis

+ + +I have verified that I can rerun without re-computing mapping files or climatologies. Using the ``GMPAS-QU240`` run, I have verified that I can supply mapping files and remapped observation climatologies without them being re-computed diff --git a/1.11.0rc1/_sources/design_docs/index.rst.txt b/1.11.0rc1/_sources/design_docs/index.rst.txt new file mode 100644 index 000000000..afb457df8 --- /dev/null +++ b/1.11.0rc1/_sources/design_docs/index.rst.txt @@ -0,0 +1,16 @@ +Design Documents +================ + +.. toctree:: + :titlesonly: + + generalized_horizontal_interpolation + config_file_reorganization + timekeeping_reorg + generalize_calendar + variable_mapping_reorg + parallel_tasks + remapper + analysis_task_template + prerequisite_tasks + eddykineticenergy diff --git a/1.11.0rc1/_sources/design_docs/parallel_tasks.rst.txt b/1.11.0rc1/_sources/design_docs/parallel_tasks.rst.txt new file mode 100644 index 000000000..8edc6280d --- /dev/null +++ b/1.11.0rc1/_sources/design_docs/parallel_tasks.rst.txt @@ -0,0 +1,472 @@ +.. role:: raw-html-m2r(raw) + :format: html + + +Support Parallel Tasks +====================== + +:raw-html-m2r:`

+Xylar Asay-Davis
+date: 2017/02/22
+

` + + +.. raw:: html + +

Summary

+ + +Currently, the full analysis suite includes 7 tasks, 5 for the ocean and 2 for sea ice. +The number of tasks is expected to grow over time. Task parallelism in some +form is needed to allow as many tasks as desired to be run simultaneously. +Successful completion of this design will mean that the analysis suite produces +identical results to the current ``develop`` branch but that several analysis +tasks (a number selected by the user) run simultaneously. + + +.. raw:: html + +

Requirements

+ + + +.. raw:: html + +

Requirement: Tasks run simultaneously
+ Date last modified: 2017/02/22
+ Contributors: Xylar Asay-Davis +

+ There must be a mechanism for running more than one analysis task simultaneously. + +

Requirement: Select maximum number of tasks
+ Date last modified: 2017/02/22
+ Contributors: Xylar Asay-Davis +

+ There must be a mechanism for the user to select the maximum number of tasks + to run simultaneously. This might be necessary to control the number of processors + or the amount of memory used on a given machine or (in the case of running + analysis on login nodes) to be nice to other users on a shared resource. + +

Requirement: Lock files written by multiple tasks
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ There must be a mechanism for locking files (during either reading or writing) if + they can be written by multiple tasks. This is necessary to prevent cases where + multiple tasks write to the same file simultaneously or one task reads from a file + at the same time another is writing. + +

Consideration: Task parallelism should work on either login or compute nodes
+ Date last modified: 2017/02/22
+ Contributors: Xylar Asay-Davis +

+ On some systems, care needs to be taken that scripts run on the compute nodes + rather than on the management node(s). For example, on Edison and Cori, the + `aprun` command is required to ensure that scripts run on compute nodes. + +

Consideration: There may need to be a way to limit the memory used by a task
+ Date last modified: 2017/02/22
+ Contributors: Phillip J. Wolfram, Xylar Asay-Davis +

+ It may be that `xarray-dask` with subprocess (or similar) may need some + initialization of xarray corresponding to the reduced memory available. For example, + with 10 processes on a node, `xarray` / `dask` should be initialized to use only + 1/10th of the memory and CPUs per task. `xarray-dask` may require special + initialization for efficiency and to avoid crashes. + +

Algorithmic Formulations

+ +

Design solution: Tasks run simultaneously
+ Date last modified: 2017/02/23
+ Contributors: Xylar Asay-Davis +

+ + +I propose to have a config option, ``parallelTaskCount``\ , that is the number of concurrent +tasks that are to be performed. If this flag is set to a number greater than 1, analysis +tasks will run concurrently. To accomplish this, I propose to use ``subprocess.call`` or +one of its variants within ``run_analysis.py`` to call itself but with only one task at a +time. Thus, if ``run_analysis.py`` gets called with only a single task (whether directly +from the command line or through ``subprocess.call``\ ), it would execute that task without +spawning additional subprocesses. + +This approach would require having a method for creating a list of individual tasks +to be performed, launching ``parallelTaskCount`` of those tasks, and then waiting for +them to complete, launching additional tasks as previous tasks complete. The approach +would also require individual log files for each task, each stored in the log directory +(already a config option). + + +.. raw:: html + +

Design solution: Select maximum number of tasks
+ Date last modified: 2017/02/23
+ Contributors: Xylar Asay-Davis +

+ + +This is accomplished with the ``parallelTaskCount`` flag above. A value of +``parallelTaskCount = 1`` would indicate serial execution, though likely still +via launching subprocesses for each task. + +The command ``subprocess.Popen`` allows enough flexibility that it will be possible +to launch several jobs, and then to farm out additional jobs as each returns. It should +be possible to use a combination of ``os.kill(pid, 0)``\ , which checks if a +process is running, and ``os.waitpid(-1,0)``\ , which waits for any subprocess to finish, +to accomplish launching several processes and waiting until the first one finishes +before launching the next task, or in pseudo-code: + +.. code-block:: python + + processes = launchTasks(taskNames[0:taskCount]) + remainingTasks = taskNames[taskCount:] + while len(processes) > 0: + process = waitForTask(processes) + processes.pop(process) + if len(remainingTasks) > 0: + process = launchTasks(remainingTasks[0]) + processes.append(process) + remainingTasks = remainingTasks[1:] + +Output from the main ``run_analysis.py`` task will list which analysis tasks were run +and which completed successfully. The full analysis will exit with an error if one +task fails, but only after attempting to run all desired analysis tasks. This allows +the failure of one analysis task not to interrupt execution of other analyses. + +In a future PR, this work can be expanded to include checking if the appropriate +analysis member (AM) was turned on during the run and skipping any analysis tasks that +depend on that AM if not (Issue #58). + + +.. raw:: html + +

Design solution: Lock files written by multiple tasks
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +The design solution is based on the process lock in the fasteners package: +http://fasteners.readthedocs.io/en/latest/examples.html#interprocess-locks + +Currently, only mapping files should be written by multiple tasks, requiring locks. + +The algorithm consists of 2 changes. First, I removed the option ``overwriteMappingFiles``\ , +which is now always ``False``\ ---if a mapping file exists, it is not overwritten. This +was necessary because now only one task will write a given mapping file if it doesn't +already exist and the other tasks will wait for it to be written. Then, all tasks +know there is a valid mapping file that they can read without having to lock the file. + +The second change was to add a lock around the subprocess call to ``ESMF_RegridWeightGen`` +that makes sure only one process generates the mapping file. Each process attempts to +acquire the lock and checks if the mapping file already exists once it acquires the +lock. If not, it generates the mapping file and releases the lock. If so, it just +releases the lock and moves on. Thus, only the first process to acquire the lock +generates the mapping file and the others wait until it is finished. + + +.. raw:: html + +

Design solution: Task parallelism should work on either login or compute nodes
+ Date last modified: 2017/02/23
+ Contributors: Xylar Asay-Davis +

+ For the time being, I propose to address only task parallelism on the login nodes and to + extend the parallelism to work robustly on compute nodes as a separate project. + Nevertheless, I will seek to implement this design in a way that should be conducive to + this later extension. Likely what will be required is a robust way of adding a prefix + to the commandline (e.g. `aprun -np 1`) when calling subprocesses. Adding such a prefix + should be relatively simple. + +

Design solution: There may need to be a way to limit the memory used by a task
+ Date last modified: 2017/02/23
+ Contributors: Xylar Asay-Davis +

+ I am not very familiar with `dask` within `xarray` and I do not intend to address this + consideration directly in this project. However, on my brief investigation, it seems like + the proper way to handle this may be to have a `chunk` config option either for all tasks + or for individual tasks that can be used to control the size of data in memory. I think + such an approach can be investigated in parallel to this project. An intermediate solution + for situations where memory is limited would be to set `parallelTaskCount` to a small number. + + +

Design and Implementation

+ + This design has been implemented in the test branch https://github.com/xylar/MPAS-Analysis/tree/parallel_tasks + +

Implementation: Tasks run simultaneously
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +Tasks can now run in parallel. This has been implemented in these 4 functions within ``run_analysis.py``\ : + +.. code-block:: python + + def run_parallel_tasks(config, analyses, configFiles, taskCount): + # {{{ + """ + Run this script once each for several parallel tasks. + + Author: Xylar Asay-Davis + Last Modified: 03/08/2017 + """ + + taskNames = [analysisModule.get_task_name(**kwargs) for + analysisModule, kwargs in analyses] + + taskCount = min(taskCount, len(taskNames)) + + (processes, logs) = launch_tasks(taskNames[0:taskCount], config, + configFiles) + remainingTasks = taskNames[taskCount:] + while len(processes) > 0: + (taskName, process) = wait_for_task(processes) + if process.returncode == 0: + print "Task {} has finished successfully.".format(taskName) + else: + print "ERROR in task {}. See log file {} for details".format( + taskName, logs[taskName].name) + logs[taskName].close() + # remove the process from the process dictionary (no need to bother) + processes.pop(taskName) + + if len(remainingTasks) > 0: + (process, log) = launch_tasks(remainingTasks[0:1], config, + configFiles) + # merge the new process and log into these dictionaries + processes.update(process) + logs.update(log) + remainingTasks = remainingTasks[1:] + # }}} + + + def launch_tasks(taskNames, config, configFiles): # {{{ + """ + Launch one or more tasks + + Author: Xylar Asay-Davis + Last Modified: 03/08/2017 + """ + thisFile = os.path.realpath(__file__) + + logsDirectory = build_config_full_path(config, 'output', + 'logsSubdirectory') + make_directories(logsDirectory) + + commandPrefix = config.getWithDefault('execute', 'commandPrefix', + default='') + if commandPrefix == '': + commandPrefix = [] + else: + commandPrefix = commandPrefix.split(' ') + + processes = {} + logs = {} + for taskName in taskNames: + args = commandPrefix + [thisFile, '--generate', taskName] + configFiles + + logFileName = '{}/{}.log'.format(logsDirectory, taskName) + + # write the command to the log file 
+ logFile = open(logFileName, 'w') + logFile.write('Command: {}\n'.format(' '.join(args))) + # make sure the command gets written before the rest of the log + logFile.flush() + print 'Running {}'.format(taskName) + process = subprocess.Popen(args, stdout=logFile, + stderr=subprocess.STDOUT) + processes[taskName] = process + logs[taskName] = logFile + + return (processes, logs) # }}} + + + def wait_for_task(processes): # {{{ + """ + Wait for the next process to finish and check its status. Returns both the + task name and the process that finished. + + Author: Xylar Asay-Davis + Last Modified: 03/08/2017 + """ + + # first, check if any process has already finished + for taskName, process in processes.iteritems(): # python 2.7! + if(not is_running(process)): + return (taskName, process) + + # No process has already finished, so wait for the next one + (pid, status) = os.waitpid(-1, 0) + for taskName, process in processes.iteritems(): + if pid == process.pid: + process.returncode = status + # since we used waitpid, this won't happen automatically + return (taskName, process) # }}} + + + def is_running(process): # {{{ + """ + Returns whether a given process is currently running + + Author: Xylar Asay-Davis + Last Modified: 03/08/2017 + """ + + try: + os.kill(process.pid, 0) + except OSError: + return False + else: + return True # }}} + + +.. raw:: html + +

Implementation: Select maximum number of tasks
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +There is a configuration option, ``parallelTaskCount``\ , which defaults to 1, meaning tasks run in serial: + +.. code-block:: ini + + [execute] + ## options related to executing parallel tasks + + # the number of parallel tasks (1 means tasks run in serial, the default) + parallelTaskCount = 8 + + +.. raw:: html + +

Implementation: Lock files written by multiple tasks
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +Here is the code for locking the mapping file within ``shared.interpolation.interpolate``\ : + +.. code-block:: python + + import fasteners + ... + # lock the weights file in case it is being written by another process + with fasteners.InterProcessLock(_get_lock_path(outWeightFileName)): + # make sure another process didn't already create the mapping file in + # the meantime + if not os.path.exists(outWeightFileName): + # make sure any output is flushed before we add output from the + # subprocess + subprocess.check_call(args) + + +.. raw:: html + +

Implementation: Task parallelism should work on either login or compute nodes
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +I have included a config option ``commandPrefix`` that *should* be able to be used to +run the analysis on compute nodes. If the command prefix is empty, the code should run +as normal on the compute nodes. + +.. code-block:: ini + + [execute] + ## options related to executing parallel tasks + + # the number of parallel tasks (1 means tasks run in serial, the default) + parallelTaskCount = 1 + + # Prefix on the command line before a parallel task (e.g. 'srun -n 1 python') + # Default is no prefix (run_analysis.py is executed directly) + commandPrefix = srun -n 1 + + +.. raw:: html + +

Implementation: There may need to be a way to limit the memory used by a task
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +As mentioned above, I have not addressed this consideration in this project. Currently, +the suggested approach would be to limit ``parallelTaskCount`` to a number of tasks that +does not cause memory problems. More sophisticated approaches could be explored in the +future. + + +.. raw:: html + +

Testing

+ + + +.. raw:: html + +

Testing and Validation: Tasks run simultaneously
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +So far, I have tested extensively on my laptop (\ ``parallelTaskCount = 1``\ , ``2``\ , ``4`` and ``8``\ ) +with the expected results. Later, I will test on Edison and Wolf as well. + + +.. raw:: html + +

Testing and Validation: Select maximum number of tasks
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +Same as above. + + +.. raw:: html + +

Testing and Validation: Lock files written by multiple tasks
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +I ran multiple climatology map tasks at the same time and verified from the log files +that only one created each mapping file. Others must have waited for that file to be +written or they would have crashed almost immediately when they tried to read the +mapping file during remapping operations. So I'm confident the code is working as +intended. + + +.. raw:: html + +

Testing and Validation: Task parallelism should work on either login or compute nodes
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +On Edison and Wolf, I will test running the analysis with parallel tasks both on login nodes +and by submitting a job to run on the compute nodes (using the appropriate ``commandPrefix``\ ). + + +.. raw:: html + +

Testing and Validation: There may need to be a way to limit the memory used by a task
+ Date last modified: 2017/03/10
+ Contributors: Xylar Asay-Davis +

+ + +Assuming no crashes in my testing on compute nodes with all tasks running in parallel, I will +leave this consideration for investigation in the future. diff --git a/1.11.0rc1/_sources/design_docs/prerequisite_tasks.rst.txt b/1.11.0rc1/_sources/design_docs/prerequisite_tasks.rst.txt new file mode 100644 index 000000000..cc752970f --- /dev/null +++ b/1.11.0rc1/_sources/design_docs/prerequisite_tasks.rst.txt @@ -0,0 +1,281 @@ +.. role:: raw-html-m2r(raw) + :format: html + + +Prerequisite Tasks and Subtasks +=============================== + + +.. raw:: html + +

+ Xylar Asay-Davis
+ date: 2017/06/12
+

+ + + +.. raw:: html + +

Summary

+ + +Currently, no tasks depend on other tasks to run. However, in order to allow +multiple plots to be generated simultaneously, it is desirable to break tasks +into multiple subtasks, and some of these subtasks will need to rely on data from +other subtasks. It is also conceivable that multiple tasks could rely on the +same data (e.g. a common climatology dataset). The proposed solution to this +problem is to allow "prerequisite tasks" to a given analysis task. The task +will only run after the prerequisite task(s) have completed. Prerequisite +tasks could be used to build up a sequence of analysis tasks in several steps. +Some of these steps could be shared between analysis tasks (e.g. computing a +single data set and then plotting it in various ways). Implementation of this +design will be considered a success if dependent tasks only run once their +prerequisite tasks have completed successfully. + + +.. raw:: html + +

Requirements

+ + + +.. raw:: html + +

Requirement: Define prerequisite tasks
+ Date last modified: 2017/06/12
+ Contributors: Xylar Asay-Davis +

+ + +A simple mechanism (such as a list of task names) exists to define prerequisite +tasks of each analysis task. + + +.. raw:: html + +

Requirement: Add prerequisites to task list
+ Date last modified: 2017/06/12
+ Contributors: Xylar Asay-Davis +

+ + +Given a task that we want to run, a mechanism must exist for adding its +prerequisites (if any) to the list of tasks to be run. + + +.. raw:: html + +

Requirement: Holding dependent tasks
+ Date last modified: 2017/06/12
+ Contributors: Xylar Asay-Davis +

+ + +Dependent tasks (those with prerequisites) must be prevented from running until +their prerequisites have successfully finished. + + +.. raw:: html + +

Requirement: Cancel dependents of failed prerequisites
+ Date last modified: 2017/06/12
+ Contributors: Xylar Asay-Davis +

+ + +If a prerequisite of a dependent tasks has failed, the dependent task should +not be run. + + +.. raw:: html + +

Algorithmic Formulations

+ + + +.. raw:: html + +

Design solution: Define prerequisite tasks
+ Date last modified: 2017/09/19
+ Contributors: Xylar Asay-Davis +

+ + +Each task will be constructed with a list of the names of prerequisite tasks. +If a task has no prerequisites (the default), the list is empty. + + +.. raw:: html + +

Design solution: Add prerequisites to task list
+ Date last modified: 2017/10/11
+ Contributors: Xylar Asay-Davis +

+ + +A recursive function will be used to add a given task (assuming its +``check_generate`` method returns ``True``\ , meaning that task should be generated) +and its dependencies to a list of analyses to run. The code (with a few +error messages removed for brevity) is as follows: + +.. code-block:: python + + analysesToGenerate = [] + # check which analysis we actually want to generate and only keep those + for analysisTask in analyses: + # update the dictionary with this task and perhaps its subtasks + add_task_and_subtasks(analysisTask, analysesToGenerate) + + def add_task_and_subtasks(analysisTask, analysesToGenerate, + callCheckGenerate=True): + + if analysisTask in analysesToGenerate: + return + + if callCheckGenerate and not analysisTask.check_generate(): + # we don't need to add this task -- it wasn't requested + return + + # first, we should try to add the prerequisites of this task and its + # subtasks (if they aren't also subtasks for this task) + prereqs = analysisTask.runAfterTasks + for subtask in analysisTask.subtasks: + for prereq in subtask.runAfterTasks: + if prereq not in analysisTask.subtasks: + prereqs.extend(subtask.runAfterTasks) + + for prereq in prereqs: + add_task_and_subtasks(prereq, analysesToGenerate, + callCheckGenerate=False) + if prereq._setupStatus != 'success': + # this task should also not run + analysisTask._setupStatus = 'fail' + return + + # make sure all prereqs have been set up successfully before trying to + # set up this task -- this task's setup may depend on setup in the prereqs + try: + analysisTask.setup_and_check() + except (Exception, BaseException): + analysisTask._setupStatus = 'fail' + return + + # next, we should try to add the subtasks. 
This is done after the current + # analysis task has been set up in case subtasks depend on information + # from the parent task + for subtask in analysisTask.subtasks: + add_task_and_subtasks(subtask, analysesToGenerate, + callCheckGenerate=False) + if subtask._setupStatus != 'success': + analysisTask._setupStatus = 'fail' + return + + analysesToGenerate.append(analysisTask) + analysisTask._setupStatus = 'success' + + +.. raw:: html + +

Design solution: Holding dependent tasks
+ Date last modified: 2017/10/11
+ Contributors: Xylar Asay-Davis +

+ + +Each task is given a ``_runStatus`` attribute, which is a ``multiprocessing.Value`` +object that can be shared and changed across processes. A set of constant +possible values for this attribute, ``READY``\ , ``BLOCKED``\ , ``RUNNING``\ , ``SUCCESS`` and +``FAIL`` are defined in ``AnalysisTask``. If a task has no prerequisites, initially +``_runStatus = READY``\ ; otherwise ``_runStatus = BLOCKED``. Any ``READY`` +task can be run (\ ``_runStatus = RUNNING``\ ). Any task that finishes is given +``_runStatus = SUCCESS`` or ``_runStatus = FAIL`` (I know, not grammatically +consistent but compact...). + +When a new parallel slot becomes available, all ``BLOCKED`` tasks are checked +to see if any prerequisites have failed (in which case the task also fails) or +if all prerequisites have succeeded, in which case the task is now ``READY``. +After that, the next ``READY`` task is run. + + +.. raw:: html + +

Design solution: Cancel dependents of failed prerequisites
+ Date last modified: 2017/06/12
+ Contributors: Xylar Asay-Davis +

+ + +Same as above: When a new parallel slot becomes available, all ``BLOCKED`` +tasks are checked to see if any prerequisites have failed (in which case the +task also fails). + + +.. raw:: html + +

Design and Implementation

+ + +The design has been implemented in the branch +`xylar/add_mpas_climatology_task `_ + + +.. raw:: html + +

Implementation: Define prerequisite tasks
+ Date last modified: 2017/10/11
+ Contributors: Xylar Asay-Davis +

+ + +``AnalysisTask`` now has an attribute ``runAfterTasks``\ , which defaults to empty. +Prerequisite tasks can be added by calling ``run_after(self, task)`` with the +task that this task should follow. + + +.. raw:: html + +

Implementation: Add prerequisites to task list
+ Date last modified: 2017/10/11
+ Contributors: Xylar Asay-Davis +

+ + +``build_analysis_list`` in ``run_mpas_analysis`` has been modified to call a +recursive function ``add_task_and_subtasks`` that adds a task, its prerequisites +(if they have not already been added) and its subtasks to the list of tasks +to run. + + +.. raw:: html + +

Implementation: Holding dependent tasks
+ Date last modified: 2017/06/12
+ Contributors: Xylar Asay-Davis +

+ + +The ``run_analysis`` function in ``run_mpas_analysis`` has been updated to be aware +of the status of each task, as described in the algorithms section. + + +.. raw:: html + +

Implementation: Cancel dependents of failed prerequisites
+ Date last modified: 2017/06/12
+ Contributors: Xylar Asay-Davis +

+ + +Again, the ``run_analysis`` function in ``run_mpas_analysis`` has been updated to +be aware of the status of each task, as described in the algorithms section. + +:raw-html-m2r:`

Testing and Validation

` + +:raw-html-m2r:`

Date last modified: 2017/06/12
+Contributors: Xylar Asay-Davis +

` +All plots will be tested to ensure they are bit-for-bit identical to +those produced by ``develop`` for all tests defined in the ``configs/edison`` +and ``configs/lanl`` directories. Task will be run in parallel and I will +verify that no dependent tasks run before prerequisite tasks have completed. diff --git a/1.11.0rc1/_sources/design_docs/remapper.rst.txt b/1.11.0rc1/_sources/design_docs/remapper.rst.txt new file mode 100644 index 000000000..b76047848 --- /dev/null +++ b/1.11.0rc1/_sources/design_docs/remapper.rst.txt @@ -0,0 +1,205 @@ +.. role:: raw-html-m2r(raw) + :format: html + + +Remapper for "online" remapping of data sets +============================================ + +:raw-html-m2r:`

+Xylar Asay-Davis
+date: 2017/04/15
+

` + + +.. raw:: html + +

Summary

+ + +This document describes the design and implementation of a ``Remapper`` class +for performing either "online" (in memory) or "offline" (through files +via ``ncremap``\ ) remapping of horizontal data sets. The ``Remapper`` is needed in +order to support remapping to and from grids not currently supported by +``ncremap`` such as polar stereographic grids commonly used for polar data sets. + + +.. raw:: html + +

Requirements

+ + + +.. raw:: html + +

Requirement: Support for remapping to and from stereographic grids
+ Date last modified: 2017/04/15
+ Contributors: Xylar Asay-Davis +

+ + +There should exist a method for interpolating from stereographic grids to +the comparison grid used in MPAS-Analysis. This is needed to support +observations that are stored on stereographic grids. + +It would often be more efficient (in terms of the size of data sets) and more +practical to perform analysis of polar data sets on a stereographic grid +centered at that pole. Support for mapping to stereographic grids should be +included, if feasible. + + +.. raw:: html + +

Algorithmic Formulations

+ + + +.. raw:: html + +

Design solution: Support for remapping to and from stereographic grids
+ Date last modified: 2017/04/15
+ Contributors: Xylar Asay-Davis +

+ + +The design solution is somewhat complex and will be described in multiple +sections. + + +.. raw:: html + +

MeshDescriptor classes

+ + +To support mapping to and from MPAS meshes, lat/lon grid and stereographic +grids (as well as future grids we might want to support), I propose defining a +"mesh descriptor" that defines the mesh either by reading it from a file or by +creating it from simple numpy ndarrays. Each ``MeshDescriptor`` class defines +enough information (such as the locations of cell centers and corners) about +the mesh or grid to allow remapping between meshes. + +An ``MpasMeshDescriptor`` class will define MPAS meshes read from a file. + +A ``LatLonGridDescriptor`` class will define global lat/lon grids such as the +existing comparison grid. + +A ``ProjectionGridDescriptor`` class will define any grid that can be described +by a logically rectangular grid with ``pyproj`` projection. In particular, such +a projection grid could be used to support both polar stereographic grids and +regional lat/lon grids. + + +.. raw:: html + +

Remapper class

+ + +Remapping between meshes described by ``MeshDescriptor`` classes will be performed +by a ``Remapper`` class. This class will support both "online" mapping in memory +and "offline" mapping with ``ncremap``. Only "online" mapping will be supported +for grids defined with the ``ProjectionGridDescriptor``\ , as these are not +supported by ``ncremap``. A ``Remapper`` object will be created by giving it source +and destination ``MeshDescriptor`` objects and an optional mapping file name. +(If the mapping file name is not given, it is assumed that the source and +destination grids are the same, and no remapping is needed.) + +If remapping is performed "online", it supports renormalization of masked +arrays. If a data set includes ``NaN``\ s in a given data array, both the data +array and a mask are remapped, and renormalization is performed anywhere the +remapped mask exceeds a given threshold. + + +.. raw:: html + +

Design and Implementation

+ + + +.. raw:: html + +

Implementation: Support for remapping to and from stereographic grids
+ Date last modified: 2017/04/15
+ Contributors: Xylar Asay-Davis +

+ + +The implementation is on the branch `xylar/MPAS-Analysis/add_polar_stereographic_interp `_ + + +.. raw:: html + +

MeshDescriptor classes

+ + +Each ``MeshDescriptor`` subclass includes the following member variables or +methods: + + +* ``meshName``\ : a name of the mesh or grid, used for naming mapping files and + climatologies +* ``regional``\ : whether the mesh is regional or global +* ``coords`` and ``dims``\ : dictionaries defining the coordinates and dimensions + of this mesh, used to update a data set following remapping +* ``to_scrip`` method: used to write out a SCRIP file defining the mesh. + + +.. raw:: html + +

Remapper class

+ + +Below is a skeleton of the ``Remapper`` public API. + +.. code-block:: python + + class Remapper(object): + def __init__(self, sourceDescriptor, destinationDescriptor, + mappingFileName=None): + ''' + Create the remapper and read weights and indices from the given file + for later used in remapping fields. + ''' + + def build_mapping_file(self, method='bilinear', + additionalArgs=None): + ''' + Given a source file defining either an MPAS mesh or a lat-lon grid and + a destination file or set of arrays defining a lat-lon grid, constructs + a mapping file used for interpolation between the source and + destination grids. + ''' + + def remap_file(self, inFileName, outFileName, + variableList=None, overwrite=False): + ''' + Given a source file defining either an MPAS mesh or a lat-lon grid and + a destination file or set of arrays defining a lat-lon grid, constructs + a mapping file used for interpolation between the source and + destination grids. + ''' + + def remap(self, ds, renormalizationThreshold=None): + ''' + Given a source data set, returns a remapped version of the data set, + possibly masked and renormalized. + ''' + + +.. raw:: html + +

Testing and Validation: Support for remapping to and from stereographic + grids
+ Date last modified: 2017/04/15
+ Contributors: Xylar Asay-Davis +

+ + +On the branch `xylar/MPAS-Analysis/add_polar_stereographic_interp `_\ , +climatologies have been updated to use ``Remapper`` objects. Analysis has been +run on both ``QU240`` and ``EC60to30`` beta0 ACME results, and results have been +compared by eye. Results from ``ncremap`` are identical, as expected. Because of +renormalization, results with "online" remapping differ from those from +``ncremap``\ , typically with less severe masking of missing data. + +Continuous integration unit tests for climatology and interpolation have both +been updated to make use of the ``Remapper`` class. New tests have been added to +perform remapping with stereographic grids. diff --git a/1.11.0rc1/_sources/design_docs/timekeeping_reorg.rst.txt b/1.11.0rc1/_sources/design_docs/timekeeping_reorg.rst.txt new file mode 100644 index 000000000..325967f2c --- /dev/null +++ b/1.11.0rc1/_sources/design_docs/timekeeping_reorg.rst.txt @@ -0,0 +1,248 @@ + +Reorganize Timekeeping +====================== + + +.. raw:: html + +

+ Xylar Asay-Davis
+ date: 2017/02/06
+

+

Summary

+ + Currently, the `Date` class is used to parse a date object from a date string + (e.g. '0001-01-01_00:00:00') taken from MPAS namelists, streams files or time + variables (e.g. `xtime`). However, this class assumes a 365-day calendar and + cannot easily be adapted to the Gregorian calendar also supported by MPAS + components (`config_calendar_type = 'gregorian'`). Furthermore, existing + routines exist to handle most of the capabilities + of the `Date` class. The proposed reorganization would eliminate the `Date` class + in favor of a number of helper functions that can be used to convert between various + date formats: date strings, days since a reference date, `datetime.datetime` objects + and `relativedelta` objects (see below). The success of this reorganization will be + demonstrated when the existing analysis can be performed successfully with the new + utility functions with both MPAS calendars, the `'gregorian_noleap'` (365-day) calendar + used by most existing ACME and MPAS runs and the `'gregorian'` calendar also supported + in MPAS components. + + +

Requirements

+ + + +.. raw:: html + +

Requirement: Date string parsing supports both MPAS calendars
+ Date last modified: 2017/02/06
+ Contributors: Xylar Asay-Davis +

+ + +There must be a way to parse dates from MPAS that is aware of the appropriate calendar +stored in the ``config_calendar_type`` namelist option, either ``'gregorian'`` or +``'gregorian_noleap'``. + + +.. raw:: html + +

Requirement: Capability of incrementing dates by a number of years and/or months
+ Date last modified: 2017/02/06
+ Contributors: Xylar Asay-Davis +

+ + +The analysis requires a way of incrementing a given date by an interval specified in +not only days, hours, minutes and seconds but also months and years. The standard +``datetime.timedelta`` does not support increments by years and months because they are +not fixed periods of time. The existing ``Date`` class in MPAS-Analysis supports +increments in months and years, but only for the ``'gregorian_noleap'`` (365-day) calendar. +A method must exist to increment dates on either calendar by a given number of years +and/or months (in addition to days, hours, etc.). + + +.. raw:: html + +

Design and Implementation

+ + + +.. raw:: html + +

Implementation: Date string parsing supports both MPAS calendars
+ Date last modified: 2017/02/06
+ Contributors: Xylar Asay-Davis +

+ + +The implementation is on the branch: +https://github.com/xylar/MPAS-Analysis/tree/timekeeping_reorg +and in PR #102 + +The function for converting a date string to a ``datetime.datetime`` is documented as follows: + +.. code-block:: python + + def stringToDatetime(dateString): + """ + Given a date string and a calendar, returns a `datetime.datetime` + + Parameters + ---------- + dateString : string + A date and time in one of the following formats: + - YYYY-MM-DD hh:mm:ss + - YYYY-MM-DD hh.mm.ss + - YYYY-MM-DD SSSSS + - DDD hh:mm:ss + - DDD hh.mm.ss + - DDD SSSSS + - hh.mm.ss + - hh:mm:ss + - YYYY-MM-DD + - YYYY-MM + - SSSSS + + Note: either underscores or spaces can be used to separate the date + from the time portion of the string. + + Returns + ------- + datetime : A `datetime.datetime` object + + Raises + ------ + ValueError + If an invalid `dateString` is supplied. + + Author + ------ + Xylar Asay-Davis + + Last modified + ------------- + 02/04/2017 + """ + +As long as ``relativedelta`` objects rather than ``datetime.timedelta`` objects are used to increment +``datetime.datetime`` objects, ``datetime.datetime`` can be used to represent dates on either the Gregorian +or the 365-day calendar. + + +.. raw:: html + +

Implementation: Capability of incrementing dates by a number of years and/or months
+ Date last modified: 2017/02/09
+ Contributors: Xylar Asay-Davis +

+ + +The implementation is on the branch: +https://github.com/xylar/MPAS-Analysis/tree/timekeeping_reorg +and in PR #102 + +The proposed implementation adds a new class MpasRelativeDelta derived from +``dateutil.relativedelta.relativedelta`` to compute the expected +increments in years and months (as well as days, hours, minutes and seconds, as needed). +The class is documented as follows + +.. code-block:: python + + class MpasRelativeDelta(relativedelta): + """ + MpasRelativeDelta is a subclass of dateutil.relativedelta for relative time + intervals with different MPAS calendars. + + Only relative intervals (years, months, etc.) are supported and not the + absolute date specifications (year, month, etc.). Addition/subtraction + of datetime.datetime objects (but not other MpasRelativeDelta, + datetime.timedelta or other related objects) is supported. + + Author + ------ + Xylar Asay-Davis + + Last Modified + ------------- + 02/09/2017 + +The function for converting a date string to a ``MpasRelativeDelta`` is documented as follows: + +.. code-block:: python + + from dateutil.relativedelta import relativedelta + ... + def stringToRelativedelta(dateString, calendar='gregorian'): + """ + Given a date string and a calendar, returns an instance of + `MpasRelativeDelta` + + Parameters + ---------- + dateString : string + A date and time in one of the following formats: + - YYYY-MM-DD hh:mm:ss + - YYYY-MM-DD hh.mm.ss + - YYYY-MM-DD SSSSS + - DDD hh:mm:ss + - DDD hh.mm.ss + - DDD SSSSS + - hh.mm.ss + - hh:mm:ss + - YYYY-MM-DD + - YYYY-MM + - SSSSS + + Note: either underscores or spaces can be used to separate the date + from the time portion of the string. + + calendar: {'gregorian', 'gregorian_noleap'}, optional + The name of one of the calendars supported by MPAS cores + + Returns + ------- + relativedelta : An `MpasRelativeDelta` object + + Raises + ------ + ValueError + If an invalid `dateString` is supplied. 
+ + Author + ------ + Xylar Asay-Davis + + Last modified + ------------- + 02/04/2017 + """ + + +.. raw:: html + +

Testing

+ + + +.. raw:: html + +

Testing and Validation: Date string parsing supports both MPAS calendars
+ Date last modified: 2017/02/08
+ Contributors: Xylar Asay-Davis +

+ Analysis will be run on Edison with all available configurations found in `configs/edison`. As there + are currently no plans to run with the `gregorian` calendar option, we do not have test runs that use this + calendar. If this situation changes in the future, we'll test at that time. + + Regression tests previously written for `Date` have been modified to test the new utility functions. New tests + have been added to test that dates with both `gregorian` and `gregorian_noleap` calendars behave as + expected, particularly around the leap day. + +

Testing

+

Testing and Validation: Capability of incrementing dates by a number of years and/or months
+ Date last modified: 2017/02/06
+ Contributors: Xylar Asay-Davis +

+ + +Same as above. diff --git a/1.11.0rc1/_sources/design_docs/variable_mapping_reorg.rst.txt b/1.11.0rc1/_sources/design_docs/variable_mapping_reorg.rst.txt new file mode 100644 index 000000000..663e87b8b --- /dev/null +++ b/1.11.0rc1/_sources/design_docs/variable_mapping_reorg.rst.txt @@ -0,0 +1,291 @@ + +Moving variable mapping outside of mpas_xarray +============================================== + + +.. raw:: html + +

+ Xylar Asay-Davis
+ date: 2017/02/10
+

+

Summary

+ + In discussions with @pwolfram, it became clear that we would like to keep + mpas_xarray as general as possible, rather than adding code specific to + MPAS-Analysis. In particular, the capability for mapping variable names + that is currently part of mpas_xarray is likely a capability that only + MPAS-Analysis will need when opening xarray data sets. Likewise, there is + a desire for mpas_xarray not to use any of the functionality outside of its + own module so that it remains autonomous from MPAS-Analysis. + + At the same time, it is desirable for efficiency and parallelism to perform + certain operations during the preprocessing step within xarray, rather than + constructing a data set first and then (in serial) performing manipulations + (e.g. creating a time coordinate and slicing variables). + + The solution will be tested by making sure it produces bit-for-bit identical + results to those from the develop branch for typical test cases on LANL IC + and Edison. + +

Requirements

+ + + +.. raw:: html + +

Requirement: mpas_xarray does not include MPAS-Analysis specific + functionality
+ Date last modified: 2017/02/10
+ Contributors: Xylar Asay-Davis +

+ + +MPAS-Analysis specific functionality such as variable mapping should be +removed from mpas_xarray so it can remain an independent module, requiring +minimal modification to accommodate MPAS-Analysis' needs. + + +.. raw:: html + +

Requirement: MPAS-Analysis specific functionality should be supported in + xarray preprocessing
+ Date last modified: 2017/02/10
+ Contributors: Xylar Asay-Davis +

+ + +There should be a way to perform MPAS-Analysis specific functionality such as +mapping variables during preprocessing. This functionality should be +relatively easy to add to as new preprocessing needs arise. + + +.. raw:: html + +

Algorithmic Formulations (optional)

+ + + +.. raw:: html + +

Algorithm: mpas_xarray does not include MPAS-Analysis specific + functionality
+ Date last modified: 2017/02/10
+ Contributors: Xylar Asay-Davis +

+ + +All functions and function arguments related to variable mapping will +be removed from mpas_xarray and moved elsewhere. + + +.. raw:: html + +

Algorithm: MPAS-Analysis specific functionality should be supported in + xarray preprocessing
+ Date last modified: 2017/02/15
+ Contributors: Xylar Asay-Davis +

+ + +A new utility function, ``open_multifile_dataset`` will be added to ``mpas_xarray`` +that simplifies current calls to ``xarray.open_mfdataset`` to hide the +preprocessor and take care of removing redundant time indices once the dataset +has been built. (This function doesn't directly address the requirement but +is meant to make ``mpas_xarray`` easier to use and made sense because it +has a one-to-one correspondence with other functionality, described below, +that does address the requirement.) + +A new module, ``generalized_reader`` will also be added with its own +``open_multifile_dataset`` function. This version takes additional arguments +including a variable map and start and end dates for the dataset. +``generalized_reader.open_multifile_dataset`` will create a data set +by calling ``xarray.open_mfdataset`` with its own preprocessing function, +``generalized_reader._preprocess`` that first maps variable names, then +calls ``mpas_xarray.preprocess`` to finish the job. Once the dataset has +been constructed, redundant time indices are removed and the 'Time' +coordinate is sliced to be between the supplied start and end dates. + +This solution may add some confusion in terms of which reader should +be used to open xarray datasets. It is my sense that most developers +adding new functionality will do so by modifying existing scripts, and +these examples should make it clear which version of +``open_multifile_dataset`` is most appropriate. Nevertheless, clear +documentation of ``generalized_reader`` and ``mpas_xarray``\ , and their +differences are needed. + +Here is a typical usage of ``generalized_reader.open_multifile_dataset``\ : + +.. 
code-block:: python + + from mpas_analysis.shared.generalized_reader.generalized_reader \ + import open_multifile_dataset + + file_name = 'example_jan_feb.nc' + timestr = ['xtime_start', 'xtime_end'] + var_list = ['time_avg_avgValueWithinOceanRegion_avgSurfaceTemperature'] + variable_map = { + 'avgSurfaceTemperature': + ['time_avg_avgValueWithinOceanRegion_avgSurfaceTemperature', + 'other_string', + 'yet_another_string'], + 'daysSinceStartOfSim': + ['time_avg_daysSinceStartOfSim', + 'xtime', + 'something_else']} + ds = open_multifile_dataset(file_names=file_name, + calendar=calendar, + time_variable_name=timestr, + variable_list=var_list, + start_date='0001-01-01', + end_date='9999-12-31', + variable_map=variable_map, + year_offset=1850) + +Here is the same for ``mpas_xarray.open_multifile_dataset`` without the +variable map, start and end dates: + +.. code-block:: python + + from mpas_analysis.shared.mpas_xarray.mpas_xarray \ + import open_multifile_dataset + + file_name = 'example_jan_feb.nc' + timestr = ['xtime_start', 'xtime_end'] + var_list = ['time_avg_avgValueWithinOceanRegion_avgSurfaceTemperature'] + + ds = open_multifile_dataset(file_names=file_name, + calendar=calendar, + time_variable_name=timestr, + variable_list=var_list, + year_offset=1850) + + +.. raw:: html + +

Design and Implementation

+ + + +.. raw:: html + +

Implementation: mpas_xarray does not include MPAS-Analysis specific + functionality
+ Date last modified: 2017/02/15
+ Contributors: Xylar Asay-Davis +

+ + +A test branch can be found here +`xylar/MPAS-Analysis/variable_mapping_reorg `_ + +I have removed ``map_variable`` and ``rename_variables`` from ``mpas_xarray``. +I also removed any mention of the variable map from the rest of ``mpas_xarray``. + +This branch also includes several other cleanup operations that are not +addressing any requirements. These include: + + +* I added a new helper function, ``open_multifile_dataset``\ , for opening an + xarray data set in a single, simple command without reference to the + preprocessor. This function should make opening new data sets more + intuitive for mpas_xarray users. +* making several utility functions non-public (it is unclear to me why anyone + would want to call these directly): + + * ``_assert_valid_datetimes`` + * ``_assert_valid_selections`` + * ``_ensure_list`` + * ``_get_datetimes`` + +* I have removed the ability to run ``mpas_xarray.py`` as a script and the associated + tests. This is on the premise that 1) the tests were outdated and would have + needed to be updated to work with the current code and 2) unit testing in + ``test/test_mpas_xarray.py`` takes care of this capability in a better way. +* I have tried to make variable names a bit more verbose in various places. + However, at @pwolfram's request, I have left ds for datasets, following the + ``xarray`` convention. +* I have tried to improve the docstrings using a syntax that should be useful + for generating documentation later on. +* I have updated unit testing to work with the new interface, notably the + ``open_multifile_dataset`` function. + + +.. raw:: html + +

Implementation: MPAS-Analysis specific functionality should be supported in + xarray preprocessing
+ Date last modified: 2017/02/15
+ Contributors: Xylar Asay-Davis +

+ + +In the same branch as above, I have added a ``generalized_reader`` module that +extends the capabilities of ``mpas_xarray`` to include mapping of variable names. +The file structure is as follows: + +.. code-block:: bash + + mpas_analysis/shared/ + - generalized_reader/ + __init__.py + generalized_reader.py + +``generalized_reader.py`` contains a function ``open_multifile_dataset`` that is similar to +the one in ``mpas_xarray`` but with additional arguments needed by analysis: + + +* ``variable_map``\ , a map between MPAS and MPAS-Analysis variable names +* ``start_date``\ , the start date of the analysis +* ``end_date``\ , the end date of the analysis + This function performs the same steps as ``mpas_xarray.open_multifile_dataset`` + but uses the local preprocessing function, ``_preprocess``\ , and also slices + the 'Time' coordinate using the given start and end dates as a final step. + +The ``generalized_reader._preprocess`` function first maps variable names, then calls +``mpas_xarray.preprocess`` to do the rest of the preprocessing as normal. + +Two private functions, ``_map_variable_name`` and ``_rename_variables`` (taken out of +``mpas_xarray``\ ) are used to perform variable-name mapping. + + +.. raw:: html + +

Testing

+ + + +.. raw:: html + +

Testing and Validation: MPAS-Analysis specific functionality should be supported in + xarray preprocessing
+ Date last modified: 2017/02/15
+ Contributors: Xylar Asay-Davis +

+ + +In `xylar/MPAS-Analysis/variable_mapping_reorg `_\ , +the unit testing for mpas_xarray has been updated. This includes moving unit testing for +variable mapping elsewhere. + +I will make sure all tests with config files in the ``configs/lanl`` and ``configs/edison`` +directories produce bit-for-bit results with the current ``develop``. + + +.. raw:: html + +

Testing and Validation: MPAS-Analysis specific functionality should be supported in + xarray preprocessing
+ Date last modified: 2017/02/10
+ Contributors: Xylar Asay-Davis +

+ + +Largely, the same as above. + +I have added unit testing for ``generalized_reader`` (via the standalone +``generalized_reader.open_multifile_dataset`` function). These tests ensure that: + + +* variable mapping works as expected +* start and end dates work as expected diff --git a/1.11.0rc1/_sources/developers_guide/api.rst.txt b/1.11.0rc1/_sources/developers_guide/api.rst.txt new file mode 100644 index 000000000..b619d9016 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/api.rst.txt @@ -0,0 +1,309 @@ +############# +API reference +############# + +This page provides an auto-generated summary of the MPAS-Analysis API. For +more details and examples, refer to the relevant chapters in the main part of +the documentation. + +Top-level script: mpas_analysis +=============================== + +.. currentmodule:: mpas_analysis.__main__ + +.. autosummary:: + :toctree: generated/ + + main + build_analysis_list + determine_analyses_to_generate + add_task_and_subtasks + update_generate + run_analysis + wait_for_task + +Downloading data +================ + +.. currentmodule:: mpas_analysis.download_data + +.. autosummary:: + :toctree: generated/ + + download_analysis_data + +Analysis tasks +============== + +Base Class +---------- + +.. currentmodule:: mpas_analysis.shared + +.. autosummary:: + :toctree: generated/ + + AnalysisTask + AnalysisTask.setup_and_check + AnalysisTask.run_task + AnalysisTask.run_after + AnalysisTask.add_subtask + AnalysisTask.run + AnalysisTask.check_generate + AnalysisTask.check_analysis_enabled + AnalysisTask.set_start_end_date + +Ocean tasks +----------- + +.. currentmodule:: mpas_analysis.ocean + +.. 
autosummary:: + :toctree: generated/ + + ConservationTask + ClimatologyMapSST + ClimatologyMapSSS + ClimatologyMapMLD + ClimatologyMapMLDMinMax + ClimatologyMapSSH + ClimatologyMapEKE + ClimatologyMapOHCAnomaly + ClimatologyMapAntarcticMelt + ClimatologyMapSose + ClimatologyMapArgoTemperature + ClimatologyMapArgoSalinity + ClimatologyMapWaves + IndexNino34 + MeridionalHeatTransport + OceanHistogram + StreamfunctionMOC + TimeSeriesOHCAnomaly + TimeSeriesTemperatureAnomaly + TimeSeriesSalinityAnomaly + TimeSeriesSST + TimeSeriesAntarcticMelt + TimeSeriesOceanRegions + TimeSeriesTransport + +.. currentmodule:: mpas_analysis.ocean.compute_anomaly_subtask + +.. autosummary:: + :toctree: generated/ + + ComputeAnomalySubtask + +.. currentmodule:: mpas_analysis.ocean.plot_depth_integrated_time_series_subtask + +.. autosummary:: + :toctree: generated/ + + PlotDepthIntegratedTimeSeriesSubtask + +.. currentmodule:: mpas_analysis.ocean.plot_hovmoller_subtask + +.. autosummary:: + :toctree: generated/ + + PlotHovmollerSubtask + + +Sea ice tasks +------------- + +.. currentmodule:: mpas_analysis.sea_ice + +.. autosummary:: + :toctree: generated/ + + ClimatologyMapSeaIceConc + ClimatologyMapSeaIceThick + TimeSeriesSeaIce + ClimatologyMapIcebergConc + + +Shared modules +============== + +Reading MPAS Datasets +--------------------- +.. currentmodule:: mpas_analysis.shared.io + +.. autosummary:: + :toctree: generated/ + + open_mpas_dataset + +.. currentmodule:: mpas_analysis.shared.mpas_xarray + +.. autosummary:: + :toctree: generated/ + + mpas_xarray.open_multifile_dataset + mpas_xarray.preprocess + mpas_xarray.remove_repeated_time_index + mpas_xarray.subset_variables + +.. currentmodule:: mpas_analysis.shared.generalized_reader + +.. autosummary:: + :toctree: generated/ + + generalized_reader.open_multifile_dataset + + +Climatology +----------- +.. currentmodule:: mpas_analysis.shared.climatology + +.. 
autosummary:: + :toctree: generated/ + + get_comparison_descriptor + get_remapper + compute_monthly_climatology + compute_climatology + add_years_months_days_in_month + get_unmasked_mpas_climatology_directory + get_unmasked_mpas_climatology_file_name + get_masked_mpas_climatology_file_name + get_remapped_mpas_climatology_file_name + + MpasClimatologyTask + MpasClimatologyTask.add_variables + MpasClimatologyTask.get_file_name + + RemapMpasClimatologySubtask + RemapMpasClimatologySubtask.setup_and_check + RemapMpasClimatologySubtask.run_task + RemapMpasClimatologySubtask.add_comparison_grid_descriptor + RemapMpasClimatologySubtask.get_masked_file_name + RemapMpasClimatologySubtask.get_remapped_file_name + RemapMpasClimatologySubtask.customize_masked_climatology + RemapMpasClimatologySubtask.customize_remapped_climatology + + RemapObservedClimatologySubtask + RemapObservedClimatologySubtask.get_observation_descriptor + RemapObservedClimatologySubtask.build_observational_dataset + RemapObservedClimatologySubtask.get_file_name + +Time Series +----------- +.. currentmodule:: mpas_analysis.shared.time_series + +.. autosummary:: + :toctree: generated/ + + cache_time_series + compute_moving_avg_anomaly_from_start + compute_moving_avg + + MpasTimeSeriesTask + +Namelist and Streams Files +-------------------------- +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. autosummary:: + :toctree: generated/ + + convert_namelist_to_dict + NameList.__init__ + NameList.__getattr__ + NameList.__getitem__ + NameList.get + NameList.getint + NameList.getfloat + NameList.getbool + + StreamsFile.__init__ + StreamsFile.read + StreamsFile.readpath + StreamsFile.has_stream + StreamsFile.find_stream + +I/O Utilities +------------- +.. currentmodule:: mpas_analysis.shared.io + +.. autosummary:: + :toctree: generated/ + + utility.paths + utility.make_directories + utility.build_config_full_path + utility.check_path_exists + write_netcdf + + +Plotting +-------- +.. 
currentmodule:: mpas_analysis.shared.plot + +.. autosummary:: + :toctree: generated/ + + timeseries_analysis_plot + timeseries_analysis_plot_polar + plot_polar_comparison + plot_global_comparison + plot_1D + plot_vertical_section_comparison + plot_vertical_section + colormap.setup_colormap + ticks.plot_xtick_format + add_inset + + PlotClimatologyMapSubtask + PlotClimatologyMapSubtask.set_plot_info + + + +Projection +---------- +.. currentmodule:: mpas_analysis.shared.projection + +.. autosummary:: + :toctree: generated/ + + get_pyproj_projection + get_cartopy_projection + + +Regions +------- +.. currentmodule:: mpas_analysis.shared.regions + +.. autosummary:: + :toctree: generated/ + + compute_region_masks.ComputeRegionMasks + compute_region_masks_subtask.ComputeRegionMasksSubtask + compute_region_masks_subtask.get_feature_list + +Timekeeping +----------- +.. currentmodule:: mpas_analysis.shared.timekeeping + +.. autosummary:: + :toctree: generated/ + + utility.get_simulation_start_time + utility.string_to_datetime + utility.string_to_relative_delta + utility.string_to_days_since_date + utility.days_to_datetime + utility.datetime_to_days + utility.date_to_days + MpasRelativeDelta.MpasRelativeDelta + +Transects +--------- +.. currentmodule:: mpas_analysis.shared.transects + +.. autosummary:: + :toctree: generated/ + + compute_transect_masks_subtask.compute_mpas_transect_masks + compute_transect_masks_subtask.ComputeTransectMasksSubtask + diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.add_task_and_subtasks.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.add_task_and_subtasks.rst.txt new file mode 100644 index 000000000..a8742b003 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.add_task_and_subtasks.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.\_\_main\_\_.add\_task\_and\_subtasks +==================================================== + +.. 
currentmodule:: mpas_analysis.__main__ + +.. autofunction:: add_task_and_subtasks \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.build_analysis_list.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.build_analysis_list.rst.txt new file mode 100644 index 000000000..7b5758af1 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.build_analysis_list.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.\_\_main\_\_.build\_analysis\_list +================================================= + +.. currentmodule:: mpas_analysis.__main__ + +.. autofunction:: build_analysis_list \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.determine_analyses_to_generate.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.determine_analyses_to_generate.rst.txt new file mode 100644 index 000000000..e6b757e0e --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.determine_analyses_to_generate.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.\_\_main\_\_.determine\_analyses\_to\_generate +============================================================= + +.. currentmodule:: mpas_analysis.__main__ + +.. autofunction:: determine_analyses_to_generate \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.main.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.main.rst.txt new file mode 100644 index 000000000..3bfc45b92 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.main.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.\_\_main\_\_.main +================================ + +.. currentmodule:: mpas_analysis.__main__ + +.. 
autofunction:: main \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.run_analysis.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.run_analysis.rst.txt new file mode 100644 index 000000000..dc118192c --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.run_analysis.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.\_\_main\_\_.run\_analysis +========================================= + +.. currentmodule:: mpas_analysis.__main__ + +.. autofunction:: run_analysis \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.update_generate.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.update_generate.rst.txt new file mode 100644 index 000000000..e9fab9d71 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.update_generate.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.\_\_main\_\_.update\_generate +============================================ + +.. currentmodule:: mpas_analysis.__main__ + +.. autofunction:: update_generate \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.wait_for_task.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.wait_for_task.rst.txt new file mode 100644 index 000000000..fcd6e6233 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.__main__.wait_for_task.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.\_\_main\_\_.wait\_for\_task +=========================================== + +.. currentmodule:: mpas_analysis.__main__ + +.. 
autofunction:: wait_for_task \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.download_data.download_analysis_data.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.download_data.download_analysis_data.rst.txt new file mode 100644 index 000000000..e541d7f23 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.download_data.download_analysis_data.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.download\_data.download\_analysis\_data +====================================================== + +.. currentmodule:: mpas_analysis.download_data + +.. autofunction:: download_analysis_data \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapAntarcticMelt.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapAntarcticMelt.rst.txt new file mode 100644 index 000000000..6c66e4512 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapAntarcticMelt.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapAntarcticMelt +================================================ + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapAntarcticMelt + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapAntarcticMelt.__init__ + ~ClimatologyMapAntarcticMelt.add_subtask + ~ClimatologyMapAntarcticMelt.check_analysis_enabled + ~ClimatologyMapAntarcticMelt.check_generate + ~ClimatologyMapAntarcticMelt.close + ~ClimatologyMapAntarcticMelt.is_alive + ~ClimatologyMapAntarcticMelt.join + ~ClimatologyMapAntarcticMelt.kill + ~ClimatologyMapAntarcticMelt.run + ~ClimatologyMapAntarcticMelt.run_after + ~ClimatologyMapAntarcticMelt.run_task + ~ClimatologyMapAntarcticMelt.set_start_end_date + ~ClimatologyMapAntarcticMelt.setup_and_check + ~ClimatologyMapAntarcticMelt.start + ~ClimatologyMapAntarcticMelt.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapAntarcticMelt.BLOCKED + ~ClimatologyMapAntarcticMelt.FAIL + ~ClimatologyMapAntarcticMelt.READY + ~ClimatologyMapAntarcticMelt.RUNNING + ~ClimatologyMapAntarcticMelt.SUCCESS + ~ClimatologyMapAntarcticMelt.UNSET + ~ClimatologyMapAntarcticMelt.authkey + ~ClimatologyMapAntarcticMelt.daemon + ~ClimatologyMapAntarcticMelt.exitcode + ~ClimatologyMapAntarcticMelt.ident + ~ClimatologyMapAntarcticMelt.name + ~ClimatologyMapAntarcticMelt.pid + ~ClimatologyMapAntarcticMelt.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoSalinity.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoSalinity.rst.txt new file mode 100644 index 000000000..efd53a060 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoSalinity.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapArgoSalinity +=============================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapArgoSalinity + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapArgoSalinity.__init__ + ~ClimatologyMapArgoSalinity.add_subtask + ~ClimatologyMapArgoSalinity.check_analysis_enabled + ~ClimatologyMapArgoSalinity.check_generate + ~ClimatologyMapArgoSalinity.close + ~ClimatologyMapArgoSalinity.is_alive + ~ClimatologyMapArgoSalinity.join + ~ClimatologyMapArgoSalinity.kill + ~ClimatologyMapArgoSalinity.run + ~ClimatologyMapArgoSalinity.run_after + ~ClimatologyMapArgoSalinity.run_task + ~ClimatologyMapArgoSalinity.set_start_end_date + ~ClimatologyMapArgoSalinity.setup_and_check + ~ClimatologyMapArgoSalinity.start + ~ClimatologyMapArgoSalinity.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapArgoSalinity.BLOCKED + ~ClimatologyMapArgoSalinity.FAIL + ~ClimatologyMapArgoSalinity.READY + ~ClimatologyMapArgoSalinity.RUNNING + ~ClimatologyMapArgoSalinity.SUCCESS + ~ClimatologyMapArgoSalinity.UNSET + ~ClimatologyMapArgoSalinity.authkey + ~ClimatologyMapArgoSalinity.daemon + ~ClimatologyMapArgoSalinity.exitcode + ~ClimatologyMapArgoSalinity.ident + ~ClimatologyMapArgoSalinity.name + ~ClimatologyMapArgoSalinity.pid + ~ClimatologyMapArgoSalinity.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoTemperature.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoTemperature.rst.txt new file mode 100644 index 000000000..9e08c7075 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoTemperature.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapArgoTemperature +================================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapArgoTemperature + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapArgoTemperature.__init__ + ~ClimatologyMapArgoTemperature.add_subtask + ~ClimatologyMapArgoTemperature.check_analysis_enabled + ~ClimatologyMapArgoTemperature.check_generate + ~ClimatologyMapArgoTemperature.close + ~ClimatologyMapArgoTemperature.is_alive + ~ClimatologyMapArgoTemperature.join + ~ClimatologyMapArgoTemperature.kill + ~ClimatologyMapArgoTemperature.run + ~ClimatologyMapArgoTemperature.run_after + ~ClimatologyMapArgoTemperature.run_task + ~ClimatologyMapArgoTemperature.set_start_end_date + ~ClimatologyMapArgoTemperature.setup_and_check + ~ClimatologyMapArgoTemperature.start + ~ClimatologyMapArgoTemperature.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapArgoTemperature.BLOCKED + ~ClimatologyMapArgoTemperature.FAIL + ~ClimatologyMapArgoTemperature.READY + ~ClimatologyMapArgoTemperature.RUNNING + ~ClimatologyMapArgoTemperature.SUCCESS + ~ClimatologyMapArgoTemperature.UNSET + ~ClimatologyMapArgoTemperature.authkey + ~ClimatologyMapArgoTemperature.daemon + ~ClimatologyMapArgoTemperature.exitcode + ~ClimatologyMapArgoTemperature.ident + ~ClimatologyMapArgoTemperature.name + ~ClimatologyMapArgoTemperature.pid + ~ClimatologyMapArgoTemperature.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapEKE.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapEKE.rst.txt new file mode 100644 index 000000000..7a7ca8f01 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapEKE.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapEKE +====================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapEKE + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapEKE.__init__ + ~ClimatologyMapEKE.add_subtask + ~ClimatologyMapEKE.check_analysis_enabled + ~ClimatologyMapEKE.check_generate + ~ClimatologyMapEKE.close + ~ClimatologyMapEKE.is_alive + ~ClimatologyMapEKE.join + ~ClimatologyMapEKE.kill + ~ClimatologyMapEKE.run + ~ClimatologyMapEKE.run_after + ~ClimatologyMapEKE.run_task + ~ClimatologyMapEKE.set_start_end_date + ~ClimatologyMapEKE.setup_and_check + ~ClimatologyMapEKE.start + ~ClimatologyMapEKE.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapEKE.BLOCKED + ~ClimatologyMapEKE.FAIL + ~ClimatologyMapEKE.READY + ~ClimatologyMapEKE.RUNNING + ~ClimatologyMapEKE.SUCCESS + ~ClimatologyMapEKE.UNSET + ~ClimatologyMapEKE.authkey + ~ClimatologyMapEKE.daemon + ~ClimatologyMapEKE.exitcode + ~ClimatologyMapEKE.ident + ~ClimatologyMapEKE.name + ~ClimatologyMapEKE.pid + ~ClimatologyMapEKE.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLD.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLD.rst.txt new file mode 100644 index 000000000..654e1b99b --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLD.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapMLD +====================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapMLD + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapMLD.__init__ + ~ClimatologyMapMLD.add_subtask + ~ClimatologyMapMLD.check_analysis_enabled + ~ClimatologyMapMLD.check_generate + ~ClimatologyMapMLD.close + ~ClimatologyMapMLD.is_alive + ~ClimatologyMapMLD.join + ~ClimatologyMapMLD.kill + ~ClimatologyMapMLD.run + ~ClimatologyMapMLD.run_after + ~ClimatologyMapMLD.run_task + ~ClimatologyMapMLD.set_start_end_date + ~ClimatologyMapMLD.setup_and_check + ~ClimatologyMapMLD.start + ~ClimatologyMapMLD.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapMLD.BLOCKED + ~ClimatologyMapMLD.FAIL + ~ClimatologyMapMLD.READY + ~ClimatologyMapMLD.RUNNING + ~ClimatologyMapMLD.SUCCESS + ~ClimatologyMapMLD.UNSET + ~ClimatologyMapMLD.authkey + ~ClimatologyMapMLD.daemon + ~ClimatologyMapMLD.exitcode + ~ClimatologyMapMLD.ident + ~ClimatologyMapMLD.name + ~ClimatologyMapMLD.pid + ~ClimatologyMapMLD.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLDMinMax.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLDMinMax.rst.txt new file mode 100644 index 000000000..cebe54ec0 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLDMinMax.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapMLDMinMax +============================================ + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapMLDMinMax + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapMLDMinMax.__init__ + ~ClimatologyMapMLDMinMax.add_subtask + ~ClimatologyMapMLDMinMax.check_analysis_enabled + ~ClimatologyMapMLDMinMax.check_generate + ~ClimatologyMapMLDMinMax.close + ~ClimatologyMapMLDMinMax.is_alive + ~ClimatologyMapMLDMinMax.join + ~ClimatologyMapMLDMinMax.kill + ~ClimatologyMapMLDMinMax.run + ~ClimatologyMapMLDMinMax.run_after + ~ClimatologyMapMLDMinMax.run_task + ~ClimatologyMapMLDMinMax.set_start_end_date + ~ClimatologyMapMLDMinMax.setup_and_check + ~ClimatologyMapMLDMinMax.start + ~ClimatologyMapMLDMinMax.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapMLDMinMax.BLOCKED + ~ClimatologyMapMLDMinMax.FAIL + ~ClimatologyMapMLDMinMax.READY + ~ClimatologyMapMLDMinMax.RUNNING + ~ClimatologyMapMLDMinMax.SUCCESS + ~ClimatologyMapMLDMinMax.UNSET + ~ClimatologyMapMLDMinMax.authkey + ~ClimatologyMapMLDMinMax.daemon + ~ClimatologyMapMLDMinMax.exitcode + ~ClimatologyMapMLDMinMax.ident + ~ClimatologyMapMLDMinMax.name + ~ClimatologyMapMLDMinMax.pid + ~ClimatologyMapMLDMinMax.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapOHCAnomaly.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapOHCAnomaly.rst.txt new file mode 100644 index 000000000..4fd9d1f58 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapOHCAnomaly.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapOHCAnomaly +============================================= + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapOHCAnomaly + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapOHCAnomaly.__init__ + ~ClimatologyMapOHCAnomaly.add_subtask + ~ClimatologyMapOHCAnomaly.check_analysis_enabled + ~ClimatologyMapOHCAnomaly.check_generate + ~ClimatologyMapOHCAnomaly.close + ~ClimatologyMapOHCAnomaly.is_alive + ~ClimatologyMapOHCAnomaly.join + ~ClimatologyMapOHCAnomaly.kill + ~ClimatologyMapOHCAnomaly.run + ~ClimatologyMapOHCAnomaly.run_after + ~ClimatologyMapOHCAnomaly.run_task + ~ClimatologyMapOHCAnomaly.set_start_end_date + ~ClimatologyMapOHCAnomaly.setup_and_check + ~ClimatologyMapOHCAnomaly.start + ~ClimatologyMapOHCAnomaly.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapOHCAnomaly.BLOCKED + ~ClimatologyMapOHCAnomaly.FAIL + ~ClimatologyMapOHCAnomaly.READY + ~ClimatologyMapOHCAnomaly.RUNNING + ~ClimatologyMapOHCAnomaly.SUCCESS + ~ClimatologyMapOHCAnomaly.UNSET + ~ClimatologyMapOHCAnomaly.authkey + ~ClimatologyMapOHCAnomaly.daemon + ~ClimatologyMapOHCAnomaly.exitcode + ~ClimatologyMapOHCAnomaly.ident + ~ClimatologyMapOHCAnomaly.name + ~ClimatologyMapOHCAnomaly.pid + ~ClimatologyMapOHCAnomaly.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSH.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSH.rst.txt new file mode 100644 index 000000000..9f083f9ca --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSH.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapSSH +====================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapSSH + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapSSH.__init__ + ~ClimatologyMapSSH.add_subtask + ~ClimatologyMapSSH.check_analysis_enabled + ~ClimatologyMapSSH.check_generate + ~ClimatologyMapSSH.close + ~ClimatologyMapSSH.is_alive + ~ClimatologyMapSSH.join + ~ClimatologyMapSSH.kill + ~ClimatologyMapSSH.run + ~ClimatologyMapSSH.run_after + ~ClimatologyMapSSH.run_task + ~ClimatologyMapSSH.set_start_end_date + ~ClimatologyMapSSH.setup_and_check + ~ClimatologyMapSSH.start + ~ClimatologyMapSSH.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapSSH.BLOCKED + ~ClimatologyMapSSH.FAIL + ~ClimatologyMapSSH.READY + ~ClimatologyMapSSH.RUNNING + ~ClimatologyMapSSH.SUCCESS + ~ClimatologyMapSSH.UNSET + ~ClimatologyMapSSH.authkey + ~ClimatologyMapSSH.daemon + ~ClimatologyMapSSH.exitcode + ~ClimatologyMapSSH.ident + ~ClimatologyMapSSH.name + ~ClimatologyMapSSH.pid + ~ClimatologyMapSSH.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSS.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSS.rst.txt new file mode 100644 index 000000000..636f59054 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSS.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapSSS +====================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapSSS + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapSSS.__init__ + ~ClimatologyMapSSS.add_subtask + ~ClimatologyMapSSS.check_analysis_enabled + ~ClimatologyMapSSS.check_generate + ~ClimatologyMapSSS.close + ~ClimatologyMapSSS.is_alive + ~ClimatologyMapSSS.join + ~ClimatologyMapSSS.kill + ~ClimatologyMapSSS.run + ~ClimatologyMapSSS.run_after + ~ClimatologyMapSSS.run_task + ~ClimatologyMapSSS.set_start_end_date + ~ClimatologyMapSSS.setup_and_check + ~ClimatologyMapSSS.start + ~ClimatologyMapSSS.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapSSS.BLOCKED + ~ClimatologyMapSSS.FAIL + ~ClimatologyMapSSS.READY + ~ClimatologyMapSSS.RUNNING + ~ClimatologyMapSSS.SUCCESS + ~ClimatologyMapSSS.UNSET + ~ClimatologyMapSSS.authkey + ~ClimatologyMapSSS.daemon + ~ClimatologyMapSSS.exitcode + ~ClimatologyMapSSS.ident + ~ClimatologyMapSSS.name + ~ClimatologyMapSSS.pid + ~ClimatologyMapSSS.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSST.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSST.rst.txt new file mode 100644 index 000000000..ba0ac8626 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSST.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapSST +====================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapSST + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapSST.__init__ + ~ClimatologyMapSST.add_subtask + ~ClimatologyMapSST.check_analysis_enabled + ~ClimatologyMapSST.check_generate + ~ClimatologyMapSST.close + ~ClimatologyMapSST.is_alive + ~ClimatologyMapSST.join + ~ClimatologyMapSST.kill + ~ClimatologyMapSST.run + ~ClimatologyMapSST.run_after + ~ClimatologyMapSST.run_task + ~ClimatologyMapSST.set_start_end_date + ~ClimatologyMapSST.setup_and_check + ~ClimatologyMapSST.start + ~ClimatologyMapSST.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapSST.BLOCKED + ~ClimatologyMapSST.FAIL + ~ClimatologyMapSST.READY + ~ClimatologyMapSST.RUNNING + ~ClimatologyMapSST.SUCCESS + ~ClimatologyMapSST.UNSET + ~ClimatologyMapSST.authkey + ~ClimatologyMapSST.daemon + ~ClimatologyMapSST.exitcode + ~ClimatologyMapSST.ident + ~ClimatologyMapSST.name + ~ClimatologyMapSST.pid + ~ClimatologyMapSST.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSose.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSose.rst.txt new file mode 100644 index 000000000..05d774494 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSose.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapSose +======================================= + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapSose + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapSose.__init__ + ~ClimatologyMapSose.add_subtask + ~ClimatologyMapSose.check_analysis_enabled + ~ClimatologyMapSose.check_generate + ~ClimatologyMapSose.close + ~ClimatologyMapSose.is_alive + ~ClimatologyMapSose.join + ~ClimatologyMapSose.kill + ~ClimatologyMapSose.run + ~ClimatologyMapSose.run_after + ~ClimatologyMapSose.run_task + ~ClimatologyMapSose.set_start_end_date + ~ClimatologyMapSose.setup_and_check + ~ClimatologyMapSose.start + ~ClimatologyMapSose.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapSose.BLOCKED + ~ClimatologyMapSose.FAIL + ~ClimatologyMapSose.READY + ~ClimatologyMapSose.RUNNING + ~ClimatologyMapSose.SUCCESS + ~ClimatologyMapSose.UNSET + ~ClimatologyMapSose.authkey + ~ClimatologyMapSose.daemon + ~ClimatologyMapSose.exitcode + ~ClimatologyMapSose.ident + ~ClimatologyMapSose.name + ~ClimatologyMapSose.pid + ~ClimatologyMapSose.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapWaves.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapWaves.rst.txt new file mode 100644 index 000000000..ea8333fa7 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapWaves.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ClimatologyMapWaves +======================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ClimatologyMapWaves + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapWaves.__init__ + ~ClimatologyMapWaves.add_subtask + ~ClimatologyMapWaves.check_analysis_enabled + ~ClimatologyMapWaves.check_generate + ~ClimatologyMapWaves.close + ~ClimatologyMapWaves.is_alive + ~ClimatologyMapWaves.join + ~ClimatologyMapWaves.kill + ~ClimatologyMapWaves.run + ~ClimatologyMapWaves.run_after + ~ClimatologyMapWaves.run_task + ~ClimatologyMapWaves.set_start_end_date + ~ClimatologyMapWaves.setup_and_check + ~ClimatologyMapWaves.start + ~ClimatologyMapWaves.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapWaves.BLOCKED + ~ClimatologyMapWaves.FAIL + ~ClimatologyMapWaves.READY + ~ClimatologyMapWaves.RUNNING + ~ClimatologyMapWaves.SUCCESS + ~ClimatologyMapWaves.UNSET + ~ClimatologyMapWaves.authkey + ~ClimatologyMapWaves.daemon + ~ClimatologyMapWaves.exitcode + ~ClimatologyMapWaves.ident + ~ClimatologyMapWaves.name + ~ClimatologyMapWaves.pid + ~ClimatologyMapWaves.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ConservationTask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ConservationTask.rst.txt new file mode 100644 index 000000000..f77b4cfbb --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.ConservationTask.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.ConservationTask +===================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: ConservationTask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ConservationTask.__init__ + ~ConservationTask.add_subtask + ~ConservationTask.check_analysis_enabled + ~ConservationTask.check_generate + ~ConservationTask.close + ~ConservationTask.is_alive + ~ConservationTask.join + ~ConservationTask.kill + ~ConservationTask.run + ~ConservationTask.run_after + ~ConservationTask.run_task + ~ConservationTask.set_start_end_date + ~ConservationTask.setup_and_check + ~ConservationTask.start + ~ConservationTask.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ConservationTask.BLOCKED + ~ConservationTask.FAIL + ~ConservationTask.READY + ~ConservationTask.RUNNING + ~ConservationTask.SUCCESS + ~ConservationTask.UNSET + ~ConservationTask.authkey + ~ConservationTask.daemon + ~ConservationTask.exitcode + ~ConservationTask.ident + ~ConservationTask.name + ~ConservationTask.pid + ~ConservationTask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.IndexNino34.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.IndexNino34.rst.txt new file mode 100644 index 000000000..1b9bcb96e --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.IndexNino34.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.IndexNino34 +================================ + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: IndexNino34 + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~IndexNino34.__init__ + ~IndexNino34.add_subtask + ~IndexNino34.check_analysis_enabled + ~IndexNino34.check_generate + ~IndexNino34.close + ~IndexNino34.is_alive + ~IndexNino34.join + ~IndexNino34.kill + ~IndexNino34.run + ~IndexNino34.run_after + ~IndexNino34.run_task + ~IndexNino34.set_start_end_date + ~IndexNino34.setup_and_check + ~IndexNino34.start + ~IndexNino34.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~IndexNino34.BLOCKED + ~IndexNino34.FAIL + ~IndexNino34.READY + ~IndexNino34.RUNNING + ~IndexNino34.SUCCESS + ~IndexNino34.UNSET + ~IndexNino34.authkey + ~IndexNino34.daemon + ~IndexNino34.exitcode + ~IndexNino34.ident + ~IndexNino34.name + ~IndexNino34.pid + ~IndexNino34.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.MeridionalHeatTransport.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.MeridionalHeatTransport.rst.txt new file mode 100644 index 000000000..48b7e49c2 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.MeridionalHeatTransport.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.MeridionalHeatTransport +============================================ + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: MeridionalHeatTransport + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~MeridionalHeatTransport.__init__ + ~MeridionalHeatTransport.add_subtask + ~MeridionalHeatTransport.check_analysis_enabled + ~MeridionalHeatTransport.check_generate + ~MeridionalHeatTransport.close + ~MeridionalHeatTransport.is_alive + ~MeridionalHeatTransport.join + ~MeridionalHeatTransport.kill + ~MeridionalHeatTransport.run + ~MeridionalHeatTransport.run_after + ~MeridionalHeatTransport.run_task + ~MeridionalHeatTransport.set_start_end_date + ~MeridionalHeatTransport.setup_and_check + ~MeridionalHeatTransport.start + ~MeridionalHeatTransport.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~MeridionalHeatTransport.BLOCKED + ~MeridionalHeatTransport.FAIL + ~MeridionalHeatTransport.READY + ~MeridionalHeatTransport.RUNNING + ~MeridionalHeatTransport.SUCCESS + ~MeridionalHeatTransport.UNSET + ~MeridionalHeatTransport.authkey + ~MeridionalHeatTransport.daemon + ~MeridionalHeatTransport.exitcode + ~MeridionalHeatTransport.ident + ~MeridionalHeatTransport.name + ~MeridionalHeatTransport.pid + ~MeridionalHeatTransport.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.OceanHistogram.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.OceanHistogram.rst.txt new file mode 100644 index 000000000..b14e66bfe --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.OceanHistogram.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.OceanHistogram +=================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: OceanHistogram + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~OceanHistogram.__init__ + ~OceanHistogram.add_subtask + ~OceanHistogram.check_analysis_enabled + ~OceanHistogram.check_generate + ~OceanHistogram.close + ~OceanHistogram.is_alive + ~OceanHistogram.join + ~OceanHistogram.kill + ~OceanHistogram.run + ~OceanHistogram.run_after + ~OceanHistogram.run_task + ~OceanHistogram.set_start_end_date + ~OceanHistogram.setup_and_check + ~OceanHistogram.start + ~OceanHistogram.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~OceanHistogram.BLOCKED + ~OceanHistogram.FAIL + ~OceanHistogram.READY + ~OceanHistogram.RUNNING + ~OceanHistogram.SUCCESS + ~OceanHistogram.UNSET + ~OceanHistogram.authkey + ~OceanHistogram.daemon + ~OceanHistogram.exitcode + ~OceanHistogram.ident + ~OceanHistogram.name + ~OceanHistogram.pid + ~OceanHistogram.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.StreamfunctionMOC.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.StreamfunctionMOC.rst.txt new file mode 100644 index 000000000..ad854c0f7 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.StreamfunctionMOC.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.StreamfunctionMOC +====================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: StreamfunctionMOC + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~StreamfunctionMOC.__init__ + ~StreamfunctionMOC.add_subtask + ~StreamfunctionMOC.check_analysis_enabled + ~StreamfunctionMOC.check_generate + ~StreamfunctionMOC.close + ~StreamfunctionMOC.is_alive + ~StreamfunctionMOC.join + ~StreamfunctionMOC.kill + ~StreamfunctionMOC.run + ~StreamfunctionMOC.run_after + ~StreamfunctionMOC.run_task + ~StreamfunctionMOC.set_start_end_date + ~StreamfunctionMOC.setup_and_check + ~StreamfunctionMOC.start + ~StreamfunctionMOC.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~StreamfunctionMOC.BLOCKED + ~StreamfunctionMOC.FAIL + ~StreamfunctionMOC.READY + ~StreamfunctionMOC.RUNNING + ~StreamfunctionMOC.SUCCESS + ~StreamfunctionMOC.UNSET + ~StreamfunctionMOC.authkey + ~StreamfunctionMOC.daemon + ~StreamfunctionMOC.exitcode + ~StreamfunctionMOC.ident + ~StreamfunctionMOC.name + ~StreamfunctionMOC.pid + ~StreamfunctionMOC.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesAntarcticMelt.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesAntarcticMelt.rst.txt new file mode 100644 index 000000000..23cbca452 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesAntarcticMelt.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.TimeSeriesAntarcticMelt +============================================ + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: TimeSeriesAntarcticMelt + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~TimeSeriesAntarcticMelt.__init__ + ~TimeSeriesAntarcticMelt.add_subtask + ~TimeSeriesAntarcticMelt.check_analysis_enabled + ~TimeSeriesAntarcticMelt.check_generate + ~TimeSeriesAntarcticMelt.close + ~TimeSeriesAntarcticMelt.is_alive + ~TimeSeriesAntarcticMelt.join + ~TimeSeriesAntarcticMelt.kill + ~TimeSeriesAntarcticMelt.run + ~TimeSeriesAntarcticMelt.run_after + ~TimeSeriesAntarcticMelt.run_task + ~TimeSeriesAntarcticMelt.set_start_end_date + ~TimeSeriesAntarcticMelt.setup_and_check + ~TimeSeriesAntarcticMelt.start + ~TimeSeriesAntarcticMelt.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~TimeSeriesAntarcticMelt.BLOCKED + ~TimeSeriesAntarcticMelt.FAIL + ~TimeSeriesAntarcticMelt.READY + ~TimeSeriesAntarcticMelt.RUNNING + ~TimeSeriesAntarcticMelt.SUCCESS + ~TimeSeriesAntarcticMelt.UNSET + ~TimeSeriesAntarcticMelt.authkey + ~TimeSeriesAntarcticMelt.daemon + ~TimeSeriesAntarcticMelt.exitcode + ~TimeSeriesAntarcticMelt.ident + ~TimeSeriesAntarcticMelt.name + ~TimeSeriesAntarcticMelt.pid + ~TimeSeriesAntarcticMelt.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOHCAnomaly.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOHCAnomaly.rst.txt new file mode 100644 index 000000000..9402dba27 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOHCAnomaly.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.TimeSeriesOHCAnomaly +========================================= + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: TimeSeriesOHCAnomaly + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~TimeSeriesOHCAnomaly.__init__ + ~TimeSeriesOHCAnomaly.add_subtask + ~TimeSeriesOHCAnomaly.check_analysis_enabled + ~TimeSeriesOHCAnomaly.check_generate + ~TimeSeriesOHCAnomaly.close + ~TimeSeriesOHCAnomaly.is_alive + ~TimeSeriesOHCAnomaly.join + ~TimeSeriesOHCAnomaly.kill + ~TimeSeriesOHCAnomaly.run + ~TimeSeriesOHCAnomaly.run_after + ~TimeSeriesOHCAnomaly.run_task + ~TimeSeriesOHCAnomaly.set_start_end_date + ~TimeSeriesOHCAnomaly.setup_and_check + ~TimeSeriesOHCAnomaly.start + ~TimeSeriesOHCAnomaly.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~TimeSeriesOHCAnomaly.BLOCKED + ~TimeSeriesOHCAnomaly.FAIL + ~TimeSeriesOHCAnomaly.READY + ~TimeSeriesOHCAnomaly.RUNNING + ~TimeSeriesOHCAnomaly.SUCCESS + ~TimeSeriesOHCAnomaly.UNSET + ~TimeSeriesOHCAnomaly.authkey + ~TimeSeriesOHCAnomaly.daemon + ~TimeSeriesOHCAnomaly.exitcode + ~TimeSeriesOHCAnomaly.ident + ~TimeSeriesOHCAnomaly.name + ~TimeSeriesOHCAnomaly.pid + ~TimeSeriesOHCAnomaly.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOceanRegions.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOceanRegions.rst.txt new file mode 100644 index 000000000..48eb48da0 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOceanRegions.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.TimeSeriesOceanRegions +=========================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: TimeSeriesOceanRegions + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~TimeSeriesOceanRegions.__init__ + ~TimeSeriesOceanRegions.add_subtask + ~TimeSeriesOceanRegions.check_analysis_enabled + ~TimeSeriesOceanRegions.check_generate + ~TimeSeriesOceanRegions.close + ~TimeSeriesOceanRegions.is_alive + ~TimeSeriesOceanRegions.join + ~TimeSeriesOceanRegions.kill + ~TimeSeriesOceanRegions.run + ~TimeSeriesOceanRegions.run_after + ~TimeSeriesOceanRegions.run_task + ~TimeSeriesOceanRegions.set_start_end_date + ~TimeSeriesOceanRegions.setup_and_check + ~TimeSeriesOceanRegions.start + ~TimeSeriesOceanRegions.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~TimeSeriesOceanRegions.BLOCKED + ~TimeSeriesOceanRegions.FAIL + ~TimeSeriesOceanRegions.READY + ~TimeSeriesOceanRegions.RUNNING + ~TimeSeriesOceanRegions.SUCCESS + ~TimeSeriesOceanRegions.UNSET + ~TimeSeriesOceanRegions.authkey + ~TimeSeriesOceanRegions.daemon + ~TimeSeriesOceanRegions.exitcode + ~TimeSeriesOceanRegions.ident + ~TimeSeriesOceanRegions.name + ~TimeSeriesOceanRegions.pid + ~TimeSeriesOceanRegions.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSST.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSST.rst.txt new file mode 100644 index 000000000..f960972b6 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSST.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.TimeSeriesSST +================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: TimeSeriesSST + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~TimeSeriesSST.__init__ + ~TimeSeriesSST.add_subtask + ~TimeSeriesSST.check_analysis_enabled + ~TimeSeriesSST.check_generate + ~TimeSeriesSST.close + ~TimeSeriesSST.is_alive + ~TimeSeriesSST.join + ~TimeSeriesSST.kill + ~TimeSeriesSST.run + ~TimeSeriesSST.run_after + ~TimeSeriesSST.run_task + ~TimeSeriesSST.set_start_end_date + ~TimeSeriesSST.setup_and_check + ~TimeSeriesSST.start + ~TimeSeriesSST.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~TimeSeriesSST.BLOCKED + ~TimeSeriesSST.FAIL + ~TimeSeriesSST.READY + ~TimeSeriesSST.RUNNING + ~TimeSeriesSST.SUCCESS + ~TimeSeriesSST.UNSET + ~TimeSeriesSST.authkey + ~TimeSeriesSST.daemon + ~TimeSeriesSST.exitcode + ~TimeSeriesSST.ident + ~TimeSeriesSST.name + ~TimeSeriesSST.pid + ~TimeSeriesSST.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSalinityAnomaly.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSalinityAnomaly.rst.txt new file mode 100644 index 000000000..725913042 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSalinityAnomaly.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.TimeSeriesSalinityAnomaly +============================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: TimeSeriesSalinityAnomaly + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~TimeSeriesSalinityAnomaly.__init__ + ~TimeSeriesSalinityAnomaly.add_subtask + ~TimeSeriesSalinityAnomaly.check_analysis_enabled + ~TimeSeriesSalinityAnomaly.check_generate + ~TimeSeriesSalinityAnomaly.close + ~TimeSeriesSalinityAnomaly.is_alive + ~TimeSeriesSalinityAnomaly.join + ~TimeSeriesSalinityAnomaly.kill + ~TimeSeriesSalinityAnomaly.run + ~TimeSeriesSalinityAnomaly.run_after + ~TimeSeriesSalinityAnomaly.run_task + ~TimeSeriesSalinityAnomaly.set_start_end_date + ~TimeSeriesSalinityAnomaly.setup_and_check + ~TimeSeriesSalinityAnomaly.start + ~TimeSeriesSalinityAnomaly.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~TimeSeriesSalinityAnomaly.BLOCKED + ~TimeSeriesSalinityAnomaly.FAIL + ~TimeSeriesSalinityAnomaly.READY + ~TimeSeriesSalinityAnomaly.RUNNING + ~TimeSeriesSalinityAnomaly.SUCCESS + ~TimeSeriesSalinityAnomaly.UNSET + ~TimeSeriesSalinityAnomaly.authkey + ~TimeSeriesSalinityAnomaly.daemon + ~TimeSeriesSalinityAnomaly.exitcode + ~TimeSeriesSalinityAnomaly.ident + ~TimeSeriesSalinityAnomaly.name + ~TimeSeriesSalinityAnomaly.pid + ~TimeSeriesSalinityAnomaly.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTemperatureAnomaly.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTemperatureAnomaly.rst.txt new file mode 100644 index 000000000..bc1022b80 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTemperatureAnomaly.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.TimeSeriesTemperatureAnomaly +================================================= + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: TimeSeriesTemperatureAnomaly + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~TimeSeriesTemperatureAnomaly.__init__ + ~TimeSeriesTemperatureAnomaly.add_subtask + ~TimeSeriesTemperatureAnomaly.check_analysis_enabled + ~TimeSeriesTemperatureAnomaly.check_generate + ~TimeSeriesTemperatureAnomaly.close + ~TimeSeriesTemperatureAnomaly.is_alive + ~TimeSeriesTemperatureAnomaly.join + ~TimeSeriesTemperatureAnomaly.kill + ~TimeSeriesTemperatureAnomaly.run + ~TimeSeriesTemperatureAnomaly.run_after + ~TimeSeriesTemperatureAnomaly.run_task + ~TimeSeriesTemperatureAnomaly.set_start_end_date + ~TimeSeriesTemperatureAnomaly.setup_and_check + ~TimeSeriesTemperatureAnomaly.start + ~TimeSeriesTemperatureAnomaly.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~TimeSeriesTemperatureAnomaly.BLOCKED + ~TimeSeriesTemperatureAnomaly.FAIL + ~TimeSeriesTemperatureAnomaly.READY + ~TimeSeriesTemperatureAnomaly.RUNNING + ~TimeSeriesTemperatureAnomaly.SUCCESS + ~TimeSeriesTemperatureAnomaly.UNSET + ~TimeSeriesTemperatureAnomaly.authkey + ~TimeSeriesTemperatureAnomaly.daemon + ~TimeSeriesTemperatureAnomaly.exitcode + ~TimeSeriesTemperatureAnomaly.ident + ~TimeSeriesTemperatureAnomaly.name + ~TimeSeriesTemperatureAnomaly.pid + ~TimeSeriesTemperatureAnomaly.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTransport.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTransport.rst.txt new file mode 100644 index 000000000..2cf6574ec --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTransport.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.TimeSeriesTransport +======================================== + +.. currentmodule:: mpas_analysis.ocean + +.. autoclass:: TimeSeriesTransport + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~TimeSeriesTransport.__init__ + ~TimeSeriesTransport.add_subtask + ~TimeSeriesTransport.check_analysis_enabled + ~TimeSeriesTransport.check_generate + ~TimeSeriesTransport.close + ~TimeSeriesTransport.is_alive + ~TimeSeriesTransport.join + ~TimeSeriesTransport.kill + ~TimeSeriesTransport.run + ~TimeSeriesTransport.run_after + ~TimeSeriesTransport.run_task + ~TimeSeriesTransport.set_start_end_date + ~TimeSeriesTransport.setup_and_check + ~TimeSeriesTransport.start + ~TimeSeriesTransport.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~TimeSeriesTransport.BLOCKED + ~TimeSeriesTransport.FAIL + ~TimeSeriesTransport.READY + ~TimeSeriesTransport.RUNNING + ~TimeSeriesTransport.SUCCESS + ~TimeSeriesTransport.UNSET + ~TimeSeriesTransport.authkey + ~TimeSeriesTransport.daemon + ~TimeSeriesTransport.exitcode + ~TimeSeriesTransport.ident + ~TimeSeriesTransport.name + ~TimeSeriesTransport.pid + ~TimeSeriesTransport.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask.rst.txt new file mode 100644 index 000000000..180b5ef15 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.ocean.compute\_anomaly\_subtask.ComputeAnomalySubtask +==================================================================== + +.. currentmodule:: mpas_analysis.ocean.compute_anomaly_subtask + +.. autoclass:: ComputeAnomalySubtask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~ComputeAnomalySubtask.__init__ + ~ComputeAnomalySubtask.add_subtask + ~ComputeAnomalySubtask.check_analysis_enabled + ~ComputeAnomalySubtask.check_generate + ~ComputeAnomalySubtask.close + ~ComputeAnomalySubtask.is_alive + ~ComputeAnomalySubtask.join + ~ComputeAnomalySubtask.kill + ~ComputeAnomalySubtask.run + ~ComputeAnomalySubtask.run_after + ~ComputeAnomalySubtask.run_task + ~ComputeAnomalySubtask.set_start_end_date + ~ComputeAnomalySubtask.setup_and_check + ~ComputeAnomalySubtask.start + ~ComputeAnomalySubtask.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~ComputeAnomalySubtask.BLOCKED + ~ComputeAnomalySubtask.FAIL + ~ComputeAnomalySubtask.READY + ~ComputeAnomalySubtask.RUNNING + ~ComputeAnomalySubtask.SUCCESS + ~ComputeAnomalySubtask.UNSET + ~ComputeAnomalySubtask.authkey + ~ComputeAnomalySubtask.daemon + ~ComputeAnomalySubtask.exitcode + ~ComputeAnomalySubtask.ident + ~ComputeAnomalySubtask.name + ~ComputeAnomalySubtask.pid + ~ComputeAnomalySubtask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask.rst.txt new file mode 100644 index 000000000..fbd5c0e51 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask.rst.txt @@ -0,0 +1,55 @@ +mpas\_analysis.ocean.plot\_depth\_integrated\_time\_series\_subtask.PlotDepthIntegratedTimeSeriesSubtask +======================================================================================================== + +.. currentmodule:: mpas_analysis.ocean.plot_depth_integrated_time_series_subtask + +.. autoclass:: PlotDepthIntegratedTimeSeriesSubtask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~PlotDepthIntegratedTimeSeriesSubtask.__init__ + ~PlotDepthIntegratedTimeSeriesSubtask.add_subtask + ~PlotDepthIntegratedTimeSeriesSubtask.check_analysis_enabled + ~PlotDepthIntegratedTimeSeriesSubtask.check_generate + ~PlotDepthIntegratedTimeSeriesSubtask.close + ~PlotDepthIntegratedTimeSeriesSubtask.customize_fig + ~PlotDepthIntegratedTimeSeriesSubtask.is_alive + ~PlotDepthIntegratedTimeSeriesSubtask.join + ~PlotDepthIntegratedTimeSeriesSubtask.kill + ~PlotDepthIntegratedTimeSeriesSubtask.run + ~PlotDepthIntegratedTimeSeriesSubtask.run_after + ~PlotDepthIntegratedTimeSeriesSubtask.run_task + ~PlotDepthIntegratedTimeSeriesSubtask.set_start_end_date + ~PlotDepthIntegratedTimeSeriesSubtask.setup_and_check + ~PlotDepthIntegratedTimeSeriesSubtask.start + ~PlotDepthIntegratedTimeSeriesSubtask.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~PlotDepthIntegratedTimeSeriesSubtask.BLOCKED + ~PlotDepthIntegratedTimeSeriesSubtask.FAIL + ~PlotDepthIntegratedTimeSeriesSubtask.READY + ~PlotDepthIntegratedTimeSeriesSubtask.RUNNING + ~PlotDepthIntegratedTimeSeriesSubtask.SUCCESS + ~PlotDepthIntegratedTimeSeriesSubtask.UNSET + ~PlotDepthIntegratedTimeSeriesSubtask.authkey + ~PlotDepthIntegratedTimeSeriesSubtask.daemon + ~PlotDepthIntegratedTimeSeriesSubtask.exitcode + ~PlotDepthIntegratedTimeSeriesSubtask.ident + ~PlotDepthIntegratedTimeSeriesSubtask.name + ~PlotDepthIntegratedTimeSeriesSubtask.pid + ~PlotDepthIntegratedTimeSeriesSubtask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask.rst.txt new file mode 100644 index 000000000..c43ae45eb --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask.rst.txt @@ -0,0 +1,54 @@ 
+mpas\_analysis.ocean.plot\_hovmoller\_subtask.PlotHovmollerSubtask +================================================================== + +.. currentmodule:: mpas_analysis.ocean.plot_hovmoller_subtask + +.. autoclass:: PlotHovmollerSubtask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~PlotHovmollerSubtask.__init__ + ~PlotHovmollerSubtask.add_subtask + ~PlotHovmollerSubtask.check_analysis_enabled + ~PlotHovmollerSubtask.check_generate + ~PlotHovmollerSubtask.close + ~PlotHovmollerSubtask.is_alive + ~PlotHovmollerSubtask.join + ~PlotHovmollerSubtask.kill + ~PlotHovmollerSubtask.run + ~PlotHovmollerSubtask.run_after + ~PlotHovmollerSubtask.run_task + ~PlotHovmollerSubtask.set_start_end_date + ~PlotHovmollerSubtask.setup_and_check + ~PlotHovmollerSubtask.start + ~PlotHovmollerSubtask.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~PlotHovmollerSubtask.BLOCKED + ~PlotHovmollerSubtask.FAIL + ~PlotHovmollerSubtask.READY + ~PlotHovmollerSubtask.RUNNING + ~PlotHovmollerSubtask.SUCCESS + ~PlotHovmollerSubtask.UNSET + ~PlotHovmollerSubtask.authkey + ~PlotHovmollerSubtask.daemon + ~PlotHovmollerSubtask.exitcode + ~PlotHovmollerSubtask.ident + ~PlotHovmollerSubtask.name + ~PlotHovmollerSubtask.pid + ~PlotHovmollerSubtask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapIcebergConc.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapIcebergConc.rst.txt new file mode 100644 index 000000000..981b932ed --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapIcebergConc.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.sea\_ice.ClimatologyMapIcebergConc +================================================= + +.. currentmodule:: mpas_analysis.sea_ice + +.. autoclass:: ClimatologyMapIcebergConc + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapIcebergConc.__init__ + ~ClimatologyMapIcebergConc.add_subtask + ~ClimatologyMapIcebergConc.check_analysis_enabled + ~ClimatologyMapIcebergConc.check_generate + ~ClimatologyMapIcebergConc.close + ~ClimatologyMapIcebergConc.is_alive + ~ClimatologyMapIcebergConc.join + ~ClimatologyMapIcebergConc.kill + ~ClimatologyMapIcebergConc.run + ~ClimatologyMapIcebergConc.run_after + ~ClimatologyMapIcebergConc.run_task + ~ClimatologyMapIcebergConc.set_start_end_date + ~ClimatologyMapIcebergConc.setup_and_check + ~ClimatologyMapIcebergConc.start + ~ClimatologyMapIcebergConc.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapIcebergConc.BLOCKED + ~ClimatologyMapIcebergConc.FAIL + ~ClimatologyMapIcebergConc.READY + ~ClimatologyMapIcebergConc.RUNNING + ~ClimatologyMapIcebergConc.SUCCESS + ~ClimatologyMapIcebergConc.UNSET + ~ClimatologyMapIcebergConc.authkey + ~ClimatologyMapIcebergConc.daemon + ~ClimatologyMapIcebergConc.exitcode + ~ClimatologyMapIcebergConc.ident + ~ClimatologyMapIcebergConc.name + ~ClimatologyMapIcebergConc.pid + ~ClimatologyMapIcebergConc.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceConc.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceConc.rst.txt new file mode 100644 index 000000000..1761fbbb3 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceConc.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.sea\_ice.ClimatologyMapSeaIceConc +================================================ + +.. currentmodule:: mpas_analysis.sea_ice + +.. autoclass:: ClimatologyMapSeaIceConc + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapSeaIceConc.__init__ + ~ClimatologyMapSeaIceConc.add_subtask + ~ClimatologyMapSeaIceConc.check_analysis_enabled + ~ClimatologyMapSeaIceConc.check_generate + ~ClimatologyMapSeaIceConc.close + ~ClimatologyMapSeaIceConc.is_alive + ~ClimatologyMapSeaIceConc.join + ~ClimatologyMapSeaIceConc.kill + ~ClimatologyMapSeaIceConc.run + ~ClimatologyMapSeaIceConc.run_after + ~ClimatologyMapSeaIceConc.run_task + ~ClimatologyMapSeaIceConc.set_start_end_date + ~ClimatologyMapSeaIceConc.setup_and_check + ~ClimatologyMapSeaIceConc.start + ~ClimatologyMapSeaIceConc.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapSeaIceConc.BLOCKED + ~ClimatologyMapSeaIceConc.FAIL + ~ClimatologyMapSeaIceConc.READY + ~ClimatologyMapSeaIceConc.RUNNING + ~ClimatologyMapSeaIceConc.SUCCESS + ~ClimatologyMapSeaIceConc.UNSET + ~ClimatologyMapSeaIceConc.authkey + ~ClimatologyMapSeaIceConc.daemon + ~ClimatologyMapSeaIceConc.exitcode + ~ClimatologyMapSeaIceConc.ident + ~ClimatologyMapSeaIceConc.name + ~ClimatologyMapSeaIceConc.pid + ~ClimatologyMapSeaIceConc.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceThick.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceThick.rst.txt new file mode 100644 index 000000000..d0a9f5259 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceThick.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.sea\_ice.ClimatologyMapSeaIceThick +================================================= + +.. currentmodule:: mpas_analysis.sea_ice + +.. autoclass:: ClimatologyMapSeaIceThick + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ClimatologyMapSeaIceThick.__init__ + ~ClimatologyMapSeaIceThick.add_subtask + ~ClimatologyMapSeaIceThick.check_analysis_enabled + ~ClimatologyMapSeaIceThick.check_generate + ~ClimatologyMapSeaIceThick.close + ~ClimatologyMapSeaIceThick.is_alive + ~ClimatologyMapSeaIceThick.join + ~ClimatologyMapSeaIceThick.kill + ~ClimatologyMapSeaIceThick.run + ~ClimatologyMapSeaIceThick.run_after + ~ClimatologyMapSeaIceThick.run_task + ~ClimatologyMapSeaIceThick.set_start_end_date + ~ClimatologyMapSeaIceThick.setup_and_check + ~ClimatologyMapSeaIceThick.start + ~ClimatologyMapSeaIceThick.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ClimatologyMapSeaIceThick.BLOCKED + ~ClimatologyMapSeaIceThick.FAIL + ~ClimatologyMapSeaIceThick.READY + ~ClimatologyMapSeaIceThick.RUNNING + ~ClimatologyMapSeaIceThick.SUCCESS + ~ClimatologyMapSeaIceThick.UNSET + ~ClimatologyMapSeaIceThick.authkey + ~ClimatologyMapSeaIceThick.daemon + ~ClimatologyMapSeaIceThick.exitcode + ~ClimatologyMapSeaIceThick.ident + ~ClimatologyMapSeaIceThick.name + ~ClimatologyMapSeaIceThick.pid + ~ClimatologyMapSeaIceThick.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.TimeSeriesSeaIce.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.TimeSeriesSeaIce.rst.txt new file mode 100644 index 000000000..7f3ef788d --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.sea_ice.TimeSeriesSeaIce.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.sea\_ice.TimeSeriesSeaIce +======================================== + +.. currentmodule:: mpas_analysis.sea_ice + +.. autoclass:: TimeSeriesSeaIce + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~TimeSeriesSeaIce.__init__ + ~TimeSeriesSeaIce.add_subtask + ~TimeSeriesSeaIce.check_analysis_enabled + ~TimeSeriesSeaIce.check_generate + ~TimeSeriesSeaIce.close + ~TimeSeriesSeaIce.is_alive + ~TimeSeriesSeaIce.join + ~TimeSeriesSeaIce.kill + ~TimeSeriesSeaIce.run + ~TimeSeriesSeaIce.run_after + ~TimeSeriesSeaIce.run_task + ~TimeSeriesSeaIce.set_start_end_date + ~TimeSeriesSeaIce.setup_and_check + ~TimeSeriesSeaIce.start + ~TimeSeriesSeaIce.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~TimeSeriesSeaIce.BLOCKED + ~TimeSeriesSeaIce.FAIL + ~TimeSeriesSeaIce.READY + ~TimeSeriesSeaIce.RUNNING + ~TimeSeriesSeaIce.SUCCESS + ~TimeSeriesSeaIce.UNSET + ~TimeSeriesSeaIce.authkey + ~TimeSeriesSeaIce.daemon + ~TimeSeriesSeaIce.exitcode + ~TimeSeriesSeaIce.ident + ~TimeSeriesSeaIce.name + ~TimeSeriesSeaIce.pid + ~TimeSeriesSeaIce.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.add_subtask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.add_subtask.rst.txt new file mode 100644 index 000000000..b3e05c992 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.add_subtask.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.AnalysisTask.add\_subtask +=============================================== + +.. currentmodule:: mpas_analysis.shared + +.. 
automethod:: AnalysisTask.add_subtask \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_analysis_enabled.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_analysis_enabled.rst.txt new file mode 100644 index 000000000..ae9489bb5 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_analysis_enabled.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.AnalysisTask.check\_analysis\_enabled +=========================================================== + +.. currentmodule:: mpas_analysis.shared + +.. automethod:: AnalysisTask.check_analysis_enabled \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_generate.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_generate.rst.txt new file mode 100644 index 000000000..2613232cf --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_generate.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.AnalysisTask.check\_generate +================================================== + +.. currentmodule:: mpas_analysis.shared + +.. automethod:: AnalysisTask.check_generate \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.rst.txt new file mode 100644 index 000000000..6f5b2987d --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.rst.txt @@ -0,0 +1,54 @@ +mpas\_analysis.shared.AnalysisTask +================================== + +.. currentmodule:: mpas_analysis.shared + +.. autoclass:: AnalysisTask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~AnalysisTask.__init__ + ~AnalysisTask.add_subtask + ~AnalysisTask.check_analysis_enabled + ~AnalysisTask.check_generate + ~AnalysisTask.close + ~AnalysisTask.is_alive + ~AnalysisTask.join + ~AnalysisTask.kill + ~AnalysisTask.run + ~AnalysisTask.run_after + ~AnalysisTask.run_task + ~AnalysisTask.set_start_end_date + ~AnalysisTask.setup_and_check + ~AnalysisTask.start + ~AnalysisTask.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~AnalysisTask.BLOCKED + ~AnalysisTask.FAIL + ~AnalysisTask.READY + ~AnalysisTask.RUNNING + ~AnalysisTask.SUCCESS + ~AnalysisTask.UNSET + ~AnalysisTask.authkey + ~AnalysisTask.daemon + ~AnalysisTask.exitcode + ~AnalysisTask.ident + ~AnalysisTask.name + ~AnalysisTask.pid + ~AnalysisTask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run.rst.txt new file mode 100644 index 000000000..5c91208b2 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.AnalysisTask.run +====================================== + +.. currentmodule:: mpas_analysis.shared + +.. automethod:: AnalysisTask.run \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_after.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_after.rst.txt new file mode 100644 index 000000000..70275d747 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_after.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.AnalysisTask.run\_after +============================================= + +.. currentmodule:: mpas_analysis.shared + +.. 
automethod:: AnalysisTask.run_after \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_task.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_task.rst.txt new file mode 100644 index 000000000..cca747c69 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_task.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.AnalysisTask.run\_task +============================================ + +.. currentmodule:: mpas_analysis.shared + +.. automethod:: AnalysisTask.run_task \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.set_start_end_date.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.set_start_end_date.rst.txt new file mode 100644 index 000000000..16f2a40ac --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.set_start_end_date.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.AnalysisTask.set\_start\_end\_date +======================================================== + +.. currentmodule:: mpas_analysis.shared + +.. automethod:: AnalysisTask.set_start_end_date \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.setup_and_check.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.setup_and_check.rst.txt new file mode 100644 index 000000000..8a9d6ad8d --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.AnalysisTask.setup_and_check.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.AnalysisTask.setup\_and\_check +==================================================== + +.. currentmodule:: mpas_analysis.shared + +.. 
automethod:: AnalysisTask.setup_and_check \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables.rst.txt new file mode 100644 index 000000000..42470aefa --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.MpasClimatologyTask.add\_variables +==================================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. automethod:: MpasClimatologyTask.add_variables \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name.rst.txt new file mode 100644 index 000000000..c1758a11b --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.MpasClimatologyTask.get\_file\_name +===================================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. 
automethod:: MpasClimatologyTask.get_file_name \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.rst.txt new file mode 100644 index 000000000..d13c19d35 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.rst.txt @@ -0,0 +1,57 @@ +mpas\_analysis.shared.climatology.MpasClimatologyTask +===================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. autoclass:: MpasClimatologyTask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~MpasClimatologyTask.__init__ + ~MpasClimatologyTask.add_subtask + ~MpasClimatologyTask.add_variables + ~MpasClimatologyTask.check_analysis_enabled + ~MpasClimatologyTask.check_generate + ~MpasClimatologyTask.close + ~MpasClimatologyTask.get_file_name + ~MpasClimatologyTask.get_start_and_end + ~MpasClimatologyTask.is_alive + ~MpasClimatologyTask.join + ~MpasClimatologyTask.kill + ~MpasClimatologyTask.run + ~MpasClimatologyTask.run_after + ~MpasClimatologyTask.run_task + ~MpasClimatologyTask.set_start_end_date + ~MpasClimatologyTask.setup_and_check + ~MpasClimatologyTask.start + ~MpasClimatologyTask.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~MpasClimatologyTask.BLOCKED + ~MpasClimatologyTask.FAIL + ~MpasClimatologyTask.READY + ~MpasClimatologyTask.RUNNING + ~MpasClimatologyTask.SUCCESS + ~MpasClimatologyTask.UNSET + ~MpasClimatologyTask.authkey + ~MpasClimatologyTask.daemon + ~MpasClimatologyTask.exitcode + ~MpasClimatologyTask.ident + ~MpasClimatologyTask.name + ~MpasClimatologyTask.pid + ~MpasClimatologyTask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor.rst.txt new file mode 100644 index 000000000..5fb768dd3 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.RemapMpasClimatologySubtask.add\_comparison\_grid\_descriptor +=============================================================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. 
automethod:: RemapMpasClimatologySubtask.add_comparison_grid_descriptor \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology.rst.txt new file mode 100644 index 000000000..4e8fb08f4 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.RemapMpasClimatologySubtask.customize\_masked\_climatology +============================================================================================ + +.. currentmodule:: mpas_analysis.shared.climatology + +.. automethod:: RemapMpasClimatologySubtask.customize_masked_climatology \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology.rst.txt new file mode 100644 index 000000000..ce8fce027 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.RemapMpasClimatologySubtask.customize\_remapped\_climatology +============================================================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. 
automethod:: RemapMpasClimatologySubtask.customize_remapped_climatology \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name.rst.txt new file mode 100644 index 000000000..f92e914cd --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.RemapMpasClimatologySubtask.get\_masked\_file\_name +===================================================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. automethod:: RemapMpasClimatologySubtask.get_masked_file_name \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name.rst.txt new file mode 100644 index 000000000..6c8d3679c --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.RemapMpasClimatologySubtask.get\_remapped\_file\_name +======================================================================================= + +.. currentmodule:: mpas_analysis.shared.climatology + +.. 
automethod:: RemapMpasClimatologySubtask.get_remapped_file_name \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.rst.txt new file mode 100644 index 000000000..14d4160bf --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.rst.txt @@ -0,0 +1,59 @@ +mpas\_analysis.shared.climatology.RemapMpasClimatologySubtask +============================================================= + +.. currentmodule:: mpas_analysis.shared.climatology + +.. autoclass:: RemapMpasClimatologySubtask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~RemapMpasClimatologySubtask.__init__ + ~RemapMpasClimatologySubtask.add_comparison_grid_descriptor + ~RemapMpasClimatologySubtask.add_subtask + ~RemapMpasClimatologySubtask.check_analysis_enabled + ~RemapMpasClimatologySubtask.check_generate + ~RemapMpasClimatologySubtask.close + ~RemapMpasClimatologySubtask.customize_masked_climatology + ~RemapMpasClimatologySubtask.customize_remapped_climatology + ~RemapMpasClimatologySubtask.get_masked_file_name + ~RemapMpasClimatologySubtask.get_remapped_file_name + ~RemapMpasClimatologySubtask.is_alive + ~RemapMpasClimatologySubtask.join + ~RemapMpasClimatologySubtask.kill + ~RemapMpasClimatologySubtask.run + ~RemapMpasClimatologySubtask.run_after + ~RemapMpasClimatologySubtask.run_task + ~RemapMpasClimatologySubtask.set_start_end_date + ~RemapMpasClimatologySubtask.setup_and_check + ~RemapMpasClimatologySubtask.start + ~RemapMpasClimatologySubtask.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~RemapMpasClimatologySubtask.BLOCKED + ~RemapMpasClimatologySubtask.FAIL + ~RemapMpasClimatologySubtask.READY + ~RemapMpasClimatologySubtask.RUNNING + ~RemapMpasClimatologySubtask.SUCCESS + ~RemapMpasClimatologySubtask.UNSET + ~RemapMpasClimatologySubtask.authkey + ~RemapMpasClimatologySubtask.daemon + ~RemapMpasClimatologySubtask.exitcode + ~RemapMpasClimatologySubtask.ident + ~RemapMpasClimatologySubtask.name + ~RemapMpasClimatologySubtask.pid + ~RemapMpasClimatologySubtask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task.rst.txt new file mode 100644 index 000000000..ffb3a8691 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.RemapMpasClimatologySubtask.run\_task +======================================================================= + +.. currentmodule:: mpas_analysis.shared.climatology + +.. automethod:: RemapMpasClimatologySubtask.run_task \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check.rst.txt new file mode 100644 index 000000000..e64dc9d1a --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.RemapMpasClimatologySubtask.setup\_and\_check +=============================================================================== + +.. 
currentmodule:: mpas_analysis.shared.climatology + +.. automethod:: RemapMpasClimatologySubtask.setup_and_check \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset.rst.txt new file mode 100644 index 000000000..389453c4c --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.RemapObservedClimatologySubtask.build\_observational\_dataset +=============================================================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. automethod:: RemapObservedClimatologySubtask.build_observational_dataset \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name.rst.txt new file mode 100644 index 000000000..f3895b458 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.RemapObservedClimatologySubtask.get\_file\_name +================================================================================= + +.. currentmodule:: mpas_analysis.shared.climatology + +.. 
automethod:: RemapObservedClimatologySubtask.get_file_name \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor.rst.txt new file mode 100644 index 000000000..d84c7cf0c --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.RemapObservedClimatologySubtask.get\_observation\_descriptor +============================================================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. automethod:: RemapObservedClimatologySubtask.get_observation_descriptor \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.rst.txt new file mode 100644 index 000000000..5c8f2bcb2 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.rst.txt @@ -0,0 +1,57 @@ +mpas\_analysis.shared.climatology.RemapObservedClimatologySubtask +================================================================= + +.. currentmodule:: mpas_analysis.shared.climatology + +.. autoclass:: RemapObservedClimatologySubtask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~RemapObservedClimatologySubtask.__init__ + ~RemapObservedClimatologySubtask.add_subtask + ~RemapObservedClimatologySubtask.build_observational_dataset + ~RemapObservedClimatologySubtask.check_analysis_enabled + ~RemapObservedClimatologySubtask.check_generate + ~RemapObservedClimatologySubtask.close + ~RemapObservedClimatologySubtask.get_file_name + ~RemapObservedClimatologySubtask.get_observation_descriptor + ~RemapObservedClimatologySubtask.is_alive + ~RemapObservedClimatologySubtask.join + ~RemapObservedClimatologySubtask.kill + ~RemapObservedClimatologySubtask.run + ~RemapObservedClimatologySubtask.run_after + ~RemapObservedClimatologySubtask.run_task + ~RemapObservedClimatologySubtask.set_start_end_date + ~RemapObservedClimatologySubtask.setup_and_check + ~RemapObservedClimatologySubtask.start + ~RemapObservedClimatologySubtask.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~RemapObservedClimatologySubtask.BLOCKED + ~RemapObservedClimatologySubtask.FAIL + ~RemapObservedClimatologySubtask.READY + ~RemapObservedClimatologySubtask.RUNNING + ~RemapObservedClimatologySubtask.SUCCESS + ~RemapObservedClimatologySubtask.UNSET + ~RemapObservedClimatologySubtask.authkey + ~RemapObservedClimatologySubtask.daemon + ~RemapObservedClimatologySubtask.exitcode + ~RemapObservedClimatologySubtask.ident + ~RemapObservedClimatologySubtask.name + ~RemapObservedClimatologySubtask.pid + ~RemapObservedClimatologySubtask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.add_years_months_days_in_month.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.add_years_months_days_in_month.rst.txt new file mode 100644 index 000000000..a2ed36529 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.add_years_months_days_in_month.rst.txt @@ -0,0 +1,6 @@ 
+mpas\_analysis.shared.climatology.add\_years\_months\_days\_in\_month +===================================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. autofunction:: add_years_months_days_in_month \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.compute_climatology.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.compute_climatology.rst.txt new file mode 100644 index 000000000..2ed8036c0 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.compute_climatology.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.compute\_climatology +====================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. autofunction:: compute_climatology \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.compute_monthly_climatology.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.compute_monthly_climatology.rst.txt new file mode 100644 index 000000000..274f40fde --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.compute_monthly_climatology.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.compute\_monthly\_climatology +=============================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. 
autofunction:: compute_monthly_climatology \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_comparison_descriptor.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_comparison_descriptor.rst.txt new file mode 100644 index 000000000..5a15b2290 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_comparison_descriptor.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.get\_comparison\_descriptor +============================================================= + +.. currentmodule:: mpas_analysis.shared.climatology + +.. autofunction:: get_comparison_descriptor \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name.rst.txt new file mode 100644 index 000000000..c0a59c4f7 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.get\_masked\_mpas\_climatology\_file\_name +============================================================================ + +.. currentmodule:: mpas_analysis.shared.climatology + +.. 
autofunction:: get_masked_mpas_climatology_file_name \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name.rst.txt new file mode 100644 index 000000000..0fec9c056 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.get\_remapped\_mpas\_climatology\_file\_name +============================================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. autofunction:: get_remapped_mpas_climatology_file_name \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_remapper.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_remapper.rst.txt new file mode 100644 index 000000000..8315a1342 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_remapper.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.get\_remapper +=============================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. 
autofunction:: get_remapper \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory.rst.txt new file mode 100644 index 000000000..ef9e190d1 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.get\_unmasked\_mpas\_climatology\_directory +============================================================================= + +.. currentmodule:: mpas_analysis.shared.climatology + +.. autofunction:: get_unmasked_mpas_climatology_directory \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name.rst.txt new file mode 100644 index 000000000..379719b0f --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.climatology.get\_unmasked\_mpas\_climatology\_file\_name +============================================================================== + +.. currentmodule:: mpas_analysis.shared.climatology + +.. 
autofunction:: get_unmasked_mpas_climatology_file_name \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset.rst.txt new file mode 100644 index 000000000..c1bd1deb6 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.generalized\_reader.generalized\_reader.open\_multifile\_dataset +====================================================================================== + +.. currentmodule:: mpas_analysis.shared.generalized_reader.generalized_reader + +.. autofunction:: open_multifile_dataset \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__.rst.txt new file mode 100644 index 000000000..8c87e0ccc --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.NameList.\_\_getattr\_\_ +============================================================================== + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. 
automethod:: NameList.__getattr__ \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__.rst.txt new file mode 100644 index 000000000..f587d5f1f --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.NameList.\_\_getitem\_\_ +============================================================================== + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. automethod:: NameList.__getitem__ \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__.rst.txt new file mode 100644 index 000000000..f66a5f916 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.NameList.\_\_init\_\_ +=========================================================================== + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. 
automethod:: NameList.__init__ \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.get.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.get.rst.txt new file mode 100644 index 000000000..0c357fb7d --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.get.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.NameList.get +================================================================== + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. automethod:: NameList.get \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool.rst.txt new file mode 100644 index 000000000..09246342a --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.NameList.getbool +====================================================================== + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. 
automethod:: NameList.getbool \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat.rst.txt new file mode 100644 index 000000000..111c7bd53 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.NameList.getfloat +======================================================================= + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. automethod:: NameList.getfloat \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getint.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getint.rst.txt new file mode 100644 index 000000000..6f586a5b8 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getint.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.NameList.getint +===================================================================== + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. 
automethod:: NameList.getint \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__.rst.txt new file mode 100644 index 000000000..5a3dc48af --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.StreamsFile.\_\_init\_\_ +============================================================================== + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. automethod:: StreamsFile.__init__ \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream.rst.txt new file mode 100644 index 000000000..a03235f7f --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.StreamsFile.find\_stream +============================================================================== + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. 
automethod:: StreamsFile.find_stream \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream.rst.txt new file mode 100644 index 000000000..7785250fd --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.StreamsFile.has\_stream +============================================================================= + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. automethod:: StreamsFile.has_stream \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read.rst.txt new file mode 100644 index 000000000..44d06cff2 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.StreamsFile.read +====================================================================== + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. 
automethod:: StreamsFile.read \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath.rst.txt new file mode 100644 index 000000000..9b39cd0c8 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.StreamsFile.readpath +========================================================================== + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. automethod:: StreamsFile.readpath \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict.rst.txt new file mode 100644 index 000000000..f92f53fc5 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.namelist\_streams\_interface.convert\_namelist\_to\_dict +================================================================================= + +.. currentmodule:: mpas_analysis.shared.io.namelist_streams_interface + +.. 
autofunction:: convert_namelist_to_dict \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.open_mpas_dataset.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.open_mpas_dataset.rst.txt new file mode 100644 index 000000000..be629c321 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.open_mpas_dataset.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.open\_mpas\_dataset +============================================ + +.. currentmodule:: mpas_analysis.shared.io + +.. autofunction:: open_mpas_dataset \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.build_config_full_path.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.build_config_full_path.rst.txt new file mode 100644 index 000000000..050880f43 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.build_config_full_path.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.utility.build\_config\_full\_path +========================================================== + +.. currentmodule:: mpas_analysis.shared.io.utility + +.. autofunction:: build_config_full_path \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.check_path_exists.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.check_path_exists.rst.txt new file mode 100644 index 000000000..cb064e5ce --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.check_path_exists.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.utility.check\_path\_exists +==================================================== + +.. currentmodule:: mpas_analysis.shared.io.utility + +.. 
autofunction:: check_path_exists \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.make_directories.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.make_directories.rst.txt new file mode 100644 index 000000000..db288f263 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.make_directories.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.utility.make\_directories +================================================== + +.. currentmodule:: mpas_analysis.shared.io.utility + +.. autofunction:: make_directories \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.paths.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.paths.rst.txt new file mode 100644 index 000000000..c159dac00 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.utility.paths.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.io.utility.paths +====================================== + +.. currentmodule:: mpas_analysis.shared.io.utility + +.. autofunction:: paths \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.write_netcdf.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.write_netcdf.rst.txt new file mode 100644 index 000000000..512beee86 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.io.write_netcdf.rst.txt @@ -0,0 +1,29 @@ +mpas\_analysis.shared.io.write\_netcdf +====================================== + +.. automodule:: mpas_analysis.shared.io.write_netcdf + + + + + + + + .. rubric:: Functions + + .. 
autosummary:: + + write_netcdf_with_fill + + + + + + + + + + + + + diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset.rst.txt new file mode 100644 index 000000000..2166dd8db --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.mpas\_xarray.mpas\_xarray.open\_multifile\_dataset +======================================================================== + +.. currentmodule:: mpas_analysis.shared.mpas_xarray.mpas_xarray + +.. autofunction:: open_multifile_dataset \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess.rst.txt new file mode 100644 index 000000000..7a3938183 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.mpas\_xarray.mpas\_xarray.preprocess +========================================================== + +.. currentmodule:: mpas_analysis.shared.mpas_xarray.mpas_xarray + +.. 
autofunction:: preprocess \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index.rst.txt new file mode 100644 index 000000000..c546cc021 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.mpas\_xarray.mpas\_xarray.remove\_repeated\_time\_index +============================================================================= + +.. currentmodule:: mpas_analysis.shared.mpas_xarray.mpas_xarray + +.. autofunction:: remove_repeated_time_index \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables.rst.txt new file mode 100644 index 000000000..b465b76cd --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.mpas\_xarray.mpas\_xarray.subset\_variables +================================================================= + +.. currentmodule:: mpas_analysis.shared.mpas_xarray.mpas_xarray + +.. 
autofunction:: subset_variables \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.rst.txt new file mode 100644 index 000000000..e4d2b47c1 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.rst.txt @@ -0,0 +1,55 @@ +mpas\_analysis.shared.plot.PlotClimatologyMapSubtask +==================================================== + +.. currentmodule:: mpas_analysis.shared.plot + +.. autoclass:: PlotClimatologyMapSubtask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~PlotClimatologyMapSubtask.__init__ + ~PlotClimatologyMapSubtask.add_subtask + ~PlotClimatologyMapSubtask.check_analysis_enabled + ~PlotClimatologyMapSubtask.check_generate + ~PlotClimatologyMapSubtask.close + ~PlotClimatologyMapSubtask.is_alive + ~PlotClimatologyMapSubtask.join + ~PlotClimatologyMapSubtask.kill + ~PlotClimatologyMapSubtask.run + ~PlotClimatologyMapSubtask.run_after + ~PlotClimatologyMapSubtask.run_task + ~PlotClimatologyMapSubtask.set_plot_info + ~PlotClimatologyMapSubtask.set_start_end_date + ~PlotClimatologyMapSubtask.setup_and_check + ~PlotClimatologyMapSubtask.start + ~PlotClimatologyMapSubtask.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~PlotClimatologyMapSubtask.BLOCKED + ~PlotClimatologyMapSubtask.FAIL + ~PlotClimatologyMapSubtask.READY + ~PlotClimatologyMapSubtask.RUNNING + ~PlotClimatologyMapSubtask.SUCCESS + ~PlotClimatologyMapSubtask.UNSET + ~PlotClimatologyMapSubtask.authkey + ~PlotClimatologyMapSubtask.daemon + ~PlotClimatologyMapSubtask.exitcode + ~PlotClimatologyMapSubtask.ident + ~PlotClimatologyMapSubtask.name + ~PlotClimatologyMapSubtask.pid + ~PlotClimatologyMapSubtask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info.rst.txt new file mode 100644 index 000000000..7e0d46c8d --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.plot.PlotClimatologyMapSubtask.set\_plot\_info +==================================================================== + +.. currentmodule:: mpas_analysis.shared.plot + +.. automethod:: PlotClimatologyMapSubtask.set_plot_info \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.add_inset.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.add_inset.rst.txt new file mode 100644 index 000000000..09f991b92 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.add_inset.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.plot.add\_inset +===================================== + +.. currentmodule:: mpas_analysis.shared.plot + +.. 
autofunction:: add_inset \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.colormap.setup_colormap.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.colormap.setup_colormap.rst.txt new file mode 100644 index 000000000..e6312c152 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.colormap.setup_colormap.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.plot.colormap.setup\_colormap +=================================================== + +.. currentmodule:: mpas_analysis.shared.plot.colormap + +.. autofunction:: setup_colormap \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_1D.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_1D.rst.txt new file mode 100644 index 000000000..3887346bf --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_1D.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.plot.plot\_1D +=================================== + +.. currentmodule:: mpas_analysis.shared.plot + +.. autofunction:: plot_1D \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_global_comparison.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_global_comparison.rst.txt new file mode 100644 index 000000000..904d23138 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_global_comparison.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.plot.plot\_global\_comparison +=================================================== + +.. currentmodule:: mpas_analysis.shared.plot + +.. 
autofunction:: plot_global_comparison \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_polar_comparison.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_polar_comparison.rst.txt new file mode 100644 index 000000000..40dbcda3b --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_polar_comparison.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.plot.plot\_polar\_comparison +================================================== + +.. currentmodule:: mpas_analysis.shared.plot + +.. autofunction:: plot_polar_comparison \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section.rst.txt new file mode 100644 index 000000000..7c4933d8f --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.plot.plot\_vertical\_section +================================================== + +.. currentmodule:: mpas_analysis.shared.plot + +.. autofunction:: plot_vertical_section \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section_comparison.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section_comparison.rst.txt new file mode 100644 index 000000000..6e2801d85 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section_comparison.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.plot.plot\_vertical\_section\_comparison +============================================================== + +.. currentmodule:: mpas_analysis.shared.plot + +.. 
autofunction:: plot_vertical_section_comparison \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.ticks.plot_xtick_format.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.ticks.plot_xtick_format.rst.txt new file mode 100644 index 000000000..20e1e37db --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.ticks.plot_xtick_format.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.plot.ticks.plot\_xtick\_format +==================================================== + +.. currentmodule:: mpas_analysis.shared.plot.ticks + +.. autofunction:: plot_xtick_format \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot.rst.txt new file mode 100644 index 000000000..e9ec8c68e --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.plot.timeseries\_analysis\_plot +===================================================== + +.. currentmodule:: mpas_analysis.shared.plot + +.. autofunction:: timeseries_analysis_plot \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot_polar.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot_polar.rst.txt new file mode 100644 index 000000000..f05fb2985 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot_polar.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.plot.timeseries\_analysis\_plot\_polar +============================================================ + +.. currentmodule:: mpas_analysis.shared.plot + +.. 
autofunction:: timeseries_analysis_plot_polar \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.projection.get_cartopy_projection.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.projection.get_cartopy_projection.rst.txt new file mode 100644 index 000000000..b39531d5b --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.projection.get_cartopy_projection.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.projection.get\_cartopy\_projection +========================================================= + +.. currentmodule:: mpas_analysis.shared.projection + +.. autofunction:: get_cartopy_projection \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.projection.get_pyproj_projection.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.projection.get_pyproj_projection.rst.txt new file mode 100644 index 000000000..055d9f7a7 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.projection.get_pyproj_projection.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.projection.get\_pyproj\_projection +======================================================== + +.. currentmodule:: mpas_analysis.shared.projection + +.. 
autofunction:: get_pyproj_projection \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks.rst.txt new file mode 100644 index 000000000..a45ddcceb --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks.rst.txt @@ -0,0 +1,55 @@ +mpas\_analysis.shared.regions.compute\_region\_masks.ComputeRegionMasks +======================================================================= + +.. currentmodule:: mpas_analysis.shared.regions.compute_region_masks + +.. autoclass:: ComputeRegionMasks + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~ComputeRegionMasks.__init__ + ~ComputeRegionMasks.add_mask_subtask + ~ComputeRegionMasks.add_subtask + ~ComputeRegionMasks.check_analysis_enabled + ~ComputeRegionMasks.check_generate + ~ComputeRegionMasks.close + ~ComputeRegionMasks.is_alive + ~ComputeRegionMasks.join + ~ComputeRegionMasks.kill + ~ComputeRegionMasks.run + ~ComputeRegionMasks.run_after + ~ComputeRegionMasks.run_task + ~ComputeRegionMasks.set_start_end_date + ~ComputeRegionMasks.setup_and_check + ~ComputeRegionMasks.start + ~ComputeRegionMasks.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~ComputeRegionMasks.BLOCKED + ~ComputeRegionMasks.FAIL + ~ComputeRegionMasks.READY + ~ComputeRegionMasks.RUNNING + ~ComputeRegionMasks.SUCCESS + ~ComputeRegionMasks.UNSET + ~ComputeRegionMasks.authkey + ~ComputeRegionMasks.daemon + ~ComputeRegionMasks.exitcode + ~ComputeRegionMasks.ident + ~ComputeRegionMasks.name + ~ComputeRegionMasks.pid + ~ComputeRegionMasks.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask.rst.txt new file mode 100644 index 000000000..7ee6a1eb3 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask.rst.txt @@ -0,0 +1,56 @@ +mpas\_analysis.shared.regions.compute\_region\_masks\_subtask.ComputeRegionMasksSubtask +======================================================================================= + +.. currentmodule:: mpas_analysis.shared.regions.compute_region_masks_subtask + +.. autoclass:: ComputeRegionMasksSubtask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~ComputeRegionMasksSubtask.__init__ + ~ComputeRegionMasksSubtask.add_subtask + ~ComputeRegionMasksSubtask.check_analysis_enabled + ~ComputeRegionMasksSubtask.check_generate + ~ComputeRegionMasksSubtask.close + ~ComputeRegionMasksSubtask.expand_region_names + ~ComputeRegionMasksSubtask.is_alive + ~ComputeRegionMasksSubtask.join + ~ComputeRegionMasksSubtask.kill + ~ComputeRegionMasksSubtask.make_region_mask + ~ComputeRegionMasksSubtask.run + ~ComputeRegionMasksSubtask.run_after + ~ComputeRegionMasksSubtask.run_task + ~ComputeRegionMasksSubtask.set_start_end_date + ~ComputeRegionMasksSubtask.setup_and_check + ~ComputeRegionMasksSubtask.start + ~ComputeRegionMasksSubtask.terminate + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ComputeRegionMasksSubtask.BLOCKED + ~ComputeRegionMasksSubtask.FAIL + ~ComputeRegionMasksSubtask.READY + ~ComputeRegionMasksSubtask.RUNNING + ~ComputeRegionMasksSubtask.SUCCESS + ~ComputeRegionMasksSubtask.UNSET + ~ComputeRegionMasksSubtask.authkey + ~ComputeRegionMasksSubtask.daemon + ~ComputeRegionMasksSubtask.exitcode + ~ComputeRegionMasksSubtask.ident + ~ComputeRegionMasksSubtask.name + ~ComputeRegionMasksSubtask.pid + ~ComputeRegionMasksSubtask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list.rst.txt new file mode 100644 index 000000000..45fdce020 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.regions.compute\_region\_masks\_subtask.get\_feature\_list +================================================================================ + +.. currentmodule:: mpas_analysis.shared.regions.compute_region_masks_subtask + +.. 
autofunction:: get_feature_list \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.MpasTimeSeriesTask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.MpasTimeSeriesTask.rst.txt new file mode 100644 index 000000000..9faa7ba03 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.MpasTimeSeriesTask.rst.txt @@ -0,0 +1,55 @@ +mpas\_analysis.shared.time\_series.MpasTimeSeriesTask +===================================================== + +.. currentmodule:: mpas_analysis.shared.time_series + +.. autoclass:: MpasTimeSeriesTask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~MpasTimeSeriesTask.__init__ + ~MpasTimeSeriesTask.add_subtask + ~MpasTimeSeriesTask.add_variables + ~MpasTimeSeriesTask.check_analysis_enabled + ~MpasTimeSeriesTask.check_generate + ~MpasTimeSeriesTask.close + ~MpasTimeSeriesTask.is_alive + ~MpasTimeSeriesTask.join + ~MpasTimeSeriesTask.kill + ~MpasTimeSeriesTask.run + ~MpasTimeSeriesTask.run_after + ~MpasTimeSeriesTask.run_task + ~MpasTimeSeriesTask.set_start_end_date + ~MpasTimeSeriesTask.setup_and_check + ~MpasTimeSeriesTask.start + ~MpasTimeSeriesTask.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~MpasTimeSeriesTask.BLOCKED + ~MpasTimeSeriesTask.FAIL + ~MpasTimeSeriesTask.READY + ~MpasTimeSeriesTask.RUNNING + ~MpasTimeSeriesTask.SUCCESS + ~MpasTimeSeriesTask.UNSET + ~MpasTimeSeriesTask.authkey + ~MpasTimeSeriesTask.daemon + ~MpasTimeSeriesTask.exitcode + ~MpasTimeSeriesTask.ident + ~MpasTimeSeriesTask.name + ~MpasTimeSeriesTask.pid + ~MpasTimeSeriesTask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.cache_time_series.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.cache_time_series.rst.txt new file mode 100644 index 000000000..203d82c02 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.cache_time_series.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.time\_series.cache\_time\_series +====================================================== + +.. currentmodule:: mpas_analysis.shared.time_series + +.. autofunction:: cache_time_series \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg.rst.txt new file mode 100644 index 000000000..aaa58220c --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.time\_series.compute\_moving\_avg +======================================================= + +.. currentmodule:: mpas_analysis.shared.time_series + +.. 
autofunction:: compute_moving_avg \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start.rst.txt new file mode 100644 index 000000000..dca2cc350 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.time\_series.compute\_moving\_avg\_anomaly\_from\_start +============================================================================= + +.. currentmodule:: mpas_analysis.shared.time_series + +.. autofunction:: compute_moving_avg_anomaly_from_start \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta.rst.txt new file mode 100644 index 000000000..9be4ed820 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta.rst.txt @@ -0,0 +1,29 @@ +mpas\_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta +===================================================================== + +.. currentmodule:: mpas_analysis.shared.timekeeping.MpasRelativeDelta + +.. autoclass:: MpasRelativeDelta + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~MpasRelativeDelta.__init__ + ~MpasRelativeDelta.normalized + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~MpasRelativeDelta.weeks + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.date_to_days.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.date_to_days.rst.txt new file mode 100644 index 000000000..834f39a31 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.date_to_days.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.timekeeping.utility.date\_to\_days +======================================================== + +.. currentmodule:: mpas_analysis.shared.timekeeping.utility + +.. autofunction:: date_to_days \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.datetime_to_days.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.datetime_to_days.rst.txt new file mode 100644 index 000000000..bd2dfa2fc --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.datetime_to_days.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.timekeeping.utility.datetime\_to\_days +============================================================ + +.. currentmodule:: mpas_analysis.shared.timekeeping.utility + +.. autofunction:: datetime_to_days \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.days_to_datetime.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.days_to_datetime.rst.txt new file mode 100644 index 000000000..e51bab299 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.days_to_datetime.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.timekeeping.utility.days\_to\_datetime +============================================================ + +.. 
currentmodule:: mpas_analysis.shared.timekeeping.utility + +.. autofunction:: days_to_datetime \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.get_simulation_start_time.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.get_simulation_start_time.rst.txt new file mode 100644 index 000000000..29f3c581d --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.get_simulation_start_time.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.timekeeping.utility.get\_simulation\_start\_time +====================================================================== + +.. currentmodule:: mpas_analysis.shared.timekeeping.utility + +.. autofunction:: get_simulation_start_time \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_datetime.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_datetime.rst.txt new file mode 100644 index 000000000..8867ae87f --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_datetime.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.timekeeping.utility.string\_to\_datetime +============================================================== + +.. currentmodule:: mpas_analysis.shared.timekeeping.utility + +.. 
autofunction:: string_to_datetime \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_days_since_date.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_days_since_date.rst.txt new file mode 100644 index 000000000..9d9858058 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_days_since_date.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.timekeeping.utility.string\_to\_days\_since\_date +======================================================================= + +.. currentmodule:: mpas_analysis.shared.timekeeping.utility + +.. autofunction:: string_to_days_since_date \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_relative_delta.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_relative_delta.rst.txt new file mode 100644 index 000000000..171d797de --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_relative_delta.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.timekeeping.utility.string\_to\_relative\_delta +===================================================================== + +.. currentmodule:: mpas_analysis.shared.timekeeping.utility + +.. 
autofunction:: string_to_relative_delta \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask.rst.txt new file mode 100644 index 000000000..e5025b7d6 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask.rst.txt @@ -0,0 +1,56 @@ +mpas\_analysis.shared.transects.compute\_transect\_masks\_subtask.ComputeTransectMasksSubtask +============================================================================================= + +.. currentmodule:: mpas_analysis.shared.transects.compute_transect_masks_subtask + +.. autoclass:: ComputeTransectMasksSubtask + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~ComputeTransectMasksSubtask.__init__ + ~ComputeTransectMasksSubtask.add_subtask + ~ComputeTransectMasksSubtask.check_analysis_enabled + ~ComputeTransectMasksSubtask.check_generate + ~ComputeTransectMasksSubtask.close + ~ComputeTransectMasksSubtask.expand_transect_names + ~ComputeTransectMasksSubtask.is_alive + ~ComputeTransectMasksSubtask.join + ~ComputeTransectMasksSubtask.kill + ~ComputeTransectMasksSubtask.make_transect_mask + ~ComputeTransectMasksSubtask.run + ~ComputeTransectMasksSubtask.run_after + ~ComputeTransectMasksSubtask.run_task + ~ComputeTransectMasksSubtask.set_start_end_date + ~ComputeTransectMasksSubtask.setup_and_check + ~ComputeTransectMasksSubtask.start + ~ComputeTransectMasksSubtask.terminate + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~ComputeTransectMasksSubtask.BLOCKED + ~ComputeTransectMasksSubtask.FAIL + ~ComputeTransectMasksSubtask.READY + ~ComputeTransectMasksSubtask.RUNNING + ~ComputeTransectMasksSubtask.SUCCESS + ~ComputeTransectMasksSubtask.UNSET + ~ComputeTransectMasksSubtask.authkey + ~ComputeTransectMasksSubtask.daemon + ~ComputeTransectMasksSubtask.exitcode + ~ComputeTransectMasksSubtask.ident + ~ComputeTransectMasksSubtask.name + ~ComputeTransectMasksSubtask.pid + ~ComputeTransectMasksSubtask.sentinel + + \ No newline at end of file diff --git a/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks.rst.txt b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks.rst.txt new file mode 100644 index 000000000..0aa38a6d9 --- /dev/null +++ b/1.11.0rc1/_sources/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks.rst.txt @@ -0,0 +1,6 @@ +mpas\_analysis.shared.transects.compute\_transect\_masks\_subtask.compute\_mpas\_transect\_masks +================================================================================================ + +.. currentmodule:: mpas_analysis.shared.transects.compute_transect_masks_subtask + +.. autofunction:: compute_mpas_transect_masks \ No newline at end of file diff --git a/1.11.0rc1/_sources/index.rst.txt b/1.11.0rc1/_sources/index.rst.txt new file mode 100644 index 000000000..4f72191c6 --- /dev/null +++ b/1.11.0rc1/_sources/index.rst.txt @@ -0,0 +1,54 @@ +.. MPAS-Analysis documentation master file, created by + sphinx-quickstart on Sat Mar 25 14:39:11 2017. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +MPAS-Analysis +============= + +.. 
image:: users_guide/_static/sst_example.png + :width: 300 px + :align: center + +Analysis for simulations produced with Model for Prediction Across Scales +(MPAS) components and the Energy Exascale Earth System Model (E3SM), which +used those components. + +.. toctree:: + :caption: User's guide + :maxdepth: 2 + + users_guide/quick_start + users_guide/configuration + users_guide/analysis_tasks + users_guide/components + users_guide/observations + +.. toctree:: + :caption: Developer's guide + :maxdepth: 2 + + developers_guide/api + + design_docs/index + +.. toctree:: + :caption: Tutorials + :maxdepth: 1 + + tutorials/getting_started + tutorials/dev_getting_started + tutorials/dev_understand_a_task + tutorials/dev_add_task + +.. toctree:: + :caption: Authors + :maxdepth: 1 + + authors + +.. toctree:: + :caption: Versions + :maxdepth: 1 + + versions diff --git a/1.11.0rc1/_sources/tutorials/dev_add_task.rst.txt b/1.11.0rc1/_sources/tutorials/dev_add_task.rst.txt new file mode 100644 index 000000000..8cb0c8e5c --- /dev/null +++ b/1.11.0rc1/_sources/tutorials/dev_add_task.rst.txt @@ -0,0 +1,1405 @@ +.. _tutorial_dev_add_task: + +Developers: Adding a new analysis task +====================================== + +This tutorial walks a new developer through the basics of creating a new +analysis task in MPAS-Analysis. It is a common practice to find an existing +analysis task that is as close as possible to the new analysis, and to copy +that existing task as a template for the new task. That is the strategy we +will demonstrate here. + +To provide a real example, we will show how we copy and modify an analysis +task used to compute the anomaly in ocean heat content +(:py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly`) to instead compute +the barotropic streamfunction (BSF). + +For computing the BSF itself, we will make use of a script that was developed +outside of MPAS-Analysis for this purpose. 
This is also a common development +technique: first develop the analysis as a script or +`jupyter notebook `_. Nearly always, the scripts or +notebooks include hard-coded paths and are otherwise not easily applied to new +simulations without considerable effort. This is the motivation for adapting +the code to MPAS-Analysis. + +1. Getting started +------------------ + +To begin, please follow the :ref:`tutorial_dev_getting_started` tutorial, which +will help you through the basics of creating a fork of MPAS-Analysis, +cloning it onto the machine(s) where you will do your development, making +a worktree for the feature you will develop, creating a conda environment for +testing your new MPAS-Analysis development, and running MPAS-Analysis. + +Then, please follow the :ref:`tutorial_understand_a_task`. This will give +you a tour of the :py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly` +analysis task that we will use as a starting point for developing a new task. + + +2. The reference scripts +------------------------ + +I have two scripts I used in the past to compute the barotropic streamfunction +and write it out, and then to plot it. These scripts yanked out some code +from MPAS-Analysis so there are a few similarities but there's a lot of work +to do. + +Here's the script for computing the BSF: + +.. 
code-block:: python + + #!/usr/bin/env python + + import xarray + import numpy + import scipy.sparse + import scipy.sparse.linalg + import sys + + from mpas_tools.io import write_netcdf + + + def main(): + + ds = xarray.open_dataset(sys.argv[1]) + ds = ds[['timeMonthly_avg_layerThickness', + 'timeMonthly_avg_normalVelocity']] + ds.load() + + dsMesh = xarray.open_dataset(sys.argv[2]) + dsMesh = dsMesh[['cellsOnEdge', 'cellsOnVertex', 'nEdgesOnCell', + 'edgesOnCell', 'verticesOnCell', 'verticesOnEdge', + 'dcEdge', 'dvEdge', 'lonCell', 'latCell', 'lonVertex', + 'latVertex']] + dsMesh.load() + + out_filename = sys.argv[3] + + bsfVertex = _compute_barotropic_streamfunction_vertex(dsMesh, ds) + print('bsf on vertices computed.') + bsfCell = _compute_barotropic_streamfunction_cell(dsMesh, bsfVertex) + print('bsf on cells computed.') + dsBSF = xarray.Dataset() + dsBSF['bsfVertex'] = bsfVertex + dsBSF.bsfVertex.attrs['units'] = 'Sv' + dsBSF.bsfVertex.attrs['description'] = 'barotropic streamfunction ' \ + 'on vertices' + dsBSF['bsfCell'] = bsfCell + dsBSF.bsfCell.attrs['units'] = 'Sv' + dsBSF.bsfCell.attrs['description'] = 'barotropic streamfunction ' \ + 'on cells' + dsBSF = dsBSF.transpose('Time', 'nCells', 'nVertices') + for var in dsMesh: + dsBSF[var] = dsMesh[var] + write_netcdf(dsBSF, out_filename) + + + def _compute_transport(dsMesh, ds): + + cellsOnEdge = dsMesh.cellsOnEdge - 1 + innerEdges = numpy.logical_and(cellsOnEdge.isel(TWO=0) >= 0, + cellsOnEdge.isel(TWO=1) >= 0) + + # convert from boolean mask to indices + innerEdges = numpy.flatnonzero(innerEdges.values) + + cell0 = cellsOnEdge.isel(nEdges=innerEdges, TWO=0) + cell1 = cellsOnEdge.isel(nEdges=innerEdges, TWO=1) + + layerThickness = ds.timeMonthly_avg_layerThickness + normalVelocity = ds.timeMonthly_avg_normalVelocity.isel(nEdges=innerEdges) + + layerThicknessEdge = 0.5*(layerThickness.isel(nCells=cell0) + + layerThickness.isel(nCells=cell1)) + transport = dsMesh.dvEdge[innerEdges] * \ + (layerThicknessEdge 
* normalVelocity).sum(dim='nVertLevels') + + # ds = xarray.Dataset() + # ds['transport'] = transport + # ds['innerEdges'] = ('nEdges', innerEdges) + # write_netcdf(ds, 'transport.nc') + + return innerEdges, transport + + + def _compute_barotropic_streamfunction_vertex(dsMesh, ds): + innerEdges, transport = _compute_transport(dsMesh, ds) + print('transport computed.') + + nVertices = dsMesh.sizes['nVertices'] + nTime = ds.sizes['Time'] + + cellsOnVertex = dsMesh.cellsOnVertex - 1 + verticesOnEdge = dsMesh.verticesOnEdge - 1 + isBoundaryCOV = cellsOnVertex == -1 + boundaryVertices = numpy.logical_or(isBoundaryCOV.isel(vertexDegree=0), + isBoundaryCOV.isel(vertexDegree=1)) + boundaryVertices = numpy.logical_or(boundaryVertices, + isBoundaryCOV.isel(vertexDegree=2)) + + # convert from boolean mask to indices + boundaryVertices = numpy.flatnonzero(boundaryVertices.values) + + nBoundaryVertices = len(boundaryVertices) + nInnerEdges = len(innerEdges) + + indices = numpy.zeros((2, 2*nInnerEdges+nBoundaryVertices), dtype=int) + data = numpy.zeros(2*nInnerEdges+nBoundaryVertices, dtype=float) + + # The difference between the streamfunction at vertices on an inner edge + # should be equal to the transport + v0 = verticesOnEdge.isel(nEdges=innerEdges, TWO=0).values + v1 = verticesOnEdge.isel(nEdges=innerEdges, TWO=1).values + + ind = numpy.arange(nInnerEdges) + indices[0, 2*ind] = ind + indices[1, 2*ind] = v1 + data[2*ind] = 1. + + indices[0, 2*ind+1] = ind + indices[1, 2*ind+1] = v0 + data[2*ind+1] = -1. + + # the streamfunction should be zero at all boundary vertices + ind = numpy.arange(nBoundaryVertices) + indices[0, 2*nInnerEdges + ind] = nInnerEdges + ind + indices[1, 2*nInnerEdges + ind] = boundaryVertices + data[2*nInnerEdges + ind] = 1. 
+ + bsfVertex = xarray.DataArray(numpy.zeros((nTime, nVertices)), + dims=('Time', 'nVertices')) + + for tIndex in range(nTime): + rhs = numpy.zeros(nInnerEdges+nBoundaryVertices, dtype=float) + + # convert to Sv + ind = numpy.arange(nInnerEdges) + rhs[ind] = 1e-6*transport.isel(Time=tIndex) + + ind = numpy.arange(nBoundaryVertices) + rhs[nInnerEdges + ind] = 0. + + M = scipy.sparse.csr_matrix((data, indices), + shape=(nInnerEdges+nBoundaryVertices, + nVertices)) + + solution = scipy.sparse.linalg.lsqr(M, rhs) + + bsfVertex[tIndex, :] = -solution[0] + + return bsfVertex + + + def _compute_barotropic_streamfunction_cell(dsMesh, bsfVertex): + ''' + Interpolate the barotropic streamfunction from vertices to cells + ''' + nEdgesOnCell = dsMesh.nEdgesOnCell + edgesOnCell = dsMesh.edgesOnCell - 1 + verticesOnCell = dsMesh.verticesOnCell - 1 + areaEdge = 0.25*dsMesh.dcEdge*dsMesh.dvEdge + + nCells = dsMesh.sizes['nCells'] + maxEdges = dsMesh.sizes['maxEdges'] + + areaVert = xarray.DataArray(numpy.zeros((nCells, maxEdges)), + dims=('nCells', 'maxEdges')) + + for iVert in range(maxEdges): + edgeIndices = edgesOnCell.isel(maxEdges=iVert) + mask = iVert < nEdgesOnCell + areaVert[:, iVert] += 0.5*mask*areaEdge.isel(nEdges=edgeIndices) + + for iVert in range(maxEdges-1): + edgeIndices = edgesOnCell.isel(maxEdges=iVert+1) + mask = iVert+1 < nEdgesOnCell + areaVert[:, iVert] += 0.5*mask*areaEdge.isel(nEdges=edgeIndices) + + edgeIndices = edgesOnCell.isel(maxEdges=0) + mask = nEdgesOnCell == maxEdges + areaVert[:, maxEdges-1] += 0.5*mask*areaEdge.isel(nEdges=edgeIndices) + + bsfCell = ((areaVert * bsfVertex[:, verticesOnCell]).sum(dim='maxEdges') / + areaVert.sum(dim='maxEdges')) + + return bsfCell + + + if __name__ == '__main__': + main() + +And here's the one for plotting it: + +.. 
code-block:: python + + #!/usr/bin/env python + + import xarray + import numpy + import matplotlib + import matplotlib.pyplot as plt + import matplotlib.ticker as mticker + import matplotlib.colors as cols + from mpl_toolkits.axes_grid1 import make_axes_locatable + import matplotlib.patches as mpatches + import cmocean + import cartopy + import pyproj + import os + + from pyremap import ProjectionGridDescriptor + + + def get_antarctic_stereographic_projection(): # {{{ + """ + Get a projection for an Antarctic steregraphic comparison grid + + Returns + ------- + projection : ``pyproj.Proj`` object + The projection + """ + # Authors + # ------- + # Xylar Asay-Davis + + projection = pyproj.Proj('+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 ' + '+k_0=1.0 +x_0=0.0 +y_0=0.0 +ellps=WGS84') + + return projection # }}} + + + def get_fris_stereographic_comparison_descriptor(): # {{{ + """ + Get a descriptor of a region of a polar stereographic grid centered on the + Filchner-Ronne Ice Shelf, used for remapping and determining the grid name + + Returns + ------- + descriptor : ``ProjectionGridDescriptor`` object + A descriptor of the FRIS comparison grid + """ + # Authors + # ------- + # Xylar Asay-Davis + + x = numpy.linspace(-1.6e6, -0.5e6, 1101) + y = numpy.linspace(0., 1.1e6, 1101) + Lx = 1e-3*(x[-1] - x[0]) + Ly = 1e-3*(y[-1] - y[0]) + dx = 1e-3*(x[1] - x[0]) + + projection = get_antarctic_stereographic_projection() + + meshName = '{}x{}km_{}km_FRIS_stereo'.format(Lx, Ly, dx) + descriptor = ProjectionGridDescriptor.create(projection, x, y, meshName) + + return descriptor # }}} + + + def add_land_lakes_coastline(ax): + land_50m = cartopy.feature.NaturalEarthFeature( + 'physical', 'land', '50m', edgecolor='k', + facecolor='#cccccc', linewidth=0.5) + lakes_50m = cartopy.feature.NaturalEarthFeature( + 'physical', 'lakes', '50m', edgecolor='k', + facecolor='white', + linewidth=0.5) + ax.add_feature(land_50m, zorder=2) + ax.add_feature(lakes_50m, zorder=4) + + + def 
add_arrow_to_line2D(ax, path, arrow_spacing=100e3,): + """ + https://stackoverflow.com/a/27637925/7728169 + Add arrows to a matplotlib.lines.Line2D at selected locations. + + Parameters: + ----------- + axes: + line: list of 1 Line2D object as returned by plot command + arrow_spacing: distance in m between arrows + + Returns: + -------- + arrows: list of arrows + """ + v = path.vertices + x = v[:, 0] + y = v[:, 1] + + arrows = [] + s = numpy.cumsum(numpy.sqrt(numpy.diff(x) ** 2 + numpy.diff(y) ** 2)) + indices = numpy.searchsorted(s, arrow_spacing*numpy.arange(1, + int(s[-1]/arrow_spacing))) + for n in indices: + dx = numpy.mean(x[n-2:n]) - x[n] + dy = numpy.mean(y[n-2:n]) - y[n] + p = mpatches.FancyArrow( + x[n], y[n], dx, dy, length_includes_head=False, width=4e3, + facecolor='k') + ax.add_patch(p) + arrows.append(p) + return arrows + + + def savefig(filename, tight=True, pad_inches=0.1, plot_pdf=True): + """ + Saves the current plot to a file, then closes it. + Parameters + ---------- + filename : str + the file name to be written + config : mpas_analysis.configuration.MpasAnalysisConfigParser + Configuration options + tight : bool, optional + whether to tightly crop the figure + pad_inches : float, optional + The boarder around the image + """ + # Authors + # ------- + # Xylar Asay-Davis + + if tight: + bbox_inches = 'tight' + else: + bbox_inches = None + + filenames = [filename] + + if plot_pdf: + pdf_filename = '{}.pdf'.format(os.path.splitext(filename)[0]) + filenames.append(pdf_filename) + + for path in filenames: + plt.savefig(path, dpi='figure', bbox_inches=bbox_inches, + pad_inches=pad_inches) + + plt.close() + + + descriptor = get_fris_stereographic_comparison_descriptor() + + projection = cartopy.crs.Stereographic( + central_latitude=-90., central_longitude=0.0, + true_scale_latitude=-71.0) + + matplotlib.rc('font', size=14) + + x = descriptor.xCorner + y = descriptor.yCorner + + extent = [x[0], x[-1], y[0], y[-1]] + + dx = x[1] - x[0] + dy = y[1] - 
y[0] + + fig = plt.figure(figsize=[15, 7.5], dpi=200) + + titles = ['control (yrs 51-60)', 'control (yrs 111-120)'] + + for index, yrs in enumerate(['0051-0060', '0111-0120']): + filename = 'control/bsf_{}_1100.0x1100.0km_1.0km_' \ + 'FRIS_stereo_patch.nc'.format(yrs) + with xarray.open_dataset(filename) as ds: + + ds = ds.isel(Time=0) + + bsf = ds.bsfVertex + bsf = bsf.where(bsf != 0.).values + + #u = 1e6*(bsf[2:, 1:-1] - bsf[:-2, 1:-1])/dy + #v = -1e6*(bsf[1:-1, 2:] - bsf[1:-1, :-2])/dx + + #x = 0.5*(x[1:-2] + x[2:-1]) + #y = 0.5*(y[1:-2] + y[2:-1]) + + xc = 0.5*(x[0:-1] + x[1:]) + yc = 0.5*(y[0:-1] + y[1:]) + + ax = fig.add_subplot(121+index, projection=projection) + + ax.set_title(titles[index], y=1.06, size=16) + + ax.set_extent(extent, crs=projection) + + gl = ax.gridlines(crs=cartopy.crs.PlateCarree(), color='k', + linestyle=':', zorder=5, draw_labels=False) + gl.xlocator = mticker.FixedLocator(numpy.arange(-180., 181., 10.)) + gl.ylocator = mticker.FixedLocator(numpy.arange(-88., 81., 2.)) + gl.n_steps = 100 + gl.rotate_labels = False + gl.x_inline = False + gl.y_inline = False + gl.xformatter = cartopy.mpl.gridliner.LONGITUDE_FORMATTER + gl.yformatter = cartopy.mpl.gridliner.LATITUDE_FORMATTER + gl.left_labels = False + gl.right_labels = False + + add_land_lakes_coastline(ax) + + norm = cols.SymLogNorm(linthresh=0.1, linscale=0.5, vmin=-10., vmax=10.) + ticks = [-10., -3., -1., -0.3, -0.1, 0., 0.1, 0.3, 1., 3., 10.] 
+ + levels = numpy.linspace(-1., 1., 11) + + handle = plt.pcolormesh(x, y, bsf, norm=norm, cmap='cmo.curl', + rasterized=True) + + cs = plt.contour(xc, yc, bsf, levels=levels, colors='k') + + for collection in cs.collections: + for path in collection.get_paths(): + add_arrow_to_line2D(ax, path) + + divider = make_axes_locatable(ax) + cax = divider.append_axes("right", size="5%", pad=0.1, + axes_class=plt.Axes) + if index < 1: + cax.set_axis_off() + else: + cbar = plt.colorbar(handle, cax=cax) + cbar.set_label('Barotropic streamfunction (Sv)') + cbar.set_ticks(ticks) + cbar.set_ticklabels(['{}'.format(tick) for tick in ticks]) + +Here's a plot that I think was produced with this code (but I'm not 100% sure). + +.. image:: images/bsf.png + :width: 903 px + :align: center + +3. Selecting an existing task to copy +------------------------------------- + +I selected :py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly` as the +analysis task that was closest to what I envision for a new +``ClimatologyMapBSF`` task. Here were my thoughts: + +* Both OHC and BSF plot 2D fields (as opposed to some of the analysis like + WOA, Argo and SOSE that work with 3D temperature, salinity and sometimes + other fields). + +* Neither OHC nor BSF have observations to compare with. + +* Both OHC and BSF require computing a new field, rather than directly using + output from MPAS-Ocean. + +On the other hand, there are some major differences between the 2 that will +mean my job isn't a simple substitution: + +* While OHC is computed over different depth ranges, we do not want that for + the BSF analysis. + +* We will eventually want some "fancier" plotting for the BSF that draws + streamlines with arrows. That's not currently available in any MPAS-Analysis + tasks. + +* OHC involves computing an anomaly, but that isn't anything we need for BSF. + +Even so, :py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly` seems like +a reasonable starting point. + +4. 
Developing the task +---------------------- + +I'll start just by making a new worktree, then copying the "template" analysis +task to the new name: + +.. code-block:: bash + + git worktree add ../add_climatology_map_bsf + cd ../add_climatology_map_bsf + cp mpas_analysis/ocean/climatology_map_ohc_anomaly.py mpas_analysis/ocean/climatology_map_bsf.py + +Then, I'll open this new worktree in PyCharm. (You can, of course, use +whatever editor you like.) + +.. code-block:: bash + + pycharm-community . + +I'll create or recreate my ``mpas_dev`` environment as in +:ref:`tutorial_dev_getting_started`, and then make sure to at least do: + +.. code-block:: bash + + conda activate mpas_dev + python -m pip install -e . + +4.1 ``ClimatologyMapBSF`` class +------------------------------- + +In the editor, I rename the class from ``ClimatologyMapOHCAnomaly`` to +``ClimatologyMapBSF`` and task name from ``climatologyMapOHCAnomaly`` to +``climatologyMapBSF``. + +Then, I update the docstring right away because otherwise I'll forget! + +.. code-block:: python + + class ClimatologyMapBSF(AnalysisTask): + """ + An analysis task for computing and plotting maps of the barotropic + streamfunction (BSF) + + Attributes + ---------- + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped and plotted + """ + +I keep the ``mpas_climatology_task`` attribute because I'm going to need a +climatology of the velocity field and layer thicknesses that I will get from +that task, but I know I won't need the ``ref_year_climatology_task`` attribute +so I get rid of it. + +4.2 Constructor +~~~~~~~~~~~~~~~ + +Then, I move on to the constructor. The main things I need to do besides +renaming the task are: + +* rename the field I'm processing to ``barotropicStreamfunction``. + +* clean up the ``tags`` a little bit (change ``anomaly`` to ``streamfunction``). 
+ +* get rid of ``ref_year_climatology_task`` since I'm not computing anomalies. + +* get rid of ``depth_range`` because I'm using only the full ocean column. + +.. code-block:: python + + def __init__(self, config, mpas_climatology_task, control_config=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped and plotted + + control_config : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + + field_name = 'barotropicStreamfunction' + # call the constructor from the base class (AnalysisTask) + super().__init__(config=config, taskName='climatologyMapBSF', + componentName='ocean', + tags=['climatology', 'horizontalMap', field_name, + 'publicObs', 'streamfunction']) + + self.mpas_climatology_task = mpas_climatology_task + + section_name = self.taskName + + # read in what seasons we want to plot + seasons = config.getexpression(section_name, 'seasons') + + if len(seasons) == 0: + raise ValueError(f'config section {section_name} does not contain ' + f'valid list of seasons') + + comparison_grid_names = config.getexpression(section_name, + 'comparisonGrids') + + if len(comparison_grid_names) == 0: + raise ValueError(f'config section {section_name} does not contain ' + f'valid list of comparison grids') + +Next, I need to update the ``mpas_field_name`` (which I can choose since I'm +computing the field here, it's not something produced by MPAS-Ocean). And then +I need to specify the fields from the ``timeSeriesStatsMonthlyOutput`` data +that I will use in the computation: + +.. 
code-block:: python + + mpas_field_name = field_name + + variable_list = ['timeMonthly_avg_normalVelocity', + 'timeMonthly_avg_layerThickness'] + +In the next block of code, I: + +* get rid of the for-loop over depth ranges and unindent the code that was in + it. + +* rename ``RemapMpasOHCClimatology`` to ``RemapMpasBSFClimatology`` (we will + get to this in section 5) + +* make my best guess about the arguments I do and don't need for the + constructor of ``RemapMpasBSFClimatology`` + +.. code-block:: python + + remap_climatology_subtask = RemapMpasBSFClimatology( + mpas_climatology_task=mpas_climatology_task, + parent_task=self, + climatology_name=field_name, + variable_list=variable_list, + comparison_grid_names=comparison_grid_names, + seasons=seasons) + + self.add_subtask(remap_climatology_subtask) + +In the remainder of the constructor, I + +* update things like the name of the field being plotted and the units + +* continue to get rid of things related to depth range + +.. code-block:: python + + out_file_label = field_name + remap_observations_subtask = None + if control_config is None: + ref_title_label = None + ref_field_name = None + diff_title_label = 'Model - Observations' + + else: + control_run_name = control_config.get('runs', 'mainRunName') + ref_title_label = f'Control: {control_run_name}' + ref_field_name = mpas_field_name + diff_title_label = 'Main - Control' + + for comparison_grid_name in comparison_grid_names: + for season in seasons: + # make a new subtask for this season and comparison grid + subtask_name = f'plot{season}_{comparison_grid_name}' + + subtask = PlotClimatologyMapSubtask( + self, season, comparison_grid_name, + remap_climatology_subtask, remap_observations_subtask, + controlConfig=control_config, subtaskName=subtask_name) + + subtask.set_plot_info( + outFileLabel=out_file_label, + fieldNameInTitle=f'Barotropic Streamfunction', + mpasFieldName=mpas_field_name, + refFieldName=ref_field_name, + refTitleLabel=ref_title_label, + 
diffTitleLabel=diff_title_label, + unitsLabel='Sv', + imageCaption='Barotropic Streamfunction', + galleryGroup='Barotropic Streamfunction', + groupSubtitle=None, + groupLink='bsf', + galleryName=None) + + self.add_subtask(subtask) + +This will result in a "gallery" on the web page called "Barotropic +Streamfunction" with a single image in it. That seems a little silly but +we'll change that later if we feel the need. + +4.3 ``setup_and_check()`` method +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In the OHC analysis task, we needed to check if the reference year for the +anomaly and the climatology year were different from one another. We don't +need this check for the BSF because we're not computing an anomaly here. So +we can get rid of the ``setup_and_check()`` method entirely and the version +from ``AnalysisTask`` (the superclass) will be called automatically. + +At this point, I commit my changes even though I'm less than halfway done. + +.. code-block:: bash + + git add mpas_analysis/ocean/climatology_map_bsf.py + git commit + +I can always do + +.. code-block:: bash + + git commit --amend mpas_analysis/ocean/climatology_map_bsf.py + +to keep adding changes to my commit as I go. + +5. Developing a subtask +----------------------- + +Similarly to how ``RemapMpasOHCClimatology`` computes the ocean heat content, +we need a class for computing the barotropic streamfunction before we remap +to the comparison grid. In general, it is important to perform computations +on the native MPAS mesh before remapping to the comparison grid but in the +case of the barotropic streamfunction, this is especially true. Any attempt +to compute this analysis directly on the comparison grid (e.g. using remapped, +reconstructed velocity components) would be woefully inaccurate. 
+ +5.1 ``RemapMpasBSFClimatology`` class +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +We start by renaming the class from ``RemapMpasOHCClimatology`` to +``RemapMpasBSFClimatology``, updating the docstring, removing the unneeded +attributes: + +.. code-block:: python + + class RemapMpasBSFClimatology(RemapMpasClimatologySubtask): + """ + A subtask for computing climatologies of the barotropic streamfunction + from climatologies of normal velocity and layer thickness + """ + +5.2 Constructor +~~~~~~~~~~~~~~~ + +I started by taking out all of the unneeded parameters from the constructor. +What I was left with was simply a call to the constructor of the superclass +:py:class:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask`. +In such a case, there is no point in overriding the constructor. We should +simply leave the constructor for the superclass. The main difference is that +I had switched away from mixed capitalization in the +``RemapMpasOHCClimatology`` to conform to the PEP8 style guide. The superclass +still uses mixed case so we will have to change the call in +``ClimatologyMapBSF`` just a little: + + +.. code-block:: python + + remap_climatology_subtask = RemapMpasBSFClimatology( + mpasClimatologyTask=mpas_climatology_task, + parentTask=self, + climatologyName=field_name, + variableList=variable_list, + comparisonGridNames=comparison_grid_names, + seasons=seasons) + +5.3 ``setup_and_check()`` method +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The same turns out to be true of ``setup_and_check()``. As soon as I get rid +of everything we no longer need in the BSF version, all I am left with is a +call to the superclass' version, and in that case we might as well get rid of +the method entirely. + +5.4 ``customize_masked_climatology()`` method +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Finally, we've gotten to the part where the real work will take place!
+ +The sub task will run in the same way as described in +:ref:`tutorial_understand_a_task_subtask_run_task` of the +:ref:`tutorial_understand_a_task` tutorial. In the process, the +``customize_masked_climatology()`` method will get called and that's our chance +to make some changes. + +Before writing that method, first, I copy the 3 helper functions +``_compute_transport()``, ``_compute_barotropic_streamfunction_vertex()``, and +``_compute_barotropic_streamfunction_cell()`` from my example script. Other +than making them methods instead of functions and cleaning up the syntax a bit +so they conform to the PEP8 style guide, I leave them unchanged: + +.. code-block:: python + + def _compute_transport(self, ds_mesh, ds): + + cells_on_edge = ds_mesh.cellsOnEdge - 1 + inner_edges = np.logical_and(cells_on_edge.isel(TWO=0) >= 0, + cells_on_edge.isel(TWO=1) >= 0) + + # convert from boolean mask to indices + inner_edges = np.flatnonzero(inner_edges.values) + + cell0 = cells_on_edge.isel(nEdges=inner_edges, TWO=0) + cell1 = cells_on_edge.isel(nEdges=inner_edges, TWO=1) + + layer_thickness = ds.timeMonthly_avg_layerThickness + normal_velocity = \ + ds.timeMonthly_avg_normalVelocity.isel(nEdges=inner_edges) + + layer_thickness_edge = 0.5*(layer_thickness.isel(nCells=cell0) + + layer_thickness.isel(nCells=cell1)) + transport = ds_mesh.dvEdge[inner_edges] * \ + (layer_thickness_edge * normal_velocity).sum(dim='nVertLevels') + + return inner_edges, transport + + def _compute_barotropic_streamfunction_vertex(self, ds_mesh, ds): + inner_edges, transport = self._compute_transport(ds_mesh, ds) + print('transport computed.') + + nvertices = ds_mesh.sizes['nVertices'] + ntime = ds.sizes['Time'] + + cells_on_vertex = ds_mesh.cellsOnVertex - 1 + vertices_on_edge = ds_mesh.verticesOnEdge - 1 + is_boundary_cov = cells_on_vertex == -1 + boundary_vertices = np.logical_or(is_boundary_cov.isel(vertexDegree=0), + is_boundary_cov.isel(vertexDegree=1)) + boundary_vertices = 
np.logical_or(boundary_vertices, + is_boundary_cov.isel(vertexDegree=2)) + + # convert from boolean mask to indices + boundary_vertices = np.flatnonzero(boundary_vertices.values) + + n_boundary_vertices = len(boundary_vertices) + n_inner_edges = len(inner_edges) + + indices = np.zeros((2, 2*n_inner_edges+n_boundary_vertices), dtype=int) + data = np.zeros(2*n_inner_edges+n_boundary_vertices, dtype=float) + + # The difference between the streamfunction at vertices on an inner + # edge should be equal to the transport + v0 = vertices_on_edge.isel(nEdges=inner_edges, TWO=0).values + v1 = vertices_on_edge.isel(nEdges=inner_edges, TWO=1).values + + ind = np.arange(n_inner_edges) + indices[0, 2*ind] = ind + indices[1, 2*ind] = v1 + data[2*ind] = 1. + + indices[0, 2*ind+1] = ind + indices[1, 2*ind+1] = v0 + data[2*ind+1] = -1. + + # the streamfunction should be zero at all boundary vertices + ind = np.arange(n_boundary_vertices) + indices[0, 2*n_inner_edges + ind] = n_inner_edges + ind + indices[1, 2*n_inner_edges + ind] = boundary_vertices + data[2*n_inner_edges + ind] = 1. + + bsf_vertex = xr.DataArray(np.zeros((ntime, nvertices)), + dims=('Time', 'nVertices')) + + for tindex in range(ntime): + rhs = np.zeros(n_inner_edges+n_boundary_vertices, dtype=float) + + # convert to Sv + ind = np.arange(n_inner_edges) + rhs[ind] = 1e-6*transport.isel(Time=tindex) + + ind = np.arange(n_boundary_vertices) + rhs[n_inner_edges + ind] = 0. 
+ + matrix = scipy.sparse.csr_matrix( + (data, indices), + shape=(n_inner_edges+n_boundary_vertices, nvertices)) + + solution = scipy.sparse.linalg.lsqr(matrix, rhs) + + bsf_vertex[tindex, :] = -solution[0] + + return bsf_vertex + + def _compute_barotropic_streamfunction_cell(self, ds_mesh, bsf_vertex): + """ + Interpolate the barotropic streamfunction from vertices to cells + """ + n_edges_on_cell = ds_mesh.nEdgesOnCell + edges_on_cell = ds_mesh.edgesOnCell - 1 + vertices_on_cell = ds_mesh.verticesOnCell - 1 + area_edge = 0.25*ds_mesh.dcEdge*ds_mesh.dvEdge + + ncells = ds_mesh.sizes['nCells'] + max_edges = ds_mesh.sizes['maxEdges'] + + area_vert = xr.DataArray(np.zeros((ncells, max_edges)), + dims=('nCells', 'maxEdges')) + + for ivert in range(max_edges): + edge_indices = edges_on_cell.isel(maxEdges=ivert) + mask = ivert < n_edges_on_cell + area_vert[:, ivert] += 0.5*mask*area_edge.isel(nEdges=edge_indices) + + for ivert in range(max_edges-1): + edge_indices = edges_on_cell.isel(maxEdges=ivert+1) + mask = ivert+1 < n_edges_on_cell + area_vert[:, ivert] += 0.5*mask*area_edge.isel(nEdges=edge_indices) + + edge_indices = edges_on_cell.isel(maxEdges=0) + mask = n_edges_on_cell == max_edges + area_vert[:, max_edges-1] += \ + 0.5*mask*area_edge.isel(nEdges=edge_indices) + + bsf_cell = \ + ((area_vert * bsf_vertex[:, vertices_on_cell]).sum(dim='maxEdges') / + area_vert.sum(dim='maxEdges')) + + return bsf_cell + +I also add some missing imports and delete an unused one at the top: + +.. code-block:: python + + import xarray as xr + import numpy as np + import scipy.sparse + import scipy.sparse.linalg + + from mpas_analysis.shared import AnalysisTask + from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask + from mpas_analysis.ocean.plot_climatology_map_subtask import \ + PlotClimatologyMapSubtask + +Finally, I substitute the functionality of the ``main()`` function in my +script into the ``customize_masked_climatology()`` function: + +.. 
code-block:: python + + def customize_masked_climatology(self, climatology, season): + """ + Compute the ocean heat content (OHC) anomaly from the temperature + and layer thickness fields. + + Parameters + ---------- + climatology : xarray.Dataset + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : xarray.Dataset + the modified climatology data set + """ + logger = self.logger + + ds_mesh = xr.open_dataset(self.restartFileName) + ds_mesh = ds_mesh[['cellsOnEdge', 'cellsOnVertex', 'nEdgesOnCell', + 'edgesOnCell', 'verticesOnCell', 'verticesOnEdge', + 'dcEdge', 'dvEdge']] + ds_mesh.load() + + bsf_vertex = self._compute_barotropic_streamfunction_vertex( + ds_mesh, climatology) + logger.info('bsf on vertices computed.') + bsf_cell = self._compute_barotropic_streamfunction_cell( + ds_mesh, bsf_vertex) + logger.info('bsf on cells computed.') + + climatology['barotropicStreamfunction'] = \ + bsf_cell.transpose('Time', 'nCells', 'nVertices') + climatology.barotropicStreamfunction.attrs['units'] = 'Sv' + climatology.barotropicStreamfunction.attrs['description'] = \ + 'barotropic streamfunction at cell centers' + + climatology = climatology.drop_vars(self.variableList) + + return climatology + +We get mesh variables from a restart file to make the xarray dataset +``ds_mesh``. These are passed on to the helper functions. + +We use ``logger.info()`` instead of ``print()`` so the output goes to a log +file. (This isn't strictly necessary since MPAS-Analysis also hijacks the +``print()`` function to make sure its output goes to log files, but it makes +clearer what we expect and also opens up the opportunity to use +``logger.debug()``, ``logger.warn()`` and ``logger.error()`` where +appropriate.) + +There isn't a way to store the barotropic streamfunction on vertices in the +climatology, as was done in the original script, because the remapping code is +expecting data only at cell centers.
+ +Before we return the modified climatology, we drop the normal velocity and +layer thickness from the data set, since they were only needed to help us +compute the BSF. + +6. Config options +----------------- + +We're not quite done yet. We need to set some config options for the analysis +task that the :py:class:`~mpas_analysis.ocean.plot_climatology_map_subtask.PlotClimatologyMapSubtask` +subtask is expecting. Again, an easy starting point is to copy the +``[climatologyMapOHCAnomaly]`` section of the ``default.cfg`` file into a new +``[climatologyMapBSF]`` section, and then delete the things we don't need, +and finally make a few modifications so the color map and data range is more +similar to the plot script I used above: + +.. code-block:: ini + + [climatologyMapBSF] + ## options related to plotting horizontally remapped climatologies of + ## the barotropic streamfunction (BSF) against control model results + ## (if available) + + # colormap for model/observations + colormapNameResult = cmo.curl + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # color indices into colormapName for filled contours + # the type of norm used in the colormap + normTypeResult = symLog + # A dictionary with keywords for the norm + normArgsResult = {'linthresh': 0.1, 'linscale': 0.5, 'vmin': -10., 'vmax': 10.} + colorbarTicksResult = [-10., -3., -1., -0.3, -0.1, 0., 0.1, 0.3, 1., 3., 10.] + + # colormap for differences + colormapNameDifference = cmo.balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = symLog + # A dictionary with keywords for the norm + normArgsDifference = {'linthresh': 0.1, 'linscale': 0.5, 'vmin': -10., + 'vmax': 10.} + colorbarTicksDifference = [-10., -3., -1., -0.3, -0.1, 0., 0.1, 0.3, 1., 3., + 10.] 
+ + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + +7. Adding the task +------------------ + +There is one last step required to add this task to MPAS-Analysis. You should +add the task to the ``mpas_analysis/ocean/__init__.py`` so it is a little +easier to import the task. Try to add it near similar tasks: + +.. code-block:: python + :emphasize-lines: 2-3 + + from mpas_analysis.ocean.climatology_map_eke import ClimatologyMapEKE + from mpas_analysis.ocean.climatology_map_bsf import \ + ClimatologyMapBSF + from mpas_analysis.ocean.climatology_map_ohc_anomaly import \ + ClimatologyMapOHCAnomaly + +Then, add the task in ``mpas_analysis/__main__.py``: + +.. code-block:: python + :emphasize-lines: 4-6 + + analyses.append(ocean.ClimatologyMapEKE(config, + oceanClimatolgyTasks['avg'], + controlConfig)) + analyses.append(ocean.ClimatologyMapBSF(config, + oceanClimatolgyTasks['avg'], + controlConfig)) + analyses.append(ocean.ClimatologyMapOHCAnomaly( + config, oceanClimatolgyTasks['avg'], oceanRefYearClimatolgyTask, + controlConfig)) + +A quick way to check if the task has been added correctly is to run: + +.. code-block:: bash + + mpas_analysis --list + +You should see the new task in the list of tasks. + + +8. The full code for posterity +------------------------------ + +Since the ``ClimatologyMapBSF`` analysis task is not in MPAS-Analysis yet and +since it may have evolved by the time it gets added, here is the full code as +described in this tutorial: + +.. code-block:: python + + # This software is open source software available under the BSD-3 license. + # + # Copyright (c) 2022 Triad National Security, LLC. All rights reserved. + # Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights + # reserved. + # Copyright (c) 2022 UT-Battelle, LLC.
All rights reserved. + # + # Additional copyright and license information can be found in the LICENSE file + # distributed with this code, or at + # https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE + import xarray as xr + import numpy as np + import scipy.sparse + import scipy.sparse.linalg + + from mpas_analysis.shared import AnalysisTask + from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask + from mpas_analysis.ocean.plot_climatology_map_subtask import \ + PlotClimatologyMapSubtask + + + class ClimatologyMapBSF(AnalysisTask): + """ + An analysis task for computing and plotting maps of the barotropic + streamfunction (BSF) + + Attributes + ---------- + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped and plotted + """ + + def __init__(self, config, mpas_climatology_task, control_config=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped and plotted + + control_config : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + + field_name = 'barotropicStreamfunction' + # call the constructor from the base class (AnalysisTask) + super().__init__(config=config, taskName='climatologyMapBSF', + componentName='ocean', + tags=['climatology', 'horizontalMap', field_name, + 'publicObs', 'streamfunction']) + + self.mpas_climatology_task = mpas_climatology_task + + section_name = self.taskName + + # read in what seasons we want to plot + seasons = config.getexpression(section_name, 'seasons') + + if len(seasons) == 0: + raise ValueError(f'config section {section_name} does not contain ' + f'valid list of seasons') + + comparison_grid_names = config.getexpression(section_name, + 
'comparisonGrids') + + if len(comparison_grid_names) == 0: + raise ValueError(f'config section {section_name} does not contain ' + f'valid list of comparison grids') + + mpas_field_name = field_name + + variable_list = ['timeMonthly_avg_normalVelocity', + 'timeMonthly_avg_layerThickness'] + + remap_climatology_subtask = RemapMpasBSFClimatology( + mpasClimatologyTask=mpas_climatology_task, + parentTask=self, + climatologyName=field_name, + variableList=variable_list, + comparisonGridNames=comparison_grid_names, + seasons=seasons) + + self.add_subtask(remap_climatology_subtask) + + out_file_label = field_name + remap_observations_subtask = None + if control_config is None: + ref_title_label = None + ref_field_name = None + diff_title_label = 'Model - Observations' + + else: + control_run_name = control_config.get('runs', 'mainRunName') + ref_title_label = f'Control: {control_run_name}' + ref_field_name = mpas_field_name + diff_title_label = 'Main - Control' + + for comparison_grid_name in comparison_grid_names: + for season in seasons: + # make a new subtask for this season and comparison grid + subtask_name = f'plot{season}_{comparison_grid_name}' + + subtask = PlotClimatologyMapSubtask( + self, season, comparison_grid_name, + remap_climatology_subtask, remap_observations_subtask, + controlConfig=control_config, subtaskName=subtask_name) + + subtask.set_plot_info( + outFileLabel=out_file_label, + fieldNameInTitle=f'Barotropic Streamfunction', + mpasFieldName=mpas_field_name, + refFieldName=ref_field_name, + refTitleLabel=ref_title_label, + diffTitleLabel=diff_title_label, + unitsLabel='Sv', + imageCaption='Barotropic Streamfunction', + galleryGroup='Barotropic Streamfunction', + groupSubtitle=None, + groupLink='bsf', + galleryName=None) + + self.add_subtask(subtask) + + + class RemapMpasBSFClimatology(RemapMpasClimatologySubtask): + """ + A subtask for computing climatologies of the barotropic streamfunction + from climatologies of normal velocity and layer 
thickness + """ + + def customize_masked_climatology(self, climatology, season): + """ + Compute the ocean heat content (OHC) anomaly from the temperature + and layer thickness fields. + + Parameters + ---------- + climatology : xarray.Dataset + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : xarray.Dataset + the modified climatology data set + """ + logger = self.logger + + ds_mesh = xr.open_dataset(self.restartFileName) + ds_mesh = ds_mesh[['cellsOnEdge', 'cellsOnVertex', 'nEdgesOnCell', + 'edgesOnCell', 'verticesOnCell', 'verticesOnEdge', + 'dcEdge', 'dvEdge']] + ds_mesh.load() + + bsf_vertex = self._compute_barotropic_streamfunction_vertex( + ds_mesh, climatology) + logger.info('bsf on vertices computed.') + bsf_cell = self._compute_barotropic_streamfunction_cell( + ds_mesh, bsf_vertex) + logger.info('bsf on cells computed.') + + climatology['barotropicStreamfunction'] = \ + bsf_cell.transpose('Time', 'nCells', 'nVertices') + climatology.barotropicStreamfunction.attrs['units'] = 'Sv' + climatology.barotropicStreamfunction.attrs['description'] = \ + 'barotropic streamfunction at cell centers' + + climatology = climatology.drop_vars(self.variableList) + + return climatology + + def _compute_transport(self, ds_mesh, ds): + + cells_on_edge = ds_mesh.cellsOnEdge - 1 + inner_edges = np.logical_and(cells_on_edge.isel(TWO=0) >= 0, + cells_on_edge.isel(TWO=1) >= 0) + + # convert from boolean mask to indices + inner_edges = np.flatnonzero(inner_edges.values) + + cell0 = cells_on_edge.isel(nEdges=inner_edges, TWO=0) + cell1 = cells_on_edge.isel(nEdges=inner_edges, TWO=1) + + layer_thickness = ds.timeMonthly_avg_layerThickness + normal_velocity = \ + ds.timeMonthly_avg_normalVelocity.isel(nEdges=inner_edges) + + layer_thickness_edge = 0.5*(layer_thickness.isel(nCells=cell0) + + layer_thickness.isel(nCells=cell1)) + transport = ds_mesh.dvEdge[inner_edges] * \ + (layer_thickness_edge * 
normal_velocity).sum(dim='nVertLevels') + + return inner_edges, transport + + def _compute_barotropic_streamfunction_vertex(self, ds_mesh, ds): + inner_edges, transport = self._compute_transport(ds_mesh, ds) + print('transport computed.') + + nvertices = ds_mesh.sizes['nVertices'] + ntime = ds.sizes['Time'] + + cells_on_vertex = ds_mesh.cellsOnVertex - 1 + vertices_on_edge = ds_mesh.verticesOnEdge - 1 + is_boundary_cov = cells_on_vertex == -1 + boundary_vertices = np.logical_or(is_boundary_cov.isel(vertexDegree=0), + is_boundary_cov.isel(vertexDegree=1)) + boundary_vertices = np.logical_or(boundary_vertices, + is_boundary_cov.isel(vertexDegree=2)) + + # convert from boolean mask to indices + boundary_vertices = np.flatnonzero(boundary_vertices.values) + + n_boundary_vertices = len(boundary_vertices) + n_inner_edges = len(inner_edges) + + indices = np.zeros((2, 2*n_inner_edges+n_boundary_vertices), dtype=int) + data = np.zeros(2*n_inner_edges+n_boundary_vertices, dtype=float) + + # The difference between the streamfunction at vertices on an inner + # edge should be equal to the transport + v0 = vertices_on_edge.isel(nEdges=inner_edges, TWO=0).values + v1 = vertices_on_edge.isel(nEdges=inner_edges, TWO=1).values + + ind = np.arange(n_inner_edges) + indices[0, 2*ind] = ind + indices[1, 2*ind] = v1 + data[2*ind] = 1. + + indices[0, 2*ind+1] = ind + indices[1, 2*ind+1] = v0 + data[2*ind+1] = -1. + + # the streamfunction should be zero at all boundary vertices + ind = np.arange(n_boundary_vertices) + indices[0, 2*n_inner_edges + ind] = n_inner_edges + ind + indices[1, 2*n_inner_edges + ind] = boundary_vertices + data[2*n_inner_edges + ind] = 1. 
+ + bsf_vertex = xr.DataArray(np.zeros((ntime, nvertices)), + dims=('Time', 'nVertices')) + + for tindex in range(ntime): + rhs = np.zeros(n_inner_edges+n_boundary_vertices, dtype=float) + + # convert to Sv + ind = np.arange(n_inner_edges) + rhs[ind] = 1e-6*transport.isel(Time=tindex) + + ind = np.arange(n_boundary_vertices) + rhs[n_inner_edges + ind] = 0. + + matrix = scipy.sparse.csr_matrix( + (data, indices), + shape=(n_inner_edges+n_boundary_vertices, nvertices)) + + solution = scipy.sparse.linalg.lsqr(matrix, rhs) + + bsf_vertex[tindex, :] = -solution[0] + + return bsf_vertex + + def _compute_barotropic_streamfunction_cell(self, ds_mesh, bsf_vertex): + """ + Interpolate the barotropic streamfunction from vertices to cells + """ + n_edges_on_cell = ds_mesh.nEdgesOnCell + edges_on_cell = ds_mesh.edgesOnCell - 1 + vertices_on_cell = ds_mesh.verticesOnCell - 1 + area_edge = 0.25*ds_mesh.dcEdge*ds_mesh.dvEdge + + ncells = ds_mesh.sizes['nCells'] + max_edges = ds_mesh.sizes['maxEdges'] + + area_vert = xr.DataArray(np.zeros((ncells, max_edges)), + dims=('nCells', 'maxEdges')) + + for ivert in range(max_edges): + edge_indices = edges_on_cell.isel(maxEdges=ivert) + mask = ivert < n_edges_on_cell + area_vert[:, ivert] += 0.5*mask*area_edge.isel(nEdges=edge_indices) + + for ivert in range(max_edges-1): + edge_indices = edges_on_cell.isel(maxEdges=ivert+1) + mask = ivert+1 < n_edges_on_cell + area_vert[:, ivert] += 0.5*mask*area_edge.isel(nEdges=edge_indices) + + edge_indices = edges_on_cell.isel(maxEdges=0) + mask = n_edges_on_cell == max_edges + area_vert[:, max_edges-1] += \ + 0.5*mask*area_edge.isel(nEdges=edge_indices) + + bsf_cell = \ + ((area_vert * bsf_vertex[:, vertices_on_cell]).sum(dim='maxEdges') / + area_vert.sum(dim='maxEdges')) + + return bsf_cell diff --git a/1.11.0rc1/_sources/tutorials/dev_getting_started.rst.txt b/1.11.0rc1/_sources/tutorials/dev_getting_started.rst.txt new file mode 100644 index 000000000..abee8f44c --- /dev/null +++ 
b/1.11.0rc1/_sources/tutorials/dev_getting_started.rst.txt @@ -0,0 +1,794 @@ +.. _tutorial_dev_getting_started: + +Developer: Getting Started +========================== + +This mini-tutorial is meant as the starting point for other tutorials for +developers. It describes the process for creating a fork of the MPAS-Analysis +repo, cloning the repository (and your fork) locally, making a git worktree for +development, and creating a conda environment that includes the +``mpas_analysis`` package and all of its dependencies, installed in a mode +appropriate for development. + +1. Getting started on GitHub +---------------------------- + +1.1 Forking MPAS-Analysis +~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you would like to contribute to MPAS-Analysis, you will need to create your +own fork of the `repository `_. Go +to the link and click on ``Fork`` near the top right corner of the page. The +Owner should be your GitHub username and the Repository name should be +``MPAS-Analysis``. Check the box for "Copy the develop branch only". Click +"Create fork". + +1.2 Adding SSH keys +~~~~~~~~~~~~~~~~~~~ + +If you have not already done so, you should add SSH keys to GitHub that allow +you to push to your fork from the machine(s) where you will do your +development. Instructions can be found +`here `_. + +1.3 Local git configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +It will be convenient to have some basic configuration for ``git`` taken care +of before we clone the repository. Here are some recommended config options +to set. Edit your ``~/.gitconfig`` (create it if it doesn't exist). + +.. code-block:: ini + + [user] + name = Xylar Asay-Davis + email = xylarstorm@gmail.com + [core] + editor = vim + [color] + ui = true + [alias] + logg = log --graph --oneline --decorate + [rebase] + autosquash = true + +Obviously, change ``[user]`` config options to appropriate values for you. +You *must* use the email address associated with your GitHub account. 
+Otherwise, your commits will not be associated with your GitHub user name. + +2. Cloning the repository +------------------------- + +You will want to clone both the main MPAS-Analysis repository and your own +fork. The MPAS-Analysis development tutorials assume that you will be +developing branches in different worktrees and recommend a directory structure +appropriate for this approach. + +.. note:: + + If you are on a machine with an old version of ``git``, you may need to + add: + + .. code-block:: bash + + module load git + + to your ``.bashrc``. You want a pretty recent version of ``git`` so you + have the ``git worktree`` command. + +Begin by creating a "base" directory for development in a convenient location +for keeping code. This should not be on a "scratch" or other temporary drive +on an HPC machine. The base directory should be named ``MPAS-Analysis``, +``mpas-analysis`` or something similar. + +.. code-block:: bash + + $ mkdir mpas-analysis + $ cd mpas-analysis + +Within the base directory, clone the main repository into a directory called +``develop`` (the default branch is the ``develop`` branch): + +.. code-block:: bash + + $ git clone git@github.com:MPAS-Dev/MPAS-Analysis.git develop + $ cd develop + +Add your fork as a "remote": + +.. code-block:: bash + + $ git remote add /MPAS-Analysis git@github.com:/MPAS-Analysis.git + +Make sure to replace ```` with your GitHub username. + + +3. Making a worktree +-------------------- + +To do your development, first make sure you are in the ``develop`` directory +within your base directory (e.g. ``mpas-analysis/develop``). Then, "fetch" and +changes that might have happened on the ``develop`` branch so you are using +the latest version as a starting point: + +.. code-block:: bash + + $ git fetch --all -p + +This will fetch all branches from both the main repository and your fork. It +will also prune (``-p``) any branches you might have deleted. 
+ +Then, make a worktree for developing your new feature: + +.. code-block:: bash + + $ git worktree add ../add_my_fancy_task + +The last argument (``add_my_fancy_task`` in this example) is both the name of +a directory within the base directory (``mpas-analysis``) and the name of the +branch you will be developing. + +Go into that directory to do your development: + +.. code-block:: bash + + $ cd ../add_my_fancy_task + +4. Making a conda environment +----------------------------- + +MPAS-Analysis relies on several packages that are only available as conda +packages from the ``conda-forge`` channel. The first step for running +MPAS-Analysis is to create a conda environment with all the needed packages. + +4.1 Installing Mambaforge +~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you have not yet installed Anaconda, Miniconda or Mambaforge, you will need +to begin there. The concept behind Anaconda is that just about everything you +would need for a typical python workflow is included. The concept behind +Miniconda and Mambaforge is that you create different environments for +different purposes. This allows for greater flexibility and tends to lead to +fewer conflicts between incompatible packages, particularly when using a +channel other than the ``defaults`` supplied by Anaconda. Since we will use +the ``conda-forge`` channel and the ``mamba`` tools to speed up installation, +the Mambaforge approach is strongly recommended. The main advantage of +Mambaforge over Miniconda is that it automatically takes care of a few steps +that we otherwise need to do manually. + +First download the +`Mambaforge installer `_ +for your operating system, then run it: + +.. code-block:: bash + + $ /bin/bash Mambaforge-Linux-x86_64.sh + +.. note:: + + MPAS-Analysis and many of the packages it depends on support OSX and Linux + but not Windows. + +If you are on an HPC system, you can still install Miniconda into your home +directory. Typically, you will need the Linux version. + +.. 
note:: + + At this time, we don't have experience with installing or running + MPAS-Analysis on ARM or Power8/9 architectures. + +You will be asked to agree to the terms and conditions. Type ``yes`` to +continue. + +You will be prompted with a location to install. In this tutorial, we assume +that Mambaforge is installed in the default location, ``~/mambaforge``. If +you are using Miniconda or chose to install Mambaforge somewhere else, just +make sure to make the appropriate substitution whenever you see a reference to +this path below. + +.. note:: + + On some HPC machines (particularly at LANL Institutional Computing and + NERSC) the space in your home directory is quite limited. You may want to + install Mambaforge in an alternative location to avoid running out of + space. + +You will see prompt like this: + +.. code-block:: + + Do you wish the installer to initialize Mambaforge + by running conda init? [yes|no] + [no] >>> + +You may wish to skip the step (answer ``no``) if you are working on a system +where you will also be using other conda environments, most notably +E3SM-Unified (which has its own Miniconda installation). If you do not run +conda init, you have to manually activate ``conda`` whenever you need it. +For ``bash`` and similar shells, this is: + +.. code-block:: bash + + $ source ~/mambaforge/etc/profile.d/conda.sh + $ conda activate + +If you use ``csh``, ``tcsh`` or related shells, this becomes: + +.. code-block:: csh + + > source ~/mambaforge/etc/profile.d/conda.csh + > conda activate + +You may wish to create an alias in your ``.bashrc`` or ``.cshrc`` to make +this easier. For example: + +.. 
code-block:: bash + + alias init_conda="source ~/mambaforge/etc/profile.d/conda.sh; conda activate" + + +4.2 One-time Miniconda setup +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you installed Miniconda, rather than Mambaforge, you will need to add the +`conda-forge channel `_ and make sure it always takes +precedence for packages available on that channel: + +.. code-block:: bash + + $ conda config --add channels conda-forge + $ conda config --set channel_priority strict + +Then, you will need to install the ``mamba`` package: + +.. code-block:: bash + + $ conda install -y mamba + +If you installed Mambaforge, these steps will happen automatically. + +4.3 Create a development environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can create a new conda environment called ``mpas_dev`` and install the +dependencies that MPAS-Analysis needs by running the following in the worktree +where you are doing your development: + +.. code-block:: bash + + $ mamba create -y -n mpas_dev --file dev-spec.txt "esmf=*=nompi_*" + +The last argument is only needed on HPC machines because the conda version of +MPI doesn't work properly on these machines. You can omit it if you're +setting up the conda environment on your laptop. + +Then, you can activate the environment and install MPAS-Analysis in "edit" +mode by running: + +.. code-block:: bash + + $ conda activate mpas_dev + $ python -m pip install -e . + +In this mode, any edits you make to the code in the worktree will be available +in the conda environment. If you run ``mpas_analysis`` on the command line, +it will know about the changes. + +.. note:: + + If you add or remove files in the code, you will need to re-install + MPAS-Analysis in the conda environment by rerunning + + .. code-block:: bash + + python -m pip install -e . + +.. 
_tutorial_dev_get_started_activ_env: + +4.4 Activating the environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Each time you open a new terminal window, to activate the ``mpas_dev`` +environment, you will need to run either for ``bash``: + +.. code-block:: bash + + $ source ~/mambaforge/etc/profile.d/conda.sh + $ conda activate mpas_dev + +or for ``csh``: + +.. code-block:: csh + + > source ~/mambaforge/etc/profile.d/conda.csh + > conda activate mpas_dev + +You can skip the ``source`` command if you chose to initialize Mambaforge or +Miniconda3 so it loads automatically. You can also use the ``init_conda`` +alias for this step if you defined one. + +4.5 Switching worktrees +~~~~~~~~~~~~~~~~~~~~~~~ + +If you switch to a different worktree, it is safest to rerun the whole +process for creating the ``mpas_dev`` conda environment. If you know that +the dependencies are the same as the worktree used to create ``mpas_dev``, +You can just reinstall ``mpas_analysis`` itself by rerunning + +.. code-block:: bash + + python -m pip install -e . + +in the new worktree. If you forget this step, you will find that changes you +make in the worktree don't affect the ``mpas_dev`` conda environment you are +using. + +5. Editing code +--------------- + +You may, of course, edit the MPAS-Analysis code using whatever tool you like. +I strongly recommend editing on your laptop and using +`PyCharm community edition `_ +to do the editing. PyCharm provides many features including flagging +deviations from preferred coding style guidelines known as +`PEP8 `_ and syntax error detection using +the ``mpas_dev`` conda environment you created. + +6. Running MPAS-Analysis on a laptop +------------------------------------ + +If you wish to run MPAS-Analysis on your laptop (or desktop machine), you will +need to follow steps 2-6 of the :ref:`tutorial_getting_started` tutorial. + +7. 
Running MPAS-Analysis on an E3SM supported machine +----------------------------------------------------- + +7.1 Configuring MPAS-Analysis +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +We configure MPAS-Analysis is with Python ``cfg`` (also called ``ini``) files: + +.. code-block:: ini + + [runs] + # mainRunName is a name that identifies the simulation being analyzed. + mainRunName = runName + + [execute] + ... + +The `default config file `_ +contains thousands of config options, which gives a lot of flexibility to +MPAS-Analysis but can be more than bit overwhelming to new users and +developers. + +The file `example_e3sm.cfg `_ +provides you with an example with some of the most common config options you +might need to change on an E3SM supported machine. If you specify the name of +the supported machine with the ``--machine`` (or ``-m``) flag when you call +``mpas_analysis``, there are several config options that will be set for you +automatically. + +First, you should copy this file to a new name for a specific run (say +``myrun.cfg``). Then, you should modify any config options you want to change +in your new config file. At a minimum, you need to specify: + +* ``mainRunName`` in ``[runs]``: A name for the run to be included plot titles + and legends (best if it's not super long) +* ``baseDirectory`` in ``[input]``: The directory for the simulation results + to analyze +* ``mpasMeshName`` in ``[input]``: The official name of the MPAS-Ocean and + -Seaice mesh +* ``baseDirectory`` in ``[output]``: The directory for the analysis results + +We will cover these and a few other common options in this tutorial. With the +exception of a few paths that you will need to provide, the config options +displayed below are the ones appropriate for the example E3SM simulation using +the QU480 MPAS mesh. + +7.1.1 [runs] +++++++++++++ + +The ``[runs]`` section contains options related to which E3SM simulation(s) are +being analyzed: + +.. 
code-block:: ini + + [runs] + ## options related to the run to be analyzed and control runs to be + ## compared against + + # mainRunName is a name that identifies the simulation being analyzed. + mainRunName = A_WCYCL1850.ne4_oQU480.anvil + +The ``mainRunName`` can be any useful name that will appear at the top of each +web page of the analysis output and in the legends or titles of the figures. +Often, this is the full name of the E3SM simulation but sometimes it is +convenient to have a shorter name. In this case, we use part of the run name +but leave off the date of the simulation to keep it a little shorter. + +7.1.2 [execute] ++++++++++++++++ + +The ``[execute]`` section contains options related to serial or parallel +execution of the individual "tasks" that make up an MPAS-Analysis run. For +the most part, you can let MPAS-Analysis take care of this on supported +machines. The exception is that, in a development conda environment, you will +be using a version of ESMF that cannot run in parallel so you will need the +following: + +.. code-block:: ini + + [execute] + ## options related to executing parallel tasks + + # the number of MPI tasks to use in creating mapping files (1 means tasks run in + # serial, the default) + mapMpiTasks = 1 + + # "None" if ESMF should perform mapping file generation in serial without a + # command, or one of "srun" or "mpirun" if it should be run in parallel (or in + # serial but with a command) + mapParallelExec = None + +If you are running into trouble with MPAS-Analysis, such as running out of +memory, you may want to explore other config options from this section. + +7.1.3 [input] ++++++++++++++ + +The ``[input]`` section provides paths to the E3SM simulation data and the name +of the MPAS-Ocean and MPAS-Seaice mesh. + +.. 
code-block:: ini + + [input] + ## options related to reading in the results to be analyzed + + # directory containing model results + baseDirectory = /lcrc/group/e3sm/ac.xylar/acme_scratch/anvil/20200305.A_WCYCL1850.ne4_oQU480.anvil + + # Note: an absolute path can be supplied for any of these subdirectories. + # A relative path is assumed to be relative to baseDirectory. + # In this example, results are assumed to be in /run + + # subdirectory containing restart files + runSubdirectory = run + # subdirectory for ocean history files + oceanHistorySubdirectory = archive/ocn/hist + # subdirectory for sea ice history files + seaIceHistorySubdirectory = archive/ice/hist + + # names of namelist and streams files, either a path relative to baseDirectory + # or an absolute path. + oceanNamelistFileName = run/mpaso_in + oceanStreamsFileName = run/streams.ocean + seaIceNamelistFileName = run/mpassi_in + seaIceStreamsFileName = run/streams.seaice + + # name of the ocean and sea-ice mesh (e.g. EC30to60E2r2, WC14to60E2r3, + # ECwISC30to60E2r1, SOwISC12to60E2r4, oQU240, etc.) + mpasMeshName = oQU480 + +The ``baseDirectory`` is the path for the E3SM simulation. Here are paths to +some very low resolution simulations you can use on various supported machines: + +Anvil or Chrysalis: + +.. code-block:: + + /lcrc/group/e3sm/ac.xylar/acme_scratch/anvil/20200305.A_WCYCL1850.ne4_oQU480.anvil + /lcrc/group/e3sm/ac.xylar/acme_scratch/anvil/20201025.GMPAS-IAF.T62_oQU240wLI.anvil + +Cori and Perlmutter: + +.. code-block:: + + /global/cfs/cdirs/e3sm/xylar/20200305.A_WCYCL1850.ne4_oQU480.anvil + +Compy: + +.. 
code-block:: + + /compyfs/asay932/analysis_testing/test_output/20200305.A_WCYCL1850.ne4_oQU480.anvil + + +The ``mpasMeshName`` is the official name of the MPAS-Ocean and -Seaice mesh +used in the simulation, which should be in the simulation name and must be a +directory on the +`inputdata `_ +server In this example, this is ``oQU480``, meaning the quasi-uniform 480-km +mesh for the ocean and sea ice. + +The ``runSubdirectory`` must contain valid MPAS-Ocean and MPAS-Seaice restart +files, used to get information about the MPAS mesh and the ocean vertical grid. + +The ``oceanHistorySubdirectory`` must contain MPAS-Ocean monthly mean output +files, typically named:: + + mpaso.hist.am.timeSeriesStatsMonthly.YYYY-MM-DD.nc + +Similarly, ``seaIceHistorySubdirectory`` contains the MPAS-Seaice monthly mean +output:: + + mpassi.hist.am.timeSeriesStatsMonthly.YYYY-MM-DD.nc + +In this example, we are using a run where short-term archiving has been used +so the output is not in the ``run`` directory. + +Finally, MPAS-Analysis needs a set of "namelists" and "streams" files that +provide information on the E3SM configuration for MPAS-Ocean and MPAS-Seaice, +and about the output files, respectively. These are typically also found in +the ``run`` directory. + +.. _tutorial_dev_get_started_config_output: + +7.1.4 [output] +++++++++++++++ + +The ``[output]`` section provides a path where the output from the analysis run +will be written, the option to output the results web pages to another +location, and a list of analysis to be generated (or explicitly skipped). + +.. code-block:: ini + + [output] + ## options related to writing out plots, intermediate cached data sets, logs, + ## etc. 
+ + # The subdirectory for the analysis and output on the web portal + subdir = ${runs:mainRunName}/clim_${climatology:startYear}-${climatology:endYear}_ts_${timeSeries:startYear}-${timeSeries:endYear} + + # directory where analysis should be written + # NOTE: This directory path must be specific to each test case. + baseDirectory = /lcrc/group/e3sm/${web_portal:username}/analysis/${output:subdir} + + # provide an absolute path to put HTML in an alternative location (e.g. a web + # portal) + htmlSubdirectory = ${web_portal:base_path}/${web_portal:username}/analysis/${output:subdir} + + # a list of analyses to generate. Valid names can be seen by running: + # mpas_analysis --list + # This command also lists tags for each analysis. + # Shortcuts exist to generate (or not generate) several types of analysis. + # These include: + # 'all' -- all analyses will be run + # 'all_publicObs' -- all analyses for which observations are available on the + # public server (the default) + # 'all_' -- all analysis with a particular tag will be run + # 'all_' -- all analyses from a given component (either 'ocean' + # or 'seaIce') will be run + # 'only_', 'only_' -- all analysis from this component or + # with this tag will be run, and all + # analysis for other components or + # without the tag will be skipped + # 'no_' -- skip the given task + # 'no_', 'no_' -- in analogy to 'all_*', skip all analysis + # tasks from the given component or with + # the given tag. Do + # mpas_analysis --list + # to list all task names and their tags + # an equivalent syntax can be used on the command line to override this + # option: + # mpas_analysis analysis.cfg --generate \ + # only_ocean,no_timeSeries,timeSeriesSST + generate = ['all', 'no_BGC', 'no_icebergs', 'no_index', 'no_eke', + 'no_landIceCavities'] + +In this example, I have made liberal use of +`extended interpolation `_ +in the config file to make use of config options in other config options. 
+ +``subdir`` is the subdirectory for both the analysis and the output on the +web portal. It typically indicates the run being used and the years covered +by the climatology (and sometimes the time series as in this example). See +:ref:`tutorial_dev_get_started_config_clim` for more info on these config +options. + +``baseDirectory`` is any convenient location for the output. In this example, +I have used a typical path on Anvil or Chrysalis, including the +``${web_portal:username}`` that will be populated automatically on a supported +machine and ``${output:subdir}``, the subdirectory from above. + +``htmlSubdirectory`` is set using the location of the web portal, which is +automatically determined on an E3SM machine, the user name, and the same +subdirectory used for analysis output. You can modify the path as needed to +match your own preferred workflow. + +.. note:: + + On some E3SM supported machines like Chicoma, there is no + web portal so you will want to just manually replace the part of the + ``basePath`` given by ``/lcrc/group/e3sm/${web_portal:username}`` in the + example above. + + You will need to just put the web output in an ``html`` subdirectory within + the analysis output: + + .. code-block:: ini + + htmlSubdirectory = html + + and copy this from the supercomputer to your laptop to view it in your + browser. + +Finally, the ``generate`` option provides a python list of flags that can be +used to determine which analysis will be generated. In this case, we are +turning off some analysis that will not work because some features +(biogeochemistry, icebergs, eddy kinetic energy and land-ice cavities) are not +available in this run and one (the El Niño climate index) is not useful. + +.. _tutorial_dev_get_started_config_clim: + +7.1.5. 
[climatology], [timeSeries] and [index] +++++++++++++++++++++++++++++++++++++++++++++++ + +These options determine the start and end years of climatologies (time averages +over a particular month, season or the full year), time series or the El Niño +climate index. + +.. code-block:: ini + + [climatology] + ## options related to producing climatologies, typically to compare against + ## observations and previous runs + + # the first year over which to average climatalogies + startYear = 3 + # the last year over which to average climatalogies + endYear = 5 + + [timeSeries] + ## options related to producing time series plots, often to compare against + ## observations and previous runs + + # start and end years for timeseries analysis. Out-of-bounds values will lead + # to an error. + startYear = 1 + endYear = 5 + + [index] + ## options related to producing nino index. + + # start and end years for El Nino 3.4 analysis. Out-of-bounds values will lead + # to an error. + startYear = 1 + endYear = 5 + +For each of these, options a full year of data must exist for that year to +be included in the analysis. + +For the example QU480 simulation, only 5 years of output are available, so we +are doing a climatology over the last 3 years (3 to 5) and displaying time +series over the full 5 years. (If the El Niño index weren't disabled, it would +also be displayed over the full 5 years.) + +7.2 Running MPAS-Analysis +~~~~~~~~~~~~~~~~~~~~~~~~~ + +The hard work is done. Now that we have a config file, we are ready to run. + +To run MPAS-Analysis, you should either create a job script or log into +an interactive session on a compute node. Then, activate the ``mpas_dev`` +conda environment as in :ref:`tutorial_dev_get_started_activ_env`. + +On many file systems, MPAS-Analysis and other python-based software that used +NetCDF files based on the HDF5 file structure can experience file access errors +unless the following environment variable is set as follows in bash: + +.. 
code-block:: bash + + $ export HDF5_USE_FILE_LOCKING=FALSE + +or under csh: + +.. code-block:: csh + + > setenv HDF5_USE_FILE_LOCKING FALSE + +Then, running MPAS-Analysis is as simple as: + +.. code-block:: bash + + $ mpas_analysis -m myrun.cfg + +where ```` is the name of the machine (all lowercase). On Cori, we +only support the Haswell nodes (so the machine name is ``cori-haswell``). For +now, we only support CPU nodes on Perlmutter (``pm-cpu``) and Chicoma +(``chicoma-cpu``). + +Typical output is the analysis is running correctly looks something like: + +.. code-block:: none + + $ mpas_analysis -m anvil myrun.cfg + Detected E3SM supported machine: anvil + Using the following config files: + /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/mpas_analysis/default.cfg + /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/lib/python3.10/site-packages/mache/machines/anvil.cfg + /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/mpas_analysis/configuration/anvil.cfg + /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/mpas_analysis/__main__.py + /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/myrun.cfg + copying /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/myrun.cfg to HTML dir. 
+ + running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp76l7of28/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp76l7of28/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_0.5x0.5degree_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --ignore_unmapped + running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpj94wpf9y/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpj94wpf9y/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_6000.0x6000.0km_10.0km_Antarctic_stereo_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped + running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp6zm13a0s/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp6zm13a0s/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_WOCE_transects_5km_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped + Preprocessing SOSE transect data... + temperature + salinity + potentialDensity + zonalVelocity + meridionalVelocity + velMag + Done. 
+ running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpe2a9yblb/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpe2a9yblb/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_SOSE_transects_5km_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped + + Running tasks: 100% |##########################################| Time: 0:06:42 + + Log files for executed tasks can be found in /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/logs + Total setup time: 0:02:13.78 + Total run time: 0:08:55.86 + Generating webpage for viewing results... + Web page: https://web.lcrc.anl.gov/public/e3sm/diagnostic_output/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5 + + +The first part of the output, before the progress bar, is the "setup" phase +where MPAS-Analysis is checking if the requested analysis can be run on the +simulation results. The specific output shown here is related to 1) +listing the config files used to determine the final set of config options +used in the analysis, and 2) creating mapping files that are used to +interpolate between the ``oQU480`` mesh and the various grids MPAS-Analysis +uses to compare with observations. Since MPAS-Analysis didn't know about that +``oQU480`` mesh ahead of time, it is creating mapping files and regions masks +for this mesh on the fly. + +The ``mpas_analysis`` command-line tool has several more options you can +explore with + +.. 
code-block:: bash + + $ mpas_analysis --help + +These include listing the available analysis tasks and their tags, purging a +previous analysis run before running the analysis again, plotting all available +color maps, and outputting verbose python error messages when the analysis +fails during the setup phase (before a progress bar appears). + +7.3 Viewing the Output +~~~~~~~~~~~~~~~~~~~~~~ + +The primary output from MPAS-Analysis is a set of web pages, each containing +galleries of figures. The output can be found in the directory you provided in +:ref:`tutorial_dev_get_started_config_output` and given in the last line of +the analysis output (if you are on a supported machine with a web portal), +e.g.: + +.. code-block:: none + + Web page: https://web.lcrc.anl.gov/public/e3sm/diagnostic_output/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5 + +.. note:: + + On Cori and Perlmutter, you will need to change the permissions so you can + see the webpage online: + + .. code-block:: bash + + $ chmod -R ugo+rX /global/cfs/cdirs/e3sm/www/<username> + + where ``<username>`` is your NERSC username. + +If the web page is incomplete, it presumably means there was an error during +the analysis run, since the web page is generated as the final step. Check +the analysis output and then the log files for individual analysis tasks to +see what went wrong. See :ref:`tutorial_getting_started_trouble` or ask for +help if you run into trouble. + +The main web page has links to the ocean and sea-ice web pages as well as some +"provenance" information about which version of MPAS-Analysis you were using +and how it was configured. + +The web page generated by this tutorial should look something like this +(somewhat outdated) +`example output `_. 
diff --git a/1.11.0rc1/_sources/tutorials/dev_understand_a_task.rst.txt b/1.11.0rc1/_sources/tutorials/dev_understand_a_task.rst.txt new file mode 100644 index 000000000..508931b20 --- /dev/null +++ b/1.11.0rc1/_sources/tutorials/dev_understand_a_task.rst.txt @@ -0,0 +1,1193 @@ +.. _tutorial_understand_a_task: + +Developers: Understanding an analysis task +========================================== + +This tutorial walks a new developer through an existing analysis task to get +a more in-depth understanding of the code. This tutorial is meant as the +starting point for the :ref:`tutorial_dev_add_task` tutorial. It is a common +practice to find an existing analysis task that is as close as possible to the +new analysis, and to copy that existing task as a template for the new task. +This tutorial describes an existing analysis task, and +:ref:`tutorial_dev_add_task` uses it as a starting point for developing a new +task. + +1. The big picture +------------------ + +MPAS-Analysis is meant to provide a first look at E3SM simulation output from +the MPAS-Ocean and MPAS-Seaice components. The analysis is intended to be +robust and automated. However, there is currently little effort to ensure that +the time period covered by the observations and model output are the same. +In other words, we often compare pre-industrial simulation results with +present-day observations. The justification for this is twofold. First, we +typically have few if any observations covering the pre-industrial period. +Second, we may be attempting to reduce biases that we assess to be much larger +than expected differences between pre-industrial and present-day climate +conditions. Under these conditions, MPAS-Analysis provides us with a useful +first impression of how our simulation is doing. + +1.1 MPAS output +~~~~~~~~~~~~~~~ + +The primary output from MPAS-Ocean and MPAS-Seaice are monthly and daily +averages of a large number of data fields. 
Here are links to the list of: + +* `MPAS-Ocean monthly fields `_ + +* `MPAS-Ocean daily fields `_ + +* `MPAS-Seaice monthly fields `_ + +* `MPAS-Seaice daily fields `_ + +The components also produce a smaller amount of more specialized output, such +as monthly maximum/minimum values. + +MPAS data is provided on unstructured meshes, meaning that it isn't +particularly amenable to analysis with standard tools such as +`ESMValTool `_. Additionally, E3SM's science +campaigns require unique, sometimes regionally focused analysis not available +in existing tools. + +1.2 Analysis tasks +~~~~~~~~~~~~~~~~~~ + +MPAS-Analysis is designed to run a series of interdependent analysis tasks in +parallel with one another. It builds up a dependency graph between the tasks, +allowing independent tasks to run at the same time while putting dependent +tasks on hold until the tasks they depend on are completed. Additionally, +MPAS-Analysis has some rudimentary tools for keeping track of the resources +that some computationally intensive tasks require to prevent the tool from +running out of memory. + +Currently, nearly all operations in MPAS-Analysis must run on a single HPC +node. (The exception is +`ncclimo `_, +which is used to generate climatologies, and which can run in parallel across +up to 12 nodes if desired.) We hope to support broader task parallelism in +the not-too-distant future using the `parsl `_ +python package. + +Each analysis task is a class that descends from the +:py:class:`~mpas_analysis.shared.AnalysisTask` base class. Tasks +can also have "subtasks" that do part of the work needed for the final +analysis. A subtask might perform a computation on a specific region, period +of time, or season. It might combine data from other subtasks into a single +dataset. Or it might plot the data computed by a previous task. 
The +advantages of dividing up the work are 1) that each subtask can potentially +run in parallel with other subtasks and 2) it can allow code reuse if the same +subtask can be used across multiple analysis tasks. + + +1.3 Shared framework +~~~~~~~~~~~~~~~~~~~~ + +MPAS-Analysis includes a shared framework used across analysis tasks. The +framework is made up mostly of functions that can be called from within +analysis tasks but also includes some analysis tasks and subtasks that are +common to MPAS-Ocean, MPAS-Seaice and potentially other MPAS components +(notably MALI) that may be supported in the future. + +This tutorial will not go through the shared framework in detail. In addition +to the :py:class:`~mpas_analysis.shared.AnalysisTask` base class, the shared +framework includes the following +packages: + +.. code-block:: none + + $ ls mpas_analysis/shared + climatology + constants + generalized_reader + html + interpolation + io + mpas_xarray + plot + projection + regions + time_series + timekeeping + transects + ... + +A separate tutorial will explore the shared framework and how to modify it. + +2. Tour of an analysis task (``ClimatologyMapOHCAnomaly``) +---------------------------------------------------------- + +Aside from some code that takes care of managing analysis tasks and generating +web pages, MPAS-Analysis is made up almost entirely of analysis tasks and +shared functions they can call. Since adding new analysis nearly always +means creating a new class for the task, we start with a tour of an existing +analysis task as well as the :py:class:`~mpas_analysis.shared.AnalysisTask` +base class that it descends from. + +We will use :py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly` as an +example analysis task for this tour because it will turn out to be a useful +starting point for the analysis we want to add in :ref:`tutorial_dev_add_task`. +You can read more about :ref:`task_climatologyMapOHCAnomaly` in the User's +Guide. 
+ +It will be useful to open the following links in your browser to have a look +at the code directly: +`ClimatologyMapOHCAnomaly `_ + +.. + To do: switch the previous URL to https://github.com/MPAS-Dev/MPAS-Analysis/blob/develop + +If you want to be a little more adventurous, you can also pull up the code +for the base class: +`AnalysisTask `_ + +2.1 Attributes +~~~~~~~~~~~~~~ + +Classes can contain pieces of data called attributes. In MPAS-Analysis, the +objects representing tasks share several attributes that they inherit from +the :py:class:`~mpas_analysis.shared.AnalysisTask` class. A few of the most +important attributes of an analysis task are: + +* ``config`` - an object for getting the values of config options + +* ``namelist`` - an object for getting namelist options from the E3SM + simulation + +* ``runStreams`` - an object for finding MPAS output files in the ``run`` + directory. In practice, this is always a restart file used to get the + MPAS mesh and, for MPAS-Ocean, the vertical coordinate. + +* ``historyStreams`` - an object for finding MPAS history streams (often + ``timeSeriesStatsMonthlyOutput``). + +* ``calendar`` - the name of the calendar that was used in the MPAS run + (in practice always ``'noleap'`` or until recently ``'gregorian_noleap'``). + +* ``xmlFileNames`` - a list of XML files associated with plots produced by this + analysis task. As we will discuss, these are used to help populate the + web page showing the analysis. + +* ``logger`` - an object that keeps track of sending output to log files + (rather than the terminal) when the analysis is running. During the + ``run_task()`` phase of the analysis when tasks are running in parallel with + each other, make sure to use ``logger.info()`` instead of ``print()`` to + send output to the log file. + +Within the methods of an analysis task class, these attributes can be accessed +using the ``self`` object, e.g. ``self.config``. 
It is often helpful to make +a local reference to the object to make the code more compact, e.g.: + +.. code-block:: python + + config = self.config + seasons = config.getexpression('climatologyMapOHCAnomaly', 'seasons') + +The analysis task we're looking at, :py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly` +has some attributes of its own: + +* ``mpasClimatologyTask`` - the task that produced the climatology to be + remapped and plotted + +* ``refYearClimatologyTask`` - The task that produced the climatology from the + first year to be remapped and then subtracted from the main climatology + (since we want to plot an anomaly from the beginning of the simulation) + +2.2 Constructor +~~~~~~~~~~~~~~~ + +Almost all classes have "constructors", which are methods for making a new +object of that class. In python, the constructor is called ``__init__()``. +In general, the ``__`` (double underscore) is used in python to indicate a +function or method with special meaning. + +The constructor of a subclass (such as +:py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly`) always calls the +constructor of the superclass (:py:class:`~mpas_analysis.shared.AnalysisTask` +in this case). So we'll talk about the constructor for +:py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly` first and then get +to :py:class:`~mpas_analysis.shared.AnalysisTask`. + +The constructor for :py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly` +starts off like this: + +.. code-block:: python + + def __init__(self, config, mpas_climatology_task, + ref_year_climatology_task, control_config=None): + +As with all methods, it takes the ``self`` object as the first argument. +Then, it takes a ``config`` object, which is true of all analysis tasks. Then, +it has some other arguments that are more specific to the analysis being +performed. Here, we have 2 other analysis tasks as arguments: +``mpasClimatologyTask`` and ``refYearClimatologyTask``. 
As described in +the previous section, these are tasks for computing climatologies that will +later be remapped to a comparison grid for plotting. A little later in the +constructor, we store references to these tasks as attributes: + +.. code-block:: python + + self.mpas_climatology_task = mpas_climatology_task + self.ref_year_climatology_task = ref_year_climatology_task + +Returning to the constructor above, the first thing we do is to call the +super class's ``__init__()`` method: + +.. code-block:: python + + def __init__(self, config, mpas_climatology_task, + ref_year_climatology_task, control_config=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped and plotted + + ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask + The task that produced the climatology from the first year to be + remapped and then subtracted from the main climatology + + control_config : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + + field_name = 'deltaOHC' + # call the constructor from the base class (AnalysisTask) + super().__init__(config=config, taskName='climatologyMapOHCAnomaly', + componentName='ocean', + tags=['climatology', 'horizontalMap', field_name, + 'publicObs', 'anomaly']) + +We're passing along the ``config`` options to the base class so it can store +them. Then, we're giving the task a unique ``taskName`` (the same as the class +name except that it starts with a lowercase letter). We're saying that the +MPAS ``componentName`` is the ocean. + +Then, we give the task a number of ``tags`` that can be helpful in +determining whether or not to generate this particular analysis based on the +:ref:`config_generate`. 
The tags are used to describe various aspects of the +analysis. Here, we will produce plots of a ``climatology`` (as opposed to a +time series). The plot will be a ``horizontalMap``. It will involve the +variable ``deltaOHC``. This analysis doesn't involve any observations, but we +include a ``publicObs`` tag to indicate that it doesn't require any proprietary +observational data sets that we do not have the rights to make public. +(Currently, we have a few such data sets for things like Antarctic melt rates.) +Finally, the analysis involves an ``anomaly`` computed relative to the +beginning of the simulation. + +From there, we get the values of some config options, raising errors if we +find something unexpected: + +.. code-block:: python + + section_name = self.taskName + + # read in what seasons we want to plot + seasons = config.getexpression(section_name, 'seasons') + + if len(seasons) == 0: + raise ValueError(f'config section {section_name} does not contain ' + f'valid list of seasons') + + comparison_grid_names = config.getexpression(section_name, + 'comparisonGrids') + + if len(comparison_grid_names) == 0: + raise ValueError(f'config section {section_name} does not contain ' + f'valid list of comparison grids') + + depth_ranges = config.getexpression('climatologyMapOHCAnomaly', + 'depthRanges', + use_numpyfunc=True) + +By default, these config options look like this: + +.. code-block:: ini + + [climatologyMapOHCAnomaly] + ## options related to plotting horizontally remapped climatologies of + ## ocean heat content (OHC) against control model results (if available) + + ... + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + # A list of pairs of minimum and maximum depths (positive up, in meters) to + # include in the vertical sums. 
The default values are the equivalents of the + # default ranges of the timeSeriesOHCAnomaly task, with a value of -10,000 m + # intended to be well below the bottom of the ocean for all existing MPAS-O + # meshes. + depthRanges = [(0.0, -10000.0), (0.0, -700.0), (-700.0, -2000.0), (-2000.0, -10000.0)] + +We plot only the annual mean OHC anomaly and we plot it only on a global +latitude-longitude grid. The range of depths is: + +* the full ocean column + +* sea surface to 700 m depth + +* 700 m to 2000 m depth + +* 2000 m to the seafloor + +A user would be free to change any of these config options, and the analysis +should run correctly. They could choose to plot on a different comparison +grid, add new seasons, or change the depth range. As long as they ran the +analysis in a fresh directory (or purged output from a previous analysis run), +this should work correctly. + +Next, we store some values that will be useful later: + +.. code-block:: python + + mpas_field_name = 'deltaOHC' + + variable_list = ['timeMonthly_avg_activeTracers_temperature', + 'timeMonthly_avg_layerThickness'] + +This particular analysis involves 4 different depth ranges over which we +compute the ocean heat content. The remainder of the analysis is performed +separately for each of these depth ranges in subtask. We loop over the +depth range and add a subtask that will first compute the ocean heat content +(OHC) and then remap it to the comparison grids (``RemapMpasOHCClimatology``): + +.. 
code-block:: python + + for min_depth, max_depth in depth_ranges: + depth_range_string = \ + f'{np.abs(min_depth):g}-{np.abs(max_depth):g}m' + remap_climatology_subtask = RemapMpasOHCClimatology( + mpas_climatology_task=mpas_climatology_task, + ref_year_climatology_task=ref_year_climatology_task, + parent_task=self, + climatology_name=f'{field_name}_{depth_range_string}', + variable_list=variable_list, + comparison_grid_names=comparison_grid_names, + seasons=seasons, + min_depth=min_depth, + max_depth=max_depth) + + self.add_subtask(remap_climatology_subtask) + + ... + +We will explore the ``RemapMpasOHCClimatology`` subtask later in the tutorial +so we will not discuss it further here. + +Still within the loop over depth range, we then add a subtask +(``PlotClimatologyMapSubtask``) for plot we want to create, one for each each +comparison grid and season. (By default, there is only one comparison grid +and one "season": the full year, ``ANN``.) + +.. code-block:: python + + for min_depth, max_depth in depth_ranges: + ... 
+ out_file_label = f'deltaOHC_{depth_range_string}' + remap_observations_subtask = None + if control_config is None: + ref_title_label = None + ref_field_name = None + diff_title_label = 'Model - Observations' + + else: + control_run_name = control_config.get('runs', 'mainRunName') + ref_title_label = f'Control: {control_run_name}' + ref_field_name = mpas_field_name + diff_title_label = 'Main - Control' + + for comparison_grid_name in comparison_grid_names: + for season in seasons: + # make a new subtask for this season and comparison grid + subtask_name = f'plot{season}_{comparison_grid_name}_{depth_range_string}' + + subtask = PlotClimatologyMapSubtask( + self, season, comparison_grid_name, + remap_climatology_subtask, remap_observations_subtask, + controlConfig=control_config, subtaskName=subtask_name) + + subtask.set_plot_info( + outFileLabel=out_file_label, + fieldNameInTitle=f'$\\Delta$OHC over {depth_range_string}', + mpasFieldName=mpas_field_name, + refFieldName=ref_field_name, + refTitleLabel=ref_title_label, + diffTitleLabel=diff_title_label, + unitsLabel=r'GJ m$^{-2}$', + imageCaption=f'Anomaly in Ocean Heat Content over {depth_range_string}', + galleryGroup='OHC Anomaly', + groupSubtitle=None, + groupLink='ohc_anom', + galleryName=None) + + self.add_subtask(subtask) + +First, we make sure the subtask has a unique name. If two tasks or subtasks +have the same ``taskName`` and ``subtaskName``, MPAS-Analysis will only run +the last one and the task manager may become confused. + +Then, we create a ``subtask`` object that is an instance of the +:py:class:`~mpas_analysis.ocean.plot_climatology_map_subtask.PlotClimatologyMapSubtask` +class. This class is shared between several ocean analysis tasks for plotting +climatologies as horizontal maps. It can plot just MPAS output, remapped to +one or more comparison grids and averaged over one or more seasons. 
It can +also plot that data against an observational field that has been remapped to +the same comparison grid and averaged over the same seasons. In this case, +there are no observations available for comparison +(``remap_observations_subtask = None``). A user may have provided a +"control" run of MPAS-Analysis to compare with this analysis run (a so-called +"model vs. model" comparison). If so, ``control_config`` will have config +options describing the other analysis run. If not, ``control_config`` is +``None``. + +Next, we call the +:py:meth:`~mpas_analysis.ocean.plot_climatology_map_subtask.PlotClimatologyMapSubtask.set_plot_info` +method of :py:class:`~mpas_analysis.ocean.plot_climatology_map_subtask.PlotClimatologyMapSubtask` +to provide things like the title and units for the plot and the field to plot. +We also provide information needed for the final analysis web page such as the +name of the gallery group. (We do not provide a gallery name within the +gallery group because there will be no other galleries within this group.) +All the plots for a given comparison grid will end up in the same gallery, +with different depths and seasons one after the other. + +Finally, we call :py:meth:`~mpas_analysis.shared.AnalysisTask.add_subtask()` +to add the ``subtask`` to this task. + +2.3 ``setup_and_check()`` method +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The ``setup_and_check()`` method of an analysis task is called when it is clear +that this particular analysis has been requested (but before the analysis is +actually ready to run). This is in contrast to the constructor, which is +run for *every* analysis task every time MPAS-Analysis runs because we need +information from the analysis task (its name, component and tags) in order to +determine if it should run or not. + +In this method, we would typically perform checks to make sure the simulation +has been configured properly to run the analysis. For example, is the +necessary analysis member enabled? + +.. 
code-block:: python + + def setup_and_check(self): + """ + Checks whether analysis is being performed only on the reference year, + in which case the analysis will not be meaningful. + + Raises + ------ + ValueError: if attempting to analyze only the reference year + """ + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super().setup_and_check() + + start_year, end_year = self.mpas_climatology_task.get_start_and_end() + ref_start_year, ref_end_year = \ + self.ref_year_climatology_task.get_start_and_end() + + if (start_year == ref_start_year) and (end_year == ref_end_year): + raise ValueError('OHC Anomaly is not meaningful and will not work ' + 'when climatology and ref year are the same.') + +In this particular case, we first call the super class' version of the +:py:meth:`~mpas_analysis.shared.AnalysisTask.setup_and_check()` method. This +takes care of some important setup. + +Then, we use this method to check if the user has specified meaningful values +for the climatology start and end year and the reference year. If they happen +to be the same, it doesn't really make sense to run the analysis and it will +raise an error so the analysis gets skipped. + +The ``ClimatologyMapOHCAnomaly`` has delegated all its work to its subtasks +so it doesn't define a ``run_task()`` method. Tasks or subtasks that actually +do the work typically need to define this method, as we will explore below. + +3. Tour of a subtask (``RemapMpasOHCClimatology``) +-------------------------------------------------- + +The class ``RemapMpasOHCClimatology`` is, in some ways, more complicated than +its "parent" task :py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly`. 
+It descends not from the :py:class:`~mpas_analysis.shared.AnalysisTask` base +class but from another subtask, +:py:class:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask`. +This tutorial won't attempt to cover +:py:class:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask` in +all its detail. The basics are that that class starts with MPAS climatology +data over one or more ``seasons`` that has previously been computed by an +:py:class:`~mpas_analysis.shared.climatology.MpasClimatologyTask` task. It +remaps that data from the MPAS mesh to one or more comparison grids (e.g. +global latitude-longitude or Antarctic stereographic) where it can be plotted +and compared with observations or another MPAS-Analysis run. + +Here, we are not just using +:py:class:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask` +directly because we need to add to its functionality. We need to compute the +OHC, which is not available straight from MPAS-Ocean output, from the +monthly-mean temperature and layer thickness. + +3.1 Attributes +~~~~~~~~~~~~~~ + +The docstring indicates the attributes that ``RemapMpasOHCClimatology`` +includes. (It also has all the attributes of its super class, +:py:class:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask`, +and that class' super class, :py:class:`~mpas_analysis.shared.AnalysisTask`, +but we don't redundantly document these in the docstring in part because that +would be a maintenance nightmare.) + +.. 
code-block:: python + + class RemapMpasOHCClimatology(RemapMpasClimatologySubtask): + """ + A subtask for computing climatologies of ocean heat content from + climatologies of temperature + + Attributes + ---------- + ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask + The task that produced the climatology from the first year to be + remapped and then subtracted from the main climatology + + min_depth, max_depth : float + The minimum and maximum depths for integration + """ + +The attributes are a task for computing the climatology over the reference +year (usually the start of the simulation), ``ref_year_climatology_task``, +and the minimum and maximum depth over which the ocean heat content will be +integrated. + + +3.2 Constructor +~~~~~~~~~~~~~~~ + +.. code-block:: python + + def __init__(self, mpas_climatology_task, ref_year_climatology_task, + parent_task, climatology_name, variable_list, seasons, + comparison_grid_names, min_depth, max_depth): + + """ + Construct the analysis task and adds it as a subtask of the + ``parent_task``. + + Parameters + ---------- + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped + + ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask + The task that produced the climatology from the first year to be + remapped and then subtracted from the main climatology + + parent_task : mpas_analysis.shared.AnalysisTask + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + climatology_name : str + A name that describes the climatology (e.g. 
a short version of + the important field(s) in the climatology) used to name the + subdirectories for each stage of the climatology + + variable_list : list of str + A list of variable names in ``timeSeriesStatsMonthly`` to be + included in the climatologies + + seasons : list of str, optional + A list of seasons (keys in ``shared.constants.monthDictionary``) + to be computed or ['none'] (not ``None``) if only monthly + climatologies are needed. + + comparison_grid_names : list of {'latlon', 'antarctic'} + The name(s) of the comparison grid to use for remapping. + + min_depth, max_depth : float + The minimum and maximum depths for integration + """ + + depth_range_string = f'{np.abs(min_depth):g}-{np.abs(max_depth):g}m' + subtask_name = f'remapMpasClimatology_{depth_range_string}' + # call the constructor from the base class + # (RemapMpasClimatologySubtask) + super().__init__( + mpas_climatology_task, parent_task, climatology_name, + variable_list, seasons, comparison_grid_names, + subtaskName=subtask_name) + + self.ref_year_climatology_task = ref_year_climatology_task + self.run_after(ref_year_climatology_task) + self.min_depth = min_depth + self.max_depth = max_depth + +Most of the arguments to the constructor are passed along to the constructor +of :py:class:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask`. +These include a reference to the class for computing MPAS climatologies +(used to find the input files and to make sure this task waits until that +task is finished), a reference to the "parent" +:py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly` task for some of its +attributes, the name of the climatology supplied by the parent (something like +``deltaOHC_0-700m``, depending on the depth range), a list of the variables +that go into computing the OHC, the season(s) over which the climatology was +requested, the comparison grid(s) to plot on and a unique name for this +subtask. 
+ +The ``ref_year_climatology_task`` that computes the climatology over the +reference year is retained as an attribute of the class along with +the depth range. These attributes will all be needed later when we compute the +OHC. We indicate that this task must wait for the reference climatology to be +available by calling the :py:meth:`~mpas_analysis.shared.AnalysisTask.run_after()`. +The super class will do the same for the ``mpas_climatology_task`` task. It +will also add this task as a subtask of the parent task. + +3.3 ``setup_and_check()`` method +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +As in the parent task, we need to define the ``setup_and_check()`` method. + +.. code-block:: python + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. + """ + + # first, call setup_and_check from the base class + # (RemapMpasClimatologySubtask), which will set up remappers and add + # variables to mpas_climatology_task + super().setup_and_check() + + # don't add the variables and seasons to mpas_climatology_task until + # we're sure this subtask is supposed to run + self.ref_year_climatology_task.add_variables(self.variableList, + self.seasons) + +In this particular case, we first call the super class' version of the +:py:meth:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check()` +method. This takes care of some important setup including adding the variables +and season(s) we need to the ``mpas_climatology_task``. + +Then, we use this method to add variables we need +and the requested season(s) to the task for computing the climatology over the +reference year (``ref_year_climatology_task``). We don't do this in the +constructor because if we did, we would always be asking for the variables +needed to compute the OHC even if we don't actually end up computing it. This +could be a big waste of time and disk space. 
The super class +:py:class:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask` can't +take care of this for us because it isn't designed for computing anomalies, +just "normal" climatologies over a range of years. + +.. _tutorial_understand_a_task_subtask_run_task: + +3.4 ``run_task()`` method +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Normally, the main work of a task happens in the ``run_task()`` method. +The ``RemapMpasOHCClimatology`` class doesn't define this method because it is +happy to inherit the +:py:meth:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task()` +method from its super class, +:py:class:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask`. + +An abbreviated version of that method looks like this: + +.. code-block:: python + + def run_task(self): + """ + Compute the requested climatologies + """ + ... + for season in self.seasons: + self._mask_climatologies(season, dsMask) + ... + +It calls a private helper method: + +.. code-block:: python + + def _mask_climatologies(self, season, dsMask): + """ + For each season, creates a masked version of the climatology + """ + ... + if not os.path.exists(maskedClimatologyFileName): + ... + + # customize (if this function has been overridden) + climatology = self.customize_masked_climatology(climatology, + season) + + write_netcdf(climatology, maskedClimatologyFileName) + +This private method (the leading underscore indicates that it is private), in +turn, calls the ``customize_masked_climatology()`` method, which is our chance +to make changes to the climatology before it gets remapped. That's where +we will actually compute the OHC from variables available from MPAS output. + +3.5 ``customize_masked_climatology()`` method +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Here is how we compute the OHC itself: + +.. 
code-block:: python + + def customize_masked_climatology(self, climatology, season): + """ + Compute the ocean heat content (OHC) anomaly from the temperature + and layer thickness fields. + + Parameters + ---------- + climatology : xarray.Dataset + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : xarray.Dataset + the modified climatology data set + """ + + ohc = self._compute_ohc(climatology) + + ... + +We call a private helper method to do the actual work, so let's take a look +at that before we continue with ``customize_masked_climatology()``. + +.. code-block:: python + + def _compute_ohc(self, climatology): + """ + Compute the OHC from the temperature and layer thicknesses in a given + climatology data sets. + """ + ds_restart = xr.open_dataset(self.restartFileName) + ds_restart = ds_restart.isel(Time=0) + + # specific heat [J/(kg*degC)] + cp = self.namelist.getfloat('config_specific_heat_sea_water') + # [kg/m3] + rho = self.namelist.getfloat('config_density0') + + units_scale_factor = 1e-9 + + n_vert_levels = ds_restart.sizes['nVertLevels'] + + z_mid = compute_zmid(ds_restart.bottomDepth, ds_restart.maxLevelCell-1, + ds_restart.layerThickness) + + vert_index = xr.DataArray.from_dict( + {'dims': ('nVertLevels',), 'data': np.arange(n_vert_levels)}) + + temperature = climatology['timeMonthly_avg_activeTracers_temperature'] + layer_thickness = climatology['timeMonthly_avg_layerThickness'] + + masks = [vert_index < ds_restart.maxLevelCell, + z_mid <= self.min_depth, + z_mid >= self.max_depth] + for mask in masks: + temperature = temperature.where(mask) + layer_thickness = layer_thickness.where(mask) + + ohc = units_scale_factor * rho * cp * layer_thickness * temperature + ohc = ohc.sum(dim='nVertLevels') + return ohc + +This function uses a combination of mesh information taken from an MPAS +restart file (available from the ``self.restartFileName`` attribute inherited +from 
:py:class:`~mpas_analysis.shared.climatology.RemapMpasClimatologySubtask`), +namelist options available from the ``self.namelist`` reader (inherited from +:py:class:`~mpas_analysis.shared.AnalysisTask`), and ``temperature`` and +``layer_thickness`` from the ``climatology`` dataset itself. As the +docstring for ``customize_masked_climatology()`` states, ``climatology`` is +an :py:class:`xarray.Dataset`. We know it has variables +``timeMonthly_avg_activeTracers_temperature`` and +``timeMonthly_avg_layerThickness`` because we requested them back in the +constructor of :py:class:`~mpas_analysis.ocean.ClimatologyMapOHCAnomaly`. +We compute the ``ohc`` as an :py:class:`xarray.DataArray` that we return from +this helper method. + +Back to ``customize_masked_climatology()``, we have: + +.. code-block:: python + + def customize_masked_climatology(self, climatology, season): + ... + ohc = self._compute_ohc(climatology) + + ref_file_name = self.ref_year_climatology_task.get_file_name(season) + ref_year_climo = xr.open_dataset(ref_file_name) + if 'Time' in ref_year_climo.dims: + ref_year_climo = ref_year_climo.isel(Time=0) + ref_ohc = self._compute_ohc(ref_year_climo) + + climatology['deltaOHC'] = ohc - ref_ohc + climatology.deltaOHC.attrs['units'] = 'GJ m^-2' + start_year = self.ref_year_climatology_task.startYear + climatology.deltaOHC.attrs['description'] = \ + f'Anomaly from year {start_year} in ocean heat content' + climatology = climatology.drop_vars(self.variableList) + + return climatology + +We use the same helper function to compute the ``ref_ohc`` using the +climatology for the reference year. Then, we compute the anomaly (the +difference between these two, ``deltaOHC``) and we add some attributes, +``units`` and ``description``, to make the NetCDF output that will go into the +analysis output directory a little more useful. + +4. 
The full code for posterity +------------------------------ + +Since the ``ClimatologyMapOHCAnomaly`` analysis task may evolve in the future, +here is the full analysis task as described in this tutorial: + +.. code-block:: python + + # This software is open source software available under the BSD-3 license. + # + # Copyright (c) 2022 Triad National Security, LLC. All rights reserved. + # Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights + # reserved. + # Copyright (c) 2022 UT-Battelle, LLC. All rights reserved. + # + # Additional copyright and license information can be found in the LICENSE file + # distributed with this code, or at + # https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE + import xarray as xr + import numpy as np + + from mpas_analysis.shared import AnalysisTask + from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask + from mpas_analysis.ocean.plot_climatology_map_subtask import \ + PlotClimatologyMapSubtask + from mpas_analysis.ocean.utility import compute_zmid + + + class ClimatologyMapOHCAnomaly(AnalysisTask): + """ + An analysis task for comparison of the anomaly from a reference year + (typically the start of the simulation) of ocean heat content (OHC) + + Attributes + ---------- + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped and plotted + + ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask + The task that produced the climatology from the first year to be + remapped and then subtracted from the main climatology + """ + + def __init__(self, config, mpas_climatology_task, + ref_year_climatology_task, control_config=None): + """ + Construct the analysis task. 
+ + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped and plotted + + ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask + The task that produced the climatology from the first year to be + remapped and then subtracted from the main climatology + + control_config : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + + field_name = 'deltaOHC' + # call the constructor from the base class (AnalysisTask) + super().__init__(config=config, taskName='climatologyMapOHCAnomaly', + componentName='ocean', + tags=['climatology', 'horizontalMap', field_name, + 'publicObs', 'anomaly']) + + self.mpas_climatology_task = mpas_climatology_task + self.ref_year_climatology_task = ref_year_climatology_task + + section_name = self.taskName + + # read in what seasons we want to plot + seasons = config.getexpression(section_name, 'seasons') + + if len(seasons) == 0: + raise ValueError(f'config section {section_name} does not contain ' + f'valid list of seasons') + + comparison_grid_names = config.getexpression(section_name, + 'comparisonGrids') + + if len(comparison_grid_names) == 0: + raise ValueError(f'config section {section_name} does not contain ' + f'valid list of comparison grids') + + depth_ranges = config.getexpression('climatologyMapOHCAnomaly', + 'depthRanges', + use_numpyfunc=True) + + mpas_field_name = 'deltaOHC' + + variable_list = ['timeMonthly_avg_activeTracers_temperature', + 'timeMonthly_avg_layerThickness'] + + for min_depth, max_depth in depth_ranges: + depth_range_string = \ + f'{np.abs(min_depth):g}-{np.abs(max_depth):g}m' + remap_climatology_subtask = RemapMpasOHCClimatology( + mpas_climatology_task=mpas_climatology_task, + ref_year_climatology_task=ref_year_climatology_task, + parent_task=self, + 
climatology_name=f'{field_name}_{depth_range_string}', + variable_list=variable_list, + comparison_grid_names=comparison_grid_names, + seasons=seasons, + min_depth=min_depth, + max_depth=max_depth) + + self.add_subtask(remap_climatology_subtask) + + out_file_label = f'deltaOHC_{depth_range_string}' + remap_observations_subtask = None + if control_config is None: + ref_title_label = None + ref_field_name = None + diff_title_label = 'Model - Observations' + + else: + control_run_name = control_config.get('runs', 'mainRunName') + ref_title_label = f'Control: {control_run_name}' + ref_field_name = mpas_field_name + diff_title_label = 'Main - Control' + + for comparison_grid_name in comparison_grid_names: + for season in seasons: + # make a new subtask for this season and comparison grid + subtask_name = f'plot{season}_{comparison_grid_name}_{depth_range_string}' + + subtask = PlotClimatologyMapSubtask( + self, season, comparison_grid_name, + remap_climatology_subtask, remap_observations_subtask, + controlConfig=control_config, subtaskName=subtask_name) + + subtask.set_plot_info( + outFileLabel=out_file_label, + fieldNameInTitle=f'$\\Delta$OHC over {depth_range_string}', + mpasFieldName=mpas_field_name, + refFieldName=ref_field_name, + refTitleLabel=ref_title_label, + diffTitleLabel=diff_title_label, + unitsLabel=r'GJ m$^{-2}$', + imageCaption=f'Anomaly in Ocean Heat Content over {depth_range_string}', + galleryGroup='OHC Anomaly', + groupSubtitle=None, + groupLink='ohc_anom', + galleryName=None) + + self.add_subtask(subtask) + + def setup_and_check(self): + """ + Checks whether analysis is being performed only on the reference year, + in which case the analysis will not be meaningful. 
+ + Raises + ------ + ValueError: if attempting to analyze only the reference year + """ + + # first, call setup_and_check from the base class (AnalysisTask), + # which will perform some common setup, including storing: + # self.runDirectory , self.historyDirectory, self.plotsDirectory, + # self.namelist, self.runStreams, self.historyStreams, + # self.calendar + super().setup_and_check() + + start_year, end_year = self.mpas_climatology_task.get_start_and_end() + ref_start_year, ref_end_year = \ + self.ref_year_climatology_task.get_start_and_end() + + if (start_year == ref_start_year) and (end_year == ref_end_year): + raise ValueError('OHC Anomaly is not meaningful and will not work ' + 'when climatology and ref year are the same.') + + + class RemapMpasOHCClimatology(RemapMpasClimatologySubtask): + """ + A subtask for computing climatologies of ocean heat content from + climatologies of temperature + + Attributes + ---------- + ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask + The task that produced the climatology from the first year to be + remapped and then subtracted from the main climatology + + min_depth, max_depth : float + The minimum and maximum depths for integration + """ + + def __init__(self, mpas_climatology_task, ref_year_climatology_task, + parent_task, climatology_name, variable_list, seasons, + comparison_grid_names, min_depth, max_depth): + + """ + Construct the analysis task and adds it as a subtask of the + ``parent_task``. 
+ + Parameters + ---------- + mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask + The task that produced the climatology to be remapped + + ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask + The task that produced the climatology from the first year to be + remapped and then subtracted from the main climatology + + parent_task : mpas_analysis.shared.AnalysisTask + The parent task, used to get the ``taskName``, ``config`` and + ``componentName`` + + climatology_name : str + A name that describes the climatology (e.g. a short version of + the important field(s) in the climatology) used to name the + subdirectories for each stage of the climatology + + variable_list : list of str + A list of variable names in ``timeSeriesStatsMonthly`` to be + included in the climatologies + + seasons : list of str, optional + A list of seasons (keys in ``shared.constants.monthDictionary``) + to be computed or ['none'] (not ``None``) if only monthly + climatologies are needed. + + comparison_grid_names : list of {'latlon', 'antarctic'} + The name(s) of the comparison grid to use for remapping. + + min_depth, max_depth : float + The minimum and maximum depths for integration + """ + + depth_range_string = f'{np.abs(min_depth):g}-{np.abs(max_depth):g}m' + subtask_name = f'remapMpasClimatology_{depth_range_string}' + # call the constructor from the base class + # (RemapMpasClimatologySubtask) + super().__init__( + mpas_climatology_task, parent_task, climatology_name, + variable_list, seasons, comparison_grid_names, + subtaskName=subtask_name) + + self.ref_year_climatology_task = ref_year_climatology_task + self.run_after(ref_year_climatology_task) + self.min_depth = min_depth + self.max_depth = max_depth + + def setup_and_check(self): + """ + Perform steps to set up the analysis and check for errors in the setup. 
+ """ + + # first, call setup_and_check from the base class + # (RemapMpasClimatologySubtask), which will set up remappers and add + # variables to mpas_climatology_task + super().setup_and_check() + + # don't add the variables and seasons to mpas_climatology_task until + # we're sure this subtask is supposed to run + self.ref_year_climatology_task.add_variables(self.variableList, + self.seasons) + + def customize_masked_climatology(self, climatology, season): + """ + Compute the ocean heat content (OHC) anomaly from the temperature + and layer thickness fields. + + Parameters + ---------- + climatology : xarray.Dataset + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : xarray.Dataset + the modified climatology data set + """ + + ohc = self._compute_ohc(climatology) + ref_file_name = self.ref_year_climatology_task.get_file_name(season) + ref_year_climo = xr.open_dataset(ref_file_name) + if 'Time' in ref_year_climo.dims: + ref_year_climo = ref_year_climo.isel(Time=0) + ref_ohc = self._compute_ohc(ref_year_climo) + + climatology['deltaOHC'] = ohc - ref_ohc + climatology.deltaOHC.attrs['units'] = 'GJ m^-2' + start_year = self.ref_year_climatology_task.startYear + climatology.deltaOHC.attrs['description'] = \ + f'Anomaly from year {start_year} in ocean heat content' + climatology = climatology.drop_vars(self.variableList) + + return climatology + + def _compute_ohc(self, climatology): + """ + Compute the OHC from the temperature and layer thicknesses in a given + climatology data sets. 
+ """ + ds_restart = xr.open_dataset(self.restartFileName) + ds_restart = ds_restart.isel(Time=0) + + # specific heat [J/(kg*degC)] + cp = self.namelist.getfloat('config_specific_heat_sea_water') + # [kg/m3] + rho = self.namelist.getfloat('config_density0') + + units_scale_factor = 1e-9 + + n_vert_levels = ds_restart.sizes['nVertLevels'] + + z_mid = compute_zmid(ds_restart.bottomDepth, ds_restart.maxLevelCell-1, + ds_restart.layerThickness) + + vert_index = xr.DataArray.from_dict( + {'dims': ('nVertLevels',), 'data': np.arange(n_vert_levels)}) + + temperature = climatology['timeMonthly_avg_activeTracers_temperature'] + layer_thickness = climatology['timeMonthly_avg_layerThickness'] + + masks = [vert_index < ds_restart.maxLevelCell, + z_mid <= self.min_depth, + z_mid >= self.max_depth] + for mask in masks: + temperature = temperature.where(mask) + layer_thickness = layer_thickness.where(mask) + + ohc = units_scale_factor * rho * cp * layer_thickness * temperature + ohc = ohc.sum(dim='nVertLevels') + return ohc diff --git a/1.11.0rc1/_sources/tutorials/getting_started.rst.txt b/1.11.0rc1/_sources/tutorials/getting_started.rst.txt new file mode 100644 index 000000000..1a0036a11 --- /dev/null +++ b/1.11.0rc1/_sources/tutorials/getting_started.rst.txt @@ -0,0 +1,636 @@ +.. _tutorial_getting_started: + +User: Getting Started +===================== + +This tutorial walks a new user through the basics of using MPAS-Analysis. + +1 Setting up a Conda Environment +--------------------------------- + +MPAS-Analysis relies on several packages that are only available as conda +packages from the ``conda-forge`` channel. The first step for running +MPAS-Analysis is to create a conda environment with all the needed packages. + +1.1 Installing Miniconda +~~~~~~~~~~~~~~~~~~~~~~~~ + +If you have not yet installed Anaconda or Miniconda, you will need to begin +there. The concept behind Anaconda is that just about everything you would +need for a typical python workflow is included. 
The concept behind Miniconda +is that you create different environments for different purposes. This allows +for greater flexibility and tends to lead to fewer conflicts between +incompatible packages, particularly when using a channel other than the +``defaults`` supplied by Anaconda. Since we will use the ``conda-forge`` +channel, the Miniconda approach is strongly recommended. + +First download the `Miniconda3 installer`_ for your operating system, then run +it: + +.. code-block:: bash + + $ /bin/bash Miniconda3-latest-Linux-x86_64.sh + +.. note:: + + MPAS-Analysis and many of the packages it depends on support OSX and Linux + but not Windows. + +In this tutorial, we assume that Miniconda is installed in the default location, +``~/miniconda3``. If you choose to install it somewhere else, just make sure +to make the appropriate substitution whenever you see a reference to this path +below. + +You will see a prompt like this:: + + Do you wish the installer to initialize Miniconda3 + by running conda init? [yes|no] + +You may wish to skip the step (answer "no") if you are working on a system +where you will also be using other conda environments. This means you have to +manually activate ``conda`` whenever you need it. For ``bash`` and similar +shells, this is: + +.. code-block:: bash + + $ source ~/miniconda3/etc/profile.d/conda.sh + +If you use ``csh``, ``tcsh`` or related shells, this becomes: + +.. code-block:: csh + + > source ~/miniconda3/etc/profile.d/conda.csh + +1.2 Creating a conda environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +First, we need to add the `conda-forge channel`_ and make sure it always takes +precedence for packages available on that channel: + +.. code-block:: bash + + $ conda config --add channels conda-forge + $ conda config --set channel_priority strict + +Then, you can create a new conda environment called ``mpas-analysis`` and +install the latest version of the ``mpas-analysis`` package into it: + +.. 
code-block:: bash + + $ conda create -n mpas-analysis python=3.8 mpas-analysis + +1.3 Activating the environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Each time you open a new terminal window, to activate the ``mpas-analysis`` +environment, you will need to run either for ``bash``: + +.. code-block:: bash + + $ source ~/miniconda3/etc/profile.d/conda.sh + $ conda activate mpas-analysis + +or for ``csh``: + +.. code-block:: csh + + > source ~/miniconda3/etc/profile.d/conda.csh + > conda activate mpas-analysis + +You can skip the ``source`` command if you chose to initialize Miniconda3 so it +loads automatically. + +2 Downloading observations +--------------------------- + +MPAS-Analysis can only run a very limited set of analysis without observational +datasets for comparison. Many analysis tasks also require "mapping files" that +describe interpolation between different grids and meshes. Some tasks also +need masks that define regions of interest on different grids and meshes. + +Most of the observational datasets as well as some common mapping files and +region masks are available for public download by calling: + +.. code-block:: bash + + $ download_analysis_data -o <diagnostics_base_path> + +Substitute ``<diagnostics_base_path>`` with a convenient location +for storing the diagnostics data. You will need this path later on when you +set up a configuration file for running the analysis. + +.. note:: + The observational data is approximately **45GB** in size and may take some + time to download. If the download gets interrupted, only incomplete or + missing files will be downloaded if you run ``download_analysis_data`` again. + +.. note:: + + If you are on an E3SM supported system, the data has already + been downloaded for you to use. Please look at the ``baseDirectory`` config + option in the ``[diagnostics]`` section of an example `config file`_ + for your supported machine. + +.. note:: + + We do not currently support downloading a subset of the data but we plan to + add this capability in the future. 
In a pinch, you could manually download + files. For reference, here is the `full list of files`_ that are + automatically downloaded from the `diagnostics web server`_. + + +3 Downloading an example run +----------------------------- + +We provide data for an "ultra-low-resolution" E3SM simulation (480 km grid cells +for the ocean and sea-ice components) as a relatively small example dataset for +testing MPAS-Analysis. The simulation is too coarse and too short to be of +scientific interest, but we wanted to keep the size manageable. The ocean and +sea-ice output are about 950 MB. + +Download the `Ultra-low-res ocean and sea-ice dataset`_ and untar it into a +convenient directory: + +.. code-block:: bash + + $ mkdir A_WCYCL1850.ne4_oQU480.anvil + $ cd A_WCYCL1850.ne4_oQU480.anvil + $ tar -xvf ../20200305.A_WCYCL1850.ne4_oQU480.anvil.ocean_seaice.tar.gz + $ cd .. + +4 Configuring MPAS-Analysis +---------------------------- + +Configuration is with Python ``cfg`` (also called ``ini``) files: + +.. code-block:: ini + + [runs] + # mainRunName is a name that identifies the simulation being analyzed. + mainRunName = runName + + [execute] + # the number of parallel tasks (1 means tasks run in serial, the default) + parallelTaskCount = 1 + # the parallelism mode in ncclimo ("serial" or "bck") + ncclimoParallelMode = serial + ... + +The `default config file`_ contains over 1,000 config options, which gives a lot +of flexibility to MPAS-Analysis but can be more than a bit overwhelming to new +users. + +You can modify the default options with one or more custom config files. We +provide an `example config file`_ as a starting point. This file contains the +most common config options that a user might want to customize. The values are +mostly the same as in the `default config file`_. + +If you are on an E3SM supported machine, you can use the `E3SM example config file`_ instead. 
If you specify the name of the supported machine with the +``--machine`` flag when you call ``mpas_analysis``, there are several config +options that will be set for you automatically. E3SM supported machines aren't +the primary focus of this tutorial, so the following will assume you're working +on an unknown machine (or simply not taking advantage of known paths on a +supported machine). + +First, you should copy this file to a new name for a specific run (say +``myrun.cfg``). Then, you should modify any config options you want to change +in your new config file. At a minimum, you need to specify: + +* ``[runs]/mainRunName``: A name for the run to be included in plot titles + and legends +* ``[diagnostics]/base_path``: The base directory for observations, + mapping files and region masks +* ``[input]/baseDirectory``: The directory for the simulation results + to analyze +* ``[input]/mpasMeshName``: The name of the MPAS ocean/sea ice mesh +* ``[output]/baseDirectory``: The directory for the analysis results + +We will cover these and a few other common options in this tutorial. With the +exception of a few paths that you will need to provide, the config options +displayed below are the ones appropriate for the example E3SM simulation from +Section 3. + +4.1 [runs] +~~~~~~~~~~ + +The ``[runs]`` section contains options related to which E3SM simulation(s) are +being analyzed: + +.. code-block:: ini + + [runs] + ## options related to the run to be analyzed and control runs to be + ## compared against + + # mainRunName is a name that identifies the simulation being analyzed. + mainRunName = A_WCYCL1850.ne4_oQU480.anvil + +The ``mainRunName`` can be any useful name that will appear at the top of each +web page of the analysis output and in the legends or titles of the figures. +Often, this is the full name of the E3SM simulation but sometimes it is +convenient to have a shorter name. 
In this case, we use part of the run name +but leave off the date of the simulation to keep it a little shorter. + +4.2 [execute] +~~~~~~~~~~~~~ + +The ``[execute]`` section contains options related to serial or parallel +execution of the individual "tasks" that make up an MPAS-Analysis run. + +.. code-block:: ini + + [execute] + ## options related to executing parallel tasks + + # the number of parallel tasks (1 means tasks run in serial, the default) + parallelTaskCount = 4 + + # the parallelism mode in ncclimo ("serial", "bck" or "mpi") + # Set this to "bck" (background parallelism) if running on a machine that can + # handle 12 simultaneous processes, one for each monthly climatology. + # Set to "mpi" to run one MPI task on each node and however many threads per + # node to reach 12 total threads. + ncclimoParallelMode = bck + +``parallelTaskCount`` should typically be equal to the number of cores on a +laptop or a desktop. In a high-performance computing (HPC) node, it is +typically not possible to run 30 to 60 tasks in parallel even if the node has +that many cores. This is because many tasks consume enough memory that the +job will crash with too many tasks running at the same time. We have found that +``parallelTaskCount`` should typically be somewhere between 6 and 12 for the +HPC machines we use for E3SM. + +``ncclimoParallelMode`` indicates how `ncclimo`_ should be run to make +climatologies used in many MPAS-Analysis plots. Typically, we recommend +``bck``, meaning ``ncclimo`` runs with 12 threads at once on the same node. +In circumstances where ``ncclimo`` is crashing and it appears to be running out +of memory, it is worth exploring ``serial`` or ``mpi`` modes, or using +``xarray`` and ``dask`` instead to compute climatologies by setting +``[climatology]/useNcclimo = False`` + +For this tutorial, we suggest starting with 4 parallel tasks and ``ncclimo`` in +``bck`` mode. 
+ +4.3 [diagnostics] +~~~~~~~~~~~~~~~~~ + +The ``diagnostics`` section is used to supply the directory where you downloaded +observations in Section 2. + +.. code-block:: ini + + [diagnostics] + ## config options related to observations, mapping files and region files used + ## by MPAS-Analysis in diagnostics computations. + + # The base path to the diagnostics directory. Typically, this will be a shared + # directory on each E3SM supported machine (see the example config files for + # its location). For other machines, this would be the directory pointed to + # when running "download_analysis_data.py" to get the public observations, + # mapping files and region files. + base_path = /path/to/diagnostics + +For ``base_path``, supply the path where you downloaded the data +(``<diagnostics_base_path>``). + +4.4 [input] +~~~~~~~~~~~ + +The ``[input]`` section provides paths to the E3SM simulation data and the name +of the MPAS-Ocean and MPAS-Seaice mesh. + +.. code-block:: ini + + [input] + ## options related to reading in the results to be analyzed + + # directory containing model results + baseDirectory = /dir/for/model/output + + # Note: an absolute path can be supplied for any of these subdirectories. + # A relative path is assumed to be relative to baseDirectory. + # In this example, results are assumed to be in <baseDirectory>/run + + # subdirectory containing restart files + runSubdirectory = run + # subdirectory for ocean history files + oceanHistorySubdirectory = archive/ocn/hist + # subdirectory for sea ice history files + seaIceHistorySubdirectory = archive/ice/hist + + # names of namelist and streams files, either a path relative to baseDirectory + # or an absolute path. + oceanNamelistFileName = run/mpaso_in + oceanStreamsFileName = run/streams.ocean + seaIceNamelistFileName = run/mpassi_in + seaIceStreamsFileName = run/streams.seaice + + # name of the ocean and sea-ice mesh (e.g. EC30to60E2r2, WC14to60E2r3, + # ECwISC30to60E2r1, SOwISC12to60E2r4, oQU240, etc.) 
+ mpasMeshName = oQU480 + +The ``baseDirectory`` is the path where you untarred the example run. + +The ``mpasMeshName`` is the standard E3SM name for the MPAS-Ocean and +MPAS-Seaice mesh. In this example, this is ``oQU480``, meaning the +quasi-uniform 480-km mesh for the ocean and sea ice. + +The ``runSubdirectory`` must contain valid MPAS-Ocean and MPAS-Seaice restart +files, used to get information about the MPAS mesh and the ocean vertical grid. + +The ``oceanHistorySubdirectory`` must contain MPAS-Ocean monthly mean output +files, typically named:: + + mpaso.hist.am.timeSeriesStatsMonthly.YYYY-MM-DD.nc + +Similarly, ``seaIceHistorySubdirectory`` contains the MPAS-Seaice monthly mean +output:: + + mpassi.hist.am.timeSeriesStatsMonthly.YYYY-MM-DD.nc + +Finally, MPAS-Analysis needs a set of "namelists" and "streams" files that +provide information on the E3SM configuration for MPAS-Ocean and MPAS-Seaice, +and about the output files, respectively. These are typically also found in +the ``run`` directory. + +For the example data, only ``baseDirectory`` and ``mpasMeshName`` need to be +set, the other options can be left as the defaults from the +`example config file`_. + +For this tutorial, you just need to set ``baseDirectory`` to the place where +you untarred the simulation results. The other config options should be as +they are in the code block above. + +4.5 [output] +~~~~~~~~~~~~ + +The ``[output]`` section provides a path where the output from the analysis run +will be written, the option to output the results web pages to another +location, and a list of analysis to be generated (or explicitly skipped). + +.. code-block:: ini + + [output] + ## options related to writing out plots, intermediate cached data sets, logs, + ## etc. + + # directory where analysis should be written + # NOTE: This directory path must be specific to each test case. + baseDirectory = /dir/for/analysis/output + + # provide an absolute path to put HTML in an alternative location (e.g. 
a web + # portal) + htmlSubdirectory = html + + # a list of analyses to generate. Valid names can be seen by running: + # mpas_analysis --list + # This command also lists tags for each analysis. + # Shortcuts exist to generate (or not generate) several types of analysis. + # These include: + # 'all' -- all analyses will be run + # 'all_publicObs' -- all analyses for which observations are available on the + # public server (the default) + # 'all_' -- all analysis with a particular tag will be run + # 'all_' -- all analyses from a given component (either 'ocean' + # or 'seaIce') will be run + # 'only_', 'only_' -- all analysis from this component or + # with this tag will be run, and all + # analysis for other components or + # without the tag will be skipped + # 'no_' -- skip the given task + # 'no_', 'no_' -- in analogy to 'all_*', skip all analysis + # tasks from the given component or with + # the given tag. Do + # mpas_analysis --list + # to list all task names and their tags + # an equivalent syntax can be used on the command line to override this + # option: + # mpas_analysis analysis.cfg --generate \ + # only_ocean,no_timeSeries,timeSeriesSST + generate = ['all_publicObs'] + +``baseDirectory`` is any convenient location for the output. + +``htmlSubdirectory`` can simply be the ``/html``, the default or +an absolute path to another location. The later is useful for HPC machines that +have a web portal. + +Finally, the ``generate`` option provides a python list of flags that can be +used to determine which analysis will be generated. For this tutorial, we will +stick with the default, ``'all_publicObs'``, indicating that we will only run +analysis where the observations are included on the public server and which +were downloaded in Section 2 (or analysis that does not require observations). 
+ +4.6 [climatology], [timeSeries] and [index] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +These options determine the start and end years of climatologies (time averages +over a particular month, season or the full year), time series or the El Niño +climate index. + +.. code-block:: ini + + [climatology] + ## options related to producing climatologies, typically to compare against + ## observations and previous runs + + # the first year over which to average climatologies + startYear = 3 + # the last year over which to average climatologies + endYear = 5 + + [timeSeries] + ## options related to producing time series plots, often to compare against + ## observations and previous runs + + # start and end years for timeseries analysis. Out-of-bounds values will lead + # to an error. + startYear = 1 + endYear = 5 + + [index] + ## options related to producing nino index. + + # start and end years for timeseries analysis. Out-of-bounds values will lead + # to an error. + startYear = 1 + endYear = 5 + +For each of these options, a full year of data must exist for that year to +be included in the analysis. + +For the example E3SM simulation that we downloaded in Section 3, only 5 years of +simulation data are available, so we are doing a climatology over the last 3 +years (3 to 5) and displaying time series and the El Niño index over the full +5 years. + +5 Running MPAS-Analysis +----------------------- + +The hard work is done. Now that we have a config file, we are ready to run. + +On many file systems, MPAS-Analysis and other python-based software that uses +NetCDF files based on the HDF5 file structure can experience file access errors +unless the following environment variable is set as follows in bash: + +.. code-block:: bash + + $ export HDF5_USE_FILE_LOCKING=FALSE + +or under csh: + +.. code-block:: csh + + > setenv HDF5_USE_FILE_LOCKING FALSE + +Then, running MPAS-Analysis is as simple as: + +.. 
code-block:: bash + + $ mpas_analysis myrun.cfg + +Typical output is the analysis is running correctly looks something like: + +.. code-block:: none + + running: /home/xylar/Desktop/miniconda3/envs/mpas-analysis/bin/ESMF_RegridWeight + Gen --source /tmp/tmph58_hgz4/src_mesh.nc --destination /tmp/tmph58_hgz4/dst_mes + h.nc --weight /home/xylar/Desktop/analysis_test/analysis/A_WCYCL1850.ne4_oQU480. + anvil/mapping/map_oQU480_to_0.5x0.5degree_bilinear.nc --method bilinear --netcdf + 4 --no_log --src_regional --ignore_unmapped + running: /home/xylar/Desktop/miniconda3/envs/mpas-analysis/bin/ESMF_RegridWeight + Gen --source /tmp/tmpxt8x1h_6/src_mesh.nc --destination /tmp/tmpxt8x1h_6/dst_mes + h.nc --weight /home/xylar/Desktop/analysis_test/analysis/A_WCYCL1850.ne4_oQU480. + anvil/mapping/map_obs_eke_0.25x0.25degree_to_0.5x0.5degree_bilinear.nc --method + bilinear --netcdf4 --no_log --src_regional --ignore_unmapped + running: /home/xylar/Desktop/miniconda3/envs/mpas-analysis/bin/ESMF_RegridWeight + Gen --source /tmp/tmp3_7gpndz/src_mesh.nc --destination /tmp/tmp3_7gpndz/dst_mes + h.nc --weight /home/xylar/Desktop/analysis_test/analysis/A_WCYCL1850.ne4_oQU480. + anvil/mapping/map_oQU480_to_6000.0x6000.0km_10.0km_Antarctic_stereo_bilinear.nc + --method bilinear --netcdf4 --no_log --src_regional --dst_regional --ignore_unma + pped + Preprocessing SOSE transect data... + temperature + salinity + potentialDensity + zonalVelocity + meridionalVelocity + velMag + Done. + running: /home/xylar/Desktop/miniconda3/envs/mpas-analysis/bin/ESMF_RegridWeight + Gen --source /tmp/tmpt9n4vb5n/src_mesh.nc --destination /tmp/tmpt9n4vb5n/dst_mes + h.nc --weight /home/xylar/Desktop/analysis_test/analysis/A_WCYCL1850.ne4_oQU480. 
+ anvil/mapping/map_oQU480_to_SOSE_transects_5km_bilinear.nc --method bilinear --n + etcdf4 --no_log --src_regional --dst_regional --ignore_unmapped + + Running tasks: 2% | | ETA: 0:09:04 + Running tasks: 52% |###################### | ETA: 0:06:13 + Running tasks: 100% |##########################################| Time: 0:18:50 + + Log files for executed tasks can be found in /home/xylar/Desktop/analysis_test/a + nalysis/A_WCYCL1850.ne4_oQU480.anvil/logs + Total setup time: 0:03:11.74 + Total run time: 0:22:02.33 + Generating webpage for viewing results... + Done. + +The first part of the output, before the progress bar, is the "setup" phase +where MPAS-Analysis is checking if the requested analysis can be run on the +simulation results. The specific output shown here is related to creating +so-called mapping files that are used to interpolate between the ``oQU480`` mesh +and the various grids MPAS-Analysis uses to compare with observations. Since +MPAS-Analysis didn't know about that ``oQU480`` mesh ahead of time, it is +creating mapping files and regions masks for this mesh on the fly. + +The command-line tool has several more options you can explore with + +.. code-block:: bash + + $ mpas_analysis --help + +These include listing the available analysis tasks and their tags, purging a +previous analysis run before running the analysis again, plotting all available +color maps, and outputting verbose python error messages when the analysis fails +during the setup phase (before a progress bar appears). + +6 Viewing the Output +-------------------- + +The primary output from MPAS-Analysis is a set of web pages, each containing +galleries of figures. The output can be found in the directory you provided in +Section 4.5, which is the ``html`` subdirectory of the base output directory by +default. If the web page is incomplete, it presumably means there was an error +during the analysis run, since the web page is generated as the final step. 
+ +The main web page has links to the ocean and sea-ice web pages as well as some +"provenance" information about which version of MPAS-Analysis you were using +and how it was configured. + +The web page generated by this tutorial should look something like +`this example output`_. + +.. _tutorial_getting_started_trouble: + +7 Troubleshooting +----------------- + +This section briefly describes strategies for diagnosing errors in +MPAS-Analysis. This tutorial cannot hope to provide a comprehensive guide to +troubleshooting these errors. Please search the documentation, Google the error +online, or get in touch with the MPAS-Analysis developer team (by +`posting an issue`_ on GitHub) if you are experiencing an error. + +7.1 Purging old Analysis +~~~~~~~~~~~~~~~~~~~~~~~~ + +One thing you might want to try first if you are experiencing problems is to +delete any analysis you may already have in your output directory: + +.. code-block:: bash + + $ mpas_analysis --purge myrun.cfg + +This will first delete existing analysis and then run the analysis again. + +7.2 Errors During Setup +~~~~~~~~~~~~~~~~~~~~~~~ + +If an error occurs during setup, by default the full python traceback is +suppressed. This is because some tasks fail because the run being analyzed was +not configured for that analysis. In such cases, many users want the analysis +to continue, simply skipping the tasks that can't be run. + +However, this means that sometimes the analysis is not configured properly and +as a result most or all tasks are not running. To find out why, you will +probably need to run: + +.. code-block:: bash + + $ mpas_analysis --verbose myrun.cfg + +This will give you a detailed python stack trace. Even if this is not helpful +to you, it might help developers to troubleshoot your issue. + +7.3 Errors in Tasks +~~~~~~~~~~~~~~~~~~~ + +If you see the progress bar start but errors occur during running of analysis +tasks, the error messages will not be displayed to the screen. 
Instead, they +will be in log files (as stated in the short error message letting you know +that a task has failed). The contents of these log files may help you to +determine the cause of the error. If not, please include them if you are +`posting an issue`_ on GitHub. + + +.. _`Miniconda3 installer`: https://docs.conda.io/en/latest/miniconda.html +.. _`conda-forge channel`: https://conda-forge.org/ +.. _`config file`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/main/configs +.. _`Ultra-low-res ocean and sea-ice dataset`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/test_output/20200305.A_WCYCL1850.ne4_oQU480.anvil/20200305.A_WCYCL1850.ne4_oQU480.anvil.ocean_seaice.tar.gz +.. _`full list of files`: https://github.com/MPAS-Dev/MPAS-Analysis/blob/main/mpas_analysis/obs/analysis_input_files +.. _`diagnostics web server`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/ +.. _`default config file`: https://github.com/MPAS-Dev/MPAS-Analysis/blob/main/mpas_analysis/default.cfg +.. _`example config file`: https://github.com/MPAS-Dev/MPAS-Analysis/blob/main/example.cfg +.. _`E3SM example config file`: https://github.com/MPAS-Dev/MPAS-Analysis/blob/main/example_e3sm.cfg +.. _`ncclimo`: http://nco.sourceforge.net/nco.html#ncclimo-netCDF-Climatology-Generator +.. _`this example output`: https://mpas-dev.github.io/MPAS-Analysis/examples/QU480 +.. _`posting an issue`: https://github.com/MPAS-Dev/MPAS-Analysis/issues diff --git a/1.11.0rc1/_sources/users_guide/all_obs.rst.txt b/1.11.0rc1/_sources/users_guide/all_obs.rst.txt new file mode 100644 index 000000000..368382f1a --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/all_obs.rst.txt @@ -0,0 +1,32 @@ +.. 
toctree:: + :maxdepth: 1 + + obs/hadley_center_sst.rst + obs/aquarius_sss.rst + obs/woa18_t_s.rst + obs/aviso_ssh.rst + obs/argo_mld.rst + obs/trenberth_mht.rst + obs/roemmich_gilson_argo.rst + obs/sose.rst + obs/rignot_melt.rst + obs/adusumilli_melt.rst + obs/paolo_melt.rst + obs/hadisst_nino.rst + obs/ers_sst_nino.rst + obs/schmidtko.rst + obs/woce.rst + obs/woa.rst + obs/landschuetzer-som-ffn.rst + obs/seawifs.rst + obs/glodapv2.rst + obs/drifter_eke.rst + obs/era5_waves.rst + obs/sscci_waves.rst + obs/nasateam_conc.rst + obs/bootstrap_conc.rst + obs/ssmi_ice_area.rst + obs/icesat_thickness.rst + obs/piomass_ice_volume.rst + obs/aniceflux.rst + obs/altiberg.rst diff --git a/1.11.0rc1/_sources/users_guide/analysis_tasks.rst.txt b/1.11.0rc1/_sources/users_guide/analysis_tasks.rst.txt new file mode 100644 index 000000000..313adda83 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/analysis_tasks.rst.txt @@ -0,0 +1,53 @@ +Analysis Tasks +============== + +.. toctree:: + :maxdepth: 1 + + tasks/climatologyMapMLD + tasks/climatologyMapMLDMinMax + tasks/climatologyMapSST + tasks/climatologyMapSSS + tasks/climatologyMapSSH + tasks/climatologyMapEKE + tasks/climatologyMapOHCAnomaly + tasks/climatologyMapSose + tasks/climatologyMapBGC + tasks/climatologyMapArgoTemperature + tasks/climatologyMapArgoSalinity + tasks/climatologyMapWoa + tasks/climatologyMapSchmidtko + tasks/climatologyMapAntarcticMelt + tasks/climatologyMapWaves + + tasks/hovmollerOceanRegions + tasks/timeSeriesAntarcticMelt + tasks/timeSeriesOceanRegions + tasks/timeSeriesTemperatureAnomaly + tasks/timeSeriesSalinityAnomaly + tasks/timeSeriesOHCAnomaly + tasks/timeSeriesSST + tasks/timeSeriesTransport + tasks/meridionalHeatTransport + tasks/streamfunctionMOC + tasks/indexNino34 + tasks/woceTransects + tasks/soseTransects + tasks/geojsonTransects + tasks/oceanRegionalProfiles + tasks/regionalTSDiagrams + tasks/oceanHistogram + tasks/conservation + + tasks/climatologyMapSeaIceConcNH + 
tasks/climatologyMapSeaIceThickNH + tasks/climatologyMapSeaIceConcSH + tasks/climatologyMapSeaIceThickSH + tasks/timeSeriesSeaIceAreaVol + + tasks/climatologyMapSeaIceProductionNH + tasks/climatologyMapSeaIceMeltingNH + tasks/climatologyMapSeaIceProductionSH + tasks/climatologyMapSeaIceMeltingSH + + tasks/climatologyMapIcebergConcSH diff --git a/1.11.0rc1/_sources/users_guide/components.rst.txt b/1.11.0rc1/_sources/users_guide/components.rst.txt new file mode 100644 index 000000000..b8caaba87 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/components.rst.txt @@ -0,0 +1,6 @@ +MPAS Components and E3SM +======================== + +.. include:: mpaso.rst +.. include:: mpasseaice.rst +.. include:: e3sm.rst diff --git a/1.11.0rc1/_sources/users_guide/config/climatology.rst.txt b/1.11.0rc1/_sources/users_guide/config/climatology.rst.txt new file mode 100644 index 000000000..83036aed3 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/climatology.rst.txt @@ -0,0 +1,144 @@ +.. _config_climatology: + +Climatology +=========== + +The ``[climatology]`` section of a configuration file contains options used to +determine the start and end years of climatologies, the reference years for +anomalies and to control remapping of climatologies to comparions grids:: + + [climatology] + ## options related to producing climatologies, typically to compare against + ## observations and previous runs + + # the year from which to compute anomalies if not the start year of the + # simulation. This might be useful if a long spin-up cycle is performed and + # only the anomaly over a later span of years is of interest. 
+ # anomalyRefYear = 249 + + # the first year over which to average climatalogies + startYear = 11 + # the last year over which to average climatalogies + endYear = 20 + + # The comparison lat/lon grid resolution in degrees + comparisonLatResolution = 0.5 + comparisonLonResolution = 0.5 + + # The comparison Antarctic polar stereographic grid size and resolution in km + comparisonAntarcticStereoWidth = 6000. + comparisonAntarcticStereoResolution = 10. + + # interpolation order for model and observation results. Likely values are + # 'bilinear', 'neareststod' (nearest neighbor) or 'conserve' + mpasInterpolationMethod = bilinear + + # should climatologies be performed with ncclimo or with xarray/dask + useNcclimo = True + + # should remapping be performed with ncremap or with the Remapper class + # directly in MPAS-Analysis + useNcremap = True + + # The minimum weight of a destination cell after remapping. Any cell with + # weights lower than this threshold will therefore be masked out. + renormalizationThreshold = 0.01 + + # if useNcclimo = False, the number of threads dask is allowed to spawn for + # each process computing a climatology for a given month or season + # Decrease this number if mpasClimatology* subtasks are running + # out of available threads + daskThreads = 2 + + # if useNcclimo = False, the number of subprocesses that each climatology + # subtask gets counted as occupying. + # Increase this number if mpasClimatology* subtasks are running + # out of memory, and fewer tasks will be allowed to run at once + subprocessCount = 1 + + +Start and End Year +------------------ + +A custom config file should specify a start and end year for climatologies. +Simulation data must exist for all 12 months of each year in this range. +Otherwise, the range will be reduced to those years with complete data and +a warning message will be displayed. 
+ + +Anomaly Reference Year +---------------------- + +Anomalies between a climatology and the associated field from a reference year +are used in a few analysis tasks. By default, the reference year is not +specified in the configuration file and it is taken to be the start of the +simulation (determined from the contents of a restart file). Under certain +circumstances (e.g. repetition of forcing data for several cycles, as in +the `Common Ocean Reference Experiments, CORE`_), it may be desirable to +specify a different year to use for computing anomalies:: + + anomalyRefYear = 249 + +.. _config_remapping: + +Remapping Options +----------------- + +Climatologies are remapped from MPAS meshes and observations grids to common +comparison grids. The remapping can be performed with any of three methods: +``bilinear``, ``neareststod`` (nearest neighbor) or ``conserve``. Mapping +files are created with the `ESMF_RegridWeightGen tool`_. The default method +is ``bilinear`` and these are the mapping files distributed from the +`E3SM public data repository`_. The ``conserve`` method is known to be *much* +slower to compute and should only be used if it is necessary (e.g. because +remapped data will be checked for conservation). + +MPAS-Analysis typically uses the `NCO`_ tool ``ncremap`` to perform remapping. +However, a python remapping capability is also available. The user can force +remapping to use the python-based remapping by specifying:: + + useNcremap = False + +This capability is available largely for debugging purposes. + +Remapped data typically only makes sense if it is renormalized after remapping. +For remapping of conserved quantities like fluxes, renormalization would not +be desirable but for quantities like potential temperature, salinity and +potential density commonly used in MPAS-Analysis tasks, values become +physically meaningless near land boundaries and regions without data unless +renormalization is performed. 
A threshold is needed to determine how much of a +cell's area on the output grid must contain valid data from the input grid or +mesh, below which that cell is considered invalid and is masked out of the +destination data set. This threshold is specified as a fraction:: + + renormalizationThreshold = 0.01 + +If noisy or unphysical values occur near masked regions on the comparison grid, +it might be necessary to increase this threshold. If too much data appears to +be being masked out unnecessarily on the comparison grid, perhaps this value +should be made smaller. + + +Computing climatologies +----------------------- + +MPAS-Analysis typically uses the `NCO`_ tool ``ncclimo`` to compute +climatologies. For some large data sets on a single node, ``ncclimo`` runs +out of memory in ``bck`` mode but is painfully slow in ``serial`` mode and +wastes extra nodes in ``mpi`` mode. (See :ref:`config_execute` for more on +configuring ``ncclimo``.) For such cases, there is also an xarray/dask method +of computing climatologies:: + + useNcclimo = False + + +Other Options +------------- + +* :ref:`config_comparison_grids` +* :ref:`dask_threads` + +.. _`Common Ocean Reference Experiments, CORE`: http://data1.gfdl.noaa.gov/nomads/forms/mom4/CORE.html +.. _`ESMF_RegridWeightGen tool`: http://www.earthsystemmodeling.org/esmf_releases/public/ESMF_7_1_0r/ESMF_refdoc/node3.html#SECTION03020000000000000000 +.. _`E3SM public data repository`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/ +.. _`NCO`: http://nco.sourceforge.net/nco.html diff --git a/1.11.0rc1/_sources/users_guide/config/colormaps.rst.txt b/1.11.0rc1/_sources/users_guide/config/colormaps.rst.txt new file mode 100644 index 000000000..2c26fbf3a --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/colormaps.rst.txt @@ -0,0 +1,141 @@ +.. 
_config_colormaps: + +Colormaps +========== + +Each analysis task that includes plots with colormaps has a series of +configuration options that can be used to manipulate the colormap. +MPAS-Analysis supports two types of plots, each of which has an associated +set of options for configuring its colormap. + +Supported Colormaps +-------------------- + +MPAS-Analysis supports all `matplotlib colormaps`_, all `cmocean colormaps`_, +all `Scientific Colour Map Suite 3.0`_ colormaps, the selection of key +`SciVisColor colormaps`_ and five custom color maps: ``ferret``, +``erdc_iceFire_H``, ``erdc_iceFire_L``, ``BuOr`` and ``Maximenko``. + +.. figure:: images/colormaps.png + :width: 720 px + :alt: All available colormaps + :align: center + + All available colormaps + +To plot all available colormaps (as above), run:: + + mpas_analysis --plot_colormaps + +Indexed Colormaps +------------------ + +Indexed colormaps are used in association with contour plots in MPAS-Analysis. +The following configuration options are associated with an indexed colormap:: + + # colormap name + colormapName = RdYlBu_r + # color indices into colormapName for filled contours + colormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevels = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] + +The indices are between 0 and 255, indicating the location within the colormap +to sample. The levels are the contour values to plot. Since the region +between two contours will be filled with a solid color specified by a colormap +index, the number of levels is one greater than the number of indices. The +exception is when a separate shading should be used for values that fall +outside the range of the contours, in which case the number of colormap indices +is one more than the number of levels values, and the first and last index +are used to specify the "under" and "over" color values. 
+ +Continuous Colormaps +--------------------- + +A continuous colormap is specified with a different set of options:: + + # colormap for model/observations + colormapName = erdc_iceFire_H + # the type of norm used in the colormap + normType = symLog + # A dictionary with keywords for the norm + normArgs = {'linthresh': 1., 'linscale': 0.5, 'vmin': -100., + 'vmax': 100.} + +MPAS Analysis supports three norms for the continuous color map, with an +associated dictionary of arguments. These are: + + * `linear`_ + * `log`_ + * `symLog`_ + +See the associated documentation for the arguments to each norm. + +Colormaps in Three Panel Plots +------------------------------ + +Many MPAS-Analysis tasks include a panel for the main model run, another for +observations or a control run and a third panel for displaying a bias or +the model-reference difference. In such cases, we support one colormap for the +first two panels and another for the final panel. Options for each of these +colormaps are specified by appending either ``Result`` or ``Difference`` to +each. For example:: + + # colormap for model/observations + colormapNameResult = RdYlBu_r + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] + + # colormap for differences + colormapNameDifference = RdBu_r + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, + 227, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-5, -3, -2, -1, -0.1, 0, 0.1, 1, 2, 3, 5] + +Adding Contour Lines +-------------------- + +Contour lines can be added to a plot with the following options:: + + # contour line levels + contourLevels = numpy.arange(-240., 130., 10.) 
+ # contour line thickness + contourThickness = 0.25 + # contour color + contourColor = 0.25 + +The levels are the field values for each contour. The line thickness is +specified in points. The ``contourColor`` can be any color name supported +by ``matplotlib`` or a floating point number between 0 and 1 specifying a +shade of gray. + +Append ``Result`` or ``Difference`` to these options for a 3-panel plot. + +Specifying Colorbar Tick Marks +------------------------------ + +By default, colorbar tick marks are chosen automatically by ``matplotlib``. +To specify tick marks explicitly, use:: + + colorbarTicks = [-100., -50., -20., -10., -5., -2., -1., 0., 1., 2., 5., + 10., 20., 50., 100.] + +Alternatively, ``numpy`` functions can be used to specify the tick locations:: + + colorbarTicks = numpy.linspace(-100, 100, 9) + +Again, append ``Result`` or ``Difference`` to these options for a 3-panel plot. + + +.. _`matplotlib colormaps`: https://matplotlib.org/users/colormaps.html +.. _`cmocean colormaps`: https://matplotlib.org/cmocean/ +.. _`Scientific Colour Map Suite 3.0`: http://www.fabiocrameri.ch/colourmaps.php +.. _`SciVisColor colormaps`: https://sciviscolor.org/home/colormaps/ +.. _`linear`: https://matplotlib.org/users/colormapnorms.html +.. _`log`: https://matplotlib.org/users/colormapnorms.html#logarithmic +.. _`symLog`: https://matplotlib.org/users/colormapnorms.html#symmetric-logarithmic + diff --git a/1.11.0rc1/_sources/users_guide/config/comparison_grids.rst.txt b/1.11.0rc1/_sources/users_guide/config/comparison_grids.rst.txt new file mode 100644 index 000000000..f6a372f5f --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/comparison_grids.rst.txt @@ -0,0 +1,36 @@ +.. _config_comparison_grids: + +Comparison Grids +================ + +Currently, MPAS-Analysis supports two comparison grids to which both model +results and observations are remapped for comparison and plotting. 
These are +``latlon``, a global latitude/longitude grid, and ``antarctic``, a +stereographic grid centered at the south pole. Many analysis tasks support +both of these grids, in which case either or both can be specified in a list:: + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + +The resolution of each of these grids are set through a set of configuration +options that affect all tasks using these grids:: + + [climatology] + ## options related to producing climatologies, typically to compare against + ## observations and previous runs + + ... + + # The comparison lat/lon grid resolution in degrees + comparisonLatResolution = 0.5 + comparisonLonResolution = 0.5 + + # The comparison Antarctic polar stereographic grid size and resolution in km + comparisonAntarcticStereoWidth = 6000. + comparisonAntarcticStereoResolution = 10. + +These options can be altered by the user, in which case new weights for +remapping model results and observations to these grids will be generated on +the fly. Mapping files for interpolating to the standard comparison grids from +both observations and standard MPAS meshes are provided when you download the +observations files. diff --git a/1.11.0rc1/_sources/users_guide/config/dask_threads.rst.txt b/1.11.0rc1/_sources/users_guide/config/dask_threads.rst.txt new file mode 100644 index 000000000..e96d1704b --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/dask_threads.rst.txt @@ -0,0 +1,48 @@ +.. _dask_threads: + +Dask threads and subprocess count +================================= + +Several tasks and subtasks have config options ``daskThreads`` and +``subprocessCount`` used to control threading within a subtask:: + + # The number of threads dask is allowed to spawn for each task/subtask. + # Decrease this number if tasks/subtasks are running out of available threads + daskThreads = 2 + + # The number of subprocesses that each task/subtask gets counted as + # occupying. 
Increase this number if tasks/subtasks are running out of + # memory, so that fewer tasks will be allowed to run at once + subprocessCount = 1 + +Dask threads +------------ + +Dask and xarray support thread-parallel operations on data sets. They also +support chunk-wise operation on data sets that can't fit in memory. These +capabilities are very powerful but also difficult to configure for general +cases. Dask is also not designed by default with the idea that multiple tasks, +each with multiple dask threads, might operate simultaneously. As a result, +it is possible to spawn huge numbers of dask threads in MPAS-Analysis that both +slow down analysis and lead to errors when the node runs out of threads +completely. + +To prevent this, many tasks or subtasks that use dask threading take the number +of execution threads from a config option, typically in the config section for +the parent task. Typically, the number of ``daskThreads`` should be around +the same as the number of cores on a node divided by the number of tasks +that will run simultaneously. Since the number of running tasks is controlled +by ``subprocessCount``, see below, this number might differ from +``parallelTaskCount``. + +Subprocess count +---------------- + +Tasks or subtasks that use dask threading may consume too much memory or use +too many threads to "count" as a single task. That is, it might not be safe to +run with ``parallelTaskCount`` simultaneous instances of the task/subtask and +it would be better if it occupied the slot of multiple tasks in the pool of +tasks. MPAS-Analysis will treat a dask-based task or subtask as occupying +the number of task slots given by the ``subprocessCount`` option. For example, +if ``parallelTaskCount = 8`` and ``subprocessCount = 2``, up to 4 tasks or +subtasks would be allowed to run simultaneously. 
diff --git a/1.11.0rc1/_sources/users_guide/config/diagnostics.rst.txt b/1.11.0rc1/_sources/users_guide/config/diagnostics.rst.txt new file mode 100644 index 000000000..a85b4927b --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/diagnostics.rst.txt @@ -0,0 +1,76 @@ +.. _config_diagnostics: + +Diagnostics +=========== + +The ``[diagnostics]`` section of a configuration file contains options related +to paths containing observations, region-mask files and mapping files used to +interpolate MPAS data and observations to common reference grids:: + + [diagnostics] + ## config options related to observations, mapping files and region files used + ## by MPAS-Analysis in diagnostics computations. + + # The base path to the diagnostics directory. Typically, this will be a shared + # directory on each E3SM supported machine (see the example config files for + # its location). For other machines, this would be the directory pointed to + # when running "download_analysis_data.py" to get the public observations, + # mapping files and region files. + baseDirectory = /path/to/diagnostics + + # A second directory where custom diagonstics data such as mapping files and + # regions masks for unsupported grids can be found. The structure of + # subdirectories in this directory must be the same as baseDirectory + customDirectory = none + + # Directory for mapping files (if they have been generated already). If mapping + # files needed by the analysis are not found here, they will be generated and + # placed in the output mappingSubdirectory. The user can supply an absolute + # path here to point to a path that is not within the baseDirectory above. + mappingSubdirectory = mpas_analysis/maps + + # Directory for region mask files. The user can supply an absolute path here to + # point to a path that is not within the baseDirectory above. 
+ regionMaskSubdirectory = mpas_analysis/region_masks + +Diagnostics Directories +----------------------- + +The ``baseDirectory`` is the location where files were downloaded with the +``download_analysis_data.py``. If the user is on an E3SM supported machine, +this data has already been downloaded to a shared location (see example config +files in the subdirectories of the ``configs`` directory in the MPAS-Analysis +repository). + +The ``customDirectory``, if it is not ``none`` is another directory where +observations, mapping files and region masks may be stored. This is useful for +runs on non-standard grids or for testing out new observations. + +The remaining options point to the subdirectories for mapping files (see +below) and region masks (see :ref:`config_colormaps`), respectively. +Typically, there is no reason to change ``mappingSubdirectory`` or +``regionMaskSubdirectory``, as these are the standard subdirectories created +when these files are downloaded from the `E3SM public data repository`_. + +.. _config_mapping_files: + +Mapping Files +------------- + +Mapping files are used in many MPAS-Analysis tasks to remap from either the +native MPAS mesh or an observations grid to a comparison grid (see +:ref:`config_comparison_grids`). By default, these mapping files are generated +on the fly as they are needed. This can be a time-consuming process, +especially for high resolution meshes, so it is useful to store a cache of +these mapping files for reuse. Mapping files at three standard resolutions +are avaliable on the `E3SM public data repository`_. The mapping files for +the two coarser resolution meshes will be downloaded automatically along with +the publicly available observations. (See the :ref:`quick_start` for details +on downloading this data.) 
+ +If you notice that MPAS-Analysis is generating mapping files on the fly each +time you run, you may wish to copy them from the mapping files output +directory (the subdirectory ``mapping/`` inside the output base directory) to +your mapping files cache directory. + +.. _`E3SM public data repository`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/ diff --git a/1.11.0rc1/_sources/users_guide/config/execute.rst.txt b/1.11.0rc1/_sources/users_guide/config/execute.rst.txt new file mode 100644 index 000000000..6a54ddaee --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/execute.rst.txt @@ -0,0 +1,157 @@ +.. _config_execute: + +Execute +======= + +The ``[execute]`` section of a configuration file contains options used to +control how tasks are executed within an MPAS-Analysis run:: + + [execute] + ## options related to executing parallel tasks + + # the number of parallel tasks (1 means tasks run in serial, the default) + parallelTaskCount = 1 + + # the parallelism mode in ncclimo ("serial", "bck" or "mpi") + # Set this to "bck" (background parallelism) in most cases. The default number + # of threads (see below) is 12, one for each monthly climatology. Set to "mpi" + # to run one MPI task on each node and however many threads per node to reach + # 12 total threads. + ncclimoParallelMode = serial + + # the number of total threads to use when ncclimo runs in "bck" or "mpi" mode. + # Reduce this number if ncclimo is crashing (maybe because it is out of memory). + # The number of threads must be a factor of 12 (1, 2, 3, 4, 6 or 12). 
+ ncclimoThreads = 12 + + # the number of MPI tasks to use in creating mapping files (1 means tasks run in + # serial, the default) + mapMpiTasks = 1 + + # "None" if ESMF should perform mapping file generation in serial without a + # command, or one of "srun" or "mpirun" if it should be run in parallel (or in + # serial but with a command) + mapParallelExec = None + + # "None" if ncremap should perform remapping without a command, or "srun" + # possibly with some flags if it should be run with that command + ncremapParallelExec = None + + # Multiprocessing method used in python mask creation ("forkserver", "fork" or + # "spawn"). We have found that "spawn" is the only one that works in python + # 3.7 on Anvil so this is the default + multiprocessingMethod = spawn + +Parallel Tasks +-------------- + +By default, MPAS-Analysis runs one task at a time, displaying any logging +output directly to the screen, rather than storing it in log files. However, +the analysis will run much more quickly if parallel processes are used. For +example, to run 12 tasks in parallel, simply set:: + + parallelTaskCount = 12 + +MPAS-Analysis currently supports parallelism through spawning processes, rather +than with MPI, meaning that MPAS-Analysis should be run on a single node of a +multi-node machine such as a cluster or supercomputer. Given that some tasks +themselves spawn multiple threads and that some tasks are memory intensive, it +may not be desirable to launch one task per core on a node with limited memory. + +Because MPAS-Analysis does not use MPI parallelism, it can typically be run on +the login nodes of supercomputing facilities. Check with the policies of your +center to see if this is permitted and make sure not to run with a large number +of parallel tasks so as to overwhelm the shared resource. 
+ +Parallelism in NCO +------------------ + +The ``ncclimo`` command from the `NetCDF Operators (NCO) package`_ is used +internally in MPAS-Analysis. This command supports three options for +parallelism: ``serial``, ``bck`` or ``mpi``. If set to ``serial``, the +default, any MPAS-Analysis tasks that use ``ncclimo`` will compute +climatologies one month and then one season at a time. If ``bck`` mode is +used, ``ncclimoThreads`` threads are spawned (default is 12, one for each +month), and then separate threads are used to compute each season. Given that +computing climatologies takes up a significant portion of the runtime in +MPAS-Analysis, the speed-up of nearly a factor of ``ncclimoThreads`` in these +computations can be quite noticeable. For very big data sets, it may be +necessary to run ``ncclimo`` either with fewer threads (reducing +``ncclimoThreads``, noting that it must be a factor of 12) or on multiple nodes +to prevent running out of memory. To run an MPI job, spawn a job with between +2 and 12 nodes, and set ``ncclimoParallelMode = mpi`` to run the 12 ``ncclimo`` +threads on multiple nodes. + +Again, when running MPAS-Analysis on login nodes of supercomputing facilities, +it is important to be aware of the policies regarding using shared resources. +On login nodes, ``bck`` may only be appropriate with ``ncclimoThreads`` set to a +small number and ``mpi`` mode may not work at all. + +Parallel Mapping File Creation +------------------------------ + +If mapping files from the MPAS mesh to the comparison grids aren't already +available in the diagnostics directory, they will be created before any other +MPAS-Analysis tasks are run. If you are running MPAS-Analysis out of +E3SM-Unified on a compute node, on many systems (see below), ESMF has been +built with the system version of MPI and you must run mapping-file generation +with ``srun``. If you are running with ``parallelTaskCount > 1``, the mapping +files will be generated in parallel. + +.. 
code-block:: cfg + + mapParallelExec = srun + +Similarly, some systems (Anvil and Chrysalis) require a parallel executable +for calls to ``ncremap`` from E3SM-Unified on compute nodes: + +.. code-block:: cfg + + ncremapParallelExec = srun + +E3SM supported machines with system MPI support in E3SM-Unified 1.8.1: + +* Anvil + +* Chicoma + +* Chrysalis + +* Compy + +* Cori-Haswell + +* Perlmutter + +These machines do **not** have MPI support in E3SM-Unified: + +* Andes + +* Acme1 + +In the very near future, we hope to add a capability to MPAS-Analysis so that +it will automatically recognize which machine it is on (or you can specify if +need be), allowing these and other config options to be set automatically. + +Parallel Mask Creation +---------------------- + +Tasks that involve :ref:`config_region_groups` can generate the masks for each +region in the group on the fly. This is done with the mask generation +command-line tools form MPAS-Tools (see +`Mask Creation with Python Multiprocessing `_), +which support 3 modes of parallelism: "spawn", "fork" and "forkserver". For +technical details on these modes, see +`Contexts and start methods `_. +We have found that "spawn" seems to be the most reliable option on Anvil under +python 3.7 and 3.8. Any of these methods works well under python 3.8 but only +"spawn" was reliable under python 3.7. Therefore, we use "spawn" as the +default. + +As we gain more experience with this setting, we may update config files for +specific machines to have different defaults. + + + +.. _`NetCDF Operators (NCO) package`: http://nco.sourceforge.net/nco.html diff --git a/1.11.0rc1/_sources/users_guide/config/html.rst.txt b/1.11.0rc1/_sources/users_guide/config/html.rst.txt new file mode 100644 index 000000000..17e7d68df --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/html.rst.txt @@ -0,0 +1,16 @@ +.. 
_config_html: + +HTML +==== + +The ``[html]`` section simply specifies whether or not a webpage should be +generated for displaying the plots produced by the analysis:: + + [html] + ## options related to generating a webpage to display the analysis + + # generate the webpage? + generate = True + +The webpage is produced in the directory specified by ``htmlSubdirectory`` +in the ``[output]`` section, see :ref:`config_output`. diff --git a/1.11.0rc1/_sources/users_guide/config/index.rst.txt b/1.11.0rc1/_sources/users_guide/config/index.rst.txt new file mode 100644 index 000000000..48a3a95f6 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/index.rst.txt @@ -0,0 +1,27 @@ +.. |n~| unicode:: U+00F1 + :trim: + +.. _config_index: + +Index +===== + +The ``[index]`` section of a configuration file contains options used to +determine the start and end years of climate indices (such as El Ni |n~| o +3.4):: + + [index] + ## options related to producing nino index. + + # start and end years for El Nino 3.4 analysis. Out-of-bounds values will lead + # to an error. + startYear = 1 + endYear = 20 + +Start and End Year +------------------ + +A custom config file should specify a start and end year for time axis. +Out of range year will produce an error. + + diff --git a/1.11.0rc1/_sources/users_guide/config/input.rst.txt b/1.11.0rc1/_sources/users_guide/config/input.rst.txt new file mode 100644 index 000000000..413d4585b --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/input.rst.txt @@ -0,0 +1,170 @@ +.. _config_input: + +Input +===== + +The ``[input]`` section of a configuration file contains options used to +specify the location of the "main" simulation and some settings for how +these data will be read in:: + + [input] + ## options related to reading in the results to be analyzed + + # directory containing model results + baseDirectory = /dir/for/model/output + + # Note: an absolute path can be supplied for any of these subdirectories. 
+ # A relative path is assumed to be relative to baseDirectory. + # By default, results are assumed to be directly in baseDirectory, + # i.e. /./ + + # subdirectory containing restart files + runSubdirectory = . + # subdirectory for ocean history files + oceanHistorySubdirectory = . + # subdirectory for sea ice history files + seaIceHistorySubdirectory = . + + # names of namelist and streams files, either a path relative to baseDirectory + # or an absolute path. + oceanNamelistFileName = mpaso_in + oceanStreamsFileName = streams.ocean + seaIceNamelistFileName = mpassi_in + seaIceStreamsFileName = streams.seaice + + # name of the ocean and sea-ice mesh (e.g. EC30to60E2r2, WC14to60E2r3, + # ECwISC30to60E2r1, SOwISC12to60E2r4, oQU240, etc.) + mpasMeshName = mesh + + # Large datasets can encounter a memory error. Specification of a maximum + # chunk size `maxChunkSize` can be helpful to prevent the memory error. The + # current maximum chunk size assumes approximately 64GB of ram and large files + # with a single time slice. + maxChunkSize = 10000 + + + # Whether missing input data should produce an error. If not, the user gets + # a warning and the time bounds are adjusted to the beginning and end of the + # available data + errorOnMissing = False + +Input Directories +----------------- + +The input directories are specified through a base directory and optionally +separate subdirectories for the run (containing namelist, streams and one +or more restart files for each MPAS component) and for each component's +simulation output ("history" files). You will always need to set +``baseDirectory`` in a custom config file. + +By default, all files are assumed to be located in the base directory. +However, E3SM supports short-term archiving of component output to separate +directories. 
If this feature was enabled for the E3SM run to be analyzed, the +configuration should be:: + + baseDirectory = /dir/for/model/output + runSubdirectory = run + oceanHistorySubdirectory = archive/ocn/hist + seaIceHistorySubdirectory = archive/ice/hist + +where ``/dir/for/model/output`` is replaced with the directory containing +the ``run`` and ``archive`` directories. + +Namelist Files +-------------- + +MPAS components are configured with a namelist file containing a very large +number of parameters and other configuration settings. Part of the strength +of MPAS-Analysis is that it is aware of these namelists and can automatically +disable analysis tasks that are not supported under a given configuration. +By default, the namelist files for ocean and sea ice components are:: + + oceanNamelistFileName = mpaso_in + seaIceNamelistFileName = mpassi_in + +For older E3SM (v1 alpha and beta) runs, a different naming convention was used +and these options will need to be updated to:: + + oceanNamelistFileName = mpas-o_in + seaIceNamelistFileName = mpas-cice_in + +The location of the namelist files is relative to the baseDirectory, so if +they are located within a run directory inside the base directory, they should +instead be specified as:: + + oceanNamelistFileName = run/mpaso_in + seaIceNamelistFileName = run/mpassi_in + +Streams Files +------------- + +Streams files are used to configure input and output from MPAS components. +MPAS-Analysis can parse these files to determine the locations of output files +(using the ``*HistorySubdirectory`` to find them if they have been moved by +short-term archiving). 
Similarly to namelist files, there are default names:: + + oceanStreamsFileName = streams.ocean + seaIceStreamsFileName = streams.seaice + +alterations appropriate for E3SM v1 alpha and beta runs:: + + + oceanStreamsFileName = streams.ocean + seaIceStreamsFileName = streams.cice + +and the addition of the ``run/`` subdirectory if analyzing a run that used +short-term archiving:: + + oceanStreamsFileName = run/streams.ocean + seaIceStreamsFileName = run/streams.seaice + +Mesh Name +--------- + +The MPAS-Ocean and MPAS-Seaice run on the same mesh. There are a number of +standard E3SM ocean/sea ice meshes at various resolutions. The meshes +currently supported by the public release of MPAS-Analysis include: + + * ``oEC60to30v3``: An Eddy-Closure (EC) mesh with 30-km resolution at the + poles and equator and 60-km resolution at mid latitudes, + * ``oRRS30to10v3``: A Rossby-Radius-Scaled (RRS) mesh with 10-km resolution + at the poles and 30-km resolution at the equator, + * ``oRRS18to6``: An RRS mesh with 6-km resolution at the poles and 18-km + resolution at the equator. + +Mapping files (see :ref:`config_mapping_files` below) and region mask files +(see :ref:`config_region_groups`) are provided from the +`E3SM public data repository`_ for these meshes. For assistance with other +mesh resolutions, please contact the MPAS-Analysis developers. + +Xarray and Dask +--------------- + +MPAS-Analysis makes extensive use of the `xarray package`_, which uses the +`dask package`_ internally to perform operations that are too large to fit +in memory. 
While most tasks in MPAS-Analysis have moved away from opening +multi-file data sets using xarray in favor of concatenating these data sets +together using NCO tools, there are some legacy options that users can modify +if they experience errors related to dask:: + + maxChunkSize = 10000 + +If an out of memory error occurs, it may first be worth reducing the number +of parallel tasks running (see :ref:`config_execute`) but if the error is +clearly related to dask (which might be the case, for example, if the error +occurs in the ``streamfunctionMOC`` task), you may wish to reduce the +``maxChunkSize``. This will make tasks using dask slower but will reduce their +memory usage. + +Errors on Missing Data +---------------------- + +If ``errorOnMissing = False``, the time ranges (``startYear`` and ``endYear``) +in ``climatology``, ``timeSeries``, and ``index`` will be clipped to the range +of the available data. If this option is set to ``True``, an error will be +produced. A value of ``end`` can be used for ``endYear`` to indicate that the +full range of the available data should be used. + +.. _`E3SM public data repository`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/ +.. _`xarray package`: https://xarray.pydata.org/en/stable/ +.. _`dask package`: https://dask.pydata.org/en/latest/ diff --git a/1.11.0rc1/_sources/users_guide/config/moving_average.rst.txt b/1.11.0rc1/_sources/users_guide/config/moving_average.rst.txt new file mode 100644 index 000000000..c8684aade --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/moving_average.rst.txt @@ -0,0 +1,14 @@ +.. _config_moving_average: + +Moving Average +============== + +By default, some time series have a 12-month moving average while others do +not include a moving average (``movingAverageMonths = 1``). To perform +a moving average (e.g. over 12 months), set:: + + movingAverageMonths = 12 + +This can be useful for taking out the seasonal cycle to better examine annual +mean trends. 
+ diff --git a/1.11.0rc1/_sources/users_guide/config/observations.rst.txt b/1.11.0rc1/_sources/users_guide/config/observations.rst.txt new file mode 100644 index 000000000..5d1a0c731 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/observations.rst.txt @@ -0,0 +1,102 @@ +.. _config_observations: + +Ocean, Sea Ice and Iceberg Observations +======================================= + +The ``[oceanObservations]``, ``[seaIceObservations]`` and +``[icebergObservations]`` sections of a configuration file contain options used +to point to the observations files and folders:: + + [oceanObservations] + ## options related to ocean observations with which the results will be compared + + # directory where ocean observations are stored + obsSubdirectory = observations/Ocean + sstSubdirectory = SST + sssSubdirectory = SSS + mldSubdirectory = MLD + ninoSubdirectory = Nino + mhtSubdirectory = MHT + meltSubdirectory = Melt + soseSubdirectory = SOSE + sshSubdirectory = SSH + argoSubdirectory = ARGO + schmidtkoSubdirectory = Schmidtko + + # interpolation order for observations. Likely values are + # 'bilinear', 'neareststod' (nearest neighbor) or 'conserve' + interpolationMethod = bilinear + + # The directories where observation climatologies will be stored if they need + # to be computed. If a relative path is supplied, it is relative to the output + # base directory. If an absolute path is supplied, this should point to + # cached climatology files on the desired comparison grid. If cached remapped + # files are supplied, there is no need to provide cached files before + # remapping. + climatologySubdirectory = clim/obs + remappedClimSubdirectory = clim/obs/remapped + + ... + + [seaIceObservations] + ## options related to sea ice observations with which the results will be + ## compared + + # directory where sea ice observations are stored + obsSubdirectory = observations/SeaIce + + # interpolation order for observations. 
Likely values are + # 'bilinear', 'neareststod' (nearest neighbor) or 'conserve' + interpolationMethod = bilinear + + # The directories where observation climatologies will be stored if they need + # to be computed. If a relative path is supplied, it is relative to the output + # base directory. If an absolute path is supplied, this should point to + # cached climatology files on the desired comparison grid. If cached remapped + # files are supplied, there is no need to provide cached files before + # remapping. + climatologySubdirectory = clim/obs + remappedClimSubdirectory = clim/obs/remapped + + ... + + [icebergObservations] + ## options related to iceberg observations with which the results will be + ## compared + + # directory where sea ice observations are stored + obsSubdirectory = observations/Icebergs + concentrationAltibergSH = Altiberg/Altiberg_1991-2017_20180308.nc + + +Files and Directories +--------------------- + +The input directories are specified through a "base" subdirectory +``obsSubdirectory`` and either subdirectories or file names for each set of +observations. ``obsSubdirectory`` is relative to ``baseDirectory`` in the +``diagnostics`` section, while all file paths and other subdirectories are +relative to ``obsSubdirectory``. You will typically not need to change any +of these paths, since they are structured in a standard way following the +`E3SM public data repository`_ (see the :ref:`quick_start` for more details). + +The directories for storing cached datasets before and after remapping +(specified in ``climatologySubdirectory`` and ``remappedClimSubdirectory``) +may be given any relative or absolute path, but should typically be left as the +default values. + +Remapping +--------- + +Observational climatologies are remapped from the native grid (typically +global latitude/longitude or Antarctic stereographic) to common +comparison grids. 
The remapping can be performed with any of three methods: +``bilinear``, ``neareststod`` (nearest neighbor) or ``conserve``. Mapping +files are created with the `ESMF_RegridWeightGen tool`_. The default method +is ``bilinear`` and these are the mapping files distributed from the +`E3SM public data repository`_. The ``conseve`` method is know to be *much* +slower to compute and should only be used if it is necessary (e.g. because +remapped data will be checked for conservation). + +.. _`ESMF_RegridWeightGen tool`: http://www.earthsystemmodeling.org/esmf_releases/public/ESMF_7_1_0r/ESMF_refdoc/node3.html#SECTION03020000000000000000 +.. _`E3SM public data repository`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/ diff --git a/1.11.0rc1/_sources/users_guide/config/output.rst.txt b/1.11.0rc1/_sources/users_guide/config/output.rst.txt new file mode 100644 index 000000000..c7710c3fd --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/output.rst.txt @@ -0,0 +1,162 @@ +.. |n~| unicode:: U+00F1 + :trim: + +.. _config_output: + +Output +====== + +The ``[output]`` section of a configuration file contains options used to +specify the location of the "main" simulation:: + + [output] + ## options related to writing out plots, intermediate cached data sets, logs, + ## etc. + + # directory where analysis should be written + # NOTE: This directory path must be specific to each test case. + baseDirectory = /dir/for/analysis/output + + # subdirectories within baseDirectory for analysis output + plotsSubdirectory = plots + logsSubdirectory = logs + mpasClimatologySubdirectory = clim/mpas + mappingSubdirectory = mapping + timeSeriesSubdirectory = timeseries + # provide an absolute path to put HTML in an alternative location (e.g. a web + # portal) + htmlSubdirectory = html + + # a list of analyses to generate. Valid names can be seen by running: + # mpas_analysis --list + # This command also lists tags for each analysis. 
+ # Shortcuts exist to generate (or not generate) several types of analysis. + # These include: + # 'all' -- all analyses will be run + # 'all_publicObs' -- all analyses for which observations are available on the + # public server (the default) + # 'all_' -- all analysis with a particular tag will be run + # 'all_' -- all analyses from a given component (either 'ocean' + # or 'seaIce') will be run + # 'only_', 'only_' -- all analysis from this component or + # with this tag will be run, and all + # analysis for other components or + # without the tag will be skipped + # 'no_' -- skip the given task + # 'no_', 'no_' -- in analogy to 'all_*', skip all analysis + # tasks from the given component or with + # the given tag. Do + # mpas_analysis --list + # to list all task names and their tags + # an equivalent syntax can be used on the command line to override this + # option: + # mpas_analysis analysis.cfg --generate \ + # only_ocean,no_timeSeries,timeSeriesSST + generate = ['all_publicObs'] + +Output Directories +------------------ + +The output directories are specified through a base directory and a set of +subdirectories within that base. You will always need to set ``baseDirectory`` +in a custom configuration file. + +The subdirectories of output can be renamed if desired or an absolute path +can be specified if output to another location is desired. This is +particularly relevant to ``htmlSubdirectory``, which may be pointed to +a public space such as a web portal for display on the web. Note: +MPAS-Analysis does not change the HTML output to be world readable so you +will need to do this manually after a run has completed (or inside of a job +script) to see the results on a public web page. + +.. _config_generate: + +Generate Option +--------------- + +The ``generate`` option is used to control which tasks run. The simplest +choice is:: + + generate = ['all'] + +in which case MPAS-Analysis will attempt to run all analysis tasks. 
In this +mode, some tasks may fail with a warning printed to the screen during their +:py:meth:`~mpas_analysis.shared.AnalysisTask.setup_and_check` +phase if the simulation was not configured to support that task. All tasks +that pass the +:py:meth:`~mpas_analysis.shared.AnalysisTask.setup_and_check` +phase will be run. + +The next simplest option is to specify a single task name:: + + generate = ['climatologyMapSST'] + +or a list of task names:: + + generate = ['climatologyMapSST', 'climatologyMapSSS'] + +in which case only the listed tasks are run. + +A third way to determine which tasks to generate is to make use of "tags" for +each task. To see what tags each task has, run:: + + mpas_analysis --list + +This will show all available tasks together with the component they belong to +and the tags for each. To run only those analysis tasks with a particular tag, +set, for example ``climatology``:: + + generate = ['all_climatology'] + +This will generate only those tasks that make use of climatologies. + +A useful tag for the public release of MPAS-Analysis is the ``publicObs`` tag, +which is found on all tasks that will run successfully if you have downloaded +the observations from the `E3SM public data repository`_. Some MPAS-Analysis +tasks make use of data sets that are only available after registering with a +data portal or by contacting the authors of that data set directly, so that +these data have not been included in the data repository. The default is to +generate only the tasks with observations in in the repository:: + + generate = ['all_publicObs'] + +The names of components (``ocean`` or ``seaIce``) can also be used as tags. + +There are also ways to specify that a given tag should not be present +(``no_``) or that only analysis with the given tag should be run +(``only_``). These options are useful when combined in a series with +other generate options. 
For example, to generate all tasks with publicly +available observation except those for the ``seaIce`` component, you could +specify:: + + generate = ['all_publicObs', 'no_seaIce'] + +If an appropriate reference year isn't available for computing anomalies, +include 'no_anomaly' in the generate list to skip all tasks that require the +reference year for computing anomalies:: + + generate = ['all_publicObs', 'no_anomaly'] + +To specify that you wanted to plot climatologies from the ocean component, you +could use:: + + generate = ['all_publicObs', 'only_climatologies', 'only_ocean'] + +If you wanted to plot all tasks with publicly available data sets that used +either climatologies or time series, you could use:: + + generate = ['all_climatologies', 'all_timeSeries', 'only_publicObs'] + +Finally, we note that the ``generate`` option in the configuration file can +be overridden by specifying the ``--generate`` option on the command line:: + + mpas_analysis --generate=all_publicObs,no_index,no_climatologyMapSST \ + my_run.cfg + +This example would override whatever ``generate`` option was specified in +``my_run.cfg`` with a directive to generate only tasks that support the +publicly available observations, skipping those using climate indices (e.g. +El Ni |n~| o 3.4) and also skipping ``climatologyMapSST``. + + +.. _`E3SM public data repository`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/ \ No newline at end of file diff --git a/1.11.0rc1/_sources/users_guide/config/plot.rst.txt b/1.11.0rc1/_sources/users_guide/config/plot.rst.txt new file mode 100644 index 000000000..5dcafee64 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/plot.rst.txt @@ -0,0 +1,50 @@ +.. 
_config_plot: + +Plot +==== + +The ``[plot]`` section of a configuration file contains options that define +default properties for all plots produce by MPAS-Analysis:: + + [plot] + ## options related to plotting that are the defaults across all analysis + ## modules + + # font size on axes + axisFontSize = 16 + + # the default font size for axis ticks, etc. + defaultFontSize = 10 + + # title and axis font properties for single-panel plots + titleFontSize = 16 + titleFontColor = black + titleFontWeight = normal + axisFontSize = 12 + + # font size for cartopy grid labels + cartopyGridFontSize = 12 + + # the dots per inch of output figures + dpi = 200 + + # Write out PDFs in addition to PNGs? + pdf = False + +The options for title font size, color and weight as well as axis font size +specify properties of these parts of each plot. The default font size covers +the axis tick marks, color-bar ticks and axis labels, contour labels, etc. +The cartopy grid font size are for the labels (either interior or along the +plot boundary) for cartopy labels. Sizes are given in points. + +The value of ``dpi`` specifies the resolution of the images written out by +MPAS-Analysis (in dots per inch). The default produces large images that +are appropriate for zooming in substantially and may be sufficient for +publication. They are large (but not entirely unmanageable) for the web. + +You can set ``pdf = True`` to write out PDF files in the plots subdirectory +along with PNG files. The PDFs are not copied to the HTML folder. + +Many types of individual plots, including climatologies, transects, Hovmoller +plots, and most time series, also support setting the ``defaultFontSize`` and +``titleFontSize`` config options just of that type of plot. 
diff --git a/1.11.0rc1/_sources/users_guide/config/preprocessed.rst.txt b/1.11.0rc1/_sources/users_guide/config/preprocessed.rst.txt new file mode 100644 index 000000000..41fa989b8 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/preprocessed.rst.txt @@ -0,0 +1,28 @@ +.. _config_preprocessed: + +Preprocessed Reference Runs +=========================== + +The ``[oceanPreprocessedReference]`` and ``[seaIcePreprocessedReference]`` +sections of a configuration file contain options used to point to preprocessed +data from E3SM v0 reference runs:: + + [oceanPreprocessedReference] + ## options related to preprocessed ocean reference run with which the results + ## will be compared (e.g. a POP, CESM or ACME v0 run) + + # directory where ocean reference simulation results are stored + baseDirectory = /dir/to/ocean/reference + + ... + + [seaIcePreprocessedReference] + ## options related to preprocessed sea ice reference run with which the results + ## will be compared (e.g. a CICE, CESM or ACME v0 run) + + # directory where ocean reference simulation results are stored + baseDirectory = /dir/to/seaice/reference + +If such a preprocessed reference run is available, the name of the reference +run should be specified (see :ref:`config_runs`) and the base directories +should be specified here. diff --git a/1.11.0rc1/_sources/users_guide/config/regions.rst.txt b/1.11.0rc1/_sources/users_guide/config/regions.rst.txt new file mode 100644 index 000000000..fb83a7c7a --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/regions.rst.txt @@ -0,0 +1,67 @@ +.. 
_config_regions: + +Regions +======= + +Region Config Section +--------------------- + +The ``[regions]`` section of a configuration file contains options related +to regions either defined in MPAS components' online analysis or determined +within MPAS-Analysis using region mask files:: + + [regions] + ## options related to ocean regions used in several analysis modules + + # list of region names (needs to be in the same order as region indices in + # time-series stats) + regions = ['arctic', 'equatorial', 'so', 'nino3', 'nino4', 'nino3.4', 'global'] + # list of plot titles (needs to be in the same order as region indices in + # time-series stats) + plotTitles = ['Arctic', 'Equatorial (15S-15N)', 'Southern Ocean', 'Nino 3', + 'Nino 4', 'Nino 3.4', 'Global Ocean'] + + +MPAS-Ocean currently has hard coded into its online analysis 7 regions in a +fixed order, as given in the ``regions`` option. This should not be altered +unless corresponding changes to the MPAS-Ocean code have been made. + +The corresponding ``plotTitles`` can be modified as desired to update how +these regions are named in plot titles and in gallery names on the resulting +web page. + +.. _config_region_groups: + +Region Groups +------------- + +Currently, eight analysis tasks (:ref:`task_climatologyMapAntarcticMelt`, +:ref:`task_hovmollerOceanRegions`, :ref:`task_oceanRegionalProfiles`, +:ref:`task_regionalTSDiagrams`, :ref:`task_streamfunctionMOC`, +:ref:`task_oceanHistogram`, :ref:`task_timeSeriesAntarcticMelt`, and +:ref:`task_timeSeriesOceanRegions`) use masks that define regions in an MPAS +mesh as part of their analysis. Most of these region group are defined in +:py:func:`geometric_features.aggregation.get_aggregator_by_name()`. +Several tasks (:ref:`task_hovmollerOceanRegions`, :ref:`task_oceanHistogram`, +:ref:`task_oceanRegionalProfiles`, :ref:`task_regionalTSDiagrams`, and +:ref:`task_timeSeriesOceanRegions`) can use any of the defined region groups. 
+Currently, available region groups are: ``Arctic Ocean Regions``, ``Antarctic Regions``, +``Ocean Basins``, ``Ice Shelves``, and ``Ocean Subbasins``. + +The option ``regionMaskSubdirectory`` in the ``[diagnostics]`` section specifies +the path to cached mask files for these region groups, typically +``diagnostics/mpas_analysis/region_masks``. Region masks for common MPAS Ocean +and Seaice meshes are supplied as part of the data from the +`E3SM public data repository `_ +(see the :ref:`quick_start`). + +If masks for a given grid don't already exist in the cached mask location, +they will be generated automatically from the aggregation function from the +``geometric_features`` package, see +`Aggregate Existing Features `_. +The mask data will be stored in a geojson file with the region group's prefix +and date stamp (e.g. ``iceShelves20200621.geojson``). Then, masks on the MPAS +Ocean and Seaice mesh will be computed, a process that can be time consuming for +large meshes. To generate the masks in advance (using threading to speed up the +process), see the example utility script ``utility_scripts/make_region_mask.py``. + diff --git a/1.11.0rc1/_sources/users_guide/config/runs.rst.txt b/1.11.0rc1/_sources/users_guide/config/runs.rst.txt new file mode 100644 index 000000000..6ffd07e42 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/runs.rst.txt @@ -0,0 +1,106 @@ +.. _config_runs: + +Runs +==== + +The ``[runs]`` section of a configuration file contains options used to name +the "main" run, a preprocessed E3SM v0 run (if any) and to point to analysis +of a control E3SM v1 or standalone MPAS run (if any):: + + [runs] + ## options related to the run to be analyzed and control runs to be + ## compared against + + # mainRunName is a name that identifies the simulation being analyzed. + mainRunName = runName + + # preprocessedReferenceRunName is the name of a control run that has been + # preprocessed to compare against (or None to turn off comparison). 
Reference + # runs of this type would have preprocessed results because they were not + # performed with MPAS components (so they cannot be easily ingested by + # MPAS-Analysis) + preprocessedReferenceRunName = None + + # config file for a control run to which this run will be compared. The + # analysis should have already been run to completion once with this config + # file, so that the relevant MPAS climatologies already exist and have been + # remapped to the comparison grid. Leave this option commented out if no + # control run is desired. + # controlRunConfigFile = /path/to/config/file + + # config file for a main run on which the analysis was already run to + # completion. The relevant MPAS climatologies already exist and have been + # remapped to the comparison grid and time series have been extracted. + # Leave this option commented out if the analysis for the main run should be + # performed. + # mainRunConfigFile = /path/to/config/file + +The name of the "main" run (as opposed to a control run, if any) can be any +identifier that will be used in figure titles, legends, web pages and file +names to identify this run. It does not need to be the name of the simulation +as specified in E3SM:: + + mainRunName = runName + +A few of the time series plots in MPAS-Analysis can be compared against a +preprocessed control run from E3SM v0 (which was similar to the CESM, the +Community Earth System Model). If these data are available and the comparison +to these runs is desired, the name of the control run should be specified +here and the paths to the data set should be specified (see +:ref:`config_preprocessed`). If not this name should be left as ``None``:: + + preprocessedReferenceRunName = None + +MPAS-Analysis supports comparison between the "main" run and a control run +from either E3SM or a standalone MPAS component. 
By default, this feature is +disabled by commenting out the configuration option:: + + # controlRunConfigFile = /path/to/config/file + +To specify a control run, first run MPAS analysis on the control run. Be +sure that: + + * the start and end year for climatologies, time series and climate indices + is covered by the simulation output. + * most configuration options for the control run are the same as for the + main run. The exceptions are contents of the ``[run]``, ``[input]`` and + ``[output]`` sections. The range of years for climatologies can be + different, but this is discouraged. + +Once the analysis has been run on the control run, a comparison is made by +uncommenting ``controlRunConfigFile`` and specifying the path to the +configuration file used in this analysis, e.g.:: + + controlRunConfigFile = control_run.cfg + +If analysis has already been run on the "main" run in a "main vs ref" +comparison, some time can be saved in performing the comparison +(particularly for higher resolution output, for which a lot of the +computation time goes into computing climatologies and extracting time +series). By default, this feature is disabled by commenting out the +configuration option:: + + # mainRunConfigFile = /path/to/config/file + +To specify a main run, first run MPAS analysis on the main run. The +"comparison" config file should be nearly identical to the "main" config +file except that: + + * The output ``baseDirectory`` should be different. + * the start and end year for climatologies, time series and climate indices + must be the actual range used if output data was not available to span + the requested range in the "main" run. + +All configuration information for the "main" run in the "main vs ref" +comparison is taken from the "comparison" config file, not the "main" config. 
+Only the output directories and subdirectories for climatologies, time series, +mapping files and mask files (if these latter 2 were generated on the fly) +will be taken from the "main" config file. Symbolic links will be made to +these directories so the comparison analysis run can reuse this data. +Specify the path to the configuration file use in "main" analysis by +uncommenting the option and providing a relative or absolute path to the +config file:: + + mainRunConfigFile = main_run.cfg + + diff --git a/1.11.0rc1/_sources/users_guide/config/seasons.rst.txt b/1.11.0rc1/_sources/users_guide/config/seasons.rst.txt new file mode 100644 index 000000000..b0fef2599 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/seasons.rst.txt @@ -0,0 +1,21 @@ +.. _config_seasons: + +Seasons +======= + +Nearly all analysis tasks that produce climatology plots include a +configuration option for specifying a list of seasons to plot:: + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, + # Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['JFM', 'JAS', 'ANN'] + +Valid seasons include the three-letter abbreviations of each month (e.g. +``Jan``), several 2- and 3-month seasons specified by consecutive first letters +of each month name (``JFM``, ``AMJ``, ``JAS``, ``OND``, ``ON``, ``FM``, ``DJF`` +and ``JJA``), and ``ANN`` for all 12 months. + +If seasons other than these are needed, please post an issue on `GitHub`_ or +contact the developers. + +.. _`GitHub`: https://github.com/MPAS-Dev/MPAS-Analysis/issues \ No newline at end of file diff --git a/1.11.0rc1/_sources/users_guide/config/timeSeries.rst.txt b/1.11.0rc1/_sources/users_guide/config/timeSeries.rst.txt new file mode 100644 index 000000000..425ee9abb --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/timeSeries.rst.txt @@ -0,0 +1,44 @@ +.. 
_config_time_series: + +Time Series +=========== + +The ``[timeSeries]`` section of a configuration file contains options used to +determine the start and end years of time series plots and the reference years +for anomalies:: + + [timeSeries] + ## options related to producing time series plots, often to compare against + ## observations and previous runs + + # the year from which to compute anomalies if not the start year of the + # simulation. This might be useful if a long spin-up cycle is performed and + # only the anomaly over a later span of years is of interest. + # anomalyRefYear = 249 + + # start and end years for timeseries analysis. Out-of-bounds values will lead + # to an error. + startYear = 1 + endYear = 20 + +Start and End Year +------------------ + +A custom config file should specify a start and end year for time series. +Out-of-range years will produce an error. + + +Anomaly Reference Year +---------------------- + +Anomalies between a moving average of a time series and the average over a +reference year are used in several analysis tasks. By default, the reference +year is not specified in the configuration file and it is taken to be the start of +the simulation (determined from the contents of a restart file). Under certain +circumstances (e.g. repetition of forcing data for several cycles, as in +the `Common Ocean Reference Experiments, CORE`_), it may be desirable to +specify a different year to use for computing anomalies:: + + anomalyRefYear = 249 + +.. _`Common Ocean Reference Experiments, CORE`: http://data1.gfdl.noaa.gov/nomads/forms/mom4/CORE.html diff --git a/1.11.0rc1/_sources/users_guide/config/time_axis_ticks.rst.txt b/1.11.0rc1/_sources/users_guide/config/time_axis_ticks.rst.txt new file mode 100644 index 000000000..8ccb9ba39 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/time_axis_ticks.rst.txt @@ -0,0 +1,16 @@ +.. 
_config_time_axis_ticks: + +Time-Axis Tick Marks +==================== + +By default tick marks on the time (x) axis are determined automatically by +``matplotlib``. You can explicitly control them by setting a first year and +a stride (skip) in years. For example:: + + firstYearXTicks = 2 + yearStrideXTicks = 2 + +will place the first tick mark at simulation year 2 and will give a tick mark +every 2 years. + + diff --git a/1.11.0rc1/_sources/users_guide/config/transects.rst.txt b/1.11.0rc1/_sources/users_guide/config/transects.rst.txt new file mode 100644 index 000000000..d466bfc71 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/config/transects.rst.txt @@ -0,0 +1,70 @@ +.. _config_transects: + +Output Grids for Transects +========================== + +Several tasks for producing transects use a common methodology for producing +the comparison grid for each transect:: + + # The approximate horizontal resolution (in km) of each transect. Latitude/ + # longitude between observation points will be subsampled at this interval. + # Use 'obs' to indicate no subsampling. Use 'mpas' to indicate plotting of + # model data on the native grid, in which case comparison with observations + # will take place on the observation grid. + #horizontalResolution = mpas + #horizontalResolution = obs + horizontalResolution = 5 + + # The name of the vertical comparison grid. Valid values are 'mpas' for the + # MPAS vertical grid, 'obs' to use the locations of observations or + # any other name if the vertical grid is defined by 'verticalComparisonGrid'. + # If horizontalResolution is 'mpas', model data (both main and control) will be + # plotted on the MPAS vertical grid, regardless of the comparison grid. + #verticalComparisonGridName = mpas + #verticalComparisonGridName = obs + verticalComparisonGridName = uniform_0_to_4000m_at_10m + + # The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas' or + # 'obs'. 
This should be a numpy array of (typically negative) elevations (in m). + verticalComparisonGrid = numpy.linspace(0, -4000, 401) + + # A range for the y axis (if any) + verticalBounds = [] + +The ``horizontalResolution`` of all transects can be ``obs``, ``mpas`` or a +number of kilometers. If ``obs``, model data are sampled at latitude and +longitude points corresponding to the observations. If the horizontal grid +is ``mpas``, then the native MPAS-Ocean mesh is used for both the horizontal and +vertical grids. If a number of kilometers is given, linear interpolation +between observation points is performed with approximately the requested +resolution. The distance between observation points is always divided into an +integer number of segments of equal length so the resolution may be slightly +above or below ``horizontalResolution``. + +The vertical grid is determined by two parameters, +``verticalComparisonGridName`` and ``verticalComparisonGrid``. If +``verticalComparisonGridName = mpas``, but ``horizontalResolution`` is not +``mpas``, the MPAS-Ocean vertical coordinate will be interpolated horizontally +from grid cell centers to the latitude and longitude of each point along the +transect, and the observations will be interpolated vertically to the resulting +grid. If ``verticalComparisonGridName = obs``, the vertical grid of the +observations is used instead. If ``verticalComparisonGridName`` is anything +else, it is taken to be the name of a user-defined vertical grid (best to make +it descriptive and unique, e.g. ``uniform_0_to_4000m_at_10m``) and +``verticalComparisonGrid`` should be assigned a valid array of positive-up +depth values (in the form of a python list or numpy array), e.g.:: + + verticalComparisonGrid = numpy.linspace(0, -4000, 401) + +produces points between 0 and -4000 m sampled every 10 m. + +``verticalBounds`` is a list of minimum and maximum limits for the vertical axis +of the transect. 
The default is an empty list, which means ``matplotlib`` +selects the axis limits to encompass the full range of the vertical grid. + +.. note:: + + Some types of transects (e.g. those produced with geojson files) do not have + a vertical grid associated with them (just horizontal latitude/longitude + points), meaning that ``verticalComparisonGridName = obs`` is not a valid + option for tasks with these transects. diff --git a/1.11.0rc1/_sources/users_guide/configuration.rst.txt b/1.11.0rc1/_sources/users_guide/configuration.rst.txt new file mode 100644 index 000000000..e602acc0c --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/configuration.rst.txt @@ -0,0 +1,40 @@ +Configuration +============= + +MPAS-Analysis is controlled through configuration files, which are customized +to point to different simulation results and to control how the output is +computed and displayed. Several groups of configuration options are found +across a number of analysis tasks. + +Examples of configuration files for various E3SM-supported machines can be +found in the ``configs/`` folder or can be browsed on `GitHub`_. The files +``example.cfg`` and ``example_e3sm.cfg`` provide a list of the default values +for the configuration options that are most commonly modified on unknown and +E3SM-supported machines, respectively. + +.. toctree:: + :maxdepth: 1 + + config/runs + config/execute + config/dask_threads + config/diagnostics + config/input + config/output + config/climatology + config/timeSeries + config/index + config/regions + config/plot + config/html + config/observations + config/preprocessed + config/colormaps + config/seasons + config/comparison_grids + config/moving_average + config/time_axis_ticks + config/transects + + +.. 
_`GitHub`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/develop/configs diff --git a/1.11.0rc1/_sources/users_guide/e3sm.rst.txt b/1.11.0rc1/_sources/users_guide/e3sm.rst.txt new file mode 100644 index 000000000..b66b60a55 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/e3sm.rst.txt @@ -0,0 +1,18 @@ +E3SM +---- + +The Energy Exascale Earth System Model (E3SM) Project is an ongoing, +state-of-the-science Earth system modeling, simulation, and prediction project +that optimizes the use of DOE laboratory resources to meet the science needs of +the nation and the mission needs of DOE. + +A full description of E3SM is available at: +https://e3sm.org/ + +Setting up E3SM runs +^^^^^^^^^^^^^^^^^^^^ + +All online analysis and output streams within MPAS components (MPAS-O and +MPAS-SeaIce) are configured to support MPAS-Analysis without any modifications +to namelists or streams files. + diff --git a/1.11.0rc1/_sources/users_guide/mpaso.rst.txt b/1.11.0rc1/_sources/users_guide/mpaso.rst.txt new file mode 100644 index 000000000..1308be8bb --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/mpaso.rst.txt @@ -0,0 +1,156 @@ +MPAS Ocean +---------- + +The Model for Prediction Across Scales Ocean (MPAS-O) is designed for the +simulation of the ocean system from time scales of months to millennia and +spatial scales from sub 1 km to global circulations. + +MPAS-O has demonstrated the ability to accurately reproduce mesoscale ocean +activity with a local mesh refinement strategy. + +In addition to facilitating the study of multiscale phenomena within the ocean +system, MPAS-O is intended for the study of anthropogenic climate change as +the ocean component of climate system models. 
+ + +Full documentaiton is available at: +https://mpas-dev.github.io/ocean/ocean.html + +Setting up Standalone MPAS-O Runs +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In order to support all ocean analysis tasks from MPAS-Analysis, certain +"analysis members", Fortran modules that perform analysis during the +simulation, need to be enabled. + +The following is a list of suggested values for namelist options, typically +found in ``namelist.ocean`` or ``mpaso_in`` (or ``mpas-o_in`` in older E3SM runs):: + + config_AM_surfaceAreaWeightedAverages_enable = .true. + config_AM_surfaceAreaWeightedAverages_compute_interval = '0000-00-00_01:00:00' + config_AM_layerVolumeWeightedAverage_enable = .true. + config_AM_layerVolumeWeightedAverage_compute_interval = '0000-00-00_01:00:00' + config_AM_meridionalHeatTransport_enable = .true. + config_AM_meridionalHeatTransport_compute_interval = '0000-00-00_01:00:00' + config_AM_mixedLayerDepths_enable = .true. + config_AM_timeSeriesStatsMonthly_enable = .true. + +Additionally, the duration of the run should be set to at least two years and +typically longer before most analysis is useful:: + + config_run_duration = '0002-00-00_00:00:00' + +Several streams must be defined in the streams file, typically +``streams.ocean``, (even if they will not be written out -- +``output_interval="none"``):: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +The ``filename_tempalate`` can be modified as desired (in most cases, these are +the defalult values from E3SM). For the ``timeSeriesStatsMonthlyOutput`` +stream, both the filename_interval and the output_interval must currently be +monthly (``"0000-01-00_00:00:00"``). + +Additional fields can be included in the ``timeSeriesStatsMonthlyOutput`` +streams. These are the minimum that allow the analysis to run successfully. 
diff --git a/1.11.0rc1/_sources/users_guide/mpasseaice.rst.txt b/1.11.0rc1/_sources/users_guide/mpasseaice.rst.txt new file mode 100644 index 000000000..bd10c2d0f --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/mpasseaice.rst.txt @@ -0,0 +1,63 @@ +MPAS-Seaice +----------- + +The Model for Prediction Across Scales Sea Ice (MPAS-Seaice) +is designed for the simulations of sea ice on unstructured grids supported by +the MPAS framework. The model has not yet been publicly released and does not +have public documentation. + +Setting up Standalone MPAS Sea Ice Runs +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In order to support all sea-ice analysis tasks from MPAS-Analysis, certain +"analysis members", Fortran modules that perform analysis during the +simulation, need to be enabled. + +The following is a list of suggested values for namelist options, typically +found in ``namelist.seaice`` or ``mpassi_in`` (or ``mpas-cice_in`` in +older E3SM runs):: + + config_AM_timeSeriesStatsMonthly_enable = .true. + +Additionally, the duration of the run should be set to at least two years and +typically longer before most analysis is useful:: + + config_run_duration = '0002-00-00_00:00:00' + +Several streams must be defined in the streams file, typically +``streams.seaice`` or ``streams.cice`` in older E3SM runs, (even if they will +not be written out -- ``output_interval="none"``):: + + + + + + + + + + + + +The ``filename_template`` can be modified as desired (these are the default +values from E3SM). For the ``timeSeriesStatsMonthlyOutput`` stream, both the +filename_interval and the output_interval must currently be monthly +(``"0000-01-00_00:00:00"``). + +Additional fields can be included in the ``timeSeriesStatsMonthlyOutput`` +streams. These are the minimum that allow the analysis to run successfully. 
diff --git a/1.11.0rc1/_sources/users_guide/obs/adusumilli_melt.rst.txt b/1.11.0rc1/_sources/users_guide/obs/adusumilli_melt.rst.txt new file mode 100644 index 000000000..292746433 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/adusumilli_melt.rst.txt @@ -0,0 +1,44 @@ +.. _adusumilli_melt: + +Antarctic melt rates and fluxes +=============================== + +Description +----------- +Melt rates and melt fluxes from Adusumilli et al. (2020) + +Source +------ +`Data from: Interannual variations in meltwater input to the Southern Ocean from Antarctic ice shelves`_ + +Release Policy +-------------- +Under copyright (US) + +Use: This work is available from the UC San Diego Library. This digital +copy of the work is intended to support research, teaching, and private +study. + +Constraint(s) on Use: This work is protected by the U.S. Copyright Law +(Title 17, U.S.C.). Use of this work beyond that allowed by "fair use" +or any license applied to this work requires written permission of the +copyright holder(s). Responsibility for obtaining permissions and any +use and distribution of this work rests exclusively with the user and +not the UC San Diego Library. Inquiries can be made to the UC San Diego +Library program having custody of the work. + +References +---------- +`Adusumilli et al. (2020)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapAntarcticMelt` +- :ref:`task_timeSeriesAntarcticMelt` + +.. _`Data from: Interannual variations in meltwater input to the Southern Ocean from Antarctic ice shelves`: https://doi.org/10.6075/J04Q7SHT +.. _`Adusumilli et al. (2020)`: https://doi.org/10.1038/s41561-020-0616-z +.. 
_`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/Melt/Adusumilli/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/altiberg.rst.txt b/1.11.0rc1/_sources/users_guide/obs/altiberg.rst.txt new file mode 100644 index 000000000..21a8cdf41 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/altiberg.rst.txt @@ -0,0 +1,31 @@ +.. _altiberg: + +Iceberg Concentration: Altiberg +=============================== + +Description +----------- +Iceberg probabilities from the Altiberg dataset for small icebergs (less than 3km in length) detected by altimeters using the high resolution waveforms. The database is also available for other several altimeter mission (ERS-1, ERS-2, Jason-1, Jason-2, CryoSat-2, Topex, Envisat, AltiKa). A merged product combining all the available altimeters is also provided, which is used in MPAS-Analysis. + +Source +------ +`Altiberg website`_ + +Release Policy +-------------- +Unknown, openly available on website. + +References +---------- +- `Tournade et al (2017)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapIcebergConcSH` + +.. _`Altiberg website`: http://cersat.ifremer.fr/user-community/news/item/473-altiberg-a-database-for-small-icebergs +.. _`Tournade et al (2017)`: ftp://ftp.ifremer.fr/ifremer/cersat/projects/altiberg/v2/documentation/ALTIBERG-rep_v2_1.pdf +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Icebergs//obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/aniceflux.rst.txt b/1.11.0rc1/_sources/users_guide/obs/aniceflux.rst.txt new file mode 100644 index 000000000..32e081c61 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/aniceflux.rst.txt @@ -0,0 +1,48 @@ +.. 
_aniceflux: + +Sea ice production and transport: Haumann et al 2016 +==================================================== + +Description +----------- +This data set provides estimates of annual freshwater fluxes related to sea-ice formation from ocean freezing +and snow-ice formation, sea-ice melting, lateral transport of sea ice in the Southern Ocean over the period +1982 to 2008. It is derived from a mass balance calculation of local sea-ice volume change and divergence +from satellite data and sea-ice reconstructions. The mass balance is calculated on a daily basis and fluxes are +then integrated over the entire year, where a year is defined from March to February of the next year (i.e. +from March 1982 to February 2009). This approach combines multiple products of sea-ice concentration +(Cavalieri & Parkinson, 2008; Comiso, 1986; Meier et al., 2013), sea-ice thickness (Kurtz & Markus, 2012; +Massonnet et al., 2013; Worby et al., 2008), and sea-ice drift (Fowler et al., 2013; Kwok 2005; Schwegmann +et al., 2011). For a detailed description of the method see Haumann et al. (2016). The data set is derived to +estimate large-scale (regional to basin-scale) fluxes on an annual basis. Our confidence is reduced on a grid +cell basis, such as for single coastal polynyas, where the method and underlying data induce large, unknown +uncertainties. + +Source +------ +`EnviDat`_ + +Release Policy +-------------- +This data set is free to use for any non-commercial purpose at the risk of the user' +and the authors do not take any liability on the use of the data set. The authors +assembled the data set carefully and assessed accuracy, errors, and uncertainties. +Please contact the authors if you find any issues. + +References +---------- +`Haumann et al (2016), data`_ +`Haumann et al (2016), paper`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapSeaIceProductionSH` +- :ref:`task_climatologyMapSeaIceMeltingSH` + +.. 
_`EnviDat`: https://www.envidat.ch/dataset/10-16904-8 +.. _`Haumann et al (2016), data`: https://doi.org/10.16904/8 +.. _`Haumann et al (2016), paper`: https://doi.org/10.1038/nature19101 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/SeaIce/Haumann/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/aquarius_sss.rst.txt b/1.11.0rc1/_sources/users_guide/obs/aquarius_sss.rst.txt new file mode 100644 index 000000000..0ce8d9c90 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/aquarius_sss.rst.txt @@ -0,0 +1,36 @@ +.. _aquarius_sss: + +SSS from NASA Aquarius satellite +================================ + +Description +----------- +Level 3 Aquarius sea surface salinity (SSS) data products have a temporal +resolutions of daily, 8 day, monthly, 3 months, and annual. Monthly and +seasonal climatology products from Aquarius are also available. The Aquarius +instrument provides global coverage every 7 days. L3 products are gridded +at 1 degree spatial resolution. + +Source +------ +`NASA Aquarius Website`_ + +Release Policy +-------------- +NASA data are not copyrighted; however, when you publish our data or +results derived therefrom, we request that you include an acknowledgment +within the text of the publication and reference list. +`Data Citation and Acknowledgements`_ + +References +---------- +`Lagerloef et al. (2015)`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapSSS` + +.. _`NASA Aquarius Website`: https://podaac.jpl.nasa.gov/dataset/AQUARIUS_L4_OISSS_IPRC_7DAY_V4 +.. _`Data Citation and Acknowledgements`: https://podaac.jpl.nasa.gov/CitingPODAAC +.. _`Lagerloef et al. 
(2015)`: ftp://podaac.jpl.nasa.gov/SalinityDensity/aquarius/docs/v4/AQ-014-PS-0016_AquariusSalinityDataValidationAnalysis_DatasetVersion4.0and3.0.pdf + diff --git a/1.11.0rc1/_sources/users_guide/obs/argo_mld.rst.txt b/1.11.0rc1/_sources/users_guide/obs/argo_mld.rst.txt new file mode 100644 index 000000000..7fd109e83 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/argo_mld.rst.txt @@ -0,0 +1,47 @@ +.. _argo_mld: + +Argo Mixed Layer Depth (MLD) climatology +======================================== + +Description +----------- +A mixed layer climatology and database (described in Holte et al. 2017) +using Argo profiles and a hybrid method (Holte and Talley 2009) for +finding the mixed layer depth (MLD). The climatology incorporates over +1,385,000 Argo profiles (through February 2017). The new hybrid algorithm +models the general shape of each profile, searches for physical features +in the profile, and calculates threshold and gradient MLDs to assemble a +suite of possible MLD values. It then analyzes the patterns in the suite +to select a final MLD estimate. Results are also presented for MLDs +calculated using de Boyer Montegut et al.'s (2004) threshold values. + +Source +------ +`UCSD Mixed Layer Website`_ + +Release Policy +-------------- +`Acknowledgment:`_ If you use this data, +please cite it as: Holte, J., L. D. Talley, J. Gilson, and D. Roemmich +(2017), An Argo mixed layer climatology and database, Geophys. Res. +Lett., 44, 5618-5626, doi:10.1002/2017GL073426. + +References +---------- +- `Holte et al. (2017)`_ +- `Holte and Talley (2009)`_ +- `de Boyer Montegut et al. (2004)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapMLD` + +.. _`UCSD Mixed Layer Website`: http://mixedlayer.ucsd.edu/ +.. _`Acknowledgment:`: http://mixedlayer.ucsd.edu/ +.. _`Holte et al. (2017)`: http://onlinelibrary.wiley.com/doi/10.1002/2017GL073426/full +.. 
_`Holte and Talley (2009)`: http://journals.ametsoc.org/doi/abs/10.1175/2009JTECHO543.1 +.. _`de Boyer Montegut et al. (2004)`: https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2004JC002378 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/MLD/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/aviso_ssh.rst.txt b/1.11.0rc1/_sources/users_guide/obs/aviso_ssh.rst.txt new file mode 100644 index 000000000..645854085 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/aviso_ssh.rst.txt @@ -0,0 +1,46 @@ +.. _aviso_ssh: + +AVISO Absolute Dynamic Topography +================================= + +Description +----------- +`NASA JPL AVISO website`_ +This dataset contains absolute dynamic topography (similar to sea level +but with respect to the geoid) binned and averaged monthly on 1 degree +grids. The coverage is from October 1992 to December 2010. These data +were provided by AVISO (French space agency data provider) to support the +CMIP5 (Coupled Model Intercomparison Project Phase 5) under the World +Climate Research Program (WCRP) and was first made available via the JPL +Earth System Grid. The dynamic topography are derived from sea surface +height measured by several satellites including Envisat, TOPEX/Poseidon, +Jason-1 and OSTM/Jason-2, and referenced to the geoid. Along with this +dataset, two additional ancillary data files are included in the same +directory which contain the number of observations and standard error +co-located on the same 1 degree grids. 
+ +Source +------ +`NASA JPL AVISO website`_ + +Release Policy +-------------- +When using Ssalto/Duacs data (NRT or DT along-track Absolute Dynamic +Topography (ADT), maps of SLA geostrophic currents (MSLA UV) or maps of +ADT heights and currents (MADT H and UV), climatologies and averages of +MSLA-H), please cite: "The altimeter products were produced by +Ssalto/Duacs and distributed by Aviso, with support from Cnes +(http://www.aviso.altimetry.fr/duacs/)" + +References +---------- +`AVISO: Sea Surface Height above Geoid`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapSSH` + +.. _`NASA JPL AVISO website`: https://podaac.jpl.nasa.gov/dataset/AVISO_L4_DYN_TOPO_1DEG_1MO +.. _`NASA JPL AVISO website`: https://podaac.jpl.nasa.gov/dataset/AVISO_L4_DYN_TOPO_1DEG_1MO +.. _`AVISO: Sea Surface Height above Geoid`: ftp://podaac.jpl.nasa.gov/allData/aviso/L4/dynamic_topo_1deg_1mo/docs/zosTechNote_AVISO_L4_199210-201012.pdf + diff --git a/1.11.0rc1/_sources/users_guide/obs/bootstrap_conc.rst.txt b/1.11.0rc1/_sources/users_guide/obs/bootstrap_conc.rst.txt new file mode 100644 index 000000000..f27e93998 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/bootstrap_conc.rst.txt @@ -0,0 +1,47 @@ +.. _bootstrap_conc: + +Ice concentration: SSM/I, Bootstrap algorithm +============================================= + +Description +----------- +This sea ice concentration data set was derived using measurements from +the Scanning Multichannel Microwave Radiometer (SMMR) on the Nimbus-7 +satellite and from the Special Sensor Microwave/Imager (SSM/I) sensors on +the Defense Meteorological Satellite Program's (DMSP) -F8, -F11, and -F13 +satellites. Measurements from the Special Sensor Microwave Imager/Sounder +(SSMIS) aboard DMSP-F17 are also included. The data set has been +generated using the Advanced Microwave Scanning Radiometer - Earth +Observing System (AMSR-E) Bootstrap Algorithm with daily varying +tie-points. 
Daily (every other day prior to July 1987) and monthly data +are available for both the north and south polar regions. Data are +gridded on the SSM/I polar stereographic grid (25 x 25 km) and provided +in two-byte integer format. Data are available via FTP. + +Source +------ +`NSIDC Bootstrap Website`_ + +Release Policy +-------------- +NASA data are not copyrighted; however, when you publish our data or +results derived therefrom, we request that you include an acknowledgment +within the text of the publication and reference list. +`Data Citation and Acknowledgements`_ + +References +---------- +`Comiso (2017)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapSeaIceConcNH` +- :ref:`task_climatologyMapSeaIceConcSH` + +.. _`NSIDC Bootstrap Website`: http://nsidc.org/data/NSIDC-0079 +.. _`Data Citation and Acknowledgements`: https://podaac.jpl.nasa.gov/CitingPODAAC +.. _`Comiso (2017)`: https://doi.org/10.5067/7Q8HCCWS4I0R +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/SeaIce/SSMI/Bootstrap_NSIDC0079/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/drifter_eke.rst.txt b/1.11.0rc1/_sources/users_guide/obs/drifter_eke.rst.txt new file mode 100644 index 000000000..14ea54e98 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/drifter_eke.rst.txt @@ -0,0 +1,62 @@ +.. _drifter_eke: + +Surface Current Variance from Drifter Data +========================================== + +Description +----------- +Contains information about current variance (eddy kinetic energy) derived from +residuals with respect to the time-mean, seasonal, spatial gradient, and +SOI-related currents. The file contains the following variables: + +* Lon (1x720): longitude (degrees), negative=West. +* Lat (1x317): latitude (degrees), 73S to 85N. +* Up2bar (317x720): Zonal Velocity Variance, mean(u' ^2) (m^2/s^2). +* Vp2bar (317x720): Meridional Velocity Variance, mean(v' ^2) (m^2/s^2). 
+* rA (317x720): Variance ellipse semimajor axis (m^2/s^2). +* rB (317x720): Variance ellipse semiminor axis (m^2/s^2). +* angle (317x720): Orientation angle of variance ellipse (degrees, 0=east/west). +* N (317x720): Number of drifter-days per square degree. + +Note that Up2bar+Vp2bar=rA+rB, which is twice the eddy kinetic energy. + +For more information, see: http://www.aoml.noaa.gov/phod/gdp/mean_velocity.php + +The global near-surface current climatology described here is publicly available +through the `Gulf of Mexico Research Initiative Information & Data Cooperative (GRIIDC)`_, and through +`NOAA/AOML`_ at +http://www.aoml.noaa.gov/phod/dac/drifter_variance.nc. + +This climatology was developed by Rick Lumpkin (NOAA/AOML) and Lucas Laurindo +(Univ. Miami), in collaboration with Arthur Mariano (Univ. Miami), Mayra Pazos +(NOAA/AOML), and Erik Valdes (CIMAS/AOML). Previous versions were developed with +Gregory Johnson (NOAA/PMEL), Silvia Garzoli (NOAA/AOML), Jessica Redman (CIMAS), +and Zulema Garraffo(Univ. Miami). + +Source +------ +`NOAA/AOML`_ + +Release Policy +-------------- +Please cite as reference: +Laurindo, L., A. Mariano, and R. Lumpkin, 2017: An improved near-surface +velocity climatology for the global ocean from drifter observations Deep-Sea +Res. I, 124, pp.73-92, doi:10.1016/j.dsr.2017.04.009. + +References +---------- +`Laurindo et al. (2017)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapEKE` + +.. _`Gulf of Mexico Research Initiative Information & Data Cooperative (GRIIDC)`: http://dx.doi.org/10.7266/N7SJ1HN5 +.. _`NOAA/AOML`: http://www.aoml.noaa.gov/phod/dac/dac_meanvel.php +.. _`NOAA/AOML`: http://www.aoml.noaa.gov/phod/dac/dac_meanvel.php +.. _`Laurindo et al. (2017)`: https://doi.org/10.1016/j.dsr.2017.04.009 +.. 
_`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/EKE/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/era5_waves.rst.txt b/1.11.0rc1/_sources/users_guide/obs/era5_waves.rst.txt new file mode 100644 index 000000000..4d99a10df --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/era5_waves.rst.txt @@ -0,0 +1,75 @@ +.. _era5_waves: + +Wave Reanalysis: ERA5 +===================== + +Description +----------- +ERA5 is the fifth generation ECMWF reanalysis for the global climate and weather for the past 4 to 7 decades. +Currently data is available from 1950, with Climate Data Store entries for 1950-1978 (preliminary back extension) +and from 1959 onwards (final release plus timely updates, this page). ERA5 replaces the ERA-Interim reanalysis. + +Reanalysis combines model data with observations from across the world into a globally complete and +consistent dataset using the laws of physics. This principle, called data assimilation, is based on +the method used by numerical weather prediction centres, where every so many hours (12 hours at ECMWF) +a previous forecast is combined with newly available observations in an optimal way to produce a new +best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. +Reanalysis works in the same way, but at reduced resolution to allow for the provision of a dataset +spanning back several decades. Reanalysis does not have the constraint of issuing timely forecasts, +so there is more time to collect observations, and when going further back in time, to allow for the +ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. + +ERA5 provides hourly estimates for a large number of atmospheric, ocean-wave and land-surface quantities. +An uncertainty estimate is sampled by an underlying 10-member ensemble at three-hourly intervals. 
+Ensemble mean and spread have been pre-computed for convenience. +Such uncertainty estimates are closely related to the information content of the available +observing system which has evolved considerably over time. They also indicate flow-dependent sensitive areas. +To facilitate many climate applications, monthly-mean averages have been pre-calculated too, +though monthly means are not available for the ensemble mean and spread. + +ERA5 is updated daily with a latency of about 5 days (monthly means are available around the 6th of each month). +In case that serious flaws are detected in this early release (called ERA5T), this data could be different +from the final release 2 to 3 months later. In case that this occurs users are notified. + +The data set presented here is a regridded subset of the full ERA5 data set on native resolution. +It is online on spinning disk, which should ensure fast and easy access. +It should satisfy the requirements for most common applications. + +An overview of all ERA5 datasets can be found in this `article`_. +Information on access to ERA5 data on native resolution is provided in these `guidelines`_. + +Data has been regridded to a regular lat-lon grid of 0.25 degrees for the reanalysis and +0.5 degrees for the uncertainty estimate (0.5 and 1 degree respectively for ocean waves). +There are four main sub sets: hourly and monthly products, both on pressure levels +(upper air fields) and single levels (atmospheric, ocean-wave and land surface quantities). + +The present entry is "ERA5 monthly mean data on single levels from 1959 to present". + +Source +------ +`Copernicus Climate Data Store`_ + +Release Policy +-------------- +Access to Copernicus Products is given for any purpose in so far as it is lawful, whereas use +may include, but is not limited to: reproduction; distribution; communication to the public; +adaptation, modification and combination with other data and information; or any +combination of the foregoing. 
`Licence Agreement`_ + +References +---------- +- `Hersback et al. 2020`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapWaves` + +.. _`article`: https://confluence.ecmwf.int/display/CKB/The+family+of+ERA5+datasets +.. _`guidelines`: https://confluence.ecmwf.int/display/CKB/How+to+download+ERA5 +.. _`Copernicus Climate Data Store`: https://doi.org/10.24381/cds.f17050d7 +.. _`Licence Agreement`: https://cds.climate.copernicus.eu/api/v2/terms/static/licence-to-use-copernicus-products.pdf +.. _`Hersback et al. 2020`: https://rmets.onlinelibrary.wiley.com/doi/10.1002/qj.3803 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/ERA5/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/ers_sst_nino.rst.txt b/1.11.0rc1/_sources/users_guide/obs/ers_sst_nino.rst.txt new file mode 100644 index 000000000..ccef5e97a --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/ers_sst_nino.rst.txt @@ -0,0 +1,49 @@ +.. _ers_sst_nino: + +ERS SSTv4 Nino 3.4 Index +======================== + +Description +----------- +The Nino 3.4 Index is also computed using the Extended Reconstructed +Sea Surface Temperature (ERSST) dataset, which is a global monthly +sea surface temperature dataset derived from the International Comprehensive +Ocean-Atmosphere Dataset (ICOADS). It is produced on a 2 degree by 2 degree +grid with spatial completeness enhanced using statistical methods. This +monthly analysis begins in January 1854 continuing to the present and +includes anomalies computed with respect to a 1971-2000 monthly climatology. +The newest version of ERSST, version 4, is based on optimally tuned parameters +using the latest datasets and improved analysis methods. ERSST is suitable +for long-term global and basin-wide studies, and smoothed local and +short-term variations are used in the dataset. 
+ +Source +------ +`NOAA ERSST v4 website`_ + +Release Policy +-------------- +Cite this dataset when used as a source: Boyin Huang, Viva F. Banzon, Eric Freeman, +Jay Lawrimore, Wei Liu, Thomas C. Peterson, Thomas M. Smith, Peter W. Thorne, +Scott D. Woodruff, and Huai-Min Zhang, 2015: Extended Reconstructed Sea Surface +Temperature (ERSST), Version 4. NOAA National Centers for Environmental Information. +doi:10.7289/V5KD1VVF [access date: January 2017]. + +References +---------- +- `Huang et al. (2014)`_ +- `Liu et al. (2014)`_ +- `Huang et al. (2015)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_indexNino34` + +.. _`NOAA ERSST v4 website`: https://www.ncdc.noaa.gov/data-access/marineocean-data/extended-reconstructed-sea-surface-temperature-ersst-v4 +.. _`Huang et al. (2014)`: https://journals.ametsoc.org/doi/10.1175/JCLI-D-14-00006.1 +.. _`Liu et al. (2014)`: https://journals.ametsoc.org/doi/10.1175/JCLI-D-14-00007.1 +.. _`Huang et al. (2015)`: https://journals.ametsoc.org/doi/10.1175/JCLI-D-15-0430.1 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/Nino/ERS_SSTv4/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/glodapv2.rst.txt b/1.11.0rc1/_sources/users_guide/obs/glodapv2.rst.txt new file mode 100644 index 000000000..ceea78ae4 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/glodapv2.rst.txt @@ -0,0 +1,38 @@ +.. _glodapv2: + +GLODAPv2 +======== + +Description +----------- +GLODAPv2 (Global Ocean Data Analysis Project version 2) is an international +data synthesis project for interior ocean inorganic carbon data and +related variables for the global ocean. It includes data from all ocean +areas of the globe and synthesizes 724 unique cruises. + +Source +------ +`GLODAPv2 Website`_ + +Release Policy +-------------- +The GLODAPv2 database is freely available. + +References +---------- +- `Olsen et al. 2016`_ +- `Lauvset et al. 2016`_ +- `Olsen et al. 
2017`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapBGC` + +.. _`GLODAPv2 Website`: https://www.nodc.noaa.gov/ocads/oceans/GLODAPv2/ +.. _`Olsen et al. 2016`: http://cdiac.ess-dive.lbl.gov/ftp/oceans/GLODAPv2/Data_Products/essd-8-297-2016.pdf +.. _`Lauvset et al. 2016`: http://cdiac.ess-dive.lbl.gov/ftp/oceans/GLODAPv2/Data_Products/essd-8-325-2016.pdf +.. _`Olsen et al. 2017`: https://www.nodc.noaa.gov/ocads/data/0162565.xml +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/BGC/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/hadisst_nino.rst.txt b/1.11.0rc1/_sources/users_guide/obs/hadisst_nino.rst.txt new file mode 100644 index 000000000..330bb522e --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/hadisst_nino.rst.txt @@ -0,0 +1,45 @@ +.. _hadisst_nino: + +HadISST Nino 3.4 Index +====================== + +Description +----------- +Nino 3.4 Index is computed from the Hadley-OI sea surface temperature +(SST) and sea ice concentration (SIC) data set. This product was +specifically developed as surface forcing data set for AMIP style +uncoupled simulations of the Community Atmosphere Model (CAM). The +Hadley Centre's SST/SIC version 1.1 (HADISST1), which is derived gridded, +bias-adjusted in situ observations, were merged with the NOAA-Optimal +Interpolation (version 2; OI.v2) analyses. The HADISST1 spanned 1870 +onward but the OI.v2, which started in November 1981, better resolved +features such as the Gulf Stream and Kuroshio Current which are important +components of the climate system. Since the two data sets used different +development methods, anomalies from a base period were used to create +a more homogeneous record. Also, additional adjustments were made to +the SIC data set. + +Source +------ +`NCAR Hadley-NOAA/OI SST website`_ + +Release Policy +-------------- +Acknowledgment: Hurrell, J. W., J. J. Hack, D. Shea, J. M. Caron, and J. 
Rosinski, +2008: A New Sea Surface Temperature and Sea Ice Boundary Dataset for the Community +Atmosphere Model. Journal of Climate, 21, 5145-5153. + +References +---------- +`Hurrell et al. (2008)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_indexNino34` + +.. _`NCAR Hadley-NOAA/OI SST website`: https://climatedataguide.ucar.edu/climate-data/merged-hadley-noaaoi-sea-surface-temperature-sea-ice-concentration-hurrell-et-al-2008 +.. _`Hurrell et al. (2008)`: https://doi.org/10.1175/2008JCLI2292.1 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/Nino/HadISST/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/hadley_center_sst.rst.txt b/1.11.0rc1/_sources/users_guide/obs/hadley_center_sst.rst.txt new file mode 100644 index 000000000..cfd67aed6 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/hadley_center_sst.rst.txt @@ -0,0 +1,44 @@ +.. _hadley_center_sst: + +SST merged Hadley Center-NOAA/OI data set +========================================= + +Description +----------- +The merged Hadley-OI sea surface temperature (SST) and sea ice +concentration (SIC) data sets were specifically developed as surface +forcing data sets for AMIP style uncoupled simulations of the Community +Atmosphere Model (CAM). The Hadley Centre's SST/SIC version 1.1 +(HADISST1), which is derived gridded, bias-adjusted in situ observations, +were merged with the NOAA-Optimal Interpolation (version 2; OI.v2) +analyses. The HADISST1 spanned 1870 onward but the OI.v2, which started +in November 1981, better resolved features such as the Gulf Stream and +Kuroshio Current which are important components of the climate system. +Since the two data sets used different development methods, anomalies +from a base period were used to create a more homogeneous record. Also, +additional adjustments were made to the SIC data set. 
+ +Source +------ +`NCAR Hadley-NOAA/OI SST website`_ + +Release Policy +-------------- +Acknowledgment: Hurrell, J. W., J. J. Hack, D. Shea, J. M. Caron, and J. Rosinski, +2008: A New Sea Surface Temperature and Sea Ice Boundary Dataset for the Community +Atmosphere Model. Journal of Climate, 21, 5145-5153. + +References +---------- +`Hurrell et al. (2008)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapSST` + +.. _`NCAR Hadley-NOAA/OI SST website`: https://climatedataguide.ucar.edu/climate-data/merged-hadley-noaaoi-sea-surface-temperature-sea-ice-concentration-hurrell-et-al-2008 +.. _`Hurrell et al. (2008)`: https://doi.org/10.1175/2008JCLI2292.1 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/SST/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/icesat_thickness.rst.txt b/1.11.0rc1/_sources/users_guide/obs/icesat_thickness.rst.txt new file mode 100644 index 000000000..7562704bc --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/icesat_thickness.rst.txt @@ -0,0 +1,39 @@ +.. _icesat_thickness: + +IceSat Ice Thickness +==================== + +Description +----------- +This data set provides measurements of sea ice freeboard and sea ice +thickness for the Arctic region. The data were derived from measurements +made by from the Ice, Cloud, and land Elevation Satellite (ICESat) +Geoscience Laser Altimeter System (GLAS) instrument, the Special Sensor +Microwave/Imager (SSM/I), and climatologies of snow and drift of ice. + +Source +------ +`NASA: Arctic Sea Ice Freeboard and Thickness`_ + +Release Policy +-------------- +NASA data are not copyrighted; however, when you publish our data or +results derived therefrom, we request that you include an acknowledgment +within the text of the publication and reference list. 
+`Data Citation and Acknowledgements`_ + +References +---------- +`Yi and Zwally (2009)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_timeSeriesSeaIceAreaVol` + +.. _`NASA: Arctic Sea Ice Freeboard and Thickness`: http://nsidc.org/data/NSIDC-0393 +.. _`Data Citation and Acknowledgements`: https://podaac.jpl.nasa.gov/CitingPODAAC +.. _`Yi and Zwally (2009)`: https://doi.org/10.5067/SXJVJ3A2XIZT +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/SeaIce/ICESat/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/landschuetzer-som-ffn.rst.txt b/1.11.0rc1/_sources/users_guide/obs/landschuetzer-som-ffn.rst.txt new file mode 100644 index 000000000..55b79b986 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/landschuetzer-som-ffn.rst.txt @@ -0,0 +1,49 @@ +.. _landschuetzer-som-ffn: + +Landschuetzerv2016 SOM-FFN +========================== + +Description +----------- +The observation-based pCO2 fields were created using a 2-step neural +network method extensively described and validated in Landschuetzer et +al. 2013, 2014, 2016. The method first clusters the global ocean into +biogeochemical provinces and in a second step reconstructs the non-liner +relationship between CO2 driver variables and observations from the 4th +release of the Surface Ocean CO2 Atlas (SOCATv4, Bakker et al. 2016). +This file contains the resulting monthly pCO2 fields at 1 x 1 deg resolution +covering the global ocean with the exception of the Arctic Ocean and few +marginal seas. The air-sea CO2 fluxes are computed from the air-sea CO2 +partial pressure difference and a bulk gas transfer formulation +following Landschuetzer et al. 2013, 2014, 2016. Furthermore, the monthly +climatology is created from the monthly average of the period 1985-2015. + +Source +------ +`SOM-FFN Website`_ + +Release Policy +-------------- +This product is free to be used. Please cite the data set as: +Landschuetzer, P., N. Gruber and D.C.E. 
Bakker (2017). An updated observation-based global monthly gridded sea surface pCO2 and air-sea CO2 flux product from 1982 through 2015 and its monthly climatology (NCEI Accession 0160558). Version 2.2. NOAA National Centers for Environmental Information. Dataset. [2017-07-11] + +References +---------- +- `Landschuetzer et al. 2013`_ +- `Landschuetzer et al. 2014`_ +- `Landschuetzer et al. 2016`_ +- `Bakker et al. 2016`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapBGC` + +.. _`SOM-FFN Website`: https://www.nodc.noaa.gov/ocads/oceans/SPCO2_1982_2015_ETH_SOM_FFN.html +.. _`Landschuetzer et al. 2013`: http://doi.org/10.5194/bg-10-7793-2013 +.. _`Landschuetzer et al. 2014`: http://doi.org/10.1002/2014GB004853 +.. _`Landschuetzer et al. 2016`: http://doi.org/10.1002/2015GB005359 +.. _`Bakker et al. 2016`: http://doi.org/10.5194/essd-8-383-2016 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/BGC/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/nasateam_conc.rst.txt b/1.11.0rc1/_sources/users_guide/obs/nasateam_conc.rst.txt new file mode 100644 index 000000000..7692b0878 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/nasateam_conc.rst.txt @@ -0,0 +1,40 @@ +.. _nasateam_conc: + +Ice concentration: SSM/I, NASATeam algorithm +============================================ + +Description +----------- +This data set is generated from brightness temperature data and is +designed to provide a consistent time series of sea ice concentrations +spanning the coverage of several passive microwave instruments. The data +are provided in the polar stereographic projection at a grid cell size of +25 x 25 km. + +Source +------ +`NSIDC NASATeam Website`_ + +Release Policy +-------------- +NASA data are not copyrighted; however, when you publish our data or +results derived therefrom, we request that you include an acknowledgment +within the text of the publication and reference list. 
+`Data Citation and Acknowledgements`_ + +References +---------- +`Cavalieri et al. (1996)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapSeaIceConcNH` +- :ref:`task_climatologyMapSeaIceConcSH` + +.. _`NSIDC NASATeam Website`: http://nsidc.org/data/NSIDC-0051 +.. _`Data Citation and Acknowledgements`: https://podaac.jpl.nasa.gov/CitingPODAAC +.. _`Cavalieri et al. (1996)`: https://doi.org/10.5067/8GQ8LZQVL0VL +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/SeaIce/SSMI/NASATeam_NSIDC0051/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/paolo_melt.rst.txt b/1.11.0rc1/_sources/users_guide/obs/paolo_melt.rst.txt new file mode 100644 index 000000000..ebbbd94be --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/paolo_melt.rst.txt @@ -0,0 +1,32 @@ +.. _paolo_melt: + +Antarctic melt rates and fluxes +=============================== + +Description +----------- +Melt rates and melt fluxes from Paolo et al. (2023) + +Source +------ +`Data from: ANT_G1920V01_IceShelfMelt.nc`_ + +Release Policy +-------------- +Not stated. + +References +---------- +`Paolo et al. (2023)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapAntarcticMelt` +- :ref:`task_timeSeriesAntarcticMelt` + +.. _`Data from: ANT_G1920V01_IceShelfMelt.nc`: https://doi.org/10.5067/SE3XH9RXQWAM +.. _`Paolo et al. (2023)`: https://doi.org/10.5194/tc-17-3409-2023 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/Melt/Paolo/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/piomass_ice_volume.rst.txt b/1.11.0rc1/_sources/users_guide/obs/piomass_ice_volume.rst.txt new file mode 100644 index 000000000..3f65853c4 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/piomass_ice_volume.rst.txt @@ -0,0 +1,39 @@ +.. 
_piomass_ice_volume: + +PIOMAS Arctic Sea Ice Volume Reanalysis +======================================= + +Description +----------- +Sea Ice Volume is calculated using the Pan-Arctic Ice Ocean Modeling and +Assimilation System (PIOMAS, Zhang and Rothrock, 2003) developed at +APL/PSC. Anomalies for each day are calculated relative to the average +over the 1979-2016 period for that day of the year to remove the annual +cycle. The model mean annual cycle of sea ice volume over this period +ranges from 28,000 km3 in April to 11,500 km3 in September. + +Source +------ +`PIOMAS website`_ + +Release Policy +-------------- +Data is public, but they optionally ask for basic information about the +person downloading the data (name, e-mail, and affiliation). + +References +---------- +- `Schweiger et al. (2011)`_ +- `Zhang and Rothrock (2003)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_timeSeriesSeaIceAreaVol` + +.. _`PIOMAS website`: http://psc.apl.uw.edu/research/projects/arctic-sea-ice-volume-anomaly/ +.. _`Schweiger et al. (2011)`: https://doi.org/10.1029/2011JC007084 +.. _`Zhang and Rothrock (2003)`: https://doi.org/10.1175/1520-0493%282003%29131<0845:MGSIWA>2.0.CO;2 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/SeaIce/PIOMAS/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/rignot_melt.rst.txt b/1.11.0rc1/_sources/users_guide/obs/rignot_melt.rst.txt new file mode 100644 index 000000000..395c2d176 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/rignot_melt.rst.txt @@ -0,0 +1,32 @@ +.. _rignot_melt: + +Antarctic melt rates and fluxes +=============================== + +Description +----------- +Melt rates and melt fluxes from Rignot et al. (2013) + +Source +------ +`Ice-Shelf Melting Around Antarctica`_ + +Release Policy +-------------- +Data available upon request from co-author J. Mouginot. + +References +---------- +`Rignot et al. 
(2013)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapAntarcticMelt` +- :ref:`task_timeSeriesAntarcticMelt` + +.. _`Ice-Shelf Melting Around Antarctica`: http://science.sciencemag.org/content/341/6143/266 +.. _`Rignot et al. (2013)`: http://science.sciencemag.org/content/341/6143/266 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/Melt/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/roemmich_gilson_argo.rst.txt b/1.11.0rc1/_sources/users_guide/obs/roemmich_gilson_argo.rst.txt new file mode 100644 index 000000000..da9e2cebe --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/roemmich_gilson_argo.rst.txt @@ -0,0 +1,45 @@ +.. _roemmich_gilson_argo: + +Roemmich-Gilson Argo Climatology +================================ + +Description +----------- +This new version of the Roemmich-Gilson Argo Climatology extends the +analysis of Argo-only derived potential temperature and salinity fields +through 2016. Several marginal seas and the Artic Oean have been added. +The analysis method is similar to what was descibed in the Progress In +Oceanography Roemmich and Gilson paper (2009). The only modification has +been to scale the zonal equatorial correlation of the optimal estimation +step, by 8 times, versus 4 times as in the 2009 paper. The additional +Argo data utilized in the analysis results in a longer monthly record as +well as better estimates of the mean and variability fields. Monthly +updates are available in between major yearly re-analyses. + +Source +------ +`Scripps Roemmich-Gilson Argo Website`_ + +Release Policy +-------------- +`Acknowledgment:`_ Roemmich, +D. and J. Gilson, 2009: The 2004-2008 mean and annual cycle of +temperature, salinity, and steric height in the global ocean from the +Argo Program. Progress in Oceanography, 82, 81-100. 
+ +References +---------- +`Roemmich and Gilson (2009)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapArgoTemperature` +- :ref:`task_climatologyMapArgoSalinity` + +.. _`Scripps Roemmich-Gilson Argo Website`: http://sio-argo.ucsd.edu/RG_Climatology.html +.. _`Acknowledgment:`: http://sio-argo.ucsd.edu/RG_Climatology.html +.. _`Roemmich and Gilson (2009)`: http://www.sciencedirect.com/science/article/pii/S0079661109000160 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/ARGO/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/schmidtko.rst.txt b/1.11.0rc1/_sources/users_guide/obs/schmidtko.rst.txt new file mode 100644 index 000000000..6fdf74bbf --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/schmidtko.rst.txt @@ -0,0 +1,33 @@ +.. _schmidtko: + +Antarctic Seafloor Temperature and Salinity +=========================================== + +Description +----------- +Temporal means in conservative temperature and absolute salinity +of Antarctic Continental Shelf Bottom Water (ASBW) for depths shallower +than 1500 m for the period 1975 to 2012. + +Source +------ +`ASCII data file`_ + +Release Policy +-------------- +(missings) + +References +---------- +`Schmidtko et al. (2014)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapSchmidtko` + +.. _`ASCII data file`: https://www.geomar.de/fileadmin/personal/fb1/po/sschmidtko/Antarctic_shelf_data.txt +.. _`Schmidtko et al. (2014)`: http://www.sciencemag.org/cgi/doi/10.1126/science.1256117 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/Schmidtko/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/seawifs.rst.txt b/1.11.0rc1/_sources/users_guide/obs/seawifs.rst.txt new file mode 100644 index 000000000..2793999e0 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/seawifs.rst.txt @@ -0,0 +1,40 @@ +.. 
_seawifs: + +SeaWiFS +======= + +Description +----------- +SeaWiFS (Sea-Viewing Wide Field-of-View Senson) was a project dedicated +to monitoring ocean water quality and ecological characteristics covering +multiple optical bands with a resolution of roughly 1.1km. Chlorophyll +concentrations are derived from images of ocean color. + +Source +------ +`Ocean Color Website`_ + +Release Policy +-------------- +Please provide acknowledgement of the use of Ocean Biology Processing +Group (OBPG) data products, images, and services, e.g.: + +NASA Goddard Space Flight Center, Ocean Ecology Laboratory, Ocean +Biology Processing Group; (2014): Sea-viewing Wide Field-of-view Sensor +(SeaWiFS) Ocean Color Data, NASA OB.DAAC. http://doi.org/10.5067/ORBVIEW +-2/SEAWIFS_OC.2014.0. Accessed on 2016/02/29. + +References +---------- +- `O'Reilly et al. 1998`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapBGC` + +.. _`Ocean Color Website`: https://oceancolor.gsfc.nasa.gov/ +.. _`O'Reilly et al. 1998`: https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/98JC02160 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/BGC/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/sose.rst.txt b/1.11.0rc1/_sources/users_guide/obs/sose.rst.txt new file mode 100644 index 000000000..24c0f0979 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/sose.rst.txt @@ -0,0 +1,43 @@ +.. 
_sose: + +2005-2010 climatology from SOSE the Southern Ocean State Estimate (SOSE) +======================================================================== + +Description +----------- +Monthly potential temperature, salinity velocity components and neutral +density output from the Southern Ocean State Estimate (SOSE) covering +years 2005-2010 + +Source +------ +`SOSE Website at UCSD`_ + +Release Policy +-------------- +`Conditions of use`_: The data on +these webpages are made freely available for scientific, bona fide, +not-for-profit research only. If your use of the data is different (e.g. +commercial), you must contact the data providers and receive written +permission for your use of the data prior to any such use. The user must +acknowledge SOSE data in all products or publications that use them, e.g. +by including the following written note: "Computational resources for the +SOSE were provided by NSF XSEDE resource grant OCE130007." An appropriate +citation should also be made. + +References +---------- +`Mazloff et al. (2010)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapSose` +- :ref:`task_soseTransects` + +.. _`SOSE Website at UCSD`: http://sose.ucsd.edu/sose_stateestimation_data_05to10.html +.. _`Conditions of use`: http://sose.ucsd.edu/sose_stateestimation_disclaimer.html +.. _`Mazloff et al. (2010)`: http://doi.org/10.1175/2009JPO4236.1 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/SOSE/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/sscci_waves.rst.txt b/1.11.0rc1/_sources/users_guide/obs/sscci_waves.rst.txt new file mode 100644 index 000000000..064aa8ea2 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/sscci_waves.rst.txt @@ -0,0 +1,52 @@ +.. 
_sscci_waves: + +Wave Satelite Altimeter Observations: ESA Sea State Climate Change Initiative +============================================================================= + +Description +----------- +The ESA Sea State Climate Change Initiative (CCI) project has produced global merged multi-sensor +time-series of monthly gridded satellite altimeter significant wave height (referred to as Level 4 (L4) data) +with a particular focus for use in climate studies. + +This dataset contains the Version 1.1 Remote Sensing Sea Surface Height product, gridded over a global +regular cylindrical projection (1°x1° resolution), averaging valid and good measurements from all +available altimeters on a monthly basis (using the L2P products also available). +These L4 products are meant for statistics and visualization. + +This first version of the Sea State CCI products is inherited from the GlobWave project, +building on experience and existing outputs. It extends and improves the GlobWave products, +which were a post-processing over existing L2 altimeter agency products with additional filtering, +corrections and variables. A major improvement consists in a new denoised sea surface height +variable using Empirical Mode Decomposition, which was used as input to these monthly statistical fields. + +The altimeter data used in the Sea State CCI dataset v1.1 come from multiple satellite missions +spanning from 1991 to 2018 (ERS-1, ERS-2, Topex, Envisat, GFO, CryoSat-2, Jason-1, Jason-2, Jason-3, SARAL). +Many altimeters are bi-frequency (Ku-C or Ku-S) and only measurements in Ku band were used, +for consistency reasons, being available on each altimeter but SARAL (Ka band). + +Source +------ +- `CEDA Archive`_ + +Release Policy +-------------- +Public data: access to these data is available to both registered and non-registered users. +Use of these data is covered by the following `licence`_. 
+When using these data you must cite them correctly using the citation given on the CEDA Data Catalogue record. + +References +---------- +- `Dodet et al. 2020`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapWaves` + +.. _`CEDA Archive`: http://dx.doi.org/10.5285/47140d618dcc40309e1edbca7e773478 +.. _`licence`: https://artefacts.ceda.ac.uk/licences/specific_licences/esacci_sea_state_terms_and_conditions.pdf +.. _`Dodet et al. 2020`: https://essd.copernicus.org/articles/12/1929/2020/ +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/SSCCI/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/ssmi_ice_area.rst.txt b/1.11.0rc1/_sources/users_guide/obs/ssmi_ice_area.rst.txt new file mode 100644 index 000000000..1aee3625c --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/ssmi_ice_area.rst.txt @@ -0,0 +1,69 @@ +.. _ssmi_ice_area: + +Ice area and extent time series: SSM/I derived +============================================== + +Description +----------- +The sea ice data presented here were derived from satellite +passive-microwave radiometers, specifically, the Scanning Multichannel +Microwave Radiometer (SMMR) on NASA's Nimbus 7 satellite, for November +1978-August 1987, a sequence of Special Sensor Microwave Imagers (SSMIs) +on the F8, F11, and F13 satellites of the Defense Meteorological +Satellite Program (DMSP), for August 1987-December 2007, and the Special +Sensor Microwave Imager Sounder (SSMIS) on the DMSP F17 satellite for +January 2008-December 2012. The baseline data used were daily maps of +sea ice concentration. The maps are polar stereographic projections with +individual grid elements of approximately 25 km x 25 km; and the ice +concentration data are also archived at the National Snow and Ice Data +Center (NSIDC) at http://nsidc.org. 
The concentrations are calculated for +each ocean grid element and are used to derive 'sea ice extent', which is +calculated as the sum of all ocean elements having a sea ice +concentration of at least 15%, and 'sea ice area', which is calculated as +the sum over all ocean grid elements of the product of ice concentration +and grid element area. The data sets provided here include the +hemispheric totals and additionally the values for nine regions in the +Arctic and five regions in the Antarctic. These regions are identified in +Figures 1 and 2 respectively. Figures 3 and 4 provide plots of the trends +in the Arctic and Antarctic sea ice extents, along with monthly +deviations and 12-month running means. The monthly deviations are +calculated by taking the individual month's ice extent/area and +subtracting from it the average over the course of the data set of the +extents/areas for that month. + +Source +------ +`NASA Ice area and extent website`_ + +Release Policy +-------------- +NASA data are not copyrighted; however, when you publish our data or +results derived therefrom, we request that you include an acknowledgment +within the text of the publication and reference list. +`Data Citation and Acknowledgements`_ + +References +---------- +- `Cavalieri et al. (1999)`_ +- `Cavalieri et al. (2012)`_ +- `Cavalieri and Parkinson (2012)`_ +- `Parkinson et al. (1999)`_ +- `Parkinson and Cavalieri (2012)`_ +- `Zwally et al. (2002)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_timeSeriesSeaIceAreaVol` + +.. _`NASA Ice area and extent website`: https://neptune.gsfc.nasa.gov/csb/index.php?section=59 +.. _`Data Citation and Acknowledgements`: https://podaac.jpl.nasa.gov/CitingPODAAC +.. _`Cavalieri et al. (1999)`: https://doi.org/10.1029/1999JC900081 +.. _`Cavalieri et al. (2012)`: https://doi.org/10.1109/LGRS.2011.2166754 +.. _`Cavalieri and Parkinson (2012)`: https://doi.org/10.5194/tc-6-881-2012 +.. _`Parkinson et al. 
(1999)`: https://doi.org/10.1029/1999JC900082 +.. _`Parkinson and Cavalieri (2012)`: https://doi.org/10.5194/tc-6-871-2012 +.. _`Zwally et al. (2002)`: https://doi.org/10.1029/2000JC000733 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/SeaIce/IceArea_timeseries/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/trenberth_mht.rst.txt b/1.11.0rc1/_sources/users_guide/obs/trenberth_mht.rst.txt new file mode 100644 index 000000000..3602cd1e2 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/trenberth_mht.rst.txt @@ -0,0 +1,38 @@ +.. _trenberth_mht: + +Meridional Heat Transport (MHT) +=============================== + +Description +----------- +The Trenberth and Caron oceanic meridional heat transport is computed +from the energy balance of the atmosphere, adjusted to fit +physical constraints, and using two reanalysis products: the National +Centers for Environmental Prediction-National Center for Atmospheric +Research (NCEP-NCAR) reanalysis and the European Centre for Medium-Range +Weather Forecasts (ECMWF) product. The analysis focuses on the period +from February 1985 to April 1989 when there are reliable top-of-the-atmosphere +radiation data from the Earth Radiation Budget Experiment. + +Source +------ +Data available upon request from Dr. Kevin Trenberth + +Release Policy +-------------- +Acknowledgment: please cite: Trenberth and Caron (2001). Estimates of +Meridional Atmosphere and Ocean Heat Transports, J. of Climate, 14, 3433-3443. + +References +---------- +`Trenberth and Caron (2001)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_meridionalHeatTransport` + +.. _`Trenberth and Caron (2001)`: https://doi.org/10.1175/1520-0442%282001%29014<3433%3AEOMAAO>2.0.CO%3B2 +.. 
_`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/MHT/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/woa.rst.txt b/1.11.0rc1/_sources/users_guide/obs/woa.rst.txt new file mode 100644 index 000000000..bc8b1c6e6 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/woa.rst.txt @@ -0,0 +1,39 @@ +.. _woa: + +World Ocean Atlas v2 +==================== + +Description +----------- +The World Ocean Atlas (WOA) is a data product of the Ocean Climate +Laboratory of the National Oceanographic Data Center. It consists of +objectively analyzed climatological mean fields at 1 degree spatial +resolution. The climatology is based on all data that are available from +the early 1900s to present. + +Source +------ +- `World Ocean Atlas v2 Website`_ + +Release Policy +-------------- +Cite the World Ocean Atlas 2013 for the data sets used (see references). +The World Ocean Atlas 2013 (and all previous versions) are available free +of charge. + +References +---------- +- `Oxygen: Garcia et al. 2014a`_ +- `Nutrients: Garcia et al. 2014b`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapBGC` + +.. _`World Ocean Atlas v2 Website`: https://www.nodc.noaa.gov/OC5/woa13/ +.. _`Oxygen: Garcia et al. 2014a`: https://data.nodc.noaa.gov/woa/WOA13/DOC/woa13_vol3.pdf +.. _`Nutrients: Garcia et al. 2014b`: https://data.nodc.noaa.gov/woa/WOA13/DOC/woa13_vol4.pdf +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/BGC/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/woa18_t_s.rst.txt b/1.11.0rc1/_sources/users_guide/obs/woa18_t_s.rst.txt new file mode 100644 index 000000000..761533945 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/woa18_t_s.rst.txt @@ -0,0 +1,48 @@ +.. 
_woa18_t_s: + +WOA18 Temperature and Salinity Climatology +========================================== + +Description +----------- +The World Ocean Atlas 2018 (WOA18) release July 2019 updates previous +versions of the World Ocean Atlas to include approximately 3 million +new oceanographic casts added to the World Ocean Database (WOD) since +the previous release as well as renewed and updated quality control. +Animal mounted pinniped temperature profiles have been added as a data +source improving coverage in some high latitude areas. WOA18 includes +analysis for temperature, salinity, dissolved oxygen, and dissolved +inorganic nutrients (only temperature and salinity are used in MPAS-Analysis +as of Aug 2019). The atlas includes annual, seasonal, and monthly +climatologies and related statistical fields. Annual fields are available +for the full depth (0-5500 m), while monthly and seasonal fields are +available for the upper 1500 m only. Climatologies are available on a 1deg +regular grid and on a 0.25deg grid. + +Source +------ +`NOAA National Oceanographic Data Center (NODC) website`_ + +Release Policy +-------------- +Acknowledgment: Locarnini and co-authors, 2019: World Ocean Atlas 2018, +Volume 1: Temperature. A. Mishonov Technical Editor, NOAA Atlas NESDIS 81. +Zweng and co-authors, 2019: World Ocean Atlas 2018, Volume 2: Salinity. +A. Mishonov Technical Editor, NOAA Atlas NESDIS 82. + +References +---------- +- `Locarnini et al. (2019)`_ +- `Zweng et al. (2019)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_climatologyMapWoa` + +.. _`NOAA National Oceanographic Data Center (NODC) website`: https://www.nodc.noaa.gov/OC5/woa18 +.. _`Locarnini et al. (2019)`: https://data.nodc.noaa.gov/woa/WOA18/DOC/woa18_vol1.pdf +.. _`Zweng et al. (2019)`: https://data.nodc.noaa.gov/woa/WOA18/DOC/woa18_vol2.pdf +.. 
_`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/WOA18/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/obs/woce.rst.txt b/1.11.0rc1/_sources/users_guide/obs/woce.rst.txt new file mode 100644 index 000000000..b3c10bb87 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/obs/woce.rst.txt @@ -0,0 +1,59 @@ +.. _woce: + +WOCE sections +============= + +Description +----------- +WOCE +---- +The Hydrographic Programme of the international World Ocean Circulation +Experiment (WOCE) was a comprehensive global hydrographic survey of +physical and chemical properties, of unprecedented scope and quality, and +represents the "state of the oceans" during the 1990s. + +The "Printed Atlas" is a copy of the published volume and contains full +introductory text. The "Digital Atlas" presents the same graphical +material, with additional properties and levels, ancillary data sets, and +bibliographic material for each of the vertical sections. + +CCHDO +----- +The CCHDO's primary mission is to deliver the highest possible quality +global CTD and hydrographic data to users. These data are a product of +decades of observations related to the physical characteristics of ocean +waters carried out during WOCE, CLIVAR and numerous other oceanographic +research programs. Whenever possible we provide these data in three +easy-to-use formats: WHP-Exchange (which we recommend for data +submissions to the CCHDO), WOCE, and netCDF. The CCHDO also supports the +international community with data from the global Argo and OceanSITES programs. + +Source +------ +- `WOCE Atlas Website`_ +- `CCHDO`_ + +Release Policy +-------------- +"Public" data may be placed on-line. They may be exchanged between +investigators as desired. They may or may not be preliminary, and +especially for preliminary public data users are strongly advised to +contact the originating investigators regarding the status of updates or +further data processing. 
`See Full Policy`_ + +References +---------- +`Orsi and Whitworth (2005)`_ + +`bibtex file`_ + +MPAS-Analysis Tasks +------------------- +- :ref:`task_woceTransects` + +.. _`WOCE Atlas Website`: http://woceatlas.ucsd.edu/ +.. _`CCHDO`: https://cchdo.ucsd.edu/ +.. _`See Full Policy`: https://cchdo.ucsd.edu/policy +.. _`Orsi and Whitworth (2005)`: https://doi.org/10.21976/C6BC78 +.. _`bibtex file`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/WOCE/obs.bib + diff --git a/1.11.0rc1/_sources/users_guide/observations.rst.txt b/1.11.0rc1/_sources/users_guide/observations.rst.txt new file mode 100644 index 000000000..00ba33b00 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/observations.rst.txt @@ -0,0 +1,16 @@ +Observations +============ + +A variety of observational datasets are used within MPAS-Analysis: + +Ocean Observations +------------------ +.. include:: ocean_obs_table.rst + +Sea Ice Observations +-------------------- +.. include:: seaice_obs_table.rst + +Details on Each Data Set +------------------------ +.. include:: all_obs.rst diff --git a/1.11.0rc1/_sources/users_guide/ocean_obs_table.rst.txt b/1.11.0rc1/_sources/users_guide/ocean_obs_table.rst.txt new file mode 100644 index 000000000..6f5b289ce --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/ocean_obs_table.rst.txt @@ -0,0 +1,95 @@ +============================ ======================================================================================================== ======================================== +Observational Dataset Source References +============================ ======================================================================================================== ======================================== +:ref:`hadley_center_sst` `NCAR Hadley-NOAA/OI SST website`_ `Hurrell et al. (2008)`_ +:ref:`aquarius_sss` `NASA Aquarius Website`_ `Lagerloef et al. (2015)`_ +:ref:`woa18_t_s` `NOAA National Oceanographic Data Center (NODC) website`_ - `Locarnini et al. 
(2019)`_ + - `Zweng et al. (2019)`_ +:ref:`aviso_ssh` `NASA JPL AVISO website`_ `AVISO: Sea Surface Height above Geoid`_ +:ref:`argo_mld` `UCSD Mixed Layer Website`_ - `Holte et al. (2017)`_ + - `Holte and Talley (2009)`_ + - `de Boyer Montegut et al. (2004)`_ +:ref:`trenberth_mht` Data available upon request from Dr. Kevin Trenberth `Trenberth and Caron (2001)`_ +:ref:`roemmich_gilson_argo` `Scripps Roemmich-Gilson Argo Website`_ `Roemmich and Gilson (2009)`_ +:ref:`sose` `SOSE Website at UCSD`_ `Mazloff et al. (2010)`_ +:ref:`rignot_melt` `Ice-Shelf Melting Around Antarctica`_ `Rignot et al. (2013)`_ +:ref:`adusumilli_melt` `Data from: Interannual variations in meltwater input to the Southern Ocean from Antarctic ice shelves`_ `Adusumilli et al. (2020)`_ +:ref:`paolo_melt` `Data from: ANT_G1920V01_IceShelfMelt.nc`_ `Paolo et al. (2023)`_ +:ref:`hadisst_nino` `NCAR Hadley-NOAA/OI SST website`_ `Hurrell et al. (2008)`_ +:ref:`ers_sst_nino` `NOAA ERSST v4 website`_ - `Huang et al. (2014)`_ + - `Liu et al. (2014)`_ + - `Huang et al. (2015)`_ +:ref:`schmidtko` `ASCII data file`_ `Schmidtko et al. (2014)`_ +:ref:`woce` - `WOCE Atlas Website`_ `Orsi and Whitworth (2005)`_ + - `CCHDO`_ +:ref:`woa` - `World Ocean Atlas v2 Website`_ - `Oxygen: Garcia et al. 2014a`_ + - `Nutrients: Garcia et al. 2014b`_ +:ref:`landschuetzer-som-ffn` `SOM-FFN Website`_ - `Landschuetzer et al. 2013`_ + - `Landschuetzer et al. 2014`_ + - `Landschuetzer et al. 2016`_ + - `Bakker et al. 2016`_ +:ref:`seawifs` `Ocean Color Website`_ - `O'Reilly et al. 1998`_ +:ref:`glodapv2` `GLODAPv2 Website`_ - `Olsen et al. 2016`_ + - `Lauvset et al. 2016`_ + - `Olsen et al. 2017`_ +:ref:`drifter_eke` `NOAA/AOML`_ `Laurindo et al. (2017)`_ +:ref:`era5_waves` `Copernicus Climate Data Store`_ - `Hersback et al. 2020`_ +:ref:`sscci_waves` - `CEDA Archive`_ - `Dodet et al. 
2020`_ +============================ ======================================================================================================== ======================================== + +.. _`NCAR Hadley-NOAA/OI SST website`: https://climatedataguide.ucar.edu/climate-data/merged-hadley-noaaoi-sea-surface-temperature-sea-ice-concentration-hurrell-et-al-2008 +.. _`Hurrell et al. (2008)`: https://doi.org/10.1175/2008JCLI2292.1 +.. _`NASA Aquarius Website`: https://podaac.jpl.nasa.gov/dataset/AQUARIUS_L4_OISSS_IPRC_7DAY_V4 +.. _`Lagerloef et al. (2015)`: ftp://podaac.jpl.nasa.gov/SalinityDensity/aquarius/docs/v4/AQ-014-PS-0016_AquariusSalinityDataValidationAnalysis_DatasetVersion4.0and3.0.pdf +.. _`NOAA National Oceanographic Data Center (NODC) website`: https://www.nodc.noaa.gov/OC5/woa18 +.. _`Locarnini et al. (2019)`: https://data.nodc.noaa.gov/woa/WOA18/DOC/woa18_vol1.pdf +.. _`Zweng et al. (2019)`: https://data.nodc.noaa.gov/woa/WOA18/DOC/woa18_vol2.pdf +.. _`NASA JPL AVISO website`: https://podaac.jpl.nasa.gov/dataset/AVISO_L4_DYN_TOPO_1DEG_1MO +.. _`AVISO: Sea Surface Height above Geoid`: ftp://podaac.jpl.nasa.gov/allData/aviso/L4/dynamic_topo_1deg_1mo/docs/zosTechNote_AVISO_L4_199210-201012.pdf +.. _`UCSD Mixed Layer Website`: http://mixedlayer.ucsd.edu/ +.. _`Holte et al. (2017)`: http://onlinelibrary.wiley.com/doi/10.1002/2017GL073426/full +.. _`Holte and Talley (2009)`: http://journals.ametsoc.org/doi/abs/10.1175/2009JTECHO543.1 +.. _`de Boyer Montegut et al. (2004)`: https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/2004JC002378 +.. _`Trenberth and Caron (2001)`: https://doi.org/10.1175/1520-0442%282001%29014<3433%3AEOMAAO>2.0.CO%3B2 +.. _`Scripps Roemmich-Gilson Argo Website`: http://sio-argo.ucsd.edu/RG_Climatology.html +.. _`Roemmich and Gilson (2009)`: http://www.sciencedirect.com/science/article/pii/S0079661109000160 +.. _`SOSE Website at UCSD`: http://sose.ucsd.edu/sose_stateestimation_data_05to10.html +.. _`Mazloff et al. 
(2010)`: http://doi.org/10.1175/2009JPO4236.1 +.. _`Ice-Shelf Melting Around Antarctica`: http://science.sciencemag.org/content/341/6143/266 +.. _`Rignot et al. (2013)`: http://science.sciencemag.org/content/341/6143/266 +.. _`Data from: Interannual variations in meltwater input to the Southern Ocean from Antarctic ice shelves`: https://doi.org/10.6075/J04Q7SHT +.. _`Adusumilli et al. (2020)`: https://doi.org/10.1038/s41561-020-0616-z +.. _`Data from: ANT_G1920V01_IceShelfMelt.nc`: https://doi.org/10.5067/SE3XH9RXQWAM +.. _`Paolo et al. (2023)`: https://doi.org/10.5194/tc-17-3409-2023 +.. _`NCAR Hadley-NOAA/OI SST website`: https://climatedataguide.ucar.edu/climate-data/merged-hadley-noaaoi-sea-surface-temperature-sea-ice-concentration-hurrell-et-al-2008 +.. _`Hurrell et al. (2008)`: https://doi.org/10.1175/2008JCLI2292.1 +.. _`NOAA ERSST v4 website`: https://www.ncdc.noaa.gov/data-access/marineocean-data/extended-reconstructed-sea-surface-temperature-ersst-v4 +.. _`Huang et al. (2014)`: https://journals.ametsoc.org/doi/10.1175/JCLI-D-14-00006.1 +.. _`Liu et al. (2014)`: https://journals.ametsoc.org/doi/10.1175/JCLI-D-14-00007.1 +.. _`Huang et al. (2015)`: https://journals.ametsoc.org/doi/10.1175/JCLI-D-15-0430.1 +.. _`ASCII data file`: https://www.geomar.de/fileadmin/personal/fb1/po/sschmidtko/Antarctic_shelf_data.txt +.. _`Schmidtko et al. (2014)`: http://www.sciencemag.org/cgi/doi/10.1126/science.1256117 +.. _`WOCE Atlas Website`: http://woceatlas.ucsd.edu/ +.. _`CCHDO`: https://cchdo.ucsd.edu/ +.. _`Orsi and Whitworth (2005)`: https://doi.org/10.21976/C6BC78 +.. _`World Ocean Atlas v2 Website`: https://www.nodc.noaa.gov/OC5/woa13/ +.. _`Oxygen: Garcia et al. 2014a`: https://data.nodc.noaa.gov/woa/WOA13/DOC/woa13_vol3.pdf +.. _`Nutrients: Garcia et al. 2014b`: https://data.nodc.noaa.gov/woa/WOA13/DOC/woa13_vol4.pdf +.. _`SOM-FFN Website`: https://www.nodc.noaa.gov/ocads/oceans/SPCO2_1982_2015_ETH_SOM_FFN.html +.. _`Landschuetzer et al. 
2013`: http://doi.org/10.5194/bg-10-7793-2013 +.. _`Landschuetzer et al. 2014`: http://doi.org/10.1002/2014GB004853 +.. _`Landschuetzer et al. 2016`: http://doi.org/10.1002/2015GB005359 +.. _`Bakker et al. 2016`: http://doi.org/10.5194/essd-8-383-2016 +.. _`Ocean Color Website`: https://oceancolor.gsfc.nasa.gov/ +.. _`O'Reilly et al. 1998`: https://agupubs.onlinelibrary.wiley.com/doi/abs/10.1029/98JC02160 +.. _`GLODAPv2 Website`: https://www.nodc.noaa.gov/ocads/oceans/GLODAPv2/ +.. _`Olsen et al. 2016`: http://cdiac.ess-dive.lbl.gov/ftp/oceans/GLODAPv2/Data_Products/essd-8-297-2016.pdf +.. _`Lauvset et al. 2016`: http://cdiac.ess-dive.lbl.gov/ftp/oceans/GLODAPv2/Data_Products/essd-8-325-2016.pdf +.. _`Olsen et al. 2017`: https://www.nodc.noaa.gov/ocads/data/0162565.xml +.. _`NOAA/AOML`: http://www.aoml.noaa.gov/phod/dac/dac_meanvel.php +.. _`Laurindo et al. (2017)`: https://doi.org/10.1016/j.dsr.2017.04.009 +.. _`Copernicus Climate Data Store`: https://doi.org/10.24381/cds.f17050d7 +.. _`Hersback et al. 2020`: https://rmets.onlinelibrary.wiley.com/doi/10.1002/qj.3803 +.. _`CEDA Archive`: http://dx.doi.org/10.5285/47140d618dcc40309e1edbca7e773478 +.. _`Dodet et al. 2020`: https://essd.copernicus.org/articles/12/1929/2020/ + diff --git a/1.11.0rc1/_sources/users_guide/quick_start.rst.txt b/1.11.0rc1/_sources/users_guide/quick_start.rst.txt new file mode 100644 index 000000000..793411e8c --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/quick_start.rst.txt @@ -0,0 +1,319 @@ +.. _quick_start: + + +Quick Start Guide +================= + +Analysis for simulations produced with Model for Prediction Across Scales +(MPAS) components and the Energy Exascale Earth System Model (E3SM), which +used those components. + + +.. image:: _static/sst_example.png + :target: _static/sst_example.png + :alt: sea surface temperature + + +Installation for users +---------------------- + +MPAS-Analysis is available as an anaconda package via the ``conda-forge`` channel: + +.. 
code-block:: + + conda config --add channels conda-forge + conda create -n mpas-analysis mpas-analysis + conda activate mpas-analysis + +Installation for developers +--------------------------- + +To use the latest version for developers, get the code from: + `https://github.com/MPAS-Dev/MPAS-Analysis `_ + +Then, you will need to set up a conda environment from the MPAS-Analysis repo. +This environment will include the required dependencies for the development +branch from ``dev-spec.txt`` and will install the ``mpas_analysis`` package into +the conda environment in a way that points directly to the local branch (so +changes you make to the code directly affect ``mpas_analysis`` in the conda +environment): + +.. code-block:: bash + + conda config --add channels conda-forge + conda config --set channel_priority strict + conda create -y -n mpas_dev --file dev-spec.txt + conda activate mpas_dev + python -m pip install -e . + +If you are developing another conda package at the same time (this is common +for MPAS-Tools or geometric_features), you should first comment out the other +package in ``dev-spec.txt``. Then, you can install both packages in the same +development environment, e.g.: + +.. code-block:: bash + + conda create -y -n mpas_dev --file tools/MPAS-Tools/conda_package/dev-spec.txt \ + --file analysis/MPAS-Analysis/dev-spec.txt + conda activate mpas_dev + cd tools/MPAS-Tools/conda_package + python -m pip install -e . + cd ../../../analysis/MPAS-Analysis + python -m pip install -e . + +Obviously, the paths to the repos may be different in your local clones. With +the ``mpas_dev`` environment as defined above, you can make changes to both +``mpas_tools`` and ``mpas-analysis`` packages in their respective branches, and +these changes will be reflected when refer to the packages or call their +respective entry points (command-line tools). 
+ +Download analysis input data +---------------------------- + +If you installed the ``mpas-analysis`` package, download the data that is +necessary to MPAS-Analysis by running: + +.. code-block:: + + download_analysis_data -o /path/to/mpas_analysis/diagnostics + +where ``/path/to/mpas_analysis/diagnostics`` is the main folder that will contain +two subdirectories: + + +* ``mpas_analysis``\ , which includes mapping and region mask files for + standard resolution MPAS meshes +* ``observations``\ , which includes the pre-processed observations listed in the + `Observations table `_ + and used to evaluate the model results + +Once you have downloaded the analysis data, you will point to its location +(your equivalent of ``path/to/mpas_analysis/diagnostics`` above) in the config +option ``baseDirectory`` in the ``[diagnostics]`` section. + +List Analysis +------------- + +If you installed the ``mpas-analysis`` package, list the available analysis tasks +by running: + +.. code-block:: + + mpas_analysis --list + +This lists all tasks and their tags. These can be used in the ``generate`` +command-line option or config option. See ``mpas_analysis/default.cfg`` +for more details. + +Running the analysis +-------------------- + + +#. Create and empty config file (say ``myrun.cfg``\ ), copy ``example.cfg``\ , + or copy one of the example files in the ``configs`` directory (if using a + git repo) or download one from the + `example configs directory `_. +#. Either modify config options in your new file or copy and modify config + options from ``mpas_analysis/default.cfg`` (in a git repo) or directly + from GitHub: + `default.cfg `_. +#. If you installed the ``mpas-analysis`` package, run: + ``mpas_analysis myrun.cfg``. This will read the configuration + first from ``mpas_analysis/default.cfg`` and then replace that + configuration with any changes from from ``myrun.cfg`` +#. 
+ If you want to run a subset of the analysis, you can either set the + ``generate`` option under ``[output]`` in your config file or use the + ``--generate`` flag on the command line. See the comments in + ``mpas_analysis/default.cfg`` for more details on this option. + + **Requirements for custom config files:** + + +* At minimum you should set ``baseDirectory`` under ``[output]`` to the folder + where output is stored. **NOTE** this value should be a unique + directory for each run being analyzed. If multiple runs are analyzed in + the same directory, cached results from a previous analysis will not be + updated correctly. +* Any options you copy into the config file **must** include the + appropriate section header (e.g. '[run]' or '[output]') +* You do not need to copy all options from ``mpas_analysis/default.cfg``. + This file will automatically be used for any options you do not include + in your custom config file. +* You should **not** modify ``mpas_analysis/default.cfg`` directly. + +List of MPAS output files that are needed by MPAS-Analysis: +----------------------------------------------------------- + + +* mpas-o files: + + * ``mpaso.hist.am.timeSeriesStatsMonthly.*.nc`` (Note: since OHC + anomalies are computed wrt the first year of the simulation, + if OHC diagnostics is activated, the analysis will need the + first full year of ``mpaso.hist.am.timeSeriesStatsMonthly.*.nc`` + files, no matter what ``[timeSeries]/startYear`` and + ``[timeSeries]/endYear`` are. This is especially important to know if + short term archiving is used in the run to analyze: in that case, set + ``[input]/runSubdirectory``\ , ``[input]/oceanHistorySubdirectory`` and + ``[input]/seaIceHistorySubdirectory`` to the appropriate run and archive + directories and choose ``[timeSeries]/startYear`` and + ``[timeSeries]/endYear`` to include only data that have been short-term + archived). 
+ * ``mpaso.hist.am.meridionalHeatTransport.0001-03-01.nc`` (or any + ``hist.am.meridionalHeatTransport`` file) + * ``mpaso.rst.0002-01-01_00000.nc`` (or any other mpas-o restart file) + * ``streams.ocean`` + * ``mpaso_in`` + +* mpas-seaice files: + + * ``mpasseaice.hist.am.timeSeriesStatsMonthly.*.nc`` + * ``mpasseaice.rst.0002-01-01_00000.nc`` (or any other mpas-seaice restart + file) + * ``streams.seaice`` + * ``mpassi_in`` + +Note: for older runs, mpas-seaice files will be named: + + +* ``mpascice.hist.am.timeSeriesStatsMonthly.*.nc`` +* ``mpascice.rst.0002-01-01_00000.nc`` +* ``streams.cice`` +* ``mpas-cice_in`` + Also, for older runs ``mpaso_in`` will be named: +* ``mpas-o_in`` + +Purge Old Analysis +------------------ + +To purge old analysis (delete the whole output directory) before running run +the analysis, add the ``--purge`` flag. If you installed ``mpas-analysis`` as +a package, run: + +.. code-block:: + + mpas_analysis --purge + +All of the subdirectories listed in ``output`` will be deleted along with the +climatology subdirectories in ``oceanObservations`` and ``seaIceObservations``. + +It is a good policy to use the purge flag for most changes to the config file, +for example, updating the start and/or end years of climatologies (and +sometimes time series), changing the resolution of a comparison grid, renaming +the run, changing the seasons over which climatologies are computed for a given +task, updating the code to the latest version. 
+ +Cases where it is reasonable not to purge would be, for example, changing +options that only affect plotting (color map, ticks, ranges, font sizes, etc.), +rerunning with a different set of tasks specified by the ``generate`` option +(though this will often cause climatologies to be re-computed with new +variables and may not save time compared with purging), generating only the +final website with ``--html_only``\ , and re-running after the simulation has +progressed to extend time series (however, not recommended for changing the +bounds on climatologies, see above). + +Running in parallel via a queueing system +----------------------------------------- + +If you are running from a git repo: + + +#. If you are running from a git repo, copy the appropriate job script file + from ``configs/`` to the root directory (or another directory + if preferred). The default script, ``configs/job_script.default.bash``\ , is + appropriate for a laptop or desktop computer with multiple cores. +#. If using the ``mpas-analysis`` conda package, download the job script and/or + sample config file from the + `example configs directory `_. +#. Modify the number of parallel tasks, the run name, the output directory + and the path to the config file for the run. +#. Note: the number of parallel tasks can be anything between 1 and the + number of analysis tasks to be performed. If there are more tasks than + parallel tasks, later tasks will simply wait until earlier tasks have + finished. +#. Submit the job using the modified job script + +If a job script for your machine is not available, try modifying the default +job script in ``configs/job_script.default.bash`` or one of the job scripts for +another machine to fit your needs. + +Customizing plots or creating new ones +-------------------------------------- + +There are three main ways to either customize the plots that MPAS-Analysis +already makes or creating new ones: + + +#. customize the config file. 
Some features, such as colormaps and colorbar + limits for color shaded plot or depth ranges for ocean region time series, + can be customized: look at ``mpas_analysis/default.cfg`` for available + customization for each analysis task. +#. read in the analysis data computed by MPAS-Analysis into custom scripts. When + running MPAS-Analysis with the purpose of generating both climatologies + and time series, the following data sets are generated: + + * ``[baseDirectory]/clim/mpas/avg/unmasked_[mpasMeshName]``\ : MPAS-Ocean + and MPAS-seaice climatologies on the native grid. + * ``[baseDirectory]/clim/mpas/avg/remapped``\ : remapped climatologies + for each chosen task (climatology files are stored in different + subdirectories according to the task name). + * ``[baseDirectory]/clim/obs``\ : observational climatologies. + * ``[baseDirectory]/clim/mpas/avg/mocStreamfunction_years[startYear]-[endYear].nc``. + * ``[baseDirectory]/clim/mpas/avg/meridionalHeatTransport_years[startYear]-[endYear].nc``. + * ``[baseDirectory]/timeseries``\ : various time series data. + Custom scripts can then utilize these datasets to generate custom plots. + +#. add a new analysis task to MPAS-Analysis (see below). + +Instructions for creating a new analysis task +--------------------------------------------- + +Analysis tasks can be found in a directory corresponding to each component, +e.g., ``mpas_analysis/ocean`` for MPAS-Ocean. Shared functionality is contained +within the ``mpas_analysis/shared`` directory. + + +#. create a new task by ``copying mpas_analysis/analysis_task_template.py`` to + the appropriate folder (\ ``ocean``\ , ``sea_ice``\ , etc.) and modifying it as + described in the template. Take a look at + ``mpas_analysis/shared/analysis_task.py`` for additional guidance. +#. note, no changes need to be made to ``mpas_analysis/shared/analysis_task.py`` +#. modify ``mpas_analysis/default.cfg`` (and possibly any machine-specific + config files in ``configs/``\ ) +#. 
import new analysis task in ``mpas_analysis//__init__.py`` +#. add new analysis task to ``mpas_analysis/__main__.py`` under + ``build_analysis_list``\ , see below. + +A new analysis task can be added with: + +.. code-block:: + + analyses.append(.MyTask(config, myArg='argValue')) + +This will add a new object of the ``MyTask`` class to a list of analysis tasks +created in ``build_analysis_list``. Later on in ``run_analysis``\ , it will first +go through the list to make sure each task needs to be generated +(by calling ``check_generate``\ , which is defined in ``AnalysisTask``\ ), then, +will call ``setup_and_check`` on each task (to make sure the appropriate AM is +on and files are present), and will finally call ``run`` on each task that is +to be generated and is set up properly. + +Generating Documentation +------------------------ + +Create a development environment as described above in "Installation for +developers". Then run: +To generate the ``sphinx`` documentation, run: + +.. code-block:: + + cd docs + make clean + make html + +The results can be viewed in your web browser by opening: + +.. code-block:: + + _build/html/index.html diff --git a/1.11.0rc1/_sources/users_guide/seaice_obs_table.rst.txt b/1.11.0rc1/_sources/users_guide/seaice_obs_table.rst.txt new file mode 100644 index 000000000..3c3a2634e --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/seaice_obs_table.rst.txt @@ -0,0 +1,41 @@ +========================= =============================================== =================================== +Observational Dataset Source References +========================= =============================================== =================================== +:ref:`nasateam_conc` `NSIDC NASATeam Website`_ `Cavalieri et al. (1996)`_ +:ref:`bootstrap_conc` `NSIDC Bootstrap Website`_ `Comiso (2017)`_ +:ref:`ssmi_ice_area` `NASA Ice area and extent website`_ - `Cavalieri et al. (1999)`_ + - `Cavalieri et al. 
(2012)`_ + - `Cavalieri and Parkinson (2012)`_ + - `Parkinson et al. (1999)`_ + - `Parkinson and Cavalieri (2012)`_ + - `Zwally et al. (2002)`_ +:ref:`icesat_thickness` `NASA: Arctic Sea Ice Freeboard and Thickness`_ `Yi and Zwally (2009)`_ +:ref:`piomass_ice_volume` `PIOMAS website`_ - `Schweiger et al. (2011)`_ + - `Zhang and Rothrock (2003)`_ +:ref:`aniceflux` `EnviDat`_ `Haumann et al (2016), data`_ + `Haumann et al (2016), paper`_ +:ref:`altiberg` `Altiberg website`_ - `Tournade et al (2017)`_ +========================= =============================================== =================================== + +.. _`NSIDC NASATeam Website`: http://nsidc.org/data/NSIDC-0051 +.. _`Cavalieri et al. (1996)`: https://doi.org/10.5067/8GQ8LZQVL0VL +.. _`NSIDC Bootstrap Website`: http://nsidc.org/data/NSIDC-0079 +.. _`Comiso (2017)`: https://doi.org/10.5067/7Q8HCCWS4I0R +.. _`NASA Ice area and extent website`: https://neptune.gsfc.nasa.gov/csb/index.php?section=59 +.. _`Cavalieri et al. (1999)`: https://doi.org/10.1029/1999JC900081 +.. _`Cavalieri et al. (2012)`: https://doi.org/10.1109/LGRS.2011.2166754 +.. _`Cavalieri and Parkinson (2012)`: https://doi.org/10.5194/tc-6-881-2012 +.. _`Parkinson et al. (1999)`: https://doi.org/10.1029/1999JC900082 +.. _`Parkinson and Cavalieri (2012)`: https://doi.org/10.5194/tc-6-871-2012 +.. _`Zwally et al. (2002)`: https://doi.org/10.1029/2000JC000733 +.. _`NASA: Arctic Sea Ice Freeboard and Thickness`: http://nsidc.org/data/NSIDC-0393 +.. _`Yi and Zwally (2009)`: https://doi.org/10.5067/SXJVJ3A2XIZT +.. _`PIOMAS website`: http://psc.apl.uw.edu/research/projects/arctic-sea-ice-volume-anomaly/ +.. _`Schweiger et al. (2011)`: https://doi.org/10.1029/2011JC007084 +.. _`Zhang and Rothrock (2003)`: https://doi.org/10.1175/1520-0493%282003%29131<0845:MGSIWA>2.0.CO;2 +.. _`EnviDat`: https://www.envidat.ch/dataset/10-16904-8 +.. _`Haumann et al (2016), data`: https://doi.org/10.16904/8 +.. 
_`Haumann et al (2016), paper`: https://doi.org/10.1038/nature19101 +.. _`Altiberg website`: http://cersat.ifremer.fr/user-community/news/item/473-altiberg-a-database-for-small-icebergs +.. _`Tournade et al (2017)`: ftp://ftp.ifremer.fr/ifremer/cersat/projects/altiberg/v2/documentation/ALTIBERG-rep_v2_1.pdf + diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapAntarcticMelt.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapAntarcticMelt.rst.txt new file mode 100644 index 000000000..6c769b5a9 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapAntarcticMelt.rst.txt @@ -0,0 +1,86 @@ +.. _task_climatologyMapAntarcticMelt: + +climatologyMapAntarcticMelt +=========================== + +An analysis task for comparison of Antarctic maps of melt rates against +observations from `Paolo et al. (2023) `_. + +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, meltRate, landIceCavities + + +Configuration Options +--------------------- + +The following configuration options are available for this task: + +.. code-block:: cfg + + [climatologyMapAntarcticMelt] + ## options related to plotting horizontally regridded maps of Antarctic + ## sub-ice-shelf melt rates against control model results and observations + + # comparison grid(s) + # only the Antarctic really makes sense but lat-lon could technically work. 
+ comparisonGrids = ['antarctic'] + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['JFM', 'JAS', 'ANN'] + + # colormap for model/observations + colormapNameResult = erdc_iceFire_H + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = symLog + # A dictionary with keywords for the norm + normArgsResult = {'linthresh': 1., 'linscale': 0.5, 'vmin': -100., + 'vmax': 100.} + colorbarTicksResult = [-100., -50., -20., -10., -5., -2., -1., 0., 1., 2., 5., + 10., 20., 50., 100.] + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = symLog + # A dictionary with keywords for the norm + normArgsDifference = {'linthresh': 1., 'linscale': 0.5, 'vmin': -100., + 'vmax': 100.} + colorbarTicksDifference = [-100., -50., -20., -10., -5., -2., -1., 0., 1., 2., + 5., 10., 20., 50., 100.] + + # make tables of mean melt rates and melt fluxes for individual ice shelves? + makeTables = False + + # If making tables, which ice shelves? This is a list of ice shelves or + # ['all'] for all 106 ice shelves and regions. + iceShelvesInTable = [] + +The option ``makeTables`` determines if tables of mean melt rates, averaged +over each ice shelf or region, are computed. ``iceShelvesInTable`` is a list +of the ice shelf or region names to include in the table. A value of ``'all'`` +indicates that all 106 ice shelves and regions will be included in the table. +The table is in ``csv`` format and can be found in the ``tables`` subdirectory. + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +:ref:`paolo_melt` + +Example Result +-------------- + +..
image:: examples/ant_melt.png + :width: 720 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapArgoSalinity.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapArgoSalinity.rst.txt new file mode 100644 index 000000000..8833caf6c --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapArgoSalinity.rst.txt @@ -0,0 +1,74 @@ +.. _task_climatologyMapArgoSalinity: + +climatologyMapArgoSalinity +============================= + +An analysis task for comparing salinity at various depths against +Argo observations. + +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, argo, salinity, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapArgoSalinity] + ## options related to plotting climatology maps of Global + ## salinity at various levels against + ## reference model results and the Roemmich-Gilson Argo Climatology + + # comparison grid(s) + comparisonGrids = ['latlon'] + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN','JFM','JAS'] + + # list of depths in meters (positive up) at which to analyze, 'top' for the + # sea surface. 
Argo data is only available above -2000 m + depths = ['top', -25, -50, -100, -150, -200, -400, -600, -800, -1500] + + # colormap for model/observations + colormapNameResult = haline + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 30, 'vmax': 39.0} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(34.2, 35.2, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9) + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +The option ``depths`` is a list of (approximate) depths at which to sample +the salinity field. A value of ``'top'`` indicates the sea +surface (or the ice-ocean interface under ice shelves) while a value of +``'bot'`` indicates the seafloor. + +Observations +------------ + +:ref:`roemmich_gilson_argo` + + +Example Result +-------------- + +.. image:: examples/clim_argo_salin.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapArgoTemperature.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapArgoTemperature.rst.txt new file mode 100644 index 000000000..671ee22d4 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapArgoTemperature.rst.txt @@ -0,0 +1,74 @@ +.. _task_climatologyMapArgoTemperature: + +climatologyMapArgoTemperature +============================= + +An analysis task for comparing potential temperature at various depths against +Argo observations. 
+ +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, argo, temperature, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapArgoTemperature] + ## options related to plotting climatology maps of Global + ## potential temperature at various levels against + ## reference model results and Roemmich-Gilson Argo Climatology + + # comparison grid(s) + comparisonGrids = ['latlon'] + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN','JAS','JFM'] + + # list of depths in meters (positive up) at which to analyze, 'top' for the + # sea surface. Argo data is only available above -2000 m + depths = ['top', -25, -50, -100, -150, -200, -400, -800, -1500] + + + # colormap for model/observations + colormapNameResult = RdYlBu_r + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -2., 'vmax': 30.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 2., 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -2., 'vmax': 2.} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-2., 2., 9) + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +The option ``depths`` is a list of (approximate) depths at which to sample +the potential temperature field. A value of ``'top'`` indicates the sea +surface (or the ice-ocean interface under ice shelves) while a value of +``'bot'`` indicates the seafloor. + +Observations +------------ + +:ref:`roemmich_gilson_argo` + +Example Result +-------------- + +.. 
image:: examples/clim_argo_temp.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapBGC.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapBGC.rst.txt new file mode 100644 index 000000000..c79b7a0b2 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapBGC.rst.txt @@ -0,0 +1,194 @@ +.. _task_climatologyMapBGC: + +climatologyMapBGC +================= + +An analysis task for comparison of global maps of biogeochemistry (BGC) fields +against observations. + +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, BGC, PO4, NO3, SiO3, CO2_gas_flux, pH_3D, + DIC, ALK, O2, pCO2surface, Chl + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapBGC] + ## options related to plotting climatology mpas of BGC + + # Variables to plot: + # pH : 'pH_3D' + # DIC : 'DIC' + # ALK : 'ALK' + # PO4 : 'PO4' + # NO3 : 'NO3' + # SiO3 : 'SiO3' + # CO2 flux : 'CO2_gas_flux' + # O2 : 'O2' + variables = ['PO4', 'NO3', 'SiO3', 'CO2_gas_flux', 'pH_3D', 'DIC', 'ALK', + 'O2', 'pCO2surface', 'Chl'] + + # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, + # Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN', 'JFM', 'JAS'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon', 'antarctic'] + + # Whether to compare to preindustrial observations that are available. + preindustrial = False + + [climatologyMapBGC_PO4] + # Colormap for climatology + colormapNameResult = dense + # Colormap for clim - obs difference + colormapNameDifference = balance + # linear vs. log scaling for climatology + normTypeResult = linear + # Colorbar bounds for climatology + normArgsResult = {'vmin': 0, 'vmax': 2.5} + # linear vs. 
log scaling for obs + normTypeDifference = linear + # Colorbar bounds for obs + normArgsDifference = {'vmin': -1, 'vmax': 1} + # BGC property units + units = mmol m$^{-3}$ + # Prefix to variable name in MPAS-O output + filePrefix = timeMonthly_avg_ecosysTracers_ + # Acronym/label for observational dataset + observationsLabel = WOA + # Acronym/label for gallery variable (can be different from MPAS name) + galleryLabel = PO4 + + [climatologyMapBGC_NO3] + colormapNameResult = dense + colormapNameDifference = balance + normTypeResult = linear + normArgsResult = {'vmin': 0, 'vmax': 35.0} + normTypeDifference = linear + normArgsDifference = {'vmin': -10, 'vmax': 10} + units = mmol m$^{-3}$ + filePrefix = timeMonthly_avg_ecosysTracers_ + observationsLabel = WOA + galleryLabel = NO3 + + [climatologyMapBGC_SiO3] + colormapNameResult = dense + colormapNameDifference = balance + normTypeResult = linear + normArgsResult = {'vmin': 0, 'vmax': 80} + normTypeDifference = linear + normArgsDifference = {'vmin': -20, 'vmax': 20} + units = mmol m$^{-3}$ + filePrefix = timeMonthly_avg_ecosysTracers_ + observationsLabel = WOA + galleryLabel = SiO3 + + [climatologyMapBGC_CO2_gas_flux] + colormapNameResult = BrBG_r + colormapNameDifference = balance + normTypeResult = linear + normArgsResult = {'vmin': -5, 'vmax': 5} + normTypeDifference = linear + normArgsDifference = {'vmin': -5, 'vmax': 5} + units = mol m$^{-2}$ yr$^{-1}$ + filePrefix = timeMonthly_avg_ + observationsLabel = SOM-FFNv2016 + galleryLabel = CO2 Flux + + [climatologyMapBGC_O2] + colormapNameResult = matter + colormapNameDifference = balance + normTypeResult = linear + normArgsResult = {'vmin': 2, 'vmax': 8} + normTypeDifference = linear + normArgsDifference = {'vmin': -2, 'vmax': 2} + units = mL/L + filePrefix = timeMonthly_avg_ecosysTracers_ + observationsLabel = WOA + galleryLabel = O2 + + [climatologyMapBGC_pH_3D] + colormapNameResult = PuBuGn_r + colormapNameDifference = balance + normTypeResult = linear + 
normArgsResult = {'vmin': 8, 'vmax': 8.2} + normTypeDifference = linear + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + units = + filePrefix = timeMonthly_avg_ecosys_diag_ + observationsLabel = GLODAPv2 + galleryLabel = pH + + [climatologyMapBGC_DIC] + colormapNameResult = YlGnBu + colormapNameDifference = balance + normTypeResult = linear + normArgsResult = {'vmin': 1900, 'vmax': 2300} + normTypeDifference = linear + normArgsDifference = {'vmin': -100, 'vmax': 100} + units = mmol m$^{-3}$ + filePrefix = timeMonthly_avg_ecosysTracers_ + observationsLabel = GLODAPv2 + galleryLabel = DIC + + [climatologyMapBGC_ALK] + colormapNameResult = PuBuGn + colormapNameDifference = balance + normTypeResult = linear + normArgsResult = {'vmin': 2150, 'vmax': 2450} + normTypeDifference = linear + normArgsDifference = {'vmin': -100, 'vmax': 100} + units = meq m$^{-3}$ + filePrefix = timeMonthly_avg_ecosysTracers_ + observationsLabel = GLODAPv2 + galleryLabel = Alkalinity + + [climatologyMapBGC_pCO2surface] + colormapNameResult = viridis + colormapNameDifference = balance + normTypeResult = linear + normArgsResult = {'vmin': 300, 'vmax': 450} + normTypeDifference = linear + normArgsDifference = {'vmin': -50, 'vmax': 50} + units = $\mu$atm + filePrefix = timeMonthly_avg_ecosys_diag_ + observationsLabel = SOM-FFNv2016 + galleryLabel = pCO2 + + [climatologyMapBGC_Chl] + colormapNameResult = viridis + colormapNameDifference = balance + normTypeResult = log + normArgsResult = {'vmin': 0.01, 'vmax': 20} + normTypeDifference = symLog + normArgsDifference = {'linthresh': 0.1, 'vmin': -10, 'vmax': 10} + units = mg m$^{-3}$ + filePrefix = timeMonthly_avg_ecosysTracers_ + observationsLabel = SeaWIFS + galleryLabel = Chlorophyll + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + + * :ref:`woa` + * :ref:`roemmich_gilson_argo` + * :ref:`seawifs` + * :ref:`glodapv2` + + +Example Result 
+-------------- + +.. image:: examples/bgc.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapEKE.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapEKE.rst.txt new file mode 100644 index 000000000..755a95124 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapEKE.rst.txt @@ -0,0 +1,65 @@ +.. _task_climatologyMapEKE: + +climatologyMapEKE +================= + +An analysis task for comparison of global maps of eddy kinetic energy (EKE) +against observations. + +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, eke, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapEKE] + ## options related to plotting horizontally remapped climatologies of + ## eddy kinetic energy (EKE) against reference model results and observations + + # colormap for model/observations + colormapNameResult = magma_r + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 0., 'vmax': 1000.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 2., 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -300., 'vmax': 300.} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-2., 2., 9) + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = 
['latlon'] + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +:ref:`drifter_eke` + +Example Result +-------------- + +.. image:: examples/eke.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapIcebergConcSH.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapIcebergConcSH.rst.txt new file mode 100644 index 000000000..37767b848 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapIcebergConcSH.rst.txt @@ -0,0 +1,81 @@ +.. _task_climatologyMapIcebergConcSH: + +climatologyMapIcebergConcSH +=========================== + +An analysis task for plotting maps of Antarctic iceberg concentration against +observations. + +Component and Tags:: + + component: seaIce + tags: icebergs, climatology, horizontalMap + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapIcebergConcSH] + ## options related to plotting horizontally remapped climatologies of + ## iceberg concentration against reference model results and observations + ## in the southern hemisphere (SH) + + # colormap for model/observations + colormapNameResult = ice + # the type of norm used in the colormap + normTypeResult = log + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 1e-5, 'vmax': 1e-2} + # specify the ticks + colorbarTicksResult = [1e-5, 1e-4, 1e-3, 1e-2] + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = symLog + # A dictionary with keywords for the norm + normArgsDifference = {'linthresh': 1e-5, 'linscale': 1, 'vmin': -1e-2, 'vmax': 1e-2} + + # Months or seasons to plot (These should be left unchanged, since + # observations are only available for these seasons) + seasons = ['ANN', 'DJF', 'JJA'] + + # comparison grid(s) ('latlon', 'antarctic') on which to 
plot analysis + comparisonGrids = ['latlon'] + + # reference lat/lon for iceberg plots in the southern hemisphere + minimumLatitude = -50 + referenceLongitude = 180 + + # a list of prefixes describing the sources of the observations to be used + observationPrefixes = ['Altiberg'] + + # arrange subplots vertically? + vertical = False + + # observations files + concentrationAltibergSH = Altiberg/Altiberg_1991-2017_20180308.nc + +The option ``minimumLatitude`` determines what the northernmost latitude (in +degrees) included in the plot will be. The option ``referenceLongitude`` +defines which longitude will be at the bottom of the plot. + +The option ``vertical = True`` can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image). + +For details on the remaining configration options, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ +* :ref:`altiberg` + +Example Result +-------------- +.. image:: examples/berg_conc_sh.png + :width: 720 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapMLD.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapMLD.rst.txt new file mode 100644 index 000000000..014f54363 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapMLD.rst.txt @@ -0,0 +1,63 @@ +.. _task_climatologyMapMLD: + +climatologyMapMLD +================= + +An analysis task for comparison of global maps of mixed layer depth (MLD) +against observations. 
+ +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, mld, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapMLD] + ## options related to plotting horizontally remapped climatologies of + ## mixed layer depth (MLD) against control model results and observations + + # colormap for model/observations + colormapNameResult = viridis + # whether the colormap is indexed or continuous + colormapTypeResult = indexed + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.] + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150] + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['JFM', 'JAS', 'ANN'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +:ref:`argo_mld` + +Example Result +-------------- + +.. 
image:: examples/mld.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapMLDMinMax.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapMLDMinMax.rst.txt new file mode 100644 index 000000000..6feb6f4c9 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapMLDMinMax.rst.txt @@ -0,0 +1,239 @@ +.. _task_climatologyMapMLDMinMax: + +climatologyMapMLDMinMax +======================= + +An analysis task for plotting climatologies of monthly min and max of +mixed layer depth (MLD). + +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, mld, publicObs, min, max + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapMLDMinMax] + ## options related to plotting horizontally remapped climatologies of + ## monthly min/max mixed layer depth (MLD), optionally against control model + ## results + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['JFM', 'JAS', 'ANN'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + + [climatologyMapDensityMLDMinMax] + ## color mpas for density-threshold MLD min/max plots in the same figure + + # colormap for model/observations + colormapNameResult = viridis + # whether the colormap is indexed or continuous + colormapTypeResult = indexed + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.] 
+ + # colormap for differences + colormapNameDifference = viridis + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [0, 1, 5, 10, 20, 30, 50, 80, 110, 150] + + [climatologyMapDensityMLDMin] + ## color mpas for density MLD min plots in separate figures for main vs. control + + # colormap for model/observations + colormapNameResult = viridis + # whether the colormap is indexed or continuous + colormapTypeResult = indexed + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.] + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150] + + [climatologyMapDensityMLDMax] + ## color mpas for density MLD max plots in separate figures for main vs. control + + # colormap for model/observations + colormapNameResult = viridis + # whether the colormap is indexed or continuous + colormapTypeResult = indexed + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.] 
+ + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150] + + + [climatologyMapTemperatureMLDMinMax] + ## color mpas for temperature-threshold MLD min/max plots in the same figure + + # colormap for model/observations + colormapNameResult = viridis + # whether the colormap is indexed or continuous + colormapTypeResult = indexed + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.] + + # colormap for differences + colormapNameDifference = viridis + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [0, 1, 5, 10, 20, 30, 50, 80, 110, 150] + + [climatologyMapTemperatureMLDMin] + ## color mpas for temperature MLD min plots in separate figures for main vs. control + + # colormap for model/observations + colormapNameResult = viridis + # whether the colormap is indexed or continuous + colormapTypeResult = indexed + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.] 
+ + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150] + + [climatologyMapTemperatureMLDMax] + ## color mpas for temperature MLD max plots in separate figures for main vs. control + + # colormap for model/observations + colormapNameResult = viridis + # whether the colormap is indexed or continuous + colormapTypeResult = indexed + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.] + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150] + + + [climatologyMapBLDMinMax] + ## color mpas for boundary-layer depth min/max plots in the same figure + + # colormap for model/observations + colormapNameResult = viridis + # whether the colormap is indexed or continuous + colormapTypeResult = indexed + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.] 
+ + # colormap for differences + colormapNameDifference = viridis + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [0, 1, 5, 10, 20, 30, 50, 80, 110, 150] + + [climatologyMapBLDMin] + ## color mpas for BLD min plots in separate figures for main vs. control + + # colormap for model/observations + colormapNameResult = viridis + # whether the colormap is indexed or continuous + colormapTypeResult = indexed + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.] + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150] + + [climatologyMapBLDMax] + ## color mpas for BLD max plots in separate figures for main vs. control + + # colormap for model/observations + colormapNameResult = viridis + # whether the colormap is indexed or continuous + colormapTypeResult = indexed + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.] 
+ + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = indexed + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150] + +Config sections ``climatologyMapDensityMLD*`` apply to plots of the MLD based +on a density threshold, while ``climatologyMapTemperatureMLD*`` are those +related to MLD with a temperature threshold, and ``climatologyMapBLD*`` +control plots of the boundary-layer depth (BLD). Each of these has a +section ending in ``MinMax`` that controls plots when not comparing a main +run with a reference run, in which case the maximum field is in the top panel, +minimum in the middle panel and the difference in the bottom panel. When +comparing with a reference run, min. and max. plots are performed separately +along with their corresponding fields from the reference run and the difference +between main and reference. In these cases, a different color map may be +appropriate (e.g. with positive and negative values for the difference, rather +than just positive for max. minus min.). + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapOHCAnomaly.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapOHCAnomaly.rst.txt new file mode 100644 index 000000000..bc8b976c0 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapOHCAnomaly.rst.txt @@ -0,0 +1,67 @@ +.. _task_climatologyMapOHCAnomaly: + +climatologyMapOHCAnomaly +======================== + +An analysis task for plotting anomalies in ocean heat content (OHC) compared +with a reference year (typically the start of the simulation). 
+ +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, deltaOHC, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapOHCAnomaly] + ## options related to plotting horizontally remapped climatologies of + ## ocean heat content (OHC) against reference model results (if available) + + # colormap for model/observations + colormapNameResult = BuOr + # color indices into colormapName for filled contours + colormapIndicesResult = numpy.array(numpy.linspace(0, 255, 38), int) + # colormap levels/values for contour boundaries + colorbarLevelsResult = numpy.linspace(-12., 12., 37) + # colormap levels/values for ticks (defaults to same as levels) + colorbarTicksResult = numpy.linspace(-12., 12., 9) + + # colormap for differences + colormapNameDifference = balance + # color indices into colormapName for filled contours + colormapIndicesDifference = numpy.array(numpy.linspace(0, 255, 10), int) + # colormap levels/values for contour boundaries + colorbarLevelsDifference = numpy.linspace(-2., 2., 11) + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, + # Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + # A list of pairs of minimum and maximum depths (positive up, in meters) to + # include in the vertical sums. The default values are the equivalents of the + # default ranges of the timeSeriesOHCAnomaly task, with a value of -10,000 m + # intended to be well below the bottom of the ocean for all existing MPAS-O + # meshes. 
+ depthRanges = [(0.0, -10000.0), (0.0, -700.0), (-700.0, -2000.0), + (-2000.0, -10000.0)] + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +The option ``depthRanges`` is a list of pairs of upper and lower bounds in +meters over which to integrate the heat content. + +Example Result +-------------- + +.. image:: examples/clim_ohc.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSSH.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSSH.rst.txt new file mode 100644 index 000000000..11dd09a41 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSSH.rst.txt @@ -0,0 +1,79 @@ +.. _task_climatologyMapSSH: + +climatologyMapSSH +================= + +An analysis task for comparison of global maps of sea surface height (SSH) +with zero mean against observations. The mean has been subtracted because the +initial sea level is somewhat arbitrary and will lead to a systematic offset +when compared with the observations. + +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, ssh, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSSH] + ## options related to plotting horizontally remapped climatologies of + ## sea surface height (SSH) against reference model results and observations + + # colormap for model/observations + colormapNameResult = Maximenko + # color indices into colormapName for filled contours + colormapIndicesResult = numpy.array(numpy.linspace(0, 255, 38), int) + # colormap levels/values for contour boundaries + colorbarLevelsResult = numpy.arange(-240., 130., 10.) + # colormap levels/values for ticks (defaults to same as levels) + colorbarTicksResult = numpy.arange(-240., 160., 40.) + + # contour line levels + contourLevelsResult = numpy.arange(-240., 130., 10.) 
+ # contour line thickness + contourThicknessResult = 0.25 + # contour color + contourColorResult = 0.25 + + # colormap for differences + colormapNameDifference = balance + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 23, 46, 70, 93, 116, 128, 128, 139, 162, 185, + 209, 232, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-100., -80., -60., -40., -20., -10., 0., 10., + 20., 40., 60., 80., 100.] + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['JFM', 'JAS', 'ANN'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +.. note:: + + The color map and contours for these plots were inspired by Fig. 1 from + `Maximenko et al. (2009)`_ + +Observations +------------ + +:ref:`aviso_ssh` + +Example Result +-------------- + +.. image:: examples/ssh.png + :width: 500 px + :align: center + +.. _`Maximenko et al. (2009)`: http://journals.ametsoc.org/doi/abs/10.1175/2009JTECHO672.1 \ No newline at end of file diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSSS.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSSS.rst.txt new file mode 100644 index 000000000..9a7230997 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSSS.rst.txt @@ -0,0 +1,60 @@ +.. _task_climatologyMapSSS: + +climatologyMapSSS +================= + +An analysis task for comparison of global maps of sea surface salinity (SSS) +against observations. 
+ +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, sss, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSSS] + ## options related to plotting horizontally remapped climatologies of + ## sea surface salinity (SSS) against reference model results and observations + + # colormap for model/observations + colormapNameResult = haline + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [28, 29, 30, 31, 32, 33, 34, 35, 36, 38] + + # colormap for differences + colormapNameDifference = RdBu_r + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, + 227, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-3, -2, -1, -0.5, -0.02, 0, 0.02, 0.5, 1, 2, 3] + + # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, + # Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['JFM', 'JAS', 'ANN'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +:ref:`aquarius_sss` + +Example Result +-------------- + +.. image:: examples/sss.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSST.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSST.rst.txt new file mode 100644 index 000000000..cd24ba0f0 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSST.rst.txt @@ -0,0 +1,74 @@ +.. 
_task_climatologyMapSST: + +climatologyMapSST +================= + +An analysis task for comparison of global maps of sea surface temperature (SST) +against observations. + +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, sst, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSST] + ## options related to plotting horizontally remapped climatologies of + ## sea surface temperature (SST) against reference model results and + ## observations + + # colormap for model/observations + colormapNameResult = RdYlBu_r + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] + + # colormap for differences + colormapNameDifference = RdBu_r + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, + 227, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-5, -3, -2, -1, -0.1, 0, 0.1, 1, 2, 3, 5] + + # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, + # Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['JFM', 'JAS', 'ANN'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + # first and last year of SST observational climatology (preferably one of the + # two ranges given below) + # values for preindustrial + obsStartYear = 1870 + obsEndYear = 1900 + # alternative values for present day + #obsStartYear = 1990 + #obsEndYear = 2011 + +By default, a "preindustrial" climatology is computed for comparison with the +model results. For simulations covering a different time period, the range of +years (``obsStartYear`` and ``obsEndYear``) should be updated. 
+ +For details on the remaining configuration options, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +:ref:`hadley_center_sst` + +Example Result +-------------- + +.. image:: examples/sst.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSchmidtko.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSchmidtko.rst.txt new file mode 100644 index 000000000..80212cb65 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSchmidtko.rst.txt @@ -0,0 +1,115 @@ +.. _task_climatologyMapSchmidtko: + +climatologyMapSchmidtko +======================= + +An analysis task for comparison of Antarctic maps of seafloor potential +temperature, salinity and potential density against observations from +`Schmidtko et al. (2014)`_. + +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, temperature, salinity, potentialDensity + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSchmidtko] + ## options related to plotting climatology maps of Antarctic fields at the + ## seafloor and comparing them against data from Schmidtko et al. (2014) + + # comparison grid(s) + # only the Antarctic really makes sense but lat-lon could technically work. + comparisonGrids = ['antarctic'] + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN','JFM','JAS'] + + [climatologyMapSchmidtkoTemperature] + ## options related to plotting climatology maps of potential temperature at the + ## seafloor and comparing them against data from Schmidtko et al. 
(2014) + + # colormap for model/observations + colormapNameResult = RdYlBu_r + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -2., 'vmax': 2.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 2., 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -2., 'vmax': 2.} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-2., 2., 9) + + [climatologyMapSchmidtkoSalinity] + ## options related to plotting climatology maps of salinity at the + ## seafloor and comparing them against data from Schmidtko et al. (2014) + + # colormap for model/observations + colormapNameResult = haline + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 33.8, 'vmax': 35.0} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(33.8, 35.0, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9) + + [climatologyMapSchmidtkoPotentialDensity] + ## options related to plotting climatology maps of potential density at the + ## seafloor and comparing them against data from Schmidtko et al. 
(2014) + + # colormap for model/observations + colormapNameResult = Spectral_r + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 1026.5, 'vmax': 1028.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(1026.5, 1028., 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.3, 'vmax': 0.3} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.3, 0.3, 9) + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +:ref:`schmidtko` + +Example Result +-------------- + +.. image:: examples/schmidtko_temp.png + :width: 720 px + :align: center + +.. _`Schmidtko et al. (2014)`: http://www.sciencemag.org/cgi/doi/10.1126/science.1256117 \ No newline at end of file diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceConcNH.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceConcNH.rst.txt new file mode 100644 index 000000000..d4516f68d --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceConcNH.rst.txt @@ -0,0 +1,99 @@ +.. _task_climatologyMapSeaIceConcNH: + +climatologyMapSeaIceConcNH +========================== + +An analysis task for plotting maps of Arctic sea ice concentration against +observations. 
+ +Component and Tags:: + + component: seaIce + tags: climatology, horizontalMap, seaIceConc, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSeaIceConcNH] + ## options related to plotting horizontally remapped climatologies of + ## sea ice concentration against reference model results and observations + ## in the northern hemisphere (NH) + + # colormap for model/observations + colormapNameResult = ice + # color indices into colormapName for filled contours + colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1] + + # colormap for differences + colormapNameDifference = balance + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 32, 64, 96, 112, 128, 128, 144, 160, 192, + 224, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-1., -0.8, -0.6, -0.4, -0.2, -0.1, 0, 0.1, 0.2, + 0.4, 0.6, 0.8, 1.] + + # Months or seasons to plot (These should be left unchanged, since + # observations are only available for these seasons) + seasons = ['JFM', 'JAS'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + # reference lat/lon for sea ice plots in the northern hemisphere + minimumLatitude = 50 + referenceLongitude = 0 + + # a list of prefixes describing the sources of the observations to be used + observationPrefixes = ['NASATeam', 'Bootstrap'] + + # arrange subplots vertically? 
+ vertical = False + + # observations files + concentrationNASATeamNH_JFM = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_NH_jfm.interp0.5x0.5_20180710.nc + concentrationNASATeamNH_JAS = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_NH_jas.interp0.5x0.5_20180710.nc + concentrationBootstrapNH_JFM = SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_NH_jfm.interp0.5x0.5_20180710.nc + concentrationBootstrapNH_JAS = SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_NH_jas.interp0.5x0.5_20180710.nc + + +The option ``minimumLatitude`` determines what the southernmost latitude (in +degrees) included in the plot will be. The option ``referenceLongitude`` +defines which longitude will be at the bottom of the plot. + +By default, plots are produced comparing modeled sea ice concentration against +observations produced with both the ``NASATeam`` and ``Bootstrap`` +algorithms. By altering ``observationPrefixes``, you can select only one +(or none) of these. + +The option ``vertical = True`` can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image). + +The ability to modify observations files pointed to by +``concentrationNASATeamNH_JFM``, ``concentrationNASATeamNH_JAS``, +``concentrationBootstrapNH_JFM`` and ``concentrationBootstrapNH_JAS`` is +provided for debugging purposes and these options +should typically remain unchanged. + +For details on the remaining configuration options, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +* :ref:`nasateam_conc` +* :ref:`bootstrap_conc` + +Example Result +-------------- + +.. 
image:: examples/ice_conc_nh.png + :width: 720 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceConcSH.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceConcSH.rst.txt new file mode 100644 index 000000000..4ca3219a8 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceConcSH.rst.txt @@ -0,0 +1,98 @@ +.. _task_climatologyMapSeaIceConcSH: + +climatologyMapSeaIceConcSH +========================== + +An analysis task for plotting maps of Antarctic sea ice concentration against +observations. + +Component and Tags:: + + component: seaIce + tags: climatology, horizontalMap, seaIceConc, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSeaIceConcSH] + ## options related to plotting horizontally remapped climatologies of + ## sea ice concentration against reference model results and observations + ## in the southern hemisphere (SH) + + # colormap for model/observations + colormapNameResult = ice + # color indices into colormapName for filled contours + colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1] + + # colormap for differences + colormapNameDifference = balance + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 32, 64, 96, 112, 128, 128, 144, 160, 192, + 224, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-1., -0.8, -0.6, -0.4, -0.2, -0.1, 0, 0.1, 0.2, + 0.4, 0.6, 0.8, 1.] 
+ + # Months or seasons to plot (These should be left unchanged, since + # observations are only available for these seasons) + seasons = ['DJF', 'JJA'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + # reference lat/lon for sea ice plots in the northern hemisphere + minimumLatitude = -50 + referenceLongitude = 180 + + # a list of prefixes describing the sources of the observations to be used + observationPrefixes = ['NASATeam', 'Bootstrap'] + + # arrange subplots vertically? + vertical = False + + # observations files + concentrationNASATeamSH_DJF = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_SH_djf.interp0.5x0.5_20180710.nc + concentrationNASATeamSH_JJA = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_SH_jja.interp0.5x0.5_20180710.nc + concentrationBootstrapSH_DJF = SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_SH_djf.interp0.5x0.5_20180710.nc + concentrationBootstrapSH_JJA = SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_SH_jja.interp0.5x0.5_20180710.nc + +The option ``minimumLatitude`` determines what the northernmost latitude (in +degrees) included in the plot will be. The option ``referenceLongitude`` +defines which longitude will be at the bottom of the plot. + +By default, plots are produced comparing modeled sea ice concentration against +both observations produced with both the ``NasaTeam`` and ``Bootstrap`` +algorithms. By altering ``observationPrefixes``, you can select only one +(or none) of these. + +The option ``vertical = True`` can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image). 
+ +The ability to modify observations files pointed to by +``concentrationNASATeamSH_DJF``, ``concentrationNASATeamSH_JJA``, +``concentrationBootstrapSH_DJF`` and ``concentrationBootstrapSH_JJA`` is +provided for debugging purposes and these options +should typically remain unchanged. + +For details on the remaining configuration options, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +* :ref:`nasateam_conc` +* :ref:`bootstrap_conc` + +Example Result +-------------- + +.. image:: examples/ice_conc_sh.png + :width: 720 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceMeltingNH.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceMeltingNH.rst.txt new file mode 100644 index 000000000..97798f9b0 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceMeltingNH.rst.txt @@ -0,0 +1,78 @@ +.. _task_climatologyMapSeaIceMeltingNH: + +climatologyMapSeaIceMeltingNH +============================= + +An analysis task for plotting maps of Arctic sea ice melting. 
+ +Component and Tags:: + + component: seaIce + tags: climatology, horizontalMap, seaIceMelting, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSeaIceMeltingNH] + # options related to plotting horizontally remapped climatologies of + # sea ice melting against control model results and observations + # in the northern hemisphere (NH) + + # colormap for model/observations + colormapNameResult = amp + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 0., 'vmax': 5.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 2., 9) + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -3., 'vmax': 3.} + # place the ticks automatically by default + colorbarTicksDifference = [-3, -2, -1, 0, 1, 2, 3] + + # Times for comparison times + seasons = ['ANN', 'DJF', 'JJA'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + # reference lat/lon for sea ice plots in the northern hemisphere + minimumLatitude = 50 + referenceLongitude = 0 + + # arrange subplots vertically? + vertical = False + +The option ``minimumLatitude`` determines what the southernmost latitude (in +degrees) included in the plot will be. The option ``referenceLongitude`` +defines which longitude will be at the bottom of the plot. 
+ + +The option ``vertical = True`` can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image). + +For details on the remaining configration options, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + + +Example Result +-------------- + +.. image:: examples/ice_melting_nh.png + :width: 720 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceMeltingSH.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceMeltingSH.rst.txt new file mode 100644 index 000000000..2e6e4af0e --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceMeltingSH.rst.txt @@ -0,0 +1,88 @@ +.. _task_climatologyMapSeaIceMeltingSH: + +climatologyMapSeaIceMeltingSH +============================= + +An analysis task for plotting maps of Antarctic sea ice melting against +observations. + +Component and Tags:: + + component: seaIce + tags: climatology, horizontalMap, seaIceMelting, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSeaIceMeltingSH] + # options related to plotting horizontally remapped climatologies of + # sea ice melting against control model results and observations + # in the southern hemisphere (SH) + + # colormap for model/observations + colormapNameResult = amp + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 0., 'vmax': 5.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 2., 9) + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in 
the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -3., 'vmax': 3.} + # place the ticks automatically by default + colorbarTicksDifference = [-3, -2, -1, 0, 1, 2, 3] + + # Times for comparison times + seasons = ['ANN', 'DJF', 'JJA'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + # reference lat/lon for sea ice plots in the northern hemisphere + minimumLatitude = -50 + referenceLongitude = 180 + + # arrange subplots vertically? + vertical = False + + # observations files + meltingSH = AnIceFlux/aniceflux_v01_clim_mean_1982-2008.nc + +The option ``minimumLatitude`` determines what the northernmost latitude (in +degrees) included in the plot will be. The option ``referenceLongitude`` +defines which longitude will be at the bottom of the plot. + +By default, plots are produced comparing modeled sea ice melting against +``AnIceFlux`` (annual climatology only). + +The option ``vertical = True`` can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image). + +For details on the remaining configration options, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +* :ref:`aniceflux` + +Example Result +-------------- + +.. image:: examples/ice_melting_sh.png + :width: 720 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceProductionNH.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceProductionNH.rst.txt new file mode 100644 index 000000000..51c0130b5 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceProductionNH.rst.txt @@ -0,0 +1,78 @@ +.. 
_task_climatologyMapSeaIceProductionNH: + +climatologyMapSeaIceProductionNH +================================ + +An analysis task for plotting maps of Arctic sea ice production. + +Component and Tags:: + + component: seaIce + tags: climatology, horizontalMap, seaIceProduction, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSeaIceProductionNH] + # options related to plotting horizontally remapped climatologies of + # sea ice production against control model results and observations + # in the northern hemisphere (NH) + + # colormap for model/observations + colormapNameResult = dense + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 0., 'vmax': 5.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 2., 9) + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -3., 'vmax': 3.} + # place the ticks automatically by default + colorbarTicksDifference = [-3, -2, -1, 0, 1, 2, 3] + + # Times for comparison times + seasons = ['ANN', 'DJF', 'JJA'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + # reference lat/lon for sea ice plots in the northern hemisphere + minimumLatitude = 50 + referenceLongitude = 0 + + # arrange subplots vertically? + vertical = False + +The option ``minimumLatitude`` determines what the southernmost latitude (in +degrees) included in the plot will be. 
The option ``referenceLongitude`` +defines which longitude will be at the bottom of the plot. + + +The option ``vertical = True`` can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image). + +For details on the remaining configration options, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + + +Example Result +-------------- + +.. image:: examples/ice_production_nh.png + :width: 720 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceProductionSH.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceProductionSH.rst.txt new file mode 100644 index 000000000..72ec9cb63 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceProductionSH.rst.txt @@ -0,0 +1,88 @@ +.. _task_climatologyMapSeaIceProductionSH: + +climatologyMapSeaIceProductionSH +================================ + +An analysis task for plotting maps of Antarctic sea ice production against +observations. 
+ +Component and Tags:: + + component: seaIce + tags: climatology, horizontalMap, seaIceProduction, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSeaIceProductionSH] + # options related to plotting horizontally remapped climatologies of + # sea ice production against control model results and observations + # in the southern hemisphere (SH) + + # colormap for model/observations + colormapNameResult = dense + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 0., 'vmax': 5.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 2., 9) + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -3., 'vmax': 3.} + # place the ticks automatically by default + colorbarTicksDifference = [-3, -2, -1, 0, 1, 2, 3] + + # Times for comparison times + seasons = ['ANN', 'DJF', 'JJA'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + # reference lat/lon for sea ice plots in the northern hemisphere + minimumLatitude = -50 + referenceLongitude = 180 + + # arrange subplots vertically? + vertical = False + + # observations files + productionSH = AnIceFlux/aniceflux_v01_clim_mean_1982-2008.nc + +The option ``minimumLatitude`` determines what the northernmost latitude (in +degrees) included in the plot will be. The option ``referenceLongitude`` +defines which longitude will be at the bottom of the plot. 
+ +By default, plots are produced comparing modeled sea ice production against +``AnIceFlux`` (annual climatology only). + +The option ``vertical = True`` can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image). + +For details on the remaining configration options, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +* :ref:`aniceflux` + +Example Result +-------------- + +.. image:: examples/ice_production_sh.png + :width: 720 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceThickNH.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceThickNH.rst.txt new file mode 100644 index 000000000..12afff086 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceThickNH.rst.txt @@ -0,0 +1,90 @@ +.. _task_climatologyMapSeaIceThickNH: + +climatologyMapSeaIceThickNH +=========================== + +An analysis task for plotting maps of Arctic sea ice thickness against +observations. 
+ +Component and Tags:: + + component: seaIce + tags: climatology, horizontalMap, seaIceThick, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSeaIceThickNH] + ## options related to plotting horizontally remapped climatologies of + ## sea ice thickness against reference model results and observations + ## in the northern hemisphere (NH) + + # colormap for model/observations + colormapNameResult = ice + # color indices into colormapName for filled contours + colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [0, 0.25, 0.5, 1, 1.5, 2, 2.5, 3, 3.5] + + # colormap for differences + colormapNameDifference = balance + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 32, 64, 96, 128, 128, 160, 192, 224, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-3., -2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5, 3.] + + # Months or seasons to plot (These should be left unchanged, since + # observations are only available for these seasons) + seasons = ['FM', 'ON'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + # reference lat/lon for sea ice plots in the northern hemisphere + minimumLatitude = 50 + referenceLongitude = 0 + + # a list of prefixes describing the sources of the observations to be used + observationPrefixes = [''] + + # arrange subplots vertically? + vertical = False + + # observations files + thicknessNH_ON = ICESat/ICESat_gridded_mean_thickness_NH_on.interp0.5x0.5_20180710.nc + thicknessNH_FM = ICESat/ICESat_gridded_mean_thickness_NH_fm.interp0.5x0.5_20180710.nc + +The option ``minimumLatitude`` determines what the southernmost latitude (in +degrees) included in the plot will be. 
The option ``referenceLongitude`` +defines which longitude will be at the bottom of the plot. + +The option ``observationPrefixes`` should be left as a list of the empty +string and is included for allowing easy code reuse with the +``climatologyMapSeaIceConc*`` tasks. + +The option ``vertical = True`` can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image). + +The ability to modify observations files pointed to by ``thicknessNH_ON`` and +``thicknessNH_FM`` is provided for debugging purposes and these options +should typically remain unchanged. + +For details on the remaining configration options, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +:ref:`icesat_thickness` + +Example Result +-------------- + +.. image:: examples/ice_thick_nh.png + :width: 720 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceThickSH.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceThickSH.rst.txt new file mode 100644 index 000000000..8dc665315 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSeaIceThickSH.rst.txt @@ -0,0 +1,90 @@ +.. _task_climatologyMapSeaIceThickSH: + +climatologyMapSeaIceThickSH +=========================== + +An analysis task for plotting maps of Antarctic sea ice thickness against +observations. 
+ +Component and Tags:: + + component: seaIce + tags: climatology, horizontalMap, seaIceThick, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSeaIceThickSH] + ## options related to plotting horizontally remapped climatologies of + ## sea ice thickness against reference model results and observations + ## in the southern hemisphere (SH) + + # colormap for model/observations + colormapNameResult = ice + # color indices into colormapName for filled contours + colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [0, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 2.5] + + # colormap for differences + colormapNameDifference = balance + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 32, 64, 96, 128, 128, 160, 192, 224, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-3., -2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5, 3.] + + # Months or seasons to plot (These should be left unchanged, since + # observations are only available for these seasons) + seasons = ['FM', 'ON'] + + # comparison grid(s) ('latlon', 'antarctic') on which to plot analysis + comparisonGrids = ['latlon'] + + # reference lat/lon for sea ice plots in the northern hemisphere + minimumLatitude = -50 + referenceLongitude = 180 + + # a list of prefixes describing the sources of the observations to be used + observationPrefixes = [''] + + # arrange subplots vertically? + vertical = False + + # observations files + thicknessSH_ON = ICESat/ICESat_gridded_mean_thickness_SH_on.interp0.5x0.5_20180710.nc + thicknessSH_FM = ICESat/ICESat_gridded_mean_thickness_SH_fm.interp0.5x0.5_20180710.nc + +The option ``minimumLatitude`` determines what the northernmost latitude (in +degrees) included in the plot will be. 
The option ``referenceLongitude`` +defines which longitude will be at the bottom of the plot. + +The option ``observationPrefixes`` should be left as a list of the empty +string and is included for allowing easy code reuse with the +``climatologyMapSeaIceConc*`` tasks. + +The option ``vertical = True`` can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image). + +The ability to modify observations files pointed to by ``thicknessSH_ON`` and +``thicknessSH_FM`` is provided for debugging purposes and these options +should typically remain unchanged. + +For details on the remaining configration options, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +:ref:`icesat_thickness` + +Example Result +-------------- + +.. image:: examples/ice_thick_sh.png + :width: 720 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSose.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSose.rst.txt new file mode 100644 index 000000000..bc90be2b7 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapSose.rst.txt @@ -0,0 +1,264 @@ +.. _task_climatologyMapSose: + +climatologyMapSose +============================= + +An analysis task for comparing fields at various depths against +results from the `Southern Ocean State Estimate (SOSE)`_. 
+ +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, sose, publicObs, temperature, salinity, + potentialDensity, mixedLayerDepth, zonalVelocity, meridionalVelocity, + velocityMagnitude + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapSose] + ## options related to plotting climatology maps of Antarctic fields at various + ## levels, including the sea floor against reference model results and SOSE + ## reanalysis data + + # comparison grid(s) + # only the Antarctic really makes sense but lat-lon could technically work. + comparisonGrids = ['antarctic'] + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN','JFM','JAS'] + + # list of depths in meters (positive up) at which to analyze, 'top' for the + # sea surface, 'bot' for the sea floor + depths = ['top', -200, -400, -600, -800, 'bot'] + + # a list of fields top plot for each transect. All supported fields are listed + # below + fieldList = ['temperature', 'salinity', 'potentialDensity', 'mixedLayerDepth', + 'zonalVelocity', 'meridionalVelocity', 'velocityMagnitude'] + + # set the suffix for files, e.g. 
if you want to use a different comparison + # grid from the default + fileSuffix = 6000.0x6000.0km_10.0km_Antarctic_stereo_20180710 + + [climatologyMapSoseTemperature] + ## options related to plotting climatology maps of Antarctic + ## potential temperature at various levels, including the sea floor against + ## reference model results and SOSE reanalysis data + + # colormap for model/observations + colormapNameResult = RdYlBu_r + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -2., 'vmax': 2.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 2., 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -2., 'vmax': 2.} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-2., 2., 9) + + + [climatologyMapSoseSalinity] + ## options related to plotting climatology maps of Antarctic + ## salinity at various levels, including the sea floor against + ## reference model results and SOSE reanalysis data + + # colormap for model/observations + colormapNameResult = haline + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 33.8, 'vmax': 35.0} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(34.2, 35.2, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9) + + + [climatologyMapSosePotentialDensity] + ## options related to plotting climatology maps of 
Antarctic + ## potential density at various levels, including the sea floor against + ## reference model results and SOSE reanalysis data + + # colormap for model/observations + colormapNameResult = Spectral_r + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 1026.5, 'vmax': 1028.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(1026., 1028., 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.3, 'vmax': 0.3} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.3, 0.3, 9) + + + [climatologyMapSoseMixedLayerDepth] + ## options related to plotting climatology maps of Antarctic + ## mixed layer depth against reference model results and SOSE reanalysis data + + # colormap for model/observations + colormapNameResult = viridis + # color indices into colormapName for filled contours + # the type of norm used in the colormap + normTypeResult = log + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 10., 'vmax': 300.} + # specify the ticks + colorbarTicksResult = [10, 20, 40, 60, 80, 100, 200, 300] + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = symLog + # A dictionary with keywords for the norm + normArgsDifference = {'linthresh': 10., 'linscale': 0.5, 'vmin': -200., + 'vmax': 200.} + colorbarTicksDifference = [-200., -100., -50., -20., -10., 0., 10., 20., 50., 100., 200.] 
+ + + [climatologyMapSoseZonalVelocity] + ## options related to plotting climatology maps of Antarctic + ## zonal velocity against reference model results and SOSE reanalysis data + + # colormap for model/observations + colormapNameResult = delta + # color indices into colormapName for filled contours + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + + + [climatologyMapSoseMeridionalVelocity] + ## options related to plotting climatology maps of Antarctic + ## meridional against reference model results and SOSE reanalysis data + + # colormap for model/observations + colormapNameResult = delta + # color indices into colormapName for filled contours + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + + + [climatologyMapSoseVelocityMagnitude] + ## options related to plotting climatology maps of 
Antarctic + ## meridional against reference model results and SOSE reanalysis data + + # colormap for model/observations + colormapNameResult = ice + # color indices into colormapName for filled contours + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 0, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(0, 0.2, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + +There is a section for options that apply to all SOSE climatology maps and +one for each field supported for specifying the color map. + +The option ``depths`` is a list of (approximate) depths at which to sample +the potential temperature field. A value of ``'top'`` indicates the sea +surface (or the ice-ocean interface under ice shelves) while a value of +``'bot'`` indicates the seafloor. + +The user can select only to plot a subset of the supported fields by adding +only the desired field names to ``fieldList``. The default value shows the +list of all available fields. + + +SOSE data for the full Southern Ocean +------------------------------------- + +The default SOSE data is on a 6,000 x 6,000 km grid focused on the Antarctic +continental shelf. An alternative data set is available on a 10,000 x 10,000 km +grid. These data can be downloaded directly from the `data repository`_ or +by calling:: + + download_analysis_data -o /output/path/for/diagnostics -d sose_10000km + +where the output path is the ``baseDirectory`` given in the ``diagnostics`` +section of the config file (see :ref:`config_diagnostics`). 
The data set is not +included in the default download because of its large size (~27 GB). + +Climatologies can be plotted with these data by setting:: + + fileSuffix = 10000.0x10000.0km_10.0km_Antarctic_stereo_20190603 + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + + +State Estimate +-------------- + +:ref:`sose` + +Example Result +-------------- + +.. image:: examples/clim_sose_temp.png + :width: 720 px + :align: center + +.. _`Southern Ocean State Estimate (SOSE)`: http://sose.ucsd.edu/sose_stateestimation_data_05to10.html +.. _`data repository`: https://web.lcrc.anl.gov/public/e3sm/diagnostics/observations/Ocean/SOSE/ diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapWaves.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapWaves.rst.txt new file mode 100644 index 000000000..41f0a1642 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapWaves.rst.txt @@ -0,0 +1,108 @@ +.. _task_climatologyMapWaves: + +climatologyMapWaves +=================== + +An analysis task for comparison of global maps of wave quantities +(significant wave height and peak period) against observations. 
+ +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, waves + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapWaves] + ## options related to plotting climatology maps of wave fields + ## ERA5 climatological data + + # comparison grid(s) on which to plot analysis + comparisonGrids = ['latlon'] + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN', 'JFM', 'JAS'] + + # a list of fields to plot ('significantWaveHeight', 'peakWavePeriod') + fieldList = ['significantWaveHeight', 'peakWavePeriod'] + + era5ObsStartYear = 1959 + era5ObsEndYear = 2021 + sscciObsStartYear = 1991 + sscciObsEndYear = 2018 + + [climatologyMapWavesSignificantWaveHeight] + ## options related to plotting climatology maps of significant wave height + + # colormap for model/observations + colormapNameResult = viridis + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 0., 'vmax': 7.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 10., 9) + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -5., 'vmax': 5.} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-5., 5., 9) + + [climatologyMapWavesPeakWavePeriod] + ## options related to plotting climatology maps of peak wave frequency + + # colormap for model/observations + colormapNameResult = plasma + # whether the colormap is indexed or continuous + 
colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 0.0, 'vmax':15.0} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 10., 9) + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -5., 'vmax': 5.} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-5., 5., 9) + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +:ref:`era5_waves` +:ref:`sscci_waves` + +Example Result +-------------- + +.. image:: examples/swh.png + :width: 500 px + :align: center + +.. image:: examples/peak_period.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/climatologyMapWoa.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapWoa.rst.txt new file mode 100644 index 000000000..2202960dc --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/climatologyMapWoa.rst.txt @@ -0,0 +1,115 @@ +.. _task_climatologyMapWoa: + +climatologyMapWoa +================= + +An analysis task for comparing potential temperature and salinity +at various depths against WOA18 climatology. 
+ +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, woa, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapWoa] + ## options related to plotting climatology maps of Temperature and Salinity + ## fields at various levels, including the sea floor against control model + ## results and WOA climatological data + + # comparison grid(s) ('latlon', 'antarctic', 'arctic') on which to plot analysis + comparisonGrids = ['arctic'] + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN','JFM','JAS'] + + # list of depths in meters (positive up) at which to analyze, 'top' for the + # sea surface. Note that, for seasons='ANN', depths can be as deep as 5500 m, + # otherwise use a maximum depth of 1500 m. + depths = ['top', -500, -1000] + + # a list of fields top plot for each transect. All supported fields are listed + # below + fieldList = ['temperature', 'salinity'] + + [climatologyMapWoaTemperature] + ## options related to plotting climatology maps of potential temperature + ## at various levels, including the sea floor against control model results + ## and WOA18 climatological data + + # colormap for model/observations + colormapNameResult = RdYlBu_r + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -2., 'vmax': 2.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 2., 9) + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = 
{'vmin': -2., 'vmax': 2.} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-2., 2., 9) + + [climatologyMapWoaSalinity] + ## options related to plotting climatology maps of salinity + ## at various levels, including the sea floor against control model results + ## and WOA18 climatological data + + # colormap for model/observations + colormapNameResult = haline + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 33.8, 'vmax': 35.0} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(34.2, 35.2, 9) + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9) + +For more details, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +The option ``depths`` is a list of (approximate) depths at which to sample +the temperature and salinity fields. A value of ``'top'`` indicates the sea +surface. Note that, for the annual climatology, WOA18 data is available down +to 5500 m, whereas, for the seasonal or monthly climatologies, WOA18 data +is only available down to 1500 m. + +Observations +------------ + +:ref:`woa18_t_s` + +Example Result +-------------- + +.. 
image:: examples/clim_woa_temp.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/conservation.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/conservation.rst.txt new file mode 100644 index 000000000..85bc54b65 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/conservation.rst.txt @@ -0,0 +1,63 @@ +.. _task_conservation: + +conservation +============ + +An analysis task for plotting histograms of 2-d variables of climatologies +in ocean regions. + +Component and Tags:: + + component: ocean + tags: timeseries, conservation + +Configuration Options +--------------------- + +The following configuration options are available for this task: + +.. code-block:: cfg + + [conservation] + ## options related to producing time series plots, often to compare against + ## observations and previous runs + + # the year from which to compute anomalies if not the start year of the + # simulation. This might be useful if a long spin-up cycle is performed and + # only the anomaly over a later span of years is of interest. + # anomalyRefYear = 249 + + # start and end years for timeseries analysis. Use endYear = end to indicate + # that the full range of the data should be used. If errorOnMissing = False, + # the start and end year will be clipped to the valid range. Otherwise, out + # of bounds values will lead to an error. In a "control" config file used in + # a "main vs. control" analysis run, the range of years must be valid and + # cannot include "end" because the original data may not be available. + startYear = 1 + endYear = end + + # Plot types to generate. 
The following plotTypes are supported: + # total_energy_flux : Total energy flux + # absolute_energy_error : Energy error + # ice_salt_flux : Salt flux related to land ice and sea ice + # absolute_salt_error : Salt conservation error + # total_mass_flux : Total mass flux + # total_mass_change : Total mass anomaly + # land_ice_mass_change : Mass anomaly due to land ice fluxes + # land_ice_ssh_change : SSH anomaly due to land ice fluxes + # land_ice_mass_flux_components : Mass fluxes from land ice + plotTypes = 'land_ice_mass_flux_components' + + # line colors for the main, control and obs curves + # see https://matplotlib.org/stable/gallery/color/named_colors.html + # and https://matplotlib.org/stable/tutorials/colors/colors.html + mainColor = black + controlColor = tab:red + + +Example Result +-------------- + +.. image:: examples/total_mass_flux.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/geojsonTransects.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/geojsonTransects.rst.txt new file mode 100644 index 000000000..da611e93c --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/geojsonTransects.rst.txt @@ -0,0 +1,261 @@ +.. _task_geojsonTransects: + +geojsonTransects +================ + +An analysis task for interpolating MPAS fields to transects specified by files +in ``geojson`` format.. + +Component and Tags:: + + component: ocean + tags: climatology, transect, geojson + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [geojsonTransects] + ## options related to plotting model transects at points determined by a + ## geojson file. To generate your own geojson file, go to: + ## http://geojson.io/ + ## and draw one or more polylines, then add a name to each: + ## + ## "properties": { + ## "name": "My Favorite Name" + ## }, + ## and save the file as GeoJSON (say transects.geojson). 
Finally, set the + ## option: + ## geojsonFiles = ['transects.geojson'] + ## (giving an absolute path if necessary) in your custom config file. + + # a list of geojson files containing lat/lon points in LineStrings to be + # plotted. If relative paths are given, they are relative to the current + # working directory. The files must be listed in quotes, e.g.: + # geojsonFiles = ['file1.geojson', '/path/to/file2.geojson'] + geojsonFiles = [] + + # a list of dictionaries for each field to plot. The dictionary includes + # prefix (used for file names, task names and sections) as well as the mpas + # name of the field, units for colorbars and a the name as it should appear + # in figure titles and captions. + fields = + [{'prefix': 'temperature', + 'mpas': 'timeMonthly_avg_activeTracers_temperature', + 'units': r'$\degree$C', + 'titleName': 'Potential Temperature'}, + {'prefix': 'salinity', + 'mpas': 'timeMonthly_avg_activeTracers_salinity', + 'units': r'PSU', + 'titleName': 'Salinity'}, + {'prefix': 'potentialDensity', + 'mpas': 'timeMonthly_avg_potentialDensity', + 'units': r'kg m$^{-3}$', + 'titleName': 'Potential Density'}, + {'prefix': 'zonalVelocity', + 'mpas': 'timeMonthly_avg_velocityZonal', + 'units': r'm s$^{-1}$', + 'titleName': 'Zonal Velocity'}, + {'prefix': 'meridionalVelocity', + 'mpas': 'timeMonthly_avg_velocityMeridional', + 'units': r'm s$^{-1}$', + 'titleName': 'Meridional Velocity'}] + + # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN'] + + # The approximate horizontal resolution (in km) of each transect. Latitude/ + # longitude between observation points will be subsampled at this interval. + # Use 'obs' to indicate no subsampling. + horizontalResolution = 5 + + # The name of the vertical comparison grid. 
Valid values are 'mpas' for the + # MPAS vertical grid or any other name if the vertical grid is defined by + # 'verticalComparisonGrid' + #verticalComparisonGridName = mpas + verticalComparisonGridName = uniform_0_to_4000m_at_10m + + # The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas'. + # This should be numpy array of (typically negative) elevations (in m). + verticalComparisonGrid = numpy.linspace(0, -4000, 401) + + # The minimum weight of a destination cell after remapping. Any cell with + # weights lower than this threshold will therefore be masked out. + renormalizationThreshold = 0.01 + + + [geojsonTemperatureTransects] + ## options related to plotting geojson transects of potential temperature + + # colormap for model/observations + colormapNameResult = RdYlBu_r + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the SemiLogNorm + normArgsResult = {'vmin': -2., 'vmax': 30.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(-2., 2., 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the SemiLogNorm + normArgsDifference = {'vmin': -2., 'vmax': 2.} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-2., 2., 9) + + + [geojsonSalinityTransects] + ## options related to plotting geojson transects of salinity + + # colormap for model/observations + colormapNameResult = haline + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the SemiLogNorm + normArgsResult = {'vmin': 30, 'vmax': 39.0} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(34.2, 35.2, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with 
keywords for the SemiLogNorm + normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9) + + + [geojsonPotentialDensityTransects] + ## options related to plotting geojson transects of potential density + + # colormap for model/observations + colormapNameResult = Spectral_r + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 1026.5, 'vmax': 1028.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(1026., 1028., 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.3, 'vmax': 0.3} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.3, 0.3, 9) + + + [geojsonZonalVelocityTransects] + ## options related to plotting geojson transects of zonal velocity + + # colormap for model/observations + colormapNameResult = delta + # color indices into colormapName for filled contours + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + + + [geojsonMeridionalVelocityTransects] + ## options related to plotting geojson transects of meridional velocity + + # colormap for 
model/observations + colormapNameResult = delta + # color indices into colormapName for filled contours + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + +Geojson Files +------------- + +This task takes a list of geojson file names (supplied as a python list of +``str``:: + + geojsonFiles = ['file1.geojson', '/path/to/file2.geojson'] + +Transects are specified by ``LineString`` objects in the files. Some examples +are provided in the `MPAS geometric features repository`_. You can also +generate your own very easily at To generate your own geojson file, go to +`geojson.io`_ and draw one or more polylines, then add a name to each:: + + ... + "properties": { + "name": "My Favorite Name" + }, + ... + +and save the file as GeoJSON (say transects.geojson). Finally, set the +option:: + + geojsonFiles = ['transects.geojson'] + +(giving an absolute path if necessary) in your custom config file. + + +Fields +------ + +Since there are no observations associated with geojson transects, you are +free to choose which MPAS fields you would like to plot. These fields are +provided as a python dictionary. The keys are names for the fields (anything +you would like use as a prefix on files and subtask names, best if it does +not contain spaces). The values are python dictionaries. The values +associate with the ``mpas`` key are the names of the 3D fields where transects +are desired. 
The ``units`` entry indicates the units to display on the +colorbar. The ``titleName`` entry specifies the name of the field to include +in plot titles and captions. + +Each field must have a corresponding section in the config file defining its +color maps. For example, ``temperature`` has an associated +``[geojsonTemperatureTransect]`` section. + +Other Options +------------- + +For details on the remaining configuration options, see: + * :ref:`config_transects` + * :ref:`config_remapping` + * :ref:`config_colormaps` + * :ref:`config_seasons` + +Example Result +-------------- + +.. image:: examples/geojson_transect.png + :width: 500 px + :align: center + +.. _`MPAS geometric features repository`: https://github.com/MPAS-Dev/geometric_features +.. _`geojson.io`: http://geojson.io/ diff --git a/1.11.0rc1/_sources/users_guide/tasks/hovmollerOceanRegions.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/hovmollerOceanRegions.rst.txt new file mode 100644 index 000000000..8e0220fc8 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/hovmollerOceanRegions.rst.txt @@ -0,0 +1,223 @@ +.. _task_hovmollerOceanRegions: + +hovmollerOceanRegions +===================== + +An analysis task for plotting depth profiles vs. time of temperature, salinity, +potential density, etc. averaged over regions. + +Component and Tags:: + + component: ocean + tags: profiles, climatology, hovmoller + +Configuration Options +--------------------- + +The following configuration options are available for this task: + +.. code-block:: cfg + + [hovmollerOceanRegions] + ## options related to plotting Hovmoller diagrams (depth vs. time plots) of + ## regional means of 3D MPAS fields + + # the names of region groups to plot, each with its own section below + regionGroups = ['Ocean Basins'] + + + [hovmollerOceanBasins] + ## options related to plotting Hovmoller diagrams of ocean basins + + # a list of dictionaries for each field to plot. 
The dictionary includes + # prefix (used for file names, task names and sections) as well as the MPAS + # name of the field, units for colorbars and a the name as it should appear + # in figure titles and captions. + fields = + [{'prefix': 'potentialTemperature', + 'mpas': 'timeMonthly_avg_activeTracers_temperature', + 'units': r'$\degree$C', + 'titleName': 'Potential Temperature'}, + {'prefix': 'salinity', + 'mpas': 'timeMonthly_avg_activeTracers_salinity', + 'units': r'PSU', + 'titleName': 'Salinity'}, + {'prefix': 'potentialDensity', + 'mpas': 'timeMonthly_avg_potentialDensity', + 'units': r'kg m$^{-3}$', + 'titleName': 'Potential Density'}] + + # a list of region names from the region masks file to plot + regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", + "Arctic_Basin", "Southern_Ocean_Basin", "Mediterranean_Basin", + "Global Ocean", "Global Ocean 65N to 65S", + "Global Ocean 15S to 15N"] + + # whether to compute an anomaly with respect to the start of the time series + computeAnomaly = False + + # Number of points over which to compute moving average(e.g., for monthly + # output, movingAverageMonths=12 corresponds to a 12-month moving average + # window) + movingAverageMonths = 12 + + + [hovmollerOceanRegionsPotentialTemperature] + ## options related to plotting time series of temperature vs. 
depth in ocean + ## regions + + # colormap + colormapNameResult = RdYlBu_r + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -2., 'vmax': 30.} + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -5., 'vmax': 5.} + + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevels = 'none' + + # An optional first year for the tick marks on the x axis. Leave commented out + # to start at the beginning of the time series. + + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + + # yearStrideXTicks = 1 + + # limits on depth, the full range by default + # yLim = [-6000., 0.] + + + [hovmollerOceanRegionsSalinity] + ## options related to plotting time series of salinity vs. 
depth in ocean + ## regions + + # colormap + colormapNameResult = haline + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 30, 'vmax': 39.0} + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevels = 'none' + + # An optional first year for the tick marks on the x axis. Leave commented out + # to start at the beginning of the time series. + + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + + # yearStrideXTicks = 1 + + # limits on depth, the full range by default + # yLim = [-6000., 0.] + + + [hovmollerOceanRegionsPotentialDensity] + ## options related to plotting time series of potential density vs. 
depth in + ## ocean regions + + # colormap + colormapNameResult = Spectral_r + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 1026.5, 'vmax': 1028.} + + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.3, 'vmax': 0.3} + + # contour line levels (use [] for automatic contour selection, 'none' for no + # contour lines) + contourLevels = 'none' + + # An optional first year for the tick marks on the x axis. Leave commented out + # to start at the beginning of the time series. + + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + + # yearStrideXTicks = 1 + + # limits on depth, the full range by default + # yLim = [-6000., 0.] + +The ``[hovmollerOceanRegions]`` section contains a list of ``regionGroups``, +one or more of the :ref:`config_region_groups` defined in +:py:func:`geometric_features.aggregation.get_aggregator_by_name()`. + +For each region group, there is a corresponding section +``[hovmoller]``, where ```` is the name of the region +group with spaces removed. In this section, the ``fields`` dictionary is used +to specify a list of 3D MPAS fields to average and plot. The key ``prefix`` is +a convenient name appended to tasks and file names to describe the field. +``mpas`` is the name of the field in MPAS ``timeSeriesStatsMonthly`` output +files. The ``units`` are the SI units of the field to include on the plot's +color bar and ``titleName`` is the name of the field to use in its gallery name +and on the title of the plot. 
+ +``regionNames`` is a list of regions from the full the region group or +``regionNames = ['all']`` to indicate that all regions should be used. For +the available regions, see +`Aggregate Existing Features `_. + +If ``computeAnomaly = True``, the plots will be the anomaly with respect to the +beginning of the time series (averaged over ``movingAverageMonths`` months). + +Each field has a ``[hovmollerOceanRegions]`` section, where +```` is the associated ``prefix`` but starting with a capital letter. +Each of these sections has a ``yLim`` option that can specify the desired depth +range. The default is the full range. + +For more details on the remaining config options, see + * :ref:`config_region_groups` + * :ref:`config_colormaps` + * :ref:`config_moving_average` + * :ref:`config_time_axis_ticks` + +Example Result +-------------- + +.. image:: examples/hovmoller_weddell.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/indexNino34.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/indexNino34.rst.txt new file mode 100644 index 000000000..990240e06 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/indexNino34.rst.txt @@ -0,0 +1,54 @@ +.. |n~| unicode:: U+00F1 + :trim: + +.. _task_indexNino34: + +indexNino34 +=========== + +An analysis task for plotting both time series and spectra of the El Ni |n~| o +3.4 Climate Index against observations. + +Component and Tags:: + + component: ocean + tags: timeSeries, index, nino, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [indexNino34] + ## options related to plotting time series of the El Nino 3.4 index + + # Specified region for the Nino Index,'nino3', 'nino4', or 'nino3.4' + # The indexNino34 routine only accepts one value at a time + region = nino3.4 + + # Data source to read for comparison. 
There are two options + # 1 - ERS_SSTv4 -- Extended Reconstructed Sea Surface Temperature -- 1854 - 2016 + # 2 - HADIsst -- Hadley Center analysis -- 1870 - 2016 + observationData = HADIsst + +While the default is the El Ni |n~| o 3.4 region, you may select among +``nino3``, ``nino4``, and ``nino3.4``. See :ref:`config_regions` for more +information about regions in MPAS-Analyis. + +By default, observations are taken from the Hadley Center analysis. To use +the Extended Reconstructed Sea Surface Temperature (ERS SSTv4), set +``observationData = ERS_SSTv4``. + +Observations +------------ + +* :ref:`hadisst_nino` +* :ref:`ers_sst_nino` + + +Example Result +-------------- + +.. image:: examples/nino.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/meridionalHeatTransport.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/meridionalHeatTransport.rst.txt new file mode 100644 index 000000000..339e8266b --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/meridionalHeatTransport.rst.txt @@ -0,0 +1,75 @@ +.. _task_meridionalHeatTransport: + +meridionalHeatTransport +======================= + +An analysis task for plotting the zonal mean meridional heat transport (MHT) +against observations. 
+ +Component and Tags:: + + component: ocean + tags: climatology, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [meridionalHeatTransport] + ## options related to plotting meridional heat transport (MHT) + + # Data source to read for comparison + observationData = mht_TrenberthCaron.NoAtm_20180710.nc + + # colormap for model results + colormapName = balance + # colormap indices for contour color + colormapIndices = [0, 28, 57, 85, 113, 142, 170, 198, 227, 255] + # colorbar levels/values for contour boundaries + colorbarLevels = [-0.1, -0.05, -0.02, -0.01, -0.005, 0, 0.005, 0.01, 0.02, + 0.05, 0.1] + # contour line levels + contourLevels = [-0.1, -0.01, 0.01, 0.1] + + # latitude and depth limits + xLimGlobal = [-80, 80] + depthLimGlobal = [-1000, 0] + + # compare to observations? + compareWithObservations = True + + # plot the vertical section of MHT? + plotVerticalSection = False + + # Number of points over which to compute moving average (with respect to + # latitude) for MHT vertical section plots + movingAveragePoints = 1 + +The option ``observationData`` allows the selection of the observational file +to compare with (available largely for debugging purposes). + +By default, only a line plot of depth-integrated MHT is plotted. Optionally, +you can set ``plotVerticalSection = True`` to produce a plot of the MHT per +unit depth as a funciton of latitude and depth. Because this type of plot is +not commonly produced in ocean models and observations are not available for +comparison, it is disabled by default. + +The options ``xLimGlobal`` and ``depthLimGlobal`` control the bounds of the +x axis of both plots and the y axis of the vertical section plot, respectively. + +For more details on the remaining configuration options, see: + * :ref:`config_colormaps` + * :ref:`config_moving_average` + +Observations +------------ + +:ref:`trenberth_mht` + +Example Result +-------------- + +.. 
image:: examples/mht.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/oceanHistogram.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/oceanHistogram.rst.txt new file mode 100644 index 000000000..98ae858ee --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/oceanHistogram.rst.txt @@ -0,0 +1,119 @@ +.. _task_oceanHistogram: + +oceanHistogram +============== + +An analysis task for plotting histograms of 2-d variables of climatologies +in ocean regions. + +Component and Tags:: + + component: ocean + tags: climatology, histogram, regions, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task: + +.. code-block:: cfg + + [oceanHistogram] + ## options related to plotting histograms of climatologies of 2-d ocean + ## variables + + # list of variables to plot + variableList = [] + + # list of observations to compare against + obsList = ['AVISO'] + + # list of ocean variables by which to weight variables in variable list + weightList = [] + + # list of regions to plot from the region list in [regions] below + regionGroups = ['Ocean Basins'] + + # list of region names within the region group listed above + regionNames = [] + + # Seasons to conduct analysis over + # Note: start and end year will be inherited from climatology section + seasons = ['ANN'] + + # Number of histogram bins + bins = 40 + +Region Groups +------------- + +A list of groups of regions, each of which will get its own gallery on +the resulting analysis web page. See :ref:`config_region_groups` for +more information on the available region groups. By default, +the only region group is ``'Ocean Basins'``. + +Region Names +------------ + +The ``regionNames`` can be set to ``['all']`` to plot all of the +regions in the geojson file. In the case of ``Antarctic Regions``, these +are: + +.. 
code-block:: cfg + + ["Southern Ocean", "Southern Ocean 60S", "Eastern Weddell Sea Shelf", + "Eastern Weddell Sea Deep", "Western Weddell Sea Shelf", + "Western Weddell Sea Deep", "Weddell Sea Shelf", "Weddell Sea Deep", + "Bellingshausen Sea Shelf", "Bellingshausen Sea Deep", "Amundsen Sea Shelf", + "Amundsen Sea Deep", "Eastern Ross Sea Shelf", "Eastern Ross Sea Deep", + "Western Ross Sea Shelf", "Western Ross Sea Deep", + "East Antarctic Seas Shelf", "East Antarctic Seas Deep"] + +For ``Ocean Basins``, they are: + +.. code-block:: cfg + + ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", "Arctic_Basin", + "Southern_Ocean_Basin", "Mediterranean_Basin", "Global Ocean", + "Global Ocean 65N to 65S", "Global Ocean 15S to 15N"] + +Variable List +------------- +The ``variableList`` option determines the variables to plot. Currently, only +2-d variables are supported but we envision extending to 3-d variables in the +near future. The variables are to be listed according to their registry names, with ``timeMontly_avg_`` prepended in the code. + +Observations +------------ +The ``obsList`` option contains a list of the names of observational data sets. +Currently, "AVISO" is the only data set available, but we anticipate adding +several additional data sets in the near future. + +:ref:`aviso_ssh` + +Weight List +----------- +The ``weightList`` option determines the variables to normalize by in plotting +the histogram. This is an optional feature. If used, ``weightList`` should be +of the same length as ``variableList``. We currently only support for 2-d +variables that are the same size as their corresponding variables in +``variableList`` and that are present in the restart file. For example, we use +``areaCell`` to weight cell-centered variables such as ``ssh``. + +Bins +---- +``bins`` sets the number of bins to include in the histogram. 
+ +Other Config Options +-------------------- + +Other config options include ``lineWidth``, ``mainColor``, ``obsColor``, +``controlColor``, ``titleFontSize``, ``defaultFontSize``. For more details on +the remaining config options, see :ref:`config_seasons`. + +Example Result +-------------- + +.. image:: examples/histogram_ssh_aviso_atl.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/oceanRegionalProfiles.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/oceanRegionalProfiles.rst.txt new file mode 100644 index 000000000..2b68bab0c --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/oceanRegionalProfiles.rst.txt @@ -0,0 +1,101 @@ +.. _task_oceanRegionalProfiles: + +oceanRegionalProfiles +===================== + +An analysis task for plotting depth profiles of temperature, salinity, +potential density, etc. averaged over regions and in time. The plots also +include a measure of variability (the standard deviation in space and time). + +Component and Tags:: + + component: ocean + tags: profiles, climatology + +Configuration Options +--------------------- + +The following configuration options are available for this task: + +.. code-block:: cfg + + [oceanRegionalProfiles] + ## options related to plotting vertical profiles of regional means (and + ## variability) of 3D MPAS fields + + # The name of a region group defining the region for each profile + regionGroups = ['Ocean Basins'] + + + [profilesOceanBasins] + ## options related to plotting vertical profiles ocean basins + + # a list of dictionaries for each field to plot. The dictionary includes + # prefix (used for file names, task names and sections) as well as the mpas + # name of the field, units for colorbars and a the name as it should appear + # in figure titles and captions. 
+ fields = + [{'prefix': 'potentialTemperature', + 'mpas': 'timeMonthly_avg_activeTracers_temperature', + 'units': r'$\degree$C', + 'titleName': 'Potential Temperature'}, + {'prefix': 'salinity', + 'mpas': 'timeMonthly_avg_activeTracers_salinity', + 'units': r'PSU', + 'titleName': 'Salinity'}, + {'prefix': 'potentialDensity', + 'mpas': 'timeMonthly_avg_potentialDensity', + 'units': r'kg m$^{-3}$', + 'titleName': 'Potential Density'}] + + # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['JFM', 'JAS', 'ANN'] + + # minimum and maximum depth of profile plots, or empty for the full depth range + depthRange = [] + + # a list of region names from the region masks file to plot + regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", + "Arctic_Basin", "Southern_Ocean_Basin", "Mediterranean_Basin", + "Global Ocean", "Global Ocean 65N to 65S", + "Global Ocean 15S to 15N"] + + # web gallery options + profileGalleryGroup = Ocean Basin Profiles + +The ``[oceanRegionalProfiles]`` section contains a list of ``regionGroups``, +one or more of the :ref:`config_region_groups` defined in +:py:func:`geometric_features.aggregation.get_aggregator_by_name()`. + +For each region group, there is a corresponding section +``[profiles]``, where ```` is the name of the region +group with spaces removed. In this section, the ``fields`` dictionary is used +to specify a list of 3D MPAS fields to average and plot. The key ``prefix`` is +a convenient name appended to tasks and file names to describe the field. +``mpas`` is the name of the field in MPAS ``timeSeriesStatsMonthly`` output +files. The ``units`` are the SI units of the field to include on the plot's x +axis and ``titleName`` is the name of the field to use in its gallery name and +on the x axis of the profile. 
+ +``regionNames`` is a list of regions from the full the region group or +``regionNames = ['all']`` to indicate that all regions should be used. For +the available regions, see +`Aggregate Existing Features `_. + +A config option is available to specify the names of the gallery group for the +profiles (``profileGalleryGroup``). + +A minimum and maximum depth for profiles can be specified with ``depthRange``. +The default is the full range. + +For more details on the remaining config options, see + * :ref:`config_region_groups` + * :ref:`config_seasons` + +Example Result +-------------- + +.. image:: examples/ocean_profile.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/regionalTSDiagrams.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/regionalTSDiagrams.rst.txt new file mode 100644 index 000000000..6629cd9ff --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/regionalTSDiagrams.rst.txt @@ -0,0 +1,216 @@ +.. _task_regionalTSDiagrams: + +regionalTSDiagrams +================== + +An analysis task for plotting T-S (potential temperature vs. salinity) +diagrams of climatologies in ocean regions. + +Component and Tags:: + + component: ocean + tags: climatology, regions, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task: + +.. 
code-block:: cfg + + [regionalTSDiagrams] + ## options related to plotting T/S diagrams of ocean regions + + # the names of region groups to plot, each with its own section below + regionGroups = ['Ocean Basins'] + + # a list of seasons to compute climatologies over + seasons = ['ANN'] + + # the number of threads dask is allowed to spawn for each process computing + # the observational climatologies + # Decrease this number if regionalTSDiagrams subtasks are running + # out of available threads + daskThreads = 8 + + # the number of subprocesses that each observational climatology subtask gets + # counted as occupying + # Increase this number if regionalTSDiagrams subtasks are running + # out of memory, and fewer tasks will be allowed to run at once + subprocessCount = 4 + + [TSDiagramsForAntarcticRegions] + ## options related to plotting T/S diagrams of Antarctic regions + + # list of regions to plot or ['all'] for all regions in the masks file. + # See "regionNames" in the antarcticRegions masks file in + # regionMaskSubdirectory for details. 
+ regionNames = [] + + # diagram type, either 'volumetric' or 'scatter', depending on if the points + # should be binned the plot should show the volume fraction in each bin or + # scattered points colored by their depth + diagramType = volumetric + + # if diagramType == 'volumetric', the bin boundaries for T and S + # if diagramType == 'scatter', only the min and max are important (and the + # bins are only used for computing neutral density contours) + Tbins = numpy.linspace(-2.5, 4, 131) + Sbins = numpy.linspace(33.8, 34.8, 201) + + # density contour interval + rhoInterval = 0.1 + + # The color map for depth or volume + colormap = cmo.deep + # The following is more appropriate if diagramType == 'scatter' + # colormap = cmo.deep_r + # the type of norm used in the colormap {'linear', 'log'} + normType = log + + # The minimum and maximum depth over which fields are plotted, default is + # to take these values from the geojson feature's zmin and zmax properties. + # Add these to a custom config file to override the defaults. + # zmin = -1000 + # zmax = -400 + + # the minimum and maximum volume for the colorbar, default is the minimum and + # maximum over the mode output + # volMin = 3e9 + # volMax = 1e12 + + # Obserational data sets to compare against + obs = ['SOSE', 'WOA18'] + + [TSDiagramsForOceanBasins] + ## options related to plotting T/S diagrams of major ocean basins + + # list of regions to plot or ['all'] for all regions in the masks file. + # See "regionNames" in the oceanBasins masks file in + # regionMaskSubdirectory for details. 
+ regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", + "Arctic_Basin", "Southern_Ocean_Basin", "Mediterranean_Basin", + "Global Ocean", "Global Ocean 65N to 65S", + "Global Ocean 15S to 15N"] + + # diagram type, either 'volumetric' or 'scatter', depending on if the points + # should be binned the plot should show the volume fraction in each bin or + # scattered points colored by their depth + diagramType = volumetric + + # if diagramType == 'volumetric', the bin boundaries for T and S + # if diagramType == 'scatter', only the min and max are important (and the + # bins are only used for computing neutral density contours) + Tbins = numpy.linspace(-2.5, 16, 926) + Sbins = numpy.linspace(33.8, 35.8, 1001) + + # density contour interval + rhoInterval = 0.2 + + # The color map for depth or volume + colormap = white_cmo_deep + # The following is more appropriate if diagramType == 'scatter' + # colormap = cmo.deep_r + # the type of norm used in the colormap {'linear', 'log'} + normType = log + + # The minimum and maximum depth over which fields are plotted. + zmin = -1000 + zmax = 0 + + # Obserational data sets to compare against + obs = ['WOA18'] + +Region Groups +------------- + +A list of groups of regions, each of which will get its own gallery on +the resulting analysis web page. See :ref:`config_region_groups` for +more information on the available region groups. For each region group, there +should be a corresponding ``TSDiagramsFor`` section of the config +file, with any spaces removed from the name of the region group. By default, +the only region group is ``'Ocean Basins'``. + +Region Names +------------ + +The ``regionNames`` can be set to ``['all']`` to plot all of the +regions in the geojson file. In the case of ``Antarctic Regions``, these +are: + +.. 
 code-block:: cfg + + ["Southern Ocean", "Southern Ocean 60S", "Eastern Weddell Sea Shelf", + "Eastern Weddell Sea Deep", "Western Weddell Sea Shelf", + "Western Weddell Sea Deep", "Weddell Sea Shelf", "Weddell Sea Deep", + "Bellingshausen Sea Shelf", "Bellingshausen Sea Deep", "Amundsen Sea Shelf", + "Amundsen Sea Deep", "Eastern Ross Sea Shelf", "Eastern Ross Sea Deep", + "Western Ross Sea Shelf", "Western Ross Sea Deep", + "East Antarctic Seas Shelf", "East Antarctic Seas Deep"] + +For ``Ocean Basins``, they are: + +.. code-block:: cfg + + ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", "Arctic_Basin", + "Southern_Ocean_Basin", "Mediterranean_Basin", "Global Ocean", + "Global Ocean 65N to 65S", "Global Ocean 15S to 15N"] + +Diagram Type +------------ + +By default, a "volumetric" diagram is produced, where the volume of ocean water +in a region is binned in T and S space, and the volume of each bin is plotted. +This allows for more quantitative comparison with observation- and model-based +climatologies. + +If ``diagramType`` is set to ``scatter``, a point cloud is plotted instead, +shaded by depth. We anticipate that this will be useful for plotting data sets +that are spatially scattered (e.g. the MEOP seal data), because each sample +does not correspond to a volume. This type of diagram may also be helpful for +comparison with publications that use scatter plots. + +For volumetric diagrams, two norms for the continuous color map are supported, +``linear`` and ``log``, with ``log`` being the default. The range of the +colormap is between zero and the maximum bin volume for ``linear`` and +between the minimum non-zero bin volume and the max for ``log``. The min/max +bin volume is taken from the first panel containing the "main" MPAS-Ocean plot, +and the same color map range is used for all panels.
+ +Bins and Contour Intervals +-------------------------- +If ``diagramType = volumetric``, the number and spacing of potential +temperature and salinity bins is set in ``Tbins`` and ``Sbins``. For +``diagramType = scatter``, ``Tbins`` and ``Sbins`` are used to make contour +plots of neutral density and are used to determine the bounds of the figure +in T/S space. ``rhoInterval`` is the interval between contours of neutral +density. ``zmin`` and ``zmax`` are the minimum and maximum depths (positive +up) of the ocean region. If available (e.g. for "Antarctic Regions"), the +default is to read them from geojson file. + +Observations +------------ +The ``obs`` option contains a list of the names of observational data sets. +Currently, "SOSE" and "WOA18" are the only data sets available, but we +anticipate adding several additional data sets in the near future. + +:ref:`sose` +:ref:`woa18_t_s` + +Other Config Options +-------------------- + +For more details on the remaining config options, see + * :ref:`config_regions` + * :ref:`config_seasons` + * :ref:`config_colormaps` + * :ref:`dask_threads` + +Example Result +-------------- + +.. image:: examples/so_ts_diag.png + :width: 500 px + :align: center + +.. _`antarctic_ocean_regions`: https://github.com/MPAS-Dev/geometric_features/tree/main/feature_creation_scripts/antarctic_ocean_regions diff --git a/1.11.0rc1/_sources/users_guide/tasks/soseTransects.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/soseTransects.rst.txt new file mode 100644 index 000000000..aa0752495 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/soseTransects.rst.txt @@ -0,0 +1,279 @@ +.. |deg| unicode:: U+00B0 .. degree sign + :trim: + +.. _task_soseTransects: + +soseTransects +============= + +An analysis task for computing meridional transects of MPAS fields at evenly +spaced latitudes around Antarctica and comparing them with results from the +`Southern Ocean State Estimate (SOSE)`_. 
+ +Component and Tags:: + + component: ocean + tags: climatology, transect, sose, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [soseTransects] + ## options related to plotting model vs. Southern Ocean State Estimate (SOSE) + ## transects. + + # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN'] + + # The approximate horizontal resolution (in km) of each transect. Latitude/ + # longitude between observation points will be subsampled at this interval. + # Use 'obs' to indicate no subsampling. + # horizontalResolution = obs + horizontalResolution = 5 + + # The name of the vertical comparison grid. Valid values are 'mpas' for the + # MPAS vertical grid, 'obs' to use the locations of observations or + # any other name if the vertical grid is defined by 'verticalComparisonGrid' + # verticalComparisonGridName = mpas + # verticalComparisonGridName = obs + verticalComparisonGridName = uniform_0_to_4000m_at_10m + + # The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas' or + # 'obs'. This should be a numpy array of (typically negative) elevations (in m). + verticalComparisonGrid = numpy.linspace(0, -4000, 401) + + # The minimum weight of a destination cell after remapping. Any cell with + # weights lower than this threshold will therefore be masked out. + renormalizationThreshold = 0.01 + + # min and max latitude of transects + minLat = -80 + maxLat = -60 + + # longitudes of transects + longitudes = numpy.linspace(0, 330, 12) + + # a list of fields to plot for each transect. All supported fields are listed + # below. Note that 'velocityMagnitude' cannot be plotted without + # 'zonalVelocity' and 'meridionalVelocity' because the components are needed + # to compute the magnitude.
+ fieldList = ['temperature', 'salinity', 'potentialDensity', 'zonalVelocity', + 'meridionalVelocity', 'velocityMagnitude'] + + + [soseTemperatureTransects] + ## options related to plotting SOSE transects of potential temperature + + # colormap for model/observations + colormapNameResult = RdYlBu_r + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 0.0, 'vmax': 6.0} + # color indices into colormapName for filled contours + #colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + #colorbarLevelsResult = [0, 0.25, 0.5, 0.75, 1, 2, 3, 4, 5, 6] + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(0.0, 6.0, 9) + # contour line levels + contourLevelsResult = np.arange(0.5, 6.0, 1.0) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -2.0, 'vmax': 2.0} + # color indices into colormapName for filled contours + #colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + #colorbarLevelsDifference = [-2, -1.5, -1.25, -1, -0.2, 0, 0.2, 1, 1.25, 1.5, 2] + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-2.0, 2.0, 9) + # contour line levels + contourLevelsDifference = np.arange(-1.8, 2.0, 0.4) + + + [soseSalinityTransects] + ## options related to plotting SOSE transects of salinity + + # colormap for model/observations + colormapNameResult = haline + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 34.0, 'vmax': 35.0} + # color indices into colormapName for filled contours + #colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 
255] + # colormap levels/values for contour boundaries + #colorbarLevelsResult = [34, 34.3, 34.5, 34.65, 34.675, 34.7, 34.725, 34.75, 34.8, 35] + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(34.0, 35.0, 9) + # contour line levels + contourLevelsResult = np.arange(34.1, 35.0, 0.1) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.5, 'vmax': 0.5} + # color indices into colormapName for filled contours + #colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + #colorbarLevelsDifference = [-0.5, -0.2, -0.1, -0.05, -0.02, 0, 0.02, 0.05, 0.1, 0.2, 0.5] + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9) + # contour line levels + contourLevelsDifference = numpy.linspace(-0.6, 0.6, 9) + + + [sosePotentialDensityTransects] + ## options related to plotting SOSE transects of potential density + + # colormap for model/observations + colormapNameResult = Spectral_r + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 1026.5, 'vmax': 1028.} + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(1026., 1028., 9) + contourLevelsResult = numpy.linspace(1026.5, 1028., 7) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.3, 'vmax': 0.3} + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-0.3, 0.3, 9) + contourLevelsDifference = numpy.linspace(-0.3, 0.3, 9) + + + [soseZonalVelocityTransects] + ## options related to plotting SOSE 
transects of zonal velocity + + # colormap for model/observations + colormapNameResult = delta + # color indices into colormapName for filled contours + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) + contourLevelsResult = numpy.linspace(-0.2, 0.2, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + contourLevelsDifference = numpy.linspace(-0.2, 0.2, 9) + + + [soseMeridionalVelocityTransects] + ## options related to plotting SOSE transects of meridional velocity + + # colormap for model/observations + colormapNameResult = delta + # color indices into colormapName for filled contours + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9) + contourLevelsResult = numpy.linspace(-0.2, 0.2, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + contourLevelsDifference = numpy.linspace(-0.2, 0.2, 9) + + + [soseVelocityMagnitudeTransects] + ## options related to plotting SOSE transects of velocity 
 magnitude + + # colormap for model/observations + colormapNameResult = ice + # color indices into colormapName for filled contours + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 0, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksResult = numpy.linspace(0, 0.2, 9) + contourLevelsResult = numpy.linspace(0, 0.2, 9) + + # colormap for differences + colormapNameDifference = balance + # the type of norm used in the colormap + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -0.2, 'vmax': 0.2} + # determine the ticks automatically by default, uncomment to specify + # colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9) + contourLevelsDifference = numpy.linspace(-0.2, 0.2, 9) + + +The options ``minLat`` and ``maxLat`` determine the start and end of each +meridional transect (in degrees). The option ``longitudes`` is a list or +numpy array of longitudes for each transect, e.g.:: + + longitudes = numpy.linspace(0, 330, 12) + +produces 12 transects spaced every 30 |deg|. + +.. note:: + + SOSE's domain extends only to 25 |deg| S, so ``maxLat`` should typically be + less than -25. + +The user can select only to plot a subset of the supported fields by adding +only the desired field names to ``fieldList``. The default value shows the +list of all available fields. + +.. note:: + + Because ``velocityMagnitude`` is computed internally rather than being stored + as a separate field with the other SOSE output, it is not possible to plot + ``velocityMagnitude`` without also plotting ``zonalVelocity`` and + ``meridionalVelocity``. + +After the ``soseTransects`` section, there is a section for each supported field +specifying the information related to the colormap.
+ +For details on remaining configuration options, see: + * :ref:`config_transects` + * :ref:`config_remapping` + * :ref:`config_colormaps` + * :ref:`config_seasons` + +Observations +------------ + +:ref:`sose` + +Example Result +-------------- + +.. image:: examples/sose_transect.png + :width: 500 px + :align: center + +.. _`Southern Ocean State Estimate (SOSE)`: http://sose.ucsd.edu/sose_stateestimation_data_05to10.html diff --git a/1.11.0rc1/_sources/users_guide/tasks/streamfunctionMOC.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/streamfunctionMOC.rst.txt new file mode 100644 index 000000000..8c705c8e9 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/streamfunctionMOC.rst.txt @@ -0,0 +1,122 @@ +.. |deg| unicode:: U+00B0 .. degree sign + :trim: + +.. _task_streamfunctionMOC: + +streamfunctionMOC +================= + +An analysis task for plotting the zonal mean meridional overturning circulation +(MOC). Currently we support plots of the global and Atlantic MOCs but not of +other regions (notably the Indo-pacific). This task also produces a time +series plot of the maximum Atlantic MOC at 26.5 |deg| N. + +Component and Tags:: + + component: ocean + tags: streamfunction, moc, climatology, timeSeries, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [streamfunctionMOC] + ## options related to plotting the streamfunction of the meridional overturning + ## circulation (MOC) + + # Include the bolus velocity from the Gent-McWilliams parameterization? This + # only needs to be disabled if the simulation was run with GM turned on but + # the MOC shouldn't include the bolus term + includeBolus = True + + # Region names for basin MOC calculation. + # Supported options are Atlantic and IndoPacific + regionNames = ['Atlantic'] + + # Size of latitude bins over which MOC streamfunction is integrated + latBinSizeGlobal = 1. 
+ latBinSizeAtlantic = 0.5 + latBinSizeIndoPacific = 0.5 + + # colormap for model results + colormapNameGlobal = RdYlBu_r + colormapNameAtlantic = RdYlBu_r + colormapNameIndoPacific = RdYlBu_r + # colormap indices for contour color + colormapIndicesGlobal = [0, 40, 80, 110, 140, 170, 200, 230, 255] + colormapIndicesAtlantic = [0, 40, 80, 110, 140, 170, 200, 230, 255] + colormapIndicesIndoPacific = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colorbar levels/values for contour boundaries + colorbarLevelsGlobal = [-20, -10, -5, -2, 2, 5, 10, 20, 30, 40] + colorbarLevelsAtlantic = [-10, -5, -2, 0, 5, 8, 10, 14, 18, 22] + colorbarLevelsIndoPacific = [-10, -5, -2, 0, 5, 8, 10, 14, 18, 22] + # contour line levels + contourLevelsGlobal = np.arange(-25.1, 35.1, 10) + contourLevelsAtlantic = np.arange(-8, 20.1, 2) + contourLevelsIndoPacific = np.arange(-8, 20.1, 2) + + # Number of points over which to compute moving average for + # MOC timeseries (e.g., for monthly output, movingAveragePoints=12 + # corresponds to a 12-month moving average window) + movingAveragePoints = 12 + + # Number of points over which to compute moving average (with respect to + # latitude) for climatological MOC plots + movingAveragePointsClimatological = 1 + + # An optional first year for the tick marks on the x axis. Leave commented out + # to start at the beginning of the time series. + + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + + # yearStrideXTicks = 1 + +For runs that use an eddy closure parameterization, tracer transport is +augmented with the Bolus velocity. By default, the Bolus velocity is included +in the MOC but this can be disabled with ``includeBolus = False``. 
+(MPAS-Analysis will automatically recognize runs where the bolus velocity is +not used and will not include it in climatology computations or add it to the +MOC in these cases to save disk space and computation time.) + +Currently, the only supported region is the Atlantic, so ``regionNames`` should +be left as it is. In the near future, we anticipate including the Indo-pacific +as well. + +Each region has its own bin size (in degrees latitudes). Adjust these as +desired, e.g.:: + + latBinSizeGlobal = 0.5 + +for half-degree bins for the global MOC. + +Each region supports its own colormap, with suffix ``Global``, ``Atlantic`` +and (soon) ``IndoPacific``. See :ref:`config_colormaps` for more details. + +The MOC time series is plotted with a 12-month moving average by default +(taking out noise and the annual cycle). For more details, see +:ref:`config_moving_average`. + +The latitude-depth MOC plots can also optionally be smoothed in latitude with +a moving average, e.g.:: + + movingAveragePointsClimatological = 4 + +will perform a 4-bin smoothing of the MOC. + +For more details on the remaining config options, see +:ref:`config_time_axis_ticks`. + +Example Result +-------------- + +.. image:: examples/moc.png + :width: 500 px + :align: center + +.. image:: examples/time_series_moc.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/timeSeriesAntarcticMelt.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesAntarcticMelt.rst.txt new file mode 100644 index 000000000..50e84eeb0 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesAntarcticMelt.rst.txt @@ -0,0 +1,119 @@ +.. _task_timeSeriesAntarcticMelt: + +timeSeriesAntarcticMelt +======================= + +An analysis task for plotting time series of mean melt rates per ice shelf or +Antarctic region along with observations from `Rignot et al. (2013)`_, +`Adusumilli et al. (2020) `_, +and `Paolo et al. (2023) `_. 
+ +Component and Tags:: + + component: ocean + tags: timeSeries, melt, landIceCavities + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [timeSeriesAntarcticMelt] + ## options related to plotting time series of melt below Antarctic ice shelves + + # list of ice shelves to plot or ['all'] for all 106 ice shelves and regions. + # See "regionNames" in the ice shelf masks file in regionMaskSubdirectory for + # details. + iceShelvesToPlot = ['Antarctica', 'Peninsula', 'West Antarctica', + 'East Antarctica', 'Larsen_C', 'Filchner', 'Ronne', + 'Filchner-Ronne', 'Brunt_Stancomb', 'Fimbul', 'Amery', + 'Totten', 'Eastern_Ross', 'Western_Ross', 'Ross', 'Getz', + 'Thwaites', 'Pine_Island', 'Abbot', 'George_VI'] + + # Number of months over which to compute moving average + movingAverageMonths = 1 + + # An optional first year for the tick marks on the x axis. Leave commented out + # to start at the beginning of the time series. + + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + + # yearStrideXTicks = 1 + +Ice Shelf and Region Names +-------------------------- + +The ``iceShelvesToPlot`` can be set to ``['all']`` to plot all 106 ice shelves +and regions (not recommended, as this is typically a waste of time). 
Otherwise, +it is a list consisting of any subset of the following ice shelves:: + + ["Abbot", "Amery", "Atka", "Aviator", "Bach", "Baudouin", "Borchgrevink", + "Brahms", "Brunt_Stancomb", "Campbell", "Cheetham", "Conger_Glenzer", + "Cook", "Cosgrove", "Crosson", "Dennistoun", "Dibble", "Dotson", + "Drygalski", "Edward_VIII", "Ekstrom", "Ferrigno", "Filchner", "Fimbul", + "Fitzgerald", "Frost", "GeikieInlet", "George_VI", "Getz", "Gillet", + "Hamilton", "Hannan", "HarbordGlacier", "Helen", "Holmes", "HolmesWest", + "Hull", "Jelbart", "Land", "Larsen_B", "Larsen_C", "Larsen_D", "Larsen_E", + "Larsen_F", "Larsen_G", "Lazarev", "Lillie", "Mariner", "Matusevitch", + "Mendelssohn", "Mertz", "Moscow_University", "Moubray", "Mulebreen", + "Myers", "Nansen", "Nickerson", "Ninnis", "Nivl", "Noll", "Nordenskjold", + "Pine_Island", "PourquoiPas", "Prince_Harald", "Publications", "Quar", + "Rayner_Thyer", "Rennick", "Richter", "Riiser-Larsen", "Ronne", "Western_Ross", + "Eastern_Ross", "Shackleton", "Shirase", "Slava", "SmithInlet", "Stange", + "Sulzberger", "Suvorov", "Swinburne", "Thwaites", "Tinker", "Totten", + "Tracy_Tremenchus", "Tucker", "Underwood", "Utsikkar", "Venable", "Verdi", + "Vigrid", "Vincennes", "Voyeykov", "West", "Wilkins", "Wilma_Robert_Downer", + "Withrow", "Wordie", "Wylde", "Zubchatyy"] + +or these regions made up of 2 or more ice shelves:: + + ["Antarctica", "Peninsula", "West Antarctica", "East Antarctica", "Ross", + "Filchner-Ronne", "IMBIE1", "IMBIE2", "IMBIE3", "IMBIE4", "IMBIE5", + "IMBIE6", "IMBIE7", "IMBIE8", "IMBIE9", "IMBIE10", "IMBIE11", "IMBIE12", + "IMBIE13", "IMBIE14", "IMBIE15", "IMBIE16", "IMBIE17", "IMBIE18", "IMBIE19", + "IMBIE20", "IMBIE21", "IMBIE22", "IMBIE23", "IMBIE24", "IMBIE25", "IMBIE26", + "IMBIE27"] + +The default list of ice shelves and regions was determined to highlight the +largest ice shelves and regions along with a representative sample of smaller +shelves from different regions. 
+ +Most of these regions are determined based on the centroid locations given +in the supplementary material of `Rignot et al. (2013)`_. Ice shelf regions +have been extended into grounded ice and onto the Antarctic continental shelf +based on which floating ice shelf is closest. The `IMBIE Basins`_ are defined +following `Zwally et al. (2012)`_. + +.. note:: + + Time series are computed and stored in a NetCDF file for all 106 ice shelves + and regions even if only a subset are plotted, allowing you to plot + additional ice shelves externally or by re-running MPAS-Analysis. These + can be found in ``timeseries/iceShelfAggregatedFluxes.nc`` within your output + base directory. + +Other Options +------------- + +* :ref:`config_moving_average` +* :ref:`config_time_axis_ticks` + +Observations +------------ + +* :ref:`rignot_melt` +* :ref:`adusumilli_melt` + +Example Result +-------------- + +.. image:: examples/melt_flux_east_ant.png + :width: 500 px + :align: center + +.. _`Rignot et al. (2013)`: http://doi.org/10.1126/science.1235798 +.. _`IMBIE Basins`: http://imbie.org/imbie-2016/drainage-basins/ +.. _`Zwally et al. (2012)`: https://icesat4.gsfc.nasa.gov/cryo_data/ant_grn_drainage_systems.php diff --git a/1.11.0rc1/_sources/users_guide/tasks/timeSeriesOHCAnomaly.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesOHCAnomaly.rst.txt new file mode 100644 index 000000000..4ec726ef8 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesOHCAnomaly.rst.txt @@ -0,0 +1,106 @@ +.. _task_timeSeriesOHCAnomaly: + +timeSeriesOHCAnomaly +==================== + +An analysis task for plotting a Hovmoller plot (time and depth axes) and +depth-integrated time series of the anomaly in ocean heat content (OHC) +from a reference year (usually the first year of the simulation).
+ +Component and Tags:: + + component: ocean + tags: timeSeries, ohc, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [timeSeriesOHCAnomaly] + ## options related to plotting time series of ocean heat content (OHC) + ## anomalies from year 1 + + # list of regions to plot from the region list in [regions] below + regions = ['global'] + + # approximate depths (m) separating plots of the upper, middle and lower ocean + depths = [700, 2000] + + # preprocessed file prefix, with format OHC..year*.nc + preprocessedFilePrefix = OHC + + # prefix on preprocessed field name, with format ohc_ for suffixes + # 'tot', '700m', '2000m', 'btm' + preprocessedFieldPrefix = ohc + + # Number of points over which to compute moving average(e.g., for monthly + # output, movingAveragePoints=12 corresponds to a 12-month moving average + # window) + movingAveragePoints = 12 + + # An optional first year for the tick marks on the x axis. Leave commented out + # to start at the beginning of the time series. + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + # yearStrideXTicks = 1 + + [hovmollerOHCAnomaly] + ## options related to time vs. depth Hovmoller plots of ocean heat content + ## (OHC) anomalies from year 1 + + # Note: regions and moving average points are the same as for the time series + # plot + + # colormap + colormapName = balance + # colormap indices for contour color + colormapIndices = [0, 28, 57, 85, 113, 142, 170, 198, 227, 255] + # colorbar levels/values for contour boundaries + colorbarLevels = [-2.4, -0.8, -0.4, -0.2, 0, 0.2, 0.4, 0.8, 2.4] + # contour line levels + contourLevels = np.arange(-2.5, 2.6, 0.5) + + # An optional first year for the tick marks on the x axis. Leave commented out + # to start at the beginning of the time series. 
+ + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + # yearStrideXTicks = 1 + +For the depth-integrated time-series plot, the user may select the depths (in +meters) that separate the upper, middle and lower regions of the ocean, e.g.:: + + depths = [700, 2000] + +indicates that OHC will be integrated from 0 to 700 m, 700 to 2000 m, +and 2000 m to the ocean floor (as well as from 0 to the ocean floor). + +The OHC can be compared with results from a reference v0 simulation. If +``preprocessedRunName`` in the ``[runs]`` section is not ``None``, the +depth integrated time series will be read in with a file prefix given by +``preprocessedFilePrefix`` and a field prefix given by +``preprocessedFieldPrefix``. Generally, these options should not be altered +except for debugging purposes. + +Recently, a right-hand axis and an associated set of lines have been added to the +OHC anomaly time series. This axis and these lines show the equivalent +top-of-atmosphere energy flux (:math:`W/m^2`) that the ocean heat anomaly would +induce. + +For more details on other config options, see: + * :ref:`config_regions` + * :ref:`config_colormaps` + * :ref:`config_moving_average` + * :ref:`config_time_axis_ticks` + +Example Result +-------------- + +.. image:: examples/time_series_ohc.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/timeSeriesOceanRegions.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesOceanRegions.rst.txt new file mode 100644 index 000000000..fe4c6b378 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesOceanRegions.rst.txt @@ -0,0 +1,138 @@ +.. _task_timeSeriesOceanRegions: + +timeSeriesOceanRegions +====================== + +An analysis task for plotting time series of temperature, salinity, +potential density and other fields of interest averaged over ocean regions.
+ +Component and Tags:: + + component: ocean + tags: timeSeries, regions + +Configuration Options +--------------------- + +The following configuration options are available for this task: + +.. code-block:: cfg + + [timeSeriesOceanRegions] + ## options related to plotting time series of groups of ocean regions + + # the names of region groups to plot, each with its own section below + regionGroups = ['Antarctic Regions'] + + + [timeSeriesAntarcticRegions] + ## options related to plotting time series of Antarctic regions + + # list of regions to plot or ['all'] for all regions in the masks file. + # See "regionNames" in the antarcticRegions masks file in + # regionMaskSubdirectory for details. + regionNames = [] + + # a list of variables to plot + variables = [{'name': 'temperature', + 'title': 'Temperature', + 'units': r'$^\circ$C', + 'mpas': 'timeMonthly_avg_activeTracers_temperature'}, + {'name': 'salinity', + 'title': 'Salinity', + 'units': 'PSU', + 'mpas': 'timeMonthly_avg_activeTracers_salinity'}, + {'name': 'potentialDensity', + 'title': 'Potential Density', + 'units': 'kg m$^{-3}$', + 'mpas': 'timeMonthly_avg_potentialDensity'}, + {'name': 'mixedLayerDepth', + 'title': 'Mixed Layer Depth', + 'units': 'm', + 'mpas': 'timeMonthly_avg_dThreshMLD'}] + + # The minimum and maximum depth over which fields are averaged, default is + # to take these values from the geojson feature's zmin and zmax properties. + # Add these to a custom config file to override the defaults. + # zmin = -1000 + # zmax = -400 + + # Observational data sets to compare against + obs = ['SOSE', 'WOA18'] + +Region Groups +------------- + +``regionGroups`` is a list of region groups, each of which will get its own +gallery group on the resulting analysis webpage. See +:ref:`config_region_groups` for more information on the available region +groups.
 For each region group, there should be a corresponding +``timeSeries`` section of the config file, with any spaces removed +from the name of the region group. By default, the only region group for this +task is ``'Antarctic Regions'``. + +Region Names +------------ + +The ``regionNames`` can be set to ``['all']`` to plot all of the regions in the +region group. In the case of ``Antarctic Regions``, these are: + +.. code-block:: cfg + + ["Southern Ocean", "Southern Ocean 60S", "Eastern Weddell Sea Shelf", + "Eastern Weddell Sea Deep", "Western Weddell Sea Shelf", + "Western Weddell Sea Deep", "Weddell Sea Shelf", "Weddell Sea Deep", + "Bellingshausen Sea Shelf", "Bellingshausen Sea Deep", "Amundsen Sea Shelf", + "Amundsen Sea Deep", "Eastern Ross Sea Shelf", "Eastern Ross Sea Deep", + "Western Ross Sea Shelf", "Western Ross Sea Deep", + "East Antarctic Seas Shelf", "East Antarctic Seas Deep"] + +Variables +--------- + +The ``variables`` list has a python dictionary for each variable to be plotted. +A separate gallery will be produced for each variable with a title given by +the ``"title"`` entry in the dictionary. The ``"units"`` entry is used for the +y-axis label of each plot. The ``"name"`` is the name of the variable in +the NetCDF files as well as the text appended to subtask names and file names. +It should contain no spaces. The ``"mpas"`` entry is the name of the +corresponding field in the MPAS-Ocean ``timeSeriesStatsMonthlyOutput`` files. + +Depth Bounds +------------ + +Some region groups such as ``Antarctic Regions`` define default depth bounds +(``zmin`` and ``zmax``) for each region. For ``Antarctic Regions``, this was +done so regions on the continental shelf (ending in "Shelf") would be averaged +over a different range (``zmax`` = -200 m, ``zmin`` = -1000 m) than the regions +of the deeper ocean (ending in "Deep", with ``zmax`` = -400 m, +``zmin`` = -1000 m). The user can override these defaults by defining her own +``zmin`` and ``zmax``.
Note that ``zmin`` is deeper and ``zmax`` is shallower +since they have negative values. + +Other Config Options +-------------------- + +For more details, see: + * :ref:`config_regions` + + +Observations +------------ + +``obs`` is a list of the observational data sets to plot as reference lines +(constant in time). Possible values are ``'SOSE'`` and ``'WOA18'``. An empty +list can be provided if no observations should be plotted. + +:ref:`sose` + +:ref:`woa18_t_s` + +Example Result +-------------- + +.. image:: examples/west_ross_shelf_temp.png + :width: 500 px + :align: center + +.. _`antarctic_ocean_regions`: https://github.com/MPAS-Dev/geometric_features/tree/main/feature_creation_scripts/antarctic_ocean_regions diff --git a/1.11.0rc1/_sources/users_guide/tasks/timeSeriesSST.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesSST.rst.txt new file mode 100644 index 000000000..3ba29f8c4 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesSST.rst.txt @@ -0,0 +1,51 @@ +.. _task_timeSeriesSST: + +timeSeriesSST +============= + +An analysis task for plotting a time series of sea surface temperature (SST), +optionally against results from a preprocessed E3SM v0 run (see +:ref:`config_runs`). + +Component and Tags:: + + component: ocean + tags: timeSeries, sst, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [timeSeriesSST] + ## options related to plotting time series of sea surface temperature (SST) + + # list of regions to plot from the region list in [regions] below + regions = ['global'] + + # Number of points over which to compute moving average (e.g., for monthly + # output, movingAveragePoints=12 corresponds to a 12-month moving average + # window) + movingAveragePoints = 12 + + # An optional first year for the tick marks on the x axis. Leave commented out + # to start at the beginning of the time series. 
+ + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + + # yearStrideXTicks = 1 + +For more details on these config options, see: + * :ref:`config_regions` + * :ref:`config_moving_average` + * :ref:`config_time_axis_ticks` + +Example Result +-------------- + +.. image:: examples/time_series_sst.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/timeSeriesSalinityAnomaly.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesSalinityAnomaly.rst.txt new file mode 100644 index 000000000..8ad608e82 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesSalinityAnomaly.rst.txt @@ -0,0 +1,61 @@ +.. _task_timeSeriesSalinityAnomaly: + +timeSeriesSalinityAnomaly +========================= + +An analysis task for plotting a Hovmoller plot (time and depth axes) of the +anomaly in ocean salinity from a reference year (usully the first year of +the simulation). + +Component and Tags:: + + component: ocean + tags: timeSeries, salinity, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [hovmollerSalinityAnomaly] + ## options related to plotting time series of salinity vs. 
depth + + # list of regions to plot from the region list in [regions] below + regions = ['global'] + + # Number of points over which to compute moving average(e.g., for monthly + # output, movingAveragePoints=12 corresponds to a 12-month moving average + # window) + movingAveragePoints = 12 + + # colormap + colormapName = balance + # color indices into colormapName for filled contours + colormapIndices = [0, 28, 57, 85, 113, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + colorbarLevels = [-0.1, -0.02, -0.003, -0.001, 0, 0.001, 0.003, 0.02, 0.1] + # contour line levels + contourLevels = np.arange(-0.1, 0.11, 0.02) + + # An optional first year for the tick marks on the x axis. Leave commented out + # to start at the beginning of the time series. + + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + + # yearStrideXTicks = 1 + +For more details, see: + * :ref:`config_regions` + * :ref:`config_colormaps` + * :ref:`config_moving_average` + * :ref:`config_time_axis_ticks` + +Example Result +-------------- + +.. image:: examples/hovmoller_salin.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/timeSeriesSeaIceAreaVol.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesSeaIceAreaVol.rst.txt new file mode 100644 index 000000000..02d2a1d41 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesSeaIceAreaVol.rst.txt @@ -0,0 +1,77 @@ +.. _task_timeSeriesSeaIceAreaVol: + +timeSeriesSeaIceAreaVol +======================= + +An analysis task for plotting time series of sea ice area and volume for both +the Arctic and Antarctic against observations. 
+ +Component and Tags:: + + component: seaIce + tags: timeSeries, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [timeSeriesSeaIceAreaVol] + ## options related to plotting time series of sea ice area and volume + + # compare to observations? + compareWithObservations = True + # Number of points over which to compute moving average (e.g., for monthly + # output, movingAveragePoints=12 corresponds to a 12-month moving average + # window) + movingAveragePoints = 1 + # title font properties + titleFontSize = 18 + # plot on polar plot + polarPlot = False + + # An optional first year for the tick marks on the x axis. Leave commented out + # to start at the beginning of the time series. + + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + + # yearStrideXTicks = 1 + + # observations files + areaNH = IceArea_timeseries/iceAreaNH_climo_20180710.nc + areaSH = IceArea_timeseries/iceAreaSH_climo_20180710.nc + volNH = PIOMAS/PIOMASvolume_monthly_climo_20180710.nc + volSH = none + +``compareWithObservations`` can be set to ``False`` to disable comparison with +both sets of observations (see below). + +The title font size can be customized with ``titleFontSize``, given in points. + +To produce polar plots (with time progressing clockwise around the origin and +sea ice area or volume the distance from the origin) in addition to the +typical time series with time on the x axis, set ``polarPlot = True``. + +The ability to modify observations files pointed to by ``areaNH``, ``areaSH``, +``volNH`` and ``volSH`` is provided for debugging purposes and these options +should typically remain unchanged. 
+ +For details on the remaining config options, see: + * :ref:`config_moving_average` + * :ref:`config_time_axis_ticks` + +Observations +------------ + +* :ref:`ssmi_ice_area` +* :ref:`piomass_ice_volume` + +Example Result +-------------- + +.. image:: examples/ice_area_nh.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/timeSeriesTemperatureAnomaly.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesTemperatureAnomaly.rst.txt new file mode 100644 index 000000000..4251efae6 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesTemperatureAnomaly.rst.txt @@ -0,0 +1,61 @@ +.. _task_timeSeriesTemperatureAnomaly: + +timeSeriesTemperatureAnomaly +============================ + +An analysis task for plotting a Hovmoller plot (time and depth axes) of the +anomaly in ocean potential temperature from a reference year (usully the first +year of the simulation). + +Component and Tags:: + + component: ocean + tags: timeSeries, temperature, publicObs + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [hovmollerTemperatureAnomaly] + ## options related to plotting time series of potential temperature vs. depth + + # list of regions to plot from the region list in [regions] below + regions = ['global'] + + # Number of points over which to compute moving average(e.g., for monthly + # output, movingAveragePoints=12 corresponds to a 12-month moving average + # window) + movingAveragePoints = 12 + + # colormap + colormapName = balance + # color indices into colormapName for filled contours + colormapIndices = [0, 28, 57, 85, 113, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + colorbarLevels = [-1, -0.5, -0.2, -0.05, 0, 0.05, 0.2, 0.5, 1] + # contour line levels + contourLevels = np.arange(-1.0, 1.26, 0.25) + + # An optional first year for the tick marks on the x axis. 
Leave commented out + # to start at the beginning of the time series. + + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + + # yearStrideXTicks = 1 + +For more details, see: + * :ref:`config_regions` + * :ref:`config_colormaps` + * :ref:`config_moving_average` + * :ref:`config_time_axis_ticks` + +Example Result +-------------- + +.. image:: examples/hovmoller_temp.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/timeSeriesTransport.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesTransport.rst.txt new file mode 100644 index 000000000..044ba77dd --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/timeSeriesTransport.rst.txt @@ -0,0 +1,74 @@ +.. _task_timeSeriesTransport: + +timeSeriesTransport +====================== + +An analysis task for plotting time series of of temperature, salinity, +potential density and other fields of interest averaged over ocean regions. + +Component and Tags:: + + component: ocean + tags: timeSeries, transport + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [timeSeriesTransport] + ## options related to plotting time series of transport through transects + + # list of ocean transects from geometric_features to plot or ['all'] for all + # available transects. + transectsToPlot = ['Drake Passage', 'Tasmania-Ant', 'Africa-Ant', 'Antilles Inflow', + 'Mona Passage', 'Windward Passage', 'Florida-Cuba', 'Florida-Bahamas', + 'Indonesian Throughflow', 'Agulhas', 'Mozambique Channel', 'Bering Strait', + 'Lancaster Sound', 'Fram Strait', 'Nares Strait'] + + # Number of months over which to compute moving average + movingAverageMonths = 1 + + # An optional first year for the tick marks on the x axis. Leave commented out + # to start at the beginning of the time series. 
+ + # firstYearXTicks = 1 + + # An optional number of years between tick marks on the x axis. Leave + # commented out to determine the distance between ticks automatically. + + # yearStrideXTicks = 1 + +Transect Names +-------------- + +The ``transectsToPlot`` can be set to ``['all']`` to plot all of the transects +defined in the ``transportTransects`` transect group. These are:: + + ["Africa-Ant", "Agulhas", "Antarctic Peninsula", "Antilles Inflow", + "Baja CA blockage", "Baltic Sea Deepen", "Barents Sea Opening", + "Bering Strait", "Davis Strait", "Drake Passage", "English Channel Deepen", + "Florida-Bahamas", "Florida-Cuba", "Fram Strait", "Indonesian Throughflow", + "Ireland North Channel Deepen", "Japan Hokkaido blockage", + "Japan La Perouse Strait Deepen", "Japan Tsugaru Strait Deepen", + "Japan blockage", "Lancaster Sound", "Mona Passage", "Mozambique Channel", + "Nares Strait", "Nares Strait Deepen", "Persian Gulf Deepen", + "Red Sea Deepen", "Sakhalin blockage", "Strait of Gibralter Deepen 1", + "Strait of Gibralter Deepen 2", "Tasmania-Ant", "White Sea", + "Windward Passage"] + +Many of these are likely not of interest in most simulations, so a subset of +the most relevant transects has been chosen in the default configuration. + +Other Options +------------- + +* :ref:`config_moving_average` +* :ref:`config_time_axis_ticks` + +Example Result +-------------- + +.. image:: examples/drake_passage_transport.png + :width: 500 px + :align: center diff --git a/1.11.0rc1/_sources/users_guide/tasks/woceTransects.rst.txt b/1.11.0rc1/_sources/users_guide/tasks/woceTransects.rst.txt new file mode 100644 index 000000000..1b5e6edd7 --- /dev/null +++ b/1.11.0rc1/_sources/users_guide/tasks/woceTransects.rst.txt @@ -0,0 +1,135 @@ +.. _task_woceTransects: + +woceTransects +============= + +An analysis task for interpolating MPAS fields to +`World Ocean Circulation Experiment (WOCE)`_ transects and comparing them with +ship-based observations. 
+ +Component and Tags:: + + component: ocean + tags: climatology, transect, woce + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [woceTransects] + ## options related to plotting model vs. World Ocean Circulation Experiment + ## (WOCE) transects. + + # Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, + # Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN'] + + # The approximate horizontal resolution (in km) of each transect. Latitude/ + # longitude between observation points will be subsampled at this interval. + # Use 'obs' to indicate no subsampling. + horizontalResolution = obs + + # The name of the vertical comparison grid. Valid values are 'mpas' for the + # MPAS vertical grid, 'obs' to use the locations of observations or + # any other name if the vertical grid is defined by 'verticalComparisonGrid' + # verticalComparisonGridName = obs + verticalComparisonGridName = uniform_0_to_4000m_at_10m + #verticalComparisonGridName = mpas + + # The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas' or + # 'obs'. This should be numpy array of (typically negative) elevations (in m). + verticalComparisonGrid = numpy.linspace(0, -4000, 401) + + # The minimum weight of a destination cell after remapping. Any cell with + # weights lower than this threshold will therefore be masked out. 
+ renormalizationThreshold = 0.01 + + + [woceTemperatureTransects] + ## options related to plotting WOCE transects of potential temperature + + # colormap for model/observations + colormapNameResult = RdYlBu_r + # the type of norm used in the colormap (linear, log, or symLog) + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 0.0, 'vmax': 18.0} + # color indices into colormapName for filled contours + #colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + #colorbarLevelsResult = [0, 1, 2, 3, 4, 6, 8, 10, 14, 18] + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(0.0, 18.0, 9) + # contour line levels + contourLevelsResult = np.arange(1.0, 18.0, 2.0) + + # colormap for differences + colormapNameDifference = RdBu_r + # the type of norm used in the colormap (linear, log, or symLog) + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -2.0, 'vmax': 2.0} + # color indices into colormapName for filled contours + #colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + #colorbarLevelsDifference = [-2, -1.5, -1.25, -1, -0.2, 0, 0.2, 1, 1.25, 1.5, 2] + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-2.0, 2.0, 9) + # contour line levels + contourLevelsDifference = np.arange(-1.8, 2.0, 0.4) + + + + [woceSalinityTransects] + ## options related to plotting WOCE transects of salinity + + # colormap for model/observations + colormapNameResult = BuOr + # the type of norm used in the colormap (linear, log, or symLog) + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 33.0, 'vmax': 36.0} + # color indices into colormapName for filled contours + #colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colormap 
levels/values for contour boundaries + #colorbarLevelsResult = [33, 34, 34.25, 34.5, 34.6, 34.7, 34.8, 34.9, 35, 36] + # place the ticks automatically by default + # colorbarTicksResult = numpy.linspace(33.0, 36.0, 9) + # contour line levels + contourLevelsResult = np.arange(33.3, 36.0, 0.3) + + # colormap for differences + colormapNameDifference = RdBu_r + # the type of norm used in the colormap (linear, log, or symLog) + normTypeDifference = linear + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -1.0, 'vmax': 1.0} + # color indices into colormapName for filled contours + #colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + #colorbarLevelsDifference = [-1, -0.5, -0.2, -0.05, -0.02, 0, 0.02, 0.05, 0.2, 0.5, 1] + # place the ticks automatically by default + # colorbarTicksDifference = numpy.linspace(-1.0, 1.0, 9) + # contour line levels + contourLevelsDifference = np.arange(-0.9, 1.0, 0.4) + +For details on these configuration options, see: + * :ref:`config_transects` + * :ref:`config_remapping` + * :ref:`config_colormaps` + * :ref:`config_seasons` + +Observations +------------ + +:ref:`woce` + +Example Result +-------------- + +.. image:: examples/woce_transect.png + :width: 500 px + :align: center + +.. 
_`World Ocean Circulation Experiment (WOCE)`: http://woceatlas.ucsd.edu/ diff --git a/1.11.0rc1/_sources/versions.rst.txt b/1.11.0rc1/_sources/versions.rst.txt new file mode 100644 index 000000000..e558822d6 --- /dev/null +++ b/1.11.0rc1/_sources/versions.rst.txt @@ -0,0 +1,59 @@ +Versions +======== + +================ =============== +Documentation On GitHub +================ =============== +`stable`_ `main`_ +`latest`_ `develop`_ +`v1.2.6`_ `1.2.6`_ +`v1.2.7`_ `1.2.7`_ +`v1.2.8`_ `1.2.8`_ +`v1.2.9`_ `1.2.9`_ +`v1.3.0`_ `1.3.0`_ +`v1.4.0`_ `1.4.0`_ +`v1.5.0`_ `1.5.0`_ +`v1.6.0`_ `1.6.0`_ +`v1.6.1`_ `1.6.1`_ +`v1.7.0`_ `1.7.0`_ +`v1.7.1`_ `1.7.1`_ +`v1.7.2`_ `1.7.2`_ +`v1.8.0`_ `1.8.0`_ +`v1.9.0`_ `1.9.0`_ +`v1.10.0`_ `1.10.0`_ +================ =============== + +.. _`stable`: ../stable/index.html +.. _`latest`: ../latest/index.html +.. _`v1.2.6`: ../1.2.6/index.html +.. _`v1.2.7`: ../1.2.7/index.html +.. _`v1.2.8`: ../1.2.8/index.html +.. _`v1.2.9`: ../1.2.9/index.html +.. _`v1.3.0`: ../1.3.0/index.html +.. _`v1.4.0`: ../1.4.0/index.html +.. _`v1.5.0`: ../1.5.0/index.html +.. _`v1.6.0`: ../1.6.0/index.html +.. _`v1.6.1`: ../1.6.1/index.html +.. _`v1.7.0`: ../1.7.0/index.html +.. _`v1.7.1`: ../1.7.1/index.html +.. _`v1.7.2`: ../1.7.2/index.html +.. _`v1.8.0`: ../1.8.0/index.html +.. _`v1.9.0`: ../1.9.0/index.html +.. _`v1.10.0`: ../1.10.0/index.html +.. _`main`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/main +.. _`develop`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/develop +.. _`1.2.6`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.6 +.. _`1.2.7`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.7 +.. _`1.2.8`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.8 +.. _`1.2.9`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.9 +.. _`1.3.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.3.0 +.. _`1.4.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.4.0 +.. _`1.5.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.5.0 +.. 
_`1.6.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.6.0 +.. _`1.6.1`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.6.1 +.. _`1.7.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.7.0 +.. _`1.7.1`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.7.1 +.. _`1.7.2`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.7.2 +.. _`1.8.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.8.0 +.. _`1.9.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.9.0 +.. _`1.10.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.10.0 diff --git a/1.11.0rc1/_static/basic.css b/1.11.0rc1/_static/basic.css new file mode 100644 index 000000000..f316efcb4 --- /dev/null +++ b/1.11.0rc1/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; 
+} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px 
solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- 
sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, 
table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + 
font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + 
margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + 
+table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/1.11.0rc1/_static/css/badge_only.css b/1.11.0rc1/_static/css/badge_only.css new 
file mode 100644 index 000000000..c718cee44 --- /dev/null +++ b/1.11.0rc1/_static/css/badge_only.css @@ -0,0 +1 @@ +.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version 
.icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} \ No newline at end of file diff --git a/1.11.0rc1/_static/css/fonts/Roboto-Slab-Bold.woff b/1.11.0rc1/_static/css/fonts/Roboto-Slab-Bold.woff new file mode 100644 index 000000000..6cb600001 Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/Roboto-Slab-Bold.woff differ diff --git a/1.11.0rc1/_static/css/fonts/Roboto-Slab-Bold.woff2 b/1.11.0rc1/_static/css/fonts/Roboto-Slab-Bold.woff2 new file mode 100644 index 000000000..7059e2314 Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/Roboto-Slab-Bold.woff2 differ diff --git a/1.11.0rc1/_static/css/fonts/Roboto-Slab-Regular.woff b/1.11.0rc1/_static/css/fonts/Roboto-Slab-Regular.woff 
new file mode 100644 index 000000000..f815f63f9 Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/Roboto-Slab-Regular.woff differ diff --git a/1.11.0rc1/_static/css/fonts/Roboto-Slab-Regular.woff2 b/1.11.0rc1/_static/css/fonts/Roboto-Slab-Regular.woff2 new file mode 100644 index 000000000..f2c76e5bd Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/Roboto-Slab-Regular.woff2 differ diff --git a/1.11.0rc1/_static/css/fonts/fontawesome-webfont.eot b/1.11.0rc1/_static/css/fonts/fontawesome-webfont.eot new file mode 100644 index 000000000..e9f60ca95 Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/fontawesome-webfont.eot differ diff --git a/1.11.0rc1/_static/css/fonts/fontawesome-webfont.svg b/1.11.0rc1/_static/css/fonts/fontawesome-webfont.svg new file mode 100644 index 000000000..855c845e5 --- /dev/null +++ b/1.11.0rc1/_static/css/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ + + + + +Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 + By ,,, +Copyright Dave Gandy 2016. All rights reserved. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/1.11.0rc1/_static/css/fonts/fontawesome-webfont.ttf b/1.11.0rc1/_static/css/fonts/fontawesome-webfont.ttf new file mode 100644 index 000000000..35acda2fa Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/fontawesome-webfont.ttf differ diff --git a/1.11.0rc1/_static/css/fonts/fontawesome-webfont.woff b/1.11.0rc1/_static/css/fonts/fontawesome-webfont.woff new file mode 100644 index 000000000..400014a4b Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/fontawesome-webfont.woff differ diff --git 
a/1.11.0rc1/_static/css/fonts/fontawesome-webfont.woff2 b/1.11.0rc1/_static/css/fonts/fontawesome-webfont.woff2 new file mode 100644 index 000000000..4d13fc604 Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/fontawesome-webfont.woff2 differ diff --git a/1.11.0rc1/_static/css/fonts/lato-bold-italic.woff b/1.11.0rc1/_static/css/fonts/lato-bold-italic.woff new file mode 100644 index 000000000..88ad05b9f Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/lato-bold-italic.woff differ diff --git a/1.11.0rc1/_static/css/fonts/lato-bold-italic.woff2 b/1.11.0rc1/_static/css/fonts/lato-bold-italic.woff2 new file mode 100644 index 000000000..c4e3d804b Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/lato-bold-italic.woff2 differ diff --git a/1.11.0rc1/_static/css/fonts/lato-bold.woff b/1.11.0rc1/_static/css/fonts/lato-bold.woff new file mode 100644 index 000000000..c6dff51f0 Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/lato-bold.woff differ diff --git a/1.11.0rc1/_static/css/fonts/lato-bold.woff2 b/1.11.0rc1/_static/css/fonts/lato-bold.woff2 new file mode 100644 index 000000000..bb195043c Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/lato-bold.woff2 differ diff --git a/1.11.0rc1/_static/css/fonts/lato-normal-italic.woff b/1.11.0rc1/_static/css/fonts/lato-normal-italic.woff new file mode 100644 index 000000000..76114bc03 Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/lato-normal-italic.woff differ diff --git a/1.11.0rc1/_static/css/fonts/lato-normal-italic.woff2 b/1.11.0rc1/_static/css/fonts/lato-normal-italic.woff2 new file mode 100644 index 000000000..3404f37e2 Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/lato-normal-italic.woff2 differ diff --git a/1.11.0rc1/_static/css/fonts/lato-normal.woff b/1.11.0rc1/_static/css/fonts/lato-normal.woff new file mode 100644 index 000000000..ae1307ff5 Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/lato-normal.woff differ diff --git 
a/1.11.0rc1/_static/css/fonts/lato-normal.woff2 b/1.11.0rc1/_static/css/fonts/lato-normal.woff2 new file mode 100644 index 000000000..3bf984332 Binary files /dev/null and b/1.11.0rc1/_static/css/fonts/lato-normal.woff2 differ diff --git a/1.11.0rc1/_static/css/theme.css b/1.11.0rc1/_static/css/theme.css new file mode 100644 index 000000000..19a446a0e --- /dev/null +++ b/1.11.0rc1/_static/css/theme.css @@ -0,0 +1,4 @@ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier 
new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir 
a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content .toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li 
button.toctree-expand:before,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! + * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 
FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content .code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content 
h5 .fa-pull-right.headerlink,.rst-content h6 .fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt .pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a 
button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root 
.fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download 
span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content
:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before
{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magi
c:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown .caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-l
aptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-ellipsis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:
""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-a
pple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-vimeo-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li button.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-squa
re:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{conten
t:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-bell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.fa-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-m
ars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-registered:before{content:""}.fa-creative-commons
:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-tripadvisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{conten
t:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{content:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:before,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:befo
re{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption 
.headerlink:before,.rst-content .eqno .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content .eqno a .headerlink,.rst-content a .admonition-title,.rst-content code.download a span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content p a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li a button.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content .eqno .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a 
.rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content p .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li button.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content .eqno .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a button.toctree-expand,.btn .wy-menu-vertical li.on a button.toctree-expand,.btn .wy-menu-vertical li button.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content .eqno .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a button.toctree-expand,.nav .wy-menu-vertical li.on a button.toctree-expand,.nav .wy-menu-vertical li button.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .eqno .btn 
.headerlink,.rst-content .eqno .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn .headerlink,.rst-content h5 .nav .headerlink,.rst-content h6 .btn .headerlink,.rst-content h6 .nav .headerlink,.rst-content p .btn .headerlink,.rst-content p .nav .headerlink,.rst-content table>caption .btn .headerlink,.rst-content table>caption .nav .headerlink,.rst-content tt.download .btn span:first-child,.rst-content tt.download .nav span:first-child,.wy-menu-vertical li .btn button.toctree-expand,.wy-menu-vertical li.current>a .btn button.toctree-expand,.wy-menu-vertical li.current>a .nav button.toctree-expand,.wy-menu-vertical li .nav button.toctree-expand,.wy-menu-vertical li.on a .btn button.toctree-expand,.wy-menu-vertical li.on a .nav button.toctree-expand{display:inline}.btn .fa-large.icon,.btn .fa.fa-large,.btn .rst-content .code-block-caption .fa-large.headerlink,.btn .rst-content .eqno .fa-large.headerlink,.btn .rst-content .fa-large.admonition-title,.btn .rst-content code.download span.fa-large:first-child,.btn .rst-content dl dt .fa-large.headerlink,.btn .rst-content h1 .fa-large.headerlink,.btn .rst-content h2 .fa-large.headerlink,.btn .rst-content h3 .fa-large.headerlink,.btn .rst-content h4 .fa-large.headerlink,.btn .rst-content h5 .fa-large.headerlink,.btn .rst-content h6 .fa-large.headerlink,.btn .rst-content p .fa-large.headerlink,.btn .rst-content table>caption .fa-large.headerlink,.btn .rst-content tt.download span.fa-large:first-child,.btn .wy-menu-vertical li button.fa-large.toctree-expand,.nav 
.fa-large.icon,.nav .fa.fa-large,.nav .rst-content .code-block-caption .fa-large.headerlink,.nav .rst-content .eqno .fa-large.headerlink,.nav .rst-content .fa-large.admonition-title,.nav .rst-content code.download span.fa-large:first-child,.nav .rst-content dl dt .fa-large.headerlink,.nav .rst-content h1 .fa-large.headerlink,.nav .rst-content h2 .fa-large.headerlink,.nav .rst-content h3 .fa-large.headerlink,.nav .rst-content h4 .fa-large.headerlink,.nav .rst-content h5 .fa-large.headerlink,.nav .rst-content h6 .fa-large.headerlink,.nav .rst-content p .fa-large.headerlink,.nav .rst-content table>caption .fa-large.headerlink,.nav .rst-content tt.download span.fa-large:first-child,.nav .wy-menu-vertical li button.fa-large.toctree-expand,.rst-content .btn .fa-large.admonition-title,.rst-content .code-block-caption .btn .fa-large.headerlink,.rst-content .code-block-caption .nav .fa-large.headerlink,.rst-content .eqno .btn .fa-large.headerlink,.rst-content .eqno .nav .fa-large.headerlink,.rst-content .nav .fa-large.admonition-title,.rst-content code.download .btn span.fa-large:first-child,.rst-content code.download .nav span.fa-large:first-child,.rst-content dl dt .btn .fa-large.headerlink,.rst-content dl dt .nav .fa-large.headerlink,.rst-content h1 .btn .fa-large.headerlink,.rst-content h1 .nav .fa-large.headerlink,.rst-content h2 .btn .fa-large.headerlink,.rst-content h2 .nav .fa-large.headerlink,.rst-content h3 .btn .fa-large.headerlink,.rst-content h3 .nav .fa-large.headerlink,.rst-content h4 .btn .fa-large.headerlink,.rst-content h4 .nav .fa-large.headerlink,.rst-content h5 .btn .fa-large.headerlink,.rst-content h5 .nav .fa-large.headerlink,.rst-content h6 .btn .fa-large.headerlink,.rst-content h6 .nav .fa-large.headerlink,.rst-content p .btn .fa-large.headerlink,.rst-content p .nav .fa-large.headerlink,.rst-content table>caption .btn .fa-large.headerlink,.rst-content table>caption .nav .fa-large.headerlink,.rst-content tt.download .btn 
span.fa-large:first-child,.rst-content tt.download .nav span.fa-large:first-child,.wy-menu-vertical li .btn button.fa-large.toctree-expand,.wy-menu-vertical li .nav button.fa-large.toctree-expand{line-height:.9em}.btn .fa-spin.icon,.btn .fa.fa-spin,.btn .rst-content .code-block-caption .fa-spin.headerlink,.btn .rst-content .eqno .fa-spin.headerlink,.btn .rst-content .fa-spin.admonition-title,.btn .rst-content code.download span.fa-spin:first-child,.btn .rst-content dl dt .fa-spin.headerlink,.btn .rst-content h1 .fa-spin.headerlink,.btn .rst-content h2 .fa-spin.headerlink,.btn .rst-content h3 .fa-spin.headerlink,.btn .rst-content h4 .fa-spin.headerlink,.btn .rst-content h5 .fa-spin.headerlink,.btn .rst-content h6 .fa-spin.headerlink,.btn .rst-content p .fa-spin.headerlink,.btn .rst-content table>caption .fa-spin.headerlink,.btn .rst-content tt.download span.fa-spin:first-child,.btn .wy-menu-vertical li button.fa-spin.toctree-expand,.nav .fa-spin.icon,.nav .fa.fa-spin,.nav .rst-content .code-block-caption .fa-spin.headerlink,.nav .rst-content .eqno .fa-spin.headerlink,.nav .rst-content .fa-spin.admonition-title,.nav .rst-content code.download span.fa-spin:first-child,.nav .rst-content dl dt .fa-spin.headerlink,.nav .rst-content h1 .fa-spin.headerlink,.nav .rst-content h2 .fa-spin.headerlink,.nav .rst-content h3 .fa-spin.headerlink,.nav .rst-content h4 .fa-spin.headerlink,.nav .rst-content h5 .fa-spin.headerlink,.nav .rst-content h6 .fa-spin.headerlink,.nav .rst-content p .fa-spin.headerlink,.nav .rst-content table>caption .fa-spin.headerlink,.nav .rst-content tt.download span.fa-spin:first-child,.nav .wy-menu-vertical li button.fa-spin.toctree-expand,.rst-content .btn .fa-spin.admonition-title,.rst-content .code-block-caption .btn .fa-spin.headerlink,.rst-content .code-block-caption .nav .fa-spin.headerlink,.rst-content .eqno .btn .fa-spin.headerlink,.rst-content .eqno .nav .fa-spin.headerlink,.rst-content .nav .fa-spin.admonition-title,.rst-content code.download 
.btn span.fa-spin:first-child,.rst-content code.download .nav span.fa-spin:first-child,.rst-content dl dt .btn .fa-spin.headerlink,.rst-content dl dt .nav .fa-spin.headerlink,.rst-content h1 .btn .fa-spin.headerlink,.rst-content h1 .nav .fa-spin.headerlink,.rst-content h2 .btn .fa-spin.headerlink,.rst-content h2 .nav .fa-spin.headerlink,.rst-content h3 .btn .fa-spin.headerlink,.rst-content h3 .nav .fa-spin.headerlink,.rst-content h4 .btn .fa-spin.headerlink,.rst-content h4 .nav .fa-spin.headerlink,.rst-content h5 .btn .fa-spin.headerlink,.rst-content h5 .nav .fa-spin.headerlink,.rst-content h6 .btn .fa-spin.headerlink,.rst-content h6 .nav .fa-spin.headerlink,.rst-content p .btn .fa-spin.headerlink,.rst-content p .nav .fa-spin.headerlink,.rst-content table>caption .btn .fa-spin.headerlink,.rst-content table>caption .nav .fa-spin.headerlink,.rst-content tt.download .btn span.fa-spin:first-child,.rst-content tt.download .nav span.fa-spin:first-child,.wy-menu-vertical li .btn button.fa-spin.toctree-expand,.wy-menu-vertical li .nav button.fa-spin.toctree-expand{display:inline-block}.btn.fa:before,.btn.icon:before,.rst-content .btn.admonition-title:before,.rst-content .code-block-caption .btn.headerlink:before,.rst-content .eqno .btn.headerlink:before,.rst-content code.download span.btn:first-child:before,.rst-content dl dt .btn.headerlink:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content p .btn.headerlink:before,.rst-content table>caption .btn.headerlink:before,.rst-content tt.download span.btn:first-child:before,.wy-menu-vertical li button.btn.toctree-expand:before{opacity:.5;-webkit-transition:opacity .05s ease-in;-moz-transition:opacity .05s ease-in;transition:opacity .05s ease-in}.btn.fa:hover:before,.btn.icon:hover:before,.rst-content 
.btn.admonition-title:hover:before,.rst-content .code-block-caption .btn.headerlink:hover:before,.rst-content .eqno .btn.headerlink:hover:before,.rst-content code.download span.btn:first-child:hover:before,.rst-content dl dt .btn.headerlink:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content p .btn.headerlink:hover:before,.rst-content table>caption .btn.headerlink:hover:before,.rst-content tt.download span.btn:first-child:hover:before,.wy-menu-vertical li button.btn.toctree-expand:hover:before{opacity:1}.btn-mini .fa:before,.btn-mini .icon:before,.btn-mini .rst-content .admonition-title:before,.btn-mini .rst-content .code-block-caption .headerlink:before,.btn-mini .rst-content .eqno .headerlink:before,.btn-mini .rst-content code.download span:first-child:before,.btn-mini .rst-content dl dt .headerlink:before,.btn-mini .rst-content h1 .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.btn-mini .rst-content p .headerlink:before,.btn-mini .rst-content table>caption .headerlink:before,.btn-mini .rst-content tt.download span:first-child:before,.btn-mini .wy-menu-vertical li button.toctree-expand:before,.rst-content .btn-mini .admonition-title:before,.rst-content .code-block-caption .btn-mini .headerlink:before,.rst-content .eqno .btn-mini .headerlink:before,.rst-content code.download .btn-mini span:first-child:before,.rst-content dl dt .btn-mini .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.rst-content h4 .btn-mini 
.headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.rst-content p .btn-mini .headerlink:before,.rst-content table>caption .btn-mini .headerlink:before,.rst-content tt.download .btn-mini span:first-child:before,.wy-menu-vertical li .btn-mini button.toctree-expand:before{font-size:14px;vertical-align:-15%}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.wy-alert{padding:12px;line-height:24px;margin-bottom:24px;background:#e7f2fa}.rst-content .admonition-title,.wy-alert-title{font-weight:700;display:block;color:#fff;background:#6ab0de;padding:6px 12px;margin:-12px -12px 12px}.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.admonition,.rst-content .wy-alert-danger.admonition-todo,.rst-content .wy-alert-danger.attention,.rst-content .wy-alert-danger.caution,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.seealso,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.wy-alert.wy-alert-danger{background:#fdf3f2}.rst-content .danger .admonition-title,.rst-content .danger .wy-alert-title,.rst-content .error .admonition-title,.rst-content .error .wy-alert-title,.rst-content .wy-alert-danger.admonition-todo .admonition-title,.rst-content .wy-alert-danger.admonition-todo .wy-alert-title,.rst-content .wy-alert-danger.admonition .admonition-title,.rst-content .wy-alert-danger.admonition .wy-alert-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .wy-alert-danger.hint 
.admonition-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.seealso .admonition-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.wy-alert.wy-alert-danger .wy-alert-title{background:#f29f97}.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .warning,.rst-content .wy-alert-warning.admonition,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.note,.rst-content .wy-alert-warning.seealso,.rst-content .wy-alert-warning.tip,.wy-alert.wy-alert-warning{background:#ffedcc}.rst-content .admonition-todo .admonition-title,.rst-content .admonition-todo .wy-alert-title,.rst-content .attention .admonition-title,.rst-content .attention .wy-alert-title,.rst-content .caution .admonition-title,.rst-content .caution .wy-alert-title,.rst-content .warning .admonition-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.admonition .admonition-title,.rst-content .wy-alert-warning.admonition .wy-alert-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.hint 
.wy-alert-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .wy-alert-warning.seealso .admonition-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.wy-alert.wy-alert-warning .wy-alert-title{background:#f0b37e}.rst-content .note,.rst-content .seealso,.rst-content .wy-alert-info.admonition,.rst-content .wy-alert-info.admonition-todo,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content .wy-alert-info.warning,.wy-alert.wy-alert-info{background:#e7f2fa}.rst-content .note .admonition-title,.rst-content .note .wy-alert-title,.rst-content .seealso .admonition-title,.rst-content .seealso .wy-alert-title,.rst-content .wy-alert-info.admonition-todo .admonition-title,.rst-content .wy-alert-info.admonition-todo .wy-alert-title,.rst-content .wy-alert-info.admonition .admonition-title,.rst-content .wy-alert-info.admonition .wy-alert-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content 
.wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-info .admonition-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.wy-alert.wy-alert-info .wy-alert-title{background:#6ab0de}.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.admonition,.rst-content .wy-alert-success.admonition-todo,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.seealso,.rst-content .wy-alert-success.warning,.wy-alert.wy-alert-success{background:#dbfaf4}.rst-content .hint .admonition-title,.rst-content .hint .wy-alert-title,.rst-content .important .admonition-title,.rst-content .important .wy-alert-title,.rst-content .tip .admonition-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.admonition-todo .admonition-title,.rst-content .wy-alert-success.admonition-todo .wy-alert-title,.rst-content .wy-alert-success.admonition .admonition-title,.rst-content .wy-alert-success.admonition .wy-alert-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.seealso 
.admonition-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.rst-content .wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.wy-alert.wy-alert-success .wy-alert-title{background:#1abc9c}.rst-content .wy-alert-neutral.admonition,.rst-content .wy-alert-neutral.admonition-todo,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content .wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.seealso,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.wy-alert.wy-alert-neutral{background:#f3f6f6}.rst-content .wy-alert-neutral.admonition-todo .admonition-title,.rst-content .wy-alert-neutral.admonition-todo .wy-alert-title,.rst-content .wy-alert-neutral.admonition .admonition-title,.rst-content .wy-alert-neutral.admonition .wy-alert-title,.rst-content .wy-alert-neutral.attention .admonition-title,.rst-content .wy-alert-neutral.attention .wy-alert-title,.rst-content .wy-alert-neutral.caution .admonition-title,.rst-content .wy-alert-neutral.caution .wy-alert-title,.rst-content .wy-alert-neutral.danger .admonition-title,.rst-content .wy-alert-neutral.danger .wy-alert-title,.rst-content .wy-alert-neutral.error .admonition-title,.rst-content .wy-alert-neutral.error .wy-alert-title,.rst-content .wy-alert-neutral.hint .admonition-title,.rst-content .wy-alert-neutral.hint .wy-alert-title,.rst-content .wy-alert-neutral.important .admonition-title,.rst-content .wy-alert-neutral.important .wy-alert-title,.rst-content .wy-alert-neutral.note .admonition-title,.rst-content .wy-alert-neutral.note .wy-alert-title,.rst-content .wy-alert-neutral.seealso .admonition-title,.rst-content .wy-alert-neutral.seealso 
.wy-alert-title,.rst-content .wy-alert-neutral.tip .admonition-title,.rst-content .wy-alert-neutral.tip .wy-alert-title,.rst-content .wy-alert-neutral.warning .admonition-title,.rst-content .wy-alert-neutral.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-neutral .admonition-title,.wy-alert.wy-alert-neutral .rst-content .admonition-title,.wy-alert.wy-alert-neutral .wy-alert-title{color:#404040;background:#e1e4e5}.rst-content .wy-alert-neutral.admonition-todo a,.rst-content .wy-alert-neutral.admonition a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.seealso a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.wy-alert.wy-alert-neutral a{color:#2980b9}.rst-content .admonition-todo p:last-child,.rst-content .admonition p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .note p:last-child,.rst-content .seealso p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.wy-alert p:last-child{margin-bottom:0}.wy-tray-container{position:fixed;bottom:0;left:0;z-index:600}.wy-tray-container li{display:block;width:300px;background:transparent;color:#fff;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,.1);padding:0 24px;min-width:20%;opacity:0;height:0;line-height:56px;overflow:hidden;-webkit-transition:all .3s ease-in;-moz-transition:all .3s ease-in;transition:all .3s ease-in}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container 
li.wy-tray-item-danger{background:#e74c3c}.wy-tray-container li.on{opacity:1;height:56px}@media screen and (max-width:768px){.wy-tray-container{bottom:auto;top:0;width:100%}.wy-tray-container li{width:100%}}button{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;cursor:pointer;line-height:normal;-webkit-appearance:button;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}.btn{display:inline-block;border-radius:2px;line-height:normal;white-space:nowrap;text-align:center;cursor:pointer;font-size:100%;padding:6px 12px 8px;color:#fff;border:1px solid rgba(0,0,0,.1);background-color:#27ae60;text-decoration:none;font-weight:400;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 2px -1px hsla(0,0%,100%,.5),inset 0 -2px 0 0 rgba(0,0,0,.1);outline-none:false;vertical-align:middle;*display:inline;zoom:1;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-webkit-transition:all .1s linear;-moz-transition:all .1s linear;transition:all .1s linear}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;outline:0}.btn:active{box-shadow:inset 0 -1px 0 0 rgba(0,0,0,.05),inset 0 2px 0 0 rgba(0,0,0,.1);padding:8px 12px 6px}.btn:visited{color:#fff}.btn-disabled,.btn-disabled:active,.btn-disabled:focus,.btn-disabled:hover,.btn:disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = 
false);filter:alpha(opacity=40);opacity:.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9!important}.btn-info:hover{background-color:#2e8ece!important}.btn-neutral{background-color:#f3f6f6!important;color:#404040!important}.btn-neutral:hover{background-color:#e5ebeb!important;color:#404040}.btn-neutral:visited{color:#404040!important}.btn-success{background-color:#27ae60!important}.btn-success:hover{background-color:#295!important}.btn-danger{background-color:#e74c3c!important}.btn-danger:hover{background-color:#ea6153!important}.btn-warning{background-color:#e67e22!important}.btn-warning:hover{background-color:#e98b39!important}.btn-invert{background-color:#222}.btn-invert:hover{background-color:#2f2f2f!important}.btn-link{background-color:transparent!important;color:#2980b9;box-shadow:none;border-color:transparent!important}.btn-link:active,.btn-link:hover{background-color:transparent!important;color:#409ad5!important;box-shadow:none}.btn-link:visited{color:#9b59b6}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:after,.wy-btn-group:before{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown-active .wy-dropdown-menu{display:block}.wy-dropdown-menu{position:absolute;left:0;display:none;float:left;top:100%;min-width:100%;background:#fcfcfc;z-index:100;border:1px solid #cfd7dd;box-shadow:0 2px 2px 0 rgba(0,0,0,.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px;cursor:pointer}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:1px solid #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search 
input[type=search]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-up .wy-dropdown-menu{bottom:100%;top:auto;left:auto;right:0}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;left:auto;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned .wy-help-inline,.wy-form-aligned input,.wy-form-aligned label,.wy-form-aligned select,.wy-form-aligned textarea{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:6px 12px 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:6px}fieldset{margin:0}fieldset,legend{border:0;padding:0}legend{width:100%;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label,legend{display:block}label{margin:0 0 
.3125em;color:#333;font-size:90%}input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}.wy-control-group{margin-bottom:24px;max-width:1200px;margin-left:auto;margin-right:auto;*zoom:1}.wy-control-group:after,.wy-control-group:before{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-required>label:after{content:" *";color:#e74c3c}.wy-control-group .wy-form-full,.wy-control-group .wy-form-halves,.wy-control-group .wy-form-thirds{padding-bottom:12px}.wy-control-group .wy-form-full input[type=color],.wy-control-group .wy-form-full input[type=date],.wy-control-group .wy-form-full input[type=datetime-local],.wy-control-group .wy-form-full input[type=datetime],.wy-control-group .wy-form-full input[type=email],.wy-control-group .wy-form-full input[type=month],.wy-control-group .wy-form-full input[type=number],.wy-control-group .wy-form-full input[type=password],.wy-control-group .wy-form-full input[type=search],.wy-control-group .wy-form-full input[type=tel],.wy-control-group .wy-form-full input[type=text],.wy-control-group .wy-form-full input[type=time],.wy-control-group .wy-form-full input[type=url],.wy-control-group .wy-form-full input[type=week],.wy-control-group .wy-form-full select,.wy-control-group .wy-form-halves input[type=color],.wy-control-group .wy-form-halves input[type=date],.wy-control-group .wy-form-halves input[type=datetime-local],.wy-control-group .wy-form-halves input[type=datetime],.wy-control-group .wy-form-halves input[type=email],.wy-control-group .wy-form-halves input[type=month],.wy-control-group .wy-form-halves input[type=number],.wy-control-group .wy-form-halves input[type=password],.wy-control-group .wy-form-halves input[type=search],.wy-control-group .wy-form-halves input[type=tel],.wy-control-group .wy-form-halves input[type=text],.wy-control-group .wy-form-halves input[type=time],.wy-control-group .wy-form-halves input[type=url],.wy-control-group 
.wy-form-halves input[type=week],.wy-control-group .wy-form-halves select,.wy-control-group .wy-form-thirds input[type=color],.wy-control-group .wy-form-thirds input[type=date],.wy-control-group .wy-form-thirds input[type=datetime-local],.wy-control-group .wy-form-thirds input[type=datetime],.wy-control-group .wy-form-thirds input[type=email],.wy-control-group .wy-form-thirds input[type=month],.wy-control-group .wy-form-thirds input[type=number],.wy-control-group .wy-form-thirds input[type=password],.wy-control-group .wy-form-thirds input[type=search],.wy-control-group .wy-form-thirds input[type=tel],.wy-control-group .wy-form-thirds input[type=text],.wy-control-group .wy-form-thirds input[type=time],.wy-control-group .wy-form-thirds input[type=url],.wy-control-group .wy-form-thirds input[type=week],.wy-control-group .wy-form-thirds select{width:100%}.wy-control-group .wy-form-full{float:left;display:block;width:100%;margin-right:0}.wy-control-group .wy-form-full:last-child{margin-right:0}.wy-control-group .wy-form-halves{float:left;display:block;margin-right:2.35765%;width:48.82117%}.wy-control-group .wy-form-halves:last-child,.wy-control-group .wy-form-halves:nth-of-type(2n){margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(odd){clear:left}.wy-control-group .wy-form-thirds{float:left;display:block;margin-right:2.35765%;width:31.76157%}.wy-control-group .wy-form-thirds:last-child,.wy-control-group .wy-form-thirds:nth-of-type(3n){margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n+1){clear:left}.wy-control-group.wy-control-group-no-input .wy-control,.wy-control-no-input{margin:6px 0 0;font-size:90%}.wy-control-no-input{display:inline-block}.wy-control-group.fluid-input input[type=color],.wy-control-group.fluid-input input[type=date],.wy-control-group.fluid-input input[type=datetime-local],.wy-control-group.fluid-input input[type=datetime],.wy-control-group.fluid-input input[type=email],.wy-control-group.fluid-input 
input[type=month],.wy-control-group.fluid-input input[type=number],.wy-control-group.fluid-input input[type=password],.wy-control-group.fluid-input input[type=search],.wy-control-group.fluid-input input[type=tel],.wy-control-group.fluid-input input[type=text],.wy-control-group.fluid-input input[type=time],.wy-control-group.fluid-input input[type=url],.wy-control-group.fluid-input input[type=week]{width:100%}.wy-form-message-inline{padding-left:.3em;color:#666;font-size:90%}.wy-form-message{display:block;color:#999;font-size:70%;margin-top:.3125em;font-style:italic}.wy-form-message p{font-size:inherit;font-style:italic;margin-bottom:6px}.wy-form-message p:last-child{margin-bottom:0}input{line-height:normal}input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;*overflow:visible}input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;border-radius:0;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}input[type=datetime-local]{padding:.34375em 
.625em}input[disabled]{cursor:default}input[type=checkbox],input[type=radio]{padding:0;margin-right:.3125em;*height:13px;*width:13px}input[type=checkbox],input[type=radio],input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}input[type=color]:focus,input[type=date]:focus,input[type=datetime-local]:focus,input[type=datetime]:focus,input[type=email]:focus,input[type=month]:focus,input[type=number]:focus,input[type=password]:focus,input[type=search]:focus,input[type=tel]:focus,input[type=text]:focus,input[type=time]:focus,input[type=url]:focus,input[type=week]:focus{outline:0;outline:thin dotted\9;border-color:#333}input.no-focus:focus{border-color:#ccc!important}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type=color][disabled],input[type=date][disabled],input[type=datetime-local][disabled],input[type=datetime][disabled],input[type=email][disabled],input[type=month][disabled],input[type=number][disabled],input[type=password][disabled],input[type=search][disabled],input[type=tel][disabled],input[type=text][disabled],input[type=time][disabled],input[type=url][disabled],input[type=week][disabled]{cursor:not-allowed;background-color:#fafafa}input:focus:invalid,select:focus:invalid,textarea:focus:invalid{color:#e74c3c;border:1px solid #e74c3c}input:focus:invalid:focus,select:focus:invalid:focus,textarea:focus:invalid:focus{border-color:#e74c3c}input[type=checkbox]:focus:invalid:focus,input[type=file]:focus:invalid:focus,input[type=radio]:focus:invalid:focus{outline-color:#e74c3c}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif}select,textarea{padding:.5em .625em;display:inline-block;border:1px solid 
#ccc;font-size:80%;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}input[readonly],select[disabled],select[readonly],textarea[disabled],textarea[readonly]{cursor:not-allowed;background-color:#fafafa}input[type=checkbox][disabled],input[type=radio][disabled]{cursor:not-allowed}.wy-checkbox,.wy-radio{margin:6px 0;color:#404040;display:block}.wy-checkbox input,.wy-radio input{vertical-align:baseline}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap;padding:6px}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{line-height:27px;padding:0 8px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:1px solid #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-switch{position:relative;display:block;height:24px;margin-top:12px;cursor:pointer}.wy-switch:before{left:0;top:0;width:36px;height:12px;background:#ccc}.wy-switch:after,.wy-switch:before{position:absolute;content:"";display:block;border-radius:4px;-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.wy-switch:after{width:18px;height:18px;background:#999;left:-3px;top:-3px}.wy-switch span{position:absolute;left:48px;display:block;font-size:12px;color:#ccc;line-height:1}.wy-switch.active:before{background:#1e8449}.wy-switch.active:after{left:24px;background:#27ae60}.wy-switch.disabled{cursor:not-allowed;opacity:.8}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type=color],.wy-control-group.wy-control-group-error 
input[type=date],.wy-control-group.wy-control-group-error input[type=datetime-local],.wy-control-group.wy-control-group-error input[type=datetime],.wy-control-group.wy-control-group-error input[type=email],.wy-control-group.wy-control-group-error input[type=month],.wy-control-group.wy-control-group-error input[type=number],.wy-control-group.wy-control-group-error input[type=password],.wy-control-group.wy-control-group-error input[type=search],.wy-control-group.wy-control-group-error input[type=tel],.wy-control-group.wy-control-group-error input[type=text],.wy-control-group.wy-control-group-error input[type=time],.wy-control-group.wy-control-group-error input[type=url],.wy-control-group.wy-control-group-error input[type=week],.wy-control-group.wy-control-group-error textarea{border:1px solid #e74c3c}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:.5em .625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980b9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) 
rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width:480px){.wy-form button[type=submit]{margin:.7em 0 0}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=text],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week],.wy-form label{margin-bottom:.3em;display:block}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0}.wy-form-message,.wy-form-message-inline,.wy-form .wy-help-inline{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width:768px){.tablet-hide{display:none}}@media screen and (max-width:480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.rst-content table.docutils,.rst-content 
table.field-list,.wy-table{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption,.wy-table caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list td,.rst-content table.field-list th,.wy-table td,.wy-table th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.rst-content table.docutils td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list td:first-child,.rst-content table.field-list th:first-child,.wy-table td:first-child,.wy-table th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content table.field-list thead,.wy-table thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th,.wy-table thead th{font-weight:700;border-bottom:2px solid #e1e4e5}.rst-content table.docutils td,.rst-content table.field-list td,.wy-table td{background-color:transparent;vertical-align:middle}.rst-content table.docutils td p,.rst-content table.field-list td p,.wy-table td p{line-height:18px}.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child,.wy-table td p:last-child{margin-bottom:0}.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min,.wy-table .wy-table-cell-min{width:1%;padding-right:0}.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:grey;font-size:90%}.wy-table-tertiary{color:grey;font-size:80%}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,.wy-table-backed,.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) 
td{background-color:#f3f6f6}.rst-content table.docutils,.wy-table-bordered-all{border:1px solid #e1e4e5}.rst-content table.docutils td,.wy-table-bordered-all td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.rst-content table.docutils tbody>tr:last-child td,.wy-table-bordered-all tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive table{margin-bottom:0!important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}a{color:#2980b9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9b59b6}html{height:100%}body,html{overflow-x:hidden}body{font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;font-weight:400;color:#404040;min-height:100%;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22!important}a.wy-text-warning:hover{color:#eb9950!important}.wy-text-info{color:#2980b9!important}a.wy-text-info:hover{color:#409ad5!important}.wy-text-success{color:#27ae60!important}a.wy-text-success:hover{color:#36d278!important}.wy-text-danger{color:#e74c3c!important}a.wy-text-danger:hover{color:#ed7669!important}.wy-text-neutral{color:#404040!important}a.wy-text-neutral:hover{color:#595959!important}.rst-content .toctree-wrapper>p.caption,h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:Roboto 
Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif}p{line-height:24px;font-size:16px;margin:0 0 24px}h1{font-size:175%}.rst-content .toctree-wrapper>p.caption,h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}.rst-content code,.rst-content tt,code{white-space:nowrap;max-width:100%;background:#fff;border:1px solid #e1e4e5;font-size:75%;padding:0 5px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#e74c3c;overflow-x:auto}.rst-content tt.code-large,code.code-large{font-size:90%}.rst-content .section ul,.rst-content .toctree-wrapper ul,.rst-content section ul,.wy-plain-list-disc,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.rst-content .section ul li,.rst-content .toctree-wrapper ul li,.rst-content section ul li,.wy-plain-list-disc li,article ul li{list-style:disc;margin-left:24px}.rst-content .section ul li p:last-child,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li p:last-child,.rst-content .toctree-wrapper ul li ul,.rst-content section ul li p:last-child,.rst-content section ul li ul,.wy-plain-list-disc li p:last-child,.wy-plain-list-disc li ul,article ul li p:last-child,article ul li ul{margin-bottom:0}.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,.rst-content section ul li li,.wy-plain-list-disc li li,article ul li li{list-style:circle}.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,.rst-content section ul li li li,.wy-plain-list-disc li li li,article ul li li li{list-style:square}.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,.rst-content section ul li ol li,.wy-plain-list-disc li ol li,article ul li ol li{list-style:decimal}.rst-content .section ol,.rst-content .section ol.arabic,.rst-content .toctree-wrapper ol,.rst-content .toctree-wrapper ol.arabic,.rst-content 
section ol,.rst-content section ol.arabic,.wy-plain-list-decimal,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.rst-content .section ol.arabic li,.rst-content .section ol li,.rst-content .toctree-wrapper ol.arabic li,.rst-content .toctree-wrapper ol li,.rst-content section ol.arabic li,.rst-content section ol li,.wy-plain-list-decimal li,article ol li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs>li{display:inline-block;padding-top:5px}.wy-breadcrumbs>li.wy-breadcrumbs-aside{float:right}.rst-content .wy-breadcrumbs>li code,.rst-content .wy-breadcrumbs>li tt,.wy-breadcrumbs>li .rst-content tt,.wy-breadcrumbs>li code{all:inherit;color:inherit}.breadcrumb-item:before{content:"/";color:#bbb;font-size:13px;padding:0 6px 0 3px}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs 
li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a 
button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover 
button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 
13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search>a:hover{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a 
img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.version{margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 
3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions 
a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions 
.rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}.rst-content 
.toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content 
pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso .last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section 
ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul 
li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 
.headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption .headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content p a{overflow-wrap:anywhere}.rst-content .wy-table td p,.rst-content .wy-table td ul,.rst-content .wy-table th p,.rst-content .wy-table th ul,.rst-content table.docutils td p,.rst-content table.docutils td ul,.rst-content table.docutils th p,.rst-content table.docutils th ul,.rst-content table.field-list td p,.rst-content table.field-list td ul,.rst-content table.field-list th p,.rst-content table.field-list th ul{font-size:inherit}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar 
ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .citation-reference>span.fn-bracket,.rst-content .footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto 
auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content 
dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote 
span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content .wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils 
td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) 
code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 
.rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel,.rst-content .menuselection{font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .guilabel,.rst-content .menuselection{border:1px solid #7fbbe3;background:#e7f2fa}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content 
.sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file diff --git a/1.11.0rc1/_static/doctools.js b/1.11.0rc1/_static/doctools.js new file mode 100644 index 000000000..4d67807d1 --- /dev/null +++ b/1.11.0rc1/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. 
+ * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? 
singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + 
event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/1.11.0rc1/_static/documentation_options.js b/1.11.0rc1/_static/documentation_options.js new file mode 100644 index 000000000..dd56e8076 --- /dev/null +++ b/1.11.0rc1/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '1.11.0rc1', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/1.11.0rc1/_static/file.png b/1.11.0rc1/_static/file.png new file mode 100644 index 000000000..a858a410e Binary files /dev/null and b/1.11.0rc1/_static/file.png differ diff --git a/1.11.0rc1/_static/js/badge_only.js b/1.11.0rc1/_static/js/badge_only.js new file mode 100644 index 000000000..526d7234b --- /dev/null +++ b/1.11.0rc1/_static/js/badge_only.js @@ -0,0 +1 @@ +!function(e){var t={};function r(n){if(t[n])return t[n].exports;var o=t[n]={i:n,l:!1,exports:{}};return e[n].call(o.exports,o,o.exports,r),o.l=!0,o.exports}r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof 
Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)r.d(n,o,function(t){return e[t]}.bind(null,o));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=4)}({4:function(e,t,r){}}); \ No newline at end of file diff --git a/1.11.0rc1/_static/js/html5shiv-printshiv.min.js b/1.11.0rc1/_static/js/html5shiv-printshiv.min.js new file mode 100644 index 000000000..2b43bd062 --- /dev/null +++ b/1.11.0rc1/_static/js/html5shiv-printshiv.min.js @@ -0,0 +1,4 @@ +/** +* @preserve HTML5 Shiv 3.7.3-pre | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed +*/ +!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function 
i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var 
f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/1.11.0rc1/_static/js/html5shiv.min.js b/1.11.0rc1/_static/js/html5shiv.min.js new file mode 100644 index 000000000..cd1c674f5 --- /dev/null +++ b/1.11.0rc1/_static/js/html5shiv.min.js @@ 
-0,0 +1,4 @@ +/** +* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed +*/ +!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in 
a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/1.11.0rc1/_static/js/theme.js b/1.11.0rc1/_static/js/theme.js new file mode 100644 index 000000000..1fddb6ee4 --- /dev/null +++ b/1.11.0rc1/_static/js/theme.js @@ -0,0 +1 @@ +!function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof 
window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" 
+ V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" + v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = 
re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/1.11.0rc1/_static/minus.png b/1.11.0rc1/_static/minus.png new file mode 100644 index 000000000..d96755fda Binary files /dev/null and b/1.11.0rc1/_static/minus.png differ diff --git a/1.11.0rc1/_static/plus.png b/1.11.0rc1/_static/plus.png new file mode 100644 index 000000000..7107cec93 Binary files /dev/null and b/1.11.0rc1/_static/plus.png differ diff --git a/1.11.0rc1/_static/pygments.css b/1.11.0rc1/_static/pygments.css new file mode 100644 index 000000000..0d49244ed --- /dev/null +++ b/1.11.0rc1/_static/pygments.css @@ -0,0 +1,75 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; 
font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #333333 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ +.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: 
#60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #208050 } /* Literal.Number.Bin */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sa { color: #4070a0 } /* Literal.String.Affix */ +.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #06287e } /* Name.Function.Magic */ 
+.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/1.11.0rc1/_static/searchtools.js b/1.11.0rc1/_static/searchtools.js new file mode 100644 index 000000000..92da3f8b2 --- /dev/null +++ b/1.11.0rc1/_static/searchtools.js @@ -0,0 +1,619 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => 
responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms, anchor) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + "Search finished, found ${resultCount} page(s) matching the search query." + ).replace('${resultCount}', resultCount); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; +// Helper function used by query() to order search results. +// Each input is an array of [docname, title, anchor, descr, score, filename]. +// Order the results by score (in opposite order of appearance, since the +// `_displayNextItem` function uses pop() to retrieve items) and then alphabetically. +const _orderResultsByScoreThenName = (a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? 
-1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 1 : -1; +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. + * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString, anchor) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + for (const removalQuery of [".headerlinks", "script", "style"]) { + htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() }); + } + if (anchor) { + const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`); + if (anchorContent) return anchorContent.textContent; + + console.warn( + `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.` + ); + } + + // if anchor not specified or not found, fall back to main content + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent) return docContent.textContent; + + console.warn( + "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template." 
+ ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the 
browser was quick! + if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + _parseQuery: (query) => { + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + return [query, searchTerms, excludedTerms, highlightTerms, objectTerms]; + }, + + /** + * execute search (requires search index to be loaded) + */ + _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // Collect multiple result groups to be sorted separately and then ordered. + // Each is an array of [docname, title, anchor, descr, score, filename]. 
+ const normalResults = []; + const nonMainIndexResults = []; + + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase().trim(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + let score = Math.round(100 * queryLower.length / title.length) + normalResults.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id, isMain] of foundEntries) { + const score = Math.round(100 * queryLower.length / entry.length); + const result = [ + docNames[file], + titles[file], + id ? "#" + id : "", + null, + score, + filenames[file], + ]; + if (isMain) { + normalResults.push(result); + } else { + nonMainIndexResults.push(result); + } + } + } + } + + // lookup as object + objectTerms.forEach((term) => + normalResults.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) { + normalResults.forEach((item) => (item[4] = Scorer.score(item))); + nonMainIndexResults.forEach((item) => (item[4] = Scorer.score(item))); + } + + // Sort each group of results by score and then alphabetically by name. + normalResults.sort(_orderResultsByScoreThenName); + nonMainIndexResults.sort(_orderResultsByScoreThenName); + + // Combine the result groups in (reverse) order. 
+ // Non-main index entries are typically arbitrary cross-references, + // so display them after other results. + let results = [...nonMainIndexResults, ...normalResults]; + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + return results.reverse(); + }, + + query: (query) => { + const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query); + const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. 
last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + 
if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + if (!terms.hasOwnProperty(word)) { + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + } + if (!titleTerms.hasOwnProperty(word)) { + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: titleTerms[term], score: Scorer.partialTitle }); + }); + } + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (!fileMap.has(file)) fileMap.set(file, [word]); + else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. 
+ const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords, anchor) => { + const text = Search.htmlToText(htmlText, anchor); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/1.11.0rc1/_static/sphinx_highlight.js b/1.11.0rc1/_static/sphinx_highlight.js new file mode 100644 index 000000000..8a96c69a1 --- /dev/null +++ b/1.11.0rc1/_static/sphinx_highlight.js @@ -0,0 +1,154 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. 
+ */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. + */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. 
+ */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + 
if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query. + */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/1.11.0rc1/authors.html b/1.11.0rc1/authors.html new file mode 100644 index 000000000..e3dce1aef --- /dev/null +++ b/1.11.0rc1/authors.html @@ -0,0 +1,162 @@ + + + + + + + Main Authors — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Main Authors

+
    +
  • Xylar Asay-Davis

  • +
  • Milena Veneziani

  • +
  • Phillip J. Wolfram

  • +
+
+
+

Contributors

+
    +
  • Sterling Baldwin

  • +
  • Riley X. Brady

  • +
  • Darin Comeau

  • +
  • Charles Doutriaux

  • +
  • Jeremy Fyke

  • +
  • Matthew Hoffman

  • +
  • Joseph Kennedy

  • +
  • Mark Petersen

  • +
  • Stephen Price

  • +
  • Kevin Rosa

  • +
  • Greg Streletz

  • +
  • Adrian Turner

  • +
  • Luke Van Roekel

  • +
+

For a list of all the contributions: +https://github.com/MPAS-Dev/MPAS-Analysis/graphs/contributors

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/design_docs/analysis_task_template.html b/1.11.0rc1/design_docs/analysis_task_template.html new file mode 100644 index 000000000..f2b312e7f --- /dev/null +++ b/1.11.0rc1/design_docs/analysis_task_template.html @@ -0,0 +1,837 @@ + + + + + + + Analysis Task Template — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Analysis Task Template

+

+Xylar Asay-Davis
+date: 2017/03/08
+

+

Summary

A new template python file for analysis tasks will be added to the repository. +The template will include a list of functions that each analysis task must +implement and example syntax for docstrings used to commend both the full +analysis task and the individual functions.

+

The existing analysis tasks will be updated to be consistent with this template. +The run_analysis.py driver script will also be updated to work with the template.

+

The template is needed to:

+
    +
  1. serve as a starting point for writing new analysis tasks

  2. +
  3. ensure that tasks implement a standard set of +functions, making it easier to perform actions (such as checking whether +the task should be run, checking for required model and observations files, +purging files from a previous analysis run, and running the analysis) on +each analysis task in sequence (and, in the future, in parallel)

  4. +
  5. demonstrate the syntax and style of docstrings required to comment/document +each task and each function

  6. +
+

Requirements

Requirement: Template for Analysis Tasks
+Date last modified: 2017/03/08
+Contributors: Xylar Asay-Davis +

The template should include each function that each analysis task must implement +and example docstring both for the task as a whole and for each funciton.

+

Requirement: Validation within Analysis Tasks
+Date last modified: 2017/03/08
+Contributors: Xylar Asay-Davis +

Validation, such as checking config options or adding new ones if they are missing, +or checking if required data files are present, should be performed within a +function in each task (rather than in run_analysis.py, as is sometimes the current +case).

+

Requirement: Analysis Continues even when Analysis Task Fails
+Date last modified: 2017/03/08
+Contributors: Xylar Asay-Davis +

If validation fails, an error message should be printed but other analysis +tasks should be allowed to run. Similarly, if a given analysis task raises +an exception, the error and stack trace should be printed but other analysis +tasks should still be run.

+

Requirement: List of Tasks to Perform
+Date last modified: 2017/03/16
+Contributors: Xylar Asay-Davis +

There should be a single place where new tasks are added to run_analysis.py, as +is presently the case. Yet, there should be a way to create a list of tasks to be +performed and later determine whether, when and how those tasks are to be run. +This capability also allows for operations like purging files from a prevous run +to be added in the future. The capability is also requried to allow for later task +parallelism. Currently, a task module is imported, there is a check to see if that +task should be run, and the task is performed in immediate sequence.

+

Algorithmic Formulations (optional)

Design solution: Template for Analysis Tasks
+Date last modified: 2017/03/16
+Contributors: Xylar Asay-Davis +

A base class, AnalysisTask will be added under shared/analysis_task.py. +This class will include methods:

+
    +
  • __init__: construct the task, including assigning variable and streams maps +(optional).

  • +
  • +
    setup_and_checkperforms common tasks to all analysis, such as reading

    namelist and streams files

    +
    +
    +
  • +
  • run: the base class version does nothing

  • +
+

The template will show how to set up a child class that decends from AnalysisTask. +It will show examples of:

+
    +
  • +
    __init__construct the task, including assigning the taskName, componentName

    and categories of the analysis, and calling the base class’s constructor.

    +
    +
    +
  • +
  • +
    setup_and_check: first, calls the base class’ version of setup_and_check, then,

    determines if the configuration is valid for running this task (e.g. if +necessary files and config options are present)

    +
    +
    +
  • +
  • run: runs the analysis task

  • +
+

The template will be located at:

+
mpas_analysis/
+    - analysis_task_template.py
+
+
+

That is, it is the only file (other than __init__.py) in the base of the +mpas_analysis directory, making it easy to find. This way, it will be the first +file most developers see when they look in mpas_analysis itself.

+

A reference to the template as the starting point for new developers will be added +to the readme.

+

Design solution: Validation within Analysis Tasks
+Date last modified: 2017/03/16
+Contributors: Xylar Asay-Davis +

The setup_and_check method within each analysis task can be used to determine if +necessary input files are present and/or if config options are set as expected. +The template will provide examples of doing this.

+

Existing checks for missing observations files in run_analysis.py will be +moved to individual analyses. This will make clearer which checks correspond +with which analysis tasks and will make clearer where such checks should be added +within future analysis tasks. Similarly, the addition of the startDate and +endDate config options will be moved to the corresponding analysis tasks.

+

Design solution: Analysis Continues even when Analysis Task Fails
+Date last modified: 2017/03/08
+Contributors: Xylar Asay-Davis +

A try/except will be used around both setup_and_check and run calls to make sure +an error message and stack trace are printed, but execution will continue +for other tasks.

+

Design solution: List of Tasks to Perform
+Date last modified: 2017/03/16
+Contributors: Xylar Asay-Davis +

By having a common base class for all analysis tasks, +each task can be checked to see if it should be run based on +the generate command-line or config option. If so, its setup_and_check +function will be run to make sure the configuration is right (and will +print a warning if not). If setup_and_check passes, the analysis can be added +to a list of functions to be run. Later, a loop through the list +can be used to run each analysis.

+

Some analysis tasks require extra arguments (e.g. the field to be +analyzed in the case of ocean.modelvsobs and the streams and variable +maps for all analysis tasks). These arguments will be passed to __init__ +and stored as member variables that can later be accessed via self.<varName>.

+

Design and Implementation

Implementation is in the branch: https://github.com/xylar/MPAS-Analysis/tree/analysis_task_template

+

Implementation: Template for Analysis Tasks
+Date last modified: 2017/03/16
+Contributors: Xylar Asay-Davis +

Here is the suggested base class AnalysisTask in full, intended to make discussion +of individual lines easier:

+
"""
+Defines the base class for analysis tasks.
+
+Authors
+-------
+Xylar Asay-Davis
+
+Last Modified
+-------------
+03/16/2017
+"""
+
+from ..shared.io import NameList, StreamsFile
+from ..shared.io.utility import build_config_full_path, make_directories
+
+
+class AnalysisTask(object):  # {{{
+    """
+    The base class for analysis tasks.
+
+    Authors
+    -------
+    Xylar Asay-Davis
+
+    Last Modified
+    -------------
+    03/16/2017
+    """
+    def __init__(self, config, streamMap=None, variableMap=None):  # {{{
+        """
+        Construct the analysis task.
+
+        Individual tasks (children classes of this base class) should first
+        call this method to perform basic initialization, then, define the
+        `taskName`, `componentName` and list of `categories` for the task.
+
+        Parameters
+        ----------
+        config :  instance of MpasAnalysisConfigParser
+            Contains configuration options
+
+        streamMap : dict, optional
+            A dictionary of MPAS-O stream names that map to their mpas_analysis
+            counterparts.
+
+        variableMap : dict, optional
+            A dictionary of MPAS-O variable names that map to their
+            mpas_analysis counterparts.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+
+        Last Modified
+        -------------
+        03/16/2017
+        """
+        self.config = config
+        self.streamMap = streamMap
+        self.variableMap = variableMap  # }}}
+
+    def setup_and_check(self):  # {{{
+        """
+        Perform steps to set up the analysis (e.g. reading namelists and
+        streams files).
+
+        After this call, the following member variables are set:
+            self.inDirectory : the base input directory
+            self.plotsDirectory : the directory for writing plots (which is
+                also created if it doesn't exist)
+            self.namelist : the namelist reader
+            self.streams : the streams file reader
+            self.calendar : the name of the calendar ('gregorian' or
+                'gregorian_noleap')
+
+        Individual tasks (children classes of this base class) should first
+        call this method to perform basic setup, then, check whether the
+        configuration is correct for a given analysis and perform additional,
+        analysis-specific setup.  For example, this function could check if
+        necessary observations and other data files are found, then, determine
+        the list of files to be read when the analysis is run.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+
+        Last Modified
+        -------------
+        03/16/2017
+        """
+        # read parameters from config file
+        self.inDirectory = self.config.get('input', 'baseDirectory')
+        self.plotsDirectory = build_config_full_path(self.config, 'output',
+                                                     'plotsSubdirectory')
+        namelistFileName = self.config.get('input', 'oceanNamelistFileName')
+        self.namelist = NameList(namelistFileName, path=self.inDirectory)
+
+        streamsFileName = self.config.get('input', 'oceanStreamsFileName')
+        self.streams = StreamsFile(streamsFileName,
+                                   streamsdir=self.inDirectory)
+
+        self.calendar = self.namelist.get('config_calendar_type')
+
+        make_directories(self.plotsDirectory)
+        # }}}
+
+    def run(self):  # {{{
+        """
+        Runs the analysis task.
+
+        Individual tasks (children classes of this base class) should first
+        call this method to perform any common steps in an analysis task,
+        then, perform the steps required to run the analysis task.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+
+        Last Modified
+        -------------
+        03/16/2017
+        """
+        return  # }}}
+
+    def check_generate(self):
+        # {{{
+        """
+        Determines if this analysis should be generated, based on the
+        `generate` config option and `taskName`, `componentName` and
+        `categories`.
+
+        Individual tasks do not need to create their own versions of this
+        function.
+
+        Returns
+        -------
+        generate : bool
+            Whether or not this task should be run.
+
+        Raises
+        ------
+        ValueError : If one of `self.taskName`, `self.componentName`
+            or `self.categories` has not been set.
+
+        Authors
+        -------
+        Xylar Asay-Davis
+
+        Last Modified
+        -------------
+03/16/2017
+        """
+
+        for memberName in ['taskName', 'componentName', 'categories']:
+            if not hasattr(self, memberName):
+                raise ValueError('Analysis tasks must define self.{} in their '
+                                 '__init__ method.'.format(memberName))
+
+        if (not isinstance(self.categories, list) and
+                self.categories is not None):
+            raise ValueError('Analysis tasks\'s member self.categories '
+                             'must be None or a list of strings.')
+
+        config = self.config
+        generateList = config.getExpression('output', 'generate')
+        generate = False
+        for element in generateList:
+            if '_' in element:
+                (prefix, suffix) = element.split('_', 1)
+            else:
+                prefix = element
+                suffix = None
+
+            allSuffixes = [self.componentName]
+            if self.categories is not None:
+                allSuffixes = allSuffixes + self.categories
+            noSuffixes = [self.taskName] + allSuffixes
+            if prefix == 'all':
+                if (suffix in allSuffixes) or (suffix is None):
+                    generate = True
+            elif prefix == 'no':
+                if suffix in noSuffixes:
+                    generate = False
+            elif element == self.taskName:
+                generate = True
+
+        return generate  # }}}
+# }}}
+
+# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python
+
+
+

And here is the suggested template in full:

+
"""
+This is an example analysis task to be used as a template for new tasks.
+It should be copied into one of the component folders (`ocean`, `sea_ice`,
+`land_ice`, etc.) and modified as needed.
+
+Don't forget to remove this docstring. (It's not needed.)
+
+Authors
+-------
+Xylar Asay-Davis
+
+Last Modified
+-------------
+03/16/2017
+"""
+
+# import python modules here
+
+# import mpas_analysis module here (those with relative paths starting with
+# dots)
+from ..shared.analysis_task import AnalysisTask
+
+
+class MyTask(AnalysisTask):  # {{{
+    """
+    <Describe the analysis task here.>
+
+    Authors
+    -------
+    <List of authors>
+
+    Last Modified
+    -------------
+    <MM/DD/YYYY>
+    """
+    def __init__(self, config, streamMap=None, variableMap=None,
+                 myArg='myDefaultValue'):  # {{{
+        """
+        Construct the analysis task.
+
+        Parameters
+        ----------
+        config :  instance of MpasAnalysisConfigParser
+            Contains configuration options
+
+        streamMap : dict, optional
+            A dictionary of MPAS-O stream names that map to their mpas_analysis
+            counterparts.
+
+        variableMap : dict, optional
+            A dictionary of MPAS-O variable names that map to their
+            mpas_analysis counterparts.
+
+        myArg : str, optional
+            <Describe the arg>
+
+        Authors
+        -------
+        <List of authors>
+
+        Last Modified
+        -------------
+        <MM/DD/YYYY>
+        """
+        # first, call the constructor from the base class (AnalysisTask)
+        super(MyTask, self).__init__(config, streamMap, variableMap)
+
+        # next, name the task, the component (ocean, sea_ice, etc.) and the
+        # categories (if any) of the component ('timeSeries', 'climatologyMap'
+        # etc.)
+        self.taskName = 'myTask'
+        self.componentName = 'component'
+        self.categories = ['category1', 'category2']
+
+        # then, store any additional arguments for use later on.  These would
+        # likely include things like the name of a field, region, month,
+        # season, etc. to be analyzed so that the same subclass of AnalysisTask
+        # can perform several different tasks (potentially in parallel)
+        self.myArg = myArg
+        # }}}
+
+    def setup_and_check(self):  # {{{
+        """
+        Perform steps to set up the analysis and check for errors in the setup.
+
+        Raises
+        ------
+        ValueError: if myArg has an invalid value
+
+        Authors
+        -------
+        <List of authors>
+
+        Last Modified
+        -------------
+        <MM/DD/YYYY>
+        """
+
+        # first, call setup_and_check from the base class (AnalysisTask),
+        # which will perform some common setup, including storing:
+        #   self.inDirectory, self.plotsDirectory, self.namelist, self.streams
+        #   self.calendar
+        super(MyTask, self).setup_and_check()
+
+        # then, perform additional checks specific to this analysis
+        possibleArgs = ['blah', 'thing', 'stuff']
+        if self.myArg not in possibleArgs:
+            # Note: we're going to allow a long line in this case because it
+            # would be confusing to break up the string (even though it
+            # violates the PEP8 standard)
+            raise ValueError('MyTask must be constructed with argument myArg having one of the values\n'
+                             '{}.'.format(possibleArgs))
+
+        section = 'MyTask'
+        startDate = '{:04d}-01-01_00:00:00'.format(
+            self.config.getint(section, 'startYear'))
+        if not self.config.has_option(section, 'startDate'):
+            self.config.set(section, 'startDate', startDate)
+        endDate = '{:04d}-12-31_23:59:59'.format(
+            self.config.getint(section, 'endYear'))
+        if not self.config.has_option(section, 'endDate'):
+            self.config.set(section, 'endDate', endDate)
+
+        # }}}
+
+    def run(self):  # {{{
+        """
+        Runs the analysis task.
+
+        Individual tasks (children classes of this base class) should first
+        call this method to perform any common steps in an analysis task,
+        then, perform the steps required to run the analysis task.
+
+        Authors
+        -------
+        <List of authors>
+
+        Last Modified
+        -------------
+        <MM/DD/YYYY>
+        """
+
+        # here is where the main "meat" of the analysis task goes
+
+        self._my_sub_task('someText', arg2='differentText')
+        return
+        # }}}
+
+    # here is where you add helper methods that are meant to be non-public
+    # (they start with an underscore), meaning you don't expect anyone to
+    # access them outside of this file.  Typically you won't put as much in
+    # the docstring as you would for a public function or method.
+    #
+    # you can either pass arguments (with or without defaults) or you can
+    # "save" arguments as member variables of `self` and then get them back
+    # (like `self.myArg` here).
+    def _my_sub_task(self, arg1, arg2=None):  # {{{
+        """
+        <Performs my favorite subtask>
+        """
+
+        # perform the task
+        print 'myArg:', self.myArg
+        print 'arg1:', arg1
+        if arg2 is not None:
+            print 'arg2:', arg2
+        # }}}
+
+# }}}
+
+# vim: foldmethod=marker ai ts=4 sts=4 et sw=4 ft=python
+
+
+

Implementation: Validation within Analysis Tasks
+Date last modified: 2017/03/08
+Contributors: Xylar Asay-Davis +

Here is an example (from ocean.climatology_map.ClimatologyMap) of what +the new __init__ and setup_and_check methods look like:

+
def __init__(self, config, streamMap=None, variableMap=None,
+             fieldName=None):  # {{{
+    """
+    Construct the analysis task.
+
+    Parameters
+    ----------
+    config :  instance of MpasAnalysisConfigParser
+        Contains configuration options
+
+    streamMap : dict, optional
+        A dictionary of MPAS-O stream names that map to their mpas_analysis
+        counterparts.
+
+    variableMap : dict, optional
+        A dictionary of MPAS-O variable names that map to their
+        mpas_analysis counterparts.
+
+    fieldName : {'sst', 'mld', 'sss'}
+        The name of the field to be analyzed
+
+    Raises
+    ------
+    ValueError : if `fieldName` is not provided or is not one of the
+        supported values
+
+    Authors
+    -------
+    Xylar Asay-Davis
+
+    Last Modified
+    -------------
+    03/16/2017
+    """
+    # first, call the constructor from the base class (AnalysisTask)
+    AnalysisTask.__init__(self, config, streamMap, variableMap)
+
+    upperFieldNames = {'sst': 'SST',
+                       'mld': 'MLD',
+                       'sss': 'SSS'
+                       # 'nino34': 'Nino34',
+                       # 'mht': 'MHT'
+                       # 'moc': 'MOC'
+                       }
+
+    if fieldName is None:
+        raise ValueError('fieldName must be supplied.')
+    if fieldName not in upperFieldNames.keys():
+        raise ValueError('fieldName must be one of {}.'.format(
+            upperFieldNames.keys()))
+
+    self.fieldName = fieldName
+    self.upperFieldName = upperFieldNames[fieldName]
+
+    # name the task, component and category
+    self.taskName = 'climatologyMap{}'.format(self.upperFieldName)
+    self.componentName = 'ocean'
+    self.categories = ['climatologyMap', fieldName]
+
+    # }}}
+
+def setup_and_check(self):  # {{{
+    """
+    Perform steps to set up the analysis and check for errors in the setup.
+
+    Raises
+    ------
+    OSError
+        If files are not present
+
+    Authors
+    -------
+    Xylar Asay-Davis
+
+    Last Modified
+    -------------
+    03/16/2017
+    """
+    config = self.config
+    section = 'climatology'
+    startDate = '{:04d}-01-01_00:00:00'.format(
+        config.getint(section, 'startYear'))
+    if not config.has_option(section, 'startDate'):
+        config.set(section, 'startDate', startDate)
+    endDate = '{:04d}-12-31_23:59:59'.format(
+        config.getint(section, 'endYear'))
+    if not config.has_option(section, 'endDate'):
+        config.set(section, 'endDate', endDate)
+
+    return  # }}}
+
+
+

Much of this code has been taken out of run_analysis.py, simplifying and clarifying +the code.

+

Implementation: Analysis Continues even when Analysis Task Fails
+Date last modified: 2017/03/16
+Contributors: Xylar Asay-Davis +

Calls to check and run methods in run_analysis.py are inside of +try/except blocks, which catch the exceptions and print the stack trace +but don’t cause the code to exit.

+
try:
+    analysisTask.setup_and_check()
+...
+except:
+    traceback.print_exc(file=sys.stdout)
+    print "ERROR: analysis module {} failed during check and " \
+        "will not be run".format(analysisTask.taskName)
+
+...
+
+try:
+    analysisTask.run()
+except:
+    traceback.print_exc(file=sys.stdout)
+    print "ERROR: analysis module {} failed during run".format(
+        analysisTask.taskName)
+
+
+

Implementation: List of Tasks to Perform
+Date last modified: 2017/03/16
+Contributors: Xylar Asay-Davis +

The tasks are imported and added to an analysis list as follows:

+
analyses = []
+
+# Ocean Analyses
+from mpas_analysis.ocean.time_series_ohc import TimeSeriesOHC
+analyses.append(TimeSeriesOHC(config, streamMap=oceanStreamMap,
+                              variableMap=oceanVariableMap))
+from mpas_analysis.ocean.time_series_sst import TimeSeriesSST
+analyses.append(TimeSeriesSST(config, streamMap=oceanStreamMap,
+                              variableMap=oceanVariableMap))
+
+from mpas_analysis.ocean.climatology_map import ClimatologyMap \
+    as ClimatologyMapOcean
+for fieldName in ['sst', 'mld', 'sss']:
+    analyses.append(ClimatologyMapOcean(config, streamMap=oceanStreamMap,
+                                        variableMap=oceanVariableMap,
+                                        fieldName=fieldName))
+
+# Sea Ice Analyses
+from mpas_analysis.sea_ice.timeseries import TimeSeries as TimeSeriesSeaIce
+analyses.append(TimeSeriesSeaIce(config, streamMap=seaIceStreamMap,
+                                 variableMap=seaIceVariableMap))
+from mpas_analysis.sea_ice.climatology_map import ClimatologyMap \
+    as ClimatologyMapSeaIce
+analyses.append(ClimatologyMapSeaIce(config, streamMap=seaIceStreamMap,
+                                     variableMap=seaIceVariableMap))
+
+
+

The analyses list is a list of instances of subclasses of AnalysisTask.

+

Subsequent calls to analysis functions can loop over analyses, as in the following +example for calling run:

+
# run each analysis task
+for analysisTask in analyses:
+    try:
+        analysisTask.run()
+    except:
+        traceback.print_exc(file=sys.stdout)
+        print "ERROR: analysis module {} failed during run".format(
+            analysisTask.taskName)
+
+
+

Testing

+

Testing and Validation: Template for Analysis Tasks
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

Ideally, the test here would be having another developer create an analysis task based on this +template. Realistically, this won’t happen before the template gets merged into the repository, +so I’m counting on feedback from other developers to “test” the template before it gets merged, +and there will probably need to be subsequent PRs to make changes as issues arise.

+

Testing and Validation: Validation within Analysis Tasks
+Date last modified: 2017/03/16
+Contributors: Xylar Asay-Davis +

I have added setup_and_check functions within each analysis task. So far, these check for only a subset of +the necessary configuration and input files, and could (and should) be expanded in the future.

+

I have verified that all setup_and_check routines fail when the path to their respective observations and/or +preprocessed reference run is not found.

+

Testing and Validation: Analysis Continues even when Analysis Task Fails
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

I have verified using the GMPAS_QU240 test case and by deliberately introducing errors in the file +paths that an error in a given analysis task (either during setup_and_check or run) causes that task to +print a stack trace and an error message but does not prevent other tasks from running.

+

Testing and Validation: List of Tasks to Perform
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

As stated in implementation, there is a single place in run_analysis.py where a developer would add +her or his task to the analysis. I think this requirement has been satisfied without requiring testing.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/design_docs/config_file_reorganization.html b/1.11.0rc1/design_docs/config_file_reorganization.html new file mode 100644 index 000000000..4cd6b03ed --- /dev/null +++ b/1.11.0rc1/design_docs/config_file_reorganization.html @@ -0,0 +1,237 @@ + + + + + + + Config File Reorganization — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Config File Reorganization

+

+Xylar Asay-Davis
+date: 2017/01/29
+

+

Summary

+ +This document describes various efforts to clean up the structure of the MPAS-Analysis config file. The idea is to create a template config file that will replace `config.analysis` as well as a number of example config files designed to make use of various MPAS and ACME runs on various machines. The reorganization should make the analysis easier for users to modify and run. + +

Requirements

Requirement: a simple way of turning on and off individual analysis modules
+Date last modified: 2017/01/29
+Contributors: Xylar Asay-Davis +

There should be a simple, intuitive method for turning on and off individual analysis modules (e.g. ocean/ohc_timeseries). This should replace the current approach of having a boolean generate flag for each analysis module in a separate config section. Preferably, there should be an equivalent method for turning on and off analysis modules from the command line that overrides that in the config file.

+

Requirement: there should be a simplified template for config files
+Date last modified: 2017/02/01
+Contributors: Xylar Asay-Davis +

The current example config file should be made into a general template. Simplifications should be made to the template so that it can more easily and intuitively be modified for several analyses. Example config files should also be added for analyzing several existing runs on several different machines.

+

Requirement: removal of ACME specific config options
+Date last modified: 2017/02/01
+Contributors: Xylar Asay-Davis +

To the extent possible, ACME-specific config options such as casename and ref_casename_v0 should be generalized in a way that is appropriate not just for ACME runs but also for any other runs involving the MPAS components we support.

+

Requirement: consistent section and option names
+Date last modified: 2017/02/01
+Contributors: Xylar Asay-Davis +

A consistent convention of capitalization and underscores should be used throughout the config file.

+

Design and Implementation

Implementation: a simple way of turning on and off individual analysis modules
+Date last modified: 2017/02/02
+Contributors: Xylar Asay-Davis +

Implementation of the config.template file can be found here.

+

The following comment describes the planned implementation in the config file.

+
# a list of analyses to generate.  Valid names are:
+#   'timeSeriesOHC', 'timeSeriesSST', 'regriddedSST',
+#   'regriddedSSS', 'regriddedMLD', 'timeSeriesSeaIceAreaVol',
+#   'regriddedSeaIceConcThick'
+# the following shortcuts exist:
+#   'all' -- all analyses will be run
+#   'all_timeSeries' -- all time-series analyses will be run
+#   'all_regriddedHorizontal' -- all analyses involving regridded horizontal
+    #                                fields will be run
+#   'all_ocean' -- all ocean analyses will be run
+#   'all_seaIce' -- all sea-ice analyses will be run
+#   'no_timeSeriesOHC' -- skip 'timeSeriesOHC' (and similarly with the
+#                             other analyses).
+#   'no_ocean', 'no_timeSeries', etc. -- in analogy to 'all_*', skip the
+#                                            given category of analysis
+# an equivalent syntax can be used on the command line to override this
+# option:
+#    ./run_analysis.py config.analysis --generate \
+#         all,no_ocean,all_timeSeries
+generate = ['all']
+
+
+

Where there are conflicts between items in the generate list, successive items will override earlier items. For example, generate = ['all', 'no_timeSeriesOHC'] will generate all analyses except timeSeriesOHC. As another example, generate = ['all', 'no_ocean', 'all_timeSeries'] would generate all diagnostics except those comparing ocean model results with observations (and previous model results). (Note that a more efficient and intuitive way to do the same would be generate = ['all_seaIce', 'all_timeSeries'].)

+

An analogous approach has also been added at the command line, for example:

+
./run_analysis.py config.analysis --generate all,no_ocean,all_timeSeries
+
+
+

If the --generate flag is used on the command line, it will replace the generate option in the config file.

+

As an aside, I note that it is not clear if future analysis modules will fit neatly into categories like “time series” and “regridded horizontal” fields, and these categories are not meant to be all-encompassing.

+

Implementation: there should be a simplified template for config files
+Date last modified: 2017/01/29
+Contributors: Xylar Asay-Davis +

The required config.template has been implemented in #86, specifically here. A subdirectory configs will be added with several examples from runs on LANL IC and on Edison at NERSC. Other examples can be added as appropriate and useful.

+

Implementation: removal of ACME specific config options
+Date last modified: 2017/02/01
+Contributors: Xylar Asay-Davis +

casename has been renamed mainRunName, referenceRunName has been added for comparison with reference runs that have not been preprocessed (not yet supported), and ref_casename_v0 has been renamed preprocessedReferenceRunName.

+

See #86, specifically config.template.

+

Implementation: consistent section and option names
+Date last modified: 2017/02/01
+Contributors: Xylar Asay-Davis +

In config.template in #86, “CamelCase“ has been used for all sections and options. The first word is lowercase and subsequent words begin with an uppercase letter. Underscores have been removed (except in the syntax used to turn on and off options, where underscores in prefixes all_ and no_ make splitting and comparison simpler in the implementation).

+

Testing

Testing and Validation: a simple way of turning on and off individual analysis modules
+Date last modified: 2017/02/01
+Contributors: Xylar Asay-Davis +

CI will be added to make sure that the function to parse the generate list (run_analysis.check_generate) behaves as expected.

+

Testing and Validation: there should be a simplified template for config files
+Date last modified: 2017/01/29
+Contributors: Xylar Asay-Davis +

There is not a way to test the template in the usual sense. Instead, the test will be asking other developers and users to adapt the template for new runs to make sure it is intuitive.

+

Testing and Validation: removal of ACME specific config options
+Date last modified: 2017/01/29
+Contributors: Xylar Asay-Davis +

For now, the plan is just to rename the appropriate config options, so the test is simply to ensure that analysis runs correctly and produces bit-for-bit identical images to those produced by the current MPAS-Analysis/develop.

+

Testing and Validation: consistent section and option names
+Date last modified: 2017/02/01
+Contributors: Xylar Asay-Davis +

As above, the test is simply to ensure that analysis runs correctly and produces bit-for-bit identical images to those produced by the current MPAS-Analysis/develop.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/design_docs/eddykineticenergy.html b/1.11.0rc1/design_docs/eddykineticenergy.html new file mode 100644 index 000000000..b429c8117 --- /dev/null +++ b/1.11.0rc1/design_docs/eddykineticenergy.html @@ -0,0 +1,257 @@ + + + + + + + Eddy Kinetic Energy Climatology Mapping — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Eddy Kinetic Energy Climatology Mapping

+

+Kevin Rosa
+date: 2018/06/18
+

+

Summary

+

The document describes a new feature which will be added to the MPAS-Analysis +tools package: visualization of surface Eddy Kinetic Energy (EKE). +The EKE climatology map will function very similarly to other climatological fields (e.g. SSH, SST, etc.). +The output file will contain three images: the modeled EKE climatology, the observed EKE climatology, and the difference. +Plotting EKE is particularly important for MPAS-O because one can configure meshes with eddy-permitting regions and would then want to compare the EKE in these regions against observations.

+
+
+

Requirements

+
    +
  1. Model output must contain the meridional and zonal components of both timeMonthly_avg_velocity* and timeMonthly_avg_velocity*Squared.

  2. +
  3. User can download the EKE observations data, via 1 of 2 methods:

    + +
  4. +
  5. In config file…

    +
      +
    1. Specify ekeSubdirectory with location of EKE observations file.

    2. +
    3. Under [climatologyMapEKE], leave seasons =  ['ANN']. Only annual observations are available currently.

    4. +
    5. When setting generate, task climatologyMapEKE has tags: climatology, horizontalMap, eke

    6. +
    +
  6. +
+
+
+

Physics

+

In the ocean, it is convenient to separate the horizontal current, u, +into its mean and eddy components: +(1)

+

This approach separates the total kinetic energy into mean kinetic energy +(MKE) and eddy kinetic energy (EKE).

+

The EKE over much of the ocean is at least an order of magnitude greater than +the MKE (Wyrtki, 1976). +This eddy energy is important for transporting momentum, heat, mass, and chemical +constituents of seawater (Robinson, 1983).

+
+
+

Algorithms

+

Time mean of equation 1:

+

The model outputs +and +while the observational dataset provides +so two different EKE equations must be used:

+
    +
  1. +
  2. +
+
+
+

Design and Implementation

+

The primary design consideration for this feature is that it integrate +seamlessly with the rest of the analysis tools. +To this end, the sea surface temperature (SST) plotting tools will be used as a +template.

+

Files to create:

+
    +
  • mpas_analysis/ocean/climatology_map_eke.py

  • +
  • docs/tasks/climatologyMapEKE.rst

  • +
  • README.md for drifter_variance.nc dataset

  • +
+

Files to edit:

+
    +
  • mpas_analysis/ocean/__init__.py

  • +
  • docs/analysis_tasks.rst

  • +
  • docs/api.rst

  • +
  • mpas_analysis/config.default

  • +
  • mpas_analysis/obs/analysis_input_files

  • +
+

The main challenge for plotting EKE is that EKE is a function of several model variables and is not itself a variable that is directly written by the model. +Because of this, the climatology mapping functions for SSH, SST, SSS, and MLD will not serve as a direct template for the EKE formulation in mpas_analysis/ocean/climatology_map_eke.py. +I will try to follow the structure of mpas_analysis/ocean/compute_transects_with_vel_mag.py as much as possible.

+

It appears that there is a method for plotting velocity magnitudes on the antarctic grid. Look into ‘climatology_map_sose.py’…

+
+
+

Testing

+

I will test runs of varying durations and resolutions to make sure the EKE plotting is working. I will also ensure that the following jobs fail:

+
    +
  1. Input model results files missing at least one of the 4 necessary velocity variables.

  2. +
  3. Request seasonal plots.

  4. +
  5. Test that ./download_analysis_data.py downloads EKE data.

  6. +
+
+
+

Bibliography

+
    +
  • https://latex.codecogs.com/eqneditor/editor.php

  • +
  • Chelton, D. B., Schlax, M. G., Samelson, R. M. & Szoeke, R. A. de. Global observations of large oceanic eddies. Geophysical Research Letters 34, (2007).

  • +
  • Laurindo, L. C., Mariano, A. J. & Lumpkin, R. An improved near-surface velocity climatology for the global ocean from drifter observations. Deep Sea Research Part I: Oceanographic Research Papers 124, 73–92 (2017).

  • +
  • Wyrtki, K., Magaard, L. & Hager, James. Eddy energy in the oceans. Journal of Geophysical Research 81, 2641–2646

  • +
+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/design_docs/generalize_calendar.html b/1.11.0rc1/design_docs/generalize_calendar.html new file mode 100644 index 000000000..7e1462523 --- /dev/null +++ b/1.11.0rc1/design_docs/generalize_calendar.html @@ -0,0 +1,415 @@ + + + + + + + Generalize Calendar supported by Analysis — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Generalize Calendar supported by Analysis

+

+Xylar Asay-Davis
+date: 2017/02/09
+

+

Summary

+ +Currently, the time variable in `xarray` data sets within MPAS-Analysis has two +major shortcomings, inherited from `xarray` (through `pandas` and `numpy.datetime64`). +First, only the Gregorian calendar is supported. Second, there is no support +for dates outside the years 1678 to 2262. The analysis needs to support both +the Gregorian ('gregorian') and the 365-day ('gregorian_noleap') calendars. It also needs to +support, at a minimum, years between 0001 and 9999, and preferably arbitrary +years both positive and negative. + +A major challenge is that it seems that xarray cannot easily be forced to +use an alternative representation of dates to the troublesome +`numpy.datetime64` type (see, for example, +[pydata/xarray#1084](https://github.com/pydata/xarray/issues/1084)). +The most obvious alternative, `datetime.datetime`, +seemingly cannot be used directly in `xarray` because objects of this type +are converted to `numpy.datetime64` objects at various stages when using +features from pandas, raising errors when dates are out of range. While an +alternative date class (e.g. `netcdftime.DatetimeNoLeap`) might be used to +represent dates on the 'gregorian_noleap' calendar, there is no such +preexisting alternative for the 'gregorian' calendar. + +The solution proposed herein is to store time as floating-point days since the +reference date 0001-01-01 and to convert dates in this format to +`datetime.datetime` and `MpasRelativeDelta` objects whenever mathematical +manipulation of dates is required. + +A successful implementation would produce essentially identical analysis to +what is currently produced, but making use of the dates from the MPAS calendar +(whether Gregorian or 365-day) without the need for artificial offsets (e.g. +`yearOffset` used in the current code). Plots of horizontal fields would remain +unchanged while plots of time series would have a time axis with the simulation +date instead of the offset date. + + +

Requirements

Requirement: The 'Time' coordinate of xarray data sets must be consistent +with the MPAS calendar
+Date last modified: 2017/02/09
+Contributors: Xylar Asay-Davis +

For all data sets used in the analysis, the ‘Time’ coordinate must represent dates +on the appropriate MPAS calendar, either ‘gregorian’ or ‘gregorian_noleap’, depending +on the namelist option ‘config_calendar_type’. There must be ways of mathematically +manipulating times (e.g. adding/subtracting offsets and figuring out the amount of time +between two dates) and of making plots that are consistent with these calendars.

+

Requirement: The 'Time' coordinate of xarray data sets must support at least years +0001 and 9999, and preferably any conceivable value
+Date last modified: 2017/02/16
+Contributors: Xylar Asay-Davis +

For all data sets used in the analysis, the ‘Time’ coordinate must, at a minimum, +support years between 0001 and 9999 (the range of datetime.datetime) and preferably +a broader range.

+

Algorithmic Formulations (optional)

Design solution: The 'Time' coordinate of xarray data sets must be consistent +with the MPAS calendar
+Date last modified: 2017/02/11
+Contributors: Xylar Asay-Davis, Phillip J. Wolfram +

The proposed solution represents time in xarray.DataSet objects as the number of +days since the reference date 0001-01-01. +This is reasonable because the smallest unit of time output in MPAS components is +seconds (and unlikely to ever be shorter than ms). We note that a date specified +as a 64-bit float has a precision high enough to represent seconds for dates up +to +/- 100 million years:

+
>>> import sys
+>>> 1./(sys.float_info.epsilon*365*24*60*60)
+142808207.36207813
+
+
+

We should have no trouble representing any number we might want (including paleo +timescales) with this system.

+

For purposes of performing mathematical operations and plotting dates, these +values will be converted to datetime.datetime objects (via the proposed +days_to_datetime utility function) and back (via the proposed +datetime_to_days).

+

The conversion operations within datetime_to_days and days_to_datetime will be +performed with the calendar-aware functions netCDF4.date2num and +netCDF4.num2date, respectively. Both functions will support lists/arrays of dates +(for efficiency and simplicity of calling code) in addition to single values.

+

Curve plotting can be supported with matplotlib.pyplot.plot_date, which takes a date +of exactly the format used here (days since 0001-01-01). The compatibility with plot_date +was part of the reason for choosing this format for the date.

+

Design solution: The 'Time' coordinate of xarray data sets must support at least years +0001 and 9999, and preferably any conceivable value
+Date last modified: 2017/02/09
+Contributors: Xylar Asay-Davis +

Same as above. In theory, the use of days since 0001-01-01 would allow any year +to be supported, not just the range from 0001 to 9999. However, the conversions +to datetime.datetime objects for mathematical manipulation will constrain +the dates to be between datetime.min (0001-01-01) and datetime.max (9999-12-31).

+

Design and Implementation

Implementation: The 'Time' coordinate of xarray data sets must be consistent +with the MPAS calendar
+Date last modified: 2017/02/16
+Contributors: Xylar Asay-Davis +

The proposed implementation is on the branch +xylar/generalize_calendar

+

A helper function, mpas_xarray._parse_dataset_time, computes times as days since +0001-01-01, and serves as a replacement for mpas_xarray._get_datetimes.

+

Note: the current implementation breaks the convention that ``mpas_xarray`` remains +separate from the rest of MPAS-Analysis by using 3 functions from ``timekeeping.utility`` +in ``mpas_xarray``:

+
from ..timekeeping.utility import string_to_days_since_date, \
+    days_to_datetime, datetime_to_days
+
+
+

This violates the first requirement in the +`Design Document: Moving variable mapping out of mpas_xarray <https://github.com/xylar/MPAS-Analysis/blob/design_doc_variable_mapping_reorg/design_docs/variable_mapping_reorg.md>`_. +I am open to alternative solutions for keeping ``mpas_xarray`` separate from the rest +of analysis but these 3 functions do not conceptually belong in ``mpas_xarray``. The +problem is exacerbated by the fact that there are analysis-specific functions in +``timekeeping``, meaning that this cannot easily be made a submodule of ``mpas_xarray`` +(nor would this make very much logical sense). Having 2 ``timekeeping`` modules, one +for ``mpas_xarray`` and one for MPAS-Analysis, seems unnecessarily confusing.

+

The functions generalized_reader.open_multifile_dataset and +mpas_xarray.open_multifile_dataset have been updated to use this method for parsing +times. This involves removing the year_offset argument and adding an optional +simulation_start_time argument for supplying a date to use to convert variables +like daysSinceStartOfSim to days since 0001-01-01.

+

An example of opening a data set and manipulating times with the new approach in +the OHC script is:

+
from ..shared.timekeeping.utility import get_simulation_start_time, \
+    date_to_days, days_to_datetime, string_to_datetime
+...
+def ohc_timeseries(config, streamMap=None, variableMap=None):
+...
+    simulationStartTime = get_simulation_start_time(streams)
+...
+    ds = open_multifile_dataset(file_names=file_names,
+                                calendar=calendar,
+                                simulation_start_time=simulation_start_time,
+                                time_variable_name='Time',
+                                variable_list=variable_list,
+                                variable_map=variableMap,
+                                start_date=startDate,
+                                end_date=endDate)
+
+    timeStart = string_to_datetime(startDate)
+    timeEnd = string_to_datetime(endDate)
+
+    # Select year-1 data and average it (for later computing anomalies)
+    timeStartFirstYear = string_to_datetime(simulation_start_time)
+    if timeStartFirstYear < timeStart:
+        startDateFirstYear = simulation_start_time
+        firstYear = int(startDateFirstYear[0:4])
+        endDateFirstYear = '{:04d}-12-31_23:59:59'.format(firstYear)
+        filesFirstYear = streams.readpath(streamName,
+                                          startDate=startDateFirstYear,
+                                          endDate=endDateFirstYear,
+                                          calendar=calendar)
+        dsFirstYear = open_multifile_dataset(
+            file_names=filesFirstYear,
+            calendar=calendar,
+            simulation_start_time=simulation_start_time,
+            time_variable_name='Time',
+            variable_list=variable_list,
+            variable_map=variableMap,
+            start_date=startDateFirstYear,
+            end_date=endDateFirstYear)
+    else:
+        dsFirstYear = ds
+        firstYear = timeStart.year
+
+    timeStartFirstYear = date_to_days(year=firstYear, month=1, day=1,
+                                      calendar=calendar)
+    timeEndFirstYear = date_to_days(year=firstYear, month=12, day=31,
+                                    hour=23, minute=59, second=59,
+                                    calendar=calendar)
+
+    dsFirstYear = dsFirstYear.sel(Time=slice(timeStartFirstYear,
+                                             timeEndFirstYear))
+
+    meanFirstYear = dsFirstYear.mean('Time')
+...
+    yearStart = days_to_datetime(ds.Time.min()).year
+    yearEnd = days_to_datetime(ds.Time.max()).year
+    timeStart = date_to_days(year=yearStart, month=1, day=1,
+                             calendar=calendar)
+    timeEnd = date_to_days(year=yearEnd, month=12, day=31,
+                           calendar=calendar)
+
+    if preprocessedReferenceRunName != 'None':
+        print '  Load in OHC from preprocessed reference run...'
+        inFilesPreprocessed = '{}/OHC.{}.year*.nc'.format(
+            preprocessedInputDirectory, preprocessedReferenceRunName)
+        dsPreprocessed = open_multifile_dataset(
+            file_names=inFilesPreprocessed,
+            calendar=calendar,
+            simulation_start_time=simulation_start_time,
+            time_variable_name='xtime')
+        yearEndPreprocessed = days_to_datetime(dsPreprocessed.Time.max()).year
+...
+
+
+

The replicate_cycles function in sea_ice.timeseries has been a particular +challenge with the existing calendar. Here is that function with the new ‘Time’ +coordinate:

+
def replicate_cycle(ds, dsToReplicate, calendar):
+    dsStartTime = days_to_datetime(ds.Time.min(), calendar=calendar)
+    dsEndTime = days_to_datetime(ds.Time.max(), calendar=calendar)
+    repStartTime = days_to_datetime(dsToReplicate.Time.min(),
+                                    calendar=calendar)
+    repEndTime = days_to_datetime(dsToReplicate.Time.max(),
+                                  calendar=calendar)
+
+    repSecondTime = days_to_datetime(dsToReplicate.Time.isel(Time=1),
+                                     calendar=calendar)
+
+    period = (MpasRelativeDelta(repEndTime, repStartTime) +
+              MpasRelativeDelta(repSecondTime, repStartTime))
+
+    startIndex = 0
+    while(dsStartTime > repStartTime + (startIndex+1)*period):
+        startIndex += 1
+
+    endIndex = 0
+    while(dsEndTime > repEndTime + (endIndex+1)*period):
+        endIndex += 1
+
+    dsShift = dsToReplicate.copy()
+
+    times = days_to_datetime(dsShift.Time, calendar=calendar)
+    dsShift.coords['Time'] = ('Time',
+                              datetime_to_days(times + startIndex*period,
+                                               calendar=calendar))
+    # replicate cycle:
+    for cycleIndex in range(startIndex, endIndex):
+        dsNew = dsToReplicate.copy()
+        dsNew.coords['Time'] = ('Time',
+                                datetime_to_days(times + (cycleIndex+1)*period,
+                                                 calendar=calendar))
+        dsShift = xr.concat([dsShift, dsNew], dim='Time')
+
+    return dsShift
+
+
+

Implementation: The 'Time' coordinate of xarray data sets must support at least years +0001 and 9999, and preferably any conceivable value
+Date last modified: 2017/02/09
+Contributors: Xylar Asay-Davis +

Same as above.

+

Testing

Testing and Validation: The 'Time' coordinate of xarray data sets must be consistent +with the MPAS calendar
+Date last modified: 2017/02/11
+Contributors: Xylar Asay-Davis +

+In [xylar/generalize_calendar](https://github.com/xylar/MPAS-Analysis/tree/generalize_calendar), +unit testing has been added for `timekeeping` and `mpas_xarray` that checks both the `gregorian` +and `gregorian_noleap` calendars under simple test conditions. However, we have no data sets +that test `gregorian`, so we have a somewhat limited ability to test this calendar option. +Fortunately, there are also no immediate plans to run with `gregorian`. + +I will make sure all tests with config files in the `configs/lanl` and `configs/edison` +directories produce bit-for-bit results with the current `develop`. + +

Testing and Validation: The 'Time' coordinate of xarray data sets must support at least years +0001 and 9999, and preferably any conceivable value
+Date last modified: 2017/02/11
+Contributors: Xylar Asay-Davis +

Unit tests have been added to ensure that dates both close to 0001-01-01 and typical +calendar dates (e.g. 2017-01-01) function as expected.

+

@akturner’s MPAS-SeaIce run with real dates (mentioned in +#81) has been successfully +run with the proposed approach. This run started in 1958, and had presented a problem +for MPAS-Analysis with the previous calendar.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/design_docs/generalized_horizontal_interpolation.html b/1.11.0rc1/design_docs/generalized_horizontal_interpolation.html new file mode 100644 index 000000000..8eeebd979 --- /dev/null +++ b/1.11.0rc1/design_docs/generalized_horizontal_interpolation.html @@ -0,0 +1,243 @@ + + + + + + + Generalized Horizontal Interpolation in MPAS-Analysis — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Generalized Horizontal Interpolation in MPAS-Analysis

+

+Xylar Asay-Davis
+date: 2017/01/19
+

Horizontal interpolation and related utilities for remapping are now in the pyremap repo.

+

Summary

Currently, MPAS-Analysis uses various methods to perform horizontal interpolation. For constructing ocean climatologies, nearest-neighbor interpolation is used, while for sea-ice climatologies, ncremap is used with the requirement that a mapping file for the appropriate source and destination grids is provided through the config file. This project intends to move MPAS-Analysis to a unified approach to horizontal interpolation that does not require pre-generated mapping files (though it should support caching mapping files for faster execution).

+

Many types of analysis in MPAS will require fields that are interpolated from MPAS grids to arbitrary points, not just to points on a lat/lon grid. This project will not attempt to address that case completely but will take that need into consideration in designing a solution that can be extended to interpolation at arbitrary points in the future.

+

Requirements

+

Requirement: Higher-order interpolation
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis

+ +The option to interpolate smoothly (e.g. linearly or with barycentric coordinates) between cell-centered values should be added. The calling code should easily be able to select among various orders of interpolation with a flag. + +

Consideration: Interpolation should handle periodic boundaries
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis

+ +If and when MPAS-Analysis supports planar test cases with periodic boundaries, interpolation should be extended to handle periodic boundaries + +

Consideration: Interpolation should handle Cartesian meshes
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis

+ +If and when MPAS-Analysis supports planar test cases with purely Cartesian meshes (e.g. where `latCell` and `lonCell` do not vary), interpolation should be extended to handle Cartesian Coordinates + +

Consideration: Support for arbitrary output interpolation points
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis +

+ +The calling code should be able to supply any desired interpolation points, not just a regular latitude-longitude grid. + +

Consideration: Support caching results from any costly, one-time geometric computations
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis

+ +For many potential algorithms used to perform interpolation, there is likely to be a relatively costly step of computing fields such as indices into input data fields and interpolation weights that 1) only need to be computed once for a given input mesh and set of output points and 2) are independent of the data in the field being interpolated. If this data were cached, it could mean that rerunning the analysis (which might be very desirable, e.g., while monitoring the progress of a run) would be much cheaper than the initial run. Also, a cached weight file from a previous analysis run could be used when analyzing a subsequent run with identical source meshes. + + + +

Algorithmic Formulations

Design solution: Higher-order interpolation
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis

The approach will be to create SCRIP files (or, in the future for greater flexibility perhaps ESMF grid/mesh files) for the source and destination grids, then to use ESMF_RegridWeightGen to generate a mapping file. ESMF_RegridWeightGen supports 5 interpolation methods—bilinear, patch, nearestdtos, neareststod, and conserve—and we would likely support at least bilinear, neareststod and conserve, and perhaps all 5. The destination grid will be specified either by reading values from lat and lon coordinates of a NetCDF file or through config file options lat and lon that are typically expressions involving numpy.arange or numpy.linspace.

+

Then, ncremap will be used to remap the desired list of variables from an MPAS NetCDF file to the desired destination grid.

+

Design solution: Interpolation should handle periodic boundaries
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis

For now, periodic boundaries (except for the obvious one at +/- 180 longitude) will not be supported. It appears that ESMF grid files do include support for periodic boundaries so the current solution should be relatively easy to extend to periodic boundaries in the future.

+

Design solution: Interpolation should handle Cartesian meshes
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis

ESMF unstructured mesh files seem to support Cartesian coordinates. This will be investigated if and when MPAS-Analysis can accommodate a test case with Cartesian coordinates.

+

Design solution: Support for arbitrary output interpolation points
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis +

I do not intend to address this consideration in this project. It may be that ESMF_RegridWeightGen can also be used to perform interpolation to arbitrary points (in particular, a set of points that are not cell centers or vertices of a mesh), but this is not yet clear to me. If not, an alternative solution for arbitrary destination points will be needed.

+

Design solution: Support caching results from any costly, one-time geometric computations
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis

This should be relatively easy to accommodate with ESMF_RegridWeightGen and ncremap. The default behavior of the function for generating interpolation weights will be to do nothing if the mapping file already exists. Further, we can support an optional config option that will point to an existing mapping file if one has already been generated and cached somewhere (e.g. in a shared directory). Eventually, we will probably want to systematically store these mapping files for typical MPAS meshes and typical output grids, particularly for those that are expensive to generate.

+

Design and Implementation

+

Implementation: Higher-order interpolation
+Date last modified: 2017/03/04
+Contributors: Xylar Asay-Davis

Implementation is in the branch https://github.com/xylar/MPAS-Analysis/tree/horiz_interp.

+

ESMF_RegridWeightGen is used to compute regridding weights that are ‘bilinear’, ‘neareststod’ (nearest neighbor) or ‘conserve’ (conservative). The order of regridding can be chosen separately for MPAS model results, ocean observations and sea-ice observations via mpasInterpolationMethod and interpolationMethod flags (see the template: https://github.com/xylar/MPAS-Analysis/blob/horiz_interp/config.template).

+

Implementation: Interpolation should handle periodic boundaries
+Date last modified: 2017/03/04
+Contributors: Xylar Asay-Davis

Not yet supported.

+

Implementation: Interpolation should handle Cartesian meshes
+Date last modified: 2017/03/04
+Contributors: Xylar Asay-Davis

Not yet supported.

+

Implementation: Support for arbitrary output interpolation points
+Date last modified: 2017/03/04
+Contributors: Xylar Asay-Davis +

Not yet supported.

+

Implementation: Support caching results from any costly, one-time geometric computations
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis

Mapping files, climatologies and remapped climatologies are cached when they are created. Both mapping files and the directory containing the remapped climatologies from observations can be supplied via the config file, saving the time of computing them.

+

Testing

+

Testing and Validation: Higher-order interpolation
+Date last modified: 2017/03/04
+Contributors: Xylar Asay-Davis

Testing of each of the flags (‘bilinear’, ‘neareststod’ and ‘conserve’) has been performed with the GMPAS-QU240 run, all of which produce plots that look acceptable. Bilinear and conserve methods leave halos of invalid cells around land at coarse resolution, which is consistent with the coarse resolution of this test mesh.

+

An alpha8 and a beta0 run were performed on Edison. They ran successfully but I have not had a chance to examine the output.

+

Testing and Validation: Interpolation should handle periodic boundaries
+Date last modified: 2017/03/04
+Contributors: Xylar Asay-Davis

Not yet supported.

+

Testing and Validation: Interpolation should handle Cartesian meshes
+Date last modified: 2017/03/04
+Contributors: Xylar Asay-Davis

Not yet supported.

+

Testing and Validation: Support for arbitrary output interpolation points
+Date last modified: 2017/03/04
+Contributors: Xylar Asay-Davis +

Not yet supported.

+

Testing and Validation: Support caching results from any costly, one-time geometric computations
+Date last modified: 2017/02/25
+Contributors: Xylar Asay-Davis

I have verified that I can rerun without re-computing mapping files or climatologies. Using the GMPAS-QU240 run, I have verified that I can supply mapping files and remapped observation climatologies without them being re-computed

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/design_docs/index.html b/1.11.0rc1/design_docs/index.html new file mode 100644 index 000000000..d4d8a9ed2 --- /dev/null +++ b/1.11.0rc1/design_docs/index.html @@ -0,0 +1,163 @@ + + + + + + + Design Documents — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+ +
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/design_docs/parallel_tasks.html b/1.11.0rc1/design_docs/parallel_tasks.html new file mode 100644 index 000000000..58e36d0a6 --- /dev/null +++ b/1.11.0rc1/design_docs/parallel_tasks.html @@ -0,0 +1,493 @@ + + + + + + + Support Parallel Tasks — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Support Parallel Tasks

+

+Xylar Asay-Davis
+date: 2017/02/22
+

+

Summary

Currently, the full analysis suite includes 7 tasks, 5 for the ocean and 2 for sea ice. +The number of tasks is expected to grow over time. Task parallelism in some +form is needed to allow as many tasks as desired to be run simultaneously. +Successful completion of this design will mean that the analysis suite produces +identical results to the current develop branch but that several analysis +tasks (a number selected by the user) run simultaneously.

+

Requirements

Requirement: Tasks run simultaneously
+Date last modified: 2017/02/22
+Contributors: Xylar Asay-Davis +

+There must be a mechanism for running more than one analysis task simultaneously. + +

Requirement: Select maximum number of tasks
+Date last modified: 2017/02/22
+Contributors: Xylar Asay-Davis +

+There must be a mechanism for the user to select the maximum number of tasks +to run simultaneously. This might be necessary to control the number of processors +or the amount of memory used on a given machine or (in the case of running +analysis on login nodes) to be nice to other users on a shared resource. + +

Requirement: Lock files written by multiple tasks
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

+There must be a mechanism for locking files (during either reading or writing) if +they can be written by multiple tasks. This is necessary to prevent cases where +multiple tasks write to the same file simultaneously or one task reads from a file +at the same time another is writing. + +

Consideration: Task parallelism should work on either login or compute nodes
+Date last modified: 2017/02/22
+Contributors: Xylar Asay-Davis +

+On some systems, care needs to be taken that scripts run on the compute nodes +rather than on the management node(s). For example, on Edison and Cori, the +`aprun` command is required to ensure that scripts run on compute nodes. + +

Consideration: There may need to be a way to limit the memory used by a task
+Date last modified: 2017/02/22
+Contributors: Phillip J. Wolfram, Xylar Asay-Davis +

+It may be that `xarray-dask` with subprocess (or similar) may need to be some +initialization of xarray corresponding to reduced memory available. For example, +with 10 processes on a node, `xarray` / `dask` should be initialized to use only +1/10th of the memory and CPUs per task. `xarray-dask` may require special +initialization for efficiency and to avoid crashes. + +

Algorithmic Formulations

+ +

Design solution: Tasks run simultaneously
+Date last modified: 2017/02/23
+Contributors: Xylar Asay-Davis +

I propose to have a config option, parallelTaskCount, that is the number of concurrent +tasks that are to be performed. If this flag is set to a number greater than 1, analysis +tasks will run concurrently. To accomplish this, I propose to use subprocess.call or +one of its variants within run_analysis.py to call itself but with only one task at a +time. Thus, if run_analysis.py gets called with only a single task (whether directly +from the command line or through subprocess.call), it would execute that task without +spawning additional subprocesses.

+

This approach would require having a method for creating a list of individual tasks +to be performed, launching parallelTaskCount of those tasks, and then waiting for +them to complete, launching additional tasks as previous tasks complete. The approach +would also require individual log files for each task, each stored in the log directory +(already a config option).

+

Design solution: Select maximum number of tasks
+Date last modified: 2017/02/23
+Contributors: Xylar Asay-Davis +

This is accomplished with the parallelTaskCount flag above. A value of +parallelTaskCount = 1 would indicate serial execution, though likely still +via launching subprocesses for each task.

+

The command subprocess.Popen allows enough flexibility that it will be possible +to launch several jobs, and then to farm out additional jobs as each returns. It should +be possible to use a combination of os.kill(pid, 0), which checks if a +process is running, and os.waitpid(-1,0), which waits for any subprocess to finish, +to accomplish launching several processes and waiting until the first one finishes +before launching the next task, or in pseudo-code:

+
processes = launchTasks(taskNames[0:taskCount])
+remainingTasks = taskNames[taskCount:]
+while len(processes) > 0:
+    process = waitForTask(processes)
+    processes.pop(process)
+    if len(remainingTasks) > 0:
+        process = launchTasks(remainingTasks[0])
+        processes.append(process)
+        remainingTasks = remainingTasks[1:]
+
+
+

Output from the main run_analysis.py task will list which analysis tasks were run +and which completed successfully. The full analysis will exit with an error if one +task fails, but only after attempting to run all desired analysis tasks. This allows +the failure of one analysis task not to interrupt execution of other analyses.

+

In a future PR, this work can be expanded to include checking if the appropriate +analysis member (AM) was turned on during the run and skipping any analysis tasks that +depend on that AM if not (Issue #58).

+

Design solution: Lock files written by multiple tasks
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

The design solution is based on the process lock in the fasteners package: +http://fasteners.readthedocs.io/en/latest/examples.html#interprocess-locks

+

Currently, only mapping files should be written by multiple tasks, requiring locks.

+

The algorithm consists of 2 changes. First, I removed the option overwriteMappingFiles, +which is now always False—if a mapping file exists, it is not overwritten. This +was necessary because now only one task will write a given mapping file if it doesn’t +already exist and the other tasks will wait for it to be written. Then, all tasks +know there is a valid mapping file that they can read without having to lock the file.

+

The second change was to add a lock around the subprocess call to ESMF_RegridWeightGen +that makes sure only one process generates the mapping file. Each process attempts to +acquire the lock and checks if the mapping file already exists once it acquires the +lock. If not, it generates the mapping file and releases the lock. If so, it just +releases the lock and moves on. Thus, only the first process to acquire the lock +generates the mapping file and the others wait until it is finished.

+

Design solution: Task parallelism should work on either login or compute nodes
+Date last modified: 2017/02/23
+Contributors: Xylar Asay-Davis +

+For the time being, I propose to address only task parallelism on the login nodes and to +extend the parallelism to work robustly on compute nodes as a separate project. +Nevertheless, I will seek to implement this design in a way that should be conducive to +this later extension. Likely what will be required is a robust way of adding a prefix +to the commandline (e.g. `aprun -np 1`) when calling subprocesses. Adding such a prefix +should be relatively simple. + +

Design solution: There may need to be a way to limit the memory used by a task
+Date last modified: 2017/02/23
+Contributors: Xylar Asay-Davis +

+I am not very familiar with `dask` within `xarray` and I do not intend to address this +consideration directly in this project. However, on my brief investigation, it seems like +the proper way to handle this may be to have a `chunk` config option either for all tasks +or for individual tasks that can be used to control the size of data in memory. I think +such an approach can be investigated in parallel to this project. An intermediate solution +for situations where memory is limited would be to set `parallelTaskCount` to a small number. + + +

Design and Implementation

+ +This design has been implemented in the test branch https://github.com/xylar/MPAS-Analysis/tree/parallel_tasks + +

Implementation: Tasks run simultaneously
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

Tasks can now run in parallel. This has been implemented in these 4 functions within run_analysis.py:

+
def run_parallel_tasks(config, analyses, configFiles, taskCount):
+    # {{{
+    """
+    Run this script once each for several parallel tasks.
+
+    Author: Xylar Asay-Davis
+    Last Modified: 03/08/2017
+    """
+
+    taskNames = [analysisModule.get_task_name(**kwargs) for
+                 analysisModule, kwargs in analyses]
+
+    taskCount = min(taskCount, len(taskNames))
+
+    (processes, logs) = launch_tasks(taskNames[0:taskCount], config,
+                                     configFiles)
+    remainingTasks = taskNames[taskCount:]
+    while len(processes) > 0:
+        (taskName, process) = wait_for_task(processes)
+        if process.returncode == 0:
+            print "Task {} has finished successfully.".format(taskName)
+        else:
+            print "ERROR in task {}.  See log file {} for details".format(
+                taskName, logs[taskName].name)
+        logs[taskName].close()
+        # remove the process from the process dictionary (no need to bother)
+        processes.pop(taskName)
+
+        if len(remainingTasks) > 0:
+            (process, log) = launch_tasks(remainingTasks[0:1], config,
+                                          configFiles)
+            # merge the new process and log into these dictionaries
+            processes.update(process)
+            logs.update(log)
+            remainingTasks = remainingTasks[1:]
+    # }}}
+
+
+def launch_tasks(taskNames, config, configFiles):  # {{{
+    """
+    Launch one or more tasks
+
+    Author: Xylar Asay-Davis
+    Last Modified: 03/08/2017
+    """
+    thisFile = os.path.realpath(__file__)
+
+    logsDirectory = build_config_full_path(config, 'output',
+                                           'logsSubdirectory')
+    make_directories(logsDirectory)
+
+    commandPrefix = config.getWithDefault('execute', 'commandPrefix',
+                                          default='')
+    if commandPrefix == '':
+        commandPrefix = []
+    else:
+        commandPrefix = commandPrefix.split(' ')
+
+    processes = {}
+    logs = {}
+    for taskName in taskNames:
+        args = commandPrefix + [thisFile, '--generate', taskName] + configFiles
+
+        logFileName = '{}/{}.log'.format(logsDirectory, taskName)
+
+        # write the command to the log file
+        logFile = open(logFileName, 'w')
+        logFile.write('Command: {}\n'.format(' '.join(args)))
+        # make sure the command gets written before the rest of the log
+        logFile.flush()
+        print 'Running {}'.format(taskName)
+        process = subprocess.Popen(args, stdout=logFile,
+                                   stderr=subprocess.STDOUT)
+        processes[taskName] = process
+        logs[taskName] = logFile
+
+    return (processes, logs)  # }}}
+
+
+def wait_for_task(processes):  # {{{
+    """
+    Wait for the next process to finish and check its status.  Returns both the
+    task name and the process that finished.
+
+    Author: Xylar Asay-Davis
+    Last Modified: 03/08/2017
+    """
+
+    # first, check if any process has already finished
+    for taskName, process in processes.iteritems():  # python 2.7!
+        if(not is_running(process)):
+            return (taskName, process)
+
+    # No process has already finished, so wait for the next one
+    (pid, status) = os.waitpid(-1, 0)
+    for taskName, process in processes.iteritems():
+        if pid == process.pid:
+            process.returncode = status
+            # since we used waitpid, this won't happen automatically
+            return (taskName, process)  # }}}
+
+
+def is_running(process):  # {{{
+    """
+    Returns whether a given process is currently running
+
+    Author: Xylar Asay-Davis
+    Last Modified: 03/08/2017
+    """
+
+    try:
+        os.kill(process.pid, 0)
+    except OSError:
+        return False
+    else:
+        return True  # }}}
+
+
+

Implementation: Select maximum number of tasks
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

There is a configuration option, parallelTaskCount, which defaults to 1, meaning tasks run in serial:

+
[execute]
+## options related to executing parallel tasks
+
+# the number of parallel tasks (1 means tasks run in serial, the default)
+parallelTaskCount = 8
+
+
+

Implementation: Lock files written by multiple tasks
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

Here is the code for locking the mapping file within shared.interpolation.interpolate:

+
import fasteners
+...
+# lock the weights file in case it is being written by another process
+with fasteners.InterProcessLock(_get_lock_path(outWeightFileName)):
+    # make sure another process didn't already create the mapping file in
+    # the meantime
+    if not os.path.exists(outWeightFileName):
+        # make sure any output is flushed before we add output from the
+        # subprocess
+        subprocess.check_call(args)
+
+
+

Implementation: Task parallelism should work on either login or compute nodes
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

I have included a config option commandPrefix that should be able to be used to +run the analysis on compute nodes. If the command prefix is empty, the code should run +as normal on the compute nodes.

+
[execute]
+## options related to executing parallel tasks
+
+# the number of parallel tasks (1 means tasks run in serial, the default)
+parallelTaskCount = 1
+
+# Prefix on the command line before a parallel task (e.g. 'srun -n 1 python')
+# Default is no prefix (run_analysis.py is executed directly)
+commandPrefix = srun -n 1
+
+
+

Implementation: There may need to be a way to limit the memory used by a task
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

As mentioned above, I have not addressed this consideration in this project. Currently, +the suggested approach would be to limit parallelTaskCount to a number of tasks that +does not cause memory problems. More sophisticated approaches could be explored in the +future.

+

Testing

Testing and Validation: Tasks run simultaneously
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

So far, I have tested extensively on my laptop (parallelTaskCount = 1, 2, 4 and 8) +with the expected results. Later, I will test on Edison and Wolf as well.

+

Testing and Validation: Select maximum number of tasks
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

Same as above.

+

Implementation: Lock files written by multiple tasks
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

I ran multiple climatology map tasks at the same time and verified from the log files +that only one created each mapping file. Others must have waited for that file to be +written or they would have crashed almost immediately when they tried to read the +mapping file during remapping operations. So I’m confident the code is working as +intended.

+

Testing and Validation: Task parallelism should work on either login or compute nodes
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

On Edison and Wolf, I will test running the analysis with parallel tasks both on login nodes +and by submitting a job to run on the compute nodes (using the appropriate commandPrefix).

+

Testing and Validation: There may need to be a way to limit the memory used by a task
+Date last modified: 2017/03/10
+Contributors: Xylar Asay-Davis +

Assuming no crashes in my testing on compute nodes with all tasks running in parallel, I will +leave this consideration for investigation in the future.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/design_docs/prerequisite_tasks.html b/1.11.0rc1/design_docs/prerequisite_tasks.html new file mode 100644 index 000000000..15cbbba67 --- /dev/null +++ b/1.11.0rc1/design_docs/prerequisite_tasks.html @@ -0,0 +1,305 @@ + + + + + + + Prerequisite Tasks and Subtasks — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Prerequisite Tasks and Subtasks

+

+Xylar Asay-Davis
+date: 2017/06/12
+

Summary

Currently, no tasks depend on other tasks to run. However, in order to allow +multiple plots to be generated simultaneously, it is desirable to break tasks +into multiple subtasks, and some of these subtasks will need to rely on data from +other subtasks. It is also conceivable that multiple tasks could rely on the +same data (e.g. a common climatology dataset). The proposed solution to this +problem is to allow “prerequisite tasks” to a given analysis task. The task +will only run after the prerequisite task(s) have completed. Prerequisite +tasks could be used to build up a sequence of analysis tasks in several steps. +Some of these steps could be shared between analysis tasks (e.g. computing a +single data set and then plotting it in various ways). Implementation of this +design will be considered a success if dependent tasks only run once their +prerequisite tasks have completed successfully.

+

Requirements

Requirement: Define prerequisite tasks
+Date last modified: 2017/06/12
+Contributors: Xylar Asay-Davis +

A simple mechanism (such as a list of task names) exists to define prerequisite +tasks of each analysis task.

+

Requirement: Add prerequisites to task list
+Date last modified: 2017/06/12
+Contributors: Xylar Asay-Davis +

Given a task that we want to run, a mechanism must exist for adding its +prerequisites (if any) to the list of tasks to be run.

+

Requirement: Holding dependent tasks
+Date last modified: 2017/06/12
+Contributors: Xylar Asay-Davis +

Dependent tasks (those with prerequisites) must be prevented from running until +their prerequisites have successfully finished.

+

Requirement: Cancel dependents of failed prerequisites
+Date last modified: 2017/06/12
+Contributors: Xylar Asay-Davis +

If a prerequisite of a dependent task has failed, the dependent task should +not be run.

+

Algorithmic Formulations

Design solution: Define prerequisite tasks
+Date last modified: 2017/09/19
+Contributors: Xylar Asay-Davis +

Each task will be constructed with a list of the names of prerequisite tasks. +If a task has no prerequisites (the default), the list is empty.

+

Design solution: Add prerequisites to task list
+Date last modified: 2017/10/11
+Contributors: Xylar Asay-Davis +

A recursive function will be used to add a given task (assuming its +check_generate method returns True, meaning that task should be generated) +and its dependencies to a list of analyses to run. The code (with a few +error messages removed for brevity) is as follows:

+
analysesToGenerate = []
+# check which analysis we actually want to generate and only keep those
+for analysisTask in analyses:
+    # update the dictionary with this task and perhaps its subtasks
+    add_task_and_subtasks(analysisTask, analysesToGenerate)
+
+def add_task_and_subtasks(analysisTask, analysesToGenerate,
+                          callCheckGenerate=True):
+
+    if analysisTask in analysesToGenerate:
+        return
+
+    if callCheckGenerate and not analysisTask.check_generate():
+        # we don't need to add this task -- it wasn't requested
+        return
+
+    # first, we should try to add the prerequisites of this task and its
+    # subtasks (if they aren't also subtasks for this task)
+    prereqs = analysisTask.runAfterTasks
+    for subtask in analysisTask.subtasks:
+        for prereq in subtask.runAfterTasks:
+            if prereq not in analysisTask.subtasks:
+                prereqs.extend(subtask.runAfterTasks)
+
+    for prereq in prereqs:
+        add_task_and_subtasks(prereq, analysesToGenerate,
+                              callCheckGenerate=False)
+        if prereq._setupStatus != 'success':
+            # this task should also not run
+            analysisTask._setupStatus = 'fail'
+            return
+
+    # make sure all prereqs have been set up successfully before trying to
+    # set up this task -- this task's setup may depend on setup in the prereqs
+    try:
+        analysisTask.setup_and_check()
+    except (Exception, BaseException):
+        analysisTask._setupStatus = 'fail'
+        return
+
+    # next, we should try to add the subtasks.  This is done after the current
+    # analysis task has been set up in case subtasks depend on information
+    # from the parent task
+    for subtask in analysisTask.subtasks:
+        add_task_and_subtasks(subtask, analysesToGenerate,
+                              callCheckGenerate=False)
+        if subtask._setupStatus != 'success':
+            analysisTask._setupStatus = 'fail'
+            return
+
+    analysesToGenerate.append(analysisTask)
+    analysisTask._setupStatus = 'success'
+
+
+

Design solution: Holding dependent tasks
+Date last modified: 2017/10/11
+Contributors: Xylar Asay-Davis +

Each task is given a _runStatus attribute, which is a multiprocessing.Value +object that can be shared and changed across processes. A set of constant +possible values for this attribute, READY, BLOCKED, RUNNING, SUCCESS and +FAIL are defined in AnalysisTask. If a task has no prerequisites, initially +_runStatus = READY; otherwise _runStatus = BLOCKED. Any READY +task can be run (_runStatus = 'running'). Any task that finishes is given +_runStatus = SUCCESS or _runStatus = FAIL (I know, not grammatically +consistent but compact…).

+

When a new parallel slot becomes available, all BLOCKED tasks are checked +to see if any prerequisites have failed (in which case the task also fails) or +if all prerequisites have succeeded, in which case the task is now READY. +After that, the next READY task is run.

+

Design solution: Cancel dependents of failed prerequisites
+Date last modified: 2017/06/12
+Contributors: Xylar Asay-Davis +

Same as above: When a new parallel slot becomes available, all BLOCKED +tasks are checked to see if any prerequisites have failed (in which case the +task also fails).

+

Design and Implementation

The design has been implemented in the branch +xylar/add_mpas_climatology_task

+

Implementation: Define prerequisite tasks
+Date last modified: 2017/10/11
+Contributors: Xylar Asay-Davis +

AnalysisTask now has an attribute runAfterTasks, which defaults to empty. +Prerequisite tasks can be added by calling run_after(self, task) with the +task that this task should follow.

+

Implementation: Add prerequisites to task list
+Date last modified: 2017/10/11
+Contributors: Xylar Asay-Davis +

build_analysis_list in run_mpas_analysis has been modified to call a +recursive function add_task_and_subtasks that adds a task, its prerequisites +(if they have not already been added) and its subtasks to the list of tasks +to run.

+

Implementation: Holding dependent tasks
+Date last modified: 2017/06/12
+Contributors: Xylar Asay-Davis +

The run_analysis function in run_mpas_analysis has been updated to be aware +of the status of each task, as described in the algorithms section.

+

Implementation: Cancel dependents of failed prerequisites
+Date last modified: 2017/06/12
+Contributors: Xylar Asay-Davis +

Again, the run_analysis function in run_mpas_analysis has been updated to +be aware of the status of each task, as described in the algorithms section.

+

Testing and Validation

+

Date last modified: 2017/06/12
+Contributors: Xylar Asay-Davis +

+All plots will be tested to ensure they are bit-for-bit identical to +those produced by develop for all tests defined in the configs/edison +and configs/lanl directories. Task will be run in parallel and I will +verify that no dependent tasks run before prerequisite tasks have completed.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/design_docs/remapper.html b/1.11.0rc1/design_docs/remapper.html new file mode 100644 index 000000000..373adb530 --- /dev/null +++ b/1.11.0rc1/design_docs/remapper.html @@ -0,0 +1,266 @@ + + + + + + + Remapper for “online” remapping of data sets — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Remapper for “online” remapping of data sets

+

+Xylar Asay-Davis
+date: 2017/04/15
+

+

Summary

This document describes the design and implementation of a Remapper class +for performing either “online” (in memory) or “offline” (through files +via ncremap) remapping of horizontal data sets. The Remapper is needed in +order to support remapping to and from grids not currently supported by +ncremap such as polar stereographic grids commonly used for polar data sets.

+

Requirements

Requirement: Support for remapping to and from stereographic grids
+Date last modified: 2017/04/15
+Contributors: Xylar Asay-Davis +

There should exist a method for interpolating from stereographic grids to +the comparison grid used in MPAS-Analysis. This is needed to support +observations that are stored on stereographic grids.

+

It would often be more efficient (in terms of the size of data sets) and more +practical to perform analysis of polar data sets on a stereographic grid +centered at that pole. Support for mapping to stereographic grids should be +included, if feasible.

+

Algorithmic Formulations

Design solution: Support for remapping to and from stereographic grids
+Date last modified: 2017/04/15
+Contributors: Xylar Asay-Davis +

The design solution is somewhat complex and will be described in multiple +sections.

+

MeshDescriptor classes

To support mapping to and from MPAS meshes, lat/lon grid and stereographic +grids (as well as future grids we might want to support), I propose defining a +“mesh descriptor” that defines the mesh either by reading it from a file or by +creating it from simple numpy ndarrays. Each MeshDescriptor class defines +enough information (such as the locations of cell centers and corners) about +the mesh or grid to allow remapping between meshes.

+

An MpasMeshDescriptor class will define MPAS meshes read from a file.

+

A LatLonGridDescriptor class will define global lat/lon grids such as the +existing comparison grid.

+

A ProjectionGridDescriptor class will define any grid that can be described +by a logically rectangular grid with pyproj projection. In particular, such +a projection grid could be used to support both polar stereographic grids and +regional lat/lon grids.

+

Remapper class

Remapping between meshes described by MeshDescriptor classes will be performed +by a Remapper class. This class will support both “online” mapping in memory +and “offline” mapping with ncremap. Only “online” mapping will be supported +for grids defined with the ProjectionGridDescriptor, as these are not +supported by ncremap. A Remapper object will be created by giving it source +and destination MeshDescriptor objects and an optional mapping file name. +(If the mapping file name is not given, it is assumed that the source and +destination grids are the same, and no remapping is needed.)

+

If remapping is performed “online”, it supports renormalization of masked +arrays. If a data set includes NaNs in a given data array, both the data +array and a mask are remapped, and renormalization is performed anywhere the +remapped mask exceeds a given threshold.

+

Design and Implementation

Implementation: Support for remapping to and from stereographic grids
+Date last modified: 2017/04/15
+Contributors: Xylar Asay-Davis +

The implementation is on the branch xylar/MPAS-Analysis/add_polar_stereographic_interp

+

MeshDescriptor classes

Each MeshDescriptor subclass includes the following member variables or +methods:

+
    +
  • +
    meshName: a name of the mesh or grid, used for naming mapping files and

    climatologies

    +
    +
    +
  • +
  • regional: whether the mesh is regional or global

  • +
  • +
    coords and dims: dictionaries defining the coordinates and dimensions

    of this mesh, used to update a data set following remapping

    +
    +
    +
  • +
  • to_scrip method: used to write out a SCRIP file defining the mesh.

  • +
+

Remapper class

Below is a skeleton of the Remapper public API.

+
class Remapper(object):
+    def __init__(self, sourceDescriptor, destinationDescriptor,
+                 mappingFileName=None):
+        '''
+        Create the remapper and read weights and indices from the given file
+        for later used in remapping fields.
+        '''
+
+    def build_mapping_file(self, method='bilinear',
+                           additionalArgs=None):
+        '''
+        Given a source file defining either an MPAS mesh or a lat-lon grid and
+        a destination file or set of arrays defining a lat-lon grid, constructs
+        a mapping file used for interpolation between the source and
+        destination grids.
+        '''
+
+    def remap_file(self, inFileName, outFileName,
+                   variableList=None, overwrite=False):
+        '''
+        Given a source file defining either an MPAS mesh or a lat-lon grid and
+        a destination file or set of arrays defining a lat-lon grid, constructs
+        a mapping file used for interpolation between the source and
+        destination grids.
+        '''
+
+    def remap(self, ds, renormalizationThreshold=None):
+        '''
+        Given a source data set, returns a remapped version of the data set,
+        possibly masked and renormalized.
+        '''
+
+
+

Testing and Validation: Support for remapping to and from stereographic +grids
+Date last modified: 2017/04/15
+Contributors: Xylar Asay-Davis +

On the branch xylar/MPAS-Analysis/add_polar_stereographic_interp, +climatologies have been updated to use Remapper objects. Analysis has been +run on both QU240 and EC60to30 beta0 ACME results, and results have been +compared by eye. Results from ncremap are identical, as expected. Because of +renormalization, results with “online” remapping differ from those from +ncremap, typically with less severe masking of missing data.

+

Continuous integration unit tests for climatology and interpolation have both +been updated to make use of the Remapper class. New tests have been added to +perform remapping with stereographic grids.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/design_docs/timekeeping_reorg.html b/1.11.0rc1/design_docs/timekeeping_reorg.html new file mode 100644 index 000000000..9d79ac15e --- /dev/null +++ b/1.11.0rc1/design_docs/timekeeping_reorg.html @@ -0,0 +1,335 @@ + + + + + + + Reorganize Timekeeping — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Reorganize Timekeeping

+

+Xylar Asay-Davis
+date: 2017/02/06
+

+

Summary

+ +Currently, the `Date` class is used to parse a date object from a date string +(e.g. '0001-01-01_00:00:00') taken from MPAS namelists, streams files or time +variables (e.g. `xtime`). However, this class assumes a 365-day calendar and +cannot easily be adapted to the Gregorian calendar also supported by MPAS +components (`config_calendar_type = 'gregorian'`). Furthermore, existing +routines exist to handle most of the capabilities +of the `Date` class. The proposed reorganization would eliminate the `Date` class +in favor of a number of helper functions that can be used to convert between various +date formats: date strings, days since a reference date, `datetime.datetime` objects +and `relativedelta` objects (see below). The success of this reorganization will be +demonstrated when the existing analysis can be performed successfully with the new +utility functions with both MPAS calendars, the `'gregorian_noleap'` (365-day) calendar +used by most existing ACME and MPAS runs and the `'gregorian'` calendar also supported +in MPAS components. + + +

Requirements

Requirement: Date string parsing supports both MPAS calendars
+Date last modified: 2017/02/06
+Contributors: Xylar Asay-Davis +

There must be a way to parse dates from MPAS that is aware of the appropriate calendar +stored in the config_calendar_type namelist option, either 'gregorian' or +'gregorian_noleap'.

+

Requirement: Capability of incrementing dates by a number of years and/or months
+Date last modified: 2017/02/06
+Contributors: Xylar Asay-Davis +

The analysis requires a way of incrementing a given date by an interval specified in +not only days, hours, minutes and seconds but also months and years. The standard +datetime.timedelta does not support increments by years and months because they are +not fixed periods of time. The existing Date class in MPAS-Analysis supports +increments in months and years, but only for the 'gregorian_noleap' (365-day) calendar. +A method must exist to increment dates on either calendar by a given number of years +and/or months (in addition to days, hours, etc.).

+

Design and Implementation

Implementation: Date string parsing supports both MPAS calendars
+Date last modified: 2017/02/06
+Contributors: Xylar Asay-Davis +

The implementation is on the branch: +https://github.com/xylar/MPAS-Analysis/tree/timekeeping_reorg +and in PR #102

+

The function for converting a date string to a datetime.datetime is documented as follows:

+
def stringToDatetime(dateString):
+    """
+    Given a date string and a calendar, returns a `datetime.datetime`
+
+    Parameters
+    ----------
+    dateString : string
+        A date and time in one of the following formats:
+        - YYYY-MM-DD hh:mm:ss
+        - YYYY-MM-DD hh.mm.ss
+        - YYYY-MM-DD SSSSS
+        - DDD hh:mm:ss
+        - DDD hh.mm.ss
+        - DDD SSSSS
+        - hh.mm.ss
+        - hh:mm:ss
+        - YYYY-MM-DD
+        - YYYY-MM
+        - SSSSS
+
+        Note: either underscores or spaces can be used to separate the date
+        from the time portion of the string.
+
+    Returns
+    -------
+    datetime : A `datetime.datetime` object
+
+    Raises
+    ------
+    ValueError
+        If an invalid `dateString` is supplied.
+
+    Author
+    ------
+    Xylar Asay-Davis
+
+    Last modified
+    -------------
+    02/04/2017
+    """
+
+
+

As long as relativedelta objects rather than datetime.timedelta objects are used to increment +datetime.datetime objects, datetime.datetime can be used to represent dates on either the Gregorian +or the 365-day calendar.

+

Implementation: Capability of incrementing dates by a number of years and/or months
+Date last modified: 2017/02/09
+Contributors: Xylar Asay-Davis +

The implementation is on the branch: +https://github.com/xylar/MPAS-Analysis/tree/timekeeping_reorg +and in PR #102

+

The proposed implementation adds a new class MpasRelativeDelta derived from +dateutil.relativedelta.relativedelta to compute the expected +increments in years and months (as well as days, hours, minutes and seconds, as needed). +The class is documented as follows

+
class MpasRelativeDelta(relativedelta):
+    """
+    MpasRelativeDelta is a subclass of dateutil.relativedelta for relative time
+    intervals with different MPAS calendars.
+
+    Only relative intervals (years, months, etc.) are supported and not the
+    absolute date specifications (year, month, etc.).  Addition/subtraction
+    of datetime.datetime objects (but not other MpasRelativeDelta,
+    datetime.timedelta or other related objects) is supported.
+
+    Author
+    ------
+    Xylar Asay-Davis
+
+    Last Modified
+    -------------
+    02/09/2017
+
+
+

The function for converting a date string to a MpasRelativeDelta is documented as follows:

+
from dateutil.relativedelta import relativedelta
+...
+def stringToRelativedelta(dateString, calendar='gregorian'):
+    """
+    Given a date string and a calendar, returns an instance of
+    `MpasRelativeDelta`
+
+    Parameters
+    ----------
+    dateString : string
+        A date and time in one of the following formats:
+        - YYYY-MM-DD hh:mm:ss
+        - YYYY-MM-DD hh.mm.ss
+        - YYYY-MM-DD SSSSS
+        - DDD hh:mm:ss
+        - DDD hh.mm.ss
+        - DDD SSSSS
+        - hh.mm.ss
+        - hh:mm:ss
+        - YYYY-MM-DD
+        - YYYY-MM
+        - SSSSS
+
+        Note: either underscores or spaces can be used to separate the date
+        from the time portion of the string.
+
+    calendar: {'gregorian', 'gregorian_noleap'}, optional
+        The name of one of the calendars supported by MPAS cores
+
+    Returns
+    -------
+    relativedelta : An `MpasRelativeDelta` object
+
+    Raises
+    ------
+    ValueError
+        If an invalid `dateString` is supplied.
+
+    Author
+    ------
+    Xylar Asay-Davis
+
+    Last modified
+    -------------
+    02/04/2017
+    """
+
+
+

Testing

Testing and Validation: Date string parsing supports both MPAS calendars
+Date last modified: 2017/02/08
+Contributors: Xylar Asay-Davis +

+Analysis will be run on Edison with all available configurations found in `configs/edison`. As there +are currently no plans to run with the `gregorian` calendar option, we do not have test runs that use this +calendar. If this situation changes in the future, we'll test at that time. + +Regression tests previously written for `Date` have been modified to test the new utility functions. New tests +have been added to test that dates with both `gregorian` and `gregorian_noleap` calendars behave as +expected, particularly around the leap day. + +

Testing

+

Testing and Validation: Capability of incrementing dates by a number of years and/or months
+Date last modified: 2017/02/06
+Contributors: Xylar Asay-Davis +

Same as above.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/design_docs/variable_mapping_reorg.html b/1.11.0rc1/design_docs/variable_mapping_reorg.html new file mode 100644 index 000000000..8a04645bc --- /dev/null +++ b/1.11.0rc1/design_docs/variable_mapping_reorg.html @@ -0,0 +1,345 @@ + + + + + + + Moving variable mapping outside of mpas_xarray — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Moving variable mapping outside of mpas_xarray

+

+Xylar Asay-Davis
+date: 2017/02/10
+

+

Summary

+ +In discussions with @pwolfram, it became clear that we would like to keep +mpas_xarray as general as possible, rather than adding code specific to +MPAS-Analysis. In particular, the capability for mapping variable names +that is currently part of mpas_xarray is likely a capability that only +MPAS-Analysis will need when opening xarray data sets. Likewise, there is +a desire for mpas_xarray not to use any of the functionality outside of its +own module so that it remains autonomous from MPAS-Analysis. + +At the same time, it is desirable for efficiency and parallelism to perform +certain operations during the preprocessing step within xarray, rather than +constructing a data set first and then (in serial) performing manipulations +(e.g. creating a time coordinate and slicing variables). + +The solution will be tested by making sure it produces bit-for-bit identical +results to those from the develop branch for typical test cases on LANL IC +and Edison. + +

Requirements

Requirement: mpas_xarray does not include MPAS-Analysis specific +functionality
+Date last modified: 2017/02/10
+Contributors: Xylar Asay-Davis +

MPAS-Analysis specific functionality such as variable mapping should be +removed from mpas_xarray so it can remain an independent module, requiring +minimal modification to accommodate MPAS-Analysis’ needs.

+

Requirement: MPAS-Analysis specific functionality should be supported in +xarray preprocessing
+Date last modified: 2017/02/10
+Contributors: Xylar Asay-Davis +

There should be a way to perform MPAS-Analysis specific functionality such as +mapping variables during preprocessing. This functionality should be +relatively easy to add to as new preprocessing needs arise.

+

Algorithmic Formulations (optional)

Algorithm: mpas_xarray does not include MPAS-Analysis specific +functionality
+Date last modified: 2017/02/10
+Contributors: Xylar Asay-Davis +

All functions and function arguments related to variable mapping will +be removed from mpas_xarray and moved elsewhere.

+

Algorithm: MPAS-Analysis specific functionality should be supported in +xarray preprocessing
+Date last modified: 2017/02/15
+Contributors: Xylar Asay-Davis +

A new utility function, open_multifile_dataset will be added to mpas_xarray +that simplifies current calls to xarray.open_mfdataset to hide the +preprocessor and take care of removing redundant time indices once the dataset +has been built. (This function doesn’t directly address the requirement but +is meant to make mpas_xarray easier to use and made sense because it +has a one-to-one correspondence with other functionality, described below, +that does address the requirement.)

+

A new module, generalized_reader will also be added with its own +open_multifile_dataset function. This version takes additional arguments +including a variable map and start and end dates for the dataset. +generalized_reader.open_multifile_dataset will create a data set +by calling xarray.open_mfdataset with its own preprocessing function, +generalized_reader._preprocess that first maps variable names, then +calls mpas_xarray.preprocess to finish the job. Once the dataset has +been constructed, redundant time indices are removed and the ‘Time’ +coordinate is sliced to be between the supplied start and end dates.

+

This solution may add some confusion in terms of which reader should +be used to open xarray datasets. It is my sense that most developers +adding new functionality will do so by modifying existing scripts, and +these examples should make it clear which version of +open_multifile_dataset is most appropriate. Nevertheless, clear +documentation of generalized_reader and mpas_xarray, and their +differences are needed.

+

Here is a typical usage of generalized_reader.open_multifile_dataset:

+
from mpas_analysis.shared.generalized_reader.generalized_reader \
+    import open_multifile_dataset
+
+file_name = 'example_jan_feb.nc'
+timestr = ['xtime_start', 'xtime_end']
+var_list = ['time_avg_avgValueWithinOceanRegion_avgSurfaceTemperature']
+variable_map = {
+   'avgSurfaceTemperature':
+       ['time_avg_avgValueWithinOceanRegion_avgSurfaceTemperature',
+        'other_string',
+        'yet_another_string'],
+   'daysSinceStartOfSim':
+       ['time_avg_daysSinceStartOfSim',
+        'xtime',
+        'something_else']}
+ds = open_multifile_dataset(file_names=file_name,
+                            calendar=calendar,
+                            time_variable_name=timestr,
+                            variable_list=var_list,
+                            start_date='0001-01-01',
+                            end_date='9999-12-31',
+                            variable_map=variable_map,
+                            year_offset=1850)
+
+
+

Here is the same for mpas_xarray.open_multifile_dataset without the +variable map, start and end dates:

+
from mpas_analysis.shared.mpas_xarray.mpas_xarray \
+    import open_multifile_dataset
+
+file_name = 'example_jan_feb.nc'
+timestr = ['xtime_start', 'xtime_end']
+var_list = ['time_avg_avgValueWithinOceanRegion_avgSurfaceTemperature']
+
+ds = open_multifile_dataset(file_names=file_name,
+                            calendar=calendar,
+                            time_variable_name=timestr,
+                            variable_list=var_list,
+                            year_offset=1850)
+
+
+

Design and Implementation

Implementation: mpas_xarray does not include MPAS-Analysis specific +functionality
+Date last modified: 2017/02/15
+Contributors: Xylar Asay-Davis +

A test branch can be found here +xylar/MPAS-Analysis/variable_mapping_reorg

+

I have removed map_variable and rename_variables from mpas_xarray. +I also removed any mention of the variable map from the rest of mpas_xarray.

+

This branch also includes several other cleanup operations that are not +addressing any requirements. These include:

+
    +
  • I added a new helper function, open_multifile_dataset, for opening an +xarray data set in a single, simple command without reference to the +preprocessor. This function should make opening new data sets more +intuitive for mpas_xarray users.

  • +
  • making several utility functions non-public (it is unclear to me why anyone +would want to call these directly):

    +
      +
    • _assert_valid_datetimes

    • +
    • _assert_valid_selections

    • +
    • _ensure_list

    • +
    • _get_datetimes

    • +
    +
  • +
  • I have removed the ability to run mpas_xarray.py as a script and the associated +tests. This is on the premise that 1) the tests were outdated and would have +needed to be updated to work with the current code and 2) unit testing in +test/test_mpas_xarray.py takes care of this capability in a better way.

  • +
  • I have tried to make variable names a bit more verbose in various places. +However, at @pwolfram’s request, I have left ds for datasets, following the +xarray convention.

  • +
  • I have tried to improve the docstrings using a syntax that should be useful +for generating documentation later on.

  • +
  • I have updated unit testing to work with the new interface, notably the +open_multifile_dataset function.

  • +
+

Implementation: MPAS-Analysis specific functionality should be supported in +xarray preprocessing
+Date last modified: 2017/02/15
+Contributors: Xylar Asay-Davis +

In the same branch as above, I have added a generalized_reader module that +extends the capabilities of mpas_xarray to include mapping of variable names. +The file structure is as follows:

+
mpas_analysis/shared/
+             -  generalized_reader/
+                     __init__.py
+                    generalized_reader.py
+
+
+

generalized_reader.py contains a function open_multifile_dataset that is similar to +the one in mpas_xarray but with additional arguments needed by analysis:

+
    +
  • variable_map, a map between MPAS and MPAS-Analysis variable names

  • +
  • start_date, the start date of the analysis

  • +
  • end_date, the end date of the analysis +This function performs the same steps as mpas_xarray.open_multifile_dataset +but uses the local preprocessing function, _preprocess, and also slices +the ‘Time’ coordinate using the given start and end dates as a final step.

  • +
+

The generalized_reader._preprocess function first maps variable names, then calls +mpas_xarray.preprocess to do the rest of the preprocessing as normal.

+

Two private functions, _map_variable_name and _rename_variables (taken out of +mpas_xarray) are used to perform variable-name mapping.

+

Testing

Testing and Validation: MPAS-Analysis specific functionality should be supported in +xarray preprocessing
+Date last modified: 2017/02/15
+Contributors: Xylar Asay-Davis +

In xylar/MPAS-Analysis/variable_mapping_reorg, +the unit testing for mpas_xarray has been updated. This includes moving unit testing for +variable mapping elsewhere.

+

I will make sure all tests with config files in the configs/lanl and configs/edison +directories produce bit-for-bit results with the current develop.

+

Testing and Validation: MPAS-Analysis specific functionality should be supported in +xarray preprocessing
+Date last modified: 2017/02/10
+Contributors: Xylar Asay-Davis +

Largely, the same as above.

+

I have added unit testing for generalized_reader (via the standalone +generalized_reader.open_multifile_dataset function). These tests ensure that:

+
    +
  • variable mapping works as expected

  • +
  • start and end dates work as expected

  • +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/api.html b/1.11.0rc1/developers_guide/api.html new file mode 100644 index 000000000..4a9beba60 --- /dev/null +++ b/1.11.0rc1/developers_guide/api.html @@ -0,0 +1,832 @@ + + + + + + + API reference — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

API reference

+

This page provides an auto-generated summary of the MPAS-Analysis API. For +more details and examples, refer to the relevant chapters in the main part of +the documentation.

+
+

Top-level script: mpas_analysis

+ + + + + + + + + + + + + + + + + + + + + + + + +

main()

Entry point for the main script mpas_analysis

build_analysis_list(config, controlConfig)

Build a list of analysis tasks.

determine_analyses_to_generate(analyses, verbose)

Build a list of analysis tasks to run based on the 'generate' config option (or command-line flag) and prerequisites and subtasks of each requested task.

add_task_and_subtasks(analysisTask, ...[, ...])

If a task has been requested through the generate config option or if it is a prerequisite of a requested task, add it to the dictionary of tasks to generate.

update_generate(config, generate)

Update the 'generate' config option using a string from the command line.

run_analysis(config, analyses)

Run all the tasks, either in serial or in parallel

wait_for_task(runningTasks[, timeout])

Build a list of analysis modules based on the 'generate' config option.

+
+
+

Downloading data

+ + + + + + +

download_analysis_data()

Entry point for downloading the input data set from public repository for MPAS-Analysis to work.

+
+
+

Analysis tasks

+
+

Base Class

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

AnalysisTask(config, taskName, componentName)

The base class for analysis tasks.

AnalysisTask.setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

AnalysisTask.run_task()

Run the analysis.

AnalysisTask.run_after(task)

Only run this task after the given task has completed.

AnalysisTask.add_subtask(subtask)

Add a subtask to this task.

AnalysisTask.run([writeLogFile])

Sets up logging and then runs the analysis task.

AnalysisTask.check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

AnalysisTask.check_analysis_enabled(...[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

AnalysisTask.set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

+
+
+

Ocean tasks

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

ConservationTask(config, controlConfig)

This task generates time series plots from output from the conservation analysis member.

ClimatologyMapSST(config, mpasClimatologyTask)

An analysis task for comparison of sea surface temperature (sst) against observations

ClimatologyMapSSS(config, mpasClimatologyTask)

An analysis task for comparison of sea surface salinity (sss) against observations

ClimatologyMapMLD(config, mpasClimatologyTask)

An analysis task for comparison of mixed layer depth (mld) against observations

ClimatologyMapMLDMinMax(config, ...[, ...])

An analysis task for comparison of mixed layer depth (mld) against observations

ClimatologyMapSSH(config, mpasClimatologyTask)

An analysis task for comparison of sea surface height (ssh) against observations

ClimatologyMapEKE(config, mpasClimatologyTask)

An analysis task for comparison of eddy kinetic energy (eke) against observations

ClimatologyMapOHCAnomaly(config, ...[, ...])

An analysis task for comparison of the anomaly from a reference year (typically the start of the simulation) of ocean heat content (OHC)

ClimatologyMapAntarcticMelt(config, ...)

An analysis task for comparison of Antarctic melt rates against observations

ClimatologyMapSose(config, mpasClimatologyTask)

An analysis task for comparison of antarctic field against the Southern Ocean State Estimate

ClimatologyMapArgoTemperature(config, ...[, ...])

An analysis task for comparison of potential temperature against Argo observations

ClimatologyMapArgoSalinity(config, ...[, ...])

An analysis task for comparison of global salinity against Argo observations

ClimatologyMapWaves(config, ...)

An analysis task for the computation of wave climatologies

IndexNino34(config, mpasTimeSeriesTask[, ...])

A task for computing and plotting time series and spectra of the El Nino 3.4 climate index

MeridionalHeatTransport(config, ...[, ...])

Plot meridional heat transport from the analysis member output.

OceanHistogram(config, mpasClimatologyTask, ...)

Plots a histogram of a 2-d ocean variable.

StreamfunctionMOC(config, mpasClimatologyTask)

Computation and plotting of model meridional overturning circulation.

TimeSeriesOHCAnomaly(config, mpasTimeSeriesTask)

Performs analysis of ocean heat content (OHC) from time-series output.

TimeSeriesTemperatureAnomaly(config, ...)

Performs analysis of time series of potential temperature anomalies from a reference simulation year as a function of depth.

TimeSeriesSalinityAnomaly(config, ...)

Performs analysis of time series of salinity anomalies from the first simulation year as a function of depth.

TimeSeriesSST(config, mpasTimeSeriesTask[, ...])

Performs analysis of the time-series output of sea-surface temperature (SST).

TimeSeriesAntarcticMelt(config, ...[, ...])

Performs analysis of the time-series output of Antarctic sub-ice-shelf melt rates.

TimeSeriesOceanRegions(config, regionMasksTask)

Performs analysis of the time-series output of regional mean temperature, salinity, etc.

TimeSeriesTransport(config[, controlConfig])

Extract and plot time series of transport through transects on the MPAS mesh.

+ + + + + + +

ComputeAnomalySubtask(parentTask, ...[, ...])

A subtask for computing anomalies of moving averages and writing them out.

+ + + + + + +

PlotDepthIntegratedTimeSeriesSubtask(...[, ...])

Plots a time series, summed or averaged over various depth ranges

+ + + + + + +

PlotHovmollerSubtask(parentTask, regionName, ...)

Plots a time series vs.

+
+
+

Sea ice tasks

+ + + + + + + + + + + + + + + +

ClimatologyMapSeaIceConc(config, ...[, ...])

An analysis task for comparison of sea ice concentration against observations

ClimatologyMapSeaIceThick(config, ...[, ...])

An analysis task for comparison of sea ice thickness against observations

TimeSeriesSeaIce(config, mpasTimeSeriesTask)

Performs analysis of time series of sea-ice properties.

ClimatologyMapIcebergConc(config, ...[, ...])

An analysis task for comparison of iceberg concentration against observations

+
+
+
+

Shared modules

+
+

Reading MPAS Datasets

+ + + + + + +

open_mpas_dataset(fileName, calendar[, ...])

Opens and returns an xarray data set given file name(s) and the MPAS calendar name.

+ + + + + + + + + + + + + + + +

mpas_xarray.open_multifile_dataset(...[, ...])

Opens and returns an xarray data set given file name(s) and the MPAS calendar name.

mpas_xarray.preprocess(ds, calendar, ...)

Builds correct time specification for MPAS, allowing a date offset because the time must be between 1678 and 2262 based on the xarray library.

mpas_xarray.remove_repeated_time_index(ds)

Remove repeated times from xarray dataset.

mpas_xarray.subset_variables(ds, variableList)

Given a data set and a list of variable names, returns a new data set that contains only variables with those names.

+ + + + + + +

generalized_reader.open_multifile_dataset(...)

Opens and returns an xarray data set given file name(s) and the MPAS calendar name.

+
+
+

Climatology

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

get_comparison_descriptor(config, ...)

Get the comparison grid descriptor from the comparison_grid_name.

get_remapper(config, sourceDescriptor, ...)

Given config options and descriptions of the source and comparison grids, returns a pyremap.Remapper object that can be used to remap from source files or data sets to corresponding data sets on the comparison grid.

compute_monthly_climatology(ds[, calendar, ...])

Compute monthly climatologies from a data set.

compute_climatology(ds, monthValues[, ...])

Compute a monthly, seasonal or annual climatology data set from a data set.

add_years_months_days_in_month(ds[, calendar])

Add year, month and daysInMonth as data arrays in ds.

get_unmasked_mpas_climatology_directory(config)

Get the directory for an unmasked MPAS climatology produced by ncclimo, making the directory if it doesn't already exist

get_unmasked_mpas_climatology_file_name(...)

Get the file name for an unmasked MPAS climatology produced by ncclimo

get_masked_mpas_climatology_file_name(...[, op])

Get the file name for a masked MPAS climatology

get_remapped_mpas_climatology_file_name(...)

Get the file name for a masked MPAS climatology

MpasClimatologyTask(config, componentName[, ...])

An analysis tasks for computing climatologies from output from the timeSeriesStatsMonthly* analysis members.

MpasClimatologyTask.add_variables(variableList)

Add one or more variables and optionally one or more seasons for which to compute climatologies.

MpasClimatologyTask.get_file_name(season)

Returns the full path for MPAS climatology file produced by ncclimo.

RemapMpasClimatologySubtask(...[, ...])

An analysis tasks for computing climatologies from output from the timeSeriesStatsMonthly analysis member.

RemapMpasClimatologySubtask.setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

RemapMpasClimatologySubtask.run_task()

Compute the requested climatologies

RemapMpasClimatologySubtask.add_comparison_grid_descriptor(...)

Add a custom grid descriptor (something other than 'latlon', 'antarctic', 'arctic', 'north_atlantic', or 'north_pacific', or 'subpolar_north_atlantic').

RemapMpasClimatologySubtask.get_masked_file_name(season)

Given config options, the name of a field and a string identifying the months in a seasonal climatology, returns the full path for MPAS climatology files before and after remapping.

RemapMpasClimatologySubtask.get_remapped_file_name(...)

Given config options, the name of a field and a string identifying the months in a seasonal climatology, returns the full path for MPAS climatology files before and after remapping.

RemapMpasClimatologySubtask.customize_masked_climatology(...)

Override this function to customize the climatology during the masking phase (before remapping)

RemapMpasClimatologySubtask.customize_remapped_climatology(...)

Override this function to customize the climatology after remapping

RemapObservedClimatologySubtask(parentTask, ...)

An analysis task for comparison of 2D model fields against observations.

RemapObservedClimatologySubtask.get_observation_descriptor(...)

get a MeshDescriptor for the observation grid.

RemapObservedClimatologySubtask.build_observational_dataset(...)

read in the data sets for observations, and possibly rename some variables and dimensions.

RemapObservedClimatologySubtask.get_file_name(stage)

Given config options, the name of a field and a string identifying the months in a seasonal climatology, returns the full path for MPAS climatology files before and after remapping.

+
+
+

Time Series

+ + + + + + + + + + + + + + + +

cache_time_series(timesInDataSet, ...[, ...])

Create or update a NetCDF file cacheFileName containing the given time series, calculated with timeSeriesCalcFunction over the given times, start and end year, and time frequency with which results are cached.

compute_moving_avg_anomaly_from_start(...[, ...])

Compute the rolling mean of the anomaly of a quantity from the beginning of the simulation (such that the rolling mean starts at zero by definition)

compute_moving_avg(ds[, movingAveragePoints])

Compute the rolling mean of a data set

MpasTimeSeriesTask(config, componentName[, ...])

An analysis tasks for computing time series from output from the timeSeriesStatsMonthly analysis member.

+
+
+

Namelist and Streams Files

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

convert_namelist_to_dict(fname[, readonly])

Converts a namelist file to key-value pairs in dictionary.

NameList.__init__(fname[, path])

Parse the namelist file

NameList.__getattr__(key)

Accessor for dot notation, e.g., nml.field

NameList.__getitem__(key)

Accessor for bracket notation, e.g., nml['field']

NameList.get(key)

Get the value associated with a given key

NameList.getint(key)

Get the integer value associated with a given key

NameList.getfloat(key)

Get the float value associated with a given key

NameList.getbool(key)

Get the boolean value associated with a given key

StreamsFile.__init__(fname[, streamsdir])

Parse the streams file.

StreamsFile.read(streamname, attribname)

Get the value of the given attribute in the given stream

StreamsFile.readpath(streamName[, ...])

Given the name of a stream and optionally start and end dates and a calendar type, returns a list of files that match the file template in the stream.

StreamsFile.has_stream(streamName)

Does the stream file have the given stream?

StreamsFile.find_stream(possibleStreams)

If one (or more) of the names in possibleStreams is an stream in this streams file, returns the first match.

+
+
+

I/O Utilities

+ + + + + + + + + + + + + + + + + + +

utility.paths(*args)

Returns glob'd paths in list for arbitrary number of function arguments.

utility.make_directories(path)

Make the given path if it does not already exist.

utility.build_config_full_path(config, ...)

Get a full path from a base directory and a relative path

utility.check_path_exists(path)

Raise an exception if the given path does not exist.

write_netcdf

+
+
+

Plotting

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

timeseries_analysis_plot(config, dsvalues, ...)

Plots the list of time series data sets.

timeseries_analysis_plot_polar(config, ...)

Plots the list of time series data sets on a polar plot.

plot_polar_comparison(config, lon, lat, ...)

Plots a data set around either the north or south pole.

plot_global_comparison(config, Lons, Lats, ...)

Plots a data set as a longitude/latitude map.

plot_1D(config, xArrays, fieldArrays, errArrays)

Plots a 1D line plot with error bars if available.

plot_vertical_section_comparison(config, ...)

Plots vertical section plots in a three-panel format, comparing model data (in modelArray) to some reference dataset (in refArray), which can be either observations or an alternative model, and also presenting the difference plot of the two.

plot_vertical_section(config, field, ...[, ...])

Plots a data set as a x distance (latitude, longitude, or spherical distance) vs depth map (vertical section).

colormap.setup_colormap(config, ...[, suffix])

Set up a colormap from the registry

ticks.plot_xtick_format(calendar, minDays, ...)

Formats tick labels and positions along the x-axis for time series / index plots

add_inset(fig, fc[, latlonbuffer, ...])

Plots an inset map showing the location of a transect or polygon.

PlotClimatologyMapSubtask(parentTask, ...[, ...])

An analysis task for plotting 2D model fields against observations.

PlotClimatologyMapSubtask.set_plot_info(...)

Store attributes related to plots, plot file names and HTML output.

+
+
+

Projection

+ + + + + + + + + +

get_pyproj_projection(comparison_grid_name)

Get the projection from the comparison_grid_name.

get_cartopy_projection(comparison_grid_name)

Get the projection from the comparison_grid_name.

+
+
+

Regions

+ + + + + + + + + + + + +

compute_region_masks.ComputeRegionMasks(...)

An analysis tasks for computing cell masks for regions defined by geojson features

compute_region_masks_subtask.ComputeRegionMasksSubtask(...)

An analysis tasks for computing cell masks for regions defined by geojson features

compute_region_masks_subtask.get_feature_list(...)

Builds a list of features found in the geojson file

+
+
+

Timekeeping

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +

utility.get_simulation_start_time(streams)

Given a StreamsFile object, returns the simulation start time parsed from a restart file.

utility.string_to_datetime(dateString)

Given a date string and a calendar, returns a datetime.datetime

utility.string_to_relative_delta(dateString)

Given a date string and a calendar, returns an instance of MpasRelativeDelta

utility.string_to_days_since_date(dateString)

Given a date string or an array-like of date strings, a reference date string, and a calendar, returns the number of days (as a float or numpy.array of floats) since the reference date

utility.days_to_datetime(days[, calendar, ...])

Covert days to datetime.datetime objects given a reference date and an MPAS calendar (either 'gregorian' or 'noleap').

utility.datetime_to_days(dates[, calendar, ...])

Given date(s), a calendar and a reference date, returns the days since the reference date, either as a single float or an array of floats.

utility.date_to_days([year, month, day, ...])

Convert a date to days since the reference date.

MpasRelativeDelta.MpasRelativeDelta([dt1, ...])

MpasRelativeDelta is a subclass of dateutil.relativedelta for relative time intervals with different MPAS calendars.

+
+
+

Transects

+ + + + + + + + + +

compute_transect_masks_subtask.compute_mpas_transect_masks(...)

Build a transect mask file from the given MPAS mesh and geojson file defining a set of transects.

compute_transect_masks_subtask.ComputeTransectMasksSubtask(...)

An analysis tasks for computing cell masks for transects defined by geojson features

+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.add_task_and_subtasks.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.add_task_and_subtasks.html new file mode 100644 index 000000000..1bb916b1a --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.add_task_and_subtasks.html @@ -0,0 +1,179 @@ + + + + + + + mpas_analysis.__main__.add_task_and_subtasks — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.__main__.add_task_and_subtasks

+
+
+mpas_analysis.__main__.add_task_and_subtasks(analysisTask, analysesToGenerate, verbose, callCheckGenerate=True)[source]
+

If a task has been requested through the generate config option or +if it is a prerequisite of a requested task, add it to the dictionary of +tasks to generate.

+
+
Parameters:
+
    +
  • analysisTask (AnalysisTask) – A task to be added

  • +
  • analysesToGenerate (OrderedDict of AnalysisTask) – The list of analysis tasks to be generated, which this call may +update to include this task and its subtasks

  • +
  • verbose (bool) – Whether to write out a full stack trace when exceptions occur during +setup_and_check() calls for each task

  • +
  • callCheckGenerate (bool) – Whether the check_generate method should be call for this task to +see if it has been requested. We skip this for subtasks and +prerequisites, since they are needed by another task regardless of +whether the user specifically requested them.

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.build_analysis_list.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.build_analysis_list.html new file mode 100644 index 000000000..644d21cbc --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.build_analysis_list.html @@ -0,0 +1,175 @@ + + + + + + + mpas_analysis.__main__.build_analysis_list — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.__main__.build_analysis_list

+
+
+mpas_analysis.__main__.build_analysis_list(config, controlConfig)[source]
+

Build a list of analysis tasks. New tasks should be added here, following +the approach used for existing analysis tasks.

+
+
Parameters:
+
+
+
Returns:
+

analyses (list of AnalysisTask objects) – A list of all analysis tasks

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.determine_analyses_to_generate.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.determine_analyses_to_generate.html new file mode 100644 index 000000000..ff5d87d69 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.determine_analyses_to_generate.html @@ -0,0 +1,177 @@ + + + + + + + mpas_analysis.__main__.determine_analyses_to_generate — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.__main__.determine_analyses_to_generate

+
+
+mpas_analysis.__main__.determine_analyses_to_generate(analyses, verbose)[source]
+

Build a list of analysis tasks to run based on the ‘generate’ config +option (or command-line flag) and prerequisites and subtasks of each +requested task. Each task’s setup_and_check method is called in the +process.

+
+
Parameters:
+
    +
  • analyses (list of AnalysisTask objects) – A list of all analysis tasks

  • +
  • verbose (bool) – Whether to write out a full stack trace when exceptions occur during +setup_and_check() calls for each task

  • +
+
+
Returns:
+

analysesToGenerate (OrderedDict of AnalysisTask objects) – A dictionary of analysis tasks to run

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.main.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.main.html new file mode 100644 index 000000000..5c2428a75 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.main.html @@ -0,0 +1,162 @@ + + + + + + + mpas_analysis.__main__.main — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.__main__.main

+
+
+mpas_analysis.__main__.main()[source]
+

Entry point for the main script mpas_analysis

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.run_analysis.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.run_analysis.html new file mode 100644 index 000000000..7bcf59c35 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.run_analysis.html @@ -0,0 +1,171 @@ + + + + + + + mpas_analysis.__main__.run_analysis — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.__main__.run_analysis

+
+
+mpas_analysis.__main__.run_analysis(config, analyses)[source]
+

Run all the tasks, either in serial or in parallel

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – contains config options

  • +
  • analyses (OrderedDict of AnalysisTask objects) – A dictionary of analysis tasks to run with (task, subtask) names as +keys

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.update_generate.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.update_generate.html new file mode 100644 index 000000000..6166be84c --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.update_generate.html @@ -0,0 +1,172 @@ + + + + + + + mpas_analysis.__main__.update_generate — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.__main__.update_generate

+
+
+mpas_analysis.__main__.update_generate(config, generate)[source]
+

Update the ‘generate’ config option using a string from the command line.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – contains config options

  • +
  • generate (str) – a comma-separated string of generate flags: either names of analysis +tasks or commands of the form all_<tag> or no_<tag> indicating +that analysis with a given tag should be included or excluded).

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.wait_for_task.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.wait_for_task.html new file mode 100644 index 000000000..af28463ea --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.__main__.wait_for_task.html @@ -0,0 +1,172 @@ + + + + + + + mpas_analysis.__main__.wait_for_task — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.__main__.wait_for_task

+
+
+mpas_analysis.__main__.wait_for_task(runningTasks, timeout=0.1)[source]
+

Build a list of analysis modules based on the ‘generate’ config option. +New tasks should be added here, following the approach used for existing +analysis tasks.

+
+
Parameters:
+

runningTasks (dict of AnalysisTasks) – The tasks that are currently running, with task names as keys

+
+
Returns:
+

analysisTask (AnalysisTasks) – A task that finished

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.download_data.download_analysis_data.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.download_data.download_analysis_data.html new file mode 100644 index 000000000..99da5fdf4 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.download_data.download_analysis_data.html @@ -0,0 +1,159 @@ + + + + + + + mpas_analysis.download_data.download_analysis_data — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.download_data.download_analysis_data

+
+
+mpas_analysis.download_data.download_analysis_data()[source]
+

Entry point for downloading the input data set from public repository for +MPAS-Analysis to work. The input data set includes: pre-processed +observations data, MPAS mapping files and MPAS regional mask files +(which are used for the MOC computation), for a subset of MPAS meshes.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapAntarcticMelt.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapAntarcticMelt.html new file mode 100644 index 000000000..57fa4544d --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapAntarcticMelt.html @@ -0,0 +1,295 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapAntarcticMelt — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapAntarcticMelt

+
+
+class mpas_analysis.ocean.ClimatologyMapAntarcticMelt(config, mpasClimatologyTask, regionMasksTask, controlConfig)[source]
+

An analysis task for comparison of Antarctic melt rates against +observations

+
+
+__init__(config, mpasClimatologyTask, regionMasksTask, controlConfig)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Configuration options

  • +
  • mpasClimatologyTask (MpasClimatologyTask) – The task that produced the climatology to be remapped and plotted

  • +
  • regionMasksTask (ComputeRegionMasks) – A task for computing region masks

  • +
  • controlConfig (mpas_tools.config.MpasConfigParser) – Configuration options for a control run

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask, ...)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoSalinity.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoSalinity.html new file mode 100644 index 000000000..9074e7250 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoSalinity.html @@ -0,0 +1,294 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapArgoSalinity — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapArgoSalinity

+
+
+class mpas_analysis.ocean.ClimatologyMapArgoSalinity(config, mpasClimatologyTask, controlConfig=None)[source]
+

An analysis task for comparison of global salinity against Argo +observations

+
+
+__init__(config, mpasClimatologyTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoTemperature.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoTemperature.html new file mode 100644 index 000000000..8fb5841ed --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoTemperature.html @@ -0,0 +1,294 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapArgoTemperature — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapArgoTemperature

+
+
+class mpas_analysis.ocean.ClimatologyMapArgoTemperature(config, mpasClimatologyTask, controlConfig=None)[source]
+

An analysis task for comparison of potential temperature against Argo +observations

+
+
+__init__(config, mpasClimatologyTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapEKE.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapEKE.html new file mode 100644 index 000000000..0856d889d --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapEKE.html @@ -0,0 +1,294 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapEKE — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapEKE

+
+
+class mpas_analysis.ocean.ClimatologyMapEKE(config, mpasClimatologyTask, controlConfig=None)[source]
+

An analysis task for comparison of eddy kinetic energy (eke) against +observations

+
+
+__init__(config, mpasClimatologyTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLD.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLD.html new file mode 100644 index 000000000..44ce25c3a --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLD.html @@ -0,0 +1,294 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapMLD — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapMLD

+
+
+class mpas_analysis.ocean.ClimatologyMapMLD(config, mpasClimatologyTask, controlConfig=None)[source]
+

An analysis task for comparison of mixed layer depth (mld) against +observations

+
+
+__init__(config, mpasClimatologyTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Check if MLD capability was turned on in the run.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLDMinMax.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLDMinMax.html new file mode 100644 index 000000000..6503a5eab --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLDMinMax.html @@ -0,0 +1,295 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapMLDMinMax — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapMLDMinMax

+
+
+class mpas_analysis.ocean.ClimatologyMapMLDMinMax(config, mpasClimatologyTasks, controlConfig=None)[source]
+

An analysis task for comparison of mixed layer depth (mld) against +observations

+
+
+__init__(config, mpasClimatologyTasks, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTasks[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Check if MLD capability was turned on in the run.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapOHCAnomaly.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapOHCAnomaly.html new file mode 100644 index 000000000..e0728e373 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapOHCAnomaly.html @@ -0,0 +1,305 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapOHCAnomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapOHCAnomaly

+
+
+class mpas_analysis.ocean.ClimatologyMapOHCAnomaly(config, mpas_climatology_task, ref_year_climatology_task, control_config=None)[source]
+

An analysis task for comparison of the anomaly from a reference year +(typically the start of the simulation) of ocean heat content (OHC)

+
+
Variables:
+
    +
  • mpas_climatology_task (mpas_analysis.shared.climatology.MpasClimatologyTask) – The task that produced the climatology to be remapped and plotted

  • +
  • ref_year_climatology_task (mpas_analysis.shared.climatology.RefYearMpasClimatologyTask) – The task that produced the climatology from the first year to be +remapped and then subtracted from the main climatology

  • +
+
+
+
+
+__init__(config, mpas_climatology_task, ref_year_climatology_task, control_config=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpas_climatology_task, ...)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Checks whether analysis is being performed only on the reference year, in which case the analysis will not be meaningful.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSH.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSH.html new file mode 100644 index 000000000..aa946ab14 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSH.html @@ -0,0 +1,294 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapSSH — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapSSH

+
+
+class mpas_analysis.ocean.ClimatologyMapSSH(config, mpasClimatologyTask, controlConfig=None)[source]
+

An analysis task for comparison of sea surface height (ssh) against +observations

+
+
+__init__(config, mpasClimatologyTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSS.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSS.html new file mode 100644 index 000000000..8f7992390 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSS.html @@ -0,0 +1,294 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapSSS — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapSSS

+
+
+class mpas_analysis.ocean.ClimatologyMapSSS(config, mpasClimatologyTask, controlConfig=None)[source]
+

An analysis task for comparison of sea surface salinity (sss) against +observations

+
+
+__init__(config, mpasClimatologyTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSST.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSST.html new file mode 100644 index 000000000..8f382914b --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSST.html @@ -0,0 +1,294 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapSST — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapSST

+
+
+class mpas_analysis.ocean.ClimatologyMapSST(config, mpasClimatologyTask, controlConfig=None)[source]
+

An analysis task for comparison of sea surface temperature (sst) against +observations

+
+
+__init__(config, mpasClimatologyTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSose.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSose.html new file mode 100644 index 000000000..fae1d0a39 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSose.html @@ -0,0 +1,294 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapSose — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapSose

+
+
+class mpas_analysis.ocean.ClimatologyMapSose(config, mpasClimatologyTask, controlConfig=None)[source]
+

An analysis task for comparison of antarctic field against the Southern +Ocean State Estimate

+
+
+__init__(config, mpasClimatologyTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapWaves.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapWaves.html new file mode 100644 index 000000000..8d8efb388 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ClimatologyMapWaves.html @@ -0,0 +1,294 @@ + + + + + + + mpas_analysis.ocean.ClimatologyMapWaves — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ClimatologyMapWaves

+
+
+class mpas_analysis.ocean.ClimatologyMapWaves(config, mpasClimatologyTask, regionMasksTask, controlConfig)[source]
+

An analysis task for the computation of wave climatologies

+
+
+__init__(config, mpasClimatologyTask, regionMasksTask, controlConfig)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • config (MpasAnalysisConfigParser) – Configuration options

  • +
  • mpasClimatologyTask (MpasClimatologyTask) – The task that produced the climatology to be remapped and plotted

  • +
  • regionMasksTask (ComputeRegionMasks) – A task for computing region masks

  • +
  • controlConfig (MpasAnalysisConfigParser) – Configuration options for a control run

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask, ...)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ConservationTask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ConservationTask.html new file mode 100644 index 000000000..fffed59de --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.ConservationTask.html @@ -0,0 +1,315 @@ + + + + + + + mpas_analysis.ocean.ConservationTask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.ConservationTask

+
+
+class mpas_analysis.ocean.ConservationTask(config, controlConfig)[source]
+

This task generates time series plots from output from the conservation +analysis member. A number of different plot types are supported, as indicated +in the plotTypes config option in the conservation section.

+
+
Variables:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Contains configuration options

  • +
  • controlConfig (mpas_tools.config.MpasConfigParser) – Contains configuration options for a control run, if provided

  • +
  • outputFile (str) – The path to the output file produced by this analysis

  • +
  • runDirectory (str) – The path to the restart files from the main simulation being analyzed

  • +
  • historyDirectory (str) – The path to the history files from the main simulation being analyzed

  • +
  • startYear (int) – The year to start the analysis

  • +
  • endYear (int) – The year to end the analysis

  • +
  • inputFiles (list of str) – The paths to all conservation AM files

  • +
  • mainRunName (str) – The name of the main run from the config file

  • +
  • plotTypes (list of str) – The plot types requested in the config file

  • +
  • masterVariableList (dict of key-[list of str] pairs) – Keys are the supported plot types. Entries are lists of the variables +that are needed to produce that plot type.

  • +
  • derivedVariableList (dict of key-[list of str] pairs) – Keys are the derived variables. Entries are lists of variables in the +AM output that are needed to derive that variable.

  • +
  • xmlFileNames (list of str) – File names for xml output with full path

  • +
  • filePrefixes (list of str) – File prefixes for xml files

  • +
  • variableList (dict of key-[list of str] pairs) – Keys are the requested plot types. Entries are lists of the variables +in the AM output that are needed to produce that plot type.

  • +
+
+
+
+
+__init__(config, controlConfig)[source]
+

Construct the analysis task.

+
+
Parameters:
+

config (mpas_tools.config.MpasConfigParser) – Contains configuration options

+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, controlConfig)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Create an output netCDF file that has all of the requested conservation AM variables in the requested time window.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.IndexNino34.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.IndexNino34.html new file mode 100644 index 000000000..ae5ae56eb --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.IndexNino34.html @@ -0,0 +1,302 @@ + + + + + + + mpas_analysis.ocean.IndexNino34 — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.IndexNino34

+
+
+class mpas_analysis.ocean.IndexNino34(config, mpasTimeSeriesTask, controlConfig=None)[source]
+

A task for computing and plotting time series and spectra of the El Nino +3.4 climate index

+
+
Variables:
+
    +
  • mpasTimeSeriesTask (MpasTimeSeriesTask) – The task that extracts the time series from MPAS monthly output

  • +
  • controlconfig (mpas_tools.config.MpasConfigParser) – Configuration options for a control run (if any)

  • +
+
+
+
+
+__init__(config, mpasTimeSeriesTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasTimeSeriesTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Computes NINO34 index and plots the time series and power spectrum with 95 and 99% confidence bounds

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.MeridionalHeatTransport.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.MeridionalHeatTransport.html new file mode 100644 index 000000000..87fb9a970 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.MeridionalHeatTransport.html @@ -0,0 +1,301 @@ + + + + + + + mpas_analysis.ocean.MeridionalHeatTransport — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.MeridionalHeatTransport

+
+
+class mpas_analysis.ocean.MeridionalHeatTransport(config, mpasClimatologyTask, controlConfig=None)[source]
+

Plot meridional heat transport from the analysis member output.

+
+
Variables:
+
    +
  • mpasClimatologyTask (MpasClimatologyTask) – The task that produced the climatology to be remapped and plotted

  • +
  • controlconfig (mpas_tools.config.MpasConfigParser) – Configuration options for a control run (if any)

  • +
+
+
+
+
+__init__(config, mpasClimatologyTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Process MHT analysis member data if available. Plots MHT as: 1D function of latitude 2D function of latitude and depth.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.OceanHistogram.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.OceanHistogram.html new file mode 100644 index 000000000..664ed1820 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.OceanHistogram.html @@ -0,0 +1,294 @@ + + + + + + + mpas_analysis.ocean.OceanHistogram — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.OceanHistogram

+
+
+class mpas_analysis.ocean.OceanHistogram(config, mpasClimatologyTask, regionMasksTask, controlConfig=None)[source]
+

Plots a histogram of a 2-d ocean variable.

+
+
+__init__(config, mpasClimatologyTask, regionMasksTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Configuration options

  • +
  • mpasClimatologyTask (MpasClimatologyTask) – The task that produced the climatology to be remapped and plotted

  • +
  • regionMasksTask (ComputeRegionMasks) – A task for computing region masks

  • +
  • controlConfig (mpas_tools.config.MpasConfigParser) – Configuration options for a control run (if any)

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask, ...[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.StreamfunctionMOC.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.StreamfunctionMOC.html new file mode 100644 index 000000000..0fe2c86dc --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.StreamfunctionMOC.html @@ -0,0 +1,302 @@ + + + + + + + mpas_analysis.ocean.StreamfunctionMOC — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.StreamfunctionMOC

+
+
+class mpas_analysis.ocean.StreamfunctionMOC(config, mpasClimatologyTask, controlConfig=None)[source]
+

Computation and plotting of model meridional overturning circulation. +Will eventually support:

+
+
    +
  • MOC streamfunction, post-processed

  • +
  • MOC streamfunction, from MOC analysis member

  • +
  • MOC time series (max value at 24.5N), post-processed

  • +
  • MOC time series (max value at 24.5N), from MOC analysis member

  • +
+
+
+
+__init__(config, mpasClimatologyTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesAntarcticMelt.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesAntarcticMelt.html new file mode 100644 index 000000000..4ae38bab3 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesAntarcticMelt.html @@ -0,0 +1,295 @@ + + + + + + + mpas_analysis.ocean.TimeSeriesAntarcticMelt — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.TimeSeriesAntarcticMelt

+
+
+class mpas_analysis.ocean.TimeSeriesAntarcticMelt(config, mpasTimeSeriesTask, regionMasksTask, controlConfig=None)[source]
+

Performs analysis of the time-series output of Antarctic sub-ice-shelf +melt rates.

+
+
+__init__(config, mpasTimeSeriesTask, regionMasksTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Configuration options

  • +
  • mpasTimeSeriesTask (MpasTimeSeriesTask) – The task that extracts the time series from MPAS monthly output

  • +
  • regionMasksTask (ComputeRegionMasks) – A task for computing region masks

  • +
  • controlConfig (mpas_tools.config.MpasConfigParser, optional) – Configuration options for a control run (if any)

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasTimeSeriesTask, ...[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOHCAnomaly.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOHCAnomaly.html new file mode 100644 index 000000000..a95b48552 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOHCAnomaly.html @@ -0,0 +1,293 @@ + + + + + + + mpas_analysis.ocean.TimeSeriesOHCAnomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.TimeSeriesOHCAnomaly

+
+
+class mpas_analysis.ocean.TimeSeriesOHCAnomaly(config, mpasTimeSeriesTask, controlConfig=None)[source]
+

Performs analysis of ocean heat content (OHC) from time-series output.

+
+
+__init__(config, mpasTimeSeriesTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasTimeSeriesTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOceanRegions.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOceanRegions.html new file mode 100644 index 000000000..3a4c06cef --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesOceanRegions.html @@ -0,0 +1,294 @@ + + + + + + + mpas_analysis.ocean.TimeSeriesOceanRegions — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.TimeSeriesOceanRegions

+
+
+class mpas_analysis.ocean.TimeSeriesOceanRegions(config, regionMasksTask, controlConfig=None)[source]
+

Performs analysis of the time-series output of regionoal mean temperature, +salinity, etc.

+
+
+__init__(config, regionMasksTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, regionMasksTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSST.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSST.html new file mode 100644 index 000000000..d6c1f1708 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSST.html @@ -0,0 +1,302 @@ + + + + + + + mpas_analysis.ocean.TimeSeriesSST — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.TimeSeriesSST

+
+
+class mpas_analysis.ocean.TimeSeriesSST(config, mpasTimeSeriesTask, controlConfig=None)[source]
+

Performs analysis of the time-series output of sea-surface temperature +(SST).

+
+
Variables:
+
    +
  • mpasTimeSeriesTask (MpasTimeSeriesTask) – The task that extracts the time series from MPAS monthly output

  • +
  • controlconfig (mpas_tools.config.MpasConfigParser) – Configuration options for a control run (if any)

  • +
+
+
+
+
+__init__(config, mpasTimeSeriesTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasTimeSeriesTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Performs analysis of the time-series output of sea-surface temperature (SST).

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSalinityAnomaly.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSalinityAnomaly.html new file mode 100644 index 000000000..2ff67859e --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesSalinityAnomaly.html @@ -0,0 +1,293 @@ + + + + + + + mpas_analysis.ocean.TimeSeriesSalinityAnomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.TimeSeriesSalinityAnomaly

+
+
+class mpas_analysis.ocean.TimeSeriesSalinityAnomaly(config, mpasTimeSeriesTask)[source]
+

Performs analysis of time series of salinity anomalies from the first +simulation year as a function of depth.

+
+
+__init__(config, mpasTimeSeriesTask)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Contains configuration options

  • +
  • mpasTimeSeriesTask (MpasTimeSeriesTask) – The task that extracts the time series from MPAS monthly output

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasTimeSeriesTask)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTemperatureAnomaly.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTemperatureAnomaly.html new file mode 100644 index 000000000..bc39c6db7 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTemperatureAnomaly.html @@ -0,0 +1,293 @@ + + + + + + + mpas_analysis.ocean.TimeSeriesTemperatureAnomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.TimeSeriesTemperatureAnomaly

+
+
+class mpas_analysis.ocean.TimeSeriesTemperatureAnomaly(config, mpasTimeSeriesTask)[source]
+

Performs analysis of time series of potential temperature anomalies from +a reference simulation year as a function of depth.

+
+
+__init__(config, mpasTimeSeriesTask)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Contains configuration options

  • +
  • mpasTimeSeriesTask (MpasTimeSeriesTask) – The task that extracts the time series from MPAS monthly output

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasTimeSeriesTask)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTransport.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTransport.html new file mode 100644 index 000000000..54dc470ad --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.TimeSeriesTransport.html @@ -0,0 +1,293 @@ + + + + + + + mpas_analysis.ocean.TimeSeriesTransport — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.TimeSeriesTransport

+
+
+class mpas_analysis.ocean.TimeSeriesTransport(config, controlConfig=None)[source]
+

Extract and plot time series of transport through transects on the MPAS +mesh.

+
+
+__init__(config, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config[, controlConfig])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask.html new file mode 100644 index 000000000..7af2b41e1 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask.html @@ -0,0 +1,320 @@ + + + + + + + mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask

+
+
+class mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask(parentTask, mpasTimeSeriesTask, outFileName, variableList, movingAveragePoints, subtaskName='computeAnomaly', alter_dataset=None)[source]
+

A subtask for computing anomalies of moving averages and writing them out.

+
+
Variables:
+
    +
  • mpasTimeSeriesTask (MpasTimeSeriesTask) – The task that extracts the time series from MPAS monthly output

  • +
  • outFileName (str) – The file name (usually without full path) where the resulting +data set should be written

  • +
  • variableList (list of str) – Variables to be included in the data set

  • +
  • movingAveragePoints (int) – The number of points (months) used in the moving average used to +smooth the data set

  • +
  • alter_dataset (function) – A function that takes an xarray.Dataset and returns an +xarray.Dataset for manipulating the data set (e.g. adding a new +variable computed from others). This operation is performed before +computing moving averages and anomalies, so that these operations are +also performed on any new variables added to the data set.

  • +
+
+
+
+
+__init__(parentTask, mpasTimeSeriesTask, outFileName, variableList, movingAveragePoints, subtaskName='computeAnomaly', alter_dataset=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • parentTask (AnalysisTask) – The parent task of which this is a subtask

  • +
  • mpasTimeSeriesTask (MpasTimeSeriesTask) – The task that extracts the time series from MPAS monthly output

  • +
  • outFileName (str) – The file name (usually without full path) where the resulting +data set should be written

  • +
  • variableList (list of str) – Variables to be included in the data set

  • +
  • movingAveragePoints (int) – The number of points (months) used in the moving average used to +smooth the data set

  • +
  • subtaskName (str, optional) – The name of the subtask

  • +
  • alter_dataset (function) – A function that takes an xarray.Dataset and returns an +xarray.Dataset for manipulating the data set (e.g. adding a new +variable computed from others). This operation is performed before +computing moving averages and anomalies, so that these operations +are also performed on any new variables added to the data set.

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(parentTask, mpasTimeSeriesTask, ...)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Performs analysis of ocean heat content (OHC) from time-series output.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask.html new file mode 100644 index 000000000..365f669a3 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask.html @@ -0,0 +1,337 @@ + + + + + + + mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask

+
+
+class mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask(parentTask, regionName, inFileName, outFileLabel, fieldNameInTitle, mpasFieldName, yAxisLabel, sectionName, thumbnailSuffix, imageCaption, galleryGroup, groupSubtitle, groupLink, galleryName, subtaskName=None, controlConfig=None)[source]
+

Plots a time series, summed or averaged over various depth ranges

+
+
Variables:
+
    +
  • regionName (str) – The name of the region to plot

  • +
  • inFileName (str) – The file containing the time-depth data set to plot

  • +
  • outFileLabel (str) – The prefix on each plot and associated XML file

  • +
  • fieldNameInTitle (str) – The name of the field being plotted, as used in the plot title

  • +
  • mpasFieldName (str) – The name of the variable in the MPAS timeSeriesStatsMonthly output

  • +
  • yAxisLabel (str) – the y-axis label of the plotted field (including units)

  • +
  • sectionName (str) – A section in the config file where the colormap and contour values +are defined

  • +
  • thumbnailSuffix (str) – The text to be displayed under the thumbnail image, to which the +region name will be prepended

  • +
  • imageCaption (str) – The caption when mousing over the plot or displaying it full +screen

  • +
  • galleryGroup (str) – The name of the group of galleries in which this plot belongs

  • +
  • groupSubtitle (str) – The subtitle of the group in which this plot belongs (or blank +if none)

  • +
  • groupLink (str) – A short name (with no spaces) for the link to the gallery group

  • +
  • galleryName (str) – The name of the gallery in which this plot belongs

  • +
  • controlConfig (mpas_tools.config.MpasConfigParser) – The configuration options for the control run (if any)

  • +
+
+
+
+
+__init__(parentTask, regionName, inFileName, outFileLabel, fieldNameInTitle, mpasFieldName, yAxisLabel, sectionName, thumbnailSuffix, imageCaption, galleryGroup, groupSubtitle, groupLink, galleryName, subtaskName=None, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • parentTask (AnalysisTask) – The parent task of which this is a subtask

  • +
  • regionName (str) – The name of the region to plot

  • +
  • inFileName (str) – The file containing the time-depth data set to plot

  • +
  • outFileLabel (str) – The prefix on each plot and associated XML file

  • +
  • fieldNameInTitle (str) – The name of the field being plotted, as used in the plot title

  • +
  • mpasFieldName (str) – The name of the variable in the MPAS timeSeriesStatsMonthly output

  • +
  • yAxisLabel (str) – the y-axis label of the plotted field

  • +
  • sectionName (str) – a section in the config file where the colormap and contour values +are defined

  • +
  • thumbnailSuffix (str) – The text to be displayed under the thumbnail image, to which the +region name will be prepended

  • +
  • imageCaption (str) – the caption when mousing over the plot or displaying it full +screen

  • +
  • galleryGroup (str) – the name of the group of galleries in which this plot belongs

  • +
  • groupSubtitle (str) – the subtitle of the group in which this plot belongs (or blank +if none)

  • +
  • groupLink (str) – a short name (with no spaces) for the link to the gallery group

  • +
  • galleryName (str) – the name of the gallery in which this plot belongs

  • +
  • subtaskName (str, optional) – The name of the subtask (plotTimeSeries<RegionName> by default)

  • +
  • controlConfig (mpas_tools.config.MpasConfigParser, optional) – The configuration options for the control run (if any)

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(parentTask, regionName, inFileName, ...)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

customize_fig(fig)

A function to override to customize the figure.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Compute vertical aggregates of the data and plot the time series

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask.html new file mode 100644 index 000000000..7bf11cd41 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask.html @@ -0,0 +1,338 @@ + + + + + + + mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask

+
+
+class mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask(parentTask, regionName, inFileName, outFileLabel, fieldNameInTitle, mpasFieldName, unitsLabel, sectionName, thumbnailSuffix, imageCaption, galleryGroup, groupSubtitle, groupLink, galleryName, subtaskName=None, controlConfig=None, regionMaskFile=None)[source]
+

Plots a time series vs. depth

+
+
Variables:
+
    +
  • controlconfig (mpas_tools.config.MpasConfigParser) – Configuration options for a control run (if any)

  • +
  • regionName (str) – The name of the region to plot

  • +
  • inFileName (str) – The file containing the time-depth data set to plot

  • +
  • outFileLabel (str) – The prefix on each plot and associated XML file

  • +
  • fieldNameInTitle (str) – The name of the field being plotted, as used in the plot title

  • +
  • mpasFieldName (str) – The name of the variable in the MPAS timeSeriesStatsMonthly output

  • +
  • unitsLabel (str) – The units of the plotted field, to be displayed on color bars

  • +
  • sectionName (str) – A section in the config file where the colormap and contour values +are defined

  • +
  • regionMaskFile (str) – A geojson file with regions (including one corresponding to +regionName) that will be used to make an inset

  • +
  • thumbnailSuffix (str) – The text to be displayed under the thumbnail image, to which the +region name will be prepended

  • +
  • imageCaption (str) – The caption when mousing over the plot or displaying it full +screen

  • +
  • galleryGroup (str) – The name of the group of galleries in which this plot belongs

  • +
  • groupSubtitle (str) – The subtitle of the group in which this plot belongs (or blank +if none)

  • +
  • groupLink (str) – A short name (with no spaces) for the link to the gallery group

  • +
  • galleryName (str) – The name of the gallery in which this plot belongs

  • +
+
+
+
+
+__init__(parentTask, regionName, inFileName, outFileLabel, fieldNameInTitle, mpasFieldName, unitsLabel, sectionName, thumbnailSuffix, imageCaption, galleryGroup, groupSubtitle, groupLink, galleryName, subtaskName=None, controlConfig=None, regionMaskFile=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • parentTask (AnalysisTask) – The parent task of which this is a subtask

  • +
  • regionName (str) – The name of the region to plot

  • +
  • inFileName (str) – The file containing the time-depth data set to plot

  • +
  • outFileLabel (str) – The prefix on each plot and associated XML file

  • +
  • fieldNameInTitle (str) – The name of the field being plotted, as used in the plot title

  • +
  • mpasFieldName (str) – The name of the variable in the MPAS timeSeriesStatsMonthly output

  • +
  • unitsLabel (str) – the units of the plotted field, to be displayed on color bars

  • +
  • sectionName (str) – a section in the config file where the colormap and contour values +are defined

  • +
  • thumbnailSuffix (str) – The text to be displayed under the thumbnail image, to which the +region name will be prepended

  • +
  • imageCaption (str) – the caption when mousing over the plot or displaying it full +screen

  • +
  • galleryGroup (str) – the name of the group of galleries in which this plot belongs

  • +
  • groupSubtitle (str) – the subtitle of the group in which this plot belongs (or blank +if none)

  • +
  • groupLink (str) – a short name (with no spaces) for the link to the gallery group

  • +
  • galleryName (str) – the name of the gallery in which this plot belongs

  • +
  • subtaskName (str, optional) – The name of the subtask (plotHovmoller<RegionName> by default)

  • +
  • controlconfig (mpas_tools.config.MpasConfigParser, optional) – Configuration options for a control run (if any)

  • +
  • regionMaskFile (str, optional) – A geojson file with regions (including one corresponding to +regionName) that will be used to make an inset

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(parentTask, regionName, inFileName, ...)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Make the Hovmoller plot from the time series.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapIcebergConc.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapIcebergConc.html new file mode 100644 index 000000000..d6c100305 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapIcebergConc.html @@ -0,0 +1,272 @@ + + + + + + + mpas_analysis.sea_ice.ClimatologyMapIcebergConc — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.sea_ice.ClimatologyMapIcebergConc

+
+
+class mpas_analysis.sea_ice.ClimatologyMapIcebergConc(config, mpasClimatologyTask, hemisphere, controlConfig=None)[source]
+

An analysis task for comparison of iceberg concentration against +observations

+
+
+__init__(config, mpasClimatologyTask, hemisphere, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Configuration options

  • +
  • mpasClimatologyTask (MpasClimatologyTask) – The task that produced the climatology to be remapped and plotted

  • +
  • hemisphere ({'NH', 'SH'}) – The hemisphere to plot

  • +
  • controlconfig (mpas_tools.config.MpasConfigParser, optional) – Configuration options for a control run (if any)

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask, hemisphere)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceConc.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceConc.html new file mode 100644 index 000000000..4f5485590 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceConc.html @@ -0,0 +1,272 @@ + + + + + + + mpas_analysis.sea_ice.ClimatologyMapSeaIceConc — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.sea_ice.ClimatologyMapSeaIceConc

+
+
+class mpas_analysis.sea_ice.ClimatologyMapSeaIceConc(config, mpasClimatologyTask, hemisphere, controlConfig=None)[source]
+

An analysis task for comparison of sea ice concentration against +observations

+
+
+__init__(config, mpasClimatologyTask, hemisphere, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Configuration options

  • +
  • mpasClimatologyTask (MpasClimatologyTask) – The task that produced the climatology to be remapped and plotted

  • +
  • hemisphere ({'NH', 'SH'}) – The hemisphere to plot

  • +
  • controlconfig (mpas_tools.config.MpasConfigParser, optional) – Configuration options for a control run (if any)

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask, hemisphere)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceThick.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceThick.html new file mode 100644 index 000000000..4487c09eb --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceThick.html @@ -0,0 +1,272 @@ + + + + + + + mpas_analysis.sea_ice.ClimatologyMapSeaIceThick — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.sea_ice.ClimatologyMapSeaIceThick

+
+
+class mpas_analysis.sea_ice.ClimatologyMapSeaIceThick(config, mpasClimatologyTask, hemisphere, controlConfig=None)[source]
+

An analysis task for comparison of sea ice thickness against +observations

+
+
+__init__(config, mpasClimatologyTask, hemisphere, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Configuration options

  • +
  • mpasClimatologyTask (MpasClimatologyTask) – The task that produced the climatology to be remapped and plotted

  • +
  • hemisphere ({'NH', 'SH'}) – The hemisphere to plot

  • +
  • controlConfig (mpas_tools.config.MpasConfigParser, optional) – Configuration options for a control run (if any)

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasClimatologyTask, hemisphere)

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.TimeSeriesSeaIce.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.TimeSeriesSeaIce.html new file mode 100644 index 000000000..f2ea76695 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.sea_ice.TimeSeriesSeaIce.html @@ -0,0 +1,278 @@ + + + + + + + mpas_analysis.sea_ice.TimeSeriesSeaIce — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.sea_ice.TimeSeriesSeaIce

+
+
+class mpas_analysis.sea_ice.TimeSeriesSeaIce(config, mpasTimeSeriesTask, controlConfig=None)[source]
+

Performs analysis of time series of sea-ice properties.

+
+
Variables:
+
    +
  • mpasTimeSeriesTask (MpasTimeSeriesTask) – The task that extracts the time series from MPAS monthly output

  • +
  • controlconfig (mpas_tools.config.MpasConfigParser) – Configuration options for a control run (if any)

  • +
+
+
+
+
+__init__(config, mpasTimeSeriesTask, controlConfig=None)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, mpasTimeSeriesTask[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Performs analysis of time series of sea-ice properties.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.add_subtask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.add_subtask.html new file mode 100644 index 000000000..5c3b50bd1 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.add_subtask.html @@ -0,0 +1,174 @@ + + + + + + + mpas_analysis.shared.AnalysisTask.add_subtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.AnalysisTask.add_subtask

+
+
+AnalysisTask.add_subtask(subtask)[source]
+

Add a subtask to this tasks. This task always runs after the subtask +has finished. However, this task gets set up before the subtask, +so the setup of the subtask can depend on fields defined during the +setup of this task (the parent).

+
+
Parameters:
+

subtask (AnalysisTask) – The subtask to run as part of this task

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_analysis_enabled.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_analysis_enabled.html new file mode 100644 index 000000000..67b501d8e --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_analysis_enabled.html @@ -0,0 +1,186 @@ + + + + + + + mpas_analysis.shared.AnalysisTask.check_analysis_enabled — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.AnalysisTask.check_analysis_enabled

+
+
+AnalysisTask.check_analysis_enabled(analysisOptionName, default=False, raiseException=True)[source]
+

Check to make sure a given analysis is turned on, issuing a warning or +raising an exception if not.

+
+
Parameters:
+
    +
  • analysisOptionName (str) – The name of a boolean namelist option indicating whether the given +analysis member is enabled

  • +
  • default (bool, optional) – If no analysis option with the given name can be found, indicates +whether the given analysis is assumed to be enabled by default.

  • +
  • raiseException (bool, optional) – Whether

  • +
+
+
Returns:
+

enabled (bool) – Whether the given analysis is enabled

+
+
Raises:
+

RuntimeError – If the given analysis option is not found and default is not + True or if the analysis option is found and is False. The + exception is only raised if raiseException = True.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_generate.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_generate.html new file mode 100644 index 000000000..ff4afd8aa --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_generate.html @@ -0,0 +1,176 @@ + + + + + + + mpas_analysis.shared.AnalysisTask.check_generate — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.AnalysisTask.check_generate

+
+
+AnalysisTask.check_generate()[source]
+

Determines if this analysis should be generated, based on the +generate config option and taskName, componentName and +tags.

+

Individual tasks do not need to create their own versions of this +function.

+
+
Returns:
+

generate (bool) – Whether or not this task should be run.

+
+
+

:raises ValueError : If one of self.taskName, self.componentName: or self.tags has not been set.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.html new file mode 100644 index 000000000..e74a7f282 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.html @@ -0,0 +1,315 @@ + + + + + + + mpas_analysis.shared.AnalysisTask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.AnalysisTask

+
+
+class mpas_analysis.shared.AnalysisTask(config, taskName, componentName, tags=[], subtaskName=None)[source]
+

The base class for analysis tasks.

+
+
Variables:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Contains configuration options

  • +
  • taskName (str) – The name of the task, typically the same as the class name except +starting with lowercase (e.g. ‘myTask’ for class ‘MyTask’)

  • +
  • componentName ({'ocean', 'seaIce'}) – The name of the component (same as the folder where the task +resides)

  • +
  • tags (list of str) – Tags used to describe the task (e.g. ‘timeSeries’, ‘climatology’, +horizontalMap’, ‘index’, ‘transect’). These are used to determine +which tasks are generated (e.g. ‘all_transect’ or ‘no_climatology’ +in the ‘generate’ flags)

  • +
  • runDirectory (str) – The base input directory for namelists, streams files and restart files

  • +
  • historyDirectory (str) – The base input directory for history files

  • +
  • plotsDirectory (str) – The directory for writing plots (which is also created if it doesn’t +exist)

  • +
  • namelist (shared.io.NameList) – the namelist reader

  • +
  • runStreams (shared.io.StreamsFile) – the streams file reader for streams in the run directory (e.g. restart +files)

  • +
  • historyStreams (shared.io.StreamsFile) – the streams file reader for streams in the history directory (most +streams other than restart files)

  • +
  • calendar ({'gregorian', 'gregoraian_noleap'}) – The calendar used in the MPAS run

  • +
  • runAfterTasks (list of AnalysisTasks) – tasks that must be complete before this task can run

  • +
  • subtasks (list of mpas_analysis.shared.AnalysisTask) – Subtasks of this task

  • +
  • xmlFileNames (list of strings) – The XML file associated with each plot produced by this analysis, empty +if no plots were produced

  • +
  • logger (logging.Logger) – A logger for output during the run phase of an analysis task

  • +
+
+
+
+
+__init__(config, taskName, componentName, tags=[], subtaskName=None)[source]
+

Construct the analysis task.

+

Individual tasks (children classes of this base class) should first +call this method to perform basic initialization, then, define the +taskName, componentName and list of tags for the task.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Contains configuration options

  • +
  • taskName (str) – The name of the task, typically the same as the class name except +starting with lowercase (e.g. ‘myTask’ for class ‘MyTask’)

  • +
  • componentName ({'ocean', 'seaIce'}) – The name of the component (same as the folder where the task +resides)

  • +
  • tags (list of str, optional) – Tags used to describe the task (e.g. ‘timeSeries’, ‘climatology’, +horizontalMap’, ‘index’, ‘transect’). These are used to determine +which tasks are generated (e.g. ‘all_transect’ or ‘no_climatology’ +in the ‘generate’ flags)

  • +
  • subtaskName (str, optional) – If this is a subtask of taskName, the name of the subtask

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, taskName, componentName[, ...])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run.html new file mode 100644 index 000000000..59f6e9a8c --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run.html @@ -0,0 +1,174 @@ + + + + + + + mpas_analysis.shared.AnalysisTask.run — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.AnalysisTask.run

+
+
+AnalysisTask.run(writeLogFile=True)[source]
+

Sets up logging and then runs the analysis task.

+
+
Parameters:
+

writeLogFile (bool, optional) – If True, output to stderr and stdout get written to a log file. +Otherwise, the internal logger self.logger points to stdout +and no log file is created. The intention is for logging to take +place in parallel mode but not in serial mode.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_after.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_after.html new file mode 100644 index 000000000..46ed93382 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_after.html @@ -0,0 +1,175 @@ + + + + + + + mpas_analysis.shared.AnalysisTask.run_after — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.AnalysisTask.run_after

+
+
+AnalysisTask.run_after(task)[source]
+

Only run this task after the given task has completed. This allows a +task to be constructed of multiple subtasks, some of which may block +later tasks, while allowing some subtasks to run in parallel. It also +allows for tasks to depend on other tasks (e.g. for computing +climatologies or extracting time series for many variables at once).

+
+
Parameters:
+

task (AnalysisTask) – The task that should finish before this one begins

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_task.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_task.html new file mode 100644 index 000000000..048433340 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_task.html @@ -0,0 +1,167 @@ + + + + + + + mpas_analysis.shared.AnalysisTask.run_task — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.AnalysisTask.run_task

+
+
+AnalysisTask.run_task()[source]
+

Run the analysis. Each task should override this function to do the +work of computing and/or plotting analysis

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.set_start_end_date.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.set_start_end_date.html new file mode 100644 index 000000000..19bedc388 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.set_start_end_date.html @@ -0,0 +1,174 @@ + + + + + + + mpas_analysis.shared.AnalysisTask.set_start_end_date — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.AnalysisTask.set_start_end_date

+
+
+AnalysisTask.set_start_end_date(section)[source]
+

Set the start and end dates in the config correspond to the start +and end years in a given category of analysis

+
+
Parameters:
+

section (str) – The name of a section in the config file containing startYear +and endYear options. section is typically one of +climatology, timeSeries or index

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.setup_and_check.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.setup_and_check.html new file mode 100644 index 000000000..bab60e636 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.AnalysisTask.setup_and_check.html @@ -0,0 +1,177 @@ + + + + + + + mpas_analysis.shared.AnalysisTask.setup_and_check — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.AnalysisTask.setup_and_check

+
+
+AnalysisTask.setup_and_check()[source]
+

Perform steps to set up the analysis (e.g. reading namelists and +streams files).

+

After this call, the following attributes are set (see documentation +for the class): +runDirectory, historyDirectory, plotsDirectory, namelist, runStreams, +historyStreams, calendar

+

Individual tasks (children classes of this base class) should first +call this method to perform basic setup, then, check whether the +configuration is correct for a given analysis and perform additional, +analysis-specific setup. For example, this function could check if +necessary observations and other data files are found, then, determine +the list of files to be read when the analysis is run.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables.html new file mode 100644 index 000000000..b98e92d61 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables.html @@ -0,0 +1,205 @@ + + + + + + + mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables

+
+
+MpasClimatologyTask.add_variables(variableList, seasons=None)[source]
+

Add one or more variables and optionally one or more seasons for which +to compute climatologies.

+
+
Parameters:
+
    +
  • variableList (list of str) – A list of variable names in the stream to be included in the +climatologies

  • +
  • seasons (list of str, optional) – A list of seasons (keys in shared.constants.monthDictionary) +to be computed or None if only monthly +climatologies are needed.

  • +
+
+
Raises:
+

ValueError – if this funciton is called before this task has been set up (so + the list of available variables has not yet been set) or if one + or more of the requested variables is not available in the stream.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name.html new file mode 100644 index 000000000..7c107d36a --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name.html @@ -0,0 +1,196 @@ + + + + + + + mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name

+
+
+MpasClimatologyTask.get_file_name(season)[source]
+

Returns the full path for MPAS climatology file produced by ncclimo.

+
+
Parameters:
+

season (str) – One of the seasons in constants.monthDictionary

+
+
Returns:
+

fileName (str) – The path to the climatology file for the specified season.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.html new file mode 100644 index 000000000..540e9893d --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.html @@ -0,0 +1,332 @@ + + + + + + + mpas_analysis.shared.climatology.MpasClimatologyTask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.MpasClimatologyTask

+
+
+class mpas_analysis.shared.climatology.MpasClimatologyTask(config, componentName, taskName=None, op='avg')[source]
+

An analysis tasks for computing climatologies from output from the +timeSeriesStatsMonthly* analysis members.

+
+
Variables:
+
    +
  • variableList (dict of lists) – A dictionary with seasons as keys and a list of variable names in +the stream to be included in the climatologies for each season in the +values.

  • +
  • allVariables (list of str) – A list of all available variable names in the stream used to raise an +exception when an unavailable variable is requested

  • +
  • inputFiles (list of str) – A list of input files used to compute the climatologies.

  • +
  • ncclimoModel ({'mpaso', 'mpascice'}) – The name of the component expected by ncclimo

  • +
  • endDate (startDate,) – The start and end dates of the climatology as strings

  • +
  • endYear (startYear,) – The start and end years of the climatology

  • +
  • seasonSubtasks (dict) – If using xarray to compute climatologies, a dictionary of subtasks, one +for each possible season

  • +
  • op ({'avg', 'min', 'max'}) – operator for monthly stats

  • +
  • streamName (str) – The name of the stream to read from, one of +timeSeriesStatsMonthlyOutput, +timeSeriesStatsMonthlyMinOutput, +timeSeriesStatsMonthlyMaxOutput

  • +
+
+
+
+
+__init__(config, componentName, taskName=None, op='avg')[source]
+

Construct the analysis task.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Contains configuration options

  • +
  • componentName ({'ocean', 'seaIce'}) – The name of the component (same as the folder where the task +resides)

  • +
  • op ({'avg', 'min', 'max'}, optioinal) – operator for monthly stats

  • +
  • taskName (str, optional) – the name of the task, defaults to +mpasClimatology<ComponentName><Op>

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, componentName[, taskName, op])

Construct the analysis task.

add_subtask(subtask)

Add a subtask to this tasks.

add_variables(variableList[, seasons])

Add one or more variables and optionally one or more seasons for which to compute climatologies.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

get_file_name(season)

Returns the full path for MPAS climatology file produced by ncclimo.

get_start_and_end()

Get the start and end years and dates for the climatology.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Compute the requested climatologies

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor.html new file mode 100644 index 000000000..52aa6d894 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor.html @@ -0,0 +1,199 @@ + + + + + + + mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor

+
+
+RemapMpasClimatologySubtask.add_comparison_grid_descriptor(comparisonGridName, comparisonDescriptor)[source]
+

Add a custom grid descriptor (something other than ‘latlon’, +‘antarctic’, ‘arctic’, ‘north_atlantic’, or ‘north_pacific’, +or ‘subpolar_north_atlantic’).

+
+
Parameters:
+
    +
  • comparisonGridName (str) – The name of the comparison grid

  • +
  • comparisonDescriptor (MeshDescriptor) – A descriptor of the comparison grid to use for +remapping

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology.html new file mode 100644 index 000000000..0963279cc --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology.html @@ -0,0 +1,202 @@ + + + + + + + mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology

+
+
+RemapMpasClimatologySubtask.customize_masked_climatology(climatology, season)[source]
+

Override this function to customize the climatology during the masking +phase (before remapping)

+
+
Parameters:
+
    +
  • climatology (xarray.Dataset) – The MPAS climatology data set that has had a mask added but has +not yet been remapped

  • +
  • season (str) – The name of the season to be masked

  • +
+
+
Returns:
+

climatology (xarray.Dataset) – The same data set with any custom fields added or modifications +made

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology.html new file mode 100644 index 000000000..af4d2a11f --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology.html @@ -0,0 +1,201 @@ + + + + + + + mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology

+
+
+RemapMpasClimatologySubtask.customize_remapped_climatology(climatology, comparisonGridNames, season)[source]
+

Override this function to customize the climatology after remapping

+
+
Parameters:
+
    +
  • climatology (xarray.Dataset) – The MPAS climatology data set that has been remapped

  • +
  • comparisonGridNames (str) – The name of the comparison grid to use for remapping.

  • +
  • season (str) – The name of the season to be remapped

  • +
+
+
Returns:
+

climatology (xarray.Dataset) – The same data set with any custom fields added or modifications +made

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name.html new file mode 100644 index 000000000..258a6e80c --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name.html @@ -0,0 +1,198 @@ + + + + + + + mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name

+
+
+RemapMpasClimatologySubtask.get_masked_file_name(season)[source]
+

Given config options, the name of a field and a string identifying the +months in a seasonal climatology, returns the full path for MPAS +climatology files before and after remapping.

+
+
Parameters:
+

season (str) – One of the seasons in constants.monthDictionary

+
+
Returns:
+

fileName (str) – The path to the climatology file for the specified season.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name.html new file mode 100644 index 000000000..1c86ca825 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name.html @@ -0,0 +1,201 @@ + + + + + + + mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name

+
+
+RemapMpasClimatologySubtask.get_remapped_file_name(season, comparisonGridName)[source]
+

Given config options, the name of a field and a string identifying the +months in a seasonal climatology, returns the full path for MPAS +climatology files before and after remapping.

+
+
Parameters:
+
    +
  • season (str) – One of the seasons in constants.monthDictionary

  • +
  • comparisonGridName (str) – The name of the comparison grid to use for remapping.

  • +
+
+
Returns:
+

fileName (str) – The path to the climatology file for the specified season.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.html new file mode 100644 index 000000000..3ec25147a --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.html @@ -0,0 +1,361 @@ + + + + + + + mpas_analysis.shared.climatology.RemapMpasClimatologySubtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapMpasClimatologySubtask

+
+
+class mpas_analysis.shared.climatology.RemapMpasClimatologySubtask(mpasClimatologyTask, parentTask, climatologyName, variableList, seasons, comparisonGridNames=None, iselValues=None, subtaskName='remapMpasClimatology', useNcremap=None, vertices=False)[source]
+

An analysis tasks for computing climatologies from output from the +timeSeriesStatsMonthly analysis member.

+
+
Variables:
+
    +
  • climatologyName (str) – A name that describes the climatology (e.g. a short version of +the important field(s) in the climatology) used to name the +subdirectories for each stage of the climatology

  • +
  • variableList (list of str) – A list of variable names in timeSeriesStatsMonthly to be +included in the climatologies

  • +
  • iselValues (dict) – A dictionary of dimensions and indices (or None) used to extract +a slice of the MPAS field.

  • +
  • seasons (list of str) – A list of seasons (keys in shared.constants.monthDictionary) +over which the climatology should be computed or [‘none’] if only +monthly climatologies are needed.

  • +
  • comparisonDescriptors (dict of MeshDescriptor) – Descriptors of the comparison grids to use for remapping, with +grid names as the keys.

  • +
  • restartFileName (str) – If comparisonGridName is not None, the name of a restart +file from which the MPAS mesh can be read.

  • +
  • useNcremap (bool, optional) – Whether to use ncremap to do the remapping (the other option being +an internal python code that handles more grid types and extra +dimensions)

  • +
  • op ({'avg', 'min', 'max'}) – operator for monthly stats

  • +
  • vertices (bool) – Whether to remap from vertices, rather than cells

  • +
+
+
+
+
+__init__(mpasClimatologyTask, parentTask, climatologyName, variableList, seasons, comparisonGridNames=None, iselValues=None, subtaskName='remapMpasClimatology', useNcremap=None, vertices=False)[source]
+

Construct the analysis task and adds it as a subtask of the +parentTask.

+
+
Parameters:
+
    +
  • mpasClimatologyTask (MpasClimatologyTask) – The task that produced the climatology to be remapped

  • +
  • parentTask (AnalysisTask) – The parent task, used to get the taskName, config and +componentName

  • +
  • climatologyName (str) – A name that describes the climatology (e.g. a short version of +the important field(s) in the climatology) used to name the +subdirectories for each stage of the climatology

  • +
  • variableList (list of str) – A list of variable names in timeSeriesStatsMonthly to be +included in the climatologies

  • +
  • seasons (list of str) – A list of seasons (keys in shared.constants.monthDictionary) +to be computed or [‘none’] (not None) if only monthly +climatologies are needed.

  • +
  • comparisonGridNames (list of str) – optional +The name(s) of the comparison grid to use for remapping. If none +is supplied, add_comparison_descriptor() must be called to add +one or more comparison grids.

  • +
  • iselValues (dict, optional) – A dictionary of dimensions and indices (or None) used to +extract a slice of the MPAS field(s).

  • +
  • subtaskName (str, optional) – The name of the subtask

  • +
  • useNcremap (bool, optional) – Whether to use ncremap to do the remapping (the other option being +an internal python code that handles more grid types and extra +dimensions). This defaults to the config option useNcremap +if it is not explicitly given. If a comparison grid other than +latlon is given, ncremap is not supported so this flag is set +to False.

  • +
  • vertices (bool, optional) – Whether to remap from vertices, rather than cells

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(mpasClimatologyTask, parentTask, ...)

Construct the analysis task and adds it as a subtask of the parentTask.

add_comparison_grid_descriptor(...)

Add a custom grid descriptor (something other than 'latlon', 'antarctic', 'arctic', 'north_atlantic', or 'north_pacific', or 'subpolar_north_atlantic').

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

customize_masked_climatology(climatology, season)

Override this function to customize the climatology during the masking phase (before remapping)

customize_remapped_climatology(climatology, ...)

Override this function to customize the climatology after remapping

get_masked_file_name(season)

Given config options, the name of a field and a string identifying the months in a seasonal climatology, returns the full path for MPAS climatology files before and after remapping.

get_remapped_file_name(season, ...)

Given config options, the name of a field and a string identifying the months in a seasonal climatology, returns the full path for MPAS climatology files before and after remapping.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Compute the requested climatologies

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task.html new file mode 100644 index 000000000..f929ab044 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task.html @@ -0,0 +1,188 @@ + + + + + + + mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task

+
+
+RemapMpasClimatologySubtask.run_task()[source]
+

Compute the requested climatologies

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check.html new file mode 100644 index 000000000..597e4b3bc --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check.html @@ -0,0 +1,195 @@ + + + + + + + mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check

+
+
+RemapMpasClimatologySubtask.setup_and_check()[source]
+

Perform steps to set up the analysis and check for errors in the setup.

+
+
Raises:
+

IOError : – If a restart file is not available from which to read mesh + information or if no history files are available from which to + compute the climatology in the desired time range.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset.html new file mode 100644 index 000000000..657e0159b --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset.html @@ -0,0 +1,198 @@ + + + + + + + mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset

+
+
+RemapObservedClimatologySubtask.build_observational_dataset(fileName)[source]
+

read in the data sets for observations, and possibly rename some +variables and dimensions. A subclass derived from this class must +override this method to create the appropriate data set

+
+
Parameters:
+

fileName (str) – observation file name

+
+
Returns:
+

dsObs (xarray.Dataset) – The observational dataset

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name.html new file mode 100644 index 000000000..5d45f7e85 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name.html @@ -0,0 +1,202 @@ + + + + + + + mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name

+
+
+RemapObservedClimatologySubtask.get_file_name(stage, season=None, comparisonGridName=None)[source]
+

Given config options, the name of a field and a string identifying the +months in a seasonal climatology, returns the full path for MPAS +climatology files before and after remapping.

+
+
Parameters:
+
    +
  • stage ({'original', 'climatology', 'remapped'}) – The stage of the masking and remapping process

  • +
  • season (str, optional) – One of the seasons in constants.monthDictionary

  • +
  • comparisonGridName (str, optional) – The name of the comparison grid to use for remapping.

  • +
+
+
Returns:
+

fileName (str) – The path to the climatology file for the specified season.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor.html new file mode 100644 index 000000000..f57a3b437 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor.html @@ -0,0 +1,198 @@ + + + + + + + mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor

+
+
+RemapObservedClimatologySubtask.get_observation_descriptor(fileName)[source]
+

get a MeshDescriptor for the observation grid. A subclass derived from +this class must override this method to create the appropriate +descriptor

+
+
Parameters:
+

fileName (str) – observation file name describing the source grid

+
+
Returns:
+

obsDescriptor (MeshDescriptor) – The descriptor for the observation grid

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.html new file mode 100644 index 000000000..4378871aa --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.html @@ -0,0 +1,325 @@ + + + + + + + mpas_analysis.shared.climatology.RemapObservedClimatologySubtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.RemapObservedClimatologySubtask

+
+
+class mpas_analysis.shared.climatology.RemapObservedClimatologySubtask(parentTask, seasons, fileName, outFilePrefix, comparisonGridNames=['latlon'], subtaskName='remapObservations')[source]
+

An analysis task for comparison of 2D model fields against observations.

+
+
Variables:
+
    +
  • seasons (list of str) – A list of seasons (keys in constants.monthDictionary) over +which the climatology should be computed.

  • +
  • fileName (str) – The name of the observation file

  • +
  • outFilePrefix (str) – The prefix in front of output files and mapping files, typically the +name of the field being remapped

  • +
  • comparisonGridNames (list of str) – The name(s) of the comparison grid to use for remapping.

  • +
+
+
+
+
+__init__(parentTask, seasons, fileName, outFilePrefix, comparisonGridNames=['latlon'], subtaskName='remapObservations')[source]
+

Construct one analysis subtask for each plot (i.e. each season and +comparison grid) and a subtask for computing climatologies.

+
+
Parameters:
+
    +
  • parentTask (AnalysisTask) – The parent (main) task for this subtask

  • +
  • seasons (list of str) – A list of seasons (keys in constants.monthDictionary) over +which the climatology should be computed.

  • +
  • fileName (str) – The name of the observation file

  • +
  • outFilePrefix (str) – The prefix in front of output files and mapping files, typically +the name of the field being remapped

  • +
  • comparisonGridNames (list of str) – optional +The name(s) of the comparison grid to use for remapping.

  • +
  • subtaskName (str, optional) – The name of the subtask

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(parentTask, seasons, fileName, ...)

Construct one analysis subtask for each plot (i.e. each season and comparison grid) and a subtask for computing climatologies.

add_subtask(subtask)

Add a subtask to this tasks.

build_observational_dataset(fileName)

read in the data sets for observations, and possibly rename some variables and dimensions.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

get_file_name(stage[, season, ...])

Given config options, the name of a field and a string identifying the months in a seasonal climatology, returns the full path for MPAS climatology files before and after remapping.

get_observation_descriptor(fileName)

get a MeshDescriptor for the observation grid.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Performs remapping of obsrevations to the comparsion grid

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.add_years_months_days_in_month.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.add_years_months_days_in_month.html new file mode 100644 index 000000000..2f324ef25 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.add_years_months_days_in_month.html @@ -0,0 +1,206 @@ + + + + + + + mpas_analysis.shared.climatology.add_years_months_days_in_month — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.add_years_months_days_in_month

+
+
+mpas_analysis.shared.climatology.add_years_months_days_in_month(ds, calendar=None)[source]
+

Add year, month and daysInMonth as data arrays in ds. +The number of days in each month of ds is computed either using the +startTime and endTime if available or assuming noleap +calendar and ignoring leap years. year and month are computed +accounting correctly for the the calendar.

+
+
Parameters:
+
    +
  • ds (xarray.Dataset or xarray.DataArray object) – A data set with a Time coordinate expressed as days since +0001-01-01

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – The name of one of the calendars supported by MPAS cores, used to +determine year and month from Time coordinate

  • +
+
+
Returns:
+

ds (object of same type as ds) – The data set with year, month and daysInMonth data arrays +added (if not already present)

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.compute_climatology.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.compute_climatology.html new file mode 100644 index 000000000..5a16519ce --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.compute_climatology.html @@ -0,0 +1,214 @@ + + + + + + + mpas_analysis.shared.climatology.compute_climatology — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.compute_climatology

+
+
+mpas_analysis.shared.climatology.compute_climatology(ds, monthValues, calendar=None, maskVaries=True)[source]
+

Compute a monthly, seasonal or annual climatology data set from a data +set. The mean is weighted but the number of days in each month of +the data set, ignoring values masked out with NaNs. If the month +coordinate is not present, a data array month will be added based +on Time and the provided calendar.

+
+
Parameters:
+
    +
  • ds (xarray.Dataset or xarray.DataArray) – A data set with a Time coordinate expressed as days since +0001-01-01 or month coordinate

  • +
  • monthValues (int or array-like of ints) – A single month or an array of months to be averaged together

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – The name of one of the calendars supported by MPAS cores, used to +determine month from Time coordinate, so must be supplied if +ds does not already have a month coordinate or data array

  • +
  • maskVaries (bool, optional) – If the mask (where variables in ds are NaN) varies with time. +If not, the weighted average does not need make extra effort to account +for the mask. Most MPAS fields will have masks that don’t vary in +time, whereas observations may sometimes be present only at some +times and not at others, requiring maskVaries = True.

  • +
+
+
Returns:
+

climatology (object of same type as ds) – A data set without the 'Time' coordinate containing the mean +of ds over all months in monthValues, weighted by the number of days +in each month.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.compute_monthly_climatology.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.compute_monthly_climatology.html new file mode 100644 index 000000000..beb302bb3 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.compute_monthly_climatology.html @@ -0,0 +1,212 @@ + + + + + + + mpas_analysis.shared.climatology.compute_monthly_climatology — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.compute_monthly_climatology

+
+
+mpas_analysis.shared.climatology.compute_monthly_climatology(ds, calendar=None, maskVaries=True)[source]
+

Compute monthly climatologies from a data set. The mean is weighted but +the number of days in each month of the data set, ignoring values masked +out with NaNs. If the month coordinate is not present, a data array +month will be added based on Time and the provided calendar.

+
+
Parameters:
+
    +
  • ds (xarray.Dataset or xarray.DataArray) – A data set with a Time coordinate expressed as days since +0001-01-01 or month coordinate

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – The name of one of the calendars supported by MPAS cores, used to +determine month from Time coordinate, so must be supplied if +ds does not already have a month coordinate or data array

  • +
  • maskVaries (bool, optional) – If the mask (where variables in ds are NaN) varies with time. +If not, the weighted average does not need make extra effort to account +for the mask. Most MPAS fields will have masks that don’t vary in +time, whereas observations may sometimes be present only at some +times and not at others, requiring maskVaries = True.

  • +
+
+
Returns:
+

climatology (object of same type as ds) – A data set without the 'Time' coordinate containing the mean +of ds over all months in monthValues, weighted by the number of days +in each month.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_comparison_descriptor.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_comparison_descriptor.html new file mode 100644 index 000000000..27c92bad3 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_comparison_descriptor.html @@ -0,0 +1,200 @@ + + + + + + + mpas_analysis.shared.climatology.get_comparison_descriptor — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.get_comparison_descriptor

+
+
+mpas_analysis.shared.climatology.get_comparison_descriptor(config, comparison_grid_name)[source]
+

Get the comparison grid descriptor from the comparison_grid_name.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Contains configuration options

  • +
  • comparison_grid_name ({'latlon', 'antarctic', 'arctic', 'north_atlantic',) – ‘north_pacific’, ‘subpolar_north_atlantic’} +The name of the comparison grid to use for remapping.

  • +
+
+
Raises:
+

ValueError – If comparison_grid_name does not describe a known comparison grid

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name.html new file mode 100644 index 000000000..791dceff8 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name.html @@ -0,0 +1,200 @@ + + + + + + + mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name

+
+
+mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name(config, season, componentName, climatologyName, op='avg')[source]
+

Get the file name for a masked MPAS climatology

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Configuration options

  • +
  • season (str) – One of the seasons in constants.monthDictionary

  • +
  • componentName ({'ocean', 'seaIce'}) – The MPAS component for which the climatology is being computed

  • +
  • climatologyName (str) – The name of the climatology (typically the name of a field to mask +and later remap)

  • +
  • op ({'avg', 'min', 'max'}) – operator for monthly stats

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name.html new file mode 100644 index 000000000..c1906f402 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name.html @@ -0,0 +1,203 @@ + + + + + + + mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name

+
+
+mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name(config, season, componentName, climatologyName, comparisonGridName, op='avg')[source]
+

Get the file name for a masked MPAS climatology

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Configuration options

  • +
  • season (str) – One of the seasons in constants.monthDictionary

  • +
  • componentName ({'ocean', 'seaIce'}) – The MPAS component for which the climatology is being computed

  • +
  • climatologyName (str) – The name of the climatology (typically the name of a field to mask +and later remap)

  • +
  • comparisonGridName (str) – The name of the comparison grid to use for remapping. If it is one +of the known comparison grid names, the full grid name is looked up via +mpas_analysis.shared.climatology.get_comparison_descriptor()

  • +
  • op ({'avg', 'min', 'max'}) – operator for monthly stats

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_remapper.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_remapper.html new file mode 100644 index 000000000..d83589e53 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_remapper.html @@ -0,0 +1,209 @@ + + + + + + + mpas_analysis.shared.climatology.get_remapper — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.get_remapper

+
+
+mpas_analysis.shared.climatology.get_remapper(config, sourceDescriptor, comparisonDescriptor, mappingFilePrefix, method, logger=None, vertices=False)[source]
+

Given config options and descriptions of the source and comparison grids, +returns a pyremap.Remapper object that can be used to remap from source +files or data sets to corresponding data sets on the comparison grid.

+

If necessary, creates the mapping file containing weights and indices +needed to perform remapping.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Contains configuration options

  • +
  • sourceDescriptor (pyremap.MeshDescriptor) – A description of the source mesh or grid

  • +
  • comparisonDescriptor (pyremap.MeshDescriptor) – A description of the comparison grid

  • +
  • mappingFilePrefix (str) – A prefix to be prepended to the mapping file name

  • +
  • method ({'bilinear', 'neareststod', 'conserve'}) – The method of interpolation used.

  • +
  • logger (logging.Logger, optional) – A logger to which ncclimo output should be redirected

  • +
  • vertices (bool, optional) – Whether to remap from vertices, rather than cells

  • +
+
+
Returns:
+

remapper (pyremap.Remapper) – A remapper that can be used to remap files or data sets from the source +grid or mesh to the comparison grid.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory.html new file mode 100644 index 000000000..17580a9ec --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory.html @@ -0,0 +1,197 @@ + + + + + + + mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory

+
+
+mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory(config, op='avg')[source]
+

Get the directory for an unmasked MPAS climatology produced by ncclimo, +making the directory if it doesn’t already exist

+
+
Parameters:
+
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name.html new file mode 100644 index 000000000..94a61ec24 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name.html @@ -0,0 +1,198 @@ + + + + + + + mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name

+
+
+mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name(config, season, componentName, op='avg')[source]
+

Get the file name for an unmasked MPAS climatology produced by ncclimo

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – configuration options

  • +
  • season (str) – One of the seasons in constants.monthDictionary

  • +
  • componentName ({'ocean', 'seaIce'}) – The MPAS component for which the climatology is being computed

  • +
  • op ({'avg', 'min', 'max'}) – operator for monthly stats

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset.html new file mode 100644 index 000000000..20f862851 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset.html @@ -0,0 +1,238 @@ + + + + + + + mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset

+
+
+mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset(fileNames, calendar, config, simulationStartTime=None, timeVariableName='Time', variableList=None, selValues=None, iselValues=None, variableMap=None, startDate=None, endDate=None, chunking=None)[source]
+

Opens and returns an xarray data set given file name(s) and the MPAS +calendar name.

+
+
Parameters:
+
    +
  • fileNames (list of strings) – A lsit of file paths to read

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – The name of one of the calendars supported by MPAS cores

  • +
  • config (mpas_tools.config.MpasConfigParser) – Contains configuration options

  • +
  • simulationStartTime (string, optional) –

    The start date of the simulation, used to convert from time variables +expressed as days since the start of the simulation to days since the +reference date. simulationStartTime takes one of the following +forms:

    +
    0001-01-01
    +0001-01-01 00:00:00
    +
    +
    +

    simulationStartTime is only required if the MPAS time variable +(identified by timeVariableName) is a number of days since the +start of the simulation.

    +

  • +
  • timeVariableName (string, optional) – The name of the time variable (typically 'Time' if using a +variableMap or 'xtime' if not using a variableMap)

  • +
  • variableList (list of strings, optional) – If present, a list of variables to be included in the data set

  • +
  • selValues (dict, optional) –

    A dictionary of coordinate names (keys) and values or arrays of +values used to slice the variales in the data set. See +xarray.DataSet.sel() for details on how this dictonary is used. +An example:

    +
    selectCorrdValues = {'cellLon': 180.0}
    +
    +
    +

  • +
  • iselValues (dict, optional) –

    A dictionary of coordinate names (keys) and indices, slices or +arrays of indices used to slice the variales in the data set. See +xarray.DataSet.isel() for details on how this dictonary is used. +An example:

    +
    iselValues = {'nVertLevels': slice(0, 3),
    +              'nCells': cellIDs}
    +
    +
    +

  • +
  • variableMap (dict, optional) – A dictionary with keys that are variable names used by +MPAS-Analysis and values that are lists of possible names for the same +variable in the MPAS dycore that produced the data set (which may +differ between versions).

  • +
  • startDate (string or datetime.datetime, optional) – If present, the first and last dates to be used in the data set. The +time variable is sliced to only include dates within this range.

  • +
  • endDate (string or datetime.datetime, optional) – If present, the first and last dates to be used in the data set. The +time variable is sliced to only include dates within this range.

  • +
  • chunking (None, int, True, dict, optional) – If integer is present, applies maximum chunk size from config file +value maxChunkSize, otherwise if None do not perform chunking. If +True, use automated chunking using default config value +maxChunkSize. If chunking is a dict use dictionary values for +chunking.

  • +
+
+
Returns:
+

ds (xarray.Dataset)

+
+
Raises:
+
    +
  • TypeError – If the time variable has an unsupported type (not a date string, + a floating-pont number of days since the start of the simulation + or a numpy.datatime64 object).

  • +
  • ValueError – If the time variable is not found in the data set or if the time + variable is a number of days since the start of the simulation but + simulationStartTime is None.

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__.html new file mode 100644 index 000000000..81a53aff8 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__.html @@ -0,0 +1,185 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__ — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__

+
+
+NameList.__getattr__(key)[source]
+

Accessor for dot noation, e.g., nml.field

+
+
Parameters:
+

key (str) – The key to get a value for

+
+
Returns:
+

value (str) – The value associated with key

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__.html new file mode 100644 index 000000000..1232232c6 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__.html @@ -0,0 +1,185 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__ — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__

+
+
+NameList.__getitem__(key)[source]
+

Accessor for bracket notation, e.g., nml[‘field’]

+
+
Parameters:
+

key (str) – The key to get a value for

+
+
Returns:
+

value (Any) – The value associated with key

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__.html new file mode 100644 index 000000000..608bc9c37 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__.html @@ -0,0 +1,186 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__ — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__

+
+
+NameList.__init__(fname, path=None)[source]
+

Parse the namelist file

+
+
Parameters:
+
    +
  • fname (str) – The file name of the namelist file

  • +
  • path (str, optional) – If fname contains a relative path, fname is +relative to path, rather than the current working directory

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.get.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.get.html new file mode 100644 index 000000000..15c61d339 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.get.html @@ -0,0 +1,185 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.NameList.get — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.NameList.get

+
+
+NameList.get(key)[source]
+

Get the value associated with a given key

+
+
Parameters:
+

key (str) – The key to get a value for

+
+
Returns:
+

value (Any) – The value associated with key

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool.html new file mode 100644 index 000000000..568c4b167 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool.html @@ -0,0 +1,185 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool

+
+
+NameList.getbool(key)[source]
+

Get the boolean value associated with a given key

+
+
Parameters:
+

key (str) – The key to get a value for

+
+
Returns:
+

value (bool) – The value associated with key

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat.html new file mode 100644 index 000000000..507567d30 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat.html @@ -0,0 +1,185 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat

+
+
+NameList.getfloat(key)[source]
+

Get the float value associated with a given key

+
+
Parameters:
+

key (str) – The key to get a value for

+
+
Returns:
+

value (float) – The value associated with key

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getint.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getint.html new file mode 100644 index 000000000..ac05026f2 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getint.html @@ -0,0 +1,185 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.NameList.getint — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.NameList.getint

+
+
+NameList.getint(key)[source]
+

Get the integer value associated with a given key

+
+
Parameters:
+

key (str) – The key to get a value for

+
+
Returns:
+

value (int) – The value associated with key

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__.html new file mode 100644 index 000000000..f027f1616 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__.html @@ -0,0 +1,186 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__ — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__

+
+
+StreamsFile.__init__(fname, streamsdir=None)[source]
+

Parse the streams file.

+
+
Parameters:
+
    +
  • fname (str) – The file name the stream file

  • +
  • streamsdir (str, optional) – The base path to both the output streams data and the sreams file +(the latter only if fname is a relative path).

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream.html new file mode 100644 index 000000000..1f1ea3ddc --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream.html @@ -0,0 +1,190 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream

+
+
+StreamsFile.find_stream(possibleStreams)[source]
+

If one (or more) of the names in possibleStreams is an stream in +this streams file, returns the first match.

+
+
Parameters:
+

possibleStreams (list of str) – A list of streams to search for

+
+
Returns:
+

streamName (str) – The name of an stream from possibleOptions occurring in the +streams file

+
+
Raises:
+

ValueError – If no match is found.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream.html new file mode 100644 index 000000000..4b7e859e3 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream.html @@ -0,0 +1,188 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream

+
+
+StreamsFile.has_stream(streamName)[source]
+

Does the stream file have the given stream?

+

Returns True if the streams file has a stream with the given +streamName, otherwise returns False.

+
+
Parameters:
+

streamName (str) – The name of the stream

+
+
Returns:
+

streamFound (bool) – True if the stream was found in the stream file, False +otherwise

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read.html new file mode 100644 index 000000000..a48a4fabb --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read.html @@ -0,0 +1,189 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read

+
+
+StreamsFile.read(streamname, attribname)[source]
+

Get the value of the given attribute in the given stream

+
+
Parameters:
+
    +
  • streamname (str) – The name of the stream

  • +
  • attribname (str) – The name of the attribute within the stream

  • +
+
+
Returns:
+

value (str) – The value associated with the attribute, or None if the +attribute was not found

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath.html new file mode 100644 index 000000000..4b40321af --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath.html @@ -0,0 +1,207 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath

+
+
+StreamsFile.readpath(streamName, startDate=None, endDate=None, calendar=None)[source]
+

Given the name of a stream and optionally start and end dates and a +calendar type, returns a list of files that match the file template in +the stream.

+
+
Parameters:
+
    +
  • streamName (string) – The name of a stream that produced the files

  • +
  • startDate (string or datetime.datetime, optional) –

    String or datetime.datetime objects identifying the beginning +and end dates to be found.

    +

    Note: a buffer of one output interval is subtracted from startDate +and added to endDate because the file date might be the first +or last date contained in the file (or anything in between).

    +

  • +
  • endDate (string or datetime.datetime, optional) –

    String or datetime.datetime objects identifying the beginning +and end dates to be found.

    +

    Note: a buffer of one output interval is subtracted from startDate +and added to endDate because the file date might be the first +or last date contained in the file (or anything in between).

    +

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – The name of one of the calendars supported by MPAS cores, and is +required if startDate and/or endDate are supplied

  • +
+
+
Returns:
+

fileList (list) – A list of file names produced by the stream that fall between +the startDate and endDate (if supplied)

+
+
Raises:
+

ValueError – If no files from the stream are found.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict.html new file mode 100644 index 000000000..e9049f8e8 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict.html @@ -0,0 +1,188 @@ + + + + + + + mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict

+
+
+mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict(fname, readonly=True)[source]
+

Converts a namelist file to key-value pairs in dictionary.

+
+
Parameters:
+
    +
  • fname (str) – The file name of the namelist

  • +
  • readonly (bool, optional) – Should the resulting dictionary read-only?

  • +
+
+
Returns:
+

nml (dict) – A dictionary where keys are namelist options and values are namelist

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.open_mpas_dataset.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.open_mpas_dataset.html new file mode 100644 index 000000000..4080eda4e --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.open_mpas_dataset.html @@ -0,0 +1,197 @@ + + + + + + + mpas_analysis.shared.io.open_mpas_dataset — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.open_mpas_dataset

+
+
+mpas_analysis.shared.io.open_mpas_dataset(fileName, calendar, timeVariableNames=['xtime_startMonthly', 'xtime_endMonthly'], variableList=None, startDate=None, endDate=None)[source]
+

Opens and returns an xarray data set given file name(s) and the MPAS +calendar name.

+
+
Parameters:
+
    +
  • fileName (str) – File path to read

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – The name of one of the calendars supported by MPAS cores

  • +
  • timeVariableNames (str or list of 2 str, optional) – The name of the time variable (typically 'xtime' +or ['xtime_startMonthly', 'xtime_endMonthly']), or None if +time does not need to be parsed (and is already in the Time +variable)

  • +
  • variableList (list of strings, optional) – If present, a list of variables to be included in the data set

  • +
  • startDate (string or datetime.datetime, optional) – If present, the first and last dates to be used in the data set. The +time variable is sliced to only include dates within this range.

  • +
  • endDate (string or datetime.datetime, optional) – If present, the first and last dates to be used in the data set. The +time variable is sliced to only include dates within this range.

  • +
+
+
Returns:
+

ds (xarray.Dataset)

+
+
Raises:
+
    +
  • TypeError – If the time variable has an unsupported type (not a date string).

  • +
  • ValueError – If the time variable is not found in the data set

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.build_config_full_path.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.build_config_full_path.html new file mode 100644 index 000000000..e53331b6a --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.build_config_full_path.html @@ -0,0 +1,187 @@ + + + + + + + mpas_analysis.shared.io.utility.build_config_full_path — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.utility.build_config_full_path

+
+
+mpas_analysis.shared.io.utility.build_config_full_path(config, section, relativePathOption, relativePathSection=None, defaultPath=None, baseDirectoryOption='baseDirectory')[source]
+

Get a full path from a base directory and a relative path

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – configuration from which to read the path

  • +
  • section (str) – the name of a section in config, which must have an option +baseDirectory

  • +
  • relativePathOption (str) – the name of an option in section of the relative path within +baseDirectory (or possibly an absolute path)

  • +
  • relativePathSection (str, optional) – the name of a section for relativePathOption if not section

  • +
  • defaultPath (str, optional) – the name of a path to return if the resulting path doesn’t exist.

  • +
  • baseDirectoryOption (str, optional) – the name of the option in section for the base directorys

  • +
+
+
Returns:
+

fullPath (str) – The full path to the given relative path within the given +baseDirectory

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.check_path_exists.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.check_path_exists.html new file mode 100644 index 000000000..5e07d5a44 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.check_path_exists.html @@ -0,0 +1,177 @@ + + + + + + + mpas_analysis.shared.io.utility.check_path_exists — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.utility.check_path_exists

+
+
+mpas_analysis.shared.io.utility.check_path_exists(path)[source]
+

Raise an exception if the given path does not exist.

+
+
Parameters:
+

path (str) – Absolute path

+
+
Raises:
+

OSError – If the path does not exist

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.make_directories.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.make_directories.html new file mode 100644 index 000000000..88ffce62d --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.make_directories.html @@ -0,0 +1,177 @@ + + + + + + + mpas_analysis.shared.io.utility.make_directories — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.utility.make_directories

+
+
+mpas_analysis.shared.io.utility.make_directories(path)[source]
+

Make the given path if it does not already exist.

+
+
Parameters:
+

path (str) – the path to make

+
+
Returns:
+

path (str) – the path unchanged

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.paths.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.paths.html new file mode 100644 index 000000000..3a8699769 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.utility.paths.html @@ -0,0 +1,178 @@ + + + + + + + mpas_analysis.shared.io.utility.paths — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.utility.paths

+
+
+mpas_analysis.shared.io.utility.paths(*args)[source]
+

Returns glob’d paths in list for arbitrary number of function arguments. +Note, each expanded set of paths is sorted.

+
+
Parameters:
+

*args (list) – A list of arguments to pass to glob.glob

+
+
Returns:
+

paths (list of str) – A list of file paths

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.write_netcdf.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.write_netcdf.html new file mode 100644 index 000000000..f7f439da9 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.io.write_netcdf.html @@ -0,0 +1,171 @@ + + + + + + + mpas_analysis.shared.io.write_netcdf — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.io.write_netcdf

+

Functions

+ + + + + + +

write_netcdf_with_fill(ds, fileName[, ...])

Write an xarray data set to a NetCDF file using finite fill values and unicode strings

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset.html new file mode 100644 index 000000000..832822810 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset.html @@ -0,0 +1,222 @@ + + + + + + + mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset

+
+
+mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset(fileNames, calendar, simulationStartTime=None, timeVariableName='xtime', variableList=None, selValues=None, iselValues=None)[source]
+

Opens and returns an xarray data set given file name(s) and the MPAS +calendar name.

+
+
Parameters:
+
    +
  • fileNames (list of strings) – A lsit of file paths to read

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – The name of one of the calendars supported by MPAS cores

  • +
  • simulationStartTime (string, optional) –

    The start date of the simulation, used to convert from time variables +expressed as days since the start of the simulation to days since the +reference date. simulationStartTime takes one of the following +forms:

    +
    0001-01-01
    +0001-01-01 00:00:00
    +
    +
    +

    simulationStartTime is only required if the MPAS time variable +(identified by timeVariableName) is a number of days since the +start of the simulation.

    +

  • +
  • timeVariableName (string, optional) – The name of the time variable (typically 'xtime' or 'Time').

  • +
  • variableList (list of strings, optional) – If present, a list of variables to be included in the data set

  • +
  • selectCorrdValues (dict, optional) –

    A dictionary of coordinate names (keys) and values or arrays of +values used to slice the variales in the data set. See +xarray.dataset.sel() for details on how this dictonary is used. +An example:

    +
    selectCorrdValues = {'cellLon': 180.0}
    +
    +
    +

  • +
  • iselValues (dict, optional) –

    A dictionary of coordinate names (keys) and indices, slices or +arrays of indices used to slice the variales in the data set. See +xarray.dataset.isel() for details on how this dictonary is used. +An example:

    +
    iselValues = {'nVertLevels': slice(0, 3),
    +              'nCells': cellIDs}
    +
    +
    +

  • +
+
+
Returns:
+

ds (xarray.Dataset)

+
+
Raises:
+
    +
  • TypeError – If the time variable has an unsupported type (not a date string or + a floating-pont number of days since the start of the simulation).

  • +
  • ValueError – If the time variable is not found in the data set or if the time + variable is a number of days since the start of the simulation but + simulationStartTime is None.

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess.html new file mode 100644 index 000000000..6f59db0fa --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess.html @@ -0,0 +1,218 @@ + + + + + + + mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess

+
+
+mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess(ds, calendar, simulationStartTime, timeVariableName, variableList, selValues, iselValues)[source]
+

Builds correct time specification for MPAS, allowing a date offset +because the time must be between 1678 and 2262 based on the xarray +library. Also, if slicing information (selValues and/or +iselValues) was provided in openMultifileDataSet, this +function performs the appropriate slicing on the data set.

+
+
Parameters:
+
    +
  • ds (xarray.DataSet object) – The data set containing an MPAS time variable to be used to build +an xarray time coordinate.

  • +
  • calendar ({'gregorian', 'noleap'}) – The name of one of the calendars supported by MPAS cores

  • +
  • simulationStartTime (string, optinal) –

    The start date of the simulation, used to convert from time +variables expressed as days since the start of the simulation to +days since the reference date. simulationStartTime takes one +of the following forms:

    +
    0001-01-01
    +0001-01-01 00:00:00
    +
    +
    +

    simulationStartTime is only required if the MPAS time variable +(identified by timeVariableName) is a number of days since the +start of the simulation.

    +

  • +
  • timeVariableName (string, optional) – The name of the time variable (typically 'xtime' or 'Time').

  • +
  • variableList (list of strings) – If present, a list of variables to be included in the data set

  • +
  • selectCorrdValues (dict) –

    A dictionary of coordinate names (keys) and values or arrays of +values used to slice the variales in the data set. See +xarray.DataSet.sel() for details on how this dictonary is used. +An example:

    +
    selectCorrdValues = {'cellLon': 180.0}
    +
    +
    +

  • +
  • iselValues (dict) –

    A dictionary of coordinate names (keys) and indices, slices or +arrays of indices used to slice the variales in the data set. See +xarray.DataSet.isel() for details on how this dictonary is used. +An example:

    +
    iselValues = {'nVertLevels': slice(0, 3),
    +              'nCells': cellIDs}
    +
    +
    +

  • +
+
+
Returns:
+

ds (xarray.DataSet object) – A copy of the data set with the time coordinate set and which +has been sliced.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index.html new file mode 100644 index 000000000..59ab3126c --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index.html @@ -0,0 +1,178 @@ + + + + + + + mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index

+
+
+mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index(ds)[source]
+

Remove repeated times from xarray dataset.

+
+
Parameters:
+

ds (xarray.DataSet object) – The data set potentially containing repeated time indices.

+
+
Returns:
+

ds (xarray.DataSet object) – A copy of the original data set with any repeated time indices removed.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables.html new file mode 100644 index 000000000..cdbd2ae68 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables.html @@ -0,0 +1,186 @@ + + + + + + + mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables

+
+
+mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables(ds, variableList)[source]
+

Given a data set and a list of variable names, returns a new data set that +contains only variables with those names.

+
+
Parameters:
+
    +
  • ds (xarray.DataSet object) – The data set from which a subset of variables is to be extracted.

  • +
  • variableList (string or list of strings) – The names of the variables to be extracted.

  • +
+
+
Returns:
+

ds (xarray.DataSet object) – A copy of the original data set with only the variables in +variableList.

+
+
Raises:
+

ValueError – If the resulting data set is empty.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.html new file mode 100644 index 000000000..9acec07d5 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.html @@ -0,0 +1,346 @@ + + + + + + + mpas_analysis.shared.plot.PlotClimatologyMapSubtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.PlotClimatologyMapSubtask

+
+
+class mpas_analysis.shared.plot.PlotClimatologyMapSubtask(parentTask, season, comparisonGridName, remapMpasClimatologySubtask, remapObsClimatologySubtask=None, secondRemapMpasClimatologySubtask=None, controlConfig=None, depth=None, removeMean=False, subtaskName=None)[source]
+

An analysis task for plotting 2D model fields against observations.

+
+
Variables:
+
    +
  • season (str) – A season (key in shared.constants.monthDictionary) to be +plotted.

  • +
  • comparisonGridName (str) – The name of the comparison grid to plot.

  • +
  • remapMpasClimatologySubtask (mpas_analysis.shared.climatology.RemapMpasClimatologySubtask) – The subtask for remapping the MPAS climatology that this subtask +will plot

  • +
  • remapObsClimatologySubtask (mpas_analysis.shared.climatology.RemapObservedClimatologySubtask) – The subtask for remapping the observational climatology that this +subtask will plot

  • +
  • secondRemapMpasClimatologySubtask (mpas_analysis.shared.climatology.RemapMpasClimatologySubtask) – A second subtask for remapping another MPAS climatology to plot +in the second panel and compare with in the third panel

  • +
  • removeMean (bool, optional) – If True, a common mask for the model and reference data sets is +computed (where both are valid) and the mean over that mask is +subtracted from both the model and reference results. This is +useful for data sets where the desire is to compare the spatial +pattern but the mean offset is not meaningful (e.g. SSH)

  • +
  • outFileLabel (str) – The prefix on each plot and associated XML file

  • +
  • fieldNameInTitle (str) – The name of the field being plotted, as used in the plot title

  • +
  • mpasFieldName (str) – The name of the variable in the MPAS timeSeriesStatsMonthly output

  • +
  • diffTitleLabel (str, optional) – the title of the difference subplot

  • +
  • unitsLabel (str) – the units of the plotted field, to be displayed on color bars

  • +
  • imageCaption (str) – the caption when mousing over the plot or displaying it full +screen

  • +
  • galleryGroup (str) – the name of the group of galleries in which this plot belongs

  • +
  • groupSubtitle (str or None) – the subtitle of the group in which this plot belongs (or blank +if none)

  • +
  • groupLink (str) – a short name (with no spaces) for the link to the gallery group

  • +
  • galleryName (str or None) – the name of the gallery in which this plot belongs

  • +
  • depth ({None, float, 'top', 'bot'}) – Depth at which to perform the comparison, ‘top’ for the surface +‘bot’ for the base

  • +
  • configSectionName (str) – the name of the section where the color map and range is defined

  • +
  • maskMinThreshold (float or None) – a value below which the field is mask out in plots

  • +
  • maskMaxThreshold (float or None) – a value above which the field is mask out in plots

  • +
  • extend ({'neither', 'both', 'min', 'max'}) – Determines the contourf-coloring of values that are outside the +range of the levels provided if using an indexed colormap.

  • +
+
+
+
+
+__init__(parentTask, season, comparisonGridName, remapMpasClimatologySubtask, remapObsClimatologySubtask=None, secondRemapMpasClimatologySubtask=None, controlConfig=None, depth=None, removeMean=False, subtaskName=None)[source]
+

Construct one analysis subtask for each plot (i.e. each season and +comparison grid) and a subtask for computing climatologies.

+
+
Parameters:
+
    +
  • parentTask (mpas_analysis.shared.AnalysisTask) – The parent (main) task for this subtask

  • +
  • season (str) – A season (key in shared.constants.monthDictionary) to be +plotted.

  • +
  • comparisonGridName (str) – The name of the comparison grid to plot.

  • +
  • remapMpasClimatologySubtask (mpas_analysis.shared.climatology.RemapMpasClimatologySubtask) – The subtask for remapping the MPAS climatology that this subtask +will plot

  • +
  • remapObsClimatologySubtask (mpas_analysis.shared.climatology.RemapObservedClimatologySubtask, optional) – The subtask for remapping the observational climatology that this +subtask will plot

  • +
  • secondRemapMpasClimatologySubtask (mpas_analysis.shared.climatology.RemapMpasClimatologySubtask, optional) – A second subtask for remapping another MPAS climatology to plot +in the second panel and compare with in the third panel

  • +
  • controlConfig (mpas_tools.config.MpasConfigParser, optional) – Configuration options for a control run (if any)

  • +
  • depth ({float, 'top', 'bot'}, optional) – Depth the data is being plotted, ‘top’ for the sea surface +‘bot’ for the sea floor

  • +
  • removeMean (bool, optional) – If True, a common mask for the model and reference data sets is +computed (where both are valid) and the mean over that mask is +subtracted from both the model and reference results. This is +useful for data sets where the desire is to compare the spatial +pattern but the mean offset is not meaningful (e.g. SSH)

  • +
  • subtaskName (str, optional) – The name of the subtask. If not specified, it is +plot<season>_<comparisonGridName> with a suffix indicating the +depth being sliced (if any)

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(parentTask, season, ...[, ...])

Construct one analysis subtask for each plot (i.e. each season and comparison grid) and a subtask for computing climatologies.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Plots a comparison of E3SM/MPAS output to SST/TEMP, SSS/SALT or MLD observations or a control run

set_plot_info(outFileLabel, ...[, ...])

Store attributes related to plots, plot file names and HTML output.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info.html new file mode 100644 index 000000000..5af596c7e --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info.html @@ -0,0 +1,203 @@ + + + + + + + mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info

+
+
+PlotClimatologyMapSubtask.set_plot_info(outFileLabel, fieldNameInTitle, mpasFieldName, refFieldName, refTitleLabel, unitsLabel, imageCaption, galleryGroup, groupSubtitle, groupLink, galleryName, diffTitleLabel='Model - Observations', configSectionName=None, maskMinThreshold=None, maskMaxThreshold=None, extend=None)[source]
+

Store attributes related to plots, plot file names and HTML output.

+
+
Parameters:
+
    +
  • outFileLabel (str) – The prefix on each plot and associated XML file

  • +
  • fieldNameInTitle (str) – The name of the field being plotted, as used in the plot title

  • +
  • mpasFieldName (str) – The name of the variable in the MPAS timeSeriesStatsMonthly output

  • +
  • refFieldName (str) – The name of the variable to use from the observations or reference +file

  • +
  • refTitleLabel (str) – the title of the observations or reference subplot

  • +
  • unitsLabel (str) – the units of the plotted field, to be displayed on color bars

  • +
  • imageCaption (str) – the caption when mousing over the plot or displaying it full +screen

  • +
  • galleryGroup (str) – the name of the group of galleries in which this plot belongs

  • +
  • groupSubtitle (str or None) – the subtitle of the group in which this plot belongs (or blank +if none)

  • +
  • groupLink (str) – a short name (with no spaces) for the link to the gallery group

  • +
  • galleryName (str or None) – the name of the gallery in which this plot belongs

  • +
  • diffTitleLabel (str, optional) – the title of the difference subplot

  • +
  • configSectionName (str or None, optional) – the name of the section where the color map and range is defined, +default is the name of the task

  • +
  • maskMinThreshold (float or None, optional) – a value below which the field is mask out in plots

  • +
  • maskMaxThreshold (float or None, optional) – a value above which the field is mask out in plots

  • +
  • extend ({'neither', 'both', 'min', 'max'}, optional) – Determines the contourf-coloring of values that are outside the +range of the levels provided if using an indexed colormap.

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.add_inset.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.add_inset.html new file mode 100644 index 000000000..4579797b3 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.add_inset.html @@ -0,0 +1,203 @@ + + + + + + + mpas_analysis.shared.plot.add_inset — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.add_inset

+
+
+mpas_analysis.shared.plot.add_inset(fig, fc, latlonbuffer=45.0, polarbuffer=5.0, width=1.0, height=1.0, lowerleft=None, xbuffer=None, ybuffer=None, maxlength=1.0)[source]
+

Plots an inset map showing the location of a transect or polygon. Shapes +are plotted on a polar grid if they are entirely poleward of +/-50 deg. +latitude and with a lat/lon grid if not.

+
+
Parameters:
+
    +
  • fig (matplotlib.figure.Figure) – A matplotlib figure to add the inset to

  • +
  • fc (geometric_features.FeatureCollection) – A collection of regions, transects and/or points to plot in the inset

  • +
  • latlonbuffer (float, optional) – The number of degrees lat/lon to use as a buffer around the shape(s) +to plot if a lat/lon plot is used.

  • +
  • polarbuffer (float, optional) – The number of degrees latitude to use as a buffer equatorward of the +shape(s) in polar plots

  • +
  • width (float, optional) – width and height in inches of the inset

  • +
  • height (float, optional) – width and height in inches of the inset

  • +
  • lowerleft (pair of floats, optional) – the location of the lower left corner of the axis in inches, default +puts the inset in the upper right corner of fig.

  • +
  • xbuffer (float, optional) – right and top buffers from the top-right corner (in inches) if +lowerleft is None.

  • +
  • ybuffer (float, optional) – right and top buffers from the top-right corner (in inches) if +lowerleft is None.

  • +
  • maxlength (float or None, optional) – Any segments longer than maxlength will be subdivided in the plot to +ensure curvature. If None, no subdivision is performed.

  • +
+
+
Returns:
+

inset (matplotlib.axes.Axes) – The new inset axis

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.colormap.setup_colormap.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.colormap.setup_colormap.html new file mode 100644 index 000000000..73b22052d --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.colormap.setup_colormap.html @@ -0,0 +1,199 @@ + + + + + + + mpas_analysis.shared.plot.colormap.setup_colormap — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.colormap.setup_colormap

+
+
+mpas_analysis.shared.plot.colormap.setup_colormap(config, configSectionName, suffix='')[source]
+

Set up a colormap from the registry

+
+
Parameters:
+
    +
  • config (instance of ConfigParser) – the configuration, containing a [plot] section with options that +control plotting

  • +
  • configSectionName (str) – name of config section

  • +
  • suffix (str, optional) – suffix of colormap related options

  • +
+
+
Returns:
+

colormapDict (dict) – A dictionary of colormap information.

+

’colormap’ specifies the name of the new colormap

+

’norm’ is a matplotlib norm object used to normalize the colormap

+

’levels’ is an array of contour levels or None if not using indexed +color map

+

’ticks’ is an array of values where ticks should be placed

+

’contours’ is an array of contour values to plot or None if none +have been specified

+

’lineWidth’ is the width of contour lines or None if not specified

+

’lineColor’ is the color of contour lines or None if not specified

+

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_1D.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_1D.html new file mode 100644 index 000000000..859a08ce6 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_1D.html @@ -0,0 +1,212 @@ + + + + + + + mpas_analysis.shared.plot.plot_1D — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.plot_1D

+
+
+mpas_analysis.shared.plot.plot_1D(config, xArrays, fieldArrays, errArrays, lineColors=None, lineStyles=None, markers=None, lineWidths=None, legendText=None, title=None, xlabel=None, ylabel=None, fileout='plot_1D.png', figsize=(10, 4), dpi=None, xLim=None, yLim=None, invertYAxis=False, maxTitleLength=None, titleFontSize=None, axisFontSize=None, defaultFontSize=None)[source]
+

Plots a 1D line plot with error bars if available.

+
+
Parameters:
+
    +
  • config (instance of ConfigParser) – the configuration, containing a [plot] section with options that +control plotting

  • +
  • xArrays (list of float arrays) – x array (latitude, or any other x axis except time)

  • +
  • fieldArrays (list of float arrays) – y array (any field as function of x)

  • +
  • errArrays (list of float arrays) – error array (y errors)

  • +
  • lineColors (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • lineStyles (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • markers (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • legendText (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • lineWidths (list of float, optional) – control line width. Default is 1.0.

  • +
  • title (str, optional) – title of plot

  • +
  • xlabel (str, optional) – label of x- and y-axis

  • +
  • ylabel (str, optional) – label of x- and y-axis

  • +
  • fileout (str, optional) – the file name to be written

  • +
  • figsize (tuple of float, optional) – size of the figure in inches

  • +
  • dpi (int, optional) – the number of dots per inch of the figure, taken from section plot +option dpi in the config file by default

  • +
  • xLim (float array, optional) – x range of plot

  • +
  • yLim (float array, optional) – y range of plot

  • +
  • invertYAxis (logical, optional) – if True, invert Y axis

  • +
  • maxTitleLength (int or None, optional) – the maximum number of characters in the title, beyond which it is +truncated with a trailing ellipsis. The default is from the +maxTitleLength config option.

  • +
  • titleFontSize (int, optional) – size of the title font

  • +
  • axisFontSize (int, optional) – size of the title font

  • +
  • defaultFontSize (int, optional) – the size of text other than the title

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_global_comparison.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_global_comparison.html new file mode 100644 index 000000000..9f1112c6a --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_global_comparison.html @@ -0,0 +1,208 @@ + + + + + + + mpas_analysis.shared.plot.plot_global_comparison — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.plot_global_comparison

+
+
+mpas_analysis.shared.plot.plot_global_comparison(config, Lons, Lats, modelArray, refArray, diffArray, colorMapSectionName, fileout, title=None, modelTitle='Model', refTitle='Observations', diffTitle='Model-Observations', cbarlabel='units', titleFontSize=None, defaultFontSize=None, figsize=None, dpi=None, lineWidth=1, lineColor='black', maxTitleLength=None, extend='both')[source]
+

Plots a data set as a longitude/latitude map.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – the configuration, containing a [plot] section with options that +control plotting

  • +
  • Lons (numpy.ndarray) – longitude and latitude arrays

  • +
  • Lats (numpy.ndarray) – longitude and latitude arrays

  • +
  • modelArray (numpy.ndarray) – model and observational or control run data sets

  • +
  • refArray (numpy.ndarray) – model and observational or control run data sets

  • +
  • diffArray (float array) – difference between modelArray and refArray

  • +
  • colorMapSectionName (str) – section name in config where color map info can be found.

  • +
  • fileout (str) – the file name to be written

  • +
  • title (str, optional) – the subtitle of the plot

  • +
  • modelTitle (str, optional) – title of the model panel

  • +
  • refTitle (str, optional) – title of the observations or control run panel

  • +
  • diffTitle (str, optional) – title of the difference (bias) panel

  • +
  • cbarlabel (str, optional) – label on the colorbar

  • +
  • titleFontSize (int, optional) – size of the title font

  • +
  • defaultFontSize (int, optional) – the size of text other than the title

  • +
  • figsize (tuple of float, optional) – the size of the figure in inches

  • +
  • dpi (int, optional) – the number of dots per inch of the figure, taken from section plot +option dpi in the config file by default

  • +
  • lineWidth (int, optional) – the line width of contour lines (if specified)

  • +
  • lineColor (str, optional) – the color of contour lines (if specified)

  • +
  • maxTitleLength (int or None, optional) – the maximum number of characters in the title, beyond which it is +truncated with a trailing ellipsis. The default is from the +maxTitleLength config option.

  • +
  • extend ({'neither', 'both', 'min', 'max'}, optional) – Determines the contourf-coloring of values that are outside the +range of the levels provided if using an indexed colormap.

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_polar_comparison.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_polar_comparison.html new file mode 100644 index 000000000..c06aae72f --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_polar_comparison.html @@ -0,0 +1,208 @@ + + + + + + + mpas_analysis.shared.plot.plot_polar_comparison — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.plot_polar_comparison

+
+
+mpas_analysis.shared.plot.plot_polar_comparison(config, lon, lat, modelArray, refArray, diffArray, colorMapSectionName, fileout, title=None, plotProjection='npstere', latmin=50.0, lon0=0, modelTitle='Model', refTitle='Observations', diffTitle='Model-Observations', cbarlabel='units', titleFontSize=None, defaultFontSize=None, figsize=None, dpi=None, vertical=False, maxTitleLength=None)[source]
+

Plots a data set around either the north or south pole.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – the configuration, containing a [plot] section with options that +control plotting

  • +
  • lon (float arrays) – longitude and latitude arrays

  • +
  • lat (float arrays) – longitude and latitude arrays

  • +
  • modelArray (numpy.ndarray) – model and observational or control run data sets

  • +
  • refArray (numpy.ndarray) – model and observational or control run data sets

  • +
  • diffArray (float array) – difference between modelArray and refArray

  • +
  • colorMapSectionName (str) – section name in config where color map info can be found.

  • +
  • fileout (str) – the file name to be written

  • +
  • title (str, optional) – the subtitle of the plot

  • +
  • plotProjection ({'npstere', 'spstere'}, optional) – projection for the plot (north or south pole)

  • +
  • modelTitle (str, optional) – title of the model panel

  • +
  • refTitle (str, optional) – title of the observations or control run panel

  • +
  • diffTitle (str, optional) – title of the difference (bias) panel

  • +
  • cbarlabel (str, optional) – label on the colorbar

  • +
  • titleFontSize (int, optional) – size of the title font

  • +
  • defaultFontSize (int, optional) – the size of text other than the title

  • +
  • figsize (tuple of float, optional) – the size of the figure in inches. If None, the figure size is +(8, 22) if vertical == True and (22, 8) otherwise.

  • +
  • dpi (int, optional) – the number of dots per inch of the figure, taken from section plot +option dpi in the config file by default

  • +
  • vertical (bool, optional) – whether the subplots should be stacked vertically rather than +horizontally

  • +
  • maxTitleLength (int or None, optional) – the maximum number of characters in the title, beyond which it is +truncated with a trailing ellipsis. The default is from the +maxTitleLength config option.

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section.html new file mode 100644 index 000000000..445eea533 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section.html @@ -0,0 +1,357 @@ + + + + + + + mpas_analysis.shared.plot.plot_vertical_section — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.plot_vertical_section

+
+
+mpas_analysis.shared.plot.plot_vertical_section(config, field, colorMapSectionName, xCoords=None, zCoord=None, triangulation_args=None, xOutline=None, zOutline=None, suffix='', colorbarLabel=None, title=None, xlabels=None, ylabel=None, figsize=(10, 4), dpi=None, titleFontSize=None, defaultFontSize=None, titleY=None, axisFontSize=None, xLim=None, yLim=None, lineWidth=2, lineStyle='solid', lineColor='black', contourColormap=None, backgroundColor='grey', invalidColor='white', outlineValid=True, numUpperTicks=None, upperXAxisTickLabelPrecision=None, invertYAxis=True, xCoordIsTime=False, movingAveragePoints=None, firstYearXTicks=None, yearStrideXTicks=None, maxXTicks=20, calendar='gregorian', plotAsContours=False, contourComparisonField=None, comparisonFieldName=None, originalFieldName=None, comparisonContourLineWidth=None, comparisonContourLineStyle=None, comparisonContourLineColor=None, labelContours=False, contourLabelPrecision=1, maxTitleLength=None)[source]
+

Plots a data set as a x distance (latitude, longitude, +or spherical distance) vs depth map (vertical section).

+

Or, if xCoordIsTime is True, plots data set on a vertical +Hovmoller plot (depth vs. time).

+

Typically, the field data are plotted using a heatmap, but if +contourComparisonField is not None, then contours of both +field and contourComparisonField are plotted instead.

+
+
Parameters:
+
    +
  • config (instance of ConfigParser) – the configuration, containing a [plot] section with options that +control plotting

  • +
  • field (xarray.DataArray) – field array to plot. For contour plots, xCoords and zCoords +should broadcast to the same shape as field. For heatmap plots, +xCoords and zCoords are the corners of the plot. If they +broadcast to the same shape as field, field will be bilinearly +interpolated to center values for each plot cell. If the coordinates +have one extra element in each direction than field, field is +assumed to contain cell values and no interpolation is performed.

  • +
  • colorMapSectionName (str) – section name in config where color map info can be found.

  • +
  • xCoords (xarray.DataArray or list of xarray.DataArray, optional) – The x coordinate(s) for the field. Optional second +and third entries will be used for a second and third x axis above the +plot. The typical use for the second and third axis is for transects, +for which the primary x axis represents distance along a transect, and +the second and third x axes are used to display the corresponding +latitudes and longitudes.

  • +
  • zCoord (xarray.DataArray, optional) – The z coordinates for the field

  • +
  • triangulation_args (dict, optional) – A dict of arguments to create a matplotlib.tri.Triangulation of the +transect that does not rely on it being on a logically rectangular grid. +The arguments rather than the triangulation itself are passed because +multiple triangulations with different masks are needed internally and +there is not an obvious mechanism for copying an existing triangulation. +If this option is provided, xCoords is only used for tick marks if +more than one x axis is requested, and zCoord will be ignored.

  • +
  • xOutline (numpy.ndarray, optional) – pairs of points defining line segments that are used to outline the +valid region of the mesh if outlineValid = True and +triangulation_args is not None

  • +
  • zOutline (numpy.ndarray, optional) – pairs of points defining line segments that are used to outline the +valid region of the mesh if outlineValid = True and +triangulation_args is not None

  • +
+
+
+
+
suffixstr, optional

the suffix used for colorbar config options

+
+
colorbarLabelstr, optional

the label for the colorbar. If plotAsContours and labelContours are +both True, colorbarLabel is used as follows (typically in order to +indicate the units that are associated with the contour labels): +if contourComparisonField is None, the colorbarLabel string is +parenthetically appended to the plot title; if +contourComparisonField is not None, it is parenthetically appended +to the legend entries of the contour comparison plot.

+
+
titlestr, optional

title of plot

+
+
xlabelsstr or list of str, optional

labels of x-axes. Labels correspond to entries in xCoords.

+
+
ylabelstr, optional

label of y-axis

+
+
figsizetuple of float, optional

size of the figure in inches, or None if the current figure should +be used (e.g. if this is a subplot)

+
+
dpiint, optional

the number of dots per inch of the figure, taken from section plot +option dpi in the config file by default

+
+
titleFontSizeint, optional

size of the title font

+
+
defaultFontSizeint, optional

the size of text other than the title

+
+
titleYfloat, optional

the y value to use for placing the plot title

+
+
axisFontSizeint, optional

size of the axis font

+
+
xLimfloat array, optional

x range of plot

+
+
yLimfloat array, optional

y range of plot

+
+
lineWidthfloat, optional

the line width of contour lines (if specified)

+
+
lineStylestr, optional

the line style of contour lines (if specified); this applies to the +style of contour lines of fieldArray (the style of the contour lines +of contourComparisonField is set using +contourComparisonLineStyle).

+
+
lineColorstr, optional

the color of contour lines (if specified); this applies to the +contour lines of fieldArray (the color of the contour lines of +contourComparisonField is set using contourComparisonLineColor

+
+
backgroundColorstr, optional

the background color for the plot outside the limits of xCoord and +zCoord.

+
+
invalidColorstr, optional

the color for invalid values (NaNs and masked areas will be +shown in this color)

+
+
outlineValidbool, optional

whether to outline the boundary between the valid and invalid regions +with a black contour

+
+
numUpperTicksint, optional

the approximate number of ticks to use on the upper x axis +or axes (these are the second and third x axes, which are placed above +the plot if they have been requested by specifying the secondXAxisData +or thirdXAxisData arrays above)

+
+
upperXAxisTickLabelPrecisionint, optional

the number of decimal places (to the right +of the decimal point) to use for values at upper axis ticks. This +value can be adjusted (in concert with numUpperTicks) to avoid problems +with overlapping numbers along the upper axis.

+
+
invertYAxislogical, optional

if True, invert Y axis

+
+
xCoordIsTimelogical, optional

if True, format the x axis for time (this applies only to the primary +x axis, not to the optional second or third x axes)

+
+
movingAveragePointsint, optional

the number of points over which to perform a moving average +NOTE: this option is mostly intended for use when xCoordIsTime is +True, although it will work with other data as well. Also, the moving +average calculation is based on number of points, not actual x axis +values, so for best results, the values in the first entry in +xCoords should be equally spaced.

+
+
firstYearXTicksint, optional

The year of the first tick on the x axis. By default, the first time +entry is the first tick.

+
+
yearStrideXTicksint, optional

The number of years between x ticks. By default, the stride is chosen +automatically to have maxXTicks tick marks or fewer.

+
+
maxXTicksint, optional

the maximum number of tick marks that will be allowed along the primary +x axis. This may need to be adjusted depending on the figure size and +aspect ratio. NOTE: maxXTicks is only used if xCoordIsTime is True

+
+
calendarstr, optional

the calendar to use for formatting the time axis +NOTE: calendar is only used if xCoordIsTime is True

+
+
plotAsContoursbool, optional

if plotAsContours is True, instead of plotting field as a +heatmap, the function will plot only the contours of field. In +addition, if contourComparisonField is not None, the contours +of this field will be plotted on the same plot. The selection of +contour levels is still determined as for the contours on the heatmap +plots, via the ‘contours’ entry in colorMapSectionName.

+
+
contourComparisonFieldfloat array, optional

a comparison field array (typically observational data or results +from another simulation run), assumed to be of the same shape as +field. If plotAsContours is True and +contourComparisonField is not None, then contours of both +field and contourComparisonField will be plotted in order to +enable a comparison of the two fields on the same plot.

+
+
comparisonFieldNamestr, optional

the name for the comparison field. If contourComparisonField is +None, this parameter is ignored.

+
+
originalFieldNamestr, optional

the name for the field field (for the purposes of labeling the +contours on a contour comparison plot). If contourComparisonField +is None, this parameter is ignored.

+
+
comparisonContourLineWidthfloat, optional

the line width of contour lines of the comparisonFieldName field on +a contour comparison plot

+
+
comparisonContourLineStylestr, optional

the line style of contour lines of the comparisonFieldName field on +a contour comparison plot

+
+
comparisonContourLineColorstr, optional

the line color of contour lines of the comparisonFieldName field on +a contour comparison plot

+
+
labelContoursbool, optional

whether or not to label contour lines (if specified) with their values

+
+
contourLabelPrecisionint, optional

the precision (in terms of number of figures to the right of the +decimal point) of contour labels

+
+
maxTitleLengthint or None, optional

the maximum number of characters in the title, beyond which it is +truncated with a trailing ellipsis. The default is from the +maxTitleLength config option.

+
+
+
+
Returns:
+

    +
  • fig (matplotlib.figure.Figure) – The figure that was plotted

  • +
  • ax (matplotlib.axes.Axes) – The subplot

  • +
+

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section_comparison.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section_comparison.html new file mode 100644 index 000000000..65cec5e80 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section_comparison.html @@ -0,0 +1,314 @@ + + + + + + + mpas_analysis.shared.plot.plot_vertical_section_comparison — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.plot_vertical_section_comparison

+
+
+mpas_analysis.shared.plot.plot_vertical_section_comparison(config, modelArray, refArray, diffArray, colorMapSectionName, xCoords=None, zCoord=None, triangulation_args=None, xOutlineModel=None, zOutlineModel=None, xOutlineRef=None, zOutlineRef=None, xOutlineDiff=None, zOutlineDiff=None, colorbarLabel=None, xlabels=None, ylabel=None, title=None, modelTitle='Model', refTitle='Observations', diffTitle='Model-Observations', titleFontSize=None, defaultFontSize=None, plotTitleFontSize=None, axisFontSize=None, figsize=None, dpi=None, lineWidth=2, lineStyle='solid', lineColor='black', contourColormap=None, backgroundColor='grey', invalidColor='white', outlineValid=True, xLim=None, yLim=None, numUpperTicks=None, upperXAxisTickLabelPrecision=None, invertYAxis=True, xCoordIsTime=False, movingAveragePoints=None, firstYearXTicks=None, yearStrideXTicks=None, maxXTicks=20, calendar='gregorian', compareAsContours=False, comparisonContourLineWidth=None, comparisonContourLineStyle=None, comparisonContourLineColor=None, labelContours=False, contourLabelPrecision=1, resultSuffix='Result', diffSuffix='Difference', maxTitleLength=None)[source]
+

Plots vertical section plots in a three-panel format, comparing model data +(in modelArray) to some reference dataset (in refArray), which can be +either observations or an alternative model, and also presenting the +difference plot of the two. If refArray is None, then only one panel +is plotted, displaying the model data.

+

If compareAsContours is true, the contours of modelArray and refArray are +plotted on a single plot.

+
+
Parameters:
+
    +
  • config (instance of ConfigParser) – the configuration, containing a [plot] section with options that +control plotting

  • +
  • modelArray (xarray.DataArray) – model and observational or control run data sets

  • +
  • refArray (xarray.DataArray) – model and observational or control run data sets

  • +
  • diffArray (float array) – difference between modelArray and refArray

  • +
  • xCoords (xarray.DataArray or list of xarray.DataArray, optional) – The x coordinate(s) for the model, ref and diff arrays. Optional second +and third entries will be used for a second and third x axis above the +plot. The typical use for the second and third axis is for transects, +for which the primary x axis represents distance along a transect, and +the second and third x axes are used to display the corresponding +latitudes and longitudes.

  • +
  • zCoord (xarray.DataArray, optional) – The z coordinates for the model, ref and diff arrays

  • +
  • triangulation_args (dict, optional) – A dict of arguments to create a matplotlib.tri.Triangulation of the +transect that does not rely on it being on a logically rectangular grid. +The arguments rather than the triangulation itself are passed because +multiple triangulations with different masks are needed internally and +there is not an obvious mechanism for copying an existing triangulation. +If this option is provided, xCoords is only used for tick marks if +more than one x axis is requested, and zCoord will be ignored.

  • +
  • xOutlineModel (numpy.ndarray, optional) – pairs of points defining line segments that are used to outline the +valid region of the mesh for the model panel if outlineValid = True +and triangulation_args is not None

  • +
  • zOutlineModel (numpy.ndarray, optional) – pairs of points defining line segments that are used to outline the +valid region of the mesh for the model panel if outlineValid = True +and triangulation_args is not None

  • +
  • xOutlineRef (numpy.ndarray, optional) – Same as xOutlineModel and zOutlineModel but for the reference +panel

  • +
  • zOutlineRef (numpy.ndarray, optional) – Same as xOutlineModel and zOutlineModel but for the reference +panel

  • +
  • xOutlineDiff (numpy.ndarray, optional) – Same as xOutlineModel and zOutlineModel but for the difference +panel

  • +
  • zOutlineDiff (numpy.ndarray, optional) – Same as xOutlineModel and zOutlineModel but for the difference +panel

  • +
  • colorMapSectionName (str) – section name in config where color map info can be found.

  • +
  • colorbarLabel (str, optional) – the label for the colorbar. If compareAsContours and labelContours are +both True, colorbarLabel is used as follows (typically in order to +indicate the units that are associated with the contour labels): +if refArray is None, the colorbarLabel string is parenthetically +appended to the plot title; if refArray is not None, it is +parenthetically appended to the legend entries of the contour +comparison plot.

  • +
  • xlabels (str or list of str, optional) – labels of x-axes. Labels correspond to entries in xCoords.

  • +
  • ylabel (str, optional) – label of y-axis

  • +
  • title (str, optional) – the subtitle of the plot

  • +
  • modelTitle (str, optional) – title of the model panel

  • +
  • refTitle (str, optional) – title of the observations or control run panel

  • +
  • diffTitle (str, optional) – title of the difference (bias) panel

  • +
  • titleFontSize (int, optional) – size of the title font

  • +
  • defaultFontSize (int, optional) – the size of text other than the title

  • +
  • plotTitleFontSize (int, optional) – size of the title font for the individual plots

  • +
  • axisFontSize (int, optional) – size of the axis font

  • +
  • figsize (tuple of float, optional) – the size of the figure in inches

  • +
  • dpi (int, optional) – the number of dots per inch of the figure, taken from section plot +option dpi in the config file by default

  • +
  • lineWidth (float, optional) – the line width of contour lines (if specified)

  • +
  • lineStyle (str, optional) – the line style of contour lines (if specified); this applies to the +contour lines on heatmaps and to the contour lines of the model field +on contour comparison plots (the line style of the contour lines of +the reference field on contour comparison plots is set using the +contourComparisonLineStyle argument).

  • +
  • lineColor (str, optional) – the color of contour lines (if specified); this applies to the +contour lines on heatmaps and to the contour lines of the model field +on contour comparison plots (the line color of the contour lines of +the reference field on contour comparison plots is set using the +contourComparisonLineColor argument).

  • +
  • backgroundColor (str, optional) – the background color for the plot outside the limits of xCoord and +zCoord.

  • +
  • invalidColor (str, optional) – the color for invalid values (NaNs and masked areas will be +shown in this color)

  • +
  • outlineValid (bool, optional) – whether to outline the boundary between the valid an invalid regions +with a black contour

  • +
  • xLim (float array, optional) – x range of plot

  • +
  • yLim (float array, optional) – y range of plot

  • +
  • numUpperTicks (the approximate number of ticks to use on the upper x axis) – or axes (these are the second and third x axes, which are placed above +the plot if they have been requested by specifying the secondXAxisData +or thirdXAxisData arrays above)

  • +
  • upperXAxisTickLabelPrecision (the number of decimal places (to the right) – of the decimal point) to use for values at upper axis ticks. This +value can be adjusted (in concert with numUpperTicks) to avoid problems +with overlapping numbers along the upper axis.

  • +
  • invertYAxis (logical, optional) – if True, invert Y axis

  • +
  • xCoordIsTime (logical, optional) – if True, format the x axis for time (this applies only to the primary +x axis, not to the optional second or third x axes)

  • +
  • movingAveragePoints (int, optional) – the number of points over which to perform a moving average +NOTE: this option is mostly intended for use when xCoordIsTime is True, +although it will work with other data as well. Also, the moving +average calculation is based on number of points, not actual x axis +values, so for best results, the values in the xArray should be equally +spaced.

  • +
  • firstYearXTicks (int, optional) – The year of the first tick on the x axis. By default, the first time +entry is the first tick.

  • +
  • yearStrideXTicks (int, optional) – The number of years between x ticks. By default, the stride is chosen +automatically to have maxXTicks tick marks or fewer.

  • +
  • maxXTicks (int, optional) – the maximum number of tick marks that will be allowed along the primary +x axis. This may need to be adjusted depending on the figure size and +aspect ratio. NOTE: maxXTicks is only used if xCoordIsTime is True

  • +
  • calendar (str, optional) – the calendar to use for formatting the time axis +NOTE: calendar is only used if xCoordIsTime is True

  • +
  • compareAsContours (bool, optional) – if compareAsContours is True, instead of creating a three panel plot +showing modelArray, refArray, and their difference, the function will +plot the contours of modelArray and refArray on a single plot (unless +refArray is None, in which case only the contours of modelArray will be +plotted on the single panel plot).

  • +
  • comparisonContourLineWidth (float, optional) – the line width of contour lines of the comparisonFieldName field on +a contour comparison plot

  • +
  • comparisonContourLineStyle (str, optional) – the line style of contour lines of the reference field on a contour +comparison plot

  • +
  • comparisonContourLineColor (str, optional) – the line color of contour lines of the reference field on a contour +comparison plot

  • +
  • labelContours (bool, optional) – whether or not to label contour lines (if specified) with their values

  • +
  • contourLabelPrecision (int, optional) – the precision (in terms of number of figures to the right of the +decimal point) of contour labels

  • +
  • resultSuffix (str, optional) – a suffix added to the config options related to colormap information +for the main and control fields

  • +
  • diffSuffix (str, optional) – a suffix added to the config options related to colormap information +for the difference field

  • +
  • maxTitleLength (int or None, optional) – the maximum number of characters in the title, beyond which it is +truncated with a trailing ellipsis. The default is from the +maxTitleLength config option.

  • +
+
+
Returns:
+

    +
  • fig (matplotlib.figure.Figure) – The figure that was plotted

  • +
  • axes (list of matplotlib.axes.Axes) – The subplots

  • +
  • suptitle (matplotlib.text.Text) – The super-title

  • +
+

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.ticks.plot_xtick_format.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.ticks.plot_xtick_format.html new file mode 100644 index 000000000..daf2e73fe --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.ticks.plot_xtick_format.html @@ -0,0 +1,189 @@ + + + + + + + mpas_analysis.shared.plot.ticks.plot_xtick_format — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.ticks.plot_xtick_format

+
+
+mpas_analysis.shared.plot.ticks.plot_xtick_format(calendar, minDays, maxDays, maxXTicks, yearStride=None)[source]
+

Formats tick labels and positions along the x-axis for time series +/ index plots

+
+
Parameters:
+
    +
  • calendar (str) – the calendar to use for formatting the time axis

  • +
  • minDays (float) – start time for labels

  • +
  • maxDays (float) – end time for labels

  • +
  • maxXTicks (int) – the maximum number of tick marks to display, used to sub-sample ticks +if there are too many

  • +
  • yearStride (int, optional) – the number of years to skip over between ticks

  • +
+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot.html new file mode 100644 index 000000000..169534bf7 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot.html @@ -0,0 +1,228 @@ + + + + + + + mpas_analysis.shared.plot.timeseries_analysis_plot — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.timeseries_analysis_plot

+
+
+mpas_analysis.shared.plot.timeseries_analysis_plot(config, dsvalues, calendar, title, xlabel, ylabel, movingAveragePoints=None, lineColors=None, lineStyles=None, markers=None, lineWidths=None, legendText=None, maxPoints=None, titleFontSize=None, defaultFontSize=None, figsize=(12, 6), dpi=None, firstYearXTicks=None, yearStrideXTicks=None, maxXTicks=20, obsMean=None, obsUncertainty=None, obsLegend=None, legendLocation='lower left', maxTitleLength=None)[source]
+

Plots the list of time series data sets.

+
+
Parameters:
+
    +
  • config (instance of ConfigParser) – the configuration, containing a [plot] section with options that +control plotting

  • +
  • dsvalues (list of xarray DataSets) – the data set(s) to be plotted

  • +
  • title (str) – the title of the plot

  • +
  • xlabel (str) – axis labels

  • +
  • ylabel (str) – axis labels

  • +
  • calendar (str) – the calendar to use for formatting the time axis

  • +
  • movingAveragePoints (int, optional) – the number of time points over which to perform a moving average

  • +
  • lineColors (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • lineStyles (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • markers (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • legendText (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • lineWidths (list of float, optional) – control line width. Default is 1.0.

  • +
  • maxPoints (list of {None, int}, optional) – the approximate maximum number of time points to use in a time series. +This can be helpful for reducing the number of symbols plotted if +plotting with markers. Otherwise the markers become indistinguishable +from each other.

  • +
  • titleFontSize (int, optional) – the size of the title font

  • +
  • defaultFontSize (int, optional) – the size of text other than the title

  • +
  • figsize (tuple of float, optional) – the size of the figure in inches

  • +
  • dpi (int, optional) – the number of dots per inch of the figure, taken from section plot +option dpi in the config file by default

  • +
  • firstYearXTicks (int, optional) – The year of the first tick on the x axis. By default, the first time +entry is the first tick.

  • +
  • yearStrideXTicks (int, optional) – The number of years between x ticks. By default, the stride is chosen +automatically to have maxXTicks tick marks or fewer.

  • +
  • maxXTicks (int, optional) – the maximum number of tick marks that will be allowed along the x axis. +This may need to be adjusted depending on the figure size and aspect +ratio.

  • +
  • obsMean (list of float, optional) – Mean values and uncertainties for observations to be plotted as error +bars. The two lists must have the same number of elements.

  • +
  • obsUncertainty (list of float, optional) – Mean values and uncertainties for observations to be plotted as error +bars. The two lists must have the same number of elements.

  • +
  • obsLegend (list of str, optional) – The label in the legend for each element in obsMean (and +obsUncertainty)

  • +
  • legendLocation (str, optional) – The location of the legend (see pyplot.legend() for details)

  • +
  • maxTitleLength (int or None, optional) – the maximum number of characters in the title, beyond which it is +truncated with a trailing ellipsis. The default is from the +maxTitleLength config option.

  • +
+
+
Returns:
+

fig (matplotlib.figure.Figure) – The resulting figure

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot_polar.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot_polar.html new file mode 100644 index 000000000..df1f0a875 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot_polar.html @@ -0,0 +1,207 @@ + + + + + + + mpas_analysis.shared.plot.timeseries_analysis_plot_polar — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.plot.timeseries_analysis_plot_polar

+
+
+mpas_analysis.shared.plot.timeseries_analysis_plot_polar(config, dsvalues, title, movingAveragePoints=None, lineColors=None, lineStyles=None, markers=None, lineWidths=None, legendText=None, titleFontSize=None, defaultFontSize=None, figsize=(15, 6), dpi=None, maxTitleLength=None)[source]
+

Plots the list of time series data sets on a polar plot.

+
+
Parameters:
+
    +
  • config (instance of ConfigParser) – the configuration, containing a [plot] section with options that +control plotting

  • +
  • dsvalues (list of xarray DataSets) – the data set(s) to be plotted

  • +
  • movingAveragePoints (int) – the numer of time points over which to perform a moving average

  • +
  • title (str) – the title of the plot

  • +
  • lineColors (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • lineStyles (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • markers (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • legendText (list of str, optional) – control line color, style, marker, and corresponding legend +text. Default is black, solid line with no marker, and no legend.

  • +
  • lineWidths (list of float, optional) – control line width. Default is 1.0.

  • +
  • titleFontSize (int, optional) – the size of the title font

  • +
  • defaultFontSize (int, optional) – the size of text other than the title

  • +
  • figsize (tuple of float, optional) – the size of the figure in inches

  • +
  • dpi (int, optional) – the number of dots per inch of the figure, taken from section plot +option dpi in the config file by default

  • +
  • maxTitleLength (int or None, optional) – the maximum number of characters in the title, beyond which it is +truncated with a trailing ellipsis. The default is from the +maxTitleLength config option.

  • +
+
+
Returns:
+

fig (matplotlib.figure.Figure) – The resulting figure

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.projection.get_cartopy_projection.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.projection.get_cartopy_projection.html new file mode 100644 index 000000000..27f730a30 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.projection.get_cartopy_projection.html @@ -0,0 +1,177 @@ + + + + + + + mpas_analysis.shared.projection.get_cartopy_projection — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.projection.get_cartopy_projection

+
+
+mpas_analysis.shared.projection.get_cartopy_projection(comparison_grid_name)[source]
+

Get the projection from the comparison_grid_name.

+
+
Parameters:
+

comparison_grid_name (str) – The name of the projection comparison grid to use for remapping

+
+
Returns:
+

projection (cartopy.crs.Projection) – The projection

+
+
Raises:
+

ValueError – If comparison_grid_name does not describe a known comparison grid

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.projection.get_pyproj_projection.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.projection.get_pyproj_projection.html new file mode 100644 index 000000000..76b124434 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.projection.get_pyproj_projection.html @@ -0,0 +1,177 @@ + + + + + + + mpas_analysis.shared.projection.get_pyproj_projection — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.projection.get_pyproj_projection

+
+
+mpas_analysis.shared.projection.get_pyproj_projection(comparison_grid_name)[source]
+

Get the projection from the comparison_grid_name.

+
+
Parameters:
+

comparison_grid_name (str) – The name of the projection comparison grid to use for remapping

+
+
Returns:
+

projection (pyproj.Proj) – The projection

+
+
Raises:
+

ValueError – If comparison_grid_name does not describe a known comparison grid

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks.html new file mode 100644 index 000000000..b75a036de --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks.html @@ -0,0 +1,284 @@ + + + + + + + mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks

+
+
+class mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks(config, conponentName)[source]
+

An analysis tasks for computing cell masks for regions defined by geojson +features

+
+
Variables:
+

regionMaskSubtasks (dict of ComputeRegionMasksSubtask objects) – The subtasks of this task with file names as keys

+
+
+
+
+__init__(config, conponentName)[source]
+

Construct the analysis task.

+
+
Parameters:
+
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, conponentName)

Construct the analysis task.

add_mask_subtask(regionGroup[, obsFileName, ...])

Construct the analysis task and adds it as a subtask of the parentTask.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Run the analysis.

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis (e.g. reading namelists and streams files).

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask.html new file mode 100644 index 000000000..50d4653c5 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask.html @@ -0,0 +1,318 @@ + + + + + + + mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask

+
+
+class mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask(parentTask, regionGroup, meshName, subprocessCount=1, obsFileName=None, lonVar='lon', latVar='lat', useMpasMaskCreator=False)[source]
+

An analysis tasks for computing cell masks for regions defined by geojson +features

+
+
Variables:
+
    +
  • regionGroup (str) – The name of one of the supported region groups (see +geometric_features.aggregation.get_region_by_name())

  • +
  • aggregationFunction (callable) – An aggregation function returned by +geometric_features.aggregation.get_region_by_name()

  • +
  • geojsonFileName (str) – A geojson file, typically from the MPAS geometric_features +repository, defining the shapes to be masked

  • +
  • outFileSuffix (str) – The suffix for the resulting mask file

  • +
  • maskFileName (str) – The name of the output mask file

  • +
  • obsFileName (str) – The name of an observations file to create masks for. But default, +lon/lat are taken from an MPAS restart file

  • +
  • latVar (lonVar,) – The name of the longitude and latitude variables in obsFileName

  • +
  • meshName (str) – The name of the mesh or grid, used as part of the mask file name. +Default is the MPAS mesh name

  • +
+
+
+
+
+__init__(parentTask, regionGroup, meshName, subprocessCount=1, obsFileName=None, lonVar='lon', latVar='lat', useMpasMaskCreator=False)[source]
+

Construct the analysis task and adds it as a subtask of the +parentTask.

+
+
Parameters:
+
    +
  • parentTask (AnalysisTask) – The parent task, used to get the taskName, config and +componentName

  • +
  • regionGroup (str) – The name of one of the supported region groups (see +geometric_features.aggregation.get_region_by_name())

  • +
  • meshName (str) – The name of the mesh or grid, used as part of the mask file name. +Default is the MPAS mesh name

  • +
+
+
+
+
subprocessCountint, optional

The number of processes that can be used to make the mask

+
+
obsFileNamestr, optional

The name of an observations file to create masks for. But default, +lon/lat are taken from an MPAS restart file

+
+
lonVar, latVarstr, optional

The name of the longitude and latitude variables in obsFileName

+
+
useMpasMaskCreatorbool, optional

If True, the mask creator from mpas_tools will be used +to create the mask. Otherwise, python code is used.

+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(parentTask, regionGroup, meshName)

Construct the analysis task and adds it as a subtask of the parentTask.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

expand_region_names(regionNames)

If regionNames contains 'all', make sure the geojson file exists and then return all the region names found in the file.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

make_region_mask()

If the geojson mask file has not already been cached in the diagnostics or custom diagnostic directories, it will be created in the analysis output's masks directory.

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Compute the requested climatologies

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list.html new file mode 100644 index 000000000..ead010776 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list.html @@ -0,0 +1,167 @@ + + + + + + + mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list

+
+
+mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list(geojsonFileName)[source]
+

Builds a list of features found in the geojson file

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.MpasTimeSeriesTask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.MpasTimeSeriesTask.html new file mode 100644 index 000000000..8ed45ff9a --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.MpasTimeSeriesTask.html @@ -0,0 +1,299 @@ + + + + + + + mpas_analysis.shared.time_series.MpasTimeSeriesTask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.time_series.MpasTimeSeriesTask

+
+
+class mpas_analysis.shared.time_series.MpasTimeSeriesTask(config, componentName, taskName=None, subtaskName=None, section='timeSeries')[source]
+

An analysis tasks for computing time series from output from the +timeSeriesStatsMonthly analysis member.

+
+
Variables:
+
    +
  • variableList (list of str) – A list of variable names in timeSeriesStatsMonthly to be +included in the time series

  • +
  • allVariables (list of str) – A list of all available variable names in timeSeriesStatsMonthly +used to raise an exception when an unavailable variable is requested

  • +
  • inputFiles (list of str) – A list of input files from which to extract the time series.

  • +
  • endDate (startDate,) – The start and end dates of the time series as strings

  • +
  • endYear (startYear,) – The start and end years of the time series

  • +
+
+
+
+
+__init__(config, componentName, taskName=None, subtaskName=None, section='timeSeries')[source]
+

Construct the analysis task for extracting time series.

+
+
Parameters:
+
    +
  • config (mpas_tools.config.MpasConfigParser) – Contains configuration options

  • +
  • componentName ({'ocean', 'seaIce'}) – The name of the component (same as the folder where the task +resides)

  • +
  • taskName (str, optional) – The name of the task, ‘mpasTimeSeriesOcean’ or +‘mpasTimeSeriesSeaIce’ by default (depending on componentName)

  • +
  • subtaskName (str, optional) – The name of the subtask (if any)

  • +
  • section (str, optional) – The section of the config file from which to read the start and +end times for the time series, also added as a tag

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(config, componentName[, taskName, ...])

Construct the analysis task for extracting time series.

add_subtask(subtask)

Add a subtask to this tasks.

add_variables(variableList)

Add one or more variables to extract as a time series.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Compute the requested time series

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.cache_time_series.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.cache_time_series.html new file mode 100644 index 000000000..bd7f8a94d --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.cache_time_series.html @@ -0,0 +1,199 @@ + + + + + + + mpas_analysis.shared.time_series.cache_time_series — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.time_series.cache_time_series

+
+
+mpas_analysis.shared.time_series.cache_time_series(timesInDataSet, timeSeriesCalcFunction, cacheFileName, calendar, yearsPerCacheUpdate=1, logger=None)[source]
+

Create or update a NetCDF file cacheFileName containing the given time +series, calculated with timeSeriesCalcFunction over the given times, +start and end year, and time frequency with which results are cached.

+

Note: only works with climatologies where the mask (locations of NaN +values) doesn’t vary with time.

+
+
Parameters:
+
    +
  • timesInDataSet (array-like) – Times at which the time series is to be calculated, typically taken +from ds.Times.values for a data set from which the time series +will be extracted or computed.

  • +
  • timeSeriesCalcFunction (function) – A function with arguments timeIndices, indicating the entries in +timesInDataSet to be computed, and firstCall, indicating +whether this is the first call to the funciton (useful for printing +progress information).

  • +
  • cacheFileName (str) – The absolute path to the cache file where the times series will be +stored

  • +
  • calendar ({'gregorian', 'noleap'}) – The name of one of the calendars supported by MPAS cores, used to +determine year and month from Time coordinate

  • +
  • yearsPerCacheUpdate (int, optional) – The frequency with which the cache file is updated as the computation +progresses. If the computation is expensive, it may be useful to +output the file frequently. If not, there will be needless overhead +in caching the file too frequently.

  • +
  • logger (logging.Logger, optional) – A logger to which to write output as the time series is computed

  • +
+
+
Returns:
+

climatology (object of same type as ds) – A data set without the 'Time' coordinate containing the mean +of ds over all months in monthValues, weighted by the number of days +in each month.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg.html new file mode 100644 index 000000000..442ae570b --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg.html @@ -0,0 +1,180 @@ + + + + + + + mpas_analysis.shared.time_series.compute_moving_avg — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.time_series.compute_moving_avg

+
+
+mpas_analysis.shared.time_series.compute_moving_avg(ds, movingAveragePoints=12)[source]
+

Compute the rolling mean of a data set

+
+
Parameters:
+
    +
  • ds (xarray.Dataset) – a dataset to be averaged

  • +
  • movingAveragePoints (int, optional) – The number of points (months) over which to perform the rolling average +of the data set

  • +
+
+
Returns:
+

ds (xarray.Dataset) – The anomaly of the rolling time mean from the start of the simulation

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start.html new file mode 100644 index 000000000..641718cdb --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start.html @@ -0,0 +1,191 @@ + + + + + + + mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start

+
+
+mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start(timeSeriesFileName, variableList, anomalyStartTime, anomalyEndTime, startDate, endDate, calendar, movingAveragePoints=12, alter_dataset=None)[source]
+

Compute the rolling mean of the anomaly of a quantity from the beginning +of the simulation (such that the rolling mean starts at zero by definition)

+
+
Parameters:
+
    +
  • timeSeriesFileName (str) – a file produced by MpasTimeSeriesTask containing variables, the +anomaly and rolling mean of which is to be computed

  • +
  • variableList (list of str) – variable names to include in the resulting data set

  • +
  • anomalyStartTime (str) – the start and end times of the reference point for the anomaly

  • +
  • anomalyEndTime (str) – the start and end times of the reference point for the anomaly

  • +
  • startDate (str) – the start and end dates of the time series

  • +
  • endDate (str) – the start and end dates of the time series

  • +
  • calendar ({'gregorian', 'gregoraian_noleap'}) – The calendar used in the MPAS run

  • +
  • movingAveragePoints (int, optional) – The number of points (months) over which to perform the rolling average +of the data set

  • +
  • alter_dataset (function) – A function for manipulating the data set (e.g. computing new +variables), taking an xarray.Dataset as input argument and +returning an xarray.Dataset

  • +
+
+
Returns:
+

ds (xarray.Dataset) – The anomaly of the rolling time mean from the start of the simulation

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta.html new file mode 100644 index 000000000..814844f3e --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta.html @@ -0,0 +1,202 @@ + + + + + + + mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta

+
+
+class mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta(dt1=None, dt2=None, years=0, months=0, days=0, hours=0, minutes=0, seconds=0, calendar='gregorian')[source]
+

MpasRelativeDelta is a subclass of dateutil.relativedelta for +relative time intervals with different MPAS calendars.

+

Only relative intervals (years, months, etc.) are supported and not the +absolute date specifications (year, month, etc.). Addition/subtraction +of datetime.datetime objects or other MpasRelativeDelta (but +currently not datetime.date, datetime.timedelta or other related +objects) is supported.

+
+
+__init__(dt1=None, dt2=None, years=0, months=0, days=0, hours=0, minutes=0, seconds=0, calendar='gregorian')[source]
+
+ +

Methods

+ + + + + + + + + +

__init__([dt1, dt2, years, months, days, ...])

normalized()

Return a version of this object represented entirely using integer values for the relative attributes.

+

Attributes

+ + + + + + +

weeks

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.date_to_days.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.date_to_days.html new file mode 100644 index 000000000..fa3dfc753 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.date_to_days.html @@ -0,0 +1,204 @@ + + + + + + + mpas_analysis.shared.timekeeping.utility.date_to_days — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.timekeeping.utility.date_to_days

+
+
+mpas_analysis.shared.timekeeping.utility.date_to_days(year=1, month=1, day=1, hour=0, minute=0, second=0, calendar='gregorian', referenceDate='0001-01-01')[source]
+

Convert a date to days since the reference date.

+
+
Parameters:
+
    +
  • year (int, optional) – The date to be converted to days since referenceDate on the +given calendar.

  • +
  • month (int, optional) – The date to be converted to days since referenceDate on the +given calendar.

  • +
  • day (int, optional) – The date to be converted to days since referenceDate on the +given calendar.

  • +
  • hour (int, optional) – The date to be converted to days since referenceDate on the +given calendar.

  • +
  • minute (int, optional) – The date to be converted to days since referenceDate on the +given calendar.

  • +
  • second (int, optional) – The date to be converted to days since referenceDate on the +given calendar.

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – A calendar to be used to convert days to a datetime.datetime +object.

  • +
  • referenceDate (str, optional) –

    A reference date of the form:

    +
    0001-01-01
    +0001-01-01 00:00:00
    +
    +
    +

  • +
+
+
Returns:
+

days (float) – The days since referenceDate on the given calendar.

+
+
Raises:
+

ValueError – If an invalid referenceDate or calendar is supplied.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.datetime_to_days.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.datetime_to_days.html new file mode 100644 index 000000000..272de7cea --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.datetime_to_days.html @@ -0,0 +1,195 @@ + + + + + + + mpas_analysis.shared.timekeeping.utility.datetime_to_days — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.timekeeping.utility.datetime_to_days

+
+
+mpas_analysis.shared.timekeeping.utility.datetime_to_days(dates, calendar='gregorian', referenceDate='0001-01-01')[source]
+

Given date(s), a calendar and a reference date, returns the days since +the reference date, either as a single float or an array of floats.

+
+
Parameters:
+
    +
  • datetime (instance or array-like of datetime.datetime) – The date(s) to be converted to days since referenceDate on the +given calendar.

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – A calendar to be used to convert days to a datetime.datetime object.

  • +
  • referenceDate (str, optional) –

    A reference date of the form:

    +
    0001-01-01
    +0001-01-01 00:00:00
    +
    +
    +

  • +
+
+
Returns:
+

days (float or array of floats) – The days since referenceDate on the given calendar.

+
+
Raises:
+

ValueError – If an invalid datetimes, referenceDate or calendar is + supplied.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.days_to_datetime.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.days_to_datetime.html new file mode 100644 index 000000000..de6783aa5 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.days_to_datetime.html @@ -0,0 +1,194 @@ + + + + + + + mpas_analysis.shared.timekeeping.utility.days_to_datetime — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.timekeeping.utility.days_to_datetime

+
+
+mpas_analysis.shared.timekeeping.utility.days_to_datetime(days, calendar='gregorian', referenceDate='0001-01-01')[source]
+

Covert days to datetime.datetime objects given a reference date and an +MPAS calendar (either ‘gregorian’ or ‘noleap’).

+
+
Parameters:
+
    +
  • days (float or array-like of floats) – The number of days since the reference date.

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – A calendar to be used to convert days to a datetime.datetime +object.

  • +
  • referenceDate (str, optional) –

    A reference date of the form:

    +
    0001-01-01
    +0001-01-01 00:00:00
    +
    +
    +

  • +
+
+
Returns:
+

datetime (datetime.datetime (or array-like of datetimes)) – The days since referenceDate on the given calendar.

+
+
Raises:
+

ValueError – If an invalid days, referenceDate or calendar is supplied.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.get_simulation_start_time.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.get_simulation_start_time.html new file mode 100644 index 000000000..353ae23bb --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.get_simulation_start_time.html @@ -0,0 +1,185 @@ + + + + + + + mpas_analysis.shared.timekeeping.utility.get_simulation_start_time — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.timekeeping.utility.get_simulation_start_time

+
+
+mpas_analysis.shared.timekeeping.utility.get_simulation_start_time(streams)[source]
+

Given a StreamsFile object, returns the simulation start time parsed +from a restart file.

+
+
Parameters:
+

steams (StreamsFile object) – For parsing an MPAS streams file

+
+
Returns:
+

simulation_start_time (str) – The start date of the simulation parsed from a restart file identified +by the contents of streams.

+
+
Raises:
+

IOError – If no restart file can be found.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_datetime.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_datetime.html new file mode 100644 index 000000000..18f950f39 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_datetime.html @@ -0,0 +1,199 @@ + + + + + + + mpas_analysis.shared.timekeeping.utility.string_to_datetime — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.timekeeping.utility.string_to_datetime

+
+
+mpas_analysis.shared.timekeeping.utility.string_to_datetime(dateString)[source]
+

Given a date string and a calendar, returns a datetime.datetime

+
+
Parameters:
+

dateString (string) –

A date and time in one of the following formats:

+
YYYY-MM-DD hh:mm:ss
+YYYY-MM-DD hh.mm.ss
+YYYY-MM-DD SSSSS
+DDD hh:mm:ss
+DDD hh.mm.ss
+DDD SSSSS
+hh.mm.ss
+hh:mm:ss
+YYYY-MM-DD
+YYYY-MM
+SSSSS
+
+
+

Note: either underscores or spaces can be used to separate the date +from the time portion of the string.

+

+
+
Returns:
+

datetime (A datetime.datetime object)

+
+
Raises:
+

ValueError – If an invalid dateString is supplied.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_days_since_date.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_days_since_date.html new file mode 100644 index 000000000..f98388d30 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_days_since_date.html @@ -0,0 +1,212 @@ + + + + + + + mpas_analysis.shared.timekeeping.utility.string_to_days_since_date — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.timekeeping.utility.string_to_days_since_date

+
+
+mpas_analysis.shared.timekeeping.utility.string_to_days_since_date(dateString, calendar='gregorian', referenceDate='0001-01-01')[source]
+

Given a date string or an array-like of date strings, a reference date +string, and a calendar, returns the number of days (as a float or +numpy.array of floats) since the reference date

+
+
Parameters:
+
    +
  • dateStrings (str or array-like of str) –

    A date and time (or array of date/times) in one of the following +formats:

    +
    YYYY-MM-DD hh:mm:ss
    +YYYY-MM-DD hh.mm.ss
    +YYYY-MM-DD SSSSS
    +DDD hh:mm:ss
    +DDD hh.mm.ss
    +DDD SSSSS
    +hh.mm.ss
    +hh:mm:ss
    +YYYY-MM-DD
    +YYYY-MM
    +SSSSS
    +
    +
    +

    Note: either underscores or spaces can be used to separate the date +from the time portion of the string.

    +

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – The name of one of the calendars supported by MPAS cores

  • +
  • referenceDate (str, optional) –

    A reference date of the form:

    +
    0001-01-01
    +0001-01-01 00:00:00
    +
    +
    +

  • +
+
+
Returns:
+

days (float or numpy.array of floats) – The number of days since referenceDate for each date in +dateString

+
+
Raises:
+

ValueError – If an invalid dateString or calendar is supplied.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_relative_delta.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_relative_delta.html new file mode 100644 index 000000000..cb0d39b1d --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_relative_delta.html @@ -0,0 +1,203 @@ + + + + + + + mpas_analysis.shared.timekeeping.utility.string_to_relative_delta — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.timekeeping.utility.string_to_relative_delta

+
+
+mpas_analysis.shared.timekeeping.utility.string_to_relative_delta(dateString, calendar='gregorian')[source]
+

Given a date string and a calendar, returns an instance of +MpasRelativeDelta

+
+
Parameters:
+
    +
  • dateString (str) –

    A date and time in one of the following formats:

    +
    YYYY-MM-DD hh:mm:ss
    +YYYY-MM-DD hh.mm.ss
    +YYYY-MM-DD SSSSS
    +DDD hh:mm:ss
    +DDD hh.mm.ss
    +DDD SSSSS
    +hh.mm.ss
    +hh:mm:ss
    +YYYY-MM-DD
    +YYYY-MM
    +SSSSS
    +
    +
    +

    Note: either underscores or spaces can be used to separate the date +from the time portion of the string.

    +

  • +
  • calendar ({'gregorian', 'noleap'}, optional) – The name of one of the calendars supported by MPAS cores

  • +
+
+
Returns:
+

relativedelta (An MpasRelativeDelta object)

+
+
Raises:
+

ValueError – If an invalid dateString is supplied.

+
+
+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask.html new file mode 100644 index 000000000..fd103ec28 --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask.html @@ -0,0 +1,298 @@ + + + + + + + mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask

+
+
+class mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask(parentTask, transectGroup, subprocessCount=None)[source]
+

An analysis tasks for computing cell masks for transects defined by geojson +features

+
+
Variables:
+
    +
  • aggregationFunction (callable) – An aggregation function returned by +geometric_features.aggregation.get_region_by_name()

  • +
  • geojsonFileName (str) – A geojson file, typically from the MPAS geometric_features +repository, defining the shapes to be masked

  • +
  • outFileSuffix (str) – The suffix for the resulting mask file

  • +
  • maskFileName (str) – The name of the output mask file

  • +
+
+
+
+
+__init__(parentTask, transectGroup, subprocessCount=None)[source]
+

Construct the analysis task and adds it as a subtask of the +parentTask.

+
+
Parameters:
+
    +
  • parentTask (AnalysisTask) – The parent task, used to get the taskName, config and +componentName

  • +
  • transectGroup (str) – The name of a transect group, see +mpas_analysis.shared.transects.get_transect_info()

  • +
  • subprocessCount (int, optional) – The number of processes that can be used to make the mask, default +is as many processes as allowed

  • +
+
+
+
+ +

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

__init__(parentTask, transectGroup[, ...])

Construct the analysis task and adds it as a subtask of the parentTask.

add_subtask(subtask)

Add a subtask to this tasks.

check_analysis_enabled(analysisOptionName[, ...])

Check to make sure a given analysis is turned on, issuing a warning or raising an exception if not.

check_generate()

Determines if this analysis should be generated, based on the generate config option and taskName, componentName and tags.

close()

Close the Process object.

expand_transect_names(transectNames)

If transectNames contains 'all', make sure the geojson file exists and then return all the transect names found in the file.

is_alive()

Return whether process is alive

join([timeout])

Wait until child process terminates

kill()

Terminate process; sends SIGKILL signal or uses TerminateProcess()

make_transect_mask()

If the geojson mask file has not already been cached in the diagnostics or custom diagnostic directories, it will be created in the analysis output's masks directory.

run([writeLogFile])

Sets up logging and then runs the analysis task.

run_after(task)

Only run this task after the given task has completed.

run_task()

Compute the requested climatologies

set_start_end_date(section)

Set the start and end dates in the config correspond to the start and end years in a given category of analysis

setup_and_check()

Perform steps to set up the analysis and check for errors in the setup.

start()

Start child process

terminate()

Terminate process; sends SIGTERM signal or uses TerminateProcess()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

BLOCKED

FAIL

READY

RUNNING

SUCCESS

UNSET

authkey

daemon

Return whether process is a daemon

exitcode

Return exit code of process or None if it has yet to stop

ident

Return identifier (PID) of process or None if it has yet to start

name

pid

Return identifier (PID) of process or None if it has yet to start

sentinel

Return a file descriptor (Unix) or handle (Windows) suitable for waiting for process termination.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks.html b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks.html new file mode 100644 index 000000000..b1b4dba1f --- /dev/null +++ b/1.11.0rc1/developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks.html @@ -0,0 +1,166 @@ + + + + + + + mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks

+
+
+mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks(geojsonFileName, meshFileName, maskFileName, logger=None, processCount=1, chunkSize=1000, subdivisionThreshold=10000.0, useMpasMaskCreator=False, dir=None)[source]
+

Build a transect mask file from the given MPAS mesh and geojson file defining a set of transects.

+
+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/genindex.html b/1.11.0rc1/genindex.html new file mode 100644 index 000000000..752553ae5 --- /dev/null +++ b/1.11.0rc1/genindex.html @@ -0,0 +1,632 @@ + + + + + + Index — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + +

Index

+ +
+ _ + | A + | B + | C + | D + | F + | G + | H + | I + | M + | O + | P + | R + | S + | T + | U + | W + +
+

_

+ + +
+ +

A

+ + + +
+ +

B

+ + + +
+ +

C

+ + + +
+ +

D

+ + + +
+ +

F

+ + +
+ +

G

+ + + +
+ +

H

+ + +
+ +

I

+ + +
+ +

M

+ + + +
+ +

O

+ + + +
+ +

P

+ + + +
+ +

R

+ + + +
+ +

S

+ + + +
+ +

T

+ + + +
+ +

U

+ + +
+ +

W

+ + +
+ + + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/index.html b/1.11.0rc1/index.html new file mode 100644 index 000000000..bf2e834da --- /dev/null +++ b/1.11.0rc1/index.html @@ -0,0 +1,287 @@ + + + + + + + MPAS-Analysis — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

MPAS-Analysis

+_images/sst_example.png +

Analysis for simulations produced with Model for Prediction Across Scales +(MPAS) components and the Energy Exascale Earth System Model (E3SM), which +used those components.

+
+

User's guide

+ +
+ + +
+

Authors

+ +
+
+

Versions

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/objects.inv b/1.11.0rc1/objects.inv new file mode 100644 index 000000000..a9a40adcf Binary files /dev/null and b/1.11.0rc1/objects.inv differ diff --git a/1.11.0rc1/py-modindex.html b/1.11.0rc1/py-modindex.html new file mode 100644 index 000000000..9f457d966 --- /dev/null +++ b/1.11.0rc1/py-modindex.html @@ -0,0 +1,153 @@ + + + + + + Python Module Index — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + +

Python Module Index

+ +
+ m +
+ + + + + + + + + + +
 
+ m
+ mpas_analysis +
    + mpas_analysis.shared.io.write_netcdf +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/search.html b/1.11.0rc1/search.html new file mode 100644 index 000000000..62b3a2cfc --- /dev/null +++ b/1.11.0rc1/search.html @@ -0,0 +1,148 @@ + + + + + + Search — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + + + +
+ +
+ +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/1.11.0rc1/searchindex.js b/1.11.0rc1/searchindex.js new file mode 100644 index 000000000..b55a32762 --- /dev/null +++ b/1.11.0rc1/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({"alltitles": {"1 Setting up a Conda Environment": [[144, "setting-up-a-conda-environment"]], "1. Getting started": [[141, "getting-started"]], "1. Getting started on GitHub": [[142, "getting-started-on-github"]], "1. The big picture": [[143, "the-big-picture"]], "1.1 Forking MPAS-Analysis": [[142, "forking-mpas-analysis"]], "1.1 Installing Miniconda": [[144, "installing-miniconda"]], "1.1 MPAS output": [[143, "mpas-output"]], "1.2 Adding SSH keys": [[142, "adding-ssh-keys"]], "1.2 Analysis tasks": [[143, "analysis-tasks"]], "1.2 Creating a conda environment": [[144, "creating-a-conda-environment"]], "1.3 Activating the environment": [[144, "activating-the-environment"]], "1.3 Local git configuration": [[142, "local-git-configuration"]], "1.3 Shared framework": [[143, "shared-framework"]], "2 Downloading observations": [[144, "downloading-observations"]], "2. Cloning the repository": [[142, "cloning-the-repository"]], "2. The reference scripts": [[141, "the-reference-scripts"]], "2. Tour of an analysis task (ClimatologyMapOHCAnomaly)": [[143, "tour-of-an-analysis-task-climatologymapohcanomaly"]], "2.1 Attributes": [[143, "attributes"]], "2.2 Constructor": [[143, "constructor"]], "2.3 setup_and_check() method": [[143, "setup-and-check-method"]], "2005-2010 climatology from SOSE the Southern Ocean State Estimate (SOSE)": [[194, "climatology-from-sose-the-southern-ocean-state-estimate-sose"]], "3 Downloading an example run": [[144, "downloading-an-example-run"]], "3. Making a worktree": [[142, "making-a-worktree"]], "3. Selecting an existing task to copy": [[141, "selecting-an-existing-task-to-copy"]], "3. 
Tour of a subtask (RemapMpasOHCClimatology)": [[143, "tour-of-a-subtask-remapmpasohcclimatology"]], "3.1 Attributes": [[143, "id1"]], "3.2 Constructor": [[141, "id1"], [143, "id2"]], "3.3 setup_and_check() method": [[143, "id3"]], "3.4 run_task() method": [[143, "run-task-method"]], "3.5 customize_masked_climatology() method": [[143, "customize-masked-climatology-method"]], "4 Configuring MPAS-Analysis": [[144, "configuring-mpas-analysis"]], "4. Developing the task": [[141, "developing-the-task"]], "4. Making a conda environment": [[142, "making-a-conda-environment"]], "4. The full code for posterity": [[143, "the-full-code-for-posterity"]], "4.1 ClimatologyMapBSF class": [[141, "climatologymapbsf-class"]], "4.1 Installing Mambaforge": [[142, "installing-mambaforge"]], "4.1 [runs]": [[144, "runs"]], "4.2 Constructor": [[141, "constructor"]], "4.2 One-time Miniconda setup": [[142, "one-time-miniconda-setup"]], "4.2 [execute]": [[144, "execute"]], "4.3 Create a development environment": [[142, "create-a-development-environment"]], "4.3 [diagnostics]": [[144, "diagnostics"]], "4.3 setup_and_check() method": [[141, "setup-and-check-method"]], "4.4 Activating the environment": [[142, "activating-the-environment"]], "4.4 [input]": [[144, "input"]], "4.5 Switching worktrees": [[142, "switching-worktrees"]], "4.5 [output]": [[144, "output"]], "4.6 [climatology], [timeSeries] and [index]": [[144, "climatology-timeseries-and-index"]], "5 Running MPAS-Analysis": [[144, "running-mpas-analysis"]], "5. Developing a subtask": [[141, "developing-a-subtask"]], "5. Editing code": [[142, "editing-code"]], "5.1 RemapMpasBSFClimatology class": [[141, "remapmpasbsfclimatology-class"]], "5.3 setup_and_check() method": [[141, "id2"]], "5.4 customize_masked_climatology() method": [[141, "customize-masked-climatology-method"]], "6 Viewing the Output": [[144, "viewing-the-output"]], "6. Config options": [[141, "config-options"]], "6. 
Running MPAS-Analysis on a laptop": [[142, "running-mpas-analysis-on-a-laptop"]], "7 Troubleshooting": [[144, "troubleshooting"]], "7. Adding the task": [[141, "adding-the-task"]], "7. Running MPAS-Analysis on an E3SM supported machine": [[142, "running-mpas-analysis-on-an-e3sm-supported-machine"]], "7.1 Configuring MPAS-Analysis": [[142, "configuring-mpas-analysis"]], "7.1 Purging old Analysis": [[144, "purging-old-analysis"]], "7.1.1 [runs]": [[142, "runs"]], "7.1.2 [execute]": [[142, "execute"]], "7.1.3 [input]": [[142, "input"]], "7.1.4 [output]": [[142, "output"]], "7.1.5. [climatology], [timeSeries] and [index]": [[142, "climatology-timeseries-and-index"]], "7.2 Errors During Setup": [[144, "errors-during-setup"]], "7.2 Errors in Tasks": [[144, "errors-in-tasks"]], "7.2 Running MPAS-Analysis": [[142, "running-mpas-analysis"]], "7.3 Viewing the Output": [[142, "viewing-the-output"]], "8. The full code for posterity": [[141, "the-full-code-for-posterity"]], "API reference": [[12, "api-reference"]], "AVISO Absolute Dynamic Topography": [[177, "aviso-absolute-dynamic-topography"]], "Adding Contour Lines": [[149, "adding-contour-lines"]], "Algorithms": [[3, "algorithms"]], "Analysis Task Template": [[1, "analysis-task-template"]], "Analysis Tasks": [[146, "analysis-tasks"]], "Analysis tasks": [[12, "analysis-tasks"]], "Anomaly Reference Year": [[148, "anomaly-reference-year"], [165, "anomaly-reference-year"]], "Antarctic Seafloor Temperature and Salinity": [[192, "antarctic-seafloor-temperature-and-salinity"]], "Antarctic melt rates and fluxes": [[172, "antarctic-melt-rates-and-fluxes"], [188, "antarctic-melt-rates-and-fluxes"], [190, "antarctic-melt-rates-and-fluxes"]], "Argo Mixed Layer Depth (MLD) climatology": [[176, "argo-mixed-layer-depth-mld-climatology"]], "Authors": [[140, null]], "Base Class": [[12, "base-class"]], "Bibliography": [[3, "bibliography"]], "Bins": [[234, "bins"]], "Bins and Contour Intervals": [[236, "bins-and-contour-intervals"]], "CCHDO": 
[[200, "cchdo"]], "Climatology": [[12, "climatology"], [148, "climatology"]], "Colormaps": [[149, "colormaps"]], "Colormpas in Three Panel Plots": [[149, "colormpas-in-three-panel-plots"]], "Comparison Grids": [[150, "comparison-grids"]], "Computing climatologies": [[148, "computing-climatologies"]], "Config File Reorganization": [[2, "config-file-reorganization"]], "Configuration": [[168, "configuration"]], "Configuration Options": [[205, "configuration-options"], [206, "configuration-options"], [207, "configuration-options"], [208, "configuration-options"], [209, "configuration-options"], [210, "configuration-options"], [211, "configuration-options"], [212, "configuration-options"], [213, "configuration-options"], [214, "configuration-options"], [215, "configuration-options"], [216, "configuration-options"], [217, "configuration-options"], [218, "configuration-options"], [219, "configuration-options"], [220, "configuration-options"], [221, "configuration-options"], [222, "configuration-options"], [223, "configuration-options"], [224, "configuration-options"], [225, "configuration-options"], [226, "configuration-options"], [227, "configuration-options"], [228, "configuration-options"], [229, "configuration-options"], [230, "configuration-options"], [231, "configuration-options"], [232, "configuration-options"], [233, "configuration-options"], [234, "configuration-options"], [235, "configuration-options"], [236, "configuration-options"], [237, "configuration-options"], [238, "configuration-options"], [239, "configuration-options"], [240, "configuration-options"], [241, "configuration-options"], [242, "configuration-options"], [243, "configuration-options"], [244, "configuration-options"], [245, "configuration-options"], [246, "configuration-options"], [247, "configuration-options"]], "Continuous Colormaps": [[149, "continuous-colormaps"]], "Contributors": [[0, "contributors"]], "Customizing plots or creating new ones": [[203, 
"customizing-plots-or-creating-new-ones"]], "Dask threads": [[151, "id1"]], "Dask threads and subprocess count": [[151, "dask-threads-and-subprocess-count"]], "Depth Bounds": [[241, "depth-bounds"]], "Description": [[172, "description"], [173, "description"], [174, "description"], [175, "description"], [176, "description"], [177, "description"], [178, "description"], [179, "description"], [180, "description"], [181, "description"], [182, "description"], [183, "description"], [184, "description"], [185, "description"], [186, "description"], [187, "description"], [188, "description"], [189, "description"], [190, "description"], [191, "description"], [192, "description"], [193, "description"], [194, "description"], [195, "description"], [196, "description"], [197, "description"], [198, "description"], [199, "description"], [200, "description"]], "Design Documents": [[6, "design-documents"]], "Design and Implementation": [[3, "design-and-implementation"]], "Details on Each Data Set": [[201, "details-on-each-data-set"]], "Developer's guide": [[140, null]], "Developer: Getting Started": [[142, "developer-getting-started"]], "Developers: Adding a new analysis task": [[141, "developers-adding-a-new-analysis-task"]], "Developers: Understanding an analysis task": [[143, "developers-understanding-an-analysis-task"]], "Diagnostics": [[152, "diagnostics"]], "Diagnostics Directories": [[152, "diagnostics-directories"]], "Diagram Type": [[236, "diagram-type"]], "Download analysis input data": [[203, "download-analysis-input-data"]], "Downloading data": [[12, "downloading-data"]], "E3SM": [[147, "e3sm"], [169, "e3sm"]], "ERS SSTv4 Nino 3.4 Index": [[181, "ers-sstv4-nino-3-4-index"]], "Eddy Kinetic Energy Climatology Mapping": [[3, "eddy-kinetic-energy-climatology-mapping"]], "Errors on Missing Data": [[156, "errors-on-missing-data"]], "Example Result": [[205, "example-result"], [206, "example-result"], [207, "example-result"], [208, "example-result"], [209, "example-result"], 
[210, "example-result"], [211, "example-result"], [213, "example-result"], [214, "example-result"], [215, "example-result"], [216, "example-result"], [217, "example-result"], [218, "example-result"], [219, "example-result"], [220, "example-result"], [221, "example-result"], [222, "example-result"], [223, "example-result"], [224, "example-result"], [225, "example-result"], [226, "example-result"], [227, "example-result"], [228, "example-result"], [229, "example-result"], [230, "example-result"], [231, "example-result"], [232, "example-result"], [233, "example-result"], [234, "example-result"], [235, "example-result"], [236, "example-result"], [237, "example-result"], [238, "example-result"], [239, "example-result"], [240, "example-result"], [241, "example-result"], [242, "example-result"], [243, "example-result"], [244, "example-result"], [245, "example-result"], [246, "example-result"], [247, "example-result"]], "Execute": [[153, "execute"]], "Fields": [[230, "fields"]], "Files and Directories": [[158, "files-and-directories"]], "GLODAPv2": [[182, "glodapv2"]], "Generalize Calendar supported by Analysis": [[4, "generalize-calendar-supported-by-analysis"]], "Generalized Horizontal Interpolation in MPAS-Analysis": [[5, "generalized-horizontal-interpolation-in-mpas-analysis"]], "Generate Option": [[159, "generate-option"]], "Generating Documentation": [[203, "generating-documentation"]], "Geojson Files": [[230, "geojson-files"]], "HTML": [[154, "html"]], "HadISST Nino 3.4 Index": [[183, "hadisst-nino-3-4-index"]], "I/O Utilities": [[12, "i-o-utilities"]], "Ice Shelf and Region Names": [[239, "ice-shelf-and-region-names"]], "Ice area and extent time series: SSM/I derived": [[196, "ice-area-and-extent-time-series-ssm-i-derived"]], "Ice concentration: SSM/I, Bootstrap algorithm": [[178, "ice-concentration-ssm-i-bootstrap-algorithm"]], "Ice concentration: SSM/I, NASATeam algorithm": [[187, "ice-concentration-ssm-i-nasateam-algorithm"]], "IceSat Ice Thickness": [[185, 
"icesat-ice-thickness"]], "Iceberg Concentration: Altiberg": [[173, "iceberg-concentration-altiberg"]], "Index": [[155, "index"]], "Indexed Colormaps": [[149, "indexed-colormaps"]], "Input": [[156, "input"]], "Input Directories": [[156, "input-directories"]], "Installation for developers": [[203, "installation-for-developers"]], "Installation for users": [[203, "installation-for-users"]], "Instructions for creating a new analysis task": [[203, "instructions-for-creating-a-new-analysis-task"]], "Landschuetzerv2016 SOM-FFN": [[186, "landschuetzerv2016-som-ffn"]], "List Analysis": [[203, "list-analysis"]], "List of MPAS output files that are needed by MPAS-Analysis:": [[203, "list-of-mpas-output-files-that-are-needed-by-mpas-analysis"]], "MPAS Components and E3SM": [[147, "mpas-components-and-e3sm"]], "MPAS Ocean": [[147, "mpas-ocean"], [170, "mpas-ocean"]], "MPAS-Analysis": [[140, "mpas-analysis"]], "MPAS-Analysis Tasks": [[172, "mpas-analysis-tasks"], [173, "mpas-analysis-tasks"], [174, "mpas-analysis-tasks"], [175, "mpas-analysis-tasks"], [176, "mpas-analysis-tasks"], [177, "mpas-analysis-tasks"], [178, "mpas-analysis-tasks"], [179, "mpas-analysis-tasks"], [180, "mpas-analysis-tasks"], [181, "mpas-analysis-tasks"], [182, "mpas-analysis-tasks"], [183, "mpas-analysis-tasks"], [184, "mpas-analysis-tasks"], [185, "mpas-analysis-tasks"], [186, "mpas-analysis-tasks"], [187, "mpas-analysis-tasks"], [188, "mpas-analysis-tasks"], [189, "mpas-analysis-tasks"], [190, "mpas-analysis-tasks"], [191, "mpas-analysis-tasks"], [192, "mpas-analysis-tasks"], [193, "mpas-analysis-tasks"], [194, "mpas-analysis-tasks"], [195, "mpas-analysis-tasks"], [196, "mpas-analysis-tasks"], [197, "mpas-analysis-tasks"], [198, "mpas-analysis-tasks"], [199, "mpas-analysis-tasks"], [200, "mpas-analysis-tasks"]], "MPAS-Seaice": [[147, "mpas-seaice"], [171, "mpas-seaice"]], "Main Authors": [[0, "main-authors"]], "Mapping Files": [[152, "mapping-files"]], "Meridional Heat Transport (MHT)": [[197, 
"meridional-heat-transport-mht"]], "Mesh Name": [[156, "mesh-name"]], "Moving Average": [[157, "moving-average"]], "Moving variable mapping outside of mpas_xarray": [[11, "moving-variable-mapping-outside-of-mpas-xarray"]], "Namelist Files": [[156, "namelist-files"]], "Namelist and Streams Files": [[12, "namelist-and-streams-files"]], "Observations": [[201, "observations"], [205, "observations"], [206, "observations"], [207, "observations"], [208, "observations"], [209, "observations"], [210, "observations"], [211, "observations"], [214, "observations"], [215, "observations"], [216, "observations"], [217, "observations"], [218, "observations"], [219, "observations"], [221, "observations"], [223, "observations"], [224, "observations"], [225, "observations"], [227, "observations"], [228, "observations"], [232, "observations"], [233, "observations"], [234, "observations"], [236, "observations"], [237, "observations"], [239, "observations"], [241, "observations"], [244, "observations"], [247, "observations"]], "Ocean Observations": [[201, "ocean-observations"]], "Ocean tasks": [[12, "ocean-tasks"]], "Ocean, Sea Ice and Iceberg Observations": [[158, "ocean-sea-ice-and-iceberg-observations"]], "Other Config Options": [[234, "other-config-options"], [236, "other-config-options"], [241, "other-config-options"]], "Other Options": [[148, "other-options"], [230, "other-options"], [239, "other-options"], [246, "other-options"]], "Output": [[159, "output"]], "Output Directories": [[159, "output-directories"]], "Output Grids for Transects": [[167, "output-grids-for-transects"]], "PIOMAS Arctic Sea Ice Volume Reanalysis": [[189, "piomas-arctic-sea-ice-volume-reanalysis"]], "Parallel Mapping File Creation": [[153, "parallel-mapping-file-creation"]], "Parallel Mask Creation": [[153, "parallel-mask-creation"]], "Parallel Tasks": [[153, "parallel-tasks"]], "Parallelism in NCO": [[153, "parallelism-in-nco"]], "Physics": [[3, "physics"]], "Plot": [[160, "plot"]], "Plotting": [[12, 
"plotting"]], "Preprocessed Reference Runs": [[161, "preprocessed-reference-runs"]], "Prerequisite Tasks and Subtasks": [[8, "prerequisite-tasks-and-subtasks"]], "Projection": [[12, "projection"]], "Purge Old Analysis": [[203, "purge-old-analysis"]], "Quick Start Guide": [[203, "quick-start-guide"]], "Reading MPAS Datasets": [[12, "reading-mpas-datasets"]], "References": [[172, "references"], [173, "references"], [174, "references"], [175, "references"], [176, "references"], [177, "references"], [178, "references"], [179, "references"], [180, "references"], [181, "references"], [182, "references"], [183, "references"], [184, "references"], [185, "references"], [186, "references"], [187, "references"], [188, "references"], [189, "references"], [190, "references"], [191, "references"], [192, "references"], [193, "references"], [194, "references"], [195, "references"], [196, "references"], [197, "references"], [198, "references"], [199, "references"], [200, "references"]], "Region Config Section": [[162, "region-config-section"]], "Region Groups": [[162, "region-groups"], [234, "region-groups"], [236, "region-groups"], [241, "region-groups"]], "Region Names": [[234, "region-names"], [236, "region-names"], [241, "region-names"]], "Regions": [[12, "regions"], [162, "regions"]], "Release Policy": [[172, "release-policy"], [173, "release-policy"], [174, "release-policy"], [175, "release-policy"], [176, "release-policy"], [177, "release-policy"], [178, "release-policy"], [179, "release-policy"], [180, "release-policy"], [181, "release-policy"], [182, "release-policy"], [183, "release-policy"], [184, "release-policy"], [185, "release-policy"], [186, "release-policy"], [187, "release-policy"], [188, "release-policy"], [189, "release-policy"], [190, "release-policy"], [191, "release-policy"], [192, "release-policy"], [193, "release-policy"], [194, "release-policy"], [195, "release-policy"], [196, "release-policy"], [197, "release-policy"], [198, "release-policy"], [199, 
"release-policy"], [200, "release-policy"]], "Remapper for \u201conline\u201d remapping of data sets": [[9, "remapper-for-online-remapping-of-data-sets"]], "Remapping": [[158, "remapping"]], "Remapping Options": [[148, "remapping-options"]], "Reorganize Timekeeping": [[10, "reorganize-timekeeping"]], "Requirements": [[3, "requirements"]], "Roemmich-Gilson Argo Climatology": [[191, "roemmich-gilson-argo-climatology"]], "Running in parallel via a queueing system": [[203, "running-in-parallel-via-a-queueing-system"]], "Running the analysis": [[203, "running-the-analysis"]], "Runs": [[163, "runs"]], "SOSE data for the full Southern Ocean": [[226, "sose-data-for-the-full-southern-ocean"]], "SSS from NASA Aquarius satellite": [[175, "sss-from-nasa-aquarius-satellite"]], "SST merged Hadley Center-NOAA/OI data set": [[184, "sst-merged-hadley-center-noaa-oi-data-set"]], "Sea Ice Observations": [[201, "sea-ice-observations"]], "Sea ice production and transport: Haumann et al 2016": [[174, "sea-ice-production-and-transport-haumann-et-al-2016"]], "Sea ice tasks": [[12, "sea-ice-tasks"]], "SeaWiFS": [[193, "seawifs"]], "Seasons": [[164, "seasons"]], "Setting up E3SM runs": [[147, "setting-up-e3sm-runs"], [169, "setting-up-e3sm-runs"]], "Setting up Standalone MPAS Sea Ice Runs": [[147, "setting-up-standalone-mpas-sea-ice-runs"], [171, "setting-up-standalone-mpas-sea-ice-runs"]], "Setting up Standalone MPAS-O Runs": [[147, "setting-up-standalone-mpas-o-runs"], [170, "setting-up-standalone-mpas-o-runs"]], "Shared modules": [[12, "shared-modules"]], "Source": [[172, "source"], [173, "source"], [174, "source"], [175, "source"], [176, "source"], [177, "source"], [178, "source"], [179, "source"], [180, "source"], [181, "source"], [182, "source"], [183, "source"], [184, "source"], [185, "source"], [186, "source"], [187, "source"], [188, "source"], [189, "source"], [190, "source"], [191, "source"], [192, "source"], [193, "source"], [194, "source"], [195, "source"], [196, "source"], 
[197, "source"], [198, "source"], [199, "source"], [200, "source"]], "Specifying Colorbar Tick Marks": [[149, "specifying-colorbar-tick-marks"]], "Start and End Year": [[148, "start-and-end-year"], [155, "start-and-end-year"], [165, "start-and-end-year"]], "State Estimate": [[226, "state-estimate"]], "Streams Files": [[156, "streams-files"]], "Subprocess count": [[151, "subprocess-count"]], "Summary": [[3, "summary"]], "Support Parallel Tasks": [[7, "support-parallel-tasks"]], "Supported Colormaps": [[149, "supported-colormaps"]], "Surface Current Variance from Drifter Data": [[179, "surface-current-variance-from-drifter-data"]], "Testing": [[3, "testing"]], "Time Series": [[12, "time-series"], [165, "time-series"]], "Time-Axis Tick Marks": [[166, "time-axis-tick-marks"]], "Timekeeping": [[12, "timekeeping"]], "Top-level script: mpas_analysis": [[12, "top-level-script-mpas-analysis"]], "Transect Names": [[246, "transect-names"]], "Transects": [[12, "transects"]], "Tutorials": [[140, null]], "User's guide": [[140, null]], "User: Getting Started": [[144, "user-getting-started"]], "Variable List": [[234, "variable-list"]], "Variables": [[241, "variables"]], "Versions": [[140, null], [248, "versions"]], "WOA18 Temperature and Salinity Climatology": [[199, "woa18-temperature-and-salinity-climatology"]], "WOCE": [[200, "id1"]], "WOCE sections": [[200, "woce-sections"]], "Wave Reanalysis: ERA5": [[180, "wave-reanalysis-era5"]], "Wave Satelite Altimeter Observations: ESA Sea State Climate Change Initiative": [[195, "wave-satelite-altimeter-observations-esa-sea-state-climate-change-initiative"]], "Weight List": [[234, "weight-list"]], "World Ocean Atlas v2": [[198, "world-ocean-atlas-v2"]], "Xarray and Dask": [[156, "xarray-and-dask"]], "climatologyMapAntarcticMelt": [[205, "climatologymapantarcticmelt"]], "climatologyMapArgoSalinity": [[206, "climatologymapargosalinity"]], "climatologyMapArgoTemperature": [[207, "climatologymapargotemperature"]], "climatologyMapBGC": 
[[208, "climatologymapbgc"]], "climatologyMapEKE": [[209, "climatologymapeke"]], "climatologyMapIcebergConcSH": [[210, "climatologymapicebergconcsh"]], "climatologyMapMLD": [[211, "climatologymapmld"]], "climatologyMapMLDMinMax": [[212, "climatologymapmldminmax"]], "climatologyMapOHCAnomaly": [[213, "climatologymapohcanomaly"]], "climatologyMapSSH": [[214, "climatologymapssh"]], "climatologyMapSSS": [[215, "climatologymapsss"]], "climatologyMapSST": [[216, "climatologymapsst"]], "climatologyMapSchmidtko": [[217, "climatologymapschmidtko"]], "climatologyMapSeaIceConcNH": [[218, "climatologymapseaiceconcnh"]], "climatologyMapSeaIceConcSH": [[219, "climatologymapseaiceconcsh"]], "climatologyMapSeaIceMeltingNH": [[220, "climatologymapseaicemeltingnh"]], "climatologyMapSeaIceMeltingSH": [[221, "climatologymapseaicemeltingsh"]], "climatologyMapSeaIceProductionNH": [[222, "climatologymapseaiceproductionnh"]], "climatologyMapSeaIceProductionSH": [[223, "climatologymapseaiceproductionsh"]], "climatologyMapSeaIceThickNH": [[224, "climatologymapseaicethicknh"]], "climatologyMapSeaIceThickSH": [[225, "climatologymapseaicethicksh"]], "climatologyMapSose": [[226, "climatologymapsose"]], "climatologyMapWaves": [[227, "climatologymapwaves"]], "climatologyMapWoa": [[228, "climatologymapwoa"]], "conservation": [[229, "conservation"]], "geojsonTransects": [[230, "geojsontransects"]], "hovmollerOceanRegions": [[231, "hovmolleroceanregions"]], "indexNino34": [[232, "indexnino34"]], "meridionalHeatTransport": [[233, "meridionalheattransport"]], "mpas_analysis.__main__.add_task_and_subtasks": [[13, "mpas-analysis-main-add-task-and-subtasks"]], "mpas_analysis.__main__.build_analysis_list": [[14, "mpas-analysis-main-build-analysis-list"]], "mpas_analysis.__main__.determine_analyses_to_generate": [[15, "mpas-analysis-main-determine-analyses-to-generate"]], "mpas_analysis.__main__.main": [[16, "mpas-analysis-main-main"]], "mpas_analysis.__main__.run_analysis": [[17, 
"mpas-analysis-main-run-analysis"]], "mpas_analysis.__main__.update_generate": [[18, "mpas-analysis-main-update-generate"]], "mpas_analysis.__main__.wait_for_task": [[19, "mpas-analysis-main-wait-for-task"]], "mpas_analysis.download_data.download_analysis_data": [[20, "mpas-analysis-download-data-download-analysis-data"]], "mpas_analysis.ocean.ClimatologyMapAntarcticMelt": [[21, "mpas-analysis-ocean-climatologymapantarcticmelt"]], "mpas_analysis.ocean.ClimatologyMapArgoSalinity": [[22, "mpas-analysis-ocean-climatologymapargosalinity"]], "mpas_analysis.ocean.ClimatologyMapArgoTemperature": [[23, "mpas-analysis-ocean-climatologymapargotemperature"]], "mpas_analysis.ocean.ClimatologyMapEKE": [[24, "mpas-analysis-ocean-climatologymapeke"]], "mpas_analysis.ocean.ClimatologyMapMLD": [[25, "mpas-analysis-ocean-climatologymapmld"]], "mpas_analysis.ocean.ClimatologyMapMLDMinMax": [[26, "mpas-analysis-ocean-climatologymapmldminmax"]], "mpas_analysis.ocean.ClimatologyMapOHCAnomaly": [[27, "mpas-analysis-ocean-climatologymapohcanomaly"]], "mpas_analysis.ocean.ClimatologyMapSSH": [[28, "mpas-analysis-ocean-climatologymapssh"]], "mpas_analysis.ocean.ClimatologyMapSSS": [[29, "mpas-analysis-ocean-climatologymapsss"]], "mpas_analysis.ocean.ClimatologyMapSST": [[30, "mpas-analysis-ocean-climatologymapsst"]], "mpas_analysis.ocean.ClimatologyMapSose": [[31, "mpas-analysis-ocean-climatologymapsose"]], "mpas_analysis.ocean.ClimatologyMapWaves": [[32, "mpas-analysis-ocean-climatologymapwaves"]], "mpas_analysis.ocean.ConservationTask": [[33, "mpas-analysis-ocean-conservationtask"]], "mpas_analysis.ocean.IndexNino34": [[34, "mpas-analysis-ocean-indexnino34"]], "mpas_analysis.ocean.MeridionalHeatTransport": [[35, "mpas-analysis-ocean-meridionalheattransport"]], "mpas_analysis.ocean.OceanHistogram": [[36, "mpas-analysis-ocean-oceanhistogram"]], "mpas_analysis.ocean.StreamfunctionMOC": [[37, "mpas-analysis-ocean-streamfunctionmoc"]], "mpas_analysis.ocean.TimeSeriesAntarcticMelt": [[38, 
"mpas-analysis-ocean-timeseriesantarcticmelt"]], "mpas_analysis.ocean.TimeSeriesOHCAnomaly": [[39, "mpas-analysis-ocean-timeseriesohcanomaly"]], "mpas_analysis.ocean.TimeSeriesOceanRegions": [[40, "mpas-analysis-ocean-timeseriesoceanregions"]], "mpas_analysis.ocean.TimeSeriesSST": [[41, "mpas-analysis-ocean-timeseriessst"]], "mpas_analysis.ocean.TimeSeriesSalinityAnomaly": [[42, "mpas-analysis-ocean-timeseriessalinityanomaly"]], "mpas_analysis.ocean.TimeSeriesTemperatureAnomaly": [[43, "mpas-analysis-ocean-timeseriestemperatureanomaly"]], "mpas_analysis.ocean.TimeSeriesTransport": [[44, "mpas-analysis-ocean-timeseriestransport"]], "mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask": [[45, "mpas-analysis-ocean-compute-anomaly-subtask-computeanomalysubtask"]], "mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask": [[46, "mpas-analysis-ocean-plot-depth-integrated-time-series-subtask-plotdepthintegratedtimeseriessubtask"]], "mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask": [[47, "mpas-analysis-ocean-plot-hovmoller-subtask-plothovmollersubtask"]], "mpas_analysis.sea_ice.ClimatologyMapIcebergConc": [[48, "mpas-analysis-sea-ice-climatologymapicebergconc"]], "mpas_analysis.sea_ice.ClimatologyMapSeaIceConc": [[49, "mpas-analysis-sea-ice-climatologymapseaiceconc"]], "mpas_analysis.sea_ice.ClimatologyMapSeaIceThick": [[50, "mpas-analysis-sea-ice-climatologymapseaicethick"]], "mpas_analysis.sea_ice.TimeSeriesSeaIce": [[51, "mpas-analysis-sea-ice-timeseriesseaice"]], "mpas_analysis.shared.AnalysisTask": [[52, "mpas-analysis-shared-analysistask"]], "mpas_analysis.shared.AnalysisTask.add_subtask": [[53, "mpas-analysis-shared-analysistask-add-subtask"]], "mpas_analysis.shared.AnalysisTask.check_analysis_enabled": [[54, "mpas-analysis-shared-analysistask-check-analysis-enabled"]], "mpas_analysis.shared.AnalysisTask.check_generate": [[55, "mpas-analysis-shared-analysistask-check-generate"]], 
"mpas_analysis.shared.AnalysisTask.run": [[56, "mpas-analysis-shared-analysistask-run"]], "mpas_analysis.shared.AnalysisTask.run_after": [[57, "mpas-analysis-shared-analysistask-run-after"]], "mpas_analysis.shared.AnalysisTask.run_task": [[58, "mpas-analysis-shared-analysistask-run-task"]], "mpas_analysis.shared.AnalysisTask.set_start_end_date": [[59, "mpas-analysis-shared-analysistask-set-start-end-date"]], "mpas_analysis.shared.AnalysisTask.setup_and_check": [[60, "mpas-analysis-shared-analysistask-setup-and-check"]], "mpas_analysis.shared.climatology.MpasClimatologyTask": [[61, "mpas-analysis-shared-climatology-mpasclimatologytask"]], "mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables": [[62, "mpas-analysis-shared-climatology-mpasclimatologytask-add-variables"]], "mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name": [[63, "mpas-analysis-shared-climatology-mpasclimatologytask-get-file-name"]], "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask": [[64, "mpas-analysis-shared-climatology-remapmpasclimatologysubtask"]], "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor": [[65, "mpas-analysis-shared-climatology-remapmpasclimatologysubtask-add-comparison-grid-descriptor"]], "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology": [[66, "mpas-analysis-shared-climatology-remapmpasclimatologysubtask-customize-masked-climatology"]], "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology": [[67, "mpas-analysis-shared-climatology-remapmpasclimatologysubtask-customize-remapped-climatology"]], "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name": [[68, "mpas-analysis-shared-climatology-remapmpasclimatologysubtask-get-masked-file-name"]], "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name": [[69, 
"mpas-analysis-shared-climatology-remapmpasclimatologysubtask-get-remapped-file-name"]], "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task": [[70, "mpas-analysis-shared-climatology-remapmpasclimatologysubtask-run-task"]], "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check": [[71, "mpas-analysis-shared-climatology-remapmpasclimatologysubtask-setup-and-check"]], "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask": [[72, "mpas-analysis-shared-climatology-remapobservedclimatologysubtask"]], "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset": [[73, "mpas-analysis-shared-climatology-remapobservedclimatologysubtask-build-observational-dataset"]], "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name": [[74, "mpas-analysis-shared-climatology-remapobservedclimatologysubtask-get-file-name"]], "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor": [[75, "mpas-analysis-shared-climatology-remapobservedclimatologysubtask-get-observation-descriptor"]], "mpas_analysis.shared.climatology.add_years_months_days_in_month": [[76, "mpas-analysis-shared-climatology-add-years-months-days-in-month"]], "mpas_analysis.shared.climatology.compute_climatology": [[77, "mpas-analysis-shared-climatology-compute-climatology"]], "mpas_analysis.shared.climatology.compute_monthly_climatology": [[78, "mpas-analysis-shared-climatology-compute-monthly-climatology"]], "mpas_analysis.shared.climatology.get_comparison_descriptor": [[79, "mpas-analysis-shared-climatology-get-comparison-descriptor"]], "mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name": [[80, "mpas-analysis-shared-climatology-get-masked-mpas-climatology-file-name"]], "mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name": [[81, "mpas-analysis-shared-climatology-get-remapped-mpas-climatology-file-name"]], 
"mpas_analysis.shared.climatology.get_remapper": [[82, "mpas-analysis-shared-climatology-get-remapper"]], "mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory": [[83, "mpas-analysis-shared-climatology-get-unmasked-mpas-climatology-directory"]], "mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name": [[84, "mpas-analysis-shared-climatology-get-unmasked-mpas-climatology-file-name"]], "mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset": [[85, "mpas-analysis-shared-generalized-reader-generalized-reader-open-multifile-dataset"]], "mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__": [[86, "mpas-analysis-shared-io-namelist-streams-interface-namelist-getattr"]], "mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__": [[87, "mpas-analysis-shared-io-namelist-streams-interface-namelist-getitem"]], "mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__": [[88, "mpas-analysis-shared-io-namelist-streams-interface-namelist-init"]], "mpas_analysis.shared.io.namelist_streams_interface.NameList.get": [[89, "mpas-analysis-shared-io-namelist-streams-interface-namelist-get"]], "mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool": [[90, "mpas-analysis-shared-io-namelist-streams-interface-namelist-getbool"]], "mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat": [[91, "mpas-analysis-shared-io-namelist-streams-interface-namelist-getfloat"]], "mpas_analysis.shared.io.namelist_streams_interface.NameList.getint": [[92, "mpas-analysis-shared-io-namelist-streams-interface-namelist-getint"]], "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__": [[93, "mpas-analysis-shared-io-namelist-streams-interface-streamsfile-init"]], "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream": [[94, "mpas-analysis-shared-io-namelist-streams-interface-streamsfile-find-stream"]], 
"mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream": [[95, "mpas-analysis-shared-io-namelist-streams-interface-streamsfile-has-stream"]], "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read": [[96, "mpas-analysis-shared-io-namelist-streams-interface-streamsfile-read"]], "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath": [[97, "mpas-analysis-shared-io-namelist-streams-interface-streamsfile-readpath"]], "mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict": [[98, "mpas-analysis-shared-io-namelist-streams-interface-convert-namelist-to-dict"]], "mpas_analysis.shared.io.open_mpas_dataset": [[99, "mpas-analysis-shared-io-open-mpas-dataset"]], "mpas_analysis.shared.io.utility.build_config_full_path": [[100, "mpas-analysis-shared-io-utility-build-config-full-path"]], "mpas_analysis.shared.io.utility.check_path_exists": [[101, "mpas-analysis-shared-io-utility-check-path-exists"]], "mpas_analysis.shared.io.utility.make_directories": [[102, "mpas-analysis-shared-io-utility-make-directories"]], "mpas_analysis.shared.io.utility.paths": [[103, "mpas-analysis-shared-io-utility-paths"]], "mpas_analysis.shared.io.write_netcdf": [[104, "module-mpas_analysis.shared.io.write_netcdf"]], "mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset": [[105, "mpas-analysis-shared-mpas-xarray-mpas-xarray-open-multifile-dataset"]], "mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess": [[106, "mpas-analysis-shared-mpas-xarray-mpas-xarray-preprocess"]], "mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index": [[107, "mpas-analysis-shared-mpas-xarray-mpas-xarray-remove-repeated-time-index"]], "mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables": [[108, "mpas-analysis-shared-mpas-xarray-mpas-xarray-subset-variables"]], "mpas_analysis.shared.plot.PlotClimatologyMapSubtask": [[109, "mpas-analysis-shared-plot-plotclimatologymapsubtask"]], 
"mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info": [[110, "mpas-analysis-shared-plot-plotclimatologymapsubtask-set-plot-info"]], "mpas_analysis.shared.plot.add_inset": [[111, "mpas-analysis-shared-plot-add-inset"]], "mpas_analysis.shared.plot.colormap.setup_colormap": [[112, "mpas-analysis-shared-plot-colormap-setup-colormap"]], "mpas_analysis.shared.plot.plot_1D": [[113, "mpas-analysis-shared-plot-plot-1d"]], "mpas_analysis.shared.plot.plot_global_comparison": [[114, "mpas-analysis-shared-plot-plot-global-comparison"]], "mpas_analysis.shared.plot.plot_polar_comparison": [[115, "mpas-analysis-shared-plot-plot-polar-comparison"]], "mpas_analysis.shared.plot.plot_vertical_section": [[116, "mpas-analysis-shared-plot-plot-vertical-section"]], "mpas_analysis.shared.plot.plot_vertical_section_comparison": [[117, "mpas-analysis-shared-plot-plot-vertical-section-comparison"]], "mpas_analysis.shared.plot.ticks.plot_xtick_format": [[118, "mpas-analysis-shared-plot-ticks-plot-xtick-format"]], "mpas_analysis.shared.plot.timeseries_analysis_plot": [[119, "mpas-analysis-shared-plot-timeseries-analysis-plot"]], "mpas_analysis.shared.plot.timeseries_analysis_plot_polar": [[120, "mpas-analysis-shared-plot-timeseries-analysis-plot-polar"]], "mpas_analysis.shared.projection.get_cartopy_projection": [[121, "mpas-analysis-shared-projection-get-cartopy-projection"]], "mpas_analysis.shared.projection.get_pyproj_projection": [[122, "mpas-analysis-shared-projection-get-pyproj-projection"]], "mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks": [[123, "mpas-analysis-shared-regions-compute-region-masks-computeregionmasks"]], "mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask": [[124, "mpas-analysis-shared-regions-compute-region-masks-subtask-computeregionmaskssubtask"]], "mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list": [[125, 
"mpas-analysis-shared-regions-compute-region-masks-subtask-get-feature-list"]], "mpas_analysis.shared.time_series.MpasTimeSeriesTask": [[126, "mpas-analysis-shared-time-series-mpastimeseriestask"]], "mpas_analysis.shared.time_series.cache_time_series": [[127, "mpas-analysis-shared-time-series-cache-time-series"]], "mpas_analysis.shared.time_series.compute_moving_avg": [[128, "mpas-analysis-shared-time-series-compute-moving-avg"]], "mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start": [[129, "mpas-analysis-shared-time-series-compute-moving-avg-anomaly-from-start"]], "mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta": [[130, "mpas-analysis-shared-timekeeping-mpasrelativedelta-mpasrelativedelta"]], "mpas_analysis.shared.timekeeping.utility.date_to_days": [[131, "mpas-analysis-shared-timekeeping-utility-date-to-days"]], "mpas_analysis.shared.timekeeping.utility.datetime_to_days": [[132, "mpas-analysis-shared-timekeeping-utility-datetime-to-days"]], "mpas_analysis.shared.timekeeping.utility.days_to_datetime": [[133, "mpas-analysis-shared-timekeeping-utility-days-to-datetime"]], "mpas_analysis.shared.timekeeping.utility.get_simulation_start_time": [[134, "mpas-analysis-shared-timekeeping-utility-get-simulation-start-time"]], "mpas_analysis.shared.timekeeping.utility.string_to_datetime": [[135, "mpas-analysis-shared-timekeeping-utility-string-to-datetime"]], "mpas_analysis.shared.timekeeping.utility.string_to_days_since_date": [[136, "mpas-analysis-shared-timekeeping-utility-string-to-days-since-date"]], "mpas_analysis.shared.timekeeping.utility.string_to_relative_delta": [[137, "mpas-analysis-shared-timekeeping-utility-string-to-relative-delta"]], "mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask": [[138, "mpas-analysis-shared-transects-compute-transect-masks-subtask-computetransectmaskssubtask"]], "mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks": 
[[139, "mpas-analysis-shared-transects-compute-transect-masks-subtask-compute-mpas-transect-masks"]], "oceanHistogram": [[234, "oceanhistogram"]], "oceanRegionalProfiles": [[235, "oceanregionalprofiles"]], "regionalTSDiagrams": [[236, "regionaltsdiagrams"]], "soseTransects": [[237, "sosetransects"]], "streamfunctionMOC": [[238, "streamfunctionmoc"]], "timeSeriesAntarcticMelt": [[239, "timeseriesantarcticmelt"]], "timeSeriesOHCAnomaly": [[240, "timeseriesohcanomaly"]], "timeSeriesOceanRegions": [[241, "timeseriesoceanregions"]], "timeSeriesSST": [[242, "timeseriessst"]], "timeSeriesSalinityAnomaly": [[243, "timeseriessalinityanomaly"]], "timeSeriesSeaIceAreaVol": [[244, "timeseriesseaiceareavol"]], "timeSeriesTemperatureAnomaly": [[245, "timeseriestemperatureanomaly"]], "timeSeriesTransport": [[246, "timeseriestransport"]], "woceTransects": [[247, "wocetransects"]]}, "docnames": ["authors", "design_docs/analysis_task_template", "design_docs/config_file_reorganization", "design_docs/eddykineticenergy", "design_docs/generalize_calendar", "design_docs/generalized_horizontal_interpolation", "design_docs/index", "design_docs/parallel_tasks", "design_docs/prerequisite_tasks", "design_docs/remapper", "design_docs/timekeeping_reorg", "design_docs/variable_mapping_reorg", "developers_guide/api", "developers_guide/generated/mpas_analysis.__main__.add_task_and_subtasks", "developers_guide/generated/mpas_analysis.__main__.build_analysis_list", "developers_guide/generated/mpas_analysis.__main__.determine_analyses_to_generate", "developers_guide/generated/mpas_analysis.__main__.main", "developers_guide/generated/mpas_analysis.__main__.run_analysis", "developers_guide/generated/mpas_analysis.__main__.update_generate", "developers_guide/generated/mpas_analysis.__main__.wait_for_task", "developers_guide/generated/mpas_analysis.download_data.download_analysis_data", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapAntarcticMelt", 
"developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoSalinity", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoTemperature", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapEKE", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLD", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLDMinMax", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapOHCAnomaly", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSH", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSS", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSST", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSose", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapWaves", "developers_guide/generated/mpas_analysis.ocean.ConservationTask", "developers_guide/generated/mpas_analysis.ocean.IndexNino34", "developers_guide/generated/mpas_analysis.ocean.MeridionalHeatTransport", "developers_guide/generated/mpas_analysis.ocean.OceanHistogram", "developers_guide/generated/mpas_analysis.ocean.StreamfunctionMOC", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesAntarcticMelt", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesOHCAnomaly", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesOceanRegions", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesSST", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesSalinityAnomaly", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesTemperatureAnomaly", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesTransport", "developers_guide/generated/mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask", "developers_guide/generated/mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask", "developers_guide/generated/mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask", 
"developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapIcebergConc", "developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceConc", "developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceThick", "developers_guide/generated/mpas_analysis.sea_ice.TimeSeriesSeaIce", "developers_guide/generated/mpas_analysis.shared.AnalysisTask", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.add_subtask", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_analysis_enabled", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_generate", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.run", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_after", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_task", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.set_start_end_date", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.setup_and_check", "developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask", "developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables", "developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name", 
"developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check", "developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask", "developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset", "developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name", "developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor", "developers_guide/generated/mpas_analysis.shared.climatology.add_years_months_days_in_month", "developers_guide/generated/mpas_analysis.shared.climatology.compute_climatology", "developers_guide/generated/mpas_analysis.shared.climatology.compute_monthly_climatology", "developers_guide/generated/mpas_analysis.shared.climatology.get_comparison_descriptor", "developers_guide/generated/mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name", "developers_guide/generated/mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name", "developers_guide/generated/mpas_analysis.shared.climatology.get_remapper", "developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory", "developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name", "developers_guide/generated/mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__", 
"developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.get", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getint", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict", "developers_guide/generated/mpas_analysis.shared.io.open_mpas_dataset", "developers_guide/generated/mpas_analysis.shared.io.utility.build_config_full_path", "developers_guide/generated/mpas_analysis.shared.io.utility.check_path_exists", "developers_guide/generated/mpas_analysis.shared.io.utility.make_directories", "developers_guide/generated/mpas_analysis.shared.io.utility.paths", "developers_guide/generated/mpas_analysis.shared.io.write_netcdf", "developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset", "developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess", "developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index", "developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables", "developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask", 
"developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info", "developers_guide/generated/mpas_analysis.shared.plot.add_inset", "developers_guide/generated/mpas_analysis.shared.plot.colormap.setup_colormap", "developers_guide/generated/mpas_analysis.shared.plot.plot_1D", "developers_guide/generated/mpas_analysis.shared.plot.plot_global_comparison", "developers_guide/generated/mpas_analysis.shared.plot.plot_polar_comparison", "developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section", "developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section_comparison", "developers_guide/generated/mpas_analysis.shared.plot.ticks.plot_xtick_format", "developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot", "developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot_polar", "developers_guide/generated/mpas_analysis.shared.projection.get_cartopy_projection", "developers_guide/generated/mpas_analysis.shared.projection.get_pyproj_projection", "developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks", "developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask", "developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list", "developers_guide/generated/mpas_analysis.shared.time_series.MpasTimeSeriesTask", "developers_guide/generated/mpas_analysis.shared.time_series.cache_time_series", "developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg", "developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start", "developers_guide/generated/mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.date_to_days", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.datetime_to_days", 
"developers_guide/generated/mpas_analysis.shared.timekeeping.utility.days_to_datetime", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.get_simulation_start_time", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_datetime", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_days_since_date", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_relative_delta", "developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask", "developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks", "index", "tutorials/dev_add_task", "tutorials/dev_getting_started", "tutorials/dev_understand_a_task", "tutorials/getting_started", "users_guide/all_obs", "users_guide/analysis_tasks", "users_guide/components", "users_guide/config/climatology", "users_guide/config/colormaps", "users_guide/config/comparison_grids", "users_guide/config/dask_threads", "users_guide/config/diagnostics", "users_guide/config/execute", "users_guide/config/html", "users_guide/config/index", "users_guide/config/input", "users_guide/config/moving_average", "users_guide/config/observations", "users_guide/config/output", "users_guide/config/plot", "users_guide/config/preprocessed", "users_guide/config/regions", "users_guide/config/runs", "users_guide/config/seasons", "users_guide/config/timeSeries", "users_guide/config/time_axis_ticks", "users_guide/config/transects", "users_guide/configuration", "users_guide/e3sm", "users_guide/mpaso", "users_guide/mpasseaice", "users_guide/obs/adusumilli_melt", "users_guide/obs/altiberg", "users_guide/obs/aniceflux", "users_guide/obs/aquarius_sss", "users_guide/obs/argo_mld", "users_guide/obs/aviso_ssh", "users_guide/obs/bootstrap_conc", "users_guide/obs/drifter_eke", "users_guide/obs/era5_waves", "users_guide/obs/ers_sst_nino", "users_guide/obs/glodapv2", 
"users_guide/obs/hadisst_nino", "users_guide/obs/hadley_center_sst", "users_guide/obs/icesat_thickness", "users_guide/obs/landschuetzer-som-ffn", "users_guide/obs/nasateam_conc", "users_guide/obs/paolo_melt", "users_guide/obs/piomass_ice_volume", "users_guide/obs/rignot_melt", "users_guide/obs/roemmich_gilson_argo", "users_guide/obs/schmidtko", "users_guide/obs/seawifs", "users_guide/obs/sose", "users_guide/obs/sscci_waves", "users_guide/obs/ssmi_ice_area", "users_guide/obs/trenberth_mht", "users_guide/obs/woa", "users_guide/obs/woa18_t_s", "users_guide/obs/woce", "users_guide/observations", "users_guide/ocean_obs_table", "users_guide/quick_start", "users_guide/seaice_obs_table", "users_guide/tasks/climatologyMapAntarcticMelt", "users_guide/tasks/climatologyMapArgoSalinity", "users_guide/tasks/climatologyMapArgoTemperature", "users_guide/tasks/climatologyMapBGC", "users_guide/tasks/climatologyMapEKE", "users_guide/tasks/climatologyMapIcebergConcSH", "users_guide/tasks/climatologyMapMLD", "users_guide/tasks/climatologyMapMLDMinMax", "users_guide/tasks/climatologyMapOHCAnomaly", "users_guide/tasks/climatologyMapSSH", "users_guide/tasks/climatologyMapSSS", "users_guide/tasks/climatologyMapSST", "users_guide/tasks/climatologyMapSchmidtko", "users_guide/tasks/climatologyMapSeaIceConcNH", "users_guide/tasks/climatologyMapSeaIceConcSH", "users_guide/tasks/climatologyMapSeaIceMeltingNH", "users_guide/tasks/climatologyMapSeaIceMeltingSH", "users_guide/tasks/climatologyMapSeaIceProductionNH", "users_guide/tasks/climatologyMapSeaIceProductionSH", "users_guide/tasks/climatologyMapSeaIceThickNH", "users_guide/tasks/climatologyMapSeaIceThickSH", "users_guide/tasks/climatologyMapSose", "users_guide/tasks/climatologyMapWaves", "users_guide/tasks/climatologyMapWoa", "users_guide/tasks/conservation", "users_guide/tasks/geojsonTransects", "users_guide/tasks/hovmollerOceanRegions", "users_guide/tasks/indexNino34", "users_guide/tasks/meridionalHeatTransport", 
"users_guide/tasks/oceanHistogram", "users_guide/tasks/oceanRegionalProfiles", "users_guide/tasks/regionalTSDiagrams", "users_guide/tasks/soseTransects", "users_guide/tasks/streamfunctionMOC", "users_guide/tasks/timeSeriesAntarcticMelt", "users_guide/tasks/timeSeriesOHCAnomaly", "users_guide/tasks/timeSeriesOceanRegions", "users_guide/tasks/timeSeriesSST", "users_guide/tasks/timeSeriesSalinityAnomaly", "users_guide/tasks/timeSeriesSeaIceAreaVol", "users_guide/tasks/timeSeriesTemperatureAnomaly", "users_guide/tasks/timeSeriesTransport", "users_guide/tasks/woceTransects", "versions"], "envversion": {"sphinx": 61, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx.ext.viewcode": 1}, "filenames": ["authors.rst", "design_docs/analysis_task_template.rst", "design_docs/config_file_reorganization.rst", "design_docs/eddykineticenergy.rst", "design_docs/generalize_calendar.rst", "design_docs/generalized_horizontal_interpolation.rst", "design_docs/index.rst", "design_docs/parallel_tasks.rst", "design_docs/prerequisite_tasks.rst", "design_docs/remapper.rst", "design_docs/timekeeping_reorg.rst", "design_docs/variable_mapping_reorg.rst", "developers_guide/api.rst", "developers_guide/generated/mpas_analysis.__main__.add_task_and_subtasks.rst", "developers_guide/generated/mpas_analysis.__main__.build_analysis_list.rst", "developers_guide/generated/mpas_analysis.__main__.determine_analyses_to_generate.rst", "developers_guide/generated/mpas_analysis.__main__.main.rst", "developers_guide/generated/mpas_analysis.__main__.run_analysis.rst", "developers_guide/generated/mpas_analysis.__main__.update_generate.rst", "developers_guide/generated/mpas_analysis.__main__.wait_for_task.rst", 
"developers_guide/generated/mpas_analysis.download_data.download_analysis_data.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapAntarcticMelt.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoSalinity.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapArgoTemperature.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapEKE.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLD.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapMLDMinMax.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapOHCAnomaly.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSH.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSSS.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSST.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapSose.rst", "developers_guide/generated/mpas_analysis.ocean.ClimatologyMapWaves.rst", "developers_guide/generated/mpas_analysis.ocean.ConservationTask.rst", "developers_guide/generated/mpas_analysis.ocean.IndexNino34.rst", "developers_guide/generated/mpas_analysis.ocean.MeridionalHeatTransport.rst", "developers_guide/generated/mpas_analysis.ocean.OceanHistogram.rst", "developers_guide/generated/mpas_analysis.ocean.StreamfunctionMOC.rst", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesAntarcticMelt.rst", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesOHCAnomaly.rst", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesOceanRegions.rst", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesSST.rst", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesSalinityAnomaly.rst", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesTemperatureAnomaly.rst", "developers_guide/generated/mpas_analysis.ocean.TimeSeriesTransport.rst", "developers_guide/generated/mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask.rst", 
"developers_guide/generated/mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask.rst", "developers_guide/generated/mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask.rst", "developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapIcebergConc.rst", "developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceConc.rst", "developers_guide/generated/mpas_analysis.sea_ice.ClimatologyMapSeaIceThick.rst", "developers_guide/generated/mpas_analysis.sea_ice.TimeSeriesSeaIce.rst", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.rst", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.add_subtask.rst", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_analysis_enabled.rst", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.check_generate.rst", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.run.rst", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_after.rst", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.run_task.rst", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.set_start_end_date.rst", "developers_guide/generated/mpas_analysis.shared.AnalysisTask.setup_and_check.rst", "developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.rst", "developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables.rst", "developers_guide/generated/mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name.rst", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.rst", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor.rst", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology.rst", 
"developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology.rst", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name.rst", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name.rst", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task.rst", "developers_guide/generated/mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check.rst", "developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.rst", "developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset.rst", "developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name.rst", "developers_guide/generated/mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor.rst", "developers_guide/generated/mpas_analysis.shared.climatology.add_years_months_days_in_month.rst", "developers_guide/generated/mpas_analysis.shared.climatology.compute_climatology.rst", "developers_guide/generated/mpas_analysis.shared.climatology.compute_monthly_climatology.rst", "developers_guide/generated/mpas_analysis.shared.climatology.get_comparison_descriptor.rst", "developers_guide/generated/mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name.rst", "developers_guide/generated/mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name.rst", "developers_guide/generated/mpas_analysis.shared.climatology.get_remapper.rst", "developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory.rst", "developers_guide/generated/mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name.rst", 
"developers_guide/generated/mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.get.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.NameList.getint.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath.rst", "developers_guide/generated/mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict.rst", "developers_guide/generated/mpas_analysis.shared.io.open_mpas_dataset.rst", "developers_guide/generated/mpas_analysis.shared.io.utility.build_config_full_path.rst", "developers_guide/generated/mpas_analysis.shared.io.utility.check_path_exists.rst", "developers_guide/generated/mpas_analysis.shared.io.utility.make_directories.rst", "developers_guide/generated/mpas_analysis.shared.io.utility.paths.rst", "developers_guide/generated/mpas_analysis.shared.io.write_netcdf.rst", 
"developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset.rst", "developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess.rst", "developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index.rst", "developers_guide/generated/mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables.rst", "developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.rst", "developers_guide/generated/mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info.rst", "developers_guide/generated/mpas_analysis.shared.plot.add_inset.rst", "developers_guide/generated/mpas_analysis.shared.plot.colormap.setup_colormap.rst", "developers_guide/generated/mpas_analysis.shared.plot.plot_1D.rst", "developers_guide/generated/mpas_analysis.shared.plot.plot_global_comparison.rst", "developers_guide/generated/mpas_analysis.shared.plot.plot_polar_comparison.rst", "developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section.rst", "developers_guide/generated/mpas_analysis.shared.plot.plot_vertical_section_comparison.rst", "developers_guide/generated/mpas_analysis.shared.plot.ticks.plot_xtick_format.rst", "developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot.rst", "developers_guide/generated/mpas_analysis.shared.plot.timeseries_analysis_plot_polar.rst", "developers_guide/generated/mpas_analysis.shared.projection.get_cartopy_projection.rst", "developers_guide/generated/mpas_analysis.shared.projection.get_pyproj_projection.rst", "developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks.rst", "developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask.rst", "developers_guide/generated/mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list.rst", "developers_guide/generated/mpas_analysis.shared.time_series.MpasTimeSeriesTask.rst", 
"developers_guide/generated/mpas_analysis.shared.time_series.cache_time_series.rst", "developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg.rst", "developers_guide/generated/mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start.rst", "developers_guide/generated/mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta.rst", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.date_to_days.rst", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.datetime_to_days.rst", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.days_to_datetime.rst", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.get_simulation_start_time.rst", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_datetime.rst", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_days_since_date.rst", "developers_guide/generated/mpas_analysis.shared.timekeeping.utility.string_to_relative_delta.rst", "developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask.rst", "developers_guide/generated/mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks.rst", "index.rst", "tutorials/dev_add_task.rst", "tutorials/dev_getting_started.rst", "tutorials/dev_understand_a_task.rst", "tutorials/getting_started.rst", "users_guide/all_obs.rst", "users_guide/analysis_tasks.rst", "users_guide/components.rst", "users_guide/config/climatology.rst", "users_guide/config/colormaps.rst", "users_guide/config/comparison_grids.rst", "users_guide/config/dask_threads.rst", "users_guide/config/diagnostics.rst", "users_guide/config/execute.rst", "users_guide/config/html.rst", "users_guide/config/index.rst", "users_guide/config/input.rst", "users_guide/config/moving_average.rst", "users_guide/config/observations.rst", "users_guide/config/output.rst", 
"users_guide/config/plot.rst", "users_guide/config/preprocessed.rst", "users_guide/config/regions.rst", "users_guide/config/runs.rst", "users_guide/config/seasons.rst", "users_guide/config/timeSeries.rst", "users_guide/config/time_axis_ticks.rst", "users_guide/config/transects.rst", "users_guide/configuration.rst", "users_guide/e3sm.rst", "users_guide/mpaso.rst", "users_guide/mpasseaice.rst", "users_guide/obs/adusumilli_melt.rst", "users_guide/obs/altiberg.rst", "users_guide/obs/aniceflux.rst", "users_guide/obs/aquarius_sss.rst", "users_guide/obs/argo_mld.rst", "users_guide/obs/aviso_ssh.rst", "users_guide/obs/bootstrap_conc.rst", "users_guide/obs/drifter_eke.rst", "users_guide/obs/era5_waves.rst", "users_guide/obs/ers_sst_nino.rst", "users_guide/obs/glodapv2.rst", "users_guide/obs/hadisst_nino.rst", "users_guide/obs/hadley_center_sst.rst", "users_guide/obs/icesat_thickness.rst", "users_guide/obs/landschuetzer-som-ffn.rst", "users_guide/obs/nasateam_conc.rst", "users_guide/obs/paolo_melt.rst", "users_guide/obs/piomass_ice_volume.rst", "users_guide/obs/rignot_melt.rst", "users_guide/obs/roemmich_gilson_argo.rst", "users_guide/obs/schmidtko.rst", "users_guide/obs/seawifs.rst", "users_guide/obs/sose.rst", "users_guide/obs/sscci_waves.rst", "users_guide/obs/ssmi_ice_area.rst", "users_guide/obs/trenberth_mht.rst", "users_guide/obs/woa.rst", "users_guide/obs/woa18_t_s.rst", "users_guide/obs/woce.rst", "users_guide/observations.rst", "users_guide/ocean_obs_table.rst", "users_guide/quick_start.rst", "users_guide/seaice_obs_table.rst", "users_guide/tasks/climatologyMapAntarcticMelt.rst", "users_guide/tasks/climatologyMapArgoSalinity.rst", "users_guide/tasks/climatologyMapArgoTemperature.rst", "users_guide/tasks/climatologyMapBGC.rst", "users_guide/tasks/climatologyMapEKE.rst", "users_guide/tasks/climatologyMapIcebergConcSH.rst", "users_guide/tasks/climatologyMapMLD.rst", "users_guide/tasks/climatologyMapMLDMinMax.rst", "users_guide/tasks/climatologyMapOHCAnomaly.rst", 
"users_guide/tasks/climatologyMapSSH.rst", "users_guide/tasks/climatologyMapSSS.rst", "users_guide/tasks/climatologyMapSST.rst", "users_guide/tasks/climatologyMapSchmidtko.rst", "users_guide/tasks/climatologyMapSeaIceConcNH.rst", "users_guide/tasks/climatologyMapSeaIceConcSH.rst", "users_guide/tasks/climatologyMapSeaIceMeltingNH.rst", "users_guide/tasks/climatologyMapSeaIceMeltingSH.rst", "users_guide/tasks/climatologyMapSeaIceProductionNH.rst", "users_guide/tasks/climatologyMapSeaIceProductionSH.rst", "users_guide/tasks/climatologyMapSeaIceThickNH.rst", "users_guide/tasks/climatologyMapSeaIceThickSH.rst", "users_guide/tasks/climatologyMapSose.rst", "users_guide/tasks/climatologyMapWaves.rst", "users_guide/tasks/climatologyMapWoa.rst", "users_guide/tasks/conservation.rst", "users_guide/tasks/geojsonTransects.rst", "users_guide/tasks/hovmollerOceanRegions.rst", "users_guide/tasks/indexNino34.rst", "users_guide/tasks/meridionalHeatTransport.rst", "users_guide/tasks/oceanHistogram.rst", "users_guide/tasks/oceanRegionalProfiles.rst", "users_guide/tasks/regionalTSDiagrams.rst", "users_guide/tasks/soseTransects.rst", "users_guide/tasks/streamfunctionMOC.rst", "users_guide/tasks/timeSeriesAntarcticMelt.rst", "users_guide/tasks/timeSeriesOHCAnomaly.rst", "users_guide/tasks/timeSeriesOceanRegions.rst", "users_guide/tasks/timeSeriesSST.rst", "users_guide/tasks/timeSeriesSalinityAnomaly.rst", "users_guide/tasks/timeSeriesSeaIceAreaVol.rst", "users_guide/tasks/timeSeriesTemperatureAnomaly.rst", "users_guide/tasks/timeSeriesTransport.rst", "users_guide/tasks/woceTransects.rst", "versions.rst"], "indexentries": {"__getattr__() (mpas_analysis.shared.io.namelist_streams_interface.namelist method)": [[86, "mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__", false]], "__getitem__() (mpas_analysis.shared.io.namelist_streams_interface.namelist method)": [[87, "mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__", false]], "__init__() 
(mpas_analysis.ocean.climatologymapantarcticmelt method)": [[21, "mpas_analysis.ocean.ClimatologyMapAntarcticMelt.__init__", false]], "__init__() (mpas_analysis.ocean.climatologymapargosalinity method)": [[22, "mpas_analysis.ocean.ClimatologyMapArgoSalinity.__init__", false]], "__init__() (mpas_analysis.ocean.climatologymapargotemperature method)": [[23, "mpas_analysis.ocean.ClimatologyMapArgoTemperature.__init__", false]], "__init__() (mpas_analysis.ocean.climatologymapeke method)": [[24, "mpas_analysis.ocean.ClimatologyMapEKE.__init__", false]], "__init__() (mpas_analysis.ocean.climatologymapmld method)": [[25, "mpas_analysis.ocean.ClimatologyMapMLD.__init__", false]], "__init__() (mpas_analysis.ocean.climatologymapmldminmax method)": [[26, "mpas_analysis.ocean.ClimatologyMapMLDMinMax.__init__", false]], "__init__() (mpas_analysis.ocean.climatologymapohcanomaly method)": [[27, "mpas_analysis.ocean.ClimatologyMapOHCAnomaly.__init__", false]], "__init__() (mpas_analysis.ocean.climatologymapsose method)": [[31, "mpas_analysis.ocean.ClimatologyMapSose.__init__", false]], "__init__() (mpas_analysis.ocean.climatologymapssh method)": [[28, "mpas_analysis.ocean.ClimatologyMapSSH.__init__", false]], "__init__() (mpas_analysis.ocean.climatologymapsss method)": [[29, "mpas_analysis.ocean.ClimatologyMapSSS.__init__", false]], "__init__() (mpas_analysis.ocean.climatologymapsst method)": [[30, "mpas_analysis.ocean.ClimatologyMapSST.__init__", false]], "__init__() (mpas_analysis.ocean.climatologymapwaves method)": [[32, "mpas_analysis.ocean.ClimatologyMapWaves.__init__", false]], "__init__() (mpas_analysis.ocean.compute_anomaly_subtask.computeanomalysubtask method)": [[45, "mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask.__init__", false]], "__init__() (mpas_analysis.ocean.conservationtask method)": [[33, "mpas_analysis.ocean.ConservationTask.__init__", false]], "__init__() (mpas_analysis.ocean.indexnino34 method)": [[34, 
"mpas_analysis.ocean.IndexNino34.__init__", false]], "__init__() (mpas_analysis.ocean.meridionalheattransport method)": [[35, "mpas_analysis.ocean.MeridionalHeatTransport.__init__", false]], "__init__() (mpas_analysis.ocean.oceanhistogram method)": [[36, "mpas_analysis.ocean.OceanHistogram.__init__", false]], "__init__() (mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.plotdepthintegratedtimeseriessubtask method)": [[46, "mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask.__init__", false]], "__init__() (mpas_analysis.ocean.plot_hovmoller_subtask.plothovmollersubtask method)": [[47, "mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask.__init__", false]], "__init__() (mpas_analysis.ocean.streamfunctionmoc method)": [[37, "mpas_analysis.ocean.StreamfunctionMOC.__init__", false]], "__init__() (mpas_analysis.ocean.timeseriesantarcticmelt method)": [[38, "mpas_analysis.ocean.TimeSeriesAntarcticMelt.__init__", false]], "__init__() (mpas_analysis.ocean.timeseriesoceanregions method)": [[40, "mpas_analysis.ocean.TimeSeriesOceanRegions.__init__", false]], "__init__() (mpas_analysis.ocean.timeseriesohcanomaly method)": [[39, "mpas_analysis.ocean.TimeSeriesOHCAnomaly.__init__", false]], "__init__() (mpas_analysis.ocean.timeseriessalinityanomaly method)": [[42, "mpas_analysis.ocean.TimeSeriesSalinityAnomaly.__init__", false]], "__init__() (mpas_analysis.ocean.timeseriessst method)": [[41, "mpas_analysis.ocean.TimeSeriesSST.__init__", false]], "__init__() (mpas_analysis.ocean.timeseriestemperatureanomaly method)": [[43, "mpas_analysis.ocean.TimeSeriesTemperatureAnomaly.__init__", false]], "__init__() (mpas_analysis.ocean.timeseriestransport method)": [[44, "mpas_analysis.ocean.TimeSeriesTransport.__init__", false]], "__init__() (mpas_analysis.sea_ice.climatologymapicebergconc method)": [[48, "mpas_analysis.sea_ice.ClimatologyMapIcebergConc.__init__", false]], "__init__() 
(mpas_analysis.sea_ice.climatologymapseaiceconc method)": [[49, "mpas_analysis.sea_ice.ClimatologyMapSeaIceConc.__init__", false]], "__init__() (mpas_analysis.sea_ice.climatologymapseaicethick method)": [[50, "mpas_analysis.sea_ice.ClimatologyMapSeaIceThick.__init__", false]], "__init__() (mpas_analysis.sea_ice.timeseriesseaice method)": [[51, "mpas_analysis.sea_ice.TimeSeriesSeaIce.__init__", false]], "__init__() (mpas_analysis.shared.analysistask method)": [[52, "mpas_analysis.shared.AnalysisTask.__init__", false]], "__init__() (mpas_analysis.shared.climatology.mpasclimatologytask method)": [[61, "mpas_analysis.shared.climatology.MpasClimatologyTask.__init__", false]], "__init__() (mpas_analysis.shared.climatology.remapmpasclimatologysubtask method)": [[64, "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.__init__", false]], "__init__() (mpas_analysis.shared.climatology.remapobservedclimatologysubtask method)": [[72, "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.__init__", false]], "__init__() (mpas_analysis.shared.io.namelist_streams_interface.namelist method)": [[88, "mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__", false]], "__init__() (mpas_analysis.shared.io.namelist_streams_interface.streamsfile method)": [[93, "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__", false]], "__init__() (mpas_analysis.shared.plot.plotclimatologymapsubtask method)": [[109, "mpas_analysis.shared.plot.PlotClimatologyMapSubtask.__init__", false]], "__init__() (mpas_analysis.shared.regions.compute_region_masks.computeregionmasks method)": [[123, "mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks.__init__", false]], "__init__() (mpas_analysis.shared.regions.compute_region_masks_subtask.computeregionmaskssubtask method)": [[124, "mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask.__init__", false]], "__init__() 
(mpas_analysis.shared.time_series.mpastimeseriestask method)": [[126, "mpas_analysis.shared.time_series.MpasTimeSeriesTask.__init__", false]], "__init__() (mpas_analysis.shared.timekeeping.mpasrelativedelta.mpasrelativedelta method)": [[130, "mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta.__init__", false]], "__init__() (mpas_analysis.shared.transects.compute_transect_masks_subtask.computetransectmaskssubtask method)": [[138, "mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask.__init__", false]], "add_comparison_grid_descriptor() (mpas_analysis.shared.climatology.remapmpasclimatologysubtask method)": [[65, "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor", false]], "add_inset() (in module mpas_analysis.shared.plot)": [[111, "mpas_analysis.shared.plot.add_inset", false]], "add_subtask() (mpas_analysis.shared.analysistask method)": [[53, "mpas_analysis.shared.AnalysisTask.add_subtask", false]], "add_task_and_subtasks() (in module mpas_analysis.__main__)": [[13, "mpas_analysis.__main__.add_task_and_subtasks", false]], "add_variables() (mpas_analysis.shared.climatology.mpasclimatologytask method)": [[62, "mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables", false]], "add_years_months_days_in_month() (in module mpas_analysis.shared.climatology)": [[76, "mpas_analysis.shared.climatology.add_years_months_days_in_month", false]], "analysistask (class in mpas_analysis.shared)": [[52, "mpas_analysis.shared.AnalysisTask", false]], "build_analysis_list() (in module mpas_analysis.__main__)": [[14, "mpas_analysis.__main__.build_analysis_list", false]], "build_config_full_path() (in module mpas_analysis.shared.io.utility)": [[100, "mpas_analysis.shared.io.utility.build_config_full_path", false]], "build_observational_dataset() (mpas_analysis.shared.climatology.remapobservedclimatologysubtask method)": [[73, 
"mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset", false]], "cache_time_series() (in module mpas_analysis.shared.time_series)": [[127, "mpas_analysis.shared.time_series.cache_time_series", false]], "check_analysis_enabled() (mpas_analysis.shared.analysistask method)": [[54, "mpas_analysis.shared.AnalysisTask.check_analysis_enabled", false]], "check_generate() (mpas_analysis.shared.analysistask method)": [[55, "mpas_analysis.shared.AnalysisTask.check_generate", false]], "check_path_exists() (in module mpas_analysis.shared.io.utility)": [[101, "mpas_analysis.shared.io.utility.check_path_exists", false]], "climatologymapantarcticmelt (class in mpas_analysis.ocean)": [[21, "mpas_analysis.ocean.ClimatologyMapAntarcticMelt", false]], "climatologymapargosalinity (class in mpas_analysis.ocean)": [[22, "mpas_analysis.ocean.ClimatologyMapArgoSalinity", false]], "climatologymapargotemperature (class in mpas_analysis.ocean)": [[23, "mpas_analysis.ocean.ClimatologyMapArgoTemperature", false]], "climatologymapeke (class in mpas_analysis.ocean)": [[24, "mpas_analysis.ocean.ClimatologyMapEKE", false]], "climatologymapicebergconc (class in mpas_analysis.sea_ice)": [[48, "mpas_analysis.sea_ice.ClimatologyMapIcebergConc", false]], "climatologymapmld (class in mpas_analysis.ocean)": [[25, "mpas_analysis.ocean.ClimatologyMapMLD", false]], "climatologymapmldminmax (class in mpas_analysis.ocean)": [[26, "mpas_analysis.ocean.ClimatologyMapMLDMinMax", false]], "climatologymapohcanomaly (class in mpas_analysis.ocean)": [[27, "mpas_analysis.ocean.ClimatologyMapOHCAnomaly", false]], "climatologymapseaiceconc (class in mpas_analysis.sea_ice)": [[49, "mpas_analysis.sea_ice.ClimatologyMapSeaIceConc", false]], "climatologymapseaicethick (class in mpas_analysis.sea_ice)": [[50, "mpas_analysis.sea_ice.ClimatologyMapSeaIceThick", false]], "climatologymapsose (class in mpas_analysis.ocean)": [[31, "mpas_analysis.ocean.ClimatologyMapSose", false]], 
"climatologymapssh (class in mpas_analysis.ocean)": [[28, "mpas_analysis.ocean.ClimatologyMapSSH", false]], "climatologymapsss (class in mpas_analysis.ocean)": [[29, "mpas_analysis.ocean.ClimatologyMapSSS", false]], "climatologymapsst (class in mpas_analysis.ocean)": [[30, "mpas_analysis.ocean.ClimatologyMapSST", false]], "climatologymapwaves (class in mpas_analysis.ocean)": [[32, "mpas_analysis.ocean.ClimatologyMapWaves", false]], "compute_climatology() (in module mpas_analysis.shared.climatology)": [[77, "mpas_analysis.shared.climatology.compute_climatology", false]], "compute_monthly_climatology() (in module mpas_analysis.shared.climatology)": [[78, "mpas_analysis.shared.climatology.compute_monthly_climatology", false]], "compute_moving_avg() (in module mpas_analysis.shared.time_series)": [[128, "mpas_analysis.shared.time_series.compute_moving_avg", false]], "compute_moving_avg_anomaly_from_start() (in module mpas_analysis.shared.time_series)": [[129, "mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start", false]], "compute_mpas_transect_masks() (in module mpas_analysis.shared.transects.compute_transect_masks_subtask)": [[139, "mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks", false]], "computeanomalysubtask (class in mpas_analysis.ocean.compute_anomaly_subtask)": [[45, "mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask", false]], "computeregionmasks (class in mpas_analysis.shared.regions.compute_region_masks)": [[123, "mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks", false]], "computeregionmaskssubtask (class in mpas_analysis.shared.regions.compute_region_masks_subtask)": [[124, "mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask", false]], "computetransectmaskssubtask (class in mpas_analysis.shared.transects.compute_transect_masks_subtask)": [[138, 
"mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask", false]], "conservationtask (class in mpas_analysis.ocean)": [[33, "mpas_analysis.ocean.ConservationTask", false]], "convert_namelist_to_dict() (in module mpas_analysis.shared.io.namelist_streams_interface)": [[98, "mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict", false]], "customize_masked_climatology() (mpas_analysis.shared.climatology.remapmpasclimatologysubtask method)": [[66, "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology", false]], "customize_remapped_climatology() (mpas_analysis.shared.climatology.remapmpasclimatologysubtask method)": [[67, "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology", false]], "date_to_days() (in module mpas_analysis.shared.timekeeping.utility)": [[131, "mpas_analysis.shared.timekeeping.utility.date_to_days", false]], "datetime_to_days() (in module mpas_analysis.shared.timekeeping.utility)": [[132, "mpas_analysis.shared.timekeeping.utility.datetime_to_days", false]], "days_to_datetime() (in module mpas_analysis.shared.timekeeping.utility)": [[133, "mpas_analysis.shared.timekeeping.utility.days_to_datetime", false]], "determine_analyses_to_generate() (in module mpas_analysis.__main__)": [[15, "mpas_analysis.__main__.determine_analyses_to_generate", false]], "download_analysis_data() (in module mpas_analysis.download_data)": [[20, "mpas_analysis.download_data.download_analysis_data", false]], "find_stream() (mpas_analysis.shared.io.namelist_streams_interface.streamsfile method)": [[94, "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream", false]], "get() (mpas_analysis.shared.io.namelist_streams_interface.namelist method)": [[89, "mpas_analysis.shared.io.namelist_streams_interface.NameList.get", false]], "get_cartopy_projection() (in module mpas_analysis.shared.projection)": [[121, 
"mpas_analysis.shared.projection.get_cartopy_projection", false]], "get_comparison_descriptor() (in module mpas_analysis.shared.climatology)": [[79, "mpas_analysis.shared.climatology.get_comparison_descriptor", false]], "get_feature_list() (in module mpas_analysis.shared.regions.compute_region_masks_subtask)": [[125, "mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list", false]], "get_file_name() (mpas_analysis.shared.climatology.mpasclimatologytask method)": [[63, "mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name", false]], "get_file_name() (mpas_analysis.shared.climatology.remapobservedclimatologysubtask method)": [[74, "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name", false]], "get_masked_file_name() (mpas_analysis.shared.climatology.remapmpasclimatologysubtask method)": [[68, "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name", false]], "get_masked_mpas_climatology_file_name() (in module mpas_analysis.shared.climatology)": [[80, "mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name", false]], "get_observation_descriptor() (mpas_analysis.shared.climatology.remapobservedclimatologysubtask method)": [[75, "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor", false]], "get_pyproj_projection() (in module mpas_analysis.shared.projection)": [[122, "mpas_analysis.shared.projection.get_pyproj_projection", false]], "get_remapped_file_name() (mpas_analysis.shared.climatology.remapmpasclimatologysubtask method)": [[69, "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name", false]], "get_remapped_mpas_climatology_file_name() (in module mpas_analysis.shared.climatology)": [[81, "mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name", false]], "get_remapper() (in module mpas_analysis.shared.climatology)": [[82, "mpas_analysis.shared.climatology.get_remapper", 
false]], "get_simulation_start_time() (in module mpas_analysis.shared.timekeeping.utility)": [[134, "mpas_analysis.shared.timekeeping.utility.get_simulation_start_time", false]], "get_unmasked_mpas_climatology_directory() (in module mpas_analysis.shared.climatology)": [[83, "mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory", false]], "get_unmasked_mpas_climatology_file_name() (in module mpas_analysis.shared.climatology)": [[84, "mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name", false]], "getbool() (mpas_analysis.shared.io.namelist_streams_interface.namelist method)": [[90, "mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool", false]], "getfloat() (mpas_analysis.shared.io.namelist_streams_interface.namelist method)": [[91, "mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat", false]], "getint() (mpas_analysis.shared.io.namelist_streams_interface.namelist method)": [[92, "mpas_analysis.shared.io.namelist_streams_interface.NameList.getint", false]], "has_stream() (mpas_analysis.shared.io.namelist_streams_interface.streamsfile method)": [[95, "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream", false]], "indexnino34 (class in mpas_analysis.ocean)": [[34, "mpas_analysis.ocean.IndexNino34", false]], "main() (in module mpas_analysis.__main__)": [[16, "mpas_analysis.__main__.main", false]], "make_directories() (in module mpas_analysis.shared.io.utility)": [[102, "mpas_analysis.shared.io.utility.make_directories", false]], "meridionalheattransport (class in mpas_analysis.ocean)": [[35, "mpas_analysis.ocean.MeridionalHeatTransport", false]], "module": [[104, "module-mpas_analysis.shared.io.write_netcdf", false]], "mpas_analysis.shared.io.write_netcdf": [[104, "module-mpas_analysis.shared.io.write_netcdf", false]], "mpasclimatologytask (class in mpas_analysis.shared.climatology)": [[61, "mpas_analysis.shared.climatology.MpasClimatologyTask", false]], "mpasrelativedelta 
(class in mpas_analysis.shared.timekeeping.mpasrelativedelta)": [[130, "mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta", false]], "mpastimeseriestask (class in mpas_analysis.shared.time_series)": [[126, "mpas_analysis.shared.time_series.MpasTimeSeriesTask", false]], "oceanhistogram (class in mpas_analysis.ocean)": [[36, "mpas_analysis.ocean.OceanHistogram", false]], "open_mpas_dataset() (in module mpas_analysis.shared.io)": [[99, "mpas_analysis.shared.io.open_mpas_dataset", false]], "open_multifile_dataset() (in module mpas_analysis.shared.generalized_reader.generalized_reader)": [[85, "mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset", false]], "open_multifile_dataset() (in module mpas_analysis.shared.mpas_xarray.mpas_xarray)": [[105, "mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset", false]], "paths() (in module mpas_analysis.shared.io.utility)": [[103, "mpas_analysis.shared.io.utility.paths", false]], "plot_1d() (in module mpas_analysis.shared.plot)": [[113, "mpas_analysis.shared.plot.plot_1D", false]], "plot_global_comparison() (in module mpas_analysis.shared.plot)": [[114, "mpas_analysis.shared.plot.plot_global_comparison", false]], "plot_polar_comparison() (in module mpas_analysis.shared.plot)": [[115, "mpas_analysis.shared.plot.plot_polar_comparison", false]], "plot_vertical_section() (in module mpas_analysis.shared.plot)": [[116, "mpas_analysis.shared.plot.plot_vertical_section", false]], "plot_vertical_section_comparison() (in module mpas_analysis.shared.plot)": [[117, "mpas_analysis.shared.plot.plot_vertical_section_comparison", false]], "plot_xtick_format() (in module mpas_analysis.shared.plot.ticks)": [[118, "mpas_analysis.shared.plot.ticks.plot_xtick_format", false]], "plotclimatologymapsubtask (class in mpas_analysis.shared.plot)": [[109, "mpas_analysis.shared.plot.PlotClimatologyMapSubtask", false]], "plotdepthintegratedtimeseriessubtask (class in 
mpas_analysis.ocean.plot_depth_integrated_time_series_subtask)": [[46, "mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask", false]], "plothovmollersubtask (class in mpas_analysis.ocean.plot_hovmoller_subtask)": [[47, "mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask", false]], "preprocess() (in module mpas_analysis.shared.mpas_xarray.mpas_xarray)": [[106, "mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess", false]], "read() (mpas_analysis.shared.io.namelist_streams_interface.streamsfile method)": [[96, "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read", false]], "readpath() (mpas_analysis.shared.io.namelist_streams_interface.streamsfile method)": [[97, "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath", false]], "remapmpasclimatologysubtask (class in mpas_analysis.shared.climatology)": [[64, "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask", false]], "remapobservedclimatologysubtask (class in mpas_analysis.shared.climatology)": [[72, "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask", false]], "remove_repeated_time_index() (in module mpas_analysis.shared.mpas_xarray.mpas_xarray)": [[107, "mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index", false]], "run() (mpas_analysis.shared.analysistask method)": [[56, "mpas_analysis.shared.AnalysisTask.run", false]], "run_after() (mpas_analysis.shared.analysistask method)": [[57, "mpas_analysis.shared.AnalysisTask.run_after", false]], "run_analysis() (in module mpas_analysis.__main__)": [[17, "mpas_analysis.__main__.run_analysis", false]], "run_task() (mpas_analysis.shared.analysistask method)": [[58, "mpas_analysis.shared.AnalysisTask.run_task", false]], "run_task() (mpas_analysis.shared.climatology.remapmpasclimatologysubtask method)": [[70, "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task", false]], "set_plot_info() 
(mpas_analysis.shared.plot.plotclimatologymapsubtask method)": [[110, "mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info", false]], "set_start_end_date() (mpas_analysis.shared.analysistask method)": [[59, "mpas_analysis.shared.AnalysisTask.set_start_end_date", false]], "setup_and_check() (mpas_analysis.shared.analysistask method)": [[60, "mpas_analysis.shared.AnalysisTask.setup_and_check", false]], "setup_and_check() (mpas_analysis.shared.climatology.remapmpasclimatologysubtask method)": [[71, "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check", false]], "setup_colormap() (in module mpas_analysis.shared.plot.colormap)": [[112, "mpas_analysis.shared.plot.colormap.setup_colormap", false]], "streamfunctionmoc (class in mpas_analysis.ocean)": [[37, "mpas_analysis.ocean.StreamfunctionMOC", false]], "string_to_datetime() (in module mpas_analysis.shared.timekeeping.utility)": [[135, "mpas_analysis.shared.timekeeping.utility.string_to_datetime", false]], "string_to_days_since_date() (in module mpas_analysis.shared.timekeeping.utility)": [[136, "mpas_analysis.shared.timekeeping.utility.string_to_days_since_date", false]], "string_to_relative_delta() (in module mpas_analysis.shared.timekeeping.utility)": [[137, "mpas_analysis.shared.timekeeping.utility.string_to_relative_delta", false]], "subset_variables() (in module mpas_analysis.shared.mpas_xarray.mpas_xarray)": [[108, "mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables", false]], "timeseries_analysis_plot() (in module mpas_analysis.shared.plot)": [[119, "mpas_analysis.shared.plot.timeseries_analysis_plot", false]], "timeseries_analysis_plot_polar() (in module mpas_analysis.shared.plot)": [[120, "mpas_analysis.shared.plot.timeseries_analysis_plot_polar", false]], "timeseriesantarcticmelt (class in mpas_analysis.ocean)": [[38, "mpas_analysis.ocean.TimeSeriesAntarcticMelt", false]], "timeseriesoceanregions (class in mpas_analysis.ocean)": [[40, 
"mpas_analysis.ocean.TimeSeriesOceanRegions", false]], "timeseriesohcanomaly (class in mpas_analysis.ocean)": [[39, "mpas_analysis.ocean.TimeSeriesOHCAnomaly", false]], "timeseriessalinityanomaly (class in mpas_analysis.ocean)": [[42, "mpas_analysis.ocean.TimeSeriesSalinityAnomaly", false]], "timeseriesseaice (class in mpas_analysis.sea_ice)": [[51, "mpas_analysis.sea_ice.TimeSeriesSeaIce", false]], "timeseriessst (class in mpas_analysis.ocean)": [[41, "mpas_analysis.ocean.TimeSeriesSST", false]], "timeseriestemperatureanomaly (class in mpas_analysis.ocean)": [[43, "mpas_analysis.ocean.TimeSeriesTemperatureAnomaly", false]], "timeseriestransport (class in mpas_analysis.ocean)": [[44, "mpas_analysis.ocean.TimeSeriesTransport", false]], "update_generate() (in module mpas_analysis.__main__)": [[18, "mpas_analysis.__main__.update_generate", false]], "wait_for_task() (in module mpas_analysis.__main__)": [[19, "mpas_analysis.__main__.wait_for_task", false]]}, "objects": {"mpas_analysis.__main__": [[13, 0, 1, "", "add_task_and_subtasks"], [14, 0, 1, "", "build_analysis_list"], [15, 0, 1, "", "determine_analyses_to_generate"], [16, 0, 1, "", "main"], [17, 0, 1, "", "run_analysis"], [18, 0, 1, "", "update_generate"], [19, 0, 1, "", "wait_for_task"]], "mpas_analysis.download_data": [[20, 0, 1, "", "download_analysis_data"]], "mpas_analysis.ocean": [[21, 1, 1, "", "ClimatologyMapAntarcticMelt"], [22, 1, 1, "", "ClimatologyMapArgoSalinity"], [23, 1, 1, "", "ClimatologyMapArgoTemperature"], [24, 1, 1, "", "ClimatologyMapEKE"], [25, 1, 1, "", "ClimatologyMapMLD"], [26, 1, 1, "", "ClimatologyMapMLDMinMax"], [27, 1, 1, "", "ClimatologyMapOHCAnomaly"], [28, 1, 1, "", "ClimatologyMapSSH"], [29, 1, 1, "", "ClimatologyMapSSS"], [30, 1, 1, "", "ClimatologyMapSST"], [31, 1, 1, "", "ClimatologyMapSose"], [32, 1, 1, "", "ClimatologyMapWaves"], [33, 1, 1, "", "ConservationTask"], [34, 1, 1, "", "IndexNino34"], [35, 1, 1, "", "MeridionalHeatTransport"], [36, 1, 1, "", "OceanHistogram"], 
[37, 1, 1, "", "StreamfunctionMOC"], [38, 1, 1, "", "TimeSeriesAntarcticMelt"], [39, 1, 1, "", "TimeSeriesOHCAnomaly"], [40, 1, 1, "", "TimeSeriesOceanRegions"], [41, 1, 1, "", "TimeSeriesSST"], [42, 1, 1, "", "TimeSeriesSalinityAnomaly"], [43, 1, 1, "", "TimeSeriesTemperatureAnomaly"], [44, 1, 1, "", "TimeSeriesTransport"]], "mpas_analysis.ocean.ClimatologyMapAntarcticMelt": [[21, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ClimatologyMapArgoSalinity": [[22, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ClimatologyMapArgoTemperature": [[23, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ClimatologyMapEKE": [[24, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ClimatologyMapMLD": [[25, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ClimatologyMapMLDMinMax": [[26, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ClimatologyMapOHCAnomaly": [[27, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ClimatologyMapSSH": [[28, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ClimatologyMapSSS": [[29, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ClimatologyMapSST": [[30, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ClimatologyMapSose": [[31, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ClimatologyMapWaves": [[32, 2, 1, "", "__init__"]], "mpas_analysis.ocean.ConservationTask": [[33, 2, 1, "", "__init__"]], "mpas_analysis.ocean.IndexNino34": [[34, 2, 1, "", "__init__"]], "mpas_analysis.ocean.MeridionalHeatTransport": [[35, 2, 1, "", "__init__"]], "mpas_analysis.ocean.OceanHistogram": [[36, 2, 1, "", "__init__"]], "mpas_analysis.ocean.StreamfunctionMOC": [[37, 2, 1, "", "__init__"]], "mpas_analysis.ocean.TimeSeriesAntarcticMelt": [[38, 2, 1, "", "__init__"]], "mpas_analysis.ocean.TimeSeriesOHCAnomaly": [[39, 2, 1, "", "__init__"]], "mpas_analysis.ocean.TimeSeriesOceanRegions": [[40, 2, 1, "", "__init__"]], "mpas_analysis.ocean.TimeSeriesSST": [[41, 2, 1, "", "__init__"]], "mpas_analysis.ocean.TimeSeriesSalinityAnomaly": [[42, 2, 1, "", "__init__"]], 
"mpas_analysis.ocean.TimeSeriesTemperatureAnomaly": [[43, 2, 1, "", "__init__"]], "mpas_analysis.ocean.TimeSeriesTransport": [[44, 2, 1, "", "__init__"]], "mpas_analysis.ocean.compute_anomaly_subtask": [[45, 1, 1, "", "ComputeAnomalySubtask"]], "mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask": [[45, 2, 1, "", "__init__"]], "mpas_analysis.ocean.plot_depth_integrated_time_series_subtask": [[46, 1, 1, "", "PlotDepthIntegratedTimeSeriesSubtask"]], "mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask": [[46, 2, 1, "", "__init__"]], "mpas_analysis.ocean.plot_hovmoller_subtask": [[47, 1, 1, "", "PlotHovmollerSubtask"]], "mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask": [[47, 2, 1, "", "__init__"]], "mpas_analysis.sea_ice": [[48, 1, 1, "", "ClimatologyMapIcebergConc"], [49, 1, 1, "", "ClimatologyMapSeaIceConc"], [50, 1, 1, "", "ClimatologyMapSeaIceThick"], [51, 1, 1, "", "TimeSeriesSeaIce"]], "mpas_analysis.sea_ice.ClimatologyMapIcebergConc": [[48, 2, 1, "", "__init__"]], "mpas_analysis.sea_ice.ClimatologyMapSeaIceConc": [[49, 2, 1, "", "__init__"]], "mpas_analysis.sea_ice.ClimatologyMapSeaIceThick": [[50, 2, 1, "", "__init__"]], "mpas_analysis.sea_ice.TimeSeriesSeaIce": [[51, 2, 1, "", "__init__"]], "mpas_analysis.shared": [[52, 1, 1, "", "AnalysisTask"]], "mpas_analysis.shared.AnalysisTask": [[52, 2, 1, "", "__init__"], [53, 2, 1, "", "add_subtask"], [54, 2, 1, "", "check_analysis_enabled"], [55, 2, 1, "", "check_generate"], [56, 2, 1, "", "run"], [57, 2, 1, "", "run_after"], [58, 2, 1, "", "run_task"], [59, 2, 1, "", "set_start_end_date"], [60, 2, 1, "", "setup_and_check"]], "mpas_analysis.shared.climatology": [[61, 1, 1, "", "MpasClimatologyTask"], [64, 1, 1, "", "RemapMpasClimatologySubtask"], [72, 1, 1, "", "RemapObservedClimatologySubtask"], [76, 0, 1, "", "add_years_months_days_in_month"], [77, 0, 1, "", "compute_climatology"], [78, 0, 1, "", "compute_monthly_climatology"], [79, 0, 
1, "", "get_comparison_descriptor"], [80, 0, 1, "", "get_masked_mpas_climatology_file_name"], [81, 0, 1, "", "get_remapped_mpas_climatology_file_name"], [82, 0, 1, "", "get_remapper"], [83, 0, 1, "", "get_unmasked_mpas_climatology_directory"], [84, 0, 1, "", "get_unmasked_mpas_climatology_file_name"]], "mpas_analysis.shared.climatology.MpasClimatologyTask": [[61, 2, 1, "", "__init__"], [62, 2, 1, "", "add_variables"], [63, 2, 1, "", "get_file_name"]], "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask": [[64, 2, 1, "", "__init__"], [65, 2, 1, "", "add_comparison_grid_descriptor"], [66, 2, 1, "", "customize_masked_climatology"], [67, 2, 1, "", "customize_remapped_climatology"], [68, 2, 1, "", "get_masked_file_name"], [69, 2, 1, "", "get_remapped_file_name"], [70, 2, 1, "", "run_task"], [71, 2, 1, "", "setup_and_check"]], "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask": [[72, 2, 1, "", "__init__"], [73, 2, 1, "", "build_observational_dataset"], [74, 2, 1, "", "get_file_name"], [75, 2, 1, "", "get_observation_descriptor"]], "mpas_analysis.shared.generalized_reader.generalized_reader": [[85, 0, 1, "", "open_multifile_dataset"]], "mpas_analysis.shared.io": [[99, 0, 1, "", "open_mpas_dataset"], [104, 3, 0, "-", "write_netcdf"]], "mpas_analysis.shared.io.namelist_streams_interface": [[98, 0, 1, "", "convert_namelist_to_dict"]], "mpas_analysis.shared.io.namelist_streams_interface.NameList": [[86, 2, 1, "", "__getattr__"], [87, 2, 1, "", "__getitem__"], [88, 2, 1, "", "__init__"], [89, 2, 1, "", "get"], [90, 2, 1, "", "getbool"], [91, 2, 1, "", "getfloat"], [92, 2, 1, "", "getint"]], "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile": [[93, 2, 1, "", "__init__"], [94, 2, 1, "", "find_stream"], [95, 2, 1, "", "has_stream"], [96, 2, 1, "", "read"], [97, 2, 1, "", "readpath"]], "mpas_analysis.shared.io.utility": [[100, 0, 1, "", "build_config_full_path"], [101, 0, 1, "", "check_path_exists"], [102, 0, 1, "", "make_directories"], 
[103, 0, 1, "", "paths"]], "mpas_analysis.shared.mpas_xarray.mpas_xarray": [[105, 0, 1, "", "open_multifile_dataset"], [106, 0, 1, "", "preprocess"], [107, 0, 1, "", "remove_repeated_time_index"], [108, 0, 1, "", "subset_variables"]], "mpas_analysis.shared.plot": [[109, 1, 1, "", "PlotClimatologyMapSubtask"], [111, 0, 1, "", "add_inset"], [113, 0, 1, "", "plot_1D"], [114, 0, 1, "", "plot_global_comparison"], [115, 0, 1, "", "plot_polar_comparison"], [116, 0, 1, "", "plot_vertical_section"], [117, 0, 1, "", "plot_vertical_section_comparison"], [119, 0, 1, "", "timeseries_analysis_plot"], [120, 0, 1, "", "timeseries_analysis_plot_polar"]], "mpas_analysis.shared.plot.PlotClimatologyMapSubtask": [[109, 2, 1, "", "__init__"], [110, 2, 1, "", "set_plot_info"]], "mpas_analysis.shared.plot.colormap": [[112, 0, 1, "", "setup_colormap"]], "mpas_analysis.shared.plot.ticks": [[118, 0, 1, "", "plot_xtick_format"]], "mpas_analysis.shared.projection": [[121, 0, 1, "", "get_cartopy_projection"], [122, 0, 1, "", "get_pyproj_projection"]], "mpas_analysis.shared.regions.compute_region_masks": [[123, 1, 1, "", "ComputeRegionMasks"]], "mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks": [[123, 2, 1, "", "__init__"]], "mpas_analysis.shared.regions.compute_region_masks_subtask": [[124, 1, 1, "", "ComputeRegionMasksSubtask"], [125, 0, 1, "", "get_feature_list"]], "mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask": [[124, 2, 1, "", "__init__"]], "mpas_analysis.shared.time_series": [[126, 1, 1, "", "MpasTimeSeriesTask"], [127, 0, 1, "", "cache_time_series"], [128, 0, 1, "", "compute_moving_avg"], [129, 0, 1, "", "compute_moving_avg_anomaly_from_start"]], "mpas_analysis.shared.time_series.MpasTimeSeriesTask": [[126, 2, 1, "", "__init__"]], "mpas_analysis.shared.timekeeping.MpasRelativeDelta": [[130, 1, 1, "", "MpasRelativeDelta"]], "mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta": [[130, 2, 1, "", "__init__"]], 
"mpas_analysis.shared.timekeeping.utility": [[131, 0, 1, "", "date_to_days"], [132, 0, 1, "", "datetime_to_days"], [133, 0, 1, "", "days_to_datetime"], [134, 0, 1, "", "get_simulation_start_time"], [135, 0, 1, "", "string_to_datetime"], [136, 0, 1, "", "string_to_days_since_date"], [137, 0, 1, "", "string_to_relative_delta"]], "mpas_analysis.shared.transects.compute_transect_masks_subtask": [[138, 1, 1, "", "ComputeTransectMasksSubtask"], [139, 0, 1, "", "compute_mpas_transect_masks"]], "mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask": [[138, 2, 1, "", "__init__"]]}, "objnames": {"0": ["py", "function", "Python function"], "1": ["py", "class", "Python class"], "2": ["py", "method", "Python method"], "3": ["py", "module", "Python module"]}, "objtypes": {"0": "py:function", "1": "py:class", "2": "py:method", "3": "py:module"}, "terms": {"": [1, 4, 7, 8, 9, 15, 64, 72, 85, 99, 105, 111, 116, 117, 119, 120, 132, 141, 142, 143, 144, 147, 148, 150, 156, 162, 170, 171, 172, 176, 178, 179, 183, 184, 195, 196, 200, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 235, 236, 237, 241], "0": [4, 7, 19, 85, 105, 106, 111, 113, 115, 119, 120, 130, 131, 139, 141, 142, 143, 144, 148, 149, 150, 167, 179, 180, 193, 199, 205, 206, 208, 209, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 233, 236, 237, 238, 240, 243, 245, 247, 248], "00": [1, 10, 85, 105, 106, 131, 132, 133, 136, 147, 170, 171], "000": [143, 144, 176, 189, 213, 226], "0000": [147, 170, 171], "0001": [4, 10, 11, 76, 77, 78, 85, 105, 106, 131, 132, 133, 136, 147, 170, 203], "0002": [147, 170, 171, 203], "001": 243, "003": 243, "005": 233, "0051": 141, "0060": 141, "009": 179, "00_00": [147, 170, 171], "00_01": [147, 170], "01": [1, 2, 4, 5, 10, 11, 76, 77, 78, 85, 105, 106, 131, 132, 133, 136, 147, 148, 170, 171, 203, 208, 230, 233, 237, 247], 
"0111": 141, "0120": 141, "0160558": 186, "01_00": [1, 10, 147, 170], "01_00000": 203, "02": [2, 4, 5, 7, 10, 11, 142, 144, 193, 215, 233, 237, 243, 247], "03": [1, 5, 7, 144, 203], "04": [5, 9, 10, 144, 179], "04d": [1, 4], "05": [233, 237, 245, 247], "06": [3, 8, 10, 141, 142, 144], "07": 186, "08": [1, 7, 10, 142], "09": [4, 8, 10, 144], "0km_": 141, "0km_1": 141, "0km_10": [142, 144, 226], "0km_antarctic_stereo_20180710": 226, "0km_antarctic_stereo_20190603": 226, "0km_antarctic_stereo_bilinear": [142, 144], "0x10000": 226, "0x1100": 141, "0x6000": [142, 144, 226], "1": [1, 3, 4, 5, 7, 11, 19, 111, 113, 114, 116, 117, 119, 120, 124, 127, 131, 139, 147, 148, 149, 151, 153, 155, 157, 165, 170, 173, 175, 176, 177, 180, 183, 184, 186, 193, 195, 196, 198, 199, 203, 205, 208, 210, 211, 212, 214, 215, 216, 218, 219, 220, 221, 222, 223, 224, 225, 229, 230, 231, 232, 233, 236, 237, 238, 239, 240, 242, 243, 244, 245, 246, 247, 248], "10": [1, 7, 8, 11, 113, 116, 141, 142, 143, 148, 149, 150, 156, 160, 167, 176, 179, 180, 181, 193, 205, 208, 211, 212, 213, 214, 216, 226, 227, 238, 247, 248], "100": [4, 141, 142, 144, 149, 191, 205, 206, 207, 208, 211, 212, 214, 226], "1000": [139, 209, 228, 233, 236, 241], "10000": [139, 143, 156, 213, 226], "1001": 236, "1002": 176, "100e3": 141, "1016": 179, "102": 10, "1026": [217, 226, 230, 231, 237], "1028": [217, 226, 230, 231, 237], "106": [205, 239], "1084": 4, "10th": 7, "11": [4, 8, 141, 144, 148, 186, 189, 213, 243], "110": [149, 212, 215, 216, 218, 219, 224, 225, 237, 238, 247], "1101": 141, "111": 141, "112": [218, 219], "113": [149, 211, 212, 215, 216, 233, 237, 240, 243, 245, 247], "116": 214, "12": [1, 4, 8, 11, 119, 128, 129, 143, 144, 148, 153, 157, 160, 164, 180, 196, 213, 231, 237, 238, 240, 242, 243, 244, 245], "120": [141, 211, 212], "121": 141, "124": [3, 179], "128": [149, 211, 212, 214, 215, 216, 218, 219, 224, 225, 237, 247], "13": [142, 144], "130": [149, 214], "131": 236, "139": 214, "14": [141, 197, 238, 247], 
"140": [149, 211, 212, 215, 216, 218, 219, 224, 225, 237, 238, 247], "142": [149, 211, 212, 215, 216, 233, 237, 240, 243, 245, 247], "142808207": 4, "144": [218, 219], "15": [9, 11, 120, 141, 162, 196, 218, 219, 227, 231, 234, 235, 236], "150": [206, 207, 211, 212], "1500": [192, 199, 206, 207, 228], "15n": [162, 231, 234, 235, 236], "16": [1, 4, 141, 149, 160, 216, 236], "160": [214, 218, 219, 224, 225], "162": 214, "1678": [4, 106], "17": 172, "170": [149, 211, 212, 215, 216, 218, 219, 224, 225, 233, 237, 238, 240, 243, 245, 247], "18": [3, 144, 156, 238, 244, 247], "180": [5, 85, 105, 106, 141, 210, 219, 221, 223, 225], "181": 141, "185": 214, "1850": 11, "1854": [181, 232], "1870": [183, 184, 216, 232], "19": [5, 8], "1900": [198, 208, 216], "192": [218, 219, 224, 225], "1950": 180, "1958": 4, "1959": [180, 227], "1971": 181, "1975": 192, "1976": 3, "1978": [180, 196], "1979": 189, "198": [149, 211, 212, 215, 216, 233, 237, 240, 243, 245, 247], "1981": [183, 184], "1982": [174, 186], "1983": 3, "1985": [186, 197], "1986": 174, "1987": [178, 196], "1989": 197, "1990": [200, 216], "1991": [195, 227], "1992": 177, "1996": [187, 201, 204], "1998": [193, 201, 202], "1999": [196, 201, 204], "1d": 113, "1deg": 199, "1e": [141, 143, 210], "1e12": 236, "1e6": 141, "1km": 193, "1x317": 179, "1x720": 179, "2": [3, 4, 5, 7, 11, 36, 99, 116, 117, 148, 149, 151, 153, 163, 164, 166, 173, 177, 179, 180, 181, 182, 183, 184, 186, 193, 195, 196, 199, 205, 206, 207, 208, 209, 210, 213, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 234, 236, 237, 238, 239, 240, 245, 246, 247, 248], "20": [116, 117, 119, 148, 149, 155, 165, 205, 208, 211, 212, 214, 218, 219, 224, 225, 226, 238], "200": [141, 149, 160, 206, 207, 211, 212, 215, 216, 218, 219, 224, 225, 226, 237, 238, 241, 247], "2000": [143, 181, 206, 207, 213, 240], "2000m": 240, "2001": [197, 201, 202], "2002": [196, 201, 204], "2003": [189, 201, 204], "2004": [176, 191, 201, 202], "2005": 
[145, 174, 200, 201, 202, 226, 236, 237, 241], "2007": [3, 196], "2008": [174, 183, 184, 191, 196, 201, 202, 221, 223], "2009": [174, 176, 185, 191, 201, 202, 204, 214], "201": 236, "2010": [145, 177, 201, 202, 226, 236, 237, 241], "2011": [174, 189, 201, 204, 216], "2012": [174, 192, 196, 201, 204, 239], "2013": [174, 186, 190, 198, 201, 202, 239], "2014": [181, 186, 192, 193, 201, 202, 217], "2014a": [198, 201, 202], "2014b": [198, 201, 202], "2015": [175, 181, 186, 201, 202], "2016": [145, 182, 186, 189, 191, 193, 201, 202, 204, 221, 223, 232], "2017": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 173, 176, 178, 179, 181, 182, 186, 201, 202, 204], "2017_20180308": [158, 210], "2017gl073426": 176, "2018": [3, 195, 199, 227], "2019": [199, 201, 202], "2020": [172, 180, 195, 201, 202, 239], "20200305": [142, 144], "20201025": 142, "2021": 227, "2022": [141, 143], "2023": [188, 201, 202, 205, 239], "209": 214, "21": [183, 184], "2150": 208, "22": [7, 115, 144, 149, 216, 238], "224": [218, 219, 224, 225], "2262": [4, 106], "227": [149, 211, 212, 215, 216, 233, 237, 240, 243, 245, 247], "23": [4, 7, 214], "230": [149, 211, 212, 215, 216, 218, 219, 224, 225, 237, 238, 247], "2300": 208, "232": 214, "24": [4, 37], "240": [149, 214], "2450": 208, "249": [148, 165, 229], "25": [5, 141, 149, 178, 180, 187, 196, 206, 207, 214, 224, 237, 238, 245, 247], "255": [149, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 233, 237, 238, 240, 243, 245, 247], "25deg": 199, "25degree_to_0": 144, "25x0": 144, "26": [149, 216, 238, 245], "2641": 3, "2646": 3, "27": 226, "27637925": 141, "28": [149, 189, 211, 212, 215, 216, 233, 237, 240, 243, 245, 247], "29": [2, 193, 215], "2d": [72, 109, 141], "3": [4, 34, 85, 105, 106, 145, 149, 153, 155, 159, 162, 164, 175, 180, 195, 196, 199, 201, 202, 208, 210, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 230, 231, 232, 234, 235, 237, 241, 247, 248], "30": [144, 156, 206, 207, 211, 212, 215, 230, 231, 237, 238], "300": [208, 209, 226], "31": [4, 
11, 215], "317x720": 179, "31_23": [1, 4], "32": [149, 215, 216, 218, 219, 224, 225], "33": [144, 215, 217, 226, 228, 236, 247], "330": 237, "34": [3, 206, 215, 226, 228, 230, 236, 237, 247], "3433": 197, "3443": 197, "35": [206, 208, 215, 217, 226, 228, 230, 236, 237, 238, 247], "36": [215, 247], "36207813": 4, "365": [4, 10], "37": 213, "38": [213, 214, 215], "385": 176, "39": [206, 230, 231], "3d": [141, 230, 231, 235], "3e9": 236, "3km": 173, "4": [1, 3, 4, 7, 34, 113, 116, 145, 151, 153, 155, 159, 162, 180, 191, 195, 196, 201, 202, 210, 218, 219, 225, 232, 236, 237, 238, 240, 247, 248], "40": [149, 212, 214, 215, 216, 226, 234, 237, 238, 247], "400": [206, 207, 211, 212, 226, 236, 241], "4000": [167, 230, 237, 247], "401": [167, 230, 237, 247], "42": 142, "44": 176, "45": 111, "450": 208, "45gb": 144, "46": 214, "480": [142, 144], "4e3": 141, "4th": 186, "5": [5, 7, 111, 148, 149, 150, 167, 177, 180, 205, 206, 208, 210, 212, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 236, 237, 238, 240, 245, 247, 248], "50": [111, 115, 144, 149, 205, 206, 207, 208, 210, 211, 212, 218, 219, 220, 221, 222, 223, 224, 225, 226], "500": [189, 228], "5067": 193, "50m": 141, "51": 141, "5145": [183, 184], "5153": [183, 184], "52": 144, "55": 142, "5500": [199, 228], "5618": 176, "5626": 176, "57": [149, 211, 212, 215, 216, 233, 237, 240, 243, 245, 247], "58": 7, "59": [1, 4], "5_20180710": [218, 219, 224, 225], "5_ts_1": 142, "5degree_bilinear": [142, 144], "5e6": 141, "5n": 37, "5x0": [142, 144, 218, 219, 224, 225], "6": [119, 120, 149, 153, 156, 216, 218, 219, 225, 226, 237, 240, 247, 248], "60": [4, 141, 144, 156, 214, 226, 234, 236, 237, 241], "600": [206, 226], "6000": [148, 150, 226, 231], "64": [4, 218, 219, 224, 225], "64gb": 156, "65": [231, 234, 235, 236, 237], "65n": [231, 234, 235, 236], "675": 237, "6e6": 141, "6th": 180, "7": [7, 153, 162, 175, 178, 180, 196, 218, 219, 227, 237, 247, 248], "70": 214, "700": [143, 213, 240], "700m": 
[143, 240], "71": 141, "724": 182, "725": 237, "7289": 181, "73": [3, 179], "74": 144, "75": [211, 212, 237], "7728169": 141, "78": 142, "8": [7, 115, 144, 151, 153, 175, 191, 208, 217, 218, 219, 225, 226, 228, 236, 237, 238, 240, 247, 248], "80": [149, 208, 211, 212, 214, 215, 216, 218, 219, 224, 225, 226, 233, 237, 238, 247], "800": [206, 207, 211, 212, 226], "81": [3, 4, 141, 191, 199], "82": [191, 199], "85": [149, 211, 212, 215, 216, 218, 219, 233, 237, 240, 243, 245, 247], "85n": 179, "86": [2, 142], "88": 141, "9": [142, 143, 149, 206, 207, 209, 213, 217, 218, 219, 220, 221, 222, 223, 226, 227, 228, 230, 237, 247, 248], "90": 141, "92": [3, 179], "926": 236, "93": 214, "95": [218, 219], "950": 144, "96": [218, 219, 224, 225], "9999": [4, 11], "A": [1, 2, 3, 4, 7, 8, 9, 10, 11, 13, 14, 15, 17, 19, 21, 32, 33, 34, 36, 38, 40, 45, 46, 47, 52, 61, 62, 64, 65, 72, 73, 75, 76, 77, 78, 82, 85, 94, 97, 98, 103, 105, 106, 107, 108, 109, 111, 112, 116, 117, 124, 126, 127, 129, 131, 132, 133, 135, 136, 137, 138, 141, 142, 143, 144, 147, 148, 149, 152, 155, 156, 159, 163, 165, 167, 169, 173, 176, 179, 183, 184, 195, 199, 201, 203, 205, 206, 207, 209, 210, 213, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 234, 235, 236, 237, 241, 247], "And": [1, 141], "Andes": 153, "As": [1, 2, 7, 10, 141, 143, 151, 153], "At": [11, 141, 142, 144, 203], "Be": 163, "But": 124, "By": [1, 116, 117, 119, 143, 148, 149, 152, 153, 156, 157, 163, 165, 166, 216, 218, 219, 221, 223, 232, 233, 234, 236, 238, 241], "For": [0, 1, 2, 4, 5, 7, 12, 60, 116, 134, 141, 142, 143, 144, 147, 148, 149, 151, 152, 153, 156, 159, 166, 170, 171, 174, 179, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 233, 234, 235, 236, 237, 238, 240, 241, 242, 243, 244, 245, 247], "If": [1, 2, 5, 7, 8, 9, 10, 13, 52, 54, 55, 56, 61, 64, 71, 77, 78, 79, 81, 82, 85, 88, 94, 97, 99, 101, 105, 106, 108, 109, 111, 115, 116, 117, 121, 122, 124, 
127, 131, 132, 133, 134, 135, 136, 137, 142, 143, 144, 148, 152, 153, 156, 158, 159, 161, 162, 163, 164, 167, 176, 194, 203, 205, 229, 230, 231, 234, 236, 240], "In": [2, 3, 4, 7, 9, 11, 116, 141, 142, 143, 144, 147, 149, 153, 159, 170, 171, 180, 191, 212, 229, 231, 234, 235, 236, 238, 241], "It": [1, 3, 4, 5, 7, 8, 9, 11, 57, 141, 142, 143, 163, 174, 176, 180, 181, 182, 195, 198, 203, 241], "No": 7, "Not": [5, 188], "ON": [164, 224, 225], "On": [7, 9, 141, 142, 144, 153, 248], "One": [63, 68, 69, 74, 80, 81, 84, 144], "Or": [116, 143], "Such": 180, "That": [1, 141, 143, 151], "The": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 13, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 41, 42, 43, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 56, 57, 59, 61, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 105, 106, 107, 108, 109, 110, 111, 113, 114, 115, 116, 117, 119, 120, 121, 122, 123, 124, 126, 127, 128, 129, 131, 132, 133, 134, 136, 137, 138, 142, 144, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 165, 167, 168, 169, 170, 171, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 189, 191, 194, 195, 196, 197, 198, 199, 200, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "Then": [5, 7, 141, 142, 143, 144, 162, 203], "There": [1, 2, 4, 7, 9, 10, 11, 141, 156, 159, 180, 203, 226, 232], "These": [1, 11, 52, 141, 142, 143, 144, 147, 149, 150, 151, 153, 159, 170, 171, 177, 195, 196, 200, 203, 210, 218, 219, 224, 225, 226, 230, 239, 246], "To": [2, 3, 7, 9, 141, 142, 144, 149, 151, 153, 157, 159, 162, 163, 180, 203, 230, 232, 244], "Will": 37, "With": [142, 144, 203], "_": [1, 4, 109, 141, 143], "_1100": 141, "__": 143, "__file__": 7, 
"__init__": [1, 3, 9, 11, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 61, 64, 72, 109, 123, 124, 126, 130, 138, 141, 143, 203], "__main__": [141, 142, 203], "__name__": 141, "_assert_valid_datetim": 11, "_assert_valid_select": 11, "_build": 203, "_compute_barotropic_streamfunction_cel": 141, "_compute_barotropic_streamfunction_vertex": 141, "_compute_ohc": 143, "_compute_transport": 141, "_ensure_list": 11, "_get_datetim": [4, 11], "_get_lock_path": 7, "_map_variable_nam": 11, "_mask_climatologi": 143, "_my_sub_task": 1, "_parse_dataset_tim": 4, "_preprocess": 11, "_rename_vari": 11, "_runstatu": 8, "_setupstatu": 8, "_ts_": 142, "a_wcycl1850": [142, 144], "ab": 143, "abbot": 239, "abbrevi": [143, 164], "abil": [4, 11, 147, 170, 218, 219, 224, 225, 244], "abl": [5, 7], "aboard": 178, "about": [9, 141, 142, 143, 144, 179, 180, 189, 232], "abov": [2, 4, 7, 8, 10, 11, 109, 110, 116, 117, 141, 142, 143, 144, 149, 152, 167, 177, 201, 202, 203, 206, 207, 210, 218, 219, 220, 221, 222, 223, 224, 225, 234], "absoltu": 158, "absolut": [10, 100, 101, 127, 130, 142, 144, 145, 152, 156, 158, 159, 163, 192, 201, 202, 214, 230, 234], "absolute_energy_error": 229, "absolute_salt_error": 229, "ac": 142, "accept": [5, 232], "access": [1, 142, 143, 144, 180, 181, 186, 193, 195], "accessor": [86, 87], "accommod": [5, 11], "accomplish": 7, "accord": [203, 234], "account": [76, 77, 78, 142], "accur": [147, 170], "accuraci": 174, "acknowledg": [175, 176, 178, 183, 184, 185, 187, 191, 193, 194, 196, 197, 199], "acm": [2, 9, 10, 161], "acme1": 153, "acme_scratch": 142, "acquir": 7, "acronym": 208, "across": [8, 140, 143, 147, 160, 168, 170, 171, 180, 203], "action": 1, "activ": [141, 147, 170, 203], "activetrac": [147, 170], "actual": [8, 116, 117, 143, 163], "ad": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 13, 14, 19, 45, 66, 67, 76, 77, 78, 97, 117, 126, 140, 143, 191, 199, 203, 226, 234, 236, 237, 240], "adapt": 
[2, 10, 141, 180], "add": [1, 7, 8, 10, 11, 13, 53, 62, 64, 65, 76, 111, 124, 138, 141, 142, 143, 144, 153, 203, 230, 236, 238, 241], "add_arrow_to_line2d": 141, "add_climatology_map_bsf": 141, "add_comparison_descriptor": 64, "add_featur": 141, "add_land_lakes_coastlin": 141, "add_mpas_climatology_task": 8, "add_my_fancy_task": 142, "add_patch": 141, "add_polar_stereographic_interp": 9, "add_subplot": 141, "add_subtask": [141, 143], "add_task_and_subtask": 8, "add_vari": 143, "addit": [1, 4, 7, 10, 11, 60, 116, 130, 141, 143, 147, 156, 160, 170, 171, 177, 183, 184, 191, 195, 200, 203, 234, 236, 239, 244], "addition": [143, 147, 170, 171, 196], "additionalarg": 9, "address": [5, 7, 11, 142], "adjust": [116, 117, 119, 156, 183, 184, 197, 238], "adrian": 0, "adt": 177, "adusumilli": [172, 201, 202, 239], "advanc": [162, 178], "advantag": [142, 143, 144], "adventur": 143, "advis": 200, "afer": 158, "affect": [142, 150, 203], "affili": 189, "africa": 246, "after": [1, 7, 8, 53, 57, 60, 67, 68, 69, 74, 143, 148, 159, 203, 230, 237, 247], "again": [8, 141, 142, 144, 149, 153], "against": [3, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 48, 49, 50, 72, 109, 141, 142, 143, 144, 148, 150, 163, 165, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 232, 233, 234, 236, 241, 242, 244], "agenc": [177, 195], "aggreg": [124, 138, 162, 231, 235], "aggregationfunct": [124, 138], "agre": 142, "agreement": 180, "agulha": 246, "ahead": [142, 144], "ai": 1, "air": [180, 186], "akturn": 4, "al": [145, 172, 173, 175, 176, 179, 180, 181, 182, 183, 184, 186, 187, 188, 189, 190, 192, 193, 194, 195, 196, 198, 199, 201, 202, 204, 205, 214, 217, 221, 223, 239], "algorithm": [1, 4, 5, 7, 8, 9, 11, 145, 176, 201, 204, 218, 219], "alia": 142, "alk": 208, "alkalin": 208, "all": [0, 1, 2, 3, 4, 5, 7, 8, 10, 11, 14, 15, 17, 33, 61, 77, 78, 126, 127, 141, 142, 143, 144, 147, 148, 149, 150, 153, 156, 158, 159, 160, 163, 164, 167, 
169, 170, 171, 173, 180, 182, 194, 195, 196, 198, 203, 205, 213, 226, 228, 231, 234, 235, 236, 237, 239, 241, 246], "all_": [2, 18, 142, 144, 159], "all_climatologi": 159, "all_ocean": 2, "all_publicob": [142, 144, 159], "all_regriddedhorizont": 2, "all_seaic": 2, "all_timeseri": [2, 159], "all_transect": 52, "allow": [1, 4, 7, 8, 9, 57, 106, 116, 117, 119, 138, 142, 143, 144, 147, 148, 151, 153, 170, 171, 172, 180, 224, 225, 233, 236, 239], "allsuffix": 1, "allvari": [61, 126], "almost": [7, 143], "along": [116, 117, 118, 119, 143, 152, 160, 167, 177, 196, 203, 212, 239], "alpha": 156, "alpha8": 5, "alreadi": [5, 7, 8, 76, 77, 78, 83, 99, 102, 142, 144, 152, 153, 162, 163, 203], "also": [1, 2, 3, 4, 5, 7, 8, 10, 11, 45, 52, 57, 106, 116, 117, 126, 141, 142, 143, 144, 148, 151, 159, 160, 173, 175, 176, 178, 180, 181, 183, 184, 194, 195, 196, 203, 230, 235, 236, 237, 238], "alter": [150, 156, 162, 218, 219, 240], "alter_dataset": [45, 129], "altern": [4, 5, 117, 142, 144, 149, 159, 216, 226], "although": [116, 117], "altiberg": [145, 158, 201, 204, 210], "altiberg_1991": [158, 210], "altika": 173, "altimet": [145, 173, 177, 185, 201, 202, 227], "altimetri": 177, "alwai": [7, 53, 141, 142, 143, 144, 156, 159, 167], "am": [4, 7, 33, 141, 142, 144, 147, 170, 171, 203], "amen": 143, "amend": 141, "ameri": 239, "amip": [183, 184], "amj": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "among": [5, 232], "amount": [4, 7, 143], "amp": [220, 221], "amsr": 178, "amundsen": [234, 236, 241], "an": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 45, 47, 48, 49, 50, 52, 54, 61, 64, 72, 77, 83, 84, 85, 94, 99, 100, 101, 105, 106, 109, 110, 111, 112, 114, 116, 117, 123, 124, 126, 129, 131, 132, 133, 134, 135, 136, 137, 138, 140, 147, 148, 149, 152, 153, 155, 156, 158, 159, 162, 164, 165, 167, 169, 174, 175, 176, 178, 179, 180, 182, 185, 186, 187, 194, 196, 203, 205, 206, 207, 208, 
209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "anaconda": [142, 144, 203], "analog": 2, "analogi": [2, 142, 144, 159], "analyi": [4, 232], "analys": [1, 2, 7, 8, 14, 15, 17, 141, 142, 144, 159, 183, 184, 191, 203], "analysestogener": [8, 13, 15], "analysi": [0, 2, 3, 6, 7, 8, 9, 10, 11, 13, 14, 15, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 54, 55, 56, 58, 59, 60, 61, 64, 71, 72, 85, 109, 123, 124, 126, 138, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 159, 160, 162, 163, 164, 165, 168, 169, 170, 171, 201, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "analysis_input_fil": 3, "analysis_memb": [147, 170], "analysis_task": [1, 3, 203], "analysis_task_templ": [1, 203], "analysis_test": [142, 144], "analysismodul": [1, 7], "analysisoptionnam": 54, "analysistask": [1, 8, 13, 14, 15, 17, 19, 45, 46, 47, 64, 72, 109, 124, 138, 141, 143, 203], "analyz": [1, 2, 5, 33, 142, 143, 144, 156, 163, 176, 198, 203, 206, 207, 226, 228], "anaysi": 1, "ancillari": [177, 200], "andthen": 7, "angl": 179, "ani": [1, 2, 4, 5, 7, 8, 9, 11, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 34, 35, 36, 37, 38, 39, 40, 41, 44, 45, 46, 47, 48, 49, 50, 51, 66, 67, 87, 89, 107, 109, 111, 113, 126, 141, 142, 143, 144, 147, 148, 149, 153, 156, 158, 162, 163, 167, 169, 172, 174, 180, 194, 203, 230, 236, 237, 239, 241, 247], "aniceflux": [221, 223], "aniceflux_v01_clim_mean_1982": [221, 223], "anim": 199, "anl": [3, 142], "anlaysi": 148, "ann": [3, 141, 143, 164, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 220, 221, 222, 223, 226, 227, 228, 230, 234, 235, 236, 237, 247], 
"annual": [3, 77, 143, 157, 174, 175, 189, 191, 199, 221, 223, 228, 238], "anomali": [4, 27, 42, 43, 45, 128, 129, 141, 143, 159, 181, 183, 184, 189, 203, 213, 229, 231, 240, 243, 245], "anomalyendtim": 129, "anomalyrefyear": [148, 165, 229], "anomalystarttim": 129, "anoth": [1, 2, 7, 13, 109, 116, 141, 142, 143, 144, 149, 152, 159, 203, 210, 218, 219, 220, 221, 222, 223, 224, 225], "answer": [142, 144], "ant": 246, "ant_g1920v01_iceshelfmelt": [188, 201, 202], "antarct": [3, 21, 31, 38, 65, 79, 141, 143, 145, 148, 150, 158, 162, 196, 201, 202, 205, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 228, 234, 236, 239, 241, 244, 246], "antarctica": [190, 201, 202, 237, 239], "antarcticregion": [236, 241], "anthropogen": [147, 170], "anticip": [234, 236, 238], "antil": 246, "anvil": [142, 144, 153], "anyon": [1, 11], "anyth": [97, 141, 167, 203, 230], "anywher": 9, "aoml": [179, 201, 202], "api": [3, 9, 140], "apl": 189, "appear": [3, 5, 142, 144, 148, 230, 231, 235], "append": [1, 7, 8, 116, 117, 141, 149, 203, 231, 235, 241], "append_ax": 141, "appli": [85, 116, 117, 141, 172, 212, 226], "applic": 180, "approach": [2, 3, 4, 5, 7, 14, 19, 142, 144, 174], "appropri": [2, 4, 5, 7, 10, 11, 73, 75, 106, 141, 142, 144, 153, 156, 159, 160, 194, 203, 212, 236], "approxim": [116, 117, 119, 144, 156, 167, 196, 199, 206, 207, 226, 228, 230, 237, 240, 247], "apr": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "april": [189, 197], "aprun": 7, "aquariu": [145, 201, 202, 215], "ar": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 13, 19, 20, 33, 45, 46, 47, 52, 60, 62, 64, 71, 76, 77, 78, 85, 97, 98, 109, 110, 111, 114, 116, 117, 118, 124, 127, 130, 140, 141, 142, 143, 144, 147, 148, 149, 150, 151, 152, 153, 156, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 174, 175, 176, 177, 178, 180, 181, 183, 184, 185, 186, 187, 189, 191, 193, 194, 195, 196, 197, 198, 199, 
200, 201, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "arang": [5, 141, 143, 149, 214, 237, 238, 240, 243, 245, 247], "arbitrari": [4, 5, 103, 209, 214], "architectur": 142, "archiv": [142, 144, 156, 195, 196, 201, 202, 203], "arctic": [65, 79, 145, 162, 185, 186, 196, 201, 204, 218, 220, 222, 224, 228, 244], "arctic_basin": [231, 234, 235, 236], "area": [116, 117, 145, 148, 180, 182, 199, 201, 204, 244], "area_edg": 141, "area_vert": 141, "areacel": 234, "areaedg": 141, "areanh": 244, "areash": 244, "areavert": 141, "aren": [8, 144, 153], "arg": [1, 7, 103], "arg1": 1, "arg2": 1, "argo": [22, 23, 141, 145, 158, 200, 201, 202, 206, 207, 208, 211], "argosubdirectori": 158, "argument": [1, 4, 11, 103, 116, 117, 127, 129, 141, 142, 143, 149], "argv": 141, "argvalu": 203, "aris": [1, 11], "arm": 142, "around": [1, 5, 7, 10, 111, 115, 141, 151, 180, 190, 201, 202, 237, 244], "arrai": [4, 9, 76, 77, 78, 85, 105, 106, 112, 113, 114, 115, 116, 117, 127, 132, 133, 136, 167, 213, 214, 230, 237, 247], "arrang": [210, 218, 219, 220, 221, 222, 223, 224, 225], "arrow": 141, "arrow_spac": 141, "arthur": 179, "artic": [162, 191], "articl": 180, "artif": 4, "asai": [0, 1, 2, 4, 5, 7, 8, 9, 10, 11, 141, 142], "asay932": 142, "asbw": 192, "ascii": [192, 201, 202], "asid": [2, 143], "ask": [2, 142, 143, 189], "aspect": [116, 117, 119, 143], "assembl": [174, 176], "assess": [143, 174], "assign": [1, 167], "assimil": [180, 189], "assist": 156, "associ": [11, 46, 47, 52, 86, 87, 89, 90, 91, 92, 96, 109, 110, 116, 117, 142, 143, 148, 149, 167, 230, 231, 240], "assum": [7, 8, 9, 10, 54, 76, 116, 142, 144, 156], "ater": 237, "atka": 239, "atla": [145, 186, 199, 200, 201, 202, 208], "atlant": 238, "atlantic_basin": [231, 234, 235, 236], "atm": 208, "atmospher": [180, 181, 183, 184, 197, 240], "attempt": [5, 7, 141, 143, 
159], "attr": [141, 143], "attribnam": 96, "attribut": [8, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 60, 61, 64, 72, 96, 109, 110, 123, 124, 126, 130, 138, 141], "aug": [141, 143, 164, 199, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "augment": 238, "august": 196, "author": [1, 7, 10, 141, 159, 174, 190, 199], "auto": 12, "autom": [85, 143], "automat": [7, 116, 117, 119, 141, 142, 144, 149, 152, 153, 156, 162, 166, 203, 206, 207, 209, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 237, 238, 239, 240, 242, 243, 244, 245, 246, 247], "autonom": 11, "autosquash": 142, "avail": [3, 7, 8, 10, 61, 62, 71, 76, 113, 126, 141, 142, 143, 144, 147, 148, 149, 151, 152, 153, 156, 159, 161, 162, 163, 169, 170, 172, 173, 175, 177, 178, 179, 180, 182, 190, 191, 194, 195, 197, 198, 199, 201, 202, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "availab": [149, 159], "avali": 152, "averag": [4, 45, 46, 77, 78, 116, 117, 119, 120, 128, 129, 140, 142, 143, 144, 148, 165, 168, 177, 180, 186, 189, 195, 196, 205, 231, 233, 235, 238, 239, 240, 241, 242, 243, 244, 245, 246], "avg": [61, 64, 80, 81, 83, 84, 141, 203], "avgsurfacetemperatur": 11, "avgvaluewithinoceanlayerregion": [147, 170], "avgvaluewithinoceanregion": [147, 170], "avgvaluewithinoceanvolumeregion": [147, 170], "aviat": 239, "aviso": [145, 201, 202, 214, 234], "avoid": [7, 116, 117, 142], "awai": [141, 156], "awar": [4, 8, 10, 153, 156], "ax": [111, 116, 117, 141, 160, 240, 243, 245], "axes_class": 141, "axes_grid1": 141, "axi": [4, 46, 111, 113, 116, 117, 118, 119, 140, 155, 160, 167, 168, 179, 231, 233, 235, 238, 239, 240, 241, 242, 243, 244, 245, 246], "axisfonts": [113, 116, 117, 160], "b": 3, "bach": 239, "back": 
[1, 4, 143, 180], "background": [116, 117, 144, 153], "backgroundcolor": [116, 117], "bahama": 246, "baja": 246, "bakker": [186, 201, 202], "balanc": [141, 174, 197, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 233, 237, 240, 243, 245], "baldwin": 0, "baltic": 246, "band": [193, 195], "banzon": 181, "bar": [47, 109, 110, 113, 119, 142, 144, 160, 231], "barent": 246, "barotrop": 141, "barotropicstreamfunct": 141, "barycentr": 5, "base": [1, 7, 15, 19, 52, 55, 60, 77, 78, 93, 100, 106, 109, 116, 117, 141, 142, 143, 144, 148, 151, 152, 156, 158, 159, 161, 180, 181, 183, 184, 186, 198, 212, 236, 239, 247], "base_path": [142, 144], "basedirecori": [142, 144, 156], "basedirectori": [1, 100, 142, 144, 152, 156, 158, 159, 161, 163, 203, 226], "basedirectoryopt": 100, "baseexcept": 8, "baselin": 196, "basepath": 142, "bash": [142, 144, 203], "bashrc": 142, "basi": [174, 195], "basic": [1, 52, 60, 141, 142, 143, 144, 189], "basin": [162, 174, 181, 231, 234, 235, 236, 238, 239], "battel": [141, 143], "baudouin": 239, "bbox_inch": 141, "bck": [144, 148, 153], "becam": 11, "becaus": [1, 3, 4, 7, 9, 10, 11, 97, 106, 116, 117, 141, 142, 143, 144, 148, 153, 158, 163, 209, 214, 226, 229, 233, 236, 237], "becom": [8, 119, 142, 143, 144, 148], "been": [1, 2, 4, 5, 7, 8, 9, 10, 11, 13, 55, 62, 66, 67, 106, 112, 116, 117, 141, 142, 143, 144, 147, 152, 153, 156, 159, 162, 163, 171, 178, 180, 191, 199, 203, 209, 214, 239, 240, 246], "befor": [1, 7, 8, 45, 52, 53, 57, 62, 66, 68, 69, 74, 141, 142, 143, 144, 147, 153, 158, 170, 171, 203], "begin": [2, 57, 97, 129, 141, 142, 143, 144, 156, 181, 231, 238, 239, 240, 242, 243, 244, 245, 246], "behav": [2, 10], "behavior": 5, "behind": [142, 144], "being": [5, 7, 33, 46, 47, 64, 72, 80, 81, 84, 109, 110, 116, 117, 141, 142, 143, 144, 148, 163, 195, 203, 236, 237], "bellingshausen": [234, 236, 241], "belong": [4, 46, 47, 109, 110, 159], "below": [9, 10, 11, 109, 110, 142, 
143, 144, 148, 151, 152, 153, 156, 167, 203, 213, 216, 226, 228, 231, 234, 236, 237, 239, 240, 241, 242, 243, 244, 245], "ben": 9, "benefit": 180, "bere": 246, "besid": 141, "best": [116, 117, 141, 142, 167, 180, 230], "beta": 156, "beta0": [5, 9], "better": [11, 151, 157, 183, 184, 191], "between": [2, 4, 5, 8, 9, 10, 11, 85, 97, 106, 114, 115, 116, 117, 118, 119, 141, 142, 143, 144, 148, 149, 153, 163, 165, 167, 186, 191, 200, 203, 212, 230, 231, 236, 237, 238, 239, 240, 242, 243, 244, 245, 246, 247], "beyond": [113, 114, 115, 116, 117, 119, 120, 172], "bgc": 208, "bi": 195, "bia": [114, 115, 117, 149, 183, 184], "bias": 143, "bibliograph": 200, "bibtex": [172, 173, 174, 176, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200], "big": 153, "bilinear": [5, 9, 82, 142, 144, 148, 158], "bilinearli": 116, "bin": [141, 142, 144, 177, 238], "binboundarymerheattran": [147, 170], "biogeochem": 186, "biogeochemistri": [142, 208], "biologi": 193, "bit": [2, 4, 8, 11, 141, 142, 144], "black": [113, 114, 116, 117, 119, 120, 160, 229], "blah": 1, "blank": [46, 47, 109, 110], "bld": 212, "blob": [4, 5], "block": [1, 8, 57, 141, 144], "blockag": 246, "boarder": 141, "bolu": 238, "bona": 194, "bool": [1, 13, 15, 54, 55, 56, 64, 77, 78, 82, 90, 95, 98, 109, 115, 116, 117, 124, 141], "boolean": [2, 54, 90, 141], "bootstrap": [145, 201, 204, 218, 219], "bootstrap_nsidc0079": [218, 219], "borchgrevink": 239, "bot": [109, 206, 207, 226], "both": [1, 3, 4, 5, 7, 9, 10, 93, 109, 110, 114, 116, 117, 141, 142, 147, 150, 151, 167, 170, 171, 178, 180, 195, 203, 218, 219, 232, 233, 244], "bother": 7, "bottom": [143, 192, 210, 212, 213, 218, 219, 220, 221, 222, 223, 224, 225], "bottomdepth": 143, "bound": [142, 144, 155, 156, 165, 203, 208, 213, 229, 233, 236], "boundari": [5, 116, 117, 141, 148, 149, 160, 183, 184, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 233, 236, 237, 238, 240, 243, 245, 247], "boundary_vertic": 141, 
"boundaryvertic": 141, "box": 142, "boyer": [176, 201, 202], "boyin": 181, "bracket": 87, "bradi": 0, "brahm": 239, "branch": [1, 4, 5, 7, 8, 9, 10, 11, 142, 203], "brbg_r": 208, "break": [1, 4, 8], "breviti": 8, "brief": 7, "briefli": 144, "bright": 187, "broadcast": 116, "broader": [4, 143], "brows": 168, "browser": [142, 143, 203], "brunt_stancomb": 239, "bsd": [141, 143], "bsf": 141, "bsf_": 141, "bsf_cell": 141, "bsf_vertex": 141, "bsfcell": 141, "bsfvertex": 141, "btm": 240, "budget": 197, "buffer": [97, 111], "build": [8, 14, 15, 19, 106, 125, 139, 143, 195], "build_analysis_list": [8, 203], "build_config_full_path": [1, 7], "build_mapping_fil": 9, "built": [11, 153], "bulk": 186, "buor": [149, 213, 247], "byte": 178, "c": [3, 141, 143, 172, 181, 186, 195, 230, 231, 235, 241], "ca": 246, "cach": [5, 127, 142, 144, 152, 158, 159, 162, 203], "cachefilenam": 127, "calcul": [116, 117, 127, 174, 176, 180, 189, 196, 238], "calendar": [1, 6, 10, 11, 52, 60, 76, 77, 78, 85, 97, 99, 105, 106, 116, 117, 118, 119, 127, 129, 130, 131, 132, 133, 135, 136, 137, 140, 143], "call": [1, 4, 5, 7, 8, 11, 13, 15, 52, 60, 62, 64, 127, 141, 142, 143, 144, 153, 180, 203, 226], "callabl": [124, 138], "callcheckgener": [8, 13], "cam": [183, 184], "camelcas": 2, "campaign": 143, "campbel": 239, "can": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 52, 53, 54, 64, 82, 114, 115, 116, 117, 119, 124, 134, 135, 136, 137, 138, 141, 142, 143, 144, 147, 148, 149, 150, 151, 152, 153, 156, 157, 158, 159, 160, 162, 163, 166, 167, 168, 170, 171, 172, 180, 203, 205, 208, 210, 218, 219, 220, 221, 222, 223, 224, 225, 226, 228, 230, 231, 233, 234, 235, 236, 237, 238, 239, 240, 241, 244, 246], "cancel": 8, "cannot": [4, 10, 142, 144, 163, 229, 237], "capabilit": 10, "capabl": [1, 10, 11, 144, 148, 151, 153], "capit": [2, 141, 231], "caption": [46, 47, 109, 110, 230, 231, 235], "carbon": 182, "care": [7, 11, 142, 143], "carefulli": 174, "caron": [183, 184, 197, 201, 202], "carri": 200, "cartesian": 5, "cartopi": 
[121, 141, 160], "cartopygridfonts": 160, "case": [1, 5, 7, 8, 11, 117, 141, 142, 143, 144, 147, 148, 149, 150, 151, 153, 156, 159, 167, 170, 180, 203, 212, 234, 236, 238, 241], "casenam": 2, "cast": 199, "catalogu": 195, "catch": 1, "categori": [1, 2, 59], "category1": 1, "category2": 1, "caus": [1, 7, 144, 203], "cavalieri": [174, 187, 196, 201, 204], "caviti": 142, "cax": 141, "cbar": 141, "cbarlabel": [114, 115], "cccccc": 141, "cchdo": [201, 202], "cci": 195, "cd": [141, 142, 144, 203], "cdir": 142, "ceda": [195, 201, 202], "cell": [5, 9, 64, 82, 116, 123, 124, 138, 141, 144, 148, 167, 174, 187, 230, 234, 237, 247], "cell0": 141, "cell1": 141, "cellid": [85, 105, 106], "celllon": [85, 105, 106], "cells_on_edg": 141, "cells_on_vertex": 141, "cellsonedg": 141, "cellsonvertex": 141, "center": [5, 9, 116, 141, 142, 145, 150, 153, 167, 181, 186, 193, 196, 197, 198, 199, 201, 202, 216, 232, 234], "centr": [180, 183, 184, 197], "central_latitud": 141, "central_longitud": 141, "centroid": 239, "certain": [11, 147, 148, 165, 170, 171], "cesm": [161, 163], "cf": 142, "cfg": [141, 142, 144, 159, 163, 168, 203], "challeng": [3, 4], "chanc": [5, 141, 143], "chang": [1, 7, 8, 10, 141, 142, 143, 144, 145, 147, 152, 158, 159, 162, 170, 174, 201, 202, 203, 227], "channel": [142, 144, 203, 246], "channel_prior": [142, 144, 203], "chapter": 12, "charact": [113, 114, 115, 116, 117, 119, 120], "characterist": [193, 200], "charg": 198, "charl": 0, "cheaper": 5, "check": [1, 4, 7, 8, 54, 60, 71, 141, 142, 143, 144, 148, 153, 158], "check_cal": 7, "check_gener": [1, 2, 8, 13, 203], "cheetham": 239, "chelton": 3, "chemic": [3, 200], "chicoma": [142, 153], "child": 1, "children": [1, 52, 60], "chl": 208, "chlorophyl": [193, 208], "chmod": 142, "choic": 159, "choos": [4, 141, 143, 144, 203, 230], "chose": [142, 144], "chosen": [5, 116, 117, 119, 149, 203, 246], "chrysali": [142, 153], "chunk": [7, 85, 151, 156], "chunksiz": 139, "ci": 2, "cice": [147, 156, 161, 171, 203], "cice_in": 
[147, 156, 171, 203], "cima": 179, "circ": 241, "circul": [37, 147, 170, 200, 238, 247], "circumst": [144, 148, 165], "citat": [175, 178, 185, 187, 194, 195, 196], "cite": [176, 177, 179, 181, 186, 195, 197, 198], "clarifi": 1, "class": [1, 4, 9, 10, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 60, 61, 64, 72, 73, 75, 109, 123, 124, 126, 130, 138, 143, 148, 203], "clean": [2, 141, 203], "cleanup": 11, "clear": [2, 5, 11, 143], "clearer": [1, 141], "clearli": 156, "click": 142, "clim": [158, 159, 203, 208], "clim_": 142, "clim_3": 142, "climat": [34, 142, 143, 144, 145, 147, 155, 159, 163, 170, 177, 180, 183, 184, 197, 198, 201, 202, 227, 232], "climatalogi": [142, 144, 148], "climatolog": [3, 198, 227, 228, 238], "climatologi": [1, 5, 6, 7, 8, 9, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 35, 36, 37, 48, 49, 50, 52, 57, 59, 109, 127, 140, 141, 143, 145, 150, 153, 156, 158, 159, 160, 163, 164, 168, 175, 177, 179, 181, 185, 186, 198, 201, 202, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 233, 234, 235, 236, 237, 238, 241, 247], "climatology_map": 1, "climatology_map_bsf": 141, "climatology_map_ek": [3, 141], "climatology_map_ohc_anomali": 141, "climatology_map_sos": 3, "climatology_nam": [141, 143], "climatologymabbld": 212, "climatologymap": 1, "climatologymapantarcticmelt": [140, 146, 162, 172, 188, 190], "climatologymapargosalin": [140, 146, 191], "climatologymapargotemperatur": [140, 146, 191], "climatologymapbgc": [140, 146, 182, 186, 193, 198], "climatologymapbgc_alk": 208, "climatologymapbgc_chl": 208, "climatologymapbgc_co2_gas_flux": 208, "climatologymapbgc_d": 208, "climatologymapbgc_no3": 208, "climatologymapbgc_o2": 208, "climatologymapbgc_pco2surfac": 208, "climatologymapbgc_ph_3d": 208, "climatologymapbgc_po4": 208, "climatologymapbgc_sio3": 208, "climatologymapbldmax": 
212, "climatologymapbldmin": 212, "climatologymapbldminmax": 212, "climatologymapdensitymld": 212, "climatologymapdensitymldmax": 212, "climatologymapdensitymldmin": 212, "climatologymapdensitymldminmax": 212, "climatologymapek": [3, 140, 141, 146, 179], "climatologymapicebergconcsh": [140, 146, 173], "climatologymapmld": [140, 146, 176], "climatologymapmldminmax": [140, 146], "climatologymapocean": 1, "climatologymapohcanomali": [140, 141, 146], "climatologymapschmidtko": [140, 146, 192], "climatologymapschmidtkopotentialdens": 217, "climatologymapschmidtkosalin": 217, "climatologymapschmidtkotemperatur": 217, "climatologymapseaic": 1, "climatologymapseaiceconc": [224, 225], "climatologymapseaiceconcnh": [140, 146, 178, 187], "climatologymapseaiceconcsh": [140, 146, 178, 187], "climatologymapseaicemeltingnh": [140, 146], "climatologymapseaicemeltingsh": [140, 146, 174], "climatologymapseaiceproductionnh": [140, 146], "climatologymapseaiceproductionsh": [140, 146, 174], "climatologymapseaicethicknh": [140, 146], "climatologymapseaicethicksh": [140, 146], "climatologymapsos": [140, 146, 194], "climatologymapsosemeridionalveloc": 226, "climatologymapsosemixedlayerdepth": 226, "climatologymapsosepotentialdens": 226, "climatologymapsosesalin": 226, "climatologymapsosetemperatur": 226, "climatologymapsosevelocitymagnitud": 226, "climatologymapsosezonalveloc": 226, "climatologymapssh": [140, 146, 177], "climatologymapsss": [140, 146, 159, 175], "climatologymapsst": [140, 146, 159, 184], "climatologymaptemperaturemld": 212, "climatologymaptemperaturemldmax": 212, "climatologymaptemperaturemldmin": 212, "climatologymaptemperaturemldminmax": 212, "climatologymapwav": [140, 146, 180, 195], "climatologymapwavespeakwaveperiod": 227, "climatologymapwavessignificantwaveheight": 227, "climatologymapwoa": [140, 146, 199], "climatologymapwoasalin": 228, "climatologymapwoatemperatur": 228, "climatologynam": [64, 80, 81, 141], "climatologysubdirectori": 158, "clip": [156, 229], 
"clivar": 200, "clobber_mod": [147, 170, 171], "clockwis": 244, "clone": [141, 203], "close": [4, 7, 141, 143, 180], "closest": [141, 239], "closur": [156, 238], "cloud": [185, 236], "cluster": [153, 186], "cmap": 141, "cmip5": 177, "cmo": [141, 236], "cmocean": [141, 149], "cne": 177, "co": [177, 190, 199], "co2": [186, 208], "co2_gas_flux": 208, "coars": [5, 144], "coarser": 152, "coastal": 174, "code": [1, 4, 5, 7, 8, 11, 64, 124, 144, 162, 203, 224, 225, 234], "codecog": 3, "col": 141, "collabor": 179, "collect": [111, 141, 180], "color": [47, 109, 110, 112, 113, 114, 115, 116, 117, 119, 120, 141, 142, 144, 149, 160, 193, 201, 202, 203, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 226, 229, 230, 231, 233, 236, 237, 238, 240, 243, 245, 247], "colorbar": [114, 115, 116, 117, 141, 203, 208, 230, 231, 233, 235, 236, 238, 240], "colorbarlabel": [116, 117], "colorbarlevel": [149, 233, 240, 243, 245], "colorbarlevelsatlant": 238, "colorbarlevelsdiffer": [149, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 237, 247], "colorbarlevelsglob": 238, "colorbarlevelsindopacif": 238, "colorbarlevelsresult": [149, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 237, 247], "colorbartick": 149, "colorbarticksdiffer": [141, 205, 206, 207, 209, 217, 220, 221, 222, 223, 226, 227, 228, 230, 237, 247], "colorbarticksresult": [141, 205, 206, 207, 209, 210, 213, 214, 217, 220, 221, 222, 223, 226, 227, 228, 230, 237, 247], "colormap": [46, 47, 109, 110, 114, 117, 140, 141, 152, 168, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 233, 236, 237, 238, 240, 243, 245, 247], "colormapdict": 112, "colormapindic": [149, 233, 240, 243, 245], "colormapindicesatlant": 238, "colormapindicesdiffer": [149, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 237, 247], "colormapindicesglob": 238, "colormapindicesindopacif": 238, "colormapindicesresult": [149, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 
237, 247], "colormapnam": [141, 149, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 226, 230, 233, 237, 240, 243, 245, 247], "colormapnameatlant": 238, "colormapnamediffer": [141, 149, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 237, 247], "colormapnameglob": 238, "colormapnameindopacif": 238, "colormapnameresult": [141, 149, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 237, 247], "colormapsectionnam": [114, 115, 116, 117], "colormaptypediffer": [141, 205, 211, 212, 220, 221, 222, 223, 227, 228, 231], "colormaptyperesult": [141, 205, 211, 212, 220, 221, 222, 223, 227, 228, 231], "colour": 149, "column": [141, 143], "com": [0, 1, 3, 4, 5, 7, 10, 141, 142, 143, 203], "combin": [7, 143, 159, 173, 174, 180], "come": 195, "comeau": 0, "comiso": [174, 178, 201, 204], "comma": 18, "command": [1, 2, 7, 11, 15, 18, 141, 142, 144, 153, 159, 203], "commandlin": 7, "commandprefix": 7, "commend": 1, "comment": [1, 2, 163, 203, 231, 238, 239, 240, 242, 243, 244, 245, 246], "commerci": [174, 194], "commit": [141, 142], "commnd": 7, "common": [1, 8, 109, 141, 142, 143, 144, 148, 152, 158, 162, 165, 167, 180, 203], "commonli": [9, 148, 168, 233], "commun": [141, 142, 163, 180, 183, 184], "compact": [8, 143], "compar": [2, 3, 9, 109, 117, 141, 142, 143, 144, 148, 150, 158, 161, 163, 165, 203, 206, 207, 208, 209, 212, 213, 214, 217, 218, 219, 221, 223, 226, 228, 229, 233, 234, 236, 237, 240, 241, 244, 247], "compareascontour": 117, "comparewithobserv": [233, 244], "comparion": 148, "comparison": [2, 9, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 48, 49, 50, 64, 65, 67, 69, 72, 74, 79, 81, 82, 109, 116, 117, 121, 122, 140, 141, 143, 144, 148, 152, 153, 158, 163, 167, 168, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 232, 233, 235, 
236, 237, 244, 247], "comparison_grid_nam": [79, 121, 122, 141, 143], "comparisonantarcticstereoresolut": [148, 150], "comparisonantarcticstereowidth": [148, 150], "comparisoncontourlinecolor": [116, 117], "comparisoncontourlinestyl": [116, 117], "comparisoncontourlinewidth": [116, 117], "comparisondescriptor": [64, 65, 82], "comparisonfieldnam": [116, 117], "comparisongrid": [141, 143, 150, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228], "comparisongridnam": [64, 65, 67, 69, 72, 74, 81, 109, 141], "comparisonlatresolut": [148, 150], "comparisonlonresolut": [148, 150], "compars": 163, "compat": 4, "compi": [142, 153], "complet": [5, 7, 8, 52, 57, 143, 148, 151, 159, 163, 180, 181], "complex": 9, "complic": 143, "compon": [1, 2, 3, 4, 10, 52, 61, 80, 81, 84, 123, 126, 140, 141, 142, 143, 144, 156, 159, 162, 163, 169, 170, 183, 184, 194, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "componentnam": [1, 52, 55, 61, 64, 80, 81, 84, 124, 126, 138, 141, 143], "compoon": 159, "comprehens": [144, 181, 200], "comput": [4, 5, 7, 8, 10, 20, 21, 32, 34, 36, 37, 38, 40, 45, 57, 58, 61, 62, 64, 70, 71, 72, 76, 77, 78, 80, 81, 84, 109, 123, 124, 126, 127, 128, 129, 138, 141, 142, 143, 144, 152, 153, 158, 159, 162, 163, 165, 168, 180, 181, 183, 186, 194, 197, 203, 205, 216, 229, 231, 233, 236, 237, 238, 239, 240, 242, 243, 244, 245, 246], "computation": 143, "compute_transects_with_vel_mag": 3, "compute_zmid": 143, "computeanomali": [45, 231], "computeregionmask": [21, 32, 36, 38, 40], "computeregionmaskssubtask": 123, "compyf": 142, "concat": 4, "concatin": 156, "conceiv": [4, 8], "concentr": [48, 49, 145, 174, 183, 184, 193, 196, 201, 204, 210, 218, 219], "concentrationaltibergsh": [158, 210], "concentrationbootstrapnh_ja": 218, 
"concentrationbootstrapnh_jfm": 218, "concentrationbootstrapsh_djf": 219, "concentrationbootstrapsh_jja": 219, "concentrationnasateamnh_ja": 218, "concentrationnasateamnh_jfm": 218, "concentrationnasateamsh_djf": 219, "concentrationnasateamsh_jja": 219, "concept": [142, 144], "conceptu": 4, "concert": [116, 117], "concurr": 7, "conda": [141, 203], "conda_packag": 203, "condit": [4, 142, 143, 194], "conduc": 7, "conduct": 234, "confid": [7, 174], "config": [1, 3, 4, 5, 6, 7, 8, 10, 11, 13, 14, 15, 17, 18, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 50, 51, 52, 55, 59, 61, 64, 68, 69, 74, 79, 80, 81, 82, 83, 84, 85, 100, 109, 112, 113, 114, 115, 116, 117, 119, 120, 123, 124, 126, 138, 140, 142, 143, 144, 148, 151, 152, 153, 155, 156, 160, 163, 165, 168, 203, 212, 226, 229, 230, 231, 235, 238, 240, 242, 244], "config_am_layervolumeweightedaverage_compute_interv": [147, 170], "config_am_layervolumeweightedaverage_en": [147, 170], "config_am_meridionalheattransport_compute_interv": [147, 170], "config_am_meridionalheattransport_en": [147, 170], "config_am_mixedlayerdepths_en": [147, 170], "config_am_surfaceareaweightedaverages_compute_interv": [147, 170], "config_am_surfaceareaweightedaverages_en": [147, 170], "config_am_timeseriesstatsmonthly_en": [147, 170, 171], "config_calendar_typ": [1, 4, 10], "config_density0": 143, "config_execut": 148, "config_run_dur": [147, 170, 171], "config_specific_heat_sea_wat": 143, "configfil": 7, "configpars": [112, 113, 116, 117, 119, 120], "configr": [210, 218, 219, 220, 221, 222, 223, 224, 225], "configsectionnam": [109, 110, 112], "configur": [1, 3, 7, 10, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 50, 51, 52, 60, 61, 79, 80, 81, 82, 83, 84, 85, 100, 109, 112, 113, 114, 115, 116, 117, 119, 120, 123, 126, 140, 141, 143, 147, 148, 149, 150, 151, 152, 153, 155, 156, 158, 159, 160, 161, 162, 163, 
164, 165, 169, 203], "conflict": [2, 142, 144], "conform": 141, "confuns": 4, "confus": [1, 11, 143], "conger_glenz": 239, "conponentnam": 123, "consecut": 164, "conserv": [5, 33, 82, 140, 146, 148, 158, 192], "consev": [148, 158], "consid": [8, 148], "consider": [3, 5, 7, 141, 180], "consist": [1, 2, 4, 5, 7, 8, 180, 187, 195, 198, 239], "constant": [8, 62, 63, 64, 68, 69, 72, 74, 80, 81, 84, 109, 143, 241], "constitu": 3, "constrain": 4, "constraint": [172, 180, 197], "construct": [1, 5, 8, 9, 11, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 57, 61, 64, 72, 109, 123, 124, 126, 138, 141, 143], "constructor": 1, "consum": [144, 151, 152, 162], "contact": [156, 159, 164, 174, 194, 200], "contain": [1, 3, 5, 11, 14, 17, 18, 33, 37, 42, 43, 46, 47, 52, 59, 61, 77, 78, 79, 82, 85, 88, 97, 106, 107, 108, 112, 113, 114, 115, 116, 117, 119, 120, 126, 127, 129, 141, 142, 143, 144, 148, 152, 153, 155, 156, 158, 159, 160, 161, 162, 163, 165, 177, 179, 186, 195, 200, 203, 230, 231, 234, 235, 236, 241], "content": [27, 39, 134, 141, 143, 144, 148, 163, 165, 180, 213, 240], "context": 153, "contina": 156, "continent": [192, 226, 239, 241], "continu": [1, 9, 141, 142, 143, 144, 181, 205, 211, 212, 220, 221, 222, 223, 227, 228, 231, 236], "contour": [46, 47, 112, 114, 116, 117, 141, 160, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 226, 230, 231, 233, 237, 238, 240, 243, 245, 247], "contourcolor": 149, "contourcolormap": [116, 117], "contourcolorresult": 214, "contourcomparisonfield": 116, "contourcomparisonlinecolor": [116, 117], "contourcomparisonlinestyl": [116, 117], "contourf": [109, 110, 114], "contourlabelprecis": [116, 117], "contourlevel": [149, 231, 233, 240, 243, 245], "contourlevelsatlant": 238, "contourlevelsdiffer": [237, 247], "contourlevelsglob": 238, "contourlevelsindopacif": 238, "contourlevelsresult": [214, 237, 247], "contourthick": 149, "contourthicknessresult": 214, 
"contrast": 143, "contribut": [0, 142], "contributor": [1, 2, 4, 5, 7, 8, 9, 10, 11, 140], "control": [7, 14, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 44, 46, 47, 48, 49, 50, 51, 109, 112, 113, 114, 115, 116, 117, 119, 120, 141, 142, 143, 144, 148, 149, 151, 153, 159, 163, 166, 167, 168, 199, 205, 211, 212, 220, 221, 222, 223, 228, 229, 233], "control_config": [27, 141, 143], "control_run": 163, "control_run_nam": [141, 143], "controlcolor": [229, 234], "controlconfig": [14, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 44, 46, 47, 48, 49, 50, 51, 109, 141, 143], "controlrunconfigfil": 163, "conveni": [3, 142, 144, 180, 231, 235], "convent": [2, 4, 11, 156], "convers": 4, "convert": [4, 10, 85, 98, 105, 106, 131, 132, 133, 141], "cook": 239, "cooper": 179, "coord": [4, 9], "coordin": [4, 5, 9, 11, 76, 77, 78, 85, 105, 106, 116, 117, 127, 143, 167], "copernicu": [180, 201, 202], "copi": [1, 4, 106, 107, 108, 116, 117, 142, 143, 144, 152, 160, 172, 200, 203], "copyright": [141, 143, 172, 175, 178, 185, 187, 196], "core": [10, 76, 77, 78, 85, 97, 99, 105, 106, 127, 136, 137, 142, 144, 148, 151, 153, 165, 203], "cori": [7, 142, 153], "corner": [9, 111, 116, 142], "correct": [1, 60, 106, 195], "correctli": [2, 76, 141, 142, 143, 144, 195, 203], "correl": 191, "correspond": [1, 7, 11, 47, 59, 82, 113, 116, 117, 119, 120, 162, 167, 203, 212, 230, 231, 234, 235, 236, 238, 240, 241, 242, 243, 244, 245], "cosgrov": 239, "costli": 5, "could": [1, 5, 7, 8, 9, 60, 143, 144, 159, 180, 205, 217, 226], "count": [1, 140, 148, 168, 236], "counterpart": 1, "countourcomparisonfieldarrai": 116, "coupl": 177, "cours": [141, 142, 196], "cover": [142, 143, 144, 160, 163, 186, 193, 194, 195, 216], "coverag": [175, 177, 187, 199], "covert": 133, "cp": [141, 143], "cpu": [7, 142], "cr": [121, 141], "crash": [7, 144, 153], "creat": [1, 2, 3, 5, 7, 9, 11, 52, 55, 56, 73, 75, 82, 116, 117, 124, 127, 140, 141, 143, 148, 
152, 153, 158, 183, 184, 186], "creator": 124, "crop": 141, "crosson": 239, "cruis": 182, "cryosat": [173, 195], "csh": [142, 144], "cshrc": 142, "csr_matrix": 141, "csv": 205, "ctd": 200, "cuba": 246, "cumsum": 141, "curl": 141, "current": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 19, 88, 116, 130, 141, 143, 144, 145, 147, 150, 153, 156, 162, 170, 171, 177, 180, 183, 184, 201, 202, 209, 230, 234, 236, 238], "curv": [4, 229], "curvatur": 111, "custodi": 172, "custom": [65, 66, 67, 140, 143, 144, 148, 149, 152, 155, 156, 159, 165, 168, 230, 236, 241, 244], "customdirectori": 152, "cycl": [4, 148, 157, 165, 189, 191, 229, 238], "cycleindex": 4, "cylindr": 195, "d": [3, 4, 9, 11, 36, 76, 77, 78, 85, 99, 103, 105, 106, 107, 108, 127, 128, 129, 141, 142, 144, 147, 170, 171, 176, 181, 183, 184, 186, 191, 226, 229, 234], "d_": [147, 170, 171], "daac": 193, "dac": 179, "dai": [4, 10, 76, 77, 78, 85, 105, 106, 127, 130, 131, 132, 133, 136, 143, 175, 178, 179, 180, 189, 216], "daili": [143, 174, 175, 178, 180, 196], "darin": 0, "dask": [7, 140, 144, 148, 168, 236], "daskthread": [148, 151, 236], "data": [1, 3, 4, 5, 6, 7, 8, 11, 20, 45, 46, 47, 60, 66, 67, 73, 76, 77, 78, 82, 85, 93, 99, 105, 106, 107, 108, 109, 114, 115, 116, 117, 119, 120, 127, 128, 129, 140, 141, 142, 143, 144, 145, 148, 151, 152, 153, 158, 159, 161, 162, 163, 165, 167, 172, 174, 175, 176, 177, 178, 180, 182, 183, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 202, 204, 206, 207, 209, 216, 217, 227, 228, 229, 232, 233, 234, 236, 241], "dataarrai": [76, 77, 78, 116, 117, 141, 143], "databas": [173, 176, 182, 199], "dataset": [3, 4, 8, 11, 45, 66, 67, 73, 76, 77, 78, 85, 99, 105, 106, 107, 108, 117, 119, 120, 128, 129, 141, 143, 144, 156, 158, 173, 177, 180, 181, 183, 184, 186, 195, 201, 202, 203, 204, 208], "datatime64": 85, "date": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 59, 61, 85, 97, 99, 105, 106, 126, 129, 130, 131, 132, 133, 134, 135, 136, 137, 142, 144, 162, 181], "date2num": 4, 
"date_to_dai": 4, "datestr": [10, 135, 136, 137], "datetim": [4, 10, 85, 97, 99, 130, 131, 132, 133, 135], "datetime64": 4, "datetime_to_dai": 4, "datetimnoleap": 4, "dateutil": [10, 130], "davi": [0, 1, 2, 4, 5, 7, 8, 9, 10, 11, 141, 142, 246], "days_to_datetim": 4, "daysinmonth": 76, "dayssincestartofsim": [4, 11, 147, 170], "dcedg": 141, "dd": [1, 10, 135, 136, 137, 142, 144], "ddd": [10, 135, 136, 137], "de": [3, 176, 201, 202], "debug": [141, 148, 218, 219, 224, 225, 233, 240, 244], "dec": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "decad": [180, 200], "decemb": [177, 196], "decend": 1, "decim": [116, 117], "decomposit": 195, "decor": 142, "decreas": [148, 151, 236], "dedic": 193, "deep": [3, 179, 228, 234, 236, 241], "deep_r": 236, "deepen": 246, "deeper": 241, "def": [1, 4, 7, 8, 9, 10, 141, 143], "defalult": [147, 170, 171], "default": [1, 3, 5, 7, 8, 46, 47, 54, 61, 64, 85, 110, 111, 113, 114, 115, 116, 117, 119, 120, 124, 126, 138, 141, 142, 143, 144, 148, 149, 151, 152, 153, 156, 157, 158, 159, 160, 163, 165, 166, 167, 168, 203, 206, 207, 209, 210, 213, 214, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 241, 246, 247], "defaultfonts": [113, 114, 115, 116, 117, 119, 120, 160, 234], "defaultpath": 100, "defens": [178, 196], "defin": [1, 8, 9, 46, 47, 52, 53, 109, 110, 116, 117, 123, 124, 138, 139, 142, 143, 144, 147, 160, 162, 167, 170, 171, 174, 203, 210, 218, 219, 220, 221, 222, 223, 224, 225, 230, 231, 235, 237, 239, 241, 246, 247], "definit": 129, "deg": [111, 186], "degc": 143, "degre": [111, 148, 150, 175, 177, 179, 180, 181, 198, 210, 218, 219, 220, 221, 222, 223, 224, 225, 230, 231, 235, 237, 238], "deleg": 143, "delet": [141, 142, 144, 203], "deliber": 1, "deliv": 200, "delta": [143, 226, 230, 237], "deltaohc": [143, 213], "deltaohc_": 143, "deltaohc_0": 143, "demonstr": [1, 10, 141, 147, 170], "dennistoun": 
239, "denois": 195, "dens": [208, 222, 223], "densiti": [148, 194, 212, 217, 226, 230, 231, 235, 236, 237, 241, 246], "depend": [4, 7, 8, 53, 57, 116, 117, 119, 126, 142, 143, 144, 180, 203, 236], "depth": [25, 26, 42, 43, 46, 47, 109, 116, 141, 143, 145, 167, 192, 199, 201, 202, 203, 206, 207, 211, 212, 213, 226, 228, 231, 233, 235, 236, 238, 240, 243, 245], "depth_rang": [141, 143], "depth_range_str": 143, "depthlimglob": 233, "depthrang": [143, 213, 235], "deriv": [10, 33, 73, 75, 145, 174, 175, 177, 178, 179, 181, 183, 184, 185, 187, 191, 193, 201, 204, 244], "derivedvariablelist": 33, "descend": 143, "descib": 191, "describ": [1, 2, 3, 8, 9, 11, 52, 64, 75, 79, 121, 122, 141, 142, 143, 144, 176, 179, 186, 203, 210, 218, 219, 224, 225, 231, 235], "descript": [82, 141, 143, 147, 167, 169], "descriptor": [9, 64, 65, 75, 79, 141], "desigend": 151, "design": [1, 2, 4, 5, 7, 8, 9, 10, 11, 140, 143, 147, 170, 171, 187], "design_doc": 4, "design_doc_variable_mapping_reorg": 4, "desir": [5, 7, 8, 11, 71, 109, 143, 147, 148, 153, 158, 159, 162, 163, 165, 170, 171, 200, 226, 230, 231, 237, 238], "desktop": [142, 144, 203], "destin": [5, 9, 142, 144, 148, 230, 237, 247], "destinationdescriptor": 9, "destint": 9, "detail": [7, 12, 85, 105, 106, 119, 140, 143, 144, 152, 153, 158, 174, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 247], "detect": [142, 173, 180], "determin": [1, 52, 55, 60, 76, 77, 78, 109, 110, 114, 116, 127, 141, 142, 143, 144, 148, 155, 156, 159, 162, 165, 166, 167, 205, 210, 218, 219, 220, 221, 222, 223, 224, 225, 226, 230, 231, 234, 236, 237, 238, 239, 240, 242, 243, 244, 245, 246], "dev": [0, 141, 142, 143, 147, 170, 203], "develop": [1, 2, 4, 7, 8, 11, 144, 156, 164, 179, 183, 184, 189, 248], "deviat": [142, 196, 235], "dgradmld": [147, 170], "diagnos": 144, "diagnost": [2, 3, 140, 153, 158, 162, 
168, 203, 226], "diagnostic_output": 142, "diagonst": 152, "diagram": 231, "diagramtyp": 236, "dibbl": 239, "dic": 208, "dict": [1, 19, 26, 33, 61, 64, 85, 98, 105, 106, 112, 116, 117, 123], "dictionari": [1, 7, 8, 9, 13, 15, 17, 61, 64, 85, 98, 105, 106, 112, 141, 149, 205, 206, 207, 209, 210, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 235, 237, 241, 247], "dictonari": [85, 105, 106], "did": 143, "didn": [7, 142, 144], "diego": 172, "diff": [117, 141], "diff_title_label": [141, 143], "diffarrai": [114, 115, 117], "differ": [1, 2, 3, 9, 10, 11, 33, 85, 109, 110, 114, 115, 116, 117, 130, 141, 142, 143, 144, 148, 149, 151, 153, 156, 163, 165, 168, 180, 183, 184, 186, 194, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 231, 237, 239, 241, 247], "differenttext": 1, "difficult": 151, "diffsuffix": 117, "difftitl": [114, 115, 117], "difftitlelabel": [109, 110, 141, 143], "digit": [172, 200], "dim": [4, 9, 141, 143], "dimens": [9, 64, 73], "dir": [139, 142, 144, 156, 159, 161], "direct": [3, 116, 159], "directli": [3, 4, 7, 11, 141, 143, 148, 153, 156, 159, 203, 226], "directori": [1, 3, 4, 5, 7, 8, 11, 52, 83, 88, 100, 142, 143, 144, 153, 154, 161, 163, 177, 203, 230, 239], "disabl": [142, 156, 163, 233, 238, 244], "discourag": 163, "discuss": [1, 11, 143], "disk": [143, 180, 238], "displai": [46, 47, 109, 110, 116, 117, 118, 142, 144, 148, 149, 153, 154, 159, 168, 230], "dissolv": 199, "distanc": [116, 117, 141, 167, 231, 238, 239, 240, 242, 243, 244, 245, 246], "distant": 143, "distribut": [141, 143, 148, 158, 172, 177, 180], "diverg": 174, "divid": [141, 143, 151, 167], "djf": [164, 210, 219, 220, 221, 222, 223], "dmsp": [178, 196], "do": [1, 2, 4, 5, 7, 10, 11, 55, 58, 64, 85, 141, 142, 143, 144, 153, 157, 159, 167, 174, 203], "doc": [3, 203], "docstr": [1, 11, 141, 143], "document": [1, 2, 3, 4, 9, 10, 11, 12, 60, 140, 143, 144, 147, 171, 248], "documentaiton": [147, 149, 170], 
"dodet": [195, 201, 202], "doe": [1, 5, 7, 10, 11, 77, 78, 79, 95, 99, 101, 102, 116, 117, 121, 122, 141, 143, 144, 147, 153, 159, 163, 169, 171, 180, 230, 236], "doesn": [1, 7, 11, 52, 83, 100, 127, 142, 143], "doi": [176, 179, 181, 193], "domain": 237, "don": [1, 8, 77, 78, 141, 142, 143, 162], "done": [8, 141, 142, 144, 153, 241], "dot": [1, 86, 113, 114, 115, 116, 117, 119, 120, 160], "dotson": 239, "doubl": 143, "doutriaux": 0, "down": [151, 228], "download": [3, 20, 140, 142, 150, 152, 159, 189, 226], "download_analysis_data": [3, 144, 152, 203, 226], "dpi": [113, 114, 115, 116, 117, 119, 120, 141, 160], "dr": [197, 201, 202], "drake": 246, "draw": [141, 230], "draw_label": 141, "drift": [174, 185], "drifter": [3, 145, 201, 202, 209], "drifter_vari": [3, 179], "drive": 142, "driver": [1, 186], "drop": 141, "drop_var": [141, 143], "drygalski": 239, "ds_mesh": 141, "ds_restart": 143, "dsbsf": 141, "dsendtim": 4, "dsfirstyear": 4, "dsmask": 143, "dsmesh": 141, "dsnew": 4, "dsob": 73, "dspreprocess": 4, "dsr": 179, "dsshift": 4, "dsstarttim": 4, "dst_me": 144, "dst_mesh": 142, "dst_region": [142, 144], "dstorepl": 4, "dsvalu": [119, 120], "dt": 177, "dt1": 130, "dt2": 130, "dthreshmld": [147, 170], "dtype": 141, "duac": 177, "due": 229, "durat": [3, 147, 170, 171], "dure": [1, 7, 11, 13, 15, 52, 53, 66, 142, 143, 147, 159, 170, 171, 200], "dvedg": 141, "dx": 141, "dy": 141, "dycor": 85, "dynam": [145, 201, 202, 214, 234], "e": [1, 2, 3, 4, 5, 7, 8, 10, 11, 45, 52, 57, 60, 64, 72, 86, 87, 109, 116, 129, 141, 142, 143, 144, 148, 156, 157, 158, 159, 161, 162, 163, 164, 165, 167, 174, 178, 186, 189, 193, 194, 203, 212, 226, 230, 231, 236, 237, 238, 240, 242, 243, 244, 245], "e3sm": [3, 140, 143, 144, 148, 152, 153, 156, 158, 159, 161, 162, 163, 168, 170, 171, 203, 242], "each": [1, 2, 5, 7, 8, 9, 13, 15, 46, 47, 52, 58, 61, 64, 72, 76, 77, 78, 103, 109, 110, 116, 119, 127, 136, 140, 142, 143, 144, 148, 149, 150, 151, 152, 153, 156, 158, 159, 160, 164, 167, 176, 180, 
189, 195, 196, 200, 203, 205, 210, 212, 218, 219, 220, 221, 222, 223, 224, 225, 226, 228, 230, 231, 234, 235, 236, 237, 238, 241, 247], "earli": [180, 198], "earlier": [2, 203], "earth": [140, 147, 163, 169, 177, 178, 197, 203], "easi": [1, 5, 11, 141, 180, 200, 224, 225], "easier": [1, 2, 11, 141, 142], "easili": [2, 4, 5, 10, 141, 163, 230], "east": [179, 234, 236, 239, 241], "eastern": [234, 236, 241], "eastern_ross": 239, "ec": 156, "ec30to60e2r2": [142, 144, 156], "ec60to30": 9, "ecmwf": [180, 197], "ecolog": 193, "ecologi": 193, "ecwisc30to60e2r1": [142, 144, 156], "eddi": [6, 24, 140, 142, 156, 179, 209, 238], "edg": 141, "edge_indic": 141, "edgecolor": 141, "edgeindic": 141, "edges_on_cel": 141, "edgesoncel": 141, "edison": [2, 4, 5, 7, 8, 10, 11], "edit": 3, "editor": [3, 141, 142, 199], "edward_viii": 239, "effici": [2, 4, 7, 9, 11], "effort": [2, 77, 78, 141, 143], "eight": 162, "either": [1, 4, 5, 7, 9, 10, 17, 18, 76, 115, 117, 132, 133, 135, 136, 137, 142, 144, 149, 150, 152, 153, 156, 158, 159, 160, 162, 163, 203, 236], "ek": [3, 24, 209], "ekesubdirectori": 3, "ekstrom": 239, "el": [34, 142, 144, 155, 159, 232], "element": [1, 116, 119, 196], "elev": [167, 185, 230, 237, 247], "elif": 1, "elimin": 10, "ellips": 179, "ellipsi": [113, 114, 115, 116, 117, 119, 120], "ellp": 141, "els": [1, 4, 7, 141, 142, 143, 144, 167], "elsewher": 11, "email": 142, "empir": 195, "empti": [7, 8, 52, 108, 167, 203, 224, 225, 235, 241], "en": 7, "enabl": [54, 116, 143, 147, 156, 170, 171], "encompass": [2, 167], "encount": 156, "end": [3, 11, 33, 59, 61, 97, 118, 126, 127, 129, 142, 143, 144, 156, 163, 203, 212, 229, 234, 237, 241], "end_dat": [4, 11], "end_year": 143, "enddat": [1, 4, 61, 85, 97, 99, 126, 129], "enddatefirstyear": 4, "endindex": 4, "endtim": 76, "endyear": [1, 33, 59, 61, 126, 142, 144, 148, 155, 156, 165, 203, 229], "energi": [6, 24, 140, 142, 147, 169, 179, 197, 203, 209, 229, 240], "english": 246, "enhanc": 181, "enough": [4, 7, 9, 144], "ensembl": 
180, "ensur": [1, 2, 3, 4, 7, 8, 11, 111, 143, 180], "entir": [111, 141, 143, 160, 174], "entri": [16, 20, 33, 116, 117, 119, 127, 180, 203, 230, 241], "enumer": 141, "env": [141, 142, 144], "envidat": [174, 201, 204], "environ": [141, 203], "environment": [181, 186, 197], "envis": [141, 234], "envisat": [173, 177, 195], "epsilon": 4, "eqneditor": 3, "equal": [116, 117, 141, 144, 167], "equat": [3, 156], "equatori": [162, 191], "equatorward": 111, "equival": [2, 142, 143, 144, 159, 203, 213, 240], "er": [145, 173, 195, 201, 202, 232], "era": 180, "era5": [145, 201, 202, 227], "era5obsendyear": 227, "era5obsstartyear": 227, "era5t": 180, "erdc_icefire_h": [149, 205], "erdc_icefire_l": 149, "eric": 181, "erik": 179, "errarrai": 113, "error": [1, 4, 7, 8, 71, 113, 119, 141, 142, 143, 151, 155, 165, 174, 177, 229], "erroronmiss": [156, 229], "ers_sstv4": 232, "ersst": [181, 201, 202], "esa": [145, 201, 202, 227], "esmf": [5, 142, 153], "esmf_regridweight": 144, "esmf_regridweightgen": [5, 7, 142, 148, 158], "esmvaltool": 143, "especi": [141, 152, 200, 203], "essenti": 4, "estim": [31, 145, 174, 176, 180, 191, 197, 201, 202, 236, 237, 241], "et": [1, 145, 172, 173, 175, 176, 179, 180, 181, 182, 183, 184, 186, 187, 188, 189, 190, 192, 193, 194, 195, 196, 198, 199, 201, 202, 204, 205, 214, 217, 221, 223, 239], "eta": 144, "etc": [1, 2, 3, 10, 40, 130, 142, 144, 156, 159, 160, 203, 231, 235], "etcdf4": 144, "european": 197, "evalu": 203, "even": [1, 141, 143, 144, 147, 170, 171, 239], "evenli": 237, "eventu": [5, 37, 141], "ever": 4, "everi": [143, 166, 167, 175, 178, 180, 237], "everyth": [141, 142, 144], "everytim": 143, "evolv": [141, 143, 180], "exacerb": 4, "exactli": 4, "examin": [5, 157], "exampl": [1, 2, 4, 7, 11, 12, 60, 85, 105, 106, 141, 142, 143, 149, 151, 152, 153, 156, 159, 162, 166, 168, 203], "example_e3sm": [142, 168], "example_jan_feb": 11, "exascal": [140, 147, 169, 203], "exce": 9, "except": [1, 2, 5, 7, 8, 13, 15, 52, 54, 61, 101, 113, 126, 142, 143, 
144, 149, 159, 163, 186, 240], "exchang": 200, "exclud": 18, "exclus": 172, "execut": [1, 5, 7, 140, 151, 156, 168], "exist": [1, 2, 4, 5, 7, 8, 9, 10, 11, 14, 19, 52, 83, 100, 101, 102, 116, 117, 142, 143, 144, 148, 159, 162, 163, 195, 213, 231, 235], "exit": [1, 7], "expand": [1, 7, 103], "expect": [1, 2, 4, 7, 9, 10, 11, 61, 141, 143], "expens": [5, 127], "experi": [142, 144, 148, 153, 156, 165, 195, 197, 200, 247], "experienc": 144, "explicitli": [64, 142, 144, 149, 166], "explor": [7, 142, 143, 144], "export": [142, 144], "express": [5, 76, 77, 78, 85, 105, 106], "extend": [5, 7, 8, 11, 109, 110, 114, 142, 181, 191, 195, 203, 232, 234, 237, 239], "extens": [7, 156, 180, 186], "extent": [2, 141, 145, 201, 204, 244], "extern": 239, "extra": [1, 64, 77, 78, 116, 148], "extract": [34, 38, 39, 41, 42, 43, 44, 45, 51, 57, 64, 108, 126, 127, 163], "ey": 9, "f": [141, 143, 181], "f11": [178, 196], "f13": [178, 196], "f17": [178, 196], "f8": [178, 196], "facecolor": 141, "facil": 153, "facili": [147, 170], "facilit": 180, "fact": 4, "factor": 153, "fail": [1, 3, 7, 8, 142, 144, 159], "failur": 7, "fair": 172, "fall": [97, 149], "fals": [1, 7, 8, 9, 54, 64, 82, 95, 109, 113, 115, 116, 117, 124, 139, 141, 142, 144, 148, 156, 160, 205, 208, 210, 218, 219, 220, 221, 222, 223, 224, 225, 229, 231, 233, 238, 244], "familiar": 7, "fancier": 141, "fancyarrow": 141, "far": [1, 7, 180], "farm": 7, "fast": 180, "fasten": 7, "faster": 5, "favor": [1, 10, 156], "favorit": 230, "fc": 111, "feasibl": 9, "featur": [3, 4, 123, 124, 125, 138, 141, 142, 156, 162, 163, 176, 183, 184, 203, 230, 231, 234, 235, 236, 241], "featurecollect": 111, "feb": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "februari": [174, 176, 197], "feedback": 1, "feel": 141, "ferret": 149, "ferrigno": 239, "fetch": 142, "few": [8, 141, 142, 143, 144, 148, 163, 186], "fewer": [116, 117, 119, 142, 144, 148, 151, 153, 236], "ffn": [145, 201, 202], 
"ffnv2016": 208, "fide": 194, "field": [1, 2, 3, 4, 5, 9, 31, 46, 47, 53, 64, 66, 67, 68, 69, 72, 74, 77, 78, 80, 81, 86, 87, 109, 110, 113, 116, 117, 141, 143, 147, 148, 149, 170, 171, 180, 186, 191, 193, 195, 198, 199, 206, 207, 208, 212, 217, 226, 227, 228, 231, 235, 236, 237, 240, 241, 246, 247], "field_nam": [141, 143], "fieldarrai": [113, 116], "fieldlist": [226, 227, 228, 237], "fieldnam": [1, 231], "fieldnameintitl": [46, 47, 109, 110, 141, 143], "fifth": 180, "fig": [111, 116, 117, 119, 120, 141, 214], "figsiz": [113, 114, 115, 116, 117, 119, 120, 141], "figur": [4, 111, 113, 114, 115, 116, 117, 119, 120, 141, 142, 144, 160, 163, 196, 212, 230, 231, 235, 236], "filchner": [141, 239], "file": [1, 3, 4, 5, 6, 7, 9, 10, 11, 14, 20, 33, 45, 46, 47, 52, 56, 59, 60, 61, 63, 64, 68, 69, 71, 72, 73, 74, 75, 80, 81, 82, 84, 85, 88, 93, 94, 95, 97, 98, 99, 103, 105, 109, 110, 113, 114, 115, 116, 117, 119, 120, 123, 124, 125, 126, 127, 129, 134, 138, 139, 140, 141, 142, 143, 144, 147, 148, 150, 155, 159, 160, 161, 162, 163, 165, 167, 168, 169, 170, 171, 172, 173, 174, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 210, 218, 219, 221, 223, 224, 225, 226, 229, 231, 233, 234, 235, 236, 239, 240, 241, 244], "file1": 230, "file2": 230, "file_nam": [4, 11], "filelist": 97, "filenam": [63, 68, 69, 72, 73, 74, 75, 85, 99, 105, 141], "filename_interv": [147, 170, 171], "filename_tempal": [147, 170, 171], "filename_templ": [147, 170, 171], "fileout": [113, 114, 115], "fileprefix": [33, 208], "filesfirstyear": 4, "filesuffix": 226, "fill": [141, 149, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 226, 230, 237, 243, 245, 247], "filter": 195, "fimbul": 239, "final": [11, 141, 142, 143, 144, 149, 159, 176, 180, 203, 230], "find": [1, 141, 142, 143, 144, 156, 174, 176], "finish": [7, 8, 11, 19, 53, 57, 143, 203], "first": [1, 2, 4, 7, 8, 11, 27, 42, 52, 60, 85, 94, 97, 99, 116, 117, 119, 
127, 141, 142, 143, 144, 148, 149, 156, 163, 164, 166, 177, 186, 195, 203, 216, 231, 236, 238, 239, 240, 242, 243, 244, 245, 246], "firstcal": 127, "firstyear": 4, "firstyearxtick": [116, 117, 119, 166, 231, 238, 239, 240, 242, 243, 244, 245, 246], "fit": [2, 151, 156, 197, 203], "fitzgerald": 239, "five": [149, 196], "fix": [10, 162], "fixedloc": 141, "flag": [2, 5, 7, 15, 18, 52, 64, 142, 144, 153, 203], "flatnonzero": 141, "flaw": 180, "flexibl": [5, 7, 142, 144], "flight": 193, "float": [4, 85, 91, 105, 109, 110, 111, 113, 114, 115, 116, 117, 118, 119, 120, 131, 132, 133, 136, 141, 143, 149, 239], "float_info": 4, "floor": [109, 226, 228, 240], "florida": 246, "flow": 180, "flush": 7, "flux": [145, 148, 174, 186, 201, 202, 205, 208, 229, 239, 240], "fly": [142, 144, 150, 152, 153, 163], "fm": [164, 224, 225], "fname": [88, 93, 98], "focu": [144, 195], "focus": [143, 197, 226], "folder": [1, 52, 61, 126, 158, 160, 168, 203], "foldmethod": 1, "follow": [1, 2, 3, 8, 9, 10, 11, 14, 19, 60, 85, 105, 106, 116, 117, 135, 136, 137, 141, 142, 143, 144, 147, 149, 158, 170, 171, 179, 186, 194, 195, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "font": [113, 114, 115, 116, 117, 119, 120, 141, 160, 203, 244], "forc": [4, 148, 165, 183, 184], "forecast": [180, 197], "forego": 180, "forg": [142, 144, 203], "forget": [1, 141, 142], "fork": [141, 153], "forkserv": 153, "form": [7, 18, 85, 105, 106, 131, 132, 133, 136, 153, 167], "format": [1, 4, 7, 10, 116, 117, 118, 119, 135, 136, 137, 141, 174, 178, 200, 205, 230, 240], "formul": [1, 3, 4, 5, 7, 8, 9, 11, 186], "fortran": [147, 170, 171], "fortun": 4, "found": [1, 2, 10, 11, 54, 60, 85, 94, 95, 96, 97, 99, 105, 114, 115, 116, 117, 125, 134, 141, 142, 143, 144, 147, 152, 153, 159, 168, 170, 171, 180, 203, 205, 239], "four": 180, "fowler": 174, "fr": 177, 
"fraction": [148, 236], "fram": 246, "framework": [147, 171], "free": [143, 174, 186, 198, 230], "freeboard": [185, 201, 204], "freeli": [182, 194], "freeman": 181, "freez": 174, "french": 177, "frequenc": [127, 195, 227], "frequent": 127, "fresh": 143, "freshwat": 174, "fri": 141, "fris_stereo_patch": 141, "from": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 18, 20, 27, 33, 34, 35, 37, 38, 39, 41, 42, 43, 45, 51, 61, 64, 71, 73, 75, 76, 77, 78, 79, 82, 85, 94, 97, 100, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 119, 120, 121, 122, 124, 126, 127, 128, 129, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 145, 147, 148, 150, 151, 152, 153, 156, 158, 159, 161, 162, 163, 165, 167, 170, 171, 172, 173, 174, 177, 178, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 193, 195, 196, 197, 198, 201, 202, 203, 205, 208, 209, 212, 214, 215, 217, 226, 229, 231, 232, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246], "from_dict": 143, "front": 72, "frost": 239, "fs1": 142, "ft": 1, "ftp": 178, "full": [1, 7, 13, 15, 33, 45, 46, 47, 63, 68, 69, 74, 81, 100, 109, 110, 142, 144, 147, 156, 167, 169, 170, 180, 199, 200, 203, 229, 231, 235], "fullpath": 100, "funciton": [1, 4, 11, 62, 127, 233], "function": [1, 2, 3, 4, 5, 7, 8, 10, 11, 42, 43, 45, 55, 58, 60, 66, 67, 103, 104, 106, 113, 116, 117, 124, 127, 129, 138, 141, 143, 149, 162, 203], "further": [5, 143, 180, 200], "furthermor": [10, 186], "futur": [1, 2, 5, 7, 9, 10, 143, 144, 153, 234, 236, 238], "fyke": 0, "g": [1, 2, 3, 4, 5, 7, 8, 10, 11, 45, 52, 57, 60, 64, 86, 87, 109, 116, 129, 141, 142, 143, 144, 148, 156, 157, 158, 159, 161, 162, 163, 164, 165, 167, 193, 194, 203, 212, 226, 230, 231, 236, 237, 238, 240, 242, 243, 244, 245], "ga": 186, "gain": 153, "galleri": [46, 47, 109, 110, 141, 142, 143, 144, 162, 208, 229, 231, 234, 235, 236, 241], "gallerygroup": [46, 47, 109, 110, 141, 143], "gallerylabel": 208, "gallerynam": [46, 47, 109, 110, 141, 143], "garcia": [198, 201, 202], 
"garraffo": 179, "garzoli": 179, "gb": 226, "gdp": 179, "geikieinlet": 239, "gen": 144, "gener": [1, 2, 3, 6, 7, 8, 11, 12, 13, 15, 18, 19, 33, 52, 55, 140, 141, 142, 143, 144, 150, 151, 152, 153, 154, 162, 163, 176, 178, 180, 187, 229, 230, 240], "generalize_calendar": 4, "generalized_read": [4, 11, 143], "generatelist": 1, "gent": 238, "geoid": [177, 201, 202], "geojson": [47, 123, 124, 125, 138, 139, 162, 167, 234, 236, 241], "geojsonfil": 230, "geojsonfilenam": [124, 125, 138, 139], "geojsonmeridionalvelocitytransect": 230, "geojsonpotentialdensitytransect": 230, "geojsonsalinitytransect": 230, "geojsontemperaturetransect": 230, "geojsontransect": [140, 146], "geojsonzonalvelocitytransect": 230, "geometr": [5, 230], "geometric_featur": [111, 124, 138, 162, 203, 231, 235, 246], "geophi": 176, "geophys": 3, "george_vi": 239, "geoscienc": 185, "geostroph": 177, "get": [1, 7, 53, 56, 64, 75, 79, 80, 81, 83, 84, 86, 87, 90, 91, 92, 96, 100, 121, 122, 124, 138, 140, 143, 148, 151, 152, 156, 203, 234, 236, 241], "get_aggregator_by_nam": [162, 231, 235], "get_antarctic_stereographic_project": 141, "get_comparison_descriptor": 81, "get_file_nam": 143, "get_fris_stereographic_comparison_descriptor": 141, "get_path": 141, "get_region_by_nam": [124, 138], "get_simulation_start_tim": 4, "get_start_and_end": 143, "get_task_nam": 7, "get_transect_info": 138, "getexpress": [1, 141, 143], "getfloat": 143, "getint": 1, "getwithdefault": 7, "getz": 239, "gfo": 195, "gibralt": 246, "gillet": 239, "gilson": [145, 176, 201, 202, 206, 207, 208], "git": [141, 203], "gitconfig": 142, "github": [0, 1, 4, 5, 7, 10, 144, 147, 164, 168, 170, 203, 248], "githubusercont": [141, 143], "give": [9, 141, 142, 143, 144, 166, 230], "given": [1, 2, 5, 7, 8, 9, 10, 11, 18, 54, 57, 59, 60, 64, 68, 69, 74, 82, 85, 89, 90, 91, 92, 95, 96, 97, 99, 100, 101, 102, 105, 108, 127, 131, 132, 133, 134, 135, 136, 137, 139, 142, 143, 144, 148, 151, 153, 156, 158, 159, 160, 162, 167, 180, 195, 203, 216, 226, 
230, 239, 240, 241, 244], "gj": 143, "gl": 141, "gla": 185, "glob": 103, "global": [3, 9, 22, 142, 143, 147, 150, 158, 162, 170, 175, 179, 180, 181, 182, 186, 191, 195, 200, 206, 207, 208, 209, 211, 214, 215, 216, 227, 231, 234, 235, 236, 238, 240, 242, 243, 245], "globe": 182, "globwav": 195, "glodapv2": [145, 201, 202, 208], "gm": 238, "gmail": 142, "gmpa": [5, 142], "gmpas_qu240": 1, "go": [1, 141, 142, 143, 180, 203, 230], "goddard": 193, "goe": [1, 141, 163], "good": [195, 203], "googl": 144, "gotten": 141, "gov": [3, 142, 179], "gpf": 142, "gradient": [176, 179], "grai": 149, "grammat": 8, "grant": 194, "graph": [0, 142, 143], "graphic": 200, "greater": [3, 5, 7, 142, 144, 149], "greg": 0, "gregoraian_noleap": [1, 52, 129], "gregori": 179, "gregorian": [1, 4, 10, 52, 76, 77, 78, 85, 97, 99, 105, 106, 116, 117, 127, 129, 130, 131, 132, 133, 136, 137], "gregorian_noleap": [4, 10, 143], "grei": [116, 117], "grid": [3, 5, 9, 64, 65, 67, 69, 72, 74, 75, 79, 81, 82, 109, 111, 116, 117, 121, 122, 124, 140, 141, 142, 143, 144, 147, 148, 152, 153, 158, 160, 162, 163, 168, 171, 174, 175, 177, 178, 180, 181, 183, 184, 186, 187, 195, 196, 199, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 237, 247], "gridlin": 141, "griidc": 179, "ground": 239, "group": [46, 47, 109, 110, 124, 138, 142, 143, 153, 156, 168, 193, 231, 235, 246], "grouplink": [46, 47, 109, 110, 141, 143], "groupsubtitl": [46, 47, 109, 110, 141, 143], "grow": 7, "gruber": 186, "guess": 141, "guid": [141, 143, 144, 152, 158, 162], "guidanc": 203, "guidelin": [142, 180], "gulf": [179, 183, 184, 246], "gz": 144, "h": [144, 147, 170, 177], "ha": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 13, 53, 55, 57, 62, 66, 67, 85, 95, 99, 105, 106, 141, 142, 143, 144, 147, 149, 152, 153, 159, 162, 163, 170, 171, 178, 180, 191, 195, 203, 209, 212, 214, 230, 231, 238, 240, 241, 246], "hack": [183, 184], "had": [4, 5, 66, 141], "hadisst": [145, 201, 202, 
232], "hadisst1": [183, 184], "hadlei": [145, 183, 201, 202, 216, 232], "hager": 3, "half": 238, "halfwai": 141, "halin": [206, 215, 217, 226, 228, 230, 231, 237], "halo": 5, "hamilton": 239, "hand": [141, 240], "handl": [5, 7, 10, 64, 141, 144], "hannan": 239, "happen": [1, 7, 142, 143], "happi": 143, "harbordglaci": 239, "hard": [141, 142, 144, 162], "has_opt": 1, "hasattr": 1, "haswel": [142, 153], "haumann": [145, 201, 204, 221, 223], "have": [1, 2, 4, 5, 7, 8, 9, 10, 11, 77, 78, 95, 100, 112, 116, 117, 119, 141, 142, 143, 144, 147, 151, 152, 153, 156, 157, 159, 162, 163, 167, 171, 172, 175, 180, 191, 196, 199, 203, 230, 239, 241], "hdf5": [142, 144], "hdf5_use_file_lock": [142, 144], "header": 203, "heat": [3, 27, 35, 39, 141, 143, 145, 201, 202, 213, 233, 240], "heatmap": [116, 117], "height": [28, 111, 177, 191, 195, 201, 202, 209, 214, 227], "helen": 239, "help": [119, 141, 142, 143, 144, 156, 236], "helper": [1, 4, 10, 11, 141, 143], "hemispher": [48, 49, 50, 196, 210, 218, 219, 220, 221, 222, 223, 224, 225], "her": [1, 241], "here": [1, 2, 4, 7, 11, 14, 19, 141, 142, 143, 144, 152, 161, 163, 179, 180, 196], "herein": 4, "hersback": [180, 201, 202], "hh": [10, 135, 136, 137], "hi": 1, "hide": 11, "high": [4, 144, 152, 173, 199], "higher": [5, 163], "highest": 200, "highlight": 239, "hijack": 141, "hist": [142, 144, 147, 156, 170, 171, 203], "histogram": [36, 229, 234], "histori": [33, 52, 71, 142, 143, 144, 156], "historydirectori": [33, 52, 60, 143], "historystream": [52, 60, 143], "historysubdirectori": 156, "hoffman": 0, "hokkaido": 246, "hold": [8, 143], "holder": 172, "holm": 239, "holmeswest": 239, "holt": [176, 201, 202], "home": [142, 144], "homogen": [183, 184], "hope": [143, 144, 153], "horiz_interp": 5, "horizont": [2, 3, 4, 6, 9, 115, 140, 141, 143, 167, 205, 209, 210, 211, 212, 213, 214, 215, 216, 218, 219, 220, 221, 222, 223, 224, 225, 230, 237, 247], "horizontalmap": [3, 52, 141, 143, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 
216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228], "horizontalresoltuion": 167, "horizontalresolut": [167, 230, 237, 247], "hour": [4, 10, 130, 131, 180], "hourli": 180, "hovmol": [116, 160, 231, 240, 243, 245], "hovmolleroceanbasin": 231, "hovmolleroceanregion": [140, 146, 162], "hovmolleroceanregionspotentialdens": 231, "hovmolleroceanregionspotentialtemperatur": 231, "hovmolleroceanregionssalin": 231, "hovmollerohcanomali": 240, "hovmollersalinityanomali": 243, "hovmollertemperatureanomali": 245, "how": [1, 85, 105, 106, 141, 142, 143, 144, 148, 153, 156, 162, 168], "howev": [4, 7, 8, 10, 11, 53, 143, 144, 148, 153, 156, 175, 178, 185, 187, 196, 203], "hpc": [142, 143, 144], "html": [7, 110, 140, 142, 143, 144, 147, 159, 160, 168, 170, 203, 229], "html_onli": 203, "htmlsubdirectori": [142, 144, 154, 159], "http": [0, 1, 3, 4, 5, 7, 10, 141, 142, 143, 147, 169, 170, 177, 179, 193, 196, 203, 229, 230], "huai": 181, "huang": [181, 201, 202], "huge": 151, "hull": 239, "hurrel": [183, 184, 201, 202], "hybrid": 176, "hydrograph": 200, "i": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 13, 15, 45, 46, 47, 52, 54, 56, 59, 60, 61, 62, 64, 71, 72, 76, 77, 78, 80, 81, 84, 85, 88, 93, 94, 97, 99, 103, 105, 106, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 119, 120, 124, 126, 127, 129, 130, 131, 132, 133, 135, 136, 137, 138, 141, 142, 143, 144, 145, 147, 148, 149, 151, 152, 153, 154, 156, 158, 159, 161, 163, 165, 167, 168, 169, 170, 171, 172, 173, 174, 177, 179, 180, 181, 182, 183, 184, 185, 186, 189, 191, 194, 195, 197, 198, 200, 201, 203, 204, 205, 206, 207, 209, 211, 212, 213, 214, 216, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 244, 247], "iaf": 142, "ic": [1, 2, 5, 7, 11, 38, 49, 50, 51, 140, 141, 142, 144, 145, 156, 161, 162, 168, 172, 183, 184, 190, 202, 204, 205, 206, 207, 210, 218, 219, 220, 221, 222, 223, 224, 225, 226, 229, 237, 244], "ice_salt_flux": 229, "icearea_timeseri": 244, 
"iceareacel": [147, 171], "iceareanh_climo_20180710": 244, "iceareash_climo_20180710": 244, "iceberg": [48, 140, 142, 145, 168, 201, 204, 210], "icebergobserv": 158, "icepres": [147, 171], "icesat": [145, 201, 204, 224, 225], "icesat_gridded_mean_thickness_nh_fm": 224, "icesat_gridded_mean_thickness_nh_on": 224, "icesat_gridded_mean_thickness_sh_fm": 225, "icesat_gridded_mean_thickness_sh_on": 225, "iceshelfaggregatedflux": 239, "iceshelves20200621": 162, "iceshelvesint": 205, "iceshelvestoplot": 239, "icevolumecel": [147, 171], "icoad": 181, "idea": [2, 151], "ideal": 1, "ident": [2, 4, 5, 7, 8, 9, 11, 163], "identifi": [68, 69, 74, 85, 97, 105, 106, 134, 142, 144, 163, 196], "ignor": [76, 77, 78, 116, 117], "ignore_unma": 144, "ignore_unmap": [142, 144], "imag": [2, 3, 46, 47, 141, 160, 178, 185, 193, 196, 210, 218, 219, 220, 221, 222, 223, 224, 225], "imagecapt": [46, 47, 109, 110, 141, 143], "imbi": 239, "imbie1": 239, "imbie10": 239, "imbie11": 239, "imbie12": 239, "imbie13": 239, "imbie14": 239, "imbie15": 239, "imbie16": 239, "imbie17": 239, "imbie18": 239, "imbie19": 239, "imbie2": 239, "imbie20": 239, "imbie21": 239, "imbie22": 239, "imbie23": 239, "imbie24": 239, "imbie25": 239, "imbie26": 239, "imbie27": 239, "imbie3": 239, "imbie4": 239, "imbie5": 239, "imbie6": 239, "imbie7": 239, "imbie8": 239, "imbie9": 239, "immedi": [1, 4, 7], "implement": [1, 2, 4, 5, 7, 8, 9, 10, 11], "import": [1, 3, 4, 7, 10, 11, 64, 141, 143, 153, 183, 184, 203, 236], "impress": 143, "improv": [3, 11, 179, 180, 181, 195, 199], "inaccur": 141, "inch": [111, 113, 114, 115, 116, 117, 119, 120, 160], "includ": [1, 4, 5, 7, 9, 11, 13, 18, 20, 45, 46, 47, 61, 62, 64, 85, 99, 105, 106, 126, 129, 141, 142, 143, 144, 147, 149, 156, 157, 159, 160, 164, 170, 171, 175, 177, 178, 180, 181, 182, 185, 187, 194, 196, 199, 203, 205, 210, 213, 218, 219, 220, 221, 222, 223, 224, 225, 226, 228, 229, 230, 231, 234, 235, 238], "includebolu": 238, "incompat": [142, 144], "incomplet": [142, 144], 
"incorpor": 176, "increas": [148, 151, 236], "increment": 10, "ind": 141, "independ": [5, 11, 143], "index": [34, 52, 59, 109, 110, 112, 114, 118, 140, 141, 145, 156, 168, 201, 202, 203, 205, 211, 212, 220, 221, 222, 223, 227, 228, 231, 232], "indexnino34": [140, 146, 181, 183], "indian_basin": [231, 234, 235, 236], "indic": [5, 7, 9, 11, 18, 33, 54, 64, 82, 85, 105, 106, 107, 109, 116, 117, 127, 141, 142, 143, 144, 149, 155, 156, 159, 162, 163, 167, 180, 205, 206, 207, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 226, 228, 229, 230, 231, 233, 235, 237, 238, 240, 243, 245, 247], "indirectori": 1, "indistinguish": 119, "individu": [1, 2, 7, 52, 55, 60, 117, 142, 144, 160, 196, 205], "indo": 238, "indonesian": 246, "indopacif": 238, "induc": [174, 240], "industri": 143, "inerfac": 11, "infilenam": [9, 46, 47], "infilespreprocess": 4, "inflow": 246, "info": [114, 115, 116, 117, 141, 142, 143], "inform": [8, 9, 71, 106, 112, 117, 127, 141, 142, 143, 144, 163, 179, 180, 181, 186, 189, 232, 234, 236, 237, 241], "ingest": [163, 180], "inherit": [4, 143, 195, 234], "ini": [142, 144], "init": [142, 144], "init_conda": 142, "initi": [1, 5, 7, 8, 52, 142, 144, 145, 179, 201, 202, 209, 214, 227], "initial_onli": [147, 170, 171], "inner": 141, "inner_edg": 141, "inneredg": 141, "inorgan": [182, 199], "input": [1, 3, 5, 20, 52, 61, 126, 129, 140, 143, 147, 148, 158, 163, 168, 170, 171, 172, 195, 201, 202], "input_interv": [147, 170, 171], "inputdata": 142, "inputfil": [33, 61, 126], "inquiri": 172, "ins": 153, "inset": [47, 111], "insid": [1, 152, 156, 159], "inspir": 214, "instal": [140, 141], "instanc": [1, 10, 112, 113, 116, 117, 119, 120, 132, 137, 143, 151], "instead": [2, 4, 116, 117, 141, 143, 144, 156, 167, 236], "institut": 142, "instruct": [140, 142], "instrument": [175, 185, 187], "int": [4, 33, 45, 77, 85, 92, 113, 114, 115, 116, 117, 118, 119, 120, 124, 127, 128, 129, 131, 138, 141, 213, 214], "integ": [85, 92, 167, 178], "integr": [3, 9, 143, 174, 213, 233, 
238, 240], "intend": [1, 5, 7, 116, 117, 143, 147, 170, 172, 213], "intens": [143, 153], "intent": 56, "interact": 142, "interannu": [172, 201, 202], "intercomparison": 177, "interdepend": 143, "interest": [144, 148, 165, 229, 241, 246], "interfac": [206, 207, 226], "interim": 180, "interior": [160, 182], "intermedi": [7, 142, 144, 159], "intern": [56, 64, 116, 117, 153, 156, 181, 182, 200, 237], "interp0": [218, 219, 224, 225], "interpol": [6, 7, 9, 82, 116, 140, 141, 142, 143, 144, 148, 150, 152, 158, 167, 183, 184, 230, 247], "interpolationmethod": [5, 158], "interprocess": 7, "interprocesslock": 7, "interrupt": [7, 144], "interv": [10, 97, 130, 167, 180, 230, 237, 247], "introduc": 1, "introductori": 200, "intuit": [2, 11], "invalid": [1, 5, 10, 116, 117, 131, 132, 133, 135, 136, 137, 148], "invalidcolor": [116, 117], "invert": [113, 116, 117], "invertyaxi": [113, 116, 117], "investig": [5, 7, 200], "involv": [2, 4, 5, 141, 143, 153], "io": [1, 7, 52, 141, 143, 147, 170, 230], "io_typ": [147, 170, 171], "ioerror": [71, 134], "ireland": 246, "is_boundary_cov": 141, "is_run": 7, "isboundarycov": 141, "isel": [4, 85, 105, 106, 141, 143], "iselvalu": [64, 85, 105, 106], "isinst": 1, "isn": [141, 143, 159], "issu": [1, 4, 7, 54, 144, 164, 174, 180], "item": 2, "iteritem": 7, "its": [1, 3, 7, 8, 11, 13, 141, 142, 143, 144, 149, 152, 162, 186, 203, 226, 230, 231, 234, 235, 236, 238, 241], "itself": [1, 3, 7, 116, 117, 141, 142, 143], "ivert": 141, "j": [0, 3, 4, 7, 143, 176, 179, 183, 184, 190, 191, 197], "ja": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 218, 226, 227, 228, 230, 235, 237, 247], "jai": 181, "jame": 3, "jan": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "januari": [181, 196], "japan": 246, "jason": [173, 177, 195], "jelbart": 239, "jeremi": 0, "jessica": 179, "jfm": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 218, 226, 227, 
228, 230, 235, 237, 247], "jja": [164, 210, 219, 220, 221, 222, 223], "job": [3, 7, 11, 141, 142, 144, 153, 159, 203], "job_script": 203, "johnson": 179, "join": 7, "joseph": 0, "journal": [3, 183, 184], "jpl": [177, 201, 202], "jul": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "juli": [178, 199], "jun": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "jupyt": 141, "just": [2, 4, 5, 7, 141, 142, 143, 144, 160, 167, 212], "justif": 143, "k": [3, 141], "k_0": 141, "ka": 195, "keep": [4, 8, 11, 141, 142, 143, 144], "kei": [1, 17, 19, 33, 61, 62, 64, 72, 85, 86, 87, 89, 90, 91, 92, 98, 105, 106, 109, 123, 143, 149, 230, 231, 235], "kennedi": 0, "kevin": [0, 3, 197, 201, 202], "keyword": [141, 149, 205, 206, 207, 209, 210, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 237, 247], "kg": [143, 230, 231, 235, 241], "kill": 7, "kilomet": 167, "kinet": [6, 24, 140, 142, 179, 209], "km": [142, 144, 147, 148, 150, 156, 167, 170, 178, 187, 196, 226, 230, 237, 247], "km3": 189, "km_": 141, "km_fris_stereo": 141, "know": [7, 8, 141, 142, 143, 144, 148, 158, 203], "known": [79, 81, 121, 122, 142, 144], "ku": 195, "kuroshio": [183, 184], "kurtz": 174, "kwarg": 7, "kwok": 174, "l": [3, 143, 176, 179, 208], "l2": 195, "l2p": 195, "l3": 175, "l4": 195, "la": 246, "label": [46, 113, 114, 115, 116, 117, 118, 119, 160, 208, 241], "labelcontour": [116, 117], "laboratori": [147, 169, 193, 198], "lagerloef": [175, 201, 202], "lake": 141, "lakes_50m": 141, "lancast": 246, "land": [5, 141, 142, 148, 180, 185, 229, 239], "land_50m": 141, "land_ic": 1, "land_ice_mass_chang": 229, "land_ice_mass_flux_compon": 229, "land_ice_ssh_chang": 229, "landicecav": [205, 239], "landschuetz": [186, 201, 202], "landschuetzerv2016": [145, 201, 202], "lanl": [2, 4, 8, 11, 142], "laptop": [7, 144, 203], "larg": [3, 11, 143, 148, 153, 156, 160, 162, 174, 180, 226, 233], "larger": 
143, "largest": 239, "larsen": 239, "larsen_": 239, "larsen_b": 239, "larsen_c": 239, "larsen_d": 239, "larsen_f": 239, "larsen_g": 239, "laser": 185, "last": [1, 2, 4, 5, 7, 8, 9, 10, 11, 85, 97, 99, 141, 142, 143, 144, 148, 149, 216], "lat": [5, 9, 111, 114, 115, 124, 148, 150, 179, 180, 205, 210, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 230], "lat_0": 141, "lat_t": 141, "latbinsizeatlant": 238, "latbinsizeglob": 238, "latbinsizeindopacif": 238, "latcel": [5, 141], "latenc": 180, "later": [1, 4, 7, 9, 11, 57, 80, 81, 141, 143, 144, 148, 165, 174, 180, 203, 229], "latest": [7, 142, 144, 181, 203, 248], "latex": 3, "latitud": [5, 111, 113, 114, 115, 116, 117, 124, 143, 150, 156, 158, 167, 179, 199, 210, 218, 219, 220, 221, 222, 223, 224, 225, 230, 233, 237, 238, 247], "latitude_formatt": 141, "latlon": [64, 65, 72, 79, 141, 143, 150, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 218, 219, 220, 221, 222, 223, 224, 225, 227, 228], "latlonbuff": 111, "latlongriddescriptor": 9, "latmin": 115, "latter": [2, 93, 163], "latvar": 124, "latvertex": 141, "launch": [7, 153], "launch_task": 7, "launchtask": 7, "laurindo": [3, 179, 201, 202], "lauvset": [182, 201, 202], "law": [172, 180], "lawrenc": [141, 143], "lawrimor": 181, "layer": [25, 26, 141, 143, 145, 201, 202, 211, 212, 226, 241], "layer_thick": [141, 143], "layer_thickness_edg": 141, "layerthick": [141, 143], "layerthicknessedg": 141, "layervolumeweightedaverag": [147, 170], "layervolumeweightedaverageampkg": [147, 170], "layervolumeweightedaverageoutput": [147, 170], "lazarev": 239, "lcrc": [3, 142], "lead": [142, 143, 144, 151, 155, 165, 209, 214, 229], "leap": [10, 76], "least": [3, 4, 5, 141, 147, 170, 171, 196], "leav": [3, 5, 7, 141, 142, 144, 163, 231, 238, 239, 240, 242, 243, 244, 245, 246], "left": [11, 111, 119, 141, 144, 158, 163, 210, 218, 219, 224, 225, 238], "left_label": 141, "legaci": 156, "legend": [113, 116, 117, 119, 120, 142, 144, 163], "legendloc": 119, "legendtext": [113, 119, 
120], "len": [7, 141, 143], "length": [167, 173, 234], "length_includes_head": 141, "less": [9, 141, 173, 237], "let": [142, 143, 144], "lett": 176, "letter": [3, 143, 164, 231], "level": [109, 110, 112, 114, 116, 140, 141, 149, 175, 177, 180, 195, 200, 206, 207, 209, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 226, 228, 231, 233, 237, 238, 240, 243, 245, 247], "liabil": 174, "lib": 142, "liber": 142, "librari": [106, 172], "licenc": [180, 195], "licens": [141, 143, 172], "like": [1, 2, 4, 5, 7, 11, 77, 127, 132, 133, 136, 141, 142, 143, 144, 148, 158, 230, 246], "likewis": 11, "lilli": 239, "limit": [4, 7, 116, 117, 142, 144, 153, 167, 180, 203, 231, 233], "linalg": 141, "line": [1, 2, 7, 15, 18, 112, 113, 114, 116, 117, 119, 120, 141, 142, 144, 153, 159, 200, 203, 214, 229, 231, 233, 237, 238, 240, 241, 243, 245, 247], "line2d": 141, "linear": [149, 167, 206, 207, 208, 209, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 236, 237, 247], "linearli": 5, "linecolor": [112, 113, 114, 116, 117, 119, 120], "liner": 186, "linestr": 230, "linestyl": [113, 116, 117, 119, 120, 141], "linewidth": [112, 113, 114, 116, 117, 119, 120, 141, 234], "link": [46, 47, 109, 110, 142, 143, 144, 163], "linscal": [141, 149, 205, 210, 226], "linspac": [5, 141, 149, 167, 206, 207, 209, 213, 214, 217, 220, 221, 222, 223, 226, 227, 228, 230, 236, 237, 247], "linthresh": [141, 149, 205, 208, 210, 226], "linux": [142, 144], "list": [0, 1, 2, 4, 5, 7, 8, 13, 14, 15, 19, 33, 45, 52, 60, 61, 62, 64, 72, 85, 94, 97, 99, 103, 105, 106, 108, 113, 116, 117, 119, 120, 125, 126, 129, 140, 141, 142, 143, 144, 147, 150, 159, 162, 164, 167, 168, 170, 171, 175, 178, 185, 187, 196, 205, 206, 207, 210, 213, 218, 219, 224, 225, 226, 227, 228, 230, 231, 235, 236, 237, 239, 240, 241, 242, 243, 245, 246], "littl": [141, 142, 143, 144], "liu": [181, 201, 202], "livermor": [141, 143], "ll": [10, 141, 143], "llc": [141, 143], "load": [4, 141, 142, 144], "local": [11, 143, 147, 170, 174, 181, 203], 
"locarnini": [199, 201, 202], "locat": [1, 3, 9, 111, 119, 127, 141, 142, 144, 149, 152, 156, 159, 162, 167, 177, 203, 237, 239, 247], "lock": 7, "log": [7, 52, 56, 82, 127, 141, 142, 143, 144, 149, 153, 159, 208, 210, 226, 236, 247], "logfil": 7, "logfilenam": 7, "logg": 142, "logger": [52, 56, 82, 127, 139, 141, 143], "logic": [4, 9, 113, 116, 117], "logical_and": 141, "logical_or": 141, "login": [7, 153], "logsdirectori": 7, "logssubdirectori": [7, 159], "lon": [5, 9, 111, 114, 115, 124, 148, 150, 179, 180, 205, 210, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 230], "lon0": 115, "lon_0": 141, "loncel": [5, 141], "long": [1, 10, 142, 143, 148, 165, 181, 229], "longer": [111, 141, 147, 170, 171, 191], "longitud": [5, 114, 115, 116, 117, 124, 143, 150, 158, 167, 179, 210, 218, 219, 220, 221, 222, 223, 224, 225, 230, 237, 247], "longitude_formatt": 141, "lonvar": 124, "lonvertex": 141, "look": [1, 3, 5, 81, 142, 143, 144, 203], "loop": [1, 141, 143], "lot": [141, 142, 144, 163], "low": [142, 144], "lower": [111, 119, 148, 213, 230, 237, 240, 247], "lowercas": [2, 52, 142, 143], "lowerleft": 111, "lsit": [85, 105], "lsqr": 141, "luca": 179, "luke": 0, "lumpkin": [3, 179], "lx": 141, "ly": 141, "m": [1, 3, 4, 7, 141, 142, 143, 147, 167, 170, 171, 179, 181, 183, 184, 192, 199, 203, 206, 207, 208, 213, 228, 230, 231, 235, 237, 240, 241, 247], "m3": 143, "mach": 142, "machin": [2, 7, 141, 144, 152, 153, 168, 203], "machine_nam": 203, "made": [2, 4, 11, 66, 67, 142, 143, 148, 162, 163, 172, 177, 183, 184, 185, 194, 203, 239], "madt": 177, "magaard": 3, "magma_r": 209, "magnitud": [3, 237], "mai": [5, 7, 8, 11, 13, 57, 77, 78, 85, 116, 117, 119, 127, 141, 142, 143, 144, 148, 151, 152, 153, 156, 158, 159, 160, 164, 165, 167, 180, 200, 203, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 229, 230, 232, 235, 236, 237, 240, 247], "mail": 189, "main": [1, 3, 7, 12, 27, 33, 72, 109, 117, 140, 141, 142, 143, 144, 149, 156, 159, 163, 167, 180, 203, 
212, 229, 236, 248], "main_run": 163, "maincolor": [229, 234], "mainrunconfigfil": 163, "mainrunnam": [2, 33, 141, 142, 143, 144, 163], "mainten": 143, "major": [4, 141, 191, 195, 236], "make": [1, 2, 3, 4, 7, 8, 9, 11, 47, 54, 77, 78, 83, 102, 123, 124, 138, 141, 143, 144, 148, 153, 156, 159, 167, 203, 205, 217, 226, 236], "make_axes_locat": 141, "make_directori": [1, 7], "make_region_mask": 162, "maket": 205, "mali": 143, "mamba": 142, "manag": [7, 143, 144], "mani": [5, 7, 57, 118, 138, 142, 144, 149, 150, 151, 152, 153, 160, 180, 195, 246], "manipul": [4, 11, 45, 129, 149], "manual": [142, 144, 159], "map": [1, 4, 5, 6, 7, 9, 20, 72, 82, 109, 110, 111, 112, 114, 115, 116, 117, 123, 140, 141, 142, 143, 144, 148, 149, 150, 156, 158, 159, 163, 177, 196, 203, 205, 206, 207, 208, 209, 210, 211, 212, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 236], "map_obs_eke_0": 144, "map_oqu480_to_0": [142, 144], "map_oqu480_to_6000": [142, 144], "map_oqu480_to_sose_transects_5km_bilinear": [142, 144], "map_oqu480_to_woce_transects_5km_bilinear": 142, "map_vari": 11, "mapmpitask": [142, 153], "mapparallelexec": [142, 153], "mappingfilenam": 9, "mappingfileprefix": 82, "mappingsubdirectori": [152, 159], "mar": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "march": 174, "margin": [186, 191], "mariano": [3, 179], "marin": 239, "mark": [0, 116, 117, 118, 119, 140, 160, 168, 231, 238, 239, 240, 242, 243, 244, 245, 246], "marker": [1, 113, 119, 120], "marku": 174, "mask": [9, 20, 21, 32, 36, 38, 40, 66, 74, 77, 78, 80, 81, 109, 110, 116, 117, 123, 124, 127, 138, 139, 141, 142, 143, 144, 148, 152, 156, 162, 163, 203, 230, 231, 235, 236, 237, 239, 241, 247], "maskedclimatologyfilenam": 143, "maskfilenam": [124, 138, 139], "maskmaxthreshold": [109, 110], "maskminthreshold": [109, 110], "maskvari": [77, 78], "mass": [3, 174, 229], "massonnet": 174, "mastervariablelist": 33, "match": [94, 
97, 142], "materi": [200, 239], "mathemat": 4, "matplotlib": [4, 111, 112, 116, 117, 119, 120, 141, 149, 166, 167, 229], "matrix": 141, "matter": [203, 208], "matthew": 0, "matusevitch": 239, "max": [4, 26, 37, 61, 64, 80, 81, 83, 84, 109, 110, 114, 212, 236, 237], "max_depth": 143, "max_edg": 141, "maxchunks": [85, 156], "maxdai": 118, "maxedg": 141, "maximenko": [149, 214], "maximum": [7, 85, 113, 114, 115, 116, 117, 118, 119, 120, 143, 156, 167, 212, 213, 228, 235, 236, 238, 241], "maxlat": 237, "maxlength": 111, "maxlevelcel": 143, "maxpoint": 119, "maxtitlelength": [113, 114, 115, 116, 117, 119, 120], "maxvaluewithinoceanlayerregion": [147, 170], "maxvaluewithinoceanregion": [147, 170], "maxvaluewithinoceanvolumeregion": [147, 170], "maxxtick": [116, 117, 118, 119], "mayb": 153, "mayra": 179, "mazloff": [194, 201, 202], "mb": 144, "mcwilliam": 238, "md": [3, 4], "me": [5, 11], "mean": [1, 3, 4, 5, 7, 8, 40, 77, 78, 109, 119, 127, 128, 129, 141, 142, 143, 144, 153, 157, 167, 179, 180, 189, 191, 192, 196, 198, 205, 209, 214, 231, 233, 235, 238, 239], "mean_veloc": 179, "meanfirstyear": 4, "meaning": [109, 143], "meaningless": 148, "meant": [1, 2, 11, 142, 143, 195], "meantim": 7, "measur": [177, 178, 185, 195, 235], "meat": 1, "mechan": [7, 8, 116, 117], "mediterranean_basin": [231, 234, 235, 236], "medium": 197, "meet": [147, 169], "meier": 174, "melt": [21, 38, 143, 145, 158, 174, 201, 202, 205, 220, 221, 239], "meltingsh": 221, "meltrat": 205, "meltsubdirectori": 158, "melttabl": 205, "meltwat": [172, 201, 202], "member": [1, 7, 9, 33, 35, 37, 54, 61, 64, 126, 143, 147, 170, 171, 180], "membernam": 1, "memori": [7, 9, 142, 143, 144, 148, 151, 153, 156, 236], "mendelssohn": 239, "mention": [4, 7, 11], "meop": 236, "meq": 208, "merg": [1, 7, 145, 173, 183, 195, 201, 202, 216], "meridion": [3, 35, 37, 145, 179, 201, 202, 226, 230, 233, 237, 238], "meridionalheattransport": [140, 146, 147, 170, 197, 203], "meridionalheattransport_year": 203, 
"meridionalheattransportampkg": [147, 170], "meridionalheattransportlat": [147, 170], "meridionalheattransportlatz": [147, 170], "meridionalheattransportoutput": [147, 170], "meridionalveloc": [142, 144, 226, 230, 237], "mertz": 239, "mesh": [3, 5, 9, 20, 44, 64, 71, 82, 116, 117, 124, 139, 141, 142, 143, 144, 147, 148, 150, 152, 153, 162, 167, 170, 203, 213], "meshdescriptor": [9, 64, 65, 75, 82], "meshfilenam": 139, "meshnam": [9, 124, 141], "mesoscal": [147, 170], "messag": [1, 8, 142, 144, 148], "meteorolog": [178, 196], "meter": [143, 206, 207, 213, 226, 228, 240], "method": [1, 2, 3, 4, 5, 7, 8, 9, 10, 13, 15, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 60, 61, 64, 72, 73, 75, 82, 109, 123, 124, 126, 130, 138, 142, 144, 148, 153, 158, 174, 176, 180, 181, 183, 184, 186, 191], "methodologi": 167, "mexico": 179, "mg": 208, "mht": [1, 145, 158, 201, 202, 233], "mht_trenberthcaron": 233, "mhtsubdirectori": 158, "miami": 179, "microwav": [178, 185, 187, 196], "mid": 156, "middl": [212, 240], "might": [4, 5, 7, 9, 97, 141, 142, 143, 144, 148, 151, 156, 165, 229], "milena": 0, "millenia": [147, 170], "million": [4, 199], "min": [4, 7, 26, 61, 64, 80, 81, 83, 84, 109, 110, 114, 181, 212, 236, 237], "min_depth": 143, "mindai": 118, "mini": 142, "miniconda3": [142, 144], "minim": 11, "minimum": [4, 142, 143, 144, 147, 148, 167, 170, 171, 203, 212, 213, 230, 235, 236, 237, 241, 247], "minimumlatitud": [210, 218, 219, 220, 221, 222, 223, 224, 225], "minlat": 237, "minmax": 212, "minu": 212, "minut": [4, 10, 130, 131], "minvaluewithinoceanlayerregion": [147, 170], "minvaluewithinoceanregion": [147, 170], "minvaluewithinoceanvolumeregion": [147, 170], "mishonov": 199, "miss": [1, 3, 9, 141, 144, 192], "mission": [147, 169, 173, 195, 200], "mix": [25, 26, 141, 145, 201, 202, 211, 212, 226, 241], "mixedlayerdepth": [147, 170, 226, 241], "mixedlayerdepthsampkg": [147, 170], 
"mixedlayerdepthsoutput": [147, 170], "mkdir": [142, 144], "mke": 3, "ml": 208, "mld": [1, 3, 25, 26, 145, 158, 201, 202, 211, 212], "mldsubdirectori": 158, "mm": [1, 10, 135, 136, 137, 142, 144], "mmol": 208, "moc": [1, 20, 37, 238], "mocstreamfunction_year": 203, "mode": [56, 142, 144, 148, 153, 159, 195, 236], "model": [1, 2, 3, 5, 37, 72, 109, 110, 114, 115, 117, 140, 141, 142, 143, 144, 147, 148, 149, 150, 156, 163, 167, 169, 170, 171, 176, 177, 180, 183, 184, 189, 203, 205, 206, 207, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 233, 236, 237, 238, 247], "modelarrai": [114, 115, 117], "modeltitl": [114, 115, 117], "modelvsob": 1, "modif": [11, 66, 67, 141, 147, 169, 180, 191], "modifi": [1, 2, 4, 5, 7, 8, 9, 10, 11, 141, 142, 143, 144, 147, 156, 162, 168, 170, 171, 203, 218, 219, 224, 225, 244], "modul": [1, 2, 4, 11, 19, 140, 142, 147, 160, 162, 170, 171], "mol": 208, "momentum": 3, "mona": 246, "monitor": [5, 193], "montegut": [176, 201, 202], "month": [1, 4, 10, 45, 68, 69, 74, 76, 77, 78, 127, 128, 129, 130, 131, 141, 142, 143, 144, 147, 148, 153, 157, 164, 170, 175, 180, 196, 205, 206, 207, 209, 210, 211, 212, 213, 214, 217, 218, 219, 224, 225, 226, 227, 228, 231, 238, 239, 240, 242, 243, 244, 245, 246], "monthdictionari": [62, 63, 64, 68, 69, 72, 74, 80, 81, 84, 109, 143], "monthli": [26, 34, 38, 39, 41, 42, 43, 45, 51, 61, 62, 64, 77, 78, 80, 81, 83, 84, 142, 143, 144, 147, 153, 170, 171, 175, 177, 178, 180, 181, 186, 191, 194, 195, 196, 199, 212, 228, 231, 238, 240, 242, 243, 244, 245], "monthvalu": [77, 78, 127], "more": [2, 7, 9, 11, 12, 62, 64, 94, 116, 117, 141, 142, 143, 144, 148, 149, 153, 156, 158, 179, 180, 183, 184, 203, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 217, 226, 227, 228, 230, 231, 232, 233, 234, 235, 236, 238, 239, 240, 241, 242, 243, 245], "moscow_univers": 239, "most": [1, 4, 10, 11, 52, 77, 78, 142, 143, 144, 147, 153, 156, 160, 162, 163, 168, 170, 171, 180, 203, 
239, 246], "mostli": [116, 117, 143, 144], "motiv": 141, "moubrai": 239, "mouginot": 190, "mount": 199, "mous": [46, 47, 109, 110], "move": [1, 4, 5, 6, 7, 45, 116, 117, 119, 120, 140, 141, 156, 165, 168, 231, 233, 238, 239, 240, 242, 243, 244, 245, 246], "movingaveragemonth": [157, 231, 239, 246], "movingaveragepoint": [45, 116, 117, 119, 120, 128, 129, 233, 238, 240, 242, 243, 244, 245], "movingaveragepointsclimatolog": 238, "mozambiqu": 246, "mpa": [0, 1, 2, 3, 4, 6, 7, 9, 10, 11, 20, 34, 38, 39, 41, 42, 43, 44, 45, 46, 47, 51, 52, 63, 64, 66, 67, 68, 69, 74, 76, 77, 78, 80, 81, 83, 84, 85, 97, 99, 105, 106, 109, 110, 124, 127, 129, 130, 133, 134, 136, 137, 138, 139, 141, 148, 149, 150, 151, 152, 153, 156, 159, 160, 162, 163, 167, 168, 169, 201, 208, 212, 213, 230, 231, 232, 235, 236, 237, 238, 239, 241, 247], "mpas_analysi": [1, 3, 11, 140, 141, 142, 143, 144, 149, 152, 159, 162, 203], "mpas_climatology_task": [27, 141, 143], "mpas_dev": [141, 142, 203], "mpas_field_nam": [141, 143], "mpas_tool": [14, 17, 18, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 50, 51, 52, 61, 79, 80, 81, 82, 83, 84, 85, 100, 109, 114, 115, 123, 124, 126, 141, 143, 203], "mpas_xarrai": [4, 6, 140, 143], "mpasanalysisconfigpars": [1, 32, 141], "mpascic": [61, 203], "mpasclimatologi": [61, 148], "mpasclimatologysubdirectori": 159, "mpasclimatologytask": [21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 35, 36, 37, 48, 49, 50, 64, 141, 143], "mpasconfigpars": [14, 17, 18, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 50, 51, 52, 61, 79, 80, 81, 82, 83, 84, 85, 100, 109, 114, 115, 123, 126, 141, 143], "mpasfieldnam": [46, 47, 109, 110, 141, 143], "mpasinterpolationmethod": [5, 148], "mpasmeshdescriptor": 9, "mpasmeshnam": [142, 144, 156, 203], "mpaso": [61, 142, 144, 147, 170, 203], "mpaso_in": [142, 144, 147, 156, 170, 203], "mpasrelativedelta": [4, 10, 137], 
"mpasseaic": [147, 171, 203], "mpassi": [142, 144], "mpassi_in": [142, 144, 147, 156, 171, 203], "mpastimeseriesocean": 126, "mpastimeseriesseaic": 126, "mpastimeseriestask": [34, 38, 39, 41, 42, 43, 45, 51, 129], "mpatch": 141, "mpax_xarrai": 11, "mpi": [142, 144, 148, 153], "mpirun": [142, 153], "mpl": 141, "mpl_toolkit": 141, "msla": 177, "mticker": 141, "mu": 208, "much": [1, 3, 4, 5, 143, 148, 151, 153, 158], "mulebreen": 239, "multi": [153, 156, 195], "multichannel": [178, 196], "multipl": [7, 8, 9, 57, 116, 117, 143, 151, 153, 174, 193, 195, 203], "multiprocess": [8, 153], "multiprocessingmethod": 153, "multiscal": [147, 170], "must": [1, 3, 4, 7, 8, 10, 52, 64, 73, 75, 77, 78, 100, 106, 119, 142, 143, 144, 147, 148, 152, 153, 159, 163, 170, 171, 194, 195, 203, 229, 230], "my": [1, 7, 11, 141, 230], "my_run": 159, "myarg": [1, 203], "mydefaultvalu": 1, "myer": 239, "mynewarg": 1, "myrun": [142, 144, 203], "mytask": [1, 52, 203], "n": [1, 7, 141, 142, 144, 179, 186, 203, 238], "n_boundary_vertic": 141, "n_edges_on_cel": 141, "n_inner_edg": 141, "n_step": 141, "n_vert_level": 143, "nalysi": 144, "name": [1, 2, 7, 8, 9, 10, 11, 17, 18, 19, 33, 45, 46, 47, 52, 54, 59, 61, 62, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 84, 85, 88, 93, 94, 95, 96, 97, 98, 99, 100, 105, 106, 108, 109, 110, 112, 113, 114, 115, 116, 117, 121, 122, 123, 124, 126, 127, 129, 136, 137, 138, 141, 142, 143, 144, 147, 149, 158, 159, 161, 162, 163, 164, 167, 170, 171, 189, 203, 205, 208, 226, 230, 231, 235, 237, 238, 240, 247], "named_color": 229, "namelist": [1, 4, 10, 52, 54, 60, 98, 142, 143, 144, 147, 169, 170, 171], "namelistfilenam": 1, "nan": [9, 77, 78, 116, 117, 127], "nansen": 239, "nare": 246, "nasa": [145, 177, 178, 185, 187, 193, 196, 201, 202, 204, 215], "nasateam": [145, 201, 204, 218, 219], "nasateam_nsidc0051": [218, 219], "nation": [141, 143, 147, 169, 181, 186, 196, 197, 198, 199, 201, 202], "nativ": [141, 152, 158, 167, 180, 203], 
"naturalearthfeatur": 141, "nboundaryvertic": 141, "nc": [3, 4, 11, 141, 142, 144, 147, 158, 170, 171, 179, 188, 201, 202, 203, 210, 218, 219, 221, 223, 224, 225, 233, 239, 240, 244], "ncar": [183, 184, 197, 201, 202], "ncclimo": [61, 63, 82, 83, 84, 143, 144, 148, 153], "ncclimomodel": 61, "ncclimoparallelmod": [144, 153], "ncclimothread": 153, "ncei": 186, "ncell": [85, 105, 106, 141], "ncep": 197, "nco": [148, 156], "ncremap": [5, 9, 64, 148, 153], "ncremapparallelexec": 153, "ndarrai": [9, 114, 115, 116, 117], "ne4_oqu480": [142, 144], "nearest": [5, 148, 158], "nearestdto": 5, "neareststod": [5, 82, 148, 158], "nearli": [141, 143, 153, 163, 164], "neatli": 2, "necessari": [1, 3, 7, 60, 82, 141, 143, 148, 153, 158, 203, 230], "nedg": 141, "nedgesoncel": 141, "need": [1, 4, 5, 7, 8, 9, 10, 11, 13, 33, 55, 62, 64, 77, 78, 82, 99, 116, 117, 119, 140, 141, 142, 143, 144, 147, 148, 152, 153, 156, 158, 159, 162, 163, 164, 169, 170, 171, 237, 238], "needless": 127, "neg": [4, 167, 179, 212, 230, 237, 241, 247], "neighbor": [5, 148, 158], "neither": [109, 110, 114, 141], "nersc": [2, 142], "nesdi": 199, "netcdf": [5, 127, 142, 143, 144, 147, 153, 170, 200, 239, 241], "netcdf4": [4, 142, 144], "netcdftim": 4, "network": 186, "neural": 186, "neutral": [194, 236], "nevertheless": [7, 11], "new": [1, 2, 3, 4, 7, 8, 9, 10, 11, 14, 19, 45, 108, 111, 112, 129, 140, 142, 143, 144, 150, 152, 176, 180, 183, 184, 191, 195, 199], "newest": 181, "newli": 180, "next": [1, 7, 8, 141, 143, 159, 174, 210, 218, 219, 220, 221, 222, 223, 224, 225], "nh": [48, 49, 50, 218, 220, 222, 224], "ni": [155, 159, 232], "nice": 7, "nickerson": 239, "nightmar": 143, "nimbu": [178, 196], "nine": 196, "ninneredg": 141, "ninni": 239, "nino": [34, 142, 144, 145, 155, 158, 162, 201, 202, 232], "nino3": [162, 232], "nino34": 1, "nino4": [162, 232], "ninosubdirectori": 158, "nivl": 239, "ni\u00f1o": [142, 144], "nml": [86, 87, 98], "no3": 208, "no_": [2, 18, 142, 144, 159], "no_anomali": 159, "no_bgc": 
142, "no_climatologi": 52, "no_climatologymapsst": 159, "no_ek": 142, "no_iceberg": 142, "no_index": [142, 159], "no_landicecav": 142, "no_log": [142, 144], "no_ocean": 2, "no_seaic": 159, "no_timeseri": [2, 142, 144, 159], "no_timeseriesohc": 2, "noaa": [145, 179, 181, 183, 186, 199, 201, 202, 216], "noation": 86, "noatm_20180710": 233, "nodc": [199, 201, 202], "node": [7, 142, 143, 144, 148, 151, 153], "nois": 238, "noisi": 148, "noleap": [76, 77, 78, 85, 97, 99, 105, 106, 127, 131, 132, 133, 136, 137, 143], "noll": 239, "nompi_": 142, "non": [1, 11, 152, 174, 186, 195, 236], "none": [1, 4, 9, 14, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 34, 35, 36, 37, 38, 39, 40, 41, 44, 45, 46, 47, 48, 49, 50, 51, 52, 61, 62, 64, 74, 76, 77, 78, 82, 85, 88, 93, 96, 97, 99, 100, 105, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 124, 126, 127, 129, 130, 138, 139, 141, 142, 143, 147, 152, 153, 163, 170, 171, 218, 219, 231, 240, 244], "nor": [4, 141], "nordenskjold": 239, "norm": [112, 141, 149, 205, 206, 207, 209, 210, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 236, 237, 247], "normal": [7, 11, 112, 141, 143, 160, 234], "normal_veloc": 141, "normalveloc": [141, 147, 170], "normarg": 149, "normargsdiffer": [141, 205, 206, 207, 208, 209, 210, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 237, 247], "normargsresult": [141, 205, 206, 207, 208, 209, 210, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 237, 247], "normtyp": [149, 236], "normtypediffer": [141, 205, 206, 207, 208, 209, 210, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 237, 247], "normtyperesult": [141, 205, 206, 207, 208, 209, 210, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 237, 247], "north": [115, 178, 246], "north_atlant": [65, 79], "north_pacif": [65, 79], "northern": [218, 219, 220, 221, 222, 223, 224, 225], "northernmost": [210, 219, 220, 221, 222, 223, 225], "nosuffix": 1, "notabl": [11, 142, 143, 238], "notat": 87, "note": [1, 2, 4, 10, 97, 103, 116, 117, 127, 135, 136, 137, 
142, 144, 153, 156, 159, 179, 194, 203, 228, 234, 237, 240, 241], "notebook": 141, "noth": [1, 5], "notic": [152, 153], "notifi": 180, "nov": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "novemb": [183, 184, 196], "now": [2, 5, 7, 8, 142, 144], "np": [7, 141, 143, 237, 238, 240, 243, 245, 247], "npstere": 115, "nrt": 177, "nsf": 194, "nsidc": [178, 187, 196, 201, 204], "ntime": 141, "num2dat": 4, "number": [2, 4, 7, 10, 33, 45, 76, 77, 78, 85, 103, 105, 106, 111, 113, 114, 115, 116, 117, 118, 119, 120, 124, 127, 128, 129, 133, 136, 138, 142, 143, 144, 148, 149, 151, 153, 156, 167, 168, 177, 179, 180, 203, 231, 233, 234, 236, 238, 239, 240, 242, 243, 244, 245, 246], "numer": [10, 120, 180, 200], "numpi": [4, 5, 9, 85, 114, 115, 116, 117, 136, 141, 143, 149, 167, 206, 207, 209, 213, 214, 217, 220, 221, 222, 223, 226, 227, 228, 230, 236, 237, 247], "numuppertick": [116, 117], "nutrient": [198, 199, 201, 202], "nvertic": 141, "nvertlevel": [85, 105, 106, 141, 143], "o": [1, 3, 7, 141, 143, 144, 155, 159, 169, 193, 201, 202, 203, 208, 213, 226, 232], "o2": 208, "o_in": [147, 156, 170, 203], "ob": [3, 158, 167, 193, 203, 208, 229, 230, 236, 237, 241, 247], "object": [1, 4, 8, 9, 10, 14, 15, 17, 76, 77, 78, 82, 85, 97, 106, 107, 108, 112, 123, 127, 130, 131, 132, 133, 134, 135, 137, 141, 143, 198, 203, 230], "obpg": 193, "obscolor": 234, "obsdescriptor": 75, "obsendyear": 216, "obser": 236, "observ": [1, 2, 3, 5, 9, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 48, 49, 50, 60, 72, 73, 75, 77, 78, 109, 110, 114, 115, 116, 117, 119, 124, 140, 141, 142, 143, 145, 148, 149, 150, 152, 159, 165, 167, 168, 177, 178, 179, 180, 183, 184, 186, 200, 202, 203, 204, 212, 213, 220, 222, 226, 229, 230], "observationdata": [232, 233], "observationo": 5, "observationprefix": [210, 218, 219, 224, 225], "observationslabel": 208, "obsfilenam": 124, "obslegend": 119, "obslist": 234, "obsmean": 119, "obsstartyear": 216, 
"obssubdirectori": 158, "obsuncertainti": 119, "obtain": 172, "obviou": [4, 5, 116, 117], "obvious": [142, 203], "occru": 156, "occupi": [148, 151, 236], "occur": [13, 15, 94, 144, 148, 156, 180], "oce130007": 194, "ocean": [1, 2, 3, 5, 7, 52, 61, 80, 81, 84, 126, 140, 141, 142, 143, 144, 145, 148, 156, 159, 161, 162, 165, 167, 168, 172, 174, 179, 180, 181, 182, 186, 189, 191, 193, 196, 197, 199, 200, 202, 203, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 245, 246, 247], "ocean_seaic": 144, "oceanbasin": 236, "oceanclimatolgytask": 141, "oceanhistogram": [140, 146, 162], "oceanhistorysubdirectori": [142, 144, 156, 203], "oceannamelistfilenam": [1, 142, 144, 156], "oceanobserv": [158, 203], "oceanograph": [3, 198, 199, 200, 201, 202], "oceanographi": 191, "oceanpreprocessedrefer": 161, "oceanrefyearclimatolgytask": 141, "oceanregionalprofil": [140, 146, 162], "oceansit": 200, "oceanstreammap": 1, "oceanstreamsfilenam": [1, 142, 144, 156], "oceanvariablemap": 1, "ocn": [142, 144, 156], "oct": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "octob": 177, "oean": 191, "oec60to30v3": 156, "off": [2, 142, 143, 144, 163], "offici": 142, "offlin": 9, "offset": [4, 106, 109, 209, 214], "often": [9, 142, 143, 144, 165, 203, 229], "ohc": [4, 27, 39, 141, 143, 203, 213, 240], "ohc_": 240, "ohc_anom": 143, "ohc_timeseri": [2, 4], "oi": [145, 183, 201, 202, 216], "old": [140, 142], "older": [147, 156, 170, 171, 203], "olsen": [182, 201, 202], "omit": 142, "onc": [5, 7, 8, 11, 57, 144, 148, 151, 163, 203, 236], "ond": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "one": [1, 3, 4, 5, 7, 10, 11, 47, 55, 57, 59, 61, 62, 64, 72, 76, 77, 78, 81, 85, 94, 97, 99, 105, 106, 109, 116, 117, 124, 127, 135, 136, 137, 141, 142, 143, 144, 149, 153, 156, 203, 210, 216, 
218, 219, 220, 221, 222, 223, 224, 225, 226, 230, 231, 232, 235], "onelin": 142, "ones": [1, 140, 142, 144], "ongo": [147, 169], "onli": [1, 3, 4, 5, 7, 8, 9, 10, 11, 54, 57, 62, 64, 77, 78, 85, 93, 98, 99, 105, 106, 108, 116, 117, 127, 130, 141, 142, 143, 144, 148, 153, 158, 159, 163, 165, 191, 194, 195, 199, 203, 205, 206, 207, 210, 217, 218, 219, 221, 223, 224, 225, 226, 228, 229, 232, 233, 234, 236, 237, 238, 239, 241], "onlin": [6, 140, 142, 144, 147, 162, 169, 180], "only_": [142, 144, 159], "only_climatologi": 159, "only_ocean": [142, 144, 159], "only_publicob": 159, "onto": [141, 239], "onward": [180, 183, 184], "op": [61, 64, 80, 81, 83, 84], "open": [4, 7, 11, 85, 99, 105, 141, 142, 143, 144, 156, 203, 246], "open_dataset": [141, 143], "open_mfdataset": 11, "open_multifile_dataset": [4, 11], "openli": 173, "openmultifiledataset": 106, "oper": [1, 4, 7, 11, 45, 61, 64, 80, 81, 83, 84, 142, 143, 144, 151, 153, 156], "opportun": 141, "oppos": [141, 143, 163], "optic": 193, "optim": [147, 169, 180, 181, 183, 184, 191], "optin": 106, "optioin": 61, "option": [1, 2, 4, 5, 7, 9, 10, 11, 13, 14, 15, 17, 18, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 54, 55, 56, 59, 61, 62, 64, 68, 69, 72, 74, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 88, 93, 97, 98, 99, 100, 105, 106, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 123, 124, 126, 127, 128, 129, 131, 132, 133, 136, 137, 138, 142, 143, 144, 147, 149, 150, 151, 152, 153, 154, 155, 156, 158, 160, 161, 162, 163, 164, 165, 167, 168, 170, 171, 189, 203], "oqu240": [142, 144, 156], "oqu480": [142, 144], "orbview": 193, "order": [3, 5, 8, 9, 116, 117, 143, 147, 148, 158, 162, 170, 171], "ordereddict": [13, 15, 17], "org": [147, 169, 193, 196, 229], "orient": 179, "origin": [74, 107, 108, 141, 180, 200, 229, 244], "originalfieldnam": 116, "orrs18to6": 156, "orrs30to10v3": 156, "orsi": [200, 201, 202], "oserror": [1, 7, 101], 
"ostm": 177, "osx": [142, 144], "other": [1, 2, 3, 7, 8, 10, 11, 45, 52, 57, 60, 64, 65, 77, 78, 113, 114, 115, 116, 117, 119, 120, 130, 141, 142, 143, 144, 152, 153, 156, 157, 158, 159, 164, 167, 173, 178, 180, 200, 203, 210, 218, 219, 220, 221, 222, 223, 224, 225, 237, 238, 240, 247], "other_str": 11, "otherwis": [8, 56, 85, 95, 115, 119, 124, 141, 142, 148, 228, 229, 239], "our": [141, 143, 174, 175, 178, 185, 187, 196], "out": [1, 4, 7, 9, 11, 13, 15, 45, 77, 78, 109, 110, 141, 142, 143, 144, 147, 148, 151, 152, 153, 155, 156, 157, 159, 160, 163, 165, 170, 171, 200, 203, 229, 230, 231, 236, 237, 238, 239, 240, 242, 243, 244, 245, 246, 247], "out_file_label": [141, 143], "out_filenam": 141, "outdat": [11, 142], "outfilelabel": [46, 47, 109, 110, 141, 143], "outfilenam": [9, 45], "outfileprefix": 72, "outfilesuffix": [124, 138], "outlin": [116, 117], "outlinevalid": [116, 117], "output": [1, 3, 4, 5, 7, 33, 34, 35, 38, 39, 40, 41, 42, 43, 45, 46, 47, 51, 52, 56, 61, 64, 72, 82, 93, 97, 109, 110, 124, 126, 127, 138, 140, 141, 147, 148, 152, 153, 154, 156, 158, 160, 163, 168, 169, 170, 171, 194, 195, 208, 226, 230, 231, 235, 236, 237, 238, 239, 240, 242, 243, 244, 245, 247], "output_interv": [147, 170, 171], "outputfil": 33, "outsid": [1, 4, 6, 109, 110, 114, 116, 117, 140, 141, 149], "outweightfilenam": 7, "over": [1, 3, 7, 46, 47, 64, 72, 77, 78, 109, 110, 116, 117, 118, 119, 120, 127, 128, 129, 141, 142, 143, 144, 148, 149, 157, 165, 174, 176, 180, 189, 195, 196, 203, 205, 213, 229, 231, 233, 234, 235, 236, 238, 239, 240, 241, 242, 243, 244, 245, 246], "overhead": 127, "overlap": [116, 117], "overrid": [2, 58, 66, 67, 73, 75, 141, 142, 144, 159, 236, 241], "overridden": [143, 159], "overturn": [37, 238], "overview": 180, "overwhelm": [142, 144, 153], "overwrit": 9, "overwritemappingfil": 7, "overwritten": 7, "own": [1, 11, 55, 142, 143, 230, 231, 234, 236, 238, 241], "owner": 142, "oxygen": [198, 199, 201, 202], "p": [141, 142, 186], "pacif": 238, 
"pacific_basin": [231, 234, 235, 236], "packag": [3, 7, 142, 143, 144, 147, 153, 156, 162, 170, 171, 203], "pad": 141, "pad_inch": 141, "page": [12, 141, 142, 143, 144, 159, 162, 163, 180, 234, 236], "painfulli": 148, "pair": [33, 98, 111, 116, 117, 143, 213], "paleo": 4, "pan": 189, "panda": 4, "panel": [109, 114, 115, 117, 160, 210, 212, 218, 219, 220, 221, 222, 223, 224, 225, 236], "paolo": [188, 201, 202, 205, 239], "paper": [3, 174, 191, 201, 204], "parallel": [1, 6, 8, 11, 17, 56, 57, 140, 142, 143, 144, 151, 156], "parallel_task": 7, "paralleltaskcount": [7, 144, 151, 153], "paramet": [1, 10, 13, 14, 15, 17, 18, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 56, 57, 59, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 126, 127, 128, 129, 131, 132, 133, 134, 135, 136, 137, 138, 141, 143, 156, 167, 181], "parameter": 238, "parent": [8, 45, 46, 47, 53, 64, 72, 109, 124, 138, 143, 151], "parent_task": [141, 143], "parenthet": [116, 117], "parenttask": [45, 46, 47, 64, 72, 109, 124, 138, 141], "parkinson": [174, 196, 201, 204], "pars": [2, 4, 10, 88, 93, 99, 134, 156], "parsl": 143, "part": [3, 4, 11, 12, 53, 124, 141, 142, 143, 144, 156, 160, 162], "partial": 186, "particular": [4, 5, 9, 11, 142, 143, 144, 159, 195], "particularli": [3, 5, 10, 142, 143, 144, 159, 163], "pass": [1, 103, 116, 117, 141, 143, 159], "passag": 246, "passiv": [187, 196], "past": [141, 180], "patch": [5, 141], "path": [1, 3, 7, 33, 45, 63, 68, 69, 74, 85, 88, 93, 99, 100, 101, 102, 105, 127, 141, 142, 143, 144, 152, 156, 158, 159, 162, 163, 203, 226, 230], "pattern": [109, 176], "pazo": 179, "pco2": [186, 208], "pco2surfac": 208, "pcolormesh": 141, "pdf": [141, 160], 
"pdf_filenam": 141, "peak": 227, "peakwaveperiod": 227, "peninsula": [239, 246], "pep8": [1, 141, 142], "per": [7, 113, 114, 115, 116, 117, 119, 120, 144, 153, 160, 179, 233, 239], "perform": [1, 4, 5, 7, 9, 10, 11, 38, 39, 40, 41, 42, 43, 45, 51, 52, 60, 71, 82, 85, 106, 109, 111, 116, 117, 119, 120, 128, 129, 141, 142, 143, 144, 147, 148, 153, 156, 157, 158, 163, 165, 167, 170, 171, 203, 212, 229, 238], "perhap": [5, 8, 148], "period": [4, 5, 10, 143, 174, 183, 184, 186, 189, 192, 197, 216, 227], "perlmutt": [142, 153], "permiss": [142, 172, 194], "permit": [3, 153], "perous": 246, "persian": 246, "person": 189, "peter": 181, "petersen": 0, "peterson": 181, "ph": 208, "ph_3d": 208, "phase": [52, 66, 142, 143, 144, 159, 177], "phenomena": [147, 170], "phillip": [0, 4, 7], "phod": 179, "php": [3, 179], "physic": [141, 148, 176, 180, 197, 200], "pid": 7, "piec": 143, "pinch": 144, "pine_island": 239, "pinnip": 199, "pioma": [145, 201, 204, 244], "piomasvolume_monthly_climo_20180710": 244, "pip": [141, 142, 203], "place": [1, 11, 56, 112, 116, 117, 141, 144, 152, 166, 167, 200, 206, 207, 209, 217, 220, 221, 222, 223, 226, 227, 228, 230, 237, 247], "plan": [2, 4, 10, 144], "planar": 5, "plasma": 227, "platecarre": 141, "pleas": [141, 144, 156, 164, 174, 176, 177, 179, 186, 193, 197], "plot": [1, 3, 4, 5, 8, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 44, 46, 47, 48, 49, 50, 52, 58, 72, 140, 141, 142, 143, 144, 150, 154, 159, 162, 163, 164, 165, 167, 168, 196, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "plot_climatology_map_subtask": [141, 143], "plot_colormap": 149, "plot_dat": 4, "plot_pdf": 141, "plotascontour": 116, "plotclimatologymapsubtask": [141, 143], "plote": 4, "plothovmol": 47, "plotproject": 115, "plotsdirectori": [1, 52, 60, 143], "plotssubdirectori": [1, 159], 
"plottimeseri": 46, "plottitl": 162, "plottitlefonts": 117, "plottyp": [33, 229], "plotverticalsect": 233, "plt": 141, "plu": 180, "pm": 142, "pmel": 179, "pnetcdf": [147, 170, 171], "png": [113, 160], "po4": 208, "point": [1, 4, 5, 16, 20, 45, 56, 111, 116, 117, 119, 120, 128, 129, 141, 142, 143, 144, 149, 152, 158, 159, 160, 161, 163, 167, 168, 178, 203, 218, 219, 224, 225, 230, 231, 233, 236, 237, 238, 240, 242, 243, 244, 245, 247], "polar": [9, 111, 120, 141, 148, 150, 178, 187, 196, 244], "polarbuff": 111, "polarplot": 244, "pole": [9, 115, 150, 156], "poleward": 111, "polici": [153, 203], "polygon": 111, "polylin": 230, "polynya": 174, "pont": [85, 105], "pool": 151, "pop": [7, 161], "popen": 7, "popul": [142, 143], "portal": [142, 144, 159], "portion": [10, 135, 136, 137, 153], "poseidon": 177, "posit": [4, 118, 143, 167, 206, 207, 212, 213, 226, 228, 236], "possibl": [2, 3, 7, 8, 11, 61, 85, 141, 143, 144, 151, 176, 200, 237, 241], "possiblearg": 1, "possibleopt": 94, "possiblestream": 94, "possibli": [9, 73, 100, 153, 203], "post": [37, 144, 164, 195], "potenti": [1, 5, 23, 43, 107, 143, 148, 191, 194, 207, 217, 226, 228, 230, 231, 235, 236, 237, 241, 245, 246, 247], "potentialdens": [142, 144, 217, 226, 230, 231, 235, 237, 241], "potentialtemperatur": [231, 235], "pourquoipa": 239, "power": 151, "power8": 142, "pp": 179, "pped": 144, "pr": [1, 7, 10], "practic": [9, 141, 143], "pre": [5, 20, 143, 180, 203], "preced": [142, 144], "precis": [4, 116, 117], "predict": [140, 147, 169, 170, 171, 180, 197, 203], "preexist": 4, "prefer": [2, 4, 142, 203, 216], "prefix": [1, 2, 7, 33, 46, 47, 72, 82, 109, 110, 162, 208, 210, 218, 219, 224, 225, 230, 231, 235, 240], "preindustri": [208, 216], "preliminari": [180, 200], "premis": 11, "prepend": [46, 47, 82, 234], "preprocess": [1, 2, 4, 11, 140, 142, 144, 163, 168, 240, 242], "preprocessedfieldprefix": 240, "preprocessedfileprefix": 240, "preprocessedinputdirectori": 4, "preprocessedreferencerunnam": [2, 4, 163], 
"preprocessedrunnam": 240, "preprocessor": 11, "prepross": 11, "prereq": 8, "prerequisit": [6, 13, 15, 140], "present": [1, 4, 76, 77, 78, 85, 99, 105, 106, 117, 143, 159, 176, 180, 181, 196, 198, 200, 203, 216, 234], "pressur": [180, 186], "presum": [142, 144], "pretti": 142, "prevent": [1, 7, 8, 143, 151, 153, 156], "previou": [1, 2, 4, 5, 7, 142, 143, 144, 148, 150, 165, 179, 180, 198, 199, 203, 229], "previous": [10, 143], "prevou": 1, "price": 0, "primari": [3, 116, 117, 142, 143, 144, 200], "prince_harald": 239, "principl": 180, "print": [1, 4, 7, 127, 141, 143, 159, 200], "print_exc": 1, "prior": [178, 194], "privat": [11, 143, 172], "probabl": [1, 5, 144, 173], "problem": [4, 7, 8, 116, 117, 144], "proces": 7, "process": [7, 8, 15, 20, 37, 74, 124, 138, 141, 142, 144, 148, 152, 153, 162, 193, 195, 200, 203, 236], "processcount": 139, "processor": 7, "produc": [2, 4, 5, 7, 8, 11, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35, 36, 37, 48, 49, 50, 52, 63, 64, 83, 84, 85, 97, 129, 140, 141, 142, 143, 144, 148, 150, 154, 155, 156, 160, 164, 165, 167, 177, 180, 181, 195, 203, 218, 219, 221, 223, 229, 233, 236, 237, 238, 241, 244], "product": [145, 173, 175, 177, 180, 183, 186, 193, 194, 195, 196, 197, 198, 200, 201, 204, 221, 222, 223], "productionsh": 223, "profil": [142, 144, 176, 199, 231, 235], "profilegallerygroup": 235, "profilesoceanbasin": 235, "profit": 194, "program": [172, 177, 178, 191, 196, 200], "programm": 200, "progress": [5, 127, 142, 144, 191, 203, 244], "proj": [122, 141], "project": [5, 7, 9, 115, 141, 143, 147, 169, 177, 182, 187, 193, 195, 196], "projectiongriddescriptor": [9, 141], "prompt": [142, 144], "proper": 7, "properli": [142, 143, 144, 203], "properti": [51, 160, 200, 208, 230, 236, 241, 244], "propos": [4, 7, 8, 9, 10], "proprietari": 143, "protect": 172, "proven": [142, 144], "provid": [1, 3, 5, 12, 33, 77, 78, 106, 109, 110, 114, 116, 117, 141, 142, 143, 144, 150, 156, 158, 159, 163, 168, 173, 174, 175, 177, 178, 180, 
185, 187, 193, 194, 196, 200, 218, 219, 224, 225, 230, 241, 244], "provinc": 186, "provis": 180, "prune": 142, "psc": 189, "pseudo": 7, "psu": [230, 231, 235, 241], "public": [1, 3, 9, 11, 20, 142, 143, 144, 147, 148, 152, 156, 158, 159, 160, 162, 171, 175, 178, 180, 185, 187, 189, 194, 195, 196, 200, 236, 239], "publicli": [147, 152, 159, 171, 179], "publicob": [141, 143, 159, 206, 207, 209, 211, 212, 213, 214, 215, 216, 218, 219, 220, 221, 222, 223, 224, 225, 226, 228, 232, 233, 234, 236, 237, 238, 240, 242, 243, 244, 245], "publish": [175, 178, 185, 187, 196, 200], "pubugn": 208, "pubugn_r": 208, "pull": 143, "pure": 5, "purg": [1, 140, 142, 143], "purpos": [4, 116, 141, 142, 144, 148, 174, 180, 203, 218, 219, 224, 225, 233, 240, 244], "push": 142, "put": [1, 111, 142, 143, 144, 159], "pwolfram": 11, "py": [1, 2, 3, 7, 11, 141, 142, 144, 152, 162, 203], "pycharm": [141, 142], "pydata": 4, "pyplot": [4, 119, 141], "pyproj": [9, 122, 141], "pyremap": [5, 82, 141], "python": [1, 7, 64, 124, 141, 142, 143, 144, 148, 153, 167, 203, 230, 241], "python3": 142, "qu240": [5, 9], "qu480": 142, "qualiti": [180, 193, 199, 200], "quantit": 236, "quantiti": [129, 148, 180, 227], "quar": 239, "quasi": [142, 144], "quatntiti": 148, "queue": 140, "quick": [140, 141, 152, 158, 162], "quickli": 153, "quit": [141, 142, 153], "quot": 230, "r": [3, 142, 143, 179, 230, 231, 235, 241], "ra": 179, "radiat": 197, "radiomet": [178, 196], "radiu": 156, "rais": [1, 4, 10, 54, 55, 61, 62, 71, 79, 85, 94, 97, 99, 101, 105, 108, 121, 122, 126, 131, 132, 133, 134, 135, 136, 137, 141, 143], "raiseexcept": 54, "ram": 156, "ran": [5, 7, 143], "rang": [4, 46, 71, 85, 99, 109, 110, 113, 114, 116, 117, 141, 143, 148, 149, 155, 156, 163, 165, 167, 189, 197, 203, 213, 216, 229, 231, 235, 236, 241], "raster": 141, "rate": [21, 38, 143, 145, 201, 202, 205, 239], "rather": [1, 7, 10, 11, 64, 82, 88, 115, 116, 117, 141, 142, 143, 153, 210, 212, 218, 219, 220, 221, 222, 223, 224, 225, 237], "ratio": [116, 
117, 119], "raw": [141, 143], "rayner_thy": 239, "rb": 179, "rc": 141, "rdbu_r": [149, 215, 216, 247], "rdylbu_r": [149, 207, 216, 217, 226, 228, 230, 231, 237, 238, 247], "re": [1, 5, 141, 142, 143, 144, 176, 179, 191, 203, 239], "reach": [144, 153], "read": [1, 5, 7, 9, 60, 61, 64, 71, 73, 85, 98, 99, 100, 105, 126, 141, 142, 143, 144, 156, 203, 232, 233, 236, 240], "readabl": 159, "reader": [1, 11, 52, 143], "readi": [8, 142, 143, 144], "readm": [1, 3], "readonli": 98, "readpath": 4, "readthedoc": 7, "real": [4, 141], "realist": 1, "realli": [143, 205, 217, 226], "realpath": 7, "reanalysi": [145, 197, 201, 202, 204, 226, 227, 244], "reason": [4, 141, 152, 195, 203], "rebas": 142, "receiv": 194, "recent": [142, 143, 240], "recogn": [153, 238], "recommend": [142, 144, 200, 203, 239], "reconstruct": [141, 174, 181, 186, 232], "record": [183, 184, 191, 195], "recreat": 141, "rectangular": [9, 116, 117], "recurs": 8, "red": [229, 246], "redirect": 82, "redman": 179, "reduc": [7, 119, 143, 148, 153, 156, 174, 180], "redund": 11, "redundantli": 143, "ref": [117, 143, 148, 163], "ref_casename_v0": 2, "ref_end_year": 143, "ref_field_nam": [141, 143], "ref_file_nam": 143, "ref_ohc": 143, "ref_start_year": 143, "ref_title_label": [141, 143], "ref_year_climatology_task": [27, 141, 143], "ref_year_climo": 143, "refarrai": [114, 115, 117], "refbottomdepth": [147, 170], "refer": [1, 2, 4, 10, 11, 27, 43, 85, 105, 106, 109, 110, 117, 129, 131, 132, 133, 136, 140, 142, 143, 144, 149, 152, 159, 163, 168, 201, 202, 203, 204, 206, 207, 209, 210, 212, 213, 214, 215, 216, 218, 219, 220, 221, 222, 223, 224, 225, 226, 240, 241, 243, 245], "referenc": 177, "reference_tim": [147, 170], "referenced": [131, 132, 133, 136], "referencelongitud": [210, 218, 219, 220, 221, 222, 223, 224, 225], "referencerunnam": 2, "reffieldnam": [110, 141, 143], "refin": [147, 170], "reflect": 203, "reftitl": [114, 115, 117], "reftitlelabel": [110, 141, 143], "refyearclimatologytask": 143, 
"refyearmpasclimatologytask": [27, 143], "refzmid": [147, 170], "regard": [153, 200], "regardless": [13, 167], "region": [1, 3, 9, 20, 21, 32, 36, 38, 40, 46, 47, 111, 116, 117, 140, 141, 142, 143, 144, 148, 149, 152, 153, 156, 168, 174, 178, 185, 196, 203, 205, 229, 231, 232, 235, 238, 240, 242, 243, 245, 246], "region_mask": [152, 162], "regionaltsdiagram": [140, 146, 162], "regiongroup": [124, 231, 234, 235, 236, 241], "regionmaskfil": 47, "regionmaskstask": [21, 32, 36, 38, 40], "regionmasksubdirectori": [152, 162, 236, 239, 241], "regionmasksubtask": 123, "regionnam": [46, 47, 231, 234, 235, 236, 238, 239, 241], "regiono": 40, "regist": [159, 195], "registri": [112, 234], "regress": 10, "regrid": [2, 5, 180, 205], "regriddedmld": 2, "regriddedseaiceconcthick": 2, "regriddedsss": 2, "regriddedsst": 2, "regular": [5, 180, 195, 199], "reilli": [193, 201, 202], "reinstal": 142, "rel": [1, 5, 7, 10, 11, 88, 93, 100, 130, 142, 143, 144, 156, 158, 163, 189, 230], "relat": [5, 7, 10, 11, 110, 112, 117, 130, 141, 142, 143, 144, 148, 150, 152, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 165, 174, 179, 180, 182, 199, 200, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "relationship": 186, "relativedelta": [10, 130, 137], "relativepathopt": 100, "relativepathsect": 100, "releas": [7, 147, 156, 159, 171], "relev": [12, 159, 163, 246], "reli": [8, 116, 117, 142, 144], "reliabl": [153, 197], "remain": [4, 11, 152, 210, 216, 218, 219, 220, 221, 222, 223, 224, 225, 230, 231, 233, 234, 235, 236, 237, 238, 244], "remaind": [141, 143], "remainingtask": 7, "remap": [5, 6, 7, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 35, 36, 37, 48, 49, 50, 64, 65, 66, 67, 68, 69, 72, 74, 79, 80, 81, 82, 109, 121, 122, 140, 141, 143, 150, 152, 153, 163, 203, 209, 210, 211, 212, 213, 214, 215, 216, 218, 219, 220, 221, 
222, 223, 224, 225, 230, 237, 247], "remap_climatology_subtask": [141, 143], "remap_fil": 9, "remap_observations_subtask": [141, 143], "remapmpasclimatologi": 64, "remapmpasclimatology_": 143, "remapmpasclimatologysubtask": [109, 141, 143], "remapmpasohcclimatologi": 141, "remapobsclimatologysubtask": 109, "remapobserv": 72, "remapobservedclimatologysubtask": 109, "remapp": [6, 82, 140, 143, 148], "remappedclimsubdirectori": 158, "remot": [142, 195], "remov": [1, 2, 4, 7, 8, 11, 107, 141, 142, 189, 231, 235, 236, 241], "removemean": 109, "renam": [2, 73, 141, 159, 203], "rename_vari": 11, "renew": 199, "rennick": 239, "renorm": [9, 148], "renormalizationthreshold": [9, 148, 230, 237, 247], "reorgan": [6, 140], "repeat": 107, "rependtim": 4, "repetit": [148, 165], "replac": [2, 4, 142, 156, 180, 203], "replic": 4, "replicate_cycl": 4, "repo": [5, 142, 203], "repositori": [1, 20, 124, 138, 148, 152, 156, 158, 159, 162, 226, 230], "repres": [4, 10, 116, 117, 143, 200, 239], "represent": 4, "reproduc": [147, 170], "reproduct": 180, "repsecondtim": 4, "repsect": 168, "repstarttim": 4, "request": [3, 8, 11, 13, 15, 33, 61, 62, 70, 116, 117, 126, 142, 143, 144, 163, 167, 175, 178, 185, 187, 190, 196, 197, 201, 202], "requir": [1, 2, 4, 5, 7, 8, 9, 10, 11, 77, 78, 85, 97, 105, 106, 141, 143, 144, 153, 159, 172, 180, 203], "requri": 1, "rerun": [5, 142, 203], "research": [3, 172, 177, 179, 194, 197, 200], "reserv": [141, 143], "resid": [52, 61, 126], "residu": 179, "resolut": [3, 5, 142, 144, 148, 150, 152, 156, 160, 163, 167, 173, 175, 180, 186, 193, 195, 198, 203, 230, 237, 247], "resolv": [183, 184], "resourc": [7, 143, 147, 153, 169, 194], "respect": [1, 4, 142, 144, 152, 177, 179, 180, 181, 196, 203, 231, 233, 238], "respons": 172, "rest": [3, 4, 7, 11, 172], "restart": [33, 52, 64, 71, 124, 134, 141, 142, 143, 144, 147, 148, 156, 165, 170, 171, 203, 234], "restartfilenam": [64, 141, 143], "result": [2, 3, 4, 5, 7, 9, 11, 45, 98, 100, 108, 109, 116, 117, 119, 120, 124, 
127, 129, 138, 141, 142, 143, 144, 148, 149, 150, 151, 156, 158, 159, 161, 162, 163, 167, 168, 175, 176, 178, 185, 186, 187, 191, 196, 203, 212], "resultsuffix": 117, "retain": 143, "return": [1, 4, 7, 8, 9, 10, 14, 15, 19, 45, 54, 55, 63, 66, 67, 68, 69, 73, 74, 75, 76, 77, 78, 82, 85, 86, 87, 89, 90, 91, 92, 94, 95, 96, 97, 98, 99, 100, 102, 103, 105, 106, 107, 108, 111, 112, 116, 117, 119, 120, 121, 122, 124, 127, 128, 129, 131, 132, 133, 134, 135, 136, 137, 138, 141, 143], "returncod": 7, "reus": [143, 152, 163, 224, 225], "rh": 141, "rho": 143, "rhointerv": 236, "richter": 239, "rick": 179, "rid": 141, "right": [1, 111, 116, 117, 141, 142, 143, 240], "right_label": 141, "rignot": [190, 201, 202, 239], "riiser": 239, "rilei": 0, "risk": 174, "robinson": 3, "robust": [7, 143], "robustli": 7, "roekel": 0, "roemmich": [145, 176, 201, 202, 206, 207, 208], "roll": [128, 129], "ronn": [141, 239], "root": 203, "rosa": [0, 3], "rosinski": [183, 184], "ross": [234, 236, 239, 241], "rossbi": 156, "rotate_label": 141, "rothrock": [189, 201, 204], "roughli": 193, "routin": [1, 10, 232], "rr": 156, "rst": [3, 147, 171, 203], "rudimentari": 143, "run": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 14, 15, 17, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 44, 46, 47, 48, 49, 50, 51, 52, 53, 55, 57, 58, 60, 109, 114, 115, 116, 117, 129, 140, 141, 143, 148, 149, 150, 151, 152, 153, 156, 159, 165, 168, 196, 212, 229, 236, 238, 239, 240, 242], "run_aft": [8, 143], "run_analysi": [1, 2, 7, 8, 203], "run_mpas_analysi": 8, "run_parallel_task": 7, "run_task": 141, "runaftertask": [8, 52], "rundirectori": [33, 52, 60, 143], "runnam": [142, 144, 163], "runningtask": 19, "runstream": [52, 60, 143], "runsubdirectori": [142, 144, 156, 203], "runtim": 153, "runtimeerror": 54, "rx": 142, "safe": 151, "safest": 142, "sai": [142, 143, 144, 203, 230], "sakhalin": 246, "salin": [22, 29, 40, 42, 141, 142, 144, 145, 148, 175, 191, 194, 201, 202, 206, 215, 217, 226, 
228, 230, 231, 235, 236, 237, 241, 243, 246, 247], "salt": 229, "same": [1, 2, 4, 7, 8, 9, 10, 11, 52, 61, 66, 67, 76, 77, 78, 85, 116, 117, 119, 126, 127, 141, 142, 143, 144, 151, 152, 156, 162, 163, 177, 180, 200, 203, 212, 213, 214, 234, 236, 240], "samelson": 3, "sampl": [118, 149, 167, 180, 203, 206, 207, 226, 228, 236, 239], "san": 172, "saral": 195, "satelit": [145, 201, 202, 227], "satellit": [145, 174, 177, 178, 185, 195, 196, 201, 202, 215], "satisfi": [1, 180], "save": [1, 5, 141, 163, 203, 230, 238], "savefig": 141, "sbin": 236, "scale": [140, 147, 156, 170, 171, 174, 191, 203, 208], "scan": [178, 196], "scatter": 236, "schlax": 3, "schmidtko": [158, 192, 201, 202, 217], "schmidtkosubdirectori": 158, "schwegmann": 174, "schweiger": [189, 201, 204], "scienc": [143, 147, 169], "scientif": [144, 149, 194], "scipi": 141, "sciviscolor": 149, "scope": 200, "scott": 181, "scratch": 142, "screen": [46, 47, 109, 110, 144, 153, 159], "scrip": [5, 9], "scripp": [191, 201, 202], "script": [1, 4, 7, 11, 16, 140, 142, 159, 162, 203], "sea": [1, 2, 3, 5, 7, 28, 29, 30, 41, 49, 50, 51, 109, 140, 142, 143, 144, 145, 156, 161, 168, 175, 177, 178, 179, 181, 183, 184, 185, 186, 187, 191, 193, 196, 202, 204, 206, 207, 209, 214, 215, 216, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 232, 234, 236, 241, 242, 244, 246], "sea_ic": [1, 4, 203], "seafloor": [143, 145, 201, 202, 206, 207, 217, 226], "seaic": [4, 52, 61, 80, 81, 84, 126, 140, 142, 143, 144, 156, 158, 159, 161, 162, 169, 203, 210, 218, 219, 220, 221, 222, 223, 224, 225, 244], "seaiceconc": [218, 219], "seaicehistorysubdirectori": [142, 144, 156, 203], "seaicemelt": [220, 221], "seaicenamelistfilenam": [142, 144, 156], "seaiceobserv": [158, 203], "seaicepreprocessedrefer": 161, "seaiceproduct": [222, 223], "seaicestreammap": 1, "seaicestreamsfilenam": [142, 144, 156], "seaicethick": [224, 225], "seaicevariablemap": 1, "seal": 236, "seamlessli": 3, "search": [94, 144, 176], "searchsort": 141, "season": 
[1, 3, 61, 62, 63, 64, 66, 67, 68, 69, 72, 74, 77, 80, 81, 84, 109, 140, 141, 142, 143, 144, 148, 153, 157, 168, 175, 179, 199, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 230, 234, 235, 236, 237, 247], "seasonsubtask": 61, "seawat": 3, "seawif": [145, 201, 202, 208], "seawifs_oc": 193, "second": [4, 7, 10, 109, 116, 117, 130, 131, 143, 152, 186], "secondremapmpasclimatologysubtask": 109, "secondxaxisdata": [116, 117], "section": [1, 2, 8, 9, 33, 46, 47, 59, 100, 109, 110, 112, 113, 114, 115, 116, 117, 119, 120, 126, 141, 142, 143, 144, 145, 148, 151, 152, 153, 154, 155, 156, 158, 159, 160, 161, 163, 165, 201, 202, 203, 212, 226, 230, 231, 233, 234, 235, 236, 237, 240, 241, 247], "section_nam": [141, 143], "sectionnam": [46, 47], "secur": [141, 143], "see": [1, 2, 4, 5, 7, 8, 10, 13, 60, 85, 105, 106, 119, 124, 138, 141, 142, 144, 148, 149, 151, 152, 153, 154, 156, 158, 159, 161, 162, 163, 174, 179, 198, 200, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 247], "seek": 7, "seem": [4, 5, 7, 141, 153], "seemingli": 4, "seen": [142, 144, 159], "segment": [111, 116, 117, 167], "sel": [4, 85, 105, 106], "select": [4, 5, 7, 116, 149, 167, 176, 218, 219, 226, 231, 232, 233, 237, 240], "selectcorrdvalu": [85, 105, 106], "self": [1, 8, 9, 55, 56, 141, 143], "selvalu": [85, 105, 106], "semilognorm": 230, "semimajor": 179, "semiminor": 179, "send": 143, "sens": [2, 4, 11, 143, 148, 195, 205, 217, 226], "sensit": 180, "senson": 193, "sensor": [178, 185, 193, 195, 196], "sep": [141, 143, 164, 205, 206, 207, 208, 209, 211, 212, 213, 214, 215, 216, 217, 226, 227, 228, 230, 235, 237, 247], "separ": [2, 3, 4, 5, 7, 10, 18, 135, 136, 137, 143, 149, 153, 156, 212, 237, 240, 241], "septemb": 189, "sequenc": [1, 8, 196], "seri": [2, 4, 33, 34, 
37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 51, 57, 118, 119, 120, 126, 127, 129, 140, 142, 143, 144, 145, 149, 157, 159, 160, 162, 163, 168, 187, 195, 201, 203, 204, 229, 231, 232, 238, 239, 240, 241, 242, 243, 244, 245, 246], "serial": [7, 11, 17, 56, 142, 144, 148, 153], "seriou": 180, "serv": [1, 3, 4], "server": [142, 144, 159], "servic": 193, "session": 142, "set": [1, 3, 4, 5, 6, 7, 8, 11, 20, 45, 46, 47, 53, 55, 56, 59, 60, 62, 64, 66, 67, 71, 73, 76, 77, 78, 82, 85, 99, 103, 105, 106, 107, 108, 109, 112, 114, 115, 116, 117, 119, 120, 127, 128, 129, 139, 140, 141, 142, 143, 145, 148, 149, 150, 151, 153, 156, 157, 158, 159, 160, 163, 166, 174, 178, 180, 183, 185, 186, 187, 196, 198, 200, 202, 203, 216, 226, 230, 232, 233, 234, 236, 239, 240, 241, 244, 246], "set_axis_off": 141, "set_ext": 141, "set_label": 141, "set_plot_info": [141, 143], "set_tick": 141, "set_ticklabel": 141, "set_titl": 141, "setenv": [142, 144], "setup": [1, 8, 53, 60, 71, 143], "setup_and_check": [1, 8, 13, 15, 159, 203], "sever": [1, 2, 3, 7, 8, 9, 11, 142, 143, 144, 147, 148, 151, 159, 162, 164, 165, 167, 168, 170, 171, 173, 177, 180, 187, 191, 234, 236], "sh": [48, 49, 50, 142, 144, 210, 219, 221, 223, 225], "shackleton": 239, "shade": [149, 203, 236], "shallow": [192, 241], "shape": [111, 116, 124, 138, 141, 176], "share": [1, 4, 5, 7, 8, 11, 27, 140, 141, 144, 152, 153, 203], "shea": [183, 184], "shelf": [38, 141, 190, 192, 201, 202, 205, 226, 234, 236, 241], "shell": [142, 144], "shelv": [162, 172, 201, 202, 205, 206, 207, 226, 239], "ship": 247, "shiras": 239, "short": [46, 47, 64, 109, 110, 142, 143, 144, 156, 181, 203, 210, 218, 219, 220, 221, 222, 223, 224, 225], "shortcom": 4, "shortcut": [2, 142, 144, 159], "shorter": [4, 142, 144], "should": [1, 2, 4, 5, 7, 8, 9, 11, 13, 14, 18, 19, 45, 52, 55, 57, 58, 60, 64, 72, 82, 98, 112, 115, 116, 117, 141, 142, 143, 144, 147, 148, 149, 151, 153, 154, 155, 156, 158, 159, 161, 162, 163, 165, 167, 170, 171, 180, 194, 203, 210, 216, 218, 
219, 224, 225, 229, 230, 231, 234, 235, 236, 237, 238, 240, 241, 244, 247], "shouldn": 238, "show": [1, 111, 117, 141, 143, 159, 226, 236, 237, 240], "shown": [116, 117, 142, 144], "si": [231, 235], "sic": [183, 184], "signific": [153, 195, 227], "significantwaveheight": 227, "silli": 141, "silvia": 179, "similar": [7, 11, 141, 142, 144, 163, 177, 191], "similarli": [1, 2, 3, 141, 142, 144, 153, 156], "simpl": [2, 4, 7, 8, 9, 11, 141, 142, 144], "simpler": 2, "simplest": 159, "simpli": [2, 141, 144, 153, 154, 203], "simplic": 4, "simplif": 2, "simplifi": [1, 2, 11], "simul": [4, 27, 33, 42, 43, 85, 105, 106, 116, 128, 129, 134, 140, 141, 142, 143, 144, 147, 148, 156, 159, 161, 163, 165, 166, 168, 169, 170, 171, 183, 184, 203, 213, 216, 229, 238, 240, 243, 245, 246], "simulatan": 8, "simulation_start_tim": [4, 134], "simulationstarttim": [4, 85, 105, 106], "simultan": [7, 144, 151], "simultanei": [7, 151], "sinc": [4, 7, 10, 13, 76, 77, 78, 85, 105, 106, 131, 132, 133, 136, 141, 142, 143, 144, 149, 151, 158, 183, 184, 199, 203, 210, 218, 219, 224, 225, 230, 241], "singl": [1, 4, 7, 8, 11, 77, 117, 132, 141, 143, 148, 151, 153, 156, 159, 160, 174, 180], "sio3": 208, "site": 142, "situ": [183, 184], "situat": [7, 10], "size": [7, 9, 85, 113, 114, 115, 116, 117, 119, 120, 141, 143, 144, 148, 150, 156, 160, 187, 203, 226, 234, 238, 244], "skeleton": 9, "skip": [2, 7, 13, 118, 142, 143, 144, 159, 166], "sla": 177, "slava": 239, "slice": [4, 11, 64, 85, 99, 105, 106, 109, 156], "slightli": 167, "slot": [8, 151], "slow": [148, 151], "slower": [148, 156, 158], "small": [7, 144, 153, 173], "smaller": [143, 148, 239], "smallest": 4, "smith": 181, "smithinlet": 239, "smmr": [178, 196], "smooth": [45, 181, 238], "smoothli": 5, "snow": [174, 185, 196], "so": [1, 2, 3, 4, 5, 7, 11, 45, 53, 62, 64, 77, 78, 116, 117, 141, 142, 143, 144, 151, 152, 153, 156, 159, 162, 163, 167, 180, 203, 237, 238, 241, 246], "socatv4": 186, "softwar": [141, 142, 143, 144], "soi": 179, "solid": [113, 
116, 117, 119, 120, 149], "solut": [1, 4, 5, 7, 8, 9, 11, 141], "som": [145, 201, 202, 208], "some": [1, 7, 8, 11, 57, 73, 77, 78, 117, 141, 142, 143, 144, 148, 153, 156, 157, 159, 163, 167, 199, 203, 230, 241], "sometext": 1, "someth": [65, 141, 142, 143, 144], "something_els": 11, "sometim": [1, 77, 78, 141, 142, 143, 144, 203], "somewhat": [4, 9, 142, 209, 214], "somewher": [5, 142, 144], "soon": [141, 238], "sophist": 7, "sort": 103, "sose": [141, 142, 144, 145, 158, 201, 202, 236, 237, 241], "sose_10000km": 226, "sosemeridionalvelocitytransect": 237, "sosepotentialdensitytransect": 237, "sosesalinitytransect": 237, "sosesubdirectori": 158, "sosetemperaturetransect": 237, "sosetransect": [140, 146, 194], "sosevelocitymagnitudetransect": 237, "sosezonalvelocitytransect": 237, "sound": 246, "sounder": [178, 196], "sourc": [5, 9, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 143, 144, 201, 202, 204, 210, 218, 219, 224, 225, 232, 233], "sourcedescriptor": [9, 82], "south": [115, 150, 178], "southern": [31, 145, 162, 172, 174, 201, 202, 210, 219, 221, 223, 225, 234, 236, 237, 241], "southern_ocean_basin": [231, 234, 235, 236], "southernmost": [218, 224], "sowisc12to60e2r4": [142, 144, 156], "space": [10, 46, 47, 109, 110, 116, 117, 135, 136, 137, 142, 143, 159, 177, 193, 230, 231, 235, 236, 237, 238, 241], "span": [148, 163, 165, 180, 183, 184, 187, 195, 229], "spars": 141, "spatial": [109, 147, 170, 175, 179, 181, 198, 236], "spawn": [7, 148, 151, 153, 236], "spec": 
[142, 203], "special": [7, 143, 178, 185, 196], "specif": [1, 2, 4, 10, 11, 13, 60, 106, 130, 142, 143, 144, 153, 156, 159, 183, 184, 196, 203], "specifi": [3, 4, 5, 10, 14, 63, 68, 69, 74, 109, 112, 114, 116, 117, 141, 142, 143, 144, 148, 150, 153, 154, 155, 156, 158, 159, 160, 161, 162, 163, 164, 165, 203, 210, 226, 230, 231, 232, 235, 237], "spectra": [34, 232], "spectral_r": [217, 226, 230, 231, 237], "speed": [142, 153, 162], "spheric": 116, "sphinx": 203, "spin": [148, 165, 180, 229], "split": [1, 2, 7], "splitext": 141, "spread": 180, "spstere": 115, "sqrt": 141, "squar": [3, 179], "src_loc": 142, "src_mesh": [142, 144], "src_region": [142, 144], "sream": 93, "srun": [7, 142, 153], "ss": [10, 135, 136, 137], "ssalto": 177, "sscciobsendyear": 227, "sscciobsstartyear": 227, "ssh": [3, 28, 109, 158, 209, 214, 229, 234], "sshsubdirectori": 158, "ssm": [145, 185, 201, 204, 218, 219, 244], "ssmi": [178, 196, 218, 219], "ssmi_bootstrap_gridded_concentration_nh_ja": 218, "ssmi_bootstrap_gridded_concentration_nh_jfm": 218, "ssmi_bootstrap_gridded_concentration_sh_djf": 219, "ssmi_bootstrap_gridded_concentration_sh_jja": 219, "ssmi_nasateam_gridded_concentration_nh_ja": 218, "ssmi_nasateam_gridded_concentration_nh_jfm": 218, "ssmi_nasateam_gridded_concentration_sh_djf": 219, "ssmi_nasateam_gridded_concentration_sh_jja": 219, "sss": [1, 3, 29, 145, 158, 201, 202, 215], "sssss": [10, 135, 136, 137], "ssssubdirectori": 158, "sst": [1, 3, 30, 41, 145, 158, 183, 201, 202, 216, 242], "sstsubdirectori": 158, "sstv4": [145, 201, 202, 232], "st": 1, "stabl": [229, 248], "stack": [1, 13, 15, 115, 144], "stackoverflow": 141, "stage": [4, 64, 74, 143], "stamp": 162, "standalon": [11, 163], "standard": [1, 10, 143, 144, 150, 152, 156, 158, 177, 203, 235], "stang": 239, "stare": 143, "start": [1, 4, 11, 27, 33, 52, 59, 61, 85, 97, 105, 106, 118, 126, 127, 128, 129, 134, 140, 143, 152, 153, 158, 162, 163, 183, 184, 213, 229, 231, 234, 237, 238, 239, 240, 242, 243, 244, 245, 246], 
"start_dat": [4, 11], "start_year": 143, "startdat": [1, 4, 61, 85, 97, 99, 126, 129], "startdatefirstyear": 4, "startindex": 4, "starttim": 76, "startyear": [1, 33, 59, 61, 126, 142, 143, 144, 148, 155, 156, 165, 203, 229], "stat": [61, 64, 80, 81, 83, 84, 162], "state": [1, 31, 143, 144, 145, 147, 169, 180, 188, 200, 201, 202, 227, 236, 237, 241], "statist": [181, 195, 199], "statu": [7, 8, 200], "stderr": [7, 56], "stdout": [1, 7, 56], "steam": 134, "step": [1, 5, 8, 11, 60, 71, 141, 142, 143, 144, 186, 191], "stephen": 0, "stere": 141, "steregraph": 141, "stereograph": [9, 141, 143, 148, 150, 158, 178, 187, 196], "steric": 191, "sterl": 0, "stick": 144, "still": [1, 7, 116, 141, 142, 143], "store": [1, 4, 5, 7, 9, 10, 110, 127, 141, 143, 144, 152, 153, 158, 161, 162, 180, 201, 202, 203, 237, 239], "str": [1, 18, 33, 45, 46, 47, 52, 54, 59, 61, 62, 63, 64, 65, 66, 67, 68, 69, 72, 73, 74, 75, 80, 81, 82, 84, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 98, 99, 100, 101, 102, 103, 109, 110, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 126, 127, 129, 131, 132, 133, 134, 136, 137, 138, 141, 143, 230], "straight": 143, "strait": 246, "strategi": [141, 144, 147, 170], "stream": [1, 4, 10, 52, 60, 61, 62, 93, 94, 95, 96, 97, 134, 142, 143, 144, 147, 169, 170, 171, 183, 184, 203], "streamfound": 95, "streamfunct": [37, 141, 238], "streamfunctionmoc": [140, 146, 156, 162], "streamlin": 141, "streammap": [1, 4], "streamnam": [4, 61, 94, 95, 96, 97], "streamsdir": [1, 93], "streamsfil": [1, 52, 134], "streamsfilenam": 1, "streletz": 0, "strength": 156, "strict": [142, 144, 203], "strictli": 141, "stride": [116, 117, 119, 166], "string": [1, 10, 18, 52, 61, 68, 69, 74, 85, 97, 99, 105, 106, 108, 116, 117, 126, 135, 136, 137, 224, 225], "string_to_datetim": 4, "string_to_days_since_d": 4, "stringtodatetim": 10, "stringtorelativedelta": 10, "strongli": [142, 144, 200], "structur": [2, 3, 11, 142, 144, 152, 158], "studi": [147, 170, 172, 181, 195], "stuff": 
1, "style": [1, 113, 116, 117, 119, 120, 141, 142, 183, 184], "sub": [38, 118, 141, 147, 170, 180, 205], "subbasin": 162, "subclass": [1, 9, 10, 73, 75, 130, 143], "subdir": 142, "subdirectori": [2, 64, 142, 143, 144, 152, 156, 158, 159, 160, 163, 203, 205], "subdivid": 111, "subdivis": 111, "subdivisionthreshold": 139, "submiss": 200, "submit": [7, 203], "submodul": 4, "subplot": [109, 110, 115, 116, 117, 210, 218, 219, 220, 221, 222, 223, 224, 225], "subpolar_north_atlant": [65, 79], "subprocess": [7, 140, 148, 168, 236], "subprocesscount": [124, 138, 148, 151, 236], "subsampl": [167, 230, 237, 247], "subsequ": [1, 2, 5], "subset": [1, 20, 108, 144, 180, 203, 226, 237, 239, 246], "substanti": 160, "substitut": [141, 142, 144], "subtak": 241, "subtask": [1, 6, 13, 15, 17, 45, 46, 47, 52, 53, 57, 61, 64, 72, 109, 123, 124, 126, 138, 140, 148, 151, 230, 236], "subtask_nam": [141, 143], "subtasknam": [45, 46, 47, 52, 64, 72, 109, 126, 141, 143], "subtitl": [46, 47, 109, 110, 114, 115, 117], "subtract": [4, 10, 27, 97, 109, 130, 143, 196, 209, 214], "succ": 8, "succeed": 8, "success": [2, 4, 7, 8, 10], "successfulli": [4, 5, 7, 8, 10, 147, 159, 170, 171], "suffici": 160, "suffix": [1, 109, 112, 116, 117, 124, 138, 226, 238, 240], "suggest": [1, 7, 144, 147, 170, 171], "suit": [7, 149, 176], "suitabl": 181, "sulzberg": 239, "sum": [46, 141, 143, 196, 213], "summari": [1, 2, 4, 5, 7, 8, 9, 10, 11, 12], "super": [1, 117, 141, 142, 143], "superclass": [141, 143], "supercomput": [142, 153], "supplementari": 239, "suppli": [1, 4, 5, 10, 11, 64, 77, 78, 97, 131, 132, 133, 135, 136, 137, 142, 143, 144, 152, 156, 158, 162, 230], "support": [1, 2, 5, 6, 9, 10, 11, 33, 37, 64, 76, 77, 78, 85, 97, 99, 105, 106, 124, 127, 130, 136, 137, 140, 143, 144, 147, 150, 151, 152, 153, 156, 159, 160, 163, 168, 169, 170, 171, 172, 177, 226, 228, 229, 234, 236, 237, 238], "suppos": 143, "suppress": 144, "suptitl": 117, "sure": [1, 2, 3, 4, 7, 8, 11, 54, 141, 142, 143, 144, 153, 163, 203], 
"surfac": [3, 28, 29, 30, 41, 109, 143, 145, 175, 177, 180, 181, 183, 184, 186, 195, 201, 202, 206, 207, 209, 214, 215, 216, 226, 228, 232, 242], "surfaceareaweightedaverag": [147, 170], "surfaceareaweightedaveragesampkg": [147, 170], "surfaceareaweightedaveragesoutput": [147, 170], "survei": 200, "suvorov": 239, "sv": 141, "sw": 1, "swinburn": 239, "switch": 141, "sy": [1, 4, 141], "symbol": [119, 163], "symlog": [141, 149, 205, 208, 210, 226, 247], "symlognorm": 141, "syntax": [1, 2, 11, 141, 142, 144, 159], "synthes": 182, "synthesi": 182, "system": [4, 7, 140, 142, 144, 147, 153, 163, 169, 170, 177, 178, 180, 183, 184, 185, 189], "systemat": [5, 209, 214], "szoek": 3, "t": [1, 7, 8, 11, 52, 77, 78, 83, 100, 127, 141, 142, 143, 144, 151, 153, 159, 162, 236, 238], "t62_oqu240wli": 142, "tab": 229, "tabl": [203, 205], "tag": [3, 18, 52, 55, 126, 141, 142, 143, 144, 159, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "take": [4, 5, 11, 45, 56, 85, 105, 106, 129, 141, 142, 143, 144, 151, 153, 157, 167, 174, 196, 203, 230, 236, 238, 241], "taken": [1, 7, 10, 113, 114, 115, 116, 117, 119, 120, 124, 127, 142, 143, 148, 163, 165, 167, 232, 236], "talk": 143, "tall": [210, 218, 219, 220, 221, 222, 223, 224, 225], "tallei": [176, 201, 202], "tar": 144, "task": [3, 6, 13, 14, 15, 17, 18, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 55, 56, 57, 58, 60, 61, 62, 64, 72, 109, 110, 123, 124, 126, 138, 140, 142, 147, 148, 149, 150, 151, 152, 156, 159, 162, 164, 165, 167, 168, 170, 171, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "task_nam": [142, 
144, 159], "taskcount": 7, "tasknam": [1, 7, 52, 55, 61, 64, 124, 126, 138, 141, 143], "tasmania": 246, "tbin": 236, "tcsh": [142, 144], "teach": 172, "team": 144, "technic": [153, 199, 205, 217, 226], "techniqu": 141, "teh": 7, "temperatur": [3, 23, 30, 40, 41, 43, 141, 142, 143, 144, 145, 148, 181, 183, 184, 187, 191, 194, 201, 202, 207, 212, 216, 217, 226, 228, 230, 231, 232, 235, 236, 237, 241, 242, 245, 246, 247], "templat": [2, 3, 5, 6, 97, 140, 141, 143, 203], "tempor": [175, 192], "temporari": 142, "tend": [142, 144, 157], "term": [9, 11, 116, 117, 142, 156, 181, 203, 238], "termin": [142, 143, 144], "test": [1, 2, 4, 5, 7, 8, 9, 10, 11, 141, 142, 144, 152, 159], "test_mpas_xarrai": 11, "test_output": 142, "text": [46, 47, 113, 114, 115, 116, 117, 119, 120, 175, 178, 185, 187, 196, 200, 241], "tgradmld": [147, 170], "than": [1, 3, 4, 5, 7, 10, 11, 52, 64, 65, 82, 88, 111, 113, 114, 115, 116, 117, 119, 120, 141, 142, 143, 144, 148, 149, 153, 164, 173, 192, 203, 210, 212, 218, 219, 220, 221, 222, 223, 224, 225, 230, 237, 241, 247], "thei": [1, 3, 5, 7, 8, 10, 13, 111, 116, 117, 141, 143, 144, 147, 151, 152, 153, 156, 158, 159, 160, 162, 163, 170, 171, 180, 189, 200, 230, 234, 236, 241], "them": [1, 5, 7, 13, 45, 141, 143, 144, 152, 156, 166, 167, 194, 195, 217, 236, 237, 247], "themselv": 153, "theori": 4, "therefor": [148, 153, 230, 237, 247], "therefrom": [175, 178, 185, 187, 196], "thi": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 33, 45, 46, 47, 52, 53, 55, 57, 58, 60, 62, 64, 66, 67, 72, 73, 75, 85, 94, 99, 105, 106, 109, 110, 116, 117, 119, 123, 127, 141, 142, 143, 144, 148, 151, 152, 153, 156, 157, 158, 159, 162, 163, 165, 167, 172, 174, 176, 177, 178, 179, 180, 181, 183, 185, 186, 187, 189, 191, 195, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "thick": [50, 141, 143, 145, 149, 
174, 201, 204, 214, 224, 225], "thicknessnh_fm": 224, "thicknessnh_on": 224, "thicknesssh_fm": 225, "thicknesssh_on": 225, "thin": [210, 218, 219, 220, 221, 222, 223, 224, 225], "thing": [1, 141, 143, 144], "think": [1, 7, 141], "third": [109, 116, 117, 149, 159], "thirdxaxisdata": [116, 117], "thisfil": 7, "thoma": 181, "thorn": 181, "those": [1, 2, 5, 7, 8, 9, 11, 108, 140, 148, 159, 167, 203, 212], "though": [1, 5, 7, 141, 143, 180, 203], "thought": 141, "thousand": 142, "thread": [140, 144, 148, 153, 162, 168, 236], "three": [3, 117, 148, 152, 153, 158, 164, 180, 200, 203], "threshold": [9, 148, 176, 212, 230, 237, 247], "through": [1, 4, 5, 7, 9, 13, 44, 141, 143, 144, 150, 153, 156, 158, 159, 168, 176, 179, 186, 191, 203, 246], "throughflow": 246, "throughout": 2, "thu": 7, "thumbnail": [46, 47], "thumbnailsuffix": [46, 47], "thwait": 239, "tick": [112, 116, 117, 119, 140, 141, 160, 168, 203, 206, 207, 209, 210, 213, 214, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 237, 238, 239, 240, 242, 243, 244, 245, 246, 247], "ticker": 141, "tie": 178, "tight": 141, "tightli": 141, "time": [2, 3, 4, 5, 7, 10, 11, 33, 34, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 51, 57, 71, 76, 77, 78, 85, 99, 105, 106, 107, 113, 116, 117, 118, 119, 120, 126, 127, 128, 129, 130, 134, 135, 136, 137, 140, 141, 143, 144, 145, 147, 152, 153, 155, 156, 157, 159, 160, 162, 163, 168, 170, 179, 180, 187, 191, 195, 201, 203, 204, 208, 215, 216, 220, 221, 222, 223, 229, 230, 231, 232, 235, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "time_avg_avgvaluewithinoceanregion_avgsurfacetemperatur": 11, "time_avg_dayssincestartofsim": 11, "time_seri": 143, "time_series_ohc": 1, "time_series_sst": 1, "time_variable_nam": [4, 11], "timedelta": [10, 130], "timeend": 4, "timeendfirstyear": 4, "timeindic": 127, "timekeep": [4, 6, 140, 143], "timekeeping_reorg": 10, "timemonthly_avg_": 208, "timemonthly_avg_activetracers_salin": [230, 231, 235, 241], 
"timemonthly_avg_activetracers_temperatur": [143, 230, 231, 235, 241], "timemonthly_avg_dthreshmld": 241, "timemonthly_avg_ecosys_diag_": 208, "timemonthly_avg_ecosystracers_": 208, "timemonthly_avg_layerthick": [141, 143], "timemonthly_avg_normalveloc": 141, "timemonthly_avg_potentialdens": [230, 231, 235, 241], "timemonthly_avg_veloc": 3, "timemonthly_avg_velocitymeridion": 230, "timemonthly_avg_velocityzon": 230, "timemontly_avg_": 234, "timeout": 19, "timescal": 4, "timeseri": [1, 4, 52, 59, 126, 156, 159, 165, 203, 229, 232, 238, 239, 240, 241, 242, 243, 244, 245, 246], "timeseriesantarcticmelt": [140, 146, 162, 172, 188, 190], "timeseriesantarcticregion": 241, "timeseriescalcfunct": 127, "timeseriesfilenam": 129, "timeseriesoceanregion": [140, 146, 162], "timeseriesohc": [1, 2], "timeseriesohcanomali": [140, 143, 146, 213], "timeseriessalinityanomali": [140, 146], "timeseriesseaic": 1, "timeseriesseaiceareavol": [2, 140, 146, 185, 189, 196], "timeseriessst": [1, 2, 140, 142, 144, 146, 159], "timeseriesstatsmonthli": [46, 47, 61, 64, 109, 110, 126, 142, 143, 144, 147, 170, 171, 203, 231, 235], "timeseriesstatsmonthlyampkg": [147, 170, 171], "timeseriesstatsmonthlymaxoutput": 61, "timeseriesstatsmonthlyminoutput": 61, "timeseriesstatsmonthlyoutput": [61, 141, 143, 147, 170, 171, 241], "timeseriesstatsmonthlyrestart": [147, 170, 171], "timeseriessubdirectori": 159, "timeseriestemperatureanomali": [140, 146], "timeseriestransport": [140, 146], "timesindataset": 127, "timestart": 4, "timestartfirstyear": 4, "timestr": 11, "timevariablenam": [85, 99, 105, 106], "tindex": 141, "tinker": 239, "titl": [46, 47, 109, 110, 113, 114, 115, 116, 117, 119, 120, 141, 142, 143, 144, 160, 162, 163, 172, 230, 231, 235, 241, 244], "titlefontcolor": 160, "titlefonts": [113, 114, 115, 116, 117, 119, 120, 160, 234, 244], "titlefontweight": 160, "titlei": 116, "titlenam": [230, 231, 235], "tmp": 144, "tmp3_7gpndz": 144, "tmp6zm13a0": 142, "tmp76l7of28": 142, "tmpe2a9yblb": 142, 
"tmph58_hgz4": 144, "tmpj94wpf9y": 142, "tmpt9n4vb5n": 144, "tmpxt8x1h_6": 144, "to_scrip": 9, "togeth": [77, 156, 159], "too": [118, 127, 143, 144, 148, 151, 156, 180], "tool": [3, 142, 143, 144, 148, 153, 156, 158, 203], "top": [109, 111, 140, 141, 142, 144, 197, 206, 207, 212, 226, 228, 237, 240], "topex": [173, 177, 195], "topographi": [145, 201, 202, 214, 234], "tot": 240, "total": [3, 142, 144, 153, 196, 229], "total_energy_flux": 229, "total_mass_chang": 229, "total_mass_flux": 229, "totten": 239, "touch": 144, "tour": 141, "tournad": [173, 201, 204], "trace": [1, 13, 15, 144], "traceback": [1, 144], "tracer": 238, "track": [143, 177], "tracy_tremenchu": 239, "trail": [113, 114, 115, 116, 117, 119, 120], "transect": [44, 52, 111, 116, 117, 140, 142, 143, 144, 160, 168, 226, 228, 230, 237, 247], "transectgroup": 138, "transectstoplot": 246, "transfer": 186, "transport": [3, 35, 44, 141, 145, 201, 202, 204, 221, 223, 233, 238, 246], "transporttransect": 246, "transpos": 141, "treat": 151, "tree": [1, 4, 5, 7, 10], "trenbert": 197, "trenberth": [197, 201, 202], "trend": 196, "tri": [7, 11, 116, 117], "triad": [141, 143], "triangul": [116, 117], "triangulation_arg": [116, 117], "troubl": [4, 142], "troubleshoot": 142, "troublesom": 4, "true": [1, 7, 8, 13, 54, 56, 77, 78, 85, 95, 98, 109, 113, 115, 116, 117, 124, 141, 142, 143, 147, 148, 154, 156, 160, 170, 171, 210, 218, 219, 220, 221, 222, 223, 224, 225, 231, 233, 238, 244], "true_scale_latitud": 141, "truncat": [113, 114, 115, 116, 117, 119, 120, 147, 170, 171], "try": [1, 3, 7, 8, 141, 144, 203], "tsdiagramsfor": 236, "tsdiagramsforantarcticregion": 236, "tsdiagramsforoceanbasin": 236, "tsugaru": 246, "tthreshmld": [147, 170], "tucker": 239, "tune": 181, "tupl": [113, 114, 115, 116, 117, 119, 120], "turn": [2, 7, 54, 141, 142, 143, 163, 238], "turner": 0, "tutori": [141, 142, 143, 144, 229], "twice": 179, "two": [3, 4, 11, 116, 117, 119, 141, 143, 147, 149, 150, 152, 167, 170, 171, 177, 178, 183, 184, 197, 
203, 216, 232, 236], "twofold": 143, "txt": [142, 203], "type": [4, 5, 33, 64, 76, 77, 78, 85, 97, 99, 105, 127, 141, 142, 144, 147, 149, 159, 160, 163, 167, 170, 171, 205, 206, 207, 209, 210, 217, 220, 221, 222, 223, 226, 227, 228, 229, 230, 231, 233, 237, 247], "typeerror": [85, 99, 105], "typic": [1, 4, 5, 9, 11, 27, 52, 59, 72, 80, 81, 85, 99, 105, 106, 116, 117, 124, 127, 138, 142, 143, 144, 147, 148, 150, 151, 152, 153, 158, 162, 167, 170, 171, 213, 218, 219, 224, 225, 230, 237, 239, 244, 247], "u": [3, 141, 143, 172, 179], "uc": 172, "ucsd": [176, 194, 201, 202], "ugo": 142, "ui": 142, "ultra": 144, "unavail": [61, 126], "uncertainti": [119, 174, 180], "unchang": [4, 102, 141, 210, 218, 219, 224, 225, 244], "unclear": 11, "uncom": [163, 226, 230, 237], "uncoupl": [183, 184], "under": [1, 3, 4, 46, 47, 141, 142, 143, 144, 148, 149, 153, 156, 165, 172, 177, 203, 206, 207, 226], "underli": [174, 180], "underscor": [1, 2, 10, 135, 136, 137, 143], "understand": [140, 141], "underwood": 239, "unexpect": 143, "unifi": [5, 142, 153], "uniform": [142, 144], "uniform_0_to_4000m_at_10m": [167, 230, 237, 247], "unind": 141, "uniqu": [143, 167, 182, 203], "unit": [4, 9, 11, 46, 47, 109, 110, 114, 115, 116, 117, 141, 143, 208, 230, 231, 233, 235, 241], "units_scale_factor": 143, "unitslabel": [47, 109, 110, 141, 143], "univ": 179, "unknown": [144, 168, 173, 174], "unless": [117, 142, 144, 148, 162], "unlik": 4, "unmanag": 160, "unmask": [83, 84], "unmasked_": 203, "unnecessarili": [4, 148], "unneed": 141, "unphys": 148, "unpreced": 200, "unstructur": [5, 143, 147, 171], "unsupport": [85, 99, 105, 152], "untar": 144, "until": [7, 8, 143, 203], "unus": 141, "up": [1, 2, 4, 8, 53, 56, 60, 62, 71, 81, 112, 141, 142, 143, 148, 151, 153, 162, 165, 167, 203, 206, 207, 213, 226, 228, 229, 236, 239], "up2bar": 179, "updat": [1, 4, 7, 8, 9, 11, 13, 18, 127, 141, 153, 156, 162, 180, 186, 191, 199, 200, 203, 216], "upon": [190, 197, 201, 202], "upper": [111, 116, 117, 180, 199, 213, 
240], "uppercas": 2, "upperfieldnam": 1, "upperxaxisticklabelprecis": [116, 117], "us": [1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 14, 18, 19, 20, 45, 46, 47, 52, 61, 64, 65, 67, 69, 72, 74, 76, 77, 78, 79, 81, 82, 85, 99, 105, 106, 109, 110, 111, 112, 114, 116, 117, 118, 119, 121, 122, 124, 126, 127, 129, 131, 132, 133, 135, 136, 137, 138, 140, 141, 142, 143, 144, 147, 148, 149, 150, 151, 152, 153, 155, 156, 157, 158, 159, 161, 162, 163, 165, 167, 169, 170, 171, 172, 173, 174, 176, 177, 178, 180, 181, 183, 184, 186, 189, 193, 194, 195, 196, 197, 198, 199, 200, 201, 203, 205, 206, 207, 209, 210, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 234, 235, 236, 237, 238, 241, 247], "usag": [11, 156], "use_numpyfunc": 143, "usempasmaskcr": [124, 139], "usencclimo": [144, 148], "usencremap": [64, 148], "user": [2, 3, 7, 11, 13, 142, 143, 148, 150, 152, 156, 167, 172, 174, 180, 194, 195, 200, 226, 237, 240, 241], "usernam": 142, "usr": 141, "usual": [2, 45, 143, 240], "usulli": [243, 245], "ut": [141, 143], "util": [1, 4, 5, 10, 11, 143, 162, 191, 203], "utility_script": 162, "utsikkar": 239, "uv": 177, "v": [47, 116, 141, 143, 163, 179, 208, 212, 229, 231, 236, 237, 240, 243, 245, 247], "v0": [141, 161, 163, 240, 242], "v1": [141, 156, 163, 195, 248], "v2": [145, 183, 184, 201, 202, 208], "v4": [181, 201, 202], "v5kd1vvf": 181, "vald": 179, "valid": [1, 2, 4, 5, 7, 8, 9, 10, 11, 109, 116, 117, 141, 142, 143, 144, 148, 159, 164, 167, 186, 195, 229, 230, 237, 247], "valu": [1, 4, 5, 7, 8, 37, 46, 47, 61, 77, 78, 85, 86, 87, 89, 90, 91, 92, 96, 98, 105, 106, 109, 110, 112, 114, 116, 117, 119, 127, 141, 142, 143, 144, 147, 148, 149, 155, 156, 158, 160, 165, 167, 168, 170, 171, 176, 196, 203, 205, 206, 207, 211, 212, 213, 214, 215, 216, 218, 219, 224, 225, 226, 228, 229, 230, 232, 233, 236, 237, 238, 240, 241, 243, 245, 247], "valueerror": [1, 10, 55, 62, 79, 85, 94, 97, 99, 105, 108, 121, 122, 131, 132, 133, 135, 136, 137, 141, 143], "van": 0, "var": 
[141, 147, 170, 171], "var_arrai": [147, 170], "var_list": 11, "vari": [3, 5, 77, 78, 127, 178], "variabl": [1, 3, 4, 5, 6, 9, 10, 27, 33, 34, 35, 36, 41, 45, 46, 47, 51, 52, 57, 61, 62, 64, 72, 73, 77, 78, 85, 99, 105, 106, 108, 109, 110, 123, 124, 126, 129, 138, 140, 141, 142, 143, 144, 179, 182, 186, 191, 195, 203, 208, 229, 235], "variable_list": [4, 11, 141, 143], "variable_map": [4, 11], "variable_mapping_reorg": [4, 11], "variablelist": [9, 33, 45, 61, 62, 64, 85, 99, 105, 106, 108, 126, 129, 141, 143, 234], "variablemap": [1, 4, 85], "varial": [85, 105, 106], "varianc": [145, 201, 202, 209], "variant": 7, "variat": [172, 181, 201, 202], "varieti": 201, "variou": [2, 4, 5, 8, 10, 11, 46, 142, 143, 144, 156, 168, 203, 206, 207, 226, 228], "varnam": 1, "ve": 141, "velmag": [142, 144], "veloc": [3, 141, 179, 194, 226, 230, 237, 238], "velocitymagnitud": [226, 237], "venabl": 239, "veneziani": 0, "verbos": [11, 13, 15, 142, 144], "verdi": 239, "veri": [3, 4, 5, 7, 142, 144, 151, 153, 156, 230], "verifi": [1, 5, 7, 8], "version": [1, 9, 11, 55, 64, 85, 141, 142, 143, 144, 153, 179, 180, 181, 182, 183, 184, 186, 191, 195, 198, 199, 203], "versu": 191, "vert_index": 143, "vertexdegre": 141, "vertic": [5, 64, 82, 115, 116, 117, 141, 142, 143, 144, 167, 200, 210, 213, 218, 219, 220, 221, 222, 223, 224, 225, 230, 233, 235, 237, 247], "verticalbound": 167, "verticalcomparisongrid": [167, 230, 237, 247], "verticalcomparisongridnam": [167, 230, 237, 247], "vertices_on_cel": 141, "vertices_on_edg": 141, "verticesoncel": 141, "verticesonedg": 141, "vertvelocitytop": [147, 170], "via": [1, 3, 4, 5, 7, 9, 11, 81, 116, 140, 177, 178], "view": [193, 203], "vigrid": 239, "vim": [1, 142], "vincenn": 239, "violat": [1, 4], "viridi": [208, 211, 212, 226, 227], "visual": [3, 195], "viva": 181, "vmax": [141, 149, 205, 206, 207, 208, 209, 210, 217, 220, 221, 222, 223, 226, 227, 228, 230, 231, 237, 247], "vmin": [141, 149, 205, 206, 207, 208, 209, 210, 217, 220, 221, 222, 223, 226, 
227, 228, 230, 231, 237, 247], "volmax": 236, "volmin": 236, "volnh": 244, "volsh": 244, "volum": [145, 174, 199, 200, 201, 204, 236, 244], "volumetr": 236, "voyeykov": 239, "vp2bar": 179, "w": [7, 181, 183, 184, 240], "wa": [4, 7, 14, 95, 96, 106, 116, 117, 141, 142, 143, 144, 153, 156, 159, 163, 177, 178, 179, 183, 191, 193, 195, 200, 238, 239, 241], "wai": [1, 2, 4, 7, 8, 10, 11, 141, 143, 158, 159, 180, 203], "wait": [7, 143, 203], "wait_for_task": 7, "waitfortask": 7, "waitpid": 7, "walk": [141, 143, 144], "want": [3, 4, 5, 8, 9, 11, 141, 142, 143, 144, 159, 203, 226], "warn": [1, 54, 141, 148, 156, 159], "wasn": 8, "wast": [143, 148, 239], "water": [192, 193, 200, 236], "wave": [32, 145, 201, 202, 227], "waveform": 173, "wc14to60e2r3": [142, 144, 156], "wcrp": 177, "we": [1, 2, 4, 5, 7, 8, 9, 10, 11, 13, 141, 142, 143, 144, 149, 153, 159, 175, 178, 185, 187, 196, 200, 234, 236, 238], "weather": [180, 197], "web": [3, 141, 142, 143, 144, 159, 160, 162, 163, 203, 234, 235, 236], "web_port": 142, "webpag": [142, 144, 154, 194, 241], "websit": [173, 175, 176, 177, 178, 181, 182, 183, 184, 186, 187, 189, 191, 193, 194, 196, 198, 199, 200, 201, 202, 203, 204], "weddel": [234, 236, 241], "wei": 181, "weight": [5, 7, 9, 77, 78, 82, 127, 142, 144, 148, 150, 160, 230, 237, 247], "weightlist": 234, "well": [2, 7, 9, 10, 116, 117, 141, 142, 143, 144, 153, 160, 191, 199, 213, 230, 231, 235, 238, 240, 241], "were": [5, 7, 11, 52, 141, 142, 144, 152, 163, 177, 179, 183, 184, 185, 186, 194, 195, 196, 214], "weren": 142, "west": [179, 239], "western": [234, 236, 241], "western_ross": 239, "wgs84": 141, "what": [1, 4, 7, 141, 142, 143, 159, 191, 203, 210, 218, 219, 220, 221, 222, 223, 224, 225], "whatev": [141, 142, 159], "when": [1, 3, 4, 5, 7, 8, 10, 11, 13, 15, 46, 47, 60, 61, 109, 110, 116, 117, 126, 142, 143, 144, 149, 150, 151, 152, 153, 159, 175, 177, 178, 180, 181, 185, 187, 195, 196, 197, 203, 209, 212, 214], "whenev": [4, 142, 144, 200], "where": [1, 2, 5, 7, 45, 46, 
47, 52, 61, 77, 78, 98, 109, 110, 112, 114, 115, 116, 117, 126, 127, 141, 142, 143, 144, 152, 156, 158, 159, 161, 174, 180, 203, 226, 230, 231, 235, 236, 238], "wherea": [77, 78, 180, 228], "whether": [1, 4, 7, 9, 13, 15, 54, 55, 60, 64, 82, 115, 116, 117, 127, 141, 143, 154, 156, 205, 208, 211, 212, 220, 221, 222, 223, 227, 228, 231], "which": [1, 3, 4, 5, 7, 8, 11, 13, 20, 45, 46, 47, 52, 57, 62, 64, 71, 72, 80, 81, 82, 84, 85, 100, 106, 108, 109, 110, 113, 114, 115, 116, 117, 119, 120, 126, 127, 128, 129, 140, 141, 142, 143, 144, 148, 149, 150, 153, 156, 158, 159, 161, 163, 165, 167, 168, 173, 177, 179, 180, 181, 183, 184, 195, 196, 200, 203, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 233, 234, 236, 238, 239, 240, 241, 242, 243, 244, 245, 246], "while": [3, 4, 5, 7, 57, 141, 143, 156, 157, 158, 199, 206, 207, 212, 226, 232], "white": [116, 117, 141, 246], "white_cmo_deep": 236, "whitworth": [200, 201, 202], "whole": [1, 142, 203], "whp": 200, "why": [11, 144], "wich": 5, "wide": [181, 193, 210, 218, 219, 220, 221, 222, 223, 224, 225], "width": [111, 112, 113, 114, 116, 117, 119, 120, 141], "wilkin": 239, "wilma_robert_down": 239, "window": [142, 144, 231, 238, 240, 242, 243, 244, 245], "windward": 246, "wise": 151, "wish": [3, 142, 144, 152, 156], "withe": 4, "within": [1, 4, 7, 11, 85, 96, 99, 100, 142, 143, 147, 149, 151, 152, 153, 156, 159, 162, 169, 170, 175, 178, 185, 187, 196, 201, 203, 234, 239], "without": [1, 4, 5, 7, 11, 45, 77, 78, 127, 141, 142, 144, 147, 148, 153, 159, 169, 237], "withrow": 239, "woa": [141, 198, 208, 228], "woa18": [145, 201, 202, 228, 236, 241], "woce": [145, 201, 202, 247], "wocesalinitytransect": 247, "wocetemperaturetransect": 247, "wocetransect": [140, 146, 200], "wod": 199, "woefulli": 141, "wolf": 7, "wolfram": [0, 4, 7], "won": [1, 7, 141, 143], "woodruff": 181, "worbi": 174, "word": [2, 143], "wordi": 239, "work": [1, 3, 7, 11, 20, 58, 
88, 116, 117, 127, 141, 142, 143, 144, 153, 172, 180, 205, 217, 226, 230], "workflow": [142, 144], "worktre": 141, "world": [145, 159, 177, 180, 199, 200, 201, 202, 208, 247], "worth": [144, 156], "would": [1, 2, 3, 4, 5, 7, 9, 10, 11, 141, 142, 143, 144, 148, 151, 152, 159, 163, 203, 230, 240, 241], "write": [1, 7, 9, 13, 15, 45, 52, 127, 141, 142, 144, 159, 160], "write_netcdf": [141, 143], "writelogfil": 56, "written": [3, 7, 45, 56, 113, 114, 115, 141, 142, 144, 147, 159, 160, 170, 171, 172, 194], "wrong": 142, "wrt": 203, "www": [142, 177, 179], "wyld": 239, "wyrtki": 3, "wytrki": 3, "x": [0, 113, 116, 117, 118, 119, 141, 166, 178, 186, 187, 196, 226, 231, 233, 235, 238, 239, 240, 242, 243, 244, 245, 246], "x1": 195, "x86_64": [142, 144], "x_0": 141, "x_inlin": 141, "xarrai": [4, 7, 11, 45, 61, 66, 67, 73, 76, 77, 78, 85, 99, 105, 106, 107, 108, 113, 116, 117, 119, 120, 128, 129, 141, 143, 144, 148, 151], "xbuffer": 111, "xc": 141, "xcoord": [116, 117], "xcoordistim": [116, 117], "xcorner": 141, "xformatt": 141, "xlabel": [113, 116, 117, 119], "xlim": [113, 116, 117], "xlimglob": 233, "xlocat": 141, "xml": [33, 46, 47, 52, 109, 110, 143], "xmlfilenam": [33, 52, 143], "xoutlin": 116, "xoutlinediff": 117, "xoutlinemodel": 117, "xoutlineref": 117, "xr": [4, 141, 143], "xsede": 194, "xtime": [4, 10, 11, 85, 99, 105, 106, 147, 170, 171], "xtime_end": 11, "xtime_endmonthli": 99, "xtime_start": 11, "xtime_startmonthli": 99, "xvf": 144, "xylar": [0, 1, 2, 4, 5, 7, 8, 9, 10, 11, 141, 142, 144], "xylarstorm": 142, "y": [46, 113, 116, 117, 141, 142, 147, 167, 170, 171, 203, 233, 241], "y_0": 141, "y_inlin": 141, "yank": 141, "yaxislabel": 46, "ybuffer": 111, "yc": 141, "ycorner": 141, "ye": [142, 144], "year": [4, 10, 27, 33, 42, 43, 59, 61, 76, 116, 117, 118, 119, 126, 127, 130, 131, 141, 142, 143, 144, 147, 159, 163, 166, 170, 171, 174, 189, 194, 203, 213, 216, 229, 231, 234, 238, 239, 240, 242, 243, 244, 245, 246], "year_offset": [4, 11], "yearend": 4, 
"yearendpreprocess": 4, "yearli": 191, "yearoffset": 4, "yearspercacheupd": 127, "yearstart": 4, "yearstrid": 118, "yearstridextick": [116, 117, 119, 166, 231, 238, 239, 240, 242, 243, 244, 245, 246], "yet": [1, 2, 5, 62, 66, 141, 142, 144, 147, 171], "yet_another_str": 11, "yformatt": 141, "yi": [185, 201, 204], "ylabel": [113, 116, 117, 119], "ylgnbu": 208, "ylim": [113, 116, 117, 231], "ylocat": 141, "you": [1, 141, 142, 143, 144, 150, 152, 153, 156, 158, 159, 160, 166, 174, 175, 176, 178, 185, 187, 194, 195, 196, 203, 218, 219, 226, 230, 232, 233, 239], "your": [141, 142, 143, 144, 152, 153, 194, 203, 230, 239], "yr": [141, 208], "yyyi": [1, 10, 135, 136, 137, 142, 144], "z": [116, 117], "z_mid": 143, "zcoord": [116, 117], "zero": [129, 141, 209, 214, 236], "zhang": [181, 189, 201, 204], "zmax": [236, 241], "zmin": [236, 241], "zonal": [3, 179, 191, 226, 230, 233, 237, 238], "zonalveloc": [142, 144, 226, 230, 237], "zoom": 160, "zorder": 141, "zoutlin": 116, "zoutlinediff": 117, "zoutlinemodel": 117, "zoutlineref": 117, "zubchatyi": 239, "zulema": 179, "zwalli": [185, 196, 201, 204, 239], "zweng": [199, 201, 202], "\u00f1": [155, 159, 232]}, "titles": ["Main Authors", "Analysis Task Template", "Config File Reorganization", "Eddy Kinetic Energy Climatology Mapping", "Generalize Calendar supported by Analysis", "Generalized Horizontal Interpolation in MPAS-Analysis", "Design Documents", "Support Parallel Tasks", "Prerequisite Tasks and Subtasks", "Remapper for \u201conline\u201d remapping of data sets", "Reorganize Timekeeping", "Moving variable mapping outside of mpas_xarray", "API reference", "mpas_analysis.__main__.add_task_and_subtasks", "mpas_analysis.__main__.build_analysis_list", "mpas_analysis.__main__.determine_analyses_to_generate", "mpas_analysis.__main__.main", "mpas_analysis.__main__.run_analysis", "mpas_analysis.__main__.update_generate", "mpas_analysis.__main__.wait_for_task", "mpas_analysis.download_data.download_analysis_data", 
"mpas_analysis.ocean.ClimatologyMapAntarcticMelt", "mpas_analysis.ocean.ClimatologyMapArgoSalinity", "mpas_analysis.ocean.ClimatologyMapArgoTemperature", "mpas_analysis.ocean.ClimatologyMapEKE", "mpas_analysis.ocean.ClimatologyMapMLD", "mpas_analysis.ocean.ClimatologyMapMLDMinMax", "mpas_analysis.ocean.ClimatologyMapOHCAnomaly", "mpas_analysis.ocean.ClimatologyMapSSH", "mpas_analysis.ocean.ClimatologyMapSSS", "mpas_analysis.ocean.ClimatologyMapSST", "mpas_analysis.ocean.ClimatologyMapSose", "mpas_analysis.ocean.ClimatologyMapWaves", "mpas_analysis.ocean.ConservationTask", "mpas_analysis.ocean.IndexNino34", "mpas_analysis.ocean.MeridionalHeatTransport", "mpas_analysis.ocean.OceanHistogram", "mpas_analysis.ocean.StreamfunctionMOC", "mpas_analysis.ocean.TimeSeriesAntarcticMelt", "mpas_analysis.ocean.TimeSeriesOHCAnomaly", "mpas_analysis.ocean.TimeSeriesOceanRegions", "mpas_analysis.ocean.TimeSeriesSST", "mpas_analysis.ocean.TimeSeriesSalinityAnomaly", "mpas_analysis.ocean.TimeSeriesTemperatureAnomaly", "mpas_analysis.ocean.TimeSeriesTransport", "mpas_analysis.ocean.compute_anomaly_subtask.ComputeAnomalySubtask", "mpas_analysis.ocean.plot_depth_integrated_time_series_subtask.PlotDepthIntegratedTimeSeriesSubtask", "mpas_analysis.ocean.plot_hovmoller_subtask.PlotHovmollerSubtask", "mpas_analysis.sea_ice.ClimatologyMapIcebergConc", "mpas_analysis.sea_ice.ClimatologyMapSeaIceConc", "mpas_analysis.sea_ice.ClimatologyMapSeaIceThick", "mpas_analysis.sea_ice.TimeSeriesSeaIce", "mpas_analysis.shared.AnalysisTask", "mpas_analysis.shared.AnalysisTask.add_subtask", "mpas_analysis.shared.AnalysisTask.check_analysis_enabled", "mpas_analysis.shared.AnalysisTask.check_generate", "mpas_analysis.shared.AnalysisTask.run", "mpas_analysis.shared.AnalysisTask.run_after", "mpas_analysis.shared.AnalysisTask.run_task", "mpas_analysis.shared.AnalysisTask.set_start_end_date", "mpas_analysis.shared.AnalysisTask.setup_and_check", "mpas_analysis.shared.climatology.MpasClimatologyTask", 
"mpas_analysis.shared.climatology.MpasClimatologyTask.add_variables", "mpas_analysis.shared.climatology.MpasClimatologyTask.get_file_name", "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask", "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.add_comparison_grid_descriptor", "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_masked_climatology", "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.customize_remapped_climatology", "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_masked_file_name", "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.get_remapped_file_name", "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.run_task", "mpas_analysis.shared.climatology.RemapMpasClimatologySubtask.setup_and_check", "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask", "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.build_observational_dataset", "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_file_name", "mpas_analysis.shared.climatology.RemapObservedClimatologySubtask.get_observation_descriptor", "mpas_analysis.shared.climatology.add_years_months_days_in_month", "mpas_analysis.shared.climatology.compute_climatology", "mpas_analysis.shared.climatology.compute_monthly_climatology", "mpas_analysis.shared.climatology.get_comparison_descriptor", "mpas_analysis.shared.climatology.get_masked_mpas_climatology_file_name", "mpas_analysis.shared.climatology.get_remapped_mpas_climatology_file_name", "mpas_analysis.shared.climatology.get_remapper", "mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_directory", "mpas_analysis.shared.climatology.get_unmasked_mpas_climatology_file_name", "mpas_analysis.shared.generalized_reader.generalized_reader.open_multifile_dataset", "mpas_analysis.shared.io.namelist_streams_interface.NameList.__getattr__", "mpas_analysis.shared.io.namelist_streams_interface.NameList.__getitem__", 
"mpas_analysis.shared.io.namelist_streams_interface.NameList.__init__", "mpas_analysis.shared.io.namelist_streams_interface.NameList.get", "mpas_analysis.shared.io.namelist_streams_interface.NameList.getbool", "mpas_analysis.shared.io.namelist_streams_interface.NameList.getfloat", "mpas_analysis.shared.io.namelist_streams_interface.NameList.getint", "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.__init__", "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.find_stream", "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.has_stream", "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.read", "mpas_analysis.shared.io.namelist_streams_interface.StreamsFile.readpath", "mpas_analysis.shared.io.namelist_streams_interface.convert_namelist_to_dict", "mpas_analysis.shared.io.open_mpas_dataset", "mpas_analysis.shared.io.utility.build_config_full_path", "mpas_analysis.shared.io.utility.check_path_exists", "mpas_analysis.shared.io.utility.make_directories", "mpas_analysis.shared.io.utility.paths", "mpas_analysis.shared.io.write_netcdf", "mpas_analysis.shared.mpas_xarray.mpas_xarray.open_multifile_dataset", "mpas_analysis.shared.mpas_xarray.mpas_xarray.preprocess", "mpas_analysis.shared.mpas_xarray.mpas_xarray.remove_repeated_time_index", "mpas_analysis.shared.mpas_xarray.mpas_xarray.subset_variables", "mpas_analysis.shared.plot.PlotClimatologyMapSubtask", "mpas_analysis.shared.plot.PlotClimatologyMapSubtask.set_plot_info", "mpas_analysis.shared.plot.add_inset", "mpas_analysis.shared.plot.colormap.setup_colormap", "mpas_analysis.shared.plot.plot_1D", "mpas_analysis.shared.plot.plot_global_comparison", "mpas_analysis.shared.plot.plot_polar_comparison", "mpas_analysis.shared.plot.plot_vertical_section", "mpas_analysis.shared.plot.plot_vertical_section_comparison", "mpas_analysis.shared.plot.ticks.plot_xtick_format", "mpas_analysis.shared.plot.timeseries_analysis_plot", 
"mpas_analysis.shared.plot.timeseries_analysis_plot_polar", "mpas_analysis.shared.projection.get_cartopy_projection", "mpas_analysis.shared.projection.get_pyproj_projection", "mpas_analysis.shared.regions.compute_region_masks.ComputeRegionMasks", "mpas_analysis.shared.regions.compute_region_masks_subtask.ComputeRegionMasksSubtask", "mpas_analysis.shared.regions.compute_region_masks_subtask.get_feature_list", "mpas_analysis.shared.time_series.MpasTimeSeriesTask", "mpas_analysis.shared.time_series.cache_time_series", "mpas_analysis.shared.time_series.compute_moving_avg", "mpas_analysis.shared.time_series.compute_moving_avg_anomaly_from_start", "mpas_analysis.shared.timekeeping.MpasRelativeDelta.MpasRelativeDelta", "mpas_analysis.shared.timekeeping.utility.date_to_days", "mpas_analysis.shared.timekeeping.utility.datetime_to_days", "mpas_analysis.shared.timekeeping.utility.days_to_datetime", "mpas_analysis.shared.timekeeping.utility.get_simulation_start_time", "mpas_analysis.shared.timekeeping.utility.string_to_datetime", "mpas_analysis.shared.timekeeping.utility.string_to_days_since_date", "mpas_analysis.shared.timekeeping.utility.string_to_relative_delta", "mpas_analysis.shared.transects.compute_transect_masks_subtask.ComputeTransectMasksSubtask", "mpas_analysis.shared.transects.compute_transect_masks_subtask.compute_mpas_transect_masks", "MPAS-Analysis", "Developers: Adding a new analysis task", "Developer: Getting Started", "Developers: Understanding an analysis task", "User: Getting Started", "<no title>", "Analysis Tasks", "MPAS Components and E3SM", "Climatology", "Colormaps", "Comparison Grids", "Dask threads and subprocess count", "Diagnostics", "Execute", "HTML", "Index", "Input", "Moving Average", "Ocean, Sea Ice and Iceberg Observations", "Output", "Plot", "Preprocessed Reference Runs", "Regions", "Runs", "Seasons", "Time Series", "Time-Axis Tick Marks", "Output Grids for Transects", "Configuration", "E3SM", "MPAS Ocean", "MPAS-Seaice", "Antarctic melt 
rates and fluxes", "Iceberg Concentration: Altiberg", "Sea ice production and transport: Haumann et al 2016", "SSS from NASA Aquarius satellite", "Argo Mixed Layer Depth (MLD) climatology", "AVISO Absolute Dynamic Topography", "Ice concentration: SSM/I, Bootstrap algorithm", "Surface Current Variance from Drifter Data", "Wave Reanalysis: ERA5", "ERS SSTv4 Nino 3.4 Index", "GLODAPv2", "HadISST Nino 3.4 Index", "SST merged Hadley Center-NOAA/OI data set", "IceSat Ice Thickness", "Landschuetzerv2016 SOM-FFN", "Ice concentration: SSM/I, NASATeam algorithm", "Antarctic melt rates and fluxes", "PIOMAS Arctic Sea Ice Volume Reanalysis", "Antarctic melt rates and fluxes", "Roemmich-Gilson Argo Climatology", "Antarctic Seafloor Temperature and Salinity", "SeaWiFS", "2005-2010 climatology from SOSE the Southern Ocean State Estimate (SOSE)", "Wave Satelite Altimeter Observations: ESA Sea State Climate Change Initiative", "Ice area and extent time series: SSM/I derived", "Meridional Heat Transport (MHT)", "World Ocean Atlas v2", "WOA18 Temperature and Salinity Climatology", "WOCE sections", "Observations", "<no title>", "Quick Start Guide", "<no title>", "climatologyMapAntarcticMelt", "climatologyMapArgoSalinity", "climatologyMapArgoTemperature", "climatologyMapBGC", "climatologyMapEKE", "climatologyMapIcebergConcSH", "climatologyMapMLD", "climatologyMapMLDMinMax", "climatologyMapOHCAnomaly", "climatologyMapSSH", "climatologyMapSSS", "climatologyMapSST", "climatologyMapSchmidtko", "climatologyMapSeaIceConcNH", "climatologyMapSeaIceConcSH", "climatologyMapSeaIceMeltingNH", "climatologyMapSeaIceMeltingSH", "climatologyMapSeaIceProductionNH", "climatologyMapSeaIceProductionSH", "climatologyMapSeaIceThickNH", "climatologyMapSeaIceThickSH", "climatologyMapSose", "climatologyMapWaves", "climatologyMapWoa", "conservation", "geojsonTransects", "hovmollerOceanRegions", "indexNino34", "meridionalHeatTransport", "oceanHistogram", "oceanRegionalProfiles", "regionalTSDiagrams", 
"soseTransects", "streamfunctionMOC", "timeSeriesAntarcticMelt", "timeSeriesOHCAnomaly", "timeSeriesOceanRegions", "timeSeriesSST", "timeSeriesSalinityAnomaly", "timeSeriesSeaIceAreaVol", "timeSeriesTemperatureAnomaly", "timeSeriesTransport", "woceTransects", "Versions"], "titleterms": {"": 140, "1": [141, 142, 143, 144], "2": [141, 142, 143, 144], "2005": 194, "2010": 194, "2016": 174, "3": [141, 142, 143, 144, 181, 183], "4": [141, 142, 143, 144, 181, 183], "5": [141, 142, 143, 144], "6": [141, 142, 144], "7": [141, 142, 144], "8": 141, "One": 142, "The": [141, 143], "__getattr__": 86, "__getitem__": 87, "__init__": [88, 93], "__main__": [13, 14, 15, 16, 17, 18, 19], "absolut": 177, "activ": [142, 144], "ad": [141, 142, 149], "add_comparison_grid_descriptor": 65, "add_inset": 111, "add_subtask": 53, "add_task_and_subtask": 13, "add_vari": 62, "add_years_months_days_in_month": 76, "al": 174, "algorithm": [3, 178, 187], "altiberg": 173, "altimet": 195, "an": [141, 142, 143, 144], "analysi": [1, 4, 5, 12, 140, 141, 142, 143, 144, 146, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 203], "analysistask": [52, 53, 54, 55, 56, 57, 58, 59, 60], "anomali": [148, 165], "antarct": [172, 188, 190, 192], "api": 12, "aquariu": 175, "ar": 203, "arctic": 189, "area": 196, "argo": [176, 191], "atla": 198, "attribut": 143, "author": [0, 140], "averag": 157, "aviso": 177, "axi": 166, "base": 12, "bibliographi": 3, "big": 143, "bin": [234, 236], "bootstrap": 178, "bound": 241, "build_analysis_list": 14, "build_config_full_path": 100, "build_observational_dataset": 73, "cache_time_seri": 127, "calendar": 4, "cchdo": 200, "center": 184, "chang": 195, "check_analysis_en": 54, "check_gener": 55, "check_path_exist": 101, "class": [12, 141], "climat": 195, "climatologi": [3, 12, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 142, 144, 148, 176, 
191, 194, 199], "climatologymapantarcticmelt": [21, 205], "climatologymapargosalin": [22, 206], "climatologymapargotemperatur": [23, 207], "climatologymapbgc": 208, "climatologymapbsf": 141, "climatologymapek": [24, 209], "climatologymapicebergconc": 48, "climatologymapicebergconcsh": 210, "climatologymapmld": [25, 211], "climatologymapmldminmax": [26, 212], "climatologymapohcanomali": [27, 143, 213], "climatologymapschmidtko": 217, "climatologymapseaiceconc": 49, "climatologymapseaiceconcnh": 218, "climatologymapseaiceconcsh": 219, "climatologymapseaicemeltingnh": 220, "climatologymapseaicemeltingsh": 221, "climatologymapseaiceproductionnh": 222, "climatologymapseaiceproductionsh": 223, "climatologymapseaicethick": 50, "climatologymapseaicethicknh": 224, "climatologymapseaicethicksh": 225, "climatologymapsos": [31, 226], "climatologymapssh": [28, 214], "climatologymapsss": [29, 215], "climatologymapsst": [30, 216], "climatologymapwav": [32, 227], "climatologymapwoa": 228, "clone": 142, "code": [141, 142, 143], "colorbar": 149, "colormap": [112, 149], "colormpa": 149, "comparison": 150, "compon": 147, "comput": 148, "compute_anomaly_subtask": 45, "compute_climatologi": 77, "compute_monthly_climatologi": 78, "compute_moving_avg": 128, "compute_moving_avg_anomaly_from_start": 129, "compute_mpas_transect_mask": 139, "compute_region_mask": 123, "compute_region_masks_subtask": [124, 125], "compute_transect_masks_subtask": [138, 139], "computeanomalysubtask": 45, "computeregionmask": 123, "computeregionmaskssubtask": 124, "computetransectmaskssubtask": 138, "concentr": [173, 178, 187], "conda": [142, 144], "config": [2, 141, 162, 234, 236, 241], "configur": [142, 144, 168, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "conserv": 229, "conservationtask": 33, "constructor": [141, 143], "continu": 149, 
"contour": [149, 236], "contributor": 0, "convert_namelist_to_dict": 98, "copi": 141, "count": 151, "creat": [142, 144, 203], "creation": 153, "current": 179, "custom": 203, "customize_masked_climatologi": [66, 141, 143], "customize_remapped_climatologi": 67, "dask": [151, 156], "data": [9, 12, 156, 179, 184, 201, 203, 226], "dataset": 12, "date_to_dai": 131, "datetime_to_dai": 132, "days_to_datetim": 133, "depth": [176, 241], "deriv": 196, "descript": [172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200], "design": [3, 6], "detail": 201, "determine_analyses_to_gener": 15, "develop": [140, 141, 142, 143, 203], "diagnost": [144, 152], "diagram": 236, "directori": [152, 156, 158, 159], "document": [6, 203], "download": [12, 144, 203], "download_analysis_data": 20, "download_data": 20, "drifter": 179, "dure": 144, "dynam": 177, "e3sm": [142, 147, 169], "each": 201, "eddi": 3, "edit": 142, "end": [148, 155, 165], "energi": 3, "environ": [142, 144], "er": 181, "era5": 180, "error": [144, 156], "esa": 195, "estim": [194, 226], "et": 174, "exampl": [144, 205, 206, 207, 208, 209, 210, 211, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "execut": [142, 144, 153], "exist": 141, "extent": 196, "ffn": 186, "field": 230, "file": [2, 12, 152, 153, 156, 158, 203, 230], "find_stream": 94, "flux": [172, 188, 190], "fork": 142, "framework": 143, "from": [175, 179, 194], "full": [141, 143, 226], "gener": [4, 5, 159, 203], "generalized_read": 85, "geojson": 230, "geojsontransect": 230, "get": [89, 141, 142, 144], "get_cartopy_project": 121, "get_comparison_descriptor": 79, "get_feature_list": 125, "get_file_nam": [63, 74], "get_masked_file_nam": 68, "get_masked_mpas_climatology_file_nam": 80, "get_observation_descriptor": 75, "get_pyproj_project": 122, "get_remapp": 
82, "get_remapped_file_nam": 69, "get_remapped_mpas_climatology_file_nam": 81, "get_simulation_start_tim": 134, "get_unmasked_mpas_climatology_directori": 83, "get_unmasked_mpas_climatology_file_nam": 84, "getbool": 90, "getfloat": 91, "getint": 92, "gilson": 191, "git": 142, "github": 142, "glodapv2": 182, "grid": [150, 167], "group": [162, 234, 236, 241], "guid": [140, 203], "hadisst": 183, "hadlei": 184, "has_stream": 95, "haumann": 174, "heat": 197, "horizont": 5, "hovmolleroceanregion": 231, "html": 154, "i": [12, 178, 187, 196], "ic": [12, 147, 158, 171, 174, 178, 185, 187, 189, 196, 201, 239], "iceberg": [158, 173], "icesat": 185, "implement": 3, "index": [142, 144, 149, 155, 181, 183], "indexnino34": [34, 232], "initi": 195, "input": [142, 144, 156, 203], "instal": [142, 144, 203], "instruct": 203, "interpol": 5, "interv": 236, "io": [86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104], "kei": 142, "kinet": 3, "landschuetzerv2016": 186, "laptop": 142, "layer": 176, "level": 12, "line": 149, "list": [203, 234], "local": 142, "machin": 142, "main": [0, 16], "make": 142, "make_directori": 102, "mambaforg": 142, "map": [3, 11, 152, 153], "mark": [149, 166], "mask": 153, "melt": [172, 188, 190], "merg": 184, "meridion": 197, "meridionalheattransport": [35, 233], "mesh": 156, "method": [141, 143], "mht": 197, "miniconda": [142, 144], "miss": 156, "mix": 176, "mld": 176, "modul": 12, "move": [11, 157], "mpa": [5, 12, 140, 142, 143, 144, 147, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 203], "mpas_analysi": [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 
93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139], "mpas_xarrai": [11, 105, 106, 107, 108], "mpasclimatologytask": [61, 62, 63], "mpasrelativedelta": 130, "mpastimeseriestask": 126, "name": [156, 234, 236, 239, 241, 246], "namelist": [12, 86, 87, 88, 89, 90, 91, 92, 156], "namelist_streams_interfac": [86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98], "nasa": 175, "nasateam": 187, "nco": 153, "need": 203, "new": [141, 203], "nino": [181, 183], "noaa": 184, "o": [12, 147, 170], "observ": [144, 158, 195, 201, 205, 206, 207, 208, 209, 210, 211, 214, 215, 216, 217, 218, 219, 221, 223, 224, 225, 227, 228, 232, 233, 234, 236, 237, 239, 241, 244, 247], "ocean": [12, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 147, 158, 170, 194, 198, 201, 226], "oceanhistogram": [36, 234], "oceanregionalprofil": 235, "oi": 184, "old": [144, 203], "ones": 203, "onlin": 9, "open_mpas_dataset": 99, "open_multifile_dataset": [85, 105], "option": [141, 148, 159, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "other": [148, 230, 234, 236, 239, 241, 246], "output": [142, 143, 144, 159, 167, 203], "outsid": 11, "panel": 149, "parallel": [7, 153, 203], "path": 103, "physic": 3, "pictur": 143, "pioma": 189, "plot": [12, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 149, 160, 203], "plot_1d": 113, "plot_depth_integrated_time_series_subtask": 46, "plot_global_comparison": 114, "plot_hovmoller_subtask": 47, "plot_polar_comparison": 115, "plot_vertical_sect": 116, "plot_vertical_section_comparison": 117, "plot_xtick_format": 118, "plotclimatologymapsubtask": [109, 110], 
"plotdepthintegratedtimeseriessubtask": 46, "plothovmollersubtask": 47, "polici": [172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200], "poster": [141, 143], "preprocess": [106, 161], "prerequisit": 8, "product": 174, "project": [12, 121, 122], "purg": [144, 203], "queue": 203, "quick": 203, "rate": [172, 188, 190], "read": [12, 96], "readpath": 97, "reanalysi": [180, 189], "refer": [12, 141, 148, 161, 165, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200], "region": [12, 123, 124, 125, 162, 234, 236, 239, 241], "regionaltsdiagram": 236, "releas": [172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200], "remap": [9, 148, 158], "remapmpasbsfclimatologi": 141, "remapmpasclimatologysubtask": [64, 65, 66, 67, 68, 69, 70, 71], "remapmpasohcclimatologi": 143, "remapobservedclimatologysubtask": [72, 73, 74, 75], "remapp": 9, "remove_repeated_time_index": 107, "reorgan": [2, 10], "repositori": 142, "requir": 3, "result": [205, 206, 207, 208, 209, 210, 211, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247], "roemmich": 191, "run": [56, 142, 144, 147, 161, 163, 169, 170, 171, 203], "run_aft": 57, "run_analysi": 17, "run_task": [58, 70, 143], "salin": [192, 199], "satelit": 195, "satellit": 175, "script": [12, 141], "sea": [12, 147, 158, 171, 174, 189, 195, 201], "sea_ic": [48, 49, 50, 51], "seafloor": 192, "seaic": [147, 171], "season": 164, "seawif": 193, "section": [162, 200], "select": 141, "seri": [12, 165, 196], "set": [9, 144, 147, 169, 170, 171, 184, 201], "set_plot_info": 110, "set_start_end_d": 59, "setup": [142, 144], "setup_and_check": [60, 71, 141, 143], 
"setup_colormap": 112, "share": [12, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 143], "shelf": 239, "som": 186, "sose": [194, 226], "sosetransect": 237, "sourc": [172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200], "southern": [194, 226], "specifi": 149, "ssh": 142, "ssm": [178, 187, 196], "sss": 175, "sst": 184, "sstv4": 181, "standalon": [147, 170, 171], "start": [141, 142, 144, 148, 155, 165, 203], "state": [194, 195, 226], "stream": [12, 156], "streamfunctionmoc": [37, 238], "streamsfil": [93, 94, 95, 96, 97], "string_to_datetim": 135, "string_to_days_since_d": 136, "string_to_relative_delta": 137, "subprocess": 151, "subset_vari": 108, "subtask": [8, 141, 143], "summari": 3, "support": [4, 7, 142, 149], "surfac": 179, "switch": 142, "system": 203, "task": [1, 7, 8, 12, 141, 143, 144, 146, 153, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 203], "temperatur": [192, 199], "templat": 1, "test": 3, "thick": 185, "thread": 151, "three": 149, "tick": [118, 149, 166], "time": [12, 142, 165, 166, 196], "time_seri": [126, 127, 128, 129], "timekeep": [10, 12, 130, 131, 132, 133, 134, 135, 136, 137], "timeseri": [142, 144], "timeseries_analysis_plot": 119, "timeseries_analysis_plot_polar": 120, "timeseriesantarcticmelt": [38, 239], "timeseriesoceanregion": [40, 241], "timeseriesohcanomali": [39, 240], "timeseriessalinityanomali": [42, 243], "timeseriesseaic": 51, "timeseriesseaiceareavol": 244, "timeseriessst": [41, 242], 
"timeseriestemperatureanomali": [43, 245], "timeseriestransport": [44, 246], "top": 12, "topographi": 177, "tour": 143, "transect": [12, 138, 139, 167, 246], "transport": [174, 197], "troubleshoot": 144, "tutori": 140, "type": 236, "understand": 143, "up": [144, 147, 169, 170, 171], "update_gener": 18, "user": [140, 144, 203], "util": [12, 100, 101, 102, 103, 131, 132, 133, 134, 135, 136, 137], "v2": 198, "variabl": [11, 234, 241], "varianc": 179, "version": [140, 248], "via": 203, "view": [142, 144], "volum": 189, "wait_for_task": 19, "wave": [180, 195], "weight": 234, "woa18": 199, "woce": 200, "wocetransect": 247, "worktre": 142, "world": 198, "write_netcdf": 104, "xarrai": 156, "year": [148, 155, 165]}}) \ No newline at end of file diff --git a/1.11.0rc1/tutorials/dev_add_task.html b/1.11.0rc1/tutorials/dev_add_task.html new file mode 100644 index 000000000..160a4fbda --- /dev/null +++ b/1.11.0rc1/tutorials/dev_add_task.html @@ -0,0 +1,1478 @@ + + + + + + + Developers: Adding a new analysis task — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Developers: Adding a new analysis task

+

This tutorial walks a new developer through the basics of creating a new +analysis task in MPAS-Analysis. It is a common practice to find an existing +analysis task that is as close as possible to the new analysis, and to copy +that existing task as a template for the new task. That is the strategy we +will demonstrate here.

+

To provide a real example, we will show how we copy and modify an analysis +task used to compute the anomaly in ocean heat content +(ClimatologyMapOHCAnomaly) to instead compute +the barotropic streamfunction (BSF).

+

For computing the BSF itself, we will make use of a script that was developed +outside of MPAS-Analysis for this purpose. This is also a common development +technique: first develop the analysis as a script or +jupyter notebook. Nearly always, the scripts or +notebooks include hard-coded paths and are otherwise not easily applied to new +simulations without considerable effort. This is the motivation for adapting +the code to MPAS-Analysis.

+
+

1. Getting started

+

To begin, please follow the Developer: Getting Started tutorial, which +will help you through the basics of creating a fork of MPAS-Analysis, +cloning it onto the machine(s) where you will do your development, making +a worktree for the feature you will develop, creating a conda environment for +testing your new MPAS-Analysis development, and running MPAS-Analysis.

+

Then, please follow the Developers: Understanding an analysis task. This will give +you a tour of the ClimatologyMapOHCAnomaly +analysis task that we will use as a starting point for developing a new task.

+
+
+

2. The reference scripts

+

I have two scripts I used in the past to compute the barotropic streamfunction +and write it out, and then to plot it. These scripts yanked out some code +from MPAS-Analysis so there are a few similarities but there’s a lot of work +to do.

+

Here’s the script for computing the BSF:

+
#!/usr/bin/env python
+
+import xarray
+import numpy
+import scipy.sparse
+import scipy.sparse.linalg
+import sys
+
+from mpas_tools.io import write_netcdf
+
+
+def main():
+
+    ds = xarray.open_dataset(sys.argv[1])
+    ds = ds[['timeMonthly_avg_layerThickness',
+             'timeMonthly_avg_normalVelocity']]
+    ds.load()
+
+    dsMesh = xarray.open_dataset(sys.argv[2])
+    dsMesh = dsMesh[['cellsOnEdge', 'cellsOnVertex', 'nEdgesOnCell',
+                     'edgesOnCell', 'verticesOnCell', 'verticesOnEdge',
+                     'dcEdge', 'dvEdge', 'lonCell', 'latCell', 'lonVertex',
+                     'latVertex']]
+    dsMesh.load()
+
+    out_filename = sys.argv[3]
+
+    bsfVertex = _compute_barotropic_streamfunction_vertex(dsMesh, ds)
+    print('bsf on vertices computed.')
+    bsfCell = _compute_barotropic_streamfunction_cell(dsMesh, bsfVertex)
+    print('bsf on cells computed.')
+    dsBSF = xarray.Dataset()
+    dsBSF['bsfVertex'] = bsfVertex
+    dsBSF.bsfVertex.attrs['units'] = 'Sv'
+    dsBSF.bsfVertex.attrs['description'] = 'barotropic streamfunction ' \
+        'on vertices'
+    dsBSF['bsfCell'] = bsfCell
+    dsBSF.bsfCell.attrs['units'] = 'Sv'
+    dsBSF.bsfCell.attrs['description'] = 'barotropic streamfunction ' \
+        'on cells'
+    dsBSF = dsBSF.transpose('Time', 'nCells', 'nVertices')
+    for var in dsMesh:
+        dsBSF[var] = dsMesh[var]
+    write_netcdf(dsBSF, out_filename)
+
+
+def _compute_transport(dsMesh, ds):
+
+    cellsOnEdge = dsMesh.cellsOnEdge - 1
+    innerEdges = numpy.logical_and(cellsOnEdge.isel(TWO=0) >= 0,
+                                   cellsOnEdge.isel(TWO=1) >= 0)
+
+    # convert from boolean mask to indices
+    innerEdges = numpy.flatnonzero(innerEdges.values)
+
+    cell0 = cellsOnEdge.isel(nEdges=innerEdges, TWO=0)
+    cell1 = cellsOnEdge.isel(nEdges=innerEdges, TWO=1)
+
+    layerThickness = ds.timeMonthly_avg_layerThickness
+    normalVelocity = ds.timeMonthly_avg_normalVelocity.isel(nEdges=innerEdges)
+
+    layerThicknessEdge = 0.5*(layerThickness.isel(nCells=cell0) +
+                              layerThickness.isel(nCells=cell1))
+    transport = dsMesh.dvEdge[innerEdges] * \
+        (layerThicknessEdge * normalVelocity).sum(dim='nVertLevels')
+
+    # ds = xarray.Dataset()
+    # ds['transport'] = transport
+    # ds['innerEdges'] = ('nEdges', innerEdges)
+    # write_netcdf(ds, 'transport.nc')
+
+    return innerEdges, transport
+
+
+def _compute_barotropic_streamfunction_vertex(dsMesh, ds):
+    innerEdges, transport = _compute_transport(dsMesh, ds)
+    print('transport computed.')
+
+    nVertices = dsMesh.sizes['nVertices']
+    nTime = ds.sizes['Time']
+
+    cellsOnVertex = dsMesh.cellsOnVertex - 1
+    verticesOnEdge = dsMesh.verticesOnEdge - 1
+    isBoundaryCOV = cellsOnVertex == -1
+    boundaryVertices = numpy.logical_or(isBoundaryCOV.isel(vertexDegree=0),
+                                        isBoundaryCOV.isel(vertexDegree=1))
+    boundaryVertices = numpy.logical_or(boundaryVertices,
+                                        isBoundaryCOV.isel(vertexDegree=2))
+
+    # convert from boolean mask to indices
+    boundaryVertices = numpy.flatnonzero(boundaryVertices.values)
+
+    nBoundaryVertices = len(boundaryVertices)
+    nInnerEdges = len(innerEdges)
+
+    indices = numpy.zeros((2, 2*nInnerEdges+nBoundaryVertices), dtype=int)
+    data = numpy.zeros(2*nInnerEdges+nBoundaryVertices, dtype=float)
+
+    # The difference between the streamfunction at vertices on an inner edge
+    # should be equal to the transport
+    v0 = verticesOnEdge.isel(nEdges=innerEdges, TWO=0).values
+    v1 = verticesOnEdge.isel(nEdges=innerEdges, TWO=1).values
+
+    ind = numpy.arange(nInnerEdges)
+    indices[0, 2*ind] = ind
+    indices[1, 2*ind] = v1
+    data[2*ind] = 1.
+
+    indices[0, 2*ind+1] = ind
+    indices[1, 2*ind+1] = v0
+    data[2*ind+1] = -1.
+
+    # the streamfunction should be zero at all boundary vertices
+    ind = numpy.arange(nBoundaryVertices)
+    indices[0, 2*nInnerEdges + ind] = nInnerEdges + ind
+    indices[1, 2*nInnerEdges + ind] = boundaryVertices
+    data[2*nInnerEdges + ind] = 1.
+
+    bsfVertex = xarray.DataArray(numpy.zeros((nTime, nVertices)),
+                                 dims=('Time', 'nVertices'))
+
+    for tIndex in range(nTime):
+        rhs = numpy.zeros(nInnerEdges+nBoundaryVertices, dtype=float)
+
+        # convert to Sv
+        ind = numpy.arange(nInnerEdges)
+        rhs[ind] = 1e-6*transport.isel(Time=tIndex)
+
+        ind = numpy.arange(nBoundaryVertices)
+        rhs[nInnerEdges + ind] = 0.
+
+        M = scipy.sparse.csr_matrix((data, indices),
+                                    shape=(nInnerEdges+nBoundaryVertices,
+                                           nVertices))
+
+        solution = scipy.sparse.linalg.lsqr(M, rhs)
+
+        bsfVertex[tIndex, :] = -solution[0]
+
+    return bsfVertex
+
+
+def _compute_barotropic_streamfunction_cell(dsMesh, bsfVertex):
+    '''
+    Interpolate the barotropic streamfunction from vertices to cells
+    '''
+    nEdgesOnCell = dsMesh.nEdgesOnCell
+    edgesOnCell = dsMesh.edgesOnCell - 1
+    verticesOnCell = dsMesh.verticesOnCell - 1
+    areaEdge = 0.25*dsMesh.dcEdge*dsMesh.dvEdge
+
+    nCells = dsMesh.sizes['nCells']
+    maxEdges = dsMesh.sizes['maxEdges']
+
+    areaVert = xarray.DataArray(numpy.zeros((nCells, maxEdges)),
+                                dims=('nCells', 'maxEdges'))
+
+    for iVert in range(maxEdges):
+        edgeIndices = edgesOnCell.isel(maxEdges=iVert)
+        mask = iVert < nEdgesOnCell
+        areaVert[:, iVert] += 0.5*mask*areaEdge.isel(nEdges=edgeIndices)
+
+    for iVert in range(maxEdges-1):
+        edgeIndices = edgesOnCell.isel(maxEdges=iVert+1)
+        mask = iVert+1 < nEdgesOnCell
+        areaVert[:, iVert] += 0.5*mask*areaEdge.isel(nEdges=edgeIndices)
+
+    edgeIndices = edgesOnCell.isel(maxEdges=0)
+    mask = nEdgesOnCell == maxEdges
+    areaVert[:, maxEdges-1] += 0.5*mask*areaEdge.isel(nEdges=edgeIndices)
+
+    bsfCell = ((areaVert * bsfVertex[:, verticesOnCell]).sum(dim='maxEdges') /
+               areaVert.sum(dim='maxEdges'))
+
+    return bsfCell
+
+
+if __name__ == '__main__':
+    main()
+
+
+

And here’s the one for plotting it:

+
#!/usr/bin/env python
+
+import xarray
+import numpy
+import matplotlib
+import matplotlib.pyplot as plt
+import matplotlib.ticker as mticker
+import matplotlib.colors as cols
+from mpl_toolkits.axes_grid1 import make_axes_locatable
+import matplotlib.patches as mpatches
+import cmocean
+import cartopy
+import pyproj
+import os
+
+from pyremap import ProjectionGridDescriptor
+
+
+def get_antarctic_stereographic_projection():  # {{{
+    """
+    Get a projection for an Antarctic steregraphic comparison grid
+
+    Returns
+    -------
+    projection : ``pyproj.Proj`` object
+        The projection
+    """
+    # Authors
+    # -------
+    # Xylar Asay-Davis
+
+    projection = pyproj.Proj('+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 '
+                             '+k_0=1.0 +x_0=0.0 +y_0=0.0 +ellps=WGS84')
+
+    return projection  # }}}
+
+
+def get_fris_stereographic_comparison_descriptor():  # {{{
+    """
+    Get a descriptor of a region of a polar stereographic grid centered on the
+    Filchner-Ronne Ice Shelf, used for remapping and determining the grid name
+
+    Returns
+    -------
+    descriptor : ``ProjectionGridDescriptor`` object
+        A descriptor of the FRIS comparison grid
+    """
+    # Authors
+    # -------
+    # Xylar Asay-Davis
+
+    x = numpy.linspace(-1.6e6, -0.5e6, 1101)
+    y = numpy.linspace(0., 1.1e6, 1101)
+    Lx = 1e-3*(x[-1] - x[0])
+    Ly = 1e-3*(y[-1] - y[0])
+    dx = 1e-3*(x[1] - x[0])
+
+    projection = get_antarctic_stereographic_projection()
+
+    meshName = '{}x{}km_{}km_FRIS_stereo'.format(Lx, Ly, dx)
+    descriptor = ProjectionGridDescriptor.create(projection, x, y, meshName)
+
+    return descriptor  # }}}
+
+
+def add_land_lakes_coastline(ax):
+    land_50m = cartopy.feature.NaturalEarthFeature(
+            'physical', 'land', '50m', edgecolor='k',
+            facecolor='#cccccc', linewidth=0.5)
+    lakes_50m = cartopy.feature.NaturalEarthFeature(
+            'physical', 'lakes', '50m', edgecolor='k',
+            facecolor='white',
+            linewidth=0.5)
+    ax.add_feature(land_50m, zorder=2)
+    ax.add_feature(lakes_50m, zorder=4)
+
+
+def add_arrow_to_line2D(ax, path, arrow_spacing=100e3,):
+    """
+    https://stackoverflow.com/a/27637925/7728169
+    Add arrows to a matplotlib.lines.Line2D at selected locations.
+
+    Parameters:
+    -----------
+    axes:
+    line: list of 1 Line2D object as returned by plot command
+    arrow_spacing: distance in m between arrows
+
+    Returns:
+    --------
+    arrows: list of arrows
+    """
+    v = path.vertices
+    x = v[:, 0]
+    y = v[:, 1]
+
+    arrows = []
+    s = numpy.cumsum(numpy.sqrt(numpy.diff(x) ** 2 + numpy.diff(y) ** 2))
+    indices = numpy.searchsorted(s, arrow_spacing*numpy.arange(1,
+        int(s[-1]/arrow_spacing)))
+    for n in indices:
+        dx = numpy.mean(x[n-2:n]) - x[n]
+        dy = numpy.mean(y[n-2:n]) - y[n]
+        p = mpatches.FancyArrow(
+            x[n], y[n], dx, dy, length_includes_head=False, width=4e3,
+            facecolor='k')
+        ax.add_patch(p)
+        arrows.append(p)
+    return arrows
+
+
+def savefig(filename, tight=True, pad_inches=0.1, plot_pdf=True):
+    """
+    Saves the current plot to a file, then closes it.
+    Parameters
+    ----------
+    filename : str
+        the file name to be written
+    config :  mpas_analysis.configuration.MpasAnalysisConfigParser
+        Configuration options
+    tight : bool, optional
+        whether to tightly crop the figure
+    pad_inches : float, optional
+        The boarder around the image
+    """
+    # Authors
+    # -------
+    # Xylar Asay-Davis
+
+    if tight:
+        bbox_inches = 'tight'
+    else:
+        bbox_inches = None
+
+    filenames = [filename]
+
+    if plot_pdf:
+        pdf_filename = '{}.pdf'.format(os.path.splitext(filename)[0])
+        filenames.append(pdf_filename)
+
+    for path in filenames:
+        plt.savefig(path, dpi='figure', bbox_inches=bbox_inches,
+                    pad_inches=pad_inches)
+
+    plt.close()
+
+
+descriptor = get_fris_stereographic_comparison_descriptor()
+
+projection = cartopy.crs.Stereographic(
+    central_latitude=-90., central_longitude=0.0,
+    true_scale_latitude=-71.0)
+
+matplotlib.rc('font', size=14)
+
+x = descriptor.xCorner
+y = descriptor.yCorner
+
+extent = [x[0], x[-1], y[0], y[-1]]
+
+dx = x[1] - x[0]
+dy = y[1] - y[0]
+
+fig = plt.figure(figsize=[15, 7.5], dpi=200)
+
+titles = ['control (yrs 51-60)', 'control (yrs 111-120)']
+
+for index, yrs in enumerate(['0051-0060', '0111-0120']):
+    filename = 'control/bsf_{}_1100.0x1100.0km_1.0km_' \
+               'FRIS_stereo_patch.nc'.format(yrs)
+    with xarray.open_dataset(filename) as ds:
+
+        ds = ds.isel(Time=0)
+
+        bsf = ds.bsfVertex
+        bsf = bsf.where(bsf != 0.).values
+
+    #u = 1e6*(bsf[2:, 1:-1] - bsf[:-2, 1:-1])/dy
+    #v = -1e6*(bsf[1:-1, 2:] - bsf[1:-1, :-2])/dx
+
+    #x = 0.5*(x[1:-2] + x[2:-1])
+    #y = 0.5*(y[1:-2] + y[2:-1])
+
+    xc = 0.5*(x[0:-1] + x[1:])
+    yc = 0.5*(y[0:-1] + y[1:])
+
+    ax = fig.add_subplot(121+index, projection=projection)
+
+    ax.set_title(titles[index], y=1.06, size=16)
+
+    ax.set_extent(extent, crs=projection)
+
+    gl = ax.gridlines(crs=cartopy.crs.PlateCarree(), color='k',
+                      linestyle=':', zorder=5, draw_labels=False)
+    gl.xlocator = mticker.FixedLocator(numpy.arange(-180., 181., 10.))
+    gl.ylocator = mticker.FixedLocator(numpy.arange(-88., 81., 2.))
+    gl.n_steps = 100
+    gl.rotate_labels = False
+    gl.x_inline = False
+    gl.y_inline = False
+    gl.xformatter = cartopy.mpl.gridliner.LONGITUDE_FORMATTER
+    gl.yformatter = cartopy.mpl.gridliner.LATITUDE_FORMATTER
+    gl.left_labels = False
+    gl.right_labels = False
+
+    add_land_lakes_coastline(ax)
+
+    norm = cols.SymLogNorm(linthresh=0.1, linscale=0.5, vmin=-10., vmax=10.)
+    ticks = [-10., -3., -1., -0.3, -0.1, 0., 0.1, 0.3, 1., 3., 10.]
+
+    levels = numpy.linspace(-1., 1., 11)
+
+    handle = plt.pcolormesh(x, y, bsf, norm=norm, cmap='cmo.curl',
+                            rasterized=True)
+
+    cs = plt.contour(xc, yc, bsf, levels=levels, colors='k')
+
+    for collection in cs.collections:
+        for path in collection.get_paths():
+            add_arrow_to_line2D(ax, path)
+
+    divider = make_axes_locatable(ax)
+    cax = divider.append_axes("right", size="5%", pad=0.1,
+                              axes_class=plt.Axes)
+    if index < 1:
+        cax.set_axis_off()
+    else:
+        cbar = plt.colorbar(handle, cax=cax)
+        cbar.set_label('Barotropic streamfunction (Sv)')
+        cbar.set_ticks(ticks)
+        cbar.set_ticklabels(['{}'.format(tick) for tick in ticks])
+
+
+

Here’s a plot that I think was produced with this code (but I’m not 100% sure).

+../_images/bsf.png +
+
+

3. Selecting an existing task to copy

+

I selected ClimatologyMapOHCAnomaly as the +analysis task that was closest to what I envision for a new +ClimatologyMapBSF task. Here were my thoughts:

+
    +
  • Both OHC and BSF plot 2D fields (as opposed to some of the analysis like +WOA, Argo and SOSE that work with 3D temperature, salinity and sometimes +other fields).

  • +
  • Neither OHC nor BSF have observations to compare with.

  • +
  • Both OHC and BSF require computing a new field, rather than directly using +output from MPAS-Ocean.

  • +
+

On the other hand, there are some major differences between the 2 that will +mean my job isn’t a simple substitution:

+
    +
  • While OHC is computed over different depth ranges, we do not want that for +the BSF analysis.

  • +
  • We will eventually want some “fancier” plotting for the BSF that draws +streamlines with arrows. That’s not currently available in any MPAS-Analysis +tasks.

  • +
  • OHC involves computing an anomaly, but that isn’t anything we need for BSF.

  • +
+

Even so, ClimatologyMapOHCAnomaly seems like +a reasonable starting point.

+
+
+

4. Developing the task

+

I’ll start just by making a new worktree, then copying the “template” analysis +task to the new name:

+
git worktree add ../add_climatology_map_bsf
+cd ../add_climatology_map_bsf
+cp mpas_analysis/ocean/climatology_map_ohc_anomaly.py mpas_analysis/ocean/climatology_map_bsf.py
+
+
+

Then, I’ll open this new worktree in PyCharm. (You can, of course, use +whatever editor you like.)

+
pycharm-community .
+
+
+

I’ll create or recreate my mpas_dev environment as in +Developer: Getting Started, and then make sure to at least do:

+
conda activate mpas_dev
+python -m pip install -e .
+
+
+
+
+

4.1 ClimatologyMapBSF class

+

In the editor, I rename the class from ClimatologyMapOHCAnomaly to +ClimatologyMapBSF and task name from climatologyMapOHCAnomaly to +climatologyMapBSF.

+

Then, I update the docstring right away because otherwise I’ll forget!

+
class ClimatologyMapBSF(AnalysisTask):
+    """
+    An analysis task for computing and plotting maps of the barotropic
+    streamfunction (BSF)
+
+    Attributes
+    ----------
+    mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask
+        The task that produced the climatology to be remapped and plotted
+    """
+
+
+

I keep the mpas_climatology_task attribute because I’m going to need a +climatology of the velocity field and layer thicknesses that I will get from +that task, but I know I won’t need the ref_year_climatology_task attribute +so I get rid of it.

+
+

4.2 Constructor

+

Then, I move on to the constructor. The main things I need to do besides +renaming the task are:

+
    +
  • rename the field I’m processing to barotropicStreamfunction.

  • +
  • clean up the tags a little bit (change anomaly to streamfunction).

  • +
  • get rid of ref_year_climatology_task since I’m not computing anomalies.

  • +
  • get rid of depth_range because I’m using only the full ocean column.

  • +
+
def __init__(self, config, mpas_climatology_task, control_config=None):
+     """
+     Construct the analysis task.
+
+     Parameters
+     ----------
+     config : mpas_tools.config.MpasConfigParser
+         Configuration options
+
+     mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask
+         The task that produced the climatology to be remapped and plotted
+
+     control_config : mpas_tools.config.MpasConfigParser, optional
+         Configuration options for a control run (if any)
+     """
+
+     field_name = 'barotropicStreamfunction'
+     # call the constructor from the base class (AnalysisTask)
+     super().__init__(config=config, taskName='climatologyMapBSF',
+                      componentName='ocean',
+                      tags=['climatology', 'horizontalMap', field_name,
+                            'publicObs', 'streamfunction'])
+
+     self.mpas_climatology_task = mpas_climatology_task
+
+     section_name = self.taskName
+
+     # read in what seasons we want to plot
+     seasons = config.getexpression(section_name, 'seasons')
+
+     if len(seasons) == 0:
+         raise ValueError(f'config section {section_name} does not contain '
+                          f'valid list of seasons')
+
+     comparison_grid_names = config.getexpression(section_name,
+                                                  'comparisonGrids')
+
+     if len(comparison_grid_names) == 0:
+         raise ValueError(f'config section {section_name} does not contain '
+                          f'valid list of comparison grids')
+
+
+

Next, I need to update the mpas_field_name (which I can choose since I’m +computing the field here, it’s not something produced by MPAS-Ocean). And then +I need to specify the fields from the timeSeriesStatsMonthlyOutput data +that I will use in the computation:

+
mpas_field_name = field_name
+
+variable_list = ['timeMonthly_avg_normalVelocity',
+                 'timeMonthly_avg_layerThickness']
+
+
+

In the next block of code, I:

+
    +
  • get rid of the for-loop over depth ranges and unindent the code that was in +it.

  • +
  • rename RemapMpasOHCClimatology to RemapMpasBSFClimatology (we will +get to this in section 5)

  • +
  • make my best guess about the arguments I do and don’t need for the +constructor of RemapMpasBSFClimatology

  • +
+
remap_climatology_subtask = RemapMpasBSFClimatology(
+    mpas_climatology_task=mpas_climatology_task,
+    parent_task=self,
+    climatology_name=field_name,
+    variable_list=variable_list,
+    comparison_grid_names=comparison_grid_names,
+    seasons=seasons)
+
+self.add_subtask(remap_climatology_subtask)
+
+
+

In the remainder of the constructor, I

+
    +
  • update things like the name of the field being plotted and the units

  • +
  • continue to get rid of things related to depth range

  • +
+
out_file_label = field_name
+remap_observations_subtask = None
+if control_config is None:
+    ref_title_label = None
+    ref_field_name = None
+    diff_title_label = 'Model - Observations'
+
+else:
+    control_run_name = control_config.get('runs', 'mainRunName')
+    ref_title_label = f'Control: {control_run_name}'
+    ref_field_name = mpas_field_name
+    diff_title_label = 'Main - Control'
+
+for comparison_grid_name in comparison_grid_names:
+    for season in seasons:
+        # make a new subtask for this season and comparison grid
+        subtask_name = f'plot{season}_{comparison_grid_name}'
+
+        subtask = PlotClimatologyMapSubtask(
+            self, season, comparison_grid_name,
+            remap_climatology_subtask, remap_observations_subtask,
+            controlConfig=control_config, subtaskName=subtask_name)
+
+        subtask.set_plot_info(
+            outFileLabel=out_file_label,
+            fieldNameInTitle=f'Barotropic Streamfunction',
+            mpasFieldName=mpas_field_name,
+            refFieldName=ref_field_name,
+            refTitleLabel=ref_title_label,
+            diffTitleLabel=diff_title_label,
+            unitsLabel='Sv',
+            imageCaption='Barotropic Streamfunction',
+            galleryGroup='Barotropic Streamfunction',
+            groupSubtitle=None,
+            groupLink='bsf',
+            galleryName=None)
+
+        self.add_subtask(subtask)
+
+
+

This will result in a “gallery” on the web page called “Barotropic +Streamfunction” with a single image in it. That seems a little silly but +we’ll change that later if we feel the need.

+
+
+

4.3 setup_and_check() method

+

In the OHC analysis task, we needed to check if the reference year for the +anomaly and the climatology year were different from one another. We don’t +need this check for the BSF because we’re not computing an anomaly here. So +we can get rid of the setup_and_check() method entirely and the version +from AnalysisTask (the superclass) will be called automatically.

+

At this point, I commit my changes even though I’m less than halfway done.

+
git add mpas_analysis/ocean/climatology_map_bsf.py
+git commit
+
+
+

I can always do

+
git commit --amend mpas_analysis/ocean/climatology_map_bsf.py
+
+
+

to keep adding changes to my commit as I go.

+
+
+
+

5. Developing a subtask

+

Similarly to how RemapMpasOHCClimatology computes the ocean heat content, +we need a class for computing the barotropic streamfunction before we remap +to the comparison grid. In general, it is important to perform computations +on the native MPAS mesh before remapping to the comparison grid but in the +case of the barotropic streamfunction, this is especially true. Any attempt +to compute this analysis directly on the comparison grid (e.g. using remapped, +reconstructed velocity components) would be woefully inaccurate.

+
+

5.1 RemapMpasBSFClimatology class

+

We start by renaming the class from RemapMpasOHCClimatology to +RemapMpasBSFClimatology, updating the docstring, removing the unneeded +attributes:

+
class RemapMpasBSFClimatology(RemapMpasClimatologySubtask):
+    """
+    A subtask for computing climatologies of the barotropic streamfunction
+    from climatologies of normal velocity and layer thickness
+    """
+
+
+
+
+

5.2 Constructor

+

I started by taking out all of the unneeded parameters from the constructor. +What I was left with was simply a call to the constructor of the superclass +RemapMpasClimatologySubtask. +In such a case, there is no point in overriding the constructor. We should +simply leave the constructor for the superclass. The main difference is that +I had switched away from mixed capitalization in the +RemapMpasOHCClimatology to conform to the PEP8 style guide. The superclass +still uses mixed case so we will have to change the call in +ClimatologyMapBSF just a little:

+
remap_climatology_subtask = RemapMpasBSFClimatology(
+    mpasClimatologyTask=mpas_climatology_task,
+    parentTask=self,
+    climatologyName=field_name,
+    variableList=variable_list,
+    comparisonGridNames=comparison_grid_names,
+    seasons=seasons)
+
+
+
+
+

5.3 setup_and_check() method

+

The same turns out to be true of setup_and_check(). As soon as I get rid +of everything we no longer need in the BSF version, all I am left with is a +call to the superclass’ version, and in that case we might as well get rid of +the method entirely.

+
+
+

5.4 customize_masked_climatology() method

+

Finally, we’ve gotten to the part where the real work will take place!

+

The subtask will run in the same way as described in +3.4 run_task() method of the +Developers: Understanding an analysis task tutorial. In the process, the +customize_masked_climatology() method will get called and that’s our chance +to make some changes.

+

Before writing that method, first, I copy the 3 helper functions +_compute_transport(), _compute_barotropic_streamfunction_vertex(), and +_compute_barotropic_streamfunction_cell() from my example script. Other +than making them methods instead of functions and cleaning up the syntax a bit +so they conform to the PEP8 style guide, I leave them unchanged:

+
def _compute_transport(self, ds_mesh, ds):
+
+    cells_on_edge = ds_mesh.cellsOnEdge - 1
+    inner_edges = np.logical_and(cells_on_edge.isel(TWO=0) >= 0,
+                                 cells_on_edge.isel(TWO=1) >= 0)
+
+    # convert from boolean mask to indices
+    inner_edges = np.flatnonzero(inner_edges.values)
+
+    cell0 = cells_on_edge.isel(nEdges=inner_edges, TWO=0)
+    cell1 = cells_on_edge.isel(nEdges=inner_edges, TWO=1)
+
+    layer_thickness = ds.timeMonthly_avg_layerThickness
+    normal_velocity = \
+        ds.timeMonthly_avg_normalVelocity.isel(nEdges=inner_edges)
+
+    layer_thickness_edge = 0.5*(layer_thickness.isel(nCells=cell0) +
+                                layer_thickness.isel(nCells=cell1))
+    transport = ds_mesh.dvEdge[inner_edges] * \
+        (layer_thickness_edge * normal_velocity).sum(dim='nVertLevels')
+
+    return inner_edges, transport
+
+def _compute_barotropic_streamfunction_vertex(self, ds_mesh, ds):
+    inner_edges, transport = self._compute_transport(ds_mesh, ds)
+    print('transport computed.')
+
+    nvertices = ds_mesh.sizes['nVertices']
+    ntime = ds.sizes['Time']
+
+    cells_on_vertex = ds_mesh.cellsOnVertex - 1
+    vertices_on_edge = ds_mesh.verticesOnEdge - 1
+    is_boundary_cov = cells_on_vertex == -1
+    boundary_vertices = np.logical_or(is_boundary_cov.isel(vertexDegree=0),
+                                      is_boundary_cov.isel(vertexDegree=1))
+    boundary_vertices = np.logical_or(boundary_vertices,
+                                      is_boundary_cov.isel(vertexDegree=2))
+
+    # convert from boolean mask to indices
+    boundary_vertices = np.flatnonzero(boundary_vertices.values)
+
+    n_boundary_vertices = len(boundary_vertices)
+    n_inner_edges = len(inner_edges)
+
+    indices = np.zeros((2, 2*n_inner_edges+n_boundary_vertices), dtype=int)
+    data = np.zeros(2*n_inner_edges+n_boundary_vertices, dtype=float)
+
+    # The difference between the streamfunction at vertices on an inner
+    # edge should be equal to the transport
+    v0 = vertices_on_edge.isel(nEdges=inner_edges, TWO=0).values
+    v1 = vertices_on_edge.isel(nEdges=inner_edges, TWO=1).values
+
+    ind = np.arange(n_inner_edges)
+    indices[0, 2*ind] = ind
+    indices[1, 2*ind] = v1
+    data[2*ind] = 1.
+
+    indices[0, 2*ind+1] = ind
+    indices[1, 2*ind+1] = v0
+    data[2*ind+1] = -1.
+
+    # the streamfunction should be zero at all boundary vertices
+    ind = np.arange(n_boundary_vertices)
+    indices[0, 2*n_inner_edges + ind] = n_inner_edges + ind
+    indices[1, 2*n_inner_edges + ind] = boundary_vertices
+    data[2*n_inner_edges + ind] = 1.
+
+    bsf_vertex = xr.DataArray(np.zeros((ntime, nvertices)),
+                              dims=('Time', 'nVertices'))
+
+    for tindex in range(ntime):
+        rhs = np.zeros(n_inner_edges+n_boundary_vertices, dtype=float)
+
+        # convert to Sv
+        ind = np.arange(n_inner_edges)
+        rhs[ind] = 1e-6*transport.isel(Time=tindex)
+
+        ind = np.arange(n_boundary_vertices)
+        rhs[n_inner_edges + ind] = 0.
+
+        matrix = scipy.sparse.csr_matrix(
+            (data, indices),
+            shape=(n_inner_edges+n_boundary_vertices, nvertices))
+
+        solution = scipy.sparse.linalg.lsqr(matrix, rhs)
+
+        bsf_vertex[tindex, :] = -solution[0]
+
+    return bsf_vertex
+
+def _compute_barotropic_streamfunction_cell(self, ds_mesh, bsf_vertex):
+    """
+    Interpolate the barotropic streamfunction from vertices to cells
+    """
+    n_edges_on_cell = ds_mesh.nEdgesOnCell
+    edges_on_cell = ds_mesh.edgesOnCell - 1
+    vertices_on_cell = ds_mesh.verticesOnCell - 1
+    area_edge = 0.25*ds_mesh.dcEdge*ds_mesh.dvEdge
+
+    ncells = ds_mesh.sizes['nCells']
+    max_edges = ds_mesh.sizes['maxEdges']
+
+    area_vert = xr.DataArray(np.zeros((ncells, max_edges)),
+                             dims=('nCells', 'maxEdges'))
+
+    for ivert in range(max_edges):
+        edge_indices = edges_on_cell.isel(maxEdges=ivert)
+        mask = ivert < n_edges_on_cell
+        area_vert[:, ivert] += 0.5*mask*area_edge.isel(nEdges=edge_indices)
+
+    for ivert in range(max_edges-1):
+        edge_indices = edges_on_cell.isel(maxEdges=ivert+1)
+        mask = ivert+1 < n_edges_on_cell
+        area_vert[:, ivert] += 0.5*mask*area_edge.isel(nEdges=edge_indices)
+
+    edge_indices = edges_on_cell.isel(maxEdges=0)
+    mask = n_edges_on_cell == max_edges
+    area_vert[:, max_edges-1] += \
+        0.5*mask*area_edge.isel(nEdges=edge_indices)
+
+    bsf_cell = \
+        ((area_vert * bsf_vertex[:, vertices_on_cell]).sum(dim='maxEdges') /
+         area_vert.sum(dim='maxEdges'))
+
+    return bsf_cell
+
+
+

I also add some missing imports and delete an unused one at the top:

+
import xarray as xr
+import numpy as np
+import scipy.sparse
+import scipy.sparse.linalg
+
+from mpas_analysis.shared import AnalysisTask
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask
+from mpas_analysis.ocean.plot_climatology_map_subtask import \
+    PlotClimatologyMapSubtask
+
+
+

Finally, I substitute the functionality of the main() function in my +script into the customize_masked_climatology() function:

+
def customize_masked_climatology(self, climatology, season):
+    """
+    Compute the barotropic streamfunction (BSF) from the normal velocity
+    and layer thickness fields.
+
+    Parameters
+    ----------
+    climatology : xarray.Dataset
+        the climatology data set
+
+    season : str
+        The name of the season to be masked
+
+    Returns
+    -------
+    climatology : xarray.Dataset
+        the modified climatology data set
+    """
+    logger = self.logger
+
+    ds_mesh = xr.open_dataset(self.restartFileName)
+    ds_mesh = ds_mesh[['cellsOnEdge', 'cellsOnVertex', 'nEdgesOnCell',
+                       'edgesOnCell', 'verticesOnCell', 'verticesOnEdge',
+                       'dcEdge', 'dvEdge']]
+    ds_mesh.load()
+
+    bsf_vertex = self._compute_barotropic_streamfunction_vertex(
+        ds_mesh, climatology)
+    logger.info('bsf on vertices computed.')
+    bsf_cell = self._compute_barotropic_streamfunction_cell(
+        ds_mesh, bsf_vertex)
+    logger.info('bsf on cells computed.')
+
+    climatology['barotropicStreamfunction'] = \
+        bsf_cell.transpose('Time', 'nCells', 'nVertices')
+    climatology.barotropicStreamfunction.attrs['units'] = 'Sv'
+    climatology.barotropicStreamfunction.attrs['description'] = \
+        'barotropic streamfunction at cell centers'
+
+    climatology = climatology.drop_vars(self.variableList)
+
+    return climatology
+
+
+

We get mesh variables from a restart file to make the xarray dataset +ds_mesh. These are passed on to the helper functions.

+

We use logger.info() instead of print() so the output goes to a log +file. (This isn’t strictly necessary since MPAS-Analysis also hijacks the +print() function to make sure its output goes to log files, but it makes +clearer what we expect and also opens up the opportunity to use +logger.debug(), logger.warn() and logger.error() where appropriate.)

+

There isn’t a way to store the barotropic streamfunction on vertices in the +climatology, as was done in the original script, because the remapping code is +expecting data only at cell centers.

+

Before we return the modified climatology, we drop the normal velocity and +layer thickness from the data set, since they were only needed to help us +compute the BSF.

+
+
+
+

6. Config options

+

We’re not quite done yet. We need to set some config options for the analysis +task that the PlotClimatologyMapSubtask +subtask is expecting. Again, an easy starting point is to copy the +[climatologyMapOHCAnomaly] section of the default.cfg file into a new +[climatologyMapBSF] section, and then delete the things we don’t need, +and finally make a few modifications so the color map and data range is more +similar to the plot script I used above:

+
[climatologyMapBSF]
+## options related to plotting horizontally remapped climatologies of
+## the barotropic streamfunction (BSF) against control model results
+## (if available)
+
+# colormap for model/observations
+colormapNameResult = cmo.curl
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# color indices into colormapName for filled contours
+# the type of norm used in the colormap
+normTypeResult = symLog
+# A dictionary with keywords for the norm
+normArgsResult = {'linthresh': 0.1, 'linscale': 0.5, 'vmin': -10., 'vmax': 10.}
+colorbarTicksResult = [-10., -3., -1., -0.3, -0.1, 0., 0.1, 0.3, 1., 3., 10.]
+
+# colormap for differences
+colormapNameDifference = cmo.balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = symLog
+# A dictionary with keywords for the norm
+normArgsDifference = {'linthresh': 0.1, 'linscale': 0.5, 'vmin': -10.,
+                      'vmax': 10.}
+colorbarTicksDifference = [-10., -3., -1., -0.3, -0.1, 0., 0.1, 0.3, 1., 3.,
+                           10.]
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+
+
+
+

7. Adding the task

+

There is one last step required to add this task to MPAS-Analysis. You should +add the task to the mpas_analysis/<component>/__init__.py so it is a little +easier to import the task. Try to add it near similar tasks:

+
  from mpas_analysis.ocean.climatology_map_eke import ClimatologyMapEKE
+  from mpas_analysis.ocean.climatology_map_bsf import \
+      ClimatologyMapBSF
+  from mpas_analysis.ocean.climatology_map_ohc_anomaly import \
+      ClimatologyMapOHCAnomaly
+
+
+

Then, add the task in mpas_analysis/__main__.py:

+
  analyses.append(ocean.ClimatologyMapEKE(config,
+                                          oceanClimatolgyTasks['avg'],
+                                          controlConfig))
+  analyses.append(ocean.ClimatologyMapBSF(config,
+                                          oceanClimatolgyTasks['avg'],
+                                          controlConfig))
+  analyses.append(ocean.ClimatologyMapOHCAnomaly(
+      config, oceanClimatolgyTasks['avg'], oceanRefYearClimatolgyTask,
+      controlConfig))
+
+
+

A quick way to check if the task has been added correctly is to run:

+
mpas_analysis --list
+
+
+

You should see the new task in the list of tasks.

+
+
+

8. The full code for posterity

+

Since the ClimatologyMapBSF analysis task is not in MPAS-Analysis yet and +since it may have evolved by the time it gets added, here is the full code as +described in this tutorial:

+
# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import xarray as xr
+import numpy as np
+import scipy.sparse
+import scipy.sparse.linalg
+
+from mpas_analysis.shared import AnalysisTask
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask
+from mpas_analysis.ocean.plot_climatology_map_subtask import \
+    PlotClimatologyMapSubtask
+
+
+class ClimatologyMapBSF(AnalysisTask):
+    """
+    An analysis task for computing and plotting maps of the barotropic
+    streamfunction (BSF)
+
+    Attributes
+    ----------
+    mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask
+        The task that produced the climatology to be remapped and plotted
+    """
+
+    def __init__(self, config, mpas_climatology_task, control_config=None):
+        """
+        Construct the analysis task.
+
+        Parameters
+        ----------
+        config : mpas_tools.config.MpasConfigParser
+            Configuration options
+
+        mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask
+            The task that produced the climatology to be remapped and plotted
+
+        control_config : mpas_tools.config.MpasConfigParser, optional
+            Configuration options for a control run (if any)
+        """
+
+        field_name = 'barotropicStreamfunction'
+        # call the constructor from the base class (AnalysisTask)
+        super().__init__(config=config, taskName='climatologyMapBSF',
+                         componentName='ocean',
+                         tags=['climatology', 'horizontalMap', field_name,
+                               'publicObs', 'streamfunction'])
+
+        self.mpas_climatology_task = mpas_climatology_task
+
+        section_name = self.taskName
+
+        # read in what seasons we want to plot
+        seasons = config.getexpression(section_name, 'seasons')
+
+        if len(seasons) == 0:
+            raise ValueError(f'config section {section_name} does not contain '
+                             f'valid list of seasons')
+
+        comparison_grid_names = config.getexpression(section_name,
+                                                     'comparisonGrids')
+
+        if len(comparison_grid_names) == 0:
+            raise ValueError(f'config section {section_name} does not contain '
+                             f'valid list of comparison grids')
+
+        mpas_field_name = field_name
+
+        variable_list = ['timeMonthly_avg_normalVelocity',
+                         'timeMonthly_avg_layerThickness']
+
+        remap_climatology_subtask = RemapMpasBSFClimatology(
+            mpasClimatologyTask=mpas_climatology_task,
+            parentTask=self,
+            climatologyName=field_name,
+            variableList=variable_list,
+            comparisonGridNames=comparison_grid_names,
+            seasons=seasons)
+
+        self.add_subtask(remap_climatology_subtask)
+
+        out_file_label = field_name
+        remap_observations_subtask = None
+        if control_config is None:
+            ref_title_label = None
+            ref_field_name = None
+            diff_title_label = 'Model - Observations'
+
+        else:
+            control_run_name = control_config.get('runs', 'mainRunName')
+            ref_title_label = f'Control: {control_run_name}'
+            ref_field_name = mpas_field_name
+            diff_title_label = 'Main - Control'
+
+        for comparison_grid_name in comparison_grid_names:
+            for season in seasons:
+                # make a new subtask for this season and comparison grid
+                subtask_name = f'plot{season}_{comparison_grid_name}'
+
+                subtask = PlotClimatologyMapSubtask(
+                    self, season, comparison_grid_name,
+                    remap_climatology_subtask, remap_observations_subtask,
+                    controlConfig=control_config, subtaskName=subtask_name)
+
+                subtask.set_plot_info(
+                    outFileLabel=out_file_label,
+                    fieldNameInTitle=f'Barotropic Streamfunction',
+                    mpasFieldName=mpas_field_name,
+                    refFieldName=ref_field_name,
+                    refTitleLabel=ref_title_label,
+                    diffTitleLabel=diff_title_label,
+                    unitsLabel='Sv',
+                    imageCaption='Barotropic Streamfunction',
+                    galleryGroup='Barotropic Streamfunction',
+                    groupSubtitle=None,
+                    groupLink='bsf',
+                    galleryName=None)
+
+                self.add_subtask(subtask)
+
+
+class RemapMpasBSFClimatology(RemapMpasClimatologySubtask):
+    """
+    A subtask for computing climatologies of the barotropic streamfunction
+    from climatologies of normal velocity and layer thickness
+    """
+
+    def customize_masked_climatology(self, climatology, season):
+        """
+        Compute the barotropic streamfunction (BSF) from the normal velocity
+        and layer thickness fields.
+
+        Parameters
+        ----------
+        climatology : xarray.Dataset
+            the climatology data set
+
+        season : str
+            The name of the season to be masked
+
+        Returns
+        -------
+        climatology : xarray.Dataset
+            the modified climatology data set
+        """
+        logger = self.logger
+
+        ds_mesh = xr.open_dataset(self.restartFileName)
+        ds_mesh = ds_mesh[['cellsOnEdge', 'cellsOnVertex', 'nEdgesOnCell',
+                           'edgesOnCell', 'verticesOnCell', 'verticesOnEdge',
+                           'dcEdge', 'dvEdge']]
+        ds_mesh.load()
+
+        bsf_vertex = self._compute_barotropic_streamfunction_vertex(
+            ds_mesh, climatology)
+        logger.info('bsf on vertices computed.')
+        bsf_cell = self._compute_barotropic_streamfunction_cell(
+            ds_mesh, bsf_vertex)
+        logger.info('bsf on cells computed.')
+
+        climatology['barotropicStreamfunction'] = \
+            bsf_cell.transpose('Time', 'nCells', 'nVertices')
+        climatology.barotropicStreamfunction.attrs['units'] = 'Sv'
+        climatology.barotropicStreamfunction.attrs['description'] = \
+            'barotropic streamfunction at cell centers'
+
+        climatology = climatology.drop_vars(self.variableList)
+
+        return climatology
+
+    def _compute_transport(self, ds_mesh, ds):
+
+        cells_on_edge = ds_mesh.cellsOnEdge - 1
+        inner_edges = np.logical_and(cells_on_edge.isel(TWO=0) >= 0,
+                                     cells_on_edge.isel(TWO=1) >= 0)
+
+        # convert from boolean mask to indices
+        inner_edges = np.flatnonzero(inner_edges.values)
+
+        cell0 = cells_on_edge.isel(nEdges=inner_edges, TWO=0)
+        cell1 = cells_on_edge.isel(nEdges=inner_edges, TWO=1)
+
+        layer_thickness = ds.timeMonthly_avg_layerThickness
+        normal_velocity = \
+            ds.timeMonthly_avg_normalVelocity.isel(nEdges=inner_edges)
+
+        layer_thickness_edge = 0.5*(layer_thickness.isel(nCells=cell0) +
+                                    layer_thickness.isel(nCells=cell1))
+        transport = ds_mesh.dvEdge[inner_edges] * \
+            (layer_thickness_edge * normal_velocity).sum(dim='nVertLevels')
+
+        return inner_edges, transport
+
+    def _compute_barotropic_streamfunction_vertex(self, ds_mesh, ds):
+        inner_edges, transport = self._compute_transport(ds_mesh, ds)
+        print('transport computed.')
+
+        nvertices = ds_mesh.sizes['nVertices']
+        ntime = ds.sizes['Time']
+
+        cells_on_vertex = ds_mesh.cellsOnVertex - 1
+        vertices_on_edge = ds_mesh.verticesOnEdge - 1
+        is_boundary_cov = cells_on_vertex == -1
+        boundary_vertices = np.logical_or(is_boundary_cov.isel(vertexDegree=0),
+                                          is_boundary_cov.isel(vertexDegree=1))
+        boundary_vertices = np.logical_or(boundary_vertices,
+                                          is_boundary_cov.isel(vertexDegree=2))
+
+        # convert from boolean mask to indices
+        boundary_vertices = np.flatnonzero(boundary_vertices.values)
+
+        n_boundary_vertices = len(boundary_vertices)
+        n_inner_edges = len(inner_edges)
+
+        indices = np.zeros((2, 2*n_inner_edges+n_boundary_vertices), dtype=int)
+        data = np.zeros(2*n_inner_edges+n_boundary_vertices, dtype=float)
+
+        # The difference between the streamfunction at vertices on an inner
+        # edge should be equal to the transport
+        v0 = vertices_on_edge.isel(nEdges=inner_edges, TWO=0).values
+        v1 = vertices_on_edge.isel(nEdges=inner_edges, TWO=1).values
+
+        ind = np.arange(n_inner_edges)
+        indices[0, 2*ind] = ind
+        indices[1, 2*ind] = v1
+        data[2*ind] = 1.
+
+        indices[0, 2*ind+1] = ind
+        indices[1, 2*ind+1] = v0
+        data[2*ind+1] = -1.
+
+        # the streamfunction should be zero at all boundary vertices
+        ind = np.arange(n_boundary_vertices)
+        indices[0, 2*n_inner_edges + ind] = n_inner_edges + ind
+        indices[1, 2*n_inner_edges + ind] = boundary_vertices
+        data[2*n_inner_edges + ind] = 1.
+
+        bsf_vertex = xr.DataArray(np.zeros((ntime, nvertices)),
+                                  dims=('Time', 'nVertices'))
+
+        for tindex in range(ntime):
+            rhs = np.zeros(n_inner_edges+n_boundary_vertices, dtype=float)
+
+            # convert to Sv
+            ind = np.arange(n_inner_edges)
+            rhs[ind] = 1e-6*transport.isel(Time=tindex)
+
+            ind = np.arange(n_boundary_vertices)
+            rhs[n_inner_edges + ind] = 0.
+
+            matrix = scipy.sparse.csr_matrix(
+                (data, indices),
+                shape=(n_inner_edges+n_boundary_vertices, nvertices))
+
+            solution = scipy.sparse.linalg.lsqr(matrix, rhs)
+
+            bsf_vertex[tindex, :] = -solution[0]
+
+        return bsf_vertex
+
+    def _compute_barotropic_streamfunction_cell(self, ds_mesh, bsf_vertex):
+        """
+        Interpolate the barotropic streamfunction from vertices to cells
+        """
+        n_edges_on_cell = ds_mesh.nEdgesOnCell
+        edges_on_cell = ds_mesh.edgesOnCell - 1
+        vertices_on_cell = ds_mesh.verticesOnCell - 1
+        area_edge = 0.25*ds_mesh.dcEdge*ds_mesh.dvEdge
+
+        ncells = ds_mesh.sizes['nCells']
+        max_edges = ds_mesh.sizes['maxEdges']
+
+        area_vert = xr.DataArray(np.zeros((ncells, max_edges)),
+                                 dims=('nCells', 'maxEdges'))
+
+        for ivert in range(max_edges):
+            edge_indices = edges_on_cell.isel(maxEdges=ivert)
+            mask = ivert < n_edges_on_cell
+            area_vert[:, ivert] += 0.5*mask*area_edge.isel(nEdges=edge_indices)
+
+        for ivert in range(max_edges-1):
+            edge_indices = edges_on_cell.isel(maxEdges=ivert+1)
+            mask = ivert+1 < n_edges_on_cell
+            area_vert[:, ivert] += 0.5*mask*area_edge.isel(nEdges=edge_indices)
+
+        edge_indices = edges_on_cell.isel(maxEdges=0)
+        mask = n_edges_on_cell == max_edges
+        area_vert[:, max_edges-1] += \
+            0.5*mask*area_edge.isel(nEdges=edge_indices)
+
+        bsf_cell = \
+            ((area_vert * bsf_vertex[:, vertices_on_cell]).sum(dim='maxEdges') /
+             area_vert.sum(dim='maxEdges'))
+
+        return bsf_cell
+
+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3 license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/tutorials/dev_getting_started.html b/1.11.0rc1/tutorials/dev_getting_started.html new file mode 100644 index 000000000..ab24771b9 --- /dev/null +++ b/1.11.0rc1/tutorials/dev_getting_started.html @@ -0,0 +1,829 @@ + + + + + + + Developer: Getting Started — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Developer: Getting Started

+

This mini-tutorial is meant as the starting point for other tutorials for +developers. It describes the process for creating a fork of the MPAS-Analysis +repo, cloning the repository (and your fork) locally, making a git worktree for +development, and creating a conda environment that includes the +mpas_analysis package and all of its dependencies, installed in a mode +appropriate for development.

+
+

1. Getting started on GitHub

+
+

1.1 Forking MPAS-Analysis

+

If you would like to contribute to MPAS-Analysis, you will need to create your +own fork of the repository. Go +to the link and click on Fork near the top right corner of the page. The +Owner should be your GitHub username and the Repository name should be +MPAS-Analysis. Check the box for “Copy the develop branch only”. Click +“Create fork”.

+
+
+

1.2 Adding SSH keys

+

If you have not already done so, you should add SSH keys to GitHub that allow +you to push to your fork from the machine(s) where you will do your +development. Instructions can be found +here.

+
+
+

1.3 Local git configuration

+

It will be convenient to have some basic configuration for git taken care +of before we clone the repository. Here are some recommended config options +to set. Edit your ~/.gitconfig (create it if it doesn’t exist).

+
[user]
+        name = Xylar Asay-Davis
+        email = xylarstorm@gmail.com
+[core]
+        editor = vim
+[color]
+        ui = true
+[alias]
+        logg = log --graph --oneline --decorate
+[rebase]
+        autosquash = true
+
+
+

Obviously, change [user] config options to appropriate values for you. +You must use the email address associated with your GitHub account. +Otherwise, your commits will not be associated with your GitHub user name.

+
+
+
+

2. Cloning the repository

+

You will want to clone both the main MPAS-Analysis repository and your own +fork. The MPAS-Analysis development tutorials assume that you will be +developing branches in different worktrees and recommend a directory structure +appropriate for this approach.

+
+

Note

+

If you are on a machine with an old version of git, you may need to +add:

+
module load git
+
+
+

to your .bashrc. You want a pretty recent version of git so you +have the git worktree command.

+
+

Begin by creating a “base” directory for development in a convenient location +for keeping code. This should not be on a “scratch” or other temporary drive +on an HPC machine. The base directory should be named MPAS-Analysis, +mpas-analysis or something similar.

+
$ mkdir mpas-analysis
+$ cd mpas-analysis
+
+
+

Within the base directory, clone the main repository into a directory called +develop (the default branch is the develop branch):

+
$ git clone git@github.com:MPAS-Dev/MPAS-Analysis.git develop
+$ cd develop
+
+
+

Add your fork as a “remote”:

+
$ git remote add <username>/MPAS-Analysis git@github.com:<username>/MPAS-Analysis.git
+
+
+

Make sure to replace <username> with your GitHub username.

+
+
+

3. Making a worktree

+

To do your development, first make sure you are in the develop directory +within your base directory (e.g. mpas-analysis/develop). Then, “fetch” any +changes that might have happened on the develop branch so you are using +the latest version as a starting point:

+
$ git fetch --all -p
+
+
+

This will fetch all branches from both the main repository and your fork. It +will also prune (-p) any branches you might have deleted.

+

Then, make a worktree for developing your new feature:

+
$ git worktree add ../add_my_fancy_task
+
+
+

The last argument (add_my_fancy_task in this example) is both the name of +a directory within the base directory (mpas-analysis) and the name of the +branch you will be developing.

+

Go into that directory to do your development:

+
$ cd ../add_my_fancy_task
+
+
+
+
+

4. Making a conda environment

+

MPAS-Analysis relies on several packages that are only available as conda +packages from the conda-forge channel. The first step for running +MPAS-Analysis is to create a conda environment with all the needed packages.

+
+

4.1 Installing Mambaforge

+

If you have not yet installed Anaconda, Miniconda or Mambaforge, you will need +to begin there. The concept behind Anaconda is that just about everything you +would need for a typical python workflow is included. The concept behind +Miniconda and Mambaforge is that you create different environments for +different purposes. This allows for greater flexibility and tends to lead to +fewer conflicts between incompatible packages, particularly when using a +channel other than the defaults supplied by Anaconda. Since we will use +the conda-forge channel and the mamba tools to speed up installation, +the Mambaforge approach is strongly recommended. The main advantage of +Mambaforge over Miniconda is that it automatically takes care of a few steps +that we otherwise need to do manually.

+

First download the +Mambaforge installer +for your operating system, then run it:

+
$ /bin/bash Mambaforge-Linux-x86_64.sh
+
+
+
+

Note

+

MPAS-Analysis and many of the packages it depends on support OSX and Linux +but not Windows.

+
+

If you are on an HPC system, you can still install Miniconda into your home +directory. Typically, you will need the Linux version.

+
+

Note

+

At this time, we don’t have experience with installing or running +MPAS-Analysis on ARM or Power8/9 architectures.

+
+

You will be asked to agree to the terms and conditions. Type yes to +continue.

+

You will be prompted with a location to install. In this tutorial, we assume +that Mambaforge is installed in the default location, ~/mambaforge. If +you are using Miniconda or chose to install Mambaforge somewhere else, just +make sure to make the appropriate substitution whenever you see a reference to +this path below.

+
+

Note

+

On some HPC machines (particularly at LANL Institutional Computing and +NERSC) the space in your home directory is quite limited. You may want to +install Mambaforge in an alternative location to avoid running out of +space.

+
+

You will see prompt like this:

+
Do you wish the installer to initialize Mambaforge
+by running conda init? [yes|no]
+[no] >>>
+
+
+

You may wish to skip the step (answer no) if you are working on a system +where you will also be using other conda environments, most notably +E3SM-Unified (which has its own Miniconda installation). If you do not run +conda init, you have to manually activate conda whenever you need it. +For bash and similar shells, this is:

+
$ source ~/mambaforge/etc/profile.d/conda.sh
+$ conda activate
+
+
+

If you use csh, tcsh or related shells, this becomes:

+
> source ~/mambaforge/etc/profile.d/conda.csh
+> conda activate
+
+
+

You may wish to create an alias in your .bashrc or .cshrc to make +this easier. For example:

+
alias init_conda="source ~/mambaforge/etc/profile.d/conda.sh; conda activate"
+
+
+
+
+

4.2 One-time Miniconda setup

+

If you installed Miniconda, rather than Mambaforge, you will need to add the +conda-forge channel and make sure it always takes +precedence for packages available on that channel:

+
$ conda config --add channels conda-forge
+$ conda config --set channel_priority strict
+
+
+

Then, you will need to install the mamba package:

+
$ conda install -y mamba
+
+
+

If you installed Mambaforge, these steps will happen automatically.

+
+
+

4.3 Create a development environment

+

You can create a new conda environment called mpas_dev and install the +dependencies that MPAS-Analysis needs by running the following in the worktree +where you are doing your development:

+
$ mamba create -y -n mpas_dev --file dev-spec.txt "esmf=*=nompi_*"
+
+
+

The last argument is only needed on HPC machines because the conda version of +MPI doesn’t work properly on these machines. You can omit it if you’re +setting up the conda environment on your laptop.

+

Then, you can activate the environment and install MPAS-Analysis in “edit” +mode by running:

+
$ conda activate mpas_dev
+$ python -m pip install -e .
+
+
+

In this mode, any edits you make to the code in the worktree will be available +in the conda environment. If you run mpas_analysis on the command line, +it will know about the changes.

+
+

Note

+

If you add or remove files in the code, you will need to re-install +MPAS-Analysis in the conda environment by rerunning

+
python -m pip install -e .
+
+
+
+
+
+

4.4 Activating the environment

+

Each time you open a new terminal window, to activate the mpas_dev +environment, you will need to run either for bash:

+
$ source ~/mambaforge/etc/profile.d/conda.sh
+$ conda activate mpas_dev
+
+
+

or for csh:

+
> source ~/mambaforge/etc/profile.d/conda.csh
+> conda activate mpas_dev
+
+
+

You can skip the source command if you chose to initialize Mambaforge or +Miniconda3 so it loads automatically. You can also use the init_conda +alias for this step if you defined one.

+
+
+

4.5 Switching worktrees

+

If you switch to a different worktree, it is safest to rerun the whole +process for creating the mpas_dev conda environment. If you know that +the dependencies are the same as the worktree used to create mpas_dev, +you can just reinstall mpas_analysis itself by rerunning

+
python -m pip install -e .
+
+
+

in the new worktree. If you forget this step, you will find that changes you +make in the worktree don’t affect the mpas_dev conda environment you are +using.

+
+
+
+

5. Editing code

+

You may, of course, edit the MPAS-Analysis code using whatever tool you like. +I strongly recommend editing on your laptop and using +PyCharm community edition +to do the editing. PyCharm provides many features including flagging +deviations from preferred coding style guidelines known as +PEP8 and syntax error detection using +the mpas_dev conda environment you created.

+
+
+

6. Running MPAS-Analysis on a laptop

+

If you wish to run MPAS-Analysis on your laptop (or desktop machine), you will +need to follow steps 2-6 of the User: Getting Started tutorial.

+
+
+

7. Running MPAS-Analysis on an E3SM supported machine

+
+

7.1 Configuring MPAS-Analysis

+

We configure MPAS-Analysis with Python cfg (also called ini) files:

+
[runs]
+# mainRunName is a name that identifies the simulation being analyzed.
+mainRunName = runName
+
+[execute]
+...
+
+
+

The default config file +contains thousands of config options, which gives a lot of flexibility to +MPAS-Analysis but can be more than a bit overwhelming to new users and +developers.

+

The file example_e3sm.cfg +provides you with an example with some of the most common config options you +might need to change on an E3SM supported machine. If you specify the name of +the supported machine with the --machine (or -m) flag when you call +mpas_analysis, there are several config options that will be set for you +automatically.

+

First, you should copy this file to a new name for a specific run (say +myrun.cfg). Then, you should modify any config options you want to change +in your new config file. At a minimum, you need to specify:

+
    +
  • mainRunName in [runs]: A name for the run to be included in plot titles +and legends (best if it’s not super long)

  • +
  • baseDirectory in [input]: The directory for the simulation results +to analyze

  • +
  • mpasMeshName in [input]: The official name of the MPAS-Ocean and +-Seaice mesh

  • +
  • baseDirectory in [output]: The directory for the analysis results

  • +
+

We will cover these and a few other common options in this tutorial. With the +exception of a few paths that you will need to provide, the config options +displayed below are the ones appropriate for the example E3SM simulation using +the QU480 MPAS mesh.

+
+

7.1.1 [runs]

+

The [runs] section contains options related to which E3SM simulation(s) are +being analyzed:

+
[runs]
+## options related to the run to be analyzed and control runs to be
+## compared against
+
+# mainRunName is a name that identifies the simulation being analyzed.
+mainRunName = A_WCYCL1850.ne4_oQU480.anvil
+
+
+

The mainRunName can be any useful name that will appear at the top of each +web page of the analysis output and in the legends or titles of the figures. +Often, this is the full name of the E3SM simulation but sometimes it is +convenient to have a shorter name. In this case, we use part of the run name +but leave off the date of the simulation to keep it a little shorter.

+
+
+

7.1.2 [execute]

+

The [execute] section contains options related to serial or parallel +execution of the individual “tasks” that make up an MPAS-Analysis run. For +the most part, you can let MPAS-Analysis take care of this on supported +machines. The exception is that, in a development conda environment, you will +be using a version of ESMF that cannot run in parallel so you will need the +following:

+
[execute]
+## options related to executing parallel tasks
+
+# the number of MPI tasks to use in creating mapping files (1 means tasks run in
+# serial, the default)
+mapMpiTasks = 1
+
+# "None" if ESMF should perform mapping file generation in serial without a
+# command, or one of "srun" or "mpirun" if it should be run in parallel (or in
+# serial but with a command)
+mapParallelExec = None
+
+
+

If you are running into trouble with MPAS-Analysis, such as running out of +memory, you may want to explore other config options from this section.

+
+
+

7.1.3 [input]

+

The [input] section provides paths to the E3SM simulation data and the name +of the MPAS-Ocean and MPAS-Seaice mesh.

+
[input]
+## options related to reading in the results to be analyzed
+
+# directory containing model results
+baseDirectory = /lcrc/group/e3sm/ac.xylar/acme_scratch/anvil/20200305.A_WCYCL1850.ne4_oQU480.anvil
+
+# Note: an absolute path can be supplied for any of these subdirectories.
+# A relative path is assumed to be relative to baseDirectory.
+# In this example, results are assumed to be in <baseDirecory>/run
+
+# subdirectory containing restart files
+runSubdirectory = run
+# subdirectory for ocean history files
+oceanHistorySubdirectory = archive/ocn/hist
+# subdirectory for sea ice history files
+seaIceHistorySubdirectory = archive/ice/hist
+
+# names of namelist and streams files, either a path relative to baseDirectory
+# or an absolute path.
+oceanNamelistFileName = run/mpaso_in
+oceanStreamsFileName = run/streams.ocean
+seaIceNamelistFileName = run/mpassi_in
+seaIceStreamsFileName = run/streams.seaice
+
+# name of the ocean and sea-ice mesh (e.g. EC30to60E2r2, WC14to60E2r3,
+# ECwISC30to60E2r1, SOwISC12to60E2r4, oQU240, etc.)
+mpasMeshName = oQU480
+
+
+

The baseDirectory is the path for the E3SM simulation. Here are paths to +some very low resolution simulations you can use on various supported machines:

+

Anvil or Chrysalis:

+
/lcrc/group/e3sm/ac.xylar/acme_scratch/anvil/20200305.A_WCYCL1850.ne4_oQU480.anvil
+/lcrc/group/e3sm/ac.xylar/acme_scratch/anvil/20201025.GMPAS-IAF.T62_oQU240wLI.anvil
+
+
+

Cori and Perlmutter:

+
/global/cfs/cdirs/e3sm/xylar/20200305.A_WCYCL1850.ne4_oQU480.anvil
+
+
+

Compy:

+
/compyfs/asay932/analysis_testing/test_output/20200305.A_WCYCL1850.ne4_oQU480.anvil
+
+
+

The mpasMeshName is the official name of the MPAS-Ocean and -Seaice mesh +used in the simulation, which should be in the simulation name and must be a +directory on the +inputdata +server. In this example, this is oQU480, meaning the quasi-uniform 480-km +mesh for the ocean and sea ice.

+

The runSubdirectory must contain valid MPAS-Ocean and MPAS-Seaice restart +files, used to get information about the MPAS mesh and the ocean vertical grid.

+

The oceanHistorySubdirectory must contain MPAS-Ocean monthly mean output +files, typically named:

+
mpaso.hist.am.timeSeriesStatsMonthly.YYYY-MM-DD.nc
+
+
+

Similarly, seaIceHistorySubdirectory contains the MPAS-Seaice monthly mean +output:

+
mpassi.hist.am.timeSeriesStatsMonthly.YYYY-MM-DD.nc
+
+
+

In this example, we are using a run where short-term archiving has been used +so the output is not in the run directory.

+

Finally, MPAS-Analysis needs a set of “namelists” and “streams” files that +provide information on the E3SM configuration for MPAS-Ocean and MPAS-Seaice, +and about the output files, respectively. These are typically also found in +the run directory.

+
+
+

7.1.4 [output]

+

The [output] section provides a path where the output from the analysis run +will be written, the option to output the results web pages to another +location, and a list of analysis to be generated (or explicitly skipped).

+
[output]
+## options related to writing out plots, intermediate cached data sets, logs,
+## etc.
+
+# The subdirectory for the analysis and output on the web portal
+subdir = ${runs:mainRunName}/clim_${climatology:startYear}-${climatology:endYear}_ts_${timeSeries:startYear}-${timeSeries:endYear}
+
+# directory where analysis should be written
+# NOTE: This directory path must be specific to each test case.
+baseDirectory = /lcrc/group/e3sm/${web_portal:username}/analysis/${output:subdir}
+
+# provide an absolute path to put HTML in an alternative location (e.g. a web
+# portal)
+htmlSubdirectory = ${web_portal:base_path}/${web_portal:username}/analysis/${output:subdir}
+
+# a list of analyses to generate.  Valid names can be seen by running:
+#   mpas_analysis --list
+# This command also lists tags for each analysis.
+# Shortcuts exist to generate (or not generate) several types of analysis.
+# These include:
+#   'all' -- all analyses will be run
+#   'all_publicObs' -- all analyses for which observations are available on the
+#                      public server (the default)
+#   'all_<tag>' -- all analysis with a particular tag will be run
+#   'all_<component>' -- all analyses from a given component (either 'ocean'
+#                        or 'seaIce') will be run
+#   'only_<component>', 'only_<tag>' -- all analysis from this component or
+#                                       with this tag will be run, and all
+#                                       analysis for other components or
+#                                       without the tag will be skipped
+#   'no_<task_name>' -- skip the given task
+#   'no_<component>', 'no_<tag>' -- in analogy to 'all_*', skip all analysis
+#                                   tasks from the given component or with
+#                                   the given tag.  Do
+#                                      mpas_analysis --list
+#                                   to list all task names and their tags
+# an equivalent syntax can be used on the command line to override this
+# option:
+#    mpas_analysis analysis.cfg --generate \
+#         only_ocean,no_timeSeries,timeSeriesSST
+generate = ['all', 'no_BGC', 'no_icebergs', 'no_index', 'no_eke',
+            'no_landIceCavities']
+
+
+

In this example, I have made liberal use of +extended interpolation +in the config file to make use of config options in other config options.

+

subdir is the subdirectory for both the analysis and the output on the +web portal. It typically indicates the run being used and the years covered +by the climatology (and sometimes the time series as in this example). See +7.1.5. [climatology], [timeSeries] and [index] for more info on these config +options.

+

baseDirectory is any convenient location for the output. In this example, +I have used a typical path on Anvil or Chrysalis, including the +${web_portal:username} that will be populated automatically on a supported +machine and ${output:subdir}, the subdirectory from above.

+

htmlSubdirectory is set using the location of the web portal, which is +automatically determined on an E3SM machine, the user name, and the same +subdirectory used for analysis output. You can modify the path as needed to +match your own preferred workflow.

+
+

Note

+

On some E3SM supported machines like Chicoma, there is no +web portal so you will want to just manually replace the part of the +basePath given by /lcrc/group/e3sm/${web_portal:username} in the +example above.

+

You will need to just put the web output in an html subdirectory within +the analysis output:

+
htmlSubdirectory = html
+
+
+

and copy this from the supercomputer to your laptop to view it in your +browser.

+
+

Finally, the generate option provides a python list of flags that can be +used to determine which analysis will be generated. In this case, we are +turning off some analysis that will not work because some features +(biogeochemistry, icebergs, eddy kinetic energy and land-ice cavities) are not +available in this run and one (the El Niño climate index) is not useful.

+
+
+

7.1.5. [climatology], [timeSeries] and [index]

+

These options determine the start and end years of climatologies (time averages +over a particular month, season or the full year), time series or the El Niño +climate index.

+
[climatology]
+## options related to producing climatologies, typically to compare against
+## observations and previous runs
+
+# the first year over which to average climatalogies
+startYear = 3
+# the last year over which to average climatalogies
+endYear = 5
+
+[timeSeries]
+## options related to producing time series plots, often to compare against
+## observations and previous runs
+
+# start and end years for timeseries analysis.  Out-of-bounds values will lead
+# to an error.
+startYear = 1
+endYear = 5
+
+[index]
+## options related to producing nino index.
+
+# start and end years for El Nino 3.4 analysis.  Out-of-bounds values will lead
+# to an error.
+startYear = 1
+endYear = 5
+
+
+

For each of these options, a full year of data must exist for that year to +be included in the analysis.

+

For the example QU480 simulation, only 5 years of output are available, so we +are doing a climatology over the last 3 years (3 to 5) and displaying time +series over the full 5 years. (If the El Niño index weren’t disabled, it would +also be displayed over the full 5 years.)

+
+
+
+

7.2 Running MPAS-Analysis

+

The hard work is done. Now that we have a config file, we are ready to run.

+

To run MPAS-Analysis, you should either create a job script or log into +an interactive session on a compute node. Then, activate the mpas_dev +conda environment as in 4.4 Activating the environment.

+

On many file systems, MPAS-Analysis and other python-based software that used +NetCDF files based on the HDF5 file structure can experience file access errors +unless the following environment variable is set as follows in bash:

+
$ export HDF5_USE_FILE_LOCKING=FALSE
+
+
+

or under csh:

+
> setenv HDF5_USE_FILE_LOCKING FALSE
+
+
+

Then, running MPAS-Analysis is as simple as:

+
$ mpas_analysis -m <machine> myrun.cfg
+
+
+

where <machine> is the name of the machine (all lowercase). On Cori, we +only support the Haswell nodes (so the machine name is cori-haswell). For +now, we only support CPU nodes on Perlmutter (pm-cpu) and Chicoma +(chicoma-cpu).

+

Typical output if the analysis is running correctly looks something like:

+
$ mpas_analysis -m anvil myrun.cfg
+Detected E3SM supported machine: anvil
+Using the following config files:
+   /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/mpas_analysis/default.cfg
+   /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/lib/python3.10/site-packages/mache/machines/anvil.cfg
+   /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/mpas_analysis/configuration/anvil.cfg
+   /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/mpas_analysis/__main__.py
+   /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/myrun.cfg
+copying /gpfs/fs1/home/ac.xylar/code/mpas-analysis/add_my_fancy_task/myrun.cfg to HTML dir.
+
+running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp76l7of28/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp76l7of28/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_0.5x0.5degree_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --ignore_unmapped
+running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpj94wpf9y/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpj94wpf9y/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_6000.0x6000.0km_10.0km_Antarctic_stereo_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped
+running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp6zm13a0s/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmp6zm13a0s/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_WOCE_transects_5km_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped
+Preprocessing SOSE transect data...
+  temperature
+  salinity
+  potentialDensity
+  zonalVelocity
+  meridionalVelocity
+  velMag
+  Done.
+running: /gpfs/fs1/home/ac.xylar/anvil/mambaforge/envs/mpas_dev/bin/ESMF_RegridWeightGen --source /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpe2a9yblb/src_mesh.nc --destination /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/tmpe2a9yblb/dst_mesh.nc --weight /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/mapping/map_oQU480_to_SOSE_transects_5km_bilinear.nc --method bilinear --netcdf4 --no_log --src_loc center --src_regional --dst_regional --ignore_unmapped
+
+Running tasks: 100% |##########################################| Time:  0:06:42
+
+Log files for executed tasks can be found in /lcrc/group/e3sm/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5/logs
+Total setup time: 0:02:13.78
+Total run time: 0:08:55.86
+Generating webpage for viewing results...
+Web page: https://web.lcrc.anl.gov/public/e3sm/diagnostic_output/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5
+
+
+

The first part of the output, before the progress bar, is the “setup” phase +where MPAS-Analysis is checking if the requested analysis can be run on the +simulation results. The specific output shown here is related to 1) +listing the config files used to determine the final set of config options +used in the analysis, and 2) creating mapping files that are used to +interpolate between the oQU480 mesh and the various grids MPAS-Analysis +uses to compare with observations. Since MPAS-Analysis didn’t know about that +oQU480 mesh ahead of time, it is creating mapping files and regions masks +for this mesh on the fly.

+

The mpas_analysis command-line tool has several more options you can +explore with

+
$ mpas_analysis --help
+
+
+

These include listing the available analysis tasks and their tags, purging a +previous analysis run before running the analysis again, plotting all available +color maps, and outputting verbose python error messages when the analysis +fails during the setup phase (before a progress bar appears).

+
+
+

7.3 Viewing the Output

+

The primary output from MPAS-Analysis is a set of web pages, each containing +galleries of figures. The output can be found in the directory you provided in +7.1.4 [output] and given in the last line of +the analysis output (if you are on a supported machine with a web portal), +e.g.:

+
Web page: https://web.lcrc.anl.gov/public/e3sm/diagnostic_output/ac.xylar/analysis/A_WCYCL1850.ne4_oQU480.anvil/clim_3-5_ts_1-5
+
+
+
+

Note

+

On Cori and Perlmutter, you will need to change the permissions so you can +see the webpage online:

+
$ chmod -R ugo+rX /global/cfs/cdirs/e3sm/www/<username>
+
+
+

where <username> is your NERSC username.

+
+

If the web page is incomplete, it presumably means there was an error during +the analysis run, since the web page is generated as the final step. Check +the analysis output and then the log files for individual analysis tasks to +see what went wrong. See 7 Troubleshooting or ask for +help if you run into trouble.

+

The main web page has links to the ocean and sea-ice web pages as well as some +“provenance” information about which version of MPAS-Analysis you were using +and how it was configured.

+

The web page generated by this tutorial should look something like this +(somewhat outdated) +example output.

+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/tutorials/dev_understand_a_task.html b/1.11.0rc1/tutorials/dev_understand_a_task.html new file mode 100644 index 000000000..fef9f13d2 --- /dev/null +++ b/1.11.0rc1/tutorials/dev_understand_a_task.html @@ -0,0 +1,1252 @@ + + + + + + + Developers: Understanding an analysis task — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Developers: Understanding an analysis task

+

This tutorial walks a new developer through an existing analysis task to get +a more in-depth understanding of the code. This tutorial is meant as the +starting point for the Developers: Adding a new analysis task tutorial. It is a common +practice to find an existing analysis task that is as close as possible to the +new analysis, and to copy that existing task as a template for the new task. +This tutorial describes an existing analysis task, and +Developers: Adding a new analysis task uses it as a starting point for developing a new +task.

+
+

1. The big picture

+

MPAS-Analysis is meant to provide a first look at E3SM simulation output from +the MPAS-Ocean and MPAS-Seaice components. The analysis is intended to be +robust and automated. However, there is currently little effort to ensure that +the time period covered by the observations and model output are the same. +In other words, we often compare pre-industrial simulation results with +present-day observations. The justification for this is twofold. First, we +typically have few if any observations covering the pre-industrial period. +Second, we may be attempting to reduce biases that we assess to be much larger +than expected differences between pre-industrial and present-day climate +conditions. Under these conditions, MPAS-Analysis provides us with a useful +first impression of how our simulation is doing.

+
+

1.1 MPAS output

+

The primary output from MPAS-Ocean and MPAS-Seaice are monthly and daily +averages of a large number of data fields. Here are links to the list of:

+ +

The components also produce a smaller amount of more specialized output, such +as monthly maximum/minimum values.

+

MPAS data is provided on unstructured meshes, meaning that it isn’t +particularly amenable to analysis with standard tools such as +ESMValTool. Additionally, E3SM’s science +campaigns require unique, sometimes regionally focused analysis not available +in existing tools.

+
+
+

1.2 Analysis tasks

+

MPAS-Analysis is designed to run a series of interdependent analysis tasks in +parallel with one another. It builds up a dependency graph between the tasks, +allowing independent tasks to run at the same time while putting dependent +tasks on hold until the tasks they depend on are completed. Additionally, +MPAS-Analysis has some rudimentary tools for keeping track of the resources +that some computationally intensive tasks require to prevent the tool from +running out of memory.

+

Currently, nearly all operations in MPAS-Analysis must run on a single HPC +node. (The exception is +ncclimo, +which is used to generate climatologies, and which can run in parallel across +up to 12 nodes if desired.) We hope to support broader task parallelism in +the not-too-distant future using the parsl +python package.

+

Each analysis task is a class that descends from the +AnalysisTask base class. Tasks +can also have “subtasks” that do part of the work needed for the final +analysis. A subtask might perform a computation on a specific region, period +of time, or season. It might combine data from other subtasks into a single +dataset. Or it might plot the data computed by a previous task. The +advantages of dividing up the work are 1) that each subtask can potentially +run in parallel with other subtasks and 2) it can allow code reuse if the same +subtask can be used across multiple analysis tasks.

+
+
+

1.3 Shared framework

+

MPAS-Analysis includes a shared framework used across analysis tasks. The +framework is made up mostly of functions that can be called from within +analysis tasks but also includes some analysis tasks and subtasks that are +common to MPAS-Ocean, MPAS-Seaice and potentially other MPAS components +(notably MALI) that may be supported in the future.

+

This tutorial will not go though the shared framework in detail. In addition +to the AnalysisTask base class, the shared +framework includes the following +packages:

+
$ ls mpas_analysis/shared
+climatology
+constants
+generalized_reader
+html
+interpolation
+io
+mpas_xarray
+plot
+projection
+regions
+time_series
+timekeeping
+transects
+...
+
+
+

A separate tutorial will explore the shared framework and how to modify it.

+
+
+
+

2. Tour of an analysis task (ClimatologyMapOHCAnomaly)

+

Aside from some code that takes care of managing analysis tasks and generating +web pages, MPAS-Analysis is made up almost entirely of analysis tasks and +shared functions they can call. Since adding new analysis nearly always +means creating a new class for the task, we start with a tour of an existing +analysis task as well as the AnalysisTask +base class that it descends from.

+

We will use ClimatologyMapOHCAnomaly as an +example analysis task for this tour because it will turn out to be a useful +starting point for the analysis we want to add in Developers: Adding a new analysis task. +You can read more about climatologyMapOHCAnomaly in the User’s +Guide.

+

It will be useful to open the following links in your browser to have a look +at the code directly: +ClimatologyMapOHCAnomaly

+

If you want to be a little more adventurous, you can also pull up the code +for the base class: +AnalysisTask

+
+

2.1 Attributes

+

Classes can contain pieces of data called attributes. In MPAS-Analysis, the +objects representing tasks share several attributes that they inherit from +the AnalysisTask class. A few of the most +important attributes of an analysis task are:

+
    +
  • config - an object for getting the values of config options

  • +
  • namelist - an object for getting namelist options from the E3SM +simulation

  • +
  • runStreams - an object for finding MPAS output files in the run +directory. In practice, this is always a restart file used to get the +MPAS mesh and, for MPAS-Ocean, the vertical coordinate.

  • +
  • historyStreams - an object for finding MPAS history streams (often +timeSeriesStatsMonthlyOutput).

  • +
  • calendar - the name of the calendar that was used in the MPAS run +(in practice always 'noleap' or until recently 'gregorian_noleap').

  • +
  • xmlFileNames - a list of XML files associated with plots produced by this +analysis task. As we will discuss, these are used to help populate the +web page showing the analysis.

  • +
  • logger - an object that keeps track of sending output to log files +(rather than the terminal) when the analysis is running. During the +run_task() phase of the analysis when tasks are running in parallel with +each other, make sure to use logger.info() instead of print() to +send output to the log file.

  • +
+

Within the methods of analysis task class, these attributes can be accessed +using the self object, e.g. self.config. It is often helpful to make +a local reference to the object to make the code more compact, e.g.:

+
config = self.config
+seasons = config.getexpression('climatologyMapOHCAnomaly', 'seasons')
+
+
+

The analysis task we’re looking at, ClimatologyMapOHCAnomaly +has some attributes of its own:

+
    +
  • mpasClimatologyTask - the task that produced the climatology to be +remapped and plotted

  • +
  • refYearClimatologyTask - The task that produced the climatology from the +first year to be remapped and then subtracted from the main climatology +(since we want to plot an anomaly from the beginning of the simulation)

  • +
+
+
+

2.2 Constructor

+

Almost all classes have “constructors”, which are methods for making a new +object of that class. In python, the constructor is called __init__(). +In general, the __ (double underscore) is used in python to indicate a +function or method with special meaning.

+

The constructor of a subclass (such as +ClimatologyMapOHCAnomaly) always calls the +constructor of the superclass (AnalysisTask +in this case). So we’ll talk about the constructor for +ClimatologyMapOHCAnomaly first and then get +to AnalysisTask.

+

The constructor for ClimatologyMapOHCAnomaly +starts off like this:

+
def __init__(self, config, mpas_climatology_task,
+             ref_year_climatology_task, control_config=None):
+
+
+

As with all methods, it takes the self object as the first argument. +Then, it takes a config object, which is true of all analysis tasks. Then, +it has some other arguments that are more specific to the analysis being +performed. Here, we have 2 other analysis tasks as arguments: +mpasClimatologyTask and refYearClimatologyTask. As described in +the previous section, these are tasks for computing climatologies that will +later be remapped to a comparison grid for plotting. A little later in the +constructor, we store references to these tasks as attributes:

+
self.mpas_climatology_task = mpas_climatology_task
+self.ref_year_climatology_task = ref_year_climatology_task
+
+
+

Returning to the constructor above, the first thing we do is to call the super class’s __init__() method:

+
def __init__(self, config, mpas_climatology_task,
+             ref_year_climatology_task, control_config=None):
+    """
+    Construct the analysis task.
+
+    Parameters
+    ----------
+    config : mpas_tools.config.MpasConfigParser
+        Configuration options
+
+    mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask
+        The task that produced the climatology to be remapped and plotted
+
+    ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask
+        The task that produced the climatology from the first year to be
+        remapped and then subtracted from the main climatology
+
+    control_config : mpas_tools.config.MpasConfigParser, optional
+        Configuration options for a control run (if any)
+    """
+
+    field_name = 'deltaOHC'
+    # call the constructor from the base class (AnalysisTask)
+    super().__init__(config=config, taskName='climatologyMapOHCAnomaly',
+                     componentName='ocean',
+                     tags=['climatology', 'horizontalMap', field_name,
+                           'publicObs', 'anomaly'])
+
+
+

We’re passing along the config options to the base class so it can store +them. Then, we’re giving the task a unique taskName (the same as the class +name except that it starts with a lowercase letter). We’re saying that the +MPAS componentName is the ocean.

+

Then, we give the task a number of tags that can be helpful in determining whether or not to generate this particular analysis based on the Generate Option. The tags are used to describe various aspects of the analysis. Here, we will produce plots of a climatology (as opposed to a time series). The plot will be a horizontalMap. It will involve the variable deltaOHC. This analysis doesn’t involve any observations, but we include a publicObs tag to indicate that it doesn’t require any proprietary observational data sets that we do not have the rights to make public. (Currently, we have a few such data sets for things like Antarctic melt rates.) Finally, the analysis involves an anomaly computed relative to the beginning of the simulation.

+

From there, we get the values of some config options, raising errors if we +find something unexpected:

+
section_name = self.taskName
+
+# read in what seasons we want to plot
+seasons = config.getexpression(section_name, 'seasons')
+
+if len(seasons) == 0:
+    raise ValueError(f'config section {section_name} does not contain '
+                     f'valid list of seasons')
+
+comparison_grid_names = config.getexpression(section_name,
+                                             'comparisonGrids')
+
+if len(comparison_grid_names) == 0:
+    raise ValueError(f'config section {section_name} does not contain '
+                     f'valid list of comparison grids')
+
+depth_ranges = config.getexpression('climatologyMapOHCAnomaly',
+                                    'depthRanges',
+                                    use_numpyfunc=True)
+
+
+

By default, these config options look like this:

+
[climatologyMapOHCAnomaly]
+## options related to plotting horizontally remapped climatologies of
+## ocean heat content (OHC) against control model results (if available)
+
+...
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# A list of pairs of minimum and maximum depths (positive up, in meters) to
+# include in the vertical sums.  The default values are the equivalents of the
+# default ranges of the timeSeriesOHCAnomaly task, with a value of -10,000 m
+# intended to be well below the bottom of the ocean for all existing MPAS-O
+# meshes.
+depthRanges = [(0.0, -10000.0), (0.0, -700.0), (-700.0, -2000.0), (-2000.0, -10000.0)]
+
+
+

We plot only the annual mean OHC anomaly and we plot it only on a global +latitude-longitude grid. The range of depths is:

+
    +
  • the full ocean column

  • +
  • sea surface to 700 m depth

  • +
  • 700 m to 2000 m depth

  • +
  • 2000 m to the seafloor

  • +
+

A user would be free to change any of these config options, and the analysis +should run correctly. They could choose to plot on a different comparison +grid, add new seasons, or change the depth range. As long as they ran the +analysis in a fresh directory (or purged output from a previous analysis run), +this should work correctly.

+

Next, we store some values that will be useful later:

+
mpas_field_name = 'deltaOHC'
+
+variable_list = ['timeMonthly_avg_activeTracers_temperature',
+                 'timeMonthly_avg_layerThickness']
+
+
+

This particular analysis involves 4 different depth ranges over which we compute the ocean heat content. The remainder of the analysis is performed separately for each of these depth ranges in a subtask. We loop over the depth ranges and add a subtask that will first compute the ocean heat content (OHC) and then remap it to the comparison grids (RemapMpasOHCClimatology):

+
for min_depth, max_depth in depth_ranges:
+    depth_range_string = \
+        f'{np.abs(min_depth):g}-{np.abs(max_depth):g}m'
+    remap_climatology_subtask = RemapMpasOHCClimatology(
+        mpas_climatology_task=mpas_climatology_task,
+        ref_year_climatology_task=ref_year_climatology_task,
+        parent_task=self,
+        climatology_name=f'{field_name}_{depth_range_string}',
+        variable_list=variable_list,
+        comparison_grid_names=comparison_grid_names,
+        seasons=seasons,
+        min_depth=min_depth,
+        max_depth=max_depth)
+
+    self.add_subtask(remap_climatology_subtask)
+
+    ...
+
+
+

We will explore the RemapMpasOHCClimatology subtask later in the tutorial +so we will not discuss it further here.

+

Still within the loop over depth ranges, we then add a subtask (PlotClimatologyMapSubtask) for each plot we want to create, one for each comparison grid and season. (By default, there is only one comparison grid and one “season”: the full year, ANN.)

+
for min_depth, max_depth in depth_ranges:
+    ...
+    out_file_label = f'deltaOHC_{depth_range_string}'
+    remap_observations_subtask = None
+    if control_config is None:
+        ref_title_label = None
+        ref_field_name = None
+        diff_title_label = 'Model - Observations'
+
+    else:
+        control_run_name = control_config.get('runs', 'mainRunName')
+        ref_title_label = f'Control: {control_run_name}'
+        ref_field_name = mpas_field_name
+        diff_title_label = 'Main - Control'
+
+    for comparison_grid_name in comparison_grid_names:
+        for season in seasons:
+            # make a new subtask for this season and comparison grid
+            subtask_name = f'plot{season}_{comparison_grid_name}_{depth_range_string}'
+
+            subtask = PlotClimatologyMapSubtask(
+                self, season, comparison_grid_name,
+                remap_climatology_subtask, remap_observations_subtask,
+                controlConfig=control_config, subtaskName=subtask_name)
+
+            subtask.set_plot_info(
+                outFileLabel=out_file_label,
+                fieldNameInTitle=f'$\\Delta$OHC over {depth_range_string}',
+                mpasFieldName=mpas_field_name,
+                refFieldName=ref_field_name,
+                refTitleLabel=ref_title_label,
+                diffTitleLabel=diff_title_label,
+                unitsLabel=r'GJ m$^{-2}$',
+                imageCaption=f'Anomaly in Ocean Heat Content over {depth_range_string}',
+                galleryGroup='OHC Anomaly',
+                groupSubtitle=None,
+                groupLink='ohc_anom',
+                galleryName=None)
+
+            self.add_subtask(subtask)
+
+
+

First, we make sure the subtask has a unique name. If two tasks or subtasks +have the same taskName and subtaskName, MPAS-Analysis will only run +the last one and the task manager may become confused.

+

Then, we create a subtask object that is an instance of the +PlotClimatologyMapSubtask +class. This class is shared between several ocean analysis tasks for plotting +climatologies as horizontal maps. It can plot just MPAS output, remapped to +one or more comparison grids and averaged over one or more seasons. It can +also plot that data against an observational field that has been remapped to +the same comparison grid and averaged over the same seasons. In this case, +there are no observations available for comparison +(remap_observations_subtask = None). A user may have provided a +“control” run of MPAS-Analysis to compare with this analysis run (a so-called +“model vs. model” comparison). If so, control_config will have config +options describing the other analysis run. If not, control_config is +None.

+

Next, we call the set_plot_info() method of PlotClimatologyMapSubtask to provide things like the title and units for the plot and the field to plot. We also provide information needed for the final analysis web page such as the name of the gallery group. (We do not provide a gallery name within the gallery group because there will be no other galleries within this group.) All the plots for a given comparison grid will end up in the same gallery, with different depths and seasons one after the other.

+

Finally, we call add_subtask() +to add the subtask to this task.

+
+
+

2.3 setup_and_check() method

+

The setup_and_check() method of an analysis task is called when it is clear that this particular analysis has been requested (but before the analysis is actually ready to run). This is in contrast to the constructor, which is run for every analysis task every time MPAS-Analysis runs because we need information from the analysis task (its name, component and tags) in order to determine if it should run or not.

+

In this method, we would typically perform checks to make sure the simulation has been configured properly to run the analysis — for example, whether the necessary analysis member is enabled.

+
def setup_and_check(self):
+    """
+    Checks whether analysis is being performed only on the reference year,
+    in which case the analysis will not be meaningful.
+
+    Raises
+    ------
+    ValueError: if attempting to analyze only the reference year
+    """
+
+    # first, call setup_and_check from the base class (AnalysisTask),
+    # which will perform some common setup, including storing:
+    #     self.runDirectory , self.historyDirectory, self.plotsDirectory,
+    #     self.namelist, self.runStreams, self.historyStreams,
+    #     self.calendar
+    super().setup_and_check()
+
+    start_year, end_year = self.mpas_climatology_task.get_start_and_end()
+    ref_start_year, ref_end_year = \
+        self.ref_year_climatology_task.get_start_and_end()
+
+    if (start_year == ref_start_year) and (end_year == ref_end_year):
+        raise ValueError('OHC Anomaly is not meaningful and will not work '
+                         'when climatology and ref year are the same.')
+
+
+

In this particular case, we first call the super class’ version of the +setup_and_check() method. This +takes care of some important setup.

+

Then, we use this method to check if the user has specified meaningful values +for the climatology start and end year and the reference year. If they happen +to be the same, it doesn’t really make sense to run the analysis and it will +raise an error so the analysis gets skipped.

+

The ClimatologyMapOHCAnomaly has delegated all its work to its subtasks +so it doesn’t define a run_task() method. Tasks or subtasks that actually +do the work typically need to define this method, as we will explore below.

+
+
+
+

3. Tour of a subtask (RemapMpasOHCClimatology)

+

The class RemapMpasOHCClimatology is, in some ways, more complicated than +its “parent” task ClimatologyMapOHCAnomaly. +It descends not from the AnalysisTask base +class but from another subtask, +RemapMpasClimatologySubtask. +This tutorial won’t attempt to cover +RemapMpasClimatologySubtask in +all its detail. The basics are that that class starts with MPAS climatology +data over one or more seasons that has previously been computed by an +MpasClimatologyTask task. It +remaps that data from the MPAS mesh to one or more comparison grids (e.g. +global latitude-longitude or Antarctic stereographic) where it can be plotted +and compared with observations or another MPAS-Analysis run.

+

Here, we are not just using +RemapMpasClimatologySubtask +directly because we need to add to its functionality. We need to compute the +OHC, which is not available straight from MPAS-Ocean output, from the +monthly-mean temperature and layer thickness.

+
+

3.1 Attributes

+

The docstring indicates the attributes that RemapMpasOHCClimatology +includes. (It also has all the attributes of its super class, +RemapMpasClimatologySubtask, +and that class’ super class, AnalysisTask, +but we don’t redundantly document these in the docstring in part because that +would be a maintenance nightmare.)

+
class RemapMpasOHCClimatology(RemapMpasClimatologySubtask):
+    """
+    A subtask for computing climatologies of ocean heat content from
+    climatologies of temperature
+
+    Attributes
+    ----------
+    ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask
+        The task that produced the climatology from the first year to be
+        remapped and then subtracted from the main climatology
+
+    min_depth, max_depth : float
+        The minimum and maximum depths for integration
+    """
+
+
+

The attributes are a task for computing the climatology over the reference +year (usually the start of the simulation), ref_year_climatology_task, +and the minimum and maximum depth over which the ocean heat content will be +integrated.

+
+
+

3.2 Constructor

+
def __init__(self, mpas_climatology_task, ref_year_climatology_task,
+             parent_task, climatology_name, variable_list, seasons,
+             comparison_grid_names, min_depth, max_depth):
+
+    """
+    Construct the analysis task and adds it as a subtask of the
+    ``parent_task``.
+
+    Parameters
+    ----------
+    mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask
+        The task that produced the climatology to be remapped
+
+    ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask
+        The task that produced the climatology from the first year to be
+        remapped and then subtracted from the main climatology
+
+    parent_task :  mpas_analysis.shared.AnalysisTask
+        The parent task, used to get the ``taskName``, ``config`` and
+        ``componentName``
+
+    climatology_name : str
+        A name that describes the climatology (e.g. a short version of
+        the important field(s) in the climatology) used to name the
+        subdirectories for each stage of the climatology
+
+    variable_list : list of str
+        A list of variable names in ``timeSeriesStatsMonthly`` to be
+        included in the climatologies
+
+    seasons : list of str, optional
+        A list of seasons (keys in ``shared.constants.monthDictionary``)
+        to be computed or ['none'] (not ``None``) if only monthly
+        climatologies are needed.
+
+    comparison_grid_names : list of {'latlon', 'antarctic'}
+        The name(s) of the comparison grid to use for remapping.
+
+    min_depth, max_depth : float
+        The minimum and maximum depths for integration
+    """
+
+    depth_range_string = f'{np.abs(min_depth):g}-{np.abs(max_depth):g}m'
+    subtask_name = f'remapMpasClimatology_{depth_range_string}'
+    # call the constructor from the base class
+    # (RemapMpasClimatologySubtask)
+    super().__init__(
+        mpas_climatology_task, parent_task, climatology_name,
+        variable_list, seasons, comparison_grid_names,
+        subtaskName=subtask_name)
+
+    self.ref_year_climatology_task = ref_year_climatology_task
+    self.run_after(ref_year_climatology_task)
+    self.min_depth = min_depth
+    self.max_depth = max_depth
+
+
+

Most of the arguments to the constructor are passed along to the constructor +of RemapMpasClimatologySubtask. +These include a reference to the class for computing MPAS climatologies +(used to find the input files and to make sure this task waits until that +task is finished), a reference to the “parent” +ClimatologyMapOHCAnomaly task for some of its +attributes, the name of the climatology supplied by the parent (something like +deltaOHC_0-700m, depending on the depth range), a list of the variables +that go into computing the OHC, the season(s) over which the climatology was +requested, the comparison grid(s) to plot on and a unique name for this +subtask.

+

The ref_year_climatology_task that computes the climatology over the reference year is retained as an attribute of the class along with the depth range. These attributes will all be needed later when we compute the OHC. We indicate that this task must wait for the reference climatology to be available by calling run_after(). The super class will do the same for the mpas_climatology_task. It will also add this task as a subtask of the parent task.

+
+
+

3.3 setup_and_check() method

+

As in the parent task, we need to define the setup_and_check() method.

+
def setup_and_check(self):
+    """
+    Perform steps to set up the analysis and check for errors in the setup.
+    """
+
+    # first, call setup_and_check from the base class
+    # (RemapMpasClimatologySubtask), which will set up remappers and add
+    # variables to mpas_climatology_task
+    super().setup_and_check()
+
+    # don't add the variables and seasons to mpas_climatology_task until
+    # we're sure this subtask is supposed to run
+    self.ref_year_climatology_task.add_variables(self.variableList,
+                                                 self.seasons)
+
+
+

In this particular case, we first call the super class’ version of the +setup_and_check() +method. This takes care of some important setup including adding the variables +and season(s) we need to the mpas_climatology_task.

+

Then, we use this method to add variables we need +and the requested season(s) to the task for computing the climatology over the +reference year (ref_year_climatology_task). We don’t do this in the +constructor because if we did, we would always be asking for the variables +needed to compute the OHC even if we don’t actually end up computing it. This +could be a big waste of time and disk space. The super class +RemapMpasClimatologySubtask can’t +take care of this for us because it isn’t designed for computing anomalies, +just “normal” climatologies over a range of years.

+
+
+

3.4 run_task() method

+

Normally, the main work of a task happens in the run_task() method. +The RemapMpasOHCClimatology class doesn’t define this method because it is +happy to inherit the +run_task() +method from its super class, +RemapMpasClimatologySubtask.

+

An abbreviated version of that method looks like this:

+
def run_task(self):
+    """
+    Compute the requested climatologies
+    """
+    ...
+    for season in self.seasons:
+        self._mask_climatologies(season, dsMask)
+    ...
+
+
+

It calls a private helper method:

+
def _mask_climatologies(self, season, dsMask):
+    """
+    For each season, creates a masked version of the climatology
+    """
+    ...
+    if not os.path.exists(maskedClimatologyFileName):
+        ...
+
+        # customize (if this function has been overridden)
+        climatology = self.customize_masked_climatology(climatology,
+                                                        season)
+
+        write_netcdf(climatology, maskedClimatologyFileName)
+
+
+

This private method (the leading underscore indicates that it is private), in +turn, calls the customize_masked_climatology() method, which is our chance +to make changes to the climatology before it gets remapped. That’s where +we will actually compute the OHC from variables available from MPAS output.

+
+
+

3.5 customize_masked_climatology() method

+

Here is how we compute the OHC itself:

+
def customize_masked_climatology(self, climatology, season):
+    """
+    Compute the ocean heat content (OHC) anomaly from the temperature
+    and layer thickness fields.
+
+    Parameters
+    ----------
+    climatology : xarray.Dataset
+        the climatology data set
+
+    season : str
+        The name of the season to be masked
+
+    Returns
+    -------
+    climatology : xarray.Dataset
+        the modified climatology data set
+    """
+
+    ohc = self._compute_ohc(climatology)
+
+    ...
+
+
+

We call a private helper method to do the actual work, so let’s take a look +at that before we continue with customize_masked_climatology().

+
def _compute_ohc(self, climatology):
+    """
+    Compute the OHC from the temperature and layer thicknesses in a given
+    climatology data sets.
+    """
+    ds_restart = xr.open_dataset(self.restartFileName)
+    ds_restart = ds_restart.isel(Time=0)
+
+    # specific heat [J/(kg*degC)]
+    cp = self.namelist.getfloat('config_specific_heat_sea_water')
+    # [kg/m3]
+    rho = self.namelist.getfloat('config_density0')
+
+    units_scale_factor = 1e-9
+
+    n_vert_levels = ds_restart.sizes['nVertLevels']
+
+    z_mid = compute_zmid(ds_restart.bottomDepth, ds_restart.maxLevelCell-1,
+                         ds_restart.layerThickness)
+
+    vert_index = xr.DataArray.from_dict(
+        {'dims': ('nVertLevels',), 'data': np.arange(n_vert_levels)})
+
+    temperature = climatology['timeMonthly_avg_activeTracers_temperature']
+    layer_thickness = climatology['timeMonthly_avg_layerThickness']
+
+    masks = [vert_index < ds_restart.maxLevelCell,
+             z_mid <= self.min_depth,
+             z_mid >= self.max_depth]
+    for mask in masks:
+        temperature = temperature.where(mask)
+        layer_thickness = layer_thickness.where(mask)
+
+    ohc = units_scale_factor * rho * cp * layer_thickness * temperature
+    ohc = ohc.sum(dim='nVertLevels')
+    return ohc
+
+
+

This function uses a combination of mesh information taken from an MPAS restart file (available from the self.restartFileName attribute inherited from RemapMpasClimatologySubtask), namelist options available from the self.namelist reader (inherited from AnalysisTask), and temperature and layer_thickness from the climatology dataset itself. As the docstring for customize_masked_climatology() states, climatology is an xarray.Dataset. We know it has variables timeMonthly_avg_activeTracers_temperature and timeMonthly_avg_layerThickness because we requested them back in the constructor of ClimatologyMapOHCAnomaly. We compute the ohc as an xarray.DataArray that we return from this helper method.

+

Back to customize_masked_climatology(), we have:

+
def customize_masked_climatology(self, climatology, season):
+    ...
+    ohc = self._compute_ohc(climatology)
+
+    ref_file_name = self.ref_year_climatology_task.get_file_name(season)
+    ref_year_climo = xr.open_dataset(ref_file_name)
+    if 'Time' in ref_year_climo.dims:
+        ref_year_climo = ref_year_climo.isel(Time=0)
+    ref_ohc = self._compute_ohc(ref_year_climo)
+
+    climatology['deltaOHC'] = ohc - ref_ohc
+    climatology.deltaOHC.attrs['units'] = 'GJ m^-2$'
+    start_year = self.ref_year_climatology_task.startYear
+    climatology.deltaOHC.attrs['description'] = \
+        f'Anomaly from year {start_year} in ocean heat content'
+    climatology = climatology.drop_vars(self.variableList)
+
+    return climatology
+
+
+

We use the same helper function to compute the ref_ohc using the +climatology for the reference year. Then, we compute the anomaly (the +difference between these two, deltaOHC) and we add some attributes, +units and description, to make the NetCDF output that will go into the +analysis output directory a little more useful.

+
+
+
+

4. The full code for posterity

+

Since the ClimatologyMapOHCAnomaly analysis task may evolve in the future, +here is the full analysis task as described in this tutorial:

+
# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+import xarray as xr
+import numpy as np
+
+from mpas_analysis.shared import AnalysisTask
+from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask
+from mpas_analysis.ocean.plot_climatology_map_subtask import \
+    PlotClimatologyMapSubtask
+from mpas_analysis.ocean.utility import compute_zmid
+
+
+class ClimatologyMapOHCAnomaly(AnalysisTask):
+    """
+    An analysis task for comparison of the anomaly from a reference year
+    (typically the start of the simulation) of ocean heat content (OHC)
+
+    Attributes
+    ----------
+    mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask
+        The task that produced the climatology to be remapped and plotted
+
+    ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask
+        The task that produced the climatology from the first year to be
+        remapped and then subtracted from the main climatology
+    """
+
+    def __init__(self, config, mpas_climatology_task,
+                 ref_year_climatology_task, control_config=None):
+        """
+        Construct the analysis task.
+
+        Parameters
+        ----------
+        config : mpas_tools.config.MpasConfigParser
+            Configuration options
+
+        mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask
+            The task that produced the climatology to be remapped and plotted
+
+        ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask
+            The task that produced the climatology from the first year to be
+            remapped and then subtracted from the main climatology
+
+        control_config : mpas_tools.config.MpasConfigParser, optional
+            Configuration options for a control run (if any)
+        """
+
+        field_name = 'deltaOHC'
+        # call the constructor from the base class (AnalysisTask)
+        super().__init__(config=config, taskName='climatologyMapOHCAnomaly',
+                         componentName='ocean',
+                         tags=['climatology', 'horizontalMap', field_name,
+                               'publicObs', 'anomaly'])
+
+        self.mpas_climatology_task = mpas_climatology_task
+        self.ref_year_climatology_task = ref_year_climatology_task
+
+        section_name = self.taskName
+
+        # read in what seasons we want to plot
+        seasons = config.getexpression(section_name, 'seasons')
+
+        if len(seasons) == 0:
+            raise ValueError(f'config section {section_name} does not contain '
+                             f'valid list of seasons')
+
+        comparison_grid_names = config.getexpression(section_name,
+                                                     'comparisonGrids')
+
+        if len(comparison_grid_names) == 0:
+            raise ValueError(f'config section {section_name} does not contain '
+                             f'valid list of comparison grids')
+
+        depth_ranges = config.getexpression('climatologyMapOHCAnomaly',
+                                            'depthRanges',
+                                            use_numpyfunc=True)
+
+        mpas_field_name = 'deltaOHC'
+
+        variable_list = ['timeMonthly_avg_activeTracers_temperature',
+                         'timeMonthly_avg_layerThickness']
+
+        for min_depth, max_depth in depth_ranges:
+            depth_range_string = \
+                f'{np.abs(min_depth):g}-{np.abs(max_depth):g}m'
+            remap_climatology_subtask = RemapMpasOHCClimatology(
+                mpas_climatology_task=mpas_climatology_task,
+                ref_year_climatology_task=ref_year_climatology_task,
+                parent_task=self,
+                climatology_name=f'{field_name}_{depth_range_string}',
+                variable_list=variable_list,
+                comparison_grid_names=comparison_grid_names,
+                seasons=seasons,
+                min_depth=min_depth,
+                max_depth=max_depth)
+
+            self.add_subtask(remap_climatology_subtask)
+
+            out_file_label = f'deltaOHC_{depth_range_string}'
+            remap_observations_subtask = None
+            if control_config is None:
+                ref_title_label = None
+                ref_field_name = None
+                diff_title_label = 'Model - Observations'
+
+            else:
+                control_run_name = control_config.get('runs', 'mainRunName')
+                ref_title_label = f'Control: {control_run_name}'
+                ref_field_name = mpas_field_name
+                diff_title_label = 'Main - Control'
+
+            for comparison_grid_name in comparison_grid_names:
+                for season in seasons:
+                    # make a new subtask for this season and comparison grid
+                    subtask_name = f'plot{season}_{comparison_grid_name}_{depth_range_string}'
+
+                    subtask = PlotClimatologyMapSubtask(
+                        self, season, comparison_grid_name,
+                        remap_climatology_subtask, remap_observations_subtask,
+                        controlConfig=control_config, subtaskName=subtask_name)
+
+                    subtask.set_plot_info(
+                        outFileLabel=out_file_label,
+                        fieldNameInTitle=f'$\\Delta$OHC over {depth_range_string}',
+                        mpasFieldName=mpas_field_name,
+                        refFieldName=ref_field_name,
+                        refTitleLabel=ref_title_label,
+                        diffTitleLabel=diff_title_label,
+                        unitsLabel=r'GJ m$^{-2}$',
+                        imageCaption=f'Anomaly in Ocean Heat Content over {depth_range_string}',
+                        galleryGroup='OHC Anomaly',
+                        groupSubtitle=None,
+                        groupLink='ohc_anom',
+                        galleryName=None)
+
+                    self.add_subtask(subtask)
+
+    def setup_and_check(self):
+        """
+        Checks whether analysis is being performed only on the reference year,
+        in which case the analysis will not be meaningful.
+
+        Raises
+        ------
+        ValueError: if attempting to analyze only the reference year
+        """
+
+        # first, call setup_and_check from the base class (AnalysisTask),
+        # which will perform some common setup, including storing:
+        #     self.runDirectory , self.historyDirectory, self.plotsDirectory,
+        #     self.namelist, self.runStreams, self.historyStreams,
+        #     self.calendar
+        super().setup_and_check()
+
+        start_year, end_year = self.mpas_climatology_task.get_start_and_end()
+        ref_start_year, ref_end_year = \
+            self.ref_year_climatology_task.get_start_and_end()
+
+        if (start_year == ref_start_year) and (end_year == ref_end_year):
+            raise ValueError('OHC Anomaly is not meaningful and will not work '
+                             'when climatology and ref year are the same.')
+
+
+class RemapMpasOHCClimatology(RemapMpasClimatologySubtask):
+    """
+    A subtask for computing climatologies of ocean heat content from
+    climatologies of temperature
+
+    Attributes
+    ----------
+    ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask
+        The task that produced the climatology from the first year to be
+        remapped and then subtracted from the main climatology
+
+    min_depth, max_depth : float
+        The minimum and maximum depths for integration
+    """
+
+    def __init__(self, mpas_climatology_task, ref_year_climatology_task,
+                 parent_task, climatology_name, variable_list, seasons,
+                 comparison_grid_names, min_depth, max_depth):
+
+        """
+        Construct the analysis task and adds it as a subtask of the
+        ``parent_task``.
+
+        Parameters
+        ----------
+        mpas_climatology_task : mpas_analysis.shared.climatology.MpasClimatologyTask
+            The task that produced the climatology to be remapped
+
+        ref_year_climatology_task : mpas_analysis.shared.climatology.RefYearMpasClimatologyTask
+            The task that produced the climatology from the first year to be
+            remapped and then subtracted from the main climatology
+
+        parent_task :  mpas_analysis.shared.AnalysisTask
+            The parent task, used to get the ``taskName``, ``config`` and
+            ``componentName``
+
+        climatology_name : str
+            A name that describes the climatology (e.g. a short version of
+            the important field(s) in the climatology) used to name the
+            subdirectories for each stage of the climatology
+
+        variable_list : list of str
+            A list of variable names in ``timeSeriesStatsMonthly`` to be
+            included in the climatologies
+
+        seasons : list of str, optional
+            A list of seasons (keys in ``shared.constants.monthDictionary``)
+            to be computed or ['none'] (not ``None``) if only monthly
+            climatologies are needed.
+
+        comparison_grid_names : list of {'latlon', 'antarctic'}
+            The name(s) of the comparison grid to use for remapping.
+
+        min_depth, max_depth : float
+            The minimum and maximum depths for integration
+        """
+
+        depth_range_string = f'{np.abs(min_depth):g}-{np.abs(max_depth):g}m'
+        subtask_name = f'remapMpasClimatology_{depth_range_string}'
+        # call the constructor from the base class
+        # (RemapMpasClimatologySubtask)
+        super().__init__(
+            mpas_climatology_task, parent_task, climatology_name,
+            variable_list, seasons, comparison_grid_names,
+            subtaskName=subtask_name)
+
+        self.ref_year_climatology_task = ref_year_climatology_task
+        self.run_after(ref_year_climatology_task)
+        self.min_depth = min_depth
+        self.max_depth = max_depth
+
+    def setup_and_check(self):
+        """
+        Perform steps to set up the analysis and check for errors in the setup.
+        """
+
+        # first, call setup_and_check from the base class
+        # (RemapMpasClimatologySubtask), which will set up remappers and add
+        # variables to mpas_climatology_task
+        super().setup_and_check()
+
+        # don't add the variables and seasons to mpas_climatology_task until
+        # we're sure this subtask is supposed to run
+        self.ref_year_climatology_task.add_variables(self.variableList,
+                                                     self.seasons)
+
+    def customize_masked_climatology(self, climatology, season):
+        """
+        Compute the ocean heat content (OHC) anomaly from the temperature
+        and layer thickness fields.
+
+        Parameters
+        ----------
+        climatology : xarray.Dataset
+            the climatology data set
+
+        season : str
+            The name of the season to be masked
+
+        Returns
+        -------
+        climatology : xarray.Dataset
+            the modified climatology data set
+        """
+
+        ohc = self._compute_ohc(climatology)
+        ref_file_name = self.ref_year_climatology_task.get_file_name(season)
+        ref_year_climo = xr.open_dataset(ref_file_name)
+        if 'Time' in ref_year_climo.dims:
+            ref_year_climo = ref_year_climo.isel(Time=0)
+        ref_ohc = self._compute_ohc(ref_year_climo)
+
+        climatology['deltaOHC'] = ohc - ref_ohc
+        climatology.deltaOHC.attrs['units'] = 'GJ m^-2'
+        start_year = self.ref_year_climatology_task.startYear
+        climatology.deltaOHC.attrs['description'] = \
+            f'Anomaly from year {start_year} in ocean heat content'
+        climatology = climatology.drop_vars(self.variableList)
+
+        return climatology
+
+    def _compute_ohc(self, climatology):
+        """
+        Compute the OHC from the temperature and layer thicknesses in a given
+        climatology data sets.
+        """
+        ds_restart = xr.open_dataset(self.restartFileName)
+        ds_restart = ds_restart.isel(Time=0)
+
+        # specific heat [J/(kg*degC)]
+        cp = self.namelist.getfloat('config_specific_heat_sea_water')
+        # [kg/m3]
+        rho = self.namelist.getfloat('config_density0')
+
+        units_scale_factor = 1e-9
+
+        n_vert_levels = ds_restart.sizes['nVertLevels']
+
+        z_mid = compute_zmid(ds_restart.bottomDepth, ds_restart.maxLevelCell-1,
+                             ds_restart.layerThickness)
+
+        vert_index = xr.DataArray.from_dict(
+            {'dims': ('nVertLevels',), 'data': np.arange(n_vert_levels)})
+
+        temperature = climatology['timeMonthly_avg_activeTracers_temperature']
+        layer_thickness = climatology['timeMonthly_avg_layerThickness']
+
+        masks = [vert_index < ds_restart.maxLevelCell,
+                 z_mid <= self.min_depth,
+                 z_mid >= self.max_depth]
+        for mask in masks:
+            temperature = temperature.where(mask)
+            layer_thickness = layer_thickness.where(mask)
+
+        ohc = units_scale_factor * rho * cp * layer_thickness * temperature
+        ohc = ohc.sum(dim='nVertLevels')
+        return ohc
+
+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/tutorials/getting_started.html b/1.11.0rc1/tutorials/getting_started.html new file mode 100644 index 000000000..4d9a99111 --- /dev/null +++ b/1.11.0rc1/tutorials/getting_started.html @@ -0,0 +1,695 @@ + + + + + + + User: Getting Started — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

User: Getting Started

+

This tutorial walks a new user through the basics of using MPAS-Analysis.

+
+

1 Setting up a Conda Environment

+

MPAS-Analysis relies on several packages that are only available as conda +packages from the conda-forge channel. The first step for running +MPAS-Analysis is to create a conda environment with all the needed packages.

+
+

1.1 Installing Miniconda

+

If you have not yet installed Anaconda or Miniconda, you will need to begin +there. The concept behind Anaconda is that just about everything you would +need for a typical python workflow is included. The concept behind Miniconda +is that you create different environments for different purposes. This allows +for greater flexibility and tends to lead to fewer conflicts between +incompatible packages, particularly when using a channel other than the +defaults supplied by Anaconda. Since we will use the conda-forge +channel, the Miniconda approach is strongly recommended.

+

First download the Miniconda3 installer for your operating system, then run +it:

+
$ /bin/bash Miniconda3-latest-Linux-x86_64.sh
+
+
+
+

Note

+

MPAS-Analysis and many of the packages it depends on support OSX and Linux +but not Windows.

+
+

In this tutorial, we assume that Miniconda is installed in the default location, +~/miniconda3. If you choose to install it somewhere else, just make sure +to make the appropriate substitution whenever you see a reference to this path +below.

+

You will see prompt like this:

+
Do you wish the installer to initialize Miniconda3
+by running conda init? [yes|no]
+
+
+

You may wish to skip the step (answer “no”) if you are working on a system +where you will also be using other conda environments. This means you have to +manually activate conda whenever you need it. For bash and similar +shells, this is:

+
$ source ~/miniconda3/etc/profile.d/conda.sh
+
+
+

If you use csh, tcsh or related shells, this becomes:

+
> source ~/miniconda3/etc/profile.d/conda.csh
+
+
+
+
+

1.2 Creating a conda environment

+

First, we need to add the conda-forge channel and make sure it always takes +precedence for packages available on that channel:

+
$ conda config --add channels conda-forge
+$ conda config --set channel_priority strict
+
+
+

Then, you can create a new conda environment called mpas-analysis and +install the latest version of the mpas-analysis package into it:

+
$ conda create -n mpas-analysis python=3.8 mpas-analysis
+
+
+
+
+

1.3 Activating the environment

+

Each time you open a new terminal window, to activate the mpas-analysis +environment, you will need to run either for bash:

+
$ source ~/miniconda3/etc/profile.d/conda.sh
+$ conda activate mpas-analysis
+
+
+

or for csh:

+
> source ~/miniconda3/etc/profile.d/conda.csh
+> conda activate mpas-analysis
+
+
+

You can skip the source command if you chose to initialize Miniconda3 so it +loads automatically.

+
+
+
+

2 Downloading observations

+

MPAS-Analysis can only run a very limited set of analysis without observational +datasets for comparison. Many analysis tasks also require “mapping files” that +describe interpolation between different grids and meshes. Some tasks also +need masks that define regions of interest on different grids and meshes.

+

Most of the observational datasets as well as some common mapping files and +region masks are available for public download by calling:

+
$ download_analysis_data -o </path/to/mpas_analysis/diagnostics>
+
+
+

Substitute </path/to/mpas_analysis/diagnostics> with a convenient location +for storing the diagnostics data. You will need this path later on when you +set up a configuration file for running the analysis.

+
+

Note

+

The observational data is approximately 45GB in size and may take some +time to download. If the download gets interrupted, only incomplete or +missing files will be downloaded if you run download_analysis_data again.

+
+
+

Note

+

If you are on an E3SM supported system, the data has already +been downloaded for you to use. Please look at the baseDirectory config +option in the [diagnostics] of an example config file +for your supported machine.

+
+
+

Note

+

We do not currently support downloading a subset of the data but we plan to +add this capability in the future. In a pinch, you could manually download +files. For reference, here is the full list of files that are +automatically downloaded from the diagnostics web server.

+
+
+
+

3 Downloading an example run

+

We provide data for an “ultra-low-resolution” E3SM simulation (480 km grid cells +for the ocean and sea-ice components) as a relatively small example dataset for +testing MPAS-Analysis. The simulation is too coarse and too short to be of +scientific interest, but we wanted to keep the size manageable. The ocean and +sea-ice output are about 950 MB.

+

Download the Ultra-low-res ocean and sea-ice dataset and untar it into a +convenient directory:

+
$ mkdir A_WCYCL1850.ne4_oQU480.anvil
+$ cd A_WCYCL1850.ne4_oQU480.anvil
+$ tar -xvf ../20200305.A_WCYCL1850.ne4_oQU480.anvil.ocean_seaice.tar.gz
+$ cd ..
+
+
+
+
+

4 Configuring MPAS-Analysis

+

Configuration is with Python cfg (also called ini) files:

+
[runs]
+# mainRunName is a name that identifies the simulation being analyzed.
+mainRunName = runName
+
+[execute]
+# the number of parallel tasks (1 means tasks run in serial, the default)
+parallelTaskCount = 1
+# the parallelism mode in ncclimo ("serial" or "bck")
+ncclimoParallelMode = serial
+...
+
+
+

The default config file contains over 1,000 config options, which gives a lot +of flexibility to MPAS-Analysis but can be more than a bit overwhelming to new +users.

+

You can modify the default options with one or more custom config files. We +provide an example config file as a starting point. This file contains the +most common config options that a user might want to customize. The values are +mostly the same as in the default config file.

+

If you are on an E3SM supported machine, you can use the E3SM example config file instead. If you specify the name of the supported machine with the +--machine flag when you call mpas_analysis, there are several config +options that will be set for you automatically. E3SM supported machines aren’t +the primary focus of this tutorial, so the following will assume you’re working +on an unknown machine (or simply not taking advantage of known paths on a +supported machine).

+

First, you should copy this file to a new name for a specific run (say +myrun.cfg). Then, you should modify any config options you want to change +in your new config file. At a minimum, you need to specify:

+
    +
  • [runs]/mainRunName: A name for the run to be included plot titles +and legends

  • +
  • [diagnostics]/base_path: The base directory for observations, +mapping files and region masks

  • +
  • [input]/baseDirectory: The directory for the simulation results +to analyze

  • +
  • [input]/mpasMeshName: The name of the MPAS ocean/sea ice mesh

  • +
  • [output]/baseDirectory: The directory for the analysis results

  • +
+

We will cover these and a few other common options in this tutorial. With the +exception of a few paths that you will need to provide, the config options +displayed below are the ones appropriate for the example E3SM simulation from +Section 3.

+
+

4.1 [runs]

+

The [runs] section contains options related to which E3SM simulation(s) are +being analyzed:

+
[runs]
+## options related to the run to be analyzed and control runs to be
+## compared against
+
+# mainRunName is a name that identifies the simulation being analyzed.
+mainRunName = A_WCYCL1850.ne4_oQU480.anvil
+
+
+

The mainRunName can be any useful name that will appear at the top of each +web page of the analysis output and in the legends or titles of the figures. +Often, this is the full name of the E3SM simulation but sometimes it is +convenient to have a shorter name. In this case, we use part of the run name +but leave off the date of the simulation to keep it a little shorter.

+
+
+

4.2 [execute]

+

The [execute] section contains options related to serial or parallel +execution of the individual “tasks” that make up an MPAS-Analysis run.

+
[execute]
+## options related to executing parallel tasks
+
+# the number of parallel tasks (1 means tasks run in serial, the default)
+parallelTaskCount = 4
+
+# the parallelism mode in ncclimo ("serial", "bck" or "mpi")
+# Set this to "bck" (background parallelism) if running on a machine that can
+# handle 12 simultaneous processes, one for each monthly climatology.
+# Set to "mpi" to run one MPI task on each node and however many threads per
+# node to reach 12 total threads.
+ncclimoParallelMode = bck
+
+
+

parallelTaskCount should typically be equal to the number of cores on a +laptop or a desktop. In a high-performance computing (HPC) node, it is +typically not possible to run 30 to 60 tasks in parallel even if the node has +that many cores. This is because many tasks consume enough memory that the +job will crash with too many tasks running at the same time. We have found that +parallelTaskCount should typically be somewhere between 6 and 12 for the +HPC machines we use for E3SM.

+

ncclimoParallelMode indicates how ncclimo should be run to make +climatologies used in many MPAS-Analysis plots. Typically, we recommend +bck, meaning ncclimo runs with 12 threads at once on the same node. +In circumstances where ncclimo is crashing and it appears to be running out +of memory, it is worth exploring serial or mpi modes, or using the +xarray and dask instead to compute climatologies by setting +[climatology]/useNcclimo = False

+

For this tutorial, we suggest starting with 4 parallel tasks and ncclimo in +bck mode.

+
+
+

4.3 [diagnostics]

+

The diagnostics section is used to supply the directory where you downloaded +observations in Section 2.

+
[diagnostics]
+## config options related to observations, mapping files and region files used
+## by MPAS-Analysis in diagnostics computations.
+
+# The base path to the diagnostics directory.  Typically, this will be a shared
+# directory on each E3SM supported machine (see the example config files for
+# its location).  For other machines, this would be the directory pointed to
+# when running "download_analysis_data.py" to get the public observations,
+# mapping files and region files.
+base_path = /path/to/diagnostics
+
+
+

For base_path, supply the path where you downloaded the data +</path/to/mpas_analysis/diagnostics>.

+
+
+

4.4 [input]

+

The [input] section provides paths to the E3SM simulation data and the name +of the MPAS-Ocean and MPAS-Seaice mesh.

+
[input]
+## options related to reading in the results to be analyzed
+
+# directory containing model results
+baseDirectory = /dir/for/model/output
+
+# Note: an absolute path can be supplied for any of these subdirectories.
+# A relative path is assumed to be relative to baseDirectory.
+# In this example, results are assumed to be in <baseDirecory>/run
+
+# subdirectory containing restart files
+runSubdirectory = run
+# subdirectory for ocean history files
+oceanHistorySubdirectory = archive/ocn/hist
+# subdirectory for sea ice history files
+seaIceHistorySubdirectory = archive/ice/hist
+
+# names of namelist and streams files, either a path relative to baseDirectory
+# or an absolute path.
+oceanNamelistFileName = run/mpaso_in
+oceanStreamsFileName = run/streams.ocean
+seaIceNamelistFileName = run/mpassi_in
+seaIceStreamsFileName = run/streams.seaice
+
+# name of the ocean and sea-ice mesh (e.g. EC30to60E2r2, WC14to60E2r3,
+# ECwISC30to60E2r1, SOwISC12to60E2r4, oQU240, etc.)
+mpasMeshName = oQU480
+
+
+

The baseDirectory is the path where you untarred the example run.

+

The mpasMeshName is the standard E3SM name for the MPAS-Ocean and +MPAS-Seaice mesh. In this example, this is oQU480, meaning the +quasi-uniform 480-km mesh for the ocean and sea ice.

+

The runSubdirectory must contain valid MPAS-Ocean and MPAS-Seaice restart +files, used to get information about the MPAS mesh and the ocean vertical grid.

+

The oceanHistorySubdirectory must contain MPAS-Ocean monthly mean output +files, typically named:

+
mpaso.hist.am.timeSeriesStatsMonthly.YYYY-MM-DD.nc
+
+
+

Similarly, seaIceHistorySubdirectory contains the MPAS-Seaice monthly mean +output:

+
mpassi.hist.am.timeSeriesStatsMonthly.YYYY-MM-DD.nc
+
+
+

Finally, MPAS-Analysis needs a set of “namelists” and “streams” files that +provide information on the E3SM configuration for MPAS-Ocean and MPAS-Seaice, +and about the output files, respectively. These are typically also found in +the run directory.

+

For the example data, only baseDirectory and mpasMeshName need to be +set, the other options can be left as the defaults from the +example config file.

+

For this tutorial, you just need to set baseDirectory to the place where +you untarred the simulation results. The other config options should be as +they are in the code block above.

+
+
+

4.5 [output]

+

The [output] section provides a path where the output from the analysis run +will be written, the option to output the results web pages to another +location, and a list of analysis to be generated (or explicitly skipped).

+
[output]
+## options related to writing out plots, intermediate cached data sets, logs,
+## etc.
+
+# directory where analysis should be written
+# NOTE: This directory path must be specific to each test case.
+baseDirectory = /dir/for/analysis/output
+
+# provide an absolute path to put HTML in an alternative location (e.g. a web
+# portal)
+htmlSubdirectory = html
+
+# a list of analyses to generate.  Valid names can be seen by running:
+#   mpas_analysis --list
+# This command also lists tags for each analysis.
+# Shortcuts exist to generate (or not generate) several types of analysis.
+# These include:
+#   'all' -- all analyses will be run
+#   'all_publicObs' -- all analyses for which observations are available on the
+#                      public server (the default)
+#   'all_<tag>' -- all analysis with a particular tag will be run
+#   'all_<component>' -- all analyses from a given component (either 'ocean'
+#                        or 'seaIce') will be run
+#   'only_<component>', 'only_<tag>' -- all analysis from this component or
+#                                       with this tag will be run, and all
+#                                       analysis for other components or
+#                                       without the tag will be skipped
+#   'no_<task_name>' -- skip the given task
+#   'no_<component>', 'no_<tag>' -- in analogy to 'all_*', skip all analysis
+#                                   tasks from the given component or with
+#                                   the given tag.  Do
+#                                      mpas_analysis --list
+#                                   to list all task names and their tags
+# an equivalent syntax can be used on the command line to override this
+# option:
+#    mpas_analysis analysis.cfg --generate \
+#         only_ocean,no_timeSeries,timeSeriesSST
+generate = ['all_publicObs']
+
+
+

baseDirectory is any convenient location for the output.

+

htmlSubdirectory can simply be the <baseDirectory>/html, the default or +an absolute path to another location. The latter is useful for HPC machines that +have a web portal.

+

Finally, the generate option provides a python list of flags that can be +used to determine which analysis will be generated. For this tutorial, we will +stick with the default, 'all_publicObs', indicating that we will only run +analysis where the observations are included on the public server and which +were downloaded in Section 2 (or analysis that does not require observations).

+
+
+

4.6 [climatology], [timeSeries] and [index]

+

These options determine the start and end years of climatologies (time averages +over a particular month, season or the full year), time series or the El Niño +climate index.

+
[climatology]
+## options related to producing climatologies, typically to compare against
+## observations and previous runs
+
+# the first year over which to average climatologies
+startYear = 3
+# the last year over which to average climatologies
+endYear = 5
+
+[timeSeries]
+## options related to producing time series plots, often to compare against
+## observations and previous runs
+
+# start and end years for timeseries analysis.  Out-of-bounds values will lead
+# to an error.
+startYear = 1
+endYear = 5
+
+[index]
+## options related to producing nino index.
+
+# start and end years for timeseries analysis.  Out-of-bounds values will lead
+# to an error.
+startYear = 1
+endYear = 5
+
+
+

For each of these options, a full year of data must exist for that year to +be included in the analysis.

+

For the example E3SM simulation that we downloaded in Section 3, only 5 years of +simulation data are available, so we are doing a climatology over the last 3 +years (3 to 5) and displaying time series and the El Niño index over the full +5 years.

+
+
+
+

5 Running MPAS-Analysis

+

The hard work is done. Now that we have a config file, we are ready to run.

+

On many file systems, MPAS-Analysis and other python-based software that uses +NetCDF files based on the HDF5 file structure can experience file access errors +unless the following environment variable is set as follows in bash:

+
$ export HDF5_USE_FILE_LOCKING=FALSE
+
+
+

or under csh:

+
> setenv HDF5_USE_FILE_LOCKING FALSE
+
+
+

Then, running MPAS-Analysis is as simple as:

+
$ mpas_analysis myrun.cfg
+
+
+

Typical output if the analysis is running correctly looks something like:

+
running: /home/xylar/Desktop/miniconda3/envs/mpas-analysis/bin/ESMF_RegridWeight
+Gen --source /tmp/tmph58_hgz4/src_mesh.nc --destination /tmp/tmph58_hgz4/dst_mes
+h.nc --weight /home/xylar/Desktop/analysis_test/analysis/A_WCYCL1850.ne4_oQU480.
+anvil/mapping/map_oQU480_to_0.5x0.5degree_bilinear.nc --method bilinear --netcdf
+4 --no_log --src_regional --ignore_unmapped
+running: /home/xylar/Desktop/miniconda3/envs/mpas-analysis/bin/ESMF_RegridWeight
+Gen --source /tmp/tmpxt8x1h_6/src_mesh.nc --destination /tmp/tmpxt8x1h_6/dst_mes
+h.nc --weight /home/xylar/Desktop/analysis_test/analysis/A_WCYCL1850.ne4_oQU480.
+anvil/mapping/map_obs_eke_0.25x0.25degree_to_0.5x0.5degree_bilinear.nc --method
+bilinear --netcdf4 --no_log --src_regional --ignore_unmapped
+running: /home/xylar/Desktop/miniconda3/envs/mpas-analysis/bin/ESMF_RegridWeight
+Gen --source /tmp/tmp3_7gpndz/src_mesh.nc --destination /tmp/tmp3_7gpndz/dst_mes
+h.nc --weight /home/xylar/Desktop/analysis_test/analysis/A_WCYCL1850.ne4_oQU480.
+anvil/mapping/map_oQU480_to_6000.0x6000.0km_10.0km_Antarctic_stereo_bilinear.nc
+--method bilinear --netcdf4 --no_log --src_regional --dst_regional --ignore_unma
+pped
+Preprocessing SOSE transect data...
+  temperature
+  salinity
+  potentialDensity
+  zonalVelocity
+  meridionalVelocity
+  velMag
+  Done.
+running: /home/xylar/Desktop/miniconda3/envs/mpas-analysis/bin/ESMF_RegridWeight
+Gen --source /tmp/tmpt9n4vb5n/src_mesh.nc --destination /tmp/tmpt9n4vb5n/dst_mes
+h.nc --weight /home/xylar/Desktop/analysis_test/analysis/A_WCYCL1850.ne4_oQU480.
+anvil/mapping/map_oQU480_to_SOSE_transects_5km_bilinear.nc --method bilinear --n
+etcdf4 --no_log --src_regional --dst_regional --ignore_unmapped
+
+Running tasks:   2% |                                          | ETA:   0:09:04
+Running tasks:  52% |######################                    | ETA:   0:06:13
+Running tasks: 100% |##########################################| Time:  0:18:50
+
+Log files for executed tasks can be found in /home/xylar/Desktop/analysis_test/a
+nalysis/A_WCYCL1850.ne4_oQU480.anvil/logs
+Total setup time: 0:03:11.74
+Total run time: 0:22:02.33
+Generating webpage for viewing results...
+Done.
+
+
+

The first part of the output, before the progress bar, is the “setup” phase +where MPAS-Analysis is checking if the requested analysis can be run on the +simulation results. The specific output shown here is related to creating +so-called mapping files that are used to interpolate between the oQU480 mesh +and the various grids MPAS-Analysis uses to compare with observations. Since +MPAS-Analysis didn’t know about that oQU480 mesh ahead of time, it is +creating mapping files and regions masks for this mesh on the fly.

+

The command-line tool has several more options you can explore with

+
$ mpas_analysis --help
+
+
+

These include listing the available analysis tasks and their tags, purging a +previous analysis run before running the analysis again, plotting all available +color maps, and outputting verbose python error messages when the analysis fails +during the setup phase (before a progress bar appears).

+
+
+

6 Viewing the Output

+

The primary output from MPAS-Analysis is a set of web pages, each containing +galleries of figures. The output can be found in the directory you provided in +Section 4.5, which is the html subdirectory of the base output directory by +default. If the web page is incomplete, it presumably means there was an error +during the analysis run, since the web page is generated as the final step.

+

The main web page has links to the ocean and sea-ice web pages as well as some +“provenance” information about which version of MPAS-Analysis you were using +and how it was configured.

+

The web page generated by this tutorial should look something like +this example output.

+
+
+

7 Troubleshooting

+

This section briefly describes strategies for diagnosing errors in +MPAS-Analysis. This tutorial cannot hope to provide a comprehensive guide to +troubleshooting these errors. Please search the documentation, Google the error +online, or get in touch with the MPAS-Analysis developer team (by +posting an issue on GitHub) if you are experiencing an error.

+
+

7.1 Purging old Analysis

+

One thing you might want to try first if you are experiencing problems is to +delete any analysis you may already have in your output directory:

+
$ mpas_analysis --purge myrun.cfg
+
+
+

This will first delete existing analysis and then run the analysis again.

+
+
+

7.2 Errors During Setup

+

If an error occurs during setup, by default the full python traceback is +suppressed. This is because some tasks fail because the run being analyzed was +not configured for that analysis. In such cases, many users want the analysis +to continue, simply skipping the tasks that can’t be run.

+

However, this means that sometimes the analysis is not configured properly and +as a result most or all tasks are not running. To find out why, you will +probably need to run:

+
$ mpas_analysis --verbose myrun.cfg
+
+
+

This will give you a detailed python stack trace. Even if this is not helpful +to you, it might help developers to troubleshoot your issue.

+
+
+

7.3 Errors in Tasks

+

If you see the progress bar start but errors occur during running of analysis +tasks, the error messages will not be displayed to the screen. Instead, they +will be in log files (as stated in the short error message letting you know +that a task has failed). The contents of these log files may help you to +determine the cause of the error. If not, please include them if you are +posting an issue on GitHub.

+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/all_obs.html b/1.11.0rc1/users_guide/all_obs.html new file mode 100644 index 000000000..1a00dd6bb --- /dev/null +++ b/1.11.0rc1/users_guide/all_obs.html @@ -0,0 +1,162 @@ + + + + + + + <no title> — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+ +
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/analysis_tasks.html b/1.11.0rc1/users_guide/analysis_tasks.html new file mode 100644 index 000000000..6c227a905 --- /dev/null +++ b/1.11.0rc1/users_guide/analysis_tasks.html @@ -0,0 +1,230 @@ + + + + + + + Analysis Tasks — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+ +
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/components.html b/1.11.0rc1/users_guide/components.html new file mode 100644 index 000000000..80b477d64 --- /dev/null +++ b/1.11.0rc1/users_guide/components.html @@ -0,0 +1,375 @@ + + + + + + + MPAS Components and E3SM — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

MPAS Components and E3SM

+
+

MPAS Ocean

+

The Model for Prediction Across Scales Ocean (MPAS-O) is designed for the +simulation of the ocean system from time scales of months to millennia and +spatial scales from sub 1 km to global circulations.

+

MPAS-O has demonstrated the ability to accurately reproduce mesoscale ocean +activity with a local mesh refinement strategy.

+

In addition to facilitating the study of multiscale phenomena within the ocean +system, MPAS-O is intended for the study of anthropogenic climate change as +the ocean component of climate system models.

+

Full documentation is available at: +https://mpas-dev.github.io/ocean/ocean.html

+
+

Setting up Standalone MPAS-O Runs

+

In order to support all ocean analysis tasks from MPAS-Analysis, certain +“analysis members”, Fortran modules that perform analysis during the +simulation, need to be enabled.

+

The following is a list of suggested values for namelist options, typically +found in namelist.ocean or mpaso_in (or mpas-o_in in older E3SM runs):

+
config_AM_surfaceAreaWeightedAverages_enable = .true.
+config_AM_surfaceAreaWeightedAverages_compute_interval = '0000-00-00_01:00:00'
+config_AM_layerVolumeWeightedAverage_enable = .true.
+config_AM_layerVolumeWeightedAverage_compute_interval = '0000-00-00_01:00:00'
+config_AM_meridionalHeatTransport_enable = .true.
+config_AM_meridionalHeatTransport_compute_interval = '0000-00-00_01:00:00'
+config_AM_mixedLayerDepths_enable = .true.
+config_AM_timeSeriesStatsMonthly_enable = .true.
+
+
+

Additionally, the duration of the run should be set to at least two years and +typically longer before most analysis is useful:

+
config_run_duration = '0002-00-00_00:00:00'
+
+
+

Several streams must be defined in the streams file, typically +streams.ocean, (even if they will not be written out – +output_interval="none"):

+
<stream name="timeSeriesStatsMonthlyRestart"
+        type="input;output"
+        filename_template="restarts/restart.AM.timeSeriesStatsMonthly.$Y-$M-$D_$h.$m.$s.nc"
+        filename_interval="output_interval"
+        reference_time="0001-01-01_00:00:00"
+        clobber_mode="truncate"
+        packages="timeSeriesStatsMonthlyAMPKG"
+        input_interval="initial_only"
+        output_interval="stream:restart:output_interval" >
+</stream>
+
+<stream name="timeSeriesStatsMonthlyOutput"
+        type="output"
+        filename_template="analysis_members/timeSeriesStatsMonthly.$Y-$M.nc"
+        filename_interval="0000-01-00_00:00:00"
+        reference_time="0001-01-01_00:00:00"
+        clobber_mode="truncate"
+        packages="timeSeriesStatsMonthlyAMPKG"
+        output_interval="00-01-00_00:00:00" >
+
+        <var_array name="activeTracers"/>
+        <var name="normalVelocity"/>
+        <var name="vertVelocityTop"/>
+        <var_array name="avgValueWithinOceanRegion"/>
+        <var_array name="avgValueWithinOceanLayerRegion"/>
+        <var name="dThreshMLD"/>
+        <var name="meridionalHeatTransportLatZ"/>
+        <var name="meridionalHeatTransportLat"/>
+        <var name="binBoundaryMerHeatTrans"/>
+        <var name="xtime"/>
+</stream>
+
+<stream name="layerVolumeWeightedAverageOutput"
+        type="output"
+        io_type="pnetcdf"
+        filename_template="mpaso.hist.am.layerVolumeWeightedAverage.$Y-$M-$D.nc"
+        filename_interval="00-01-00_00:00:00"
+        output_interval="none"
+        clobber_mode="truncate"
+        packages="layerVolumeWeightedAverageAMPKG">
+
+    <var name="xtime"/>
+    <var name="daysSinceStartOfSim"/>
+    <var_array name="minValueWithinOceanLayerRegion"/>
+    <var_array name="maxValueWithinOceanLayerRegion"/>
+    <var_array name="avgValueWithinOceanLayerRegion"/>
+    <var_array name="minValueWithinOceanVolumeRegion"/>
+    <var_array name="maxValueWithinOceanVolumeRegion"/>
+    <var_array name="avgValueWithinOceanVolumeRegion"/>
+</stream>
+
+<stream name="meridionalHeatTransportOutput"
+        type="output"
+        io_type="pnetcdf"
+        filename_template="mpaso.hist.am.meridionalHeatTransport.$Y-$M-$D.nc"
+        filename_interval="00-01-00_00:00:00"
+        output_interval="none"
+        clobber_mode="truncate"
+        packages="meridionalHeatTransportAMPKG">
+
+    <var name="xtime"/>
+    <var name="daysSinceStartOfSim"/>
+    <var name="binBoundaryMerHeatTrans"/>
+    <var name="meridionalHeatTransportLatZ"/>
+    <var name="meridionalHeatTransportLat"/>
+    <var name="refZMid"/>
+    <var name="refBottomDepth"/>
+</stream>
+
+<stream name="surfaceAreaWeightedAveragesOutput"
+        type="output"
+        io_type="netcdf"
+        filename_template="mpaso.hist.am.surfaceAreaWeightedAverages.$Y-$M-$D.nc"
+        filename_interval="00-01-00_00:00:00"
+        output_interval="none"
+        clobber_mode="truncate"
+        packages="surfaceAreaWeightedAveragesAMPKG">
+
+    <var name="xtime"/>
+    <var name="daysSinceStartOfSim"/>
+    <var_array name="minValueWithinOceanRegion"/>
+    <var_array name="maxValueWithinOceanRegion"/>
+    <var_array name="avgValueWithinOceanRegion"/>
+</stream>
+
+<stream name="mixedLayerDepthsOutput"
+        type="output"
+        io_type="pnetcdf"
+        filename_template="mpaso.hist.am.mixedLayerDepths.$Y-$M-$D.nc"
+        filename_interval="00-01-00_00:00:00"
+        output_interval="none"
+        clobber_mode="truncate"
+        packages="mixedLayerDepthsAMPKG">
+
+    <var name="xtime"/>
+    <var name="daysSinceStartOfSim"/>
+    <stream name="mesh"/>
+    <var name="tThreshMLD"/>
+    <var name="dThreshMLD"/>
+    <var name="tGradMLD"/>
+    <var name="dGradMLD"/>
+</stream>
+
+
+

The filename_template can be modified as desired (in most cases, these are +the default values from E3SM). For the timeSeriesStatsMonthlyOutput +stream, both the filename_interval and the output_interval must currently be +monthly ("0000-01-00_00:00:00").

+

Additional fields can be included in the timeSeriesStatsMonthlyOutput +streams. These are the minimum that allow the analysis to run successfully.

+
+
+
+

MPAS-Seaice

+

The Model for Prediction Across Scales Sea Ice (MPAS-Seaice) +is designed for the simulations of sea ice on unstructured grids supported by +the MPAS framework. The model has not yet been publicly released and does not +have public documentation.

+
+

Setting up Standalone MPAS Sea Ice Runs

+

In order to support all sea-ice analysis tasks from MPAS-Analysis, certain +“analysis members”, Fortran modules that perform analysis during the +simulation, need to be enabled.

+

The following is a list of suggested values for namelist options, typically +found in namelist.seaice or mpassi_in (or mpas-cice_in in +older E3SM runs):

+
config_AM_timeSeriesStatsMonthly_enable = .true.
+
+
+

Additionally, the duration of the run should be set to at least two years and +typically longer before most analysis is useful:

+
config_run_duration = '0002-00-00_00:00:00'
+
+
+

Several streams must be defined in the streams file, typically +streams.seaice or streams.cice in older E3SM runs, (even if they will +not be written out – output_interval="none"):

+
<stream name="timeSeriesStatsMonthlyRestart"
+        type="input;output"
+        io_type="pnetcdf"
+        filename_template="mpasseaice.rst.am.timeSeriesStatsMonthly.$Y-$M-$D_$S.nc"
+        filename_interval="output_interval"
+        clobber_mode="truncate"
+        packages="timeSeriesStatsMonthlyAMPKG"
+        input_interval="initial_only"
+        output_interval="stream:restart:output_interval">
+</stream>
+
+<stream name="timeSeriesStatsMonthlyOutput"
+        type="output"
+        io_type="pnetcdf"
+        filename_template="mpasseaice.hist.am.timeSeriesStatsMonthly.$Y-$M-$D.nc"
+        filename_interval="00-01-00_00:00:00"
+        output_interval="00-01-00_00:00:00"
+        clobber_mode="truncate"
+        packages="timeSeriesStatsMonthlyAMPKG">
+
+        <var name="icePresent"/>
+        <var name="iceAreaCell"/>
+        <var name="iceVolumeCell"/>
+        <var name="xtime"/>
+</stream>
+
+
+

The filename_template can be modified as desired (these are the default +values from E3SM). For the timeSeriesStatsMonthlyOutput stream, both the +filename_interval and the output_interval must currently be monthly +("0000-01-00_00:00:00").

+

Additional fields can be included in the timeSeriesStatsMonthlyOutput +streams. These are the minimum that allow the analysis to run successfully.

+
+
+
+

E3SM

+

The Energy Exascale Earth System Model (E3SM) Project is an ongoing, +state-of-the-science Earth system modeling, simulation, and prediction project +that optimizes the use of DOE laboratory resources to meet the science needs of +the nation and the mission needs of DOE.

+

A Full description of E3SM is available at: +https://e3sm.org/

+
+

Setting up E3SM runs

+

All online analysis and output stream within MPAS components (MPAS-O and +MPAS-SeaIce) are configured to support MPAS-Analysis without any modifications +to namelists or streams files.

+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/climatology.html b/1.11.0rc1/users_guide/config/climatology.html new file mode 100644 index 000000000..816c3be6c --- /dev/null +++ b/1.11.0rc1/users_guide/config/climatology.html @@ -0,0 +1,293 @@ + + + + + + + Climatology — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Climatology

+

The [climatology] section of a configuration file contains options used to +determine the start and end years of climatologies, the reference years for +anomalies and to control remapping of climatologies to comparison grids:

+
[climatology]
+## options related to producing climatologies, typically to compare against
+## observations and previous runs
+
+# the year from which to compute anomalies if not the start year of the
+# simulation.  This might be useful if a long spin-up cycle is performed and
+# only the anomaly over a later span of years is of interest.
+# anomalyRefYear = 249
+
+# the first year over which to average climatologies
+startYear = 11
+# the last year over which to average climatologies
+endYear = 20
+
+# The comparison lat/lon grid resolution in degrees
+comparisonLatResolution = 0.5
+comparisonLonResolution = 0.5
+
+# The comparison Antarctic polar stereographic grid size and resolution in km
+comparisonAntarcticStereoWidth = 6000.
+comparisonAntarcticStereoResolution = 10.
+
+# interpolation order for model and observation results. Likely values are
+#   'bilinear', 'neareststod' (nearest neighbor) or 'conserve'
+mpasInterpolationMethod = bilinear
+
+# should climatologies be performed with ncclimo or with xarray/dask
+useNcclimo = True
+
+# should remapping be performed with ncremap or with the Remapper class
+# directly in MPAS-Analysis
+useNcremap = True
+
+# The minimum weight of a destination cell after remapping. Any cell with
+# weights lower than this threshold will therefore be masked out.
+renormalizationThreshold = 0.01
+
+# if useNcclimo = False, the number of threads dask is allowed to spawn for
+# each process computing a climatology for a given month or season
+# Decrease this number if mpasClimatology* subtasks are running
+# out of available threads
+daskThreads = 2
+
+# if useNcclimo = False, the number of subprocesses that each climatology
+# subtask gets counted as occupying.
+# Increase this number if mpasClimatology* subtasks are running
+# out of memory, and fewer tasks will be allowed to run at once
+subprocessCount = 1
+
+
+
+

Start and End Year

+

A custom config file should specify a start and end year for climatologies. +Simulation data must exist for all 12 months of each year in this range. +Otherwise, the range will be reduced to those years with complete data and +a warning message will be displayed.

+
+
+

Anomaly Reference Year

+

Anomalies between a climatology and the associated field from a reference year +are used in a few analysis tasks. By default, the reference year is not +specified in the configuration file and it is taken to be the start of the +simulation (determined from the contents of a restart file). Under certain +circumstances (e.g. repetition of forcing data for several cycles, as in +the Common Ocean Reference Experiments, CORE), it may be desirable to +specify a different year to use for computing anomalies:

+
anomalyRefYear = 249
+
+
+
+
+

Remapping Options

+

Climatologies are remapped from MPAS meshes and observations grids to common +comparison grids. The remapping can be performed with any of three methods: +bilinear, neareststod (nearest neighbor) or conserve. Mapping +files are created with the ESMF_RegridWeightGen tool. The default method +is bilinear and these are the mapping files distributed from the +E3SM public data repository. The conserve method is known to be much +slower to compute and should only be used if it is necessary (e.g. because +remapped data will be checked for conservation).

+

MPAS-Analysis typically uses the NCO tool ncremap to perform remapping. +However, a python remapping capability is also available. The user can force +remapping to use the python-based remapping by specifying:

+
useNcremap = False
+
+
+

This capability is available largely for debugging purposes.

+

Remapped data typically only makes sense if it is renormalized after remapping. +For remapping of conserved quantities like fluxes, renormalization would not +be desirable but for quantities like potential temperature, salinity and +potential density commonly used in MPAS-Analysis tasks, values become +physically meaningless near land boundaries and regions without data unless +renormalization is performed. A threshold is needed to determine how much of a +cell’s area on the output grid must contain valid data from the input grid or +mesh, below which that cell is considered invalid and is masked out of the +destination data set. This threshold is specified as a fraction:

+
renormalizationThreshold = 0.01
+
+
+

If noisy or unphysical values occur near masked regions on the comparison grid, +it might be necessary to increase this threshold. If too much data appears to +be being masked out unnecessarily on the comparison grid, perhaps this value +should be made smaller.

+
+
+

Computing climatologies

+

MPAS-Analysis typically uses the NCO tool ncclimo to compute +climatologies. For some large data sets on a single node, ncclimo runs +out of memory in bck mode but is painfully slow in serial mode and +wastes extra nodes in mpi mode. (See :ref: config_execute for more on +configuring ncclimo.) For such cases, there is also an xarray/dask method +of computing climatologies:

+
useNcclimo = False
+
+
+
+
+

Other Options

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/colormaps.html b/1.11.0rc1/users_guide/config/colormaps.html new file mode 100644 index 000000000..b7e167574 --- /dev/null +++ b/1.11.0rc1/users_guide/config/colormaps.html @@ -0,0 +1,289 @@ + + + + + + + Colormaps — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Colormaps

+

Each analysis task that includes plots with colormaps has a series of +configuration options that can be used to manipulate the colormap. +MPAS-Analysis supports two types of plots, each of which has an associated +set of options for configuring its colormap.

+
+

Supported Colormaps

+

MPAS-Analysis supports all matplotlib colormaps, all cmocean colormaps, +all Scientific Colour Map Suite 3.0 colormaps, the selection of key +SciVisColor colormaps and five custom color maps: ferret, +erdc_iceFire_H, erdc_iceFire_L, BuOr and Maximenko.

+
+All available colormaps +
+

All available colormaps

+
+
+

To plot all available colormaps (as above), run:

+
mpas_analysis --plot_colormaps
+
+
+
+
+

Indexed Colormaps

+

Indexed colormaps are used in association with contour plots in MPAS-Analysis. +The following configuration options are associated with an indexed colormap:

+
# colormap name
+colormapName = RdYlBu_r
+# color indices into colormapName for filled contours
+colormapIndices = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevels = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32]
+
+
+

The indices are between 0 and 255, indicating the location within the colormap +to sample. The levels are the contour values to plot. Since the region +between two contours will be filled with a solid color specified by a colormap +index, the number of levels is one greater than the number of indices. The +exception is when a separate shading should be used for values that fall +outside the range of the contours, in which case the number of colormap indices +is one more than the number of levels values, and the first and last index +are used to specify the “under” and “over” color values.

+
+
+

Continuous Colormaps

+

A continuous colormap is specified with a different set of options:

+
# colormap for model/observations
+colormapName = erdc_iceFire_H
+# the type of norm used in the colormap
+normType = symLog
+# A dictionary with keywords for the norm
+normArgs = {'linthresh': 1., 'linscale': 0.5, 'vmin': -100.,
+            'vmax': 100.}
+
+
+

MPAS Analysis supports three norms for the continuous color map, with an +associated dictionary of arguments. These are:

+
+
+
+

See the associated documentation for the arguments to each norm.

+
+
+

Colormaps in Three Panel Plots

+

Many MPAS-Analysis tasks include a panel for the main model run, another for +observations or a control run and a third panel for displaying a bias or the +the model-reference difference. In such cases, we support one colormap for the +first two panels and another for the final panel. Options for each of these +colormaps are specified by appending either Result or Difference to +each. For example:

+
# colormap for model/observations
+colormapNameResult = RdYlBu_r
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32]
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198,
+                             227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-5, -3, -2, -1, -0.1, 0, 0.1, 1, 2, 3, 5]
+
+
+
+
+

Adding Contour Lines

+

Contour lines can be added to a plot with the following options:

+
# contour line levels
+contourLevels = numpy.arange(-240., 130., 10.)
+# contour line thickness
+contourThickness = 0.25
+# contour color
+contourColor = 0.25
+
+
+

The levels are the field values for each contour. The line thickness is +specified in points. The contourColor can be any color name supported +by matplotlib or a floating point number between 0 and 1 specifying a +shade of gray.

+

Append Result or Difference to these options for a 3-panel plot.

+
+
+

Specifying Colorbar Tick Marks

+

By default, colorbar tick marks are chosen automatically by matplotlib. +To specify tick marks explicitly, use:

+
colorbarTicks = [-100., -50., -20., -10., -5., -2., -1., 0., 1., 2., 5.,
+                 10., 20., 50., 100.]
+
+
+

Alternatively, numpy functions can be used to specify the tick locations:

+
colorbarTicks = numpy.linspace(-100, 100, 9)
+
+
+

Again, append Result or Difference to these options for a 3-panel plot.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/comparison_grids.html b/1.11.0rc1/users_guide/config/comparison_grids.html new file mode 100644 index 000000000..d1d8f0935 --- /dev/null +++ b/1.11.0rc1/users_guide/config/comparison_grids.html @@ -0,0 +1,191 @@ + + + + + + + Comparison Grids — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Comparison Grids

+

Currently, MPAS-Analysis supports two comparison grids to which both model +results and observations are remapped for comparison and plotting. These are +latlon, a global latitude/longitude grid, and antarctic, a +stereographic grid centered at the south pole. Many analysis tasks support +both of these grids, in which case either or both can be specified in a list:

+
# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+
+

The resolution of each of these grids are set through a set of configuration +options that affect all tasks using these grids:

+
[climatology]
+## options related to producing climatologies, typically to compare against
+## observations and previous runs
+
+...
+
+# The comparison lat/lon grid resolution in degrees
+comparisonLatResolution = 0.5
+comparisonLonResolution = 0.5
+
+# The comparison Antarctic polar stereographic grid size and resolution in km
+comparisonAntarcticStereoWidth = 6000.
+comparisonAntarcticStereoResolution = 10.
+
+
+

These options can be altered by the user, in which case new weights for +remapping model results and observations to these grids will be generated on +the fly. Mapping files for interpolating to the standard comparison grids from +both observations and standard MPAS meshes are provided when you download the +observations files.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/dask_threads.html b/1.11.0rc1/users_guide/config/dask_threads.html new file mode 100644 index 000000000..c03192b8d --- /dev/null +++ b/1.11.0rc1/users_guide/config/dask_threads.html @@ -0,0 +1,205 @@ + + + + + + + Dask threads and subprocess count — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Dask threads and subprocess count

+

Several tasks and subtasks have config options daskThreads and +subprocessCount used to control threading within a subtask:

+
# The number of threads dask is allowed to spawn for each task/subtask.
+# Decrease this number if tasks/subtasks are running out of available threads
+daskThreads = 2
+
+# The number of subprocesses that each task/subtask gets counted as
+# occupying. Increase this number if tasks/subtasks are running out of
+# memory, so that fewer tasks will be allowed to run at once
+subprocessCount = 1
+
+
+
+

Dask threads

+

Dask and xarray support thread-parallel operations on data sets. They also +support chunk-wise operation on data sets that can’t fit in memory. These +capabilities are very powerful but also difficult to configure for general +cases. Dask is also not designed by default with the idea that multiple tasks, +each with multiple dask threads, might operate simultaneously. As a result, +it is possible to spawn huge numbers of dask threads in MPAS-Analysis that both +slow down analysis and lead to errors when the node runs out of threads +completely.

+

To prevent this, many tasks or subtasks that use dask threading take the number +of execution threads from a config option, typically in the config section for +the parent task. Typically, the number of daskThreads should be around +the same as the number of cores on a node divided by the number of tasks +that will run simultaneously. Since the number of running tasks is controlled +by subprocessCount, see below, this number might differ from +parallelTaskCount.

+
+
+

Subprocess count

+

Tasks or subtasks that use dask threading may consume too much memory or use +too many threads to “count” as a single task. That is, it might not be safe to +run with parallelTaskCount simultaneous instances of the task/subtask and +it would be better if it occupied the slot of multiple tasks in the pool of +tasks. MPAS-Analysis will treat a dask-based task or subtask as occupying +the number of task slots given by the subprocessCount option. For example, +if parallelTaskCount = 8 and subprocessCount = 2, up to 4 tasks or +subtasks would be allowed to run simultaneously.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/diagnostics.html b/1.11.0rc1/users_guide/config/diagnostics.html new file mode 100644 index 000000000..b0d353587 --- /dev/null +++ b/1.11.0rc1/users_guide/config/diagnostics.html @@ -0,0 +1,227 @@ + + + + + + + Diagnostics — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Diagnostics

+

The [diagnostics] section of a configuration file contains options related +to paths containing observations, region-mask files and mapping files used to +interpolate MPAS data and observations to common reference grids:

+
[diagnostics]
+## config options related to observations, mapping files and region files used
+## by MPAS-Analysis in diagnostics computations.
+
+# The base path to the diagnostics directory.  Typically, this will be a shared
+# directory on each E3SM supported machine (see the example config files for
+# its location).  For other machines, this would be the directory pointed to
+# when running "download_analysis_data.py" to get the public observations,
+# mapping files and region files.
+baseDirectory = /path/to/diagnostics
+
+# A second directory where custom diagnostics data such as mapping files and
+# regions masks for unsupported grids can be found.  The structure of
+# subdirectories in this directory must be the same as baseDirectory
+customDirectory = none
+
+# Directory for mapping files (if they have been generated already). If mapping
+# files needed by the analysis are not found here, they will be generated and
+# placed in the output mappingSubdirectory.  The user can supply an absolute
+# path here to point to a path that is not within the baseDirectory above.
+mappingSubdirectory = mpas_analysis/maps
+
+# Directory for region mask files. The user can supply an absolute path here to
+# point to a path that is not within the baseDirectory above.
+regionMaskSubdirectory = mpas_analysis/region_masks
+
+
+
+

Diagnostics Directories

+

The baseDirectory is the location where files were downloaded with the +download_analysis_data.py. If the user is on an E3SM supported machine, +this data has already been downloaded to a shared location (see example config +files in the subdirectories of the configs directory in the MPAS-Analysis +repository).

+

The customDirectory, if it is not none is another directory where +observations, mapping files and region masks may be stored. This is useful for +runs on non-standard grids or for testing out new observations.

+

The remaining options point to the subdirectories for mapping files (see +below) and region masks (see Colormaps), respectively. +Typically, there is no reason to change mappingSubdirectory or +regionMaskSubdirectory, as these are the standard subdirectories created +when these files are downloaded from the E3SM public data repository.

+
+
+

Mapping Files

+

Mapping files are used in many MPAS-Analysis tasks to remap from either the +native MPAS mesh or an observations grid to a comparison grid (see +Comparison Grids). By default, these mapping files are generated +on the fly as they are needed. This can be a time-consuming process, +especially for high resolution meshes, so it is useful to store a cache of +these mapping files for reuse. Mapping files at three standard resolutions +are available on the E3SM public data repository. The mapping files for +the two coarser resolution meshes will be downloaded automatically along with +the publicly available observations. (See the Quick Start Guide for details +on downloading this data.)

+

If you notice that MPAS-Analysis is generating mapping files on the fly each +time you run, you may wish to copy them from the mapping files output +directory (the subdirectory mapping/ inside the output base directory) to +your mapping files cache directory.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/execute.html b/1.11.0rc1/users_guide/config/execute.html new file mode 100644 index 000000000..1174c166a --- /dev/null +++ b/1.11.0rc1/users_guide/config/execute.html @@ -0,0 +1,298 @@ + + + + + + + Execute — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Execute

+

The [execute] section of a configuration file contains options used to +control how tasks are executed within an MPAS-Analysis run:

+
[execute]
+## options related to executing parallel tasks
+
+# the number of parallel tasks (1 means tasks run in serial, the default)
+parallelTaskCount = 1
+
+# the parallelism mode in ncclimo ("serial", "bck" or "mpi")
+# Set this to "bck" (background parallelism) in most cases.  The default number
+# of threads (see below) is 12, one for each monthly climatology. Set to "mpi"
+# to run one MPI task on each node and however many threads per node to reach
+# 12 total threads.
+ncclimoParallelMode = serial
+
+# the number of total threads to use when ncclimo runs in "bck" or "mpi" mode.
+# Reduce this number if ncclimo is crashing (maybe because it is out of memory).
+# The number of threads must be a factor of 12 (1, 2, 3, 4, 6 or 12).
+ncclimoThreads = 12
+
+# the number of MPI tasks to use in creating mapping files (1 means tasks run in
+# serial, the default)
+mapMpiTasks = 1
+
+# "None" if ESMF should perform mapping file generation in serial without a
+# command, or one of "srun" or "mpirun" if it should be run in parallel (or in
+# serial but with a command)
+mapParallelExec = None
+
+# "None" if ncremap should perform remapping without a command, or "srun"
+# possibly with some flags if it should be run with that command
+ncremapParallelExec = None
+
+# Multiprocessing method used in python mask creation ("forkserver", "fork" or
+# "spawn").  We have found that "spawn" is the only one that works in python
+# 3.7 on Anvil so this is the default
+multiprocessingMethod = spawn
+
+
+
+

Parallel Tasks

+

By default, MPAS-Analysis runs one task at a time, displaying any logging +output directly to the screen, rather than storing it in log files. However, +the analysis will run much more quickly if parallel processes are used. For +example, to run 12 tasks in parallel, simply set:

+
parallelTaskCount = 12
+
+
+

MPAS-Analysis currently supports parallelism through spawning processes, rather +than with MPI, meaning that MPAS-Analysis should be run on a single node of a +multi-node machine such as a cluster or supercomputer. Given that some tasks +themselves spawn multiple threads and that some tasks are memory intensive, it +may not be desirable to launch one task per core on a node with limited memory.

+

Because MPAS-Analysis does not use MPI parallelism, it can typically be run on +the login nodes of supercomputing facilities. Check with the policies of your +center to see if this is permitted and make sure not to run with a large number +of parallel tasks so as to overwhelm the shared resource.

+
+
+

Parallelism in NCO

+

The ncclimo command from the NetCDF Operators (NCO) package is used +internally in MPAS-Analysis. This command supports three options for +parallelism: serial, bck or mpi. If set to serial, the +default, any MPAS-Analysis tasks that use ncclimo will compute +climatologies one month and then one season at a time. If bck mode is +used, ncclimoThreads threads are spawned (default is 12, one for each +month), and then separate threads are used to compute each season. Given that +computing climatologies takes up a significant portion of the runtime in +MPAS-Analysis, the speed-up of nearly a factor of ncclimoThreads in these +computations can be quite noticeable. For very big data sets, it may be +necessary to run ncclimo either with fewer threads (reducing +ncclimoThreads, noting that it must be a factor of 12) or on multiple nodes +to prevent running out of memory. To run an MPI job, spawn a job with between +2 and 12 nodes, and set ncclimoParallelMode = mpi to run the 12 ncclimo +threads on multiple nodes.

+

Again, when running MPAS-Analysis on login nodes of supercomputing facilities, +it is important to be aware of the policies regarding using shared resources. +On login nodes, bck may only be appropriate with ncclimoThreads set to a +small number and mpi mode may not work at all.

+
+
+

Parallel Mapping File Creation

+

If mapping files from the MPAS mesh to the comparison grids aren’t already +available in the diagnostics directory, they will be created before any other +MPAS-Analysis tasks are run. If you are running MPAS-Analysis out of +E3SM-Unified on a compute node, on many systems (see below), ESMF has been +built with the system version of MPI and you must run mapping-file generation +with srun. If you are running with parallelTaskCount > 1, the mapping +files will be generated in parallel.

+
mapParallelExec = srun
+
+
+

Similarly, some systems (Anvil and Chrysalis) require a parallel executable +for calls to ncremap from E3SM-Unified on compute nodes:

+
ncremapParallelExec = srun
+
+
+

E3SM supported machines with system MPI support in E3SM-Unified 1.8.1:

+
    +
  • Anvil

  • +
  • Chicoma

  • +
  • Chrysalis

  • +
  • Compy

  • +
  • Cori-Haswell

  • +
  • Perlmutter

  • +
+

These machines do not have MPI support in E3SM-Unified:

+
    +
  • Andes

  • +
  • Acme1

  • +
+

In the very near future, we hope to add a capability to MPAS-Analysis so that +it will automatically recognize which machine it is on (or you can specify if +need be), allowing these and other config options to be set automatically.

+
+
+

Parallel Mask Creation

+

Tasks that involve Region Groups can generate the masks for each +region in the group on the fly. This is done with the mask generation +command-line tools from MPAS-Tools (see +Mask Creation with Python Multiprocessing), +which support 3 modes of parallelism: “spawn”, “fork” and “forkserver”. For +technical details on these modes, see +Contexts and start methods. +We have found that “spawn” seems to be the most reliable option on Anvil under +python 3.7 and 3.8. Any of these methods works well under python 3.8 but only +“spawn” was reliable under python 3.7. Therefore, we use “spawn” as the +default.

+

As we gain more experience with this setting, we may update config files for +specific machines to have different defaults.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/html.html b/1.11.0rc1/users_guide/config/html.html new file mode 100644 index 000000000..730c66002 --- /dev/null +++ b/1.11.0rc1/users_guide/config/html.html @@ -0,0 +1,171 @@ + + + + + + + HTML — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

HTML

+

The [html] section simply specifies whether or not a webpage should be +generated for displaying the plots produced by the analysis:

+
[html]
+## options related to generating a webpage to display the analysis
+
+# generate the webpage?
+generate = True
+
+
+

The webpage is produced in the directory specified by htmlSubdirectory +in the [output] section, see Output.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/index.html b/1.11.0rc1/users_guide/config/index.html new file mode 100644 index 000000000..b0520c71a --- /dev/null +++ b/1.11.0rc1/users_guide/config/index.html @@ -0,0 +1,180 @@ + + + + + + + Index — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Index

+

The [index] section of a configuration file contains options used to +determine the start and end years of climate indices (such as El Niño +3.4):

+
[index]
+## options related to producing nino index.
+
+# start and end years for El Nino 3.4 analysis.  Out-of-bounds values will lead
+# to an error.
+startYear = 1
+endYear = 20
+
+
+
+

Start and End Year

+

A custom config file should specify a start and end year for the time axis. +Out-of-range years will produce an error.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/input.html b/1.11.0rc1/users_guide/config/input.html new file mode 100644 index 000000000..59e6a4301 --- /dev/null +++ b/1.11.0rc1/users_guide/config/input.html @@ -0,0 +1,326 @@ + + + + + + + Input — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Input

+

The [input] section of a configuration file contains options used to +specify the location of the “main” simulation and some settings for how +these data will be read in:

+
[input]
+## options related to reading in the results to be analyzed
+
+# directory containing model results
+baseDirectory = /dir/for/model/output
+
+# Note: an absolute path can be supplied for any of these subdirectories.
+# A relative path is assumed to be relative to baseDirectory.
+# By default, results are assumed to be directly in baseDirectory,
+# i.e. <baseDirectory>/./
+
+# subdirectory containing restart files
+runSubdirectory = .
+# subdirectory for ocean history files
+oceanHistorySubdirectory = .
+# subdirectory for sea ice history files
+seaIceHistorySubdirectory = .
+
+# names of namelist and streams files, either a path relative to baseDirectory
+# or an absolute path.
+oceanNamelistFileName = mpaso_in
+oceanStreamsFileName = streams.ocean
+seaIceNamelistFileName = mpassi_in
+seaIceStreamsFileName = streams.seaice
+
+# name of the ocean and sea-ice mesh (e.g. EC30to60E2r2, WC14to60E2r3,
+# ECwISC30to60E2r1, SOwISC12to60E2r4, oQU240, etc.)
+mpasMeshName = mesh
+
+# Large datasets can encounter a memory error.  Specification of a maximum
+# chunk size `maxChunkSize` can be helpful to prevent the memory error.  The
+# current maximum chunk size assumes approximately 64GB of ram and large files
+# with a single time slice.
+maxChunkSize = 10000
+
+
+# Whether missing input data should produce an error.  If not, the user gets
+# a warning and the time bounds are adjusted to the beginning and end of the
+# available data
+errorOnMissing = False
+
+
+
+

Input Directories

+

The input directories are specified through a base directory and optionally +separate subdirectories for the run (containing namelist, streams and one +or more restart files for each MPAS component) and for each component’s +simulation output (“history” files). You will always need to set +baseDirectory in a custom config file.

+

By default, all files are assumed to be located in the base directory. +However, E3SM supports short-term archiving of component output to separate +directories. If this feature was enabled for the E3SM run to be analyzed, the +configuration should be:

+
baseDirectory = /dir/for/model/output
+runSubdirectory = run
+oceanHistorySubdirectory = archive/ocn/hist
+seaIceHistorySubdirectory = archive/ice/hist
+
+
+

where /dir/for/model/output is replaced with the directory containing +the run and archive directories.

+
+
+

Namelist Files

+

MPAS components are configured with a namelist file containing a very large +number of parameters and other configuration settings. Part of the strength +of MPAS-Analysis is that it is aware of these namelists and can automatically +disable analysis tasks that are not supported under a given configuration. +By default, the namelist files for ocean and sea ice components are:

+
oceanNamelistFileName = mpaso_in
+seaIceNamelistFileName = mpassi_in
+
+
+

For older E3SM (v1 alpha and beta) runs, a different naming convention was used +and these options will need to be updated to:

+
oceanNamelistFileName = mpas-o_in
+seaIceNamelistFileName = mpas-cice_in
+
+
+

The location of the namelist files is relative to the baseDirectory, so if +they are located within a run directory inside the base directory, they should +instead be specified as:

+
oceanNamelistFileName = run/mpaso_in
+seaIceNamelistFileName = run/mpassi_in
+
+
+
+
+

Streams Files

+

Streams files are used to configure input and output from MPAS components. +MPAS-Analysis can parse these files to determine the locations of output files +(using the *HistorySubdirectory to find them if they have been moved by +short-term archiving). Similarly to namelist files, there are default names:

+
oceanStreamsFileName = streams.ocean
+seaIceStreamsFileName = streams.seaice
+
+
+

alterations appropriate for E3SM v1 alpha and beta runs:

+
oceanStreamsFileName = streams.ocean
+seaIceStreamsFileName = streams.cice
+
+
+

and the addition of the run/ subdirectory if analyzing a run that used +short-term archiving:

+
oceanStreamsFileName = run/streams.ocean
+seaIceStreamsFileName = run/streams.seaice
+
+
+
+
+

Mesh Name

+

The MPAS-Ocean and MPAS-Seaice run on the same mesh. There are a number of +standard E3SM ocean/sea ice meshes at various resolutions. The meshes +currently supported by the public release of MPAS-Analysis include:

+
+
    +
  • oEC60to30v3: An Eddy-Closure (EC) mesh with 30-km resolution at the +poles and equator and 60-km resolution at mid latitudes,

  • +
  • oRRS30to10v3: A Rossby-Radius-Scaled (RRS) mesh with 10-km resolution +at the poles and 30-km resolution at the equator,

  • +
  • oRRS18to6: An RRS mesh with 6-km resolution at the poles and 18-km +resolution at the equator.

  • +
+
+

Mapping files (see Mapping Files below) and region mask files +(see Region Groups) are provided from the +E3SM public data repository for these meshes. For assistance with other +mesh resolutions, please contact the MPAS-Analysis developers.

+
+
+

Xarray and Dask

+

MPAS-Analysis makes extensive use of the xarray package, which uses the +dask package internally to perform operations that are too large to fit +in memory. While most tasks in MPAS-Analysis have moved away from opening +multi-file data sets using xarray in favor of concatenating these data sets +together using NCO tools, there are some legacy options that users can modify +if they experience errors related to dask:

+
maxChunkSize = 10000
+
+
+

If an out of memory error occurs, it may first be worth reducing the number +of parallel tasks running (see Execute) but if the error is +clearly related to dask (which might be the case, for example, if the error +occurs in the streamfunctionMOC task), you may wish to reduce the +maxChunkSize. This will make tasks using dask slower but will reduce their +memory usage.

+
+
+

Errors on Missing Data

+

if errorOnMissing = False, the time ranges (startYear and endYear) +in climatology, timeSeries, and index will be clipped to the range +of the available data. If this option is set to True, an error will be +produced. A value of end can be used for endYear to indicate that the +full range of the available data should be used.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/moving_average.html b/1.11.0rc1/users_guide/config/moving_average.html new file mode 100644 index 000000000..c24264d78 --- /dev/null +++ b/1.11.0rc1/users_guide/config/moving_average.html @@ -0,0 +1,168 @@ + + + + + + + Moving Average — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Moving Average

+

By default, some time series have a 12-month moving average while others do +not include a moving average (movingAverageMonths = 1). To perform +a moving average (e.g. over 12 months), set:

+
movingAverageMonths = 12
+
+
+

This can be useful for taking out the seasonal cycle to better examine annual +mean trends.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/observations.html b/1.11.0rc1/users_guide/config/observations.html new file mode 100644 index 000000000..6bc7acb2b --- /dev/null +++ b/1.11.0rc1/users_guide/config/observations.html @@ -0,0 +1,255 @@ + + + + + + + Ocean, Sea Ice and Iceberg Observations — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Ocean, Sea Ice and Iceberg Observations

+

The [oceanObservations], [seaIceObservations] and +[icebergObservations] sections of a configuration file contain options used +to point to the observations files and folders:

+
[oceanObservations]
+## options related to ocean observations with which the results will be compared
+
+# directory where ocean observations are stored
+obsSubdirectory = observations/Ocean
+sstSubdirectory = SST
+sssSubdirectory = SSS
+mldSubdirectory = MLD
+ninoSubdirectory = Nino
+mhtSubdirectory = MHT
+meltSubdirectory = Melt
+soseSubdirectory = SOSE
+sshSubdirectory = SSH
+argoSubdirectory = ARGO
+schmidtkoSubdirectory = Schmidtko
+
+# interpolation order for observations. Likely values are
+#   'bilinear', 'neareststod' (nearest neighbor) or 'conserve'
+interpolationMethod = bilinear
+
+# The directories where observation climatologies will be stored if they need
+# to be computed.  If a relative path is supplied, it is relative to the output
+# base directory.  If an absolute path is supplied, this should point to
+# cached climatology files on the desired comparison grid.  If cached remapped
+# files are supplied, there is no need to provide cached files before
+# remapping.
+climatologySubdirectory = clim/obs
+remappedClimSubdirectory = clim/obs/remapped
+
+...
+
+[seaIceObservations]
+## options related to sea ice observations with which the results will be
+## compared
+
+# directory where sea ice observations are stored
+obsSubdirectory = observations/SeaIce
+
+# interpolation order for observations. Likely values are
+#   'bilinear', 'neareststod' (nearest neighbor) or 'conserve'
+interpolationMethod = bilinear
+
+# The directories where observation climatologies will be stored if they need
+# to be computed.  If a relative path is supplied, it is relative to the output
+# base directory.  If an absolute path is supplied, this should point to
+# cached climatology files on the desired comparison grid.  If cached remapped
+# files are supplied, there is no need to provide cached files before
+# remapping.
+climatologySubdirectory = clim/obs
+remappedClimSubdirectory = clim/obs/remapped
+
+...
+
+[icebergObservations]
+## options related to iceberg observations with which the results will be
+## compared
+
+# directory where sea ice observations are stored
+obsSubdirectory = observations/Icebergs
+concentrationAltibergSH = Altiberg/Altiberg_1991-2017_20180308.nc
+
+
+
+

Files and Directories

+

The input directories are specified through a “base” subdirectory +obsSubdirectory and either subdirectories or file names for each set of +observations. obsSubdirectory is relative to baseDirectory in the +diagnostics section, while all file paths and other subdirectories are +relative to obsSubdirectory. You will typically not need to change any +of these paths, since they are structured in a standard way following the +E3SM public data repository (see the Quick Start Guide for more details).

+

The directories for storing cached datasets before and after remapping +(specified in climatologySubdirectory and remappedClimSubdirectory) +may be given any relative or absolute path, but should typically be left as the +default values.

+
+
+

Remapping

+

Observational climatologies are remapped from the native grid (typically +global latitude/longitude or Antarctic stereographic) to common +comparison grids. The remapping can be performed with any of three methods: +bilinear, neareststod (nearest neighbor) or conserve. Mapping +files are created with the ESMF_RegridWeightGen tool. The default method +is bilinear and these are the mapping files distributed from the +E3SM public data repository. The conserve method is known to be much +slower to compute and should only be used if it is necessary (e.g. because +remapped data will be checked for conservation).

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/output.html b/1.11.0rc1/users_guide/config/output.html new file mode 100644 index 000000000..6dc7f0b4d --- /dev/null +++ b/1.11.0rc1/users_guide/config/output.html @@ -0,0 +1,307 @@ + + + + + + + Output — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Output

+

The [output] section of a configuration file contains options used to +specify the location of the “main” simulation:

+
[output]
+## options related to writing out plots, intermediate cached data sets, logs,
+## etc.
+
+# directory where analysis should be written
+# NOTE: This directory path must be specific to each test case.
+baseDirectory = /dir/for/analysis/output
+
+# subdirectories within baseDirectory for analysis output
+plotsSubdirectory = plots
+logsSubdirectory = logs
+mpasClimatologySubdirectory = clim/mpas
+mappingSubdirectory = mapping
+timeSeriesSubdirectory = timeseries
+# provide an absolute path to put HTML in an alternative location (e.g. a web
+# portal)
+htmlSubdirectory = html
+
+# a list of analyses to generate.  Valid names can be seen by running:
+#   mpas_analysis --list
+# This command also lists tags for each analysis.
+# Shortcuts exist to generate (or not generate) several types of analysis.
+# These include:
+#   'all' -- all analyses will be run
+#   'all_publicObs' -- all analyses for which observations are available on the
+#                      public server (the default)
+#   'all_<tag>' -- all analysis with a particular tag will be run
+#   'all_<component>' -- all analyses from a given component (either 'ocean'
+#                        or 'seaIce') will be run
+#   'only_<component>', 'only_<tag>' -- all analysis from this component or
+#                                       with this tag will be run, and all
+#                                       analysis for other components or
+#                                       without the tag will be skipped
+#   'no_<task_name>' -- skip the given task
+#   'no_<component>', 'no_<tag>' -- in analogy to 'all_*', skip all analysis
+#                                   tasks from the given component or with
+#                                   the given tag.  Do
+#                                      mpas_analysis --list
+#                                   to list all task names and their tags
+# an equivalent syntax can be used on the command line to override this
+# option:
+#    mpas_analysis analysis.cfg --generate \
+#         only_ocean,no_timeSeries,timeSeriesSST
+generate = ['all_publicObs']
+
+
+
+

Output Directories

+

The output directories are specified through a base directory and a set of +subdirectories within that base. You will always need to set baseDirectory +in a custom configuration file.

+

The subdirectories of output can be renamed if desired or an absolute path +can be specified if output to another location is desired. This is +particularly relevant to htmlSubdirectory, which may be pointed to a +a public space such as a web portal for display on the web. Note: +MPAS-Analysis does not change the HTML output to be world readable so you +will need to do this manually after a run has completed (or inside of a job +script) to see the results on a public web page.

+
+
+

Generate Option

+

The generate option is used to control which tasks run. The simplest +choice is:

+
generate = ['all']
+
+
+

in which case MPAS-Analysis will attempt to run all analysis tasks. In this +mode, some tasks may fail with a warning printed to the screen during their +setup_and_check() +phase if the simulation was not configured to support that task. All tasks +that pass the +setup_and_check() +phase will be run.

+

The next simplest option is to specify a single task name:

+
generate = ['climatologyMapSST']
+
+
+

or a list of task names:

+
generate = ['climatologyMapSST', 'climatologyMapSSS']
+
+
+

in which case only the listed tasks are run.

+

A third way to determine which tasks to generate is to make use of “tags” for +each task. To see what tags each task has, run:

+
mpas_analysis --list
+
+
+

This will show all available tasks together with the component they belong to +and the tags for each. To run only those analysis tasks with a particular tag, +set, for example climatology:

+
generate = ['all_climatology']
+
+
+

This will generate only those tasks that make use of climatologies.

+

A useful tag for the public release of MPAS-Analysis is the publicObs tag, +which is found on all tasks that will run successfully if you have downloaded +the observations from the E3SM public data repository. Some MPAS-Analysis +tasks make use of data sets that are only available after registering with a +data portal or by contacting the authors of that data set directly, so that +these data have not been included in the data repository. The default is to +generate only the tasks with observations in the repository:

+
generate = ['all_publicObs']
+
+
+

The names of components (ocean or seaIce) can also be used as tags.

+

There are also ways to specify that a given tag should not be present +(no_<tag>) or that only analysis with the given tag should be run +(only_<tag>). These options are useful when combined in a series with +other generate options. For example, to generate all tasks with publicly +available observation except those for the seaIce component, you could +specify:

+
generate = ['all_publicObs', 'no_seaIce']
+
+
+

If an appropriate reference year isn’t available for computing anomalies, +include ‘no_anomaly’ in the generate list to skip all tasks that require the +reference year for computing anomalies:

+
generate = ['all_publicObs', 'no_anomaly']
+
+
+

To specify that you wanted to plot climatologies from the ocean component, you +could use:

+
generate = ['all_publicObs', 'only_climatologies', 'only_ocean']
+
+
+

If you wanted to plot all tasks with publicly available data sets that used +either climatologies or time series, you could use:

+
generate = ['all_climatologies', 'all_timeSeries', 'only_publicObs']
+
+
+

Finally, we note that the generate option in the configuration file can +be overridden by specifying the --generate option on the command line:

+
mpas_analysis --generate=all_publicObs,no_index,no_climatologyMapSST \
+    my_run.cfg
+
+
+

This example would override whatever generate option was specified in +my_run.cfg with a directive to generate only tasks that support the +publicly available observations, skipping those using climate indices (e.g. +El Niño 3.4) and also skipping climatologyMapSST.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/plot.html b/1.11.0rc1/users_guide/config/plot.html new file mode 100644 index 000000000..ca0cc50dd --- /dev/null +++ b/1.11.0rc1/users_guide/config/plot.html @@ -0,0 +1,202 @@ + + + + + + + Plot — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Plot

+

The [plot] section of a configuration file contains options that define +default properties for all plots produce by MPAS-Analysis:

+
[plot]
+## options related to plotting that are the defaults across all analysis
+## modules
+
+# font size on axes
+axisFontSize = 16
+
+# the default font size for axis ticks, etc.
+defaultFontSize = 10
+
+# title and axis font properties for single-panel plots
+titleFontSize = 16
+titleFontColor = black
+titleFontWeight = normal
+axisFontSize = 12
+
+# font size for cartopy grid labels
+cartopyGridFontSize = 12
+
+# the dots per inch of output figures
+dpi = 200
+
+# Write out PDFs in addition to PNGs?
+pdf = False
+
+
+

The options for title font size, color and weight as well as axis font size +specify properties of these parts of each plot. The default font size covers +the axis tick marks, color-bar ticks and axis labels, contour labels, etc. +The cartopy grid font size are for the labels (either interior or along the +plot boundary) for cartopy labels. Sizes are given in points.

+

The value of dpi specifies the resolution of the images written out by +MPAS-Analysis (in dots per inch). The default produces large images that +are appropriate for zooming in substantially and may be sufficient for +publication. They are large (but not entirely unmanageable) for the web.

+

You can set pdf = True to write out PDF files in the plots subdirectory +along with PNG files. The PDFs are not copied to the HTML folder.

+

Many types of individual plots, including climatologies, transects, Hovmoller +plots, and most time series, also support setting the defaultFontSize and +titleFontSize config options just of that type of plot.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/preprocessed.html b/1.11.0rc1/users_guide/config/preprocessed.html new file mode 100644 index 000000000..87d481136 --- /dev/null +++ b/1.11.0rc1/users_guide/config/preprocessed.html @@ -0,0 +1,183 @@ + + + + + + + Preprocessed Reference Runs — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Preprocessed Reference Runs

+

The [oceanPreprocessedReference] and [seaIcePreprocessedReference] +sections of a configuration file contain options used to point to preprocessed +data from E3SM v0 reference runs:

+
[oceanPreprocessedReference]
+## options related to preprocessed ocean reference run with which the results
+## will be compared (e.g. a POP, CESM or ACME v0 run)
+
+# directory where ocean reference simulation results are stored
+baseDirectory = /dir/to/ocean/reference
+
+...
+
+[seaIcePreprocessedReference]
+## options related to preprocessed sea ice reference run with which the results
+## will be compared (e.g. a CICE, CESM or ACME v0 run)
+
+# directory where ocean reference simulation results are stored
+baseDirectory = /dir/to/seaice/reference
+
+
+

If such a preprocessed reference run is available, the name of the reference +run should be specified (see Runs) and the base directories +should be specified here.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/regions.html b/1.11.0rc1/users_guide/config/regions.html new file mode 100644 index 000000000..3149ab3be --- /dev/null +++ b/1.11.0rc1/users_guide/config/regions.html @@ -0,0 +1,218 @@ + + + + + + + Regions — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Regions

+
+

Region Config Section

+

The [regions] section of a configuration file contains options related +to regions either defined in MPAS components’ online analysis or determined +within MPAS-Analysis using region mask files:

+
[regions]
+## options related to ocean regions used in several analysis modules
+
+# list of region names (needs to be in the same order as region indices in
+# time-series stats)
+regions = ['arctic', 'equatorial', 'so', 'nino3', 'nino4', 'nino3.4', 'global']
+# list of plot titles (needs to be in the same order as region indices in
+# time-series stats)
+plotTitles = ['Arctic', 'Equatorial (15S-15N)', 'Southern Ocean', 'Nino 3',
+              'Nino 4', 'Nino 3.4', 'Global Ocean']
+
+
+

MPAS-Ocean currently has hard coded into its online analysis 7 regions in a +fixed order, as given in the regions option. This should not be altered +unless corresponding changes to the MPAS-Ocean code have been made.

+

The corresponding plotTitles can be modified as desired to update how +these regions are named in plot titles and in gallery names on the resulting +web page.

+
+
+

Region Groups

+

Currently, eight analysis tasks (climatologyMapAntarcticMelt, +hovmollerOceanRegions, oceanRegionalProfiles, +regionalTSDiagrams, streamfunctionMOC, +oceanHistogram, timeSeriesAntarcticMelt, and +timeSeriesOceanRegions) use masks that define regions in an MPAS +mesh as part of their analysis. Most of these region groups are defined in +geometric_features.aggregation.get_aggregator_by_name(). +Several tasks (hovmollerOceanRegions, oceanHistogram, +oceanRegionalProfiles, regionalTSDiagrams, and +timeSeriesOceanRegions) can use any of the defined region groups. +Currently, available region groups are: Arctic Ocean Regions, Antarctic Regions, +Ocean Basins, Ice Shelves, and Ocean Subbasins.

+

The option regionMaskSubdirectory in the [diagnostics] section specifies +the path to cached mask files for these region groups, typically +diagnostics/mpas_analysis/region_masks. Region masks for common MPAS Ocean +and Seaice meshes are supplied as part of the data from the +E3SM public data repository +(see the Quick Start Guide).

+

If masks for a given grid don’t already exist in the cached mask location, +they will be generated automatically from the aggregation function from the +geometric_features package, see +Aggregate Existing Features. +The mask data will be stored in a geojson file with the region group’s prefix +and date stamp (e.g. iceShelves20200621.geojson). Then, masks on the MPAS +Ocean and Seaice mesh will be computed, a process that can be time consuming for +large meshes. To generate the masks in advance (using threading to speed up the +process), see the example utility script utility_scripts/make_region_mask.py.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/runs.html b/1.11.0rc1/users_guide/config/runs.html new file mode 100644 index 000000000..9d1cba196 --- /dev/null +++ b/1.11.0rc1/users_guide/config/runs.html @@ -0,0 +1,264 @@ + + + + + + + Runs — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Runs

+

The [runs] section of a configuration file contains options used to name +the “main” run, a preprocessed E3SM v0 run (if any) and to point to analysis +of a control E3SM v1 or standalone MPAS run (if any):

+
[runs]
+## options related to the run to be analyzed and control runs to be
+## compared against
+
+# mainRunName is a name that identifies the simulation being analyzed.
+mainRunName = runName
+
+# preprocessedReferenceRunName is the name of a control run that has been
+# preprocessed to compare against (or None to turn off comparison).  Reference
+# runs of this type would have preprocessed results because they were not
+# performed with MPAS components (so they cannot be easily ingested by
+# MPAS-Analysis)
+preprocessedReferenceRunName = None
+
+# config file for a control run to which this run will be compared.  The
+# analysis should have already been run to completion once with this config
+# file, so that the relevant MPAS climatologies already exist and have been
+# remapped to the comparison grid.  Leave this option commented out if no
+# control run is desired.
+# controlRunConfigFile = /path/to/config/file
+
+# config file for a main run on which the analysis was already run to
+# completion.  The relevant MPAS climatologies already exist and have been
+# remapped to the comparison grid and time series have been extracted.
+# Leave this option commented out if the analysis for the main run should be
+# performed.
+# mainRunConfigFile = /path/to/config/file
+
+
+

The name of the “main” run (as opposed to a control run, if any) can be any +identifier that will be used in figure titles, legends, web pages and file +names to identify this run. It does not need to be the name of the simulation +as specified in E3SM:

+
mainRunName = runName
+
+
+

A few of the time series plots in MPAS-Analysis can be compared against a +preprocessed control run from E3SM v0 (which was similar to the CESM, the +Community Earth System Model). If these data are available and the comparison +to these runs is desired, the name of the control run should be specified +here and the paths to the data set should be specified (see +Preprocessed Reference Runs). If not this name should be left as None:

+
preprocessedReferenceRunName = None
+
+
+

MPAS-Analysis supports comparison between the “main” run and a control run +from either E3SM or a standalone MPAS component. By default, this feature is +disabled by commenting out the configuration option:

+
# controlRunConfigFile = /path/to/config/file
+
+
+

To specify a control run, first run MPAS analysis on the control run. Be +sure that:

+
+
    +
  • the start and end year for climatologies, time series and climate indices +is covered by the simulation output.

  • +
  • most configuration options for the control run are the same as for the +main run. The exceptions are contents of the [run], [input] and +[output] sections. The range of years for climatologies can be +different, but this is discouraged.

  • +
+
+

Once the analysis has been run on the control run, a comparison is made by +uncommenting controlRunConfigFile and specifying the path to the +configuration file used in this analysis, e.g.:

+
controlRunConfigFile = control_run.cfg
+
+
+

If analysis has already been run on the “main” run in a “main vs ref” +comparison, some time can be saved in performing the comparison +(particularly for higher resolution output, for which a lot of the +computation time goes into computing climatologies and extracting time +series). By default, this feature is disabled by commenting out the +configuration option:

+
# mainRunConfigFile = /path/to/config/file
+
+
+

To specify a main run, first run MPAS analysis on the main run. The +“comparison” config file should be nearly identical to the “main” config +file except that:

+
+
    +
  • The output baseDirectory should be different.

  • +
  • the start and end year for climatologies, time series and climate indices +must be the actual range used if output data was not available to span +the requested range in the “main” run.

  • +
+
+

All configuration information for the “main” run in the “main vs ref” +comparison is taken from the “comparison” config file, not the “main” config. +Only the output directories and subdirectories for climatologies, time series, +mapping files and mask files (if these latter 2 were generated on the fly) +will be taken from the “main” config file. Symbolic links will be made to +these directories so the comparison analysis run can reuse this data. +Specify the path to the configuration file used in the “main” analysis by +uncommenting the option and providing a relative or absolute path to the +config file:

+
mainRunConfigFile = main_run.cfg
+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/seasons.html b/1.11.0rc1/users_guide/config/seasons.html new file mode 100644 index 000000000..973613909 --- /dev/null +++ b/1.11.0rc1/users_guide/config/seasons.html @@ -0,0 +1,173 @@ + + + + + + + Seasons — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Seasons

+

Nearly all analysis tasks that produce climatology plots include a +configuration option for specifying a list of seasons to plot:

+
# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep,
+# Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['JFM', 'JAS', 'ANN']
+
+
+

Valid seasons include the three-letter abbreviations of each month (e.g. +Jan), several 2- and 3-month seasons specified by consecutive first letters +of each month name (JFM, AMJ, JAS, OND, ON, FM, DJF +and JJA), and ANN for all 12 months.

+

If seasons other than these are needed, please post an issue on GitHub or +contact the developers.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/timeSeries.html b/1.11.0rc1/users_guide/config/timeSeries.html new file mode 100644 index 000000000..5a1069830 --- /dev/null +++ b/1.11.0rc1/users_guide/config/timeSeries.html @@ -0,0 +1,200 @@ + + + + + + + Time Series — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Time Series

+

The [timeSeries] section of a configuration file contains options used to +determine the start and end years of time series plots and the reference years +for anomalies:

+
[timeSeries]
+## options related to producing time series plots, often to compare against
+## observations and previous runs
+
+# the year from which to compute anomalies if not the start year of the
+# simulation.  This might be useful if a long spin-up cycle is performed and
+# only the anomaly over a later span of years is of interest.
+# anomalyRefYear = 249
+
+# start and end years for timeseries analysis.  Out-of-bounds values will lead
+# to an error.
+startYear = 1
+endYear = 20
+
+
+
+

Start and End Year

+

A custom config file should specify a start and end year for time series. +Out-of-range years will produce an error.

+
+
+

Anomaly Reference Year

+

Anomalies between a moving average of a time series and the average over a +reference year are used in several analysis tasks. By default, the reference +year is not specified in the configuration file and is taken to be the start of +the simulation (determined from the contents of a restart file). Under certain +circumstances (e.g. repetition of forcing data for several cycles, as in +the Common Ocean Reference Experiments, CORE), it may be desirable to +specify a different year to use for computing anomalies:

+
anomalyRefYear = 249
+
+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/time_axis_ticks.html b/1.11.0rc1/users_guide/config/time_axis_ticks.html new file mode 100644 index 000000000..b17451e23 --- /dev/null +++ b/1.11.0rc1/users_guide/config/time_axis_ticks.html @@ -0,0 +1,169 @@ + + + + + + + Time-Axis Tick Marks — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Time-Axis Tick Marks

+

By default tick marks on the time (x) axis are determined automatically by +matplotlib. You can explicitly control them by setting a first year and +a stride (skip) in years. For example:

+
firstYearXTicks = 2
+yearStrideXTicks = 2
+
+
+

will place the first tick mark at simulation year 2 and will give a tick mark +every 2 years.

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/config/transects.html b/1.11.0rc1/users_guide/config/transects.html new file mode 100644 index 000000000..264f46f4a --- /dev/null +++ b/1.11.0rc1/users_guide/config/transects.html @@ -0,0 +1,223 @@ + + + + + + + Output Grids for Transects — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Output Grids for Transects

+

Several tasks for producing transects use a common methodology for producing +the comparison grid for each transect:

+
# The approximate horizontal resolution (in km) of each transect.  Latitude/
+# longitude between observation points will be subsampled at this interval.
+# Use 'obs' to indicate no subsampling. Use 'mpas' to indicate plotting of
+# model data on the native grid, in which case comparison with observations
+# will take place on the observation grid.
+#horizontalResolution = mpas
+#horizontalResolution = obs
+horizontalResolution = 5
+
+# The name of the vertical comparison grid.  Valid values are 'mpas' for the
+# MPAS vertical grid, 'obs' to use the locations of observations or
+# any other name if the vertical grid is defined by 'verticalComparisonGrid'.
+# If horizontalResolution is 'mpas', model data (both main and control) will be
+# plotted on the MPAS vertical grid, regardless of the comparison grid.
+#verticalComparisonGridName = mpas
+#verticalComparisonGridName = obs
+verticalComparisonGridName = uniform_0_to_4000m_at_10m
+
+# The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas' or
+# 'obs'.  This should be numpy array of (typically negative) elevations (in m).
+verticalComparisonGrid = numpy.linspace(0, -4000, 401)
+
+# A range for the y axis (if any)
+verticalBounds = []
+
+
+

The horizontalResolution of all transects can be obs, mpas or a +number of kilometers. If obs, model data are sampled at latitude and +longitude points corresponding to the observations. If the horizontal grid +is mpas, then the native MPAS-Ocean mesh is used for both the horizontal and +vertical grids. If a number of kilometers is given, linear interpolation +between observation points is performed with approximately the requested +resolution. The distance between observation points is always divided into an +integer number of segments of equal length so the resolution may be slightly +above or below horizontalResolution.

+

The vertical grid is determined by two parameters, +verticalComparisonGridName and verticalComparisonGrid. If +verticalComparisonGridName = mpas, but horizontalResolution is not +mpas, the MPAS-Ocean vertical coordinate will be interpolated horizontally +from grid cell centers to the latitude and longitude of each point along the +transect, and the observations will be interpolated vertically to the resulting +grid. If verticalComparisonGridName = obs, the vertical grid of the +observations is used instead. If verticalComparisonGridName is anything +else, it is taken to be the name of a user-defined vertical grid (best to make +it descriptive and unique, e.g. uniform_0_to_4000m_at_10m) and +verticalComparisonGrid should be assigned a valid array of positive-up +depth values (in the form of a python list or numpy array), e.g.:

+
verticalComparisonGrid = numpy.linspace(0, -4000, 401)
+
+
+

produces points between 0 and -4000 m sampled every 10 m.

+

verticalBounds is a list of minimum and maximum limits for the vertical axis +of the transect. The default is an empty list, which means matplotlib +selects the axis limits to encompass the full range of the vertical grid.

+
+

Note

+

Some types of transects (e.g. those produced with geojson files) do not have +a vertical grid associated with them (just horizontal latitude/longitude +points), meaning that verticalComparisonGridName = obs is not a valid +option for tasks with these transects.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/configuration.html b/1.11.0rc1/users_guide/configuration.html new file mode 100644 index 000000000..efb37786b --- /dev/null +++ b/1.11.0rc1/users_guide/configuration.html @@ -0,0 +1,192 @@ + + + + + + + Configuration — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Configuration

+

MPAS-Analysis is controlled through configuration files, which are customized +to point to different simulation results and to control how the output is +computed and displayed. Several groups of configuration options are found +across a number of analysis tasks.

+

Examples of configuration files for various E3SM supported machines can be +found in the configs/ folder or can be browsed on GitHub. The files +example.cfg and example_e3sm.cfg provide a list of the default values +for the configuration options that are most commonly modified on unknown and +E3SM-supported machines, repsectively.

+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/e3sm.html b/1.11.0rc1/users_guide/e3sm.html new file mode 100644 index 000000000..0dcd392c4 --- /dev/null +++ b/1.11.0rc1/users_guide/e3sm.html @@ -0,0 +1,144 @@ + + + + + + + E3SM — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

E3SM

+

The Energy Exascale Earth System Model (E3SM) Project is an ongoing, +state-of-the-science Earth system modeling, simulation, and prediction project +that optimizes the use of DOE laboratory resources to meet the science needs of +the nation and the mission needs of DOE.

+

A Full description of E3SM is available at: +https://e3sm.org/

+
+

Setting up E3SM runs

+

All online analysis and output stream within MPAS components (MPAS-O and +MPAS-SeaIce) are configured to support MPAS-Analysis without any modifications +to namelists or streams files.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/mpaso.html b/1.11.0rc1/users_guide/mpaso.html new file mode 100644 index 000000000..eea9b6757 --- /dev/null +++ b/1.11.0rc1/users_guide/mpaso.html @@ -0,0 +1,278 @@ + + + + + + + MPAS Ocean — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

MPAS Ocean

+

The Model for Prediction Across Scales Ocean (MPAS-O) is designed for the +simulation of the ocean system from time scales of months to millennia and +spatial scales from sub 1 km to global circulations.

+

MPAS-O has demonstrated the ability to accurately reproduce mesoscale ocean +activity with a local mesh refinement strategy.

+

In addition to facilitating the study of multiscale phenomena within the ocean +system, MPAS-O is intended for the study of anthropogenic climate change as +the ocean component of climate system models.

+

Full documentation is available at: +https://mpas-dev.github.io/ocean/ocean.html

+
+

Setting up Standalone MPAS-O Runs

+

In order to support all ocean analysis tasks from MPAS-Analysis, certain +“analysis members”, Fortran modules that perform analysis during the +simulation, need to be enabled.

+

The following is a list of suggested values for namelist options, typically +found in namelist.ocean or mpaso_in (or mpas-o_in in older E3SM runs):

+
config_AM_surfaceAreaWeightedAverages_enable = .true.
+config_AM_surfaceAreaWeightedAverages_compute_interval = '0000-00-00_01:00:00'
+config_AM_layerVolumeWeightedAverage_enable = .true.
+config_AM_layerVolumeWeightedAverage_compute_interval = '0000-00-00_01:00:00'
+config_AM_meridionalHeatTransport_enable = .true.
+config_AM_meridionalHeatTransport_compute_interval = '0000-00-00_01:00:00'
+config_AM_mixedLayerDepths_enable = .true.
+config_AM_timeSeriesStatsMonthly_enable = .true.
+
+
+

Additionally, the duration of the run should be set to at least two years and +typically longer before most analysis is useful:

+
config_run_duration = '0002-00-00_00:00:00'
+
+
+

Several streams must be defined in the streams file, typically +streams.ocean, (even if they will not be written out – +output_interval="none"):

+
<stream name="timeSeriesStatsMonthlyRestart"
+        type="input;output"
+        filename_template="restarts/restart.AM.timeSeriesStatsMonthly.$Y-$M-$D_$h.$m.$s.nc"
+        filename_interval="output_interval"
+        reference_time="0001-01-01_00:00:00"
+        clobber_mode="truncate"
+        packages="timeSeriesStatsMonthlyAMPKG"
+        input_interval="initial_only"
+        output_interval="stream:restart:output_interval" >
+</stream>
+
+<stream name="timeSeriesStatsMonthlyOutput"
+        type="output"
+        filename_template="analysis_members/timeSeriesStatsMonthly.$Y-$M.nc"
+        filename_interval="0000-01-00_00:00:00"
+        reference_time="0001-01-01_00:00:00"
+        clobber_mode="truncate"
+        packages="timeSeriesStatsMonthlyAMPKG"
+        output_interval="00-01-00_00:00:00" >
+
+        <var_array name="activeTracers"/>
+        <var name="normalVelocity"/>
+        <var name="vertVelocityTop"/>
+        <var_array name="avgValueWithinOceanRegion"/>
+        <var_array name="avgValueWithinOceanLayerRegion"/>
+        <var name="dThreshMLD"/>
+        <var name="meridionalHeatTransportLatZ"/>
+        <var name="meridionalHeatTransportLat"/>
+        <var name="binBoundaryMerHeatTrans"/>
+        <var name="xtime"/>
+</stream>
+
+<stream name="layerVolumeWeightedAverageOutput"
+        type="output"
+        io_type="pnetcdf"
+        filename_template="mpaso.hist.am.layerVolumeWeightedAverage.$Y-$M-$D.nc"
+        filename_interval="00-01-00_00:00:00"
+        output_interval="none"
+        clobber_mode="truncate"
+        packages="layerVolumeWeightedAverageAMPKG">
+
+    <var name="xtime"/>
+    <var name="daysSinceStartOfSim"/>
+    <var_array name="minValueWithinOceanLayerRegion"/>
+    <var_array name="maxValueWithinOceanLayerRegion"/>
+    <var_array name="avgValueWithinOceanLayerRegion"/>
+    <var_array name="minValueWithinOceanVolumeRegion"/>
+    <var_array name="maxValueWithinOceanVolumeRegion"/>
+    <var_array name="avgValueWithinOceanVolumeRegion"/>
+</stream>
+
+<stream name="meridionalHeatTransportOutput"
+        type="output"
+        io_type="pnetcdf"
+        filename_template="mpaso.hist.am.meridionalHeatTransport.$Y-$M-$D.nc"
+        filename_interval="00-01-00_00:00:00"
+        output_interval="none"
+        clobber_mode="truncate"
+        packages="meridionalHeatTransportAMPKG">
+
+    <var name="xtime"/>
+    <var name="daysSinceStartOfSim"/>
+    <var name="binBoundaryMerHeatTrans"/>
+    <var name="meridionalHeatTransportLatZ"/>
+    <var name="meridionalHeatTransportLat"/>
+    <var name="refZMid"/>
+    <var name="refBottomDepth"/>
+</stream>
+
+<stream name="surfaceAreaWeightedAveragesOutput"
+        type="output"
+        io_type="netcdf"
+        filename_template="mpaso.hist.am.surfaceAreaWeightedAverages.$Y-$M-$D.nc"
+        filename_interval="00-01-00_00:00:00"
+        output_interval="none"
+        clobber_mode="truncate"
+        packages="surfaceAreaWeightedAveragesAMPKG">
+
+    <var name="xtime"/>
+    <var name="daysSinceStartOfSim"/>
+    <var_array name="minValueWithinOceanRegion"/>
+    <var_array name="maxValueWithinOceanRegion"/>
+    <var_array name="avgValueWithinOceanRegion"/>
+</stream>
+
+<stream name="mixedLayerDepthsOutput"
+        type="output"
+        io_type="pnetcdf"
+        filename_template="mpaso.hist.am.mixedLayerDepths.$Y-$M-$D.nc"
+        filename_interval="00-01-00_00:00:00"
+        output_interval="none"
+        clobber_mode="truncate"
+        packages="mixedLayerDepthsAMPKG">
+
+    <var name="xtime"/>
+    <var name="daysSinceStartOfSim"/>
+    <stream name="mesh"/>
+    <var name="tThreshMLD"/>
+    <var name="dThreshMLD"/>
+    <var name="tGradMLD"/>
+    <var name="dGradMLD"/>
+</stream>
+
+
+

The filename_template can be modified as desired (in most cases, these are +the default values from E3SM). For the timeSeriesStatsMonthlyOutput +stream, both the filename_interval and the output_interval must currently be +monthly ("0000-01-00_00:00:00").

+

Additional fields can be included in the timeSeriesStatsMonthlyOutput +streams. These are the minimum that allow the analysis to run successfully.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/mpasseaice.html b/1.11.0rc1/users_guide/mpasseaice.html new file mode 100644 index 000000000..91b1d4b67 --- /dev/null +++ b/1.11.0rc1/users_guide/mpasseaice.html @@ -0,0 +1,189 @@ + + + + + + + MPAS-Seaice — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

MPAS-Seaice

+

The Model for Prediction Across Scales Sea Ice (MPAS-Seaice) +is designed for the simulations of sea ice on unstructured grids supported by +the MPAS framework. The model has not yet been publicly released and does not +have public documentation.

+
+

Setting up Standalone MPAS Sea Ice Runs

+

In order to support all sea-ice analysis tasks from MPAS-Analysis, certain +“analysis members”, Fortran modules that perform analysis during the +simulation, need to be enabled.

+

The following is a list of suggested values for namelist options, typically +found in namelist.seaice or mpassi_in (or mpas-cice_in in +older E3SM runs):

+
config_AM_timeSeriesStatsMonthly_enable = .true.
+
+
+

Additionally, the duration of the run should be set to at least two years and +typically longer before most analysis is useful:

+
config_run_duration = '0002-00-00_00:00:00'
+
+
+

Several streams must be defined in the streams file, typically +streams.seaice or streams.cice in older E3SM runs, (even if they will +not be written out – output_interval="none"):

+
<stream name="timeSeriesStatsMonthlyRestart"
+        type="input;output"
+        io_type="pnetcdf"
+        filename_template="mpasseaice.rst.am.timeSeriesStatsMonthly.$Y-$M-$D_$S.nc"
+        filename_interval="output_interval"
+        clobber_mode="truncate"
+        packages="timeSeriesStatsMonthlyAMPKG"
+        input_interval="initial_only"
+        output_interval="stream:restart:output_interval">
+</stream>
+
+<stream name="timeSeriesStatsMonthlyOutput"
+        type="output"
+        io_type="pnetcdf"
+        filename_template="mpasseaice.hist.am.timeSeriesStatsMonthly.$Y-$M-$D.nc"
+        filename_interval="00-01-00_00:00:00"
+        output_interval="00-01-00_00:00:00"
+        clobber_mode="truncate"
+        packages="timeSeriesStatsMonthlyAMPKG">
+
+        <var name="icePresent"/>
+        <var name="iceAreaCell"/>
+        <var name="iceVolumeCell"/>
+        <var name="xtime"/>
+</stream>
+
+
+

The filename_template can be modified as desired (these are the default +values from E3SM). For the timeSeriesStatsMonthlyOutput stream, both the +filename_interval and the output_interval must currently be monthly +("0000-01-00_00:00:00").

+

Additional fields can be included in the timeSeriesStatsMonthlyOutput +streams. These are the minimum that allow the analysis to run successfully.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/adusumilli_melt.html b/1.11.0rc1/users_guide/obs/adusumilli_melt.html new file mode 100644 index 000000000..fcacc91fe --- /dev/null +++ b/1.11.0rc1/users_guide/obs/adusumilli_melt.html @@ -0,0 +1,215 @@ + + + + + + + Antarctic melt rates and fluxes — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Antarctic melt rates and fluxes

+
+

Description

+

Melt rates and melt fluxes from Adusumilli et al. (2020)

+
+
+

Source

+

Data from: Interannual variations in meltwater input to the Southern Ocean from Antarctic ice shelves

+
+
+

Release Policy

+

Under copyright (US)

+

Use: This work is available from the UC San Diego Library. This digital +copy of the work is intended to support research, teaching, and private +study.

+

Constraint(s) on Use: This work is protected by the U.S. Copyright Law +(Title 17, U.S.C.). Use of this work beyond that allowed by “fair use” +or any license applied to this work requires written permission of the +copyright holder(s). Responsibility for obtaining permissions and any +use and distribution of this work rests exclusively with the user and +not the UC San Diego Library. Inquiries can be made to the UC San Diego +Library program having custody of the work.

+
+
+

References

+

Adusumilli et al. (2020)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/altiberg.html b/1.11.0rc1/users_guide/obs/altiberg.html new file mode 100644 index 000000000..1aa7b982f --- /dev/null +++ b/1.11.0rc1/users_guide/obs/altiberg.html @@ -0,0 +1,206 @@ + + + + + + + Iceberg Concentration: Altiberg — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Iceberg Concentration: Altiberg

+
+

Description

+

Iceberg probabilities from the Altiberg dataset for small icebergs (less than 3km in length) detected by altimeters using the high resolution waveforms. The database is also available for several other altimeter missions (ERS-1, ERS-2, Jason-1, Jason-2, CryoSat-2, Topex, Envisat, AltiKa). A merged product combining all the available altimeters is also provided, which is used in MPAS-Analysis.

+
+
+

Source

+

Altiberg website

+
+
+

Release Policy

+

Unknown, openly available on website.

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/aniceflux.html b/1.11.0rc1/users_guide/obs/aniceflux.html new file mode 100644 index 000000000..b17304e3a --- /dev/null +++ b/1.11.0rc1/users_guide/obs/aniceflux.html @@ -0,0 +1,220 @@ + + + + + + + Sea ice production and transport: Haumann et al 2016 — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Sea ice production and transport: Haumann et al 2016

+
+

Description

+

This data set provides estimates of annual freshwater fluxes related to sea-ice formation from ocean freezing +and snow-ice formation, sea-ice melting, lateral transport of sea ice in the Southern Ocean over the period +1982 to 2008. It is derived from a mass balance calculation of local sea-ice volume change and divergence +from satellite data and sea-ice reconstructions. The mass balance is calculated on a daily basis and fluxes are +then integrated over the entire year, where a year is defined from March to February of the next year (i.e. +from March 1982 to February 2009). This approach combines multiple products of sea-ice concentration +(Cavalieri & Parkinson, 2008; Comiso, 1986; Meier et al., 2013), sea-ice thickness (Kurtz & Markus, 2012; +Massonnet et al., 2013; Worby et al., 2008), and sea-ice drift (Fowler et al., 2013; Kwok 2005; Schwegmann +et al., 2011). For a detailed description of the method see Haumann et al. (2016). The data set is derived to +estimate large-scale (regional to basin-scale) fluxes on an annual basis. Our confidence is reduced on a grid +cell basis, such as for single coastal polynyas, where the method and underlying data induce large, unknown +uncertainties.

+
+
+

Source

+

EnviDat

+
+
+

Release Policy

+

This data set is free to use for any non-commercial purpose at the risk of the user, +and the authors do not take any liability on the use of the data set. The authors +assembled the data set carefully and assessed accuracy, errors, and uncertainties. +Please contact the authors if you find any issues.

+
+
+

References

+

Haumann et al (2016), data +Haumann et al (2016), paper

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/aquarius_sss.html b/1.11.0rc1/users_guide/obs/aquarius_sss.html new file mode 100644 index 000000000..fe1718d64 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/aquarius_sss.html @@ -0,0 +1,210 @@ + + + + + + + SSS from NASA Aquarius satellite — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

SSS from NASA Aquarius satellite

+
+

Description

+

Level 3 Aquarius sea surface salinity (SSS) data products have temporal +resolutions of daily, 8 day, monthly, 3 months, and annual. Monthly and +seasonal climatology products from Aquarius are also available. The Aquarius +instrument provides global coverage every 7 days. L3 products are gridded +at 1 degree spatial resolution.

+
+
+

Source

+

NASA Aquarius Website

+
+
+

Release Policy

+

NASA data are not copyrighted; however, when you publish our data or +results derived therefrom, we request that you include an acknowledgment +within the text of the publication and reference list. +Data Citation and Acknowledgements

+
+
+

References

+

Lagerloef et al. (2015)

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/argo_mld.html b/1.11.0rc1/users_guide/obs/argo_mld.html new file mode 100644 index 000000000..d7fdac1df --- /dev/null +++ b/1.11.0rc1/users_guide/obs/argo_mld.html @@ -0,0 +1,219 @@ + + + + + + + Argo Mixed Layer Depth (MLD) climatology — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Argo Mixed Layer Depth (MLD) climatology

+
+

Description

+

A mixed layer climatology and database (described in Holte et al. 2017) +using Argo profiles and a hybrid method (Holte and Talley 2009) for +finding the mixed layer depth (MLD). The climatology incorporates over +1,385,000 Argo profiles (through February 2017). The new hybrid algorithm +models the general shape of each profile, searches for physical features +in the profile, and calculates threshold and gradient MLDs to assemble a +suite of possible MLD values. It then analyzes the patterns in the suite +to select a final MLD estimate. Results are also presented for MLDs +calculated using de Boyer Montegut et al.’s (2004) threshold values.

+
+
+

Source

+

UCSD Mixed Layer Website

+
+
+

Release Policy

+

Acknowledgment: If you use this data, +please cite it as: Holte, J., L. D. Talley, J. Gilson, and D. Roemmich +(2017), An Argo mixed layer climatology and database, Geophys. Res. +Lett., 44, 5618-5626, doi:10.1002/2017GL073426.

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/aviso_ssh.html b/1.11.0rc1/users_guide/obs/aviso_ssh.html new file mode 100644 index 000000000..3fec2aedc --- /dev/null +++ b/1.11.0rc1/users_guide/obs/aviso_ssh.html @@ -0,0 +1,220 @@ + + + + + + + AVISO Absolute Dynamic Topography — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

AVISO Absolute Dynamic Topography

+
+

Description

+

NASA JPL AVISO website +This dataset contains absolute dynamic topography (similar to sea level +but with respect to the geoid) binned and averaged monthly on 1 degree +grids. The coverage is from October 1992 to December 2010. These data +were provided by AVISO (French space agency data provider) to support the +CMIP5 (Coupled Model Intercomparison Project Phase 5) under the World +Climate Research Program (WCRP) and was first made available via the JPL +Earth System Grid. The dynamic topography are derived from sea surface +height measured by several satellites including Envisat, TOPEX/Poseidon, +Jason-1 and OSTM/Jason-2, and referenced to the geoid. Along with this +dataset, two additional ancillary data files are included in the same +directory which contain the number of observations and standard error +co-located on the same 1 degree grids.

+
+
+

Source

+

NASA JPL AVISO website

+
+
+

Release Policy

+

When using Ssalto/Duacs data (NRT or DT along-track Absolute Dynamic +Topography (ADT), maps of SLA geostrophic currents (MSLA UV) or maps of +ADT heights and currents (MADT H and UV), climatologies and averages of +MSLA-H), please cite: “The altimeter products were produced by +Ssalto/Duacs and distributed by Aviso, with support from Cnes +(http://www.aviso.altimetry.fr/duacs/)”

+
+
+

References

+

AVISO: Sea Surface Height above Geoid

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/bootstrap_conc.html b/1.11.0rc1/users_guide/obs/bootstrap_conc.html new file mode 100644 index 000000000..c3cbf548f --- /dev/null +++ b/1.11.0rc1/users_guide/obs/bootstrap_conc.html @@ -0,0 +1,219 @@ + + + + + + + Ice concentration: SSM/I, Bootstrap algorithm — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Ice concentration: SSM/I, Bootstrap algorithm

+
+

Description

+

This sea ice concentration data set was derived using measurements from +the Scanning Multichannel Microwave Radiometer (SMMR) on the Nimbus-7 +satellite and from the Special Sensor Microwave/Imager (SSM/I) sensors on +the Defense Meteorological Satellite Program’s (DMSP) -F8, -F11, and -F13 +satellites. Measurements from the Special Sensor Microwave Imager/Sounder +(SSMIS) aboard DMSP-F17 are also included. The data set has been +generated using the Advanced Microwave Scanning Radiometer - Earth +Observing System (AMSR-E) Bootstrap Algorithm with daily varying +tie-points. Daily (every other day prior to July 1987) and monthly data +are available for both the north and south polar regions. Data are +gridded on the SSM/I polar stereographic grid (25 x 25 km) and provided +in two-byte integer format. Data are available via FTP.

+
+
+

Source

+

NSIDC Bootstrap Website

+
+
+

Release Policy

+

NASA data are not copyrighted; however, when you publish our data or +results derived therefrom, we request that you include an acknowledgment +within the text of the publication and reference list. +Data Citation and Acknowledgements

+
+
+

References

+

Comiso (2017)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/drifter_eke.html b/1.11.0rc1/users_guide/obs/drifter_eke.html new file mode 100644 index 000000000..38d1f4ed0 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/drifter_eke.html @@ -0,0 +1,230 @@ + + + + + + + Surface Current Variance from Drifter Data — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Surface Current Variance from Drifter Data

+
+

Description

+

Contains information about current variance (eddy kinetic energy) derived from +residuals with respect to the time-mean, seasonal, spatial gradient, and +SOI-related currents. The file contains the following variables:

+
    +
  • Lon (1x720): longitude (degrees), negative=West.

  • +
  • Lat (1x317): latitude (degrees), 73S to 85N.

  • +
  • Up2bar (317x720): Zonal Velocity Variance, mean(u’ ^2) (m^2/s^2).

  • +
  • Vp2bar (317x720): Meridional Velocity Variance, mean(v’ ^2) (m^2/s^2).

  • +
  • rA (317x720): Variance ellipse semimajor axis (m^2/s^2).

  • +
  • rB (317x720): Variance ellipse semiminor axis (m^2/s^2).

  • +
  • angle (317x720): Orientation angle of variance ellipse (degrees, 0=east/west).

  • +
  • N (317x720): Number of drifter-days per square degree.

  • +
+

Note that Up2bar+Vp2bar=rA+rB, which is twice the eddy kinetic energy.

+

For more information, see: http://www.aoml.noaa.gov/phod/gdp/mean_velocity.php

+

The global near-surface current climatology described here is publicly available +through the Gulf of Mexico Research Initiative Information & Data Cooperative (GRIIDC), and through +NOAA/AOML at +http://www.aoml.noaa.gov/phod/dac/drifter_variance.nc.

+

This climatology was developed by Rick Lumpkin (NOAA/AOML) and Lucas Laurindo +(Univ. Miami), in collaboration with Arthur Mariano (Univ. Miami), Mayra Pazos +(NOAA/AOML), and Erik Valdes (CIMAS/AOML). Previous versions were developed with +Gregory Johnson (NOAA/PMEL), Silvia Garzoli (NOAA/AOML), Jessica Redman (CIMAS), +and Zulema Garraffo(Univ. Miami).

+
+
+

Source

+

NOAA/AOML

+
+
+

Release Policy

+

Please cite as reference: +Laurindo, L., A. Mariano, and R. Lumpkin, 2017: An improved near-surface +velocity climatology for the global ocean from drifter observations Deep-Sea +Res. I, 124, pp.73-92, doi:10.1016/j.dsr.2017.04.009.

+
+
+

References

+

Laurindo et al. (2017)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/era5_waves.html b/1.11.0rc1/users_guide/obs/era5_waves.html new file mode 100644 index 000000000..5eef1fce0 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/era5_waves.html @@ -0,0 +1,240 @@ + + + + + + + Wave Reanalysis: ERA5 — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Wave Reanalysis: ERA5

+
+

Description

+

ERA5 is the fifth generation ECMWF reanalysis for the global climate and weather for the past 4 to 7 decades. +Currently data is available from 1950, with Climate Data Store entries for 1950-1978 (preliminary back extension) +and from 1959 onwards (final release plus timely updates, this page). ERA5 replaces the ERA-Interim reanalysis.

+

Reanalysis combines model data with observations from across the world into a globally complete and +consistent dataset using the laws of physics. This principle, called data assimilation, is based on +the method used by numerical weather prediction centres, where every so many hours (12 hours at ECMWF) +a previous forecast is combined with newly available observations in an optimal way to produce a new +best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. +Reanalysis works in the same way, but at reduced resolution to allow for the provision of a dataset +spanning back several decades. Reanalysis does not have the constraint of issuing timely forecasts, +so there is more time to collect observations, and when going further back in time, to allow for the +ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product.

+

ERA5 provides hourly estimates for a large number of atmospheric, ocean-wave and land-surface quantities. +An uncertainty estimate is sampled by an underlying 10-member ensemble at three-hourly intervals. +Ensemble mean and spread have been pre-computed for convenience. +Such uncertainty estimates are closely related to the information content of the available +observing system which has evolved considerably over time. They also indicate flow-dependent sensitive areas. +To facilitate many climate applications, monthly-mean averages have been pre-calculated too, +though monthly means are not available for the ensemble mean and spread.

+

ERA5 is updated daily with a latency of about 5 days (monthly means are available around the 6th of each month). +In case that serious flaws are detected in this early release (called ERA5T), this data could be different +from the final release 2 to 3 months later. In case that this occurs users are notified.

+

The data set presented here is a regridded subset of the full ERA5 data set on native resolution. +It is online on spinning disk, which should ensure fast and easy access. +It should satisfy the requirements for most common applications.

+

An overview of all ERA5 datasets can be found in this article. +Information on access to ERA5 data on native resolution is provided in these guidelines.

+

Data has been regridded to a regular lat-lon grid of 0.25 degrees for the reanalysis and +0.5 degrees for the uncertainty estimate (0.5 and 1 degree respectively for ocean waves). +There are four main sub sets: hourly and monthly products, both on pressure levels +(upper air fields) and single levels (atmospheric, ocean-wave and land surface quantities).

+

The present entry is “ERA5 monthly mean data on single levels from 1959 to present”.

+
+
+

Source

+

Copernicus Climate Data Store

+
+
+

Release Policy

+

Access to Copernicus Products is given for any purpose in so far as it is lawful, whereas use +may include, but is not limited to: reproduction; distribution; communication to the public; +adaptation, modification and combination with other data and information; or any +combination of the foregoing. Licence Agreement

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/ers_sst_nino.html b/1.11.0rc1/users_guide/obs/ers_sst_nino.html new file mode 100644 index 000000000..b150bbc6e --- /dev/null +++ b/1.11.0rc1/users_guide/obs/ers_sst_nino.html @@ -0,0 +1,222 @@ + + + + + + + ERS SSTv4 Nino 3.4 Index — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

ERS SSTv4 Nino 3.4 Index

+
+

Description

+

The Nino 3.4 Index is also computed using the Extended Reconstructed +Sea Surface Temperature (ERSST) dataset, which is a global monthly +sea surface temperature dataset derived from the International Comprehensive +Ocean-Atmosphere Dataset (ICOADS). It is produced on a 2 degree by 2 degree +grid with spatial completeness enhanced using statistical methods. This +monthly analysis begins in January 1854 continuing to the present and +includes anomalies computed with respect to a 1971-2000 monthly climatology. +The newest version of ERSST, version 4, is based on optimally tuned parameters +using the latest datasets and improved analysis methods. ERSST is suitable +for long-term global and basin-wide studies, and smoothed local and +short-term variations are used in the dataset.

+
+
+

Source

+

NOAA ERSST v4 website

+
+
+

Release Policy

+

Cite this dataset when used as a source: Boyin Huang, Viva F. Banzon, Eric Freeman, +Jay Lawrimore, Wei Liu, Thomas C. Peterson, Thomas M. Smith, Peter W. Thorne, +Scott D. Woodruff, and Huai-Min Zhang, 2015: Extended Reconstructed Sea Surface +Temperature (ERSST), Version 4. NOAA National Centers for Environmental Information. +doi:10.7289/V5KD1VVF [access date: January 2017].

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/glodapv2.html b/1.11.0rc1/users_guide/obs/glodapv2.html new file mode 100644 index 000000000..57d483cea --- /dev/null +++ b/1.11.0rc1/users_guide/obs/glodapv2.html @@ -0,0 +1,211 @@ + + + + + + + GLODAPv2 — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

GLODAPv2

+
+

Description

+

GLODAPv2 (Global Ocean Data Analysis Project version 2) is an international +data synthesis project for interior ocean inorganic carbon data and +related variables for the global ocean. It includes data from all ocean +areas of the globe and synthesizes 724 unique cruises.

+
+
+

Source

+

GLODAPv2 Website

+
+
+

Release Policy

+

The GLODAPv2 database is freely available.

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/hadisst_nino.html b/1.11.0rc1/users_guide/obs/hadisst_nino.html new file mode 100644 index 000000000..0e4ce9865 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/hadisst_nino.html @@ -0,0 +1,218 @@ + + + + + + + HadISST Nino 3.4 Index — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

HadISST Nino 3.4 Index

+
+

Description

+

Nino 3.4 Index is computed from the Hadley-OI sea surface temperature +(SST) and sea ice concentration (SIC) data set. This product was +specifically developed as surface forcing data set for AMIP style +uncoupled simulations of the Community Atmosphere Model (CAM). The +Hadley Centre’s SST/SIC version 1.1 (HADISST1), which is derived from gridded, +bias-adjusted in situ observations, were merged with the NOAA-Optimal +Interpolation (version 2; OI.v2) analyses. The HADISST1 spanned 1870 +onward but the OI.v2, which started in November 1981, better resolved +features such as the Gulf Stream and Kuroshio Current which are important +components of the climate system. Since the two data sets used different +development methods, anomalies from a base period were used to create +a more homogeneous record. Also, additional adjustments were made to +the SIC data set.

+
+
+

Source

+

NCAR Hadley-NOAA/OI SST website

+
+
+

Release Policy

+

Acknowledgment: Hurrell, J. W., J. J. Hack, D. Shea, J. M. Caron, and J. Rosinski, +2008: A New Sea Surface Temperature and Sea Ice Boundary Dataset for the Community +Atmosphere Model. Journal of Climate, 21, 5145-5153.

+
+
+

References

+

Hurrell et al. (2008)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/hadley_center_sst.html b/1.11.0rc1/users_guide/obs/hadley_center_sst.html new file mode 100644 index 000000000..dae433d37 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/hadley_center_sst.html @@ -0,0 +1,217 @@ + + + + + + + SST merged Hadley Center-NOAA/OI data set — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

SST merged Hadley Center-NOAA/OI data set

+
+

Description

+

The merged Hadley-OI sea surface temperature (SST) and sea ice +concentration (SIC) data sets were specifically developed as surface +forcing data sets for AMIP style uncoupled simulations of the Community +Atmosphere Model (CAM). The Hadley Centre’s SST/SIC version 1.1 +(HADISST1), which is derived from gridded, bias-adjusted in situ observations, +were merged with the NOAA-Optimal Interpolation (version 2; OI.v2) +analyses. The HADISST1 spanned 1870 onward but the OI.v2, which started +in November 1981, better resolved features such as the Gulf Stream and +Kuroshio Current which are important components of the climate system. +Since the two data sets used different development methods, anomalies +from a base period were used to create a more homogeneous record. Also, +additional adjustments were made to the SIC data set.

+
+
+

Source

+

NCAR Hadley-NOAA/OI SST website

+
+
+

Release Policy

+

Acknowledgment: Hurrell, J. W., J. J. Hack, D. Shea, J. M. Caron, and J. Rosinski, +2008: A New Sea Surface Temperature and Sea Ice Boundary Dataset for the Community +Atmosphere Model. Journal of Climate, 21, 5145-5153.

+
+
+

References

+

Hurrell et al. (2008)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/icesat_thickness.html b/1.11.0rc1/users_guide/obs/icesat_thickness.html new file mode 100644 index 000000000..7a19ef342 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/icesat_thickness.html @@ -0,0 +1,211 @@ + + + + + + + IceSat Ice Thickness — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

IceSat Ice Thickness

+
+

Description

+

This data set provides measurements of sea ice freeboard and sea ice +thickness for the Arctic region. The data were derived from measurements +made by from the Ice, Cloud, and land Elevation Satellite (ICESat) +Geoscience Laser Altimeter System (GLAS) instrument, the Special Sensor +Microwave/Imager (SSM/I), and climatologies of snow and drift of ice.

+
+
+

Source

+

NASA: Arctic Sea Ice Freeboard and Thickness

+
+
+

Release Policy

+

NASA data are not copyrighted; however, when you publish our data or +results derived therefrom, we request that you include an acknowledgment +within the text of the publication and reference list. +Data Citation and Acknowledgements

+
+
+

References

+

Yi and Zwally (2009)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/landschuetzer-som-ffn.html b/1.11.0rc1/users_guide/obs/landschuetzer-som-ffn.html new file mode 100644 index 000000000..dacf6a524 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/landschuetzer-som-ffn.html @@ -0,0 +1,221 @@ + + + + + + + Landschuetzerv2016 SOM-FFN — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Landschuetzerv2016 SOM-FFN

+
+

Description

+

The observation-based pCO2 fields were created using a 2-step neural +network method extensively described and validated in Landschuetzer et +al. 2013, 2014, 2016. The method first clusters the global ocean into +biogeochemical provinces and in a second step reconstructs the non-liner +relationship between CO2 driver variables and observations from the 4th +release of the Surface Ocean CO2 Atlas (SOCATv4, Bakker et al. 2016). +This file contains the resulting monthly pCO2 fields at 1 x 1 deg resolution +covering the global ocean with the exception of the Arctic Ocean and few +marginal seas. The air-sea CO2 fluxes are computed from the air-sea CO2 +partial pressure difference and a bulk gas transfer formulation +following Landschuetzer et al. 2013, 2014, 2016. Furthermore, the monthly +climatology is created from the monthly average of the period 1985-2015.

+
+
+

Source

+

SOM-FFN Website

+
+
+

Release Policy

+

This product is free to be used. Please cite the data set as: +Landschuetzer, P., N. Gruber and D.C.E. Bakker (2017). An updated observation-based global monthly gridded sea surface pCO2 and air-sea CO2 flux product from 1982 through 2015 and its monthly climatology (NCEI Accession 0160558). Version 2.2. NOAA National Centers for Environmental Information. Dataset. [2017-07-11]

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/nasateam_conc.html b/1.11.0rc1/users_guide/obs/nasateam_conc.html new file mode 100644 index 000000000..1c4911727 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/nasateam_conc.html @@ -0,0 +1,212 @@ + + + + + + + Ice concentration: SSM/I, NASATeam algorithm — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Ice concentration: SSM/I, NASATeam algorithm

+
+

Description

+

This data set is generated from brightness temperature data and is +designed to provide a consistent time series of sea ice concentrations +spanning the coverage of several passive microwave instruments. The data +are provided in the polar stereographic projection at a grid cell size of +25 x 25 km.

+
+
+

Source

+

NSIDC NASATeam Website

+
+
+

Release Policy

+

NASA data are not copyrighted; however, when you publish our data or +results derived therefrom, we request that you include an acknowledgment +within the text of the publication and reference list. +Data Citation and Acknowledgements

+
+
+

References

+

Cavalieri et al. (1996)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/paolo_melt.html b/1.11.0rc1/users_guide/obs/paolo_melt.html new file mode 100644 index 000000000..a5e6e9cb7 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/paolo_melt.html @@ -0,0 +1,205 @@ + + + + + + + Antarctic melt rates and fluxes — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Antarctic melt rates and fluxes

+
+

Description

+

Melt rates and melt fluxes from Paolo et al. (2023)

+
+
+

Source

+

Data from: ANT_G1920V01_IceShelfMelt.nc

+
+
+

Release Policy

+

Not stated.

+
+
+

References

+

Paolo et al. (2023)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/piomass_ice_volume.html b/1.11.0rc1/users_guide/obs/piomass_ice_volume.html new file mode 100644 index 000000000..9133a78b8 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/piomass_ice_volume.html @@ -0,0 +1,213 @@ + + + + + + + PIOMAS Arctic Sea Ice Volume Reanalysis — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

PIOMAS Arctic Sea Ice Volume Reanalysis

+
+

Description

+

Sea Ice Volume is calculated using the Pan-Arctic Ice Ocean Modeling and +Assimilation System (PIOMAS, Zhang and Rothrock, 2003) developed at +APL/PSC. Anomalies for each day are calculated relative to the average +over the 1979-2016 period for that day of the year to remove the annual +cycle. The model mean annual cycle of sea ice volume over this period +ranges from 28,000 km3 in April to 11,500 km3 in September.

+
+
+

Source

+

PIOMAS website

+
+
+

Release Policy

+

Data is public, but they optionally ask for basic information about the +person downloading the data (name, e-mail, and affiliation).

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/rignot_melt.html b/1.11.0rc1/users_guide/obs/rignot_melt.html new file mode 100644 index 000000000..bb5b0746e --- /dev/null +++ b/1.11.0rc1/users_guide/obs/rignot_melt.html @@ -0,0 +1,205 @@ + + + + + + + Antarctic melt rates and fluxes — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Antarctic melt rates and fluxes

+
+

Description

+

Melt rates and melt fluxes from Rignot et al. (2013)

+
+
+

Source

+

Ice-Shelf Melting Around Antarctica

+
+
+

Release Policy

+

Data available upon request from co-author J. Mouginot.

+
+
+

References

+

Rignot et al. (2013)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/roemmich_gilson_argo.html b/1.11.0rc1/users_guide/obs/roemmich_gilson_argo.html new file mode 100644 index 000000000..188344faf --- /dev/null +++ b/1.11.0rc1/users_guide/obs/roemmich_gilson_argo.html @@ -0,0 +1,217 @@ + + + + + + + Roemmich-Gilson Argo Climatology — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Roemmich-Gilson Argo Climatology

+
+

Description

+

This new version of the Roemmich-Gilson Argo Climatology extends the +analysis of Argo-only derived potential temperature and salinity fields +through 2016. Several marginal seas and the Artic Oean have been added. +The analysis method is similar to what was descibed in the Progress In +Oceanography Roemmich and Gilson paper (2009). The only modification has +been to scale the zonal equatorial correlation of the optimal estimation +step, by 8 times, versus 4 times as in the 2009 paper. The additional +Argo data utilized in the analysis results in a longer monthly record as +well as better estimates of the mean and variability fields. Monthly +updates are available in between major yearly re-analyses.

+
+
+

Source

+

Scripps Roemmich-Gilson Argo Website

+
+
+

Release Policy

+

Acknowledgment: Roemmich, +D. and J. Gilson, 2009: The 2004-2008 mean and annual cycle of +temperature, salinity, and steric height in the global ocean from the +Argo Program. Progress in Oceanography, 82, 81-100.

+
+
+

References

+

Roemmich and Gilson (2009)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/schmidtko.html b/1.11.0rc1/users_guide/obs/schmidtko.html new file mode 100644 index 000000000..f2653283e --- /dev/null +++ b/1.11.0rc1/users_guide/obs/schmidtko.html @@ -0,0 +1,206 @@ + + + + + + + Antarctic Seafloor Temperature and Salinity — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Antarctic Seafloor Temperature and Salinity

+
+

Description

+

Temporal means in conservative temperature and absolute salinity +of Antarctic Continental Shelf Bottom Water (ASBW) for depths shallower +than 1500 m for the period 1975 to 2012.

+
+
+

Source

+

ASCII data file

+
+
+

Release Policy

+

(missings)

+
+
+

References

+

Schmidtko et al. (2014)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/seawifs.html b/1.11.0rc1/users_guide/obs/seawifs.html new file mode 100644 index 000000000..5fdac7a0f --- /dev/null +++ b/1.11.0rc1/users_guide/obs/seawifs.html @@ -0,0 +1,214 @@ + + + + + + + SeaWiFS — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

SeaWiFS

+
+

Description

+

SeaWiFS (Sea-Viewing Wide Field-of-View Senson) was a project dedicated +to monitoring ocean water quality and ecological characteristics covering +multiple optical bands with a resolution of roughly 1.1km. Chlorophyll +concentrations are derived from images of ocean color.

+
+
+

Source

+

Ocean Color Website

+
+
+

Release Policy

+

Please provide acknowledgement of the use of Ocean Biology Processing +Group (OBPG) data products, images, and services, e.g.:

+

NASA Goddard Space Flight Center, Ocean Ecology Laboratory, Ocean +Biology Processing Group; (2014): Sea-viewing Wide Field-of-view Sensor +(SeaWiFS) Ocean Color Data, NASA OB.DAAC. http://doi.org/10.5067/ORBVIEW +-2/SEAWIFS_OC.2014.0. Accessed on 2016/02/29.

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/sose.html b/1.11.0rc1/users_guide/obs/sose.html new file mode 100644 index 000000000..8c4e711fc --- /dev/null +++ b/1.11.0rc1/users_guide/obs/sose.html @@ -0,0 +1,215 @@ + + + + + + + 2005-2010 climatology from SOSE the Southern Ocean State Estimate (SOSE) — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

2005-2010 climatology from SOSE the Southern Ocean State Estimate (SOSE)

+
+

Description

+

Monthly potential temperature, salinity velocity components and neutral +density output from the Southern Ocean State Estimate (SOSE) covering +years 2005-2010

+
+
+

Source

+

SOSE Website at UCSD

+
+
+

Release Policy

+

Conditions of use: The data on +these webpages are made freely available for scientific, bona fide, +not-for-profit research only. If your use of the data is different (e.g. +commercial), you must contact the data providers and receive written +permission for your use of the data prior to any such use. The user must +acknowledge SOSE data in all products or publications that use them, e.g. +by including the following written note: “Computational resources for the +SOSE were provided by NSF XSEDE resource grant OCE130007.” An appropriate +citation should also be made.

+
+
+

References

+

Mazloff et al. (2010)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/sscci_waves.html b/1.11.0rc1/users_guide/obs/sscci_waves.html new file mode 100644 index 000000000..7831e8be3 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/sscci_waves.html @@ -0,0 +1,225 @@ + + + + + + + Wave Satelite Altimeter Observations: ESA Sea State Climate Change Initiative — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Wave Satelite Altimeter Observations: ESA Sea State Climate Change Initiative

+
+

Description

+

The ESA Sea State Climate Change Initiative (CCI) project has produced global merged multi-sensor +time-series of monthly gridded satellite altimeter significant wave height (referred to as Level 4 (L4) data) +with a particular focus for use in climate studies.

+

This dataset contains the Version 1.1 Remote Sensing Sea Surface Height product, gridded over a global +regular cylindrical projection (1°x1° resolution), averaging valid and good measurements from all +available altimeters on a monthly basis (using the L2P products also available). +These L4 products are meant for statistics and visualization.

+

This first version of the Sea State CCI products is inherited from the GlobWave project, +building on experience and existing outputs. It extends and improves the GlobWave products, +which were a post-processing over existing L2 altimeter agency products with additional filtering, +corrections and variables. A major improvement consists in a new denoised sea surface height +variable using Empirical Mode Decomposition, which was used as input to these monthly statistical fields.

+

The altimeter data used in the Sea State CCI dataset v1.1 come from multiple satellite missions +spanning from 1991 to 2018 (ERS-1, ERS-2, Topex, Envisat, GFO, CryoSat-2, Jason-1, Jason-2, Jason-3, SARAL). +Many altimeters are bi-frequency (Ku-C or Ku-S) and only measurements in Ku band were used, +for consistency reasons, being available on each altimeter but SARAL (Ka band).

+
+
+

Source

+ +
+
+

Release Policy

+

Public data: access to these data is available to both registered and non-registered users. +Use of these data is covered by the following licence. +When using these data you must cite them correctly using the citation given on the CEDA Data Catalogue record.

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/ssmi_ice_area.html b/1.11.0rc1/users_guide/obs/ssmi_ice_area.html new file mode 100644 index 000000000..00d5e251d --- /dev/null +++ b/1.11.0rc1/users_guide/obs/ssmi_ice_area.html @@ -0,0 +1,238 @@ + + + + + + + Ice area and extent time series: SSM/I derived — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Ice area and extent time series: SSM/I derived

+
+

Description

+

The sea ice data presented here were derived from satellite +passive-microwave radiometers, specifically, the Scanning Multichannel +Microwave Radiometer (SMMR) on NASA’s Nimbus 7 satellite, for November +1978-August 1987, a sequence of Special Sensor Microwave Imagers (SSMIs) +on the F8, F11, and F13 satellites of the Defense Meteorological +Satellite Program (DMSP), for August 1987-December 2007, and the Special +Sensor Microwave Imager Sounder (SSMIS) on the DMSP F17 satellite for +January 2008-December 2012. The baseline data used were daily maps of +sea ice concentration. The maps are polar stereographic projections with +individual grid elements of approximately 25 km x 25 km; and the ice +concentration data are also archived at the National Snow and Ice Data +Center (NSIDC) at http://nsidc.org. The concentrations are calculated for +each ocean grid element and are used to derive ‘sea ice extent’, which is +calculated as the sum of all ocean elements having a sea ice +concentration of at least 15%, and ‘sea ice area’, which is calculated as +the sum over all ocean grid elements of the product of ice concentration +and grid element area. The data sets provided here include the +hemispheric totals and additionally the values for nine regions in the +Arctic and five regions in the Antarctic. These regions are identified in +Figures 1 and 2 respectively. Figures 3 and 4 provide plots of the trends +in the Arctic and Antarctic sea ice extents, along with monthly +deviations and 12-month running means. The monthly deviations are +calculated by taking the individual month’s ice extent/area and +subtracting from it the average over the course of the data set of the +extents/areas for that month.

+
+
+

Source

+

NASA Ice area and extent website

+
+
+

Release Policy

+

NASA data are not copyrighted; however, when you publish our data or +results derived therefrom, we request that you include an acknowledgment +within the text of the publication and reference list. +Data Citation and Acknowledgements

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/trenberth_mht.html b/1.11.0rc1/users_guide/obs/trenberth_mht.html new file mode 100644 index 000000000..264f68fee --- /dev/null +++ b/1.11.0rc1/users_guide/obs/trenberth_mht.html @@ -0,0 +1,212 @@ + + + + + + + Meridional Heat Transport (MHT) — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Meridional Heat Transport (MHT)

+
+

Description

+

The Trenbert and Caron oceanic meridional heat transport is computed +by computation of energy balance of the atmosphere, adjusted to fit +physical constraints, and using two reanalysis products: the National +Centers for Environmental Prediction-National Center for Atmospheric +Research (NCEP-NCAR) reanalysis and the European Centre for Medium-Range +Weather Forecasts (ECMWF) product. The analysis focuses on the period +from February 1985 to April 1989 when there are reliable top-of-the-atmosphere +radiation data from the Earth Radiation Budget Experiment.

+
+
+

Source

+

Data available upon request from Dr. Kevin Trenberth

+
+
+

Release Policy

+

Acknowledgment: please cite: Trenberth and Caron (2001). Estimates of +Meridional Atmosphere and Ocean Heat Transports, J. of Climate, 14, 3433-3443.

+
+
+

References

+

Trenberth and Caron (2001)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/woa.html b/1.11.0rc1/users_guide/obs/woa.html new file mode 100644 index 000000000..09f0fb7dc --- /dev/null +++ b/1.11.0rc1/users_guide/obs/woa.html @@ -0,0 +1,215 @@ + + + + + + + World Ocean Atlas v2 — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

World Ocean Atlas v2

+
+

Description

+

The World Ocean Atlas (WOA) is a data product of the Ocean Climate +Laboratory of the National Oceanographic Data Center. It consists of +objectively analyzed climatological mean fields at 1 degree spatial +resolution. The climatology is based on all data that are available from +the early 1900s to present.

+
+
+

Source

+ +
+
+

Release Policy

+

Cite the World Ocean Atlas 2013 for the data sets used (see references). +The World Ocean Atlas 2013 (and all previous versions) are available free +of charge.

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/woa18_t_s.html b/1.11.0rc1/users_guide/obs/woa18_t_s.html new file mode 100644 index 000000000..6a34ab6e0 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/woa18_t_s.html @@ -0,0 +1,222 @@ + + + + + + + WOA18 Temperature and Salinity Climatology — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

WOA18 Temperature and Salinity Climatology

+
+

Description

+

The World Ocean Atlas 2018 (WOA18) release July 2019 updates previous +versions of the World Ocean Atlas to include approximately 3 million +new oceanographic casts added to the World Ocean Database (WOD) since +the previous release as well as renewed and updated quality control. +Animal mounted pinniped temperature profiles have been added as a data +source improving coverage in some high latitude areas. WOA18 includes +analysis for temperature, salinity, dissolved oxygen, and dissolved +inorganic nutrients (only temperature and salinity are used in MPAS-Analysis +as of Aug 2019). The atlas includes annual, seasonal, and monthly +climatologies and related statistical fields. Annual fields are available +for the full depth (0-5500 m), while monthly and seasonal fields are +available for the upper 1500 m only. Climatologies are available on a 1deg +regular grid and on a 0.25deg grid.

+
+
+

Source

+

NOAA National Oceanographic Data Center (NODC) website

+
+
+

Release Policy

+

Acknowledgment: Locarnini and co-authors, 2019: World Ocean Atlas 2018, +Volume 1: Temperature. A. Mishonov Technical Editor, NOAA Atlas NESDIS 81. +Zweng and co-authors, 2019: World Ocean Atlas 2018, Volume 2: Salinity. +A. Mishonov Technical Editor, NOAA Atlas NESDIS 82.

+
+
+

References

+ +

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/obs/woce.html b/1.11.0rc1/users_guide/obs/woce.html new file mode 100644 index 000000000..9d17590c1 --- /dev/null +++ b/1.11.0rc1/users_guide/obs/woce.html @@ -0,0 +1,234 @@ + + + + + + + WOCE sections — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

WOCE sections

+
+

Description

+
+
+

WOCE

+

The Hydrographic Programme of the international World Ocean Circulation +Experiment (WOCE) was a comprehensive global hydrographic survey of +physical and chemical properties, of unprecedented scope and quality, and +represents the “state of the oceans” during the 1990s.

+

The “Printed Atlas” is a copy of the published volume and contains full +introductory text. The “Digital Atlas” presents the same graphical +material, with additional properties and levels, ancillary data sets, and +bibliographic material for each of the vertical sections.

+
+
+

CCHDO

+

The CCHDO’s primary mission is to deliver the highest possible quality +global CTD and hydrographic data to users. These data are a product of +decades of observations related to the physical characteristics of ocean +waters carried out during WOCE, CLIVAR and numerous other oceanographic +research programs. Whenever possible we provide these data in three +easy-to-use formats: WHP-Exchange (which we recommend for data +submissions to the CCHDO), WOCE, and netCDF. +global Argo and OceanSITES programs.

+
+
+

Source

+ +
+
+

Release Policy

+

“Public” data may be placed on-line. They may be exchanged between +investigators as desired. They may or may not be preliminary, and +especially for preliminary public data users are strongly advised to +contact the originating investigators regarding the status of updates or +further data processing. See Full Policy

+
+
+

References

+

Orsi and Whitworth (2005)

+

bibtex file

+
+
+

MPAS-Analysis Tasks

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/observations.html b/1.11.0rc1/users_guide/observations.html new file mode 100644 index 000000000..4c6a16f9d --- /dev/null +++ b/1.11.0rc1/users_guide/observations.html @@ -0,0 +1,416 @@ + + + + + + + Observations — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Observations

+

A variety of observational datasets are used within MPAS-Analysis:

+
+

Ocean Observations

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Observational Dataset

Source

References

SST merged Hadley Center-NOAA/OI data set

NCAR Hadley-NOAA/OI SST website

Hurrell et al. (2008)

SSS from NASA Aquarius satellite

NASA Aquarius Website

Lagerloef et al. (2015)

WOA18 Temperature and Salinity Climatology

NOAA National Oceanographic Data Center (NODC) website

+

AVISO Absolute Dynamic Topography

NASA JPL AVISO website

AVISO: Sea Surface Height above Geoid

Argo Mixed Layer Depth (MLD) climatology

UCSD Mixed Layer Website

+

Meridional Heat Transport (MHT)

Data available upon request from Dr. Kevin Trenberth

Trenberth and Caron (2001)

Roemmich-Gilson Argo Climatology

Scripps Roemmich-Gilson Argo Website

Roemmich and Gilson (2009)

2005-2010 climatology from SOSE the Southern Ocean State Estimate (SOSE)

SOSE Website at UCSD

Mazloff et al. (2010)

Antarctic melt rates and fluxes

Ice-Shelf Melting Around Antarctica

Rignot et al. (2013)

Antarctic melt rates and fluxes

Data from: Interannual variations in meltwater input to the Southern Ocean from Antarctic ice shelves

Adusumilli et al. (2020)

Antarctic melt rates and fluxes

Data from: ANT_G1920V01_IceShelfMelt.nc

Paolo et al. (2023)

HadISST Nino 3.4 Index

NCAR Hadley-NOAA/OI SST website

Hurrell et al. (2008)

ERS SSTv4 Nino 3.4 Index

NOAA ERSST v4 website

+

Antarctic Seafloor Temperature and Salinity

ASCII data file

Schmidtko et al. (2014)

WOCE sections

+

Orsi and Whitworth (2005)

World Ocean Atlas v2

+ +

Landschuetzerv2016 SOM-FFN

SOM-FFN Website

+

SeaWiFS

Ocean Color Website

+

GLODAPv2

GLODAPv2 Website

+

Surface Current Variance from Drifter Data

NOAA/AOML

Laurindo et al. (2017)

Wave Reanalysis: ERA5

Copernicus Climate Data Store

+

Wave Satelite Altimeter Observations: ESA Sea State Climate Change Initiative

+ +
+
+
+

Sea Ice Observations

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Observational Dataset

Source

References

Ice concentration: SSM/I, NASATeam algorithm

NSIDC NASATeam Website

Cavalieri et al. (1996)

Ice concentration: SSM/I, Bootstrap algorithm

NSIDC Bootstrap Website

Comiso (2017)

Ice area and extent time series: SSM/I derived

NASA Ice area and extent website

+

IceSat Ice Thickness

NASA: Arctic Sea Ice Freeboard and Thickness

Yi and Zwally (2009)

PIOMAS Arctic Sea Ice Volume Reanalysis

PIOMAS website

+

Sea ice production and transport: Haumann et al 2016

EnviDat

Haumann et al (2016), data +Haumann et al (2016), paper

Iceberg Concentration: Altiberg

Altiberg website

+
+
+
+

Details on Each Data Set

+ +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/ocean_obs_table.html b/1.11.0rc1/users_guide/ocean_obs_table.html new file mode 100644 index 000000000..5e4fa4ef3 --- /dev/null +++ b/1.11.0rc1/users_guide/ocean_obs_table.html @@ -0,0 +1,275 @@ + + + + + + + <no title> — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Observational Dataset

Source

References

SST merged Hadley Center-NOAA/OI data set

NCAR Hadley-NOAA/OI SST website

Hurrell et al. (2008)

SSS from NASA Aquarius satellite

NASA Aquarius Website

Lagerloef et al. (2015)

WOA18 Temperature and Salinity Climatology

NOAA National Oceanographic Data Center (NODC) website

+

AVISO Absolute Dynamic Topography

NASA JPL AVISO website

AVISO: Sea Surface Height above Geoid

Argo Mixed Layer Depth (MLD) climatology

UCSD Mixed Layer Website

+

Meridional Heat Transport (MHT)

Data available upon request from Dr. Kevin Trenberth

Trenberth and Caron (2001)

Roemmich-Gilson Argo Climatology

Scripps Roemmich-Gilson Argo Website

Roemmich and Gilson (2009)

2005-2010 climatology from SOSE the Southern Ocean State Estimate (SOSE)

SOSE Website at UCSD

Mazloff et al. (2010)

Antarctic melt rates and fluxes

Ice-Shelf Melting Around Antarctica

Rignot et al. (2013)

Antarctic melt rates and fluxes

Data from: Interannual variations in meltwater input to the Southern Ocean from Antarctic ice shelves

Adusumilli et al. (2020)

Antarctic melt rates and fluxes

Data from: ANT_G1920V01_IceShelfMelt.nc

Paolo et al. (2023)

HadISST Nino 3.4 Index

NCAR Hadley-NOAA/OI SST website

Hurrell et al. (2008)

ERS SSTv4 Nino 3.4 Index

NOAA ERSST v4 website

+

Antarctic Seafloor Temperature and Salinity

ASCII data file

Schmidtko et al. (2014)

WOCE sections

+

Orsi and Whitworth (2005)

World Ocean Atlas v2

+ +

Landschuetzerv2016 SOM-FFN

SOM-FFN Website

+

SeaWiFS

Ocean Color Website

+

GLODAPv2

GLODAPv2 Website

+

Surface Current Variance from Drifter Data

NOAA/AOML

Laurindo et al. (2017)

Wave Reanalysis: ERA5

Copernicus Climate Data Store

+

Wave Satellite Altimeter Observations: ESA Sea State Climate Change Initiative

+ +
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/quick_start.html b/1.11.0rc1/users_guide/quick_start.html new file mode 100644 index 000000000..d96f05b4b --- /dev/null +++ b/1.11.0rc1/users_guide/quick_start.html @@ -0,0 +1,431 @@ + + + + + + + Quick Start Guide — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Quick Start Guide

+

Analysis for simulations produced with Model for Prediction Across Scales +(MPAS) components and the Energy Exascale Earth System Model (E3SM), which +used those components.

+sea surface temperature +
+

Installation for users

+

MPAS-Analysis is available as an anaconda package via the conda-forge channel:

+
conda config --add channels conda-forge
+conda create -n mpas-analysis mpas-analysis
+conda activate mpas-analysis
+
+
+
+
+

Installation for developers

+
+
To use the latest version for developers, get the code from:

https://github.com/MPAS-Dev/MPAS-Analysis

+
+
+

Then, you will need to set up a conda environment from the MPAS-Analysis repo. +This environment will include the required dependencies for the development +branch from dev-spec.txt and will install the mpas_analysis package into +the conda environment in a way that points directly to the local branch (so +changes you make to the code directly affect mpas_analysis in the conda +environment):

+
conda config --add channels conda-forge
+conda config --set channel_priority strict
+conda create -y -n mpas_dev --file dev-spec.txt
+conda activate mpas_dev
+python -m pip install -e .
+
+
+

If you are developing another conda package at the same time (this is common +for MPAS-Tools or geometric_features), you should first comment out the other +package in dev-spec.txt. Then, you can install both packages in the same +development environment, e.g.:

+
conda create -y -n mpas_dev --file tools/MPAS-Tools/conda_package/dev-spec.txt \
+    --file analysis/MPAS-Analysis/dev-spec.txt
+conda activate mpas_dev
+cd tools/MPAS-Tools/conda_package
+python -m pip install -e .
+cd ../../../analysis/MPAS-Analysis
+python -m pip install -e .
+
+
+

Obviously, the paths to the repos may be different in your local clones. With +the mpas_dev environment as defined above, you can make changes to both +mpas_tools and mpas-analysis packages in their respective branches, and +these changes will be reflected when refer to the packages or call their +respective entry points (command-line tools).

+
+
+

Download analysis input data

+

If you installed the mpas-analysis package, download the data that is +necessary for MPAS-Analysis by running:

+
download_analysis_data -o /path/to/mpas_analysis/diagnostics
+
+
+

where /path/to/mpas_analysis/diagnostics is the main folder that will contain +two subdirectories:

+
    +
  • mpas_analysis, which includes mapping and region mask files for +standard resolution MPAS meshes

  • +
  • observations, which includes the pre-processed observations listed in the +Observations table +and used to evaluate the model results

  • +
+

Once you have downloaded the analysis data, you will point to its location +(your equivalent of path/to/mpas_analysis/diagnostics above) in the config +option baseDirectory in the [diagnostics] section.

+
+
+

List Analysis

+

If you installed the mpas-analysis package, list the available analysis tasks +by running:

+
mpas_analysis --list
+
+
+

This lists all tasks and their tags. These can be used in the generate +command-line option or config option. See mpas_analysis/default.cfg +for more details.

+
+
+

Running the analysis

+
    +
  1. Create an empty config file (say myrun.cfg), copy example.cfg, +or copy one of the example files in the configs directory (if using a +git repo) or download one from the +example configs directory.

  2. +
  3. Either modify config options in your new file or copy and modify config +options from mpas_analysis/default.cfg (in a git repo) or directly +from GitHub: +default.cfg.

  4. +
  5. If you installed the mpas-analysis package, run: +mpas_analysis myrun.cfg. This will read the configuration +first from mpas_analysis/default.cfg and then replace that +configuration with any changes from myrun.cfg

  6. +
  7. If you want to run a subset of the analysis, you can either set the +generate option under [output] in your config file or use the +--generate flag on the command line. See the comments in +mpas_analysis/default.cfg for more details on this option.

    +

    Requirements for custom config files:

    +
  8. +
+
    +
  • At minimum you should set baseDirectory under [output] to the folder +where output is stored. NOTE this value should be a unique +directory for each run being analyzed. If multiple runs are analyzed in +the same directory, cached results from a previous analysis will not be +updated correctly.

  • +
  • Any options you copy into the config file must include the +appropriate section header (e.g. ‘[run]’ or ‘[output]’)

  • +
  • You do not need to copy all options from mpas_analysis/default.cfg. +This file will automatically be used for any options you do not include +in your custom config file.

  • +
  • You should not modify mpas_analysis/default.cfg directly.

  • +
+
+
+

List of MPAS output files that are needed by MPAS-Analysis:

+
    +
  • mpas-o files:

    +
      +
    • mpaso.hist.am.timeSeriesStatsMonthly.*.nc (Note: since OHC +anomalies are computed wrt the first year of the simulation, +if OHC diagnostics is activated, the analysis will need the +first full year of mpaso.hist.am.timeSeriesStatsMonthly.*.nc +files, no matter what [timeSeries]/startYear and +[timeSeries]/endYear are. This is especially important to know if +short term archiving is used in the run to analyze: in that case, set +[input]/runSubdirectory, [input]/oceanHistorySubdirectory and +[input]/seaIceHistorySubdirectory to the appropriate run and archive +directories and choose [timeSeries]/startYear and +[timeSeries]/endYear to include only data that have been short-term +archived).

    • +
    • mpaso.hist.am.meridionalHeatTransport.0001-03-01.nc (or any +hist.am.meridionalHeatTransport file)

    • +
    • mpaso.rst.0002-01-01_00000.nc (or any other mpas-o restart file)

    • +
    • streams.ocean

    • +
    • mpaso_in

    • +
    +
  • +
  • mpas-seaice files:

    +
      +
    • mpasseaice.hist.am.timeSeriesStatsMonthly.*.nc

    • +
    • mpasseaice.rst.0002-01-01_00000.nc (or any other mpas-seaice restart +file)

    • +
    • streams.seaice

    • +
    • mpassi_in

    • +
    +
  • +
+

Note: for older runs, mpas-seaice files will be named:

+
    +
  • mpascice.hist.am.timeSeriesStatsMonthly.*.nc

  • +
  • mpascice.rst.0002-01-01_00000.nc

  • +
  • streams.cice

  • +
  • mpas-cice_in +Also, for older runs mpaso_in will be named:

  • +
  • mpas-o_in

  • +
+
+
+

Purge Old Analysis

+

To purge old analysis (delete the whole output directory) before re-running +the analysis, add the --purge flag. If you installed mpas-analysis as +a package, run:

+
mpas_analysis --purge <config.file>
+
+
+

All of the subdirectories listed in output will be deleted along with the +climatology subdirectories in oceanObservations and seaIceObservations.

+

It is a good policy to use the purge flag for most changes to the config file, +for example, updating the start and/or end years of climatologies (and +sometimes time series), changing the resolution of a comparison grid, renaming +the run, changing the seasons over which climatologies are computed for a given +task, updating the code to the latest version.

+

Cases where it is reasonable not to purge would be, for example, changing +options that only affect plotting (color map, ticks, ranges, font sizes, etc.), +rerunning with a different set of tasks specified by the generate option +(though this will often cause climatologies to be re-computed with new +variables and may not save time compared with purging), generating only the +final website with --html_only, and re-running after the simulation has +progressed to extend time series (however, not recommended for changing the +bounds on climatologies, see above).

+
+
+

Running in parallel via a queueing system

+

If you are running from a git repo:

+
    +
  1. If you are running from a git repo, copy the appropriate job script file +from configs/<machine_name> to the root directory (or another directory +if preferred). The default script, configs/job_script.default.bash, is +appropriate for a laptop or desktop computer with multiple cores.

  2. +
  3. If using the mpas-analysis conda package, download the job script and/or +sample config file from the +example configs directory.

  4. +
  5. Modify the number of parallel tasks, the run name, the output directory +and the path to the config file for the run.

  6. +
  7. Note: the number of parallel tasks can be anything between 1 and the +number of analysis tasks to be performed. If there are more tasks than +parallel tasks, later tasks will simply wait until earlier tasks have +finished.

  8. +
  9. Submit the job using the modified job script

  10. +
+

If a job script for your machine is not available, try modifying the default +job script in configs/job_script.default.bash or one of the job scripts for +another machine to fit your needs.

+
+
+

Customizing plots or creating new ones

+

There are three main ways to either customize the plots that MPAS-Analysis +already makes or create new ones:

+
    +
  1. customize the config file. Some features, such as colormaps and colorbar +limits for color shaded plot or depth ranges for ocean region time series, +can be customized: look at mpas_analysis/default.cfg for available +customization for each analysis task.

  2. +
  3. read in the analysis data computed by MPAS-Analysis into custom scripts. When +running MPAS-Analysis with the purpose of generating both climatologies +and time series, the following data sets are generated:

    +
      +
    • [baseDirectory]/clim/mpas/avg/unmasked_[mpasMeshName]: MPAS-Ocean +and MPAS-seaice climatologies on the native grid.

    • +
    • [baseDirectory]/clim/mpas/avg/remapped: remapped climatologies +for each chosen task (climatology files are stored in different +subdirectories according to the task name).

    • +
    • [baseDirectory]/clim/obs: observational climatologies.

    • +
    • [baseDirectory]/clim/mpas/avg/mocStreamfunction_years[startYear]-[endYear].nc.

    • +
    • [baseDirectory]/clim/mpas/avg/meridionalHeatTransport_years[startYear]-[endYear].nc.

    • +
    • [baseDirectory]/timeseries: various time series data. +Custom scripts can then utilize these datasets to generate custom plots.

    • +
    +
  4. +
  5. add a new analysis task to MPAS-Analysis (see below).

  6. +
+
+
+

Instructions for creating a new analysis task

+

Analysis tasks can be found in a directory corresponding to each component, +e.g., mpas_analysis/ocean for MPAS-Ocean. Shared functionality is contained +within the mpas_analysis/shared directory.

+
    +
  1. create a new task by copying mpas_analysis/analysis_task_template.py to +the appropriate folder (ocean, sea_ice, etc.) and modifying it as +described in the template. Take a look at +mpas_analysis/shared/analysis_task.py for additional guidance.

  2. +
  3. note, no changes need to be made to mpas_analysis/shared/analysis_task.py

  4. +
  5. modify mpas_analysis/default.cfg (and possibly any machine-specific +config files in configs/<machine>)

  6. +
  7. import new analysis task in mpas_analysis/<component>/__init__.py

  8. +
  9. add new analysis task to mpas_analysis/__main__.py under +build_analysis_list, see below.

  10. +
+

A new analysis task can be added with:

+
analyses.append(<component>.MyTask(config, myArg='argValue'))
+
+
+

This will add a new object of the MyTask class to a list of analysis tasks +created in build_analysis_list. Later on in run_analysis, it will first +go through the list to make sure each task needs to be generated +(by calling check_generate, which is defined in AnalysisTask), then, +will call setup_and_check on each task (to make sure the appropriate AM is +on and files are present), and will finally call run on each task that is +to be generated and is set up properly.

+
+
+

Generating Documentation

+

Create a development environment as described above in “Installation for +developers”. Then, to generate the sphinx documentation, run:

+
cd docs
+make clean
+make html
+
+
+

The results can be viewed in your web browser by opening:

+
_build/html/index.html
+
+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/seaice_obs_table.html b/1.11.0rc1/users_guide/seaice_obs_table.html new file mode 100644 index 000000000..f485307c8 --- /dev/null +++ b/1.11.0rc1/users_guide/seaice_obs_table.html @@ -0,0 +1,183 @@ + + + + + + + <no title> — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+ +
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapAntarcticMelt.html b/1.11.0rc1/users_guide/tasks/climatologyMapAntarcticMelt.html new file mode 100644 index 000000000..15a4be552 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapAntarcticMelt.html @@ -0,0 +1,264 @@ + + + + + + + climatologyMapAntarcticMelt — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapAntarcticMelt

+

An analysis task for comparison of Antarctic maps of melt rates against +observations from Paolo et al. (2023).

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, meltRate, landIceCavities
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapAntarcticMelt]
+## options related to plotting horizontally regridded maps of Antarctic
+## sub-ice-shelf melt rates against control model results and observations
+
+# comparison grid(s)
+# only the Antarctic really makes sense but lat-lon could technically work.
+comparisonGrids = ['antarctic']
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['JFM', 'JAS', 'ANN']
+
+# colormap for model/observations
+colormapNameResult = erdc_iceFire_H
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = symLog
+# A dictionary with keywords for the norm
+normArgsResult = {'linthresh': 1., 'linscale': 0.5, 'vmin': -100.,
+                  'vmax': 100.}
+colorbarTicksResult = [-100., -50., -20., -10., -5., -2., -1., 0., 1., 2., 5.,
+                       10., 20., 50., 100.]
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = symLog
+# A dictionary with keywords for the norm
+normArgsDifference = {'linthresh': 1., 'linscale': 0.5, 'vmin': -100.,
+                      'vmax': 100.}
+colorbarTicksDifference = [-100., -50., -20., -10., -5., -2., -1., 0., 1., 2.,
+                           5., 10., 20., 50., 100.]
+
+# make a tables of mean melt rates and melt fluxes for individual ice shelves?
+makeTables = False
+
+# If making tables, which ice shelves?  This is a list of ice shelves or
+# ['all'] for all 106 ice shelves and regions.
+iceShelvesInTable = []
+
+
+

The option makeTables determines if tables of mean melt rates, averaged +over each ice shelf or region, are computed. iceShelvesInTable is a list +of the ice shelf or region names to include in the table. A value of 'all' +indicates that all 106 ice shelves and regions will be included in the table. +The table is in csv format and can be found in the tables subdirectory.

+
+
For more details, see:
+
+
+
+
+

Observations

+

Antarctic melt rates and fluxes

+
+
+

Example Result

+../../_images/ant_melt.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapArgoSalinity.html b/1.11.0rc1/users_guide/tasks/climatologyMapArgoSalinity.html new file mode 100644 index 000000000..ddf488ee9 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapArgoSalinity.html @@ -0,0 +1,254 @@ + + + + + + + climatologyMapArgoSalinity — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapArgoSalinity

+

An analysis task for comparing salinity at various depths against +Argo observations.

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, argo, salinity, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapArgoSalinity]
+## options related to plotting climatology maps of Global
+## salinity at various levels against
+## reference model results and the Roemmich-Gilson Argo Climatology
+
+# comparison grid(s)
+comparisonGrids = ['latlon']
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN','JFM','JAS']
+
+# list of depths in meters (positive up) at which to analyze, 'top' for the
+# sea surface.  Argo data is only available above -2000 m
+depths = ['top', -25, -50, -100, -150, -200, -400, -600, -800, -1500]
+
+# colormap for model/observations
+colormapNameResult = haline
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 30, 'vmax': 39.0}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(34.2, 35.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.5, 'vmax': 0.5}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9)
+
+
+
+
For more details, see:
+
+
+

The option depths is a list of (approximate) depths at which to sample +the salinity field. A value of 'top' indicates the sea +surface (or the ice-ocean interface under ice shelves) while a value of +'bot' indicates the seafloor.

+
+
+

Observations

+

Roemmich-Gilson Argo Climatology

+
+
+

Example Result

+../../_images/clim_argo_salin.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapArgoTemperature.html b/1.11.0rc1/users_guide/tasks/climatologyMapArgoTemperature.html new file mode 100644 index 000000000..02781e87d --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapArgoTemperature.html @@ -0,0 +1,255 @@ + + + + + + + climatologyMapArgoTemperature — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapArgoTemperature

+

An analysis task for comparing potential temperature at various depths against +Argo observations.

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, argo, temperature, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapArgoTemperature]
+## options related to plotting climatology maps of Global
+## potential temperature at various levels against
+## reference model results and Roemmich-Gilson Argo Climatology
+
+# comparison grid(s)
+comparisonGrids = ['latlon']
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN','JAS','JFM']
+
+# list of depths in meters (positive up) at which to analyze, 'top' for the
+# sea surface.  Argo data is only available above -2000 m
+depths = ['top', -25, -50, -100, -150, -200, -400, -800, -1500]
+
+
+# colormap for model/observations
+colormapNameResult = RdYlBu_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -2., 'vmax': 30.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 2., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -2., 'vmax': 2.}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-2., 2., 9)
+
+
+
+
For more details, see:
+
+
+

The option depths is a list of (approximate) depths at which to sample +the potential temperature field. A value of 'top' indicates the sea +surface (or the ice-ocean interface under ice shelves) while a value of +'bot' indicates the seafloor.

+
+
+

Observations

+

Roemmich-Gilson Argo Climatology

+
+
+

Example Result

+../../_images/clim_argo_temp.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapBGC.html b/1.11.0rc1/users_guide/tasks/climatologyMapBGC.html new file mode 100644 index 000000000..359b43b5b --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapBGC.html @@ -0,0 +1,379 @@ + + + + + + + climatologyMapBGC — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapBGC

+

An analysis task for comparison of global maps of biogeochemistry (BGC) fields +against observations.

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, BGC, PO4, NO3, SiO3, CO2_gas_flux, pH_3D,
+      DIC, ALK, O2, pCO2surface, Chl
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapBGC]
+## options related to plotting climatology mpas of BGC
+
+# Variables to plot:
+# pH : 'pH_3D'
+# DIC : 'DIC'
+# ALK : 'ALK'
+# PO4 : 'PO4'
+# NO3 : 'NO3'
+# SiO3 : 'SiO3'
+# CO2 flux : 'CO2_gas_flux'
+# O2 : 'O2'
+variables = ['PO4', 'NO3', 'SiO3', 'CO2_gas_flux', 'pH_3D', 'DIC', 'ALK',
+             'O2', 'pCO2surface', 'Chl']
+
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep,
+# Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons = ['ANN', 'JFM', 'JAS']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon', 'antarctic']
+
+# Whether to compare to preindustrial observations that are available.
+preindustrial = False
+
+[climatologyMapBGC_PO4]
+# Colormap for climatology
+colormapNameResult = dense
+# Colormap for clim - obs difference
+colormapNameDifference = balance
+# linear vs. log scaling for climatology
+normTypeResult = linear
+# Colorbar bounds for climatology
+normArgsResult = {'vmin': 0, 'vmax': 2.5}
+# linear vs. log scaling for obs
+normTypeDifference = linear
+# Colorbar bounds for obs
+normArgsDifference = {'vmin': -1, 'vmax': 1}
+# BGC property units
+units = mmol m$^{-3}$
+# Prefix to variable name in MPAS-O output
+filePrefix = timeMonthly_avg_ecosysTracers_
+# Acronym/label for observational dataset
+observationsLabel = WOA
+# Acronym/label for gallery variable (can be different from MPAS name)
+galleryLabel = PO4
+
+[climatologyMapBGC_NO3]
+colormapNameResult = dense
+colormapNameDifference = balance
+normTypeResult = linear
+normArgsResult = {'vmin': 0, 'vmax': 35.0}
+normTypeDifference = linear
+normArgsDifference = {'vmin': -10, 'vmax': 10}
+units = mmol m$^{-3}$
+filePrefix = timeMonthly_avg_ecosysTracers_
+observationsLabel = WOA
+galleryLabel = NO3
+
+[climatologyMapBGC_SiO3]
+colormapNameResult = dense
+colormapNameDifference = balance
+normTypeResult = linear
+normArgsResult = {'vmin': 0, 'vmax': 80}
+normTypeDifference = linear
+normArgsDifference = {'vmin': -20, 'vmax': 20}
+units = mmol m$^{-3}$
+filePrefix = timeMonthly_avg_ecosysTracers_
+observationsLabel = WOA
+galleryLabel = SiO3
+
+[climatologyMapBGC_CO2_gas_flux]
+colormapNameResult = BrBG_r
+colormapNameDifference = balance
+normTypeResult = linear
+normArgsResult = {'vmin': -5, 'vmax': 5}
+normTypeDifference = linear
+normArgsDifference = {'vmin': -5, 'vmax': 5}
+units = mol m$^{-2}$ yr$^{-1}$
+filePrefix = timeMonthly_avg_
+observationsLabel = SOM-FFNv2016
+galleryLabel = CO2 Flux
+
+[climatologyMapBGC_O2]
+colormapNameResult = matter
+colormapNameDifference = balance
+normTypeResult = linear
+normArgsResult = {'vmin': 2, 'vmax': 8}
+normTypeDifference = linear
+normArgsDifference = {'vmin': -2, 'vmax': 2}
+units = mL/L
+filePrefix = timeMonthly_avg_ecosysTracers_
+observationsLabel = WOA
+galleryLabel = O2
+
+[climatologyMapBGC_pH_3D]
+colormapNameResult = PuBuGn_r
+colormapNameDifference = balance
+normTypeResult = linear
+normArgsResult = {'vmin': 8, 'vmax': 8.2}
+normTypeDifference = linear
+normArgsDifference = {'vmin': -0.2, 'vmax': 0.2}
+units =
+filePrefix = timeMonthly_avg_ecosys_diag_
+observationsLabel = GLODAPv2
+galleryLabel = pH
+
+[climatologyMapBGC_DIC]
+colormapNameResult = YlGnBu
+colormapNameDifference = balance
+normTypeResult = linear
+normArgsResult = {'vmin': 1900, 'vmax': 2300}
+normTypeDifference = linear
+normArgsDifference = {'vmin': -100, 'vmax': 100}
+units = mmol m$^{-3}$
+filePrefix = timeMonthly_avg_ecosysTracers_
+observationsLabel = GLODAPv2
+galleryLabel = DIC
+
+[climatologyMapBGC_ALK]
+colormapNameResult = PuBuGn
+colormapNameDifference = balance
+normTypeResult = linear
+normArgsResult = {'vmin': 2150, 'vmax': 2450}
+normTypeDifference = linear
+normArgsDifference = {'vmin': -100, 'vmax': 100}
+units = meq m$^{-3}$
+filePrefix = timeMonthly_avg_ecosysTracers_
+observationsLabel = GLODAPv2
+galleryLabel = Alkalinity
+
+[climatologyMapBGC_pCO2surface]
+colormapNameResult = viridis
+colormapNameDifference = balance
+normTypeResult = linear
+normArgsResult = {'vmin': 300, 'vmax': 450}
+normTypeDifference = linear
+normArgsDifference = {'vmin': -50, 'vmax': 50}
+units = $\mu$atm
+filePrefix = timeMonthly_avg_ecosys_diag_
+observationsLabel = SOM-FFNv2016
+galleryLabel = pCO2
+
+[climatologyMapBGC_Chl]
+colormapNameResult = viridis
+colormapNameDifference = balance
+normTypeResult = log
+normArgsResult = {'vmin': 0.01, 'vmax': 20}
+normTypeDifference = symLog
+normArgsDifference = {'linthresh': 0.1, 'vmin': -10, 'vmax': 10}
+units = mg m$^{-3}$
+filePrefix = timeMonthly_avg_ecosysTracers_
+observationsLabel = SeaWIFS
+galleryLabel = Chlorophyll
+
+
+
+
For more details, see:
+
+
+
+
+

Observations

+
+
+
+
+

Example Result

+../../_images/bgc.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapEKE.html b/1.11.0rc1/users_guide/tasks/climatologyMapEKE.html new file mode 100644 index 000000000..7d598b836 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapEKE.html @@ -0,0 +1,247 @@ + + + + + + + climatologyMapEKE — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapEKE

+

An analysis task for comparison of global maps of eddy kinetic energy (EKE) +against observations.

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, eke, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapEKE]
+## options related to plotting horizontally remapped climatologies of
+## eddy kinetic energy (EKE) against reference model results and observations
+
+# colormap for model/observations
+colormapNameResult = magma_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 0., 'vmax': 1000.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 2., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -300., 'vmax': 300.}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-2., 2., 9
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+
+
+
For more details, see:
+
+
+
+
+

Observations

+

Surface Current Variance from Drifter Data

+
+
+

Example Result

+../../_images/eke.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapIcebergConcSH.html b/1.11.0rc1/users_guide/tasks/climatologyMapIcebergConcSH.html new file mode 100644 index 000000000..d9056b6ae --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapIcebergConcSH.html @@ -0,0 +1,265 @@ + + + + + + + climatologyMapIcebergConcSH — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapIcebergConcSH

+

An analysis task for plotting maps of Antarctic iceberg concentration against +observations.

+

Component and Tags:

+
component: seaIce
+tags: icebergs, climatology, horizontalMap
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapIcebergConcSH]
+## options related to plotting horizontally remapped climatologies of
+## iceberg concentration against reference model results and observations
+## in the southern hemisphere (SH)
+
+# colormap for model/observations
+colormapNameResult = ice
+# the type of norm used in the colormap
+normTypeResult = log
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 1e-5, 'vmax': 1e-2}
+# specify the ticks
+colorbarTicksResult = [1e-5, 1e-4, 1e-3, 1e-2]
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = symLog
+# A dictionary with keywords for the norm
+normArgsDifference = {'linthresh': 1e-5, 'linscale': 1, 'vmin': -1e-2, 'vmax': 1e-2}
+
+# Months or seasons to plot (These should be left unchanged, since
+# observations are only available for these seasons)
+seasons =  ['ANN', 'DJF', 'JJA']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for iceberg plots in the southern hemisphere
+minimumLatitude = -50
+referenceLongitude = 180
+
+# a list of prefixes describing the sources of the observations to be used
+observationPrefixes = ['Altiberg']
+
+# arrange subplots vertically?
+vertical = False
+
+# observations files
+concentrationAltibergSH = Altiberg/Altiberg_1991-2017_20180308.nc
+
+
+

The option minimumLatitude determines what the northernmost latitude (in +degrees) included in the plot will be. The option referenceLongitude +defines which longitude will be at the bottom of the plot.

+

The option vertical = True can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image).

+
+
+For details on the remaining configuration options, see:
+
+
+
+
+

Observations

+ +
+
+

Example Result

+../../_images/berg_conc_sh.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapMLD.html b/1.11.0rc1/users_guide/tasks/climatologyMapMLD.html new file mode 100644 index 000000000..3bc92cc11 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapMLD.html @@ -0,0 +1,245 @@ + + + + + + + climatologyMapMLD — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapMLD

+

An analysis task for comparison of global maps of mixed layer depth (MLD) +against observations.

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, mld, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapMLD]
+## options related to plotting horizontally remapped climatologies of
+## mixed layer depth (MLD) against control model results and observations
+
+# colormap for model/observations
+colormapNameResult = viridis
+# whether the colormap is indexed or continuous
+colormapTypeResult = indexed
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.]
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = indexed
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150]
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['JFM', 'JAS', 'ANN']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+
+
+
For more details, see:
+
+
+
+
+

Observations

+

Argo Mixed Layer Depth (MLD) climatology

+
+
+

Example Result

+../../_images/mld.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapMLDMinMax.html b/1.11.0rc1/users_guide/tasks/climatologyMapMLDMinMax.html new file mode 100644 index 000000000..e4a766f67 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapMLDMinMax.html @@ -0,0 +1,422 @@ + + + + + + + climatologyMapMLDMinMax — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapMLDMinMax

+

An analysis task for plotting climatologies of monthly min and max of +mixed layer depth (MLD).

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, mld, publicObs, min, max
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapMLDMinMax]
+## options related to plotting horizontally remapped climatologies of
+## monthly min/max mixed layer depth (MLD), optionally against control model
+## results
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['JFM', 'JAS', 'ANN']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+
+[climatologyMapDensityMLDMinMax]
+## color maps for density-threshold MLD min/max plots in the same figure
+
+# colormap for model/observations
+colormapNameResult = viridis
+# whether the colormap is indexed or continuous
+colormapTypeResult = indexed
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.]
+
+# colormap for differences
+colormapNameDifference = viridis
+# whether the colormap is indexed or continuous
+colormapTypeDifference = indexed
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [0, 1, 5, 10, 20, 30, 50, 80, 110, 150]
+
+[climatologyMapDensityMLDMin]
+## color maps for density MLD min plots in separate figures for main vs. control
+
+# colormap for model/observations
+colormapNameResult = viridis
+# whether the colormap is indexed or continuous
+colormapTypeResult = indexed
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.]
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = indexed
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150]
+
+[climatologyMapDensityMLDMax]
+## color maps for density MLD max plots in separate figures for main vs. control
+
+# colormap for model/observations
+colormapNameResult = viridis
+# whether the colormap is indexed or continuous
+colormapTypeResult = indexed
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.]
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = indexed
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150]
+
+
+[climatologyMapTemperatureMLDMinMax]
+## color maps for temperature-threshold MLD min/max plots in the same figure
+
+# colormap for model/observations
+colormapNameResult = viridis
+# whether the colormap is indexed or continuous
+colormapTypeResult = indexed
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.]
+
+# colormap for differences
+colormapNameDifference = viridis
+# whether the colormap is indexed or continuous
+colormapTypeDifference = indexed
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [0, 1, 5, 10, 20, 30, 50, 80, 110, 150]
+
+[climatologyMapTemperatureMLDMin]
+## color maps for temperature MLD min plots in separate figures for main vs. control
+
+# colormap for model/observations
+colormapNameResult = viridis
+# whether the colormap is indexed or continuous
+colormapTypeResult = indexed
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.]
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = indexed
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150]
+
+[climatologyMapTemperatureMLDMax]
+## color maps for temperature MLD max plots in separate figures for main vs. control
+
+# colormap for model/observations
+colormapNameResult = viridis
+# whether the colormap is indexed or continuous
+colormapTypeResult = indexed
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.]
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = indexed
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150]
+
+
+[climatologyMapBLDMinMax]
+## color maps for boundary-layer depth min/max plots in the same figure
+
+# colormap for model/observations
+colormapNameResult = viridis
+# whether the colormap is indexed or continuous
+colormapTypeResult = indexed
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.]
+
+# colormap for differences
+colormapNameDifference = viridis
+# whether the colormap is indexed or continuous
+colormapTypeDifference = indexed
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [0, 1, 5, 10, 20, 30, 50, 80, 110, 150]
+
+[climatologyMapBLDMin]
+## color maps for BLD min plots in separate figures for main vs. control
+
+# colormap for model/observations
+colormapNameResult = viridis
+# whether the colormap is indexed or continuous
+colormapTypeResult = indexed
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.]
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = indexed
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150]
+
+[climatologyMapBLDMax]
+## color maps for BLD max plots in separate figures for main vs. control
+
+# colormap for model/observations
+colormapNameResult = viridis
+# whether the colormap is indexed or continuous
+colormapTypeResult = indexed
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 20, 50, 80, 120, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [10., 30., 50., 75., 100., 150., 200., 400., 800.]
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = indexed
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-150, -80, -30, -10, -1, 0, 1, 10, 30, 80, 150]
+
+
+

Config sections climatologyMapDensityMLD* apply to plots of the MLD based +on a density threshold, while climatologyMapTemperatureMLD* are those +related to MLD with a temperature threshold, and climatologyMapBLD* are for +controlling plots of the boundary-layer depth (BLD). Each of these has a +section ending in MinMax that controls plots when not comparing a main +run with a reference run, in which case the maximum field is in the top panel, +minimum in the middle panel and the difference in the bottom panel. When +comparing with a reference run, min. and max. plots are performed separately +along with their corresponding fields from the reference run and the difference +between main and reference. In these cases, a different color map may be +appropriate (e.g. with positive and negative values for the difference, rather +than just positive for max. minus min.).

+
+
For more details, see:
+
+
+
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapOHCAnomaly.html b/1.11.0rc1/users_guide/tasks/climatologyMapOHCAnomaly.html new file mode 100644 index 000000000..a91dcec09 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapOHCAnomaly.html @@ -0,0 +1,248 @@ + + + + + + + climatologyMapOHCAnomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapOHCAnomaly

+

An analysis task for plotting anomalies in ocean heat content (OHC) compared +with a reference year (typically the start of the simulation).

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, deltaOHC, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapOHCAnomaly]
+## options related to plotting horizontally remapped climatologies of
+## ocean heat content (OHC) against reference model results (if available)
+
+# colormap for model/observations
+colormapNameResult = BuOr
+# color indices into colormapName for filled contours
+colormapIndicesResult = numpy.array(numpy.linspace(0, 255, 38), int)
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = numpy.linspace(-12., 12., 37)
+# colormap levels/values for ticks (defaults to same as levels)
+colorbarTicksResult = numpy.linspace(-12., 12., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# color indices into colormapName for filled contours
+colormapIndicesDifference = numpy.array(numpy.linspace(0, 255, 10), int)
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = numpy.linspace(-2., 2., 11)
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep,
+# Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# A list of pairs of minimum and maximum depths (positive up, in meters) to
+# include in the vertical sums.  The default values are the equivalents of the
+# default ranges of the timeSeriesOHCAnomaly task, with a value of -10,000 m
+# intended to be well below the bottom of the ocean for all existing MPAS-O
+# meshes.
+depthRanges = [(0.0, -10000.0), (0.0, -700.0), (-700.0, -2000.0),
+               (-2000.0, -10000.0)]
+
+
+
+
For more details, see:
+
+
+

The option depthRanges is a list of pairs of upper and lower bounds in meters over +which to integrate the heat content.

+
+
+

Example Result

+../../_images/clim_ohc.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSSH.html b/1.11.0rc1/users_guide/tasks/climatologyMapSSH.html new file mode 100644 index 000000000..c88446241 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSSH.html @@ -0,0 +1,259 @@ + + + + + + + climatologyMapSSH — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSSH

+

An analysis task for comparison of global maps of sea surface height (SSH) +with zero mean against observations. The mean has been subtracted because the +initial sea level is somewhat arbitrary and will lead to a systematic offset +when compared with the observations.

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, ssh, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSSH]
+## options related to plotting horizontally remapped climatologies of
+## sea surface height (SSH) against reference model results and observations
+
+# colormap for model/observations
+colormapNameResult = Maximenko
+# color indices into colormapName for filled contours
+colormapIndicesResult = numpy.array(numpy.linspace(0, 255, 38), int)
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = numpy.arange(-240., 130., 10.)
+# colormap levels/values for ticks (defaults to same as levels)
+colorbarTicksResult = numpy.arange(-240., 160., 40.)
+
+# contour line levels
+contourLevelsResult = numpy.arange(-240., 130., 10.)
+# contour line thickness
+contourThicknessResult = 0.25
+# contour color
+contourColorResult = 0.25
+
+# colormap for differences
+colormapNameDifference = balance
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 23, 46, 70, 93, 116, 128, 128, 139, 162, 185,
+                             209, 232, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-100., -80., -60., -40., -20., -10., 0., 10.,
+                            20.,  40.,  60.,  80.,  100.]
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['JFM', 'JAS', 'ANN']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+
+
+
For more details, see:
+
+
+
+

Note

+

The color map and contours for these plots were inspired by Fig. 1 from +Maximenko et al. (2009)

+
+
+
+

Observations

+

AVISO Absolute Dynamic Topography

+
+
+

Example Result

+../../_images/ssh.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSSS.html b/1.11.0rc1/users_guide/tasks/climatologyMapSSS.html new file mode 100644 index 000000000..4724845cb --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSSS.html @@ -0,0 +1,242 @@ + + + + + + + climatologyMapSSS — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSSS

+

An analysis task for comparison of global maps of sea surface salinity (SSS) +against observations.

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, sss, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSSS]
+## options related to plotting horizontally remapped climatologies of
+## sea surface salinity (SSS) against reference model results and observations
+
+# colormap for model/observations
+colormapNameResult = haline
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [28, 29, 30, 31, 32, 33, 34, 35, 36, 38]
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198,
+                             227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-3, -2, -1, -0.5, -0.02, 0,  0.02, 0.5, 1, 2, 3]
+
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep,
+# Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['JFM', 'JAS', 'ANN']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+
+
+
For more details, see:
+
+
+
+
+

Observations

+

SSS from NASA Aquarius satellite

+
+
+

Example Result

+../../_images/sss.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSST.html b/1.11.0rc1/users_guide/tasks/climatologyMapSST.html new file mode 100644 index 000000000..e135e8022 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSST.html @@ -0,0 +1,255 @@ + + + + + + + climatologyMapSST — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSST

+

An analysis task for comparison of global maps of sea surface temperature (SST) +against observations.

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, sst, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSST]
+## options related to plotting horizontally remapped climatologies of
+## sea surface temperature (SST) against reference model results and
+## observations
+
+# colormap for model/observations
+colormapNameResult = RdYlBu_r
+# color indices into colormapName for filled contours
+colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32]
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198,
+                             227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-5, -3, -2, -1, -0.1, 0, 0.1, 1, 2, 3, 5]
+
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep,
+# Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['JFM', 'JAS', 'ANN']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# first and last year of SST observational climatology (preferably one of the
+# two ranges given below)
+# values for preindustrial
+obsStartYear = 1870
+obsEndYear = 1900
+# alternative values for present day
+#obsStartYear = 1990
+#obsEndYear = 2011
+
+
+

By default, a “preindustrial” climatology is computed for comparison with the +model results. For simulations covering a different time period, the range of +years (obsStartYear and obsEndYear) should be updated.

+
+
For details on the remaining configuration options, see:
+
+
+
+
+

Observations

+

SST merged Hadley Center-NOAA/OI data set

+
+
+

Example Result

+../../_images/sst.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSchmidtko.html b/1.11.0rc1/users_guide/tasks/climatologyMapSchmidtko.html new file mode 100644 index 000000000..3861dfaee --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSchmidtko.html @@ -0,0 +1,295 @@ + + + + + + + climatologyMapSchmidtko — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSchmidtko

+

An analysis task for comparison of Antarctic maps of seafloor potential +temperature, salinity and potential density against observations from +Schmidtko et al. (2014).

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, temperature, salinity, potentialDensity
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSchmidtko]
+## options related to plotting climatology maps of Antarctic fields at the
+## seafloor and comparing them against data from Schmidtko et al. (2014)
+
+# comparison grid(s)
+# only the Antarctic really makes sense but lat-lon could technically work.
+comparisonGrids = ['antarctic']
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN','JFM','JAS']
+
+[climatologyMapSchmidtkoTemperature]
+## options related to plotting climatology maps of potential temperature at the
+## seafloor and comparing them against data from Schmidtko et al. (2014)
+
+# colormap for model/observations
+colormapNameResult = RdYlBu_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -2., 'vmax': 2.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 2., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -2., 'vmax': 2.}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-2., 2., 9)
+
+[climatologyMapSchmidtkoSalinity]
+## options related to plotting climatology maps of salinity at the
+## seafloor and comparing them against data from Schmidtko et al. (2014)
+
+# colormap for model/observations
+colormapNameResult = haline
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 33.8, 'vmax': 35.0}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(33.8, 35.0, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.5, 'vmax': 0.5}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9)
+
+[climatologyMapSchmidtkoPotentialDensity]
+## options related to plotting climatology maps of potential density at the
+## seafloor and comparing them against data from Schmidtko et al. (2014)
+
+# colormap for model/observations
+colormapNameResult = Spectral_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 1026.5, 'vmax': 1028.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(1026.5, 1028., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.3, 'vmax': 0.3}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-0.3, 0.3, 9)
+
+
+
+
For more details, see:
+
+
+
+
+

Observations

+

Antarctic Seafloor Temperature and Salinity

+
+
+

Example Result

+../../_images/schmidtko_temp.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceConcNH.html b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceConcNH.html new file mode 100644 index 000000000..741fffc4c --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceConcNH.html @@ -0,0 +1,278 @@ + + + + + + + climatologyMapSeaIceConcNH — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSeaIceConcNH

+

An analysis task for plotting maps of Arctic sea ice concentration against +observations.

+

Component and Tags:

+
component: seaIce
+tags: climatology, horizontalMap, seaIceConc, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSeaIceConcNH]
+## options related to plotting horizontally remapped climatologies of
+## sea ice concentration against reference model results and observations
+## in the northern hemisphere (NH)
+
+# colormap for model/observations
+colormapNameResult = ice
+# color indices into colormapName for filled contours
+colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1]
+
+# colormap for differences
+colormapNameDifference = balance
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 32, 64, 96, 112, 128, 128, 144, 160, 192,
+                             224, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-1., -0.8, -0.6, -0.4, -0.2, -0.1, 0, 0.1, 0.2,
+                            0.4, 0.6, 0.8, 1.]
+
+# Months or seasons to plot (These should be left unchanged, since
+# observations are only available for these seasons)
+seasons =  ['JFM', 'JAS']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for sea ice plots in the northern hemisphere
+minimumLatitude = 50
+referenceLongitude = 0
+
+# a list of prefixes describing the sources of the observations to be used
+observationPrefixes = ['NASATeam', 'Bootstrap']
+
+# arrange subplots vertically?
+vertical = False
+
+# observations files
+concentrationNASATeamNH_JFM = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_NH_jfm.interp0.5x0.5_20180710.nc
+concentrationNASATeamNH_JAS = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_NH_jas.interp0.5x0.5_20180710.nc
+concentrationBootstrapNH_JFM = SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_NH_jfm.interp0.5x0.5_20180710.nc
+concentrationBootstrapNH_JAS = SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_NH_jas.interp0.5x0.5_20180710.nc
+
+
+

The option minimumLatitude determines what the southernmost latitude (in +degrees) included in the plot will be. The option referenceLongitude +defines which longitude will be at the bottom of the plot.

+

By default, plots are produced comparing modeled sea ice concentration against +observations produced with both the NASATeam and Bootstrap +algorithms. By altering observationPrefixes, you can select only one +(or none) of these.

+

The option vertical = True can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image).

+

The ability to modify observations files pointed to by +concentrationNASATeamNH_JFM, concentrationNASATeamNH_JAS, +concentrationBootstrapNH_JFM and concentrationBootstrapNH_JAS is +provided for debugging purposes and these options +should typically remain unchanged.

+
+
+For details on the remaining configuration options, see:
+
+
+
+
+

Observations

+ +
+
+

Example Result

+../../_images/ice_conc_nh.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceConcSH.html b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceConcSH.html new file mode 100644 index 000000000..fe0e54bda --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceConcSH.html @@ -0,0 +1,278 @@ + + + + + + + climatologyMapSeaIceConcSH — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSeaIceConcSH

+

An analysis task for plotting maps of Antarctic sea ice concentration against +observations.

+

Component and Tags:

+
component: seaIce
+tags: climatology, horizontalMap, seaIceConc, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSeaIceConcSH]
+## options related to plotting horizontally remapped climatologies of
+## sea ice concentration against reference model results and observations
+## in the southern hemisphere (SH)
+
+# colormap for model/observations
+colormapNameResult = ice
+# color indices into colormapName for filled contours
+colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [0.15, 0.3, 0.5, 0.7, 0.8, 0.85, 0.9, 0.95, 1]
+
+# colormap for differences
+colormapNameDifference = balance
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 32, 64, 96, 112, 128, 128, 144, 160, 192,
+                             224, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-1., -0.8, -0.6, -0.4, -0.2, -0.1, 0, 0.1, 0.2,
+                            0.4, 0.6, 0.8, 1.]
+
+# Months or seasons to plot (These should be left unchanged, since
+# observations are only available for these seasons)
+seasons =  ['DJF', 'JJA']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for sea ice plots in the southern hemisphere
+minimumLatitude = -50
+referenceLongitude = 180
+
+# a list of prefixes describing the sources of the observations to be used
+observationPrefixes = ['NASATeam', 'Bootstrap']
+
+# arrange subplots vertically?
+vertical = False
+
+# observations files
+concentrationNASATeamSH_DJF = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_SH_djf.interp0.5x0.5_20180710.nc
+concentrationNASATeamSH_JJA = SSMI/NASATeam_NSIDC0051/SSMI_NASATeam_gridded_concentration_SH_jja.interp0.5x0.5_20180710.nc
+concentrationBootstrapSH_DJF = SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_SH_djf.interp0.5x0.5_20180710.nc
+concentrationBootstrapSH_JJA = SSMI/Bootstrap_NSIDC0079/SSMI_Bootstrap_gridded_concentration_SH_jja.interp0.5x0.5_20180710.nc
+
+
+

The option minimumLatitude determines what the northernmost latitude (in +degrees) included in the plot will be. The option referenceLongitude +defines which longitude will be at the bottom of the plot.

+

By default, plots are produced comparing modeled sea ice concentration against +observations produced with both the NASATeam and Bootstrap +algorithms. By altering observationPrefixes, you can select only one +(or none) of these.

+

The option vertical = True can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image).

+

The ability to modify observations files pointed to by +concentrationNASATeamSH_DJF, concentrationNASATeamSH_JJA, +concentrationBootstrapSH_DJF and concentrationBootstrapSH_JJA is +provided for debugging purposes and these options +should typically remain unchanged.

+
+
+For details on the remaining configuration options, see:
+
+
+
+
+

Observations

+ +
+
+

Example Result

+../../_images/ice_conc_sh.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceMeltingNH.html b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceMeltingNH.html new file mode 100644 index 000000000..100be04e2 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceMeltingNH.html @@ -0,0 +1,256 @@ + + + + + + + climatologyMapSeaIceMeltingNH — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSeaIceMeltingNH

+

An analysis task for plotting maps of Arctic sea ice melting.

+

Component and Tags:

+
component: seaIce
+tags: climatology, horizontalMap, seaIceMelting, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSeaIceMeltingNH]
+# options related to plotting horizontally remapped climatologies of
+# sea ice melting against control model results and observations
+# in the northern hemisphere (NH)
+
+# colormap for model/observations
+colormapNameResult = amp
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 0., 'vmax': 5.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 2., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -3., 'vmax': 3.}
+# place the ticks automatically by default
+colorbarTicksDifference = [-3, -2, -1, 0, 1, 2, 3]
+
+# Seasons for comparison
+seasons =  ['ANN', 'DJF', 'JJA']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for sea ice plots in the northern hemisphere
+minimumLatitude = 50
+referenceLongitude = 0
+
+# arrange subplots vertically?
+vertical = False
+
+
+

The option minimumLatitude determines what the southernmost latitude (in +degrees) included in the plot will be. The option referenceLongitude +defines which longitude will be at the bottom of the plot.

+

The option vertical = True can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image).

+
+
+For details on the remaining configuration options, see:
+
+
+
+
+

Example Result

+../../_images/ice_melting_nh.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceMeltingSH.html b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceMeltingSH.html new file mode 100644 index 000000000..ffacc1d22 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceMeltingSH.html @@ -0,0 +1,269 @@ + + + + + + + climatologyMapSeaIceMeltingSH — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSeaIceMeltingSH

+

An analysis task for plotting maps of Antarctic sea ice melting against +observations.

+

Component and Tags:

+
component: seaIce
+tags: climatology, horizontalMap, seaIceMelting, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSeaIceMeltingSH]
+# options related to plotting horizontally remapped climatologies of
+# sea ice melting against control model results and observations
+# in the southern hemisphere (SH)
+
+# colormap for model/observations
+colormapNameResult = amp
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 0., 'vmax': 5.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 2., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -3., 'vmax': 3.}
+# place the ticks automatically by default
+colorbarTicksDifference = [-3, -2, -1, 0, 1, 2, 3]
+
+# Seasons for comparison
+seasons =  ['ANN', 'DJF', 'JJA']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for sea ice plots in the southern hemisphere
+minimumLatitude = -50
+referenceLongitude = 180
+
+# arrange subplots vertically?
+vertical = False
+
+# observations files
+meltingSH = AnIceFlux/aniceflux_v01_clim_mean_1982-2008.nc
+
+
+

The option minimumLatitude determines what the northernmost latitude (in +degrees) included in the plot will be. The option referenceLongitude +defines which longitude will be at the bottom of the plot.

+

By default, plots are produced comparing modeled sea ice melting against +AnIceFlux (annual climatology only).

+

The option vertical = True can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image).

+
+
+For details on the remaining configuration options, see:
+
+
+
+
+

Observations

+ +
+
+

Example Result

+../../_images/ice_melting_sh.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceProductionNH.html b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceProductionNH.html new file mode 100644 index 000000000..4d540380d --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceProductionNH.html @@ -0,0 +1,256 @@ + + + + + + + climatologyMapSeaIceProductionNH — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSeaIceProductionNH

+

An analysis task for plotting maps of Arctic sea ice production.

+

Component and Tags:

+
component: seaIce
+tags: climatology, horizontalMap, seaIceProduction, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSeaIceProductionNH]
+# options related to plotting horizontally remapped climatologies of
+# sea ice production against control model results and observations
+# in the northern hemisphere (NH)
+
+# colormap for model/observations
+colormapNameResult = dense
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 0., 'vmax': 5.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 2., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -3., 'vmax': 3.}
+# place the ticks automatically by default
+colorbarTicksDifference = [-3, -2, -1, 0, 1, 2, 3]
+
+# Seasons for comparison
+seasons =  ['ANN', 'DJF', 'JJA']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for sea ice plots in the northern hemisphere
+minimumLatitude = 50
+referenceLongitude = 0
+
+# arrange subplots vertically?
+vertical = False
+
+
+

The option minimumLatitude determines what the southernmost latitude (in +degrees) included in the plot will be. The option referenceLongitude +defines which longitude will be at the bottom of the plot.

+

The option vertical = True can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image).

+
+
+For details on the remaining configuration options, see:
+
+
+
+
+

Example Result

+../../_images/ice_production_nh.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceProductionSH.html b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceProductionSH.html new file mode 100644 index 000000000..30ef390a4 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceProductionSH.html @@ -0,0 +1,269 @@ + + + + + + + climatologyMapSeaIceProductionSH — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSeaIceProductionSH

+

An analysis task for plotting maps of Antarctic sea ice production against +observations.

+

Component and Tags:

+
component: seaIce
+tags: climatology, horizontalMap, seaIceProduction, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSeaIceProductionSH]
+# options related to plotting horizontally remapped climatologies of
+# sea ice production against control model results and observations
+# in the southern hemisphere (SH)
+
+# colormap for model/observations
+colormapNameResult = dense
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 0., 'vmax': 5.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 2., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -3., 'vmax': 3.}
+# place the ticks automatically by default
+colorbarTicksDifference = [-3, -2, -1, 0, 1, 2, 3]
+
+# Seasons for comparison
+seasons =  ['ANN', 'DJF', 'JJA']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for sea ice plots in the southern hemisphere
+minimumLatitude = -50
+referenceLongitude = 180
+
+# arrange subplots vertically?
+vertical = False
+
+# observations files
+productionSH = AnIceFlux/aniceflux_v01_clim_mean_1982-2008.nc
+
+
+

The option minimumLatitude determines what the northernmost latitude (in +degrees) included in the plot will be. The option referenceLongitude +defines which longitude will be at the bottom of the plot.

+

By default, plots are produced comparing modeled sea ice production against +AnIceFlux (annual climatology only).

+

The option vertical = True can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image).

+
+
+For details on the remaining configuration options, see:
+
+
+
+
+

Observations

+ +
+
+

Example Result

+../../_images/ice_production_sh.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceThickNH.html b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceThickNH.html new file mode 100644 index 000000000..836ca8987 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceThickNH.html @@ -0,0 +1,268 @@ + + + + + + + climatologyMapSeaIceThickNH — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSeaIceThickNH

+

An analysis task for plotting maps of Arctic sea ice thickness against +observations.

+

Component and Tags:

+
component: seaIce
+tags: climatology, horizontalMap, seaIceThick, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSeaIceThickNH]
+## options related to plotting horizontally remapped climatologies of
+## sea ice thickness against reference model results and observations
+## in the northern hemisphere (NH)
+
+# colormap for model/observations
+colormapNameResult = ice
+# color indices into colormapName for filled contours
+colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [0, 0.25, 0.5, 1, 1.5, 2, 2.5, 3, 3.5]
+
+# colormap for differences
+colormapNameDifference = balance
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 32, 64, 96, 128, 128, 160, 192, 224, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-3., -2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5, 3.]
+
+# Months or seasons to plot (These should be left unchanged, since
+# observations are only available for these seasons)
+seasons =  ['FM', 'ON']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for sea ice plots in the northern hemisphere
+minimumLatitude = 50
+referenceLongitude = 0
+
+# a list of prefixes describing the sources of the observations to be used
+observationPrefixes = ['']
+
+# arrange subplots vertically?
+vertical = False
+
+# observations files
+thicknessNH_ON = ICESat/ICESat_gridded_mean_thickness_NH_on.interp0.5x0.5_20180710.nc
+thicknessNH_FM = ICESat/ICESat_gridded_mean_thickness_NH_fm.interp0.5x0.5_20180710.nc
+
+
+

The option minimumLatitude determines what the southernmost latitude (in +degrees) included in the plot will be. The option referenceLongitude +defines which longitude will be at the bottom of the plot.

+

The option observationPrefixes should be left as a list of the empty +string and is included for allowing easy code reuse with the +climatologyMapSeaIceConc* tasks.

+

The option vertical = True can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image).

+

The ability to modify observations files pointed to by thicknessNH_ON and +thicknessNH_FM is provided for debugging purposes and these options +should typically remain unchanged.

+
+
+For details on the remaining configuration options, see:
+
+
+
+
+

Observations

+

IceSat Ice Thickness

+
+
+

Example Result

+../../_images/ice_thick_nh.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceThickSH.html b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceThickSH.html new file mode 100644 index 000000000..e8600486b --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSeaIceThickSH.html @@ -0,0 +1,268 @@ + + + + + + + climatologyMapSeaIceThickSH — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSeaIceThickSH

+

An analysis task for plotting maps of Antarctic sea ice thickness against +observations.

+

Component and Tags:

+
component: seaIce
+tags: climatology, horizontalMap, seaIceThick, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSeaIceThickSH]
+## options related to plotting horizontally remapped climatologies of
+## sea ice thickness against reference model results and observations
+## in the southern hemisphere (SH)
+
+# colormap for model/observations
+colormapNameResult = ice
+# color indices into colormapName for filled contours
+colormapIndicesResult = [20, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsResult = [0, 0.2, 0.4, 0.6, 0.8, 1, 1.5, 2, 2.5]
+
+# colormap for differences
+colormapNameDifference = balance
+# color indices into colormapName for filled contours
+colormapIndicesDifference = [0, 32, 64, 96, 128, 128, 160, 192, 224, 255]
+# colormap levels/values for contour boundaries
+colorbarLevelsDifference = [-3., -2.5, -2, -0.5, -0.1, 0, 0.1, 0.5, 2, 2.5, 3.]
+
+# Months or seasons to plot (These should be left unchanged, since
+# observations are only available for these seasons)
+seasons =  ['FM', 'ON']
+
+# comparison grid(s) ('latlon', 'antarctic') on which to plot analysis
+comparisonGrids = ['latlon']
+
+# reference lat/lon for sea ice plots in the southern hemisphere
+minimumLatitude = -50
+referenceLongitude = 180
+
+# a list of prefixes describing the sources of the observations to be used
+observationPrefixes = ['']
+
+# arrange subplots vertically?
+vertical = False
+
+# observations files
+thicknessSH_ON = ICESat/ICESat_gridded_mean_thickness_SH_on.interp0.5x0.5_20180710.nc
+thicknessSH_FM = ICESat/ICESat_gridded_mean_thickness_SH_fm.interp0.5x0.5_20180710.nc
+
+
+

The option minimumLatitude determines what the northernmost latitude (in +degrees) included in the plot will be. The option referenceLongitude +defines which longitude will be at the bottom of the plot.

+

The option observationPrefixes should be left as a list of the empty +string and is included for allowing easy code reuse with the +climatologyMapSeaIceConc* tasks.

+

The option vertical = True can be used to plot 3 panels one above another +(resulting in a tall, thin image) rather than next to each other, the default +(resulting in a short, wide image).

+

The ability to modify observations files pointed to by thicknessSH_ON and +thicknessSH_FM is provided for debugging purposes and these options +should typically remain unchanged.

+
+
+For details on the remaining configuration options, see:
+
+
+
+
+

Observations

+

IceSat Ice Thickness

+
+
+

Example Result

+../../_images/ice_thick_sh.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapSose.html b/1.11.0rc1/users_guide/tasks/climatologyMapSose.html new file mode 100644 index 000000000..2aa96a1a5 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapSose.html @@ -0,0 +1,438 @@ + + + + + + + climatologyMapSose — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapSose

+

An analysis task for comparing fields at various depths against +results from the Southern Ocean State Estimate (SOSE).

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, sose, publicObs, temperature, salinity,
+      potentialDensity, mixedLayerDepth, zonalVelocity, meridionalVelocity,
+      velocityMagnitude
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapSose]
+## options related to plotting climatology maps of Antarctic fields at various
+## levels, including the sea floor against reference model results and SOSE
+## reanalysis data
+
+# comparison grid(s)
+# only the Antarctic really makes sense but lat-lon could technically work.
+comparisonGrids = ['antarctic']
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN','JFM','JAS']
+
+# list of depths in meters (positive up) at which to analyze, 'top' for the
+# sea surface, 'bot' for the sea floor
+depths = ['top', -200, -400, -600, -800, 'bot']
+
+# a list of fields to plot for each transect.  All supported fields are listed
+# below
+fieldList = ['temperature', 'salinity', 'potentialDensity', 'mixedLayerDepth',
+             'zonalVelocity', 'meridionalVelocity', 'velocityMagnitude']
+
+# set the suffix for files, e.g. if you want to use a different comparison
+# grid from the default
+fileSuffix = 6000.0x6000.0km_10.0km_Antarctic_stereo_20180710
+
+[climatologyMapSoseTemperature]
+## options related to plotting climatology maps of Antarctic
+## potential temperature at various levels, including the sea floor against
+## reference model results and SOSE reanalysis data
+
+# colormap for model/observations
+colormapNameResult = RdYlBu_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -2., 'vmax': 2.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 2., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -2., 'vmax': 2.}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-2., 2., 9)
+
+
+[climatologyMapSoseSalinity]
+## options related to plotting climatology maps of Antarctic
+## salinity at various levels, including the sea floor against
+## reference model results and SOSE reanalysis data
+
+# colormap for model/observations
+colormapNameResult = haline
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 33.8, 'vmax': 35.0}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(34.2, 35.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.5, 'vmax': 0.5}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9)
+
+
+[climatologyMapSosePotentialDensity]
+## options related to plotting climatology maps of Antarctic
+## potential density at various levels, including the sea floor against
+## reference model results and SOSE reanalysis data
+
+# colormap for model/observations
+colormapNameResult = Spectral_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 1026.5, 'vmax': 1028.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(1026., 1028., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.3, 'vmax': 0.3}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-0.3, 0.3, 9)
+
+
+[climatologyMapSoseMixedLayerDepth]
+## options related to plotting climatology maps of Antarctic
+## mixed layer depth against reference model results and SOSE reanalysis data
+
+# colormap for model/observations
+colormapNameResult = viridis
+# color indices into colormapName for filled contours
+# the type of norm used in the colormap
+normTypeResult = log
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 10., 'vmax': 300.}
+# specify the ticks
+colorbarTicksResult = [10, 20, 40, 60, 80, 100, 200, 300]
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = symLog
+# A dictionary with keywords for the norm
+normArgsDifference = {'linthresh': 10., 'linscale': 0.5, 'vmin': -200.,
+                      'vmax': 200.}
+colorbarTicksDifference = [-200., -100., -50., -20., -10., 0., 10., 20., 50., 100., 200.]
+
+
+[climatologyMapSoseZonalVelocity]
+## options related to plotting climatology maps of Antarctic
+## zonal velocity against reference model results and SOSE reanalysis data
+
+# colormap for model/observations
+colormapNameResult = delta
+# color indices into colormapName for filled contours
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9)
+
+
+[climatologyMapSoseMeridionalVelocity]
+## options related to plotting climatology maps of Antarctic
+## meridional velocity against reference model results and SOSE reanalysis data
+
+# colormap for model/observations
+colormapNameResult = delta
+# color indices into colormapName for filled contours
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9)
+
+
+[climatologyMapSoseVelocityMagnitude]
+## options related to plotting climatology maps of Antarctic
+## velocity magnitude against reference model results and SOSE reanalysis data
+
+# colormap for model/observations
+colormapNameResult = ice
+# color indices into colormapName for filled contours
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 0, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksResult = numpy.linspace(0, 0.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9)
+
+
+

There is a section for options that apply to all SOSE climatology maps and +one for each field supported for specifying the color map.

+

The option depths is a list of (approximate) depths at which to sample +each of the fields in fieldList. A value of 'top' indicates the sea +surface (or the ice-ocean interface under ice shelves) while a value of +'bot' indicates the seafloor.

+

The user can select only to plot a subset of the supported fields by adding +only the desired field names to fieldList. The default value shows the +list of all available fields.

+
+
+

SOSE data for the full Southern Ocean

+

The default SOSE data is on a 6,000 x 6,000 km grid focused on the Antarctic +continental shelf. An alternative data set is available on a 10,000 x 10,000 km +grid. These data can be downloaded directly from the data repository or +by calling:

+
download_analysis_data -o /output/path/for/diagnostics -d sose_10000km
+
+
+

where the output path is the baseDirectory given in the diagnostics +section of the config file (see Diagnostics). The data set is not +included in the default download because of its large size (~27 GB).

+

Climatologies can be plotted with these data by setting:

+
fileSuffix = 10000.0x10000.0km_10.0km_Antarctic_stereo_20190603
+
+
+
+
For more details, see:
+
+
+
+
+

State Estimate

+

2005-2010 climatology from the Southern Ocean State Estimate (SOSE)

+
+
+

Example Result

+../../_images/clim_sose_temp.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapWaves.html b/1.11.0rc1/users_guide/tasks/climatologyMapWaves.html new file mode 100644 index 000000000..15926641c --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapWaves.html @@ -0,0 +1,287 @@ + + + + + + + climatologyMapWaves — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapWaves

+

An analysis task for comparison of global maps of wave quantities +(significant wave height and peak period) against observations.

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, waves
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapWaves]
+## options related to plotting climatology maps of wave fields
+## ERA5 climatological data
+
+# comparison grid(s) on which to plot analysis
+comparisonGrids = ['latlon']
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN', 'JFM', 'JAS']
+
+# a list of fields to plot ('significantWaveHeight', 'peakWavePeriod')
+fieldList = ['significantWaveHeight', 'peakWavePeriod']
+
+era5ObsStartYear = 1959
+era5ObsEndYear = 2021
+sscciObsStartYear = 1991
+sscciObsEndYear = 2018
+
+[climatologyMapWavesSignificantWaveHeight]
+## options related to plotting climatology maps of significant wave height
+
+# colormap for model/observations
+colormapNameResult = viridis
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 0., 'vmax': 7.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 10., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -5., 'vmax': 5.}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-5., 5., 9)
+
+[climatologyMapWavesPeakWavePeriod]
+## options related to plotting climatology maps of peak wave frequency
+
+# colormap for model/observations
+colormapNameResult = plasma
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 0.0, 'vmax':15.0}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 10., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -5., 'vmax': 5.}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-5., 5., 9)
+
+
+
+
For more details, see:
+
+
+
+
+

Observations

+

Wave Reanalysis: ERA5 +Wave Satellite Altimeter Observations: ESA Sea State Climate Change Initiative

+
+
+

Example Result

+../../_images/swh.png +../../_images/peak_period.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/climatologyMapWoa.html b/1.11.0rc1/users_guide/tasks/climatologyMapWoa.html new file mode 100644 index 000000000..8128b13c2 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/climatologyMapWoa.html @@ -0,0 +1,296 @@ + + + + + + + climatologyMapWoa — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

climatologyMapWoa

+

An analysis task for comparing potential temperature and salinity +at various depths against WOA18 climatology.

+

Component and Tags:

+
component: ocean
+tags: climatology, horizontalMap, woa, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[climatologyMapWoa]
+## options related to plotting climatology maps of Temperature and Salinity
+## fields at various levels, including the sea floor against control model
+## results and WOA climatological data
+
+# comparison grid(s) ('latlon', 'antarctic', 'arctic') on which to plot analysis
+comparisonGrids = ['arctic']
+
+# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN','JFM','JAS']
+
+# list of depths in meters (positive up) at which to analyze, 'top' for the
+# sea surface. Note that, for seasons='ANN', depths can be as deep as 5500 m,
+# otherwise use a maximum depth of 1500 m.
+depths = ['top', -500, -1000]
+
+# a list of fields to plot.  All supported fields are listed
+# below
+fieldList = ['temperature', 'salinity']
+
+[climatologyMapWoaTemperature]
+## options related to plotting climatology maps of potential temperature
+## at various levels, including the sea floor against control model results
+## and WOA18 climatological data
+
+# colormap for model/observations
+colormapNameResult = RdYlBu_r
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -2., 'vmax': 2.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 2., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -2., 'vmax': 2.}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-2., 2., 9)
+
+[climatologyMapWoaSalinity]
+## options related to plotting climatology maps of salinity
+## at various levels, including the sea floor against control model results
+## and WOA18 climatological data
+
+# colormap for model/observations
+colormapNameResult = haline
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 33.8, 'vmax': 35.0}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(34.2, 35.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.5, 'vmax': 0.5}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9)
+
+
+
+
For more details, see:
+
+
+

The option depths is a list of (approximate) depths at which to sample +the temperature and salinity fields. A value of 'top' indicates the sea +surface. Note that, for the annual climatology, WOA18 data is available down +to 5500 m, whereas, for the seasonal or monthly climatologies, WOA18 data +is only available down to 1500 m.

+
+
+

Observations

+

WOA18 Temperature and Salinity Climatology

+
+
+

Example Result

+../../_images/clim_woa_temp.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/conservation.html b/1.11.0rc1/users_guide/tasks/conservation.html new file mode 100644 index 000000000..1f19d633e --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/conservation.html @@ -0,0 +1,239 @@ + + + + + + + conservation — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

conservation

+

An analysis task for plotting time series of conservation quantities (energy, +mass and salt fluxes, anomalies and errors) to assess model conservation.

+

Component and Tags:

+
component: ocean
+tags: timeseries, conservation
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[conservation]
+## options related to producing time series plots, often to compare against
+## observations and previous runs
+
+# the year from which to compute anomalies if not the start year of the
+# simulation.  This might be useful if a long spin-up cycle is performed and
+# only the anomaly over a later span of years is of interest.
+# anomalyRefYear = 249
+
+# start and end years for timeseries analysis. Use endYear = end to indicate
+# that the full range of the data should be used.  If errorOnMissing = False,
+# the start and end year will be clipped to the valid range.  Otherwise, out
+# of bounds values will lead to an error.  In a "control" config file used in
+# a "main vs. control" analysis run, the range of years must be valid and
+# cannot include "end" because the original data may not be available.
+startYear = 1
+endYear = end
+
+# Plot types to generate. The following plotTypes are supported:
+#    total_energy_flux : Total energy flux
+#    absolute_energy_error : Energy error
+#    ice_salt_flux : Salt flux related to land ice and sea ice
+#    absolute_salt_error : Salt conservation error
+#    total_mass_flux : Total mass flux
+#    total_mass_change : Total mass anomaly
+#    land_ice_mass_change : Mass anomaly due to land ice fluxes
+#    land_ice_ssh_change : SSH anomaly due to land ice fluxes
+#    land_ice_mass_flux_components : Mass fluxes from land ice
+plotTypes = 'land_ice_mass_flux_components'
+
+# line colors for the main, control and obs curves
+# see https://matplotlib.org/stable/gallery/color/named_colors.html
+# and https://matplotlib.org/stable/tutorials/colors/colors.html
+mainColor = black
+controlColor = tab:red
+
+
+
+
+

Example Result

+../../_images/total_mass_flux.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/geojsonTransects.html b/1.11.0rc1/users_guide/tasks/geojsonTransects.html new file mode 100644 index 000000000..5725193c8 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/geojsonTransects.html @@ -0,0 +1,439 @@ + + + + + + + geojsonTransects — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

geojsonTransects

+

An analysis task for interpolating MPAS fields to transects specified by files +in geojson format.

+

Component and Tags:

+
component: ocean
+tags: climatology, transect, geojson
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[geojsonTransects]
+## options related to plotting model transects at points determined by a
+## geojson file.  To generate your own geojson file, go to:
+##   http://geojson.io/
+## and draw one or more polylines, then add a name to each:
+##
+##      "properties": {
+##        "name": "My Favorite Name"
+##      },
+## and save the file as GeoJSON (say transects.geojson).  Finally, set the
+## option:
+##      geojsonFiles = ['transects.geojson']
+## (giving an absolute path if necessary) in your custom config file.
+
+# a list of geojson files containing lat/lon points in LineStrings to be
+# plotted.  If relative paths are given, they are relative to the current
+# working directory.  The files must be listed in quotes, e.g.:
+# geojsonFiles = ['file1.geojson', '/path/to/file2.geojson']
+geojsonFiles = []
+
+# a list of dictionaries for each field to plot.  The dictionary includes
+# prefix (used for file names, task names and sections) as well as the mpas
+# name of the field, units for colorbars and a the name as it should appear
+# in figure titles and captions.
+fields =
+    [{'prefix': 'temperature',
+      'mpas': 'timeMonthly_avg_activeTracers_temperature',
+      'units': r'$\degree$C',
+      'titleName': 'Potential Temperature'},
+     {'prefix': 'salinity',
+      'mpas': 'timeMonthly_avg_activeTracers_salinity',
+      'units': r'PSU',
+      'titleName': 'Salinity'},
+     {'prefix': 'potentialDensity',
+      'mpas': 'timeMonthly_avg_potentialDensity',
+      'units': r'kg m$^{-3}$',
+      'titleName': 'Potential Density'},
+     {'prefix': 'zonalVelocity',
+      'mpas': 'timeMonthly_avg_velocityZonal',
+      'units': r'm s$^{-1}$',
+      'titleName': 'Zonal Velocity'},
+     {'prefix': 'meridionalVelocity',
+      'mpas': 'timeMonthly_avg_velocityMeridional',
+      'units': r'm s$^{-1}$',
+      'titleName': 'Meridional Velocity'}]
+
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN']
+
+# The approximate horizontal resolution (in km) of each transect.  Latitude/
+# longitude between observation points will be subsampled at this interval.
+# Use 'obs' to indicate no subsampling.
+horizontalResolution = 5
+
+# The name of the vertical comparison grid.  Valid values are 'mpas' for the
+# MPAS vertical grid or any other name if the vertical grid is defined by
+# 'verticalComparisonGrid'
+#verticalComparisonGridName = mpas
+verticalComparisonGridName = uniform_0_to_4000m_at_10m
+
+# The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas'.
+# This should be numpy array of (typically negative) elevations (in m).
+verticalComparisonGrid = numpy.linspace(0, -4000, 401)
+
+# The minimum weight of a destination cell after remapping. Any cell with
+# weights lower than this threshold will therefore be masked out.
+renormalizationThreshold = 0.01
+
+
+[geojsonTemperatureTransects]
+## options related to plotting geojson transects of potential temperature
+
+# colormap for model/observations
+colormapNameResult = RdYlBu_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the SemiLogNorm
+normArgsResult = {'vmin': -2., 'vmax': 30.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(-2., 2., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the SemiLogNorm
+normArgsDifference = {'vmin': -2., 'vmax': 2.}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-2., 2., 9)
+
+
+[geojsonSalinityTransects]
+## options related to plotting geojson transects of salinity
+
+# colormap for model/observations
+colormapNameResult = haline
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the SemiLogNorm
+normArgsResult = {'vmin': 30, 'vmax': 39.0}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(34.2, 35.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the SemiLogNorm
+normArgsDifference = {'vmin': -0.5, 'vmax': 0.5}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9)
+
+
+[geojsonPotentialDensityTransects]
+## options related to plotting geojson transects of potential density
+
+# colormap for model/observations
+colormapNameResult = Spectral_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 1026.5, 'vmax': 1028.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(1026., 1028., 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.3, 'vmax': 0.3}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-0.3, 0.3, 9)
+
+
+[geojsonZonalVelocityTransects]
+## options related to plotting geojson transects of zonal velocity
+
+# colormap for model/observations
+colormapNameResult = delta
+# color indices into colormapName for filled contours
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9)
+
+
+[geojsonMeridionalVelocityTransects]
+## options related to plotting geojson transects of meridional velocity
+
+# colormap for model/observations
+colormapNameResult = delta
+# color indices into colormapName for filled contours
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9)
+
+
+
+
+

Geojson Files

+

This task takes a list of geojson file names (supplied as a python list of +str):

+
geojsonFiles = ['file1.geojson', '/path/to/file2.geojson']
+
+
+

Transects are specified by LineString objects in the files. Some examples +are provided in the MPAS geometric features repository. You can also +generate your own very easily: go to +geojson.io and draw one or more polylines, then add a name to each:

+
...
+"properties": {
+  "name": "My Favorite Name"
+},
+...
+
+
+

and save the file as GeoJSON (say transects.geojson). Finally, set the +option:

+
geojsonFiles = ['transects.geojson']
+
+
+

(giving an absolute path if necessary) in your custom config file.

+
+
+

Fields

+

Since there are no observations associated with geojson transects, you are +free to choose which MPAS fields you would like to plot. These fields are +provided as a python dictionary. The keys are names for the fields (anything +you would like to use as a prefix on files and subtask names, best if it does +not contain spaces). The values are python dictionaries. The values +associated with the mpas key are the names of the 3D fields where transects +are desired. The units entry indicates the units to display on the +colorbar. The titleName entry specifies the name of the field to include +in plot titles and captions.

+

Each field must have a corresponding section in the config file defining its +color maps. For example, temperature has an associated +[geojsonTemperatureTransect] section.

+
+
+

Other Options

+
+
For details on the remaining configuration options, see:
+
+
+
+
+

Example Result

+../../_images/geojson_transect.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/hovmollerOceanRegions.html b/1.11.0rc1/users_guide/tasks/hovmollerOceanRegions.html new file mode 100644 index 000000000..452828b0e --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/hovmollerOceanRegions.html @@ -0,0 +1,398 @@ + + + + + + + hovmollerOceanRegions — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

hovmollerOceanRegions

+

An analysis task for plotting depth profiles vs. time of temperature, salinity, +potential density, etc. averaged over regions.

+

Component and Tags:

+
component: ocean
+tags: profiles, climatology, hovmoller
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[hovmollerOceanRegions]
+## options related to plotting Hovmoller diagrams (depth vs. time plots) of
+## regional means of 3D MPAS fields
+
+# the names of region groups to plot, each with its own section below
+regionGroups = ['Ocean Basins']
+
+
+[hovmollerOceanBasins]
+## options related to plotting Hovmoller diagrams of ocean basins
+
+# a list of dictionaries for each field to plot.  The dictionary includes
+# prefix (used for file names, task names and sections) as well as the MPAS
+# name of the field, units for colorbars and a the name as it should appear
+# in figure titles and captions.
+fields =
+    [{'prefix': 'potentialTemperature',
+      'mpas': 'timeMonthly_avg_activeTracers_temperature',
+      'units': r'$\degree$C',
+      'titleName': 'Potential Temperature'},
+     {'prefix': 'salinity',
+      'mpas': 'timeMonthly_avg_activeTracers_salinity',
+      'units': r'PSU',
+      'titleName': 'Salinity'},
+     {'prefix': 'potentialDensity',
+      'mpas': 'timeMonthly_avg_potentialDensity',
+      'units': r'kg m$^{-3}$',
+      'titleName': 'Potential Density'}]
+
+# a list of region names from the region masks file to plot
+regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin",
+               "Arctic_Basin", "Southern_Ocean_Basin", "Mediterranean_Basin",
+               "Global Ocean", "Global Ocean 65N to 65S",
+               "Global Ocean 15S to 15N"]
+
+# whether to compute an anomaly with respect to the start of the time series
+computeAnomaly = False
+
+# Number of points over which to compute moving average(e.g., for monthly
+# output, movingAverageMonths=12 corresponds to a 12-month moving average
+# window)
+movingAverageMonths = 12
+
+
+[hovmollerOceanRegionsPotentialTemperature]
+## options related to plotting time series of temperature vs. depth in ocean
+## regions
+
+# colormap
+colormapNameResult = RdYlBu_r
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -2., 'vmax': 30.}
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -5., 'vmax': 5.}
+
+# contour line levels (use [] for automatic contour selection, 'none' for no
+# contour lines)
+contourLevels = 'none'
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+
+# yearStrideXTicks = 1
+
+# limits on depth, the full range by default
+# yLim = [-6000., 0.]
+
+
+[hovmollerOceanRegionsSalinity]
+## options related to plotting time series of salinity vs. depth in ocean
+## regions
+
+# colormap
+colormapNameResult = haline
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 30, 'vmax': 39.0}
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.5, 'vmax': 0.5}
+
+# contour line levels (use [] for automatic contour selection, 'none' for no
+# contour lines)
+contourLevels = 'none'
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+
+# yearStrideXTicks = 1
+
+# limits on depth, the full range by default
+# yLim = [-6000., 0.]
+
+
+[hovmollerOceanRegionsPotentialDensity]
+## options related to plotting time series of potential density vs. depth in
+## ocean regions
+
+# colormap
+colormapNameResult = Spectral_r
+# whether the colormap is indexed or continuous
+colormapTypeResult = continuous
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 1026.5, 'vmax': 1028.}
+
+
+# colormap for differences
+colormapNameDifference = balance
+# whether the colormap is indexed or continuous
+colormapTypeDifference = continuous
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.3, 'vmax': 0.3}
+
+# contour line levels (use [] for automatic contour selection, 'none' for no
+# contour lines)
+contourLevels = 'none'
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+
+# yearStrideXTicks = 1
+
+# limits on depth, the full range by default
+# yLim = [-6000., 0.]
+
+
+

The [hovmollerOceanRegions] section contains a list of regionGroups, +one or more of the Region Groups defined in +geometric_features.aggregation.get_aggregator_by_name().

+

For each region group, there is a corresponding section +[hovmoller<RegionGroup>], where <RegionGroup> is the name of the region +group with spaces removed. In this section, the fields dictionary is used +to specify a list of 3D MPAS fields to average and plot. The key prefix is +a convenient name appended to tasks and file names to describe the field. +mpas is the name of the field in MPAS timeSeriesStatsMonthly output +files. The units are the SI units of the field to include on the plot’s +color bar and titleName is the name of the field to use in its gallery name +and on the title of the plot.

+

regionNames is a list of regions from the full the region group or +regionNames = ['all'] to indicate that all regions should be used. For +the available regions, see +Aggregate Existing Features.

+

If computeAnomaly = True, the plots will be the anomaly with respect to the +beginning of the time series (averaged over movingAverageMonths months).

+

Each field has a [hovmollerOceanRegions<FieldName>] section, where +<FieldName> is the associated prefix but starting with a capital letter. +Each of these sections has a yLim option that can specify the desired depth +range. The default is the full range.

+
+
For more details on the remaining config options, see
+
+
+
+
+

Example Result

+../../_images/hovmoller_weddell.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/indexNino34.html b/1.11.0rc1/users_guide/tasks/indexNino34.html new file mode 100644 index 000000000..909883f1d --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/indexNino34.html @@ -0,0 +1,229 @@ + + + + + + + indexNino34 — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

indexNino34

+

An analysis task for plotting both time series and spectra of the El Niño +3.4 Climate Index against observations.

+

Component and Tags:

+
component: ocean
+tags: timeSeries, index, nino, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[indexNino34]
+## options related to plotting time series of the El Nino 3.4 index
+
+# Specified region for the Nino Index, 'nino3', 'nino4', or 'nino3.4'
+# The indexNino34 routine only accepts one value at a time
+region = nino3.4
+
+# Data source to read for comparison.  There are two options
+# 1 - ERS_SSTv4 -- Extended Reconstructed Sea Surface Temperature -- 1854 - 2016
+# 2 - HADIsst -- Hadley Center analysis -- 1870 - 2016
+observationData = HADIsst
+
+
+

While the default is the El Niño 3.4 region, you may select among +nino3, nino4, and nino3.4. See Regions for more +information about regions in MPAS-Analysis.

+

By default, observations are taken from the Hadley Center analysis. To use +the Extended Reconstructed Sea Surface Temperature (ERS SSTv4), set +observationData = ERS_SSTv4.

+
+
+

Observations

+ +
+
+

Example Result

+../../_images/nino.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/meridionalHeatTransport.html b/1.11.0rc1/users_guide/tasks/meridionalHeatTransport.html new file mode 100644 index 000000000..714136896 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/meridionalHeatTransport.html @@ -0,0 +1,254 @@ + + + + + + + meridionalHeatTransport — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

meridionalHeatTransport

+

An analysis task for plotting the zonal mean meridional heat transport (MHT) +against observations.

+

Component and Tags:

+
component: ocean
+tags: climatology, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[meridionalHeatTransport]
+## options related to plotting meridional heat transport (MHT)
+
+# Data source to read for comparison
+observationData = mht_TrenberthCaron.NoAtm_20180710.nc
+
+# colormap for model results
+colormapName = balance
+# colormap indices for contour color
+colormapIndices = [0, 28, 57, 85, 113, 142, 170, 198, 227, 255]
+# colorbar levels/values for contour boundaries
+colorbarLevels = [-0.1, -0.05, -0.02, -0.01, -0.005, 0, 0.005, 0.01, 0.02,
+                  0.05, 0.1]
+# contour line levels
+contourLevels = [-0.1, -0.01, 0.01, 0.1]
+
+# latitude and depth limits
+xLimGlobal = [-80, 80]
+depthLimGlobal = [-1000, 0]
+
+# compare to observations?
+compareWithObservations = True
+
+# plot the vertical section of MHT?
+plotVerticalSection = False
+
+# Number of points over which to compute moving average (with respect to
+# latitude) for MHT vertical section plots
+movingAveragePoints = 1
+
+
+

The option observationData allows the selection of the observational file +to compare with (available largely for debugging purposes).

+

By default, only a line plot of depth-integrated MHT is plotted. Optionally, +you can set plotVerticalSection = True to produce a plot of the MHT per +unit depth as a function of latitude and depth. Because this type of plot is +not commonly produced in ocean models and observations are not available for +comparison, it is disabled by default.

+

The options xLimGlobal and depthLimGlobal control the bounds of the +x axis of both plots and the y axis of the vertical section plot, respectively.

+
+
For more details on the remaining configuration options, see:
+
+
+
+
+

Observations

+

Meridional Heat Transport (MHT)

+
+
+

Example Result

+../../_images/mht.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/oceanHistogram.html b/1.11.0rc1/users_guide/tasks/oceanHistogram.html new file mode 100644 index 000000000..5490db925 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/oceanHistogram.html @@ -0,0 +1,296 @@ + + + + + + + oceanHistogram — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

oceanHistogram

+

An analysis task for plotting histograms of 2-d variables of climatologies +in ocean regions.

+

Component and Tags:

+
component: ocean
+tags: climatology, histogram, regions, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[oceanHistogram]
+## options related to plotting histograms of climatologies of 2-d ocean
+## variables
+
+# list of variables to plot
+variableList = []
+
+# list of observations to compare against
+obsList = ['AVISO']
+
+# list of ocean variables by which to weight variables in variable list
+weightList = []
+
+# list of regions to plot from the region list in [regions] below
+regionGroups = ['Ocean Basins']
+
+# list of region names within the region group listed above
+regionNames = []
+
+# Seasons to conduct analysis over
+# Note: start and end year will be inherited from climatology section
+seasons =  ['ANN']
+
+# Number of histogram bins
+bins = 40
+
+
+
+
+

Region Groups

+

A list of groups of regions, each of which will get its own gallery on +the resulting analysis web page. See Region Groups for +more information on the available region groups. By default, +the only region group is 'Ocean Basins'.

+
+
+

Region Names

+

The regionNames can be set to ['all'] to plot all of the +regions in the geojson file. In the case of Antarctic Regions, these +are:

+
["Southern Ocean", "Southern Ocean 60S", "Eastern Weddell Sea Shelf",
+ "Eastern Weddell Sea Deep", "Western Weddell Sea Shelf",
+ "Western Weddell Sea Deep", "Weddell Sea Shelf", "Weddell Sea Deep",
+ "Bellingshausen Sea Shelf", "Bellingshausen Sea Deep", "Amundsen Sea Shelf",
+ "Amundsen Sea Deep", "Eastern Ross Sea Shelf", "Eastern Ross Sea Deep",
+ "Western Ross Sea Shelf", "Western Ross Sea Deep",
+ "East Antarctic Seas Shelf", "East Antarctic Seas Deep"]
+
+
+

For Ocean Basins, they are:

+
["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", "Arctic_Basin",
+ "Southern_Ocean_Basin", "Mediterranean_Basin", "Global Ocean",
+ "Global Ocean 65N to 65S", "Global Ocean 15S to 15N"]
+
+
+
+
+

Variable List

+

The variableList option determines the variables to plot. Currently, only +2-d variables are supported but we envision extending to 3-d variables in the +near future. The variables are to be listed according to their registry names, with timeMonthly_avg_ prepended in the code.

+
+
+

Observations

+

The obsList option contains a list of the names of observational data sets. +Currently, “AVISO” is the only data set available, but we anticipate adding +several additional data sets in the near future.

+

AVISO Absolute Dynamic Topography

+
+
+

Weight List

+

The weightList option determines the variables to normalize by in plotting +the histogram. This is an optional feature. If used, weightList should be +of the same length as variableList. We currently only support for 2-d +variables that are the same size as their corresponding variables in +variableList and that are present in the restart file. For example, we use +areaCell to weight cell-centered variables such as ssh.

+
+
+

Bins

+

bins sets the number of bins to include in the histogram.

+
+
+

Other Config Options

+

Other config options include lineWidth, mainColor, obsColor, +controlColor, titleFontSize, defaultFontSize. For more details on +the remaining config options, see Seasons.

+
+
+

Example Result

+../../_images/histogram_ssh_aviso_atl.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/oceanRegionalProfiles.html b/1.11.0rc1/users_guide/tasks/oceanRegionalProfiles.html new file mode 100644 index 000000000..54e806e37 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/oceanRegionalProfiles.html @@ -0,0 +1,276 @@ + + + + + + + oceanRegionalProfiles — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

oceanRegionalProfiles

+

An analysis task for plotting depth profiles of temperature, salinity, +potential density, etc. averaged over regions and in time. The plots also +include a measure of variability (the standard deviation in space and time).

+

Component and Tags:

+
component: ocean
+tags: profiles, climatology
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[oceanRegionalProfiles]
+## options related to plotting vertical profiles of regional means (and
+## variability) of 3D MPAS fields
+
+# The name of a region group defining the region for each profile
+regionGroups = ['Ocean Basins']
+
+
+[profilesOceanBasins]
+## options related to plotting vertical profiles of ocean basins
+
+# a list of dictionaries for each field to plot.  The dictionary includes
+# prefix (used for file names, task names and sections) as well as the mpas
+# name of the field, units for colorbars and the name as it should appear
+# in figure titles and captions.
+fields =
+    [{'prefix': 'potentialTemperature',
+      'mpas': 'timeMonthly_avg_activeTracers_temperature',
+      'units': r'$\degree$C',
+      'titleName': 'Potential Temperature'},
+     {'prefix': 'salinity',
+      'mpas': 'timeMonthly_avg_activeTracers_salinity',
+      'units': r'PSU',
+      'titleName': 'Salinity'},
+     {'prefix': 'potentialDensity',
+      'mpas': 'timeMonthly_avg_potentialDensity',
+      'units': r'kg m$^{-3}$',
+      'titleName': 'Potential Density'}]
+
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['JFM', 'JAS', 'ANN']
+
+# minimum and maximum depth of profile plots, or empty for the full depth range
+depthRange = []
+
+# a list of region names from the region masks file to plot
+regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin",
+               "Arctic_Basin", "Southern_Ocean_Basin", "Mediterranean_Basin",
+               "Global Ocean", "Global Ocean 65N to 65S",
+               "Global Ocean 15S to 15N"]
+
+# web gallery options
+profileGalleryGroup = Ocean Basin Profiles
+
+
+

The [oceanRegionalProfiles] section contains a list of regionGroups, +one or more of the Region Groups defined in +geometric_features.aggregation.get_aggregator_by_name().

+

For each region group, there is a corresponding section +[profiles<RegionGroup>], where <RegionGroup> is the name of the region +group with spaces removed. In this section, the fields dictionary is used +to specify a list of 3D MPAS fields to average and plot. The key prefix is +a convenient name appended to tasks and file names to describe the field. +mpas is the name of the field in MPAS timeSeriesStatsMonthly output +files. The units are the SI units of the field to include on the plot’s x +axis and titleName is the name of the field to use in its gallery name and +on the x axis of the profile.

+

regionNames is a list of regions from the full the region group or +regionNames = ['all'] to indicate that all regions should be used. For +the available regions, see +Aggregate Existing Features.

+

A config option is available to specify the names of the gallery group for the +profiles (profileGalleryGroup).

+

A minimum and maximum depth for profiles can be specified with depthRange. +The default is the full range.

+
+
For more details on the remaining config options, see
+
+
+
+
+

Example Result

+../../_images/ocean_profile.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/regionalTSDiagrams.html b/1.11.0rc1/users_guide/tasks/regionalTSDiagrams.html new file mode 100644 index 000000000..345b9fd1f --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/regionalTSDiagrams.html @@ -0,0 +1,391 @@ + + + + + + + regionalTSDiagrams — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

regionalTSDiagrams

+

An analysis task for plotting T-S (potential temperature vs. salinity) +diagrams of climatologies in ocean regions.

+

Component and Tags:

+
component: ocean
+tags: climatology, regions, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[regionalTSDiagrams]
+## options related to plotting T/S diagrams of ocean regions
+
+# the names of region groups to plot, each with its own section below
+regionGroups = ['Ocean Basins']
+
+# a list of seasons to compute climatologies over
+seasons = ['ANN']
+
+# the number of threads dask is allowed to spawn for each process computing
+# the observational climatologies
+# Decrease this number if regionalTSDiagrams subtasks are running
+# out of available threads
+daskThreads = 8
+
+# the number of subprocesses that each observational climatology subtask gets
+# counted as occupying
+# Increase this number if regionalTSDiagrams subtasks are running
+# out of memory, and fewer tasks will be allowed to run at once
+subprocessCount = 4
+
+[TSDiagramsForAntarcticRegions]
+## options related to plotting T/S diagrams of Antarctic regions
+
+# list of regions to plot or ['all'] for all regions in the masks file.
+# See "regionNames" in the antarcticRegions masks file in
+# regionMaskSubdirectory for details.
+regionNames = []
+
+# diagram type, either 'volumetric' or 'scatter', depending on whether the
+# points should be binned so the plot shows the volume fraction in each bin,
+# or plotted as scattered points colored by their depth
+diagramType = volumetric
+
+# if diagramType == 'volumetric', the bin boundaries for T and S
+# if diagramType == 'scatter', only the min and max are important (and the
+#   bins are only used for computing neutral density contours)
+Tbins = numpy.linspace(-2.5, 4, 131)
+Sbins = numpy.linspace(33.8, 34.8, 201)
+
+# density contour interval
+rhoInterval = 0.1
+
+# The color map for depth or volume
+colormap = cmo.deep
+# The following is more appropriate if diagramType == 'scatter'
+# colormap = cmo.deep_r
+# the type of norm used in the colormap {'linear', 'log'}
+normType = log
+
+# The minimum and maximum depth over which fields are plotted, default is
+# to take these values from the geojson feature's zmin and zmax properties.
+# Add these to a custom config file to override the defaults.
+# zmin = -1000
+# zmax = -400
+
+# the minimum and maximum volume for the colorbar, default is the minimum and
+# maximum over the mode output
+# volMin = 3e9
+# volMax = 1e12
+
+# Observational data sets to compare against
+obs = ['SOSE', 'WOA18']
+
+[TSDiagramsForOceanBasins]
+## options related to plotting T/S diagrams of major ocean basins
+
+# list of regions to plot or ['all'] for all regions in the masks file.
+# See "regionNames" in the oceanBasins masks file in
+# regionMaskSubdirectory for details.
+regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin",
+               "Arctic_Basin", "Southern_Ocean_Basin", "Mediterranean_Basin",
+               "Global Ocean", "Global Ocean 65N to 65S",
+               "Global Ocean 15S to 15N"]
+
+# diagram type, either 'volumetric' or 'scatter', depending on whether the
+# points should be binned so the plot shows the volume fraction in each bin,
+# or plotted as scattered points colored by their depth
+diagramType = volumetric
+
+# if diagramType == 'volumetric', the bin boundaries for T and S
+# if diagramType == 'scatter', only the min and max are important (and the
+#   bins are only used for computing neutral density contours)
+Tbins = numpy.linspace(-2.5, 16, 926)
+Sbins = numpy.linspace(33.8, 35.8, 1001)
+
+# density contour interval
+rhoInterval = 0.2
+
+# The color map for depth or volume
+colormap = white_cmo_deep
+# The following is more appropriate if diagramType == 'scatter'
+# colormap = cmo.deep_r
+# the type of norm used in the colormap {'linear', 'log'}
+normType = log
+
+# The minimum and maximum depth over which fields are plotted.
+zmin = -1000
+zmax = 0
+
+# Observational data sets to compare against
+obs = ['WOA18']
+
+
+
+
+

Region Groups

+

A list of groups of regions, each of which will get its own gallery on +the resulting analysis web page. See Region Groups for +more information on the available region groups. For each region group, there +should be a corresponding TSDiagramsFor<RegionGroup> section of the config +file, with any spaces removed from the name of the region group. By default, +the only region group is 'Ocean Basins'.

+
+
+

Region Names

+

The regionNames can be set to ['all'] to plot all of the +regions in the geojson file. In the case of Antarctic Regions, these +are:

+
["Southern Ocean", "Southern Ocean 60S", "Eastern Weddell Sea Shelf",
+ "Eastern Weddell Sea Deep", "Western Weddell Sea Shelf",
+ "Western Weddell Sea Deep", "Weddell Sea Shelf", "Weddell Sea Deep",
+ "Bellingshausen Sea Shelf", "Bellingshausen Sea Deep", "Amundsen Sea Shelf",
+ "Amundsen Sea Deep", "Eastern Ross Sea Shelf", "Eastern Ross Sea Deep",
+ "Western Ross Sea Shelf", "Western Ross Sea Deep",
+ "East Antarctic Seas Shelf", "East Antarctic Seas Deep"]
+
+
+

For Ocean Basins, they are:

+
["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", "Arctic_Basin",
+ "Southern_Ocean_Basin", "Mediterranean_Basin", "Global Ocean",
+ "Global Ocean 65N to 65S", "Global Ocean 15S to 15N"]
+
+
+
+
+

Diagram Type

+

By default, a “volumetric” diagram is produced, where the volume of ocean water +in a region is binned in T and S space, and the volume of each bin is plotted. +This allows for more quantitative comparison with observation- and model-based +climatologies.

+

If diagramType is set to scatter, a point cloud is plotted instead, +shaded by depth. We anticipate that this will useful for plotting data sets +that are spatially scattered (e.g. the MEOP seal data), because each sample +does not correspond to a volume. This type of diagram may also be helpful for +comparison with publications that use scatter plots.

+

For volumetric diagrams, two norms for the continuous color map are supported, +linear and log, with log being the default. The range of the +colormap is between zero and the maximum bin volume for linear and +between the minimum non-zero bin volume and the max for log. The min/max +bin volume is taken from the first panel containing the “main” MPAS-Ocean plot, +and the same color map range is used for all panels.

+
+
+

Bins and Contour Intervals

+

If diagramType = volumetric, the number and spacing of potential +temperature and salinity bins are set in Tbins and Sbins. For +diagramType = scatter, Tbins and Sbins are used to make contour +plots of neutral density and are used to determine the bounds of the figure +in T/S space. rhoInterval is the interval between contours of neutral +density. zmin and zmax are the minimum and maximum depths (positive +up) of the ocean region. If available (e.g. for “Antarctic Regions”), the +default is to read them from the geojson file.

+
+
+

Observations

+

The obs option contains a list of the names of observational data sets. +Currently, “SOSE” and “WOA18” are the only data sets available, but we +anticipate adding several additional data sets in the near future.

+

2005-2010 climatology from SOSE the Southern Ocean State Estimate (SOSE) +WOA18 Temperature and Salinity Climatology

+
+
+

Other Config Options

+
+
For more details on the remaining config options, see
+
+
+
+
+

Example Result

+../../_images/so_ts_diag.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/soseTransects.html b/1.11.0rc1/users_guide/tasks/soseTransects.html new file mode 100644 index 000000000..1f5353cba --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/soseTransects.html @@ -0,0 +1,452 @@ + + + + + + + soseTransects — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

soseTransects

+

An analysis task for computing meridional transects of MPAS fields at evenly +spaced latitudes around Antarctica and comparing them with results from the +Southern Ocean State Estimate (SOSE).

+

Component and Tags:

+
component: ocean
+tags: climatology, transect, sose, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[soseTransects]
+## options related to plotting model vs. Southern Ocean State Estimate (SOSE)
+## transects.
+
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN']
+
+# The approximate horizontal resolution (in km) of each transect.  Latitude/
+# longitude between observation points will be subsampled at this interval.
+# Use 'obs' to indicate no subsampling.
+# horizontalResolution = obs
+horizontalResolution = 5
+
+# The name of the vertical comparison grid.  Valid values are 'mpas' for the
+# MPAS vertical grid, 'obs' to use the locations of observations or
+# any other name if the vertical grid is defined by 'verticalComparisonGrid'
+# verticalComparisonGridName = mpas
+# verticalComparisonGridName = obs
+verticalComparisonGridName = uniform_0_to_4000m_at_10m
+
+# The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas' or
+# 'obs'.  This should be numpy array of (typically negative) elevations (in m).
+verticalComparisonGrid = numpy.linspace(0, -4000, 401)
+
+# The minimum weight of a destination cell after remapping. Any cell with
+# weights lower than this threshold will therefore be masked out.
+renormalizationThreshold = 0.01
+
+# min and max latitude of transects
+minLat = -80
+maxLat = -60
+
+# longitudes of transects
+longitudes = numpy.linspace(0, 330, 12)
+
+# a list of fields to plot for each transect.  All supported fields are listed
+# below.  Note that 'velocityMagnitude' cannot be plotted without
+# 'zonalVelocity' and 'meridionalVelocity' because the components are needed
+# to compute the magnitude.
+fieldList = ['temperature', 'salinity', 'potentialDensity', 'zonalVelocity',
+             'meridionalVelocity', 'velocityMagnitude']
+
+
+[soseTemperatureTransects]
+## options related to plotting SOSE transects of potential temperature
+
+# colormap for model/observations
+colormapNameResult = RdYlBu_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 0.0, 'vmax': 6.0}
+# color indices into colormapName for filled contours
+#colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+#colorbarLevelsResult = [0, 0.25, 0.5, 0.75, 1, 2, 3, 4, 5, 6]
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(0.0, 6.0, 9)
+# contour line levels
+contourLevelsResult = np.arange(0.5, 6.0, 1.0)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -2.0, 'vmax': 2.0}
+# color indices into colormapName for filled contours
+#colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+#colorbarLevelsDifference = [-2, -1.5, -1.25, -1, -0.2, 0, 0.2, 1, 1.25, 1.5, 2]
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-2.0, 2.0, 9)
+# contour line levels
+contourLevelsDifference = np.arange(-1.8, 2.0, 0.4)
+
+
+[soseSalinityTransects]
+## options related to plotting SOSE transects of salinity
+
+# colormap for model/observations
+colormapNameResult = haline
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 34.0, 'vmax': 35.0}
+# color indices into colormapName for filled contours
+#colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+#colorbarLevelsResult = [34, 34.3, 34.5, 34.65, 34.675, 34.7, 34.725, 34.75, 34.8, 35]
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(34.0, 35.0, 9)
+# contour line levels
+contourLevelsResult = np.arange(34.1, 35.0, 0.1)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.5, 'vmax': 0.5}
+# color indices into colormapName for filled contours
+#colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+#colorbarLevelsDifference = [-0.5, -0.2, -0.1, -0.05, -0.02, 0,  0.02, 0.05, 0.1, 0.2, 0.5]
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-0.5, 0.5, 9)
+# contour line levels
+contourLevelsDifference = numpy.linspace(-0.6, 0.6, 9)
+
+
+[sosePotentialDensityTransects]
+## options related to plotting SOSE transects of potential density
+
+# colormap for model/observations
+colormapNameResult = Spectral_r
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 1026.5, 'vmax': 1028.}
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(1026., 1028., 9)
+contourLevelsResult = numpy.linspace(1026.5, 1028., 7)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.3, 'vmax': 0.3}
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-0.3, 0.3, 9)
+contourLevelsDifference = numpy.linspace(-0.3, 0.3, 9)
+
+
+[soseZonalVelocityTransects]
+## options related to plotting SOSE transects of zonal velocity
+
+# colormap for model/observations
+colormapNameResult = delta
+# color indices into colormapName for filled contours
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9)
+contourLevelsResult = numpy.linspace(-0.2, 0.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9)
+contourLevelsDifference = numpy.linspace(-0.2, 0.2, 9)
+
+
+[soseMeridionalVelocityTransects]
+## options related to plotting SOSE transects of meridional velocity
+
+# colormap for model/observations
+colormapNameResult = delta
+# color indices into colormapName for filled contours
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksResult = numpy.linspace(-0.2, 0.2, 9)
+contourLevelsResult = numpy.linspace(-0.2, 0.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9)
+contourLevelsDifference = numpy.linspace(-0.2, 0.2, 9)
+
+
+[soseVelocityMagnitudeTransects]
+## options related to plotting SOSE transects of velocity magnitude
+
+# colormap for model/observations
+colormapNameResult = ice
+# color indices into colormapName for filled contours
+# the type of norm used in the colormap
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 0, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksResult = numpy.linspace(0, 0.2, 9)
+contourLevelsResult = numpy.linspace(0, 0.2, 9)
+
+# colormap for differences
+colormapNameDifference = balance
+# the type of norm used in the colormap
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -0.2, 'vmax': 0.2}
+# determine the ticks automatically by default, uncomment to specify
+# colorbarTicksDifference = numpy.linspace(-0.2, 0.2, 9)
+contourLevelsDifference = numpy.linspace(-0.2, 0.2, 9)
+
+
+

The options minLat and maxLat determine the start and end of each +meridional transect (in degrees). The option longitudes is a list or +numpy array of longitudes for each transect, e.g.:

+
longitudes = numpy.linspace(0, 330, 12)
+
+
+

produces 12 transects spaced every 30°.

+
+

Note

+

SOSE’s domain extends only to 25°S, so maxLat should typically be +less than -25.

+
+

The user can select only to plot a subset of the supported fields by adding +only the desired field names to fieldList. The default value shows the +list of all available fields.

+
+

Note

+

Because velocityMagnitude is computed internally rather than being stored +as a separate field with the other SOSE output, it is not possible to plot +velocityMagnitude without also plotting zonalVelocity and +meridionalVelocity.

+
+

After the soseTransects section, there is a section for each supported field +specifying the information related to the colormap.

+
+
For details on remaining configuration options, see:
+
+
+
+
+

Observations

+

2005-2010 climatology from SOSE the Southern Ocean State Estimate (SOSE)

+
+
+

Example Result

+../../_images/sose_transect.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/streamfunctionMOC.html b/1.11.0rc1/users_guide/tasks/streamfunctionMOC.html new file mode 100644 index 000000000..6bfef2edb --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/streamfunctionMOC.html @@ -0,0 +1,288 @@ + + + + + + + streamfunctionMOC — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

streamfunctionMOC

+

An analysis task for plotting the zonal mean meridional overturning circulation +(MOC). Currently we support plots of the global and Atlantic MOCs but not of +other regions (notably the Indo-pacific). This task also produces a time +series plot of the maximum Atlantic MOC at 26.5°N.

+

Component and Tags:

+
component: ocean
+tags: streamfunction, moc, climatology, timeSeries, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[streamfunctionMOC]
+## options related to plotting the streamfunction of the meridional overturning
+## circulation (MOC)
+
+# Include the bolus velocity from the Gent-McWilliams parameterization?  This
+# only needs to be disabled if the simulation was run with GM turned on but
+# the MOC shouldn't include the bolus term
+includeBolus = True
+
+# Region names for basin MOC calculation.
+# Supported options are Atlantic and IndoPacific
+regionNames = ['Atlantic']
+
+# Size of latitude bins over which MOC streamfunction is integrated
+latBinSizeGlobal = 1.
+latBinSizeAtlantic = 0.5
+latBinSizeIndoPacific = 0.5
+
+# colormap for model results
+colormapNameGlobal = RdYlBu_r
+colormapNameAtlantic = RdYlBu_r
+colormapNameIndoPacific = RdYlBu_r
+# colormap indices for contour color
+colormapIndicesGlobal = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+colormapIndicesAtlantic = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+colormapIndicesIndoPacific = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colorbar levels/values for contour boundaries
+colorbarLevelsGlobal = [-20, -10, -5, -2, 2, 5, 10, 20, 30, 40]
+colorbarLevelsAtlantic = [-10, -5, -2, 0, 5, 8, 10, 14, 18, 22]
+colorbarLevelsIndoPacific = [-10, -5, -2, 0, 5, 8, 10, 14, 18, 22]
+# contour line levels
+contourLevelsGlobal = np.arange(-25.1, 35.1, 10)
+contourLevelsAtlantic = np.arange(-8, 20.1, 2)
+contourLevelsIndoPacific = np.arange(-8, 20.1, 2)
+
+# Number of points over which to compute moving average for
+# MOC timeseries (e.g., for monthly output, movingAveragePoints=12
+# corresponds to a 12-month moving average window)
+movingAveragePoints = 12
+
+# Number of points over which to compute moving average (with respect to
+# latitude) for climatological MOC plots
+movingAveragePointsClimatological = 1
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+
+# yearStrideXTicks = 1
+
+
+

For runs that use an eddy closure parameterization, tracer transport is +augmented with the Bolus velocity. By default, the Bolus velocity is included +in the MOC but this can be disabled with includeBolus = False. +(MPAS-Analysis will automatically recognize runs where the bolus velocity is +not used and will not include it in climatology computations or add it to the +MOC in these cases to save disk space and computation time.)

+

Currently, the only supported region is the Atlantic, so regionNames should +be left as it is. In the near future, we anticipate including the Indo-pacific +as well.

+

Each region has its own bin size (in degrees latitudes). Adjust these as +desired, e.g.:

+
latBinSizeGlobal = 0.5
+
+
+

for half-degree bins for the global MOC.

+

Each region supports its own colormap, with suffix Global, Atlantic +and (soon) IndoPacific. See Colormaps for more details.

+

The MOC time series is plotted with a 12-month moving average by default +(taking out noise and the annual cycle). For more details, see +Moving Average.

+

The latitude-depth MOC plots can also optionally be smoothed in latitude with +a moving average, e.g.:

+
movingAveragePointsClimatological = 4
+
+
+

will perform a 4-bin smoothing of the MOC.

+

For more details on the remaining config options, see +Time-Axis Tick Marks.

+
+
+

Example Result

+../../_images/moc.png +../../_images/time_series_moc.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/timeSeriesAntarcticMelt.html b/1.11.0rc1/users_guide/tasks/timeSeriesAntarcticMelt.html new file mode 100644 index 000000000..fc3002afb --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/timeSeriesAntarcticMelt.html @@ -0,0 +1,297 @@ + + + + + + + timeSeriesAntarcticMelt — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

timeSeriesAntarcticMelt

+

An analysis task for plotting time series of mean melt rates per ice shelf or +Antarctic region along with observations from Rignot et al. (2013), +Adusumilli et al. (2020), +and Paolo et al. (2023).

+

Component and Tags:

+
component: ocean
+tags: timeSeries, melt, landIceCavities
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[timeSeriesAntarcticMelt]
+## options related to plotting time series of melt below Antarctic ice shelves
+
+# list of ice shelves to plot or ['all'] for all 106 ice shelves and regions.
+# See "regionNames" in the ice shelf masks file in regionMaskSubdirectory for
+# details.
+iceShelvesToPlot = ['Antarctica', 'Peninsula', 'West Antarctica',
+                    'East Antarctica', 'Larsen_C', 'Filchner', 'Ronne',
+                    'Filchner-Ronne', 'Brunt_Stancomb', 'Fimbul', 'Amery',
+                    'Totten', 'Eastern_Ross', 'Western_Ross', 'Ross', 'Getz',
+                    'Thwaites', 'Pine_Island', 'Abbot', 'George_VI']
+
+# Number of months over which to compute moving average
+movingAverageMonths = 1
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+
+# yearStrideXTicks = 1
+
+
+
+
+

Ice Shelf and Region Names

+

The iceShelvesToPlot can be set to ['all'] to plot all 106 ice shelves +and regions (not recommended, as this is typically a waste of time). Otherwise, +it is a list consisting of any subset of the following ice shelves:

+
["Abbot", "Amery", "Atka", "Aviator", "Bach", "Baudouin", "Borchgrevink",
+ "Brahms", "Brunt_Stancomb", "Campbell", "Cheetham", "Conger_Glenzer",
+ "Cook", "Cosgrove", "Crosson", "Dennistoun", "Dibble", "Dotson",
+ "Drygalski", "Edward_VIII", "Ekstrom", "Ferrigno", "Filchner", "Fimbul",
+ "Fitzgerald", "Frost", "GeikieInlet", "George_VI", "Getz", "Gillet",
+ "Hamilton", "Hannan", "HarbordGlacier", "Helen", "Holmes", "HolmesWest",
+ "Hull", "Jelbart", "Land", "Larsen_B", "Larsen_C", "Larsen_D", "Larsen_E",
+ "Larsen_F", "Larsen_G", "Lazarev", "Lillie", "Mariner", "Matusevitch",
+ "Mendelssohn", "Mertz", "Moscow_University", "Moubray", "Mulebreen",
+ "Myers", "Nansen", "Nickerson", "Ninnis", "Nivl", "Noll", "Nordenskjold",
+ "Pine_Island", "PourquoiPas", "Prince_Harald", "Publications",  "Quar",
+ "Rayner_Thyer", "Rennick", "Richter", "Riiser-Larsen", "Ronne", "Western_Ross",
+ "Eastern_Ross", "Shackleton", "Shirase", "Slava", "SmithInlet", "Stange",
+ "Sulzberger", "Suvorov", "Swinburne", "Thwaites", "Tinker", "Totten",
+ "Tracy_Tremenchus", "Tucker", "Underwood", "Utsikkar", "Venable", "Verdi",
+ "Vigrid", "Vincennes", "Voyeykov", "West", "Wilkins", "Wilma_Robert_Downer",
+ "Withrow", "Wordie", "Wylde",  "Zubchatyy"]
+
+
+

or these regions made up of 2 or more ice shelves:

+
["Antarctica", "Peninsula", "West Antarctica", "East Antarctica", "Ross",
+ "Filchner-Ronne", "IMBIE1", "IMBIE2", "IMBIE3", "IMBIE4", "IMBIE5",
+ "IMBIE6", "IMBIE7", "IMBIE8", "IMBIE9",   "IMBIE10", "IMBIE11",  "IMBIE12",
+ "IMBIE13", "IMBIE14", "IMBIE15", "IMBIE16", "IMBIE17", "IMBIE18", "IMBIE19",
+ "IMBIE20", "IMBIE21", "IMBIE22", "IMBIE23", "IMBIE24", "IMBIE25", "IMBIE26",
+ "IMBIE27"]
+
+
+

The default list of ice shelves and regions was determined to highlight the +largest ice shelves and regions along with a representative sample of smaller +shelves from different regions.

+

Most of these regions are determined based on the centroid locations given +in the supplementary material of Rignot et al. (2013). Ice shelf regions +have been extended into grounded ice and onto the Antarctic continental shelf +based on which floating ice shelf is closest. The IMBIE Basins are defined +following Zwally et al. (2012).

+
+

Note

+

Time series are computed and stored in a NetCDF file for all 106 ice shelves +and regions even if only a subset are plotted, allowing you to plot +additional ice shelves externally or by re-running MPAS-Analysis. These +can be found in timeseries/iceShelfAggregatedFluxes.nc within your output +base directory.

+
+
+
+

Other Options

+ +
+
+

Observations

+ +
+
+

Example Result

+../../_images/melt_flux_east_ant.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/timeSeriesOHCAnomaly.html b/1.11.0rc1/users_guide/tasks/timeSeriesOHCAnomaly.html new file mode 100644 index 000000000..fddd316f4 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/timeSeriesOHCAnomaly.html @@ -0,0 +1,286 @@ + + + + + + + timeSeriesOHCAnomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

timeSeriesOHCAnomaly

+

An analysis task for plotting a Hovmoller plot (time and depth axes) and +depth-integrated time series of the anomaly in ocean heat content (OHC) +from a reference year (usually the first year of the simulation).

+

Component and Tags:

+
component: ocean
+tags: timeSeries, ohc, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[timeSeriesOHCAnomaly]
+## options related to plotting time series of ocean heat content (OHC)
+## anomalies from year 1
+
+# list of regions to plot from the region list in [regions] below
+regions = ['global']
+
+# approximate depths (m) separating plots of the upper, middle and lower ocean
+depths = [700, 2000]
+
+# preprocessed file prefix, with format OHC.<preprocessedRunName>.year*.nc
+preprocessedFilePrefix = OHC
+
+# prefix on preprocessed field name, with format ohc_<suffix> for suffixes
+# 'tot', '700m', '2000m', 'btm'
+preprocessedFieldPrefix = ohc
+
+# Number of points over which to compute moving average(e.g., for monthly
+# output, movingAveragePoints=12 corresponds to a 12-month moving average
+# window)
+movingAveragePoints = 12
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+# yearStrideXTicks = 1
+
+[hovmollerOHCAnomaly]
+## options related to time vs. depth Hovmoller plots of ocean heat content
+## (OHC) anomalies from year 1
+
+# Note: regions and moving average points are the same as for the time series
+# plot
+
+# colormap
+colormapName = balance
+# colormap indices for contour color
+colormapIndices = [0, 28, 57, 85, 113, 142, 170, 198, 227, 255]
+# colorbar levels/values for contour boundaries
+colorbarLevels = [-2.4, -0.8, -0.4, -0.2, 0, 0.2, 0.4, 0.8, 2.4]
+# contour line levels
+contourLevels = np.arange(-2.5, 2.6, 0.5)
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+# yearStrideXTicks = 1
+
+
+

For the depth-integrated time-series plot, the user may select the depths (in +meters) that separate the upper, middle and lower regions of the ocean, e.g.:

+
depths = [700, 2000]
+
+
+

indicates that OHC will be integrated from 0 to 700 m, 700 to 2000 m, +and 2000 m to the ocean floor (as well as from 0 to the ocean floor).

+

The OHC can be compared with results from a reference v0 simulation. If +preprocessedRunName in the [runs] section is not None, the +depth integrated time series will be read in with a file prefix given by +preprocessedFilePrefix and a field prefix given by +preprocessedFieldPrefix. Generally, these options should not be altered +except for debugging purposes.

+

Recently, a right-hand axis and an associated set of lines has been added to the +OHC anomaly time series. This axis and these lines show the equivalent +top-of-atmosphere energy flux (\(W/m^2\)) that the ocean heat anomaly would +induce.

+
+
For more details on other config options, see:
+
+
+
+
+

Example Result

+../../_images/time_series_ohc.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/timeSeriesOceanRegions.html b/1.11.0rc1/users_guide/tasks/timeSeriesOceanRegions.html new file mode 100644 index 000000000..bafde3bcb --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/timeSeriesOceanRegions.html @@ -0,0 +1,313 @@ + + + + + + + timeSeriesOceanRegions — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

timeSeriesOceanRegions

+

An analysis task for plotting time series of temperature, salinity, +potential density and other fields of interest averaged over ocean regions.

+

Component and Tags:

+
component: ocean
+tags: timeSeries, regions
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[timeSeriesOceanRegions]
+## options related to plotting time series of groups of ocean regions
+
+# the names of region groups to plot, each with its own section below
+regionGroups = ['Antarctic Regions']
+
+
+[timeSeriesAntarcticRegions]
+## options related to plotting time series of Antarctic regions
+
+# list of regions to plot or ['all'] for all regions in the masks file.
+# See "regionNames" in the antarcticRegions masks file in
+# regionMaskSubdirectory for details.
+regionNames = []
+
+# a list of variables to plot
+variables = [{'name': 'temperature',
+              'title': 'Temperature',
+              'units': r'$^\circ$C',
+              'mpas': 'timeMonthly_avg_activeTracers_temperature'},
+             {'name': 'salinity',
+              'title': 'Salinity',
+              'units': 'PSU',
+              'mpas': 'timeMonthly_avg_activeTracers_salinity'},
+             {'name': 'potentialDensity',
+              'title': 'Potential Density',
+              'units': 'kg m$^{-3}$',
+              'mpas': 'timeMonthly_avg_potentialDensity'},
+             {'name': 'mixedLayerDepth',
+              'title': 'Mixed Layer Depth',
+              'units': 'm',
+              'mpas': 'timeMonthly_avg_dThreshMLD'}]
+
+# The minimum and maximum depth over which fields are averaged, default is
+# to take these values from the geojson feature's zmin and zmax properties.
+# Add these to a custom config file to override the defaults.
+# zmin = -1000
+# zmax = -400
+
+# Observational data sets to compare against
+obs = ['SOSE', 'WOA18']
+
+
+
+
+

Region Groups

+

regionGroup is a list of region groups, each of which will get its own +gallery group on the resulting analysis webpage. See +Region Groups for more information on the available region +groups. For each region group, there should be a corresponding +timeSeries<RegionGroup> section of the config file, with any spaces removed +from the name of the region group. By default, the only region group for this +task is 'Antarctic Regions'.

+
+
+

Region Names

+

The regionNames can be set to ['all'] to plot all of the regions in the +region group. In the case of Antarctic Regions, these are:

+
["Southern Ocean", "Southern Ocean 60S", "Eastern Weddell Sea Shelf",
+ "Eastern Weddell Sea Deep", "Western Weddell Sea Shelf",
+ "Western Weddell Sea Deep", "Weddell Sea Shelf", "Weddell Sea Deep",
+ "Bellingshausen Sea Shelf", "Bellingshausen Sea Deep", "Amundsen Sea Shelf",
+ "Amundsen Sea Deep", "Eastern Ross Sea Shelf", "Eastern Ross Sea Deep",
+ "Western Ross Sea Shelf", "Western Ross Sea Deep",
+ "East Antarctic Seas Shelf", "East Antarctic Seas Deep"]
+
+
+
+
+

Variables

+

The variables list has a python dictionary for each variable to be plotted. +A separate gallery will be produced for each variable with a title given by +the "title" entry in the dictionary. The "units" entry is used for the +y-axis label of each plot. The "name" is the name of the variable in +the NetCDF files as well as the text appended to subtask names and file names. +It should contain no spaces. The "mpas" entry is the name of the +corresponding field in the MPAS-Ocean timeSeriesStatsMonthlyOutput files.

+
+
+

Depth Bounds

+

Some region groups such as Antarctic Regions define default depth bounds +(zmin and zmax) for each region. For Antarctic Regions, this was +done so regions on the continental shelf (ending in “Shelf”) would be averaged +over a different range (zmax = -200 m, zmin = -1000 m) than the regions +of the deeper ocean (ending in “Deep”, with zmax = -400 m, +zmin = -1000 m). The user can override these defaults by defining her own +zmin and zmax. Note that zmin is deeper and zmax is shallower +since they have negative values.

+
+
+

Other Config Options

+
+
For more details, see:
+
+
+
+
+

Observations

+

obs is a list of the observational data sets to plot as reference lines +(constant in time). Possible values are 'SOSE' and 'WOA18'. An empty +list can be provided if no observations should be plotted.

+

2005-2010 climatology from SOSE the Southern Ocean State Estimate (SOSE)

+

WOA18 Temperature and Salinity Climatology

+
+
+

Example Result

+../../_images/west_ross_shelf_temp.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/timeSeriesSST.html b/1.11.0rc1/users_guide/tasks/timeSeriesSST.html new file mode 100644 index 000000000..7a266e3b6 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/timeSeriesSST.html @@ -0,0 +1,233 @@ + + + + + + + timeSeriesSST — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

timeSeriesSST

+

An analysis task for plotting a time series of sea surface temperature (SST), +optionally against results from a preprocessed E3SM v0 run (see +Runs).

+

Component and Tags:

+
component: ocean
+tags: timeSeries, sst, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[timeSeriesSST]
+## options related to plotting time series of sea surface temperature (SST)
+
+# list of regions to plot from the region list in [regions] below
+regions = ['global']
+
+# Number of points over which to compute moving average (e.g., for monthly
+# output, movingAveragePoints=12 corresponds to a 12-month moving average
+# window)
+movingAveragePoints = 12
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+
+# yearStrideXTicks = 1
+
+
+
+
For more details on these config options, see:
+
+
+
+
+

Example Result

+../../_images/time_series_sst.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/timeSeriesSalinityAnomaly.html b/1.11.0rc1/users_guide/tasks/timeSeriesSalinityAnomaly.html new file mode 100644 index 000000000..f1e4cf705 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/timeSeriesSalinityAnomaly.html @@ -0,0 +1,243 @@ + + + + + + + timeSeriesSalinityAnomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

timeSeriesSalinityAnomaly

+

An analysis task for plotting a Hovmoller plot (time and depth axes) of the +anomaly in ocean salinity from a reference year (usually the first year of +the simulation).

+

Component and Tags:

+
component: ocean
+tags: timeSeries, salinity, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[hovmollerSalinityAnomaly]
+## options related to plotting time series of salinity vs. depth
+
+# list of regions to plot from the region list in [regions] below
+regions = ['global']
+
+# Number of points over which to compute moving average(e.g., for monthly
+# output, movingAveragePoints=12 corresponds to a 12-month moving average
+# window)
+movingAveragePoints = 12
+
+# colormap
+colormapName = balance
+# color indices into colormapName for filled contours
+colormapIndices = [0, 28, 57, 85, 113, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevels = [-0.1, -0.02, -0.003, -0.001, 0, 0.001, 0.003, 0.02, 0.1]
+# contour line levels
+contourLevels = np.arange(-0.1, 0.11, 0.02)
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+
+# yearStrideXTicks = 1
+
+
+
+
For more details, see:
+
+
+
+
+

Example Result

+../../_images/hovmoller_salin.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/timeSeriesSeaIceAreaVol.html b/1.11.0rc1/users_guide/tasks/timeSeriesSeaIceAreaVol.html new file mode 100644 index 000000000..37c60475c --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/timeSeriesSeaIceAreaVol.html @@ -0,0 +1,257 @@ + + + + + + + timeSeriesSeaIceAreaVol — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

timeSeriesSeaIceAreaVol

+

An analysis task for plotting time series of sea ice area and volume for both +the Arctic and Antarctic against observations.

+

Component and Tags:

+
component: seaIce
+tags: timeSeries, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[timeSeriesSeaIceAreaVol]
+## options related to plotting time series of sea ice area and volume
+
+# compare to observations?
+compareWithObservations = True
+# Number of points over which to compute moving average (e.g., for monthly
+# output, movingAveragePoints=12 corresponds to a 12-month moving average
+# window)
+movingAveragePoints = 1
+# title font properties
+titleFontSize = 18
+# plot on polar plot
+polarPlot = False
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+
+# yearStrideXTicks = 1
+
+# observations files
+areaNH = IceArea_timeseries/iceAreaNH_climo_20180710.nc
+areaSH = IceArea_timeseries/iceAreaSH_climo_20180710.nc
+volNH = PIOMAS/PIOMASvolume_monthly_climo_20180710.nc
+volSH = none
+
+
+

compareWithObservations can be set to False to disable comparison with +both sets of observations (see below).

+

The title font size can be customized with titleFontSize, given in points.

+

To produce polar plots (with time progressing clockwise around the origin and +sea ice area or volume the distance from the origin) in addition to the +typical time series with time on the x axis, set polarPlot = True.

+

The ability to modify observations files pointed to by areaNH, areaSH, +volNH and volSH is provided for debugging purposes and these options +should typically remain unchanged.

+
+
For details on the remaining config options, see:
+
+
+
+
+

Observations

+ +
+
+

Example Result

+../../_images/ice_area_nh.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/timeSeriesTemperatureAnomaly.html b/1.11.0rc1/users_guide/tasks/timeSeriesTemperatureAnomaly.html new file mode 100644 index 000000000..fa9b4d1c2 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/timeSeriesTemperatureAnomaly.html @@ -0,0 +1,243 @@ + + + + + + + timeSeriesTemperatureAnomaly — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

timeSeriesTemperatureAnomaly

+

An analysis task for plotting a Hovmoller plot (time and depth axes) of the +anomaly in ocean potential temperature from a reference year (usully the first +year of the simulation).

+

Component and Tags:

+
component: ocean
+tags: timeSeries, temperature, publicObs
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[hovmollerTemperatureAnomaly]
+## options related to plotting time series of potential temperature vs. depth
+
+# list of regions to plot from the region list in [regions] below
+regions = ['global']
+
+# Number of points over which to compute moving average(e.g., for monthly
+# output, movingAveragePoints=12 corresponds to a 12-month moving average
+# window)
+movingAveragePoints = 12
+
+# colormap
+colormapName = balance
+# color indices into colormapName for filled contours
+colormapIndices = [0, 28, 57, 85, 113, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+colorbarLevels = [-1, -0.5, -0.2, -0.05, 0, 0.05, 0.2, 0.5, 1]
+# contour line levels
+contourLevels = np.arange(-1.0, 1.26, 0.25)
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+
+# yearStrideXTicks = 1
+
+
+
+
For more details, see:
+
+
+
+
+

Example Result

+../../_images/hovmoller_temp.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/timeSeriesTransport.html b/1.11.0rc1/users_guide/tasks/timeSeriesTransport.html new file mode 100644 index 000000000..d26b87f9c --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/timeSeriesTransport.html @@ -0,0 +1,255 @@ + + + + + + + timeSeriesTransport — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

timeSeriesTransport

+

An analysis task for plotting time series of of temperature, salinity, +potential density and other fields of interest averaged over ocean regions.

+

Component and Tags:

+
component: ocean
+tags: timeSeries, transport
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[timeSeriesTransport]
+## options related to plotting time series of transport through transects
+
+# list of ocean transects from geometric_features to plot or ['all'] for all
+# available transects.
+transectsToPlot = ['Drake Passage', 'Tasmania-Ant', 'Africa-Ant', 'Antilles Inflow',
+                   'Mona Passage', 'Windward Passage', 'Florida-Cuba', 'Florida-Bahamas',
+                   'Indonesian Throughflow', 'Agulhas', 'Mozambique Channel', 'Bering Strait',
+                   'Lancaster Sound', 'Fram Strait', 'Nares Strait']
+
+# Number of months over which to compute moving average
+movingAverageMonths = 1
+
+# An optional first year for the tick marks on the x axis. Leave commented out
+# to start at the beginning of the time series.
+
+# firstYearXTicks = 1
+
+# An optional number of years between tick marks on the x axis.  Leave
+# commented out to determine the distance between ticks automatically.
+
+# yearStrideXTicks = 1
+
+
+
+
+

Transect Names

+

The transectsToPlot can be set to ['all'] to plot all of the transects +defined in the transportTransects transect group. These are:

+
["Africa-Ant",  "Agulhas", "Antarctic Peninsula", "Antilles Inflow",
+ "Baja CA blockage", "Baltic Sea Deepen", "Barents Sea Opening",
+ "Bering Strait", "Davis Strait", "Drake Passage", "English Channel Deepen",
+ "Florida-Bahamas", "Florida-Cuba", "Fram Strait", "Indonesian Throughflow",
+ "Ireland North Channel Deepen", "Japan Hokkaido blockage",
+ "Japan La Perouse Strait Deepen",  "Japan Tsugaru Strait Deepen",
+ "Japan blockage", "Lancaster Sound", "Mona Passage", "Mozambique Channel",
+ "Nares Strait", "Nares Strait Deepen", "Persian Gulf Deepen",
+ "Red Sea Deepen", "Sakhalin blockage", "Strait of Gibralter Deepen 1",
+  "Strait of Gibralter Deepen 2", "Tasmania-Ant", "White Sea",
+  "Windward Passage"]
+
+
+

Many of these are likely not of interest in most simulations, so a subset of +the most relevant transects has been chosen in the default configuration.

+
+
+

Other Options

+ +
+
+

Example Result

+../../_images/drake_passage_transport.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/users_guide/tasks/woceTransects.html b/1.11.0rc1/users_guide/tasks/woceTransects.html new file mode 100644 index 000000000..06fa7d279 --- /dev/null +++ b/1.11.0rc1/users_guide/tasks/woceTransects.html @@ -0,0 +1,315 @@ + + + + + + + woceTransects — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

woceTransects

+

An analysis task for interpolating MPAS fields to +World Ocean Circulation Experiment (WOCE) transects and comparing them with +ship-based observations.

+

Component and Tags:

+
component: ocean
+tags: climatology, transect, woce
+
+
+
+

Configuration Options

+

The following configuration options are available for this task:

+
[woceTransects]
+## options related to plotting model vs. World Ocean Circulation Experiment
+## (WOCE) transects.
+
+# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct,
+# Nov, Dec, JFM, AMJ, JAS, OND, ANN)
+seasons =  ['ANN']
+
+# The approximate horizontal resolution (in km) of each transect.  Latitude/
+# longitude between observation points will be subsampled at this interval.
+# Use 'obs' to indicate no subsampling.
+horizontalResolution = obs
+
+# The name of the vertical comparison grid.  Valid values are 'mpas' for the
+# MPAS vertical grid, 'obs' to use the locations of observations or
+# any other name if the vertical grid is defined by 'verticalComparisonGrid'
+# verticalComparisonGridName = obs
+verticalComparisonGridName = uniform_0_to_4000m_at_10m
+#verticalComparisonGridName = mpas
+
+# The vertical comparison grid if 'verticalComparisonGridName' is not 'mpas' or
+# 'obs'.  This should be numpy array of (typically negative) elevations (in m).
+verticalComparisonGrid = numpy.linspace(0, -4000, 401)
+
+# The minimum weight of a destination cell after remapping. Any cell with
+# weights lower than this threshold will therefore be masked out.
+renormalizationThreshold = 0.01
+
+
+[woceTemperatureTransects]
+## options related to plotting WOCE transects of potential temperature
+
+# colormap for model/observations
+colormapNameResult = RdYlBu_r
+# the type of norm used in the colormap (linear, log, or symLog)
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 0.0, 'vmax': 18.0}
+# color indices into colormapName for filled contours
+#colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+#colorbarLevelsResult = [0, 1, 2, 3, 4, 6, 8, 10, 14, 18]
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(0.0, 18.0, 9)
+# contour line levels
+contourLevelsResult = np.arange(1.0, 18.0, 2.0)
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# the type of norm used in the colormap (linear, log, or symLog)
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -2.0, 'vmax': 2.0}
+# color indices into colormapName for filled contours
+#colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+#colorbarLevelsDifference = [-2, -1.5, -1.25, -1, -0.2, 0, 0.2, 1, 1.25, 1.5, 2]
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-2.0, 2.0, 9)
+# contour line levels
+contourLevelsDifference = np.arange(-1.8, 2.0, 0.4)
+
+
+
+[woceSalinityTransects]
+## options related to plotting WOCE transects of salinity
+
+# colormap for model/observations
+colormapNameResult = BuOr
+# the type of norm used in the colormap (linear, log, or symLog)
+normTypeResult = linear
+# A dictionary with keywords for the norm
+normArgsResult = {'vmin': 33.0, 'vmax': 36.0}
+# color indices into colormapName for filled contours
+#colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255]
+# colormap levels/values for contour boundaries
+#colorbarLevelsResult = [33, 34, 34.25, 34.5, 34.6, 34.7, 34.8, 34.9, 35, 36]
+# place the ticks automatically by default
+# colorbarTicksResult = numpy.linspace(33.0, 36.0, 9)
+# contour line levels
+contourLevelsResult = np.arange(33.3, 36.0, 0.3)
+
+# colormap for differences
+colormapNameDifference = RdBu_r
+# the type of norm used in the colormap (linear, log, or symLog)
+normTypeDifference = linear
+# A dictionary with keywords for the norm
+normArgsDifference = {'vmin': -1.0, 'vmax': 1.0}
+# color indices into colormapName for filled contours
+#colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255]
+# colormap levels/values for contour boundaries
+#colorbarLevelsDifference = [-1, -0.5, -0.2, -0.05, -0.02, 0,  0.02, 0.05, 0.2, 0.5, 1]
+# place the ticks automatically by default
+# colorbarTicksDifference = numpy.linspace(-1.0, 1.0, 9)
+# contour line levels
+contourLevelsDifference = np.arange(-0.9, 1.0, 0.4)
+
+
+
+
For details on these configuration options, see:
+
+
+
+
+

Observations

+

WOCE sections

+
+
+

Example Result

+../../_images/woce_transect.png +
+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/1.11.0rc1/versions.html b/1.11.0rc1/versions.html new file mode 100644 index 000000000..b1ccfa3d7 --- /dev/null +++ b/1.11.0rc1/versions.html @@ -0,0 +1,195 @@ + + + + + + + Versions — MPAS-Analysis 1.11.0rc1 documentation + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Versions

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Documentation

On GitHub

stable

main

latest

develop

v1.2.6

1.2.6

v1.2.7

1.2.7

v1.2.8

1.2.8

v1.2.9

1.2.9

v1.3.0

1.3.0

v1.4.0

1.4.0

v1.5.0

1.5.0

v1.6.0

1.6.0

v1.6.1

1.6.1

v1.7.0

1.7.0

v1.7.1

1.7.1

v1.7.2

1.7.2

v1.8.0

1.8.0

v1.9.0

1.9.0

v1.10.0

1.10.0

+
+ + +
+
+
+ +
+ +
+

© Copyright This software is open source software available under the BSD-3license. Copyright (c) 2022 Triad National Security, LLC. All rights reserved. Copyright (c) 2018 Lawrence Livermore National Security, LLC. All rights reserved. Copyright (c) 2018 UT-Battelle, LLC. All rights reserved..

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file