From 662472a1af3f598f00224716240473d3e0756db9 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 10:29:42 -0600 Subject: [PATCH 01/32] Add GH workflow: "Pull Request Docs Check" --- .github/workflows/pr-docs-check.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 .github/workflows/pr-docs-check.yml diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml new file mode 100644 index 0000000000..e6784be7d0 --- /dev/null +++ b/.github/workflows/pr-docs-check.yml @@ -0,0 +1,12 @@ +name: "Pull Request Docs Check" +on: +- pull_request + +jobs: + docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: ammaraskar/sphinx-action@master + with: + docs-folder: "docs/" \ No newline at end of file From 0fa782dce392d0559642825d8d50a59d9e1d6ca8 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 10:33:00 -0600 Subject: [PATCH 02/32] Update docs-folder. --- .github/workflows/pr-docs-check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml index e6784be7d0..cff03951b9 100644 --- a/.github/workflows/pr-docs-check.yml +++ b/.github/workflows/pr-docs-check.yml @@ -9,4 +9,4 @@ jobs: - uses: actions/checkout@v1 - uses: ammaraskar/sphinx-action@master with: - docs-folder: "docs/" \ No newline at end of file + docs-folder: "doc/source" \ No newline at end of file From 003cd2026607560f14be290c55ff09cc808c6b06 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 10:36:52 -0600 Subject: [PATCH 03/32] Add requirements.txt. --- doc/source/requirements.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 doc/source/requirements.txt diff --git a/doc/source/requirements.txt b/doc/source/requirements.txt new file mode 100644 index 0000000000..d9d1ef1c27 --- /dev/null +++ b/doc/source/requirements.txt @@ -0,0 +1,2 @@ +sphinx>=6.1.3 +docutils>=0.19 \ No newline at end of file From a40ec60d93004c5d525510f150748b91d33928ce Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 12:54:42 -0600 Subject: [PATCH 04/32] Add pre-build-command and build-command. --- .github/workflows/pr-docs-check.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml index cff03951b9..ab5033b7c7 100644 --- a/.github/workflows/pr-docs-check.yml +++ b/.github/workflows/pr-docs-check.yml @@ -9,4 +9,6 @@ jobs: - uses: actions/checkout@v1 - uses: ammaraskar/sphinx-action@master with: - docs-folder: "doc/source" \ No newline at end of file + docs-folder: "doc/source" + pre-build-command: "git lfs install && git lfs pull --exclude='' --include=''" + build-command: "sphinx-build -b html . _build" From 349e0a1f31615c4e495991c1b18c4158327b5880 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 12:56:32 -0600 Subject: [PATCH 05/32] Add sphinx_rtd_theme to requirements. --- doc/source/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/source/requirements.txt b/doc/source/requirements.txt index d9d1ef1c27..65a7364dea 100644 --- a/doc/source/requirements.txt +++ b/doc/source/requirements.txt @@ -1,2 +1,3 @@ sphinx>=6.1.3 -docutils>=0.19 \ No newline at end of file +docutils>=0.19 +sphinx_rtd_theme From e5caf1706fa75282eac8ae49bb6135c0d38f4def Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 13:03:30 -0600 Subject: [PATCH 06/32] Make sphinx throw nitpicky errors instead of warning. 
--- .github/workflows/pr-docs-check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml index ab5033b7c7..b0093f6cdb 100644 --- a/.github/workflows/pr-docs-check.yml +++ b/.github/workflows/pr-docs-check.yml @@ -11,4 +11,4 @@ jobs: with: docs-folder: "doc/source" pre-build-command: "git lfs install && git lfs pull --exclude='' --include=''" - build-command: "sphinx-build -b html . _build" + build-command: "sphinx-build -W --keep-going -n -b html . _build" From ffe094754799ae9a524a6ca663231eb6b358220e Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 14:00:39 -0600 Subject: [PATCH 07/32] Remove docs references to PTCLM. --- doc/source/users_guide/index.rst | 1 - .../users_guide/overview/getting-help.rst | 3 - .../users_guide/overview/introduction.rst | 4 +- .../running-PTCLM/adding-ptclm-site-data.rst | 88 ------------ .../users_guide/running-PTCLM/index.rst | 20 --- .../running-PTCLM/introduction-to-ptclm.rst | 135 ------------------ .../running-PTCLM/ptclm-examples.rst | 33 ----- .../users_guide/running-PTCLM/using-ptclm.rst | 114 --------------- ...point-and-regional-grid-configurations.rst | 9 +- doc/source/users_guide/testing/testing.rst | 11 -- .../observational-sites-datasets.rst | 4 +- 11 files changed, 4 insertions(+), 418 deletions(-) delete mode 100644 doc/source/users_guide/running-PTCLM/adding-ptclm-site-data.rst delete mode 100644 doc/source/users_guide/running-PTCLM/index.rst delete mode 100644 doc/source/users_guide/running-PTCLM/introduction-to-ptclm.rst delete mode 100644 doc/source/users_guide/running-PTCLM/ptclm-examples.rst delete mode 100644 doc/source/users_guide/running-PTCLM/using-ptclm.rst diff --git a/doc/source/users_guide/index.rst b/doc/source/users_guide/index.rst index 75a2949bec..8c675c79db 100644 --- a/doc/source/users_guide/index.rst +++ b/doc/source/users_guide/index.rst @@ -24,7 +24,6 @@ adding-new-resolutions/index.rst running-special-cases/index.rst running-single-points/index.rst - running-PTCLM/index.rst trouble-shooting/index.rst testing/index.rst diff --git a/doc/source/users_guide/overview/getting-help.rst b/doc/source/users_guide/overview/getting-help.rst index 74765d1ad0..7c33071db9 100644 --- a/doc/source/users_guide/overview/getting-help.rst +++ b/doc/source/users_guide/overview/getting-help.rst @@ -146,9 +146,6 @@ MOSART PFT Plant Function Type (PFT). A type of vegetation that CLM parameterizes. -PTCLM - PoinT CLM (PTCLM) a python script that operates on top of CLM for |version| to run single point simulations for CLM. - ROF River runOff Model to route flow of surface water over land out to the ocean. |cesmrelease| has two components options for this the new model MOSART and previous model RTM. diff --git a/doc/source/users_guide/overview/introduction.rst b/doc/source/users_guide/overview/introduction.rst index bc7c1fd82c..e02fa3e422 100644 --- a/doc/source/users_guide/overview/introduction.rst +++ b/doc/source/users_guide/overview/introduction.rst @@ -60,9 +60,7 @@ As a followup to the tools chapter, :ref:`adding-new-resolutions-section` tells In :ref:`running-special-cases-section`, again for the expert user, we give details on how to do some particularly difficult special cases. For example, we give the protocol for spinning up the |version|-BGC and CLMCN models as well as CLM with dynamic vegetation active (CNDV). We give instructions to do a spinup case from a previous case with Coupler history output for atmospheric forcing. 
We also give instructions on running both the prognostic crop and irrigation models. Lastly we tell the user how to use the DATM model to send historical CO2 data to CLM. -:ref:`running-single-points` outlines how to do single-point or regional simulations using |version|. This is useful to either compare |version| simulations with point observational stations, such as tower sites (which might include your own atmospheric forcing), or to do quick simulations with CLM for example to test a new parameterization. There are several different ways given on how to perform single-point simulations which range from simple PTS_MODE to more complex where you create all your own datasets, tying into :ref:`using-clm-tools-section` and also :ref:`adding-new-resolutions-section` to add the files into the build-namelist XML database. The PTCLM python script to run single-point simulations was added back in for this release (but it has bugs that don't allow it to work out of the box). CLM4 in CESM1.0.5 has a fully working versions of PTCLM. - -Need :ref:`running-PTCLM` blurb... +:ref:`running-single-points` outlines how to do single-point or regional simulations using |version|. This is useful to either compare |version| simulations with point observational stations, such as tower sites (which might include your own atmospheric forcing), or to do quick simulations with CLM for example to test a new parameterization. There are several different ways given on how to perform single-point simulations which range from simple PTS_MODE to more complex where you create all your own datasets, tying into :ref:`using-clm-tools-section` and also :ref:`adding-new-resolutions-section` to add the files into the build-namelist XML database. :ref:`troubleshooting-index` gives some guidance on trouble-shooting problems when using |version|. It doesn't cover all possible problems with CLM, but gives you some guidelines for things that can be done for some common problems. diff --git a/doc/source/users_guide/running-PTCLM/adding-ptclm-site-data.rst b/doc/source/users_guide/running-PTCLM/adding-ptclm-site-data.rst deleted file mode 100644 index b95831427f..0000000000 --- a/doc/source/users_guide/running-PTCLM/adding-ptclm-site-data.rst +++ /dev/null @@ -1,88 +0,0 @@ -.. include:: ../substitutions.rst - -.. _adding-ptclm-site-data: - -============================ -Adding PTCLMmkdata Site Data -============================ - -The "sitegroupname" option to PTCLMmkdata looks for groups of sites in the files in the ``PTCLM_sitedata`` directory under the PTCLMmkdata directory. You can add new names available for this option including your own lists of sites, by adding more files in this directory. There are three files for each "sitegroupname": ``$SITEGROUP_sitedata.txt``, ``$SITEGROUP_soildata.txt`` and ``$SITEGROUP_pftdata.txt`` (where ``$SITEGROUP`` is the name that would be entered as "sitegroupname" to PTCLMmkdata). Each file needs to have the same list of sites, but gives different information: site data, PFT data, and soil data respectively. Although the site codes need to be the same between the three files, the files do NOT have to be in the same order. Each file has a one-line header that lists the contents of each column which are separated by commas. The first column for each of the files is the "site_code" which must be consistent between the three files. The site code can be any unique character string, but in general we use the AmeriFlux site code. 
- -Site data file:`` $SITEGROUP_sitedata.txt``): The header for this file is: -:: - - site_code,name,state,lon,lat,elev,startyear,endyear,alignyear - -The columns: name, state, and elevation are informational only. Name is a longer descriptive name of the site, and state is the state for U.S. sites or country for non U.S. sites. The columns: lon and lat are the longitude and latitude of the location in decimal degrees. The last three columns are the start and ending year for the data and the align year for an 1850 case for the data. The align year is currently unused. - -Soil data file: ``$SITEGROUP_soildata.txt``): The header for this file is: -:: - - site_code,soil_depth,n_layers,layer_depth,layer_sand%,layer_clay% - -The first three fields after "site_code" are currently unused. The only two that are used are the percent sand and clay columns to set the soil texture. - -PFT data file: ``$SITEGROUP_pftdata.txt```): The header for this file is: -:: - - site_code,pft_f1,pft_c1,pft_f2,pft_c2,pft_f3,pft_c3,pft_f4,pft_c4,pft_f5,pft_c5 - -This file gives the vegetation coverage for the different vegetation types for the site. The file only supports up to five PFT's at the same time. The columns with "pft_f" are the fractions for each PFT, and the columns with "pft_c" is the integer index of the given PFT. Look at the pft-physiology file to see what the PFT index for each PFT type is. - ----------------------------------------------------- -Dynamic Land-Use Change Files for use by PTCLMmkdata ----------------------------------------------------- - -There is a mechanism for giving site-specific land-use change in PTCLMmkdata. Adding site specific files to the ``PTCLM_sitedata`` directory under PTCLMmkdata allows you to specify the change in vegetation and change in harvesting (for the CN model) for that site. Files are named: ``$SITE_dynpftdata.txt``. There is a sample file for the US-Ha1 site called: ``US-Ha1_dynpftdata.txt``. The file has a one-line header with the information that the file has, and then one-line for each year with a transition. The header line is as follows: -:: - - trans_year,pft_f1,pft_c1,pft_f2,pft_c2,pft_f3,pft_c3,pft_f4,pft_c4,pft_f5,pft_c5,har_vh1,har_vh2,har_sh1,har_sh2,har_sh3,graze,hold_harv,hold_graze - -This file only requires a line for each year where a transition or harvest happens. As in the "pftdata" file above "pft_f" refers to the fraction and "pft_c" refers to the PFT index, and only up to five vegetation types are allowed to co-exist. The last eight columns have to do with harvesting and grazing. The last two columns are whether to hold harvesting and/or grazing constant until the next transition year and will just be either 1 or 0. This file will be converted by the **PTCLM_sitedata/cnvrt_trnsyrs2_pftdyntxtfile.pl** script in the PTCLMmkdata directory to a format that **mksurfdata_esmf** can read that has an entry for each year for the range of years valid for the compset in question. - -.. _converting-ameriflux-for-ptclmmkdata: - ------------------------------------------------- -Converting AmeriFlux Data for use by PTCLMmkdata ------------------------------------------------- - -AmeriFlux data comes in comma separated format and is available from: `http://public.ornl.gov/ameriflux/dataproducts.shtml `_. Before you download the data you need to agree to the usage terms. - -Here is a copy of the usage terms from the web-site on June/13/2011. 
- -"The AmeriFlux data provided on this site are freely available and were furnished by individual AmeriFlux scientists who encourage their use. Please kindly inform the appropriate AmeriFlux scientist(s) of how you are using the data and of any publication plans. Please acknowledge the data source as a citation or in the acknowledgments if the data are not yet published. If the AmeriFlux Principal Investigators (PIs) feel that they should be acknowledged or offered participation as authors, they will let you know and we assume that an agreement on such matters will be reached before publishing and/or use of the data for publication. If your work directly competes with the PI's analysis they may ask that they have the opportunity to submit a manuscript before you submit one that uses unpublished data. In addition, when publishing, please acknowledge the agency that supported the research. Lastly, we kindly request that those publishing papers using AmeriFlux data provide preprints to the PIs providing the data and to the data archive at the Carbon Dioxide Information Analysis Center (CDIAC)." - -The above agreement applies to the "US-UMB" dataset imported into our repository as well, and Gil Bohrer is the PI on record for that dataset. - -The CESM can NOT handle missing data, so we recommend using the "Level 4" Gap filled datasets. The fields will also need to be renamed. The "WS" column becomes "WIND", "PREC" becomes "PRECmms", "RH" stays as "RH", "TA" becomes "TBOT", "Rg" becomes "FSDS", "Rgl" becomes "FLDS", "PRESS" becomes "PSRF". "ZBOT" can just be set to the constant of "30" (m). The units of Temperature need to be converted from "Celsius" to "Kelvin" (use the value in ``SHR_CONST_TKFRZ`` in the file ``models/csm_share/shr/shr_const.F90`` of ``273.15``. The units of Pressure also need to be converted from "kPa" to "Pa". LATIXY, and LONGXY should also be set to the latitude and longitude of the site. - ------------------------------------------------------------------ -Example: PTCLMmkdata transient example over a shorter time period ------------------------------------------------------------------ - -This is an example of using PTCLMmkdata for Harvard Forest (AmeriFlux site code US-Ha1) for transient land use 1991-2006. In order to do this we would've needed to have converted the AmeriFlux data into NetCDF format as shown in :ref:`converting-ameriflux-for-ptclmmkdata` section above. Also note that this site has a site-specific dynamic land-use change file for it ``PTCLM_sitedata/US-Ha1_dynpftdata.txt`` in the PTCLMmkdata directory and this file will be used for land-use change and harvesting rather than the global dataset. 
- -:: - - > cd $CTSMROOT/tools/PTCLM - # We are going to use forcing data over 1991 to 2006, but we need to start with - # a transient compset to do so, so we use the 20th Century transient: 1850-2000 - # Note: When creating the fpftdyn dataset for this site it will use the - # PTCLM_sitedata/US-Ha1_dynpftdata.txt - # file for land-use change and harvesting - > ./PTCLMmkdata -s US-Ha1 -d $MYCSMDATA --sitegroupname AmeriFlux - > mkdir $MYCSMDATA/atm/datm7/CLM1PT_data/1x1pt_US-Ha1 - > cd $MYCSMDATA/atm/datm7/CLM1PT_data/1x1pt_US-Ha1 - # Copy data in NetCDF format to this directory, filenames should be YYYY-MM.nc - # The fieldnames on the file should be: - # FLDS,FSDS,LATIXY, LONGXY, PRECTmms,PSRF,RH,TBOT,WIND,ZBOT - # With units - # W/m2,W/m2,degrees_N,degrees_E,mm/s, Pa, %, K, m/s, m - # The time coordinate units should be: days since YYYY-MM-DD 00:00:00 - > cd ../../../../../US-Ha1_I20TRCRUCLM45BGC - # Now we need to set the start date to 1991, and make sure the align year is for 1991 - > ./xmlchange RUN_STARTDATE=1991-01-01,DATM_CLMNCEP_YR_ALIGN=1991 - # Similarly for Nitrogen deposition data we cycle over: 1991 to 2006 - > cat << EOF >> user_nl_clm - model_year_align_ndep=1991,stream_year_first_ndep=1991,stream_year_last_ndep=2006 - EOF diff --git a/doc/source/users_guide/running-PTCLM/index.rst b/doc/source/users_guide/running-PTCLM/index.rst deleted file mode 100644 index 0b44c01b49..0000000000 --- a/doc/source/users_guide/running-PTCLM/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. on documentation master file, created by - sphinx-quickstart on Tue Jan 31 19:46:36 2017. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -.. include:: ../substitutions.rst - -.. _running-PTCLM: - -##################################### -Running PTCLM -##################################### - -.. toctree:: - :maxdepth: 2 - - introduction-to-ptclm.rst - using-ptclm.rst - ptclm-examples.rst - adding-ptclm-site-data.rst diff --git a/doc/source/users_guide/running-PTCLM/introduction-to-ptclm.rst b/doc/source/users_guide/running-PTCLM/introduction-to-ptclm.rst deleted file mode 100644 index 9ae4186d2c..0000000000 --- a/doc/source/users_guide/running-PTCLM/introduction-to-ptclm.rst +++ /dev/null @@ -1,135 +0,0 @@ -.. _introduction-to-ptclm.rst: - -.. include:: ../substitutions.rst - -.. _what-is-ptclm: - -===================== - What is PTCLMmkdata? -===================== - -PTCLMmkdata (pronounced Pee-Tee Cee-L-M make data is a Python script to help you set up PoinT CLM simulations. - -It runs the CLM tools for you to get datasets set up, and copies them to a location you can use them including the changes needed for a case to use the dataset with namelist and XML changes. - -Then you run **create_newcase** and point to the directory so that the namelist and XML changes are automatically applied. - -PTCLMmkdata has a simple ASCII text file for storing basic information for your sites. - -We also have complete lists for AmeriFlux and Fluxnet-Canada sites, although we only have the meteorology data for one site. - -For other sites you will need to obtain the meteorology data and translate it to a format that the CESM datm model can use. - -But, even without meteorology data PTCLMmkdata is useful to setup datasets to run with standard ``CLM_QIAN`` data. - -The original authors of PTCLMmkdata are: Daniel M. Ricciuto, Dali Wang, Peter E. Thornton, Wilfred M. Post all at Environmental Sciences Division, Oak Ridge National Laboratory (ORNL) and R. 
Quinn Thomas at Cornell University. It was then modified fairly extensively by Erik Kluzek at NCAR. We want to thank all of these individuals for this contribution to the CESM effort. We also want to thank the folks at University of Michigan Biological Stations (US-UMB) who allowed us to use their Fluxnet station data and import it into our inputdata repository, especially Gil Bohrer the PI on record for this site. - -.. _details-of-ptclm: - -======================= - Details of PTCLMmkdata -======================= - -To get help on PTCLM2_180611 use the "--help" option as follows. -:: - - > cd $CTSMROOT/tools/PTCLM - > ./PTCLMmkdata --help - -The output to the above command is as follows: -:: - - Usage: PTCLM.py [options] -d inputdatadir -m machine -s sitename - - Python script to create cases to run single point simulations with tower site data. - - Options: - --version show program's version number and exit - -h, --help show this help message and exit - - Required Options: - -d CCSM_INPUT, --csmdata=CCSM_INPUT - Location of CCSM input data - -m MYMACHINE, --machine=MYMACHINE - Machine, valid CESM script machine (-m list to list valid - machines) - -s MYSITE, --site=MYSITE - Site-code to run, FLUXNET code or CLM1PT name (-s list to list - valid names) - - Configure and Run Options: - -c MYCOMPSET, --compset=MYCOMPSET - Compset for CCSM simulation (Must be a valid 'I' compset [other - than IG compsets], use -c list to list valid compsets) - --coldstart Do a coldstart with arbitrary initial conditions - --caseidprefix=MYCASEID - Unique identifier to include as a prefix to the case name - --cesm_root=BASE_CESM - Root CESM directory (top level directory with models and scripts - subdirs) - --debug Flag to turn on debug mode so won't run, but display what would - happen - --finidat=FINIDAT Name of finidat initial conditions file to start CLM from - --list List all valid: sites, compsets, and machines - --namelist=NAMELIST - List of namelist items to add to CLM namelist (example: - --namelist="hist_fincl1='TG',hist_nhtfrq=-1" - --QIAN_tower_yrs Use the QIAN forcing data year that correspond to the tower - years - --rmold Remove the old case directory before starting - --run_n=MYRUN_N Number of time units to run simulation - --run_units=MYRUN_UNITS - Time units to run simulation (steps,days,years, etc.) - --quiet Print minimul information on what the script is doing - --sitegroupname=SITEGROUP - Name of the group of sites to search for you selected site in - (look for prefix group names in the PTCLM_sitedata directory) - --stdurbpt If you want to setup for standard urban namelist settings - --useQIAN use QIAN input forcing data instead of tower site meterology data - --verbose Print out extra information on what the script is doing - - Input data generation options: - These are options having to do with generation of input datasets. Note: When - running for supported CLM1PT single-point datasets you can NOT generate new - datasets. 
For supported CLM1PT single-point datasets, you MUST run with the - following settings: --nopointdata And you must NOT set any of these: --soilgrid - --pftgrid --owritesrf - - --nopointdata Do NOT make point data (use data already created) - --owritesrf Overwrite the existing surface datasets if they exist (normally - do NOT recreate them) - --pftgrid Use pft information from global gridded file (rather than site - data) - --soilgrid Use soil information from global gridded file (rather than site - data) - - Main Script Version Id: $Id: PTCLM.py 47576 2013-05-29 19:11:16Z erik $ Scripts URL: $HeadURL: https://svn-ccsm-models.cgd.ucar.edu/PTCLM/trunk_tags/PTCLM1_130529/PTCLM.py $: - -Here we give a simple example of using PTCLMmkdata for a straightforward case of running at the US-UMB Fluxnet site on cheyenne where we already have the meteorology data on the machine. Note, see :ref:`converting-ameriflux-for-ptclmmkdata` for permission information to use this data. - -Example 6-1. Example of running PTCLMmkdata for US-UMB on cheyenne ------------------------------------------------------------------- -:: - - > setenv CSMDATA $CESMDATAROOT/inputdata - > setenv MYDATAFILES `pwd`/mydatafiles - > setenv SITE US-UMB - > setenv MYCASE testPTCLM - - # Next build all of the clm tools you will need - > cd $CTSMROOT/tools/PTCLM - > buildtools - # next run PTCLM (NOTE -- MAKE SURE python IS IN YOUR PATH) - > cd $CTSMROOT/tools/PTCLM - # Here we run it using qcmd so that it will be run on a batch node - > qcmd -- ./PTCLMmkdata --site=$SITE --csmdata=$CSMDATA --mydatadir=$MYDATAFILES >& ptclmrun.log & - > cd $CIMEROOT/scripts - > ./create_newcase --user-mods-dir $MYDATAFILES/1x1pt_$SITE --case $MYCASE --res CLM_USRDAT --compset I1PtClm50SpGs - # Next setup, build and run as normal - > cd $MYCASE - > ./case.setup - -PTCLMmkdata includes a README file that gives some extra details and a simple example. - -.. include:: ../../../../tools/PTCLM/README - :literal: diff --git a/doc/source/users_guide/running-PTCLM/ptclm-examples.rst b/doc/source/users_guide/running-PTCLM/ptclm-examples.rst deleted file mode 100644 index 6801c5f3d8..0000000000 --- a/doc/source/users_guide/running-PTCLM/ptclm-examples.rst +++ /dev/null @@ -1,33 +0,0 @@ -.. include:: ../substitutions.rst - -.. _ptclm-examples: - -============================== - Examples of using PTCLMmkdata -============================== - -Now let's give a few more complex examples using some of the options we have discussed above. - -Now, let's demonstrate using a different group list, doing a spinup, running with Qian global forcing data, but using tower years to set the years to run over. This uses the options: sitegroupname, useQIAN, and QIANtower_years. - -Example: Running PTCLMmkdata without tower years ------------------------------------------------- -:: - - > cd $CTSMROOT/tools/PTCLM - > ./PTCLMmkdata -s US-Ha1 -d $CSMDATA --sitegroupname AmeriFlux --donot_use_tower_yrs - > cd ../../../../../US-Ha1_ICRUCLM45BGC_QIAN - # Now build and run normally - ``` - -Finally, let's demonstrate using a generic machine (which then requires the scratchroot option), using the global grid for PFT and soil types, and setting the run length to two months. - -Example: Running PTCLMmkdata with global PFT and soil types dataset -------------------------------------------------------------------- -:: - - > cd $CTSMROOT/tools/PTCLM - # Note, see the the Section called Converting AmeriFlux Data for use by PTCLMmkdata with permission information - # to use the US-UMB data. 
- > ./PTCLMmkdata -s US-UMB -d $CSMDATA --pftgrid --soilgrid - > cd ../../../../../US-UMB_ICRUCLM45BGC diff --git a/doc/source/users_guide/running-PTCLM/using-ptclm.rst b/doc/source/users_guide/running-PTCLM/using-ptclm.rst deleted file mode 100644 index e7be79bee6..0000000000 --- a/doc/source/users_guide/running-PTCLM/using-ptclm.rst +++ /dev/null @@ -1,114 +0,0 @@ -.. include:: ../substitutions.rst - -.. _using-ptclm.rst: - -************************** -Using PTCLMmkdata -************************** - -There are two types of options to PTCLMmkdata: required and optional. The three required options are the three settings that MUST be specified for PTCLMmkdata to work at all. The other settings have default values that will default to something useful. Most options use a double dash "--" "longname" such as "--list", but the most common options also have a short-name with a single dash. - -The required options to PTCLMmkdata are: inputdata directory (-d) and site-name (-s). Inputdata directory is the directory where you have the CESM inputdata files. Finally site-name is the name of the site that you want to run for. Site-name is a Fluxnet site name from the list of sites you are running on (see the --sitegroupname for more information about the site lists). - -After PTCLMmkdata is run you can run **create_newcase** to setup a case to use the datasets created. It also creates a ``README.PTCLM`` in that directory that documents the commandline options to PTCLMmkdata that were used to create it. - -After "help" the "list" option is one of the most useful options for getting help on using PTCLMmkdata. This option gives you information about some of the other options to PTCLMmkdata. To get a list of the sites that can be used for PTCLMmkdata use the "--list" option as follows. -:: - - > cd $CTSMROOT/tools/PTCLM - > ./PTCLMmkdata --list - -The output to the above command is as follows: -:: - - /bin/sh: line 1: PTCLMmkdata: command not found - -Steps in running PTCLMmkdata -============================ - -1. Build the CLM tools Next you need to make sure all the CLM FORTRAN tools are built. - :: - - > cd $CTSMROOT/tools/PTCLM - > ./buildtools - > gmake clean - -2. Run PTCLMmkdata Next you actually run PTCLMmkdata which does the different things listed below: - - a. PTCLMmkdata names your output file directory based on your input - :: - - [Prefix_]SiteCode - - Where: - ``Prefix`` is from the caseidprefix option (or blank if not used). - - ``SiteCode`` is the site name you entered with the -s option. - - For example, the casename for the following will be: - :: - - > cd scripts - > ./PTCLMmkdata -s US-UMB -d $MYCSMDATA - - b. PTCLMmkdata creates datasets for you It will populate $MYCSMDATA with new datasets it creates using the CLM tools. - - c. If a transient compset and PTCLMmkdata finds a _dynpftdata.txt file If you are running a transient compset (such as the "I_1850-2000_CN" compset) AND you there is a file in the PTCLM_sitedata directory under the PTCLMmkdata directory called $SITE_dynpftdata.txt it will use this file for the land-use changes. Otherwise it will leave land-use constant, unless you use the pftgrid option so it uses the global dataset for landuse changes. See the Section called Dynamic Land-Use Change Files for use by PTCLMmkdata for more information on this. There is a sample transient dataset called US-Ha1_dynpftdata.txt. 
Transient compsets, are compsets that create transient land-use change and forcing conditions such as: 'I_1850-2000', 'I_1850-2000_CN', 'I_RCP8.5_CN', 'I_RCP6.0_CN', 'I_RCP4.5_CN', or 'I_RCP2.6_CN'. - - d. PTCLMmkdata creates a pft-physiology for you PTCLMmkdata will create a local copy of the pft-physiology specific for your site that you could then customize with changes specific for that site. - - e. PTCLMmkdata creates a README.PTCLM for you PTCLMmkdata will create a simple text file with the command line for it in a file called README.PTCLM in the case directory it creates for you. - -3. Run create_newcase pointing to the directory created - -4. Customize, setup, build and run case as normal You then customize your case as you would normally. See the Chapter 1 chapter for more information on doing this. - -PTCLMmkdata options -========================= - -Next we discuss the setup and run-time options, dividing them up into setup, initial condition (IC), and run-time options. - -Configure options include: - -- --cesm_root=BASE_CESM -- --sitegroupname=SITEGROUP -- --donot_use_tower_yrs - -``--cesm_root`` - This option is for running PTCLMmkdata with a different root directory to CESM than the version PTCLMmkdata exists in. Normally you do NOT need to use this option. - -``--sitegroupname`` - In the PTCLMmkdata directory there is a subdirectory "PTCLM_sitedata" that contains files with the site, PFT and soil data information for groups of sites. These site groups are all separate ASCII files with the same prefix followed by a "_*data.txt" name. See :ref:`adding-ptclm-site-data` for more information on these files. By default we have provided three different valid group names: - -EXAMPLE -------- -AmeriFlux - -Fluxnet-Canada - -The EXAMPLE is the group used by default and ONLY includes the US-UMB site as that is the only site we have data provided for. The other two site groups include the site information for all of both the AmeriFlux and Fluxnet-Canada sites. You can use the "sitegroupname" option to use one of the other lists, or you can create your own lists using the EXAMPLE file as an example. Your list of sites could be real world locations or could be theoretical "virtual" sites given to exercise CLM on differing biomes for example. Note, see :ref:`converting-ameriflux-for-ptclmmkdata` with permission information to use the US-UMB data. - -``--donot_use_tower_yrs`` - This option is used with the "useQIAN" option to set the years to cycle over for the Qian data. In this case Qian atmospheric forcing will be used, but the simulation will run over the same years that tower site is available for this site. - -**Run-time options include:** - -- --debug - -This option tells PTCLMmkdata to echo what it would do if it were run, but NOT actually run anything. So it will show you the dataset creation commands it would use. It does however, run **create_newcase**, but then it only displays the **xmlchange** commands and changes that it would do. Also note that if you give the "--rmold" option it won't delete the case directory beforehand. Primarily this is intended for debugging the operation of PTCLMmkdata. - -**The dataset generation options are:** - -- --pftgrid -- --soilgrid - -The options that with a "grid" suffix all mean to create datasets using the global gridded information rather than using the site specific point data. By default the site specific point data is used. The "nopointdata" and "owritesrfaer" options have to do with file creation. 
- -Because supported single-point datasets already have the data created for them, you MUST use the "nopointdata" and "ndepgrid" options when you are using a supported single-point site. You must use "ndepgrid" even for a compset without CN. You also can NOT use the options: "soilgrid", "pftgrid", "aerdepgrid", or "owritesrfaer". - -``--pftgrid`` - This option says to use the PFT values provided on the global dataset rather than using the specific site based values from the PTCLM_sitedata/\*_pftdata.txt file when creating the surface dataset. This option must NOT be used when you you are using a site that is a supported single point dataset. - -``--soilgrid`` - This option says to use the soil values provided on the global dataset rather than using the specific site based values from the PTCLM_sitedata/\*_soildata.txt file when creating the surface dataset. This option must NOT be used when you you are using a site that is a supported single point dataset. - diff --git a/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst b/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst index 34a199ebe8..0edc07f187 100644 --- a/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst +++ b/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst @@ -16,11 +16,6 @@ There are three different ways to do this for normal-supported site ``CLM_USRDAT_NAME`` runs using your own datasets (single-point or regional). -``PTCLMmkdata`` - easily setup simulations to run for tower sites.. - -.. note:: ``PTS_MODE`` and ``PTCLMmkdata`` only works for a single point, while the other two options can also work for regional datasets as well. - .. _options-for-single-points: ========================================= @@ -33,7 +28,5 @@ In general :ref:`pts_mode` is the quick and dirty method that gets you started w Next, ``CLM_USRDAT_NAME`` is the best way to setup cases quickly where you have to create your own datasets (see :ref:`running-single-point-datasets`). With this method you don't have to change DATM or add files to the XML database -- but you have to follow a strict naming convention for files. However, once the files are named and in the proper location, you can easily setup new cases that use these datasets. This is good for treating all the required datasets as a "group" and for a particular model version. For advanced CLM developers who need to track dataset changes with different model versions you would be best off adding these datasets as supported datasets with the "normal supported datasets" method. -Lastly *PTCLMmkdata* is a great way to easily create datasets, setup simulations and run simulations for tower sites. It takes advantage of both normal supported site functionality and CLM_USRDAT_NAME internally. A big advantage to it, is that it's one-stop shopping, it runs tools to create datasets, and runs **create_newcase** and sets the appropriate env variables for you. So you only have to learn how to run one tool, rather than work with many different ones. PTCLMmkdata is described in the next chapter, :ref:`running-PTCLM`. - -Finally, if you also have meteorology data that you want to force your CLM simulations with you'll need to setup cases as described in :ref:`creating-your-own-singlepoint-dataset`. You'll need to create CLM datasets either according to ``CLM_USRDAT_NAME``. You may also need to modify DATM to use your forcing data. 
And you'll need to change your forcing data to be in a format that DATM can use. :ref:`converting-ameriflux-for-ptclmmkdata` tells you how to use AmeriFlux data for atmospheric forcing. +Finally, if you also have meteorology data that you want to force your CLM simulations with you'll need to setup cases as described in :ref:`creating-your-own-singlepoint-dataset`. You'll need to create CLM datasets either according to ``CLM_USRDAT_NAME``. You may also need to modify DATM to use your forcing data. And you'll need to change your forcing data to be in a format that DATM can use. diff --git a/doc/source/users_guide/testing/testing.rst b/doc/source/users_guide/testing/testing.rst index bad1183fff..ab51931aa7 100644 --- a/doc/source/users_guide/testing/testing.rst +++ b/doc/source/users_guide/testing/testing.rst @@ -42,14 +42,3 @@ If something went wrong, you can find the failing tests like so: :: > grep -E "^[0-9]+/[0-9]+ < [a-zA-Z]+" namelist_test.log | grep -v "PASS" - -Testing PTCLM -============= - -.. include:: ../../../../tools/PTCLM/README - :literal: - -To run on cheyenne, you do the following: - -.. include:: ../../../../tools/PTCLM/test/README.run_cheyenne - :literal: diff --git a/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst b/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst index 50a7969281..05fe461db2 100644 --- a/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst +++ b/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst @@ -6,6 +6,6 @@ Observational Sites Datasets ******************************* -There are two ways to customize datasets for a particular observational site. The first is to customize the input to the tools that create the dataset, and the second is to overwrite the default data after you've created a given dataset. Depending on the tool it might be easier to do it one way or the other. In Table :numref:`reqd-files-table` we list the files that are most likely to be customized and the way they might be customized. Of those files, the ones you are most likely to customize are: ``fatmlndfrc``, ``fsurdat``, ``faerdep`` (for DATM), and ``stream_fldfilename_ndep``. Note ``mksurfdata_esmf`` as documented previously has options to overwrite the vegetation and soil types. For more information on this also see :ref:`creating-your-own-singlepoint-dataset`. ``PTCLM`` uses these methods to customize datasets; see Chapter :numref:`running-PTCLM`. +There are two ways to customize datasets for a particular observational site. The first is to customize the input to the tools that create the dataset, and the second is to overwrite the default data after you've created a given dataset. Depending on the tool it might be easier to do it one way or the other. In Table :numref:`reqd-files-table` we list the files that are most likely to be customized and the way they might be customized. Of those files, the ones you are most likely to customize are: ``fatmlndfrc``, ``fsurdat``, ``faerdep`` (for DATM), and ``stream_fldfilename_ndep``. Note ``mksurfdata_esmf`` as documented previously has options to overwrite the vegetation and soil types. For more information on this also see :ref:`creating-your-own-singlepoint-dataset`. -Another aspect of customizing your input datasets is customizing the input atmospheric forcing datasets; see :ref:`creating-your-own-singlepoint-dataset` for more information on this. 
:ref:`converting-ameriflux-for-ptclmmkdata` has information on using the AmeriFlux tower site data as atmospheric forcing. +Another aspect of customizing your input datasets is customizing the input atmospheric forcing datasets; see :ref:`creating-your-own-singlepoint-dataset` for more information on this. From 500806030839f837fce7d9e46a820fd27f084c1c Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 14:06:44 -0600 Subject: [PATCH 08/32] Remove ref to missing figure ndown_ctsm_diagram.svg. --- doc/source/lilac/specific-atm-models/wrf-nesting.rst | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/doc/source/lilac/specific-atm-models/wrf-nesting.rst b/doc/source/lilac/specific-atm-models/wrf-nesting.rst index f4c4570f2f..db3b6f05cb 100644 --- a/doc/source/lilac/specific-atm-models/wrf-nesting.rst +++ b/doc/source/lilac/specific-atm-models/wrf-nesting.rst @@ -50,13 +50,7 @@ A full description of all steps for a WRF-CTSM run are included here. Therefore, we are not repeating the steps necessary for building WRF and CTSM. -In this example we use a nested domain over the CONUS as shows below: - -.. _Figure ctsm-ndown: - -.. figure:: ndown_ctsm_diagram.svg - - Flowchart for WRF-CTSM one-way nested simulations +In this example we use a nested domain over the continental United States. Nested Simulations : Pre-processing (geogrid.exe) ------------------------------------------------- From 63a73ef188fcc906e74904d85fd7ceb427739e70 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 14:41:54 -0600 Subject: [PATCH 09/32] Make pr-docs-check run in escomp:base Docker container. --- .github/workflows/pr-docs-check.yml | 2 ++ doc/source/requirements.txt | 3 --- 2 files changed, 2 insertions(+), 3 deletions(-) delete mode 100644 doc/source/requirements.txt diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml index b0093f6cdb..a86d3ed074 100644 --- a/.github/workflows/pr-docs-check.yml +++ b/.github/workflows/pr-docs-check.yml @@ -5,6 +5,8 @@ on: jobs: docs: runs-on: ubuntu-latest + container: + image: escomp/base:latest steps: - uses: actions/checkout@v1 - uses: ammaraskar/sphinx-action@master diff --git a/doc/source/requirements.txt b/doc/source/requirements.txt deleted file mode 100644 index 65a7364dea..0000000000 --- a/doc/source/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -sphinx>=6.1.3 -docutils>=0.19 -sphinx_rtd_theme From 4f362e04e5a2b4c76bffa33fac11fa8d5bf132e7 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 14:54:42 -0600 Subject: [PATCH 10/32] Trying to get pr-docs-check to work in container. --- .github/workflows/pr-docs-check.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml index a86d3ed074..2da0cddd27 100644 --- a/.github/workflows/pr-docs-check.yml +++ b/.github/workflows/pr-docs-check.yml @@ -12,5 +12,5 @@ jobs: - uses: ammaraskar/sphinx-action@master with: docs-folder: "doc/source" - pre-build-command: "git lfs install && git lfs pull --exclude='' --include=''" - build-command: "sphinx-build -W --keep-going -n -b html . _build" + pre-build-command: "make fetch-images" + build-command: "make html" From 8b8657de3767607159f2efaa19fed1a4b9fe14c9 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 14:57:18 -0600 Subject: [PATCH 11/32] Set docs-folder to just "doc". 
--- .github/workflows/pr-docs-check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml index 2da0cddd27..b5a7390930 100644 --- a/.github/workflows/pr-docs-check.yml +++ b/.github/workflows/pr-docs-check.yml @@ -11,6 +11,6 @@ jobs: - uses: actions/checkout@v1 - uses: ammaraskar/sphinx-action@master with: - docs-folder: "doc/source" + docs-folder: "doc" pre-build-command: "make fetch-images" build-command: "make html" From 7873afd1373d3fa758e15fdb82aa8fad24995564 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 15:00:08 -0600 Subject: [PATCH 12/32] Add doc/requirements.txt, with just sphinx_rtd_theme. --- doc/requirements.txt | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/requirements.txt diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 0000000000..52b04f2ece --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1 @@ +sphinx_rtd_theme \ No newline at end of file From fa38d72d152939d964c701d0b7f7ca3bf6a6daf8 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 15:03:37 -0600 Subject: [PATCH 13/32] Add docutils>=0.19 to requirements.txt. --- doc/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/requirements.txt b/doc/requirements.txt index 52b04f2ece..727d7310b8 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1 +1,2 @@ +docutils>=0.19 sphinx_rtd_theme \ No newline at end of file From 9acd01bc26dffa2431dc26c36a7d0d2f5168c125 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 15:07:10 -0600 Subject: [PATCH 14/32] make now fails on nitpicky warnings. --- doc/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/Makefile b/doc/Makefile index 49e9764b7a..c36990a15e 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -2,7 +2,7 @@ # # You can set these variables from the command line. -SPHINXOPTS = +SPHINXOPTS = -W --keep-going -n SPHINXBUILD = sphinx-build SPHINXPROJ = clmdoc SOURCEDIR = source From da177a80392296ade35e73191f5e65c1bd9e9b1d Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 15:18:34 -0600 Subject: [PATCH 15/32] Change build-command from "make html" to "make". --- .github/workflows/pr-docs-check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml index b5a7390930..0179422534 100644 --- a/.github/workflows/pr-docs-check.yml +++ b/.github/workflows/pr-docs-check.yml @@ -13,4 +13,4 @@ jobs: with: docs-folder: "doc" pre-build-command: "make fetch-images" - build-command: "make html" + build-command: "make" From 43dec3c685a01c44d7a4c38d9a1a884baca07b2b Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 15:21:17 -0600 Subject: [PATCH 16/32] Revert "Change build-command from "make html" to "make"." This reverts commit da177a80392296ade35e73191f5e65c1bd9e9b1d. 
--- .github/workflows/pr-docs-check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml index 0179422534..b5a7390930 100644 --- a/.github/workflows/pr-docs-check.yml +++ b/.github/workflows/pr-docs-check.yml @@ -13,4 +13,4 @@ jobs: with: docs-folder: "doc" pre-build-command: "make fetch-images" - build-command: "make" + build-command: "make html" From 9476fe731125306667f59544da01f281857ad7b1 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 15:43:43 -0600 Subject: [PATCH 17/32] Try running pr-docs-check on CentOS instead of Ubuntu. --- .github/workflows/pr-docs-check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml index b5a7390930..1a50346138 100644 --- a/.github/workflows/pr-docs-check.yml +++ b/.github/workflows/pr-docs-check.yml @@ -4,7 +4,7 @@ on: jobs: docs: - runs-on: ubuntu-latest + runs-on: centos-latest container: image: escomp/base:latest steps: From 0cfefbb59b6920b2c9c34e034ac4d8e0261350bc Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 29 Apr 2024 15:47:39 -0600 Subject: [PATCH 18/32] Revert "Try running pr-docs-check on CentOS instead of Ubuntu." This reverts commit 9476fe731125306667f59544da01f281857ad7b1. --- .github/workflows/pr-docs-check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml index 1a50346138..b5a7390930 100644 --- a/.github/workflows/pr-docs-check.yml +++ b/.github/workflows/pr-docs-check.yml @@ -4,7 +4,7 @@ on: jobs: docs: - runs-on: centos-latest + runs-on: ubuntu-latest container: image: escomp/base:latest steps: From cf15c11fa72756425ed21c088446bed0da951f83 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Tue, 30 Apr 2024 07:38:56 -0600 Subject: [PATCH 19/32] Resolve "invalid language code" warning. --- doc/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 6c00f5a686..4d03986d32 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -80,7 +80,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. From cf57f65e2608471509ba26ad2283de200b261eb0 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Tue, 30 Apr 2024 07:40:51 -0600 Subject: [PATCH 20/32] Update intersphinx_mapping to new format. See https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#confval-intersphinx_mapping --- doc/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 4d03986d32..7300102f74 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -178,7 +178,7 @@ # Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'https://docs.python.org/': None} +intersphinx_mapping = {'python': ('https://docs.python.org/', None)} numfig = True numfig_format = {'figure': 'Figure %s', From 499651cc82345d6dd3e09e41d57abe19802181e5 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Tue, 30 Apr 2024 07:52:47 -0600 Subject: [PATCH 21/32] pr-docs-check now runs raw; added new pr-docs-check-container. 
--- .github/workflows/pr-docs-check-container.yml | 16 ++++++++++++++++ .github/workflows/pr-docs-check.yml | 8 +++----- doc/source/requirements.txt | 3 +++ 3 files changed, 22 insertions(+), 5 deletions(-) create mode 100644 .github/workflows/pr-docs-check-container.yml create mode 100644 doc/source/requirements.txt diff --git a/.github/workflows/pr-docs-check-container.yml b/.github/workflows/pr-docs-check-container.yml new file mode 100644 index 0000000000..82dd517e8b --- /dev/null +++ b/.github/workflows/pr-docs-check-container.yml @@ -0,0 +1,16 @@ +name: "Pull Request Docs Check (Container)" +on: +- pull_request + +jobs: + docs: + runs-on: ubuntu-latest + container: + image: escomp/base:latest + steps: + - uses: actions/checkout@v1 + - uses: ammaraskar/sphinx-action@master + with: + docs-folder: "doc" + pre-build-command: "make fetch-images" + build-command: "make html" diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml index b5a7390930..b0093f6cdb 100644 --- a/.github/workflows/pr-docs-check.yml +++ b/.github/workflows/pr-docs-check.yml @@ -5,12 +5,10 @@ on: jobs: docs: runs-on: ubuntu-latest - container: - image: escomp/base:latest steps: - uses: actions/checkout@v1 - uses: ammaraskar/sphinx-action@master with: - docs-folder: "doc" - pre-build-command: "make fetch-images" - build-command: "make html" + docs-folder: "doc/source" + pre-build-command: "git lfs install && git lfs pull --exclude='' --include=''" + build-command: "sphinx-build -W --keep-going -n -b html . _build" diff --git a/doc/source/requirements.txt b/doc/source/requirements.txt new file mode 100644 index 0000000000..25ca68743f --- /dev/null +++ b/doc/source/requirements.txt @@ -0,0 +1,3 @@ +sphinx>=6.1.3 +docutils>=0.19 +sphinx_rtd_theme \ No newline at end of file From c0a5b29c68388a5bbd75fe0b994c632a3be7e126 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Tue, 30 Apr 2024 08:30:20 -0600 Subject: [PATCH 22/32] Call "make fetch-images" from doc/. --- .github/workflows/pr-docs-check-container.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-docs-check-container.yml b/.github/workflows/pr-docs-check-container.yml index 82dd517e8b..3a166bf90f 100644 --- a/.github/workflows/pr-docs-check-container.yml +++ b/.github/workflows/pr-docs-check-container.yml @@ -12,5 +12,5 @@ jobs: - uses: ammaraskar/sphinx-action@master with: docs-folder: "doc" - pre-build-command: "make fetch-images" + pre-build-command: "(cd doc && make fetch-images)" build-command: "make html" From 9e97967d90d475c7dfe9736be225e9bf2f0129c5 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Tue, 30 Apr 2024 08:48:43 -0600 Subject: [PATCH 23/32] Add pr-docs-check-container-raw.yml. --- .github/workflows/pr-docs-check-container-raw.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 .github/workflows/pr-docs-check-container-raw.yml diff --git a/.github/workflows/pr-docs-check-container-raw.yml b/.github/workflows/pr-docs-check-container-raw.yml new file mode 100644 index 0000000000..77390be5f5 --- /dev/null +++ b/.github/workflows/pr-docs-check-container-raw.yml @@ -0,0 +1,12 @@ +name: "Pull Request Docs Check (Container, Raw)" +on: +- pull_request + +jobs: + docs: + runs-on: ubuntu-latest + container: + image: escomp/base:latest + steps: + - run: git lfs install && git lfs pull --exclude='' --include='' + - run: sphinx-build -W --keep-going -n -b html . 
_build From b1c1aca230a665433957851de713f4ae48fcb799 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Tue, 30 Apr 2024 08:50:32 -0600 Subject: [PATCH 24/32] Remove "git lfs install" step rom pr-docs-check-container-raw. --- .github/workflows/pr-docs-check-container-raw.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-docs-check-container-raw.yml b/.github/workflows/pr-docs-check-container-raw.yml index 77390be5f5..64f18c5acc 100644 --- a/.github/workflows/pr-docs-check-container-raw.yml +++ b/.github/workflows/pr-docs-check-container-raw.yml @@ -8,5 +8,5 @@ jobs: container: image: escomp/base:latest steps: - - run: git lfs install && git lfs pull --exclude='' --include='' + - run: git lfs pull --exclude='' --include='' - run: sphinx-build -W --keep-going -n -b html . _build From c58699b599fdf9029aa5c3d23e7c1c775cb848e7 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Tue, 30 Apr 2024 08:52:28 -0600 Subject: [PATCH 25/32] Add troubleshooting output to pr-docs-check-container-raw. --- .github/workflows/pr-docs-check-container-raw.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/pr-docs-check-container-raw.yml b/.github/workflows/pr-docs-check-container-raw.yml index 64f18c5acc..6ac20fe484 100644 --- a/.github/workflows/pr-docs-check-container-raw.yml +++ b/.github/workflows/pr-docs-check-container-raw.yml @@ -8,5 +8,7 @@ jobs: container: image: escomp/base:latest steps: + - run: pwd + - run: ls -lh - run: git lfs pull --exclude='' --include='' - run: sphinx-build -W --keep-going -n -b html . _build From 92e8cba6d5e562478b252c6761d76a8f0bfa79ba Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Tue, 30 Apr 2024 08:58:09 -0600 Subject: [PATCH 26/32] Add pr-docs-check-container-ssr action. --- .github/workflows/pr-docs-check-container-ssr.yml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 .github/workflows/pr-docs-check-container-ssr.yml diff --git a/.github/workflows/pr-docs-check-container-ssr.yml b/.github/workflows/pr-docs-check-container-ssr.yml new file mode 100644 index 0000000000..ad48ae547d --- /dev/null +++ b/.github/workflows/pr-docs-check-container-ssr.yml @@ -0,0 +1,14 @@ +name: "Pull Request Docs Check (Container, SSR Fork)" +on: +- pull_request + +jobs: + docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: samsrabin/sphinx-action@use-escomp-base-container + with: + docs-folder: "doc" + pre-build-command: "(cd doc && make fetch-images)" + build-command: "make html" From 7326b4328ca3def1c14567df71cfd0686b26674a Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Tue, 30 Apr 2024 09:03:46 -0600 Subject: [PATCH 27/32] pr-docs-check-container-ssr: Skip "git lfs install" step. 
--- .github/workflows/pr-docs-check-container-ssr.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-docs-check-container-ssr.yml b/.github/workflows/pr-docs-check-container-ssr.yml index ad48ae547d..bda02446c6 100644 --- a/.github/workflows/pr-docs-check-container-ssr.yml +++ b/.github/workflows/pr-docs-check-container-ssr.yml @@ -10,5 +10,5 @@ jobs: - uses: samsrabin/sphinx-action@use-escomp-base-container with: docs-folder: "doc" - pre-build-command: "(cd doc && make fetch-images)" + pre-build-command: "git lfs pull --exclude='' --include=''" build-command: "make html" From a5d155fda74d9120c9e3dde865d4cdc550348db3 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Tue, 30 Apr 2024 09:22:07 -0600 Subject: [PATCH 28/32] Revert pr-docs-check-container-ssr; add pr-docs-check-ssr. --- .github/workflows/pr-docs-check-container-ssr.yml | 4 +++- .github/workflows/pr-docs-check-ssr.yml | 14 ++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/pr-docs-check-ssr.yml diff --git a/.github/workflows/pr-docs-check-container-ssr.yml b/.github/workflows/pr-docs-check-container-ssr.yml index bda02446c6..1ff0ab1c47 100644 --- a/.github/workflows/pr-docs-check-container-ssr.yml +++ b/.github/workflows/pr-docs-check-container-ssr.yml @@ -5,10 +5,12 @@ on: jobs: docs: runs-on: ubuntu-latest + container: + image: escomp/base:latest steps: - uses: actions/checkout@v1 - uses: samsrabin/sphinx-action@use-escomp-base-container with: docs-folder: "doc" - pre-build-command: "git lfs pull --exclude='' --include=''" + pre-build-command: "(cd doc && make fetch-images)" build-command: "make html" diff --git a/.github/workflows/pr-docs-check-ssr.yml b/.github/workflows/pr-docs-check-ssr.yml new file mode 100644 index 0000000000..f3280714df --- /dev/null +++ b/.github/workflows/pr-docs-check-ssr.yml @@ -0,0 +1,14 @@ +name: "Pull Request Docs Check (SSR Fork)" +on: +- pull_request + +jobs: + docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: samsrabin/sphinx-action@use-escomp-base-container + with: + docs-folder: "doc/source" + pre-build-command: "git lfs install && git lfs pull --exclude='' --include=''" + build-command: "sphinx-build -W --keep-going -n -b html . _build" From 7e72de8f6b962c2a0361a510eb325ed451ac9571 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Tue, 30 Apr 2024 09:36:22 -0600 Subject: [PATCH 29/32] Delete all requirements.txt in doc/. --- doc/requirements.txt | 2 -- doc/source/requirements.txt | 3 --- 2 files changed, 5 deletions(-) delete mode 100644 doc/requirements.txt delete mode 100644 doc/source/requirements.txt diff --git a/doc/requirements.txt b/doc/requirements.txt deleted file mode 100644 index 727d7310b8..0000000000 --- a/doc/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -docutils>=0.19 -sphinx_rtd_theme \ No newline at end of file diff --git a/doc/source/requirements.txt b/doc/source/requirements.txt deleted file mode 100644 index 25ca68743f..0000000000 --- a/doc/source/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -sphinx>=6.1.3 -docutils>=0.19 -sphinx_rtd_theme \ No newline at end of file From af4abb2005d0516c54fdf3786e8e85589d485746 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Wed, 1 May 2024 13:37:48 -0600 Subject: [PATCH 30/32] pr-docs-check-ssr: Provide full path to build dir. Works! Works in conjunction with latest change to my fork of the Github action. 
---
 .github/workflows/pr-docs-check-ssr.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/pr-docs-check-ssr.yml b/.github/workflows/pr-docs-check-ssr.yml
index f3280714df..e23e9940da 100644
--- a/.github/workflows/pr-docs-check-ssr.yml
+++ b/.github/workflows/pr-docs-check-ssr.yml
@@ -11,4 +11,4 @@ jobs:
       with:
         docs-folder: "doc/source"
         pre-build-command: "git lfs install && git lfs pull --exclude='' --include=''"
-        build-command: "sphinx-build -W --keep-going -n -b html . _build"
+        build-command: "sphinx-build -W --keep-going -n -b html . /home/user/mounted_home/_build"

From 33f9e9dc6ae1f70cfe88d283f15b294aa689dee0 Mon Sep 17 00:00:00 2001
From: Sam Rabin
Date: Wed, 1 May 2024 13:40:41 -0600
Subject: [PATCH 31/32] Delete now-unneeded workflow experiments.

---
 .../workflows/pr-docs-check-container-raw.yml | 14 --------------
 .../workflows/pr-docs-check-container-ssr.yml | 16 ----------------
 .github/workflows/pr-docs-check-container.yml | 16 ----------------
 .github/workflows/pr-docs-check.yml           | 14 --------------
 4 files changed, 60 deletions(-)
 delete mode 100644 .github/workflows/pr-docs-check-container-raw.yml
 delete mode 100644 .github/workflows/pr-docs-check-container-ssr.yml
 delete mode 100644 .github/workflows/pr-docs-check-container.yml
 delete mode 100644 .github/workflows/pr-docs-check.yml

diff --git a/.github/workflows/pr-docs-check-container-raw.yml b/.github/workflows/pr-docs-check-container-raw.yml
deleted file mode 100644
index 6ac20fe484..0000000000
--- a/.github/workflows/pr-docs-check-container-raw.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-name: "Pull Request Docs Check (Container, Raw)"
-on:
-- pull_request
-
-jobs:
-  docs:
-    runs-on: ubuntu-latest
-    container:
-      image: escomp/base:latest
-    steps:
-    - run: pwd
-    - run: ls -lh
-    - run: git lfs pull --exclude='' --include=''
-    - run: sphinx-build -W --keep-going -n -b html . _build
diff --git a/.github/workflows/pr-docs-check-container-ssr.yml b/.github/workflows/pr-docs-check-container-ssr.yml
deleted file mode 100644
index 1ff0ab1c47..0000000000
--- a/.github/workflows/pr-docs-check-container-ssr.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-name: "Pull Request Docs Check (Container, SSR Fork)"
-on:
-- pull_request
-
-jobs:
-  docs:
-    runs-on: ubuntu-latest
-    container:
-      image: escomp/base:latest
-    steps:
-    - uses: actions/checkout@v1
-    - uses: samsrabin/sphinx-action@use-escomp-base-container
-      with:
-        docs-folder: "doc"
-        pre-build-command: "(cd doc && make fetch-images)"
-        build-command: "make html"
diff --git a/.github/workflows/pr-docs-check-container.yml b/.github/workflows/pr-docs-check-container.yml
deleted file mode 100644
index 3a166bf90f..0000000000
--- a/.github/workflows/pr-docs-check-container.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-name: "Pull Request Docs Check (Container)"
-on:
-- pull_request
-
-jobs:
-  docs:
-    runs-on: ubuntu-latest
-    container:
-      image: escomp/base:latest
-    steps:
-    - uses: actions/checkout@v1
-    - uses: ammaraskar/sphinx-action@master
-      with:
-        docs-folder: "doc"
-        pre-build-command: "(cd doc && make fetch-images)"
-        build-command: "make html"
diff --git a/.github/workflows/pr-docs-check.yml b/.github/workflows/pr-docs-check.yml
deleted file mode 100644
index b0093f6cdb..0000000000
--- a/.github/workflows/pr-docs-check.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-name: "Pull Request Docs Check"
-on:
-- pull_request
-
-jobs:
-  docs:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v1
-    - uses: ammaraskar/sphinx-action@master
-      with:
-        docs-folder: "doc/source"
-        pre-build-command: "git lfs install && git lfs pull --exclude='' --include=''"
-        build-command: "sphinx-build -W --keep-going -n -b html . _build"

From b1c328ad69f4005c1c7f325bd61f76a0f2063325 Mon Sep 17 00:00:00 2001
From: Sam Rabin
Date: Wed, 1 May 2024 13:40:55 -0600
Subject: [PATCH 32/32] Rename remaining workflow.

---
 .github/workflows/{pr-docs-check-ssr.yml => pr-docs-check.yml} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename .github/workflows/{pr-docs-check-ssr.yml => pr-docs-check.yml} (100%)

diff --git a/.github/workflows/pr-docs-check-ssr.yml b/.github/workflows/pr-docs-check.yml
similarity index 100%
rename from .github/workflows/pr-docs-check-ssr.yml
rename to .github/workflows/pr-docs-check.yml
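
For reference, here is a sketch of the single workflow that survives this series. Patch 32/32 is a pure rename (similarity index 100%), so the final .github/workflows/pr-docs-check.yml should match the file created in patch 28/32 as amended by patch 30/32; this listing is reconstructed from those diffs, with the exact indentation assumed, and the name: field keeps its "SSR Fork" label because the rename does not touch the file contents:

name: "Pull Request Docs Check (SSR Fork)"
on:
- pull_request

jobs:
  docs:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v1
    - uses: samsrabin/sphinx-action@use-escomp-base-container
      with:
        docs-folder: "doc/source"
        pre-build-command: "git lfs install && git lfs pull --exclude='' --include=''"
        build-command: "sphinx-build -W --keep-going -n -b html . /home/user/mounted_home/_build"

In sphinx-build, -W promotes warnings to errors, --keep-going reports all remaining warnings before exiting nonzero, and -n enables nitpicky reference checking, so the pull-request check fails on broken cross-references as well as on hard build errors; the pre-build step pulls the Git LFS content the docs build needs.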