From 13ca2cdd4e3370e664b5efad82733dea003f350e Mon Sep 17 00:00:00 2001 From: Nick Szapiro Date: Mon, 29 Apr 2024 18:20:38 +0000 Subject: [PATCH 1/2] Update to CICE-Consortium (935 to 943) --- cicecore/cicedyn/analysis/ice_history.F90 | 55 ++++- .../cicedyn/analysis/ice_history_shared.F90 | 64 +++++- cicecore/cicedyn/general/ice_flux.F90 | 10 +- cicecore/cicedyn/general/ice_init.F90 | 2 +- cicecore/cicedyn/general/ice_step_mod.F90 | 42 ++-- .../io/io_binary/ice_history_write.F90 | 16 +- .../io/io_netcdf/ice_history_write.F90 | 190 +++++++++--------- .../io/io_pio2/ice_history_write.F90 | 170 ++++++++-------- .../infrastructure/io/io_pio2/ice_pio.F90 | 2 +- .../drivers/direct/hadgem3/CICE_RunMod.F90 | 8 +- .../direct/nemo_concepts/CICE_RunMod.F90 | 8 +- cicecore/drivers/mct/cesm1/CICE_RunMod.F90 | 10 +- cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 | 10 +- .../drivers/nuopc/cmeps/ice_comp_nuopc.F90 | 25 ++- cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 | 10 +- .../drivers/standalone/cice/CICE_RunMod.F90 | 10 +- .../drivers/unittest/opticep/CICE_InitMod.F90 | 3 +- .../drivers/unittest/opticep/CICE_RunMod.F90 | 10 +- .../drivers/unittest/opticep/ice_step_mod.F90 | 42 ++-- configuration/scripts/cice.batch.csh | 2 +- configuration/scripts/cice.launch.csh | 20 ++ configuration/scripts/ice_in | 8 + .../scripts/machines/Macros.carpenter_cray | 60 ++++++ .../scripts/machines/Macros.carpenter_gnu | 69 +++++++ .../scripts/machines/Macros.carpenter_gnuimpi | 69 +++++++ .../scripts/machines/Macros.carpenter_intel | 59 ++++++ .../machines/Macros.carpenter_intelimpi | 59 ++++++ .../scripts/machines/env.carpenter_cray | 54 +++++ .../scripts/machines/env.carpenter_gnu | 58 ++++++ .../scripts/machines/env.carpenter_gnuimpi | 58 ++++++ .../scripts/machines/env.carpenter_intel | 57 ++++++ .../scripts/machines/env.carpenter_intelimpi | 57 ++++++ .../scripts/options/set_nml.histinst | 1 + doc/source/user_guide/ug_case_settings.rst | 2 +- doc/source/user_guide/ug_implementation.rst | 150 +++++++------- 35 files changed, 1118 insertions(+), 352 deletions(-) create mode 100644 configuration/scripts/machines/Macros.carpenter_cray create mode 100644 configuration/scripts/machines/Macros.carpenter_gnu create mode 100644 configuration/scripts/machines/Macros.carpenter_gnuimpi create mode 100644 configuration/scripts/machines/Macros.carpenter_intel create mode 100644 configuration/scripts/machines/Macros.carpenter_intelimpi create mode 100644 configuration/scripts/machines/env.carpenter_cray create mode 100644 configuration/scripts/machines/env.carpenter_gnu create mode 100644 configuration/scripts/machines/env.carpenter_gnuimpi create mode 100644 configuration/scripts/machines/env.carpenter_intel create mode 100644 configuration/scripts/machines/env.carpenter_intelimpi diff --git a/cicecore/cicedyn/analysis/ice_history.F90 b/cicecore/cicedyn/analysis/ice_history.F90 index 87a339529..32f744477 100644 --- a/cicecore/cicedyn/analysis/ice_history.F90 +++ b/cicecore/cicedyn/analysis/ice_history.F90 @@ -362,6 +362,7 @@ subroutine init_hist (dt) f_sidmasslat = 'mxxxx' f_sndmasssnf = 'mxxxx' f_sndmassmelt = 'mxxxx' + f_sndmassdyn = 'mxxxx' f_siflswdtop = 'mxxxx' f_siflswutop = 'mxxxx' f_siflswdbot = 'mxxxx' @@ -402,6 +403,11 @@ subroutine init_hist (dt) f_siu = f_CMIP f_siv = f_CMIP f_sispeed = f_CMIP + f_sndmasssubl = f_CMIP + f_sndmasssnf = f_CMIP + f_sndmassmelt = f_CMIP + f_sndmassdyn = f_CMIP + f_sidmasssi = f_CMIP endif if (grid_ice == 'CD' .or. 
grid_ice == 'C') then @@ -447,6 +453,14 @@ subroutine init_hist (dt) if (f_Tsnz (1:1) /= 'x') f_VGRDs = .true. if (tr_fsd) f_NFSD = .true. + call broadcast_scalar (f_tlon, master_task) + call broadcast_scalar (f_tlat, master_task) + call broadcast_scalar (f_ulon, master_task) + call broadcast_scalar (f_ulat, master_task) + call broadcast_scalar (f_nlon, master_task) + call broadcast_scalar (f_nlat, master_task) + call broadcast_scalar (f_elon, master_task) + call broadcast_scalar (f_elat, master_task) call broadcast_scalar (f_tmask, master_task) call broadcast_scalar (f_umask, master_task) call broadcast_scalar (f_nmask, master_task) @@ -646,6 +660,7 @@ subroutine init_hist (dt) call broadcast_scalar (f_sidmasslat, master_task) call broadcast_scalar (f_sndmasssnf, master_task) call broadcast_scalar (f_sndmassmelt, master_task) + call broadcast_scalar (f_sndmassdyn, master_task) call broadcast_scalar (f_siflswdtop, master_task) call broadcast_scalar (f_siflswutop, master_task) call broadcast_scalar (f_siflswdbot, master_task) @@ -1640,7 +1655,7 @@ subroutine init_hist (dt) "none", c1, c0, & ns1, f_sidmassevapsubl) - call define_hist_field(n_sndmasssubl,"sndmassubl","kg m-2 s-1",tstr2D, tcstr, & + call define_hist_field(n_sndmasssubl,"sndmasssubl","kg m-2 s-1",tstr2D, tcstr, & "snow mass change from evaporation and sublimation", & "none", c1, c0, & ns1, f_sndmasssubl) @@ -1670,6 +1685,11 @@ subroutine init_hist (dt) "none", c1, c0, & ns1, f_sndmassmelt) + call define_hist_field(n_sndmassdyn,"sndmassdyn","kg m-2 s-1",tstr2D, tcstr, & + "snow mass change from dynamics ridging", & + "none", c1, c0, & + ns1, f_sndmassdyn) + call define_hist_field(n_siflswdtop,"siflswdtop","W/m2",tstr2D, tcstr, & "down shortwave flux over sea ice", & "positive downward", c1, c0, & @@ -1973,6 +1993,21 @@ subroutine init_hist (dt) ! floe size distribution call init_hist_fsd_4Df + !----------------------------------------------------------------- + ! fill icoord array with namelist values + !----------------------------------------------------------------- + + icoord=.true. + + icoord(n_tlon ) = f_tlon + icoord(n_tlat ) = f_tlat + icoord(n_ulon ) = f_ulon + icoord(n_ulat ) = f_ulat + icoord(n_nlon ) = f_nlon + icoord(n_nlat ) = f_nlat + icoord(n_elon ) = f_elon + icoord(n_elat ) = f_elat + !----------------------------------------------------------------- ! 
fill igrd array with namelist values !----------------------------------------------------------------- @@ -2137,7 +2172,7 @@ subroutine accum_hist (dt) taubxN, taubyN, strocnxN, strocnyN, & strairxE, strairyE, strtltxE, strtltyE, strintxE, strintyE, & taubxE, taubyE, strocnxE, strocnyE, & - fmU, fmN, fmE, daidtt, dvidtt, daidtd, dvidtd, fsurf, & + fmU, fmN, fmE, daidtt, dvidtt, daidtd, dvidtd, dvsdtd, fsurf, & fcondtop, fcondbot, fsurfn, fcondtopn, flatn, fsensn, albcnt, snwcnt, & stressp_1, stressm_1, stress12_1, & stresspT, stressmT, stress12T, & @@ -3045,7 +3080,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice(i,j,iblk) > puny) then - worka(i,j) = evaps(i,j,iblk)*rhos + worka(i,j) = evaps(i,j,iblk) endif enddo enddo @@ -3057,7 +3092,7 @@ subroutine accum_hist (dt) do j = jlo, jhi do i = ilo, ihi if (aice(i,j,iblk) > puny) then - worka(i,j) = aice(i,j,iblk)*fsnow(i,j,iblk)*rhos + worka(i,j) = aice(i,j,iblk)*fsnow(i,j,iblk) endif enddo enddo @@ -3076,6 +3111,18 @@ subroutine accum_hist (dt) call accum_hist_field(n_sndmassmelt, iblk, worka(:,:), a2D) endif + if (f_sndmassdyn(1:1) /= 'x') then + worka(:,:) = c0 + do j = jlo, jhi + do i = ilo, ihi + if (aice(i,j,iblk) > puny) then + worka(i,j) = dvsdtd(i,j,iblk)*rhos + endif + enddo + enddo + call accum_hist_field(n_sndmassdyn, iblk, worka(:,:), a2D) + endif + if (f_siflswdtop(1:1) /= 'x') then worka(:,:) = c0 do j = jlo, jhi diff --git a/cicecore/cicedyn/analysis/ice_history_shared.F90 b/cicecore/cicedyn/analysis/ice_history_shared.F90 index ac2cf8afb..d6fa78542 100644 --- a/cicecore/cicedyn/analysis/ice_history_shared.F90 +++ b/cicecore/cicedyn/analysis/ice_history_shared.F90 @@ -57,7 +57,7 @@ module ice_history_shared history_rearranger ! history file rearranger, box or subset for pio character (len=char_len), public :: & - hist_suffix(max_nstrm) ! appended to 'h' in filename when not 'x' + hist_suffix(max_nstrm) ! appended to history_file in filename integer (kind=int_kind), public :: & history_iotasks , & ! iotasks, root, stride defines io pes for pio @@ -131,6 +131,7 @@ module ice_history_shared avail_hist_fields(max_avail_hist_fields) integer (kind=int_kind), parameter, public :: & + ncoord = 8 , & ! number of coordinate variables: TLON, TLAT, ULON, ULAT, NLON, NLAT, ELON, ELAT nvar_grd = 21 , & ! number of grid fields that can be written ! excluding grid vertices nvar_grdz = 6 ! number of category/vertical grid fields written @@ -165,6 +166,7 @@ module ice_history_shared avgct(max_nstrm) ! average sample counter logical (kind=log_kind), public :: & + icoord(ncoord) , & ! true if coord field is written to output file igrd (nvar_grd), & ! true if grid field is written to output file igrdz(nvar_grdz) ! 
true if category/vertical grid field is written @@ -194,6 +196,10 @@ module ice_history_shared !--------------------------------------------------------------- logical (kind=log_kind), public :: & + f_tlon = .true., f_tlat = .true., & + f_ulon = .true., f_ulat = .true., & + f_nlon = .true., f_nlat = .true., & + f_elon = .true., f_elat = .true., & f_tmask = .true., f_umask = .true., & f_nmask = .true., f_emask = .true., & f_blkmask = .true., & @@ -308,6 +314,7 @@ module ice_history_shared f_sidmasslat = 'x', & f_sndmasssnf = 'x', & f_sndmassmelt = 'x', & + f_sndmassdyn = 'x', & f_siflswdtop = 'x', & f_siflswutop = 'x', & f_siflswdbot = 'x', & @@ -362,6 +369,10 @@ module ice_history_shared !--------------------------------------------------------------- namelist / icefields_nml / & + f_tlon , f_tlat , & + f_ulon , f_ulat , & + f_nlon , f_nlat , & + f_elon , f_elat , & f_tmask , f_umask , & f_nmask , f_emask , & f_blkmask , & @@ -475,6 +486,7 @@ module ice_history_shared f_sidmasslat, & f_sndmasssnf, & f_sndmassmelt, & + f_sndmassdyn, & f_siflswdtop, & f_siflswutop, & f_siflswdbot, & @@ -529,6 +541,15 @@ module ice_history_shared !--------------------------------------------------------------- integer (kind=int_kind), parameter, public :: & + n_tlon = 1, & + n_tlat = 2, & + n_ulon = 3, & + n_ulat = 4, & + n_nlon = 5, & + n_nlat = 6, & + n_elon = 7, & + n_elat = 8, & + n_tmask = 1, & n_umask = 2, & n_nmask = 3, & @@ -665,6 +686,7 @@ module ice_history_shared n_sidmasslat, & n_sndmasssnf, & n_sndmassmelt, & + n_sndmassdyn, & n_siflswdtop, & n_siflswutop, & n_siflswdbot, & @@ -735,18 +757,22 @@ subroutine construct_filename(ncfile,suffix,ns) dt use ice_restart_shared, only: lenstr - character (char_len_long), intent(inout) :: ncfile - character (len=2), intent(in) :: suffix + character (len=*), intent(inout) :: ncfile + character (len=*), intent(in) :: suffix integer (kind=int_kind), intent(in) :: ns integer (kind=int_kind) :: iyear, imonth, iday, isec - character (len=1) :: cstream + integer (kind=int_kind) :: n + character (len=char_len) :: cstream + character (len=char_len_long), save :: ncfile_last(max_nstrm) = 'UnDefineD' character(len=*), parameter :: subname = '(construct_filename)' iyear = myear imonth = mmonth iday = mday isec = int(msec - dt,int_kind) + cstream = '' + if (hist_suffix(ns) /= 'x') cstream = hist_suffix(ns) ! construct filename if (write_ic) then @@ -771,9 +797,6 @@ subroutine construct_filename(ncfile,suffix,ns) endif endif - cstream = '' - if (hist_suffix(ns) /= 'x') cstream = hist_suffix(ns) - if (hist_avg(ns)) then ! write averaged data if (histfreq(ns) == '1' .and. histfreq_n(ns) == 1) then ! timestep write(ncfile,'(a,a,i4.4,a,i2.2,a,i2.2,a,i5.5,a,a)') & @@ -809,6 +832,25 @@ subroutine construct_filename(ncfile,suffix,ns) endif + ! Check whether the filename is already in use. + ! Same filename in multiple history streams leads to files being overwritten (not good). + ! The current filename convention means we just have to check latest filename, + ! not all filenames ever generated because of use of current model date/time in filename. + + ! write(nu_diag,'(2a,i2,1x,a)') subname, 'debug ncfile= ',ns,trim(ncfile) + do n = 1,max_nstrm + ! 
write(nu_diag,'(2a,i2,1x,a)') subname, 'debug nfile_last= ',n,trim(ncfile_last(n)) + if (ncfile == ncfile_last(n)) then + write(nu_diag,*) subname,' history stream = ',ns + write(nu_diag,*) subname,' history filename = ',trim(ncfile) + write(nu_diag,*) subname,' filename in use for stream ',n + write(nu_diag,*) subname,' filename for stream ',trim(ncfile_last(n)) + write(nu_diag,*) subname,' Use namelist hist_suffix so history filenames are unique' + call abort_ice(subname//' ERROR: history filename already used for another history stream '//trim(ncfile)) + endif + enddo + ncfile_last(ns) = ncfile + end subroutine construct_filename !======================================================================= @@ -869,7 +911,7 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & if(present(mask_ice_free_points)) l_mask_ice_free_points = mask_ice_free_points if (histfreq(ns) == 'x') then - call abort_ice(subname//'ERROR: define_hist_fields has histfreq x') + call abort_ice(subname//' ERROR: define_hist_fields has histfreq x') endif if (ns == 1) id(:) = 0 @@ -879,7 +921,7 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & if (vhistfreq(ns1:ns1) == histfreq(ns)) then if (ns1 > 1 .and. index(vhistfreq(1:ns1-1),'x') /= 0) then - call abort_ice(subname//'ERROR: history frequency variable f_' // vname // ' can''t contain ''x'' along with active frequencies') + call abort_ice(subname//' ERROR: history frequency variable f_' // vname // ' can''t contain ''x'' along with active frequencies') endif num_avail_hist_fields_tot = num_avail_hist_fields_tot + 1 @@ -909,7 +951,7 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & write(nu_diag,*) subname,' num_avail_hist_fields_tot = ',num_avail_hist_fields_tot write(nu_diag,*) subname,' max_avail_hist_fields = ',max_avail_hist_fields endif - call abort_ice(subname//'ERROR: Need in computation of max_avail_hist_fields') + call abort_ice(subname//' ERROR: Need in computation of max_avail_hist_fields') endif if (num_avail_hist_fields_tot /= & @@ -925,7 +967,7 @@ subroutine define_hist_field(id, vname, vunit, vcoord, vcellmeas, & if (my_task == master_task) then write(nu_diag,*) subname,' num_avail_hist_fields_tot = ',num_avail_hist_fields_tot endif - call abort_ice(subname//'ERROR: in num_avail_hist_fields') + call abort_ice(subname//' ERROR: in num_avail_hist_fields') endif id(ns) = num_avail_hist_fields_tot diff --git a/cicecore/cicedyn/general/ice_flux.F90 b/cicecore/cicedyn/general/ice_flux.F90 index 2d61bf642..4d19bb8b2 100644 --- a/cicecore/cicedyn/general/ice_flux.F90 +++ b/cicecore/cicedyn/general/ice_flux.F90 @@ -107,6 +107,7 @@ module ice_flux strintyE, & ! divergence of internal ice stress, y at E points (N/m^2) daidtd , & ! ice area tendency due to transport (1/s) dvidtd , & ! ice volume tendency due to transport (m/s) + dvsdtd , & ! snow volume tendency due to transport (m/s) dagedtd , & ! ice age tendency due to transport (s/s) dardg1dt, & ! rate of area loss by ridging ice (1/s) dardg2dt, & ! rate of area gain by new ridges (1/s) @@ -319,6 +320,7 @@ module ice_flux dsnow, & ! change in snow thickness (m/step-->cm/day) daidtt, & ! ice area tendency thermo. (s^-1) dvidtt, & ! ice volume tendency thermo. (m/s) + dvsdtt, & ! snow volume tendency thermo. (m/s) dagedtt,& ! ice age tendency thermo. (s/s) mlt_onset, &! day of year that sfc melting begins frz_onset, &! 
day of year that freezing begins (congel or frazil) @@ -419,6 +421,7 @@ subroutine alloc_flux strintyU (nx_block,ny_block,max_blocks), & ! divergence of internal ice stress, y (N/m^2) daidtd (nx_block,ny_block,max_blocks), & ! ice area tendency due to transport (1/s) dvidtd (nx_block,ny_block,max_blocks), & ! ice volume tendency due to transport (m/s) + dvsdtd (nx_block,ny_block,max_blocks), & ! snow volume tendency due to transport (m/s) dagedtd (nx_block,ny_block,max_blocks), & ! ice age tendency due to transport (s/s) dardg1dt (nx_block,ny_block,max_blocks), & ! rate of area loss by ridging ice (1/s) dardg2dt (nx_block,ny_block,max_blocks), & ! rate of area gain by new ridges (1/s) @@ -530,6 +533,7 @@ subroutine alloc_flux dsnow (nx_block,ny_block,max_blocks), & ! change in snow thickness (m/step-->cm/day) daidtt (nx_block,ny_block,max_blocks), & ! ice area tendency thermo. (s^-1) dvidtt (nx_block,ny_block,max_blocks), & ! ice volume tendency thermo. (m/s) + dvsdtt (nx_block,ny_block,max_blocks), & ! snow volume tendency thermo. (m/s) dagedtt (nx_block,ny_block,max_blocks), & ! ice age tendency thermo. (s/s) mlt_onset (nx_block,ny_block,max_blocks), & ! day of year that sfc melting begins frz_onset (nx_block,ny_block,max_blocks), & ! day of year that freezing begins (congel or frazil) @@ -918,7 +922,7 @@ end subroutine init_flux_ocn subroutine init_history_therm - use ice_state, only: aice, vice, trcr + use ice_state, only: aice, vice, vsno, trcr use ice_arrays_column, only: & hfreebd, hdraft, hridge, distrdg, hkeel, dkeel, lfloe, dfloe, & Cdn_atm_skin, Cdn_atm_floe, Cdn_atm_pond, Cdn_atm_rdg, & @@ -965,6 +969,7 @@ subroutine init_history_therm meltl (:,:,:) = c0 daidtt (:,:,:) = aice(:,:,:) ! temporary initial area dvidtt (:,:,:) = vice(:,:,:) ! temporary initial volume + dvsdtt (:,:,:) = vsno(:,:,:) ! temporary initial volume if (tr_iage) then dagedtt(:,:,:) = trcr(:,:,nt_iage,:) ! temporary initial age else @@ -1022,7 +1027,7 @@ end subroutine init_history_therm subroutine init_history_dyn - use ice_state, only: aice, vice, trcr, strength, divu, shear, vort + use ice_state, only: aice, vice, vsno, trcr, strength, divu, shear, vort use ice_grid, only: grid_ice logical (kind=log_kind) :: & @@ -1061,6 +1066,7 @@ subroutine init_history_dyn opening (:,:,:) = c0 daidtd (:,:,:) = aice(:,:,:) ! temporary initial area dvidtd (:,:,:) = vice(:,:,:) ! temporary initial volume + dvsdtd (:,:,:) = vsno(:,:,:) ! temporary initial volume if (tr_iage) & dagedtd (:,:,:) = trcr(:,:,nt_iage,:) ! temporary initial age fmU (:,:,:) = c0 diff --git a/cicecore/cicedyn/general/ice_init.F90 b/cicecore/cicedyn/general/ice_init.F90 index 24ac40db3..a7f84e46e 100644 --- a/cicecore/cicedyn/general/ice_init.F90 +++ b/cicecore/cicedyn/general/ice_init.F90 @@ -938,7 +938,7 @@ subroutine input_data call broadcast_scalar(histfreq_base(n), master_task) call broadcast_scalar(dumpfreq(n), master_task) call broadcast_scalar(dumpfreq_base(n), master_task) - call broadcast_scalar(hist_suffix(n), master_task) + call broadcast_scalar(hist_suffix(n), master_task) enddo call broadcast_array(hist_avg, master_task) call broadcast_array(histfreq_n, master_task) diff --git a/cicecore/cicedyn/general/ice_step_mod.F90 b/cicecore/cicedyn/general/ice_step_mod.F90 index b738e670b..2726a6101 100644 --- a/cicecore/cicedyn/general/ice_step_mod.F90 +++ b/cicecore/cicedyn/general/ice_step_mod.F90 @@ -750,7 +750,7 @@ end subroutine step_therm2 ! ! 
authors: Elizabeth Hunke, LANL - subroutine update_state (dt, daidt, dvidt, dagedt, offset) + subroutine update_state (dt, daidt, dvidt, dvsdt, dagedt, offset) use ice_domain_size, only: ncat ! use ice_grid, only: tmask @@ -766,6 +766,7 @@ subroutine update_state (dt, daidt, dvidt, dagedt, offset) real (kind=dbl_kind), dimension(:,:,:), intent(inout), optional :: & daidt, & ! change in ice area per time step dvidt, & ! change in ice volume per time step + dvsdt, & ! change in snow volume per time step dagedt ! change in ice age per time step real (kind=dbl_kind), intent(in), optional :: & @@ -827,25 +828,26 @@ subroutine update_state (dt, daidt, dvidt, dagedt, offset) nt_strata = nt_strata(:,:), & Tf = Tf(i,j,iblk)) - if (present(offset)) then - - !----------------------------------------------------------------- - ! Compute thermodynamic area and volume tendencies. - !----------------------------------------------------------------- - - daidt(i,j,iblk) = (aice(i,j,iblk) - daidt(i,j,iblk)) / dt - dvidt(i,j,iblk) = (vice(i,j,iblk) - dvidt(i,j,iblk)) / dt - if (tr_iage) then - if (offset > c0) then ! thermo - if (trcr(i,j,nt_iage,iblk) > c0) & - dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & - - dagedt(i,j,iblk) - offset) / dt - else ! dynamics - dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & - - dagedt(i,j,iblk)) / dt - endif - endif ! tr_iage - endif ! present(offset) + if (present(offset)) then + + !----------------------------------------------------------------- + ! Compute thermodynamic area and volume tendencies. + !----------------------------------------------------------------- + + if (present(daidt)) daidt(i,j,iblk) = (aice(i,j,iblk) - daidt(i,j,iblk)) / dt + if (present(dvidt)) dvidt(i,j,iblk) = (vice(i,j,iblk) - dvidt(i,j,iblk)) / dt + if (present(dvsdt)) dvsdt(i,j,iblk) = (vsno(i,j,iblk) - dvsdt(i,j,iblk)) / dt + if (present(dagedt) .and. tr_iage) then + if (offset > c0) then ! thermo + if (trcr(i,j,nt_iage,iblk) > c0) & + dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & + - dagedt(i,j,iblk) - offset) / dt + else ! dynamics + dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & + - dagedt(i,j,iblk)) / dt + endif + endif ! tr_iage + endif ! present(offset) enddo ! i enddo ! j diff --git a/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 index b16d00f07..dae187eae 100644 --- a/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_binary/ice_history_write.F90 @@ -58,7 +58,7 @@ subroutine ice_write_hist(ns) integer (kind=int_kind) :: k,n,nn,nrec,nbits character (char_len) :: title - character (char_len_long) :: ncfile(max_nstrm), hdrfile + character (char_len_long) :: ncfile, hdrfile integer (kind=int_kind) :: icategory,i_aice @@ -85,26 +85,26 @@ subroutine ice_write_hist(ns) if (my_task == master_task) then - call construct_filename(ncfile(ns),'da',ns) + call construct_filename(ncfile,'da',ns) ! add local directory path name to ncfile if (write_ic) then - ncfile(ns) = trim(incond_dir)//ncfile(ns) + ncfile = trim(incond_dir)//ncfile else - ncfile(ns) = trim(history_dir)//ncfile(ns) + ncfile = trim(history_dir)//ncfile endif - hdrfile = trim(ncfile(ns))//'.hdr' + hdrfile = trim(ncfile)//'.hdr' !----------------------------------------------------------------- ! create history files !----------------------------------------------------------------- - call ice_open(nu_history, ncfile(ns), nbits) ! direct access + call ice_open(nu_history, ncfile, nbits) ! 
direct access open(nu_hdr,file=hdrfile,form='formatted',status='unknown') ! ascii title = 'sea ice model: CICE' write (nu_hdr, 999) 'source',title,' ' - write (nu_hdr, 999) 'file name contains model date',trim(ncfile(ns)),' ' + write (nu_hdr, 999) 'file name contains model date',trim(ncfile),' ' #ifdef CESMCOUPLED write (nu_hdr, 999) 'runid',runid,' ' #endif @@ -391,7 +391,7 @@ subroutine ice_write_hist(ns) close (nu_hdr) ! header file close (nu_history) ! data file write (nu_diag,*) ' ' - write (nu_diag,*) 'Finished writing ',trim(ncfile(ns)) + write (nu_diag,*) 'Finished writing ',trim(ncfile) endif end subroutine ice_write_hist diff --git a/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 index c03bc233a..7d29fc4cc 100644 --- a/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_netcdf/ice_history_write.F90 @@ -102,7 +102,7 @@ subroutine ice_write_hist (ns) real (kind=dbl_kind) :: ltime2 character (char_len) :: title, cal_units, cal_att character (char_len) :: time_period_freq = 'none' - character (char_len_long) :: ncfile(max_nstrm) + character (char_len_long) :: ncfile real (kind=dbl_kind) :: secday, rad_to_deg integer (kind=int_kind) :: ind,boundid, lprecision @@ -113,9 +113,6 @@ subroutine ice_write_hist (ns) ! time coord TYPE(coord_attributes) :: time_coord - ! 8 coordinate variables: TLON, TLAT, ULON, ULAT, NLON, NLAT, ELON, ELAT - INTEGER (kind=int_kind), PARAMETER :: ncoord = 8 - ! 4 vertices in each grid cell INTEGER (kind=int_kind), PARAMETER :: nverts = 4 @@ -142,13 +139,13 @@ subroutine ice_write_hist (ns) if (my_task == master_task) then - call construct_filename(ncfile(ns),'nc',ns) + call construct_filename(ncfile,'nc',ns) ! add local directory path name to ncfile if (write_ic) then - ncfile(ns) = trim(incond_dir)//ncfile(ns) + ncfile = trim(incond_dir)//ncfile else - ncfile(ns) = trim(history_dir)//ncfile(ns) + ncfile = trim(history_dir)//ncfile endif ! create file @@ -164,8 +161,8 @@ subroutine ice_write_hist (ns) call abort_ice(subname//' ERROR: history_format not allowed for '//trim(history_format), & file=__FILE__, line=__LINE__) endif - status = nf90_create(ncfile(ns), iflag, ncid) - call ice_check_nc(status, subname// ' ERROR: creating history ncfile '//ncfile(ns), & + status = nf90_create(ncfile, iflag, ncid) + call ice_check_nc(status, subname// ' ERROR: creating history ncfile '//ncfile, & file=__FILE__, line=__LINE__) !----------------------------------------------------------------- @@ -263,39 +260,42 @@ subroutine ice_write_hist (ns) ! 
define information for required time-invariant variables !----------------------------------------------------------------- - ind = 0 - ind = ind + 1 - var_coord(ind) = coord_attributes('TLON', & - 'T grid center longitude', 'degrees_east') - coord_bounds(ind) = 'lont_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('TLAT', & - 'T grid center latitude', 'degrees_north') - coord_bounds(ind) = 'latt_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('ULON', & - 'U grid center longitude', 'degrees_east') - coord_bounds(ind) = 'lonu_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('ULAT', & - 'U grid center latitude', 'degrees_north') - coord_bounds(ind) = 'latu_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('NLON', & - 'N grid center longitude', 'degrees_east') - coord_bounds(ind) = 'lonn_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('NLAT', & - 'N grid center latitude', 'degrees_north') - coord_bounds(ind) = 'latn_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('ELON', & - 'E grid center longitude', 'degrees_east') - coord_bounds(ind) = 'lone_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('ELAT', & - 'E grid center latitude', 'degrees_north') - coord_bounds(ind) = 'late_bounds' + do ind = 1, ncoord + select case (ind) + case(n_tlon) + var_coord(ind) = coord_attributes('TLON', & + 'T grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lont_bounds' + case(n_tlat) + var_coord(ind) = coord_attributes('TLAT', & + 'T grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latt_bounds' + case(n_ulon) + var_coord(ind) = coord_attributes('ULON', & + 'U grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lonu_bounds' + case(n_ulat) + var_coord(ind) = coord_attributes('ULAT', & + 'U grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latu_bounds' + case(n_nlon) + var_coord(ind) = coord_attributes('NLON', & + 'N grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lonn_bounds' + case(n_nlat) + var_coord(ind) = coord_attributes('NLAT', & + 'N grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latn_bounds' + case(n_elon) + var_coord(ind) = coord_attributes('ELON', & + 'E grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lone_bounds' + case(n_elat) + var_coord(ind) = coord_attributes('ELAT', & + 'E grid center latitude', 'degrees_north') + coord_bounds(ind) = 'late_bounds' + end select + end do var_grdz(1) = coord_attributes('NCAT', 'category maximum thickness', 'm') var_grdz(2) = coord_attributes('VGRDi', 'vertical ice levels', '1') @@ -406,18 +406,20 @@ subroutine ice_write_hist (ns) dimid(3) = timid do i = 1, ncoord - call ice_hist_coord_def(ncid, var_coord(i), lprecision, dimid(1:2), varid) - call ice_write_hist_fill(ncid,varid,var_coord(i)%short_name,history_precision) - if (var_coord(i)%short_name == 'ULAT') then - status = nf90_put_att(ncid,varid,'comment', & - 'Latitude of NE corner of T grid cell') - call ice_check_nc(status, subname// ' ERROR: defining comment for '//var_coord(i)%short_name, & - file=__FILE__, line=__LINE__) - endif - if (f_bounds) then - status = nf90_put_att(ncid, varid, 'bounds', coord_bounds(i)) - call ice_check_nc(status, subname// ' ERROR: defining bounds for '//var_coord(i)%short_name, & - file=__FILE__, line=__LINE__) + if(icoord(i)) then + call ice_hist_coord_def(ncid, var_coord(i), lprecision, dimid(1:2), varid) + call ice_write_hist_fill(ncid,varid,var_coord(i)%short_name,history_precision) + if 
(var_coord(i)%short_name == 'ULAT') then + status = nf90_put_att(ncid,varid,'comment', & + 'Latitude of NE corner of T grid cell') + call ice_check_nc(status, subname// ' ERROR: defining comment for '//var_coord(i)%short_name, & + file=__FILE__, line=__LINE__) + endif + if (f_bounds) then + status = nf90_put_att(ncid, varid, 'bounds', coord_bounds(i)) + call ice_check_nc(status, subname// ' ERROR: defining bounds for '//var_coord(i)%short_name, & + file=__FILE__, line=__LINE__) + endif endif enddo @@ -707,44 +709,46 @@ subroutine ice_write_hist (ns) !----------------------------------------------------------------- do i = 1,ncoord - call broadcast_scalar(var_coord(i)%short_name,master_task) - SELECT CASE (var_coord(i)%short_name) - CASE ('TLON') - ! Convert T grid longitude from -180 -> 180 to 0 to 360 - work1 = TLON*rad_to_deg + c360 - where (work1 > c360) work1 = work1 - c360 - where (work1 < c0 ) work1 = work1 + c360 - call gather_global(work_g1,work1,master_task,distrb_info) - CASE ('TLAT') - work1 = TLAT*rad_to_deg - call gather_global(work_g1,work1,master_task,distrb_info) - CASE ('ULON') - work1 = ULON*rad_to_deg - call gather_global(work_g1,work1,master_task,distrb_info) - CASE ('ULAT') - work1 = ULAT*rad_to_deg - call gather_global(work_g1,work1,master_task,distrb_info) - CASE ('NLON') - work1 = NLON*rad_to_deg - call gather_global(work_g1,work1,master_task,distrb_info) - CASE ('NLAT') - work1 = NLAT*rad_to_deg - call gather_global(work_g1,work1,master_task,distrb_info) - CASE ('ELON') - work1 = ELON*rad_to_deg - call gather_global(work_g1,work1,master_task,distrb_info) - CASE ('ELAT') - work1 = ELAT*rad_to_deg - call gather_global(work_g1,work1,master_task,distrb_info) - END SELECT - - if (my_task == master_task) then - status = nf90_inq_varid(ncid, var_coord(i)%short_name, varid) - call ice_check_nc(status, subname// ' ERROR: getting varid for '//var_coord(i)%short_name, & - file=__FILE__, line=__LINE__) - status = nf90_put_var(ncid,varid,work_g1) - call ice_check_nc(status, subname// ' ERROR: writing'//var_coord(i)%short_name, & - file=__FILE__, line=__LINE__) + if(icoord(i)) then + call broadcast_scalar(var_coord(i)%short_name,master_task) + SELECT CASE (var_coord(i)%short_name) + CASE ('TLON') + ! 
Convert T grid longitude from -180 -> 180 to 0 to 360 + work1 = TLON*rad_to_deg + c360 + where (work1 > c360) work1 = work1 - c360 + where (work1 < c0 ) work1 = work1 + c360 + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('TLAT') + work1 = TLAT*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('ULON') + work1 = ULON*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('ULAT') + work1 = ULAT*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('NLON') + work1 = NLON*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('NLAT') + work1 = NLAT*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('ELON') + work1 = ELON*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + CASE ('ELAT') + work1 = ELAT*rad_to_deg + call gather_global(work_g1,work1,master_task,distrb_info) + END SELECT + + if (my_task == master_task) then + status = nf90_inq_varid(ncid, var_coord(i)%short_name, varid) + call ice_check_nc(status, subname// ' ERROR: getting varid for '//var_coord(i)%short_name, & + file=__FILE__, line=__LINE__) + status = nf90_put_var(ncid,varid,work_g1) + call ice_check_nc(status, subname// ' ERROR: writing'//var_coord(i)%short_name, & + file=__FILE__, line=__LINE__) + endif endif enddo @@ -1156,7 +1160,7 @@ subroutine ice_write_hist (ns) call ice_check_nc(status, subname// ' ERROR: closing netCDF history file', & file=__FILE__, line=__LINE__) write(nu_diag,*) ' ' - write(nu_diag,*) 'Finished writing ',trim(ncfile(ns)) + write(nu_diag,*) 'Finished writing ',trim(ncfile) endif #else diff --git a/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 index daebe1f2e..b8971a872 100644 --- a/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_history_write.F90 @@ -93,7 +93,7 @@ subroutine ice_write_hist (ns) character (len=8) :: cdate character (len=char_len_long) :: title, cal_units, cal_att character (len=char_len) :: time_period_freq = 'none' - character (len=char_len_long) :: ncfile(max_nstrm) + character (len=char_len_long) :: ncfile integer (kind=int_kind) :: icategory,ind,i_aice,boundid, lprecision @@ -110,9 +110,6 @@ subroutine ice_write_hist (ns) ! time coord TYPE(coord_attributes) :: time_coord - ! 8 coordinate variables: TLON, TLAT, ULON, ULAT, NLON, NLAT, ELON, ELAT - INTEGER (kind=int_kind), PARAMETER :: ncoord = 8 - ! 4 vertices in each grid cell INTEGER (kind=int_kind), PARAMETER :: nverts = 4 @@ -159,15 +156,15 @@ subroutine ice_write_hist (ns) file=__FILE__, line=__LINE__) if (my_task == master_task) then - call construct_filename(ncfile(ns),'nc',ns) + call construct_filename(ncfile,'nc',ns) ! add local directory path name to ncfile if (write_ic) then - ncfile(ns) = trim(incond_dir)//ncfile(ns) + ncfile = trim(incond_dir)//ncfile else - ncfile(ns) = trim(history_dir)//ncfile(ns) + ncfile = trim(history_dir)//ncfile endif - filename = ncfile(ns) + filename = ncfile end if call broadcast_scalar(filename, master_task) @@ -276,39 +273,42 @@ subroutine ice_write_hist (ns) ! 
define information for required time-invariant variables !----------------------------------------------------------------- - ind = 0 - ind = ind + 1 - var_coord(ind) = coord_attributes('TLON', & - 'T grid center longitude', 'degrees_east') - coord_bounds(ind) = 'lont_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('TLAT', & - 'T grid center latitude', 'degrees_north') - coord_bounds(ind) = 'latt_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('ULON', & - 'U grid center longitude', 'degrees_east') - coord_bounds(ind) = 'lonu_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('ULAT', & - 'U grid center latitude', 'degrees_north') - coord_bounds(ind) = 'latu_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('NLON', & - 'N grid center longitude', 'degrees_east') - coord_bounds(ind) = 'lonn_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('NLAT', & - 'N grid center latitude', 'degrees_north') - coord_bounds(ind) = 'latn_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('ELON', & - 'E grid center longitude', 'degrees_east') - coord_bounds(ind) = 'lone_bounds' - ind = ind + 1 - var_coord(ind) = coord_attributes('ELAT', & - 'E grid center latitude', 'degrees_north') - coord_bounds(ind) = 'late_bounds' + do ind = 1, ncoord + select case (ind) + case(n_tlon) + var_coord(ind) = coord_attributes('TLON', & + 'T grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lont_bounds' + case(n_tlat) + var_coord(ind) = coord_attributes('TLAT', & + 'T grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latt_bounds' + case(n_ulon) + var_coord(ind) = coord_attributes('ULON', & + 'U grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lonu_bounds' + case(n_ulat) + var_coord(ind) = coord_attributes('ULAT', & + 'U grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latu_bounds' + case(n_nlon) + var_coord(ind) = coord_attributes('NLON', & + 'N grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lonn_bounds' + case(n_nlat) + var_coord(ind) = coord_attributes('NLAT', & + 'N grid center latitude', 'degrees_north') + coord_bounds(ind) = 'latn_bounds' + case(n_elon) + var_coord(ind) = coord_attributes('ELON', & + 'E grid center longitude', 'degrees_east') + coord_bounds(ind) = 'lone_bounds' + case(n_elat) + var_coord(ind) = coord_attributes('ELAT', & + 'E grid center latitude', 'degrees_north') + coord_bounds(ind) = 'late_bounds' + end select + end do var_grdz(1) = coord_attributes('NCAT', 'category maximum thickness', 'm') var_grdz(2) = coord_attributes('VGRDi', 'vertical ice levels', '1') @@ -418,16 +418,18 @@ subroutine ice_write_hist (ns) dimid2(2) = jmtid do i = 1, ncoord - call ice_hist_coord_def(File, var_coord(i), lprecision, dimid2, varid) - call ice_write_hist_fill(File,varid,var_coord(i)%short_name,history_precision) - if (var_coord(i)%short_name == 'ULAT') then - call ice_pio_check(pio_put_att(File,varid,'comment', & - trim('Latitude of NE corner of T grid cell')), & - subname//' ERROR: defining att comment',file=__FILE__,line=__LINE__) - endif - if (f_bounds) then - call ice_pio_check(pio_put_att(File, varid, 'bounds', trim(coord_bounds(i))), & - subname//' ERROR: defining att bounds '//trim(coord_bounds(i)),file=__FILE__,line=__LINE__) + if (icoord(i)) then + call ice_hist_coord_def(File, var_coord(i), lprecision, dimid2, varid) + call ice_write_hist_fill(File,varid,var_coord(i)%short_name,history_precision) + if (var_coord(i)%short_name == 'ULAT') then + call 
ice_pio_check(pio_put_att(File,varid,'comment', & + trim('Latitude of NE corner of T grid cell')), & + subname//' ERROR: defining att comment',file=__FILE__,line=__LINE__) + endif + if (f_bounds) then + call ice_pio_check(pio_put_att(File, varid, 'bounds', trim(coord_bounds(i))), & + subname//' ERROR: defining att bounds '//trim(coord_bounds(i)),file=__FILE__,line=__LINE__) + endif endif enddo @@ -706,38 +708,40 @@ subroutine ice_write_hist (ns) allocate(workr2(nx_block,ny_block,nblocks)) do i = 1,ncoord - call ice_pio_check(pio_inq_varid(File, var_coord(i)%short_name, varid), & - subname//' ERROR: getting '//var_coord(i)%short_name ,file=__FILE__,line=__LINE__) - SELECT CASE (var_coord(i)%short_name) - CASE ('TLON') - ! Convert T grid longitude from -180 -> 180 to 0 to 360 - workd2(:,:,:) = mod(tlon(:,:,1:nblocks)*rad_to_deg + c360, c360) - CASE ('TLAT') - workd2(:,:,:) = tlat(:,:,1:nblocks)*rad_to_deg - CASE ('ULON') - workd2(:,:,:) = ulon(:,:,1:nblocks)*rad_to_deg - CASE ('ULAT') - workd2(:,:,:) = ulat(:,:,1:nblocks)*rad_to_deg - CASE ('NLON') - workd2(:,:,:) = nlon(:,:,1:nblocks)*rad_to_deg - CASE ('NLAT') - workd2(:,:,:) = nlat(:,:,1:nblocks)*rad_to_deg - CASE ('ELON') - workd2(:,:,:) = elon(:,:,1:nblocks)*rad_to_deg - CASE ('ELAT') - workd2(:,:,:) = elat(:,:,1:nblocks)*rad_to_deg - END SELECT - if (history_precision == 8) then - call pio_write_darray(File, varid, iodesc2d, & - workd2, status, fillval=spval_dbl) - else - workr2 = workd2 - call pio_write_darray(File, varid, iodesc2d, & - workr2, status, fillval=spval) - endif + if(icoord(i)) then + call ice_pio_check(pio_inq_varid(File, var_coord(i)%short_name, varid), & + subname//' ERROR: getting '//var_coord(i)%short_name ,file=__FILE__,line=__LINE__) + SELECT CASE (var_coord(i)%short_name) + CASE ('TLON') + ! Convert T grid longitude from -180 -> 180 to 0 to 360 + workd2(:,:,:) = mod(tlon(:,:,1:nblocks)*rad_to_deg + c360, c360) + CASE ('TLAT') + workd2(:,:,:) = tlat(:,:,1:nblocks)*rad_to_deg + CASE ('ULON') + workd2(:,:,:) = ulon(:,:,1:nblocks)*rad_to_deg + CASE ('ULAT') + workd2(:,:,:) = ulat(:,:,1:nblocks)*rad_to_deg + CASE ('NLON') + workd2(:,:,:) = nlon(:,:,1:nblocks)*rad_to_deg + CASE ('NLAT') + workd2(:,:,:) = nlat(:,:,1:nblocks)*rad_to_deg + CASE ('ELON') + workd2(:,:,:) = elon(:,:,1:nblocks)*rad_to_deg + CASE ('ELAT') + workd2(:,:,:) = elat(:,:,1:nblocks)*rad_to_deg + END SELECT + if (history_precision == 8) then + call pio_write_darray(File, varid, iodesc2d, & + workd2, status, fillval=spval_dbl) + else + workr2 = workd2 + call pio_write_darray(File, varid, iodesc2d, & + workr2, status, fillval=spval) + endif - call ice_pio_check(status,subname//' ERROR: writing '//avail_hist_fields(n)%vname, & - file=__FILE__,line=__LINE__) + call ice_pio_check(status,subname//' ERROR: writing '//avail_hist_fields(n)%vname, & + file=__FILE__,line=__LINE__) + endif enddo ! Extra dimensions (NCAT, NFSD, VGRD*) @@ -1248,7 +1252,7 @@ subroutine ice_write_hist (ns) call pio_closefile(File) if (my_task == master_task) then write(nu_diag,*) ' ' - write(nu_diag,*) 'Finished writing ',trim(ncfile(ns)) + write(nu_diag,*) 'Finished writing ',trim(ncfile) endif first_call = .false. 
diff --git a/cicecore/cicedyn/infrastructure/io/io_pio2/ice_pio.F90 b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_pio.F90 index 565e7adbb..9028fa9b7 100644 --- a/cicecore/cicedyn/infrastructure/io/io_pio2/ice_pio.F90 +++ b/cicecore/cicedyn/infrastructure/io/io_pio2/ice_pio.F90 @@ -83,7 +83,7 @@ subroutine ice_pio_init(mode, filename, File, clobber, fformat, & if ((pio_iotype==PIO_IOTYPE_NETCDF).or.(pio_iotype==PIO_IOTYPE_PNETCDF)) then nmode0 = shr_pio_getioformat(inst_name) else - nmode=0 + nmode0 = 0 endif call pio_seterrorhandling(ice_pio_subsystem, PIO_RETURN_ERROR) diff --git a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 index b67e1a223..43a1a003f 100644 --- a/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/hadgem3/CICE_RunMod.F90 @@ -140,7 +140,7 @@ subroutine ice_step use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart @@ -238,7 +238,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = dt - call update_state (dt, daidtt, dvidtt, dagedtt, offset) + call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & + dagedt=dagedtt, offset=offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -265,7 +266,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = c0 - call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) + call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & + dagedt=dagedtd, offset=offset) enddo diff --git a/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 b/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 index c9875d769..78c703c91 100644 --- a/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 +++ b/cicecore/drivers/direct/nemo_concepts/CICE_RunMod.F90 @@ -140,7 +140,7 @@ subroutine ice_step use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart @@ -238,7 +238,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = dt - call update_state (dt, daidtt, dvidtt, dagedtt, offset) + call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & + dagedt=dagedtt, offset=offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -265,7 +266,8 @@ subroutine ice_step ! 
clean up, update tendency diagnostics offset = c0 - call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) + call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & + dagedt=dagedtd, offset=offset) enddo diff --git a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 index 5836479b4..6ff6b1270 100644 --- a/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 +++ b/cicecore/drivers/mct/cesm1/CICE_RunMod.F90 @@ -132,7 +132,7 @@ subroutine ice_step use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn, kridge use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart @@ -261,7 +261,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = dt - call update_state (dt, daidtt, dvidtt, dagedtt, offset) + call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & + dagedt=dagedtt, offset=offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -302,7 +303,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = c0 - call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) + call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & + dagedt=dagedtd, offset=offset) enddo @@ -326,7 +328,7 @@ subroutine ice_step call step_snow (dt, iblk) enddo !$OMP END PARALLEL DO - call update_state (dt) ! clean up + call update_state (dt=dt) ! clean up endif !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) diff --git a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 index 483048051..c2cae81cb 100644 --- a/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/cmeps/CICE_RunMod.F90 @@ -119,7 +119,7 @@ subroutine ice_step use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn, kridge use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart @@ -250,7 +250,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = dt - call update_state (dt, daidtt, dvidtt, dagedtt, offset) + call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & + dagedt=dagedtt, offset=offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -291,7 +292,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = c0 - call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) + call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & + dagedt=dagedtd, offset=offset) enddo if (debug_model) then @@ -318,7 +320,7 @@ subroutine ice_step do iblk = 1, nblocks call step_snow (dt, iblk) enddo - call update_state (dt) ! clean up + call update_state (dt=dt) ! 
clean up endif !MHRI: CHECK THIS OMP diff --git a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 index efadabbda..6228c0bdd 100644 --- a/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 +++ b/cicecore/drivers/nuopc/cmeps/ice_comp_nuopc.F90 @@ -23,10 +23,10 @@ module ice_comp_nuopc use ice_domain_size , only : nx_global, ny_global use ice_grid , only : grid_format, init_grid2 use ice_communicate , only : init_communicate, my_task, master_task, mpi_comm_ice - use ice_calendar , only : force_restart_now, write_ic, init_calendar - use ice_calendar , only : idate, mday, mmonth, myear, year_init + use ice_calendar , only : force_restart_now, write_ic + use ice_calendar , only : idate, idate0, mday, mmonth, myear, year_init, month_init, day_init use ice_calendar , only : msec, dt, calendar, calendar_type, nextsw_cday, istep - use ice_calendar , only : ice_calendar_noleap, ice_calendar_gregorian + use ice_calendar , only : ice_calendar_noleap, ice_calendar_gregorian, use_leap_years use ice_kinds_mod , only : dbl_kind, int_kind, char_len, char_len_long use ice_fileunits , only : nu_diag, nu_diag_set, inst_index, inst_name use ice_fileunits , only : inst_suffix, release_all_fileunits, flush_fileunit @@ -676,6 +676,8 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) if(mastertask) write(nu_diag,*) trim(subname)//'WARNING: pio_typename from driver needs to be set for netcdf output to work' end if + + #else ! Read the cice namelist as part of the call to cice_init1 @@ -789,7 +791,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call cice_init2() call t_stopf ('cice_init2') !--------------------------------------------------------------------------- - ! use EClock to reset calendar information on initial start + ! use EClock to reset calendar information !--------------------------------------------------------------------------- ! - on initial run @@ -805,7 +807,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (ref_ymd /= start_ymd .or. ref_tod /= start_tod) then if (my_task == master_task) then write(nu_diag,*) trim(subname),': ref_ymd ',ref_ymd, ' must equal start_ymd ',start_ymd - write(nu_diag,*) trim(subname),': ref_ymd ',ref_tod, ' must equal start_ymd ',start_tod + write(nu_diag,*) trim(subname),': ref_tod',ref_tod, ' must equal start_tod ',start_tod end if end if @@ -837,6 +839,19 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) end if + ! - start time from ESMF clock. Used to set history time units + idate0 = start_ymd + year_init = (idate0/10000) + month_init= (idate0-year_init*10000)/100 ! integer month of basedate + day_init = idate0-year_init*10000-month_init*100 + + ! - Set use_leap_years based on calendar (as some CICE calls use this instead of the calendar type) + if (calendar_type == ice_calendar_gregorian) then + use_leap_years = .true. + else + use_leap_years = .false. ! no_leap calendars + endif + call calendar() ! 
update calendar info !---------------------------------------------------------------------------- diff --git a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 index 897f62eea..5f8fb52bc 100644 --- a/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 +++ b/cicecore/drivers/nuopc/dmi/CICE_RunMod.F90 @@ -154,7 +154,7 @@ subroutine ice_step use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn, kridge use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart @@ -273,7 +273,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = dt - call update_state (dt, daidtt, dvidtt, dagedtt, offset) + call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & + dagedt=dagedtt, offset=offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -314,7 +315,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = c0 - call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) + call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & + dagedt=dagedtd, offset=offset) enddo @@ -338,7 +340,7 @@ subroutine ice_step call step_snow (dt, iblk) enddo !$OMP END PARALLEL DO - call update_state (dt) ! clean up + call update_state (dt=dt) ! clean up endif !MHRI: CHECK THIS OMP diff --git a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 index 42514e06c..59213f728 100644 --- a/cicecore/drivers/standalone/cice/CICE_RunMod.F90 +++ b/cicecore/drivers/standalone/cice/CICE_RunMod.F90 @@ -146,7 +146,7 @@ subroutine ice_step use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn, kridge use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart @@ -265,7 +265,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = dt - call update_state (dt, daidtt, dvidtt, dagedtt, offset) + call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & + dagedt=dagedtt, offset=offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -306,7 +307,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = c0 - call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) + call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & + dagedt=dagedtd, offset=offset) enddo @@ -330,7 +332,7 @@ subroutine ice_step call step_snow (dt, iblk) enddo !$OMP END PARALLEL DO - call update_state (dt) ! clean up + call update_state (dt=dt) ! 
clean up endif !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) diff --git a/cicecore/drivers/unittest/opticep/CICE_InitMod.F90 b/cicecore/drivers/unittest/opticep/CICE_InitMod.F90 index cb1241a5e..194293118 100644 --- a/cicecore/drivers/unittest/opticep/CICE_InitMod.F90 +++ b/cicecore/drivers/unittest/opticep/CICE_InitMod.F90 @@ -66,7 +66,7 @@ subroutine cice_init floe_binwidth, c_fsd_range use ice_state, only: alloc_state use ice_flux_bgc, only: alloc_flux_bgc - use ice_calendar, only: dt, dt_dyn, write_ic, & + use ice_calendar, only: dt, write_ic, & init_calendar, advance_timestep, calc_timesteps use ice_communicate, only: init_communicate, my_task, master_task use ice_diagnostics, only: init_diags @@ -244,6 +244,7 @@ subroutine cice_init call init_flux_ocn ! initialize ocean fluxes sent to coupler call dealloc_grid ! deallocate temporary grid arrays + if (my_task == master_task) then call ice_memusage_print(nu_diag,subname//':end') endif diff --git a/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 b/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 index 42514e06c..59213f728 100644 --- a/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 +++ b/cicecore/drivers/unittest/opticep/CICE_RunMod.F90 @@ -146,7 +146,7 @@ subroutine ice_step use ice_dyn_eap, only: write_restart_eap use ice_dyn_shared, only: kdyn, kridge use ice_flux, only: scale_factor, init_history_therm, & - daidtt, daidtd, dvidtt, dvidtd, dagedtt, dagedtd + daidtt, daidtd, dvidtt, dvidtd, dvsdtt, dvsdtd, dagedtt, dagedtd use ice_history, only: accum_hist use ice_history_bgc, only: init_history_bgc use ice_restart, only: final_restart @@ -265,7 +265,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = dt - call update_state (dt, daidtt, dvidtt, dagedtt, offset) + call update_state (dt=dt, daidt=daidtt, dvidt=dvidtt, dvsdt=dvsdtt, & + dagedt=dagedtt, offset=offset) call ice_timer_stop(timer_thermo) ! thermodynamics call ice_timer_stop(timer_column) ! column physics @@ -306,7 +307,8 @@ subroutine ice_step ! clean up, update tendency diagnostics offset = c0 - call update_state (dt_dyn, daidtd, dvidtd, dagedtd, offset) + call update_state (dt=dt_dyn, daidt=daidtd, dvidt=dvidtd, dvsdt=dvsdtd, & + dagedt=dagedtd, offset=offset) enddo @@ -330,7 +332,7 @@ subroutine ice_step call step_snow (dt, iblk) enddo !$OMP END PARALLEL DO - call update_state (dt) ! clean up + call update_state (dt=dt) ! clean up endif !$OMP PARALLEL DO PRIVATE(iblk) SCHEDULE(runtime) diff --git a/cicecore/drivers/unittest/opticep/ice_step_mod.F90 b/cicecore/drivers/unittest/opticep/ice_step_mod.F90 index 370fde6be..64320e601 100644 --- a/cicecore/drivers/unittest/opticep/ice_step_mod.F90 +++ b/cicecore/drivers/unittest/opticep/ice_step_mod.F90 @@ -752,7 +752,7 @@ end subroutine step_therm2 ! ! authors: Elizabeth Hunke, LANL - subroutine update_state (dt, daidt, dvidt, dagedt, offset) + subroutine update_state (dt, daidt, dvidt, dvsdt, dagedt, offset) use ice_domain_size, only: ncat ! use ice_grid, only: tmask @@ -768,6 +768,7 @@ subroutine update_state (dt, daidt, dvidt, dagedt, offset) real (kind=dbl_kind), dimension(:,:,:), intent(inout), optional :: & daidt, & ! change in ice area per time step dvidt, & ! change in ice volume per time step + dvsdt, & ! change in snow volume per time step dagedt ! 
change in ice age per time step real (kind=dbl_kind), intent(in), optional :: & @@ -829,25 +830,26 @@ subroutine update_state (dt, daidt, dvidt, dagedt, offset) nt_strata = nt_strata(:,:), & Tf = Tf(i,j,iblk)) - if (present(offset)) then - - !----------------------------------------------------------------- - ! Compute thermodynamic area and volume tendencies. - !----------------------------------------------------------------- - - daidt(i,j,iblk) = (aice(i,j,iblk) - daidt(i,j,iblk)) / dt - dvidt(i,j,iblk) = (vice(i,j,iblk) - dvidt(i,j,iblk)) / dt - if (tr_iage) then - if (offset > c0) then ! thermo - if (trcr(i,j,nt_iage,iblk) > c0) & - dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & - - dagedt(i,j,iblk) - offset) / dt - else ! dynamics - dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & - - dagedt(i,j,iblk)) / dt - endif - endif ! tr_iage - endif ! present(offset) + if (present(offset)) then + + !----------------------------------------------------------------- + ! Compute thermodynamic area and volume tendencies. + !----------------------------------------------------------------- + + if (present(daidt)) daidt(i,j,iblk) = (aice(i,j,iblk) - daidt(i,j,iblk)) / dt + if (present(dvidt)) dvidt(i,j,iblk) = (vice(i,j,iblk) - dvidt(i,j,iblk)) / dt + if (present(dvsdt)) dvsdt(i,j,iblk) = (vsno(i,j,iblk) - dvsdt(i,j,iblk)) / dt + if (tr_iage .and. present(dagedt)) then + if (offset > c0) then ! thermo + if (trcr(i,j,nt_iage,iblk) > c0) & + dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & + - dagedt(i,j,iblk) - offset) / dt + else ! dynamics + dagedt(i,j,iblk) = (trcr(i,j,nt_iage,iblk) & + - dagedt(i,j,iblk)) / dt + endif + endif ! tr_iage + endif ! present(offset) enddo ! i enddo ! j diff --git a/configuration/scripts/cice.batch.csh b/configuration/scripts/cice.batch.csh index 33b27cbf8..50ef665bd 100755 --- a/configuration/scripts/cice.batch.csh +++ b/configuration/scripts/cice.batch.csh @@ -83,7 +83,7 @@ cat >> ${jobfile} << EOFB #PBS -l walltime=${batchtime} EOFB -else if (${ICE_MACHINE} =~ gaffney* || ${ICE_MACHINE} =~ koehr* || ${ICE_MACHINE} =~ mustang*) then +else if (${ICE_MACHINE} =~ gaffney* || ${ICE_MACHINE} =~ koehr* || ${ICE_MACHINE} =~ mustang* || ${ICE_MACHINE} =~ carpenter*) then cat >> ${jobfile} << EOFB #PBS -N ${shortcase} #PBS -q ${queue} diff --git a/configuration/scripts/cice.launch.csh b/configuration/scripts/cice.launch.csh index f8347e101..51c8f044f 100755 --- a/configuration/scripts/cice.launch.csh +++ b/configuration/scripts/cice.launch.csh @@ -94,6 +94,26 @@ cat >> ${jobfile} << EOFR aprun -q -n ${ntasks} -N ${taskpernodelimit} -d ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE EOFR +#======= +else if (${ICE_MACHCOMP} =~ carpenter*) then +if (${ICE_COMMDIR} =~ serial*) then +cat >> ${jobfile} << EOFR +./cice >&! \$ICE_RUNLOG_FILE +EOFR +else + +if (${ICE_ENVNAME} =~ intelimpi* || ${ICE_ENVNAME} =~ gnuimpi*) then +cat >> ${jobfile} << EOFR +mpiexec -n ${ntasks} -ppn ${taskpernodelimit} ./cice >&! \$ICE_RUNLOG_FILE +EOFR +else +cat >> ${jobfile} << EOFR +mpiexec --cpu-bind depth -n ${ntasks} -ppn ${taskpernodelimit} -d ${nthrds} ./cice >&! \$ICE_RUNLOG_FILE +EOFR +endif + +endif + #======= else if (${ICE_MACHCOMP} =~ cori* || ${ICE_MACHCOMP} =~ perlmutter*) then if (${ICE_COMMDIR} =~ serial*) then diff --git a/configuration/scripts/ice_in b/configuration/scripts/ice_in index 103c56d2a..63a97d7d8 100644 --- a/configuration/scripts/ice_in +++ b/configuration/scripts/ice_in @@ -471,6 +471,14 @@ / &icefields_nml + f_tlon = .true. + f_tlat = .true. + f_ulon = .true. + f_ulat = .true. 
+ f_nlon = .true. + f_nlat = .true. + f_elon = .true. + f_elat = .true. f_tmask = .true. f_umask = .false. f_nmask = .false. diff --git a/configuration/scripts/machines/Macros.carpenter_cray b/configuration/scripts/machines/Macros.carpenter_cray new file mode 100644 index 000000000..1c8fb50ca --- /dev/null +++ b/configuration/scripts/machines/Macros.carpenter_cray @@ -0,0 +1,60 @@ +#============================================================================== +# Macros file for ERDC carpenter, cray compiler +#============================================================================== + +CPP := ftn -e P +CPPDEFS := -DFORTRANUNDERSCORE -DNO_R16 ${ICE_CPPDEFS} +CFLAGS := -c -O2 + +FIXEDFLAGS := -132 +FREEFLAGS := +FFLAGS := -hbyteswapio +FFLAGS_NOOPT:= -O0 +LDFLAGS := -hbyteswapio + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -hfp0 -g -Rbcdps -Ktrap=fp +else + FFLAGS += -O2 -hfp0 # -eo +endif + +SCC := cc +SFC := ftn +MPICC := cc +MPIFC := ftn + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +# defined by module +#NETCDF_PATH := $(NETCDF) +#PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default +#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib + +#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +INCLDIR := $(INCLDIR) +#INCLDIR += -I$(NETCDF_PATH)/include + +#LIB_NETCDF := $(NETCDF_PATH)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) +#SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -fopenmp + CFLAGS += -fopenmp + FFLAGS += -fopenmp +else + LDFLAGS += -hnoomp +# CFLAGS += -hnoomp + FFLAGS += -hnoomp +endif + diff --git a/configuration/scripts/machines/Macros.carpenter_gnu b/configuration/scripts/machines/Macros.carpenter_gnu new file mode 100644 index 000000000..61efa80c2 --- /dev/null +++ b/configuration/scripts/machines/Macros.carpenter_gnu @@ -0,0 +1,69 @@ +#============================================================================== +# Macros file for ERDC carpenter, gnu compiler +#============================================================================== + +CPP := ftn -E +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c + +FIXEDFLAGS := -ffixed-line-length-132 +FREEFLAGS := -ffree-form +FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none -fallow-argument-mismatch +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow + CFLAGS += -O0 +endif + +ifeq ($(ICE_COVERAGE), true) + FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage + CFLAGS += -O0 -g -coverage + LDFLAGS += -g -ftest-coverage -fprofile-arcs +endif + +ifneq ($(ICE_BLDDEBUG), true) +ifneq ($(ICE_COVERAGE), true) + FFLAGS += -O2 + CFLAGS += -O2 +endif +endif + +#SCC := gcc +#SFC := gfortran +SCC := cc +SFC := ftn +MPICC := cc +MPIFC := ftn + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +# defined by module +#NETCDF_PATH := $(NETCDF) +#PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default +#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib + +#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +INCLDIR := $(INCLDIR) +INCLDIR += -I$(NETCDF_PATH)/include + +LIB_NETCDF := $(NETCDF_PATH)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq 
($(ICE_THREADED), true) + LDFLAGS += -fopenmp + CFLAGS += -fopenmp + FFLAGS += -fopenmp +endif + diff --git a/configuration/scripts/machines/Macros.carpenter_gnuimpi b/configuration/scripts/machines/Macros.carpenter_gnuimpi new file mode 100644 index 000000000..ef0c5e96a --- /dev/null +++ b/configuration/scripts/machines/Macros.carpenter_gnuimpi @@ -0,0 +1,69 @@ +#============================================================================== +# Macros file for NAVYDSRC narwhal, gnu compiler +#============================================================================== + +CPP := ftn -E +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c + +FIXEDFLAGS := -ffixed-line-length-132 +FREEFLAGS := -ffree-form +FFLAGS := -fconvert=big-endian -fbacktrace -ffree-line-length-none -fallow-argument-mismatch +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -fcheck=bounds -finit-real=nan -fimplicit-none -ffpe-trap=invalid,zero,overflow + CFLAGS += -O0 +endif + +ifeq ($(ICE_COVERAGE), true) + FFLAGS += -O0 -g -fprofile-arcs -ftest-coverage + CFLAGS += -O0 -g -coverage + LDFLAGS += -g -ftest-coverage -fprofile-arcs +endif + +ifneq ($(ICE_BLDDEBUG), true) +ifneq ($(ICE_COVERAGE), true) + FFLAGS += -O2 + CFLAGS += -O2 +endif +endif + +#SCC := gcc +#SFC := gfortran +SCC := mpicc +SFC := mpif90 +MPICC := mpicc +MPIFC := mpif90 + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +# defined by module +#NETCDF_PATH := $(NETCDF) +#PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default +#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib + +#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +INCLDIR := $(INCLDIR) +INCLDIR += -I$(NETCDF_PATH)/include + +LIB_NETCDF := $(NETCDF_PATH)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -fopenmp + CFLAGS += -fopenmp + FFLAGS += -fopenmp +endif + diff --git a/configuration/scripts/machines/Macros.carpenter_intel b/configuration/scripts/machines/Macros.carpenter_intel new file mode 100644 index 000000000..d53f959e4 --- /dev/null +++ b/configuration/scripts/machines/Macros.carpenter_intel @@ -0,0 +1,59 @@ +#============================================================================== +# Macros file for ERDC carpenter, intel compiler +#============================================================================== + +CPP := fpp +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 -fp-model precise -fcommon + +FIXEDFLAGS := -132 +FREEFLAGS := -FR +FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback +# -mcmodel medium -shared-intel +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays +else + FFLAGS += -O2 +endif + +#SCC := icx +#SFC := ifort +SCC := cc +SFC := ftn +MPICC := cc +MPIFC := ftn + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +# defined by module +#NETCDF_PATH := $(NETCDF) +#PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default +#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib + +#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +INCLDIR := $(INCLDIR) 
+INCLDIR += -I$(NETCDF_PATH)/include + +LIB_NETCDF := $(NETCDF_PATH)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + CFLAGS += -qopenmp + FFLAGS += -qopenmp +endif + diff --git a/configuration/scripts/machines/Macros.carpenter_intelimpi b/configuration/scripts/machines/Macros.carpenter_intelimpi new file mode 100644 index 000000000..0c1aa5812 --- /dev/null +++ b/configuration/scripts/machines/Macros.carpenter_intelimpi @@ -0,0 +1,59 @@ +#============================================================================== +# Macros file for ERDC carpenter, intel compiler, intel mpi +#============================================================================== + +CPP := fpp +CPPDEFS := -DFORTRANUNDERSCORE ${ICE_CPPDEFS} +CFLAGS := -c -O2 -fp-model precise -fcommon + +FIXEDFLAGS := -132 +FREEFLAGS := -FR +FFLAGS := -fp-model precise -convert big_endian -assume byterecl -ftz -traceback +# -mcmodel medium -shared-intel +FFLAGS_NOOPT:= -O0 + +ifeq ($(ICE_BLDDEBUG), true) + FFLAGS += -O0 -g -check uninit -check bounds -check pointers -fpe0 -check noarg_temp_created +# FFLAGS += -O0 -g -check all -fpe0 -ftrapuv -fp-model except -check noarg_temp_created -init=snan,arrays +else + FFLAGS += -O2 +endif + +#SCC := icx +#SFC := ifort +SCC := mpiicc +SFC := mpiifort +MPICC := mpiicc +MPIFC := mpiifort + +ifeq ($(ICE_COMMDIR), mpi) + FC := $(MPIFC) + CC := $(MPICC) +else + FC := $(SFC) + CC := $(SCC) +endif +LD:= $(FC) + +# defined by module +#NETCDF_PATH := $(NETCDF) +#PNETCDF_PATH := $(PNETCDF) +#PNETCDF_PATH := /glade/apps/opt/pnetcdf/1.3.0/intel/default +#LAPACK_LIBDIR := /glade/apps/opt/lapack/3.4.2/intel/12.1.5/lib + +#PIO_CONFIG_OPTS:= --enable-filesystem-hints=gpfs + +INCLDIR := $(INCLDIR) +INCLDIR += -I$(NETCDF_PATH)/include + +LIB_NETCDF := $(NETCDF_PATH)/lib +#LIB_PNETCDF := $(PNETCDF_PATH)/lib +#LIB_MPI := $(IMPILIBDIR) +SLIBS := -L$(LIB_NETCDF) -lnetcdf -lnetcdff + +ifeq ($(ICE_THREADED), true) + LDFLAGS += -qopenmp + CFLAGS += -qopenmp + FFLAGS += -qopenmp +endif + diff --git a/configuration/scripts/machines/env.carpenter_cray b/configuration/scripts/machines/env.carpenter_cray new file mode 100644 index 000000000..d2c832d8f --- /dev/null +++ b/configuration/scripts/machines/env.carpenter_cray @@ -0,0 +1,54 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module unload PrgEnv-cray +module unload PrgEnv-gnu +module unload PrgEnv-intel +module unload PrgEnv-pgi +module load PrgEnv-cray/8.4.0 + +module unload cce +module load cce/16.0.0 + +module unload cray-mpich +module load cray-mpich/8.1.26 + +module unload cray-hdf5 +module unload cray-hdf5-parallel +module unload cray-netcdf-hdf5parallel +module unload cray-parallel-netcdf +module unload netcdf +module load cray-netcdf/4.9.0.3 +module load cray-hdf5/1.12.2.3 + +setenv NETCDF_PATH ${NETCDF_DIR} +limit coredumpsize unlimited +limit stacksize unlimited +setenv OMP_STACKSIZE 128M +setenv OMP_WAIT_POLICY PASSIVE +setenv FI_CXI_RX_MATCH_MODE hybrid + +endif + +setenv ICE_MACHINE_MACHNAME carpenter +setenv ICE_MACHINE_MACHINFO "Cray EX4000 AMD 9654 Genoa 2.1GHz, Slingshot Interconnect" +setenv ICE_MACHINE_ENVNAME cray +setenv ICE_MACHINE_ENVINFO "Cray Fortran/Clang 16.0.0, cray-mpich/8.1.26, netcdf/4.9.0.3" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA 
/p/app/unsupported/RASM/cice_consortium +setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub " +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "debug" +setenv ICE_MACHINE_TPNODE 192 # tasks per node +setenv ICE_MACHINE_BLDTHRDS 12 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.carpenter_gnu b/configuration/scripts/machines/env.carpenter_gnu new file mode 100644 index 000000000..96a04072f --- /dev/null +++ b/configuration/scripts/machines/env.carpenter_gnu @@ -0,0 +1,58 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module unload PrgEnv-cray +module unload PrgEnv-gnu +module unload PrgEnv-intel +module unload PrgEnv-pgi +module load PrgEnv-gnu/8.4.0 + +module unload gcc +module load gcc/12.2.0 + +module unload cray-mpich +module unload mpi +module unload openmpi +module load cray-mpich/8.1.26 +#module load openmpi/4.1.6 +#module load mpi/2021.11 + +module unload cray-hdf5 +module unload cray-hdf5-parallel +module unload cray-netcdf-hdf5parallel +module unload cray-parallel-netcdf +module unload netcdf +module load cray-netcdf/4.9.0.3 +module load cray-hdf5/1.12.2.3 + +setenv NETCDF_PATH ${NETCDF_DIR} +limit coredumpsize unlimited +limit stacksize unlimited +setenv OMP_STACKSIZE 128M +setenv OMP_WAIT_POLICY PASSIVE +setenv FI_CXI_RX_MATCH_MODE hybrid + +endif + +setenv ICE_MACHINE_MACHNAME carpenter +setenv ICE_MACHINE_MACHINFO "Cray EX4000 AMD 9654 Genoa 2.1GHz, Slingshot Interconnect" +setenv ICE_MACHINE_ENVNAME gnu +setenv ICE_MACHINE_ENVINFO "gnu gcc 12.2.0 20220819, mpich/8.1.26, netcdf/4.9.0.3" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium +setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub " +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "debug" +setenv ICE_MACHINE_TPNODE 192 # tasks per node +setenv ICE_MACHINE_BLDTHRDS 12 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.carpenter_gnuimpi b/configuration/scripts/machines/env.carpenter_gnuimpi new file mode 100644 index 000000000..f21bf97a5 --- /dev/null +++ b/configuration/scripts/machines/env.carpenter_gnuimpi @@ -0,0 +1,58 @@ +#!/bin/csh -f + +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module unload PrgEnv-cray +module unload PrgEnv-gnu +module unload PrgEnv-intel +module unload PrgEnv-pgi +module load PrgEnv-gnu/8.4.0 + +module unload gcc +module load gcc/11.2.0 + +module unload cray-mpich +module unload mpi +module unload openmpi +#module load cray-mpich/8.1.26 +#module load openmpi/4.1.6 +module load mpi/2021.11 + +module unload cray-hdf5 +module unload cray-hdf5-parallel +module unload cray-netcdf-hdf5parallel +module unload cray-parallel-netcdf +module unload netcdf +module load cray-netcdf/4.9.0.3 +module load cray-hdf5/1.12.2.3 + +setenv NETCDF_PATH ${NETCDF_DIR} +limit coredumpsize unlimited +limit stacksize unlimited +setenv OMP_STACKSIZE 128M +setenv OMP_WAIT_POLICY PASSIVE +setenv FI_CXI_RX_MATCH_MODE hybrid + +endif + +setenv ICE_MACHINE_MACHNAME carpenter +setenv ICE_MACHINE_MACHINFO "Cray EX4000 AMD 9654 Genoa 2.1GHz, Slingshot Interconnect" +setenv ICE_MACHINE_ENVNAME gnuimpi +setenv ICE_MACHINE_ENVINFO "gnu gcc 11.2.0 20210728, intel mpi 2021.11, 
netcdf/4.9.0.3" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium +setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub " +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "debug" +setenv ICE_MACHINE_TPNODE 192 # tasks per node +setenv ICE_MACHINE_BLDTHRDS 12 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.carpenter_intel b/configuration/scripts/machines/env.carpenter_intel new file mode 100644 index 000000000..c97a7d25a --- /dev/null +++ b/configuration/scripts/machines/env.carpenter_intel @@ -0,0 +1,57 @@ +#!/bin/csh -f +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module unload PrgEnv-cray +module unload PrgEnv-gnu +module unload PrgEnv-intel +module unload PrgEnv-pgi +module load PrgEnv-intel/8.4.0 + +module unload intel +module load intel/2023.0.0 + +module unload cray-mpich +module unload mpi +module unload openmpi +module load cray-mpich/8.1.26 +#module load mpi/2021.11 +#module load openmpi/4.1.6 + +module unload cray-hdf5 +module unload cray-hdf5-parallel +module unload cray-netcdf-hdf5parallel +module unload cray-parallel-netcdf +module unload netcdf +module load cray-netcdf/4.9.0.3 +module load cray-hdf5/1.12.2.3 + +setenv NETCDF_PATH ${NETCDF_DIR} +limit coredumpsize unlimited +limit stacksize unlimited +setenv OMP_STACKSIZE 128M +setenv OMP_WAIT_POLICY PASSIVE +setenv FI_CXI_RX_MATCH_MODE hybrid + +endif + +setenv ICE_MACHINE_MACHNAME carpenter +setenv ICE_MACHINE_MACHINFO "Cray EX4000 AMD 9654 Genoa 2.1GHz, Slingshot Interconnect" +setenv ICE_MACHINE_ENVNAME intel +setenv ICE_MACHINE_ENVINFO "ifort 2021.8.0 20221119, cray-mpich/8.1.26, netcdf/4.9.0.3" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium +setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub " +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "debug" +setenv ICE_MACHINE_TPNODE 192 # tasks per node +setenv ICE_MACHINE_BLDTHRDS 12 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/machines/env.carpenter_intelimpi b/configuration/scripts/machines/env.carpenter_intelimpi new file mode 100644 index 000000000..25385b09a --- /dev/null +++ b/configuration/scripts/machines/env.carpenter_intelimpi @@ -0,0 +1,57 @@ +#!/bin/csh -f +set inp = "undefined" +if ($#argv == 1) then + set inp = $1 +endif + +if ("$inp" != "-nomodules") then + +source ${MODULESHOME}/init/csh + +module unload PrgEnv-cray +module unload PrgEnv-gnu +module unload PrgEnv-intel +module unload PrgEnv-pgi +module load PrgEnv-intel/8.4.0 + +module unload intel +module load intel/2023.0.0 + +module unload cray-mpich +module unload mpi +module unload openmpi +#module load cray-mpich/8.1.26 +module load mpi/2021.11 +#module load openmpi/4.1.6 + +module unload cray-hdf5 +module unload cray-hdf5-parallel +module unload cray-netcdf-hdf5parallel +module unload cray-parallel-netcdf +module unload netcdf +module load cray-netcdf/4.9.0.3 +module load cray-hdf5/1.12.2.3 + +setenv NETCDF_PATH ${NETCDF_DIR} +limit coredumpsize unlimited +limit stacksize unlimited +setenv OMP_STACKSIZE 128M +setenv OMP_WAIT_POLICY PASSIVE +setenv FI_CXI_RX_MATCH_MODE hybrid + +endif + +setenv ICE_MACHINE_MACHNAME carpenter +setenv ICE_MACHINE_MACHINFO "Cray EX4000 AMD 9654 
Genoa 2.1GHz, Slingshot Interconnect" +setenv ICE_MACHINE_ENVNAME intelimpi +setenv ICE_MACHINE_ENVINFO "ifort 2021.8.0 20221119, intel mpi 2021.11, netcdf/4.9.0.3" +setenv ICE_MACHINE_MAKE gmake +setenv ICE_MACHINE_WKDIR $WORKDIR/CICE_RUNS +setenv ICE_MACHINE_INPUTDATA /p/app/unsupported/RASM/cice_consortium +setenv ICE_MACHINE_BASELINE $WORKDIR/CICE_BASELINE +setenv ICE_MACHINE_SUBMIT "qsub " +setenv ICE_MACHINE_ACCT P00000000 +setenv ICE_MACHINE_QUEUE "debug" +setenv ICE_MACHINE_TPNODE 192 # tasks per node +setenv ICE_MACHINE_BLDTHRDS 12 +setenv ICE_MACHINE_QSTAT "qstat " diff --git a/configuration/scripts/options/set_nml.histinst b/configuration/scripts/options/set_nml.histinst index 31d566d76..34000f635 100644 --- a/configuration/scripts/options/set_nml.histinst +++ b/configuration/scripts/options/set_nml.histinst @@ -1 +1,2 @@ hist_avg = .false.,.false.,.false.,.false.,.false. +hist_suffix = '1','2','3','4','5' diff --git a/doc/source/user_guide/ug_case_settings.rst b/doc/source/user_guide/ug_case_settings.rst index b8bde525d..9f1f8a259 100644 --- a/doc/source/user_guide/ug_case_settings.rst +++ b/doc/source/user_guide/ug_case_settings.rst @@ -186,7 +186,6 @@ setup_nml "", "zero", "restart output frequency relative to year-month-day of 0000-01-01", "" "``dumpfreq_n``", "integer array", "write restart frequency with ``dumpfreq``", "1,1,1,1,1" "``dump_last``", "logical", "write restart on last time step of simulation", "``.false.``" - "``hist_avg``", "logical", "write time-averaged data", "``.true.,.true.,.true.,.true.,.true.``" "``histfreq``", "``d``", "write history every ``histfreq_n`` days", "'1','h','d','m','y'" "", "``h``", "write history every ``histfreq_n`` hours", "" "", "``m``", "write history every ``histfreq_n`` months", "" @@ -218,6 +217,7 @@ setup_nml "", "subset", "subset io rearranger option for history output", "" "``history_root``", "integer", "pe root task for history output with history_iotasks and history_stride (PIO only), -99=internal default", "-99" "``history_stride``", "integer", "pe stride for history output with history_iotasks and history_root (PIO only), -99=internal default", "-99" + "``hist_avg``", "logical", "write time-averaged data", "``.true.,.true.,.true.,.true.,.true.``" "``hist_suffix``", "character array", "appended to history_file when not x", "``x,x,x,x,x``" "``hist_time_axis``","character","history file time axis interval location: begin, middle, end","end" "``ice_ic``", "``default``", "equal to internal", "``default``" diff --git a/doc/source/user_guide/ug_implementation.rst b/doc/source/user_guide/ug_implementation.rst index c243616d2..7d172e91d 100644 --- a/doc/source/user_guide/ug_implementation.rst +++ b/doc/source/user_guide/ug_implementation.rst @@ -132,11 +132,11 @@ This is shown in Figure :ref:`fig-Cgrid`. The user has several ways to initialize the grid: *popgrid* reads grid lengths and other parameters for a nonuniform grid (including tripole and regional grids), and *rectgrid* creates a regular rectangular grid. 
-The input files **global\_gx3.grid** and **global\_gx3.kmt** contain the +The input files **global_gx3.grid** and **global_gx3.kmt** contain the :math:`\left<3^\circ\right>` POP grid and land mask; -**global\_gx1.grid** and **global\_gx1.kmt** contain the -:math:`\left<1^\circ\right>` grid and land mask, and **global\_tx1.grid** -and **global\_tx1.kmt** contain the :math:`\left<1^\circ\right>` POP +**global_gx1.grid** and **global_gx1.kmt** contain the +:math:`\left<1^\circ\right>` grid and land mask, and **global_tx1.grid** +and **global_tx1.kmt** contain the :math:`\left<1^\circ\right>` POP tripole grid and land mask. These are binary unformatted, direct access, Big Endian files. @@ -183,7 +183,7 @@ block distribution are ``nx_block`` :math:`\times`\ ``ny_block``. The physical portion of a subdomain is indexed as [``ilo:ihi``, ``jlo:jhi``], with nghost “ghost” or “halo" cells outside the domain used for boundary conditions. These parameters are illustrated in :ref:`fig-grid` in one -dimension. The routines *global\_scatter* and *global\_gather* +dimension. The routines *global_scatter* and *global_gather* distribute information from the global domain to the local domains and back, respectively. If MPI is not being used for grid decomposition in the ice model, these routines simply adjust the indexing on the global @@ -215,7 +215,7 @@ four subdomains. The user sets the ``NTASKS`` and ``NTHRDS`` settings in **cice.settings** and chooses a block size ``block_size_x`` :math:`\times`\ ``block_size_y``, ``max_blocks``, and decomposition information ``distribution_type``, ``processor_shape``, -and ``distribution_type`` in **ice\_in**. That information is used to +and ``distribution_type`` in **ice_in**. That information is used to determine how the blocks are distributed across the processors, and how the processors are distributed across the grid domain. The model is parallelized over blocks @@ -223,18 +223,18 @@ for both MPI and OpenMP. Some suggested combinations for these parameters for best performance are given in Section :ref:`performance`. The script **cice.setup** computes some default decompositions and layouts but the user can overwrite the defaults by manually changing the values in -`ice\_in`. At runtime, the model will print decomposition +`ice_in`. At runtime, the model will print decomposition information to the log file, and if the block size or max blocks is inconsistent with the task and thread size, the model will abort. The code will also print a warning if the maximum number of blocks is too large. Although this is not fatal, it does use extra memory. If ``max_blocks`` is set to -1, the code will compute a tentative ``max_blocks`` on the fly. -A loop at the end of routine *create\_blocks* in module -**ice\_blocks.F90** will print the locations for all of the blocks on +A loop at the end of routine *create_blocks* in module +**ice_blocks.F90** will print the locations for all of the blocks on the global grid if the namelist variable ``debug_blocks`` is set to be true. Likewise, a similar loop at -the end of routine *create\_local\_block\_ids* in module -**ice\_distribution.F90** will print the processor and local block +the end of routine *create_local_block_ids* in module +**ice_distribution.F90** will print the processor and local block number for each block. With this information, the grid decomposition into processors and blocks can be ascertained. 
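As a purely illustrative sketch (the values below are assumptions for a small grid, not recommended settings), these decomposition controls are set in the ``domain_nml`` group of **ice_in**::

   &domain_nml
     nprocs            = 4
     block_size_x      = 25
     block_size_y      = 29
     max_blocks        = -1            ! let the code compute max_blocks
     distribution_type = 'cartesian'
     processor_shape   = 'slenderX2'
     debug_blocks      = .true.        ! print block/processor locations
   /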
This ``debug_blocks`` variable should be used carefully as there may be hundreds or thousands of blocks to print @@ -242,7 +242,7 @@ and this information should be needed only rarely. ``debug_blocks`` can be set to true using the ``debugblocks`` option with **cice.setup**. This information is much easier to look at using a debugger such as Totalview. There is also -an output field that can be activated in `icefields\_nml`, ``f_blkmask``, +an output field that can be activated in `icefields_nml`, ``f_blkmask``, that prints out the variable ``blkmask`` to the history file and which labels the blocks in the grid decomposition according to ``blkmask = my_task + iblk/100``. @@ -427,7 +427,7 @@ restoring timescale ``trestore`` may be used (it is also used for restoring ocean sea surface temperature in stand-alone ice runs). This implementation is only intended to provide the “hooks" for a more sophisticated treatment; the rectangular grid option can be used to test -this configuration. The ‘displaced\_pole’ grid option should not be used +this configuration. The ‘displaced_pole’ grid option should not be used unless the regional grid contains land all along the north and south boundaries. The current form of the boundary condition routines does not allow Neumann boundary conditions, which must be set explicitly. This @@ -470,7 +470,7 @@ The logical masks ``tmask``, ``umask``, ``nmask``, and ``emask`` respectively) are useful in conditional statements. In addition to the land masks, two other masks are implemented in -*dyn\_prep* in order to reduce the dynamics component’s work on a global +*dyn_prep* in order to reduce the dynamics component’s work on a global grid. At each time step the logical masks ``iceTmask`` and ``iceUmask`` are determined from the current ice extent, such that they have the value “true” wherever ice exists. They also include a border of cells around @@ -842,7 +842,7 @@ is the step count at the start of a long multi-restart run, and is continuous across model restarts. In general, the time manager should be advanced by calling -*advance\_timestep*. This subroutine in **ice\_calendar.F90** +*advance_timestep*. This subroutine in **ice_calendar.F90** automatically advances the model time by ``dt``. It also advances the istep numbers and calls subroutine *calendar* to update additional calendar data. @@ -912,7 +912,7 @@ may vary with each run depending on several factors including the model timestep, initial date, and value of ``istep0``. The model year is limited by some integer math. In particular, calculation -of elapsed hours in **ice\_calendar.F90**, and the model year is +of elapsed hours in **ice_calendar.F90**, and the model year is limited to the value of ``myear_max`` set in that file. Currently, that's 200,000 years. @@ -927,10 +927,10 @@ set the namelist variables ``year_init``, ``month_init``, ``day_init``, ``sec_init``, and ``dt`` in conjuction with ``days_per_year`` and ``use_leap_years`` to initialize the model date, timestep, and calendar. To overwrite the default/namelist settings in the coupling layer, -set the **ice\_calendar.F90** variables ``myear``, ``mmonth``, ``mday``, +set the **ice_calendar.F90** variables ``myear``, ``mmonth``, ``mday``, ``msec`` and ``dt`` after the namelists have been read. Subroutine *calendar* should then be called to update all the calendar data. -Finally, subroutine *advance\_timestep* should be used to advance +Finally, subroutine *advance_timestep* should be used to advance the model time manager. 
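A minimal Fortran sketch of that override sequence is given below; the routine name and the particular date and timestep values are placeholders, and argument lists (which may differ) are not shown::

   subroutine coupler_reset_calendar
     ! hypothetical coupling-layer routine; only the ice_calendar names
     ! are taken from the description above
     use ice_kinds_mod, only: dbl_kind
     use ice_calendar, only: myear, mmonth, mday, msec, dt, calendar

     myear  = 2000              ! overwrite the namelist initial date
     mmonth = 1
     mday   = 1
     msec   = 0
     dt     = 3600.0_dbl_kind   ! timestep in seconds
     call calendar              ! refresh all derived calendar data
   end subroutine coupler_reset_calendar

   ! thereafter, once per model timestep, the driver advances the
   ! time manager with:   call advance_timestep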
It advances the step numbers, advances time by ``dt``, and updates the calendar data. The older method of manually advancing the steps and adding ``dt`` to ``time`` should @@ -945,11 +945,11 @@ Initialization and Restarts The ice model’s parameters and variables are initialized in several steps. Many constants and physical parameters are set in -**ice\_constants.F90**. Namelist variables (:ref:`tabnamelist`), -whose values can be altered at run time, are handled in *input\_data* +**ice_constants.F90**. Namelist variables (:ref:`tabnamelist`), +whose values can be altered at run time, are handled in *input_data* and other initialization routines. These variables are given default values in the code, which may then be changed when the input file -**ice\_in** is read. Other physical constants, numerical parameters, and +**ice_in** is read. Other physical constants, numerical parameters, and variables are first set in initialization routines for each ice model component or module. Then, if the ice model is being restarted from a previous run, core variables are read and reinitialized in @@ -1038,12 +1038,12 @@ An additional namelist option, ``restart_coszen`` specifies whether the cosine of the zenith angle is included in the restart files. This is mainly used in coupled models. -MPI is initialized in *init\_communicate* for both coupled and +MPI is initialized in *init_communicate* for both coupled and stand-alone MPI runs. The ice component communicates with a flux coupler or other climate components via external routines that handle the variables listed in the `Icepack documentation `_. For stand-alone runs, -routines in **ice\_forcing.F90** read and interpolate data from files, +routines in **ice_forcing.F90** read and interpolate data from files, and are intended merely to provide guidance for the user to write his or her own routines. Whether the code is to be run in stand-alone or coupled mode is determined at compile time, as described below. @@ -1232,51 +1232,54 @@ above. In addition, ``history_format`` as well as other history namelist options control the specific file format as well as features related to IO performance, see :ref:`iooverview`. -CICE Model history output data can be written as instantaneous or average data as specified -by the ``hist_avg`` namelist array and is customizable by stream. Characters -can be added to the ``history_filename`` to distinguish the streams. This can be changed -by modifying ``hist_suffix`` to something other than "x". - -The data written at the period(s) given by ``histfreq`` and +The data is written at the period(s) given by ``histfreq`` and ``histfreq_n`` relative to a reference date specified by ``histfreq_base``. -The files are written to binary or netCDF files prepended by ``history_file`` -in **ice_in**. These settings for history files are set in the +The files are written to binary or netCDF files prepended by the ``history_file`` +and ``history_suffix`` +namelist setting. The settings for history files are set in the **setup_nml** section of **ice_in** (see :ref:`tabnamelist`). -If ``history_file`` = ‘iceh’ then the -filenames will have the form **iceh.[timeID].nc** or **iceh.[timeID].da**, -depending on the output file format chosen. With binary files, a separate header +The history filenames will have a form like +**[history_file][history_suffix][_freq].[timeID].[nc,da]** +depending on the namelist options chosen. With binary files, a separate header file is written with equivalent information. 
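As a hypothetical illustration (the prefix and suffix values are assumptions, not defaults), settings such as::

   &setup_nml
     history_file = 'iceh'
     hist_suffix  = 'x', '_06h', 'x', 'x', 'x'
   /

would produce filenames beginning with ``iceh`` for the first stream and ``iceh_06h`` for the second, following the pattern given above.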
Standard fields are output -according to settings in the **icefields\_nml** section of **ice\_in** +according to settings in the **icefields_nml** section of **ice_in** (see :ref:`tabnamelist`). The user may add (or subtract) variables not already available in the namelist by following the instructions in section :ref:`addhist`. -The history module has been divided into several +The history implementation has been divided into several modules based on the desired formatting and on the variables themselves. Parameters, variables and routines needed by multiple -modules is in **ice\_history\_shared.F90**, while the primary routines +modules is in **ice_history_shared.F90**, while the primary routines for initializing and accumulating all of the history variables are in -**ice\_history.F90**. These routines call format-specific code in the -**io\_binary**, **io\_netcdf** and **io\_pio** directories. History +**ice_history.F90**. These routines call format-specific code in the +**io_binary**, **io_netcdf** and **io_pio2** directories. History variables specific to certain components or parameterizations are -collected in their own history modules (**ice\_history\_bgc.F90**, -**ice\_history\_drag.F90**, **ice\_history\_mechred.F90**, -**ice\_history\_pond.F90**). +collected in their own history modules (**ice_history_bgc.F90**, +**ice_history_drag.F90**, **ice_history_mechred.F90**, +**ice_history_pond.F90**). The history modules allow output at different frequencies. Five output -frequencies (``1``, ``h``, ``d``, ``m``, ``y``) are available simultaneously during a run. -The same variable can be output at different frequencies (say daily and -monthly) via its namelist flag, `f\_` :math:`\left<{var}\right>`, which -is a character string corresponding to ``histfreq`` or ‘x’ for none. -(Grid variable flags are logicals, since they are written to all -files, no matter what the frequency is.) If there are no namelist flags +options (``1``, ``h``, ``d``, ``m``, ``y``) are available simultaneously for ``histfreq`` +during a run, and each stream must have a unique value for ``histfreq``. In other words, ``d`` +cannot be used by two different streams. Each stream has an associated frequency +set by ``histfreq_n``. The frequency is +relative to a reference date specified by the corresponding entry in ``histfreq_base``. +Each stream can be instantaneous or time averaged +data over the frequency internal. The ``hist_avg`` namelist turns on time averaging +for each stream individually. +The same model variable can be written to multiple history streams (ie. daily ``d`` and +monthly ``m``) via its namelist flag, `f_` :math:`\left<{var}\right>`, while ``x`` +turns that history variable off. For example, ``f_aice = 'md'`` will write aice to the +monthly and daily streams. +Grid variable history output flags are logicals and written to all stream files if +turned on. If there are no namelist flags with a given ``histfreq`` value, or if an element of ``histfreq_n`` is 0, then -no file will be written at that frequency. The output period can be -discerned from the filenames or the ``hist_suffix`` can be used. Each history stream will be either instantaneous -or averaged as specified by the corresponding entry in the ``hist_avg`` namelist array, and the frequency -will be relative to a reference date specified by the corresponding entry in ``histfreq_base``. -More information about how the frequency is -computed is found in :ref:`timemanager`. +no file will be written at that frequency. 
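For instance, a hypothetical two-stream configuration (the particular choices are for illustration only) might look like::

   &setup_nml
     histfreq   = 'm', 'd', 'x', 'x', 'x'
     histfreq_n =  1 ,  1 ,  1 ,  1 ,  1
     hist_avg   = .true., .false., .true., .true., .true.
   /
   &icefields_nml
     f_aice = 'md'     ! monthly mean and daily instantaneous aice
     f_hi   = 'm'      ! monthly mean ice thickness
     f_Tsfc = 'x'      ! not written to any stream
   /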
The history filenames are set in +the subroutine **construct_filename** in **ice_history_shared.F90**. +In cases where two streams produce the same identical filename, the model will +abort. Use the namelist ``hist_suffix`` to make stream filenames unique. +More information about how the frequency is computed is found in :ref:`timemanager`. Also, some Earth Sytem Models require the history file time axis to be centered in the averaging interval. The flag ``hist_time_axis`` will allow the user to chose ``begin``, ``middle``, @@ -1299,7 +1302,9 @@ For example, in the namelist: Here, ``hi`` will be written to a file on every timestep, ``hs`` will be written once every 6 hours, ``aice`` once a month, ``meltb`` once a month AND -once every 6 hours, and ``Tsfc`` and ``iage`` will not be written. +once every 6 hours, and ``Tsfc`` and ``iage`` will not be written. All streams +are time averaged over the interval although because one stream has ``histfreq=1`` and +``histfreq_n=1``, that is equivalent to instantaneous output each model timestep. From an efficiency standpoint, it is best to set unused frequencies in ``histfreq`` to ‘x’. Having output at all 5 frequencies takes nearly 5 times @@ -1322,19 +1327,14 @@ above, ``meltb`` is called ``meltb`` in the monthly file (for backward compatibility with the default configuration) and ``meltb_h`` in the 6-hourly file. -Using the same frequency twice in ``histfreq`` will have unexpected -consequences and currently will cause the code to abort. It is not -possible at the moment to output averages once a month and also once -every 3 months, for example. - -If ``write_ic`` is set to true in **ice\_in**, a snapshot of the same set +If ``write_ic`` is set to true in **ice_in**, a snapshot of the same set of history fields at the start of the run will be written to the history -directory in **iceh\_ic.[timeID].nc(da)**. Several history variables are +directory in **iceh_ic.[timeID].nc(da)**. Several history variables are hard-coded for instantaneous output regardless of the ``hist_avg`` averaging flag, at the frequency given by their namelist flag. The normalized principal components of internal ice stress (``sig1``, ``sig2``) are computed -in *principal\_stress* and written to the history file. This calculation +in *principal_stress* and written to the history file. This calculation is not necessary for the simulation; principal stresses are merely computed for diagnostic purposes and included here for the user’s convenience. @@ -1342,7 +1342,7 @@ convenience. Several history variables are available in two forms, a value representing an average over the sea ice fraction of the grid cell, and another that is multiplied by :math:`a_i`, representing an average over -the grid cell area. Our naming convention attaches the suffix “\_ai" to +the grid cell area. Our naming convention attaches the suffix “_ai" to the grid-cell-mean variable names. Beginning with CICE v6, history variables requested by the Sea Ice Model Intercomparison @@ -1352,9 +1352,9 @@ Project (SIMIP) :cite:`Notz16` have been added as possible history output variab `daily `_ requested SIMIP variables provide the names of possible history fields in CICE. However, each of the additional variables can be output at any temporal frequency -specified in the **icefields\_nml** section of **ice\_in** as detailed above. +specified in the **icefields_nml** section of **ice_in** as detailed above. Additionally, a new history output variable, ``f_CMIP``, has been added. 
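For example (the monthly frequency here is only an assumed choice)::

   &icefields_nml
     f_CMIP = 'm'
   /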
When ``f_CMIP`` -is added to the **icefields\_nml** section of **ice\_in** then all SIMIP variables +is added to the **icefields_nml** section of **ice_in** then all SIMIP variables will be turned on for output at the frequency specified by ``f_CMIP``. It may also be helpful for debugging to increase the precision of the history file @@ -1367,7 +1367,7 @@ Diagnostic files Like ``histfreq``, the parameter ``diagfreq`` can be used to regulate how often output is written to a log file. The log file unit to which diagnostic -output is written is set in **ice\_fileunits.F90**. If ``diag_type`` = +output is written is set in **ice_fileunits.F90**. If ``diag_type`` = ‘stdout’, then it is written to standard out (or to **ice.log.[ID]** if you redirect standard out as in **cice.run**); otherwise it is written to the file given by ``diag_file``. @@ -1381,7 +1381,7 @@ useful for checking global conservation of mass and energy. ``print_points`` writes data for two specific grid points defined by the input namelist ``lonpnt`` and ``latpnt``. By default, one point is near the North Pole and the other is in the Weddell Sea; these -may be changed in **ice\_in**. +may be changed in **ice_in**. The namelist ``debug_model`` prints detailed debug diagnostics for a single point as the model advances. The point is defined @@ -1394,16 +1394,16 @@ namelist, the point associated with ``lonpnt(1)`` and ``latpnt(1)`` is used. in detail at a particular (usually failing) grid point. Memory use diagnostics are controlled by the logical namelist ``memory_stats``. -This feature uses an intrinsic query in C defined in **ice\_memusage\_gptl.c**. +This feature uses an intrinsic query in C defined in **ice_memusage_gptl.c**. Memory diagnostics will be written at the the frequency defined by diagfreq. -Timers are declared and initialized in **ice\_timers.F90**, and the code -to be timed is wrapped with calls to *ice\_timer\_start* and -*ice\_timer\_stop*. Finally, *ice\_timer\_print* writes the results to +Timers are declared and initialized in **ice_timers.F90**, and the code +to be timed is wrapped with calls to *ice_timer_start* and +*ice_timer_stop*. Finally, *ice_timer_print* writes the results to the log file. The optional “stats" argument (true/false) prints additional statistics. The "stats" argument can be set by the ``timer_stats`` -namelist. Calling *ice\_timer\_print\_all* prints all of +namelist. Calling *ice_timer_print_all* prints all of the timings at once, rather than having to call each individually. Currently, the timers are set up as in :ref:`timers`. Section :ref:`addtimer` contains instructions for adding timers. @@ -1415,8 +1415,8 @@ the code, including the dynamics and advection routines. The Dynamics, Advection, and Column timers do not overlap and represent most of the overall model work. -The timers use *MPI\_WTIME* for parallel runs and the F90 intrinsic -*system\_clock* for single-processor runs. +The timers use *MPI_WTIME* for parallel runs and the F90 intrinsic +*system_clock* for single-processor runs. .. _timers: From 500073c60809f6f887194d3e79c1d632d1b5ed26 Mon Sep 17 00:00:00 2001 From: Tony Craig Date: Wed, 1 May 2024 12:56:21 -0700 Subject: [PATCH 2/2] Fix Github Actions for macos system update (#948) Github Actions broke again after default macos system update. Clang was not picking up the C system files. Had to change the implementation and add -isysroot to the CFLAGS option. 
At the same time, removed prior implementation where the system files were linked into /usr/local/include, this was no longer working. --- .github/workflows/test-cice.yml | 3 +-- configuration/scripts/machines/Macros.conda_macos | 3 ++- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test-cice.yml b/.github/workflows/test-cice.yml index e7e41de11..20d944d88 100644 --- a/.github/workflows/test-cice.yml +++ b/.github/workflows/test-cice.yml @@ -46,7 +46,6 @@ jobs: run: | sudo xcode-select -r sudo xcode-select -s /Library/Developer/CommandLineTools - sudo ln -s /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/* /usr/local/include/ echo "xcrun --show-sdk-path: $(xcrun --show-sdk-path)" echo "xcode-select -p: $(xcode-select -p)" - name: system info @@ -55,7 +54,7 @@ jobs: type wget type curl type csh - echo "readlink \$(which csh): $(python -c 'import os, sys; print os.path.realpath(sys.argv[1])' $(which csh))" + echo "readlink \$(which csh): $(python -c 'import os, sys; print(os.path.realpath(sys.argv[1]))' $(which csh))" echo "csh --version: $(csh --version)" echo "uname -a: $(uname -a)" echo "sw_vers: $(sw_vers)" diff --git a/configuration/scripts/machines/Macros.conda_macos b/configuration/scripts/machines/Macros.conda_macos index 6f26da0fc..f8b95aa76 100644 --- a/configuration/scripts/machines/Macros.conda_macos +++ b/configuration/scripts/machines/Macros.conda_macos @@ -47,7 +47,8 @@ SDKPATH = $(shell xcrun --show-sdk-path) ifeq ($(strip $(SDKPATH)),) CFLAGS_HOST := else - CFLAGS_HOST = -isysroot $(SDKPATH) + CFLAGS_HOST := -isysroot $(SDKPATH) + CFLAGS += -isysroot $(SDKPATH) LD += -L$(SDKPATH)/usr/lib endif