Skip to content

Commit

Permalink
Merge pull request #245 from jorritmesman/main
Browse files Browse the repository at this point in the history
Release v1.1.0
  • Loading branch information
jorritmesman authored Mar 16, 2022
2 parents 050384c + 6e044b6 commit e191e1d
Show file tree
Hide file tree
Showing 51 changed files with 10,224 additions and 5,921 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -71,12 +71,12 @@ jobs:
do
eval sudo $cmd
done < <(Rscript -e 'writeLines(remotes::system_requirements("ubuntu", "18.04"))')
- name: Install macOS dependencies
if: runner.os == 'macOS'
run: |
brew install netcdf
- name: Install dependencies
run: |
remotes::install_deps(dependencies = TRUE)
Expand Down
6 changes: 3 additions & 3 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
Package: LakeEnsemblR
Type: Package
Title: Run Ensemble of Lake Models
Version: 1.0.5
Date: 2021-08-12
Version: 1.1.0
Date: 2022-03-16
URL: https://github.com/aemon-j/LakeEnsemblR
Author: Authors@R: c(person("Tadhg", "Moore", role = c("aut","cre"),
email = "[email protected]"),
Expand Down Expand Up @@ -47,5 +47,5 @@ Suggests:
Encoding: UTF-8
LazyData: true
LazyLoad: true
RoxygenNote: 7.1.1
RoxygenNote: 7.1.2
VignetteBuilder: knitr
4 changes: 3 additions & 1 deletion NAMESPACE
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ export(check_master_config)
export(export_config)
export(export_dirs)
export(export_extinction)
export(export_inflow)
export(export_flow)
export(export_init_cond)
export(export_location)
export(export_meteo)
Expand All @@ -21,6 +21,7 @@ export(export_output_settings)
export(export_time)
export(format_inflow)
export(format_met)
export(format_outflow)
export(get_config_value)
export(get_json_value)
export(get_mylakeconfig_value)
Expand Down Expand Up @@ -52,6 +53,7 @@ importFrom(configr,read.config)
importFrom(configr,write.config)
importFrom(glmtools,get_ice)
importFrom(glmtools,get_nml_value)
importFrom(glmtools,get_surface_height)
importFrom(glmtools,get_var)
importFrom(glmtools,read_nml)
importFrom(glmtools,set_nml)
Expand Down
21 changes: 21 additions & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,24 @@
## version 1.1.0

### Changes

- Inflows and outflows are now separate inputs and can be different from each other
- Multiple inflows and outflows can be simulated.
- Outflows can exit the lake at specified depths, or at the surface.
- Water level ("w_level") is now a potential output of LakeEnsemblR.
- "max_members" can be specified, which sets the number of members that the output netCDF file can have.
- LakeEnsemblR now works with the latest version of Simstrat (v3.0.1), which included changes in some parameter names
- Model-specific template config files are removed from the LakeEnsemblR package and are instead retrieved from the model-specific packages

### Bug fixes
- get_yaml_multiple and input_yaml_multiple now work if the first key is the highest level in the yaml file
- cali_ensemble for FLake; depths are now sorted
- Initial conditions Simstrat; Simstrat now always starts at the specified initial water level
- plot_heatmap now works in lakes where the water level changes

The setup of the LakeEnsemblR configuration file changed. More information can be found on: https://github.com/aemon-j/LakeEnsemblR/wiki/From-v1.0-to-v1.1


## version 1.0.5

### Changes
Expand Down
38 changes: 35 additions & 3 deletions R/check_config.R
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ check_master_config <- function(config_file,
if(!grepl("\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}", start)){
stop("Start time format must be in yyyy-mm-dd hh:mm:ss format")
}
# check if stop is in Iright format
# check if stop is in right format
if(!grepl("\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}", stop)) {
stop("Start time format must be in yyyy-mm-dd hh:mm:ss format")
}
Expand Down Expand Up @@ -97,10 +97,43 @@ check_master_config <- function(config_file,
stop(paste0('Unknown output time variable "', time_method, '" in control file ', config_file,
". Allowed units: ", paste0(good_umethod, collapse = ", ")))
}

# check if the same number of in-/outflow scaling factors are given
# as the number of in-/outflows
configr_master_config <- configr::read.config(file.path(config_file))
use_inflows <- get_yaml_value(config_file, label = "inflows", key = "use")
use_outflows <- get_yaml_value(config_file, label = "outflows", key = "use")

if(!is.null(configr_master_config[["scaling_factors"]][["all"]]
[["inflow"]]) & use_inflows){
infl_scalings <- (configr_master_config[["scaling_factors"]][["all"]]
[["inflow"]])
num_inflows <- get_yaml_multiple(config_file, key1 = "inflows",
key2 = "number_inflows")
if(length(infl_scalings) != num_inflows){
warning(paste0("There is a different number of inflows than there are ",
"inflow scaling factors in the control file ",
config_file, ". Provide the same number of ",
"scaling factors and inflows."))
}
}
if(!is.null(configr_master_config[["scaling_factors"]][["all"]]
[["outflow"]]) & use_outflows){
outfl_scalings <- (configr_master_config[["scaling_factors"]][["all"]]
[["outflow"]])
num_outflows <- get_yaml_multiple(config_file, key1 = "outflows",
key2 = "number_outflows")
if(length(outfl_scalings) != num_outflows){
warning(paste0("There is a different number of outflows than there are ",
"outflow scaling factors in the control file ",
config_file, ". Provide the same number of ",
"scaling factors and outflows."))
}
}

# check if variables in output are OK
variables <- gotmtools::get_yaml_value(config_file, "output", "variables")
good_vars <- c("temp", "ice_height", "dens", "salt")
good_vars <- c("temp", "ice_height", "dens", "salt", "w_level")
if(any(!variables %in% good_vars)) {
stop(paste0('Unknown output variable: "', variables[!variables %in% good_vars],
'" in control file ', config_file,
Expand All @@ -109,7 +142,6 @@ check_master_config <- function(config_file,

# Check if lower limits for calibration are smaller than upper limits
# load master config file
configr_master_config <- configr::read.config(file.path(config_file))
if ("calibration" %in% names(configr_master_config)) {
# meteo parameter
cal_section <- configr_master_config[["calibration"]][["met"]]
Expand Down
94 changes: 47 additions & 47 deletions R/create_netcdf_output.R
Original file line number Diff line number Diff line change
Expand Up @@ -22,14 +22,14 @@
create_netcdf_output <- function(output_lists, folder = ".", model, out_time,
longitude = 0, latitude = 0, compression = 4,
members = 25, out_file = "ensemble_output.nc"){

# Creat output directory
if(!dir.exists(file.path(folder, "output"))) {
message("Creating directory for output: ", file.path(folder, "output"))
dir.create(file.path(folder, "output"), showWarnings = FALSE)
}


#Create ncdf
message("Writing NetCDF file... [", Sys.time(), "]")
ref_time <- as.POSIXct("1970-01-01 00:00:00", tz = "GMT") # Reference time for netCDF time
Expand All @@ -40,150 +40,150 @@ create_netcdf_output <- function(output_lists, folder = ".", model, out_time,
# Define lon and lat dimensions
lon1 <- ncdf4::ncdim_def("lon", "degrees_east", vals = as.double(xvals))
lat2 <- ncdf4::ncdim_def("lat", "degrees_north", vals = as.double(yvals))

# Set dimensions
# Time dimension
timedim <- ncdf4::ncdim_def("time", units = "seconds since 1970-01-01 00:00:00",
vals = as.double(nsecs), calendar = "proleptic_gregorian")

# Define model dimensions
mod_names <- c(model, "Obs")
moddim <- ncdf4::ncdim_def("model", units = "-",
vals = as.double(seq_len(length(mod_names))))

# Define member dimensions
memdim <- ncdf4::ncdim_def("member", units = "", unlim = TRUE,
vals = as.double(1:members))
vals = as.double(seq_len(members)))

fillvalue <- 1e20 # Fill value
missvalue <- 1e20 # Missing value

nc_vars <- list() #Initialize empty list to fill netcdf variables

for(i in seq_len(length(output_lists))){
# Get variable name (e.g. "temp" from "temp_list")
splitted_name <- strsplit(names(output_lists[[i]])[1], "_")[[1]]
variable_name <- paste(splitted_name[2:length(splitted_name)], collapse = "_")

# Get variable unit (from dictionary)
variable_unit <- lake_var_dic$unit[lake_var_dic$short_name == variable_name]

# See if it"s 2D (e.g. ice height) or 3D (e.g. temperature)
if(ncol(output_lists[[i]][[1]]) == 2){
# Add 2D variable

# Define variable
tmp_def <- ncdf4::ncvar_def(variable_name, variable_unit,
list(lon1, lat2, memdim, moddim, timedim),
fillvalue, variable_name,
prec = "float", compression = compression, shuffle = FALSE)
nc_vars[[length(nc_vars) + 1]] <- tmp_def # Add to list


}else if(ncol(output_lists[[i]][[1]]) > 2){
# Add 3D variable

lengths <- lapply(output_lists[[i]], ncol) # Extract ncols in each output
lon_list <- which.max(lengths) # Select largest depths
deps <- get.offsets(output_lists[[i]][[lon_list]]) # Extract depths

# Depth dimension
depthdim <- ncdf4::ncdim_def("z", units = "meters", vals = as.double((-deps)),
longname = "Depth from surface")

# Define variable
tmp_def <- ncdf4::ncvar_def(variable_name, variable_unit,
list(lon1, lat2, memdim, moddim, timedim, depthdim),
fillvalue, variable_name,
prec = "float", compression = compression, shuffle = FALSE)
nc_vars[[length(nc_vars) + 1]] <- tmp_def # Add to list


}


}

# Re-assign list names
names(nc_vars)[(length(nc_vars) - length(output_lists) + 1):length(nc_vars)] <-
names(output_lists)

# Create file name for output file
fname <- file.path(folder, "output", out_file) # Ensemble output filename

# If file exists - delete it
if(file.exists(fname)) {
unlink(fname, recursive = TRUE)
}

# Create and input data into the netCDF file
ncout <- ncdf4::nc_create(fname, nc_vars, force_v4 = T)
# Add coordinates attribute for use with get_vari()
ncdf4::ncatt_put(ncout, "z", attname = "coordinates", attval = c("z"))
ncdf4::ncatt_put(ncout, "model", attname = "Model",
attval = paste(seq_len(length(mod_names)), "-", mod_names, collapse = ", "))
ncdf4::ncatt_put(ncout, "member", attname = "member", attval = c(members))

# Loop through and add each variable
# Add tryCatch ensure that it closes netCDF file
result <- tryCatch({
# Again different for 2D or 3D variables
for(i in seq_len(length(output_lists))){

if(ncol(output_lists[[i]][[1]]) == 2){
# Add 2D variable

arr <- array(NA, dim = c((members), length(mod_names),
length(nsecs)))


for(j in seq_len(length(output_lists[[i]]))) {
mat1 <- matrix(NA, nrow = nc_vars[[i]]$dim[[4]]$len, ncol = nc_vars[[i]]$dim[[5]]$len)


mat <- as.matrix(output_lists[[i]][[j]][, -1])

splitted_name <- strsplit(names(output_lists[[i]])[j], "_")[[1]]
m_name <- splitted_name[1]
idx <- which(mod_names == m_name)

arr[1, idx, ] <- mat
}

ncdf4::ncvar_put(ncout, nc_vars[[i]], arr)
ncdf4::ncatt_put(ncout, nc_vars[[i]], attname = "coordinates",
attval = c("lon lat model member"))
ncdf4::ncvar_change_missval(ncout, nc_vars[[i]], missval = fillvalue)

}else if(ncol(output_lists[[i]][[1]]) > 2){
# Add 3D variable

arr <- array(NA, dim = c((members), length(mod_names),
length(nsecs), length(deps)))


for(j in seq_len(length(output_lists[[i]]))) {

mat1 <- matrix(NA, nrow = nc_vars[[i]]$dim[[5]]$len,
ncol = nc_vars[[i]]$dim[[6]]$len)

# vector of depths to input into the matrix
deps_tmp <- get.offsets(output_lists[[i]][[j]])

mat <- as.matrix(output_lists[[i]][[j]][, -1])

for(k in seq_len(ncol(mat))) {
col <- which(deps == deps_tmp[k])
mat1[, col] <- mat[, k]
}

splitted_name <- strsplit(names(output_lists[[i]])[j], "_")[[1]]
m_name <- splitted_name[1]
idx <- which(mod_names == m_name)

arr[1, idx, , ] <- mat1
}

ncdf4::ncvar_put(nc = ncout, varid = nc_vars[[i]], vals = arr)
ncdf4::ncatt_put(ncout, nc_vars[[i]], attname = "coordinates",
attval = c("lon lat z model member"))
Expand All @@ -199,7 +199,7 @@ create_netcdf_output <- function(output_lists, folder = ".", model, out_time,
}, finally = {
ncdf4::nc_close(ncout) # Close netCDF file
})

message("Finished writing NetCDF file [", Sys.time(), "]")

}
Loading

0 comments on commit e191e1d

Please sign in to comment.