diff --git a/DESCRIPTION b/DESCRIPTION index c8ed94ea..50a533d6 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -70,4 +70,4 @@ License: MIT + file LICENSE URL: https://github.com/eblondel/geoflow BugReports: https://github.com/eblondel/geoflow LazyLoad: yes -RoxygenNote: 7.1.1 +RoxygenNote: 7.2.0 diff --git a/R/executeWorkflowJob.R b/R/executeWorkflowJob.R index 82ee90fd..b3848e93 100644 --- a/R/executeWorkflowJob.R +++ b/R/executeWorkflowJob.R @@ -154,7 +154,7 @@ executeWorkflowJob <- function(config, jobdir = NULL, queue = NULL, monitor = NU for(i in 1:length(entity$data$actions)){ entity_action <- entity$data$actions[[i]] config$logger.info(sprintf("Executing entity data action %s: '%s' ('%s')", i, entity_action$id, entity_action$script)) - entity_action$fun(entity, config, entity_action$options) + entity_action$run(entity, config) #monitor local action step<-step+inc_step config$logger.info(sprintf("WORKFLOW PROGRESS : ACTION '%s' of ENTITY '%s' ... %s %%",entity_action$id,entity$identifiers[["id"]],step)) @@ -220,7 +220,7 @@ executeWorkflowJob <- function(config, jobdir = NULL, queue = NULL, monitor = NU if(length(actions)>0) for(i in 1:length(actions)){ action <- actions[[i]] config$logger.info(sprintf("Executing Action %s: %s - for entity %s", i, action$id, entity$identifiers[["id"]])) - action$fun(entity = entity, config = config, options = action$options) + action$run(entity = entity, config = config) #monitor global action step<-step+inc_step @@ -246,7 +246,8 @@ executeWorkflowJob <- function(config, jobdir = NULL, queue = NULL, monitor = NU #behavior for generic uploaders, we set depositWithFiles = TRUE and proceed with all resource uploads generic_uploader_options <- generic_uploader$options generic_uploader_options$depositWithFiles <- TRUE - generic_uploader$fun(entity, config, generic_uploader_options) + generic_uploader$options <- generic_uploader_options + generic_uploader$run(entity, config) } } } diff --git a/R/geoflow_action.R b/R/geoflow_action.R index 0bdb5278..81f287d3 100644 ---
a/R/geoflow_action.R +++ b/R/geoflow_action.R @@ -129,6 +129,13 @@ geoflow_action <- R6Class("geoflow_action", } }, + #'@description Runs the action + #'@param entity entity + #'@param config config + run = function(entity, config){ + self$fun(self, entity, config) + }, + #'@description Indicates if the action is PID generator #'@return \code{TRUE} if the action is a PID generator, \code{FALSE} otherwise isPIDGenerator = function(){ @@ -238,7 +245,6 @@ register_actions <- function(){ target = "entity", target_dir = "metadata", packages = list("geometa","ows4R"), - fun = geometa_create_iso_19115, available_options = list( use_uuid = list(def = "Use UUID as metadata identifier, if not defined the UUID is pre-generated", class = "logical", default = FALSE), doi = list(def = "Add entity DOI - if defined - as metadata identifier and online resource", class = "logical", default = FALSE), @@ -250,7 +256,8 @@ register_actions <- function(){ subject_geography = list(def = "Identifier of the subject handling a Geographic coverage.", class = "character", default = "geography"), include_coverage_data_dimension_values = list(def = "Include data dimensions's range values to coverage description", class = "logical", default = FALSE), include_coverage_service_dimension_values = list(def = "Include ogc dimensions's range values to coverage description", class = "logical", default = FALSE) - ) + ), + fun = source(system.file("actions", "geometa_create_iso_19115.R", package = "geoflow"))$value ), geoflow_action$new( id = "geometa-create-iso-19110", @@ -259,7 +266,6 @@ register_actions <- function(){ target = "entity", target_dir = "metadata", packages = list("geometa"), - fun = geometa_create_iso_19110, available_options = list( doi = list(def = "Add entity DOI - if defined - as metadata identifier and online resource", class = "logical", default = FALSE), exclude_attributes = list(def = "Attributes that should be excluded from the ISO 19110 production", class = "character", choices 
= list(), add_choices = TRUE, multiple = TRUE, default = NA), @@ -268,7 +274,8 @@ register_actions <- function(){ extra_attributes = list(def = "Extra attributes to add as feature catalog attributes although not in data", class = "character", choices = list(), add_choices = TRUE, multiple = TRUE, default = NA), default_min_occurs = list(def = "The default min occurs value for feature attributes cardinality", class = "integer", default = 1L), default_max_occurs = list(def = "The default max occurs value for feature attribute cardinality", class = "numeric", default = Inf) - ) + ), + fun = source(system.file("actions", "geometa_create_iso_19110.R", package = "geoflow"))$value ), geoflow_action$new( id="ows4R-publish-iso-19139", @@ -277,10 +284,10 @@ register_actions <- function(){ target = NA, target_dir = NA, packages = list("ows4R"), - fun = ows4R_publish_iso_19139, available_options = list( geometa_inspire = list(def = "Validates ISO 19139 metadata with INSPIRE reference validator before publication", class = "logical", default = FALSE) - ) + ), + fun = source(system.file("actions", "ows4R_publish_iso_19139.R", package = "geoflow"))$value ), geoflow_action$new( id = "geonapi-publish-iso-19139", @@ -289,13 +296,13 @@ register_actions <- function(){ target = NA, target_dir = NA, packages = list("geonapi"), - fun = geonapi_publish_iso_19139, available_options = list( geometa_inspire = list(def = "Validates ISO 19139 metadata with INSPIRE reference validator before publication", class = "logical", default = FALSE), privileges = list(def = "Geonetwork privileges to set for the metadata to be published", class = "character", choices = c("view","dynamic","featured"), default = c("view","dynamic","featured"), multiple = TRUE), group = list(def = "Geonetwork user group to which the metadata should be associated", class = "character", default = "1"), category = list(def = "Category of metadata resources to which the metadata record should be associated", class = 
"character", default = "datasets") - ) + ), + fun = source(system.file("actions", "geonapi_publish_iso_19139.R", package = "geoflow"))$value ), geoflow_action$new( id = "geosapi-publish-ogc-services", @@ -304,7 +311,6 @@ register_actions <- function(){ target = NA, target_dir = NA, packages = list("geosapi"), - fun = geosapi_publish_ogc_services, available_options = list( createWorkspace = list(def = "Create workspace if not already existing", class = "logical", default = FALSE), createStore = list(def = "Create data/coverage store if not already existing", class = "logical", default = FALSE), @@ -315,7 +321,8 @@ register_actions <- function(){ enrich_with_relation_wfs = list(def = "When enabled, enrichs entity with a base WFS link relation (applies to 'vector' only)", class = "logical", default = TRUE), enrich_with_relation_wfs_download_links = list(def = "When enabled, enrichs entity with WFS format-specific links for download purpose (applies to 'vector' only)", class = "logical", default = TRUE), enrich_with_relation_wcs = list(def = "When enabled, enrichs entity with a base WCS link relation (applies to 'grid' only)", class = "logical", default = TRUE) - ) + ), + fun = source(system.file("actions", "geosapi_publish_ogc_services.R", package = "geoflow"))$value ), geoflow_action$new( id = "zen4R-deposit-record", @@ -330,7 +337,6 @@ register_actions <- function(){ target = NA, target_dir = NA, packages = list("zen4R"), - fun = zen4R_deposit_record, available_options = list( depositWithFiles = list(def = "Indicates if the action is uploading files", class = "logical", default = FALSE), depositDataPattern = list(def = "A regular expression to filter data files to upload in Zenodo", class = "character", default = ""), @@ -341,7 +347,8 @@ register_actions <- function(){ update_metadata = list(def = "For an existing deposit, indicates if metadata elements should be updated", class = "logical", default = TRUE), update_files = list(def = "For an existing deposit, indicates
if files should be updated", class = "logical", default = TRUE), communities = list(def = "One or more communities to which the deposit should be associated", class = "character", choices = list(), add_choices = TRUE, multiple = TRUE, default = NA) - ) + ), + fun = source(system.file("actions", "zen4R_deposit_record.R", package = "geoflow"))$value ), geoflow_action$new( id = "atom4R-dataverse-deposit-record", @@ -355,14 +362,14 @@ register_actions <- function(){ ), generic_uploader = TRUE, packages = list("atom4R"), - fun = atom4R_dataverse_deposit_record, available_options = list( depositWithFiles = list(def = "Indicates if the action is uploading files", class = "logical", default = FALSE), publish = list(def = "Indicates if the action should publish the deposit. Requires 'depositWithFiles' set to TRUE", class = "logical", default = FALSE), deleteOldFiles = list(def = "Indicates if the action should delete old files prior upload new files", class = "logical", default = TRUE), update_metadata = list(def = "For an existing deposit, indicates if metadata elements should be updated", class = "logical", default = TRUE), update_files = list(def = "For an existing deposit, indicates if files should be updated", class = "logical", default = TRUE) - ) + ), + fun = source(system.file("actions", "atom4R_dataverse_deposit_record.R", package = "geoflow"))$value ), geoflow_action$new( id = "dataone-upload-datapackage", @@ -375,8 +382,8 @@ register_actions <- function(){ packageId = "PackageId" ), packages = list("mime", "datapack", "dataone"), - fun = dataone_upload_datapackage, - available_options = list() + available_options = list(), + fun = source(system.file("actions", "dataone_upload_datapackage.R", package = "geoflow"))$value ), geoflow_action$new( id = "sf-write-generic", @@ -385,14 +392,14 @@ register_actions <- function(){ target = "entity", target_dir = "data", packages = list("sf", "DBI", "RSQLite", "RPostgres"), - fun = sf_write_generic, available_options = list( 
type=list(def = "format to convert", class = "character", choices = c("shp", "dbtable","csv","gpkg"), default = NA), createIndexes=list(def = "create indexes for columns", class = "logical", default = FALSE), overwrite=list(def = "Overwrite policy", class = "logical", default = TRUE), append=list(def = "Append policy", class = "logical", default = FALSE), chunk.size=list(def = "Size of DB upload data chunk. Default is 0L, meaning no chunking is operated.", class = "integer", default = 0L) - ) + ), + fun = source(system.file("actions", "sf_write_generic.R", package = "geoflow"))$value ), geoflow_action$new( id = "sf-write-dbi", @@ -401,13 +408,13 @@ register_actions <- function(){ target = NA, target_dir = NA, packages = list("sf", "DBI", "RSQLite", "RPostgres"), - fun = sf_write_dbi, available_options = list( createIndexes=list(def = "create indexes for columns", class = "logical", default = FALSE), overwrite=list(def = "Overwrite policy", class = "logical", default = TRUE), append=list(def = "Append policy", class = "logical", default = FALSE), chunk.size=list(def = "Size of DB upload data chunk. 
Default is 0L, meaning no chunking is operated.", class = "integer", default = 0L) - ) + ), + fun = source(system.file("actions", "sf_write_dbi.R", package = "geoflow"))$value ), geoflow_action$new( id = "sf-write-shp", @@ -416,7 +423,7 @@ register_actions <- function(){ target = "entity", target_dir = "data", packages = list("sf"), - fun = sf_write_shp + fun = source(system.file("actions", "sf_write_shp.R", package = "geoflow"))$value ), geoflow_action$new( id = "eml-create-eml", @@ -425,10 +432,10 @@ register_actions <- function(){ target = "entity", target_dir = "metadata", packages = list("EML", "emld"), - fun = eml_create_eml, available_options = list( subject_taxonomy = list(def = "Identifier of the subject handling the Taxonomic coverage.", class = "character", default = "taxonomy") - ) + ), + fun = source(system.file("actions", "eml_create_eml.R", package = "geoflow"))$value ), geoflow_action$new( id = "d4storagehub4R-upload-data", @@ -438,11 +445,11 @@ register_actions <- function(){ target_dir = NA, generic_uploader = TRUE, packages = list("d4storagehub4R"), - fun = d4storagehub4R_upload_data, available_options = list( depositWithFiles = list(def = "Indicates if the action is uploading files", class = "logical", default = FALSE), otherUploadFolders = list(def = "List of Folders (other than 'data' and 'metadata') to upload and which may contain files which should enrich others actions" , class = "character", choices = list(), add_choices = TRUE, multiple = TRUE, default = c()) - ) + ), + fun = source(system.file("actions", "d4storagehub4R_upload_data.R", package = "geoflow"))$value ), geoflow_action$new( id = "create-metadata-rmd", @@ -451,11 +458,11 @@ register_actions <- function(){ target = "entity", target_dir = "markdown", packages = list("rmarkdown"), - fun = create_metadata_Rmd, available_options = list( template = list(def = "Rmarkdown template", class = "character", default = "generic"), output_format = list(def = "output format generate by 
Rmarkdown template (e.g. 'html','pdf')", class = "character",choices = list("html","pdf","word","odt","rtf","md","github"), add_choices = FALSE, multiple = FALSE, default = "html") - ) + ), + fun = source(system.file("actions", "rmarkdown_create_metadata.R", package = "geoflow"))$value ) ) .geoflow$actions <- objs diff --git a/R/geoflow_action_geosapi_publish_ogc_services.R b/R/geoflow_action_geosapi_publish_ogc_services.R deleted file mode 100644 index 28e48a3b..00000000 --- a/R/geoflow_action_geosapi_publish_ogc_services.R +++ /dev/null @@ -1,458 +0,0 @@ -geosapi_publish_ogc_services <- function(entity, config, options){ - - if(!requireNamespace("geosapi", quietly = TRUE)){ - stop("The 'geosapi-publish-ogc-services' action requires the 'geosapi' package") - } - - #options - createWorkspace <- if(!is.null(options$createWorkspace)) options$createWorkspace else FALSE - createStore <- if(!is.null(options$createStore)) options$createStore else FALSE - store_description <- if(!is.null(options$store_description)) options$store_description else "" - - #check presence of data - if(is.null(entity$data)){ - warnMsg <- sprintf("No data object associated to entity '%s'. 
Skipping data publication!", - entity$identifiers[["id"]]) - config$logger.warn(warnMsg) - return(NULL) - } - - if(length(entity$data$source)>1) - config$logger.warn("More than one data sources, geosapi action will consider the first one only!") - - #datasource - datasource <- entity$data$uploadSource[[1]] - datasource_name <- NULL - datasource_file <- NULL - if(!is.null(datasource)){ - datasource_name <- unlist(strsplit(datasource, "\\."))[1] - datasource_file <- attr(datasource, "uri") - attributes(datasource) <- NULL - }else{ - if(entity$data$upload){ - errMsg <- sprintf("Upload source is missing!") - stop(errMsg) - } - } - - #layername/sourcename - layername <- if(!is.null(entity$data$layername)) entity$data$layername else entity$identifiers$id - - #shortcut for gs config - GS_CONFIG <- config$software$output$geoserver_config - GS <- config$software$output$geoserver - if(is.null(GS)){ - errMsg <- "This action requires a GeoServer software to be declared in the configuration" - config$logger.error(errMsg) - stop(errMsg) - } - - workspace <- GS_CONFIG$properties$workspace - if(is.null(workspace)) if(!is.null(entity$data$workspaces$geoserver)) workspace <- entity$data$workspaces$geoserver - if(is.null(workspace)){ - errMsg <- "The geoserver configuration requires a workspace for publishing action" - config$logger.error(errMsg) - stop(errMsg) - } - - store <- GS_CONFIG$properties$store - if(is.null(store)) if(!is.null(entity$data$store)) store <- entity$data$store - if(is.null(store)){ - errMsg <- "The geoserver configuration requires a data/coverage store for publishing action" - config$logger.error(errMsg) - stop(errMsg) - } - - if(entity$data$uploadType == "other"){ - warnMsg <- "No 'geosapi' action possible for type 'other'. 
Action skipped" - config$logger.warn(warnMsg) - return(NULL) - } - - # Check existence of data/coverage store - the_store <- switch(entity$data$spatialRepresentationType, - "vector" = GS$getDataStore(workspace, store), - "grid" = GS$getCoverageStore(workspace, store) - ) - # If store does not exist - # Check if createStore is TRUE - if(length(the_store)==0){ - if(createStore){ - switch(entity$data$uploadType, - #vector/features upload types - #=========================================================================================== - #vector/GeoPackage - #------------------------------------------------------------------------------------------- - "gpkg"= { - the_store<-GSGeoPackageDataStore$new( - name = store, - description = store_description , - enabled = TRUE, - database = paste0("file://data/",workspace,"/",entity$data$uploadSource,".gpkg") - ) - }, - #vector/dbtable - #------------------------------------------------------------------------------------------- - "dbtable"= { - dbi<-config$software$output$dbi_config - if(is.null(dbi)) dbi<-config$software$output$dbi_config - if(is.null(dbi)) { - errMsg <- sprintf("Error during Geoserver '%s' datastore creation, this datastore type requires a DBI type software declaration in the configuration", store) - config$logger.error(errMsg) - stop(errMsg) - } - Postgres<-dbi$parameters$drv %in% c("Postgres","PostreSQL") - if(!Postgres){ - errMsg <- sprintf("Error during Geoserver '%s' datastore creation, the DBI software declared in the configuration is not a PostGis database", store) - config$logger.error(errMsg) - stop(errMsg) - } - the_store<-GSPostGISDataStore$new(name=store, description = store_description, enabled = TRUE) - the_store$setHost(dbi$parameters$host) - the_store$setPort(dbi$parameters$port) - the_store$setDatabase(dbi$parameters$dbname) - #the_store$setSchema()#Not yet implemented in dbi software arguments - the_store$setUser(dbi$parameters$user) - the_store$setPassword(dbi$parameters$password) - }, - 
#vector/dbquery - #------------------------------------------------------------------------------------------- - "dbquery"= { - dbi<-config$software$output$dbi_config - if(is.null(dbi)) dbi<-config$software$output$dbi_config - if(is.null(dbi)) { - errMsg <- sprintf("Error during Geoserver '%s' datastore creation, this datastore type requires a DBI type software declaration in the configuration", store) - config$logger.error(errMsg) - stop(errMsg) - } - Postgres<-dbi$parameters$drv %in% c("Postgres","PostreSQL") - if(!Postgres){ - errMsg <- sprintf("Error during Geoserver '%s' datastore creation, the DBI software declared in the configuration is not a PostGis database", store) - config$logger.error(errMsg) - stop(errMsg) - } - the_store<-GSPostGISDataStore$new(name=store, description = store_description, enabled = TRUE) - the_store$setHost(dbi$parameters$host) - the_store$setPort(dbi$parameters$port) - the_store$setDatabase(dbi$parameters$dbname) - #the_store$setSchema()#Not yet implemented in dbi software arguments - the_store$setUser(dbi$parameters$user) - the_store$setPassword(dbi$parameters$password) - }, - #vector/shapefile (ESRI) - #------------------------------------------------------------------------------------------- - "shp"= { - the_store <- GSShapefileDirectoryDataStore$new( - name=store, - description = store_description, - enabled = TRUE, - url = paste0("file://data","/",workspace) - ) - }, - #grid/coverages upload types - #----------------------------------------------- - "geotiff" = { - the_store <- GSGeoTIFFCoverageStore$new(name = store, description = store_description, enabled = TRUE) - } - ) - if(is.null(the_store)){ - errMsg <- sprintf("Error during Geoserver data/coverage store creation, format '%s' not supported. 
Aborting 'geosapi' action!",entity$data$uploadType) - config$logger.error(errMsg) - stop(errMsg) - }else{ - created <- switch(entity$data$spatialRepresentationType, - "vector" = GS$createDataStore(workspace, the_store), - "grid" = GS$createCoverageStore(workspace, the_store) - ) - if(created){ - infoMsg <- sprintf("Successful Geoserver '%s' data/coverage store creaction", store) - config$logger.info(infoMsg) - }else{ - errMsg <- "Error during Geoserver data/coverage store creation. Aborting 'geosapi' action!" - config$logger.error(errMsg) - stop(errMsg) - } - } - }else{ - # If createStore is FALSE edit ERROR Message - errMsg <- sprintf("Data/Coverage store '%s' does not exist and 'createStore' option = FALSE, please verify config if data/coverage store already exists or change createStore = TRUE to create it",store) - config$logger.error(errMsg) - stop(errMsg) - } - } - - #upload - #------------------------------------------------------------------------------------------------- - if(entity$data$upload){ - - config$logger.info("Upload mode is set to true") - if(startsWith(entity$data$uploadType,"db") || entity$data$uploadType == "other"){ - warnMsg <- "Skipping upload: Upload mode is only valid for types 'shp', 'spatialite' or 'h2'" - config$logger.warn(warnMsg) - }else{ - uploaded <- FALSE - config$logger.info("Upload from local file(s)") - filepath <- file.path(getwd(), "data", datasource) - config$logger.info(sprintf("File to upload to Geoserver: %s", filepath)) - if(file.exists(filepath)){ - config$logger.info(sprintf("Upload file '%s' [%s] to GeoServer...", filepath, entity$data$uploadType)) - uploaded <- switch(entity$data$spatialRepresentationType, - #vector/features upload - "vector" = GS$uploadData( - workspace, store, endpoint = "file", configure = "none", update = "overwrite", - filename = filepath, extension = entity$data$uploadType, charset = "UTF-8", - contentType = if(entity$data$uploadType=="spatialite") "application/x-sqlite3" else "" - ), - 
#grid/coverages upload - "grid" = GS$uploadCoverage( - workspace, store, endpoint = "file", configure = "none", update = "overwrite", - filename = filepath, extension = entity$data$uploadType, - contentType = switch(entity$data$uploadType, - "geotiff" = "text/plain", - "arcgrid" = "text/plain", - "worldimage" = "application/zip", - "imagemosaic" = "application/zip" - ) - ) - ) - }else{ - errMsg <- sprintf("Upload from local file(s): no zipped file found for source '%s' (%s)", filepath, datasource) - config$logger.error(errMsg) - stop(errMsg) - } - - if(uploaded){ - infoMsg <- sprintf("Successful Geoserver upload for file '%s' (%s)", datasource_file, entity$data$uploadType) - config$logger.info(infoMsg) - }else{ - errMsg <- "Error during Geoserver file upload. Aborting 'geosapi' action!" - config$logger.error(errMsg) - stop(errMsg) - } - } - } - - #featuretype/coverage +layer publication - #-------------------------------------------------------------------------------------------------- - - #variables - epsgCode <- sprintf("EPSG:%s", entity$srid) - - #build resource (either featuretype or coverage) - resource <- switch(entity$data$spatialRepresentationType, - "vector" = GSFeatureType$new(), - "grid" = GSCoverage$new() - ) - resource$setName(layername) - nativename <- datasource_name - if(entity$data$uploadType == "dbquery") nativename <- layername - if(entity$data$spatialRepresentationType == "grid") nativename <- store - resource$setNativeName(nativename) - resource$setAbstract(entity$descriptions$abstract) - resource$setTitle(entity$titles[["title"]]) - resource$setSrs(epsgCode) - resource$setNativeCRS(epsgCode) - resource$setEnabled(TRUE) - resource$setProjectionPolicy("FORCE_DECLARED") - bbox <- entity$spatial_bbox - resource$setNativeBoundingBox(bbox$xmin, bbox$ymin, bbox$xmax, bbox$ymax, crs = epsgCode) - sfc_min <- sf::st_sfc(sf::st_point(c(bbox$xmin, bbox$ymin)), crs = epsgCode) - sfc_max <- sf::st_sfc(sf::st_point(c(bbox$xmax, bbox$ymax)), crs = epsgCode) 
- sfc_min_ll <- sf::st_bbox(sf::st_transform(sfc_min, crs = 4326)) - sfc_max_ll <- sf::st_bbox(sf::st_transform(sfc_max, crs = 4326)) - resource$setLatLonBoundingBox(sfc_min_ll$xmin, sfc_min_ll$ymin, sfc_max_ll$xmax, sfc_max_ll$ymax, crs = 4326) - for(subject in entity$subjects){ - kwds <- subject$keywords - for(kwd in kwds) resource$addKeyword(kwd$name) - } - - #add metadata links - #in case (only if) geoflow defines either CSW or Geonetwork software, we can add metadata links - md_link_xml <- NULL - md_link_html <- NULL - if(!is.null(config$software$output$csw)|!is.null(config$software$output$geonetwork)){ - if(!is.null(config$software$output$csw)){ - md_link_xml <- paste0(config$software$output$csw_config$parameters$url, "?service=CSW&request=GetRecordById&Version=", config$software$output$csw_config$parameters$version, - "&elementSetName=full&outputSchema=http%3A//www.isotc211.org/2005/gmd&id=", entity$identifiers[["id"]]) - } - if(!is.null(config$software$output$geonetwork)){ - md_link_xml <- paste0(config$software$output$geonetwork_config$parameters$url, "/srv/eng/csw?service=CSW&request=GetRecordById&Version=2.0.2", - "&elementSetName=full&outputSchema=http%3A//www.isotc211.org/2005/gmd&id=", entity$identifiers[["id"]]) - if(startsWith(config$software$output$geonetwork_config$parameters$version, "2")){ - md_link_html <- paste0(config$software$output$geonetwork_config$parameters$url, "/srv/en/main.home?uuid=", entity$identifiers[["id"]]) - }else if(startsWith(config$software$output$geonetwork_config$parameters$version, "3")){ - md_link_html <- paste0(config$software$output$geonetwork_config$parameters$url, "/srv/eng/catalog.search#/metadata/", entity$identifiers[["id"]]) - } - } - } - if(!is.null(md_link_xml)){ - md_xml <- GSMetadataLink$new(type = "text/xml", metadataType = "ISO19115:2003", content = md_link_xml) - resource$addMetadataLink(md_xml) - } - if(!is.null(md_link_html)){ - md_html <- GSMetadataLink$new(type = "text/html", metadataType = 
"ISO19115:2003", content = md_link_html) - resource$addMetadataLink(md_html) - } - - #resource type specific properties - switch(entity$data$spatialRepresentationType, - "vector" = { - #cql filter? - if(!is.null(entity$data$cqlfilter)){ - resource$setCqlFilter(entity$data$cqlfilter) - } - - #virtual table? - if(entity$data$uploadType == "dbquery"){ - vt <- GSVirtualTable$new() - vt$setName(layername) - vt$setSql(entity$data$sql) - #if the virtual table is spatialized - if(!is.null(entity$data$geometryField) & !is.null(entity$data$geometryType)){ - vtg <- GSVirtualTableGeometry$new( - name = entity$data$geometryField, - type = entity$data$geometryType, - srid = entity$srid - ) - vt$setGeometry(vtg) - } - #if the virtual table has service parameters - if(length(entity$data$parameters)>0){ - for(param in entity$data$parameters){ - vtp <- GSVirtualTableParameter$new( - name = param$name, - defaultValue = param$defaultvalue, - regexpValidator = param$regexp - ) - vt$addParameter(vtp) - } - } - resource$setVirtualTable(vt) - } - }, - "grid" = { - - #coverage view? 
- if(length(entity$data$bands)>0){ - coview <- GSCoverageView$new() - coview$setName(layername) - coview$setEnvelopeCompositionType(entity$data$envelopeCompositionType) - coview$setSelectedResolution(entity$data$selectedResolution) - coview$setSelectedResolutionIndex(entity$data$selectedResolutionIndex) - for(band in entity$data$bands){ - cvb <- GSCoverageBand$new() - covname <- if(!is.null(band$name)) band$name else layername - cvb$setDefinition(paste0(covname,"@", band$index)) - cvb$setIndex(band$index) - cvb$addInputBand(GSInputCoverageBand$new( coverageName = covname, band = band$index)) - coview$addBand(cvb) - } - resource$setView(coview) - }else{ - #check nb of bands, if > 3 we configure a coverage view - bands <- names(entity$data$coverages) - if(length(bands)>3){ - coview <- GSCoverageView$new() - coview$setName(layername) - ect <- entity$data$envelopeCompositionType - if(is.null(ect)) ect <- "INTERSECTION" - coview$setEnvelopeCompositionType(ect) - sr <- entity$data$selectedResolution - if(is.null(sr)) sr <- "BEST" - coview$setSelectedResolution(sr) - sri <- entity$data$selectedResolutionIndex - if(is.null(sri)) sri <- -1 - coview$setSelectedResolutionIndex(sri) - for(i in 1:length(bands)){ - band <- bands[i] - cvb <- GSCoverageBand$new() - covname <- layername - cvb$setDefinition(paste0(covname,"@", i-1)) - cvb$setIndex(i-1) - cvb$addInputBand(GSInputCoverageBand$new( coverageName = covname, band = i-1)) - coview$addBand(cvb) - } - resource$setView(coview) - } - } - - } - ) - - #styles publication if needed - gs_styles <- GS$getStyleNames() - if(entity$data$styleUpload) if(length(entity$data$styles)>0){ - for(i in 1:length(entity$data$styles)){ - style <- entity$data$styles[i] - #check if any style SLD file is available in source - style_sldfile <- paste0(style,".sld") - if(!style %in% gs_styles){ - config$logger.warn(sprintf("No style '%s' in Geoserver", style)) - if(style_sldfile %in% entity$data$source){ - config$logger.info(sprintf("Creating GeoServer 
style '%s' from SLD style file '%s' available as source", style, style_sldfile)) - created <- GS$createStyle(file = file.path(getwd(), "data", style_sldfile), name = style) - } - } - } - GS$reload() - } - - #layer build and publication - switch(entity$data$spatialRepresentationType, - "vector" = { - layer <- GSLayer$new() - layer$setName(layername) - if(length(entity$data$styles)>0){ - for(i in 1:length(entity$data$styles)){ - style <- entity$data$styles[[i]] - if(i==1) layer$setDefaultStyle(style) else layer$addStyle(style) - } - }else{ - layer$setDefaultStyle("generic") - } - - #publish - try(GS$unpublishLayer(workspace, store, layername)) - out <- GS$publishLayer(workspace, store, resource, layer) - if(!out){ - errMsg <- sprintf("Error during layer '%s' publication for entity '%s'!",layername, entity$identifiers[["id"]]) - config$logger.error(errMsg) - }else{ - infoMsg <- sprintf("Successful layer'%s' publication in Geoserver for entity '%s'!", layername, entity$identifiers[["id"]]) - } - }, - "grid" = { - out <- FALSE - cov <- GS$getCoverage(ws = workspace, cs = store, cv = layername) - if(is.null(cov)){ - out <- GS$createCoverage(ws = workspace, cs = store, coverage = resource) - }else{ - out <- GS$updateCoverage(ws = workspace, cs = store, coverage = resource) - } - #manage coverage styles by updating associated layer object - layer <- GS$getLayer(layername) - if(is(layer, "GSLayer")){ - layer$setName(layername) - if(length(entity$data$styles)>0){ - layer$styles <- list() - for(i in 1:length(entity$data$styles)){ - style <- entity$data$styles[[i]] - if(i==1) layer$setDefaultStyle(style) else layer$addStyle(style) - } - }else{ - layer$setDefaultStyle("generic") - } - GS$updateLayer(layer) - } - - if(!out){ - errMsg <- sprintf("Error during layer '%s' publication for entity '%s'!",layername, entity$identifiers[["id"]]) - config$logger.error(errMsg) - }else{ - infoMsg <- sprintf("Successful layer'%s' publication in Geoserver for entity '%s'!", layername, 
entity$identifiers[["id"]]) - } - } - ) - - -} \ No newline at end of file diff --git a/R/geoflow_action_writeWorkflowJobDataResource.R b/R/geoflow_action_writeWorkflowJobDataResource.R deleted file mode 100644 index 98da04f4..00000000 --- a/R/geoflow_action_writeWorkflowJobDataResource.R +++ /dev/null @@ -1,60 +0,0 @@ -#For write generic action -sf_write_generic <- function(entity, config, options){ - #options - createIndexes <- ifelse(!is.null(options$createIndexes), options$createIndexes, FALSE) - overwrite <- ifelse(!is.null(options$overwrite), options$overwrite, TRUE) - append <- ifelse(!is.null(options$append), options$append, FALSE) - chunk.size <- ifelse(!is.null(options$chunk.size), options$chunk.size, 0L) - #function - writeWorkflowJobDataResource( - entity = entity, - config = config, - obj = NULL, - useFeatures = TRUE, - resourcename = NULL, - useUploadSource = TRUE, - createIndexes = createIndexes, - overwrite = overwrite, - append = append, - chunk.size = chunk.size, - type=options$type - ) -} - -#For write in dbi -sf_write_dbi <- function(entity, config, options){ - #options - createIndexes <- ifelse(!is.null(options$createIndexes), options$createIndexes, FALSE) - overwrite <- ifelse(!is.null(options$overwrite), options$overwrite, TRUE) - append <- ifelse(!is.null(options$append), options$append, FALSE) - chunk.size <- ifelse(!is.null(options$chunk.size), options$chunk.size, 0L) - #function - writeWorkflowJobDataResource( - entity = entity, - config = config, - obj = NULL, - useFeatures = TRUE, - resourcename = NULL, - useUploadSource = TRUE, - createIndexes = createIndexes, - overwrite = overwrite, - append = append, - chunk.size = chunk.size, - type = "dbtable" - ) -} - -#For write as shp -sf_write_shp <- function(entity, config, options){ - writeWorkflowJobDataResource( - entity = entity, - config = config, - obj = NULL, - useFeatures = TRUE, - resourcename = NULL, - useUploadSource = TRUE, - type = "shp" - ) -} - - \ No newline at end of file 
diff --git a/R/geoflow_data.R b/R/geoflow_data.R index 5e2f643e..0295e8c8 100644 --- a/R/geoflow_data.R +++ b/R/geoflow_data.R @@ -389,8 +389,10 @@ geoflow_data <- R6Class("geoflow_data", scope = "local", types = c("Entity data action"), def = desc, - fun = eval(expr = parse(text = paste0("function(entity, config, options){ - source(",script_to_source,", local = TRUE) + fun = eval(expr = parse(text = paste0("function(action, entity, config){ + act_fun <- source(",script_to_source,", local = TRUE)$value + if(!is(act_fun, \"function\")) stop(\"Script for entity action ",action," is not a function!\") + act_fun(action, entity, config) }"))), script = script, options = action_options diff --git a/R/initWorkflow.R b/R/initWorkflow.R index bb6c8a9e..8702c97e 100644 --- a/R/initWorkflow.R +++ b/R/initWorkflow.R @@ -487,8 +487,7 @@ initWorkflow <- function(file, dir = "."){ action_to_trigger$options <- action$options }else{ if(config$profile$mode == "entity"){ - source(action$script) - customfun <- eval(parse(text = action$id)) + customfun <- source(action$script)$value if(is(customfun,"try-error")){ errMsg <- sprintf("Error while trying to evaluate custom function'%s", action$id) config$logger.error(errMsg) diff --git a/R/geoflow_action_atom4R_dataverse_deposit_record.R b/inst/actions/atom4R_dataverse_deposit_record.R similarity index 89% rename from R/geoflow_action_atom4R_dataverse_deposit_record.R rename to inst/actions/atom4R_dataverse_deposit_record.R index 592f0226..6af16938 100644 --- a/R/geoflow_action_atom4R_dataverse_deposit_record.R +++ b/inst/actions/atom4R_dataverse_deposit_record.R @@ -1,4 +1,4 @@ -atom4R_dataverse_deposit_record <- function(entity, config, options){ +function(action, entity, config){ if(!requireNamespace("atom4R", quietly = TRUE)){ stop("The 'atom4R-dataverse-deposit-record' action requires the 'atom4R' package") @@ -8,6 +8,7 @@ atom4R_dataverse_deposit_record <- function(entity, config, options){ skipFileDownload <- 
if(!is.null(config$profile$options$skipFileDownload)) config$profile$options$skipFileDownload else FALSE #options + options <- action$options depositWithFiles <- if(!is.null(options$depositWithFiles)) options$depositWithFiles else FALSE publish <- if(!is.null(options$publish) & depositWithFiles) options$publish else FALSE deleteOldFiles <- if(!is.null(options$deleteOldFiles)) options$deleteOldFiles else TRUE @@ -165,20 +166,20 @@ atom4R_dataverse_deposit_record <- function(entity, config, options){ action <- ifelse(is.null(doi),"CREATE","UPDATE") update <- action == "UPDATE" out <- switch(action, - "CREATE" = { - rec <- SWORD$createDataverseRecord(target_dataverse_id, dcentry) - doi <- unlist(strsplit(rec$id, "doi:"))[2] #we need the reserved doi to add files - rec - }, - "UPDATE" = { - if(update_metadata){ - config$logger.info(sprintf("Updating record for doi '%s'", doi)) - SWORD$updateDataverseRecord(target_dataverse_id, dcentry, paste0("doi:", doi)) - }else{ - config$logger.info(sprintf("Skip updating record for doi '%s' (option 'update_metadata' is FALSE)", doi)) - SWORD$getDataverseRecord(paste0("doi:", doi)) - } - } + "CREATE" = { + rec <- SWORD$createDataverseRecord(target_dataverse_id, dcentry) + doi <- unlist(strsplit(rec$id, "doi:"))[2] #we need the reserved doi to add files + rec + }, + "UPDATE" = { + if(update_metadata){ + config$logger.info(sprintf("Updating record for doi '%s'", doi)) + SWORD$updateDataverseRecord(target_dataverse_id, dcentry, paste0("doi:", doi)) + }else{ + config$logger.info(sprintf("Skip updating record for doi '%s' (option 'update_metadata' is FALSE)", doi)) + SWORD$getDataverseRecord(paste0("doi:", doi)) + } + } ) #delete/add files @@ -244,10 +245,10 @@ atom4R_dataverse_deposit_record <- function(entity, config, options){ #output table of DOIs if(is(out, "AtomEntry") | is(out, "AtomFeed")){ infoMsg <- switch(action, - "CREATE" = sprintf("Successfully created Dataverse dataset with id '%s'", - entity$identifiers[["id"]]), - 
"UPDATE" = sprintf("Successfully updated Dataverse dataset with id '%s' (doi: %s)", - entity$identifiers[["id"]], doi) + "CREATE" = sprintf("Successfully created Dataverse dataset with id '%s'", + entity$identifiers[["id"]]), + "UPDATE" = sprintf("Successfully updated Dataverse dataset with id '%s' (doi: %s)", + entity$identifiers[["id"]], doi) ) config$logger.info(infoMsg) diff --git a/R/geoflow_action_d4storagehub4R_upload_data.R b/inst/actions/d4storagehub4R_upload_data.R similarity index 83% rename from R/geoflow_action_d4storagehub4R_upload_data.R rename to inst/actions/d4storagehub4R_upload_data.R index 96895ab8..200d823c 100644 --- a/R/geoflow_action_d4storagehub4R_upload_data.R +++ b/inst/actions/d4storagehub4R_upload_data.R @@ -1,10 +1,11 @@ -d4storagehub4R_upload_data <- function(entity, config, options){ +function(action, entity, config){ if(!requireNamespace("d4storagehub4R", quietly = TRUE)){ stop("The 'd4storagehub4R-upload-datas' action requires the 'd4storagehub4R' package") } #options + options <- action$options depositWithFiles <- if(!is.null(options$depositWithFiles)) options$depositWithFiles else FALSE otherUploadFolders <- if(!is.null(options$otherUploadFolders)) options$otherUploadFolders else c() @@ -28,12 +29,12 @@ d4storagehub4R_upload_data <- function(entity, config, options){ #verify if folder exist and create it if missing #------------------------------------------------------------------------------------------------- workspace<- file.path(workspace,entity$getEntityJobDirname()) - folderID <- D4STORAGE_HUB$searchWSFolderID(folderPath = file.path(workspace,"data")) - if (is.null(folderID)) { - config$logger.info(sprintf("Creating folder [%s] in d4cience workspace", workspace)) - D4STORAGE_HUB$createFolder(folderPath = workspace, name="data", description = entity$titles[['title']], hidden = FALSE, recursive = TRUE) - } - + folderID <- D4STORAGE_HUB$searchWSFolderID(folderPath = file.path(workspace,"data")) + if (is.null(folderID)) { + 
config$logger.info(sprintf("Creating folder [%s] in d4cience workspace", workspace)) + D4STORAGE_HUB$createFolder(folderPath = workspace, name="data", description = entity$titles[['title']], hidden = FALSE, recursive = TRUE) + } + #upload #------------------------------------------------------------------------------------------------- if(entity$data$upload){ @@ -50,16 +51,16 @@ d4storagehub4R_upload_data <- function(entity, config, options){ config$logger.info(sprintf("File %s successfully uploaded to the d4science folder %s", fileName, file.path(workspace, "data"))) } } - + #enrish with relation #------------------------------------------------------------------------------------------------- - new_d4storagehub_link<- geoflow_relation$new() - new_d4storagehub_link$setKey("http") - new_d4storagehub_link$setName(fileName) - new_d4storagehub_link$setDescription(paste0(entity$titles[['title']]," - D4Science Data Download (",entity$data$uploadType,")")) - new_d4storagehub_link$setLink(D4STORAGE_HUB$getPublicFileLink(file.path(workspace, "data",fileName))) - - entity$addRelation(new_d4storagehub_link) + new_d4storagehub_link<- geoflow_relation$new() + new_d4storagehub_link$setKey("http") + new_d4storagehub_link$setName(fileName) + new_d4storagehub_link$setDescription(paste0(entity$titles[['title']]," - D4Science Data Download (",entity$data$uploadType,")")) + new_d4storagehub_link$setLink(D4STORAGE_HUB$getPublicFileLink(file.path(workspace, "data",fileName))) + + entity$addRelation(new_d4storagehub_link) if(otherUploadFolders>0){ for (folder in otherUploadFolders){ @@ -102,9 +103,9 @@ d4storagehub4R_upload_data <- function(entity, config, options){ D4STORAGE_HUB$uploadFile (folderPath = file.path(workspace, "data"), file=file.path(getwd(),"data",data_file), description = "", archive = FALSE) } }else{ - config$logger.warn("D4storagehub: no other data files to upload") + config$logger.warn("D4storagehub: no other data files to upload") } - + #check if metadata files 
exists metadata_files <- list.files(file.path(getwd(),"metadata")) if(length(metadata_files)>0){ @@ -115,10 +116,10 @@ d4storagehub4R_upload_data <- function(entity, config, options){ D4STORAGE_HUB$createFolder(folderPath = workspace, name="metadata", description = entity$titles[['title']], hidden = FALSE, recursive = TRUE) } #upload metadata files - for(metadata_file in metadata_files){ - config$logger.info(sprintf("D4storagehub: uploading metadata file '%s'", metadata_file)) - D4STORAGE_HUB$uploadFile (folderPath = file.path(workspace, "metadata"), file=file.path(getwd(),"metadata",metadata_file), description = "", archive = FALSE) - } + for(metadata_file in metadata_files){ + config$logger.info(sprintf("D4storagehub: uploading metadata file '%s'", metadata_file)) + D4STORAGE_HUB$uploadFile (folderPath = file.path(workspace, "metadata"), file=file.path(getwd(),"metadata",metadata_file), description = "", archive = FALSE) + } }else{ config$logger.warn("D4storagehub: no metadata files to upload") } diff --git a/R/geoflow_action_dataone_upload_datapackage.R b/inst/actions/dataone_upload_datapackage.R similarity index 88% rename from R/geoflow_action_dataone_upload_datapackage.R rename to inst/actions/dataone_upload_datapackage.R index 1966b245..02e7a66a 100644 --- a/R/geoflow_action_dataone_upload_datapackage.R +++ b/inst/actions/dataone_upload_datapackage.R @@ -1,4 +1,4 @@ -dataone_upload_datapackage <- function(entity, config, options){ +function(action, entity, config){ if(!requireNamespace("EML", quietly = TRUE)){ stop("The 'dataone-upload-datapackage' action requires the 'EML' package") @@ -14,6 +14,7 @@ dataone_upload_datapackage <- function(entity, config, options){ } #options + options <- action$options publish <- TRUE #see https://github.com/DataONEorg/rdataone/issues/262 accessRules <- NA @@ -33,14 +34,14 @@ dataone_upload_datapackage <- function(entity, config, options){ #create datapackage dp <- switch(action, - "CREATE" = new("DataPackage"), - "UPDATE" 
= try(dataone::getDataPackage(DATAONE, identifier = packageId, lazyLoad = TRUE, limit="0MB", quiet=FALSE)) + "CREATE" = new("DataPackage"), + "UPDATE" = try(dataone::getDataPackage(DATAONE, identifier = packageId, lazyLoad = TRUE, limit="0MB", quiet=FALSE)) ) if(update){ members <- datapack::getIdentifiers(dp) } - + #EML metadata dp_eml_meta_obj <- NULL eml_file <- file.path("metadata", paste0(entity$identifiers[["id"]], "_EML.xml")) @@ -114,10 +115,10 @@ dataone_upload_datapackage <- function(entity, config, options){ #output table of DOIs if(is(out, "character")){ infoMsg <- switch(action, - "CREATE" = sprintf("Successfully created data package with id '%s'", - entity$identifiers[["id"]]), - "UPDATE" = sprintf("Successfully updated Dataverse dataset with id '%s' (packageId: %s)", - entity$identifiers[["id"]], packageId) + "CREATE" = sprintf("Successfully created data package with id '%s'", + entity$identifiers[["id"]]), + "UPDATE" = sprintf("Successfully updated Dataverse dataset with id '%s' (packageId: %s)", + entity$identifiers[["id"]], packageId) ) config$logger.info(infoMsg) @@ -136,5 +137,5 @@ dataone_upload_datapackage <- function(entity, config, options){ entity$identifiers[["dataone_packageId_to_save"]] <- packageId_to_save entity$setStatus("dataone", ifelse(publish, "published", "draft")) } - + } \ No newline at end of file diff --git a/R/geoflow_action_eml_create_eml.R b/inst/actions/eml_create_eml.R similarity index 68% rename from R/geoflow_action_eml_create_eml.R rename to inst/actions/eml_create_eml.R index 183fa6de..83fadd1e 100644 --- a/R/geoflow_action_eml_create_eml.R +++ b/inst/actions/eml_create_eml.R @@ -1,4 +1,4 @@ -eml_create_eml <- function(entity, config, options){ +function(action, entity, config){ if(!requireNamespace("EML", quietly = TRUE)){ stop("The EML action requires the 'EML' package") @@ -8,6 +8,7 @@ eml_create_eml <- function(entity, config, options){ } #options + options <- action$options taxonomySubject <- 
if(!is.null(options$subject_taxonomy)) options$subject_taxonomy else "taxonomy" #init dataset @@ -55,10 +56,10 @@ eml_create_eml <- function(entity, config, options){ if(length(contact$identifiers)>0){ for(idkey in names(contact$identifiers)){ switch(idkey, - "orcid" = { - person$id <- sprintf("https://orcid.org/%s", contact$identifiers[[idkey]]) - person$userId <- list(directory = "https://orcid.org", userId = person$id) - } + "orcid" = { + person$id <- sprintf("https://orcid.org/%s", contact$identifiers[[idkey]]) + person$userId <- list(directory = "https://orcid.org", userId = person$id) + } ) } } @@ -78,7 +79,7 @@ eml_create_eml <- function(entity, config, options){ }) return(out_persons) } - + #creator creators <- entity$getContacts()[sapply(entity$getContacts(), function(x){x$role=="owner"})] dataset$creator <- contactsToEML(creators) @@ -137,12 +138,12 @@ eml_create_eml <- function(entity, config, options){ url = list( url = relation$link, "function" = switch(relation$key, - "thumbnail" = "information", - "parent" = "information", - "http" = "information", - "wms" = "information", - "wfs" = "download", - "information" + "thumbnail" = "information", + "parent" = "information", + "http" = "information", + "wms" = "information", + "wfs" = "download", + "information" ) ) ) @@ -233,7 +234,7 @@ eml_create_eml <- function(entity, config, options){ if(!is.null(entity$descriptions$project)) project <- entity$descriptions$project #attributes - + #spatialVector if(!is.null(entity$data)) if(!is.null(entity$data$features)){ features = entity$data$features @@ -316,78 +317,78 @@ eml_create_eml <- function(entity, config, options){ } } if(is.null(uom)) uom = "dimensionless" - + #measurementScale measurementScale <- switch(class(featureAttrValues[1])[1], - "integer" = EML::eml$measurementScale(ratio = EML::eml$ratio( - unit = EML::eml$unit(standardUnit = uom), - numericDomain = EML::eml$numericDomain( - numberType = "integer" - ))), - "numeric" = 
EML::eml$measurementScale(ratio = EML::eml$ratio( - unit = EML::eml$unit(standardUnit = uom), - numericDomain = EML::eml$numericDomain( - numberType = "real" - ))), - "character" = EML::eml$measurementScale(nominal = EML::eml$nominal( - nonNumericDomain = { - if (enumerated){ - EML::eml$nonNumericDomain( - enumeratedDomain = EML::eml$enumeratedDomain( - codeDefinition = lapply(featureAttrValues, function(featureAttrValue){ - print(featureAttrValue) - EML::eml$codeDefinition( - code = featureAttrValue, - definition = { - def = featureAttrValue - reg_item <- fat_attr_register$data[fat_attr_register$data$code == featureAttrValue,] - if(nrow(reg_item)>0){ - def = reg_item[1L,"label"] - } - print(def) - def - }, - source = { - src = NULL - reg_item <- fat_attr_register$data[fat_attr_register$data$code == featureAttrValue,] - if(nrow(reg_item)>0){ - src = reg_item[1L,"uri"] - } - print(src) - src - } - ) - }) - ) - ) - }else{ - EML::eml$nonNumericDomain( - textDomain = EML::eml$textDomain( - definition = "Free text", - pattern = "\\w" - ) - ) - } - } - )), - "logical" = EML::eml$measurementScale(ordinal = EML::eml$ordinal( - nonNumericDomain = EML::eml$nonNumericDomain( - enumeratedDomain = EML::eml$enumeratedDomain( - codeDefinition = list( - EML::eml$codeDefinition(code = "TRUE", definition = "TRUE"), - EML::eml$codeDefinition(code = "FALSE", definition = "FALSE") - ) - ) - ) - )), - "Date" = EML::eml$measurementScale(dateTime = EML::eml$dateTime(formatString = "YYYY-MM-DDTHH:mm:ss")), - "POSIXct" = EML::eml$measurementScale(dateTime = EML::eml$dateTime(formatString = "YYYY-MM-DD")), - "sfc_POINT" = EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))), - "sfc_MULTIPOINT" = EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))), - "sfc_LINESTRING" = 
EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))), - "sfc_MULTILINESTRING" = EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))), - "sfc_POLYGON" = EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))), - "sfc_MULTIPOLYGON" = EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))) + "integer" = EML::eml$measurementScale(ratio = EML::eml$ratio( + unit = EML::eml$unit(standardUnit = uom), + numericDomain = EML::eml$numericDomain( + numberType = "integer" + ))), + "numeric" = EML::eml$measurementScale(ratio = EML::eml$ratio( + unit = EML::eml$unit(standardUnit = uom), + numericDomain = EML::eml$numericDomain( + numberType = "real" + ))), + "character" = EML::eml$measurementScale(nominal = EML::eml$nominal( + nonNumericDomain = { + if (enumerated){ + EML::eml$nonNumericDomain( + enumeratedDomain = EML::eml$enumeratedDomain( + codeDefinition = lapply(featureAttrValues, function(featureAttrValue){ + print(featureAttrValue) + EML::eml$codeDefinition( + code = featureAttrValue, + definition = { + def = featureAttrValue + reg_item <- fat_attr_register$data[fat_attr_register$data$code == featureAttrValue,] + if(nrow(reg_item)>0){ + def = reg_item[1L,"label"] + } + print(def) + def + }, + source = { + src = NULL + reg_item <- fat_attr_register$data[fat_attr_register$data$code == featureAttrValue,] + if(nrow(reg_item)>0){ + src = reg_item[1L,"uri"] + } + print(src) + src + } + ) + }) + ) + ) + }else{ + EML::eml$nonNumericDomain( + textDomain = EML::eml$textDomain( + definition = "Free text", + pattern = "\\w" + ) + ) + } + } + )), + "logical" = 
EML::eml$measurementScale(ordinal = EML::eml$ordinal( + nonNumericDomain = EML::eml$nonNumericDomain( + enumeratedDomain = EML::eml$enumeratedDomain( + codeDefinition = list( + EML::eml$codeDefinition(code = "TRUE", definition = "TRUE"), + EML::eml$codeDefinition(code = "FALSE", definition = "FALSE") + ) + ) + ) + )), + "Date" = EML::eml$measurementScale(dateTime = EML::eml$dateTime(formatString = "YYYY-MM-DDTHH:mm:ss")), + "POSIXct" = EML::eml$measurementScale(dateTime = EML::eml$dateTime(formatString = "YYYY-MM-DD")), + "sfc_POINT" = EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))), + "sfc_MULTIPOINT" = EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))), + "sfc_LINESTRING" = EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))), + "sfc_MULTILINESTRING" = EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))), + "sfc_POLYGON" = EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))), + "sfc_MULTIPOLYGON" = EML::eml$measurementScale(interval = EML::eml$interval(unit = EML::eml$unit(standardUnit = uom), numericDomain = EML::eml$numericDomain(numberType = "real"))) ) attribute = EML::eml$attribute( @@ -406,7 +407,7 @@ eml_create_eml <- function(entity, config, options){ attributeList[[length(attributeList)+1]] <- attribute } - + if(is(entity$data$features, "sf")){ #use spatialVector config$logger.info("EML: spatial dataset - filling attributeList as 'spatialVector'") @@ -417,12 +418,12 @@ eml_create_eml <- function(entity, config, options){ 
coverage = dataset$coverage, attributeList = EML::eml$attributeList(attribute = attributeList), geometry = switch(class(sf::st_geometry(entity$data$features))[1], - "sfc_POINT" = "Point", - "sfc_MULTIPOINT" = "MultiPoint", - "sfc_LINESTRING" = "LineString", - "sfc_MULTILINESTRING" = "MultiLineString", - "sfc_POLYGON" = "Polygon", - "sfc_MULTIPOLYGON" = "MultiPolygon" + "sfc_POINT" = "Point", + "sfc_MULTIPOINT" = "MultiPoint", + "sfc_LINESTRING" = "LineString", + "sfc_MULTILINESTRING" = "MultiLineString", + "sfc_POLYGON" = "Polygon", + "sfc_MULTIPOLYGON" = "MultiPolygon" ), geometricObjectCount = nrow(entity$data$features), spatialReference = list(horizCoordSysName = { diff --git a/R/geoflow_action_geometa_create_iso_19110.R b/inst/actions/geometa_create_iso_19110.R similarity index 90% rename from R/geoflow_action_geometa_create_iso_19110.R rename to inst/actions/geometa_create_iso_19110.R index 17e83799..f111138d 100644 --- a/R/geoflow_action_geometa_create_iso_19110.R +++ b/inst/actions/geometa_create_iso_19110.R @@ -1,4 +1,4 @@ -geometa_create_iso_19110 <- function(entity, config, options){ +function(action, entity, config){ if(!requireNamespace("geometa", quietly = TRUE)){ stop("The 'geometa-create-iso-19110' action requires the 'geometa' package") @@ -15,6 +15,7 @@ geometa_create_iso_19110 <- function(entity, config, options){ } #options + options <- actions$options doi <- if(!is.null(options$doi)) options$doi else FALSE exclude_attributes <- if(!is.null(options$exclude_attributes)) options$exclude_attributes else list() exclude_attributes_not_in_dictionary <- if(!is.null(options$exclude_attributes_not_in_dictionary)) options$exclude_attributes_not_in_dictionary else FALSE @@ -133,12 +134,12 @@ geometa_create_iso_19110 <- function(entity, config, options){ columns <- c(colnames(features), unlist(extra_attributes)) for(featureAttrName in columns){ - + if(featureAttrName %in% exclude_attributes){ config$logger.warn(sprintf("Feature Attribute '%s' is listed in 
'exclude_attributes'. Discarding it...", featureAttrName)) next } - + fat_attr_register <- NULL #create attribute @@ -200,8 +201,8 @@ geometa_create_iso_19110 <- function(entity, config, options){ #add listed values if(featureAttrName %in% colnames(features)){ featureAttrValues <- switch(class(features)[1], - "sf" = features[,featureAttrName][[1]], - "data.frame" = features[,featureAttrName] + "sf" = features[,featureAttrName][[1]], + "data.frame" = features[,featureAttrName] ) }else{ featureAttrValues <- fat_attr_register$data$code @@ -248,26 +249,26 @@ geometa_create_iso_19110 <- function(entity, config, options){ }else{ config$logger.warn(sprintf("Skip listing values for feature Attribute '%s'...", featureAttrName)) } - + #add primitive type + data type (attribute or variable) as valueType fat_type <- switch(class(featureAttrValues[1])[1], - "integer" = "xsd:int", - "numeric" = "xsd:decimal", - "character" = "xsd:string", - "logical" = "xsd:boolean", - "Date" = "xsd:date", - "POSIXct" = "xsd:datetime", - "sfc_POINT" = "gml:PointPropertyType", - "sfc_MULTIPOINT" = "gml:MultiPointPropertyType", - "sfc_LINESTRING" = "gml:LineStringPropertyType", - "sfc_MULTILINESTRING" = "gml:MultiLineStringPropertyType", - "sfc_POLYGON" = "gml:PolygonPropertyType", - "sfc_MULTIPOLYGON" = "gml:MultiPolygonPropertyType" + "integer" = "xsd:int", + "numeric" = "xsd:decimal", + "character" = "xsd:string", + "logical" = "xsd:boolean", + "Date" = "xsd:date", + "POSIXct" = "xsd:datetime", + "sfc_POINT" = "gml:PointPropertyType", + "sfc_MULTIPOINT" = "gml:MultiPointPropertyType", + "sfc_LINESTRING" = "gml:LineStringPropertyType", + "sfc_MULTILINESTRING" = "gml:MultiLineStringPropertyType", + "sfc_POLYGON" = "gml:PolygonPropertyType", + "sfc_MULTIPOLYGON" = "gml:MultiPolygonPropertyType" ) fat_generic_type <- switch(class(featureAttrValues[1])[1], - "integer" = "variable", - "numeric" = "variable", - "attribute" + "integer" = "variable", + "numeric" = "variable", + "attribute" ) 
if(!is.null(fat_attr)) fat_generic_type <- fat_attr$type fat_type_anchor <- ISOAnchor$new(name = fat_type, href = fat_generic_type) diff --git a/R/geoflow_action_geometa_create_iso_19115.R b/inst/actions/geometa_create_iso_19115.R similarity index 87% rename from R/geoflow_action_geometa_create_iso_19115.R rename to inst/actions/geometa_create_iso_19115.R index ace2330e..70c10e51 100644 --- a/R/geoflow_action_geometa_create_iso_19115.R +++ b/inst/actions/geometa_create_iso_19115.R @@ -1,4 +1,4 @@ -geometa_create_iso_19115 <- function(entity, config, options){ +function(action, entity, config){ if(!requireNamespace("geometa", quietly = TRUE)){ stop("The 'geometa-create-iso-19115' action requires the 'geometa' package") @@ -10,6 +10,7 @@ geometa_create_iso_19115 <- function(entity, config, options){ features <- entity$data$features #options + options <- action$options use_uuid <- if(!is.null(options$use_uuid)) options$use_uuid else FALSE inspire <- if(!is.null(options$inspire)) options$inspire else FALSE logo <- if(!is.null(options$logo)) options$logo else FALSE @@ -139,25 +140,25 @@ geometa_create_iso_19115 <- function(entity, config, options){ md$setHierarchyLevel(dctype_iso) #add contacts - # if(length(entity$contacts)>0){ - # metadata_contacts <- entity$contacts[sapply(entity$contacts, function(x){tolower(x$role) == "metadata"})] - # if(is.null(metadata_contacts)) metadata_contacts<-entity$contacts[sapply(entity$contacts, function(x){tolower(x$role) == "owner"})] - # for(metadata_contact in metadata$contacts){ - # metadata_contact$setRole("metadata") - # rp<-createResponsibleParty(metadata_contact,"pointOfContact") - # md$addContact(rp) - # } - # } - - if(length(entity$contacts)>0)for(entity_contact in entity$contacts){ - if(tolower(entity_contact$role) == "metadata"){ - rp<-createResponsibleParty(entity_contact,"pointOfContact") - md$addContact(rp) - } - } + # if(length(entity$contacts)>0){ + # metadata_contacts <- entity$contacts[sapply(entity$contacts, 
function(x){tolower(x$role) == "metadata"})] + # if(is.null(metadata_contacts)) metadata_contacts<-entity$contacts[sapply(entity$contacts, function(x){tolower(x$role) == "owner"})] + # for(metadata_contact in metadata$contacts){ + # metadata_contact$setRole("metadata") + # rp<-createResponsibleParty(metadata_contact,"pointOfContact") + # md$addContact(rp) + # } + # } + + if(length(entity$contacts)>0)for(entity_contact in entity$contacts){ + if(tolower(entity_contact$role) == "metadata"){ + rp<-createResponsibleParty(entity_contact,"pointOfContact") + md$addContact(rp) + } + } if(length(md$contact)==0) md$contact <- ISOAttributes$new("gco:nilReason" = "missing") - + #spatial representation if(!is.null(entity$data)) { @@ -179,12 +180,12 @@ geometa_create_iso_19115 <- function(entity, config, options){ if(geomLevel == "geometryOnly"){ geomObject <- ISOGeometricObjects$new() isoGeomType <- switch(geomtype, - "GEOMETRY" = "composite", "GEOMETRYCOLLECTION" = "composite", - "POINT" = "point", "MULTIPOINT" = "point", - "LINESTRING" = "curve", "CIRCULARSTRING" = "curve", "MULTILINESTRING" = "curve", "CURVE" = "curve", "COMPOUNDCURVE" = "curve", - "POLYGON" = "surface", "MULTIPOLYGON" = "surface", "TRIANGLE" = "surface", - "CURVEPOLYGON" = "surface", "SURFACE" = "surface", "MULTISURFACE" = "surface", - "POLYHEDRALSURFACE" = "solid" + "GEOMETRY" = "composite", "GEOMETRYCOLLECTION" = "composite", + "POINT" = "point", "MULTIPOINT" = "point", + "LINESTRING" = "curve", "CIRCULARSTRING" = "curve", "MULTILINESTRING" = "curve", "CURVE" = "curve", "COMPOUNDCURVE" = "curve", + "POLYGON" = "surface", "MULTIPOLYGON" = "surface", "TRIANGLE" = "surface", + "CURVEPOLYGON" = "surface", "SURFACE" = "surface", "MULTISURFACE" = "surface", + "POLYHEDRALSURFACE" = "solid" ) geomObject$setGeometricObjectType(isoGeomType) geomObject$setGeometricObjectCount(nrow(features[st_geometry_type(features)==geomtype,])) @@ -212,7 +213,7 @@ geometa_create_iso_19115 <- function(entity, config, options){ 
}else{ dimObject$setResolution(ISOMeasure$new(value=resolution$value,uom=resolution$uom)) } - gsr$addDimension(dimObject) + gsr$addDimension(dimObject) } gsr$setCellGeometry("area") md$addSpatialRepresentationInfo(gsr) @@ -252,7 +253,7 @@ geometa_create_iso_19115 <- function(entity, config, options){ ident$addPointOfContact(rp) } } - + #citation now <- Sys.time() ct <- ISOCitation$new() @@ -296,7 +297,7 @@ geometa_create_iso_19115 <- function(entity, config, options){ ct$addIdentifier(ISOMetaIdentifier$new(code = mdIdentifier)) } } - + ct$addPresentationForm("mapDigital") #TODO to map with gsheet #adding responsible party (search for owner, otherwise take first contact) @@ -559,10 +560,10 @@ geometa_create_iso_19115 <- function(entity, config, options){ if(request=="GetCapabilities"){ or1 <- ISOOnlineResource$new() or1$setLinkage(paste0(wms$link,"&version=",switch(wms$key, - "wms" = "1.1.0", - "wms110" = "1.1.0", - "wms111" = "1.1.1", - "wms130" = "1.3.0"),"&request=GetCapabilities")) + "wms" = "1.1.0", + "wms110" = "1.1.0", + "wms111" = "1.1.1", + "wms130" = "1.3.0"),"&request=GetCapabilities")) or1$setName("OGC:WMS") or1$setDescription("Open Geospatial Consortium Web Map Service (WMS)") or1$setProtocol("OGC:WMS") @@ -631,44 +632,44 @@ geometa_create_iso_19115 <- function(entity, config, options){ md$contentInfo = c(md$contentInfo,cov) } if(!is.null(entity$data$dimensions)){ - #create coverage description - cov <- ISOImageryCoverageDescription$new() - cov$setAttributeDescription("data") - cov$setContentType("coordinate") - - #adding dimensions - - for(dimension in names(entity$data$dimensions)){ - dim_name<-dimension - dimension<-entity$data$dimensions[[dimension]] - - band <- ISOBand$new() - - mn <- ISOMemberName$new(aName = dim_name, attributeType = "float") - band$sequenceIdentifier<-mn - band$descriptor<-dimension$longName - band$maxValue<-dimension$minValue - band$minValue<-dimension$maxValue - #unit - # unit<-dimension$resolution$uom - # if(length(unit)>0){ 
- # invisible(capture.output(gml<-try(GMLUnitDefinition$buildFrom(unit)),type="message")) - # if(is.null(gml) | class(gml)=="try-error") invisible(capture.output(gml<-try(GMLUnitDefinition$buildFrom(unit,"name_singular")),type="message")) - # if(is.null(gml) | class(gml)=="try-error") invisible(capture.output(gml<-try(GMLUnitDefinition$buildFrom(unit,"name_plural")),type="message")) - # if(!is.null(gml) & class(gml)!="try-error") band$units<-gml - # } - band$units<-NA - cov$dimension = c(cov$dimension, band) - - if(include_coverage_data_dimension_values){ - des <- ISOImageryRangeElementDescription$new() - des$name<-dim_name - des$definition<-dimension$longName - des$rangeElement <- sapply(unique(dimension$values), function(x){ ISORecord$new(value = x)}) - cov$rangeElementDescription = c(cov$rangeElementDescription,des) - } - } - md$contentInfo = c(md$contentInfo,cov) + #create coverage description + cov <- ISOImageryCoverageDescription$new() + cov$setAttributeDescription("data") + cov$setContentType("coordinate") + + #adding dimensions + + for(dimension in names(entity$data$dimensions)){ + dim_name<-dimension + dimension<-entity$data$dimensions[[dimension]] + + band <- ISOBand$new() + + mn <- ISOMemberName$new(aName = dim_name, attributeType = "float") + band$sequenceIdentifier<-mn + band$descriptor<-dimension$longName + band$maxValue<-dimension$minValue + band$minValue<-dimension$maxValue + #unit + # unit<-dimension$resolution$uom + # if(length(unit)>0){ + # invisible(capture.output(gml<-try(GMLUnitDefinition$buildFrom(unit)),type="message")) + # if(is.null(gml) | class(gml)=="try-error") invisible(capture.output(gml<-try(GMLUnitDefinition$buildFrom(unit,"name_singular")),type="message")) + # if(is.null(gml) | class(gml)=="try-error") invisible(capture.output(gml<-try(GMLUnitDefinition$buildFrom(unit,"name_plural")),type="message")) + # if(!is.null(gml) & class(gml)!="try-error") band$units<-gml + # } + band$units<-NA + cov$dimension = c(cov$dimension, band) + + 
if(include_coverage_data_dimension_values){ + des <- ISOImageryRangeElementDescription$new() + des$name<-dim_name + des$definition<-dimension$longName + des$rangeElement <- sapply(unique(dimension$values), function(x){ ISORecord$new(value = x)}) + cov$rangeElementDescription = c(cov$rangeElementDescription,des) + } + } + md$contentInfo = c(md$contentInfo,cov) } if(!is.null(entity$data$ogc_dimensions)){ #create coverage description @@ -680,10 +681,10 @@ geometa_create_iso_19115 <- function(entity, config, options){ ogc_dim_name<-toupper(ogc_dimension) ogc_dimension<-entity$data$ogc_dimensions[[ogc_dimension]] band <- ISOBand$new() - + mn <- switch(ogc_dim_name, - "TIME" = ISOMemberName$new(aName = ogc_dim_name, attributeType = "xsd:datetime"), - "ELEVATION" = ISOMemberName$new(aName = ogc_dim_name, attributeType = "xsd:decimal") + "TIME" = ISOMemberName$new(aName = ogc_dim_name, attributeType = "xsd:datetime"), + "ELEVATION" = ISOMemberName$new(aName = ogc_dim_name, attributeType = "xsd:decimal") ) band$sequenceIdentifier<-mn @@ -691,11 +692,11 @@ geometa_create_iso_19115 <- function(entity, config, options){ cov$dimension = c(cov$dimension, band) if(include_coverage_service_dimension_values){ - des <- ISOImageryRangeElementDescription$new() - des$name<-ogc_dim_name - des$definition<-"" - des$rangeElement <- sapply(ogc_dimension$values, function(x){ ISORecord$new(value = x)}) - cov$rangeElementDescription = c(cov$rangeElementDescription,des) + des <- ISOImageryRangeElementDescription$new() + des$name<-ogc_dim_name + des$definition<-"" + des$rangeElement <- sapply(ogc_dimension$values, function(x){ ISORecord$new(value = x)}) + cov$rangeElementDescription = c(cov$rangeElementDescription,des) } } md$contentInfo = c(md$contentInfo,cov) @@ -744,12 +745,12 @@ geometa_create_iso_19115 <- function(entity, config, options){ or$setName(http_relation$name) or$setDescription(http_relation$description) protocol <- switch(http_relation$key, - "http" = "WWW:LINK-1.0-http--link", - 
"wms" = "OGC:WMS-1.1.0-http-get-map", #defaut - "wms110" = "OGC:WMS-1.1.0-http-get-map", - "wms111" = "OGC:WMS-1.1.1-http-get-map", - "wms130" = "OGC:WMS-1.3.0-http-get-map", - "WWW:LINK-1.0-http--link" + "http" = "WWW:LINK-1.0-http--link", + "wms" = "OGC:WMS-1.1.0-http-get-map", #defaut + "wms110" = "OGC:WMS-1.1.0-http-get-map", + "wms111" = "OGC:WMS-1.1.1-http-get-map", + "wms130" = "OGC:WMS-1.3.0-http-get-map", + "WWW:LINK-1.0-http--link" ) or$setProtocol(protocol) dto$onLine = c(dto$onLine,or) @@ -841,4 +842,5 @@ geometa_create_iso_19115 <- function(entity, config, options){ md$save(file.path(getwd(), "metadata", paste0(entity$getEntityJobDirname(), "_ISO-19115.xml")), inspire = inspire, inspireValidator = INSPIRE_VALIDATOR) rm(md) -} + +} \ No newline at end of file diff --git a/R/geoflow_action_geonapi_publish_iso_19139.R b/inst/actions/geonapi_publish_iso_19139.R similarity index 69% rename from R/geoflow_action_geonapi_publish_iso_19139.R rename to inst/actions/geonapi_publish_iso_19139.R index 6a9a6e45..c4b1f6a1 100644 --- a/R/geoflow_action_geonapi_publish_iso_19139.R +++ b/inst/actions/geonapi_publish_iso_19139.R @@ -1,9 +1,11 @@ -geonapi_publish_iso_19139 <- function(entity, config, options){ +function(action, entity, config){ if(!requireNamespace("geonapi", quietly = TRUE)){ stop("The 'geonapi-publish-iso-19139' action requires the 'geonapi' package") } + options <- action$options + geometa_inspire <- if(!is.null(options$geometa_inspire)) options$geometa_inspire else FALSE INSPIRE_VALIDATOR <- NULL if(geometa_inspire){ @@ -71,37 +73,37 @@ geonapi_publish_iso_19139 <- function(entity, config, options){ } switch(GN$getClassName(), - "GNOpenAPIManager" = { - if(category_match_col=="name"){ - category <- available_categories[available_categories$name==category,]$id - } - GN$insertRecord(geometa = md, group = group, category = category, - uuidProcessing = "OVERWRITE", geometa_inspire = inspire) - }, - "GNLegacyAPIManager" = { - if(category_match_col=="id"){ 
- category <- available_categories[available_categories$id==category,]$name - } - metaId <- GN$get(mdId, by = "uuid", output = "id") - if(is.null(metaId)){ - #insert metadata (once inserted only visible to the publisher) - created = GN$insertMetadata(geometa = md, group = group, category = category, - geometa_inspire = inspire, geometa_inspireValidator = INSPIRE_VALIDATOR) - #config privileges - config <- GNPrivConfiguration$new() - config$setPrivileges("all", privs) - GN$setPrivConfiguration(id = created, config = config) - }else{ - #update a metadata - updated = GN$updateMetadata(id = metaId, geometa = md, - geometa_inspire = inspire, geometa_inspireValidator = INSPIRE_VALIDATOR) - - #config privileges - gn_config <- GNPrivConfiguration$new() - gn_config$setPrivileges("all", privs) - GN$setPrivConfiguration(id = metaId, config = gn_config) - } - } + "GNOpenAPIManager" = { + if(category_match_col=="name"){ + category <- available_categories[available_categories$name==category,]$id + } + GN$insertRecord(geometa = md, group = group, category = category, + uuidProcessing = "OVERWRITE", geometa_inspire = inspire) + }, + "GNLegacyAPIManager" = { + if(category_match_col=="id"){ + category <- available_categories[available_categories$id==category,]$name + } + metaId <- GN$get(mdId, by = "uuid", output = "id") + if(is.null(metaId)){ + #insert metadata (once inserted only visible to the publisher) + created = GN$insertMetadata(geometa = md, group = group, category = category, + geometa_inspire = inspire, geometa_inspireValidator = INSPIRE_VALIDATOR) + #config privileges + config <- GNPrivConfiguration$new() + config$setPrivileges("all", privs) + GN$setPrivConfiguration(id = created, config = config) + }else{ + #update a metadata + updated = GN$updateMetadata(id = metaId, geometa = md, + geometa_inspire = inspire, geometa_inspireValidator = INSPIRE_VALIDATOR) + + #config privileges + gn_config <- GNPrivConfiguration$new() + gn_config$setPrivileges("all", privs) + 
GN$setPrivConfiguration(id = metaId, config = gn_config) + } + } ) rm(md) } diff --git a/inst/actions/geosapi_publish_ogc_services.R b/inst/actions/geosapi_publish_ogc_services.R new file mode 100644 index 00000000..0441949a --- /dev/null +++ b/inst/actions/geosapi_publish_ogc_services.R @@ -0,0 +1,459 @@ +function(action, entity, config){ + + if(!requireNamespace("geosapi", quietly = TRUE)){ + stop("The 'geosapi-publish-ogc-services' action requires the 'geosapi' package") + } + + #options + options <- action$options + createWorkspace <- if(!is.null(options$createWorkspace)) options$createWorkspace else FALSE + createStore <- if(!is.null(options$createStore)) options$createStore else FALSE + store_description <- if(!is.null(options$store_description)) options$store_description else "" + + #check presence of data + if(is.null(entity$data)){ + warnMsg <- sprintf("No data object associated to entity '%s'. Skipping data publication!", + entity$identifiers[["id"]]) + config$logger.warn(warnMsg) + return(NULL) + } + + if(length(entity$data$source)>1) + config$logger.warn("More than one data sources, geosapi action will consider the first one only!") + + #datasource + datasource <- entity$data$uploadSource[[1]] + datasource_name <- NULL + datasource_file <- NULL + if(!is.null(datasource)){ + datasource_name <- unlist(strsplit(datasource, "\\."))[1] + datasource_file <- attr(datasource, "uri") + attributes(datasource) <- NULL + }else{ + if(entity$data$upload){ + errMsg <- sprintf("Upload source is missing!") + stop(errMsg) + } + } + + #layername/sourcename + layername <- if(!is.null(entity$data$layername)) entity$data$layername else entity$identifiers$id + + #shortcut for gs config + GS_CONFIG <- config$software$output$geoserver_config + GS <- config$software$output$geoserver + if(is.null(GS)){ + errMsg <- "This action requires a GeoServer software to be declared in the configuration" + config$logger.error(errMsg) + stop(errMsg) + } + + workspace <- 
GS_CONFIG$properties$workspace + if(is.null(workspace)) if(!is.null(entity$data$workspaces$geoserver)) workspace <- entity$data$workspaces$geoserver + if(is.null(workspace)){ + errMsg <- "The geoserver configuration requires a workspace for publishing action" + config$logger.error(errMsg) + stop(errMsg) + } + + store <- GS_CONFIG$properties$store + if(is.null(store)) if(!is.null(entity$data$store)) store <- entity$data$store + if(is.null(store)){ + errMsg <- "The geoserver configuration requires a data/coverage store for publishing action" + config$logger.error(errMsg) + stop(errMsg) + } + + if(entity$data$uploadType == "other"){ + warnMsg <- "No 'geosapi' action possible for type 'other'. Action skipped" + config$logger.warn(warnMsg) + return(NULL) + } + + # Check existence of data/coverage store + the_store <- switch(entity$data$spatialRepresentationType, + "vector" = GS$getDataStore(workspace, store), + "grid" = GS$getCoverageStore(workspace, store) + ) + # If store does not exist + # Check if createStore is TRUE + if(length(the_store)==0){ + if(createStore){ + switch(entity$data$uploadType, + #vector/features upload types + #=========================================================================================== + #vector/GeoPackage + #------------------------------------------------------------------------------------------- + "gpkg"= { + the_store<-GSGeoPackageDataStore$new( + name = store, + description = store_description , + enabled = TRUE, + database = paste0("file://data/",workspace,"/",entity$data$uploadSource,".gpkg") + ) + }, + #vector/dbtable + #------------------------------------------------------------------------------------------- + "dbtable"= { + dbi<-config$software$output$dbi_config + if(is.null(dbi)) dbi<-config$software$output$dbi_config + if(is.null(dbi)) { + errMsg <- sprintf("Error during Geoserver '%s' datastore creation, this datastore type requires a DBI type software declaration in the configuration", store) + 
config$logger.error(errMsg) + stop(errMsg) + } + Postgres<-dbi$parameters$drv %in% c("Postgres","PostreSQL") + if(!Postgres){ + errMsg <- sprintf("Error during Geoserver '%s' datastore creation, the DBI software declared in the configuration is not a PostGis database", store) + config$logger.error(errMsg) + stop(errMsg) + } + the_store<-GSPostGISDataStore$new(name=store, description = store_description, enabled = TRUE) + the_store$setHost(dbi$parameters$host) + the_store$setPort(dbi$parameters$port) + the_store$setDatabase(dbi$parameters$dbname) + #the_store$setSchema()#Not yet implemented in dbi software arguments + the_store$setUser(dbi$parameters$user) + the_store$setPassword(dbi$parameters$password) + }, + #vector/dbquery + #------------------------------------------------------------------------------------------- + "dbquery"= { + dbi<-config$software$output$dbi_config + if(is.null(dbi)) dbi<-config$software$output$dbi_config + if(is.null(dbi)) { + errMsg <- sprintf("Error during Geoserver '%s' datastore creation, this datastore type requires a DBI type software declaration in the configuration", store) + config$logger.error(errMsg) + stop(errMsg) + } + Postgres<-dbi$parameters$drv %in% c("Postgres","PostreSQL") + if(!Postgres){ + errMsg <- sprintf("Error during Geoserver '%s' datastore creation, the DBI software declared in the configuration is not a PostGis database", store) + config$logger.error(errMsg) + stop(errMsg) + } + the_store<-GSPostGISDataStore$new(name=store, description = store_description, enabled = TRUE) + the_store$setHost(dbi$parameters$host) + the_store$setPort(dbi$parameters$port) + the_store$setDatabase(dbi$parameters$dbname) + #the_store$setSchema()#Not yet implemented in dbi software arguments + the_store$setUser(dbi$parameters$user) + the_store$setPassword(dbi$parameters$password) + }, + #vector/shapefile (ESRI) + #------------------------------------------------------------------------------------------- + "shp"= { + the_store <- 
GSShapefileDirectoryDataStore$new( + name=store, + description = store_description, + enabled = TRUE, + url = paste0("file://data","/",workspace) + ) + }, + #grid/coverages upload types + #----------------------------------------------- + "geotiff" = { + the_store <- GSGeoTIFFCoverageStore$new(name = store, description = store_description, enabled = TRUE) + } + ) + if(is.null(the_store)){ + errMsg <- sprintf("Error during Geoserver data/coverage store creation, format '%s' not supported. Aborting 'geosapi' action!",entity$data$uploadType) + config$logger.error(errMsg) + stop(errMsg) + }else{ + created <- switch(entity$data$spatialRepresentationType, + "vector" = GS$createDataStore(workspace, the_store), + "grid" = GS$createCoverageStore(workspace, the_store) + ) + if(created){ + infoMsg <- sprintf("Successful Geoserver '%s' data/coverage store creaction", store) + config$logger.info(infoMsg) + }else{ + errMsg <- "Error during Geoserver data/coverage store creation. Aborting 'geosapi' action!" 
+ config$logger.error(errMsg) + stop(errMsg) + } + } + }else{ + # If createStore is FALSE edit ERROR Message + errMsg <- sprintf("Data/Coverage store '%s' does not exist and 'createStore' option = FALSE, please verify config if data/coverage store already exists or change createStore = TRUE to create it",store) + config$logger.error(errMsg) + stop(errMsg) + } + } + + #upload + #------------------------------------------------------------------------------------------------- + if(entity$data$upload){ + + config$logger.info("Upload mode is set to true") + if(startsWith(entity$data$uploadType,"db") || entity$data$uploadType == "other"){ + warnMsg <- "Skipping upload: Upload mode is only valid for types 'shp', 'spatialite' or 'h2'" + config$logger.warn(warnMsg) + }else{ + uploaded <- FALSE + config$logger.info("Upload from local file(s)") + filepath <- file.path(getwd(), "data", datasource) + config$logger.info(sprintf("File to upload to Geoserver: %s", filepath)) + if(file.exists(filepath)){ + config$logger.info(sprintf("Upload file '%s' [%s] to GeoServer...", filepath, entity$data$uploadType)) + uploaded <- switch(entity$data$spatialRepresentationType, + #vector/features upload + "vector" = GS$uploadData( + workspace, store, endpoint = "file", configure = "none", update = "overwrite", + filename = filepath, extension = entity$data$uploadType, charset = "UTF-8", + contentType = if(entity$data$uploadType=="spatialite") "application/x-sqlite3" else "" + ), + #grid/coverages upload + "grid" = GS$uploadCoverage( + workspace, store, endpoint = "file", configure = "none", update = "overwrite", + filename = filepath, extension = entity$data$uploadType, + contentType = switch(entity$data$uploadType, + "geotiff" = "text/plain", + "arcgrid" = "text/plain", + "worldimage" = "application/zip", + "imagemosaic" = "application/zip" + ) + ) + ) + }else{ + errMsg <- sprintf("Upload from local file(s): no zipped file found for source '%s' (%s)", filepath, datasource) + 
config$logger.error(errMsg) + stop(errMsg) + } + + if(uploaded){ + infoMsg <- sprintf("Successful Geoserver upload for file '%s' (%s)", datasource_file, entity$data$uploadType) + config$logger.info(infoMsg) + }else{ + errMsg <- "Error during Geoserver file upload. Aborting 'geosapi' action!" + config$logger.error(errMsg) + stop(errMsg) + } + } + } + + #featuretype/coverage +layer publication + #-------------------------------------------------------------------------------------------------- + + #variables + epsgCode <- sprintf("EPSG:%s", entity$srid) + + #build resource (either featuretype or coverage) + resource <- switch(entity$data$spatialRepresentationType, + "vector" = GSFeatureType$new(), + "grid" = GSCoverage$new() + ) + resource$setName(layername) + nativename <- datasource_name + if(entity$data$uploadType == "dbquery") nativename <- layername + if(entity$data$spatialRepresentationType == "grid") nativename <- store + resource$setNativeName(nativename) + resource$setAbstract(entity$descriptions$abstract) + resource$setTitle(entity$titles[["title"]]) + resource$setSrs(epsgCode) + resource$setNativeCRS(epsgCode) + resource$setEnabled(TRUE) + resource$setProjectionPolicy("FORCE_DECLARED") + bbox <- entity$spatial_bbox + resource$setNativeBoundingBox(bbox$xmin, bbox$ymin, bbox$xmax, bbox$ymax, crs = epsgCode) + sfc_min <- sf::st_sfc(sf::st_point(c(bbox$xmin, bbox$ymin)), crs = epsgCode) + sfc_max <- sf::st_sfc(sf::st_point(c(bbox$xmax, bbox$ymax)), crs = epsgCode) + sfc_min_ll <- sf::st_bbox(sf::st_transform(sfc_min, crs = 4326)) + sfc_max_ll <- sf::st_bbox(sf::st_transform(sfc_max, crs = 4326)) + resource$setLatLonBoundingBox(sfc_min_ll$xmin, sfc_min_ll$ymin, sfc_max_ll$xmax, sfc_max_ll$ymax, crs = 4326) + for(subject in entity$subjects){ + kwds <- subject$keywords + for(kwd in kwds) resource$addKeyword(kwd$name) + } + + #add metadata links + #in case (only if) geoflow defines either CSW or Geonetwork software, we can add metadata links + md_link_xml <- NULL 
+ md_link_html <- NULL + if(!is.null(config$software$output$csw)|!is.null(config$software$output$geonetwork)){ + if(!is.null(config$software$output$csw)){ + md_link_xml <- paste0(config$software$output$csw_config$parameters$url, "?service=CSW&request=GetRecordById&Version=", config$software$output$csw_config$parameters$version, + "&elementSetName=full&outputSchema=http%3A//www.isotc211.org/2005/gmd&id=", entity$identifiers[["id"]]) + } + if(!is.null(config$software$output$geonetwork)){ + md_link_xml <- paste0(config$software$output$geonetwork_config$parameters$url, "/srv/eng/csw?service=CSW&request=GetRecordById&Version=2.0.2", + "&elementSetName=full&outputSchema=http%3A//www.isotc211.org/2005/gmd&id=", entity$identifiers[["id"]]) + if(startsWith(config$software$output$geonetwork_config$parameters$version, "2")){ + md_link_html <- paste0(config$software$output$geonetwork_config$parameters$url, "/srv/en/main.home?uuid=", entity$identifiers[["id"]]) + }else if(startsWith(config$software$output$geonetwork_config$parameters$version, "3")){ + md_link_html <- paste0(config$software$output$geonetwork_config$parameters$url, "/srv/eng/catalog.search#/metadata/", entity$identifiers[["id"]]) + } + } + } + if(!is.null(md_link_xml)){ + md_xml <- GSMetadataLink$new(type = "text/xml", metadataType = "ISO19115:2003", content = md_link_xml) + resource$addMetadataLink(md_xml) + } + if(!is.null(md_link_html)){ + md_html <- GSMetadataLink$new(type = "text/html", metadataType = "ISO19115:2003", content = md_link_html) + resource$addMetadataLink(md_html) + } + + #resource type specific properties + switch(entity$data$spatialRepresentationType, + "vector" = { + #cql filter? + if(!is.null(entity$data$cqlfilter)){ + resource$setCqlFilter(entity$data$cqlfilter) + } + + #virtual table? 
+ if(entity$data$uploadType == "dbquery"){ + vt <- GSVirtualTable$new() + vt$setName(layername) + vt$setSql(entity$data$sql) + #if the virtual table is spatialized + if(!is.null(entity$data$geometryField) & !is.null(entity$data$geometryType)){ + vtg <- GSVirtualTableGeometry$new( + name = entity$data$geometryField, + type = entity$data$geometryType, + srid = entity$srid + ) + vt$setGeometry(vtg) + } + #if the virtual table has service parameters + if(length(entity$data$parameters)>0){ + for(param in entity$data$parameters){ + vtp <- GSVirtualTableParameter$new( + name = param$name, + defaultValue = param$defaultvalue, + regexpValidator = param$regexp + ) + vt$addParameter(vtp) + } + } + resource$setVirtualTable(vt) + } + }, + "grid" = { + + #coverage view? + if(length(entity$data$bands)>0){ + coview <- GSCoverageView$new() + coview$setName(layername) + coview$setEnvelopeCompositionType(entity$data$envelopeCompositionType) + coview$setSelectedResolution(entity$data$selectedResolution) + coview$setSelectedResolutionIndex(entity$data$selectedResolutionIndex) + for(band in entity$data$bands){ + cvb <- GSCoverageBand$new() + covname <- if(!is.null(band$name)) band$name else layername + cvb$setDefinition(paste0(covname,"@", band$index)) + cvb$setIndex(band$index) + cvb$addInputBand(GSInputCoverageBand$new( coverageName = covname, band = band$index)) + coview$addBand(cvb) + } + resource$setView(coview) + }else{ + #check nb of bands, if > 3 we configure a coverage view + bands <- names(entity$data$coverages) + if(length(bands)>3){ + coview <- GSCoverageView$new() + coview$setName(layername) + ect <- entity$data$envelopeCompositionType + if(is.null(ect)) ect <- "INTERSECTION" + coview$setEnvelopeCompositionType(ect) + sr <- entity$data$selectedResolution + if(is.null(sr)) sr <- "BEST" + coview$setSelectedResolution(sr) + sri <- entity$data$selectedResolutionIndex + if(is.null(sri)) sri <- -1 + coview$setSelectedResolutionIndex(sri) + for(i in 1:length(bands)){ + band <- 
bands[i] + cvb <- GSCoverageBand$new() + covname <- layername + cvb$setDefinition(paste0(covname,"@", i-1)) + cvb$setIndex(i-1) + cvb$addInputBand(GSInputCoverageBand$new( coverageName = covname, band = i-1)) + coview$addBand(cvb) + } + resource$setView(coview) + } + } + + } + ) + + #styles publication if needed + gs_styles <- GS$getStyleNames() + if(entity$data$styleUpload) if(length(entity$data$styles)>0){ + for(i in 1:length(entity$data$styles)){ + style <- entity$data$styles[i] + #check if any style SLD file is available in source + style_sldfile <- paste0(style,".sld") + if(!style %in% gs_styles){ + config$logger.warn(sprintf("No style '%s' in Geoserver", style)) + if(style_sldfile %in% entity$data$source){ + config$logger.info(sprintf("Creating GeoServer style '%s' from SLD style file '%s' available as source", style, style_sldfile)) + created <- GS$createStyle(file = file.path(getwd(), "data", style_sldfile), name = style) + } + } + } + GS$reload() + } + + #layer build and publication + switch(entity$data$spatialRepresentationType, + "vector" = { + layer <- GSLayer$new() + layer$setName(layername) + if(length(entity$data$styles)>0){ + for(i in 1:length(entity$data$styles)){ + style <- entity$data$styles[[i]] + if(i==1) layer$setDefaultStyle(style) else layer$addStyle(style) + } + }else{ + layer$setDefaultStyle("generic") + } + + #publish + try(GS$unpublishLayer(workspace, store, layername)) + out <- GS$publishLayer(workspace, store, resource, layer) + if(!out){ + errMsg <- sprintf("Error during layer '%s' publication for entity '%s'!",layername, entity$identifiers[["id"]]) + config$logger.error(errMsg) + }else{ + infoMsg <- sprintf("Successful layer'%s' publication in Geoserver for entity '%s'!", layername, entity$identifiers[["id"]]) + } + }, + "grid" = { + out <- FALSE + cov <- GS$getCoverage(ws = workspace, cs = store, cv = layername) + if(is.null(cov)){ + out <- GS$createCoverage(ws = workspace, cs = store, coverage = resource) + }else{ + out <- 
GS$updateCoverage(ws = workspace, cs = store, coverage = resource) + } + #manage coverage styles by updating associated layer object + layer <- GS$getLayer(layername) + if(is(layer, "GSLayer")){ + layer$setName(layername) + if(length(entity$data$styles)>0){ + layer$styles <- list() + for(i in 1:length(entity$data$styles)){ + style <- entity$data$styles[[i]] + if(i==1) layer$setDefaultStyle(style) else layer$addStyle(style) + } + }else{ + layer$setDefaultStyle("generic") + } + GS$updateLayer(layer) + } + + if(!out){ + errMsg <- sprintf("Error during layer '%s' publication for entity '%s'!",layername, entity$identifiers[["id"]]) + config$logger.error(errMsg) + }else{ + infoMsg <- sprintf("Successful layer'%s' publication in Geoserver for entity '%s'!", layername, entity$identifiers[["id"]]) + } + } + ) + + +} \ No newline at end of file diff --git a/R/geoflow_action_ows4R_publish_iso_19139.R b/inst/actions/ows4R_publish_iso_19139.R similarity index 97% rename from R/geoflow_action_ows4R_publish_iso_19139.R rename to inst/actions/ows4R_publish_iso_19139.R index 552ec49d..50be6d4b 100644 --- a/R/geoflow_action_ows4R_publish_iso_19139.R +++ b/inst/actions/ows4R_publish_iso_19139.R @@ -1,9 +1,11 @@ -ows4R_publish_iso_19139 <- function(entity, config, options){ +function(action, entity, config){ if(!requireNamespace("ows4R", quietly = TRUE)){ stop("The 'ows4R-publish-iso-19139' action requires the 'ows4R' package") } + options <- action$options + geometa_inspire <- if(!is.null(options$geometa_inspire)) options$geometa_inspire else FALSE INSPIRE_VALIDATOR <- NULL if(geometa_inspire){ @@ -16,7 +18,7 @@ ows4R_publish_iso_19139 <- function(entity, config, options){ } config$logger.info("INSPIRE geometa option enabled: The record will be checked against the INSPIRE reference validator prior its CSW-T publication") } - + #shortcut for csw config CSW <- config$software$output$csw diff --git a/R/geoflow_action_create_metadata_Rmd.R b/inst/actions/rmarkdown_create_metadata.R 
similarity index 81% rename from R/geoflow_action_create_metadata_Rmd.R rename to inst/actions/rmarkdown_create_metadata.R index 3e877047..81b32ca9 100644 --- a/R/geoflow_action_create_metadata_Rmd.R +++ b/inst/actions/rmarkdown_create_metadata.R @@ -1,4 +1,4 @@ -create_metadata_Rmd <- function(entity, config, options){ +function(action, entity, config){ if(!requireNamespace("rmarkdown")){ stop("The action 'create-metadata-rmd' requires the 'rmarkdown' package") @@ -6,9 +6,10 @@ create_metadata_Rmd <- function(entity, config, options){ config$logger.info('Generate Rmd') #options + options <- action$otions template <- if(!is.null(options$template)) options$template else "generic" output_format <- if(!is.null(options$output_format)) options$output_format else "html" - + infoMsg <- sprintf("Rmd template use :'%s'", template) config$logger.info(infoMsg) @@ -18,9 +19,9 @@ create_metadata_Rmd <- function(entity, config, options){ template_file<-template }else{ switch(template, - "generic" = {template_file<-system.file("extdata/markdown", "generic.Rmd", package="geoflow") - template_name<-"generic"} - + "generic" = {template_file<-system.file("extdata/markdown", "generic.Rmd", package="geoflow") + template_name<-"generic"} + ) } infoMsg <- sprintf("Rmd Localisation of template :'%s'", template_file) diff --git a/inst/actions/sf_write_dbi.R b/inst/actions/sf_write_dbi.R new file mode 100644 index 00000000..d268457a --- /dev/null +++ b/inst/actions/sf_write_dbi.R @@ -0,0 +1,22 @@ +function(action, entity, config){ + #options + options <- action$options + createIndexes <- ifelse(!is.null(options$createIndexes), options$createIndexes, FALSE) + overwrite <- ifelse(!is.null(options$overwrite), options$overwrite, TRUE) + append <- ifelse(!is.null(options$append), options$append, FALSE) + chunk.size <- ifelse(!is.null(options$chunk.size), options$chunk.size, 0L) + #function + writeWorkflowJobDataResource( + entity = entity, + config = config, + obj = NULL, + useFeatures = TRUE, + 
resourcename = NULL, + useUploadSource = TRUE, + createIndexes = createIndexes, + overwrite = overwrite, + append = append, + chunk.size = chunk.size, + type = "dbtable" + ) +} \ No newline at end of file diff --git a/inst/actions/sf_write_generic.R b/inst/actions/sf_write_generic.R new file mode 100644 index 00000000..99bc4fd7 --- /dev/null +++ b/inst/actions/sf_write_generic.R @@ -0,0 +1,23 @@ +function(action, entity, config){ + + #options + options <- action$options + createIndexes <- ifelse(!is.null(options$createIndexes), options$createIndexes, FALSE) + overwrite <- ifelse(!is.null(options$overwrite), options$overwrite, TRUE) + append <- ifelse(!is.null(options$append), options$append, FALSE) + chunk.size <- ifelse(!is.null(options$chunk.size), options$chunk.size, 0L) + #function + writeWorkflowJobDataResource( + entity = entity, + config = config, + obj = NULL, + useFeatures = TRUE, + resourcename = NULL, + useUploadSource = TRUE, + createIndexes = createIndexes, + overwrite = overwrite, + append = append, + chunk.size = chunk.size, + type=options$type + ) +} \ No newline at end of file diff --git a/inst/actions/sf_write_shp.R b/inst/actions/sf_write_shp.R new file mode 100644 index 00000000..035a5647 --- /dev/null +++ b/inst/actions/sf_write_shp.R @@ -0,0 +1,11 @@ +function(action, entity, config){ + writeWorkflowJobDataResource( + entity = entity, + config = config, + obj = NULL, + useFeatures = TRUE, + resourcename = NULL, + useUploadSource = TRUE, + type = "shp" + ) +} \ No newline at end of file diff --git a/R/geoflow_action_zen4R_deposit_record.R b/inst/actions/zen4R_deposit_record.R similarity index 83% rename from R/geoflow_action_zen4R_deposit_record.R rename to inst/actions/zen4R_deposit_record.R index 4238de34..6f45f0d9 100644 --- a/R/geoflow_action_zen4R_deposit_record.R +++ b/inst/actions/zen4R_deposit_record.R @@ -1,4 +1,4 @@ -zen4R_deposit_record <- function(entity, config, options){ +function(action, entity, config){ 
if(!requireNamespace("zen4R", quietly = TRUE)){ stop("The 'zen4R-deposit-record' action requires the 'zen4R' package") @@ -16,6 +16,7 @@ zen4R_deposit_record <- function(entity, config, options){ skipFileDownload <- if(!is.null(config$profile$options$skipFileDownload)) config$profile$options$skipFileDownload else FALSE #options + options <- action$options depositWithFiles <- if(!is.null(options$depositWithFiles)) options$depositWithFiles else FALSE depositDataPattern <- if(!is.null(options$depositDataPattern)) options$depositDataPattern else "" depositMetadataPattern <- if(!is.null(options$depositMetadataPattern)) options$depositMetadataPattern else "" @@ -59,19 +60,19 @@ zen4R_deposit_record <- function(entity, config, options){ #check related identifier if(length(deposits)>0){ invisible(lapply(deposits, function(deposit){ - related_identifiers <- deposit$metadata$related_identifiers - if(!is.null(related_identifiers)){ - for(related_identifier in related_identifiers){ - if(startsWith(related_identifier$identifier,"urn")){ - related_id <- unlist(strsplit(related_identifier$identifier, "urn:"))[2] - if(related_id == entity$identifiers[["id"]] & - related_identifier$relation == "isIdenticalTo"){ - zenodo_metadata <<- deposit - break - } - } - } - } + related_identifiers <- deposit$metadata$related_identifiers + if(!is.null(related_identifiers)){ + for(related_identifier in related_identifiers){ + if(startsWith(related_identifier$identifier,"urn")){ + related_id <- unlist(strsplit(related_identifier$identifier, "urn:"))[2] + if(related_id == entity$identifiers[["id"]] & + related_identifier$relation == "isIdenticalTo"){ + zenodo_metadata <<- deposit + break + } + } + } + } })) } @@ -116,7 +117,7 @@ zen4R_deposit_record <- function(entity, config, options){ if(!is.null(entity$identifiers[["doi"]])){ doi <- entity$identifiers[["doi"]] } - + #if entity comes with a foreign DOI (not assigned by Zenodo) #we set the DOI (which set prereserve_doi to FALSE) if(!is.null(doi)) 
if(regexpr("zenodo", doi)<0){ @@ -215,10 +216,10 @@ zen4R_deposit_record <- function(entity, config, options){ license <- licenses[[1]]$value accepted_licenses <- ZENODO$getLicenses()$id if(license%in%accepted_licenses){ - zenodo_metadata$setLicense(license) + zenodo_metadata$setLicense(license) }else{ - config$logger.warn(sprintf("Zenodo :license specified (%s) in entity doesn't match Zenodo accepted list of licenses. license %s ignored!", - license,license)) + config$logger.warn(sprintf("Zenodo :license specified (%s) in entity doesn't match Zenodo accepted list of licenses. license %s ignored!", + license,license)) } } } @@ -293,7 +294,7 @@ zen4R_deposit_record <- function(entity, config, options){ }else{ config$logger.info("Skipping update of Zenodo record files (option 'update_files' and/or 'depositWithFiles FALSE)") } - + #deposit (and publish, if specified in options) if(publish){ #2d verification for publish action, need to have the DOI specified in the entity table @@ -312,38 +313,38 @@ zen4R_deposit_record <- function(entity, config, options){ } config$logger.info(sprintf("Deposit record with id '%s' - publish = %s", zenodo_metadata$id, tolower(as.character(publish)))) out <- switch(record_state, - "unsubmitted" = ZENODO$depositRecord(zenodo_metadata, publish = publish), - "inprogress" = ZENODO$depositRecord(zenodo_metadata, publish = publish), - "done" = { - switch(strategy, - "edition" = ZENODO$depositRecord(zenodo_metadata, publish = publish), - "newversion" = { - data_files <- list.files(file.path(getwd(),"data"), pattern = depositDataPattern) - - if(zipEachDataFile){ - config$logger.info("Zenodo: 'zipEachDaTafile' is true - zipping data files") - data_files <- lapply(data_files, function(data_file){ - config$logger.info(sprintf("Zenodo: 'zipEachDaTafile' is true - zipping each data file '%s'", data_file)) - fileparts <- unlist(strsplit(data_file, "\\.")) - if(length(fileparts)>1) fileparts <- fileparts[1:(length(fileparts)-1)] - filename <- 
paste0(fileparts, collapse = ".") - outfilename <- file.path(getwd(), "data", paste0(filename, ".zip")) - zip::zipr(zipfile = outfilename, files = data_file) - return(outfilename) - }) - } - - metadata_files <- list.files(file.path(getwd(),"metadata")) - files_to_upload <- if(depositWithFiles & (!update | (update & update_files))) c(data_files, metadata_files) else NULL - ZENODO$depositRecordVersion( - record = zenodo_metadata, - delete_latest_files = TRUE, - files = files_to_upload, - publish = publish - ) - } - ) - } + "unsubmitted" = ZENODO$depositRecord(zenodo_metadata, publish = publish), + "inprogress" = ZENODO$depositRecord(zenodo_metadata, publish = publish), + "done" = { + switch(strategy, + "edition" = ZENODO$depositRecord(zenodo_metadata, publish = publish), + "newversion" = { + data_files <- list.files(file.path(getwd(),"data"), pattern = depositDataPattern) + + if(zipEachDataFile){ + config$logger.info("Zenodo: 'zipEachDaTafile' is true - zipping data files") + data_files <- lapply(data_files, function(data_file){ + config$logger.info(sprintf("Zenodo: 'zipEachDaTafile' is true - zipping each data file '%s'", data_file)) + fileparts <- unlist(strsplit(data_file, "\\.")) + if(length(fileparts)>1) fileparts <- fileparts[1:(length(fileparts)-1)] + filename <- paste0(fileparts, collapse = ".") + outfilename <- file.path(getwd(), "data", paste0(filename, ".zip")) + zip::zipr(zipfile = outfilename, files = data_file) + return(outfilename) + }) + } + + metadata_files <- list.files(file.path(getwd(),"metadata")) + files_to_upload <- if(depositWithFiles & (!update | (update & update_files))) c(data_files, metadata_files) else NULL + ZENODO$depositRecordVersion( + record = zenodo_metadata, + delete_latest_files = TRUE, + files = files_to_upload, + publish = publish + ) + } + ) + } ) if(!is(out,"ZenodoRecord")){ errMsg <- sprintf("Zenodo: %s", out$errors[[1]]$message)