diff --git a/all2nc.R b/all2nc.R
new file mode 100644
index 0000000..2b2b146
--- /dev/null
+++ b/all2nc.R
@@ -0,0 +1,211 @@
+## Save the ECA&D station data as a netCDF4 file - ecad2nc4
+## Go through the data country and element wise to generate several netCDF files which
+## then can be combined into one.
+## REB 2020-04-21: updated to use previously existing netCDF files and only read the
+## data that is needed to reduce the data traffic and demands on the database. Some databases
+## using JSON are slow for large data volumes.
+
+## This is a dirty hack to try to get data from Frost. It's not easy....
+## based on https://github.com/metno/rscripts/blob/master/R/Datalaster.Frost.CSV.R
+##
+## 2021-07-20
+## Use the data from seNorge job: /lustre/storeB/project/metkl/senorge2/case/case_real_v2/README
+## 2023-01-20
+## Use Ketil Tunheim's code to read directly from Frost
+
+library(esd)
+
+
+print('Put metnod-data in netCDF files - updated version 2023-07-25 OpenClimateData version')
+test <- FALSE; if (test) print('Test mode...')
+verbose=FALSE
+add.latest=TRUE # In case some of the latest data is suspect or corrupt, then set to FALSE. Used for testing purposes.
+## Generate a new file from scratch at the start of each month:
+if (format(Sys.time(),'%d')=="01") new.data.file <- TRUE else new.data.file <- FALSE
+new.data.file <- FALSE
+#fromenddate <- '1961-01-01'
+fromenddate <- NULL
+i30path <- 'nc30'
+t1 <- Sys.time()
+
+## Select one or several of the elements through an index
+params <- c('precip','t2m','tmax','tmin','fg','fx','sd','pp','dd') #[1:7] ## problems with dd & pp
+
+if (test) {
+  print('Test with test.nc')
+  file.remove('test.nc')
+  download.file('https://thredds.met.no/thredds/fileServer/metusers/rasmusb/precip.metnod.nc','test.nc')
+  Y <- retrieve.stationsummary('test.nc')
+  print(attr(Y,'period'))
+  path <- './'
+  params <- 'precip'
+}
+
+## Loop through the list of parameters
+for (param in params) {
+  print(param)
+  if (!test) {
+    #path <- '~/data/data.METNOD/'
+    path <- '~/OpenClimateData/data/'
+    testfile <- paste0(path,param,'.metnod.nc')
+    if (!file.exists(testfile)) {
+      print(paste('Unable to read',testfile))
+      thredds.url <- 'https://thredds.met.no/thredds/fileServer/metusers/rasmusb/'
+      print('Download file from thredds...')
+      download.file(paste0(thredds.url,param,'.metnod.nc'),paste0(param,'.metnod.nc'))
+      print(paste0('Reading from thredds and working on local ',param,'.metnod.nc'))
+      print(paste('Local path:', getwd()))
+      path <- './'
+    }
+  }
+
+  ## Generate the name of the netCDF file depending on parameter:
+  fname <- paste0(path,param,'.metnod.nc')
+  if (test) fname='test.nc'
+  print(paste('Saving the data in',fname))
+  if ( (file.exists(fname)) & !(new.data.file) ) {
+    print('updating')
+    ## If the file already exists and is to be updated, then read the metadata from the netCDF file
+    ## to save computational demands, data traffic and demand on the database:
+    meta <- retrieve.stationsummary(fname)
+    print(paste('retrieved metadata from',fname))
+    ## Find the dates missing in the netCDF files:
+    it <- c(as.character(as.Date(attr(meta,'period')[2])+1),as.character(Sys.Date()))
+    is <- meta$station_id
+    if (!is.null(fromenddate)) it[1] <- fromenddate
+    print(it)
+    ## Select the stations present in the netCDF file
+    SS <- select.station(src='metnod.frost',is=meta$station.id,param=param)
+    print(paste('fetch',length(meta$station.id),'stations'))
+    nmin <- NULL
+  } else {
+    print('generate new netCDF file')
+    ## Select all stations in the database from scratch
+    it <- c("1950-01-01",as.character(Sys.Date()))
+    #SS <- select.station(src='metnod.frost',param=param,nmin=nmin)
+    nmin <- 30 ## Minimum 30 years
+    meta <- select.station(src='metnod.frost',param=param)
+    is <- meta$station_id
+    attr(meta,'period') <- it
+  }
+
+  if (!is.null(meta)) {
+    if ( ((as.Date(attr(meta,'period')[2])+1) < Sys.Date()) & (diff(as.Date(it))>0) ) {
+      ## Read the actual data based on the selection criteria:
+      ## Check the dates retrieved:
+      print(paste('New dates retrieved',paste(it,collapse=' - ')))
+      # if (param!='pp')
+      #   x <- try(station(param=param,stid=meta$station.id,src='metnod.frost',verbose=verbose,it=it)) else
+      #   x <- try(station(param='slp',stid=meta$station.id,src='metnod.frost',verbose=verbose,it=it))
+
+      #x <- seNorgeStations(param=param,it=it,meta=meta)
+      x <- station(param=param,src='metnod.frost',it=it,is=is,nmin=nmin, verbose=verbose)
+      x0 <- x
+      if (!is.null(x)) print('Retrieved Frost data:') else stop('Failed to read data from Frost')
+      if (is.null(dim(x))) dim(x) <- c(1,length(x))
+      print(range(index(x))); print(dim(x))
+
+      ## If the read was successful, then save the data in the netCDF file.
+      if (!inherits(x,'try-error')) {
+        print(paste('Save the data in netCDF file',fname))
+        print(dim(x))
+
+        if (!(new.data.file)) {
+          print('rewrite the netCDF file')
+          if (!is.null(meta)) {
+            it2 <- as.character(as.Date(attr(meta,'period')))
+            if (!is.null(fromenddate)) it2 <- c(as.character(as.Date(attr(meta,'period')[1])),fromenddate)
+          }
+          print(paste('read the old file',fname,'... time:',it2[1],'-',it2[2]))
+          y <- retrieve.station(file=fname,it=it2)
+          it0 <- it
+          it <- !is.element(index(x),index(y))
+          if (sum(it)>0) {
+            print(paste('Add',sum(it),'new days from Frost API'))
+            print(paste(length(intersect(stid(x),stid(y))),'common stations'))
+            print(dim(y)); print(dim(x))
+            ## Remove the stations read from Frost that are not in the original file.
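+            ## The block below aligns the newly fetched Frost data with the existing file:
+            ## keep only the fetched stations that already exist as columns in the old record (y),
+            ## map them onto the matching column positions, fill an all-NA matrix with one row per
+            ## new day, and append those rows to the old record via c(zoo(y), zoo(X, ...)).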
+            is0 <- is
+            x <- subset(x,it=NULL,is=is.element(stid(x),stid(y)),verbose=verbose)
+            is <- match(stid(x),stid(y)); is <- is[is.finite(is)]
+            print(dim(x))
+            nt <- sum(it); ns <- dim(y)[2]
+            print(paste('Found',length(is),'stations of',ns,'and there are',nt,'new days to add'))
+            if (sum(is)==0) browser()
+            if (nt > 0) {
+              X <- matrix(rep(NA,nt*ns),nt,ns)
+              dim(x) <- c(length(it),length(is))
+              X[,is] <- x
+              print('Combine old and new data')
+              colnames(y) <- NULL
+              z <- c(zoo(y),zoo(X,order.by=index(x)[it]))
+              print(range(index(z)))
+            } else {
+              print('No data to add')
+              add.latest <- FALSE
+            }
+            if (add.latest) {
+              z <- attrcp(y,z); class(z) <- class(y)
+            } else {
+              print('<=== No new data ===>')
+              z <- y
+            }
+          }
+        } else z <- x
+        #plot(subset(z,is=stid(z)==18700,it=c(2010,2020)))
+        ## Test to see if the path has write access - if not, use the local path
+        test.writeaccess <- file.access(path, 2)
+        if (test.writeaccess == -1) path <- './'
+        Fname <- paste0(path,param,'.metnod.nc'); fname <- paste0(param,'.metnod.nc')
+        ## The OCDP uses 'pp' from ECA&D notation rather than 'slp' in its file name convention
+        fname <- sub('slp','pp',fname); Fname <- sub('slp','pp',Fname)
+        n <- length(index(z))
+        print(paste0('write2ncdf4.station: ',fname,'x dates: ',index(z)[1],' - ',index(z)[n]))
+
+        ## Split the data into smaller chunks of 30 stations to save with write2ncdf4.station
+        ns <- dim(z)[2]
+        #nctemp <- list()
+        # append <- FALSE
+        if (!file.exists(i30path)) dir.create(i30path) else
+          if (length(list.files(path=i30path))>0) file.remove(list.files(path=i30path,full.names = TRUE))
+        for (i30 in seq(1,ns,by=30)) {
+          print(paste('Saving stations,',i30,'to',min(c(i30+29,ns)),'in',
+                      file.path(i30path,paste0(fname,'_',i30,'.nc'))))
+          z30 <- subset(z,is=i30:min(c(i30+29,ns)))
+          #nctemp[[paste0(fname,i30,'nc')]] <- paste0(fname,i30,'nc')
+          #write2ncdf4.station(z30,file=paste0(fname,'x'),append = append,verbose=verbose)
+          #append <- TRUE
+          ci30 <- as.character(i30); while (nchar(ci30) < 4) ci30 <- paste0('0',ci30)
+          write2ncdf4.station(z30,file=file.path(i30path,paste0(fname,'_',ci30,'.nc')),verbose=verbose)
+        }
+        print('Finished saving all station subsets into nc-files - now combine them')
+        nc30files <- list.files(path=i30path,pattern=fname,full.names=TRUE)
+        nc4combine.stations(nc30files,out=paste0(fname,'x'),verbose=verbose)
+        file.remove(nc30files); file.remove(i30path)
+        file.rename(paste0(fname,'x'),Fname)
+        #file.remove(names(nctemp))
+        print("test new file")
+        meta.test <- retrieve.stationsummary(Fname)
+        if (!is.null(meta)) print(paste('Old netCDF file:',paste(attr(meta,'period')),collapse = ' - '))
+        print(paste('New netCDF file:',paste(attr(meta.test,'period')),collapse = ' - '))
+      } else print('<<< File is up-to-date >>>')
+      print('|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||')
+      rm('z','x'); gc(reset = TRUE)
+    } else if (diff(as.Date(it))>0) {
+      print(paste('No new dates to add, but generate a new file with',length(is),'stations for',param))
+      z <- station(param=param,src='metnod.frost',it=it, verbose=verbose)
+      fname <- paste0(path,param,'.metnod.nc')
+      ## The OCDP uses 'pp' from ECA&D notation rather than 'slp' in its file name convention
+      fname <- sub('slp','pp',fname)
+      n <- length(index(z))
+      print(paste0('write2ncdf4.station: ',fname,'x dates: ',index(z)[1],' - ',index(z)[n]))
+      write2ncdf4.station(z,file=paste0(fname,'x'),verbose=verbose)
+      print('|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||')
+    } else print(paste('Do nothing with',fname))
+  }
+}
+
+t2 <- Sys.time()
+print(paste('The update took',round(t2-t1),'seconds'))
+gc(reset=TRUE)
+
diff --git a/all2nc.sh b/all2nc.sh
new file mode 100755
index 0000000..66da1b8
--- /dev/null
+++ b/all2nc.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+## Batch job for the R-script
+echo "Update data for OpenClimateData R-shiny app"
+R CMD BATCH ~/OpenClimateData/all2nc.R
diff --git a/ocdp/app/global.R b/ocdp/app/global.R
index c7a2e00..271d690 100644
--- a/ocdp/app/global.R
+++ b/ocdp/app/global.R
@@ -248,10 +248,14 @@ names(r_colors) <- colors()
 #src <- c('metnod','ecad','Asia','Pacific')
 ## The labeling of the data sources in the menu
-regions <- rbind(c('Norge','Europa','Eurasia','Asia','Stillehavet','Afrika','Latin-Amerika','Australia','Nord-America','Mosambik','Argentina'),
-                 c('Noreg','Europa','Eurasia','Asia','Stillehavet','Afrika','Latin-Amerika','Australia','Nord-America','Mosambik','Argentina'),
-                 c('Norway','Europe','Eurasia','Asia','The Pacific','Africa','Latin-America','Australia','North America','Mozambique','Argentinia'))
-source.regions <- c('metnod','eustance','ecad','Asia','Pacific','Africa','LatinAmerica','Australia','USA','INAM','CLARIS')
+regions <- rbind(c('Norge','Europa','Europa','Asia','Stillehavet','Afrika','Latin-Amerika','Australia','Nord-America',
+                   'Mosambik','Argentina','Midtøsten','Nord-Afrika','Sørøst-Afrika'),
+                 c('Noreg','Europa','Europa','Asia','Stillehavet','Afrika','Latin-Amerika','Australia','Nord-America',
+                   'Mosambik','Argentina','Midtausten','Nord-Afrika','Søraust-Afrika'),
+                 c('Norway','Europe','Europe','Asia','The Pacific','Africa','Latin-America','Australia','North America',
+                   'Mozambique','Argentinia','Middle-East','North-Africa','Southeast-Africa'))
+source.regions <- c('metnod','eustance','europe.ecad','Asia','Pacific','Africa','LatinAmerica','Australia','USA',
+                    'INAM','CLARIS','meast.ecad','nafrca.ecad','southeastAfrica')
 names.src <- regions[1,match(src,source.regions)]
 ## If not in the list, use part of the file neams as region identifier.
@@ -292,7 +296,8 @@ sources <- rbind( c('Oppdaterte data fra Meteorologisk institutt. Kun stasjoner
   c('Åpne data fra Global Historical Climate Network (GHCN). Kilde: https://www.ncdc.noaa.gov/ghcn-daily-description',
     'Åpne data fra Global Historical Climate Network (GHCN). Kilde: https://www.ncdc.noaa.gov/ghcn-daily-description',
     'Open data from Global Historical Climate Network (GHCN). Source: https://www.ncdc.noaa.gov/ghcn-daily-description'),
-  c('INAM','INAM','INAM'),c('CLARIS','CLARIS','CLARIS'))
+  c('INAM','INAM','INAM'),c('CLARIS','CLARIS','CLARIS'),rep('meast.ecad',3),rep('nafrca.ecad',3),
+  rep('CORDEX FPS southeast Africa',3) )
 ## Types of statistics
 types <- c("altitude","first.year","lastrains","lastdry","last.year","latitude","longitude","max",
@@ -413,4 +418,3 @@ print('--- ---')
-
diff --git a/ocdp/app/server.R b/ocdp/app/server.R
index c5b19ce..2a74df2 100644
--- a/ocdp/app/server.R
+++ b/ocdp/app/server.R
@@ -4,6 +4,8 @@
 # Load the ggplot2 package which provides
 # the 'mpg' dataset.
+print("server")
+
 # Define a server for the Shiny app
 server <- function(input, output, session) {
@@ -439,8 +441,11 @@ server <- function(input, output, session) {
   zoom <- reactive({
     print('<20: reactive - zoom()')
     zoomscale <- switch(input$src,
-                        'metnod'=5,'ecad'=4,'eustance'=4,'Asia'=4,'Pacific'=4,'LatinAmerica'=4,'Africa'=3,'USA'=3,'Australia'=4,
-                        'INAM'=6,'CLARIS'=6)
+                        'metnod'=5,'ecad'=4,'eustance'=4,'Asia'=4,'Pacific'=4,'LatinAmerica'=4,
+                        'Africa'=3,'USA'=3,'Australia'=4,
+                        'INAM'=6,'CLARIS'=6,
+                        'southeastAfrica'=6)
+    if (is.null(zoomscale)) zoomscale <- 5 ## switch() returns NULL for sources not listed above
     return(zoomscale)
   })
 
@@ -652,16 +657,17 @@ server <- function(input, output, session) {
     radius <- rep(input$rad,length(statistic[filter]))
     radius[!is.finite(statistic[filter])] <- 1
-    # print(paste('The map is being rendered:','Number of locations shown=',sum(filter),'with',sum(!is.finite(statistic)),
-    #             'bad points - range of values= [',min(statistic,na.rm=TRUE),max(statistic,na.rm=TRUE),'] - slider:',
-    #             input$statisticrange[1],'-',input$statisticrange[2],' ci=',input$ci,'is=',is))
-    # print(summary(statistic)); print(summary(Y$longitude)); print(summary(Y$latitude))
-    # print(paste('Filter: is=',is,'l=',length(filter),'s=',sum(filter),'ID=',Y$station.id[filter][is],
-    #             Y$longitude[filter][is],Y$latitude[filter][is],Y$location[filter][is],
-    #             'n=',length(statistic[filter]),' good=',sum(good)))
-    # str(Y$longitude[filter]); str(Y$latitude[filter])
-    # print(paste(Y$location[filter],as.character(round(statistic[filter],digits = 2))))
-    # str(radius);
+    print(paste('The map is being rendered:','Number of locations shown=',sum(filter),'with',sum(!is.finite(statistic)),
+                'bad points - range of values= [',min(statistic,na.rm=TRUE),max(statistic,na.rm=TRUE),'] - slider:',
+                input$statisticrange[1],'-',input$statisticrange[2],' ci=',input$ci,'is=',is))
+    print(summary(statistic)); print(summary(Y$longitude)); print(summary(Y$latitude))
+    print(paste('Filter: is=',is,'l=',length(filter),'s=',sum(filter),'ID=',Y$station.id[filter][is],
+                Y$longitude[filter][is],Y$latitude[filter][is],Y$location[filter][is],
+                'n=',length(statistic[filter]),' good=',sum(good)))
+    str(Y$longitude[filter]); str(Y$latitude[filter])
+    print(paste(Y$location[filter],as.character(round(statistic[filter],digits = 2))))
+    str(radius);
+    str(Y$station.id[filter])
 
     leaflet("mapid") %>%
@@ -672,7 +678,7 @@ server <- function(input, output, session) {
                        popup = Y$location[filter],popupOptions(keepInView = TRUE),
                        radius =radius,stroke=TRUE,weight = 1, color='black',
                        layerId = Y$station.id[filter],
-                       fillOpacity = 0.4,fillColor=pal(statistic[filter])) %>%
+                       fillOpacity = 0.4,fillColor=pal(statistic[filter])) %>%
      addCircleMarkers(lng = Y$longitude[filter][is], lat = Y$latitude[filter][is],fill=TRUE,
                       label = paste(Y$location[filter][is],as.character(round(statistic[filter][is],digits = 2))),
                       labelOptions = labelOptions(direction = "right",textsize = "12px",opacity=0.6),
@@ -686,7 +692,7 @@ server <- function(input, output, session) {
                       radius=6,stroke=TRUE, weight=3, color='black',
                       layerId = Y$station.id[filter][highlight],
                       fillOpacity = 0.5,fillColor=pal(statistic[filter])[highlight]) %>%
-     addLegend("bottomright", pal=pal, values=round(statistic[filter], digits = 2),
+     addLegend("bottomright", pal=pal, values=round(statistic[filter], digits = 2),
                title=legendtitle(),
                layerId="colorLegend",labFormat = labelFormat(big.mark = "")) %>%
      #addProviderTiles(providers$Esri.WorldStreetMap,
@@ -695,15 +701,17 @@ server <- function(input, output, session) {
      #addProviderTiles(providers$Stamen.TerrainBackground,
      addProviderTiles(providers$Stamen.Terrain,
                       options = providerTileOptions(noWrap = FALSE)) %>%
-     setView(lat=Y$latitude[filter][is],lng = Y$longitude[filter][is], zoom = zoom())
+     setView(lat=Y$latitude[filter][is],lng = Y$longitude[filter][is], zoom())
   })
 
   output$plotstation <- renderPlotly({
     print('<25: output$plotstation - render')
-    # print(paste('Time series for',input$location,'ci=',input$ci,'season=',input$season,
-    #             'tscale=',input$tscale,'aspect=',input$aspect))
+    print('output$plotstation <- renderPlotly({')
+    print(paste('Time series for',input$location,'ci=',input$ci,'season=',input$season,
+                'tscale=',input$tscale,'aspect=',input$aspect))
     y <- updatetimeseries()
-    #print(summary(coredata(y)))
+    print('Time series updated...')
+    print(summary(coredata(y)))
     #if (is.precip(y)) thresholds <- seq(10,50,by=10) else thresholds <- seq(-30,30,by=5)
     ## Marking the top and low 10 points
@@ -731,6 +739,7 @@ server <- function(input, output, session) {
     #print('The timeseries is being rendered')
     if ( (!is.null(dim(y))) & ((input$aspect=="Number_of_days") | (input$aspect=="Days_above_normal")) ){
+      print(paste('In terms of number of days:',input$aspect))
       ndps <- switch(input$tscale, 'year'=365.25,"day"=1,"month"=30,"season"=90)
       timeseries <- data.frame(date=index(y),pr=ndps*coredata(y[,1]),obs= ndps*coredata(y[,2]),
@@ -741,6 +750,7 @@ server <- function(input, output, session) {
         add_trace(y=~pr,name="predicted",color=rgb(0,0,1,0.5)) %>%
         layout(title=loc(y),yaxis = list(title='days/year'))
     } else {
+      print(paste('In terms of analytical statistics:',input$aspect))
       timeseries <- data.frame(date=index(y),y=coredata(y),trend=coredata(trend(y)))
       #print(summary(timeseries))
       TS <- plot_ly(timeseries,x=~date,y=~y,type = 'scatter',mode='lines',name='data')
@@ -748,13 +758,15 @@ server <- function(input, output, session) {
         TS = TS %>% add_trace(y=~trend,name='trend') %>%
           layout(title=loc(y),yaxis = list(title=esd::unit(y)))
       } else {
+        print("highlight selected points")
         TS = TS %>% add_trace(y=~trend,name='trend') %>%
           add_markers(x=index(highlight10),y=coredata(highlight10),hoveron=input$highlightTS) %>%
           layout(title=loc(y),yaxis = list(title=esd::unit(y)))
       }
     }
     #TS$elementID <- NULL
-
+    print('... }) 25<')
+    TS
   })
   output$histstation <- renderPlotly({
@@ -873,13 +885,21 @@ server <- function(input, output, session) {
       lyr <- data.frame(y=mac$y[it],Month=mac$Month[it])
       print(lyr)
       AC <- AC %>% add_trace(AC,data=lyr,x=~Month,y=~y,name=yrnow,type='scatter',size=10)
-      clim6190 <- aggregate(subset(as.monthly(y),it=1961:1990),month,FUN=FUN)
-      clim9120 <- aggregate(subset(as.monthly(y),it=1991:2020),month,FUN=FUN)
+      clim6190 <- try(aggregate(subset(as.monthly(y),it=1961:1990),month,FUN=FUN))
+      if (inherits(clim6190,'try-error')) clim6190 <- rep(NA,12)
+      clim9120 <- try(aggregate(subset(as.monthly(y),it=1991:2020),month,FUN=FUN))
+      if (inherits(clim9120,'try-error')) clim9120 <- rep(NA,12)
      klim <- data.frame(Month=1:12,y1=clim6190,y2=clim9120)
      AC <- AC %>% add_trace(AC,data=klim,x=~Month,y=~y1,name='1961-1990',type='scatter',mode='lines',line=list(width = 4))
      AC <- AC %>% add_trace(AC,data=klim,x=~Month,y=~y2,name='1991-2020',type='scatter',mode='lines',line=list(width = 2, dash = 'dash'))
     } else {
+      print(paste('...else input$timescale=',input$timespace))
       y <- updatestation()
+      print(paste(loc(y),varid(y),stid(y)))
+      print(paste(sum(is.finite(y)),'valid data points and',sum(!is.finite(y)),'invalid ones. The data range is',
+                  paste(range(y,na.rm=TRUE),collapse=' - '),'over the period',paste(range(index(y)),collapse=' - '),
+                  'aspect=',attr(y,'aspect')))
+      attr(y,'aspect') <- 'observed'
      clim <- climatology(y)
      if (is.precip(y)) fun <- 'sum' else fun <- 'mean'
      if (input$timespace!='Annual_cycle_cumugram') Z <- diagram(y,plot=FALSE) else
@@ -1016,9 +1036,12 @@ server <- function(input, output, session) {
    c('År start','ÅR start','Year start')[as.numeric(input$lingo)]
  })
  output$enddate <- renderText({
+    print("output$enddate <- renderText({...")
    Y <- updatemetadata()
    #print('output$enddate'); print(attr(Y,'period'))
-    attr(Y,'period')[2]})
+    print("...})")
+    attr(Y,'period')[2]
+    })
  output$excludelabel <- renderText({
    lab.exclude[as.numeric(input$lingo)]})
  output$seasonTSlabel <- renderText({
@@ -1026,24 +1049,33 @@ server <- function(input, output, session) {
  output$mapdescription <- renderText({
    paste(descrlab[as.numeric(input$lingo)],explainmapstatistic(input$statistic,input$lingo,types))})
  output$datainterval <- renderText({
+    print("output$datainterval <- renderText({...")
    Y <- updatemetadata()
-    print('output$datainterval'); print(paste('Source:',input$src))
+    print('output$datainterval'); print(paste('Source:',input$src)); print(attr(Y,'period'))
+    print(c(match(input$src,source.regions),as.numeric(input$lingo)))
    print(paste(sources[match(input$src,source.regions),as.numeric(input$lingo)],
                attr(Y,'period')[1],' - ',attr(Y,'period')[2]))
    paste(sources[match(input$src,source.regions),as.numeric(input$lingo)],
-          attr(Y,'period')[1],' - ',attr(Y,'period')[2])})
+          attr(Y,'period')[1],' - ',attr(Y,'period')[2])
+    })
  output$cntr <- renderText({
+    print("output$cntr <- renderText({...")
    y <- updatetimeseries()
    average <- round(mean(y[,1],na.rm=TRUE),1)[1]; slope <- round(trend.coef(y[,1]),2)[1]
    varpro <- round(100*var(trend(y[,1],na.rm=TRUE))/var(y[,1],na.rm=TRUE))[1]
    trendpvalue <- round(100*trend.pval(y[,1]),2)[1]
    #print(c(average,slope,varpro,trendpvalue))
-    paste(esd::loc(y)[1],'in',esd::cntr(y)[1],'mean=',average,'. Trend=', slope,esd::unit(y)[1],'per decade and explains',
+    textout <- paste(esd::loc(y)[1],'in',esd::cntr(y)[1],'mean=',average,'. Trend=', slope,esd::unit(y)[1],'per decade and explains',
          varpro,'% of the variance with a probability of',trendpvalue,'% that it is due to chance.')
+    print("...})")
+    textout
  })
  output$hdes <- renderText({
+    print("output$hdes <- renderText({...")
    y <- updatetimeseries()
    Y <- updatemetadata()
+    print(paste(loc(y),varid(y),sum(is.finite(y)),'valid data points and',sum(!is.finite(y)),'invalid ones. The data range is',
+                paste(range(y,na.rm=TRUE),collapse=' - '),'over the period',paste(range(index(y)),collapse=' - ')))
    statistic <- vals()
    ## Apply filter to highlight stations selected in the map
    n <- length(statistic)
@@ -1060,7 +1092,9 @@ server <- function(input, output, session) {
    if (input$timespace == 'Histogram_location') {
      yH <- coredata(y)
    } else yH <- statistic
-    syH <- round(summary(yH)[c(1,4,6)],1)
+    print(summary(yH[is.finite(yH)]))
+    syH <- round(summary(yH[is.finite(yH)])[c(1,4,6)],1)
+    print(summary(yH))
    if ( (substr(input$timespace,1,12) != 'Annual_cycle') & (substr(input$timespace,1,6) != 'Trends') ) {
      if (input$timespace=='Histogram_map') hsum <- paste('Data= ',input$statistic,' ',
                        attr(Y,'longname'),': ',paste(names(syH),syH,collapse=', ',sep='='),sep='')
@@ -1080,6 +1114,7 @@ server <- function(input, output, session) {
    }
    #print(c(hdescr,hsum))
+    print("...})")
    paste(hdescr,'. ',hsum,'. Sample size= ',sum(is.finite(yH)),' data points.',sep='')
  })
  output$yearlabel <- renderText({
    showhideyears[as.numeric(input$lingo)]})
diff --git a/pullfiles.sh b/pullfiles.sh
new file mode 100755
index 0000000..789be3f
--- /dev/null
+++ b/pullfiles.sh
@@ -0,0 +1,2 @@
+wget -N -e robots=off -nH --cut-dirs 4 -r -l5 -A '*.metnod.nc' -R 'catalog*' -I /thredds/fileServer/,/thredds/catalog/ 'https://thredds.met.no/thredds/catalog/metusers/rasmusb/catalog.html' -P ~/OpenClimateData/data
+
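+# The command above mirrors the *.metnod.nc files maintained by all2nc.R from thredds.met.no:
+# -N only fetches files that are newer than the local copies, -r -l5 recurses through the
+# catalogue to depth 5, -A '*.metnod.nc' keeps only the station netCDF files while -R 'catalog*'
+# skips the catalogue pages, -I limits the recursion to the fileServer/catalog trees,
+# -nH --cut-dirs 4 drops the host name and leading directory levels, and -P saves the result
+# under ~/OpenClimateData/data.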