diff --git a/DESCRIPTION b/DESCRIPTION index e87d95bb328dd222b8b12123915c35d5dfa10930..8354f1668af1014f1a79ddb6cb285f01ce26eb16 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,7 +1,7 @@ Package: ClimDatDownloadR Type: Package Title: Downloads Climate Data from Chelsa and WorldClim -Version: 0.1.5 +Version: 0.1.6 Authors@R: c(person("Helge", "Jentsch", email = "helge.marc.ole.jentsch@uni-hamburg.de", role = c("aut", "cre")), @@ -14,8 +14,8 @@ Authors@R: c(person("Helge", "Jentsch", person("Nadine", "Kaul", email = "nadine.kaul@studium.uni-hamburg.de", role = c("ctb"))) -Maintainer: Helge Jentsch <helge.jentsch@studium.uni-hamburg.de> -Description: ClimDatDownloadR provides functions to download, pre-process and +Maintainer: Helge Jentsch <helge.marc.ole.jentsch@uni-hamburg.de> +Description: This package `ClimDatDownloadR` provides functions to download, pre-process and manage CHELSA and Worldclim climate data sets in respective available spatial and temporal resolutions. The output is provided as GEOTIFF, ASCII, or netCDF format. The package's main purpose is to simplify and @@ -30,18 +30,17 @@ Description: ClimDatDownloadR provides functions to download, pre-process and YEAR: 2021 License: MIT + file LICENSE Encoding: UTF-8 -LazyData: true Depends: R (>= 3.6), raster (>= 3.1-5) -Imports: gdalUtils(>= 2.0.3.2), httr (>= 1.4.1), ncdf4 (>= 1.17), qpdf - (>= 1.1), RCurl (>= 1.98), RefManageR (>= 1.2.12), rgdal (>= - 1.5-10), stringr (>= 1.4.0), sf (>= 0.9-4), sp (>= 1.4-1), - svMisc (>= 1.1.0), utils -RoxygenNote: 7.1.1 +Imports: curl (>= 4.3.2), gdalUtils(>= 2.0.3.2), httr (>= 1.4.1), ncdf4 + (>= 1.17), qpdf (>= 1.1), RCurl (>= 1.98), RefManageR (>= + 1.2.12), rgdal (>= 1.5-10), stringr (>= 1.4.0), sf (>= 0.9-4), + sp (>= 1.4-1), svMisc (>= 1.1.0), utils +RoxygenNote: 7.1.2 Suggests: knitr, testthat, usethis, rmarkdown VignetteBuilder: knitr SystemRequirements: Tested with at least 4 GB RAM. 
NeedsCompilation: no -Packaged: 2021-04-06 12:23:27 UTC; helge +Packaged: 2021-11-03 12:10:35 UTC; helge Author: Helge Jentsch [aut, cre], Maria Bobrowski [aut], Johannes Weidinger [aut], diff --git a/NAMESPACE b/NAMESPACE index 97aa7399270bbca90c26d613384f552e3eb9d035..5c92083de81ab450d454c1aa9d3b1dc7bc0fd76b 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -3,6 +3,7 @@ export(Chelsa.CMIP_5.download) export(Chelsa.CRUts.download) export(Chelsa.Clim.download) +export(Chelsa.Clim.download.deprecated) export(Chelsa.lgm.download) export(Chelsa.timeseries.download) export(WorldClim.CMIP_5.download) @@ -11,6 +12,7 @@ export(WorldClim.HistClim.download) export(clipping.tif) export(combine.raw.in.zip) export(convert.to.asc) +export(getDownloadSize) export(save.citation) export(stacking.downloaded.data) import(RCurl) @@ -23,7 +25,10 @@ import(stringr) importFrom(RefManageR,ReadBib) importFrom(RefManageR,ReadCrossRef) importFrom(RefManageR,WriteBib) +importFrom(curl,curl_fetch_memory) importFrom(utils,download.file) importFrom(utils,sessionInfo) +importFrom(utils,setTxtProgressBar) +importFrom(utils,txtProgressBar) importFrom(utils,unzip) importFrom(utils,zip) diff --git a/R/Chelsa_Download_functions.R b/R/Chelsa_Download_functions.R index 5845294fa129ac861f0ddfb89a89ab5a74cc3ec0..0a279c9c9be265c05b594e1977bca5c1fca15282 100644 --- a/R/Chelsa_Download_functions.R +++ b/R/Chelsa_Download_functions.R @@ -1,4 +1,4 @@ -#'@title Function for downloading the CHELSA climate dataset (1979-2013) +#'@title Deprecated function for downloading the CHELSA climate dataset (1979-2013) #'@author Helge Jentsch #'@description This function supports the download, pre-processing and management of CHELSA climate data comprising of monthly precipitation sums in mm, monthly temperature (average, minimum, maximum) in degrees Celsius, and annual chracteristics (19 bioclimatic variables). 
The spatial resolution of the downloaded data is 30 arc-seconds.\cr To allow pre-processing, clipping and buffering, conversion to ASCII-grids and stacking options are included.\cr Optional an output of a .bib-file of the cited literature can be retrieved.\cr For user convenience, saving directories will be created automatically. Also options to "zip" and/or delete the RAW-files are included. #' @@ -35,24 +35,24 @@ #'@import RCurl #'@import ncdf4 #'@import raster -#'@importFrom utils unzip download.file +#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar #' #' #'@export -Chelsa.Clim.download <- function(save.location = "./", - parameter = c("prec", "temp", "tmax", "tmin", "bio"), - bio.var = c(1:19), - month.var = c(1:12), - version.var = c("1.2"), - clipping = FALSE, - clip.shapefile = NULL, - clip.extent = c(-180, 180, -90, 90), - buffer = 0, - convert.files.to.asc = FALSE, - stacking.data = FALSE, - combine.raw.zip = FALSE, - delete.raw.data = FALSE, - save.bib.file = TRUE){ +Chelsa.Clim.download.deprecated<- function(save.location = "./", + parameter = c("prec", "temp", "tmax", "tmin", "bio"), + bio.var = c(1:19), + month.var = c(1:12), + version.var = c("1.2"), + clipping = FALSE, + clip.shapefile = NULL, + clip.extent = c(-180, 180, -90, 90), + buffer = 0, + convert.files.to.asc = FALSE, + stacking.data = FALSE, + combine.raw.zip = FALSE, + delete.raw.data = FALSE, + save.bib.file = TRUE){ gc() call.time <- str_replace_all(str_replace_all(paste0(Sys.time()), pattern = ":", replacement = "-"), pattern = " ", replacement = "_") # initial check ----------------------------------------------------------- @@ -70,20 +70,20 @@ Chelsa.Clim.download <- function(save.location = "./", # Padding of "one-digit" months with a 0 month.var <- str_pad(month.var, 2, 'left', pad = "0") } - + # analog to the if-clause before - here the parameter bio.var is checked. 
if(is.element("bio", parameter)){ bio.var <- c(bio.var) if(!is.numeric(bio.var)) stop() bio.var <- str_pad(bio.var, 2, 'left', pad = "0") } - + # Download: 1. work through all parameters ----------------------------------- for(i in parameter){ - + # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) - + # create intermediate strings for later use interm <- switch(i, "prec" = "prec/", @@ -92,7 +92,7 @@ Chelsa.Clim.download <- function(save.location = "./", "tmin" = "tmin/", "bio" = "bio/", stop()) - + variable.numbers <- switch(i, "bio" = bio.var, "tmin" = month.var, @@ -107,16 +107,16 @@ Chelsa.Clim.download <- function(save.location = "./", # set the 1. order temporal save location to this directory # 1. Order -> parameter! temp.save.location <- paste0(save.location, "/", i, "/") - + # Add "10" after parameter string for all parameters except precipitation if(i != "prec"){ i <- paste0(i, "10") } ## Download: 2. Work through versions as given as initial parameter --------- for (version in version.var) { - + if(version == "1.1") next - + # create version string vers <- switch(version, "1.1" = "", @@ -135,11 +135,11 @@ Chelsa.Clim.download <- function(save.location = "./", if(!dir.exists(temp.temp.save.location)){ dir.create(temp.temp.save.location) } - + # normalize the path to make it work more easily # temp.temp.save.location <- normalizePath(temp.temp.save.location, # winslash = "/") - + ##### Download: 4. Check if bio is not requested ----------------------------- if(i != "bio10"){ # should years be added? 
necessary for the download function @@ -182,13 +182,13 @@ Chelsa.Clim.download <- function(save.location = "./", cacheOK = FALSE) if(i != "prec"){ raster.temp <- raster(dest.temp) - + gc() raster.temp <- clamp(raster.temp, lower = -1000, useValues = FALSE) gain(raster.temp) <- 0.1 gc() - + writeRaster(raster.temp, dest.temp, overwrite = TRUE) @@ -304,21 +304,21 @@ Chelsa.Clim.download <- function(save.location = "./", if(url.exists(URL.temp)){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) - + # download file to save location download.file(url = URL.temp, destfile = dest.temp, overwrite = TRUE, mode = 'wb', quiet = FALSE) - + if(bio <= 11){ raster.temp <- raster(dest.temp) # raster.values <- values(raster.temp) # raster.values[raster.values==-32768] <- NA # values(raster.temp) <- raster.values # rm(raster.values) - + gc() raster.temp <- clamp(raster.temp, lower = -1000, useValues = FALSE) @@ -347,7 +347,7 @@ Chelsa.Clim.download <- function(save.location = "./", warning(paste0("File does not exist. Did not download: \n", URL.temp), call. = TRUE, immediate. = FALSE) } - + } if(bio == bio.var[length(bio.var)] & length(list.files(temp.temp.save.location, @@ -400,12 +400,12 @@ Chelsa.Clim.download <- function(save.location = "./", # given data. 
if(length(list.files(temp.temp.save.location, include.dirs = TRUE)) == 0){ - + unlink(str_sub(temp.temp.save.location, 1, end = str_length(temp.temp.save.location)-1), force = TRUE, recursive = TRUE) } - + } # version for-loop END # Download END } # Parameters for-loop end @@ -473,7 +473,7 @@ Chelsa.Clim.download <- function(save.location = "./", #'@import ncdf4 #'@import raster #'@import httr -#'@importFrom utils unzip download.file +#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar #' #' #'@export @@ -524,16 +524,16 @@ Chelsa.CMIP_5.download <- function(save.location = "./", if(is.element("prec", parameter)|is.element("temp", parameter)| is.element("tmax", parameter)|is.element("tmin", parameter)){ if(!is.numeric(month.var)) stop() - + } if(is.element("bio", parameter)){ if(!is.numeric(bio.var)) stop() - + } for(i in parameter){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) - + variable.numbers <- switch(i, "bio" = bio.var, "tmin" = month.var, @@ -541,7 +541,7 @@ Chelsa.CMIP_5.download <- function(save.location = "./", "temp" = month.var, "prec" = month.var, stop()) - + for (time.interval in time.interval.var) { interm <- switch(i, "prec" = paste0("cmip5/", time.interval, @@ -563,193 +563,193 @@ Chelsa.CMIP_5.download <- function(save.location = "./", # print(temp.save.location) for (model in model.var) { for (emission.scenario in emission.scenario.var) { - temp.temp.save.location <- paste0(temp.save.location, - str_replace_all(interm, - pattern = "/", - "_"), - model,"_", - emission.scenario, "/") - if(!dir.exists(str_sub(temp.temp.save.location, - end = str_length(temp.temp.save.location)-1))){ - dir.create(str_sub(temp.temp.save.location, - end = str_length(temp.temp.save.location)-1)) - } - - if(i != "bio"){ - for(month in month.var){ - dest.temp <- paste0(temp.temp.save.location, "CHELSA_", model, - "_", emission.scenario, "_", i, "_", month, - "_", time.interval, ".tif") - 
if(!file.exists(dest.temp)){ - URL.temp <- paste0("https://os.zhdk.cloud.switch.ch/envicloud/chelsa/chelsa_V1/", - interm, model, "_", emission.scenario, - # "_r1i1p1_g025.nc_", month, "_", time.interval, - "_V1.2.tif") - if(!http_error(URL.temp)){ - # clear up the temporary directory - unlink(list.files(tempdir(), recursive = T, full.names=T)) - - download.file(url = URL.temp, - destfile = dest.temp, - overwrite = TRUE, - mode = 'wb', - quiet = FALSE) - if(i != "prec"){ - gc() - raster.temp <- raster(dest.temp) - raster.temp <- clamp(raster.temp, lower = -1000, - useValues = FALSE) - gc() - gain(raster.temp) <- 0.1 - gc() - writeRaster(raster.temp, - dest.temp, - overwrite = TRUE) - rm(raster.temp) - gc() - } - }else{ - warning(paste0("File does not exist. Did not download: \n", - URL.temp), call. = TRUE, immediate. = FALSE) - } - } - if(month.var[month] == month.var[length(month.var)] & - length(list.files(temp.temp.save.location, - pattern = ".tif", - include.dirs = FALSE)) != 0){ - if(clipping == TRUE){ - clipping.tif(clip.save.location = temp.temp.save.location, - clip.shapefile = clip.shapefile, - clip.extent = clip.extent, - buffer = buffer, - convert.files.to.asc = convert.files.to.asc, - time.stamp.var = call.time) - } - if(convert.files.to.asc == TRUE){ - convert.to.asc(save.location = temp.temp.save.location, - time.stamp.var = call.time) - } - if(stacking.data == TRUE){ - if(clipping==TRUE){ - stacking.downloaded.data(stack.save.location = temp.temp.save.location, - parameter.var = i, - variable.numbers = variable.numbers, - stack.clipped = TRUE, - time.stamp.var = call.time) - }else{ - stacking.downloaded.data(stack.save.location = temp.temp.save.location, - parameter.var = i, - variable.numbers = variable.numbers, - time.stamp.var = call.time) - } - } - if(combine.raw.zip == TRUE){ - combine.raw.in.zip(save.location = temp.temp.save.location, - zip.name = paste0("CHELSACMIP5_", i, ""), - time.stamp.var = call.time) - } - if(delete.raw.data == TRUE){ - 
unlink(list.files(temp.temp.save.location, - pattern = ".tif", - include.dirs = FALSE, full.names = T), force = TRUE) - } - } - } - }else{ - for(bio in bio.var){ - dest.temp <- paste0(temp.temp.save.location, "CHELSA_", model, - "_", emission.scenario, "_", i, "_", bio, - "_", time.interval,".tif") - if(!file.exists(dest.temp)){ + temp.temp.save.location <- paste0(temp.save.location, + str_replace_all(interm, + pattern = "/", + "_"), + model,"_", + emission.scenario, "/") + if(!dir.exists(str_sub(temp.temp.save.location, + end = str_length(temp.temp.save.location)-1))){ + dir.create(str_sub(temp.temp.save.location, + end = str_length(temp.temp.save.location)-1)) + } + + if(i != "bio"){ + for(month in month.var){ + dest.temp <- paste0(temp.temp.save.location, "CHELSA_", model, + "_", emission.scenario, "_", i, "_", month, + "_", time.interval, ".tif") + if(!file.exists(dest.temp)){ + URL.temp <- paste0("https://os.zhdk.cloud.switch.ch/envicloud/chelsa/chelsa_V1/", + interm, model, "_", emission.scenario, + # "_r1i1p1_g025.nc_", month, "_", time.interval, + "_V1.2.tif") + if(!http_error(URL.temp)){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) - - URL.temp <- paste0("https://os.zhdk.cloud.switch.ch/envicloud/chelsa/chelsa_V1/", - interm, model, "_", - emission.scenario, "_r1i1p1_g025.nc_", - bio, "_", time.interval, "_V1.2.tif") - if(!http_error(URL.temp)){ - download.file(url = URL.temp, - destfile = dest.temp, - overwrite = FALSE, - mode = 'wb', - quiet = FALSE) - + + download.file(url = URL.temp, + destfile = dest.temp, + overwrite = TRUE, + mode = 'wb', + quiet = FALSE) + if(i != "prec"){ + gc() raster.temp <- raster(dest.temp) - # raster.values <- values(raster.temp) - # raster.values[raster.values==-32768] <- NA - # values(raster.temp) <- raster.values - # rm(raster.values); gc() - raster.temp <- clamp(raster.temp, - lower = -1000, + raster.temp <- clamp(raster.temp, lower = -1000, useValues = FALSE) gc() - if(bio 
<= 11){ - gc() - gain(raster.temp) <- 0.1 - } + gain(raster.temp) <- 0.1 + gc() writeRaster(raster.temp, dest.temp, overwrite = TRUE) rm(raster.temp) gc() - }else{ - warning(paste0("File does not exist. Did not download: \n", URL.temp), - call.=TRUE, immediate. = FALSE) } + }else{ + warning(paste0("File does not exist. Did not download: \n", + URL.temp), call. = TRUE, immediate. = FALSE) } - if(bio == bio.var[length(bio.var)] & - length(list.files(temp.temp.save.location, - pattern = ".tif", - include.dirs = FALSE)) != 0){ - if(clipping == TRUE){ - clipping.tif(clip.save.location = temp.temp.save.location, - clip.shapefile = clip.shapefile, - clip.extent = clip.extent, - buffer = buffer, - convert.files.to.asc = convert.files.to.asc, + } + if(month.var[month] == month.var[length(month.var)] & + length(list.files(temp.temp.save.location, + pattern = ".tif", + include.dirs = FALSE)) != 0){ + if(clipping == TRUE){ + clipping.tif(clip.save.location = temp.temp.save.location, + clip.shapefile = clip.shapefile, + clip.extent = clip.extent, + buffer = buffer, + convert.files.to.asc = convert.files.to.asc, + time.stamp.var = call.time) + } + if(convert.files.to.asc == TRUE){ + convert.to.asc(save.location = temp.temp.save.location, time.stamp.var = call.time) + } + if(stacking.data == TRUE){ + if(clipping==TRUE){ + stacking.downloaded.data(stack.save.location = temp.temp.save.location, + parameter.var = i, + variable.numbers = variable.numbers, + stack.clipped = TRUE, + time.stamp.var = call.time) + }else{ + stacking.downloaded.data(stack.save.location = temp.temp.save.location, + parameter.var = i, + variable.numbers = variable.numbers, + time.stamp.var = call.time) } - if(convert.files.to.asc == TRUE){ - convert.to.asc(save.location = temp.temp.save.location, - time.stamp.var = call.time) - } - if(stacking.data == TRUE){ - if(clipping==TRUE){ - stacking.downloaded.data(stack.save.location = temp.temp.save.location, - parameter.var = i, - variable.numbers = 
variable.numbers, - stack.clipped = TRUE, - time.stamp.var = call.time) - }else{ - stacking.downloaded.data(stack.save.location = temp.temp.save.location, - parameter.var = i, - variable.numbers = variable.numbers, - time.stamp.var = call.time) - - } - } - if(combine.raw.zip == TRUE){ - combine.raw.in.zip(save.location = temp.temp.save.location, - zip.name = paste0("CHELSACMIP5_", i, ""), - time.stamp.var = call.time) + } + if(combine.raw.zip == TRUE){ + combine.raw.in.zip(save.location = temp.temp.save.location, + zip.name = paste0("CHELSACMIP5_", i, ""), + time.stamp.var = call.time) + } + if(delete.raw.data == TRUE){ + unlink(list.files(temp.temp.save.location, + pattern = ".tif", + include.dirs = FALSE, full.names = T), force = TRUE) + } + } + } + }else{ + for(bio in bio.var){ + dest.temp <- paste0(temp.temp.save.location, "CHELSA_", model, + "_", emission.scenario, "_", i, "_", bio, + "_", time.interval,".tif") + if(!file.exists(dest.temp)){ + # clear up the temporary directory + unlink(list.files(tempdir(), recursive = T, full.names=T)) + + URL.temp <- paste0("https://os.zhdk.cloud.switch.ch/envicloud/chelsa/chelsa_V1/", + interm, model, "_", + emission.scenario, "_r1i1p1_g025.nc_", + bio, "_", time.interval, "_V1.2.tif") + if(!http_error(URL.temp)){ + download.file(url = URL.temp, + destfile = dest.temp, + overwrite = FALSE, + mode = 'wb', + quiet = FALSE) + + raster.temp <- raster(dest.temp) + # raster.values <- values(raster.temp) + # raster.values[raster.values==-32768] <- NA + # values(raster.temp) <- raster.values + # rm(raster.values); gc() + raster.temp <- clamp(raster.temp, + lower = -1000, + useValues = FALSE) + gc() + if(bio <= 11){ + gc() + gain(raster.temp) <- 0.1 } - if(delete.raw.data == TRUE){ - unlink(list.files(temp.temp.save.location, - pattern = ".tif", - include.dirs = FALSE, full.names = T), force = TRUE) + writeRaster(raster.temp, + dest.temp, + overwrite = TRUE) + rm(raster.temp) + gc() + }else{ + warning(paste0("File does not exist. 
Did not download: \n", URL.temp), + call.=TRUE, immediate. = FALSE) + } + } + if(bio == bio.var[length(bio.var)] & + length(list.files(temp.temp.save.location, + pattern = ".tif", + include.dirs = FALSE)) != 0){ + if(clipping == TRUE){ + clipping.tif(clip.save.location = temp.temp.save.location, + clip.shapefile = clip.shapefile, + clip.extent = clip.extent, + buffer = buffer, + convert.files.to.asc = convert.files.to.asc, + time.stamp.var = call.time) + } + if(convert.files.to.asc == TRUE){ + convert.to.asc(save.location = temp.temp.save.location, + time.stamp.var = call.time) + } + if(stacking.data == TRUE){ + if(clipping==TRUE){ + stacking.downloaded.data(stack.save.location = temp.temp.save.location, + parameter.var = i, + variable.numbers = variable.numbers, + stack.clipped = TRUE, + time.stamp.var = call.time) + }else{ + stacking.downloaded.data(stack.save.location = temp.temp.save.location, + parameter.var = i, + variable.numbers = variable.numbers, + time.stamp.var = call.time) + } } + if(combine.raw.zip == TRUE){ + combine.raw.in.zip(save.location = temp.temp.save.location, + zip.name = paste0("CHELSACMIP5_", i, ""), + time.stamp.var = call.time) + } + if(delete.raw.data == TRUE){ + unlink(list.files(temp.temp.save.location, + pattern = ".tif", + include.dirs = FALSE, full.names = T), force = TRUE) + } } } - if(length(list.files(temp.temp.save.location, - include.dirs = TRUE)) == 0){ - unlink(str_sub(temp.temp.save.location, 1, - end = str_length(temp.temp.save.location)-1), - force = TRUE, - recursive = TRUE) - } - + } + if(length(list.files(temp.temp.save.location, + include.dirs = TRUE)) == 0){ + unlink(str_sub(temp.temp.save.location, 1, + end = str_length(temp.temp.save.location)-1), + force = TRUE, + recursive = TRUE) + } + } } } @@ -805,7 +805,7 @@ Chelsa.CMIP_5.download <- function(save.location = "./", #'@import RCurl #'@import ncdf4 #'@import raster -#'@importFrom utils unzip download.file +#'@importFrom utils unzip download.file 
setTxtProgressBar txtProgressBar #' #' #'@export @@ -844,7 +844,7 @@ Chelsa.lgm.download <- function(save.location = "./", if(!is.numeric(month.var)) stop("month.var needs to be a numeric vector") # month.var <- str_pad(month.var, 2, 'left', pad = "0") } - + if(is.element("bio", parameter)){ bio.var <- c(bio.var) if(!is.numeric(bio.var)) stop("bio.var needs to be a numeric vector") @@ -852,13 +852,13 @@ Chelsa.lgm.download <- function(save.location = "./", bio.var <- str_pad(bio.var, 2, 'left', pad = "0") # print(bio.var) } - + # through all given parameters -------------------------------------------- # work through paramerters for(i in parameter){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) - + # create intermediate strings for later use interm <- switch(i, "prec" = "prec/", @@ -867,7 +867,7 @@ Chelsa.lgm.download <- function(save.location = "./", "tmin" = "tmin/", "bio" = "bioclim/", stop()) - + variable.numbers <- switch(i, "bio" = bio.var, "tmin" = month.var, @@ -875,11 +875,11 @@ Chelsa.lgm.download <- function(save.location = "./", "temp" = month.var, "prec" = month.var, stop()) - + # create new directory dir.create(paste0(save.location, "/", i)) temp.save.location <- paste0(save.location, "/", i, "/") - + for (model in model.var) { # download of the requested datasets ------------------------------------- temp.temp.save.location <- paste0(temp.save.location, @@ -890,10 +890,10 @@ Chelsa.lgm.download <- function(save.location = "./", if(!dir.exists(temp.temp.save.location)){ dir.create(temp.temp.save.location) } - + # temp.temp.save.location <- normalizePath(temp.temp.save.location, # winslash = "/") - + # Check if bio is not requested if(i != "bio"){ if(i == "temp") { @@ -901,12 +901,12 @@ Chelsa.lgm.download <- function(save.location = "./", }else{ month.var <- as.integer(month.var) } - + # work through every requested month for(month in 1:length(month.var)){ # clear up the temporary directory 
unlink(list.files(tempdir(), recursive = T, full.names=T)) - + dest.temp <- paste0(temp.temp.save.location, "CHELSA_PMIP_", model, "_", i,"_",month.var[month],".tif") if(!file.exists(dest.temp)){ @@ -916,9 +916,9 @@ Chelsa.lgm.download <- function(save.location = "./", interm, "CHELSA_PMIP_", model,"_",i,"_", month.var[month],"_1.tif") }else{ if(model == "CCSM4"){ - URL.temp <- - paste0("https://os.zhdk.cloud.switch.ch/envicloud/chelsa/chelsa_V1/pmip3/", - interm, "CHELSA_PMIP_", model, "_tmean_", month.var[month],".tif") + URL.temp <- + paste0("https://os.zhdk.cloud.switch.ch/envicloud/chelsa/chelsa_V1/pmip3/", + interm, "CHELSA_PMIP_", model, "_tmean_", month.var[month],".tif") }else{ month.var <- as.integer(month.var) URL.temp <- @@ -936,16 +936,16 @@ Chelsa.lgm.download <- function(save.location = "./", quiet = FALSE) if(i != "prec"){ raster.temp <- raster(dest.temp) - + raster.temp <- clamp(raster.temp, lower = -1000, useValues= FALSE) gc() - + # Conversion Float gain(raster.temp) <- 0.1 # umrechnung Kelvin - Celsius gc() offs(raster.temp) <- -273.15 - + writeRaster(raster.temp, dest.temp, overwrite = TRUE) @@ -1021,7 +1021,7 @@ Chelsa.lgm.download <- function(save.location = "./", URL.temp <- paste0("https://os.zhdk.cloud.switch.ch/envicloud/chelsa/chelsa_V1/pmip3/", interm, "CHELSA_PMIP_", model, "_BIO_",bio,".tif") - + # check if URL is available if(url.exists(URL.temp)){ # download file to save location @@ -1034,7 +1034,7 @@ Chelsa.lgm.download <- function(save.location = "./", raster.temp <- raster(dest.temp) raster.temp <- clamp(raster.temp, lower = -1000, useValues= FALSE) gc() - + if(bio <= 11){ # values(raster.temp) <- as.numeric(values(raster.temp)/10) gain(raster.temp) <- 0.1 @@ -1093,7 +1093,7 @@ Chelsa.lgm.download <- function(save.location = "./", } } } - + if(length(list.files(temp.temp.save.location, include.dirs = TRUE)) == 0){ unlink(str_sub(temp.temp.save.location, 1, end = str_length(temp.temp.save.location)-1), @@ -1116,7 +1116,7 @@ 
Chelsa.lgm.download <- function(save.location = "./", quiet = FALSE) } } -# Saving BIB File + # Saving BIB File if(save.bib.file == TRUE) save.citation(save.location = save.location, dataSetName = "CHELSA") } @@ -1163,7 +1163,7 @@ Chelsa.lgm.download <- function(save.location = "./", #'@import ncdf4 #'@import raster #'@import httr -#'@importFrom utils unzip download.file +#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar #' #' #'@export @@ -1209,7 +1209,7 @@ Chelsa.timeseries.download <- function(save.location = "./", if(start.year.var == end.year.var){ if(start.month.var > end.month.var) stop("End is before the start. Please correct the input!") } - + ts_string <- seq.Date(as.Date(paste(start.year.var, start.month.var, "01", sep = "-")), as.Date(paste(end.year.var, @@ -1218,7 +1218,7 @@ Chelsa.timeseries.download <- function(save.location = "./", ts_string <- format.Date(ts_string, format = "%Y_%m") # ts_string <- str_sub(ts_string, 1, end = str_length(ts_string)-3) # ts_string <- str_replace_all(ts_string, pattern = "-", replacement = "_") - + if(length(include.month.var)!=12){ ts.string.temp <- c() for (incl.month in include.month.var) { @@ -1230,14 +1230,14 @@ Chelsa.timeseries.download <- function(save.location = "./", } ts_string <- ts.string.temp } - - + + # Parameter and directories ----------------------------------------------- # work through paramerters for(i in parameter){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) - + # create intermediate strings for later use interm <- switch(i, "prec" = "prec/", @@ -1246,7 +1246,7 @@ Chelsa.timeseries.download <- function(save.location = "./", "tmin" = "tmin/", # "bio" = "bio/", stop()) - + variable.numbers <- switch(i, # "bio" = bio.var, "tmin" = include.month.var, @@ -1254,7 +1254,7 @@ Chelsa.timeseries.download <- function(save.location = "./", "temp" = include.month.var, "prec" = include.month.var, stop()) - + # create new directory 
if(!dir.exists(paste0(save.location, "/", i))){ dir.create(paste0(save.location, "/", i), showWarnings = FALSE) @@ -1270,7 +1270,7 @@ Chelsa.timeseries.download <- function(save.location = "./", if(!dir.exists(temp.temp.save.location)){ dir.create(str_sub(temp.temp.save.location, end=-2)) } - + # temp.temp.save.location <- normalizePath(temp.temp.save.location, # winslash = "/") # if(i == "temp"){ @@ -1288,7 +1288,7 @@ Chelsa.timeseries.download <- function(save.location = "./", if(!http_error(URL.temp)){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) - + dest.file <- paste0(temp.temp.save.location, "CHELSA_", i, "_", year_month, "_V1.2.1.tif") if(!file.exists(dest.file)){ @@ -1298,29 +1298,29 @@ Chelsa.timeseries.download <- function(save.location = "./", overwrite = TRUE, mode = 'wb', quiet = FALSE) - - - if(i != "prec"){ - raster.temp <- raster(dest.file) - # raster.values <- values(raster.temp) - # raster.values[raster.values==-32768] <- NA - # values(raster.temp) <- raster.values - # rm(raster.values) - - raster.temp <- clamp(raster.temp, lower = -1000, useValues = FALSE) - gc() - - # values(raster.temp) <- as.numeric(values(raster.temp)/10) - # values(raster.temp) <- as.numeric(values(raster.temp)-273.15) - gain(raster.temp) <- 0.1 - offs(raster.temp) <- -273.15 - - writeRaster(raster.temp, - dest.file, - overwrite = TRUE) - rm(raster.temp) - gc() - } + + + if(i != "prec"){ + raster.temp <- raster(dest.file) + # raster.values <- values(raster.temp) + # raster.values[raster.values==-32768] <- NA + # values(raster.temp) <- raster.values + # rm(raster.values) + + raster.temp <- clamp(raster.temp, lower = -1000, useValues = FALSE) + gc() + + # values(raster.temp) <- as.numeric(values(raster.temp)/10) + # values(raster.temp) <- as.numeric(values(raster.temp)-273.15) + gain(raster.temp) <- 0.1 + offs(raster.temp) <- -273.15 + + writeRaster(raster.temp, + dest.file, + overwrite = TRUE) + rm(raster.temp) + gc() + } } 
}else{ # Warning message @@ -1373,10 +1373,10 @@ Chelsa.timeseries.download <- function(save.location = "./", } } } - + # Clean up, if no data was downloaded. ------------------------------------ - - + + if(length(list.files(temp.temp.save.location, include.dirs = TRUE)) == 0){ unlink(str_sub(temp.temp.save.location, 1, end = str_length(temp.temp.save.location)-1), @@ -1432,7 +1432,7 @@ Chelsa.timeseries.download <- function(save.location = "./", #'@import ncdf4 #'@import raster #'@import httr -#'@importFrom utils unzip download.file +#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar #' #' #'@export @@ -1487,14 +1487,14 @@ Chelsa.CRUts.download <- function(save.location = "./", stop("End is before the start. Please correct the input!") } } - + ts_string <- seq.Date(as.Date(paste(start.year.var, start.month.var, "01", sep = "-")), as.Date(paste(end.year.var, end.month.var, "01", sep = "-")), by = "month") ts_string <- format.Date(ts_string, format = "%m_%Y") - + if(length(include.month.var)!=12){ ts.string.temp <- c() for (incl.month in include.month.var) { @@ -1506,17 +1506,17 @@ Chelsa.CRUts.download <- function(save.location = "./", } ts_string <- ts.string.temp } - + ts_string <- as.Date(paste0(ts_string,"-01"), format = "%d_%Y-%m") ts_string <- format.Date(ts_string, format = "%e_%Y") ts_string <- str_remove(ts_string, pattern = " ") - + # Parameter and directories ----------------------------------------------- # work through paramerters for(i in parameter){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) - + # create intermediate strings for later use interm <- switch(i, "prec" = "prec/", @@ -1544,13 +1544,13 @@ Chelsa.CRUts.download <- function(save.location = "./", if(!dir.exists(temp.temp.save.location)){ dir.create(temp.temp.save.location) } - + # temp.temp.save.location <- normalizePath(temp.temp.save.location, # winslash = "/") - + # Download 
---------------------------------------------------------------- for (year_month in ts_string){ - + output.year_month <- str_split(year_month, pattern = "_") output.year_month <- unlist(output.year_month) zero <- switch (as.character(str_length(paste0(output.year_month[2], @@ -1568,13 +1568,13 @@ Chelsa.CRUts.download <- function(save.location = "./", "_V.1.0.tif") dest.temp <- paste0(temp.temp.save.location, "CHELSA_CRUts_", i, "_",output.year_month, "_V_1_0.tif") - + # check if URL is available if(!http_error(URL.temp)){ gc() # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) - + if(!file.exists(dest.temp)){ # download file to save location download.file(url = URL.temp, @@ -1652,15 +1652,15 @@ Chelsa.CRUts.download <- function(save.location = "./", } } } - + # Clean up, if no data was downloaded. ------------------------------------ - + if(length(list.files(temp.temp.save.location, include.dirs = TRUE)) == 0){ unlink(str_sub(temp.temp.save.location, 1, end = str_length(temp.temp.save.location)-1), force = T, recursive = TRUE) } - + } # Saving BIB File if(save.bib.file == TRUE) save.citation(save.location = save.location, dataSetName = "CHELSA") diff --git a/R/Chelsa_new_download_functions.R b/R/Chelsa_new_download_functions.R new file mode 100644 index 0000000000000000000000000000000000000000..7aeaac1e2a05d9f259de4e38cbcffe60f4823d79 --- /dev/null +++ b/R/Chelsa_new_download_functions.R @@ -0,0 +1,304 @@ +#'@title Function for downloading the CHELSA climate dataset (1979-2013) +#'@author Helge Jentsch +#'@description This function supports the download, pre-processing and management of CHELSA climate data comprising of monthly precipitation sums in mm, monthly temperature (average, minimum, maximum) in degrees Celsius, and annual chracteristics (19 bioclimatic variables). 
The spatial resolution of the downloaded data is 30 arc-seconds.\cr To allow pre-processing, clipping and buffering, conversion to ASCII-grids and stacking options are included.\cr Optionally, an output of a .bib-file of the cited literature can be retrieved.\cr For user convenience, saving directories will be created automatically. Also options to "zip" and/or delete the RAW-files are included. +#' +#'@param save.location string. Input where the datasets will be saved. \cr Default: Working Directory. +#'@param parameter string (vector). Input of parameters which should be downloaded. \cr Default: \code{c("prec", "temp", "tmax", "tmin", "bio")} +#'@param bio.var integer (vector). Input which BIOCLIM variables should be downloaded. Only applicable to BIOCLIM variables. For further information see: \url{http://chelsa-climate.org/bioclim/}. \cr Default: \code{c(1:19)} +#'@param month.var integer (vector). Input which monthly data should be downloaded. Only applicable to precipitation and temperature (average, maximum, minimum). \cr Default: \code{c(1:12)} +#'@param version.var string (vector). Input which version of the dataset should be downloaded. Multiple selection is possible. \cr Default: \code{c("1.2")} +#'@param clipping logical. Input whether the downloaded data should be clipped.\cr If \code{FALSE}: clip.shapefile, buffer, clip.extent will be ignored. \cr Default: \code{FALSE} +#'@param clip.shapefile string. Input which shapefile should be used for clipping. \cr Default: \code{NULL} +#'@param clip.extent numeric (vector). Input vector with four numeric values. This is following the input order c("xleft", "xright", "ybottom", "ytop"). \cr Default: \code{c(-180, 180, -90, 90)} +#'@param buffer numeric. Input of decimal degrees of buffer around the shapefile and/or extent. \cr Default: \code{0} +#'@param convert.files.to.asc logical. 
Input whether files should be converted into the ASCII format.\cr If \code{TRUE}: a new subdirectory is created and the rawdata is saved there. If \code{clipping} is \code{TRUE}: the clipped raster files are also saved as ASCII grids. \cr Default: \code{FALSE} +#'@param stacking.data logical. Input whether the downloaded data should be stacked as a netCDF-rasterstack. \cr Default: \code{FALSE} +#'@param combine.raw.zip logical. Should the downloaded raw-data be "zipped". \cr Default: \code{FALSE} +#'@param delete.raw.data logical. Should the downloaded raw-data be deleted.\cr If \code{combine.raw.zip} is \code{TRUE}: raw-data is still available in the zipped file. \cr Default: \code{FALSE} +#'@param save.bib.file logical. Whether a BibTeX-citation file of the dataset should be provided in the Working directory. \cr Default: \code{TRUE} +#'@return CHELSA climate datasets for the period of 1979 - 2013 +#' +#'@note Please note that the downloaded data for temperature and therefore also the first eleven bioclim-variables are processed to °C with one significant decimal without offset and factor. Processing and conversion to other file-formats on a global dataset may take some time. +#' +#'@references D. N. Karger, O. Conrad, J. B{\"o}hner , et al. "Climatologies at high resolution for the earth's land surface areas". In: _Scientific Data_ 4.1 (Sep. 2017). DOI: 10.1038/sdata.2017.122. <URL: https://doi.org/10.1038/sdata.2017.122>. +#'@references D. N. Karger, O. Conrad, J. B{\"o}hner , et al. _Data from: Climatologies at high resolution for the earth's land surface areas_. En. 2018. DOI: 10.5061/DRYAD.KD1D4. <URL: http://datadryad.org/stash/dataset/doi:10.5061/dryad.kd1d4>. 
+#' +#'@examples +#' \dontrun{ +#' # Bioclim +#' Chelsa.Clim.download(parameter = "bio", bio.var = c(1,19)) +#' # Precipitation +#' Chelsa.Clim.download(parameter = "prec", month.var = c(1,12)) +#' } +#' +#'@import stringr +#'@import RCurl +#'@import ncdf4 +#'@import raster +#'@importFrom utils unzip download.file +#' +#' +#'@export +Chelsa.Clim.download <- function(save.location = "./", + parameter = c("prec", "temp", "tmax", "tmin", "bio"), + bio.var = c(1:19), + month.var = c(1:12), + version.var = c("1.2"), + clipping = FALSE, + clip.shapefile = NULL, + clip.extent = c(-180, 180, -90, 90), + buffer = 0, + convert.files.to.asc = FALSE, + stacking.data = FALSE, + combine.raw.zip = FALSE, + delete.raw.data = FALSE, + save.bib.file = TRUE){ + gc() + call.time <- stringr::str_replace_all( + stringr::str_replace_all(paste0(Sys.time()), + pattern = ":", + replacement = "-"), + pattern = " ", + replacement = "_") + # initial check ----------------------------------------------------------- + # normalize Path for easier application later + save.location <- normalizePath(save.location, winslash = "/") + # Check which parameters are put in and if the connected + # month/bio-variables are correctly input + if(is.element("prec", parameter)|is.element("temp", parameter)| + is.element("tmax", parameter)|is.element("tmin", parameter)){ + # if month.var is just a single numeric input it is here casted into + # a vector for comparabilities + month.var <- c(month.var) + # if there is not a numeric input -> prompt error + if(!is.numeric(month.var)) stop() + # Padding of "one-digit" months with a 0 + month.var <- stringr::str_pad(month.var, 2, 'left', pad = "0") + } + + # analog to the if-clause before - here the parameter bio.var is checked. 
+ if(is.element("bio", parameter)){ + bio.var <- c(bio.var) + if(!is.numeric(bio.var)) stop() + bio.var <- stringr::str_pad(bio.var, 2, 'left', pad = "0") + } + # parameter + parameter <- sort(parameter) + DLTparameter <- c(rep(parameter[parameter!="bio"], length(month.var)), rep(parameter[parameter=="bio"], length(bio.var))) + DLTparameter <- sort(DLTparameter) + # variables + DLTvariable <- NULL + for(parm in parameter){ + DLTvariable <- c(DLTvariable, switch(parm, + "prec" = month.var, + "tmax" = month.var, + "temp" = month.var, + "tmin" = month.var, + bio.var) + ) + } + + # print(length(DLTvariable));print(length(DLTparameter)) + dataDF <- data.frame("parameter" = DLTparameter, + "variable" = DLTvariable, + "version" = rep(version.var, length(DLTvariable))) + + dataDF$parmLong <- paste0(dataDF$parameter,"10") + dataDF$parmLong[dataDF$parameter == "prec"] <- paste0("prec") + dataDF$parameter[dataDF$parameter == "temp"] <- paste0("tmean") + + dataDF$years <- paste0("_1979-2013") + dataDF$years[dataDF$parameter == "prec" | dataDF$parameter == "bio"] <- paste0("") + + dataDF$URL[dataDF$version == "1.2"] <- + paste0("https://os.zhdk.cloud.switch.ch/envicloud/chelsa/chelsa_V1/climatologies/", + dataDF$parameter, + "/CHELSA_", dataDF$parmLong , "_", dataDF$variable, dataDF$years, + "_V1.2_land.tif") + dataDF$URL[dataDF$version == "1.2" & dataDF$parameter == "bio"] <- + paste0("https://os.zhdk.cloud.switch.ch/envicloud/chelsa/chelsa_V1/climatologies/", + dataDF$parameter[dataDF$version == "1.2" & dataDF$parameter == "bio"], + "/CHELSA_", + dataDF$parmLong[dataDF$version == "1.2" & dataDF$parameter == "bio"] , + "_", + dataDF$variable[dataDF$version == "1.2" & dataDF$parameter == "bio"], + dataDF$years[dataDF$version == "1.2" & dataDF$parameter == "bio"], + ".tif") + # print(dataDF$URL) + for(urlexists in dataDF$URL){ + if(!url.exists(urlexists)){ + cat(paste(urlexists, + " does not exist, please check the website of Chelsa.", + "\n We would greatly apprecheate feedback 
on this at helge.marc.ole.jentsch@uni-hamburg.de") + ) + next # stop() + } + } + print(paste0(getDownloadSize(dataDF$URL), " MB will be downloaded.")) + # Progressbar setup + PGBsum <- nrow(dataDF) + length(unique(dataDF$parameter)) + 1 + PGB <- txtProgressBar(min = 0, max = PGBsum, style = 3) + PGBstate <- 0 + + locationSack <- NULL + for(parm in dataDF$parameter){ + if (!dir.exists(paste0(save.location, "/", parm))){ + dir.create(paste0(save.location, "/", parm)) + } + if("1.2" %in% dataDF$version){ + if (!dir.exists(paste0(save.location, "/", parm, "/ChelsaV1.2Climatologies"))){ + dir.create(paste0(save.location, "/", parm, "/ChelsaV1.2Climatologies")) + } + locationSack <- c(locationSack, paste0(save.location, "/", parm, "/ChelsaV1.2Climatologies/")) + } + } + # print(locationSack) + dataDF$filepath[dataDF$version == "1.2"] <- + paste0(save.location,"/", + dataDF$parameter, "/ChelsaV1.2Climatologies", + "/CHELSA_", dataDF$parmLong , "_", dataDF$variable, dataDF$years, + "_V1.2.tif") + for(fileexists in dataDF$filepath){ + if(!file.exists(fileexists)){ + unlink(list.files(tempdir(), recursive = TRUE, full.names = TRUE)) + download.file(url = dataDF$URL[dataDF$filepath == fileexists], + destfile = fileexists, + # overwrite is TRUE otherwise a error is caused + overwrite = TRUE, + # From the description file: + # The choice of binary transfer (mode = "wb" or "ab") + # is important on Windows, since unlike Unix-alikes + # it does distinguish between text and binary files and + # for text transfers changes + # \n line endings to \r\n (aka ‘CRLF’). 
+ mode = 'wb', + # to show progression bar + quiet = TRUE, + cacheOK = FALSE) + } + setTxtProgressBar(PGB, PGBstate+1) + PGBstate <- PGBstate+1 + } + rescaleDF <- dataDF[dataDF$parameter != "prec",] + rescaleDF <- rescaleDF[!(rescaleDF$parameter == "bio" & as.numeric(rescaleDF$variable) > 12),] + # for(prepro in dataDF$filepath){ + # gc() + # preproRaster <- raster(prepro) + # gc() + # preproRaster <- clamp(preproRaster, lower = -1000, + # useValues = FALSE) + # if(prepro %in% rescaleDF$filepath){ + # print(Sys.time()) + # gain(preproRaster) <- 0.1 + # print(Sys.time()) + # } + # gc() + # writeRaster(x = preproRaster, filename = prepro, overwrite = T) + # } + locationSack <- unique(locationSack) + for (temp.temp.save.location in locationSack) { + run <- grep(temp.temp.save.location, locationSack) + for(i in run){ + # print(i) + # print(parameter[i]) + variable.numbers <- switch(parameter[i], + "bio" = bio.var, + month.var) + # if clipping is TRUE ... + if(clipping == TRUE){ + # the function "clipping.tif" (found in the auxiliary.R-File) + # is executed. The clip.save.location is the same location as the + # "current" save location + clipping.tif(clip.save.location = temp.temp.save.location, + # the clip-shapefile is passed + # default "NULL" does not produce error + clip.shapefile = clip.shapefile, + # Clip.extent is passed + # default "c(-180, 180, -90, 90)" does not produce errors + # simply clips the whole world. + clip.extent = clip.extent, + # buffer is passed + # default: 0. Unit is arc-degrees + buffer = buffer, + # conversion to ASCII format here integrated into the + # clipping function. Since it can be assumed that + # they should be converted lateron anyway. + convert.files.to.asc = convert.files.to.asc, + time.stamp.var = call.time) + } + # if converting.files.to.asc is TRUE ... + if(convert.files.to.asc == TRUE){ + # the function "convert.to.asc" (found in the auxiliary.R-File) + # is executed. 
The save.location is the same location as the + # "current" save location. Also another new subdirectory will + # be created with the name "ASCII" . + convert.to.asc(save.location = temp.temp.save.location, + time.stamp.var = call.time) + } + # if stacking.data is TRUE ... + if(stacking.data == TRUE){ + # the function "stacking.downloaded.data" + # (found in the auxiliary.R-File) is executed. + # The save.location is the same location as the + # "current" save location. + if(clipping==TRUE){ + stacking.downloaded.data(stack.save.location = temp.temp.save.location, + parameter.var = parameter[i], + variable.numbers = variable.numbers, + stack.clipped = TRUE, + time.stamp.var = call.time) + }else{ + stacking.downloaded.data(stack.save.location = temp.temp.save.location, + parameter.var = parameter[i], + variable.numbers = variable.numbers, + time.stamp.var = call.time) + } + } + # if combine.raw.zip is TRUE ... + if(combine.raw.zip == TRUE){ + # the function "combine.raw.in.zip" + # (found in the auxiliary.R-File) is executed. + # The save.location is the same location as the + # "current" save location. The name of the zip-file is also + # passed with the current parameter in it. + combine.raw.in.zip(save.location = temp.temp.save.location, + zip.name = paste0("CHELSAClim_", parameter[i], ""), + time.stamp.var = call.time) + } + # if delete.raw.data is TRUE ... + if(delete.raw.data == TRUE){ + # All .tif raster files in the current 2nd order subdirectory are + # unlinked (deleted). 
+ unlink(list.files(temp.temp.save.location, + pattern = ".tif", + include.dirs = FALSE, + full.names = T), + force = TRUE) + } + } + # delete all temporary files + unlink(list.files(tempdir(), recursive = T, full.names =T)) + setTxtProgressBar(PGB, PGBstate+1) + PGBstate <- PGBstate+1 + } + if(save.bib.file == TRUE) { + save.citation(save.location = save.location, dataSetName = "CHELSA") + } + setTxtProgressBar(PGB, PGBstate+1) + close(PGB) + # delete all temporary files + unlink(list.files(tempdir(), recursive = T, full.names =T)) + # print(dataDF) + + # print(getDownloadSize(dataDF$URL)) +} +# Chelsa.Clim.download(save.location = "../Daten/") +# Chelsa.Clim.download(parameter = c("tmin", "prec", "bio"), month.var = c(1,4,7), bio.var = c(1,13,14,17)) +# Chelsa.Clim.download(save.location = "../testing/", +# parameter = c("tmin", "bio"), month.var = c(1,4,7), bio.var = c(1,13,14,17)) +# Chelsa.Clim.download(save.location = "../testing/", +# parameter = c("tmin", "bio"), month.var = c(8), bio.var = c(19), clipping = T, clip.extent = c(5,10,50,55)) +# Chelsa.Clim.download("../testing/", parameter = c("prec", "temp", "bio"), +# bio.var = c(1,12), month.var = c(1,12), +# clipping = T, clip.extent = c(8,10,50,56), +# combine.raw.zip = T) \ No newline at end of file diff --git a/R/WorldClim_download_functions.R b/R/WorldClim_download_functions.R index 5f5c4741104e843a47a7a4eca48a6b1d8b0f1043..649f67f3d9918b3d6a5873c8e3a976ff4616f5c1 100644 --- a/R/WorldClim_download_functions.R +++ b/R/WorldClim_download_functions.R @@ -42,10 +42,11 @@ #' } #' #'@import stringr +#'@importFrom curl curl_fetch_memory #'@import RCurl #'@import ncdf4 #'@import raster -#'@importFrom utils unzip download.file +#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar #' #'@export WorldClim.HistClim.download <- function(save.location = "./", @@ -158,7 +159,7 @@ WorldClim.HistClim.download <- function(save.location = "./", stop()) # Thrid: Through resolution 
----------------------------------------------- for (res in resolution) { - temp.temp.save.location <- paste0(temp.save.location,"WorldClim_", + temp.temp.save.location <- paste0(temp.save.location,"/WorldClim_", vers, "_", parm.temp, "_", res, "/") # if not already created, create new directory if(!dir.exists(temp.temp.save.location)){ @@ -185,15 +186,17 @@ WorldClim.HistClim.download <- function(save.location = "./", immediate. = TRUE) } # destination file - dest.temp <- paste0(temp.temp.save.location, "WC_",vers, "_", - res, "_", parm.temp, "_", "_Bulk.zip") + dest.temp <- paste0(temp.temp.save.location, "/WC_",vers, "_", + res, "_", parm.temp, "_Bulk.zip") if(!(parm.temp == "bio" & res.temp == "30s")){ if(!file.exists(dest.temp)){ # create a variable for the later requested Download-URL to avoid # requireing multiple changes, if the link changes. URL.temp <- paste0(URL.1 , parm.temp, "_", res.temp, "_bil.zip") + urlCheck <- curl_fetch_memory(url = URL.temp)$status_code # check if URL is available - if(url.exists(URL.temp)){ + # if(url.exists(URL.temp)){ + if(urlCheck == 200){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) # download file to save location @@ -228,8 +231,10 @@ WorldClim.HistClim.download <- function(save.location = "./", # create a variable for the later requested Download-URL to avoid # requiring multiple changes, if the link changes. URL.temp <- paste0(URL.1 , parm.temp, div, "_", res.temp, "_bil.zip") + urlCheck <- curl_fetch_memory(url = URL.temp)$status_code # check if URL is available - if(url.exists(URL.temp)){ + # if(url.exists(URL.temp)){ + if(urlCheck == 200){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) # download file to save location @@ -379,9 +384,10 @@ WorldClim.HistClim.download <- function(save.location = "./", # create a variable for the later requested Download-URL to avoid # requireing multiple changes, if the link changes. 
URL.temp <- paste0(URL.1, res.temp, "_", parm.temp, ".zip") - paste0(URL.temp) + urlCheck <- curl_fetch_memory(url = URL.temp)$status_code # check if URL is available - if(url.exists(URL.temp)){ + # if(url.exists(URL.temp)){ + if(urlCheck == 200){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) # download file to save location @@ -609,10 +615,11 @@ WorldClim.HistClim.download <- function(save.location = "./", #' } #' #'@import stringr +#'@importFrom curl curl_fetch_memory #'@import RCurl #'@import ncdf4 #'@import raster -#'@importFrom utils unzip download.file +#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar #' #'@export WorldClim.CMIP_5.download <- function(save.location = "./", @@ -792,7 +799,7 @@ WorldClim.CMIP_5.download <- function(save.location = "./", "2070" = "70", next ) - temp.temp.save.location <- paste0(temp.save.location,"WorldClim_CMIP5_", + temp.temp.save.location <- paste0(temp.save.location,"/WorldClim_CMIP5_", parm.temp, "_", res,"_", gcm, "_", rcp, "_",year,"/") # if not already created, create new directory @@ -809,8 +816,10 @@ WorldClim.CMIP_5.download <- function(save.location = "./", # create a variable for the later requested Download-URL to avoid # requireing multiple changes, if the link changes. 
URL.temp <- paste0(URL.1, URL.2, URL.4, parm.temp, year.temp, ".zip") + urlCheck <- curl_fetch_memory(url = URL.temp)$status_code # check if URL is available - if(url.exists(URL.temp)){ + # if(url.exists(URL.temp)){ + if(urlCheck == 200){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) # download file to save location @@ -1106,7 +1115,9 @@ WorldClim.CMIP_5.download <- function(save.location = "./", #'@import RCurl #'@import ncdf4 #'@import raster -#'@importFrom utils download.file unzip +#'@importFrom utils download.file unzip setTxtProgressBar txtProgressBar +#'@importFrom curl curl_fetch_memory + #'@export WorldClim.CMIP_6.download <- function(save.location = "./", @@ -1247,8 +1258,10 @@ WorldClim.CMIP_6.download <- function(save.location = "./", # create a variable for the later requested Download-URL to avoid # requireing multiple changes, if the link changes. URL.temp <- paste0(URL.1, URL.2,URL.3, ".zip") + urlCheck <- curl_fetch_memory(url = URL.temp)$status_code # check if URL is available - if(url.exists(URL.temp)){ + # if(url.exists(URL.temp)){ + if(urlCheck == 200){ # clear up the temporary directory unlink(list.files(tempdir(), recursive = T, full.names=T)) # download file to save location diff --git a/R/auxiliary.R b/R/auxiliary.R index 15dfacc8b2401a664c4c4b3d3db539073110e49a..2de7b29743ecaec6461a2a1d2783248dfd63cc0d 100644 --- a/R/auxiliary.R +++ b/R/auxiliary.R @@ -41,6 +41,7 @@ clipping.tif <- function(clip.save.location = "./", recursive = FALSE, pattern = ".tif") if(length(temp.list.files) == 0) stop(paste0("No files found at location: ", clip.save.location)) + # print(temp.list.files) temp.list.file.names <- list.files(clip.save.location, full.names = FALSE, recursive = FALSE, @@ -527,46 +528,85 @@ process.raster.int.doub <- function(raster.layer = NULL) if(class(raster.layer)[1] != "RasterLayer"){ stop("raster.layer is not a 'RasterLayer'") } - extent_rasterfile <- extent(raster.layer) - # crop - tl 
<- crop(raster.layer, extent(extent_rasterfile@xmin, - (extent_rasterfile@xmin+extent_rasterfile@xmax)/2, - (extent_rasterfile@ymin+extent_rasterfile@ymax)/2, - extent_rasterfile@ymax) - ) - bl <- crop(raster.layer, extent(extent_rasterfile@xmin, - (extent_rasterfile@xmin+extent_rasterfile@xmax)/2, - extent_rasterfile@ymin, - (extent_rasterfile@ymin+extent_rasterfile@ymax)/2) - ) - tr <- crop(raster.layer, extent((extent_rasterfile@xmin+extent_rasterfile@xmax)/2, - extent_rasterfile@xmax, - (extent_rasterfile@ymin+extent_rasterfile@ymax)/2, - extent_rasterfile@ymax) - ) - br <- crop(raster.layer, extent((extent_rasterfile@xmin+extent_rasterfile@xmax)/2, - extent_rasterfile@xmax, - extent_rasterfile@ymin, - (extent_rasterfile@ymin+extent_rasterfile@ymax)/2) - ) - # recalculate like: - # values(raster.temp) <- as.numeric(values(raster.temp)/10) - # values(tl) <- as.numeric(values(tl)/10) - tl <- tl/10 - # values(tr) <- as.numeric(values(tr)/10) - tr <- tr/10 - # values(bl) <- as.numeric(values(bl)/10) - bl <- bl/10 - # values(br) <- as.numeric(values(br)/10) - br <- br/10 - # and mosaic: - gc() - top <- mosaic(tl,tr, fun = "mean") - rm(tl, tr) - gc() - bottom <- mosaic(bl,br, fun = "mean") - rm(bl, br) + # extent_rasterfile <- extent(raster.layer) + # # crop + # tl <- crop(raster.layer, extent(extent_rasterfile@xmin, + # (extent_rasterfile@xmin+extent_rasterfile@xmax)/2, + # (extent_rasterfile@ymin+extent_rasterfile@ymax)/2, + # extent_rasterfile@ymax) + # ) + # bl <- crop(raster.layer, extent(extent_rasterfile@xmin, + # (extent_rasterfile@xmin+extent_rasterfile@xmax)/2, + # extent_rasterfile@ymin, + # (extent_rasterfile@ymin+extent_rasterfile@ymax)/2) + # ) + # tr <- crop(raster.layer, extent((extent_rasterfile@xmin+extent_rasterfile@xmax)/2, + # extent_rasterfile@xmax, + # (extent_rasterfile@ymin+extent_rasterfile@ymax)/2, + # extent_rasterfile@ymax) + # ) + # br <- crop(raster.layer, extent((extent_rasterfile@xmin+extent_rasterfile@xmax)/2, + # 
extent_rasterfile@xmax, + # extent_rasterfile@ymin, + # (extent_rasterfile@ymin+extent_rasterfile@ymax)/2) + # ) + # # recalculate like: + # # values(raster.temp) <- as.numeric(values(raster.temp)/10) + # # values(tl) <- as.numeric(values(tl)/10) + # tl <- tl/10 + # # values(tr) <- as.numeric(values(tr)/10) + # tr <- tr/10 + # # values(bl) <- as.numeric(values(bl)/10) + # bl <- bl/10 + # # values(br) <- as.numeric(values(br)/10) + # br <- br/10 + # # and mosaic: + # gc() + # top <- mosaic(tl,tr, fun = "mean") + # rm(tl, tr) + # gc() + # bottom <- mosaic(bl,br, fun = "mean") + # rm(bl, br) + # gc() + # raster.layer <- mosaic(top, bottom, fun = "mean") + + raster.temp <- raster.layer + gain(raster.temp) <- 0.1 gc() - raster.layer <- mosaic(top, bottom, fun = "mean") - return(raster.layer) + return(raster.temp) +} + + +#' @title Get Download Size +#' @author Helge Jentsch +#' @description Helper function that returns the download size of a vector of URLs +#' +#' @param URLVector Character vector. Multiple vectors of valid URLs. 
+#' +#' @return Download size as double numeric value +#' @import httr +#' @export +getDownloadSize <- function(URLVector){ + # helper-function by Allan Cameron (https:\/\/stackoverflow.com\/a\/63852321) + download_size <- function(url){ + as.numeric(httr::HEAD(url)$headers$`content-length`) + } + filesizes <- NULL + for(i in URLVector){ + # collect file sizes + fileISize <- download_size(i) + # and add to another + filesizes <- sum(filesizes,fileISize) + # return(Downloadsize) + } + return(round(filesizes*0.000001, 2)) + # Download size in MB + + # get duration by calculating with https:\/\/gitlab.com\/hrbrmstr\/speedtest + # config <- spd_config() + # servers <- spd_servers(config=config) + # closest_servers <- spd_closest_servers(servers, config=config) + # speed <- spd_download_test(close_servers[1,], config=config) + # medspeed <- speed$median + # cat("Download-Zeit: \n", downloadSize/medspeed, "s \n") } diff --git a/README.md b/README.md index 69a1c2cab5bab69d2bc1ac3fad8622509a021bef..c0c8a7a8a88a6854a49c00ad7d8836285061657f 100644 --- a/README.md +++ b/README.md @@ -1,116 +1,9 @@ ## Welcome to the help-page of ClimDatDownloadR -To get started please proceed further down under the update section. - -_This R-package was developed as a student project for the masters programm Geography at the Universität Hamburg, Germany._ - -## Update 09.03.2021 - -As of today a new alpha version is released. -If you want to test the ClimDatDownloadR-package please feel free to install the package via - -`install.packages("https://gitlab.rrz.uni-hamburg.de/helgejentsch/climdatdownloadr/-/archive/master/climdatdownloadr-master.tar.gz", repos = NULL, type = "source")` - -and if you need the dependencies via - -`install.packages(c("gdalUtils", "httr", "ncdf4", "qpdf", "raster", "RCurl", "RefManageR", "rgdal", "stringr", "sf", "sp", "svMisc", "utils"), dependencies = TRUE)` +To get started please proceed [here](./articles/ClimDatDownloadR.html). 
-I would appreciate your feedback and possible bug reports. -If you find anything, please send an email to [helge.marc.ole.jentsch@uni-hamburg.de](<mailto:helge.marc.ole.jentsch@uni-hamburg.de>) -Thank you very much for using ClimDatDownloadR! -## A warm welcome - -Hello and welcome to the ClimDatDownloadR R-package. - -With this package **cli**mate **dat**asets provided by [Chelsa](http://chelsa-climate.org/) and [WorldClim](https://www.worldclim.org/) can be automatically **download**ed, clipped, and converted with **R**. -To start, you'll have to install the package and it's dependencies first, if not already done. Then you can activate the package with the `library`-function. -```{r setup} -# install.packages(c("gdalUtils", "httr", "ncdf4", "qpdf", "raster", "RCurl", "RefManageR", "rgdal", "stringr", "sf", "sp", "svMisc", "utils"), dependencies = TRUE) -# install.packages("https://gitlab.rrz.uni-hamburg.de/helgejentsch/climdatdownloadr/-/archive/master/climdatdownloadr-master.tar.gz", repos = NULL, type = "source") -library(ClimDatDownloadR) -``` -Very well, now that you have the package installed and attached, let's start with the data sets of the climatologies of Chelsa and WorldClim. - -## Overview of download-functions - -Besides the functions to download the currend climatologies of [Chelsa](http://chelsa-climate.org/) and [WorldClim](https://www.worldclim.org/), described below as [`Chelsa.Clim.download()`](../man/Chelsa.Clim.download.Rd) and [`WorldClim.HistClim.download()`](../man/WorldClim.HistClim.download.Rd), the package offers more download functions. -- Beginning with the 'Last Glacial Maximum'-data set (LGM), Chelsa offers a data set with parameters like precipitation, temperature, and also bioclim variables, driven by various models. It can be called with [`Chelsa.lgm.download()`](../man/Chelsa.lgm.download.Rd). 
-- [Chelsa's](http://chelsa-climate.org/) timeseries dataset can be downloaded via the [`Chelsa.timeseries.download()`](../man/Chelsa.timeseries.download.Rd)-function. -- For projected climatic conditions both [Chelsa](http://chelsa-climate.org/) and [WorldClim](https://www.worldclim.org/) provide various options. - - [Chelsa's](http://chelsa-climate.org/) options can be downloaded through the functions [`Chelsa.CMIP_5.download()`](../man/Chelsa.CMIP_5.download.Rd) and/or [`Chelsa.CRUts.download()`](../man/Chelsa.CRUts.download.Rd). - - [WorldClim's](https://www.worldclim.org/) options can be downloaded through the functions [`WorldClim.CMIP_5.download()`](../man/WorldClim.CMIP_5.download.Rd) and/or [`WorldClim.CMIP_6.download()`](../man/WorldClim.CMIP_6.download.Rd). - -## Download Climatologies - -In the help pages of [`Chelsa.Clim.download()`](../man/Chelsa.Clim.download.Rd) and [`WorldClim.HistClim.download()`](../man/WorldClim.HistClim.download.Rd) you can find further information about the handling of these functions. In fact running the functions all by itself bulk-downloads all the climatology data sets from the servers to your current working directory. -Let's start with a example of the Chelsa climatologies: -```{r setup} -Chelsa.Clim.download( - # first you'll have to choose your working directory - # don't worry about having a directory for every parameter you want to download - # ClimDatDownloadR sorts this out for you - save.location = "./", - # now you'll have to choose parameters. - # since there is the possibility to download more than one data set - # the parameters must be a string-vector input. - # Single parameters, however, can be just put in as a string. - # the valid parameter inputs can be found in the help (linked s.o.) - parameter = c("temp", "bio"), - # Now, since you chose "temp" and "bio" as input parameters, - # you can specify the months and bioclim-variables to download. - # If you want all of them, just leave the default values. 
- # It is crutial, however, that the inputs are integer number values. - month.var = c(1), # Here January was chosen to be downloaded for demonstration purposes - bio.var = c(1), # Here the first bioclim-variable was chosen to be downloaded for demonstration purposes - # For Chelsa a newer Version of their climatologies was published in 2019. - # They still got their old version still hosted on their website. - # So you can download it as well, if you want to reproduce some research you base your studies on. - version.var = "1.2", # Here the newer version is chosen - # Now you can choose whether you want the data set clipped - clipping = TRUE, # Here TRUE was chosen to show a basic introduction to the function - # Since "clipping" is enganged now you can specify the extent you want to have for your analysis - # This is possible via the parameters "clip.shapefile", "clip.extent", and "buffer" - clip.extent = c(-9,20,35,80), # Here the extent for Europe was used ... - buffer = 5, # ... with a 5 arc-degree buffer. - # Now, since some might prefer older file formats there is a possibility to convert - # clipped files and raw data into ESRI-ASCII format - convert.files.to.asc = FALSE, - # now you can stack the data ... - stacking.data = FALSE, - # ... and choose if you want to combine the raw data in a .zip-file ... - combine.raw.zip = FALSE, - # and whether raw data should be deleted. - delete.raw.data = FALSE, - # Finally you are presented with the option to save a bibliography file at the save location. 
- save.bib.file = TRUE -) -``` -___ -With this showing the basic principle of these functions, here is a example of a WorldClim climatology download: -```{r setup} -WorldClim.HistClim.download( - # As you can see, the structure of this function is very similar to the Chelsa-function - save.location = "./", - parameter = c("temp", "bio"), - month.var = c(1), - bio.var = c(1), - # Here the resolution of the downloaded data set must be added - # If no input is given all resolutions will be downloaded - resolution = "10m", # here 10 arc-minutes are chosen - # WorldClim also recently had an update to version 2.1 - version.var = "2.1", # Here the newer version is chosen - clipping = TRUE, - clip.extent = c(-9,20,35,80), - buffer = 5, - convert.files.to.asc = FALSE, - stacking.data = FALSE, - # here you can choose if you want to keep the downloaded zip-file - keep.raw.zip = FALSE, - delete.raw.data = FALSE, - save.bib.file = TRUE -) -``` +_This R-package was developed as a student project for the masters programm Geography at the Universität Hamburg, Germany._ diff --git a/build/vignette.rds b/build/vignette.rds index 54ba8e9d79c9e667986843a514e6b4077ccb0eb9..65cd7cfaa06d4d584dd6d8d7eeacbaf13312acc0 100644 Binary files a/build/vignette.rds and b/build/vignette.rds differ diff --git a/man/Chelsa.Clim.download.Rd b/man/Chelsa.Clim.download.Rd index 24eff119cb32d2f061e7fe093650b0e05349a03b..03ee4ad3ba9efdb853e12a602369d7c5f0c7baa5 100644 --- a/man/Chelsa.Clim.download.Rd +++ b/man/Chelsa.Clim.download.Rd @@ -1,5 +1,5 @@ % Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Chelsa_Download_functions.R +% Please edit documentation in R/Chelsa_new_download_functions.R \name{Chelsa.Clim.download} \alias{Chelsa.Clim.download} \title{Function for downloading the CHELSA climate dataset (1979-2013)} diff --git a/man/Chelsa.Clim.download.deprecated.Rd b/man/Chelsa.Clim.download.deprecated.Rd new file mode 100644 index 
0000000000000000000000000000000000000000..87b72ee867eb1df801f983095c582d5727cec01d --- /dev/null +++ b/man/Chelsa.Clim.download.deprecated.Rd @@ -0,0 +1,78 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Chelsa_Download_functions.R +\name{Chelsa.Clim.download.deprecated} +\alias{Chelsa.Clim.download.deprecated} +\title{Deprecated function for downloading the CHELSA climate dataset (1979-2013)} +\usage{ +Chelsa.Clim.download.deprecated( + save.location = "./", + parameter = c("prec", "temp", "tmax", "tmin", "bio"), + bio.var = c(1:19), + month.var = c(1:12), + version.var = c("1.2"), + clipping = FALSE, + clip.shapefile = NULL, + clip.extent = c(-180, 180, -90, 90), + buffer = 0, + convert.files.to.asc = FALSE, + stacking.data = FALSE, + combine.raw.zip = FALSE, + delete.raw.data = FALSE, + save.bib.file = TRUE +) +} +\arguments{ +\item{save.location}{string. Input where the datasets will be saved. \cr Default: Working Directory.} + +\item{parameter}{string (vector). Input of parameters which should be downloaded. \cr Default: \code{c("prec", "temp", "tmax", "tmin", "bio")}} + +\item{bio.var}{integer (vector). Input which monthly data should be downloaded. Only applicable to BIOCLIM variables. For further information see: \url{http://chelsa-climate.org/bioclim/}. \cr Default: \code{c(1:19)}} + +\item{month.var}{integer (vector). Input which monthly data should be downloaded. Only applicable to precipitation and temperature (average, maximum, minimum). \cr Default: \code{c(1:12)}} + +\item{version.var}{string (vector). Input which version of the dataset should be downloaded. Multiple selection is possible. \cr Default: \code{c("1.2")}} + +\item{clipping}{logical. Input whether the downloaded data should be clipped.\cr If \code{FALSE}: clip.shapefile, buffer, clip.extent will be ignored. \cr Default: \code{FALSE}} + +\item{clip.shapefile}{string. Input which shapefile should be used for clipping. 
\cr Default: \code{NULL}} + +\item{clip.extent}{numeric (vector). Input vector with four numeric values. This is following the input order c("xleft", "xright", "ybottom", "ytop"). \cr Default: \code{c(-180, 180, -90, 90)}} + +\item{buffer}{numeric. Input of decimal degrees of buffer around the shapefile and/or extent. \cr Default: \code{0}} + +\item{convert.files.to.asc}{logical. Input whether files should be converted into the ASCII format.\cr If \code{TRUE}: a new subdirectory is created and the rawdata is saved there. If \code{clipping} is \code{TRUE}: the clipped raster files are also saved as ASCII grids. \cr Default: \code{FALSE}} + +\item{stacking.data}{logical. Input whether the downloaded data should be stacked as a netCDF-rasterstack. \cr Default: \code{FALSE}} + +\item{combine.raw.zip}{logical. Should the downloaded raw-data be "zipped". \cr Default: \code{FALSE}} + +\item{delete.raw.data}{logical. Should the downloaded raw-data be deleted.\cr If \code{combine.raw.zip} is \code{TRUE}: raw-data is still available in the zipped file. \cr Default: \code{FALSE}} + +\item{save.bib.file}{logical. Whether a BibTex-citation file of the dataset should be provided in the Working directory. \cr Default: \code{TRUE}} +} +\value{ +CHELSA climate datasets for the period of 1979 - 2013 +} +\description{ +This function supports the download, pre-processing and management of CHELSA climate data comprising monthly precipitation sums in mm, monthly temperature (average, minimum, maximum) in degrees Celsius, and annual characteristics (19 bioclimatic variables). The spatial resolution of the downloaded data is 30 arc-seconds.\cr To allow pre-processing, clipping and buffering, conversion to ASCII-grids and stacking options are included.\cr Optionally, an output of a .bib-file of the cited literature can be retrieved.\cr For user convenience, saving directories will be created automatically. Also options to "zip" and/or delete the RAW-files are included. 
+} +\note{ +Please note that the downloaded data for temperature, and therefore also the first eleven bioclim-variables, are processed to °C with one significant decimal without offset and factor. Processing and conversion to other file-formats on a global dataset may take some time. +} +\examples{ +\dontrun{ +# Bioclim +Chelsa.Clim.download(parameter = "bio", bio.var = c(1,19)) +# Precipitation +Chelsa.Clim.download(parameter = "prec", month.var = c(1,12)) +} + +} +\references{ +D. N. Karger, O. Conrad, J. B{\"o}hner , et al. "Climatologies at high resolution for the earth's land surface areas". In: _Scientific Data_ 4.1 (Sep. 2017). DOI: 10.1038/sdata.2017.122. <URL: https://doi.org/10.1038/sdata.2017.122>. + +D. N. Karger, O. Conrad, J. B{\"o}hner , et al. _Data from: Climatologies at high resolution for the earth's land surface areas_. En. 2018. DOI: 10.5061/DRYAD.KD1D4. <URL: http://datadryad.org/stash/dataset/doi:10.5061/dryad.kd1d4>. +} +\author{ +Helge Jentsch +} diff --git a/man/Chelsa.Clim.download_deprecated.Rd b/man/Chelsa.Clim.download_deprecated.Rd new file mode 100644 index 0000000000000000000000000000000000000000..453926c5624eab4bf11fb3b756e4da94b1c25ece --- /dev/null +++ b/man/Chelsa.Clim.download_deprecated.Rd @@ -0,0 +1,78 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Chelsa_Download_functions.R +\name{Chelsa.Clim.download_deprecated} +\alias{Chelsa.Clim.download_deprecated} +\title{Deprecated function for downloading the CHELSA climate dataset (1979-2013)} +\usage{ +Chelsa.Clim.download_deprecated( + save.location = "./", + parameter = c("prec", "temp", "tmax", "tmin", "bio"), + bio.var = c(1:19), + month.var = c(1:12), + version.var = c("1.2"), + clipping = FALSE, + clip.shapefile = NULL, + clip.extent = c(-180, 180, -90, 90), + buffer = 0, + convert.files.to.asc = FALSE, + stacking.data = FALSE, + combine.raw.zip = FALSE, + delete.raw.data = FALSE, + save.bib.file = TRUE +) +} +\arguments{ 
+\item{save.location}{string. Input where the datasets will be saved. \cr Default: Working Directory.} + +\item{parameter}{string (vector). Input of parameters which should be downloaded. \cr Default: \code{c("prec", "temp", "tmax", "tmin", "bio")}} + +\item{bio.var}{integer (vector). Input which bioclimatic variables should be downloaded. Only applicable to BIOCLIM variables. For further information see: \url{http://chelsa-climate.org/bioclim/}. \cr Default: \code{c(1:19)}} + +\item{month.var}{integer (vector). Input which monthly data should be downloaded. Only applicable to precipitation and temperature (average, maximum, minimum). \cr Default: \code{c(1:12)}} + +\item{version.var}{string (vector). Input which version of the dataset should be downloaded. Multiple selection is possible. \cr Default: \code{c("1.2")}} + +\item{clipping}{logical. Input whether the downloaded data should be clipped.\cr If \code{FALSE}: clip.shapefile, buffer, clip.extent will be ignored. \cr Default: \code{FALSE}} + +\item{clip.shapefile}{string. Input which shapefile should be used for clipping. \cr Default: \code{NULL}} + +\item{clip.extent}{numeric (vector). Input vector with four numeric values. This is following the input order c("xleft", "xright", "ybottom", "ytop"). \cr Default: \code{c(-180, 180, -90, 90)}} + +\item{buffer}{numeric. Input of decimal degrees of buffer around the shapefile and/or extent. \cr Default: \code{0}} + +\item{convert.files.to.asc}{logical. Input whether files should be converted into the ASCII format.\cr If \code{TRUE}: a new subdirectory is created and the rawdata is saved there. If \code{clipping} is \code{TRUE}: the clipped raster files are also saved as ASCII grids. \cr Default: \code{FALSE}} + +\item{stacking.data}{logical. Input whether the downloaded data should be stacked as a netCDF-rasterstack. \cr Default: \code{FALSE}} + +\item{combine.raw.zip}{logical. Should the downloaded raw-data be "zipped". 
\cr Default: \code{FALSE}} + +\item{delete.raw.data}{logical. Should the downloaded raw-data be deleted.\cr If \code{combine.raw.zip} is \code{TRUE}: raw-data is still available in the zipped file. \cr Default: \code{FALSE}} + +\item{save.bib.file}{logical. Whether a BibTex-citation file of the dataset should be provided in the Working directory. \cr Default: \code{TRUE}} +} +\value{ +CHELSA climate datasets for the period of 1979 - 2013 +} +\description{ +This function supports the download, pre-processing and management of CHELSA climate data comprising monthly precipitation sums in mm, monthly temperature (average, minimum, maximum) in degrees Celsius, and annual characteristics (19 bioclimatic variables). The spatial resolution of the downloaded data is 30 arc-seconds.\cr To allow pre-processing, clipping and buffering, conversion to ASCII-grids and stacking options are included.\cr Optionally, an output of a .bib-file of the cited literature can be retrieved.\cr For user convenience, saving directories will be created automatically. Also options to "zip" and/or delete the RAW-files are included. +} +\note{ +Please note that the downloaded data for temperature, and therefore also the first eleven bioclim-variables, are processed to °C with one significant decimal without offset and factor. Processing and conversion to other file-formats on a global dataset may take some time. +} +\examples{ +\dontrun{ +# Bioclim +Chelsa.Clim.download(parameter = "bio", bio.var = c(1,19)) +# Precipitation +Chelsa.Clim.download(parameter = "prec", month.var = c(1,12)) +} + +} +\references{ +D. N. Karger, O. Conrad, J. B{\"o}hner , et al. "Climatologies at high resolution for the earth's land surface areas". In: _Scientific Data_ 4.1 (Sep. 2017). DOI: 10.1038/sdata.2017.122. <URL: https://doi.org/10.1038/sdata.2017.122>. + +D. N. Karger, O. Conrad, J. B{\"o}hner , et al. _Data from: Climatologies at high resolution for the earth's land surface areas_. En. 2018. 
DOI: 10.5061/DRYAD.KD1D4. <URL: http://datadryad.org/stash/dataset/doi:10.5061/dryad.kd1d4>. +} +\author{ +Helge Jentsch +} diff --git a/man/getDownloadSize.Rd b/man/getDownloadSize.Rd new file mode 100644 index 0000000000000000000000000000000000000000..eddd1506a7081ec5509a6ebc910270adb566fd9a --- /dev/null +++ b/man/getDownloadSize.Rd @@ -0,0 +1,20 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/auxiliary.R +\name{getDownloadSize} +\alias{getDownloadSize} +\title{Get Download Size} +\usage{ +getDownloadSize(URLVector) +} +\arguments{ +\item{URLVector}{Character vector containing one or more valid URLs.} +} +\value{ +Download size as double numeric value +} +\description{ +Helper function that returns the download size of a vector of URLs +} +\author{ +Helge Jentsch +}