From c1a688d3cae9065a2405873561189d7d46190607 Mon Sep 17 00:00:00 2001
From: HJe <helge.marc.ole.jentsch@uni-hamburg.de>
Date: Wed, 3 Nov 2021 14:05:44 +0100
Subject: [PATCH] 211103 1400: - Worldclim availability check update.
 url.exists() gave erroneous results

---
 DESCRIPTION                            | 14 ++---
 NAMESPACE                              |  5 +-
 R/Chelsa_Download_functions.R          | 12 ++--
 R/WorldClim_download_functions.R       | 39 ++++++++-----
 README.md                              |  2 +-
 man/Chelsa.Clim.download.deprecated.Rd | 78 ++++++++++++++++++++++++++
 6 files changed, 122 insertions(+), 28 deletions(-)
 create mode 100644 man/Chelsa.Clim.download.deprecated.Rd

diff --git a/DESCRIPTION b/DESCRIPTION
index fc0fc50..8354f16 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -15,7 +15,7 @@ Authors@R: c(person("Helge", "Jentsch",
   email = "nadine.kaul@studium.uni-hamburg.de", 
   role = c("ctb")))
 Maintainer: Helge Jentsch <helge.marc.ole.jentsch@uni-hamburg.de>
-Description: This package, `ClimDatDownloadR`, provides functions to download, pre-process and 
+Description: This package `ClimDatDownloadR` provides functions to download, pre-process and 
     manage CHELSA and Worldclim climate data sets in respective available 
     spatial and temporal resolutions. The output is provided as GEOTIFF, 
     ASCII, or netCDF format. The package's main purpose is to simplify and 
@@ -31,16 +31,16 @@ YEAR: 2021
 License: MIT + file LICENSE
 Encoding: UTF-8
 Depends: R (>= 3.6), raster (>= 3.1-5)
-Imports: gdalUtils(>= 2.0.3.2), httr (>= 1.4.1), ncdf4 (>= 1.17), RCurl
-        (>= 1.98), RefManageR (>= 1.2.12), rgdal (>= 1.5-10), stringr
-        (>= 1.4.0), sf (>= 0.9-4), sp (>= 1.4-1), svMisc (>= 1.1.0),
-        utils
+Imports: curl (>= 4.3.2), gdalUtils(>= 2.0.3.2), httr (>= 1.4.1), ncdf4
+        (>= 1.17), qpdf (>= 1.1), RCurl (>= 1.98), RefManageR (>=
+        1.2.12), rgdal (>= 1.5-10), stringr (>= 1.4.0), sf (>= 0.9-4),
+        sp (>= 1.4-1), svMisc (>= 1.1.0), utils
 RoxygenNote: 7.1.2
-Suggests: knitr, testthat, usethis, qpdf, rmarkdown
+Suggests: knitr, testthat, usethis, rmarkdown
 VignetteBuilder: knitr
 SystemRequirements: Tested with at least 4 GB RAM.
 NeedsCompilation: no
-Packaged: 2021-09-28 15:14:45 UTC; helge
+Packaged: 2021-11-03 12:10:35 UTC; helge
 Author: Helge Jentsch [aut, cre],
   Maria Bobrowski [aut],
   Johannes Weidinger [aut],
diff --git a/NAMESPACE b/NAMESPACE
index 766a588..5c92083 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -3,7 +3,7 @@
 export(Chelsa.CMIP_5.download)
 export(Chelsa.CRUts.download)
 export(Chelsa.Clim.download)
-export(Chelsa.Clim.download_deprecated)
+export(Chelsa.Clim.download.deprecated)
 export(Chelsa.lgm.download)
 export(Chelsa.timeseries.download)
 export(WorldClim.CMIP_5.download)
@@ -25,7 +25,10 @@ import(stringr)
 importFrom(RefManageR,ReadBib)
 importFrom(RefManageR,ReadCrossRef)
 importFrom(RefManageR,WriteBib)
+importFrom(curl,curl_fetch_memory)
 importFrom(utils,download.file)
 importFrom(utils,sessionInfo)
+importFrom(utils,setTxtProgressBar)
+importFrom(utils,txtProgressBar)
 importFrom(utils,unzip)
 importFrom(utils,zip)
diff --git a/R/Chelsa_Download_functions.R b/R/Chelsa_Download_functions.R
index 5ddd1c2..0a279c9 100644
--- a/R/Chelsa_Download_functions.R
+++ b/R/Chelsa_Download_functions.R
@@ -35,11 +35,11 @@
 #'@import RCurl
 #'@import ncdf4
 #'@import raster
-#'@importFrom utils unzip download.file
+#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar
 #'
 #'
 #'@export
-Chelsa.Clim.download_deprecated<- function(save.location = "./",
+Chelsa.Clim.download.deprecated<- function(save.location = "./",
                                            parameter = c("prec", "temp", "tmax", "tmin", "bio"),
                                            bio.var = c(1:19),
                                            month.var = c(1:12),
@@ -473,7 +473,7 @@ Chelsa.Clim.download_deprecated<- function(save.location = "./",
 #'@import ncdf4
 #'@import raster
 #'@import httr
-#'@importFrom utils unzip download.file
+#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar
 #'
 #'
 #'@export
@@ -805,7 +805,7 @@ Chelsa.CMIP_5.download <- function(save.location = "./",
 #'@import RCurl
 #'@import ncdf4
 #'@import raster
-#'@importFrom utils unzip download.file
+#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar
 #'
 #'
 #'@export
@@ -1163,7 +1163,7 @@ Chelsa.lgm.download <- function(save.location = "./",
 #'@import ncdf4
 #'@import raster
 #'@import httr
-#'@importFrom utils unzip download.file
+#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar
 #'
 #'
 #'@export
@@ -1432,7 +1432,7 @@ Chelsa.timeseries.download <- function(save.location = "./",
 #'@import ncdf4
 #'@import raster
 #'@import httr
-#'@importFrom utils unzip download.file
+#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar
 #'
 #'
 #'@export
diff --git a/R/WorldClim_download_functions.R b/R/WorldClim_download_functions.R
index 5f5c474..649f67f 100644
--- a/R/WorldClim_download_functions.R
+++ b/R/WorldClim_download_functions.R
@@ -42,10 +42,11 @@
 #' }
 #'
 #'@import stringr
+#'@importFrom curl curl_fetch_memory
 #'@import RCurl
 #'@import ncdf4
 #'@import raster
-#'@importFrom utils unzip download.file
+#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar
 #'
 #'@export
 WorldClim.HistClim.download <- function(save.location = "./",
@@ -158,7 +159,7 @@ WorldClim.HistClim.download <- function(save.location = "./",
                                  stop())
       # Thrid: Through resolution -----------------------------------------------
       for (res in resolution) {
-        temp.temp.save.location <- paste0(temp.save.location,"WorldClim_",
+        temp.temp.save.location <- paste0(temp.save.location,"/WorldClim_",
                                           vers, "_", parm.temp, "_", res, "/")
         # if not already created, create new directory
         if(!dir.exists(temp.temp.save.location)){
@@ -185,15 +186,17 @@ WorldClim.HistClim.download <- function(save.location = "./",
               immediate. = TRUE)
           }
           # destination file
-          dest.temp <- paste0(temp.temp.save.location, "WC_",vers, "_",
-                              res, "_", parm.temp, "_", "_Bulk.zip")
+          dest.temp <- paste0(temp.temp.save.location, "/WC_",vers, "_",
+                              res, "_", parm.temp, "_Bulk.zip")
           if(!(parm.temp == "bio" & res.temp == "30s")){
             if(!file.exists(dest.temp)){
               # create a variable for the later requested Download-URL to avoid
               # requireing multiple changes, if the link changes.
               URL.temp <- paste0(URL.1 , parm.temp, "_", res.temp, "_bil.zip")
+              urlCheck <- curl_fetch_memory(url = URL.temp)$status_code
               # check if URL is available
-              if(url.exists(URL.temp)){
+              # if(url.exists(URL.temp)){
+              if(urlCheck == 200){
                 # clear up the temporary directory
                 unlink(list.files(tempdir(), recursive = T, full.names=T))
                 # download file to save location
@@ -228,8 +231,10 @@ WorldClim.HistClim.download <- function(save.location = "./",
                 # create a variable for the later requested Download-URL to avoid
                 # requiring multiple changes, if the link changes.
                 URL.temp <- paste0(URL.1 , parm.temp, div, "_", res.temp, "_bil.zip")
+                urlCheck <- curl_fetch_memory(url = URL.temp)$status_code
                 # check if URL is available
-                if(url.exists(URL.temp)){
+                # if(url.exists(URL.temp)){
+                if(urlCheck == 200){
                   # clear up the temporary directory
                   unlink(list.files(tempdir(), recursive = T, full.names=T))
                   # download file to save location
@@ -379,9 +384,10 @@ WorldClim.HistClim.download <- function(save.location = "./",
             # create a variable for the later requested Download-URL to avoid
             # requireing multiple changes, if the link changes.
             URL.temp <- paste0(URL.1, res.temp, "_", parm.temp, ".zip")
-            paste0(URL.temp)
+            urlCheck <- curl_fetch_memory(url = URL.temp)$status_code
             # check if URL is available
-            if(url.exists(URL.temp)){
+            # if(url.exists(URL.temp)){
+            if(urlCheck == 200){
               # clear up the temporary directory
               unlink(list.files(tempdir(), recursive = T, full.names=T))
               # download file to save location
@@ -609,10 +615,11 @@ WorldClim.HistClim.download <- function(save.location = "./",
 #' }
 #'
 #'@import stringr
+#'@importFrom curl curl_fetch_memory
 #'@import RCurl
 #'@import ncdf4
 #'@import raster
-#'@importFrom utils unzip download.file
+#'@importFrom utils unzip download.file setTxtProgressBar txtProgressBar
 #'
 #'@export
 WorldClim.CMIP_5.download <- function(save.location = "./",
@@ -792,7 +799,7 @@ WorldClim.CMIP_5.download <- function(save.location = "./",
                                  "2070" = "70",
                                  next
             )
-            temp.temp.save.location <- paste0(temp.save.location,"WorldClim_CMIP5_",
+            temp.temp.save.location <- paste0(temp.save.location,"/WorldClim_CMIP5_",
                                               parm.temp, "_", res,"_",
                                               gcm, "_", rcp, "_",year,"/")
             # if not already created, create new directory
@@ -809,8 +816,10 @@ WorldClim.CMIP_5.download <- function(save.location = "./",
               # create a variable for the later requested Download-URL to avoid
               # requireing multiple changes, if the link changes.
               URL.temp <- paste0(URL.1, URL.2, URL.4, parm.temp, year.temp, ".zip")
+              urlCheck <- curl_fetch_memory(url = URL.temp)$status_code
               # check if URL is available
-              if(url.exists(URL.temp)){
+              # if(url.exists(URL.temp)){
+              if(urlCheck == 200){
                 # clear up the temporary directory
                 unlink(list.files(tempdir(), recursive = T, full.names=T))
                 # download file to save location
@@ -1106,7 +1115,9 @@ WorldClim.CMIP_5.download <- function(save.location = "./",
 #'@import RCurl
 #'@import ncdf4
 #'@import raster
-#'@importFrom utils download.file unzip
+#'@importFrom utils download.file unzip setTxtProgressBar txtProgressBar
+#'@importFrom curl curl_fetch_memory
+
 
 #'@export
 WorldClim.CMIP_6.download <- function(save.location = "./",
@@ -1247,8 +1258,10 @@ WorldClim.CMIP_6.download <- function(save.location = "./",
               # create a variable for the later requested Download-URL to avoid
               # requireing multiple changes, if the link changes.
               URL.temp <- paste0(URL.1, URL.2,URL.3, ".zip")
+              urlCheck <- curl_fetch_memory(url = URL.temp)$status_code
               # check if URL is available
-              if(url.exists(URL.temp)){
+              # if(url.exists(URL.temp)){
+              if(urlCheck == 200){
                 # clear up the temporary directory
                 unlink(list.files(tempdir(), recursive = T, full.names=T))
                 # download file to save location
diff --git a/README.md b/README.md
index 33c9d8e..c0c8a7a 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 ## Welcome to the help-page of ClimDatDownloadR
 
 
-To get started please proceed [here](https://gitlab.rrz.uni-hamburg.de/helgejentsch/climdatdownloadr/-/blob/master/README.md). 
+To get started please proceed [here](./articles/ClimDatDownloadR.html). 
 
 
 
diff --git a/man/Chelsa.Clim.download.deprecated.Rd b/man/Chelsa.Clim.download.deprecated.Rd
new file mode 100644
index 0000000..87b72ee
--- /dev/null
+++ b/man/Chelsa.Clim.download.deprecated.Rd
@@ -0,0 +1,78 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/Chelsa_Download_functions.R
+\name{Chelsa.Clim.download.deprecated}
+\alias{Chelsa.Clim.download.deprecated}
+\title{Deprecated function for downloading the CHELSA climate dataset (1979-2013)}
+\usage{
+Chelsa.Clim.download.deprecated(
+  save.location = "./",
+  parameter = c("prec", "temp", "tmax", "tmin", "bio"),
+  bio.var = c(1:19),
+  month.var = c(1:12),
+  version.var = c("1.2"),
+  clipping = FALSE,
+  clip.shapefile = NULL,
+  clip.extent = c(-180, 180, -90, 90),
+  buffer = 0,
+  convert.files.to.asc = FALSE,
+  stacking.data = FALSE,
+  combine.raw.zip = FALSE,
+  delete.raw.data = FALSE,
+  save.bib.file = TRUE
+)
+}
+\arguments{
+\item{save.location}{string. Input where the datasets will be saved. \cr Default: Working Directory.}
+
+\item{parameter}{string (vector). Input of parameters which should be downloaded. \cr Default: \code{c("prec", "temp", "tmax", "tmin", "bio")}}
+
+\item{bio.var}{integer (vector). Input which monthly data should be downloaded. Only applicable to BIOCLIM variables. For further information see: \url{http://chelsa-climate.org/bioclim/}. \cr Default: \code{c(1:19)}}
+
+\item{month.var}{integer (vector). Input which monthly data should be downloaded. Only applicable to precipitation and temperature (average, maximum, minimum). \cr Default: \code{c(1:12)}}
+
+\item{version.var}{string (vector). Input which version of the dataset should be downloaded. Multiple selection is possible. \cr Default:  \code{c("1.2")}}
+
+\item{clipping}{logical. Input whether the downloaded data should be clipped.\cr If \code{FALSE}: clip.shapefile, buffer, clip.extent will be ignored. \cr Default: \code{FALSE}}
+
+\item{clip.shapefile}{string. Input which shapefile should be used for clipping. \cr Default: \code{NULL}}
+
+\item{clip.extent}{numeric (vector). Input vector with four numeric values. This is following the input order c("xleft", "xright", "ybottom", "ytop"). \cr Default: \code{c(-180, 180, -90, 90)}}
+
+\item{buffer}{numeric. Input of decimal degrees of buffer around the shapefile and/or extent. \cr Default: \code{0}}
+
+\item{convert.files.to.asc}{logical. Input whether files should be converted into the ASCII format.\cr If \code{TRUE}: a new subdirectory is created and the rawdata is saved there. If \code{clipping} is \code{TRUE}: the clipped raster files are also saved as ASCII grids. \cr  Default: \code{FALSE}}
+
+\item{stacking.data}{logical. Input whether the downloaded data should be stacked as a netCDF-rasterstack. \cr Default: \code{FALSE}}
+
+\item{combine.raw.zip}{logical. Should the downloaded raw-data be "zipped". \cr Default: \code{FALSE}}
+
+\item{delete.raw.data}{logical. Should the downloaded raw-data be deleted.\cr If \code{combine.raw.zip} is \code{TRUE}: raw-data is still available in the zipped file. \cr Default: \code{FALSE}}
+
+\item{save.bib.file}{logical. Whether a BibTex-citation file of the dataset should be provided in the Working directory. \cr Default: \code{TRUE}}
+}
+\value{
+CHELSA climate datasets for the period of 1979 - 2013
+}
+\description{
+This function supports the download, pre-processing and management of CHELSA climate data comprising monthly precipitation sums in mm, monthly temperature (average, minimum, maximum) in degrees Celsius, and annual characteristics (19 bioclimatic variables). The spatial resolution of the downloaded data is 30 arc-seconds.\cr To allow pre-processing, clipping and buffering, conversion to ASCII-grids and stacking options are included.\cr Optionally, an output of a .bib-file of the cited literature can be retrieved.\cr For user convenience, saving directories will be created automatically. Also options to "zip" and/or delete the RAW-files are included.
+}
+\note{
+Please note that the downloaded data for temperature and therefore also the first eleven bioclim-variables are processed to °C with one significant decimal without offset and factor. Processing and conversion to other file-formats on a global dataset may take some time.
+}
+\examples{
+\dontrun{
+# Bioclim
+Chelsa.Clim.download(parameter = "bio", bio.var = c(1,19))
+# Precipitation
+Chelsa.Clim.download(parameter = "prec", month.var = c(1,12))
+}
+
+}
+\references{
+D. N. Karger, O. Conrad, J. B{\"o}hner , et al. "Climatologies at high resolution for the earth's land surface areas". In: _Scientific Data_ 4.1 (Sep. 2017). DOI: 10.1038/sdata.2017.122. <URL: https://doi.org/10.1038/sdata.2017.122>.
+
+D. N. Karger, O. Conrad, J. B{\"o}hner , et al. _Data from: Climatologies at high resolution for the earth's land surface areas_. En. 2018. DOI: 10.5061/DRYAD.KD1D4. <URL: http://datadryad.org/stash/dataset/doi:10.5061/dryad.kd1d4>.
+}
+\author{
+Helge Jentsch
+}
-- 
GitLab