Add opts argument and attr to outputs
mps9506 committed Oct 1, 2019
1 parent 6c2bfc7 · commit d1fa4cc
Showing 19 changed files with 156 additions and 44 deletions.
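Every exported download function below gains an opts argument, a list of curl options forwarded to crul::HttpClient(), and every returned object now carries the request URL as a url attribute. A minimal usage sketch of the new interface (the curl option names are standard curl/crul options; the values are illustrative, not taken from this commit):

library(wd4tx)

## forward curl options through the new opts argument
sites <- download_coastal_sites(all_stations = FALSE,
                                opts = list(timeout_ms = 10000,
                                            verbose = TRUE))

## every output now records the URL it was requested from
attr(sites, "url")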
1 change: 1 addition & 0 deletions DESCRIPTION
@@ -14,6 +14,7 @@ RoxygenNote: 6.1.1
 Imports:
     crul,
     fauxpas,
+    jsonlite,
     readr,
     sf
 BugReports: https://github.com/mps9506/wd4tx/isssues
9 changes: 8 additions & 1 deletion NAMESPACE
@@ -9,4 +9,11 @@ export(download_lep)
 export(download_reservoir)
 export(download_well)
 export(download_well_meta)
-import(crul)
+export(errs)
+export(get_download)
+importFrom(crul,HttpClient)
+importFrom(fauxpas,find_error_class)
+importFrom(jsonlite,fromJSON)
+importFrom(readr,read_csv)
+importFrom(sf,st_as_sf)
+importFrom(sf,st_as_sfc)
19 changes: 14 additions & 5 deletions R/download_coastal_geometry.R
@@ -2,11 +2,16 @@
 #'
 #' @param type character, required. One of \code{c("basin", "bay", "estuary",
 #'   "sub_watershed", "watershed")}
+#' @param opts list of curl options passed to crul::HttpClient()
 #'
-#' @return sf
+#' @return simple features data_frame
+#' @importFrom sf st_as_sfc
+#' @importFrom sf st_as_sf
+#' @importFrom jsonlite fromJSON
 #' @export
 #'
-download_coastal_geometry <- function(type) {
+download_coastal_geometry <- function(type,
+                                      opts = list()) {
   url <- sprintf("https://waterdatafortexas.org/coastal/api/geometries/%s",
                  type)
 
@@ -15,7 +20,9 @@ download_coastal_geometry <- function(type) {
 
   content <- get_download(url,
                           path = NULL,
-                          accept = "json")
+                          accept = "json",
+                          opts = opts)
+  attr.url <- attr(content, 'url')
 
   ## parse the json
   parsed_content <- jsonlite::fromJSON(content)
@@ -31,8 +38,10 @@ download_coastal_geometry <- function(type) {
 
   sf_content <- as_tibble(parsed_content$features %>%
                             select(-c(geometry, properties))) %>%
-    mutate(geometry = st_as_sfc(content, GeoJSON = TRUE)) %>%
-    st_as_sf()
+    mutate(geometry = sf::st_as_sfc(content, GeoJSON = TRUE)) %>%
+    sf::st_as_sf()
+
+  attr(sf_content, 'url') <- attr.url
 
   return(sf_content)
 }
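A short sketch of the reworked function from the caller's side (the type value is one of the documented choices; the curl option is illustrative):

basins <- download_coastal_geometry(type = "basin",
                                    opts = list(timeout_ms = 30000))
attr(basins, "url")  # request URL kept for provenance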
19 changes: 14 additions & 5 deletions R/download_coastal_hydrology.R
@@ -5,12 +5,13 @@
 #' @param resample character, optional. By default, data is returned in daily
 #'   interval. Specify one of \code{c("month", "year")} to get monthly or yearly
 #'   summed values.
+#' @param opts list of curl options passed to crul::HttpClient()
 #'
-#' @return
+#' @return tibble
 #' @export
 #'
-#' @examples
-download_coastal_hydrology <- function(geoid, resample = NULL) {
+download_coastal_hydrology <- function(geoid,
+                                       resample = NULL,
+                                       opts = list()) {
 
   url <- sprintf("https://waterdatafortexas.org/coastal/api/hydrology/%s/timeseries",
                  geoid)
@@ -19,11 +20,19 @@ download_coastal_hydrology <- function(geoid, resample = NULL) {
   else args = list(resample = resample)
 
   ## download
-  content <- get_download(url, path, args = args, accept = "json")
+  content <- get_download(url,
+                          path,
+                          args = args,
+                          accept = "json",
+                          opts = opts)
+  attr.url <- attr(content, 'url')
 
   ## parse the returned json
   content <- jsonlite::fromJSON(content)
 
   ## return as tibble (need to specify column types)
   content <- tibble::as_tibble(content)
+  attr(content, 'url') <- attr.url
 
   return(content)
 }
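For example, a monthly-resampled request might look like this (the geoid value is a placeholder, not a verified identifier):

flow <- download_coastal_hydrology(geoid = "example-geoid",  # placeholder
                                   resample = "month",
                                   opts = list(timeout_ms = 30000))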
16 changes: 12 additions & 4 deletions R/download_coastal_site_data.R
@@ -5,8 +5,9 @@
 #' @param parameter character, required.
 #' @param start_date character, required. Format must be \code{yyyy-mm-dd}.
 #' @param end_date character, required. Format must be \code{yyyy-mm-dd}.
-#' @param bin character. One of \code{"mon", "day", "hour"}. Default is
+#' @param binning character. One of \code{"mon", "day", "hour"}. Default is
 #'   \code{"day"}
+#' @param opts list of curl options passed to crul::HttpClient()
 #'
 #' @return tibble
 #' @export
@@ -18,24 +19,31 @@ download_coastal_site_data <- function(station,
                                        parameter,
                                        start_date,
                                        end_date,
-                                       bin = "day") {
+                                       binning = "day",
+                                       opts = list()) {
 
 
   url <- sprintf("https://waterdatafortexas.org/coastal/api/stations/%s/data/%s",
                  station, parameter)
   path = NULL
   args = list(start_date = start_date,
               end_date = end_date,
-              binning = bin)
+              binning = binning)
 
   ## download
-  content <- get_download(url, path, args = args, accept = "json")
+  content <- get_download(url,
+                          path,
+                          args = args,
+                          accept = "json",
+                          opts = opts)
+  attr.url <- attr(content, 'url')
 
   ## parse the returned json
   content <- jsonlite::fromJSON(content)
 
   ## return as tibble
   content <- tibble::as_tibble(content)
+  attr(content, 'url') <- attr.url
 
   return(content)
 }
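Because the bin argument is renamed to binning (matching the API's query parameter), existing calls need a one-word update. A sketch with placeholder station and parameter values:

## before this commit: download_coastal_site_data(..., bin = "hour")
df <- download_coastal_site_data(station = "example-station",      # placeholder
                                 parameter = "example-parameter",  # placeholder
                                 start_date = "2019-01-01",
                                 end_date = "2019-01-31",
                                 binning = "hour",
                                 opts = list(verbose = TRUE))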
12 changes: 10 additions & 2 deletions R/download_coastal_sites.R
@@ -4,10 +4,12 @@
 #' @param all_stations logical. Defaults is FALSE. If FALSE, returns all
 #'   stations for which actual data is available. If TRUE, returns all stations
 #'   regardless of data availability.
+#' @param opts list of curl options passed to crul::HttpClient()
 #'
 #' @return tibble
 #' @export
-download_coastal_sites <- function(all_stations = FALSE) {
+download_coastal_sites <- function(all_stations = FALSE,
+                                   opts = list()) {
   url <- "https://www.waterdatafortexas.org/coastal/api/stations"
   path <- NULL
 
@@ -16,13 +18,19 @@ download_coastal_sites <- function(all_stations = FALSE) {
   else args = list(all = "false")
 
   ## download
-  content <- get_download(url, path, args, accept = "json")
+  content <- get_download(url,
+                          path,
+                          args,
+                          accept = "json",
+                          opts = opts)
 
   ## parse the returned json
   content <- jsonlite::fromJSON(content)
+  attr.url <- attr(content, 'url')
 
   ## return as tibble
   content <- tibble::as_tibble(content)
+  attr(content, 'url') <- attr.url
 
   return(content)
 }
13 changes: 10 additions & 3 deletions R/download_lep.R
@@ -4,13 +4,15 @@
 #' @param quad required character. TWDB quad number.
 #' @param start_date required character. Start date of data. Specified as 4-digit year and 2-digit month. YYYY-MM
 #' @param end_date required character. End date of data. Specified as 4-digit year and 2-digit month. YYYY-MM
+#' @param opts list of curl options passed to crul::HttpClient()
 #'
 #' @return tibble
 #' @export
 
 download_lep <- function(quad = "710",
                          start_date = "2010-01",
-                         end_date = "2018-12") {
+                         end_date = "2018-12",
+                         opts = list()) {
 
   # check for valid arguments
   if(!is.character(quad)) {
@@ -33,10 +35,15 @@ download_lep <- function(quad = "710",
 
   content <- get_download(url = url,
                           path = path,
-                          accept = "csv")
+                          accept = "csv",
+                          opts = opts)
+  attr.url <- attr(content, 'url')
 
-  df <- readr::read_csv(content,
+  content <- readr::read_csv(content,
                         comment = "#")
+  attr(content, 'url') <- attr.url
+
+  return(content)
 }
11 changes: 8 additions & 3 deletions R/download_reservoir.R
@@ -6,15 +6,17 @@
 #' @param region_name optional character. Documentation comming soon.
 #' @param reservoir optional character. Documentation soon.
 #' @param period optional character. Documentation soon
+#' @param opts list of curl options passed to crul::HttpClient()
 #'
 #' @return dataframe
-#' @import crul
+#' @importFrom readr read_csv
 #' @export
 #'
 download_reservoir <- function(aggregate_by = NULL,
                                region_name = NULL,
                                reservoir = NULL,
-                               period = "historical") {
+                               period = "historical",
+                               opts = list()) {
   ## allowable aggregate_by
   ab = c("statewide",
          "planning region",
@@ -99,11 +101,14 @@ download_reservoir <- function(aggregate_by = NULL,
 
   content <- get_download(url = url,
                           path = path,
-                          accept = "csv")
+                          accept = "csv",
+                          opts = opts)
+  attr.url <- attr(content, 'url')
 
   df <- readr::read_csv(content,
                         comment = "#",
                         col_types = "Dddddddd")
+  attr(content, 'url') <- attr.url
 
   ## Note for future: I'd like to extract the commented metadata.
   ## However, it is only sometimes returned in the parsed csv.
13 changes: 9 additions & 4 deletions R/download_well.R
@@ -2,11 +2,13 @@
 #' Download "TWDB" Individual Well Data
 #'
 #' @param state_well_nmbr required character. State well number.
+#' @param opts list of curl options passed to crul::HttpClient()
 #'
 #' @return dataframe
 #' @export
 #'
-download_well <- function(state_well_nmbr) {
+download_well <- function(state_well_nmbr,
+                          opts = list()) {
 
   # check for valid arguments
   if(!is.character(state_well_nmbr)) {
@@ -18,10 +20,13 @@ download_well <- function(state_well_nmbr) {
 
   content <- get_download(url = url,
                           path = path,
-                          accept = "csv")
+                          accept = "csv",
+                          opts = opts)
+  attr.url <- attr(content, 'url')
 
-  df <- readr::read_csv(content,
+  content <- readr::read_csv(content,
                         comment = "#")
+  attr(content, 'url') <- attr.url
 
-  return(df)
+  return(content)
 }
12 changes: 10 additions & 2 deletions R/download_well_meta.R
@@ -1,16 +1,24 @@
 
 #' Download "TWDB" Well Metadata
 #'
+#' @param opts list of curl options passed to crul::HttpClient()
 #' @return sf
 #' @export
 #'
-download_well_meta <- function() {
+download_well_meta <- function(opts = list()) {
   ## downloads geojson and returns a sf tibble
   url <- "https://www.waterdatafortexas.org"
   path <- paste0("groundwater/wells.geojson")
 
-  content <- get_download(url, path, accept = "json")
+  content <- get_download(url,
+                          path,
+                          accept = "json",
+                          opts = opts)
+  attr.url <- attr(content, 'url')
 
   content <- sf::read_sf(content)
+  attr(content, 'url') <- attr.url
 
   return(content)
 }
35 changes: 30 additions & 5 deletions R/utils.R
@@ -1,9 +1,22 @@
-# function to make the http request for csv downloads
+#' HTTP Request Function
+#'
+#' Internal function for making http requests.
+#' @param url character. Base url.
+#' @param path character
+#' @param args query argument list
+#' @param accept character. One of \code{c("csv", "json")}
+#' @param opts curl options to crul::HttpClient. Must be a list.
+#'
+#' @return Parsed json content or csv
+#' @export
+#' @importFrom crul HttpClient
+#' @keywords internal
+#' @noRd
 get_download <- function(url,
                          path,
                          args = list(),
                          accept = "csv",
-                         ...) {
+                         opts = list()) {
 
   if(accept == "csv") {
     headers = list(Accept = "text/csv")
@@ -14,20 +27,32 @@ get_download <- function(url,
   }
 
   cli <- crul::HttpClient$new(url = url,
-                              headers = headers)
+                              headers = headers,
+                              opts = opts)
 
-  res <- cli$get(path)
+  res <- cli$get(path,
+                 query = args)
 
   errs(res)
 
   content <- res$parse("UTF-8")
+  attr(content, 'url') <- res$url
 
   return(content)
 }
 
 
 
-# return http errors gracefully
+#' Gracefully return http errors
+#'
+#' Internal function for returning http error message when making http requests.
+#' @param x http request
+#'
+#' @return error message or nothing
+#' @export
+#' @keywords internal
+#' @noRd
+#' @importFrom fauxpas find_error_class
 errs <- function(x) {
   if (x$status_code > 201) {
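For reference, the request the updated helper issues is roughly equivalent to the following direct crul calls (a behavioral sketch, assuming the truncated json branch sets Accept to "application/json"; the URL and query mirror download_coastal_sites()):

cli <- crul::HttpClient$new(url = "https://www.waterdatafortexas.org/coastal/api/stations",
                            headers = list(Accept = "application/json"),
                            opts = list(timeout_ms = 10000))  # caller-supplied opts
res <- cli$get(path = NULL, query = list(all = "false"))
errs(res)                          # raises a fauxpas error class when status > 201
content <- res$parse("UTF-8")
attr(content, "url") <- res$url    # provenance attribute added by this commit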
6 changes: 4 additions & 2 deletions man/download_coastal_geometry.Rd

(Generated documentation file; diff not rendered.)