
Initial implementation of geoconnex reference features for #386 (#388)

Merged · 6 commits · May 23, 2024
2 changes: 1 addition & 1 deletion DESCRIPTION
@@ -1,7 +1,7 @@
Package: nhdplusTools
Type: Package
Title: NHDPlus Tools
Version: 1.1.0
Version: 1.2.0
Authors@R: c(person(given = "David",
family = "Blodgett",
role = c("aut", "cre"),
2 changes: 2 additions & 0 deletions NAMESPACE
@@ -5,6 +5,7 @@ export(align_nhdplus_names)
export(calculate_arbolate_sum)
export(calculate_total_drainage_area)
export(disambiguate_flowline_indexes)
export(discover_geoconnex_reference)
export(discover_nhdplus_id)
export(discover_nldi_characteristics)
export(download_nhd)
@@ -25,6 +26,7 @@ export(get_characteristics_metadata)
export(get_elev_along_path)
export(get_flowline_index)
export(get_gagesII)
export(get_geoconnex_reference)
export(get_hr_data)
export(get_huc)
export(get_hydro_location)
9 changes: 9 additions & 0 deletions NEWS.md
@@ -1,3 +1,12 @@
nhdplusTools 1.2.0
==========

This release introduces a new data source, https://reference.geoconnex.us, and a vignette providing a broad overview of data access with nhdplusTools.

- Fixed bug with use of memoise cache #365
- Added `vignette("get_data_overview")` for WaterSciCon24
- Added `get_geoconnex_reference()` and `discover_geoconnex_reference()`

nhdplusTools 1.1.0
==========

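The two new exports work together; a minimal sketch, reusing the area of interest from the function documentation later in this diff:

library(nhdplusTools)

discover_geoconnex_reference()  # table of available collections and attributes

AOI <- sf::st_as_sfc(sf::st_bbox(c(xmin = -89.56684, ymin = 42.99816,
                                   xmax = -89.24681, ymax = 43.17192),
                                 crs = "+proj=longlat +datum=WGS84 +no_defs"))

get_geoconnex_reference(AOI, type = "hu04")  # hu04 features intersecting the box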
207 changes: 207 additions & 0 deletions R/get_geoconnex.R
@@ -0,0 +1,207 @@
#' memoise get json
#' @description
#' Attempts to get a URL as JSON and return the parsed content.
#'
#' Returns NULL if anything fails.
#'
#' @param url character url to get
#' @return list containing parsed json on success, NULL otherwise
#' @noRd
mem_get_json <- memoise::memoise(\(url) {
  tryCatch({
    retn <- httr::RETRY("GET", url, httr::accept_json())

    if(retn$status_code == 200 & grepl("json", retn$headers$`content-type`)) {
      return(httr::content(retn, simplifyVector = FALSE, type = "application/json"))
    } else {
      warning("Can't access json from ", url)
      return(NULL)
    }
  }, error = function(e) {
    warning("Error accessing ", url, "\n\n", e)
    return(NULL)
  })
})

# keep elements of a list of lists whose `key` entry equals `val`;
# optionally also match on "type" and return only the nth hit
filter_list_kvp <- \(l, key, val, type = NULL, n = NULL) {
  ret <- l[vapply(l, \(x) x[[key]] == val, TRUE)]

  if(!is.null(type)) {
    ret <- ret[vapply(ret, \(x) x[["type"]] == type, TRUE)]
  }

  if(!is.null(n)) {
    ret <- ret[[n]]
  }

  ret
}

# pipe-friendly alias for `[[`
extract <- `[[`

#' discover geoconnex reference feature layers
#' @description
#' Queries the geoconnex.us reference feature server for available layers and
#' attributes.
#'
#' @return data.frame containing layers available and fields that are available to query.
#' @export
#' @examples
#' discover_geoconnex_reference()
#'
discover_geoconnex_reference <- function() {

  landing <- mem_get_json(get("gocnx_ref_base_url", envir = nhdplusTools_env))

  collections <- landing$links |>
    filter_list_kvp("rel", "data", n = 1) |>
    extract("href") |>
    mem_get_json()

  collections_meta <- dplyr::bind_rows(
    lapply(collections$collections,
           \(x) c(x[c("id", "title", "description")],
                  list(url = filter_list_kvp(x$links,
                                             "rel", "self", n = 1)$href))))

  q_ables <- dplyr::bind_rows(lapply(collections$collections, \(x) {
    q <- filter_list_kvp(x$links, "rel", "http://www.opengis.net/def/rel/ogc/1.0/queryables",
                         type = "application/schema+json", n = 1)$href |>
      mem_get_json() |>
      (\(y) list(id = x$id, qs = y$properties))()

    q$qs <- q$qs[vapply(q$qs, \(x) all(c("title", "type") %in% names(x)), TRUE)]

    data.frame(id = rep(q$id, length(q$qs)),
               attribute = vapply(q$qs, \(x) x$title, ""),
               type = vapply(q$qs, \(x) x$type, ""), row.names = NULL)
  }))

  dplyr::left_join(collections_meta, q_ables, by = "id")
}
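The table assembled here joins collection metadata to queryable attributes, so it can be filtered directly. A brief sketch (the "hu04" id is taken from the examples further down in this file):

ref <- discover_geoconnex_reference()

unique(ref$id)           # ids usable as `type` in get_geoconnex_reference()
ref[ref$id == "hu04", ]  # queryable attributes for a single collection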

get_features_paging <- function(base_call, limit = 1000, status = TRUE) {

  # append query parameters with "?" or "&" depending on whether the
  # base call already carries a query string
  if(!grepl("\\?", base_call)) {
    base_call <- paste0(base_call, "?")
  } else {
    base_call <- paste0(base_call, "&")
  }

  offset <- 0

  keep_going <- TRUE

  if(status) message("Starting download of first set of features.")

  # pre-allocate a large list to collect pages of features
  out <- rep(list(list()), 1e6)
  i <- 1

  while(keep_going) {
    req <- paste0(base_call, "limit=", limit, "&offset=", offset)

    out[[i]] <- try(read_sf(req))

    if(!inherits(out[[i]], "sf")) {
      # confirm the request succeeded before touching nrow()
      warning("Something went wrong requesting data.")
      keep_going <- FALSE
    } else if(nrow(out[[i]]) == limit) {
      # a full page means more features may remain -- request the next page
      offset <- offset + limit
    } else {
      # a short page means everything has been retrieved
      keep_going <- FALSE
    }

    if(status & keep_going) message("Starting next download from ", offset, ".")

    i <- i + 1
  }

  out <- out[1:(i - 1)]

  sf::st_sf(dplyr::bind_rows(unify_types(out)))
}

#' get geoconnex reference feature layers
#' @description
#' Queries the geoconnex reference feature server for features of interest.
#'
#' @param AOI bbox, sf polygon or point, or a URL that will return an sf object when passed to
#' \link[sf]{read_sf}
#' @param type character the feature type chosen from \link{discover_geoconnex_reference}
#' @inheritParams query_usgs_geoserver
#' @param status logical whether to print status messages
#' @return sf data.frame containing requested reference features
#' @export
#' @examples
#' \donttest{
#'
#' dplyr::distinct(discover_geoconnex_reference()[c("id", "title")])
#'
#' AOI <- sf::st_as_sfc(sf::st_bbox(c(xmin = -89.56684, ymin = 42.99816,
#'                                    xmax = -89.24681, ymax = 43.17192),
#'                                  crs = "+proj=longlat +datum=WGS84 +no_defs"))
#'
#' get_geoconnex_reference(AOI, type = "hu04")
#'
#' get_geoconnex_reference("https://geoconnex.us/ref/mainstems/315626", type = "hu04")
#'
#' AOI <- sf::st_sfc(sf::st_point(c(-89.56684, 42.99816)),
#'                   crs = "+proj=longlat +datum=WGS84 +no_defs")
#'
#' get_geoconnex_reference(AOI, type = "hu04", buffer = 100000)
#'
#' }
get_geoconnex_reference <- function(AOI,
                                    type = NULL,
                                    t_srs = NULL,
                                    buffer = 0.5,
                                    status = TRUE) {

  avail <- discover_geoconnex_reference()

  if(is.null(type)) {
    warning("type is required, returning choices.")
    return(avail)
  }

  base <- get("gocnx_ref_base_url", envir = nhdplusTools_env)

  if(!type %in% avail$id) stop("Type must be in available ids. ",
                               "Check discover_geoconnex_reference()")

  base_call <- paste0(base, "collections/", type, "/items")

  if(is.character(AOI)) {

    AOI <- try(sf::read_sf(AOI))

    if(!inherits(AOI, "sf")) {
      stop("AOI did not return an sf object when read")
    }

  }

  if(!inherits(AOI, "bbox")) {
    # buffer point input before reducing the AOI to a bounding box
    if(any(grepl("point", sf::st_geometry_type(AOI), ignore.case = TRUE))) {
      AOI <- sf::st_buffer(AOI, units::as_units(buffer, "m"))
    }

    AOI <- st_bbox(AOI)
  }

  # pull features with paging if necessary

  bbox <- paste(AOI, collapse = ",")

  base_call <- paste0(base_call, "?bbox=", bbox)

  out <- get_features_paging(base_call, status = status)

  if(!is.null(t_srs)) out <- st_transform(out, t_srs)

  out
}
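For orientation, the request assembled for the bounding-box example in the documentation above would look roughly like the following, assuming the gocnx_ref_base_url option resolves to "https://reference.geoconnex.us/":

base_call <- "https://reference.geoconnex.us/collections/hu04/items"

# first page requested by get_features_paging()
paste0(base_call, "?bbox=-89.56684,42.99816,-89.24681,43.17192",
       "&limit=1000&offset=0")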

4 changes: 2 additions & 2 deletions R/get_network.R
@@ -219,7 +219,7 @@ navigate_network <- function(start, mode = "UM", network = NULL,
stop("If start is numeric it must be a comid integer")
}

start <- as.integer(start)
start <- floor(start)
start_comid <- start

}
@@ -332,7 +332,7 @@ navigate_network <- function(start, mode = "UM", network = NULL,
# now trim start if requested
if(trim_start) {

if(!is.integer(start)) {
if(!(is.numeric(start) && start %% 1 == 0)) {
if(output == "flowlines") {
# trim event flowline to measure of event

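A likely motivation for the change above is that a comid supplied as a plain number is stored as a double, which the old is.integer() test rejected. A quick illustration (13293970 is the package's usual example comid):

start <- 13293970                      # a comid typed at the console is a double
is.integer(start)                      # FALSE under the old check
is.numeric(start) && start %% 1 == 0   # TRUE under the new check
floor(start)                           # replaces as.integer() without coercing class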
7 changes: 4 additions & 3 deletions _pkgdown.yml
@@ -5,12 +5,14 @@ reference:
desc: >
Functions that help get access data. (all U.S. context)
- contents:
- '`download_nhdplushr`'
- '`get_3dhp`'
- '`get_nhdplus`'
- '`discover_geoconnex_reference`'
- '`get_geoconnex_reference`'
- '`download_nhdplushr`'
- '`get_nhdplushr`'
- '`get_hr_data`'
- '`download_nhdplusv2`'
- '`get_nhdplus`'
- '`download_nhd`'
- '`download_rf1`'
- '`download_wbd`'
@@ -35,7 +37,6 @@ reference:
- '`subset_rpu`'
- '`subset_vpu`'
- '`discover_nhdplus_id`'
- '`get_nhdplus`'
- '`get_gagesII`'
- '`get_huc`'
- '`get_nhdarea`'
5 changes: 4 additions & 1 deletion docs/404.html


10 changes: 6 additions & 4 deletions docs/DISCLAIMER.html
