
Commit 3cc5de1

Merge pull request #138 from ropensci/dev
Dev
2 parents 02e88bb + cf62c5a commit 3cc5de1

221 files changed (+4375, -31077 lines)

.github/workflows/pkgdown.yaml  (+46)

@@ -0,0 +1,46 @@
+# Workflow derived from https://github.com/r-lib/actions/tree/v2/examples
+# Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help
+on:
+  push:
+    branches: [dev]
+  pull_request:
+    branches: [dev]
+  workflow_dispatch:
+
+name: pkgdown
+
+jobs:
+  pkgdown:
+    runs-on: ubuntu-latest
+    # Only restrict concurrency for non-PR jobs
+    concurrency:
+      group: pkgdown-${{ github.event_name != 'pull_request' || github.run_id }}
+    env:
+      GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
+    permissions:
+      contents: write
+    steps:
+      - uses: actions/checkout@v3
+
+      - uses: r-lib/actions/setup-pandoc@v2
+
+      - uses: r-lib/actions/setup-r@v2
+        with:
+          use-public-rspm: true
+
+      - uses: r-lib/actions/setup-r-dependencies@v2
+        with:
+          extra-packages: any::pkgdown, local::., any::svglite
+          needs: website
+
+      - name: Build site
+        run: pkgdown::build_site_github_pages(new_process = FALSE, install = FALSE)
+        shell: Rscript {0}
+
+      - name: Deploy to GitHub pages 🚀
+        if: github.event_name != 'pull_request'
+        uses: JamesIves/[email protected]
+        with:
+          clean: false
+          branch: gh-pages
+          folder: docs

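The workflow above builds the pkgdown site on pushes and pull requests to `dev` and deploys `docs/` to the `gh-pages` branch. For reference, a rough local equivalent of the build step might look like the sketch below (an assumption about local usage, not part of this commit; it presumes pkgdown is installed and the working directory is the package root):

```r
# Minimal local sketch of the workflow's "Build site" step (not part of this
# commit). Assumes the pkgdown package is installed and you are in the
# package root.
# install.packages("pkgdown")
pkgdown::build_site(new_process = FALSE, install = FALSE)

# The CI step instead calls pkgdown::build_site_github_pages(), which also
# writes the files GitHub Pages needs (e.g. .nojekyll); for a local preview
# of docs/, build_site() is sufficient.
```
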
.gitignore  (+1)

@@ -12,3 +12,4 @@ inst/doc
 devtests.R
 /doc/
 /Meta/
+docs

DESCRIPTION  (+7 -7)

@@ -1,9 +1,9 @@
 Package: weathercan
 Type: Package
 Title: Download Weather Data from Environment and Climate Change Canada
-Version: 0.6.3
+Version: 0.7.0
 Authors@R: c(
-    person("Steffi", "LaZerte", email = "steffi@steffi.ca", role = c("aut","cre"), comment = c(ORCID = "0000-0002-7690-8360")),
+    person("Steffi", "LaZerte", email = "sel@steffilazerte.ca", role = c("aut","cre"), comment = c(ORCID = "0000-0002-7690-8360")),
     person("Sam", "Albers", email = "[email protected]", role = c("ctb"), comment = c(ORCID = "0000-0002-9270-7884")),
     person("Nick", "Brown", email = "[email protected]", role = c("ctb"), comment = c(ORCID = "0000-0002-2719-0671")),
     person("Kevin", "Cazelles", email = "[email protected]", role = c("ctb"), comment = c(ORCID = "0000-0001-6619-9874")))
@@ -35,7 +35,7 @@ Imports:
     tidyselect (>= 1.0.0),
     xml2 (>= 0.1.2),
     rappdirs (>= 0.3.3)
-RoxygenNote: 7.1.2
+RoxygenNote: 7.2.3
 Roxygen: list(markdown = TRUE)
 Suggests:
     devtools,
@@ -48,10 +48,10 @@ Suggests:
     naniar,
     rmarkdown,
     sf,
-    sp,
-    testthat,
-    vcr (>= 1.0.2)
+    testthat (>= 3.0.0),
+    vcr (>= 1.0.2),
+    withr
 VignetteBuilder:
     knitr
 Encoding: UTF-8
-Config/Needs/website: meteoland
+Config/testthat/edition: 3

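The DESCRIPTION changes move the test suite to testthat's 3rd edition (`Config/testthat/edition: 3`, `testthat (>= 3.0.0)`) and add `withr` to Suggests. As a rough illustration of that style (a hypothetical test, not code from this commit):

```r
# Hypothetical testthat 3rd-edition test illustrating why withr is now
# Suggested: withr::local_*() helpers replace manual setup/teardown.
# The option name used here is made up for the example.
library(testthat)

test_that("temporarily set options are restored afterwards", {
  withr::local_options(list(example.option = "hourly"))  # hypothetical option
  expect_identical(getOption("example.option"), "hourly")
})
```
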
NEWS.md  (+14)

@@ -1,3 +1,17 @@
+# weathercan 0.7.0
+- Internal updates to tests (testthat 3rd edition)
+- Small changes to messages
+- Switch completely to sf (remove sp dependency)
+- Remove "Use with tidyverse" vignette - better to go to the source: https://r4ds.hadley.nz/
+- Remove "Meteoland" vignette as its functions are defunct
+
+## Bug fixes
+- Fix bug where `weather_interp()` silently transformed non-matching timezones.
+  This can produce incorrect matching when using "local-UTC" timezones
+  (as weathercan does as of v0.3.0). Now a timezone mismatch results in an error
+  so users can decide how it should be handled.
+
+
 # weathercan 0.6.3
 - Internal re-arranging and clean up
 - Stations without lat/lon now have NA timezone

R/interpolate.R  (+11 -3)

@@ -83,8 +83,8 @@ weather_interp <- function(data, weather,

   ## If 'time', convert to same timezone
   if(interval == "hour") {
-    if(attr(data[['time']], "tzone") != attr(weather[['time']], "tzone")) {
-      weather[['time']] <- lubridate::with_tz(weather[['time']], attr(data[['time']], "tzone"))
+    if(lubridate::tz(data$time) != lubridate::tz(weather$time)) {
+      stop("`data` and `weather` timezones must match", call. = FALSE)
     }
   }

@@ -144,11 +144,19 @@


 approx_na_rm <- function(x, y, xout, na_gap = NULL) {
+
   if(!all(class(x) == class(xout)) & !(is.numeric(xout) & is.numeric(x))) {
     stop("'xout' must be the same class as 'x'")
   }

-  new <- as.data.frame(stats::approx(x = x, y = y, xout = xout))
+  if(lubridate::is.POSIXct(x) &&
+     lubridate::is.POSIXct(xout) &&
+     lubridate::tz(x) != lubridate::tz(xout)) {
+    stop("Timezone of `x` doesn't match `xout`", call. = FALSE)
+  }
+
+  new <- as.data.frame(stats::approx(x = x, y = y, xout = xout,
+                                     yleft = NA, yright = NA))

   if(any(is.na(y)) & !is.null(na_gap)) {
     if(lubridate::is.Date(x) | lubridate::is.POSIXct(x)) {

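With this change, `weather_interp()` (and the internal `approx_na_rm()`) no longer silently converts mismatched timezones; it errors so the caller can decide how to handle them. A sketch of aligning timezones before interpolating, under the new behaviour (illustrative only; `my_data` is a hypothetical hourly data frame with a `time` column, and station 51423 is just an example):

```r
# Sketch of aligning timezones before calling weather_interp() under the new
# behaviour (illustrative; `my_data` is hypothetical).
library(lubridate)

kam <- weathercan::weather_dl(station_ids = 51423, interval = "hour",
                              start = "2018-02-01", end = "2018-02-10")

# If my_data$time uses a different timezone than kam$time, convert it first --
# otherwise weather_interp() now stops with
# "`data` and `weather` timezones must match".
my_data$time <- with_tz(my_data$time, tzone = tz(kam$time))

combined <- weathercan::weather_interp(data = my_data, weather = kam,
                                       cols = "temp")
```
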
R/normals.R  (+5 -3)

@@ -336,7 +336,7 @@ frost_extract <- function(f, climate_id) {
   readr::local_edition(1)
   f1 <- readr::read_csv(I(f[frost_free:last]),
                         col_names = c("variable", "value", "frost_code"),
-                        col_types = readr::cols()) %>%
+                        col_types = readr::cols(), progress = FALSE) %>%
     tidyr::spread(key = "variable", value = "value")

   n <- tibble_to_list(f_names[f_names$variable %in% names(f1),
@@ -353,7 +353,8 @@ frost_extract <- function(f, climate_id) {
   if(length(frost_probs) > 0) {
     readr::local_edition(1)
     f2 <- readr::read_csv(I(f[frost_probs:length(f)]),
-                          col_names = FALSE, col_types = readr::cols()) %>%
+                          col_names = FALSE, col_types = readr::cols(),
+                          progress = FALSE) %>%
       as.data.frame()
     f2 <- data.frame(prob = rep(c("10%", "25%", "33%", "50%",
                                   "66%", "75%", "90%"), 3),
@@ -373,7 +374,8 @@ frost_extract <- function(f, climate_id) {
   } else {
     r <- dplyr::full_join(
       dplyr::mutate(f1, climate_id = climate_id),
-      dplyr::mutate(f2, climate_id = climate_id), by = "climate_id") %>%
+      dplyr::mutate(f2, climate_id = climate_id),
+      by = "climate_id", relationship = "many-to-many") %>%
       dplyr::select(-climate_id)
   }

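Besides silencing readr progress bars, the join in `frost_extract()` now declares `relationship = "many-to-many"`, which dplyr (>= 1.1.0) otherwise flags with a warning when both tables repeat the join key. A toy illustration (not package data):

```r
# Toy example (not package data) of the many-to-many join declaration:
# both tables repeat the key, so every combination matches, and declaring
# the relationship silences dplyr's multiple-match warning.
library(dplyr)

f1 <- tibble(climate_id = "1163842", frost_free = c(150, 160))
f2 <- tibble(climate_id = "1163842", prob = c("10%", "50%", "90%"))

full_join(f1, f2, by = "climate_id", relationship = "many-to-many")
# -> 6 rows (2 x 3 combinations), no warning
```
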
R/stations.R  (+22 -14)

@@ -186,7 +186,7 @@ stations_dl_internal <- function(skip = NULL, verbose = FALSE, quiet = FALSE,

   headings <- readr::read_lines(httr::content(resp, as = "text",
                                               encoding = "Latin1"),
-                                n_max = 5)
+                                n_max = 5, progress = FALSE)
   if(!any(stringr::str_detect(headings, "Climate ID"))){
     stop("Could not read stations list (",
          getOption("weathercan.urls.stations"), ")", call. = FALSE)
@@ -216,7 +216,8 @@ stations_dl_internal <- function(skip = NULL, verbose = FALSE, quiet = FALSE,

   raw <- httr::content(resp, as = "text", encoding = "Latin1")

-  s <- readr::read_delim(raw, skip = skip, col_types = readr::cols())
+  s <- readr::read_delim(raw, skip = skip, col_types = readr::cols(),
+                         progress = FALSE)
   s <- dplyr::select(s,
                      "prov" = "Province",
                      "station_name" = "Name",
@@ -256,7 +257,7 @@ stations_dl_internal <- function(skip = NULL, verbose = FALSE, quiet = FALSE,
     dplyr::distinct() %>%
     dplyr::mutate(
       tz = lutz::tz_lookup_coords(.data$lat, .data$lon, method = "accurate"),
-      tz = purrr::map_chr(.data$tz, ~tz_offset(.x)),
+      tz = purrr::map_chr(.data$tz, ~tz_diff(.x)),
       tz = dplyr::if_else(is.na(.data$lat) | is.na(.data$lon), NA_character_, .data$tz))

   s <- s %>%
@@ -366,7 +367,7 @@ stations_dl_internal <- function(skip = NULL, verbose = FALSE, quiet = FALSE,
 #' stations_search(name = "Ottawa", normals_years = "1981-2010") # Same as above
 #' stations_search(name = "Ottawa", normals_years = "1971-2000") # 1971-2010
 #'
-#' if(requireNamespace("sp")) {
+#' if(requireNamespace("sf")) {
 #'   stations_search(coords = c(53.915495, -122.739379))
 #' }
 #'
@@ -416,9 +417,9 @@ stations_search <- function(name = NULL,
       stop("'coord' takes one pair of lat and lon in a numeric vector")
     }

-    if(!requireNamespace("sp", quietly = TRUE)) {
-      stop("Package 'sp' required to search for stations using coordinates. ",
-           "Use the code \"install.packages('sp')\" to install.", call. = FALSE)
+    if(!requireNamespace("sf", quietly = TRUE)) {
+      stop("Package 'sf' required to search for stations using coordinates. ",
+           "Use the code \"install.packages('sf')\" to install.", call. = FALSE)
     }

   }
@@ -481,18 +482,25 @@

   if(!is.null(coords)){
     if(verbose) message("Calculating station distances")
-    coords <- as.numeric(as.character(coords[c(2,1)]))
-    locs <- as.matrix(stn[!is.na(stn$lat), c("lon", "lat")])
-    stn$distance <- NA
-    stn$distance[!is.na(stn$lat)] <- sp::spDistsN1(pts = locs,
-                                                   pt = coords, longlat = TRUE)
-    stn <- dplyr::arrange(stn, .data$distance)
+
+    coords <- sf::st_point(coords[c(2,1)]) %>%
+      sf::st_sfc(crs = 4326)
+
+    locs <- dplyr::select(stn, "station_id", "lon", "lat") %>%
+      tidyr::drop_na() %>%
+      dplyr::distinct() %>%
+      sf::st_as_sf(coords = c("lon", "lat"), crs = 4326) %>%
+      dplyr::mutate(distance = as.vector(sf::st_distance(coords, .data$geometry))/1000) %>%
+      sf::st_drop_geometry()
+
+    stn <- dplyr::left_join(stn, locs, by = "station_id") %>%
+      dplyr::arrange(.data$distance)

     i <- which(stn$distance <= dist)
     if(length(i) == 0) {
       i <- 1:10
       if(!quiet) message("No stations within ", dist,
-                         "km. Returning closest 10 stations.")
+                         "km. Returning closest 10 records")
     }
   }

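Coordinate-based searches in `stations_search()` now rely on sf rather than sp: station coordinates are converted to an sf object and great-circle distances come from `sf::st_distance()`. A minimal sketch of the same calculation outside the package (hypothetical station coordinates; assumes sf is installed):

```r
# Minimal sketch of the sf-based distance calculation that replaces
# sp::spDistsN1() (illustrative only; stations_search() does this internally).
library(sf)

# Search point -- note the lon/lat order expected by st_point()
target <- st_sfc(st_point(c(-122.739379, 53.915495)), crs = 4326)

# Hypothetical subset of station coordinates
stns <- data.frame(station_id = c(101, 102),
                   lon = c(-122.68, -122.75),
                   lat = c(53.89, 53.92))

locs <- st_as_sf(stns, coords = c("lon", "lat"), crs = 4326)

# Distances come back in metres; divide by 1000 for km, as the new code does
stns$distance <- as.vector(st_distance(target, locs)) / 1000
stns[order(stns$distance), ]
```
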
R/sysdata.rda  (9 Bytes)

Binary file not shown.

R/utils.R  (+9 -7)

@@ -1,11 +1,13 @@
-tz_offset <- function(tz, as = "tz") {
-  t <- as.numeric(difftime(as.POSIXct("2016-01-01 00:00:00", tz = "UTC"),
-                           as.POSIXct("2016-01-01 00:00:00", tz = tz), units = "hours"))
+tz_diff <- function(tz, as = "tz") {
+  if(!is.na(tz)) {
+    t <- as.numeric(difftime(as.POSIXct("2016-01-01 00:00:00", tz = "UTC"),
+                             as.POSIXct("2016-01-01 00:00:00", tz = tz), units = "hours"))

-  if(as == "tz"){
-    if(t > 0) t <- paste0("Etc/GMT-", t)
-    if(t <= 0) t <- paste0("Etc/GMT+", abs(t))
-  }
+    if(as == "tz"){
+      if(t > 0) t <- paste0("Etc/GMT-", t)
+      if(t <= 0) t <- paste0("Etc/GMT+", abs(t))
+    }
+  } else t <- NA_character_
   t
 }

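`tz_offset()` becomes `tz_diff()` and now passes `NA` through instead of attempting the conversion, which supports giving stations without coordinates an `NA` timezone. Note the inverted sign convention of the POSIX `Etc/GMT*` zone names it returns, illustrated below (not package code):

```r
# Illustration (not package code) of the Etc/GMT sign convention used by
# tz_diff(): POSIX Etc/GMT* names invert the usual sign, so a zone 6 hours
# *behind* UTC is named "Etc/GMT+6".
t_utc   <- as.POSIXct("2016-01-01 00:00:00", tz = "UTC")
t_local <- as.POSIXct("2016-01-01 00:00:00", tz = "Etc/GMT+6")
difftime(t_utc, t_local, units = "hours")
#> Time difference of -6 hours

# With the new NA handling, tz_diff(NA) simply returns NA_character_.
```
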
R/weather.R  (+10 -7)

@@ -147,7 +147,7 @@ weather_dl <- function(station_ids,
   if(nrow(stn1) == 0) {
     if(length(station_ids) > 1) {
       missing <- c(missing, s)
-      if(!quiet) message("No data for station ", s)
+      if(verbose) message("No data for station ", s)
       next
     } else {

@@ -163,13 +163,13 @@
     }
   }

-  if(inherits(try(as.Date(stn1$start), silent = TRUE), "try-error")) {
+  if(!lubridate::is.Date(stn1$start)) {
     stn1 <- dplyr::mutate(stn1,
                           start = lubridate::ymd(as.character(.data$start),
                                                  truncated = 2),
                           start = lubridate::floor_date(.data$start, "year"))
   }
-  if(inherits(try(as.Date(stn1$end), silent = TRUE), "try-error")) {
+  if(!lubridate::is.Date(stn1$end)) {
     stn1 <- dplyr::mutate(stn1,
                           end = lubridate::ymd(as.character(.data$end),
                                                truncated = 2),
@@ -263,7 +263,7 @@

   if(nrow(temp) == 0 || all(is.na(temp) | temp == "")) {
     if(length(station_ids) > 1) {
-      if(!quiet) message("No data for station ", s)
+      if(verbose) message("No data for station ", s)
       missing <- c(missing, s)
       next
     } else {
@@ -456,7 +456,8 @@ weather_raw <- function(html, skip = 0,
   readr::local_edition(1)
   suppressWarnings({ # when some data are missing, final columns not present
     w <- readr::read_csv(I(raw), n_max = nrows, skip = skip,
-                         col_types = paste(rep("c", ncols), collapse = ""))})
+                         col_types = paste(rep("c", ncols), collapse = ""),
+                         progress = FALSE)})
   # Get rid of special symbols right away
   w <- remove_sym(w)

@@ -657,7 +658,8 @@ meta_raw <- function(html, encoding = "UTF-8", interval, return = "meta") {
     stringr::str_replace_all("(\\t)+", "\\\t") %>%
     readr::read_tsv(., n_max = i,
                     col_names = FALSE,
-                    col_types = readr::cols())
+                    col_types = readr::cols(),
+                    progress = FALSE)

   if(ncol(r) > 2) {
     stop("Problems parsing metadata. Submit an issue at ",
@@ -671,7 +673,8 @@
     stringr::str_remove("\\*https\\:\\/\\/climate.weather.gc.ca\\/FAQ_e.html#Q5") %>%
     readr::read_tsv(., skip = stringr::str_which(split, "Legend") + 1,
                     col_names = FALSE,
-                    col_types = readr::cols())
+                    col_types = readr::cols(),
+                    progress = FALSE)
   }
   # Get rid of any special symbols
   remove_sym(r)

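In `weather_dl()`, the "No data for station" messages are now gated on `verbose` rather than `!quiet`, readr calls gain `progress = FALSE`, and the date check swaps a `try(as.Date(...))` round-trip for `lubridate::is.Date()`. A small illustration of that last simplification (illustrative values, not package code):

```r
# Illustration (not package code) of the simplified date check: a year-only
# start value is flagged as needing parsing by both approaches.
start <- "2005"

# Old check: rely on as.Date() throwing an error
inherits(try(as.Date(start), silent = TRUE), "try-error")
#> TRUE

# New check: ask directly whether it is already a Date
!lubridate::is.Date(start)
#> TRUE

# Either way, the package then parses with a truncated date:
lubridate::ymd(start, truncated = 2)
#> [1] "2005-01-01"
```
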
README.Rmd  (+1 -1)

@@ -34,7 +34,7 @@ For more details and tutorials checkout the [weathercan website](https://docs.ro

 ## Installation

-You can install `weathercan` from the [rOpenSci r-Universe](https://ropensci.r-universe.dev/ui):
+You can install `weathercan` from the [rOpenSci r-Universe](https://ropensci.r-universe.dev/ui/):

 ```{r, eval = FALSE}
 install.packages("weathercan",

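The README hunk is truncated mid-call; for context, a typical r-universe installation pattern looks like the snippet below (the exact `repos` argument in the README is not shown in this diff and may differ):

```r
# Typical r-universe installation pattern (an assumption; the README's exact
# repos argument is not visible in this diff).
install.packages("weathercan",
                 repos = c("https://ropensci.r-universe.dev",
                           "https://cloud.r-project.org"))
```
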