Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

* Improve error for requesting German data out of temporal restrictions (#131).
* Start using the air formatter (#128).
* Do not fail but rather warn when a csv file is missing from the repository (#136).

# getRad 0.2.2

Expand Down
1 change: 1 addition & 0 deletions R/get_vpts_aloft.R
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,7 @@ get_vpts_aloft <- function(
)
)
) |>
purrr::keep(.p = ~ as.logical(nrow(.x))) |>
purrr::list_rbind() |>
# Move the source column to the front, where it makes sense
dplyr::relocate(
Expand Down
25 changes: 23 additions & 2 deletions R/utils.R
Original file line number Diff line number Diff line change
Expand Up @@ -433,11 +433,32 @@ fetch_from_url_raw <- function(urls, use_cache = TRUE, parallel = TRUE) {
if (parallel) {
data_response <-
data_request |>
httr2::req_perform_parallel(progress = interactive())
httr2::req_perform_parallel(
progress = interactive(),
on_error = "continue"
)
} else {
data_response <-
data_request |>
httr2::req_perform_sequential()
httr2::req_perform_sequential(on_error = "continue")
}
# Warn (instead of failing) about any csv files that are missing from the repository (HTTP 404)
if (any(ss <- unlist(lapply(data_response, inherits, "httr2_http_404")))) {
cli::cli_warn(
class = "getRad_warning_404_on_csv_download",
c(
"!" = "The following: {urls[ss]} url{?s} could not be downloaded (HTTP 404 Not Found).",
i = "Given an attempt was made data was present in the coverage data. Therefore this likely relates to an error in the data repository. For now the data has been omitted from the returned result however for a final resolution the issue should be resolved in the repository (e.g. {.url https://github.com/aloftdata/data-repository})."
)
)
for (i in seq_along(data_response)) {
if (ss[i]) {
data_response[[i]] <- raw()
} else {
data_response[[i]] <- httr2::resp_body_raw(data_response[[i]])
}
}
return(data_response)
}
# Fetch the response bodies
purrr::map(data_response, httr2::resp_body_raw)
Expand Down
72 changes: 38 additions & 34 deletions R/utils_vpts_aloft.R
Original file line number Diff line number Diff line change
Expand Up @@ -31,39 +31,43 @@ read_vpts_from_url <- function(urls, use_cache = TRUE) {

fetch_from_url_raw(urls, use_cache = use_cache) |>
purrr::map(
~ vroom::vroom(
delim = ",",
I(.x),
col_types = list(
radar = vroom::col_factor(),
datetime = vroom::col_datetime(),
height = vroom::col_integer(),
u = vroom::col_double(),
v = vroom::col_double(),
w = vroom::col_double(),
ff = vroom::col_double(),
dd = vroom::col_double(),
sd_vvp = vroom::col_double(),
gap = vroom::col_logical(),
eta = vroom::col_double(),
dens = vroom::col_double(),
dbz = vroom::col_double(),
dbz_all = vroom::col_double(),
n = vroom::col_integer(),
n_dbz = vroom::col_integer(),
n_all = vroom::col_integer(),
n_dbz_all = vroom::col_integer(),
rcs = vroom::col_double(),
sd_vvp_threshold = vroom::col_double(),
vcp = vroom::col_integer(),
radar_longitude = vroom::col_double(),
radar_latitude = vroom::col_double(),
radar_height = vroom::col_integer(),
radar_wavelength = vroom::col_double(),
source_file = vroom::col_character()
),
show_col_types = NULL,
progress = FALSE
)
~ if (length(.x)) {
vroom::vroom(
delim = ",",
I(.x),
col_types = list(
radar = vroom::col_factor(),
datetime = vroom::col_datetime(),
height = vroom::col_integer(),
u = vroom::col_double(),
v = vroom::col_double(),
w = vroom::col_double(),
ff = vroom::col_double(),
dd = vroom::col_double(),
sd_vvp = vroom::col_double(),
gap = vroom::col_logical(),
eta = vroom::col_double(),
dens = vroom::col_double(),
dbz = vroom::col_double(),
dbz_all = vroom::col_double(),
n = vroom::col_integer(),
n_dbz = vroom::col_integer(),
n_all = vroom::col_integer(),
n_dbz_all = vroom::col_integer(),
rcs = vroom::col_double(),
sd_vvp_threshold = vroom::col_double(),
vcp = vroom::col_integer(),
radar_longitude = vroom::col_double(),
radar_latitude = vroom::col_double(),
radar_height = vroom::col_integer(),
radar_wavelength = vroom::col_double(),
source_file = vroom::col_character()
),
show_col_types = NULL,
progress = FALSE
)
} else {
(data.frame())
}
)
}
39 changes: 39 additions & 0 deletions tests/testthat/test-utils.R
Original file line number Diff line number Diff line change
Expand Up @@ -17,3 +17,42 @@ test_that("odim test", {
class = "getRad_error_radar_not_odim_string"
)
})
test_that("fetch_from_url_raw warns on failing url", {
  # These tests hit the live aloft S3 repository; guard against offline
  # environments and CRAN, where network-dependent tests must not run.
  skip_if_offline()
  skip_on_cran()
  # Day 347 of 2024 does not exist, so this url reliably returns HTTP 404;
  # the function should warn with its classed condition instead of erroring.
  expect_warning(
    res <- fetch_from_url_raw(
      c(
        "https://aloftdata.s3-eu-west-1.amazonaws.com/baltrad/daily/bejab/2024/bejab_vpts_20240347.csv"
      )
    ),
    class = "getRad_warning_404_on_csv_download"
  )
  # 404 responses are replaced with empty raw vectors so the result keeps
  # the same class and shape (a list of raw vectors) while carrying no data
  expect_identical(
    res,
    list(raw())
  )

  # Mix one valid url with one failing url: the warning should still be
  # raised while the valid url is downloaded and parsed normally.
  expect_warning(
    res <- read_vpts_from_url(
      c(
        "https://aloftdata.s3-eu-west-1.amazonaws.com/baltrad/daily/bejab/2024/bejab_vpts_20240307.csv",
        "https://aloftdata.s3-eu-west-1.amazonaws.com/baltrad/daily/bejab/2024/bejab_vpts_20240347.csv"
      )
    ),
    class = "getRad_warning_404_on_csv_download"
  )
  # Failed urls yield an empty data.frame so downstream functions can bind
  # rows without special-casing missing data
  expect_identical(
    res[[2]],
    data.frame()
  )
  expect_s3_class(
    res[[1]],
    "data.frame"
  )
  # 200 rows is a conservative lower bound indicating real data is present
  # (a full day of profiles with ~20 height bins is much longer)
  expect_gt(
    nrow(res[[1]]),
    200
  )
})