diff --git a/.Rbuildignore b/.Rbuildignore
index eda663c..001922a 100644
--- a/.Rbuildignore
+++ b/.Rbuildignore
@@ -5,20 +5,15 @@
LICENSE.md
.gitlab-ci.yml
README.Rmd
-all_aqs.Rmd
-all_aqs.pdf
^ci/*
^review/*
HASP_*.tar.gz
-orig_scripts/
DISCLAIMER.md
-^docs$
+^docker$
^Meta$
code.json
-travis.yml
_pkgdown.yml
-pkgdown/
-.gitlab/
+^\.gitlab$
^docs$
^pkgdown$
^codecov\.yml$
@@ -49,7 +44,6 @@ vignettes/login.png
vignettes/git_repository.png
vignettes/https_clone.png
deploy_simple.R
-^docs$
cobertura.xml
test-out.xml
man/figures/app.gif
diff --git a/DESCRIPTION b/DESCRIPTION
index 095d75b..6fa628a 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -59,7 +59,8 @@ Suggests:
shinycssloaders,
shinydashboard,
openxlsx,
- rmarkdown
+ rmarkdown,
+ targets
BugReports: https://code.usgs.gov/water/stats/hasp/-/issues
VignetteBuilder: knitr
BuildVignettes: true
diff --git a/NAMESPACE b/NAMESPACE
index 2fb3aff..9ecc067 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -4,6 +4,7 @@ export(Sc_Cl_plot)
export(Sc_Cl_table)
export(composite_data)
export(create_groundwater_report)
+export(create_targets_script)
export(daily_frequency_table)
export(daily_gwl_plot)
export(daily_gwl_summary)
diff --git a/NEWS b/NEWS
index b733846..47e16e5 100644
--- a/NEWS
+++ b/NEWS
@@ -2,6 +2,7 @@ HASP 1.1.0
===========
* Convert to new dataRetrieval USGS Waterdata API functions and format.
* Switched to native R pipe (|>)
+* Added create_targets_script() to generate a targets workflow for composite aquifer data pulls.
HASP 1.0.3
===========
diff --git a/R/Sc_Cl_plot.R b/R/Sc_Cl_plot.R
index c1c0967..beeaad9 100644
--- a/R/Sc_Cl_plot.R
+++ b/R/Sc_Cl_plot.R
@@ -3,8 +3,8 @@
#' Functions to create the individual chloride, specific conductance,
#' and combination plots and tables for a single site.
#'
-#' @param qw_data data frame returned from dataRetrieval::readWQPqw,
-#' must include columns sample_dt, parm_cd, result_va
+#' @param qw_data data frame returned from \code{\link[dataRetrieval]{readWQPqw}},
+#' must include columns "ActivityStartDateTime", "CharacteristicName", and "ResultMeasureValue"
#' @param plot_title character title for plot
#' @param subtitle character. Sub-title for plot, default is "U.S. Geological Survey".
#' @rdname sc_cl
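
A hedged usage sketch for the documented qw_data format; the parameter codes 00095 (specific conductance) and 00940 (chloride), and the plot title, are illustrative assumptions rather than part of this change:

    library(HASP)
    # readWQPqw() returns "ActivityStartDateTime", "CharacteristicName",
    # and "ResultMeasureValue" among its columns.
    qw_data <- dataRetrieval::readWQPqw("USGS-263819081585801",
                                        parameterCd = c("00095", "00940"))
    Sc_Cl_plot(qw_data, plot_title = "Example site")
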
diff --git a/R/analyze.R b/R/analyze.R
index 713bb06..ec8a210 100644
--- a/R/analyze.R
+++ b/R/analyze.R
@@ -13,14 +13,12 @@
#'
#' @examples
#' aquifer_data <- aquifer_data
-#' aquifer_data <- aquifer_data[aquifer_data$parameter_cd == "72019", ]
-#' summary_info <- site_data_summary(aquifer_data, site_col = "site_no")
+#' aquifer_data <- aquifer_data[aquifer_data$parameter_code == "72019", ]
+#' summary_info <- site_data_summary(aquifer_data)
site_data_summary <- function(x,
value_col = "value",
site_col = "monitoring_location_id"){
- site_no <- value <- ".dplyr"
-
if(nrow(x) == 0) stop("No data")
if(!all(c(site_col, value_col) %in% names(x))) stop("Missing columns")
@@ -51,33 +49,29 @@ site_data_summary <- function(x,
#'
#' Get map info
#'
-#' @param x aquifer data
+#' @param sites data frame of site information, must include columns "monitoring_location_id", "monitoring_location_name", and "geometry"
#' @return data frame
#' @export
#' @keywords internal
#'
#' @examples
-#' aquifer_data <- aquifer_data
-#' map_info <- prep_map_data(aquifer_data)
-prep_map_data <- function(x ){
+#' site_info <- site_info
+#' map_info <- prep_map_data(site_info)
+prep_map_data <- function(sites){
-
- if(nrow(x) == 0) stop("No data")
-
- if(!("siteInfo" %in% names(attributes(x)))) stop("Missing site attributes")
+ if(nrow(sites) == 0) stop("No data")
- sites <- attr(x, "siteInfo")
+ if(!all(c("monitoring_location_id", "monitoring_location_name", "geometry") %in% names(sites))) stop("Missing columns")
+
- map_data <- sites |>
+ map_data <- sites |>
dplyr::mutate(popup = paste0('',
- site_no,"
+ .data[["monitoring_location_id"]],'">',
+ .data[["monitoring_location_id"]],"
- | Name: | ",
- station_nm,
- ' |
+ | Name: | ", .data[["monitoring_location_name"]],' |
')) |>
- dplyr::filter(!is.na(dec_lat_va))
+ dplyr::filter(!is.na(.data[["geometry"]]))
return(map_data)
@@ -112,11 +106,9 @@ filter_sites <- function(x,
if(nrow(x) == 0) stop("No data")
- if(!all(c("site_no", "year", "value") %in% names(x))) stop("Missing columns")
+ if(!all(c("monitoring_location_id", "year", "value", "parameter_code") %in% names(x))) stop("Missing columns")
- lev_va <- site_no <- year <- value <- n_years <- ".dplyr"
-
- pick_sites <- x[x$parameter_cd == parameter_cd, ]
+ pick_sites <- x[x$parameter_code == parameter_cd, ]
if(nrow(pick_sites) == 0){
warning("No data with requested parameter code.")
@@ -124,8 +116,8 @@ filter_sites <- function(x,
}
pick_sites <- pick_sites |>
- dplyr::filter(!is.na(value)) |>
- dplyr::group_by(site_no, year) |>
+ dplyr::filter(!is.na(.data[["value"]])) |>
+ dplyr::group_by(.data[["monitoring_location_id"]], .data[["year"]]) |>
dplyr::summarize(n_meas = dplyr::n()) |>
dplyr::ungroup()
@@ -154,16 +146,16 @@ filter_sites <- function(x,
}
tots <- expand.grid(year = start_year:end_year,
- site_no = unique(pick_sites$site_no), stringsAsFactors = FALSE) |>
+ monitoring_location_id = unique(pick_sites$monitoring_location_id), stringsAsFactors = FALSE) |>
data.frame()
pick_sites_comp <- pick_sites |>
- dplyr::right_join(tots, by = c("year", "site_no")) |>
- dplyr::filter(year >= start_year,
- year <= end_year)
+ dplyr::right_join(tots, by = c("year", "monitoring_location_id")) |>
+ dplyr::filter(.data[["year"]] >= start_year,
+ .data[["year"]] <= end_year)
- sites_incomplete <- unique(pick_sites_comp$site_no[is.na(pick_sites_comp$n_meas)])
- sites_complete <- unique(pick_sites_comp$site_no)
+ sites_incomplete <- unique(pick_sites_comp$monitoring_location_id[is.na(pick_sites_comp$n_meas)])
+ sites_complete <- unique(pick_sites_comp$monitoring_location_id)
sites_complete <- sites_complete[!sites_complete %in% sites_incomplete]
# If no sites are complete...we could walk back until there are some
@@ -173,25 +165,18 @@ filter_sites <- function(x,
}
pick_sites_comp_sum <- pick_sites_comp |>
- dplyr::filter(site_no %in% sites_complete) |>
- dplyr::group_by(site_no) |>
+ dplyr::filter(.data[["monitoring_location_id"]] %in% sites_complete) |>
+ dplyr::group_by(.data[["monitoring_location_id"]]) |>
dplyr::summarise(n_years = length(unique(year))) |>
dplyr::ungroup() |>
- dplyr::filter(n_years >= !!num_years) |>
- dplyr::pull(site_no)
+ dplyr::filter(.data[["n_years"]] >= !!num_years) |>
+ dplyr::pull(.data[["monitoring_location_id"]])
aquifer_data <- x |>
- dplyr::filter(site_no %in% pick_sites_comp_sum) |>
- dplyr::filter(year >= start_year,
- year <= end_year)
+ dplyr::filter(.data[["monitoring_location_id"]] %in% pick_sites_comp_sum) |>
+ dplyr::filter(.data[["year"]] >= start_year,
+ .data[["year"]] <= end_year)
- if("siteInfo" %in% names(attributes(x))){
- siteInfo <- attr(x, "siteInfo") |>
- dplyr::filter(site_no %in% pick_sites_comp_sum)
-
- attr(aquifer_data, "siteInfo") <- siteInfo
- }
-
return(aquifer_data)
}
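
Since filter_sites() now keys off "monitoring_location_id" instead of "site_no" (the remaining hunks in this file apply the same rename to composite_data() and normalized_data()), a hedged usage sketch with the packaged example data; num_years = 30 is an illustrative choice:

    library(HASP)
    aquifer_data <- aquifer_data  # packaged example data, already in the new format
    filtered <- filter_sites(aquifer_data, num_years = 30, parameter_cd = "72019")
    comp <- composite_data(aquifer_data, num_years = 30, parameter_cd = "72019")
    head(comp)
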
@@ -214,11 +199,9 @@ filter_sites <- function(x,
#'
composite_data <- function(x, num_years, parameter_cd){
- year <- site_no <- n_sites_year <- med_site <- value <- name <- ".dplyr"
-
if(nrow(x) == 0) stop("No data")
- if(!all(c("site_no", "year", "value") %in% names(x))) stop("Missing columns")
+ if(!all(c("monitoring_location_id", "year", "value") %in% names(x))) stop("Missing columns")
x <- filter_sites(x, num_years, parameter_cd = parameter_cd)
@@ -226,21 +209,21 @@ composite_data <- function(x, num_years, parameter_cd){
stop("No data ")
}
- n_sites <- length(unique(x$site_no))
+ n_sites <- length(unique(x$monitoring_location_id))
composite <- x |>
- dplyr::group_by(year, site_no) |>
- dplyr::summarize(med_site = stats::median(value, na.rm = TRUE)) |>
+ dplyr::group_by(.data[["year"]], .data[["monitoring_location_id"]]) |>
+ dplyr::summarize(med_site = stats::median(.data[["value"]], na.rm = TRUE)) |>
dplyr::ungroup() |>
- dplyr::distinct(year, site_no, med_site) |>
- dplyr::group_by(year) |>
- dplyr::summarise(mean = mean(med_site, na.rm = TRUE),
- median = stats::median(med_site, na.rm = TRUE),
- n_sites_year = length(unique(site_no))) |>
- dplyr::filter(n_sites_year == {{n_sites}}) |>
- dplyr::select(-n_sites_year) |>
+ dplyr::distinct(.data[["year"]], .data[["monitoring_location_id"]], .data[["med_site"]]) |>
+ dplyr::group_by(.data[["year"]]) |>
+ dplyr::summarise(mean = mean(.data[["med_site"]], na.rm = TRUE),
+ median = stats::median(.data[["med_site"]], na.rm = TRUE),
+ n_sites_year = length(unique(.data[["monitoring_location_id"]]))) |>
+ dplyr::filter(.data[["n_sites_year"]] == {{n_sites}}) |>
+ dplyr::select(-dplyr::all_of("n_sites_year")) |>
tidyr::pivot_longer(c("mean", "median")) |>
- dplyr::mutate(name = factor(name,
+ dplyr::mutate(name = factor(.data[["name"]],
levels = c("median","mean"),
labels = c("Median",
"Mean") ))
@@ -268,7 +251,7 @@ normalized_data <- function(x, num_years, parameter_cd = "72019"){
if(nrow(x) == 0) stop("No data")
- if(!all(c("site_no", "year", "value") %in% names(x))) stop("Missing columns")
+ if(!all(c("monitoring_location_id", "year", "value", "parameter_code") %in% names(x))) stop("Missing columns")
if(nrow(x) == 0){
stop("No data")
@@ -277,30 +260,34 @@ normalized_data <- function(x, num_years, parameter_cd = "72019"){
x <- filter_sites(x,
num_years = num_years,
parameter_cd = parameter_cd)
- n_sites <- length(unique(x$site_no))
+
+ n_sites <- length(unique(x$monitoring_location_id))
+
year_summaries <- site_data_summary(x,
- value_col = "value", site_col = "site_no")
+ value_col = "value",
+ site_col = "monitoring_location_id")
norm_composite <- x |>
- dplyr::group_by(year, site_no) |>
+ dplyr::group_by(.data[["year"]], .data[["monitoring_location_id"]]) |>
dplyr::mutate(med_site = stats::median(value, na.rm = TRUE)) |>
dplyr::ungroup() |>
- dplyr::distinct(year, site_no, med_site) |>
- dplyr::group_by(site_no) |>
- dplyr::mutate(max_med = max(med_site, na.rm = TRUE),
- min_med = min(med_site, na.rm = TRUE),
- mean_med = mean(med_site, na.rm = TRUE)) |>
+ dplyr::distinct(.data[["year"]], .data[["monitoring_location_id"]], .data[["med_site"]]) |>
+ dplyr::group_by(.data[["monitoring_location_id"]]) |>
+ dplyr::mutate(max_med = max(.data[["med_site"]], na.rm = TRUE),
+ min_med = min(.data[["med_site"]], na.rm = TRUE),
+ mean_med = mean(.data[["med_site"]], na.rm = TRUE)) |>
dplyr::ungroup() |>
- dplyr::mutate(x_norm = -1*(med_site - mean_med)/(max_med - min_med)) |>
+ dplyr::mutate(x_norm = -1*(.data[["med_site"]] - .data[["mean_med"]])/
+ (.data[["max_med"]] - .data[["min_med"]])) |>
dplyr::ungroup() |>
- dplyr::group_by(year) |>
- dplyr::summarise(mean = mean(x_norm, na.rm = TRUE),
- median = stats::median(x_norm, na.rm = TRUE),
- n_sites_year = length(unique(site_no))) |>
- dplyr::filter(!n_sites_year < {{n_sites}}) |>
- dplyr::select(-n_sites_year) |>
+ dplyr::group_by(.data[["year"]]) |>
+ dplyr::summarise(mean = mean(.data[["x_norm"]], na.rm = TRUE),
+ median = stats::median(.data[["x_norm"]], na.rm = TRUE),
+ n_sites_year = length(unique(.data[["monitoring_location_id"]]))) |>
+ dplyr::filter(!.data[["n_sites_year"]] < {{n_sites}}) |>
+ dplyr::select(-dplyr::all_of("n_sites_year")) |>
tidyr::pivot_longer(c("mean", "median")) |>
- dplyr::mutate(name = factor(name,
+ dplyr::mutate(name = factor(.data[["name"]],
levels = c("median","mean"),
labels = c("Median",
"Mean") ))
diff --git a/R/chloride.R b/R/chloride.R
index 081a29b..6f6c2be 100644
--- a/R/chloride.R
+++ b/R/chloride.R
@@ -2,7 +2,7 @@
#'
#' Function creates the cloride over time plot with trends.
#'
-#' @param qw_data data frame returned from dataRetrieval::readWQPqw,
+#' @param qw_data data frame returned from \code{\link[dataRetrieval]{readWQPqw}},
#' must include columns ActivityStartDateTime, CharacteristicName, result_va
#' @param CharacteristicName character CharacteristicName to plot
#' @param norm_range a numerical range to potentially group the data. If NA, no grouping is shown.
diff --git a/R/create_groundwater_report.R b/R/create_groundwater_report.R
index 5f66929..0348bc7 100644
--- a/R/create_groundwater_report.R
+++ b/R/create_groundwater_report.R
@@ -49,8 +49,9 @@ create_groundwater_report <- function(siteID,
save_as <- file.path(report_path, paste0(report_name, ".Rmd"))
new <- usethis::write_over(save_as, template_contents)
- usethis::edit_file(save_as)
-
+ if (new) {
+ usethis::edit_file(save_as)
+ }
}
diff --git a/R/create_targets_script.R b/R/create_targets_script.R
new file mode 100644
index 0000000..bb96adc
--- /dev/null
+++ b/R/create_targets_script.R
@@ -0,0 +1,48 @@
+#' Generate targets script for national aquifer data pull
+#'
+#' @description Opens a template of the aquifer targets workflow. Running the
+#' function will create an R script (default "_targets.R") for a composite aquifer data pull.
+#'
+#' @param national_aquifer_cd character national aquifer code, for example "N100BSNRGB".
+#' @param parameter_cd 5-digit character USGS parameter code. Default is "72019".
+#' @param statistic_cd 5-digit character USGS statistic code. Default is "00003".
+#' @param start_date character start date ("YYYY-MM-DD").
+#' @param end_date character end date ("YYYY-MM-DD").
+#' @param file_name character name of the script to create. Default is "_targets.R".
+#'
+#' @export
+#' @examples
+#'
+#' \dontrun{
+#' create_targets_script(national_aquifer_cd = "N100BSNRGB",
+#' parameter_cd = "72019",
+#' statistic_cd = "00003",
+#' start_date = "1988-10-01",
+#' end_date = "2021-01-01",
+#' file_name = "_targets_test.R")
+#' }
+#'
+create_targets_script <- function(national_aquifer_cd,
+ parameter_cd = "72019",
+ statistic_cd = "00003",
+ start_date,
+ end_date,
+ file_name = "_targets.R"){
+
+ template_path <- system.file(package = "HASP", "templates", "aquifer_target.R")
+
+ template_contents <- strsplit(whisker::whisker.render(xfun::read_utf8(template_path),
+ data = list(national_aquifer_cd = national_aquifer_cd,
+ parameter_cd = parameter_cd,
+ statistic_cd = statistic_cd,
+ start_date = start_date,
+ end_date = end_date)), "\n")[[1]]
+
+ new <- usethis::write_over(file_name, template_contents)
+
+ if (new) {
+ usethis::edit_file(file_name)
+ }
+
+}
+
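
As a usage note, a hedged sketch of the intended follow-up once the script exists; tar_make() and tar_read() come from the targets package (added to Suggests above), and the target name read at the end is an assumption about the template, not something defined in this diff:

    library(HASP)
    create_targets_script(national_aquifer_cd = "N100BSNRGB",
                          start_date = "1988-10-01",
                          end_date = "2021-01-01")
    # Run the generated _targets.R from the working directory:
    targets::tar_make()
    # targets::tar_read(aquifer_data)  # hypothetical target name from the template
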
diff --git a/R/frequency_analysis.R b/R/frequency_analysis.R
index fcbf4cf..36e0130 100644
--- a/R/frequency_analysis.R
+++ b/R/frequency_analysis.R
@@ -22,17 +22,14 @@
#' statCd <- "00001"
#' # gw_level_dv <- dataRetrieval::read_waterdata_daily(monitoring_location_id = site,
#' # parameter_code = p_code_dv,
-#' # statistic_id = statCd,
-#' # skipGeometry = TRUE)
-#' #
+#' # statistic_id = statCd)
#'
#' gw_level_dv <- L2701_example_data$Daily
#'
#' monthly_frequency <- monthly_frequency_table(gw_level_dv,
#' NULL)
#' head(monthly_frequency)
-#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site,
-#' # skipGeometry = TRUE)
+#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site)
#' gwl_data <- L2701_example_data$Discrete
#'
#' monthly_frequency_combo <- monthly_frequency_table(gw_level_dv = gw_level_dv,
@@ -178,9 +175,7 @@ stats_by_interval <- function(interval,
#' statCd <- "00001"
#' # gw_level_dv <- dataRetrieval::read_waterdata_daily(monitoring_location_id = site,
#' # parameter_code = p_code_dv,
-#' # statistic_id = statCd,
-#' # skipGeometry = TRUE)
-#' #
+#' # statistic_id = statCd)
#'
#' gw_level_dv <- L2701_example_data$Daily
#'
@@ -193,8 +188,7 @@ stats_by_interval <- function(interval,
#' flip = FALSE)
#' monthly_frequency
#'
-#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site,
-#' # skipGeometry = TRUE)
+#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site)
#' gwl_data <- L2701_example_data$Discrete
#'
#' monthly_frequency_plot(gw_level_dv,
@@ -505,18 +499,14 @@ monthly_frequency_plot <- function(gw_level_dv,
#' statCd <- "00001"
#' # gw_level_dv <- dataRetrieval::read_waterdata_daily(monitoring_location_id = site,
#' # parameter_code = p_code_dv,
-#' # statistic_id = statCd,
-#' # skipGeometry = TRUE)
-#' #
+#' # statistic_id = statCd)
#'
#' gw_level_dv <- L2701_example_data$Daily
#'
-#' weekly_frequency <- weekly_frequency_table(gw_level_dv,
-#' NULL)
+#' weekly_frequency <- weekly_frequency_table(gw_level_dv, NULL)
#' head(weekly_frequency)
#'
-#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site,
-#' # skipGeometry = TRUE)
+#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site)
#' gwl_data <- L2701_example_data$Discrete
#'
#' weekly_frequency <- weekly_frequency_table(gw_level_dv,
@@ -582,15 +572,12 @@ weekly_frequency_table <- function(gw_level_dv,
#' p_code_dv <- "62610"
#' statCd <- "00001"
#'
-#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site,
-#' # skipGeometry = TRUE)
+#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site)
#' gwl_data <- L2701_example_data$Discrete
#'
#' # gw_level_dv <- dataRetrieval::read_waterdata_daily(monitoring_location_id = site,
#' # parameter_code = p_code_dv,
-#' # statistic_id = statCd,
-#' # skipGeometry = TRUE)
-#' #
+#' # statistic_id = statCd)
#'
#' gw_level_dv <- L2701_example_data$Daily
#'
@@ -835,14 +822,11 @@ weekly_frequency_plot <- function(gw_level_dv,
#' statCd <- "00001"
#' # gw_level_dv <- dataRetrieval::read_waterdata_daily(monitoring_location_id = site,
#' # parameter_code = p_code_dv,
-#' # statistic_id = statCd,
-#' # skipGeometry = TRUE)
-#' #
+#' # statistic_id = statCd)
#'
#' gw_level_dv <- L2701_example_data$Daily
#'
-#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site,
-#' # skipGeometry = TRUE)
+#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site)
#' gwl_data <- L2701_example_data$Discrete
#'
#' daily_gwl_plot(gw_level_dv,
@@ -1016,9 +1000,7 @@ daily_gwl_plot <- function(gw_level_dv,
#' statCd <- "00001"
#' # gw_level_dv <- dataRetrieval::read_waterdata_daily(monitoring_location_id = site,
#' # parameter_code = p_code_dv,
-#' # statistic_id = statCd,
-#' # skipGeometry = TRUE)
-#' #
+#' # statistic_id = statCd)
#'
#' gw_level_dv <- L2701_example_data$Daily
#'
@@ -1026,8 +1008,7 @@ daily_gwl_plot <- function(gw_level_dv,
#' NULL,
#' parameter_cd = "62610")
#'
-#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site,
-#' # skipGeometry = TRUE)
+#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site)
#' gwl_data <- L2701_example_data$Discrete
#'
#' daily_frequency_table(gw_level_dv,
@@ -1101,9 +1082,7 @@ daily_frequency_table <- function(gw_level_dv,
#' statCd <- "00001"
#' # gw_level_dv <- dataRetrieval::read_waterdata_daily(monitoring_location_id = site,
#' # parameter_code = p_code_dv,
-#' # statistic_id = statCd,
-#' # skipGeometry = TRUE)
-#' #
+#' # statistic_id = statCd)
#'
#' gw_level_dv <- L2701_example_data$Daily
#'
@@ -1111,8 +1090,7 @@ daily_frequency_table <- function(gw_level_dv,
#' gwl_data = NULL,
#' parameter_cd = p_code_dv)
#'
-#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site,
-#' # skipGeometry = TRUE)
+#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site)
#' gwl_data <- L2701_example_data$Discrete
#'
#' daily_gwl_summary(gw_level_dv,
diff --git a/R/get_data.R b/R/get_data.R
index 5d986d3..6515b52 100644
--- a/R/get_data.R
+++ b/R/get_data.R
@@ -1,4 +1,3 @@
-
#' get_aquifer_data
#'
#' Get USGS data based on aquiferCd
@@ -23,9 +22,13 @@ get_aquifer_data <- function(aquiferCd, startDate, endDate,
aquifer_data <- data.frame()
site_data <- data.frame()
-
+
states <- unlist(summary_aquifers$states[summary_aquifers$nat_aqfr_cd == aquiferCd])
+ .Deprecated(new = "create_targets_script",
+ package = "HASP",
+              msg = "NWIS servers are slated for decommissioning. Consider migrating to create_targets_script for composite aquifer data pulls.")
+
for(state in states){
message("Getting data from: ", state)
@@ -75,44 +78,39 @@ get_aquifer_data <- function(aquiferCd, startDate, endDate,
#' @param parameter_cd 5-digit character USGS parameter code. Default is "72019".
#' @export
#'
-#' @examples
-#' end_date <- "2021-01-01"
-#' start_date <- "1989-12-31"
-#' aquiferCd <- "S100CSLLWD"
-#'
-#' \donttest{
-#' # st_data <- get_state_data("WI", aquiferCd,
-#' # start_date, end_date)
-#' }
get_state_data <- function(state, aquiferCd,
startDate, endDate,
parameter_cd = "72019"){
-
+
+ .Deprecated(new = "create_targets_script",
+ package = "HASP",
+              msg = "NWIS servers are slated for decommissioning. Consider migrating to create_targets_script for composite aquifer data pulls.")
+
levels <- dataRetrieval::readNWISdata(stateCd = state,
- service = "gwlevels",
- startDate= startDate,
- endDate = endDate,
- aquiferCd = aquiferCd,
- format = "rdb,3.0")
-
+ service = "gwlevels",
+ startDate= startDate,
+ endDate = endDate,
+ aquiferCd = aquiferCd,
+ format = "rdb,3.0")
+
levels_dv <- dataRetrieval::readNWISdata(stateCd = state,
- service = "dv",
- statCd = "00003",
- startDate= startDate,
- endDate = endDate,
- aquiferCd = aquiferCd)
+ service = "dv",
+ statCd = "00003",
+ startDate= startDate,
+ endDate = endDate,
+ aquiferCd = aquiferCd)
site_info <- dataRetrieval::whatNWISdata(stateCd = state,
- startDate= startDate,
- endDate = endDate,
+ startDate= startDate,
+ endDate = endDate,
service = "gwlevels")
if(nrow(levels) + nrow(levels_dv) == 0){
return(data.frame())
}
-
+
if(nrow(levels) > 0){
-
+
state_data <- levels |>
dplyr::filter(lev_age_cd == "A") |>
dplyr::select(lev_dt, site_no, parameter_cd, lev_va, sl_lev_va) |>
@@ -129,7 +127,7 @@ get_state_data <- function(state, aquiferCd,
}
if(nrow(levels_dv) > 0){
-
+
state_dv <- levels_dv |>
dplyr::mutate(year = as.numeric(format(dateTime, "%Y")),
water_year = water_year(dateTime),
@@ -138,30 +136,30 @@ get_state_data <- function(state, aquiferCd,
lev_dt = as.Date(dateTime))
cds <- which(!grepl("_cd", names(state_dv)) &
- !names(state_dv) %in% c("agency_cd", "site_no", "water_year",
- "dateTime", "tz_cd", "year",
- "state_call", "lev_dt"))
+ !names(state_dv) %in% c("agency_cd", "site_no", "water_year",
+ "dateTime", "tz_cd", "year",
+ "state_call", "lev_dt"))
names(state_dv)[cds] <- sprintf("%s_value", names(state_dv)[cds])
state_dv <- state_dv |>
tidyr::pivot_longer(cols = c(-agency_cd, -site_no, -water_year,
-dateTime, -tz_cd, -year,
-state_call, -lev_dt),
- names_to = c("pcode", ".value"),
- names_pattern = "(.+)_(.+)") |>
+ names_to = c("pcode", ".value"),
+ names_pattern = "(.+)_(.+)") |>
dplyr::mutate(pcode = gsub("X_", "", pcode),
pcode = substr(pcode, 1, 5)) |>
dplyr::rename(lev_status_cd = cd,
parameter_cd = pcode) |>
dplyr::filter(lev_status_cd == "A") |>
dplyr::select(-dateTime, -tz_cd, -agency_cd, -lev_status_cd)
-
+
} else {
state_dv = data.frame()
}
state_data_tots <- dplyr::bind_rows(state_data,
- state_dv)
+ state_dv)
site_info <- site_info |>
dplyr::filter(site_no %in% unique(state_data_tots$site_no))
@@ -183,7 +181,7 @@ get_state_data <- function(state, aquiferCd,
#' siteID <- "USGS-263819081585801"
#' site_metadata <- site_summary(siteID)
site_summary <- function(siteID, markdown = FALSE){
-
+
site_info <- dataRetrieval::read_waterdata_monitoring_location(monitoring_location_id = siteID)
if(!any(grepl("GW", site_info$site_type_code))){
@@ -216,18 +214,18 @@ site_summary <- function(siteID, markdown = FALSE){
"altitude_method_name"))) |>
dplyr::left_join(nat_aqfrs, by = c("national_aquifer_code" = "nat_aqfr_cd")) |>
dplyr::left_join(dplyr::rename(local_aqfr,
- local_aq = Aqfr_Name_prpr),
+ local_aq = Aqfr_Name_prpr),
by = c("aquifer_type_code" = "aqfr_cd"))
cat(site_info_cleaned$monitoring_location_id, site_info_cleaned$monitoring_location_name, end_of_line)
-
+
cat(site_info_cleaned$county_name, ",", site_info_cleaned$state_name, end_of_line)
cat("Hydrologic Unit: ", site_info_cleaned$hydrologic_unit_code, end_of_line)
cat("Well depth: ", site_info_cleaned$well_constructed_depth, " feet",end_of_line)
cat("Land surface altitude: ", site_info_cleaned$altitude, site_info_cleaned$altitude_method_name , end_of_line)
cat('Well completed in : "', site_info_cleaned$nat_aq,'" (',
site_info_cleaned$national_aquifer_code, ") national aquifer.", end_of_line, sep = "")
-
+
return(site_info_cleaned)
}
@@ -243,7 +241,7 @@ site_summary <- function(siteID, markdown = FALSE){
#' siteID <- "USGS-263819081585801"
#' site_data_available <- data_available(siteID)
data_available <- function(siteID){
-
+
data_info <- dataRetrieval::read_waterdata_ts_meta(monitoring_location_id = siteID,
skipGeometry = TRUE)
@@ -257,19 +255,19 @@ data_available <- function(siteID){
names(data_info_clean)[names(data_info_clean) == "computation_period_identifier"] <- "Data Type"
names(data_info_clean)[names(data_info_clean) == "begin"] <- "Begin Date"
names(data_info_clean)[names(data_info_clean) == "end"] <- "End Date"
-
+
field_info <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = siteID,
skipGeometry = TRUE)
if(nrow(field_info) > 0){
-
+
inventory <- stats::aggregate(time ~ parameter_code,
- data = field_info,
- FUN = min)
+ data = field_info,
+ FUN = min)
names(inventory)[2] <- c("Begin Date")
inventory2 <- stats::aggregate(time ~ parameter_code,
- data = field_info,
- FUN = max)
+ data = field_info,
+ FUN = max)
names(inventory2)[2] <- c("End Date")
inventory <- merge(inventory, inventory2, by = "parameter_code")
@@ -286,11 +284,11 @@ data_available <- function(siteID){
data_info_clean <- data_info_clean |>
rbind(inventory)
}
-
+
what_qw <- dataRetrieval::summarize_waterdata_samples(monitoringLocationIdentifier = siteID)
if(nrow(what_qw) > 0){
-
+
characteristics <- dataRetrieval::check_waterdata_sample_params("characteristics")
characteristics <- characteristics[, c("characteristicNameUserSupplied",
"parameterCode")]
@@ -298,8 +296,8 @@ data_available <- function(siteID){
"parameter_code")
characteristics <- characteristics[!is.na(characteristics$parameter_code), ]
characteristics <- stats::aggregate(parameter_code ~ parameter_name,
- data = characteristics,
- FUN = paste0, collapse = ", ")
+ data = characteristics,
+ FUN = paste0, collapse = ", ")
what_qw_cleaned <- what_qw[, c("characteristicUserSupplied",
@@ -309,7 +307,7 @@ data_available <- function(siteID){
"Begin Date",
"End Date")
what_qw_cleaned$`Data Type` <- "Discrete Samples"
-
+
what_qw_cleaned <- merge(what_qw_cleaned,
characteristics,
by = "parameter_name")
@@ -327,5 +325,3 @@ data_available <- function(siteID){
}
-
-
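
For orientation, a hedged migration sketch matching the deprecation messages above; the aquifer code and dates come from the example removed in this file, and reusing that code as national_aquifer_cd is an assumption:

    # Deprecated NWIS-based pull:
    # aquifer_data <- get_aquifer_data(aquiferCd = "S100CSLLWD",
    #                                  startDate = "1989-12-31",
    #                                  endDate = "2021-01-01")

    # Suggested replacement:
    create_targets_script(national_aquifer_cd = "S100CSLLWD",
                          start_date = "1989-12-31",
                          end_date = "2021-01-01")
    # then run the generated pipeline, e.g. targets::tar_make()
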
diff --git a/R/gwl_single_sites.R b/R/gwl_single_sites.R
index 5e4d265..2f0ef14 100644
--- a/R/gwl_single_sites.R
+++ b/R/gwl_single_sites.R
@@ -12,8 +12,7 @@
#' @examples
#'
#' site <- "USGS-263819081585801"
-#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site,
-#' # skipGeometry = TRUE)
+#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site)
#' gwl_data <- L2701_example_data$Discrete
#'
#' pcodes <- dataRetrieval::read_waterdata_parameter_codes(
@@ -81,9 +80,7 @@ gwl_plot_field <- function(gwl_data,
#' statCd <- "00001"
#' # gw_level_dv <- dataRetrieval::read_waterdata_daily(monitoring_location_id = site,
#' # parameter_code = p_code_dv,
-#' # statistic_id = statCd,
-#' # skipGeometry = TRUE)
-#' #
+#' # statistic_id = statCd)
#'
#' gw_level_dv <- L2701_example_data$Daily
#'
@@ -98,8 +95,7 @@ gwl_plot_field <- function(gwl_data,
#' y_label = pcodes$parameter_name,
#' flip = TRUE)
#'
-#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site,
-#' # skipGeometry = TRUE)
+#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site)
#' gwl_data <- L2701_example_data$Discrete
#'
#' gwl_plot_all(gw_level_dv,
@@ -345,7 +341,15 @@ set_up_data <- function(gw_level_dv,
gwl_data$year <- as.numeric(format(gwl_data[["Date"]], "%Y")) +
as.numeric(format(gwl_data[["Date"]], "%j"))/365
+ if("geometry" %in% names(gwl_data)){
+ gwl_data$geometry <- NULL
+ attr(gwl_data, "class") <- "data.frame"
+ attr(gwl_data, "sf_column") <- NULL
+ }
+
gwl_data <- gwl_data[, c("year", "Date", "Value", "Approve")]
+
+
} else {
gwl_data <- data.frame(year = numeric(),
Date = as.Date(character()),
@@ -387,6 +391,12 @@ set_up_data <- function(gw_level_dv,
gw_level_dv$Value <- as.numeric(gw_level_dv[[value_col_dv]])
gw_level_dv$Approve <- gw_level_dv[[approved_dv]]
+ if("geometry" %in% names(gw_level_dv)){
+ gw_level_dv$geometry <- NULL
+ attr(gw_level_dv, "class") <- "data.frame"
+ attr(gw_level_dv, "sf_column") <- NULL
+ }
+
gw_level_dv <- gw_level_dv[, c("Date", "Value", "Approve")]
} else {
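
The two new blocks above drop the sf geometry by nulling the column and the class/sf_column attributes; for reference, a hedged equivalent (assuming the inputs really are sf objects and sf is available) would be:

    if (inherits(gw_level_dv, "sf")) {
      gw_level_dv <- sf::st_drop_geometry(gw_level_dv)  # returns a plain data.frame
    }
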
diff --git a/R/included_data.R b/R/included_data.R
index 0ef2dca..a2dd06d 100644
--- a/R/included_data.R
+++ b/R/included_data.R
@@ -35,6 +35,17 @@ NULL
#' @examples
#' head(aquifer_data)
NULL
+
+#' Site information for aquifer data included
+#'
+#' @name site_info
+#' @rdname sampleData
+#' @docType data
+#' @keywords water quality data
+#' @examples
+#' head(site_info)
+NULL
+
# Example to get the data:
# start_date <- "1988-10-01"
# end_date <- "2021-01-01"
diff --git a/R/statistics.R b/R/statistics.R
index 79d2add..2a74709 100644
--- a/R/statistics.R
+++ b/R/statistics.R
@@ -29,15 +29,12 @@
#' statCd <- "00001"
#'
#' # Using package example data:
-#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site,
-#' # skipGeometry = TRUE)
+#' # gwl_data <- dataRetrieval::read_waterdata_field_measurements(monitoring_location_id = site)
#' gwl_data <- L2701_example_data$Discrete
#'
#' # gw_level_dv <- dataRetrieval::read_waterdata_daily(monitoring_location_id = site,
#' # parameter_code = p_code_dv,
-#' # statistic_id = statCd,
-#' # skipGeometry = TRUE)
-#' #
+#' # statistic_id = statCd)
#'
#' gw_level_dv <- L2701_example_data$Daily
#'
diff --git a/R/visualize.R b/R/visualize.R
index 0fb8bbe..91a181c 100644
--- a/R/visualize.R
+++ b/R/visualize.R
@@ -2,7 +2,7 @@
#'
#' Create composite hydrograph plot
#'
-#' @param x aquifer data frame. Requires at least 3 columns. Two are required "site_no", "year",
+#' @param x aquifer data frame. Requires at least 3 columns: "monitoring_location_id", "year",
#' and "value".
#' @param num_years integer number of years required. If \code{NA}, the
#' analysis will default to the range of the data in x.
@@ -34,7 +34,7 @@ plot_composite_data <- function(x,
plot_title = "",
subtitle = "U.S. Geological Survey"){
- if(!all(c("site_no", "year", "value") %in% names(x))){
+ if(!all(c("monitoring_location_id", "year", "value") %in% names(x))){
stop("Not all required columns are provided")
}
@@ -103,7 +103,7 @@ plot_normalized_data <- function(x,
plot_title = "",
subtitle = "U.S. Geological Survey"){
- if(!all(c("site_no", "year", "value") %in% names(x))){
+ if(!all(c("monitoring_location_id", "year", "value") %in% names(x))){
stop("Not all required columns are provided")
}
@@ -142,6 +142,7 @@ plot_normalized_data <- function(x,
#'
#' @param x aquifer data frame. Requires at least 3 columns. Two are required "site_no", "year",
#' and "value".
+#' @param site_info data frame returned from \code{\link[dataRetrieval]{read_waterdata_monitoring_location}}, must include columns "monitoring_location_id", "monitoring_location_name", and "geometry"
#' @param num_years integer number of years required
#' @param parameter_cd character, 5-digit parameter code, default is "72019".
#' @return leaflet object
@@ -149,25 +150,27 @@ plot_normalized_data <- function(x,
#' @export
#' @examples
#' aquifer_data <- aquifer_data
+#' site_info <- site_info
#' num_years <- 30
#'
-#' map_data <- map_hydro_data(aquifer_data, num_years)
+#' map_data <- map_hydro_data(aquifer_data, site_info, num_years)
#' map_data
-map_hydro_data <- function(x, num_years, parameter_cd = "72019"){
+map_hydro_data <- function(x, site_info, num_years, parameter_cd = "72019"){
x <- filter_sites(x, num_years, parameter_cd = parameter_cd)
- map_data <- prep_map_data(x)
-
+ site_info <- site_info |>
+ dplyr::filter(.data[["monitoring_location_id"]] %in% unique(x$monitoring_location_id))
+
+ map_data <- prep_map_data(site_info)
+ leaflet_crs <- "+proj=longlat +datum=WGS84"
+
map <- leaflet::leaflet(data = map_data) |>
leaflet::addProviderTiles("CartoDB.Positron") |>
- leaflet::addCircleMarkers(lat = ~dec_lat_va, lng = ~dec_long_va,
- radius = 3,
- fillOpacity = 1,
- popup = ~popup,
- stroke = FALSE) |>
- leaflet::fitBounds(~min(dec_long_va), ~min(dec_lat_va),
- ~max(dec_long_va), ~max(dec_lat_va))
+ leaflet::addCircleMarkers(radius = 3,
+ fillOpacity = 1,
+ popup = ~popup,
+ stroke = FALSE)
return(map)
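
A hedged sketch of supplying site_info for sites beyond the packaged example, using the dataRetrieval call the new @param documentation points to (assuming it accepts a vector of site IDs):

    site_info <- dataRetrieval::read_waterdata_monitoring_location(
      monitoring_location_id = unique(aquifer_data$monitoring_location_id))
    map_hydro_data(aquifer_data, site_info, num_years = 30)
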
diff --git a/_pkgdown.yml b/_pkgdown.yml
index b6a6d59..e511aa3 100644
--- a/_pkgdown.yml
+++ b/_pkgdown.yml
@@ -78,6 +78,7 @@ reference:
- L2701_example_data
- nat_aqfr_state
- aquifer_data
+ - create_targets_script
- get_aquifer_data
- get_state_data
- title: Utilities
diff --git a/data/aquifer_data.RData b/data/aquifer_data.RData
index 14531fd..99e7507 100644
Binary files a/data/aquifer_data.RData and b/data/aquifer_data.RData differ
diff --git a/inst/shiny/comp_plot.R b/inst/shiny/comp_plot.R
deleted file mode 100644
index a6f0fb0..0000000
--- a/inst/shiny/comp_plot.R
+++ /dev/null
@@ -1,48 +0,0 @@
-comp_plot <- reactive({
-
- validate(
- need(!is.null(rawData_data$data), "Please select a data set")
- )
-
- df <- rawData()
- pcode <- input$pcode
-
- y <- filter_sites(df,
- parameter_cd = input$pcode,
- start_year = input$start_year,
- end_year = input$end_year)
-
- if(nrow(y) == 0){
- showNotification("No sites have complete records within the start/end years",
- duration = 5)
- }
-
- comp_plot <- plot_composite_data(y,
- parameter_cd = pcode,
- plot_title = rawData_data$aquifer_cd)
-
- return(comp_plot)
-
-})
-
-comp_plot_out <- reactive({
- code_out <- paste0(setup(),'
-
-aquifer_data_filtered <- filter_sites(aquifer_data,
- parameter_cd = "', input$pcode, '",
- start_year = ', input$start_year, ',
- end_year = ', input$end_year, ')
-
-comp_plot <- plot_composite_data(aquifer_data_filtered,
- parameter_cd = "', input$pcode,'",
- plot_title = "',rawData_data$aquifer_cd,'")
-comp_plot
-# To save:
-# Fiddle with height and width (in inches) for best results:
-# Change file name extension to save as png.
-# ggplot2::ggsave(comp_plot, file="comp_plot.pdf",
-# height = 9,
-# width = 11)
- ')
- code_out
-})
\ No newline at end of file
diff --git a/inst/shiny/get_data.R b/inst/shiny/get_data.R
deleted file mode 100644
index 10d8501..0000000
--- a/inst/shiny/get_data.R
+++ /dev/null
@@ -1,105 +0,0 @@
-rawData_data <- reactiveValues(data = NULL,
- example_data = FALSE,
- aquifer_cd = NULL)
-
-observeEvent(input$example_data,{
- rawData_data$example_data <- TRUE
-
- aquifer_data <- HASP::aquifer_data
-
- updateNumericInput(session = session,
- inputId = "end_year", value = max(aquifer_data[["year"]], na.rm = TRUE) - 1,
- max = max(max(aquifer_data[["year"]], na.rm = TRUE), na.rm = TRUE))
- updateNumericInput(session = session,
- inputId = "start_year", value = min(aquifer_data$year, na.rm = TRUE) + 2,
- min = min(aquifer_data$year, na.rm = TRUE))
-
- rawData_data$data <- aquifer_data
- rawData_data$aquifer_cd <- "Basin and Range basin-fill aquifers"
- updateSelectInput(session = session, inputId = "aquiferCd",
- selected = "Basin and Range basin-fill aquifers")
-
-})
-
-observeEvent(input$get_data,{
- rawData_data$example_data <- FALSE
- orig_choice <- rawData_data$aquifer_cd
-
- showNotification("Loading", duration = NULL, id = "load")
-
- aquiferCd <- input$aquiferCd
-
- short_code <- summary_aquifers$nat_aqfr_cd[summary_aquifers$long_name == aquiferCd]
- end_date <- Sys.Date()
- parts <- strsplit(as.character(end_date), split = "-")[[1]]
- parts[[1]] <- as.character(as.numeric(parts[[1]]) - 31) #gives a buffer
- start_date <- paste(parts, collapse = "-")
-
- states <- unlist(summary_aquifers$states[summary_aquifers$long_name == aquiferCd])
-
- aquifer_data <- data.frame()
- site_data <- data.frame()
-
- for(state in states){
-
- id_message <- showNotification(paste("Getting data from: ", state),
- type = "message", duration = NULL)
-
- state_data <- tryCatch(
- expr = {
- HASP:::get_state_data(state = state,
- aquiferCd = short_code,
- startDate = start_date,
- endDate = end_date,
- parameter_cd = input$pcode)
- },
- error = function(e){
- cat(state, "errored \n")
- showNotification(paste("No data from: ", state),
- type = "message", duration = 5, id = state)
- }
- )
- removeNotification(id = id_message)
-
- if(!all(is.na(state_data$site_no))){
- state_data_sites <- dataRetrieval::readNWISsite(unique(state_data$site_no))
-
- state_data_sites <- state_data_sites %>%
- select(station_nm, site_no, dec_lat_va, dec_long_va, coord_datum_cd)
-
- aquifer_data <- bind_rows(aquifer_data, state_data)
- site_data <- bind_rows(site_data, state_data_sites)
- }
-
- }
-
- updateNumericInput(session = session,
- inputId = "end_year", value = max(aquifer_data$year, na.rm = TRUE),
- max = max(aquifer_data$year, na.rm = TRUE))
- updateNumericInput(session = session,
- inputId = "start_year", value = min(aquifer_data$year, na.rm = TRUE),
- min = min(aquifer_data$year, na.rm = TRUE))
-
- if(nrow(aquifer_data) == 0){
- showNotification(paste("Not enough data for: ", aquiferCd),
- type = "message", duration = 5)
- updateSelectInput(session = session, selected = orig_choice,
- inputId = "aquiferCd")
-
- } else {
- attr(aquifer_data, "siteInfo") <- site_data
- rawData_data$aquifer_cd <- aquiferCd
- rawData_data$data <- aquifer_data
- }
-
- removeNotification(id = state)
- removeNotification(id = "load")
-
-})
-
-
-rawData <- reactive({
-
- return(rawData_data$data)
-
-})
\ No newline at end of file
diff --git a/inst/shiny/modules.R b/inst/shiny/modules.R
deleted file mode 100644
index f6d0cdd..0000000
--- a/inst/shiny/modules.R
+++ /dev/null
@@ -1,54 +0,0 @@
-
-ggraph_w_downloaders <- function(id, init_text) {
- ns <- NS(id)
-
- tagList(
- shinycssloaders::withSpinner(plotOutput(ns('plot'))),
- fluidRow(
- column(3, downloadButton(ns('download_plot'), 'Download PNG')),
- column(3, downloadButton(ns('download_csv'), 'Download CSV'))
- ),
- h4("R Code:"),
- shinyAce::aceEditor(outputId = ns('code'), value = init_text,
- mode = "r", theme = "chrome", readOnly = TRUE)
- )
-}
-
-
-graph_download_code <- function(input, output, session,
- plot_gg, code_out, raw_data){
-
- ns <- session$ns
-
- output$plot <- renderPlot({
-
- validate(
- need(!is.null(raw_data()), "Please select a data set")
- )
-
- shinyAce::updateAceEditor(session, editorId = "code", value = code_out() )
-
- plot_gg()
-
- })
-
- output$download_plot <- downloadHandler(
-
- filename = "plot.png",
- content = function(file) {
- ggplot2::ggsave(file, plot = plot_gg(),
- device = "png", width = 11,
- height = 9)
- }
- )
-
- output$download_csv <- downloadHandler(
-
- filename = "plot_data.csv",
-
- content = function(file) {
- write.csv(plot_gg()[['data']], file, row.names = FALSE)
- }
- )
-
-}
diff --git a/inst/shiny/norm_plot.R b/inst/shiny/norm_plot.R
deleted file mode 100644
index 4bdf5ce..0000000
--- a/inst/shiny/norm_plot.R
+++ /dev/null
@@ -1,45 +0,0 @@
-norm_plot <- reactive({
-
- validate(
- need(!is.null(rawData_data$data), "Please select a data set")
- )
-
- df <- rawData()
- pcode <- input$pcode
- y <- filter_sites(df,
- parameter_cd = input$pcode,
- start_year = input$start_year,
- end_year = input$end_year)
-
- if(nrow(y) == 0){
- showNotification("No sites have complete records within the start/end years",
- duration = 5)
- }
- norm_plot <- plot_normalized_data(y,
- parameter_cd = pcode,
- plot_title = rawData_data$aquifer_cd)
-
- return(norm_plot)
-
-})
-
-norm_plot_out <- reactive({
- code_out <- paste0(setup(),'
-aquifer_data_filtered <- filter_sites(aquifer_data,
- parameter_cd = "', input$pcode, '",
- start_year = ', input$start_year, ',
- end_year = ', input$end_year, ')
-
-norm_plot <- plot_normalized_data(aquifer_data_filtered,
- parameter_cd = "',input$pcode,'",
- plot_title ="',rawData_data$aquifer_cd,'")
-norm_plot
-# To save:
-# Fiddle with height and width (in inches) for best results:
-# Change file name extension to save as png.
-# ggplot2::ggsave(comp_plot, file="norm_plot.pdf",
-# height = 9,
-# width = 11)
-')
- code_out
-})
\ No newline at end of file
diff --git a/inst/shiny/server.R b/inst/shiny/server.R
deleted file mode 100644
index db68613..0000000
--- a/inst/shiny/server.R
+++ /dev/null
@@ -1,116 +0,0 @@
-library(shiny)
-library(HASP)
-options(shiny.maxRequestSize = 200 * 1024^2)
-library(dplyr)
-library(tidyr)
-library(leaflet)
-
-source("modules.R",local=TRUE)
-
-shinyServer(function(input, output, session) {
-
-
- observe({
- if (input$close > 0) shiny::stopApp()
- })
-
- source("get_data.R",local=TRUE)$value
- source("comp_plot.R",local=TRUE)$value
- source("norm_plot.R",local=TRUE)$value
-
- output$mymap <- leaflet::renderLeaflet({
-
- map <- leaflet::leaflet() %>%
- leaflet::addProviderTiles("CartoDB.Positron") %>%
- leaflet::setView(lng = -83.5, lat = 44.5, zoom=6)
-
-
- })
-
- observe({
- validate(
- need(!is.null(rawData_data$data), "Please select a data set")
- )
- req(input$mainOut == "map")
-
- showNotification("Prepping map", id = "loadmap",
- type = "message")
-
- aquifer_data <- rawData()
-
- shinyAce::updateAceEditor(session, editorId = "map_code", value = map_code() )
-
- x <- filter_sites(aquifer_data, num_years = 30)
-
- if(nrow(x) == 0){
- x <- aquifer_data
- }
-
- map_data <- prep_map_data(x)
-
- map <- leafletProxy("mymap", data = map_data) %>%
- clearMarkers() %>%
- addCircleMarkers(lat=~dec_lat_va, lng=~dec_long_va,
- radius = 3,
- fillOpacity = 1,
- popup= ~popup,
- stroke=FALSE) %>%
- fitBounds(~min(dec_long_va), ~min(dec_lat_va),
- ~max(dec_long_va), ~max(dec_lat_va))
-
- removeNotification(id = "loadmap")
-
- map
-
- })
-
- callModule(graph_download_code, 'composite_graph',
- plot_gg = comp_plot,
- code_out = comp_plot_out,
- raw_data = reactive({rawData_data$data}))
-
- callModule(graph_download_code, 'normalized_graph',
- plot_gg = norm_plot,
- code_out = norm_plot_out,
- raw_data = reactive({rawData_data$data}))
-
- setup <- reactive({
- end_date <- Sys.Date()
- parts <- strsplit(as.character(end_date), split = "-")[[1]]
- parts[[1]] <- as.character(as.numeric(parts[[1]]) - 30)
- start_date <- paste(parts, collapse = "-")
- long_name <- input$aquiferCd
- aquiferCd <- summary_aquifers$nat_aqfr_cd[summary_aquifers$long_name == long_name]
-
- year_start <- input$start_year
- year_end <- input$end_year
-
- if(rawData_data$example_data){
- setup_code <- paste0('library(HASP)
-aquifer_data <- aquifer_data')
- } else {
- setup_code <- paste0('library(HASP)
-long_name <- "', input$aquiferCd ,'"
-aquiferCd <- summary_aquifers$nat_aqfr_cd[summary_aquifers$long_name == long_name]
-aquifer_data <- get_aquifer_data(aquiferCd = "',aquiferCd,'",
- startDate = "', start_date,'",
- endDate = "', end_date, '")')
- }
-
- setup_code
- })
-
- map_code <- reactive({
- paste0(setup(),'
-map_data <- map_hydro_data(aquifer_data,
- parameter_cd = "', input$pcode,'",
- num_years = 30)
-map_data')
-
- })
-
-
-
- session$onSessionEnded(stopApp)
-
-})
diff --git a/inst/shiny/ui.R b/inst/shiny/ui.R
deleted file mode 100644
index f2b0681..0000000
--- a/inst/shiny/ui.R
+++ /dev/null
@@ -1,93 +0,0 @@
-library(HASP)
-library(dplyr)
-library(shiny)
-library(shinydashboard)
-
-source("modules.R",local=TRUE)
-
-init_text <- "######################################
-# Setup:
-library(HASP)"
-
-header <- dashboardHeader(title = "HASP")
-
-sidebar <- dashboardSidebar(
- sidebarMenu(
- selectInput("aquiferCd", label = "Prinicple Aquifer",
- choices = summary_aquifers$long_name,
- selected = summary_aquifers$long_name[1],
- multiple = FALSE),
- actionButton("get_data", label = "Get Latest Data"),
- radioButtons("pcode",
- choices = c("62610", "62611", "72019"),
- selected = "72019",
- label = "Parameter Code"),
- numericInput("start_year",value = as.POSIXlt(Sys.Date())$year + 1870,
- label = "Starting Year"),
- numericInput("end_year",value = as.POSIXlt(Sys.Date())$year + 1900,
- label = "Ending Year"),
- menuItem("Source code", icon = icon("file-code-o"),
- href = "https://code.usgs.gov/water/stats/HASP"),
- actionButton("example_data", label = "Load Example Data")
- )
-)
-
-body <- dashboardBody(
- includeCSS("www/custom.css"),
- tabBox(width = 12, id = "mainOut",
- tabPanel(title = tagList("Composite Hydrograph", shiny::icon("chart-bar")),
- value = "comp_plot",
- ggraph_w_downloaders("composite_graph", init_text = init_text)
-
- ),
- tabPanel(title = tagList("Normalized Composite Hydrograph", shiny::icon("chart-bar")),
- value = "norm_plot",
- ggraph_w_downloaders("normalized_graph", init_text = init_text)
-
- ),
- tabPanel(title = tagList("Map", shiny::icon("map-marker")),
- value = "map",
- leaflet::leafletOutput("mymap",height = "500px"),
- h4("R Code:"),
- shinyAce::aceEditor(outputId = "map_code", value = init_text,
- mode = "r", theme = "chrome", readOnly = TRUE)
- )
-
- ),
- fluidRow(
- column(1, HTML('')
- ),
- column(11,
- h4("Disclaimer"),
- h5("This software is in the public domain because it contains materials that originally came from the U.S. Geological Survey (USGS), an agency of the United States Department of Interior. For more information, see the official USGS copyright policy at https://www.usgs.gov/visual-id/credit_usgs.html#copyright
- Although this software program has been used by the USGS, no warranty, expressed or implied, is made by the USGS or the U.S. Government as to the accuracy and functioning of the program and related program material nor shall the fact of distribution constitute any such warranty, and no responsibility is assumed by the USGS in connection therewith.
- This software is provided 'AS IS.'"))
-
- )
-)
-
-dashboardPage(header, sidebar, body)
-
-ui <- tagList(
- tags$header(class = "navbar",
- tags$a(href = 'https://www.usgs.gov/', type = "button",
- img(src = 'logo.png', target="_blank",
- title = "USGS", height = "60px"),
- style = "float: left;
- padding: 10px 50px 10px 50px;"),
- tags$li(class = "dropdown", tags$button(
- id = 'close',
- type = "button",
- class = "btn action-button",
- style='color: #000000;
- float: right;
- margin-right:13px;margin-top:7px;margin-bottom:7px',
- onclick = "setTimeout(function(){window.close();},500);", # close browser
- "Stop Shiny App"
- ))),
- dashboardPage(header = dashboardHeader(disable = TRUE),
- sidebar = sidebar,
- body = body),
- tags$footer(htmltools::includeHTML("www/footer.html") )
-)
diff --git a/inst/shiny/www/custom.css b/inst/shiny/www/custom.css
deleted file mode 100644
index 32021e8..0000000
--- a/inst/shiny/www/custom.css
+++ /dev/null
@@ -1,362 +0,0 @@
-/* ================INSTRUCTIONS=================*/
-/* By changing the info below, you can reduce the size of the logo or hide the search box. You can also override the standard font characteristics if you would like to use your own custom styles. In order for your changes to work though, you MUST include a reference in your HTML pages to both the common CSS file and custom CSS file in that order.
-
-Instructions are provided below for customizing these classes. */
-
-/* =============Width===============*/
-/* BY DEFAULT, THERE IS NO MAX WIDTH:
-If you want the want to restrict the width of the page, remove the comment out slashes and astricks surrounding the ".tmp-container {max-width: 1170px;}". you can change the 1170px to a smaller/larger max width if you'd like */
-
-/* .tmp-container {
- max-width: 1170px;
-} */
-
-/* =============Search===============*/
-/* BY DEFAULT, Search box is displayed:
-If you want the to hide the search, remove the comment out slashes and astricks surrounding the ".search-box {display:none;}" below. */
-
-/*
-#search-box {
- display: none;
-}
-*/
-
-/* =============LOGO===============*/
-/* THE DEFAULT LOGO HEIGHT IS 65PX:
-If you want the logo to be smaller (50px), comment out the ".logo-header img {height 65px;}" below and remove the comment out slashes and astricks surrounding the ".logo-header img {height: 50px...margin-top: 18px;}" and the header search input (so the search box size is reduced too). 50px is the MINIMUM HEIGHT for the logo. */
- .logo-header img {
- height: 65px;
-}
-/*
-.logo-header img {
- height: 50px;
-}
-.header-search input[type="search"] {
- height: 30px;
- margin-top: 16px;
-}
-.header-search button[type="submit"] {
- width: 26px;
- height: 30px;
- margin-top: 18px;
-}
-*/
-
-/* =============STANDARD CONTENT===============*/
-/* TO CHANGE THE TEXT SIZE OF THE CONTENT, FONT, ETC:
-By default, USGS has set the font size, family, etc. in order to provide a consistent size for content across all pages. If you would prefer not to have any of these pre-defined formats, you can change them below. NOTE: header and footer will not be changed. */
-#maincontent {
- font-family: 'Source Sans Pro',sans-serif;
- font-size: 14px;
- line-height: 1.42857;
- color: #333333;
- background-color: #fff;
- padding-left: 15px;
- padding-right: 15px;
-}
-
-/* =============SEARCH===============*/
-/* THIS HIDES THE SEARCH BOX ON VERY SMALL DEVICES:
-For simplification, search bar is visible on larger screens but is hidden on small screens. If you would prefer not to have the search box at all, you can remove the "@media (max-width:500px) {" and the second closing "}". below */
-@media (max-width:500px) {
-.header-search form {
- display: none}
-}
-
-/* =============SOCIAL MEDIA===============*/
-/* If you would prefer not to have the social media links, you can remove the comment out slashes and astricks surrounding the content below */
- /* .footer-social-links {
- display: none} */
-
-@charset "UTF-8";
-/* CSS Document */
-
-footer, #navbar {
- font-family: 'Source Sans Pro',sans-serif;
- margin: 0;
- position: relative;
-}
-
-*, *:before, *:after {
- -webkit-box-sizing: border-box;
- -moz-box-sizing: border-box;
- box-sizing: border-box;
-}
-
-footer, header, main, nav, div {
- display: block;
-}
-
-.tmp-container {
- margin-right: auto;
- margin-left: auto;
- padding-left: 15px;
- padding-right: 15px;
-}
-.tmp-container a {
- text-decoration: none;
- background: transparent;
-}
-.tmp-container a:-webkit-any-link {
- cursor: auto;
-}
-.tmp-container:before, .tmp-container:after {
- content: " ";
- display: table;
-}
-.tmp-container:after {
- clear: both;
-}
-
-hr {
- width: 100%;
- margin-top: 42px;
- clear: both;
-}
-@media only screen and (max-width:490px) {
- hr {
- margin-top: 60px;
- }
-}
-
-/* header (only) nav */
-.header-nav {
- background: #00264c;
-}
-
-.logo-header img {
- margin-top: 8px;
- margin-bottom: 8px;
- border: 0;
- max-width: 100%;
- height: auto;
-}
-
-.header-search form {
- float: right;
- width: auto;
- padding-top: 0;
- padding-bottom: 0;
- margin-right: 0;
- margin-left: 0;
- border: 0;
- -webkit-box-shadow: none;
- box-shadow: none;
-}
-
-.header-search input[type="search"] {
- height: 30px;
- width: 260px;
- margin-top: 18px;
-}
-.header-search button[type="submit"] {
- position: absolute;
- top: 0;
- bottom: 0;
- right: 15px;
- border: 0;
- color: #666;
- background: transparent;
- padding: 0;
- width: 39px;
- height: 39px;
- margin-top: 21px;
-}
-
-input{line-height:normal}
-input[type="search"]{-webkit-appearance:textfield;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}
-input[type="search"]::-webkit-search-cancel-button,input[type="search"]::-webkit-search-decoration{-webkit-appearance:none}
-input,button{font-family:inherit;font-size:inherit;line-height:inherit}
-
-
-/* Always float the navbar header */
-.navbar-header {
- float: left;
-}
-
-/* footer (only) css */
-
-.menu.nav {
- margin-bottom: 0;
- padding-left: 0;
- list-style: none;
-}
-
-.nav:before, .nav:after {
- content: none;
-}
-footer.footer {
- background: #00264c;
- padding-bottom: 0px;
- position: relative;
- width: 100%;
- border-top: 1px solid #E5E5E5;
-}
-
-footer.footer .footer-doi {
- padding-right: 0;
- padding-left: 0;
-}
-
-footer.footer .footer-doi ul.menu li a {
- padding: 5px 0px;
- color: #ffffff;
- float: left;
- font-size: 12px;
-}
-
-footer.footer .footer-doi ul.menu li:first-of-type {
- padding-left: 0px;
-}
-
-footer.footer .footer-doi ul.menu li {
- float: left;
- padding-right: 2px;
- padding-left: 5px;
-}
-
-footer.footer .footer-doi ul.menu li:after {
- content: "|";
- color: #ffffff;
- float: left;
- padding-top: 5px;
- padding-left: 7px;
- font-size: 12px;
-}
-
-footer.footer .footer-doi ul.menu li:last-child:after {
- content: "";
-}
-
-footer.footer .footer-wrap .menu.nav a {
- padding: 4px 0px;
- color: #ffffff;
- font-size: 12px;
-}
-
-footer.footer .footer-social-links {
- color: #ffffff;
- font-size: 12px;
- padding-right: 0;
-}
-
-footer.footer .footer-social-links ul {
- float: right;
- margin-top: 0px;
-}
-
-footer.footer .footer-social-links ul li.follow {
- padding-top: 6px;
- padding-right: 5px;
-}
-
-footer.footer .footer-social-links ul li {
- float: left;
- height: 24px;
- margin: 0 2px;
- list-style: none;
- list-style-type: none;
-}
-
-footer.footer .footer-social-links ul li:last-of-type {
- margin-right: 0px;
-}
-
-footer.footer .footer-social-links ul li a i {
- color:#fff;
- font-size: 24px;
-}
-
-/* Font Awesome Social Media icons */
-/*!
- * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome
- * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License)
- */
-/* FONT PATH
- * -------------------------- */
-/*@font-face {
- font-family: 'FontAwesome';
- src: url('../fonts/fontawesome-webfont.eot');
- src: url('../fonts/fontawesome-webfont.eot') format('embedded-opentype'), url('../fonts/fontawesome-webfont.woff2') format('woff2'), url('../fonts/fontawesome-webfont.woff') format('woff'), url('../fonts/fontawesome-webfont.ttf') format('truetype'), url('../fonts/fontawesome-webfont.svg') format('svg');
- font-weight: normal;
- font-style: normal;
-}
-.fa-wrapper {
- position: relative
-}
-.fa {
- display: inline-block;
- font: normal normal normal 14px/1 FontAwesome;
- font-size: inherit;
- text-rendering: auto;
- -webkit-font-smoothing: antialiased;
- -moz-osx-font-smoothing: grayscale;
-}
-.fa-ul {
- padding-left: 0;
- margin-left: 2.14285714em;
- list-style-type: none;
-}
-.fa-ul > li {
- position: relative;
-}
-.fa-li {
- position: absolute;
- left: -2.14285714em;
- width: 2.14285714em;
- top: 0.14285714em;
- text-align: center;
-}
-.fa-search:before {
- content: "\f002";
-}
-.fa-twitter-square:before {
-Content: "\f081";
-}
-.fa-facebook-square:before {
-Content: "\f082";
-}
-.fa-github:before {
- content: "\f09b";
-}
-.fa-flickr:before {
- content: "\f16e";
-}
-.fa-youtube-play:before {
- content: "\f16a";
-}
-.fa-instagram:before {
- content: "\f16d";
-}
-*/
-.only {
- position: absolute;
- width: 1px;
- height: 1px;
- padding: 0;
- margin: -1px;
- overflow: hidden;
- clip: rect(0, 0, 0, 0);
- border: 0;
-}
-[role="button"]{cursor:pointer}
-
-.navbar {
- background: bottom/contain no-repeat,linear-gradient(94.45deg,#00264c 1.55%,#00264c 98.46%);
- transition: all 0.3s ease;
- box-shadow: 0 0 15px 5px rgba(0,0,0,.15);
- border: none;
- min-height: 40px;
-}
-
-.navbar-light .navbar-brand {
- color: white !important;
-}
-
-.navbar-nav a:hover {
- text-decoration: underline;
-}
-
-caption {
- color: black !important
-}
diff --git a/inst/shiny/www/footer.html b/inst/shiny/www/footer.html
deleted file mode 100644
index 0daee9f..0000000
--- a/inst/shiny/www/footer.html
+++ /dev/null
@@ -1,82 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-