-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy path07_thin_drms.R
More file actions
112 lines (87 loc) · 4.01 KB
/
07_thin_drms.R
File metadata and controls
112 lines (87 loc) · 4.01 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
library(tidyverse)
library(here)

# Control file written by the upstream fitting scripts: one row per model
# run, with an `id` column naming each run's results sub-directory.
ctrl_file <- read_csv(file = here("ctrl_file_used.csv"))

# Toggle parallel execution across model runs.
run_in_parallel <- TRUE
if (run_in_parallel) {
  library(parallel)
  # NOTE(review): hard-coded worker count; consider
  # min(100, parallel::detectCores(), nrow(ctrl_file)) so small machines or
  # short control files don't over-allocate workers.
  num_cores <- 100
}
# Thin and tidy posteriors ---------------------------------------------------
if (run_in_parallel) {

  # Summarise and thin the posterior draws for one model run.
  #
  # k: row index into `ctrl_file`; `ctrl_file$id[k]` names the run's
  #    sub-directory under results/.
  # Side effects: writes fixed_params_averaged.rds, five *_thin.rds latent-
  # state files, and sigma_r.rds into results/<id>/.
  # Returns the run id invisibly (collected by parLapply).
  thin_posteriors <- function(k) {
    run_id <- ctrl_file$id[k]
    results_path <- here("results", run_id)
    tmp_model <- read_rds(file.path(results_path, "stan_model_fit.rds"))

    # Average a tidy draws frame across chains within each iteration,
    # returning a (value, param) tibble. Defined inside thin_posteriors so
    # clusterExport of this one function is enough for the workers.
    average_chains <- function(draws, param_name) {
      draws %>%
        group_by(.iteration) %>%
        summarise(value = mean(.value), .groups = "drop") %>%
        select(value) %>%
        mutate(param = param_name)
    }

    # Static (scalar) posteriors, averaged across chains.
    d <- average_chains(gather_draws(tmp_model, d), "d")
    Topt <- average_chains(gather_draws(tmp_model, Topt), "Topt")
    width <- average_chains(gather_draws(tmp_model, width), "width")
    sigma_obs <- average_chains(gather_draws(tmp_model, sigma_obs), "sigma_obs")
    # `i` inside filter() is the index column created by gather_draws
    # (data-masking), not the run id; only the first alpha element is kept.
    alpha <- average_chains(
      gather_draws(tmp_model, alpha[i]) %>% filter(i == 1),
      "alpha"
    )
    est_m <- average_chains(gather_draws(tmp_model, est_m), "est_m")
    # Exponentiate so mean recruits are not in log space.
    mean_recruits <- average_chains(
      gather_draws(tmp_model, log_mean_recruits),
      "mean_recruits"
    ) %>%
      mutate(value = exp(value))

    fixed <- bind_rows(d, Topt, width, sigma_obs, mean_recruits, alpha, est_m)
    write_rds(fixed, file = file.path(results_path, "fixed_params_averaged.rds"))

    # Thin the latent states (pop dynamics over space and time) to one
    # sample every 50 iterations on each chain, writing <stem>_thin.rds.
    # NOTE: the original read these with here(results_path, ...); since
    # results_path is already absolute, here() prefixed the project root a
    # second time and produced a doubled path — file.path() is correct.
    thin_and_save <- function(stem) {
      thinned <- read_rds(file.path(results_path, paste0(stem, ".rds"))) |>
        filter(.draw %% 50 == 0)
      write_rds(
        thinned,
        file = file.path(results_path, paste0(stem, "_thin.rds"))
      )
    }
    thin_and_save("density_hat")
    thin_and_save("centroid_proj")
    thin_and_save("density_obs_proj")
    thin_and_save("range_quantiles_proj")
    thin_and_save("range_quantiles")

    # sigma_r is indexed by year, so spread_draws is used and the mean is
    # taken over both chains and years within each iteration.
    sigma_r <- tidybayes::spread_draws(tmp_model, sigma_r[year]) %>%
      group_by(.iteration) %>%
      summarise(value = mean(sigma_r), .groups = "drop") %>%
      select(value) %>%
      mutate(param = "sigma_r")
    write_rds(sigma_r, file = file.path(results_path, "sigma_r.rds"))

    # (The original called rm(list = ls()) here; inside a function that only
    # clears locals, which are freed on return anyway, so it is dropped.)
    invisible(run_id)
  }

  cl <- makeCluster(num_cores)
  clusterExport(cl, c("ctrl_file", "thin_posteriors"))
  clusterEvalQ(cl, {
    library(tidyverse)
    library(here)
    library(tidybayes)
  })
  parLapply(cl, seq_len(nrow(ctrl_file)), thin_posteriors)
  stopCluster(cl)
}