The basic usage of this package is demonstrated below.

1) Normal model

1.1) Example data


N <- 2^10                          # length of the simulated sequence
true_T <- rbind(c(0.95,  0.05,  0),        # true transition matrix
                c(0.025, 0.95,  0.025),
                c(0,     0.05,  0.95))

true_means <- c(-5, 0, 5)          # true state means
true_sd <- 1.5                     # shared emission standard deviation

simdata_full <- hmm_simulate_normal_data(L = N,
                                         mat_T = true_T,
                                         means = true_means,
                                         sigma = true_sd)
simdata <- simdata_full$data
plot(density(simdata), main = "")
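
The hidden chain has a stationary distribution determined by `true_T`, so the marginal density of the observations is (approximately) a three-component Normal mixture with those stationary weights. The short sketch below is base R only and uses just the objects defined above; it overlays the implied mixture density on the empirical one.

# Stationary distribution of the true transition matrix
# (left eigenvector of true_T for eigenvalue 1)
ev <- eigen(t(true_T))
stat_dist <- Re(ev$vectors[, 1])
stat_dist <- stat_dist / sum(stat_dist)

# Implied marginal density: mixture of the three Normal emission densities
xs <- seq(min(simdata), max(simdata), length.out = 500)
mix_dens <- rowSums(sapply(seq_along(true_means), function(k) {
  stat_dist[k] * dnorm(xs, mean = true_means[k], sd = true_sd)
}))

plot(density(simdata), main = "")
lines(xs, mix_dens, lty = 2)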

1.2) MCMC Simulation
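
Below, `generate_random_T()` is used to draw a random prior transition matrix for the inferred number of states. If you would rather encode prior beliefs about state persistence directly, a hand-specified row-stochastic matrix of the same dimension should work in its place. A minimal, purely illustrative sketch (the seed value and `prior_T_manual` are not part of the original example):

set.seed(42)                                   # for reproducibility of the random prior
prior_T_manual <- rbind(c(0.90, 0.08, 0.02),   # hand-specified prior transition matrix
                        c(0.05, 0.90, 0.05),
                        c(0.02, 0.08, 0.90))
stopifnot(all.equal(rowSums(prior_T_manual), rep(1, 3)))  # rows must sum to 1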

# Set priors
n_states_inferred <- 3
prior_T <- generate_random_T(n_states_inferred)
prior_means <- c(-7, -1, 12)
prior_sd <- 3

# MCMC settings
iter <- 1500
warmup <- floor(iter * 0.2)
print_params <- FALSE
verbose <- FALSE

# Run MCMC
res_normal <- hmm_mcmc_normal(data = simdata,
                              prior_T = prior_T,
                              prior_means = prior_means,
                              prior_sd = prior_sd,
                              iter = iter,
                              warmup = warmup,
                              print_params = print_params,
                              verbose = verbose)
res_normal
#> Model: HMM Normal 
#> Type: MCMC 
#> Iter: 1500 
#> Warmup: 300 
#> Thin: 1 
#> States: 3

summary_res_normal <- summary(res_normal)
#> Estimated means:
#>     mean[1]     mean[2]     mean[3] 
#> -4.84206407 -0.07710225  5.07102243 
#> 
#> Estimated standard deviation:
#> 1.518011
#> 
#> Estimated transition rates:
#>            1          2          3
#> 1 0.94658255 0.05341745 0.00000000
#> 2 0.03160574 0.94588114 0.02251312
#> 3 0.00000000 0.06168916 0.93831084
#> 
#> Number of windows assigned to hidden states:
#>   1   2   3 
#> 310 524 190 
#> 
#> Approximate Kullback-Leibler divergence between observed and estimated distributions:
#> -0.0006399195
#> 
#> Log Likelihood:
#>         mean           sd       median 
#> -2087.331615     1.830004 -2086.963357 
#> 
#> P-value of t-test for difference between means of states (stepwise):
#> [1] 1.719287e-229 6.823092e-184

1.3) Diagnostics

plot(res_normal)
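
Beyond the built-in diagnostic plot, a simple informal robustness check is to rerun the sampler with a different randomly drawn prior transition matrix and confirm that the posterior summaries are essentially unchanged. A sketch using only the objects and settings defined above:

prior_T_alt <- generate_random_T(n_states_inferred)   # a second random prior
res_normal_alt <- hmm_mcmc_normal(data = simdata,
                                  prior_T = prior_T_alt,
                                  prior_means = prior_means,
                                  prior_sd = prior_sd,
                                  iter = iter,
                                  warmup = warmup,
                                  print_params = print_params,
                                  verbose = verbose)
summary(res_normal_alt)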

2) Gamma-Poisson model

2.1) Example data


L1 <- 2^13                          # length of the simulated sequence
true_T1 <- rbind(c(0.99, 0.01, 0),  # true transition matrix
                 c(0.01, 0.98, 0.01),
                 c(0,    0.01, 0.99))

true_betas1 <- 1 / c(0.2, 1.5, 9)   # true rate parameters (one per state)
true_alpha1 <- 1.3                  # true shape parameter (shared across states)

simdata1full <- hmm_simulate_gamma_poisson_data(L = L1,
                                                mat_T = true_T1,
                                                betas = true_betas1,
                                                alpha = true_alpha1)

simdata1 <- simdata1full$data
hist(simdata1, breaks = 50, main = "")
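
Under the shape and rate parameterisation apparently used here (gamma-distributed Poisson rates with shape `alpha` and state-specific rates `betas`), the expected count in state k is `alpha / betas[k]`. A quick sketch of the state means implied by the true parameters; these line up with the estimated means reported in the next section:

# State means implied by the true parameters: shape / rate
true_means1 <- true_alpha1 / true_betas1
true_means1   # c(0.26, 1.95, 11.70)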

2.2) MCMC Simulation

# MCMC settings
iter <- 4000
warmup <- floor(iter * 0.55)
print_params <- FALSE
verbose <- TRUE

# Set priors
n3_states_inferred <- 3
prior3_T <- generate_random_T(n3_states_inferred)

# Crude method-of-moments guess for alpha: for a gamma-Poisson (negative binomial)
# the variance is mean + mean^2 / alpha, so alpha ~ mean^2 / (var - mean); dividing
# (var - mean) by 3 is a rough adjustment for pooling over the 3 hidden states
prior_alpha3 <- mean(simdata1)^2 / ((var(simdata1) - mean(simdata1)) / 3)

prior_betas3 <- c(5, 3, 1)          # prior rate parameters (one per state)

res_gamma_poisson <- hmm_mcmc_gamma_poisson(data = simdata1,
                                            prior_T = prior3_T,
                                            prior_betas = prior_betas3,
                                            prior_alpha = prior_alpha3,
                                            iter = iter,
                                            warmup = warmup,
                                            print_params = print_params,
                                            verbose = verbose)
#> 10%
#> 20%
#> 30%
#> 40%
#> 50%
#> 60%
#> 70%
#> 80%
#> 90%
#> 100%

res_gamma_poisson
#> Model: HMM Gamma-Poisson 
#> Type: MCMC 
#> Iter: 4000 
#> Warmup: 2200 
#> Thin: 1 
#> States: 3

summary_res_gamma_poisson <- summary(res_gamma_poisson)
#> Estimated betas:
#>   beta[1]   beta[2]   beta[3] 
#> 5.5016732 0.6867794 0.1154821 
#> 
#> Estimated alpha:
#> 1.312733
#> 
#> Estimated means:
#> 0.2390537 1.912613 11.37133
#> 
#> Estimated transition rates:
#>            1          2          3
#> 1 0.98861753 0.01138247 0.00000000
#> 2 0.01340607 0.97459836 0.01199558
#> 3 0.00000000 0.01200495 0.98799505
#> 
#> Number of windows assigned to hidden states:
#>    1    2    3 
#> 3055 2560 2577 
#> 
#> Kullback-Leibler divergence between observed and estimated distributions:
#> 0.02734883
#> 
#> Log Likelihood:
#>          mean            sd        median 
#> -16040.377771      1.629396 -16040.063964 
#> 
#> P-value of poisson test for difference between rates of states (stepwise):
#> [1] 0 0

2.3) Diagnostics

plot(res_gamma_poisson)
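
As a further informal, posterior-predictive style check, one can simulate a new dataset of the same length from the estimated parameters using the same simulation function as above, and compare it with the observed data. The values below are copied (and rounded) from the summary printed in section 2.2, so this is only a rough sketch:

# Rounded point estimates taken from summary(res_gamma_poisson) above
est_T <- rbind(c(0.9886, 0.0114, 0.0000),
               c(0.0134, 0.9746, 0.0120),
               c(0.0000, 0.0120, 0.9880))
est_betas <- c(5.50, 0.687, 0.115)
est_alpha <- 1.31

ppc <- hmm_simulate_gamma_poisson_data(L = L1,
                                       mat_T = est_T,
                                       betas = est_betas,
                                       alpha = est_alpha)

oldpar <- par(mfrow = c(1, 2))
hist(simdata1, breaks = 50, main = "observed")
hist(ppc$data, breaks = 50, main = "simulated from estimates")
par(oldpar)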