Fitting a Hidden Markov model with hierarchical emission parameters

That seemed to work - thanks! Here’s the code I implemented for a simpler version of this, where there are 3 hard-coded states, the emissions are Poisson, and the backward algorithm is used to compute the probability of a single observation sequence (I found it easier to avoid negative-infinity log probabilities when working with the backward algorithm).

data {
    int<lower=1> K; // Length of observation list
    int<lower=0> u[K]; // Observation list
    simplex[3] delta; // Initial Probabilities (assumed to be [1, 0, 0])
}
parameters {
    real<lower=0, upper=1> a;
    real<lower=0, upper=1> b;
    vector<lower=0, upper=10>[3] lambdas;
}
transformed parameters {
    simplex[3] theta[3]
      = { [a, 1 - a, 0]',
          [0, b, 1 - b]',
          [0, 0, 1]' };
}
model {
    int nonzeros[3]; // number of nonzero entries in each row of theta
    vector[3] f[K];  // backward log probabilities, including the emission at each time
    vector[3] f_n;
    nonzeros[1] = 2;
    nonzeros[2] = 2;
    nonzeros[3] = 1;
    lambdas ~ uniform(0, 10);
    // Base case: log emission probability of the last observation in each state
    for (i in 1:3)
        f_n[i] = poisson_lpmf(u[K] | lambdas[i]);
    f[K] = f_n;
    // Backward recursion from time K-1 down to 1
    for (i in 1:K-1) {
        int ind = K - i;
        vector[3] f_i;
        for (j in 1:3) {
            real probs[nonzeros[j]];
            int count = 1;
            for (k in 1:3) {
                if (theta[j, k] > 0) {
                    // stay on the log scale: log transition prob plus backward log prob
                    probs[count] = log(theta[j, k]) + f[ind + 1, k];
                    count = count + 1;
                }
            }
            f_i[j] = poisson_lpmf(u[ind] | lambdas[j]) + log_sum_exp(probs);
        }
        f[ind] = f_i;
    }
    print(f[1, 1]);
    target += log(delta[1]) + f[1, 1];
}
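
In case it helps anyone trying to reproduce this, here is a minimal sketch of how one might simulate data from this left-to-right Poisson HMM with numpy and then fit the program above with cmdstanpy. The file name hmm_backward.stan and the "true" parameter values are placeholders made up for illustration, not values from the original post.

import numpy as np
from cmdstanpy import CmdStanModel

rng = np.random.default_rng(1)

# Illustrative "true" values (assumptions, purely for a recovery check)
a_true, b_true = 0.9, 0.85
lambdas_true = np.array([1.0, 4.0, 8.0])
K = 200

# Left-to-right transition matrix matching theta in the Stan program
theta = np.array([
    [a_true, 1.0 - a_true, 0.0],
    [0.0,    b_true,       1.0 - b_true],
    [0.0,    0.0,          1.0],
])

# Simulate the hidden chain (starting in state 1, as delta = [1, 0, 0]) and Poisson emissions
states = np.empty(K, dtype=int)
states[0] = 0
for t in range(1, K):
    states[t] = rng.choice(3, p=theta[states[t - 1]])
u = rng.poisson(lambdas_true[states])

data = {"K": K, "u": u.tolist(), "delta": [1.0, 0.0, 0.0]}

# "hmm_backward.stan" is a placeholder name for the program above
model = CmdStanModel(stan_file="hmm_backward.stan")
fit = model.sample(data=data, chains=4, seed=1)
print(fit.summary().loc[["a", "b", "lambdas[1]", "lambdas[2]", "lambdas[3]"]])

If the model is working as intended, the posterior summaries for a, b, and lambdas should land reasonably close to the simulated values.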