I want to fit a hierarchical logistic regression model (subject-level coefficients drawn from group-level distributions).
Here is my Stan model:
data {
  int<lower=1> nSub;                 // number of subjects
  int<lower=1> nx;                   // number of coefficients (excluding the intercept)
  int<lower=1> nobs;                 // number of observations (trials across all subjects)
  int<lower=1> indT[nSub,2];         // first and last trial index for each subject
  int<lower=0, upper=1> y[nobs];     // binary response on each trial
  real x[nobs,nx];                   // s0,s1,s2...d1,d2...
}
parameters {
  // group-level parameters
  vector[4] mu_pr;
  vector<lower=0>[4] sigma;
  // subject-level raw parameters, standard normal, reparameterized below ("Matt trick")
  vector[nSub] beta0_pr;             // intercept
  vector[nSub] beta_s0_pr;           // effect of current stimulus
  vector[nSub] beta_s1_pr;           // effect of immediately preceding stimulus
  vector[nSub] beta_c1_pr;           // effect of immediately preceding choice
}
transformed parameters {
  // subject-level parameters
  vector<lower=-10,upper=10>[nSub] beta0;
  vector<lower=-10,upper=10>[nSub] beta_s0;
  vector<lower=-10,upper=10>[nSub] beta_s1;
  vector<lower=-10,upper=10>[nSub] beta_c1;
  // non-centered parameterization ("Matt trick");
  // note that Phi_approx maps each coefficient into (0,1)
  for (i in 1:nSub) {
    beta0[i]   = Phi_approx( mu_pr[1] + sigma[1] * beta0_pr[i] );
    beta_s0[i] = Phi_approx( mu_pr[2] + sigma[2] * beta_s0_pr[i] );
    beta_s1[i] = Phi_approx( mu_pr[3] + sigma[3] * beta_s1_pr[i] );
    beta_c1[i] = Phi_approx( mu_pr[4] + sigma[4] * beta_c1_pr[i] );
  }
}
model {
  // priors: hyperparameters
  mu_pr ~ normal(0,10);
  sigma ~ cauchy(0,5);
  // priors: individual raw parameters
  beta0_pr ~ normal(0,1);
  beta_s0_pr ~ normal(0,1);
  beta_s1_pr ~ normal(0,1);
  beta_c1_pr ~ normal(0,1);
  // likelihood: subject loop and trial loop
  for (i in 1:nSub) {
    int Ti = indT[i,1];
    int Tf = indT[i,2];
    for (t in Ti:Tf) {
      real ix;
      // intercept + current stimulus + previous stimulus + previous choice
      ix = beta0[i] + beta_s0[i]*x[t,1] + beta_s1[i]*x[t,2] + beta_c1[i]*x[t,3];
      y[t] ~ bernoulli_logit(ix);
    }
  }
}
My question: in the model block there are two nested for loops, one over subjects and one over trials.
Is there a more efficient (e.g. vectorized) way to implement these loops?
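For example, I was wondering whether something along the following lines would work to remove the inner trial loop. This is only an untested sketch: it assumes x is declared as matrix[nobs,nx] rather than real x[nobs,nx], it uses block() to pull out each subject's rows (only the first three columns, as in the loop above), and I think the [ ... ]' row-vector syntax needs a reasonably recent Stan version.

model {
  // ... priors unchanged ...
  for (i in 1:nSub) {
    int Ti = indT[i,1];
    int Tf = indT[i,2];
    int n = Tf - Ti + 1;
    // one matrix-vector product per subject instead of a loop over trials
    vector[n] ix = beta0[i]
                   + block(x, Ti, 1, n, 3) * [beta_s0[i], beta_s1[i], beta_c1[i]]';
    y[Ti:Tf] ~ bernoulli_logit(ix);
  }
}

(If keeping x as a 2-D real array is preferable, I guess I could instead fill ix with a short inner loop and still move the single bernoulli_logit statement outside of it.)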
Thanks in advance!