Here is my Stan code.
functions {
  real gev_lpdf(real y, real mu, real sigma, real xi) {
    real z;
    z = 1 + xi * (y - mu) / sigma;
    if (xi != 0) {
      if (z <= 0) {
        return negative_infinity(); // outside the support, return -inf
      } else {
        return -log(sigma) - (1 + 1 / xi) * log(z) - pow(z, -1 / xi);
      }
    } else {
      // when xi = 0, this reduces to the Gumbel distribution
      return -log(sigma) - (y - mu) / sigma - exp(-(y - mu) / sigma);
    }
  }
}
data {
  int<lower=0> N;                    // total sample size = 305
  int<lower=0> J;                    // number of locations = 5
  int<lower=1, upper=J> location[N]; // site index for each observation
  int<lower=0> K_mu;
  //int<lower=0> K_sigma;
  matrix[N, K_mu] X_mu;
  //matrix[N, K_sigma] X_sigma;
  real y[N];
}
parameters {
  // location parameter
  real alpha_mu_0;
  real epsilon_mu[J];
  vector[K_mu] alpha_mu;
  // scale parameter
  real alpha_sigma_0;
  real epsilon_sigma[J];
  //vector[K_sigma] alpha_sigma;
  // shape parameter
  real alpha_xi_0;
  real epsilon_xi[J];
  // hyperparameters for the hierarchical priors
  real<lower=0> sigma_epsilon_mu;    // standard deviation of epsilon_mu
  real<lower=0> sigma_epsilon_sigma; // standard deviation of epsilon_sigma
  real<lower=0> sigma_epsilon_xi;    // standard deviation of epsilon_xi
}
transformed parameters {
  real<lower=0> sigma2_epsilon_mu; // reciprocal of the variance, i.e. the precision
  sigma2_epsilon_mu = 1 / square(sigma_epsilon_mu);
  real<lower=0> sigma2_epsilon_sigma;
  sigma2_epsilon_sigma = 1 / square(sigma_epsilon_sigma);
  real<lower=0> sigma2_epsilon_xi;
  sigma2_epsilon_xi = 1 / square(sigma_epsilon_xi);
}
model {
  // priors for the location-level random effects
  epsilon_mu ~ normal(0, sigma_epsilon_mu);
  epsilon_sigma ~ normal(0, sigma_epsilon_sigma);
  epsilon_xi ~ normal(0, sigma_epsilon_xi);
  sigma2_epsilon_mu ~ gamma(0.01, 0.01);
  sigma2_epsilon_sigma ~ gamma(0.01, 0.01);
  sigma2_epsilon_xi ~ gamma(0.01, 0.01);
  // weakly informative priors for the intercepts
  alpha_mu_0 ~ normal(0, 1000);
  alpha_sigma_0 ~ normal(0, 1000);
  alpha_xi_0 ~ uniform(-1.0, 1.0);
  // weakly informative priors for the covariate coefficients
  alpha_mu ~ normal(0, 1000);
  //alpha_sigma ~ normal(0, 1000);
  // likelihood (GEV distribution)
  for (i in 1:N) {
    real mu = alpha_mu_0 + X_mu[i] * alpha_mu + epsilon_mu[location[i]];
    real log_sigma = alpha_sigma_0 + epsilon_sigma[location[i]];
    real sigma = exp(log_sigma);
    real xi = alpha_xi_0 + epsilon_xi[location[i]];
    target += gev_lpdf(y[i] | mu, sigma, xi);
  }
}
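For reference, the custom gev_lpdf above is intended to implement the GEV log-density, with the Gumbel limit at xi = 0:

\log f(y \mid \mu, \sigma, \xi) =
\begin{cases}
  -\log\sigma - \left(1 + \tfrac{1}{\xi}\right)\log z - z^{-1/\xi}, & \xi \neq 0,\; z = 1 + \xi \dfrac{y - \mu}{\sigma} > 0, \\[6pt]
  -\log\sigma - \dfrac{y - \mu}{\sigma} - \exp\!\left(-\dfrac{y - \mu}{\sigma}\right), & \xi = 0.
\end{cases}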
RStan gave a warning message:
My model clearly hasn't converged. What could be the problem? If anyone could offer some help, I would be very grateful!
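In case it matters, the model is compiled and run with rstan roughly as follows (a minimal sketch; the file name and the dat / X_mu objects are placeholders, not my exact script):

library(rstan)

# Placeholder objects: 'dat' holds the observations and site index, 'X_mu' is the covariate matrix.
stan_data <- list(
  N = nrow(dat),            # 305 observations
  J = 5,                    # 5 sites
  location = dat$location,  # integer site index in 1..5
  K_mu = ncol(X_mu),
  X_mu = X_mu,
  y = dat$y
)

fit <- stan(file = "gev_hierarchical.stan", data = stan_data,
            chains = 4, iter = 2000, seed = 1)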