Hello all,

I’m using rstan to make predictions for count data while tuning hyperparameters.

Data can be downloaded here

gp.pois.data.2d.R (14.5 KB)

The Stan model can be run with:

# Read the R-dump data file and assemble the data list expected by the
# Stan program's `data` block (note the rename N -> N_observed,
# sample_idx -> observed_idx).
data <- read_rdump("gp.pois.data.2d.R")  # straight quotes: curly quotes do not parse in R

pred_data <- list(
  N_observed = data$N, D = 2, y_observed = data$y,
  N_predict = data$N_predict, x_predict = data$x_predict,
  observed_idx = data$sample_idx
)

# `chains` spelled out in full: `chain=` only worked via R's partial
# argument matching, which is fragile and flagged by lintr.
fit <- stan(
  file = "poisson.stan",
  data = pred_data, seed = 5838298, chains = 1
)

There are 26 2-dimensional observed inputs x and outputs y, and I want to estimate the posterior predictive distribution given 1296 2-dimensional inputs x. However, after running the Stan code overnight it remains stuck at 0% of iterations. Does anyone know how to make the estimation faster? Are there any techniques that I could possibly use? Many thanks in advance!

```
// Latent-GP Poisson regression: a squared-exponential GP prior is placed
// jointly over ALL N_predict input locations; Poisson counts are observed
// at the subset of locations indexed by observed_idx.
data {
int<lower=1> N_predict; // total number of input locations (observed + new)
int<lower=1> D; // input dimension (2 in the post above)
vector[D] x_predict[N_predict]; // all input locations
int<lower=1> N_observed; // number of observed counts
int<lower=1, upper=N_predict> observed_idx[N_observed]; // which locations have data
int y_observed[N_observed]; // observed counts
}
parameters {
real<lower=0> rho; // GP length scale
real<lower=0> alpha; // GP marginal standard deviation
vector[N_predict] f_tilde; // standard-normal latent variables (non-centered GP)
}
transformed parameters {
// Non-centered parameterization: log_f_predict = L * f_tilde has the
// desired multivariate-normal GP prior when f_tilde ~ normal(0, 1).
vector[N_predict] log_f_predict;
{
matrix[N_predict, N_predict] cov;
matrix[N_predict, N_predict] L_cov;
// NOTE(review): cov is N_predict x N_predict (1296 x 1296 here), and
// cholesky_decompose is O(N^3). Since this sits in transformed
// parameters, it is presumably re-evaluated at every leapfrog step —
// this is the likely cause of the reported slowness. Consider
// restricting the GP (and Cholesky) to the 26 observed points and
// doing prediction in generated quantities instead.
// Jitter 1e-10 on the diagonal stabilizes the Cholesky factorization;
// larger values (e.g. 1e-8) are commonly needed if it fails.
cov = cov_exp_quad(x_predict, alpha, rho)
+ diag_matrix(rep_vector(1e-10, N_predict));
L_cov = cholesky_decompose(cov);
log_f_predict = L_cov * f_tilde;
}
}
model {
f_tilde ~ normal(0, 1); // implies log_f_predict ~ multi_normal(0, cov)
rho ~ inv_gamma(6.8589, 103.582); // informative prior keeping rho away from 0 and infinity
alpha ~ normal(0, 10); // half-normal via the <lower=0> constraint
// Likelihood only at the observed locations; log intensity = latent GP value.
y_observed ~ poisson_log(log_f_predict[observed_idx]);
}
generated quantities {
vector[N_predict] f_predict = exp(log_f_predict); // latent Poisson intensities
vector[N_predict] y_predict; // posterior predictive draws at every location
for (n in 1:N_predict)
y_predict[n] = poisson_log_rng(log_f_predict[n]);
}
```