My data matrix (counts) is pretty small, only 200x5, so data size should not be a problem. When I run my estimation, though, my computer slows down drastically and it uses a huge amount of memory; checking my hard disk, I even lose about 20 GB of free space during the run. The errors are:
Error in unserialize(socklist[[n]]) : error reading from connection
Error in serialize(data, node$con, xdr = FALSE) :
error writing to connection
I would assume it has something to do with my normalizing_constant function. However, I tried raising the log_error value and the error still occurs. I added a print(j) inside the function to see how many iterations each sum takes, and with my current log_error the while loop condition is satisfied after only 2 or 3 iterations. Any ideas what could be causing this problem?
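For context, normalizing_constant is meant to return the log of the (COM-Poisson) normalizing constant

$$
Z(\lambda, \nu) = \sum_{j=0}^{\infty} \frac{\lambda^{j}}{(j!)^{\nu}},
$$

built up term by term on the log scale and truncated once consecutive log partial sums differ by less than log_error. The full model is below.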
functions {
  // log(x!)
  real log_fact(real x) {
    return lgamma(x + 1);
  }
  // numerically stable log(exp(x) + exp(y))
  real com_log_sum(real x, real y) {
    if (x == negative_infinity())
      return y;
    else if (y == negative_infinity())
      return x;
    else if (x > y)
      return x + log1p(exp(y - x));
    else
      return y + log1p(exp(x - y));
  }
  // log normalizing constant, summed term by term on the log scale
  // until consecutive partial sums are within log_error
  real normalizing_constant(real lambda, real nu, real log_error) {
    real z = negative_infinity();
    real z_last = 0;
    int j = 0;
    real log_lambda = log(lambda);
    while (fabs(z - z_last) > log_error) {
      z_last = z;
      z = com_log_sum(z, j * log_lambda - nu * log_fact(j));
      j = j + 1;
    }
    print(j);
    return z;
  }
}
data {
  int<lower=1> N;             // number of observations
  int<lower=1> I;             // number of items
  int<lower=0> counts[N, I];  // observed counts
}
parameters {
  vector[N] omega;
  vector[I] theta;
}
transformed parameters {
  vector[I] nu = rep_vector(1, I);
  real log_error = 3;
  matrix[N, I] lambda;
  for (n in 1:N) {
    for (i in 1:I) {
      lambda[n, i] = exp(nu[i] * (omega[n] + theta[i]));
    }
  }
}
model {
  // Priors
  omega ~ normal(0, .6);
  theta ~ normal(0, .6);
  // Likelihood
  for (n in 1:N) {
    for (i in 1:I) {
      real l;
      real log_C;
      log_C = normalizing_constant(lambda[n, i], nu[i], log_error);
      l = counts[n, i] * log(lambda[n, i]) - nu[i] * log_fact(counts[n, i]) - log_C;
      target += l;
    }
  }
}
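For reference, each count is meant to contribute the COM-Poisson log density

$$
\log p(y \mid \lambda, \nu) = y \log \lambda - \nu \log(y!) - \log Z(\lambda, \nu),
$$

with lambda[n, i] = exp(nu[i] * (omega[n] + theta[i])) and nu fixed at 1 for every item at the moment.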