Parallelizing Bayesian Hierarchical Model with Many Parameters

Problem solved! I’m going to answer my own question in case it helps someone in the future. The key change is wrapping the per-subject likelihood in a partial-sum function and handing it to reduce_sum, so the subjects are evaluated in parallel across threads. Here is the parallelized code:

functions {

	//Custom function for lognormal inverse CDF
	matrix logncdfinv(vector x, vector m, vector s) {							

		int szx;
		int szm;
		szx = size(x);
		szm = size(m);	
		matrix[szx,szm] y;
		y = exp( rep_matrix(s',szx) * sqrt(2) .* rep_matrix(inv_erfc( -2 .* x + 2 ),szm) + rep_matrix(m',szx));

		return y;

	}

	//Likelihood model for CASANDRE				
	real casandre_log(array[] matrix resps, vector guess, matrix sds, matrix se, vector conf) {

		real ll;
		ll = 0;

		for (n in 1:size(resps)) {

			matrix[size(se[:,n]),4] llhC;

			matrix[size(se[:,n]),size(sds[:,n])] avgs;
			avgs = rep_matrix(se[:,n],size(sds[:,n])).*rep_matrix(sds[:,n]',size(se[:,n]));
		
			for (tr in 1:size(se[:,n])){

				matrix[3,size(sds[:,n])] raws;

				for (rws in 1:size(sds[:,n])) {
					raws[1,rws] = normal_cdf(-conf[n],avgs[tr,rws],sds[rws,n]);
					}
				for (rws in 1:size(sds[:,n])) {
					raws[2,rws] = normal_cdf(0,avgs[tr,rws],sds[rws,n]);
					}
				for (rws in 1:size(sds[:,n])) {
					raws[3,rws] = normal_cdf(conf[n],avgs[tr,rws],sds[rws,n]);
					}

				vector[3] ratiodist;

				ratiodist[1] = mean(raws[1,:]);
				ratiodist[2] = mean(raws[2,:]);
				ratiodist[3] = mean(raws[3,:]);

				llhC[tr,1] = ((guess[n]/4) + ((1-guess[n])*ratiodist[1]));
				llhC[tr,2] = ((guess[n]/4) + ((1-guess[n])*(ratiodist[2]-ratiodist[1])));
				llhC[tr,3] = ((guess[n]/4) + ((1-guess[n])*(ratiodist[3]-ratiodist[2])));
				llhC[tr,4] = ((guess[n]/4) + ((1-guess[n])*(1-ratiodist[3])));

			}

			ll += sum(columns_dot_product(resps[n], log(llhC)));

		}

		return ll;
		
	}

	//Partial sum function
	real partial_sum_casandre_log(array[] matrix slice_n_resps, int start, int end, vector guess, matrix sds, matrix se, vector conf) {
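	//reduce_sum hands this function a slice of resps together with the start and end
	//indices of that slice in the full array; slicing guess, sds, se, and conf with the
	//same range keeps the per-subject parameters aligned with the sliced responses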

	return casandre_log(slice_n_resps, guess[start:end], sds[:,start:end], se[:,start:end], conf[start:end]);

	}
		
}
data {

	int ns;
	int nt;
	int sampn;
	matrix[4,ns*nt] respslong;
	row_vector[ns*nt] orislong;
	vector[sampn] sampx;
	
}
transformed data {

	matrix[ns*nt,4] respstall;
	vector[ns*nt] oristall;
	int nth;

	respstall = respslong';
	oristall = orislong';

	array[ns] matrix[nt,4] resps;
	matrix[nt,ns] oris;

	nth = 1;

	for (sub in 1:ns) {
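		//Each subject gets an [nt,4] response matrix and an nt-long orientation column;
		//reduce_sum slices the resps array over this subject dimension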

		resps[sub] = respstall[nth:(nth+nt-1),1:4];									

		oris[:,sub] = oristall[nth:(nth+nt-1)];

		nth = nth + nt;

	}

}
parameters {

	//Parameters
	vector<lower=0,upper=1>[ns] guess;
	vector<lower=0>[ns] sens;
	vector[ns] crit;
	vector<lower=0>[ns] meta;
	vector<lower=0>[ns] conf;

	//Hyperparameters
	real snm;
	real<lower=0> sns;
	real cm;
	real<lower=0> cs;
	real mm;
	real<lower=0> ms;
	real ccm;
	real<lower=0> ccs;

}
model {

	//Calculate local variables
	matrix[nt,ns] sm;
	vector[ns] sc;
	matrix[sampn,ns] xtrans;	
	matrix[sampn,ns] sds;
	matrix[nt,ns] se;

	sm = oris.*rep_matrix(sens',nt);
	sc = crit.*sens;

	xtrans = logncdfinv(sampx,log(1/sqrt(meta.^2+1)),sqrt(log(meta.^2+1)));

	sds = 1./xtrans;
	se = sm-rep_matrix(sc',nt);
	
	//Hyperpriors
	snm 	~	normal(0,1);
	sns 	~	lognormal(0,1);
	cm 	~	normal(0,1);
	cs 	~	lognormal(0,1);
	mm 	~	normal(0,1);
	ms 	~	lognormal(0,1);
	ccm 	~	normal(0,1);
	ccs 	~	lognormal(0,1);

	//Priors
	guess 	~	beta(1,193.0/3.0);
	sens	~	lognormal(snm,sns);
	crit	~	normal(cm,cs);
	meta  	~	lognormal(mm,ms);	
	conf  	~	lognormal(ccm,ccs);

	//Likelihood model	
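	//reduce_sum splits the subjects in resps across threads; the grainsize of 1 leaves the slice sizes to the scheduler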
	target += reduce_sum(partial_sum_casandre_log, resps, 1, guess, sds, se, conf);

}
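
One practical note: reduce_sum only gives a speedup if the model is compiled with threading support and each chain is given more than one thread. Below is a minimal sketch of how that could look with cmdstanpy; the file name casandre.stan and all of the data values are just placeholders, so substitute your own.

import numpy as np
from cmdstanpy import CmdStanModel

# Compile with STAN_THREADS so reduce_sum can split the partial sums across threads
model = CmdStanModel(stan_file='casandre.stan',              # placeholder file name
                     cpp_options={'STAN_THREADS': True})

ns, nt, sampn = 10, 100, 30                                  # placeholder sizes

data = {
    'ns': ns,
    'nt': nt,
    'sampn': sampn,
    'respslong': np.zeros((4, ns * nt)),                     # placeholder 4 x (ns*nt) response matrix
    'orislong': np.zeros(ns * nt),                           # placeholder stimulus orientations
    'sampx': np.linspace(0.001, 0.999, sampn),               # placeholder points in (0,1) for logncdfinv
}

# Four chains run in parallel, each chain using four threads for reduce_sum
fit = model.sample(data=data, chains=4, parallel_chains=4,
                   threads_per_chain=4)

The same idea works from R with cmdstanr: pass cpp_options = list(stan_threads = TRUE) to cmdstan_model() and threads_per_chain to $sample().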