For the data, you can assume any random values that satisfy the dimension constraints (see the sketch at the end of this post).
All I want for now is for the model to sample properly, which it isn't doing.
(PS: the code is shown below.)
It is showing this error:
RemoteTraceback Traceback (most recent call last)
RemoteTraceback:
"""
Traceback (most recent call last):
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/pool.py", line 121, in worker
result = (True, func(*args, **kwds))
File "/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/pool.py", line 44, in mapstar
return list(map(*args))
File "stanfit4dummy_model_84553e8202127efa9b9f2f6c6961ebe6_8664948722932070801.pyx", line 371, in stanfit4dummy_model_84553e8202127efa9b9f2f6c6961ebe6_8664948722932070801._call_sampler_star
File "stanfit4dummy_model_84553e8202127efa9b9f2f6c6961ebe6_8664948722932070801.pyx", line 404, in stanfit4dummy_model_84553e8202127efa9b9f2f6c6961ebe6_8664948722932070801._call_sampler
RuntimeError: Initialization failed.
"""
The above exception was the direct cause of the following exception:
RuntimeError Traceback (most recent call last)
in
8 'demand':demand_data[1]
9 }
---> 10 model_fit = model.sampling(data=data_dict, iter=1000, chains=4, warmup=750, n_jobs=-1, seed=42)
/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pystan/model.py in sampling(self, data, pars, chains, iter, warmup, thin, seed, init, sample_file, diagnostic_file, verbose, algorithm, control, n_jobs, **kwargs)
776 call_sampler_args = izip(itertools.repeat(data), args_list, itertools.repeat(pars))
777 call_sampler_star = self.module._call_sampler_star
--> 778 ret_and_samples = _map_parallel(call_sampler_star, call_sampler_args, n_jobs)
779 samples = [smpl for _, smpl in ret_and_samples]
780
/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pystan/model.py in _map_parallel(function, args, n_jobs)
83 try:
84 pool = multiprocessing.Pool(processes=n_jobs)
---> 85 map_result = pool.map(function, args)
86 finally:
87 pool.close()
/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/pool.py in map(self, func, iterable, chunksize)
288 in a list that is returned.
289 '''
--> 290 return self._map_async(func, iterable, mapstar, chunksize).get()
291
292 def starmap(self, func, iterable, chunksize=None):
/Library/Frameworks/Python.framework/Versions/3.7/lib/python3.7/multiprocessing/pool.py in get(self, timeout)
681 return self._value
682 else:
--> 683 raise self._value
684
685 def _set(self, i, obj):
RuntimeError: Initialization failed.
Here is the Stan model code:

data {
  int<lower=1> N;                  // N = 288
  row_vector[N] cdd;               // vector of 288 values
  row_vector[N] gdp;
  row_vector[N] prices;
  row_vector[N] demand_1m_lagged;
  row_vector[N] demand_1yr_lagged;
  row_vector[N] demand;            // the true values I want to model
}
parameters {
  real alpha;
  vector[5] b;
  vector[5] mu_beta;
  cov_matrix[5] sigma_beta;
  vector[12] e;
  cov_matrix[12] sigma_e;
}
transformed parameters {
  row_vector[N] demand_hat;        // transformed model parameter
  row_vector[N] temp;
  for (i in 1:N) {
    temp[i] = cdd[i] * b[1] + gdp[i] * b[2] + prices[i] * b[3]
              + demand_1m_lagged[i] * b[4] + demand_1yr_lagged[i] * b[5];
    demand_hat[i] = alpha + temp[i] + e[i % 12 + 1];
  }
}
model {
  alpha ~ normal(0, 100);
  for (j in 1:5)
    mu_beta[j] ~ normal(0, 100);
  sigma_beta ~ inv_wishart(6, diag_matrix(rep_vector(0.0001, 5)));
  sigma_e ~ inv_wishart(13, diag_matrix(rep_vector(0.0001, 12)));
  e ~ multi_normal(rep_vector(0.0, 12), sigma_e);
  b ~ multi_normal(mu_beta, sigma_beta);
  demand ~ normal(demand_hat, 0);  // how do I incorporate an error term (sigma) here, given that e is already added above?
}
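The transformed parameters block is just a linear predictor plus a per-month effect picked out by e[i % 12 + 1]; for clarity, here is the same computation sketched in NumPy (every array below is a hypothetical placeholder, not part of the actual model or data):

import numpy as np

# Hypothetical placeholder inputs -- only the shapes match the Stan declarations.
N = 288
cdd = np.random.randn(N)
gdp = np.random.randn(N)
prices = np.random.randn(N)
demand_1m_lagged = np.random.randn(N)
demand_1yr_lagged = np.random.randn(N)
alpha = 0.0                    # real alpha
b = np.random.randn(5)         # vector[5] b
e = np.random.randn(12)        # vector[12] e (monthly effects)

# temp[i] = cdd[i]*b[1] + gdp[i]*b[2] + prices[i]*b[3] + demand_1m_lagged[i]*b[4] + demand_1yr_lagged[i]*b[5]
temp = (cdd * b[0] + gdp * b[1] + prices * b[2]
        + demand_1m_lagged * b[3] + demand_1yr_lagged * b[4])

# Stan indexes both i and e from 1, so e[i % 12 + 1] becomes e[i % 12] with 0-based arrays.
i = np.arange(1, N + 1)
demand_hat = alpha + temp + e[i % 12]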
And here is the Python code that compiles the model and runs the sampler:

import pystan

# model_code is the Stan program above as a single string
model = pystan.StanModel(model_code=model_code, model_name='dummy_model')

data_dict = {
    'N': 288,
    'cdd': cdd_data[1],
    'gdp': gdp_data[1],
    'prices': prices_data[1],
    'demand_1m_lagged': demand_1m_lagged_data[1],
    'demand_1yr_lagged': demand_1yr_lagged_data[1],
    'demand': demand_data[1]
}
model_fit = model.sampling(data=data_dict, iter=1000, chains=4, warmup=750, n_jobs=-1, seed=42)
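For the data itself, as noted at the top, any random values with the right dimensions reproduce this; here is a minimal sketch that builds such a data_dict with NumPy (the random arrays are stand-ins for my real cdd_data, gdp_data, and so on):

import numpy as np

np.random.seed(42)
N = 288

# Random stand-ins that satisfy the dimension constraints in the data block.
data_dict = {
    'N': N,
    'cdd': np.random.randn(N),
    'gdp': np.random.randn(N),
    'prices': np.random.randn(N),
    'demand_1m_lagged': np.random.randn(N),
    'demand_1yr_lagged': np.random.randn(N),
    'demand': np.random.randn(N)
}

Passing this dictionary to the same model.sampling(...) call gives the same initialization error.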