Mercurial > hg > ltpda
comparison m-toolbox/classes/+utils/@math/rjsample.m @ 0:f0afece42f48
Import.
author | Daniele Nicolodi <nicolodi@science.unitn.it> |
---|---|
date | Wed, 23 Nov 2011 19:22:13 +0100 |
parents | |
children | bc767aaa99a8 |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:f0afece42f48 |
---|---|
1 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
2 % | |
3 % Reverse Jump MCMC for the computation of Bayes Factor | |
4 % | |
5 % N. Karnesis 27/09/2011 | |
6 % | |
7 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
8 | |
9 | |
function [Bxy LogL] = rjsample(model,in,out,nse,cov,number,limit,param,Tc,xi,xo,search,jumps,parplot,dbg_info,inNames,outNames,anneal,SNR0,DeltaL,inModel,outModel)
  % RJSAMPLE Reverse Jump MCMC for the computation of Bayes factors between
  % a set of competing models (N. Karnesis 27/09/2011).
  %
  % The sampler alternates a within-model Metropolis step (mhsmpl) with a
  % between-model jump: a candidate model k_new is drawn uniformly, a point
  % is proposed in its parameter space, and the trans-dimensional acceptance
  % ratio is evaluated with independent proposals (Jacobian = 1).
  %
  % Inputs (semantics inferred from usage below -- confirm against callers):
  %   model      - model array, one competing model per column; class must be
  %                'matrix', 'smodel' or 'ssm'.
  %   in,out,nse - data/noise containers forwarded to utils.math.loglikelihood*.
  %   cov        - cell array: proposal covariance per model.
  %   number     - number of accepted samples to collect (chain length).
  %   limit      - cell array: 2-row matrix [lower; upper] of bounds per model.
  %   param      - cell array of parameter-name lists, one per model.
  %   Tc         - two-element annealing schedule [T1 T2]; empty disables it.
  %   xi         - initial heat exponent.
  %   xo         - cell array of current/starting parameter vectors per model.
  %   search     - if true, propose() uses the wide "search phase" schedule.
  %   jumps      - jump-size multipliers used by propose() during search.
  %   parplot    - if true, periodically plot Bayes factors / log-likelihoods.
  %   dbg_info   - if true, print per-step diagnostics.
  %   inNames, outNames - port names used by the ssm likelihood.
  %   anneal     - annealing flavour: 'simul', 'thermo' or 'simple'.
  %   SNR0       - reference SNR for the 'thermo' annealing scheme.
  %   DeltaL     - thresholds/steps for the 'simple' temperature update.
  %   inModel, outModel - extra models forwarded to the 'matrix' likelihood.
  %
  % Outputs:
  %   Bxy  - cell array; Bxy{gg,dd}(n) is the running Bayes factor estimate
  %          between models gg and dd (ratio of visit counts) after n
  %          accepted samples.
  %   LogL - matrix of sampled likelihood values, one column per model.
  %
  % NOTE(review): the likelihood values are treated as "smaller is better"
  % throughout (acceptance on logr < 0, out-of-bounds flagged with +inf);
  % they appear to be chi^2-like quantities rather than plain log-likelihoods
  % -- confirm against the utils.math.loglikelihood* implementations.

  import utils.const.*

  % initialize chain bookkeeping
  summ = 0;                        % loop-iteration counter (drives plot refresh)
  nacc = 1;                        % number of accepted samples so far
  heat = xi;                       % current annealing heat exponent
  rejected = 0;                    % count of NaN/out-of-bounds rejections
  nummodels = numel(model(1,:));   % one competing model per column
  legen = [];                      % accumulated legend strings for the plots

  % assigning colors to curves (if not, all curves are blue)
  for gg = 1:(nummodels)
    coloor(gg,:) = rand(1,3);      % per-model log-likelihood trace colour
    for dd = gg+1:(nummodels)
      colour{gg,dd} = rand(1,3);   % per-pair Bayes-factor trace colour
    end
  end

  % plist to pass in to the sampler for use in bode
  spl = plist('outputs', outNames, ...
              'inputs', inNames, ...
              'reorganize', false,...
              'numeric output',true);

  nrej = 0;  % NOTE(review): never updated or read below -- apparently dead

  % per-model initialisation of the sample store and acceptance counters;
  % in search mode the starting likelihood of every model is also evaluated
  for k = 1:nummodels
    if ~search
      for ii = 1:nummodels
        smpl(ii) = 0;
        mnacc(:,ii) = 0;
        nacc = 1;
        sumsamples = 1;
      end
    else
      % compute loglikelihood for starting model and values
      switch class(model)
        case 'matrix'
          smpl(k) = utils.math.loglikelihood_matrix(xo{k},in,out,nse,model(:,k),param{k},inModel,outModel);
        case 'smodel'
          smpl(k) = utils.math.loglikelihood(xo{k},in,out,nse,model(:,k),param{k});
        case 'ssm'
          % cache the state-space matrices once per model; reused by every
          % ssm likelihood evaluation below
          Amats{k} = model(:,k).amats;
          Bmats{k} = model(:,k).bmats;
          Cmats{k} = model(:,k).cmats;
          Dmats{k} = model(:,k).dmats;
          smpl(k) = utils.math.loglikelihood_ssm(xo{k},in,out,nse,model(:,k),param{k},inNames,outNames, spl, Amats{k}, Bmats{k}, Cmats{k}, Dmats{k});
        otherwise
          error('### Model must be either from the ''smodel'' or the ''ssm'' class. Please check the inputs')
      end
      mnacc(:,k) = 0;
      nacc = 1;
      sumsamples = 1;
    end
  end

  utils.helper.msg(msg.IMPORTANT, 'Starting Reverse Jump MCMC ', mfilename('class'), mfilename);

  T = Tc(1);   % temperature used by the 'simple' annealing scheme
  hjump = 0;   % flag: previous proposal was a "large jump" (see propose)

  % initialize (choose starting model)
  k = 1;
  % compute loglikelihood for starting model and values
  switch class(model)
    case 'matrix'
      loglk1mdl1 = utils.math.loglikelihood_matrix(xo{k},in,out,nse,model(:,k),param{k},inModel,outModel);
    case 'smodel'
      loglk1mdl1 = utils.math.loglikelihood(xo{k},in,out,nse,model(:,k),param{k});
    case 'ssm'
      loglk1mdl1 = utils.math.loglikelihood_ssm(xo{k},in,out,nse,model(:,k),param{k},inNames,outNames, spl, Amats{k}, Bmats{k}, Cmats{k}, Dmats{k});
    otherwise
      error('### Model must be either from the ''smodel'' or the ''ssm'' class. Please check the inputs')
  end

  % main loop: run until 'number' samples have been accepted
  while(nacc<number)

    % propose new point in the parameter space for model k
    [xn hjump] = propose(xo{k},cov{k},search,Tc,jumps,hjump,nacc,sumsamples);

    % check if out of limits: flag with an infinite "likelihood" so the
    % Metropolis step below rejects the point
    if( any(xn < limit{k}(1,:)) || any(xn > limit{k}(2,:)))

      loglk2mdl1 = inf;
      % NOTE(review): betta is not computed on this path; if the very first
      % iteration proposes out of bounds, mhsmpl is reached with betta
      % undefined -- confirm whether this can occur in practice.

    else
      % switch depending on model class (new likelihood)
      switch class(model)
        case 'ssm'
          [loglk2mdl1 SNR] = utils.math.loglikelihood_ssm(xn,in,out,nse,model(:,k),param{k},inNames,outNames, spl, Amats{k}, Bmats{k}, Cmats{k}, Dmats{k});
        case 'smodel'
          loglk2mdl1 = utils.math.loglikelihood(xn,in,out,nse,model(:,k),param{k});
        case 'matrix'
          [loglk2mdl1 SNR] = utils.math.loglikelihood_matrix(xn,in,out,nse,model(:,k),param{k},inModel,outModel);
        otherwise
          error('### Model must be from the ''ssm'' class. Please check the inputs')
      end

      % compute annealing weight betta (applied to the likelihood
      % difference inside the Metropolis step)
      if ~isempty(Tc)
        if nacc <= Tc(1)
          % initial phase: constant heat
          betta = 1/2 * 10^(-xi*(1-Tc(1)/Tc(2)));
        elseif Tc(1) < nacc && nacc <= Tc(2)
          % annealing window: geometric approach to betta = 1/2 at nacc = Tc(2)
          betta = 1/2 * 10^(-xi*(1-nacc/Tc(2)));
        else
          % compute heat factor after the annealing window
          switch anneal
            case 'simul'
              betta = 1/2;
            case 'thermo'
              % thermostated scheme: heat grows quadratically with SNR
              % once SNR exceeds the reference SNR0
              if (0 <= SNR && SNR <= SNR0)
                heat = 1;
              elseif (SNR > SNR0)
                heat = (SNR/SNR0)^2;
              end
              betta = 1/2 * 10^(-heat*(1-nacc/number));
            case 'simple'
              % adapt temperature T from the spread of the last samples;
              % NOTE(review): always reads column 1 of smpl regardless of
              % the current model -- confirm this is intended
              if (nacc > 10 && mod(nacc,10) == 0)
                deltalogp = std(smpl(nacc-10:nacc,1));
                if deltalogp <= DeltaL(1)
                  T = T + DeltaL(3);
                elseif deltalogp >= DeltaL(2)
                  T = T - DeltaL(4);
                end
                heat = Tc(1)/T;
              end
              betta = 1/2 * 10^(-heat*(1-nacc/number));
          end
        end
      else
        % no annealing schedule supplied
        betta = 1/2;
      end

    end

    % metropolis step within the current model k; x0 is the pre-step point,
    % rj = 1 means the within-model proposal was rejected
    [xo x0 loglk1mdl1 rj] = mhsmpl(k,betta,xn,xo,loglk1mdl1,loglk2mdl1);

    % update parameters (dimension matching): propagate shared parameter
    % values from model k into every other model's parameter vector
    if ~rj
      for gg = 1:nummodels
        xo{gg} = updateparams(param,xo,k,gg);
      end
    end

    % draw random integer to jump into model
    % (supposed equal probability to jump to any model including the present one)
    k_new = randi(nummodels,1);

    % propose new point in new model k'
    xn_new = propose(xo{k_new},cov{k_new},search,Tc,jumps,hjump,nacc,sumsamples);

    % check if out of limits for new model
    if( any(xn_new < limit{k_new}(1,:)) || any(xn_new > limit{k_new}(2,:)))

      loglk2mdl2 = inf;

    else
      % switch depending on model class (new likelihood for proposed model k')
      switch class(model(k_new))
        case 'ssm'
          [loglk2mdl2 SNR] = utils.math.loglikelihood_ssm(xn_new,in,out,nse,model(:,k_new),param{k_new},inNames,outNames, spl, Amats{k_new}, Bmats{k_new}, Cmats{k_new}, Dmats{k_new});
        case 'smodel'
          loglk2mdl2 = utils.math.loglikelihood(xn_new,in,out,nse,model(:,k_new),param{k_new});
        case 'matrix'
          [loglk2mdl2 SNR] = utils.math.loglikelihood_matrix(xn_new,in,out,nse,model(:,k_new),param{k_new},inModel,outModel);
        otherwise
          error('### Model must be from the ''ssm'' class. Please check the inputs')
      end
    end

    % calculate proposal densities (independent proposals)
    logq1 = logprpsl(xo{k},x0,cov{k});
    logq2 = logprpsl(xn_new,xo{k_new},cov{k_new});

    % ratio (independent proposals => Jacobian = 1)
    logr = (logq2 + loglk2mdl2 - loglk2mdl1 - logq1) ;
    % decide if sample in new model is accepted or not
    % NOTE(review): the first two branches are intentionally identical
    % except for the acceptance test (deterministic vs. probabilistic)
    if logr < 0
      % accept the jump to model k_new outright
      xo{k_new} = xn_new;
      nacc = nacc+1;
      sumsamples = nacc + rejected;
      mnacc(nacc,:) = mnacc(nacc-1,:);            % carry visit counts forward
      mnacc(nacc,k_new) = mnacc(nacc-1,k_new) + 1;
      smpl(nacc,k_new) = loglk2mdl2;
      if (k ~= k_new) smpl(nacc,k) = loglk2mdl1; end
      k = k_new;
      % update parameters again
      for gg = 1:nummodels
        xo{gg} = updateparams(param,xo,k,gg);
      end
      if dbg_info
        utils.helper.msg(msg.IMPORTANT, sprintf('acc new k: %d loglik: %d SNR: %d betta: %d ratio: %d ',k_new,loglk2mdl2,SNR,betta,logr));
      end
    elseif rand(1) > (1 - exp(-logr))
      % accept the jump with probability exp(-logr)
      xo{k_new} = xn_new;
      nacc = nacc+1;
      sumsamples = nacc + rejected;
      mnacc(nacc,:) = mnacc(nacc-1,:);
      mnacc(nacc,k_new) = mnacc(nacc-1,k_new) + 1;
      smpl(nacc,k_new) = loglk2mdl2;
      if (k ~= k_new) smpl(nacc,k) = loglk2mdl1; end
      k = k_new;
      % update parameters again
      for gg = 1:nummodels
        xo{gg} = updateparams(param,xo,k,gg);
      end
      if dbg_info
        utils.helper.msg(msg.IMPORTANT, sprintf('acc new k: %d loglik: %d SNR: %d betta: %d ratio: %d ',k_new,loglk2mdl2,SNR,betta,logr));
      end
    elseif isnan(logr)
      % inf - inf from out-of-bounds proposals yields NaN: count as rejection
      rejected = rejected + 1;
      sumsamples = nacc + rejected;
      if dbg_info
        utils.helper.msg(msg.IMPORTANT, sprintf('rejected: %d out of bounds ',rejected));
      end
    else
      % jump rejected: stay in the current model k
      nacc = nacc+1;
      sumsamples = nacc + rejected;
      mnacc(nacc,:) = mnacc(nacc-1,:);
      mnacc(nacc,k) = mnacc(nacc-1,k) + 1;
      if (k ~= k_new) smpl(nacc,k_new) = loglk2mdl2; end
      % printing on screen the correct things
      if rj
        % within-model proposal was also rejected: keep the old likelihood
        smpl(nacc,k) = loglk1mdl1;
        if dbg_info
          utils.helper.msg(msg.IMPORTANT, sprintf('acc old k: %d loglik: %d SNR: %d betta: %d ratio: %d ',k,loglk1mdl1,SNR,betta,logr));
        end
      else
        % within-model proposal was accepted: record the new point/likelihood
        smpl(nacc,k) = loglk2mdl1;
        if (k ~= k_new) smpl(nacc,k_new) = loglk2mdl2; end
        xo{k} = xn;
        if dbg_info
          utils.helper.msg(msg.IMPORTANT, sprintf('acc old k: %d loglik: %d SNR: %d betta: %d ratio: %d ',k,loglk2mdl1,SNR,betta,logr));
        end
      end
    end

    str = [];
    % printing to screen the following: "acc(#_of_model): # of points in
    % model #_of_model" for ii = 1:nummodels
    if dbg_info
      for ii = 1:nummodels
        str = [str sprintf('acc%d: %d ',ii,mnacc(nacc,ii))];
      end
      utils.helper.msg(msg.IMPORTANT, str);
    end

    % the handshake problem: one Bayes factor per unordered model pair
    for gg = 1:(nummodels)
      for dd = gg+1:(nummodels)
        % calculate Bayes Factor as the ratio of visit counts
        Bxy{gg,dd}(nacc,1) = mnacc(nacc,gg)/mnacc(nacc,dd);
      end
    end

    % plot Bayes factor (refresh every 500 iterations)
    if (parplot && (mod(summ,500) == 0) && (nacc ~= 0))
      figure(1)
      for gg = 1:(nummodels)
        for dd = gg+1:(nummodels)
          legen = [legen ; sprintf('B%d%d',gg,dd)]; % legend
          plot(Bxy{gg,dd}(:,1),'color',colour{gg,dd})
          legend(legen)
          hold on
        end
      end
    end
    hold off
    legen = [];

    % plot LogLikelihood for each model (refresh every 100 iterations)
    if (parplot && (mod(summ,100) == 0) && (nacc ~= 0))
      figure (2)
      for jj = 3:(nummodels+2)
        %if (mnacc(nacc,jj-2) ~= 0)
        plot(smpl(:,jj-2),'color',coloor(jj-2,:))
        legen = [legen ; sprintf('model%d',jj-2)]; % legend
        legend(legen)
        hold on
        %end
      end
    end
    hold off
    legen = [];

    % sum of points (used for plotting mainly -> avoid repeated ploting).
    summ = summ+1;
  end
  % return the full likelihood chain, one column per model
  LogL = smpl;
end
305 | |
306 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
307 % metropolis algorithm | |
308 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
function [xo x0 loglk1 rejected] = mhsmpl(k,betta,xn,xo,loglk1,loglk2)
% MHSMPL One tempered Metropolis step within model k.
%
% Inputs:
%   k      - index of the current model inside the cell array xo
%   betta  - annealing weight applied to the likelihood difference
%   xn     - proposed parameter vector for model k
%   xo     - cell array of current parameter vectors (all models)
%   loglk1 - likelihood value of the current point xo{k}
%   loglk2 - likelihood value of the proposal xn
%
% Outputs:
%   xo       - updated cell array (xo{k} replaced by xn on acceptance)
%   x0       - the pre-step point xo{k}
%   loglk1   - likelihood of the (possibly updated) current point
%   rejected - 1 if the proposal was rejected, 0 otherwise
%
% Acceptance convention: smaller likelihood values are better, so a
% negative exponent accepts outright; otherwise accept with
% probability exp(-logalpha).

  x0 = xo{k};

  % tempered log acceptance exponent
  logalpha = betta*(loglk2 - loglk1);

  % the random draw only happens when the deterministic test fails
  % (short-circuit ||), so exactly one rand is consumed per borderline step
  accept = (logalpha < 0) || (rand(1) > (1 - exp(-logalpha)));

  if accept
    xo{k} = xn;
    loglk1 = loglk2;
    rejected = 0;
  else
    rejected = 1;
  end

end
327 | |
328 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
329 % compute LogLikelihood | |
330 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
331 % function logL = LogLamda(k,param,model,inNames,outNames,spl,Amats,Bmats,Dmats,Cmats) | |
332 % | |
333 % switch class(model(k_new)) | |
334 % case 'ssm' | |
335 % logL = utils.math.loglikelihood_ssm(xn_new,in,out,nse,model(:,k_new),param{k_new},inNames,outNames, spl, Amats{k_new}, Bmats{k_new}, Cmats{k_new}, Dmats{k_new}); | |
336 % case 'smodel' | |
337 % logL = utils.math.loglikelihood(xn_new,in,out,nse,model(:,k_new),param{k_new}); | |
338 % case 'matrix' | |
339 % [logL SNR] = utils.math.loglikelihood_matrix(xn_new,in,out,nse,model(:,k_new),param{k_new},inModel,outModel); | |
340 % otherwise | |
341 % error('### Model must be from the ''ssm'' class. Please check the inputs') | |
342 % end | |
343 % | |
344 % | |
345 % end | |
346 | |
347 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
348 % propose a new jump function | |
349 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
function [xn hjump]= propose(xo,cov,search,Tc,jumps,hjump,nacc,sumsamples)
% PROPOSE Draw a candidate point xn from a multivariate normal centred on xo.
%
% During the search phase (search true and nacc <= Tc(1)) the covariance is
% inflated by one of the jumps(i)^2 factors on a schedule driven by the total
% sample count, so the chain occasionally makes large exploratory moves; the
% hjump flag blocks two consecutive large jumps. Outside the search phase the
% raw covariance is used and hjump passes through unchanged.
%
% Exactly one mvnrnd draw is made per call.

  if search && nacc <= Tc(1)
    % pick the standard-deviation multiplier for this step; a large jump is
    % only allowed when the previous step was not one (hjump ~= 1)
    if (mod(sumsamples,5) == 0 && mod(sumsamples,25) ~= 0 && mod(sumsamples,40) ~= 0 && hjump ~= 1)
      hjump = 1;
      scale = jumps(2);
    elseif (mod(sumsamples,20) == 0 && mod(sumsamples,50) ~= 0 && hjump ~= 1)
      hjump = 1;
      scale = jumps(3);
    elseif (mod(sumsamples,10) == 0 && hjump ~= 1)
      hjump = 1;
      scale = jumps(4);
    else
      % ordinary step
      hjump = 0;
      scale = jumps(1);
    end
    xn = mvnrnd(xo,scale^2*cov);
  else
    % burnt-in (or non-search) sampling: plain draw with the given covariance
    xn = mvnrnd(xo,cov);
  end

end
377 | |
378 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
379 % update parameters function (dimension matching) | |
380 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
function xn= updateparams(param,xo,k,kn)
% UPDATEPARAMS Dimension matching: build a parameter vector for model kn
% from the current point of model k, reusing the values of parameters the
% two models share and keeping model kn's own values for the rest.
%
% Inputs:
%   param - cell array of parameter-name lists (cell arrays of strings),
%           one entry per model
%   xo    - cell array of current parameter vectors, one per model
%   k     - index of the source model
%   kn    - index of the destination model
%
% Output:
%   xn    - parameter vector in model kn's space
%
% NOTE(review): the branches below rely on two MATLAB quirks:
%   * "if (jj ~= position)" compares a scalar against the whole position
%     vector; the if-body runs only when jj differs from EVERY element,
%     i.e. jj is not one of the marked positions.
%   * "min(size(difparams,2))" is min of a scalar, i.e. just
%     size(difparams,2) -- presumably a leftover.

% dimension difference of models
dimdif = abs(size(param{kn},2) - size(param{k},2));
% mark the different parameters (names in one model but not the other)
difparams = setxor(param{kn},param{k});
% total number of different parameters
totalnumdifpar = numel(difparams);
kk = 0;

% case: dimension difference equals the # of different parameters
% and dim(model(k')) > dim(model(k))
if (dimdif == totalnumdifpar && size(param{kn},2) > size(param{k},2))

  xn = zeros(size(xo{kn}));
  % keep model kn's own values for the parameters model k does not have
  for ii = 1:min(size(difparams,2))
    compvec{ii} = strcmp(difparams{ii},param{kn});
    position(ii) = find(compvec{ii}); % mark the positions of the different parameters
    xn(position(ii)) = xo{kn}(position(ii));
  end
  % fill the remaining slots, in order, from model k's current point
  for jj = 1:size(param{kn},2)
    if (jj ~= position)
      kk = kk+1;
      xn(jj) = xo{k}(kk);
    end
  end

% case: dimension difference equals the # of different parameters
% and dim(model(k')) < dim(model(k))
elseif (dimdif == totalnumdifpar && size(param{kn},2) < size(param{k},2))

  xn = zeros(size(xo{kn}));
  % mark, inside model k, the parameters that model kn lacks
  for ii = 1:min(size(difparams,2))
    compvec{ii} = strcmp(difparams{ii},param{k});
    position(ii) = find(compvec{ii});
  end
  % copy over only the shared parameters, preserving their order in model k
  for jj = 1:size(param{k},2)
    if (jj ~= position)
      kk = kk+1;
      xn(kk) = xo{k}(jj);
    end
  end

% case: dimension difference is smaller than the # of different parameters
% and dim(model(k')) > dim(model(k))
elseif (dimdif < totalnumdifpar && size(param{kn},2) > size(param{k},2))

  xn = zeros(size(xo{kn}));
  % keep model kn's values for names that exist only in model kn; names
  % unique to model k simply find no match here (any(compvec) is false)
  for ii = 1:min(size(difparams,2))
    compvec{ii} = strcmp(difparams{ii},param{kn});
    if any(compvec{ii})
      position(ii) = find(compvec{ii});
      xn(position(ii)) = xo{kn}(position(ii));
    end
  end
  % fill the remaining slots, in order, from model k's current point
  for jj = 1:size(param{kn},2)
    if (jj ~= position)
      kk = kk+1;
      xn(jj) = xo{k}(kk);
    end
  end

% case: dimension difference is smaller than the # of different parameters
% and dim(model(k')) < dim(model(k))
elseif (dimdif < totalnumdifpar && size(param{kn},2) < size(param{k},2))

  xn = zeros(size(xo{kn}));
  % walk model kn's parameter list: shared names record where the value
  % lives in model k (position); names unique to kn keep kn's own value
  % (located via position2) and are flagged with position(ii) = 0
  for ii = 1:size(param{kn},2)
    compvec{ii} = strcmp(param{kn}{ii},param{k});
    if any(compvec{ii})
      position(ii) = find(compvec{ii});
      %xn(position(ii)) = xo{k}(position(ii));
    else
      kk = kk+1;
      compvec{ii} = strcmp(param{kn}{ii},param{kn});
      position2(kk) = find(compvec{ii});
      xn(position2(kk)) = xo{kn}(position2(kk));
      position(ii) = 0;
    end
  end
  kk = 0;
  % second pass: pull the shared values out of model k's vector
  for jj = 1:size(param{kn},2)
    if (position(jj) ~= 0)
      xn(jj) = xo{k}(position(jj));
    end
  end

% case: dimension difference is smaller than the # of different parameters
% and dim(model(k')) = dim(model(k))
elseif (dimdif < totalnumdifpar && size(param{kn},2) == size(param{k},2))

  xn = zeros(size(xo{kn}));
  % keep model kn's values for its unique names
  for ii = 1:min(size(difparams,2))
    compvec{ii} = strcmp(difparams{ii},param{kn});
    if any(compvec{ii})
      position(ii) = find(compvec{ii});
      xn(position(ii)) = xo{kn}(position(ii));
    end
  end
  % fill the remaining slots, in order, from model k's current point
  for jj = 1:size(param{kn},2)
    if (jj ~= position)
      kk = kk+1;
      xn(jj) = xo{k}(kk);
    end
  end

% case: new model proposed is the same as the previous one (k' == k).
% That means we perform again the metropolis algorithm.
elseif (totalnumdifpar == 0 && size(param{kn},2) == size(param{k},2))

  xn = xo{k};

end

% propose the new set of points in the new parameter space
%[xn hjump]= propose(xn,cov{kn},search,Tc,jumps,hjump,nacc,sumsamples);

end
499 | |
500 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
501 % compute proposal density | |
502 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | |
function logq = logprpsl(X,mu,cov)
% LOGPRPSL Log of the multivariate normal proposal density N(mu, cov) at X.
%
% A density that underflows to zero would give log(0) = -Inf and poison the
% trans-dimensional acceptance ratio, so that case is mapped to 0 instead.

  logq = log(mvnpdf(X,mu,cov));

  if logq == -Inf
    logq = 0;
  end

end
509 |