% Training session Topic 5 exercise 05
%
% Fitting time series with xfit
%
% ao/xfit uses a full non-linear least-squares algorithm to fit data. Here
% the main features will be described to fit an up-chirp sine.
%
% 1) Load time series data
% 2) Define the model to fit
% 3) Inspect data in order to find an initial guess
% 4) Fit data with ao/xfit starting from that guess
% 5) Check results
% 6) Fit again by changing the fitting algorithm (fminunc will be used)
% 7) Check results
% 8) Fit again by using a Monte Carlo search
% 9) Check results
% 10) Final fit with the full model (data bias included)
% 11) Check results
% 12) Comments
%
% Trento, 01-03-2010
% Giuseppe Congedo
%
% $Id: TrainigSession_T5_Ex05.m,v 1.3 2011/05/13 15:13:12 ingo Exp $
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% 1) load tsdata

% load test noise AO from file
data = ao(plist('filename', 'topic5/T5_Ex05_TestNoise.xml'));
data.setName;

% look at data
iplot(data)
% iplot(abs(fft(data)))
% iplot(psd(data))
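
% The time-series plot gives a feel for the signal amplitude and offset,
% while the FFT/PSD views (commented out above) show where the oscillation
% sits in frequency; both are useful when choosing the initial guess in
% step 3 below.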

%% 2) set the model

% try a linearly chirped sine wave with a bias
mdl = smodel('a + P1.*sin(2.*pi.*(P2 + P3.*t).*t + P4)');
mdl.setParams({'a', 'P1', 'P2', 'P3', 'P4'});
mdl.setXvar('t');
mdl.subs('a',5); % substitute the data bias 'a' with its known value
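
% In this model:
%   a  - constant bias (offset) of the data, substituted with 5 above
%   P1 - amplitude of the sine
%   P2 - start frequency of the chirp
%   P3 - chirp parameter (sets the linear frequency drift)
%   P4 - initial phase
% so only P1..P4 are left free for the fit.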

%% 3) try to find an initial guess

% plot the tentative model
mdlTry = mdl.setValues([4 1e-4 1e-5 0]);
mdlTry.setXvals(data.x);
iplot(data,mdlTry.eval)
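
% Note: the values passed to setValues follow the parameter order
% {P1, P2, P3, P4}, i.e. amplitude ~4, start frequency ~1e-4, chirp
% parameter ~1e-5 and zero phase. Setting the X values to data.x makes
% eval return the model on the same time grid as the data, so the two can
% be overplotted directly.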

%% 4) xfit from an initial guess

% fit plist
plfit = plist('Function', mdl, ...
  'P0', [4 1e-4 1e-5 0], ...     % set initial guess for the parameters
  'LB', [2 1e-6 1e-6 0], ...     % set lower bounds for parameters
  'UB', [5 1e-2 1e-2 2*pi] ...   % set upper bounds for parameters
  );

% do fit
params = xfit(data, plfit)
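
% xfit returns a parameter estimation object (a pest) holding the best-fit
% values, their uncertainties and the fit chi^2 and degrees of freedom;
% eval(params) in the next step builds the corresponding best-fit model as
% an AO on the data's time grid.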

%% 5) check results

bestMdl = eval(params);

% plotting results
iplot(data,bestMdl)
iplot(data-bestMdl)
iplot(psd(data-bestMdl))

% Comments.
% The fit is not bad, but let's try changing the fitting algorithm.

%% 6) xfit from an initial guess (change algorithm)

% fit plist
plfit = plist('Function', mdl, ...
  'P0', [4 1e-4 1e-5 0], ...     % set initial guess for the parameters
  'LB', [2 1e-6 1e-6 0], ...     % set lower bounds for parameters
  'UB', [5 1e-2 1e-2 2*pi], ...  % set upper bounds for parameters
  'algorithm', 'fminunc');       % set fitting algorithm

% do fit
params = xfit(data, plfit)

%% 7) check results

bestMdl = eval(params);

% plotting results
iplot(data,bestMdl)
iplot(data-bestMdl)
iplot(psd(data-bestMdl))

% Comments.
% The fit is not bad: an improvement with respect to the first fit, which
% used the default fminsearch. Let's now try a Monte Carlo search.

%% 8) xfit from a Monte Carlo search (bounds are narrowed around the best fit found above)

% fit plist
plfit = plist('Function', mdl, ...
  'LB', [2.5 1e-5 1e-6 0], ...           % set lower bounds for parameters
  'UB', [3.5 1e-3 1e-4 2*pi], ...        % set upper bounds for parameters
  'algorithm', 'fminunc', ...            % set fitting algorithm
  'MonteCarlo', 'y', 'Npoints', 1e4);    % set the number of points in the parameter space
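
% Note that no initial guess 'P0' is given here: with 'MonteCarlo' set to
% 'y' the search samples 'Npoints' trial points inside the [LB, UB] box
% (the idea being to start the minimisation from the best of them), which
% is why the bounds have been narrowed around the previous best fit.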

% do fit
params = xfit(data, plfit)

%% 9) check results

bestMdl = eval(params);

% plotting results
iplot(data,bestMdl)
iplot(data-bestMdl)
iplot(psd(data-bestMdl))

% Comments.
% The fit is not bad: no further improvement with respect to the previous fit.
% Let's now try to fit the full model, including the data bias.

%% 10) xfit with full model

% set the full model (including the data bias)
mdl = smodel('a + P1.*sin(2.*pi.*(P2 + P3.*t).*t + P4)');
mdl.setParams({'a', 'P1', 'P2', 'P3', 'P4'});
mdl.setXvar('t');

% fit plist
plfit = plist('Function', mdl, ...
  'P0', [5 3.01 7.27e-5 1.00e-5 0.325], ...  % set initial guess for the parameters
  'LB', [4 2.5 1e-5 1e-6 0], ...             % set lower bounds for parameters
  'UB', [6 3.5 1e-3 1e-4 2*pi], ...          % set upper bounds for parameters
  'algorithm', 'fminunc' ...                 % set fitting algorithm
  );

% do fit
params = xfit(data, plfit)

%% 11) check results

bestMdl = eval(params);

% plotting results
iplot(data,bestMdl)
iplot(data-bestMdl)
iplot(psd(data-bestMdl))

% Comments.
% The fit is good, even for the full model (including the bias).

%% 12) Comments

% Let's compare the fit results with the true parameters.
%
% The data were generated with the following set of parameters:
%   a  = 5
%   P1 = 3
%   P2 = 1e-4
%   P3 = 1e-5
%   P4 = 0.3
%
% The final fit results are:
%   chi2 = 1.03
%   dof  = 995
%   a  = 4.965     +/- 0.033
%   P1 = 3.018     +/- 0.047
%   P2 = 6.6e-5    +/- 3.1e-5
%   P3 = 1.0032e-5 +/- 3.1e-8
%   P4 = 0.334     +/- 0.043
% These are in good agreement with the expected values.
%
% Are the estimates biased? No, they are not. Indeed, the normalised
% deviations |p_fit - p_true|/dp are:
%   a  : 1.1
%   P1 : 0.39
%   P2 : 1.1
%   P3 : 1.1
%   P4 : 0.80
% i.e. every parameter is recovered to within about one standard deviation.
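
% As a quick cross-check, the deviations above can be reproduced with plain
% MATLAB arithmetic (values copied from the fit report; small differences
% with the quoted ratios come from rounding):
p_true = [5      3      1e-4    1e-5       0.3  ];   % generation values
p_fit  = [4.965  3.018  6.6e-5  1.0032e-5  0.334];   % best-fit values
dp     = [0.033  0.047  3.1e-5  3.1e-8     0.043];   % 1-sigma uncertainties
disp(abs(p_fit - p_true)./dp)                        % normalised deviations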