comparison: toolboxes/FullBNT-1.0.7/KPMstats/weightedRegression.m @ 0:e9a9cd732c1e (tip)
changeset description: first hg version after svn
author: wolffd
date: Tue, 10 Feb 2015 15:05:51 +0000
compared revisions: -1:000000000000 → 0:e9a9cd732c1e (file added)
function [a, b, error] = weightedRegression(x, z, w)
% [a, b, error] = weightedRegression(x, z, w)
% Weighted scalar linear regression
%
% Find a, b to minimize
%   error = sum(w .* |z - (a*x + b)|^2)
% where each x(i) is a scalar

if nargin < 3, w = ones(1,length(x)); end

% Force row vectors
w = w(:)';
x = x(:)';
z = z(:)';

% Weighted sufficient statistics
W = sum(w);
Y = sum(w .* z);
YY = sum(w .* z .* z);
YTY = sum(w .* z .* z); % identical to YY in the scalar case
X = sum(w .* x);
XX = sum(w .* x .* x);
XY = sum(w .* x .* z);

% Solve for the intercept b and slope a from the weighted sufficient statistics
[b, a] = clg_Mstep_simple(W, Y, YY, YTY, X, XX, XY);
error = sum(w .* (z - (a*x + b)).^2);

if 0
  % demo
  seed = 1;
  rand('state', seed); randn('state', seed);
  x = -10:10;
  N = length(x);
  noise = randn(1,N);
  aTrue = rand(1,1);
  bTrue = rand(1,1);
  z = aTrue*x + bTrue + noise;

  % With uniform weights the fit should match ordinary least squares (regress)
  w = ones(1,N);
  [a, b, err] = weightedRegression(x, z, w);

  b2 = regress(z(:), [x(:) ones(N,1)]);
  assert(approxeq(b, b2(2)))
  assert(approxeq(a, b2(1)))

  % Weight the 15th point heavily so the fit passes (almost) exactly through it
  w(15) = 1000;
  [aW, bW, errW] = weightedRegression(x, z, w);

  figure;
  plot(x, z, 'ro')
  hold on
  plot(x, a*x+b, 'bx-')
  plot(x, aW*x+bW, 'gs-')
  title(sprintf('a=%5.2f, aHat=%5.2f, aWHat=%5.3f, b=%5.2f, bHat=%5.2f, bWHat=%5.3f, err=%5.3f, errW=%5.3f', ...
        aTrue, a, aW, bTrue, b, bW, err, errW))
  legend('truth', 'ls', 'wls')

end
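
For reference, a minimal sketch of the closed-form weighted least-squares solution that the clg_Mstep_simple call is assumed to compute from these sufficient statistics in the scalar case. The function name solveWeightedLS is hypothetical and not part of KPMstats; it illustrates the normal equations rather than the toolbox's actual implementation:

function [a, b] = solveWeightedLS(x, z, w)
% Hypothetical stand-in: solve min over a,b of sum(w .* (z - (a*x + b)).^2) directly.
w = w(:)'; x = x(:)'; z = z(:)';
W  = sum(w);            % total weight
X  = sum(w .* x);       % weighted sum of x
Y  = sum(w .* z);       % weighted sum of z
XX = sum(w .* x .* x);  % weighted sum of x.^2
XY = sum(w .* x .* z);  % weighted cross term
% Setting the derivatives with respect to a and b to zero gives the normal
% equations  a*XX + b*X = XY  and  a*X + b*W = Y, whose solution is:
a = (W*XY - X*Y) / (W*XX - X^2);
b = (Y - a*X) / W;

With w = ones(1,N) this reduces to ordinary least squares, which is what the regress check in the demo above verifies.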