function [a, b, error] = weightedRegression(x, z, w)
% [a, b, error] = weightedRegression(x, z, w)
% Weighted scalar linear regression.
%
% Find a, b to minimize
%   error = sum(w .* |z - (a*x + b)|^2)
% where each x(i) is a scalar. If w is omitted, all weights default to 1.

if nargin < 3, w = ones(1,length(x)); end

% Force everything to be row vectors.
w = w(:)';
x = x(:)';
z = z(:)';

% Weighted sufficient statistics.
W = sum(w);
Y = sum(w .* z);
YY = sum(w .* z .* z);
YTY = sum(w .* z .* z);   % same as YY in the scalar case; clg_Mstep_simple expects both
X = sum(w .* x);
XX = sum(w .* x .* x);
XY = sum(w .* x .* z);

% Solve the weighted normal equations; b is the intercept, a the slope.
[b, a] = clg_Mstep_simple(W, Y, YY, YTY, X, XX, XY);
error = sum(w .* (z - (a*x + b)).^2);   % weighted residual sum of squares

if 0
  % demo
  seed = 1;
  rand('state', seed); randn('state', seed);
  x = -10:10;
  N = length(x);
  noise = randn(1,N);
  aTrue = rand(1,1);
  bTrue = rand(1,1);
  z = aTrue*x + bTrue + noise;

  % An unweighted fit should match ordinary least squares (regress).
  w = ones(1,N);
  [a, b, err] = weightedRegression(x, z, w);

  b2 = regress(z(:), [x(:) ones(N,1)]);
  assert(approxeq(b, b2(2)))
  assert(approxeq(a, b2(1)))

  % Up-weight the 15th point so the weighted fit passes (almost) through x(15).
  w(15) = 1000;
  [aW, bW, errW] = weightedRegression(x, z, w);

  figure;
  plot(x, z, 'ro')
  hold on
  plot(x, a*x+b, 'bx-')
  plot(x, aW*x+bW, 'gs-')
  title(sprintf('a=%5.2f, aHat=%5.2f, aWHat=%5.3f, b=%5.2f, bHat=%5.2f, bWHat=%5.3f, err=%5.3f, errW=%5.3f', ...
      aTrue, a, aW, bTrue, b, bW, err, errW))
  legend('truth', 'ls', 'wls')

end
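
% ------------------------------------------------------------------------
% Hedged sketch (not part of the original file): clg_Mstep_simple comes
% from BNT and is assumed here to solve the weighted normal equations from
% the sufficient statistics computed above. In the scalar case the solution
% has a simple closed form, shown by the local function below for reference.
% The name wlsClosedForm is purely illustrative, and the formula assumes
% W*XX - X^2 is nonzero (i.e. the x values are not all identical).

function [a, b] = wlsClosedForm(W, X, XX, Y, XY)
% Closed-form weighted least squares for z ~ a*x + b, given the weighted
% sums W = sum(w), X = sum(w.*x), XX = sum(w.*x.^2), Y = sum(w.*z),
% XY = sum(w.*x.*z). Derived from the normal equations:
%   a = (W*XY - X*Y) / (W*XX - X^2)
%   b = (Y - a*X) / W
a = (W*XY - X*Y) / (W*XX - X^2);
b = (Y - a*X) / W;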