diff src/samer/models/Scaler.java @ 0:bf79fb79ee13

Initial Mercurial check in.
author samer
date Tue, 17 Jan 2012 17:50:20 +0000
parents
children
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/samer/models/Scaler.java	Tue Jan 17 17:50:20 2012 +0000
@@ -0,0 +1,271 @@
+/*
+ *	Copyright (c) 2002, Samer Abdallah, King's College London.
+ *	All rights reserved.
+ *
+ *	This software is provided AS IS and WITHOUT ANY WARRANTY;
+ *	without even the implied warranty of MERCHANTABILITY or
+ *	FITNESS FOR A PARTICULAR PURPOSE.
+ */
+
+package samer.models;
+
+import samer.core.*;
+import samer.core.types.*;
+import samer.maths.*;
+import samer.maths.opt.*;
+import samer.tools.*;
+
+/**
+	Automatic gain control for a given input vector.
+	Output appears in the "output" vector s. Also generates a trace
+	of the 'loudness' of the input signal.
+  */
+
+public class Scaler extends NullTask implements Model
+{
+	// Downstream model M receives the scaled output s = (x - offset)/multiplier.
+	private Model 		M;
+	private int			n;
+	private Vec			x;
+	private VVector	s;
+	private VDouble	multiplier;
+	private VDouble	offset;
+	// Cached log of multiplier; the n*logK term is the log-Jacobian of the
+	// scaling, added to the model energy in getEnergy()/functionx().
+	private double	logK;
+
+	// Raw array views: _x input, _s output, _g gradient buffer; phi is unused
+	// here (trainers fetch M.getGradient() locally) — kept null after init.
+	double []		_x, _s, _g, phi;
+
+	// Convenience constructors: wire input, then optionally attach the output
+	// model M, feeding it the scaled vector s.
+	public Scaler( Vec input, Model M) { this(input); setOutputModel(M); M.setInput(s); }
+	public Scaler( Vec input) { this(input.size()); setInput(input); }
+	// Core constructor: allocates output vector and the scale/offset
+	// parameters (scale=1, offset=0); input must be set before infer().
+	public Scaler( int N)
+	{
+		n = N;
+
+		x = null;
+		s = new VVector("output",n);
+		multiplier = new VDouble("scale",1.0,VDouble.SIGNAL);
+		offset = new VDouble("offset",0.0,VDouble.SIGNAL);
+		
+		_s = s.array();
+		_g = new double[n];
+		phi = null;
+		reset();
+		logK=Math.log(multiplier.value);
+	}
+
+	public int getSize() { return n; }
+	public VVector output() { return s; }
+	public VDouble getScale() { return multiplier; }
+	public VDouble getOffset() { return offset; }
+	public Model getOutputModel() { return M;	}
+	public void setOutputModel(Model m) { M=m;	}
+	public void setInput(Vec in) { x=in; _x=x.array(); }
+	// reset() is currently a no-op: parameter loading from the environment
+	// is commented out.
+	public void reset() {
+		// multiplier.load(Shell.env());
+		// offset.load(Shell.env());
+	}
+
+	public String toString() { return "Scaler:"+x; } // +"->"+s; }
+	public void dispose()
+	{
+		offset.dispose();
+		multiplier.dispose();
+		s.dispose();
+		super.dispose();
+	}
+
+	// Forward pass: s[i] = (x[i] - offset)/multiplier, then notify observers.
+	// NOTE(review): divides by multiplier.value with no zero guard — a zero
+	// or negative scale would produce Inf/NaN output; confirm callers keep it > 0.
+	public void infer() {
+		double a=offset.value, k=1/multiplier.value;
+		for (int i=0; i<n; i++) _s[i] = k*(_x[i]-a);
+		s.changed();
+	}
+
+	// Chain rule: dE/dx = (1/k) * dE/ds, pulled back from the output model.
+	public void compute() {
+		Mathx.mul(_g,M.getGradient(),1/multiplier.value);
+	}
+
+	// Energy of the composite model: downstream energy plus the n*log(k)
+	// normalisation term from the change of variables.
+	public double	getEnergy() { return M.getEnergy() + n*logK; }
+	public double [] getGradient() { return _g; }
+
+	// Wraps the downstream model's objective so that an optimiser working in
+	// x-space sees the scaled/offset coordinates transparently.
+	public Functionx functionx() {
+		return new Functionx() {
+			Functionx fM=M.functionx();
+			double [] s=new double[n];	// local scratch; shadows outer s deliberately
+
+			public void dispose() { fM.dispose(); }
+			public void evaluate(Datum P) { P.f=evaluate(P.x,P.g); }
+			public double evaluate(double [] x, double [] g) {
+				double a=offset.value, k=1/multiplier.value;
+				for (int i=0; i<n; i++) s[i] = k*(x[i]-a);
+				double E=fM.evaluate(s,g);
+				Mathx.mul(g,k);	// pull gradient back to x-space
+				return E+n*logK;
+			}
+		};
+	}
+
+	// Re-sync the cached log-scale in case multiplier was changed externally.
+	public void starting() { logK=Math.log(multiplier.value); }
+	public void stopping() {}
+	public void run() { infer(); }
+
+	public Trainer getTrainer() { return new Trainer(); }
+	public OffsetTrainer getOffsetTrainer()	{ return new OffsetTrainer(); }
+	public ScaleTrainer getScaleTrainer()	{ return new ScaleTrainer(); }
+
+	/** Trains both scale and offset by stochastic gradient, using the
+	    downstream model's gradient phi. The scale update is multiplicative
+	    (k *= exp(G)) so the scale stays positive; logK is kept in sync. */
+	public class Trainer extends AnonymousTask implements Model.Trainer
+	{
+		VDouble		rate1=new VDouble("scaleRate",0.001);
+		VDouble		rate2=new VDouble("offsetRate",0.000001);
+		double		G,H,count;	// G: scale gradient, H: offset gradient, count: accumulated weight
+		double []	_s;
+		int				n;
+
+
+		public Trainer() { _s = s.array(); n=Scaler.this.n; }
+
+		public void reset() { count=0; G=0; H=0; }
+		public String toString() { return "Trainer:"+Scaler.this; }
+
+		public VDouble getScaleRate() { return rate1; }
+		public VDouble getOffsetRate() { return rate2; }
+
+		public void accumulate() { accumulate(1); }
+		// Accumulate weighted gradient statistics: G += w*sum(phi_i*s_i - 1)
+		// (natural-gradient-style scale term), H += w*sum(phi).
+		public void accumulate(double w) {
+//			if (M.getEnergy() > 8000) return;
+			double [] phi=M.getGradient();
+			double	g=0;
+			for (int i=0; i<n; i++) g += phi[i]*_s[i] - 1;
+			G += w*g;
+			H += w*Mathx.sum(phi);
+			count+=w;
+		}
+
+		// Apply the averaged accumulated updates, then clear the accumulators.
+		public void flush() {
+			if (count==0) return; // nothing to do
+
+			double k=multiplier.value;
+			double mu=offset.value;
+
+			mu += (rate2.value/count)*k*H/n;
+			G *= rate1.value/(n*count);
+			k *= Math.exp(G);
+			multiplier.set(k);
+			offset.set(mu);
+			logK+=G;	// keep cached log(scale) consistent: log(k*exp(G)) = logK + G
+			reset();
+		}
+
+		// Single-sample update: accumulate-and-flush in one step, without
+		// touching the running accumulators' averaging.
+		public void oneshot() {
+			double [] phi=M.getGradient();
+			G=0; H = Mathx.sum(phi);
+			for (int i=0; i<n; i++) G += phi[i]*_s[i] - 1;
+
+			double k=multiplier.value;
+			double mu=offset.value;
+
+			mu += rate2.value*k*H/n;
+			G *= rate1.value/n;
+			k *= Math.exp(G);
+			multiplier.set(k);
+			offset.set(mu);
+			logK+=G;
+		}
+
+
+		// NOTE(review): does not call super.dispose(), unlike Scaler.dispose();
+		// confirm AnonymousTask.dispose() has nothing that needs running here.
+		public void dispose() { rate1.dispose(); rate2.dispose(); }
+		public void starting() { reset(); logK=Math.log(multiplier.value); }
+		public void run() { oneshot(); }
+	}
+
+	/** Trains the scale only; same multiplicative update as Trainer but
+	    without the offset term. */
+	public class ScaleTrainer extends AnonymousTask implements Model.Trainer
+	{
+		VDouble	rate1=new VDouble("scaleRate",0.001);
+		double		G,count;
+		double []	_s;
+		int				n;
+
+
+		public ScaleTrainer() { _s = s.array(); n=Scaler.this.n; }
+		public String toString() { return "ScaleTrainer:"+Scaler.this; }
+
+		public void reset() { count=0; G=0; }
+
+		public void accumulate() { accumulate(1); }
+		public void accumulate(double w) {
+			double [] phi=M.getGradient();
+			double	g=0;
+			for (int i=0; i<n; i++) g += phi[i]*_s[i] - 1;
+			G += w*g;
+			count+=w;
+		}
+
+		public void flush() {
+			if (count==0) return; // nothing to do
+
+			double k=multiplier.value;
+
+			G *= rate1.value/(n*count);
+			k *= Math.exp(G);
+			multiplier.set(k);
+			logK+=G;
+			reset();
+		}
+
+		public void oneshot() {
+			double [] phi=M.getGradient();
+			G=0;
+			for (int i=0; i<n; i++) G += phi[i]*_s[i] - 1;
+
+			double k=multiplier.value;
+
+			G *= rate1.value/n;
+			k *= Math.exp(G);
+			multiplier.set(k);
+			logK+=G;
+		}
+
+
+		public void dispose() { rate1.dispose(); }
+		public void starting() { reset(); logK=Math.log(multiplier.value); }
+		public void run() { oneshot(); }
+	}
+
+	/** This trains only the offset, not the scale */
+	public class OffsetTrainer extends AnonymousTask implements Model.Trainer
+	{
+		VDouble		rate2=new VDouble("offsetRate",0.000001);
+		double		H, count;
+		int				n;
+
+		public OffsetTrainer() { n=Scaler.this.n; }
+		public String toString() { return "OffsetTrainer:"+Scaler.this; }
+
+		public void reset() { count=0; H=0; }
+
+		public void accumulate() { accumulate(1); }
+		public void accumulate(double w) {
+			double [] phi=M.getGradient();
+			H += w*Mathx.sum(phi);
+			count+=w;
+		}
+
+		// NOTE(review): writes offset.value directly and calls changed(),
+		// whereas Trainer uses offset.set(mu) — presumably equivalent, but
+		// confirm VDouble.set() does nothing beyond assign-and-notify.
+		public void flush() {
+			if (count==0) return; // nothing to do
+			offset.value += (rate2.value/count)*multiplier.value*H/n;
+			offset.changed();
+			reset();
+		}
+
+		public void oneshot() {
+			double [] phi=M.getGradient();
+			H = Mathx.sum(phi);
+			offset.value += rate2.value*multiplier.value*H/n;
+			offset.changed();
+		}
+
+		public void dispose() { rate2.dispose(); }
+		public void starting() { reset(); }
+		public void run() { oneshot(); }
+	}
+	// could have alternative trainers if prior is Gaussian or Laplacian,
+	// in which case, parameters can be estimated in closed form
+}
+