comparison src/samer/models/Scaler.java @ 0:bf79fb79ee13

Initial Mercurial check in.
author samer
date Tue, 17 Jan 2012 17:50:20 +0000
parents
children
/*
 *  Copyright (c) 2002, Samer Abdallah, King's College London.
 *  All rights reserved.
 *
 *  This software is provided AS IS and WITHOUT ANY WARRANTY;
 *  without even the implied warranty of MERCHANTABILITY or
 *  FITNESS FOR A PARTICULAR PURPOSE.
 */

package samer.models;

import samer.core.*;
import samer.core.types.*;
import samer.maths.*;
import samer.maths.opt.*;
import samer.tools.*;

/**
    Automatic gain control for a given input vector.
    Output appears in the output vector. Also generates a trace
    of the 'loudness' of the input signal.
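
    <p>A minimal usage sketch (added for illustration, not from the original
    source): <code>input</code> and <code>model</code> stand for any existing
    <code>Vec</code> and downstream <code>Model</code>, and the downstream
    model is assumed to have computed its gradient before the trainer runs.
    <pre>
        Scaler scaler = new Scaler(input, model); // routes scaled output into model
        scaler.infer();                           // s = (x - offset)/scale
        // ... run the downstream model on the scaled output ...
        Scaler.Trainer trainer = scaler.getTrainer();
        trainer.accumulate();                     // gather scale/offset statistics
        trainer.flush();                          // apply the accumulated update
    </pre>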
*/

public class Scaler extends NullTask implements Model
{
    private Model M;
    private int n;
    private Vec x;
    private VVector s;
    private VDouble multiplier;
    private VDouble offset;
    private double logK;

    double [] _x, _s, _g, phi;

    public Scaler( Vec input, Model M) { this(input); setOutputModel(M); M.setInput(s); }
    public Scaler( Vec input) { this(input.size()); setInput(input); }
    public Scaler( int N)
    {
        n = N;

        x = null;
        s = new VVector("output",n);
        multiplier = new VDouble("scale",1.0,VDouble.SIGNAL);
        offset = new VDouble("offset",0.0,VDouble.SIGNAL);

        _s = s.array();
        _g = new double[n];
        phi = null;
        reset();
        logK=Math.log(multiplier.value);
    }

    public int getSize() { return n; }
    public VVector output() { return s; }
    public VDouble getScale() { return multiplier; }
    public VDouble getOffset() { return offset; }
    public Model getOutputModel() { return M; }
    public void setOutputModel(Model m) { M=m; }
    public void setInput(Vec in) { x=in; _x=x.array(); }
    public void reset() {
        // multiplier.load(Shell.env());
        // offset.load(Shell.env());
    }

    public String toString() { return "Scaler:"+x; } // +"->"+s; }
    public void dispose()
    {
        offset.dispose();
        multiplier.dispose();
        s.dispose();
        super.dispose();
    }

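    // Added note (not in the original source): infer() applies the affine map
    // s = (x - offset)/scale; compute() rescales the output model's gradient
    // by 1/scale, which is the chain rule for that map; and getEnergy() adds
    // n*log(scale), the log-Jacobian of the map, so the reported energy
    // corresponds to the unscaled input.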
    public void infer() {
        double a=offset.value, k=1/multiplier.value;
        for (int i=0; i<n; i++) _s[i] = k*(_x[i]-a);
        s.changed();
    }

    public void compute() {
        Mathx.mul(_g,M.getGradient(),1/multiplier.value);
    }

    public double getEnergy() { return M.getEnergy() + n*logK; }
    public double [] getGradient() { return _g; }

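    // Added note (not in the original source): the Functionx returned below
    // wraps the output model's objective with the same change of variables,
    // evaluating it at s = (x - offset)/scale, rescaling the returned gradient
    // by 1/scale and adding the n*log(scale) term.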
    public Functionx functionx() {
        return new Functionx() {
            Functionx fM=M.functionx();
            double [] s=new double[n];

            public void dispose() { fM.dispose(); }
            public void evaluate(Datum P) { P.f=evaluate(P.x,P.g); }
            public double evaluate(double [] x, double [] g) {
                double a=offset.value, k=1/multiplier.value;
                for (int i=0; i<n; i++) s[i] = k*(x[i]-a);
                double E=fM.evaluate(s,g);
                Mathx.mul(g,k);
                return E+n*logK;
            }
        };
    }

    public void starting() { logK=Math.log(multiplier.value); }
    public void stopping() {}
    public void run() { infer(); }

    public Trainer getTrainer() { return new Trainer(); }
    public OffsetTrainer getOffsetTrainer() { return new OffsetTrainer(); }
    public ScaleTrainer getScaleTrainer() { return new ScaleTrainer(); }

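    /**
        Adapts both the scale and the offset from the output model's gradient.
        (Comment added, not in the original source.) Each call to accumulate()
        adds phi.s - n, the negative derivative of the energy with respect to
        log(scale), to G, and the sum of the gradient to H; flush() applies the
        averaged updates at the configured rates.
    */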
    public class Trainer extends AnonymousTask implements Model.Trainer
    {
        VDouble rate1=new VDouble("scaleRate",0.001);
        VDouble rate2=new VDouble("offsetRate",0.000001);
        double G,H,count;
        double [] _s;
        int n;

        public Trainer() { _s = s.array(); n=Scaler.this.n; }

        public void reset() { count=0; G=0; H=0; }
        public String toString() { return "Trainer:"+Scaler.this; }

        public VDouble getScaleRate() { return rate1; }
        public VDouble getOffsetRate() { return rate2; }

        public void accumulate() { accumulate(1); }
        public void accumulate(double w) {
            // if (M.getEnergy() > 8000) return;
            double [] phi=M.getGradient();
            double g=0;
            for (int i=0; i<n; i++) g += phi[i]*_s[i] - 1;
            G += w*g;
            H += w*Mathx.sum(phi);
            count+=w;
        }

        public void flush() {
            if (count==0) return; // nothing to do

            double k=multiplier.value;
            double mu=offset.value;

            mu += (rate2.value/count)*k*H/n;
            G *= rate1.value/(n*count);
            k *= Math.exp(G);
            multiplier.set(k);
            offset.set(mu);
            logK+=G;
            reset();
        }

        public void oneshot() {
            double [] phi=M.getGradient();
            G=0; H = Mathx.sum(phi);
            for (int i=0; i<n; i++) G += phi[i]*_s[i] - 1;

            double k=multiplier.value;
            double mu=offset.value;

            mu += rate2.value*k*H/n;
            G *= rate1.value/n;
            k *= Math.exp(G);
            multiplier.set(k);
            offset.set(mu);
            logK+=G;
        }

        public void dispose() { rate1.dispose(); rate2.dispose(); }
        public void starting() { reset(); logK=Math.log(multiplier.value); }
        public void run() { oneshot(); }
    }

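    /**
        Trains only the scale, not the offset. (Comment added, not in the
        original source; mirrors the existing note on OffsetTrainer.)
    */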
    public class ScaleTrainer extends AnonymousTask implements Model.Trainer
    {
        VDouble rate1=new VDouble("scaleRate",0.001);
        double G,count;
        double [] _s;
        int n;

        public ScaleTrainer() { _s = s.array(); n=Scaler.this.n; }
        public String toString() { return "ScaleTrainer:"+Scaler.this; }

        public void reset() { count=0; G=0; }

        public void accumulate() { accumulate(1); }
        public void accumulate(double w) {
            double [] phi=M.getGradient();
            double g=0;
            for (int i=0; i<n; i++) g += phi[i]*_s[i] - 1;
            G += w*g;
            count+=w;
        }

        public void flush() {
            if (count==0) return; // nothing to do

            double k=multiplier.value;

            G *= rate1.value/(n*count);
            k *= Math.exp(G);
            multiplier.set(k);
            logK+=G;
            reset();
        }

        public void oneshot() {
            double [] phi=M.getGradient();
            G=0;
            for (int i=0; i<n; i++) G += phi[i]*_s[i] - 1;

            double k=multiplier.value;

            G *= rate1.value/n;
            k *= Math.exp(G);
            multiplier.set(k);
            logK+=G;
        }

        public void dispose() { rate1.dispose(); }
        public void starting() { reset(); logK=Math.log(multiplier.value); }
        public void run() { oneshot(); }
    }

    /** This trains only the offset, not the scale */
    public class OffsetTrainer extends AnonymousTask implements Model.Trainer
    {
        VDouble rate2=new VDouble("offsetRate",0.000001);
        double H, count;
        int n;

        public OffsetTrainer() { n=Scaler.this.n; }
        public String toString() { return "OffsetTrainer:"+Scaler.this; }

        public void reset() { count=0; H=0; }

        public void accumulate() { accumulate(1); }
        public void accumulate(double w) {
            double [] phi=M.getGradient();
            H += w*Mathx.sum(phi);
            count+=w;
        }

        public void flush() {
            if (count==0) return; // nothing to do
            offset.value += (rate2.value/count)*multiplier.value*H/n;
            offset.changed();
            reset();
        }

        public void oneshot() {
            double [] phi=M.getGradient();
            H = Mathx.sum(phi);
            offset.value += rate2.value*multiplier.value*H/n;
            offset.changed();
        }

        public void dispose() { rate2.dispose(); }
        public void starting() { reset(); }
        public void run() { oneshot(); }
    }
    // Could have alternative trainers if the prior is Gaussian or Laplacian,
    // in which case the parameters can be estimated in closed form.
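    // A minimal sketch of that closed-form case (added here for illustration,
    // not part of the original design; the method name is an assumption).
    // For a standard Gaussian prior on the output, the maximum-likelihood
    // offset and scale are the sample mean and standard deviation of the
    // current input; a Laplacian prior would use the median and the mean
    // absolute deviation instead.
    public void estimateGaussian() {
        double mean = Mathx.sum(_x)/n;              // ML offset
        double var = 0;
        for (int i=0; i<n; i++) { double d=_x[i]-mean; var += d*d; }
        double k = Math.sqrt(var/n);                // ML scale
        offset.set(mean);
        multiplier.set(k);
        logK = Math.log(k);
    }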
}