comparison src/samer/models/SmoothGeneralisedExponential.java @ 0:bf79fb79ee13

Initial Mercurial check-in.
author samer
date Tue, 17 Jan 2012 17:50:20 +0000
parents
children
package samer.models;

import samer.maths.*;
import samer.maths.opt.*;
import samer.tools.*;
import samer.core.*;
import samer.core.types.*;
import java.util.*;

/**
Non-adaptive generalised exponential factorial prior: the pointy
bit of the usual GeneralisedExponential has been smoothed out
by blending with a quadratic.
*/
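/*
    Added gloss (not in the original source), read off compute() below: each
    component contributes an energy

        e_i = x_i^2 / (|x_i|^(2 - alpha_i) + eps^(2 - alpha_i))

    which behaves like |x_i|^alpha_i when |x_i| is large relative to eps, but
    like the quadratic x_i^2 / eps^(2 - alpha_i) near zero, so the cusp of the
    plain generalised exponential at the origin is rounded off.
*/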

public class SmoothGeneralisedExponential extends NamedTask implements Model, Observer {
    Vec input;                      // current input vector, attached via setInput()
    VVector alpha, e, grad;         // exponents alpha_i, energies e_i, score phi_i
    int N;                          // dimensionality
    VDouble E;                      // total energy E = sum_i e_i
    double[] x, g, e0, _e, a, eps;  // backing arrays; eps_i = eps^(2-alpha_i); e0 is unused here
    double last_eps;                // eps last passed to setEps(), reused when alpha changes

    public SmoothGeneralisedExponential(Vec x) { this(x.size()); setInput(x); }
    public SmoothGeneralisedExponential(int n) {
        super("genExp");
        Shell.push(node);           // create the viewables below under this task's node
        N=n;
        E=new VDouble("E");
        e=new VVector("e",N);
        grad=new VVector("phi",N);
        alpha=new VVector("alpha",N);
        eps=new double[N];
        alpha.addSaver();
        Shell.pop();

        alpha.addObserver(this);    // refresh eps whenever alpha is edited
        g=grad.array(); // new double[N];
        e0=new double[N];
        a=alpha.array();
        _e=e.array();

        Mathx.setAll(a,1.0);        // start with alpha_i = 1, a Laplacian-like exponent
        setEps(0.1);
    }

    public void update(Observable obs, Object arg) {
        setEps(last_eps);           // alpha changed: recompute the per-component eps_i
    }
    public void setEps(double e) {
        last_eps=e;
        for (int i=0; i<N; i++) { eps[i]=Math.pow(e,2-a[i]); }   // eps_i = eps^(2-alpha_i)
    }
    public void setInput(Vec in) { input=in; x=input.array(); }
    public int getSize() { return N; }
    public void dispose() {
        alpha.dispose();
        grad.dispose();
        E.dispose();
    }

    public VVector getEnergyVector() { return e; }
    public VDouble getEnergySignal() { return E; }
    public double getEnergy() { return E.value; }
    public double [] getGradient() { return g; }
    public VVector getAlpha() { return alpha; }

    public void run() { compute(); }
    public void infer() {}
    public void compute() {
        // per-component energies (negative log-likelihood terms, up to normalisation):
        // e_i = x_i^2 / (|x_i|^(2-a_i) + eps_i)
        for (int i=0; i<N; i++) {
            double t=Math.abs(x[i]);
            _e[i] = t*t/(Math.pow(t,2-a[i]) + eps[i]);
        }

        // gradient g_i = dE/dx_i, written in the generalised-exponential form a_i*e_i/x_i
        // (this is the score of the unsmoothed |x|^a energy; for the smoothed e_i it is
        // a close approximation away from zero)
        for (int i=0; i<N; i++) {
            if (x[i]==0) g[i]=0;
            else g[i] = a[i]*(_e[i]/x[i]);
        }

        e.changed();
        grad.changed();
        E.set(Mathx.sum(_e));
    }

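    /*
        Added note (not in the original source): functionx() appears to package
        the same energy/score computation as compute() in a Functionx callback,
        so that an optimiser from samer.maths.opt can evaluate the prior at an
        arbitrary point P.x (value returned in P.f, gradient in P.g) rather than
        at the attached input vector.
    */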
    public Functionx functionx() {
        return new Functionx() {
            double [] __e=new double[N];
            public void dispose() {}
            public void evaluate(Datum P) { P.f=evaluate(P.x,P.g); }
            public double evaluate(double [] x, double [] g) {
                for (int i=0; i<N; i++) {
                    if (x[i]==0) { g[i]=0; __e[i]=0; }
                    else {
                        double t=Math.abs(x[i]);
                        __e[i] = t*t/(Math.pow(t,2-a[i]) + eps[i]);
                        g[i] = a[i]*(__e[i]/x[i]);
                    }
                }
                return Mathx.sum(__e); // +L0;
            }
        };
    }
}
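/*
    Usage sketch (added for illustration, not part of the original file; it
    assumes the samer.core Shell/viewable environment is already set up):

        Vec x = ...;   // any Vec supplying size() and array()
        SmoothGeneralisedExponential prior = new SmoothGeneralisedExponential(x);
        prior.setEps(0.05);                     // narrower quadratic region near zero
        prior.compute();                        // reads x, fills e, phi and E
        double   energy = prior.getEnergy();    // E = sum_i e_i
        double[] score  = prior.getGradient();  // g_i = a_i * e_i / x_i
*/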