view src/samer/models/SmoothGeneralisedExponential.java @ 5:b67a33c44de7

Remove some crap, etc
author samer
date Fri, 05 Apr 2019 21:34:25 +0100
parents bf79fb79ee13
children
line wrap: on
line source
package samer.models;

import samer.maths.*;
import samer.maths.opt.*;
import samer.tools.*;
import samer.core.*;
import samer.core.types.*;
import java.util.*;

/**
	Non-adaptive generalised exponential factorial prior: the pointy
	bit of the usual GeneralisedExponential has been smoothed out
	by blending with a quadratic.

	<p>Each component i has energy e_i = x_i^2 / (|x_i|^(2-a_i) + eps_i),
	which behaves like |x_i|^a_i away from zero but is quadratic (and hence
	smooth) near x_i = 0. The per-component smoothing offsets eps_i are
	derived from a single scalar via {@link #setEps(double)} and are
	recomputed whenever the alpha vector changes (this class observes it).
 */

public class SmoothGeneralisedExponential extends NamedTask implements Model, Observer {
	Vec		input;			// current input vector (set via setInput)
	VVector	alpha, e, grad;	// exponents a_i, per-component energies e_i, gradient phi
	int			N;			// dimensionality
	VDouble	E;				// total energy: sum of e_i
	double[]	x, g, e0, _e, a, eps;	// backing arrays; e0 is allocated but never used — kept for compatibility
	double	last_eps;		// scalar last passed to setEps, reused when alpha changes

	/** Build a model sized to the given input vector and attach it as input. */
	public SmoothGeneralisedExponential(Vec x) { this(x.size()); setInput(x); }

	/** Build an n-dimensional model; all exponents start at 1, eps scale at 0.1. */
	public SmoothGeneralisedExponential(int n)  {
		super("genExp");
		Shell.push(node);
		N=n;
		E=new VDouble("E");
		e=new VVector("e",N);
		grad=new VVector("phi",N);
		alpha=new VVector("alpha",N);
		eps=new double[N];
		alpha.addSaver();
		Shell.pop();

		// recompute the per-component eps[] whenever alpha is edited
		alpha.addObserver(this);
		g=grad.array();
		e0=new double[N];
		a=alpha.array();
		_e=e.array();

		Mathx.setAll(a,1.0);
		setEps(0.1);
	}

	/** Alpha changed: rebuild eps[] from the last scalar eps. */
	public void update(Observable obs, Object arg) {
		setEps(last_eps);
	}

	/**
	 * Set the smoothing scale: eps_i = e^(2-a_i), so the quadratic blend
	 * kicks in at |x| of order e regardless of the exponent a_i.
	 */
	public void setEps(double e) {
		last_eps=e;
		for (int i=0; i<N; i++) { eps[i]=Math.pow(e,2-a[i]); }
	}

	/** Attach a new input vector; compute() reads its backing array directly. */
	public void setInput(Vec in) { input=in; x=input.array(); }
	public int getSize() { return N; }

	/** Release viewable resources and detach from the alpha observable. */
	public void dispose() {
		alpha.deleteObserver(this);	// stop update() firing on a disposed model
		alpha.dispose();
		e.dispose();				// fixed: e was created like grad but never disposed (leak)
		grad.dispose();
		E.dispose();
	}

	public VVector	getEnergyVector() { return e; }
	public VDouble	getEnergySignal() { return E; }
	public double	getEnergy() { return E.value; }
	public double [] getGradient() { return g; 	}
	public VVector  getAlpha() { return alpha; }

	public void run() { compute(); }
	public void infer() {}	// nothing to infer: this prior has no latent state

	/** Compute per-component energies, gradient and total energy for the current input. */
	public void compute() {
		// energy: e_i = x_i^2 / (|x_i|^(2-a_i) + eps_i)
		// ~ |x|^a_i away from zero, quadratic (smooth) near zero
		for (int i=0; i<N; i++) {
			double t=Math.abs(x[i]);
			_e[i] = t*t/(Math.pow(t,2-a[i]) + eps[i]);
		}

		// gradient g_i = dE/dx_i using a_i*e_i/x_i, exact in the eps->0 limit
		// NOTE(review): drops the eps-dependent term of the true derivative — confirm intended
		for (int i=0; i<N; i++) {
			if (x[i]==0) g[i]=0;	// limit value of the quadratic branch at the origin
			else g[i] = a[i]*(_e[i]/x[i]);
		}

		e.changed();
		grad.changed();
		E.set(Mathx.sum(_e));
	}

	/**
	 * Stateless functional view for optimisers: evaluates the same energy and
	 * gradient as compute(), but on caller-supplied arrays instead of the
	 * model's input/grad vectors.
	 */
	public Functionx functionx() {
		return new Functionx() {
			double [] __e=new double[N];	// scratch buffer private to this closure
			public void dispose() {}
			public void evaluate(Datum P) { P.f=evaluate(P.x,P.g); }
			public double evaluate(double [] x, double [] g) {
				for (int i=0; i<N; i++) {
					if (x[i]==0) { g[i]=0; __e[i]=0; }
					else {
						double t=Math.abs(x[i]);
						__e[i] = t*t/(Math.pow(t,2-a[i]) + eps[i]);
						g[i] = a[i]*(__e[i]/x[i]);
					}
				}
				return  Mathx.sum(__e); // +L0;
			}
		};
	}
}