/*
 *	Copyright (c) 2002, Samer Abdallah, King's College London.
 *	All rights reserved.
 *
 *	This software is provided AS IS and WITHOUT ANY WARRANTY;
 *	without even the implied warranty of MERCHANTABILITY or
 *	FITNESS FOR A PARTICULAR PURPOSE.
 */

package samer.models;

import samer.core.*;
import samer.maths.*;
import samer.maths.opt.*;
import samer.tools.*;

public interface Model
{
	/** return size of vector this model expects */
	int getSize();
	
	/** model should begin observing this vector */
	void setInput(Vec x);

	/** should infer the values of the latent variables */
	void infer();

	/**
		The contract is that getEnergy and getGradient
		must return correct values for the current x after
		infer and compute have been called, but not necessarily
		before.

		This gives the model an opportunity to cache the
		values of the energy and gradient and so avoid
		repeated computation.
	*/
	void compute();
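
	/*
	   A minimal sketch of the intended call sequence, following the
	   contract above (the names "model" and "x" are illustrative only,
	   not part of this interface):

	     Vec x = ...;                         // an input vector of size model.getSize()
	     model.setInput(x);                   // model begins observing x
	     model.infer();                       // infer latent variables for the current x
	     model.compute();                     // cache energy and gradient
	     double E   = model.getEnergy();      // E = -log p(x)
	     double[] g = model.getGradient();    // dE/dx
	*/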

	/** return E = -log p(x) */
	double getEnergy();

	/** return dE/dx */
	double [] getGradient();

	/** release any resources held by this model */
	public void dispose();

	/** This presents a more functional interface to the model
	so that it can be driven by an optimiser. See classes
	Functionx and MinimiserBase in package samer.maths.opt. */

	public Functionx functionx();
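
	/*
	   Illustrative use (a sketch only; how an optimiser such as
	   MinimiserBase consumes the Functionx is determined by
	   samer.maths.opt and is not shown here):

	     Functionx f = model.functionx();   // functional view of the model's energy
	     // an optimiser from samer.maths.opt can then drive f,
	     // e.g. to find the x that minimises E = -log p(x)
	*/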

	/** This represents a training algorithm for a Model.
		The Trainer is responsible for counting calls to accumulate()
		between flushes. (A usage sketch follows this interface.)
	*/

	public interface Trainer {
		/** collect statistics for parameter update */
		public void accumulate();

		/** weighted accumulate */
		public void accumulate(double w);

		/** use collected stats to update parameters and reset */
		public void flush();

		/** Must be equivalent to reset(); accumulate(); flush();
			but can be optimised for non-batched training */
		public void oneshot();

		/** reset accumulators without updating parameters */
		public void reset();

		/** release any resources held by this trainer */
		public void dispose();
	}
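
	/*
	   A sketch of one possible batched training loop using a Trainer
	   (the names "model", "trainer", "x", "numSamples" and "batchSize"
	   are illustrative assumptions, not part of this interface):

	     model.setInput(x);                    // observe the input vector x
	     for (int i = 0; i < numSamples; i++) {
	         // ... load the next sample into x ...
	         model.infer();
	         model.compute();
	         trainer.accumulate();                         // gather statistics for the update
	         if ((i+1) % batchSize == 0) trainer.flush();  // apply update and reset
	     }

	   For purely online updates, trainer.oneshot() is equivalent to
	   reset(); accumulate(); flush(); but may be cheaper.
	*/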
}