diff src/scheme/models.scm @ 0:bf79fb79ee13

Initial Mercurial check in.
author samer
date Tue, 17 Jan 2012 17:50:20 +0000
parents
children
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/scheme/models.scm	Tue Jan 17 17:50:20 2012 +0000
@@ -0,0 +1,225 @@
+(import "samer.models.*")
+
+(define vec VVector.)
+
+; prior-spec: (name constructor mk-trainer batch flush-tasks)
+(define (iid-spec logprior)  `("iid" ,(lambda (n) (IIDPrior. n logprior))))
+(define genexp-spec `("genexp" 	,samer.models.GeneralisedExponential. ,.getTrainer 128))
+(define cauchy-spec `("cauchy"	,(lambda (n) (IIDPrior. n (LogCauchy.)))))
+(define gauss-spec  `("gauss" 	,(lambda (n) (IIDPrior. n (HalfSquare.)))))
+(define laplace-spec `("laplace" ,(lambda (n) (IIDPrior. n (Abs.)))))
+(define ica-spec	`("ica" ,ICA. ,.getTrainer 512))	; superseded by the ica-spec procedure below
+(define a-gauss-spec  `("gauss" 	,(lambda (n) (AlignedGaussian. n))))
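+
+; Note: only genexp-spec and ica-spec above carry a trainer accessor and batch
+; size after the constructor; the other specs are (name constructor) only, so
+; maybe-train (below) leaves those models untrained.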
+
+
+;;; macro for extracting things from a model-spec
+(define-macro (let-model name const train spec . body)
+	`(let ((,name (car ,spec)) (,const (cadr ,spec)) (,train (cddr ,spec)))
+			. ,body))
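+
+; Rough sketch of what the macro does (binding names hypothetical):
+;   (let-model name cons train genexp-spec ...)
+; binds name -> "genexp", cons -> the constructor, and train -> everything
+; after the constructor, i.e. the (trainer batch [post-flush]) tail.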
+
+
+;; what about flush tasks?
+(define (maybe-train model train)
+  (if (not (null? train))
+    (let ((trainer (BatchedTrainer. ((car train) model) (cadr train)))
+          ; the spec may omit a post-flush task, so guard the caddr
+          (post-flush (if (pair? (cddr train)) (caddr train) '())))
+      (addtask trainer)
+      (if (not (null? post-flush))
+        (.setPostFlush trainer post-flush)))))
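+
+; Sketch of the effect, assuming a spec like genexp-spec above: with
+; train = (.getTrainer 128), this wraps (.getTrainer model) in a BatchedTrainer
+; of batch size 128 and registers it with addtask; an optional third element
+; of the training spec becomes the trainer's post-flush task.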
+
+;;; this is a sort of model combinator
+
+(define (chain-2 x spec-1 spec-2)
+  (let-model name-1 cons-1 train-1 spec-1
+  (let-model name-2 cons-2 train-2 spec-2
+    (node name-1
+      (let* ((model-1 (cons-1 x))
+             (model-2 (node name-2 (cons-2 (.output model-1)))))
+        (.setOutputModel model-1 model-2)
+        (addtasks model-1 model-2)
+        (node name-2 (maybe-train model-2 train-2))
+        (maybe-train model-1 train-1)
+        (list model-1 model-2))))))
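+
+; Hypothetical usage: (chain-2 in (scaler-spec .getTrainer 8) laplace-spec)
+; builds a trained Scaler on the input vector and a Laplacian prior on its
+; output, returning (list scaler-model prior-model) -- essentially what
+; mk-scaler/norm below do before taking the scaler's output.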
+
+(define (scaler-spec . training-spec)
+	(append `("scaler" ,Scaler.) training-spec))
+
+(define (mk-scaler x prior . trainer)
+  (node (.getNode x)
+    (let ((models (chain-2 x (apply scaler-spec trainer) prior)))
+	   (.output (car models)))))
+
+
+(define (mk-diffscaler x prior . trainer)
+  (node (.getNode x)
+    (chain-2 x (append `("diffScaler" ,DiffScaler.) trainer) prior)))
+
+
+;;; these are the old methods defined in terms of the above
+(define (norm in) (mk-scaler in laplace-spec .getTrainer 8))
+(define (offset in) (mk-scaler in laplace-spec .getOffsetTrainer 8))
+(define (nonorm in) (mk-scaler in laplace-spec))
+(define (diffnorm x prior) (.output (car (mk-diffscaler x prior .getTrainer 16))))
+(define (diffscale x prior) (.output (car (mk-diffscaler x prior .getScaleTrainer 16))))
+(define (diffoffset x prior) (.output (car (mk-diffscaler x prior .getOffsetTrainer 16))))
+(define (smoothscale x prior) (.output (car (mk-diffscaler x prior .getTensionedTrainer 16))))
+
+
+;;;; ........... ICA ............
+
+; trainers: .getTrainer
+; trainers: .getDecayWhenActiveTrainer
+(define (ica-spec . training-spec)
+  (let ((ica-cons
+          (lambda (x)
+            (let ((ica (ICA. (cur-node) (.size x))))
+              (.setInput ica x)
+              (Shell.registerViewable ica)
+              ica))))
+    `("ica" ,ica-cons . ,training-spec)))
+
+
+(define (mk-ica x prior . trainer)
+  (node (.getNode x)
+	 (if (null? trainer)
+		(car (chain-2 x (ica-spec) prior))
+		(car (chain-2 x (ica-spec (car trainer) (.size x)) prior)))))
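+
+; Hypothetical usage: (mk-ica (norm in) laplace-spec .getTrainer) chains an
+; ICA model onto the normalised input with a Laplacian prior on its outputs,
+; training the ICA weights in batches of (.size x).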
+
+
+ ; auto: load weights, basis rowcolumn viewer
+;	(matexec (.getWeightMatrix ica) "load")
+;	(matexec (.getBasisMatrix ica) "rowcolumn")
+
+; flush tasks:
+;		(exec ica "basis")
+;		(exec ica "logdet")
+;		(exec basis "save")
+;		(ICAScalerSync.) (for 3 model version)
+
+
+; trainers: .getScaleTrainer
+; trainers: .getDiffTrainer
+; (define (mk-ica-scaler x prior trainer)
+;   (node (.getNode x)
+;     (let ((models (chain-2 x `("ica" ,ICAWithScaler. ,trainer ,(.size x)) prior)))
+; 	 	(Shell.registerViewable (car models))
+; 	   (.output (car models)))))
+
+;		(.diffFlush trainer)				; (more frequent)
+
+
+
+(define (histogram vec bins)
+	(define hist (node (.getNode vec) (Histogram. vec bins)))
+	;(addtask hist)
+	hist)
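+
+; Hypothetical usage (bin count arbitrary): (histogram (.output model) 64)
+; builds a Histogram over the vector under that vector's node; re-enable the
+; commented addtask to register it as a running task.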
+
+
+(define (chain-3 x spec-1 spec-2 spec-3)
+  (let-model name-1 cons-1 train-1 spec-1
+  (let-model name-2 cons-2 train-2 spec-2
+  (let-model name-3 cons-3 train-3 spec-3
+    (node name-1
+      (let* ((model-1 (cons-1 x))
+             (model-2 (node name-2 (cons-2 (.output model-1))))
+             (model-3 (node name-2 (node name-3 (cons-3 (.output model-2))))))
+        (.setOutputModel model-1 model-2)
+        (.setOutputModel model-2 model-3)
+        (addtasks (task
+            (.infer model-1)
+            (.infer model-2)
+            ; (.infer model-3)
+            ; this is wrong - only need to compute if training
+            (.compute model-3)
+            (.compute model-2)))
+        ; no need to compute model 1 if there is no model 0
+        (maybe-train model-1 train-1)
+        (node name-2 (maybe-train model-2 train-2))
+        (node name-2 (node name-3 (maybe-train model-3 train-3)))
+        (list model-1 model-2 model-3)))))))
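+
+; Hypothetical usage:
+;   (chain-3 in (scaler-spec .getTrainer 8) (ica-spec .getTrainer 512) laplace-spec)
+; stacks scaler -> ica -> prior, each stage modelling the output of the one
+; before it; returns (list model-1 model-2 model-3).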
+
+;; flush tasks for model 1/2 trainers?
+	'(sub 32 (seq
+		(ICAScalerSync. ica scaler)
+		(task (exec ica "basis"))))
+
+;;;; Mixture models
+
+(define (mix-2 x spec-1 spec-2)
+  (let-model name-1 cons-1 train-1 spec-1
+  (let-model name-2 cons-2 train-2 spec-2
+  (let* ((model-1 (node name-1 (cons-1 x)))
+         (model-2 (node name-2 (cons-2 x)))
+         (mixture (Mixture. x 2))
+         (trainer (.getTrainer mixture)))
+    (.setModel mixture 0 model-1)
+    (.setModel mixture 1 model-2)
+    (.setTrainer trainer 0 (.getTrainer model-1))
+    (.setTrainer trainer 1 (.getTrainer model-2))
+    (addtasks
+      (task
+        (.infer model-1) (.compute model-1)
+        (.infer model-2) (.compute model-2)
+        (.infer mixture))
+      (BatchedTrainer. trainer 4))))))
+
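+
+; Hypothetical usage, assuming the component models expose getTrainer:
+; (mix-2 in gauss-spec laplace-spec) builds a two-component Mixture over the
+; input with Gaussian and Laplacian components, inferring both components and
+; the mixture in one task and batching the mixture trainer with size 4.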
+
+
+
+;;;; Noisy ICA
+
+(import "samer.maths.opt.*")
+
+(define (std-opt s opt)
+	(put "optimiser" opt)
+	(expose (.getViewer opt) "optimiser")
+	(seq (GenerateVector. s (Zero.)) opt))
+
+
+(define (uqn-optimiser s fn) (std-opt s (UnconstrainedMinimiser. s fn)))
+(define (ucg-optimiser s fn) (std-opt s (UnconstrainedConjGrad. s fn)))
+(define (pos-optimiser s fn) (std-opt s (ConstrainedMinimiser. s fn Positivity.class)))
+(define (zc-optimiser s fn)
+	(Shell.put "ZeroCrossingSparsity.jump" (VDouble. "logprior.jump" 1.0))
+	(std-opt s (ConstrainedMinimiser. s fn ZeroCrossingSparsity.class)))
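+
+; Any of the wrappers above can serve as the mk-optimiser argument to noisyica
+; below: each returns a task that (presumably) resets the source vector s to
+; zero and then runs the chosen minimiser on the posterior.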
+
+
+(define (ica-cons m) (lambda (x)
+	(let ((ica (NoisyICA. (cur-node) (.size x) m)))
+		(.setInput ica x)
+		ica)))
+
+(define (noisyica input num-outs e-spec s-spec mk-optimiser)
+	(mk-noisyica input
+		`("noisyica" ,(ica-cons num-outs) ,.learnDecayWhenActive ,(.size input))
+		e-spec s-spec mk-optimiser))
+
+(define (noisyica-notrain input num-outs e-spec s-spec mk-optimiser)
+	(mk-noisyica input
+		`("noisyica" ,(ica-cons num-outs) )
+		e-spec s-spec mk-optimiser))
+
+(define (mk-noisyica input ica-spec e-spec s-spec mk-optimiser)
+  ; input: the input vector
+  (let-model ica-name ica-cons ica-train ica-spec
+  (let-model e-name e-cons e-train e-spec
+  (let-model s-name s-cons s-train s-spec
+  (node ica-name
+    (letrec ((ica     (ica-cons input))
+             (s-model (node "s" (s-cons (.output ica))))
+             (e-model (node "e" (e-cons (.error ica)))))
+
+      (.setSourceModel ica s-model)
+      (.setNoiseModel  ica e-model)
+      (.setInferenceTask ica (mk-optimiser (.output ica) (.posterior ica)))
+;     (on-change (.basisMatrix ica)
+;       (.setHessian (get "optimiser")
+;         (.inverse (.times (.transpose A) A))))
+
+      (addtasks ica e-model s-model)
+      (maybe-train ica ica-train)
+      (node "e" (node e-name (maybe-train e-model e-train)))
+      (node "s" (node s-name (maybe-train s-model s-train)))
+
+      (list ica e-model s-model)))))))
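+
+; Hypothetical usage (output count arbitrary):
+;   (noisyica (norm in) 20 gauss-spec laplace-spec uqn-optimiser)
+; builds a NoisyICA with 20 outputs, a Gaussian model on the residual error,
+; a Laplacian prior on the sources, and unconstrained-minimiser inference
+; of the sources.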
+