camir-aes2014: comparison of toolboxes/distance_learning/mlr/util/soft_classify.m @ 0:e9a9cd732c1e (tip)
first hg version after svn

author | wolffd
---|---
date | Tue, 10 Feb 2015 15:05:51 +0000
parents |
children |
function Ypredict = soft_classify(W, test_k, Xtrain, Ytrain, Xtest, Testnorm)
% Ypredict = soft_classify(W, test_k, Xtrain, Ytrain, Xtest, Testnorm)
%
%   W        = d-by-d positive semi-definite matrix
%   test_k   = k-value to use for KNN
%   Xtrain   = d-by-n matrix of training data
%   Ytrain   = n-by-1 vector of training labels
%   Xtest    = d-by-m matrix of testing data
%   Testnorm = m-by-#kernels matrix of k(i,i) for each i in test
%              (leave empty, [], when not in kernel mode)
%
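% Example (a sketch with hypothetical data and parameter values; assumes the
% MLR toolbox, including mlr_train, is on the path):
%
%   Xtrain = randn(10, 50);                 % 10-dimensional features, 50 training points
%   Ytrain = randi(3, 50, 1);               % labels drawn from 3 classes
%   Xtest  = randn(10, 20);                 % 20 test points
%   W      = mlr_train(Xtrain, Ytrain, 10); % learn a metric (hypothetical slack value)
%   Votes  = soft_classify(W, 5, Xtrain, Ytrain, Xtest, []);
%
% Votes(c, j) counts how many of the 5 nearest training neighbours of test
% point j carry the c-th label of unique(Ytrain).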

addpath('cuttingPlane', 'distance', 'feasible', 'initialize', 'loss', ...
        'metricPsi', 'regularize', 'separationOracle', 'util');

[d, nTrain, nKernel] = size(Xtrain);
nTest  = size(Xtest, 2);
test_k = min(test_k, nTrain);

% Testnorm is optional: an empty value means we are not in kernel mode
if nargin < 6
    Testnorm = [];
end

% Build the distance matrix and rank training points by distance to each test point
[D, I] = mlr_test_distance(W, Xtrain, Xtest, Testnorm);

% Soft prediction: per-class vote counts over the test_k nearest training neighbours
Ypredict = histc(Ytrain(I(1:test_k,:)), unique(Ytrain)');

end
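
% A minimal follow-up sketch (not used by this file) of reducing the soft vote
% counts above to hard label predictions, assuming Ypredict and Ytrain as
% defined in soft_classify:
%
%   classes   = unique(Ytrain);
%   [~, best] = max(Ypredict, [], 1);   % index of the winning class per test point
%   Yhard     = classes(best);          % predicted label for each test point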

function [D, I] = mlr_test_distance(W, Xtrain, Xtest, Testnorm)

% CASES:
%   Raw:                       W = []
%
%   Linear, full:              W = d-by-d
%   Single Kernel, full:       W = n-by-n
%   MKL, full:                 W = n-by-n-by-m
%
%   Linear, diagonal:          W = d-by-1
%   Single Kernel, diagonal:   W = n-by-1
%   MKL, diagonal:             W = n-by-m
%   MKL, diag-off-diag:        W = m-by-m-by-n

[d, nTrain, nKernel] = size(Xtrain);
nTest = size(Xtest, 2);

if isempty(W)
    % W = [] => native Euclidean distances
    D = mlr_test_distance_raw(Xtrain, Xtest, Testnorm);

elseif size(W,1) == d && size(W,2) == d
    % We're in a full-projection case
    D = setDistanceFullMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

elseif size(W,1) == d && size(W,2) == nKernel
    % We're in a simple diagonal case
    D = setDistanceDiagMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

elseif size(W,1) == nKernel && size(W,2) == nKernel && size(W,3) == nTrain
    % We're in diag-off-diag (DOD) mode
    D = setDistanceDODMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

else
    error('Cannot determine metric mode.');
end

% Keep only the train-vs-test block and sort each column by increasing distance
D = full(D(1:nTrain, nTrain + (1:nTest)));
[~, I] = sort(D, 1);
end
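
% For reference, a hedged sketch of the quantity the full d-by-d branch above
% is assumed to compute (hypothetical 2-D inputs; assumes setDistanceFullMKL
% returns squared distances under W):
%
%   Xtr   = randn(2, 5);  Xte = randn(2, 3);  W = eye(2);   % hypothetical data
%   delta = bsxfun(@minus, Xtr, Xte(:, 1));                 % differences to test point 1
%   d2    = sum(delta .* (W * delta), 1);                   % (x - y)' * W * (x - y), per column
%   [~, order] = sort(d2);                                  % should match I(:, 1)' above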

function D = mlr_test_distance_raw(Xtrain, Xtest, Testnorm)

[d, nTrain, nKernel] = size(Xtrain);
nTest = size(Xtest, 2);

if isempty(Testnorm)
    % Not in kernel mode: compute distances directly, one Xtrain slice per kernel
    D = 0;
    for i = 1:nKernel
        D = D + setDistanceDiag([Xtrain(:,:,i) Xtest(:,:,i)], ones(d,1), ...
                                nTrain + (1:nTest), 1:nTrain);
    end
else
    % Kernel mode: accumulate k(i,i) + k(j,j) - 2*k(i,j) over kernels
    D = sparse(nTrain + nTest, nTrain + nTest);
    for i = 1:nKernel
        Trainnorm = diag(Xtrain(:,:,i));
        D(1:nTrain, nTrain + (1:nTest)) = D(1:nTrain, nTrain + (1:nTest)) ...
            + bsxfun(@plus, Trainnorm, bsxfun(@plus, Testnorm(:,i)', -2 * Xtest(:,:,i)));
    end
end
end
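
% The kernel-mode loop above accumulates, per kernel, the standard identity
%   d^2(i, j) = k(i, i) + k(j, j) - 2 * k(i, j)
% between training point i and test point j. A one-kernel sanity sketch with a
% hypothetical linear kernel:
%
%   X   = randn(4, 6);   Xte = randn(4, 2);                 % hypothetical raw data
%   Ktr = X' * X;        Kte = X' * Xte;                    % train/train and train/test kernels
%   tn  = sum(Xte .^ 2, 1)';                                % test self-similarities k(j, j)
%   D2  = bsxfun(@plus, diag(Ktr), bsxfun(@plus, tn', -2 * Kte));
%   abs(D2(1, 1) - sum((X(:, 1) - Xte(:, 1)) .^ 2))         % ~ 0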