function Ypredict = soft_classify(W, test_k, Xtrain, Ytrain, Xtest, Testnorm)
% Ypredict = soft_classify(W, test_k, Xtrain, Ytrain, Xtest, Testnorm)
%
%   W        = d-by-d positive semi-definite matrix ([] for raw Euclidean
%              distances; see mlr_test_distance below for the other
%              supported parameterizations)
%   test_k   = k-value to use for KNN
%   Xtrain   = d-by-n matrix of training data (d-by-n-by-#kernels when
%              multiple kernels are used)
%   Ytrain   = n-by-1 vector of training labels
%   Xtest    = d-by-m matrix of testing data
%   Testnorm = m-by-#kernels matrix of kernel values k(i,i) for each test
%              point i (optional; only needed in kernel mode)
%
%   Ypredict = #classes-by-m matrix of votes: entry (c,j) counts how many of
%              the test_k nearest training neighbours of test point j carry
%              the c-th label of unique(Ytrain)
%
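% A minimal usage sketch (illustrative only; synthetic data, and the metric
% is simply the identity, i.e. plain Euclidean distance; in practice W would
% come from a metric-learning step):
%
%   Xtrain = randn(10, 100);        % 10 features, 100 training points
%   Ytrain = randi(3, 100, 1);      % 3 classes
%   Xtest  = randn(10, 20);         % 20 test points
%   votes  = soft_classify(eye(10), 5, Xtrain, Ytrain, Xtest);
%   [~, c] = max(votes, [], 1);     % index of the winning class in unique(Ytrain)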

addpath('cuttingPlane', 'distance', 'feasible', 'initialize', 'loss', ...
        'metricPsi', 'regularize', 'separationOracle', 'util');

[d, nTrain, nKernel] = size(Xtrain);
nTest = size(Xtest, 2);
test_k = min(test_k, nTrain);

% Testnorm is optional; default to non-kernel mode when it is not supplied
if nargin < 6
    Testnorm = [];
end

% Build the distance matrix
[D, I] = mlr_test_distance(W, Xtrain, Xtest, Testnorm);

% Compute label agreement
Ypredict = histc(Ytrain(I(1:test_k,:)), unique(Ytrain)');
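% Each column of Ypredict holds, for one test point, the number of its test_k
% nearest training neighbours carrying each label in unique(Ytrain): e.g. with
% labels {1,2,3} and test_k = 5, a column of [1; 3; 1] means three of the five
% nearest neighbours belong to class 2.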

end


function [D, I] = mlr_test_distance(W, Xtrain, Xtest, Testnorm)

% CASES:
% Raw:                        W = []

% Linear, full:               W = d-by-d
% Single Kernel, full:        W = n-by-n
% MKL, full:                  W = n-by-n-by-m

% Linear, diagonal:           W = d-by-1
% Single Kernel, diagonal:    W = n-by-1
% MKL, diag:                  W = n-by-m
% MKL, diag-off-diag:         W = m-by-m-by-n
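%
% For illustration (hypothetical sizes): with d = 10 raw features, n = 100
% training points, and m = 3 kernels, a full linear metric arrives here as a
% 10-by-10 matrix, a full MKL metric as a 100-by-100-by-3 array, and a
% diagonal MKL metric as a 100-by-3 matrix.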

[d, nTrain, nKernel] = size(Xtrain);
nTest = size(Xtest, 2);

if isempty(W)
    % W = [] => native Euclidean distances
    D = mlr_test_distance_raw(Xtrain, Xtest, Testnorm);

elseif size(W,1) == d && size(W,2) == d
    % We're in a full-projection case
    D = setDistanceFullMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

elseif size(W,1) == d && size(W,2) == nKernel
    % We're in a simple diagonal case
    D = setDistanceDiagMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

elseif size(W,1) == nKernel && size(W,2) == nKernel && size(W,3) == nTrain
    % We're in DOD (diagonal-off-diagonal) mode
    D = setDistanceDODMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

else
    % W does not match any of the supported parameterizations
    error('Cannot determine metric mode.');
end

% Keep only train-to-test distances, then rank training points per test point
D = full(D(1:nTrain, nTrain + (1:nTest)));
[~, I] = sort(D, 1);
end


function D = mlr_test_distance_raw(Xtrain, Xtest, Testnorm)

[d, nTrain, nKernel] = size(Xtrain);
nTest = size(Xtest, 2);

if isempty(Testnorm)
    % Not in kernel mode, compute distances directly
    D = 0;
    for i = 1:nKernel
        D = D + setDistanceDiag([Xtrain(:,:,i) Xtest(:,:,i)], ones(d,1), ...
                                nTrain + (1:nTest), 1:nTrain);
    end
else
    % We are in kernel mode
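    % Here Xtrain(:,:,i) is the i-th train-by-train kernel matrix and
    % Xtest(:,:,i) the corresponding train-by-test block, so the squared
    % feature-space distance is k(x,x) + k(z,z) - 2*k(x,z); the bsxfun calls
    % below accumulate exactly that, summed over kernels.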
    D = sparse(nTrain + nTest, nTrain + nTest);
    for i = 1:nKernel
        Trainnorm = diag(Xtrain(:,:,i));
        D(1:nTrain, nTrain + (1:nTest)) = D(1:nTrain, nTrain + (1:nTest)) ...
            + bsxfun(@plus, Trainnorm, bsxfun(@plus, Testnorm(:,i)', -2 * Xtest(:,:,i)));
    end
end
end