function Ypredict = soft_classify(W, test_k, Xtrain, Ytrain, Xtest, Testnorm)
% Ypredict = soft_classify(W, test_k, Xtrain, Ytrain, Xtest, Testnorm)
%
% Soft KNN classification under a learned metric W: for each test point,
% returns the per-class vote counts among its test_k nearest training
% neighbors (one histogram bin per unique training label).
%
% W       = d-by-d positive semi-definite matrix, or any of the other
%           shapes dispatched on in mlr_test_distance; [] for raw
%           Euclidean distances
% test_k  = k-value to use for KNN (clamped to the number of training points)
% Xtrain  = d-by-n(-by-#kernels) matrix of training data
% Ytrain  = n-by-1 vector of training labels
% Xtest   = d-by-m(-by-#kernels) matrix of testing data
% Testnorm= m-by-#kernels matrix of k(i,i) for each i in test; optional,
%           only consulted in kernel mode (see mlr_test_distance_raw)

    addpath('cuttingPlane', 'distance', 'feasible', 'initialize', 'loss', ...
            'metricPsi', 'regularize', 'separationOracle', 'util');

    nTrain = size(Xtrain, 2);
    test_k = min(test_k, nTrain);

    % BUG FIX: this function takes 6 arguments, so the original guard
    % `if nargin < 7` was always true and unconditionally clobbered any
    % caller-supplied Testnorm with []. Guard against the correct arity.
    if nargin < 6
        Testnorm = [];
    end

    % Build the distance matrix and nearest-neighbor ordering
    [D, I] = mlr_test_distance(W, Xtrain, Xtest, Testnorm); %#ok<ASGLU>

    % Compute label agreement: per-class counts among the k nearest
    % training neighbors of each test point.
    % NOTE(review): when test_k == 1, Ytrain(I(1,:)) is a row vector and
    % histc bins along it, changing the output orientation — presumably
    % callers use test_k > 1; confirm before relying on k = 1.
    Ypredict = histc(Ytrain(I(1:test_k,:)), unique(Ytrain)');

end


function [D,I] = mlr_test_distance(W, Xtrain, Xtest, Testnorm)
% Compute the train-vs-test distance matrix, dispatching on the shape of W.
%
% CASES:
%   Raw:                        W = []
%
%   Linear, full:               W = d-by-d
%   Single Kernel, full:        W = n-by-n
%   MKL, full:                  W = n-by-n-by-m
%
%   Linear, diagonal:           W = d-by-1
%   Single Kernel, diagonal:    W = n-by-1
%   MKL, diag:                  W = n-by-m
%   MKL, diag-off-diag:         W = m-by-m-by-n
%
% Returns:
%   D = nTrain-by-nTest matrix of distances from each training point to
%       each test point
%   I = nTrain-by-nTest index matrix; column j lists training indices in
%       order of increasing distance to test point j

    [d, nTrain, nKernel] = size(Xtrain);
    nTest = size(Xtest, 2);

    if isempty(W)
        % W = [] => native euclidean distances
        D = mlr_test_distance_raw(Xtrain, Xtest, Testnorm);

    elseif size(W,1) == d && size(W,2) == d
        % We're in a full-projection case
        D = setDistanceFullMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

    elseif size(W,1) == d && size(W,2) == nKernel
        % We're in a simple diagonal case
        D = setDistanceDiagMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

    elseif size(W,1) == nKernel && size(W,2) == nKernel && size(W,3) == nTrain
        % We're in DOD (diag-off-diag) mode
        D = setDistanceDODMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

    else
        error('Cannot determine metric mode.');

    end

    % The helpers return distances over the concatenated [train test] set;
    % keep only the train-rows x test-columns sub-block, then rank training
    % points by distance for each test point.
    D = full(D(1:nTrain, nTrain + (1:nTest)));
    [~, I] = sort(D, 1);
end


function D = mlr_test_distance_raw(Xtrain, Xtest, Testnorm)
% Distances with no learned metric (W = []).
%
% Two regimes:
%   - Testnorm empty: Xtrain/Xtest are raw feature matrices; accumulate
%     plain (identity-weighted) squared distances across kernels/views.
%   - Testnorm given: Xtrain(:,:,i) is the train-train kernel matrix and
%     Xtest(:,:,i) the train-test kernel block; squared distances come
%     from the kernel identity d(a,b) = k(a,a) + k(b,b) - 2*k(a,b).

    [d, nTrain, nKernel] = size(Xtrain);
    nTest = size(Xtest, 2);

    if isempty(Testnorm)
        % Not in kernel mode, compute distances directly
        D = 0;
        for i = 1:nKernel
            D = D + setDistanceDiag([Xtrain(:,:,i) Xtest(:,:,i)], ones(d,1), ...
                                    nTrain + (1:nTest), 1:nTrain);
        end
    else
        % We are in kernel mode: k(i,i) for training points comes from the
        % diagonal of the train-train kernel; k(j,j) for test points from
        % the caller-supplied Testnorm.
        D = sparse(nTrain + nTest, nTrain + nTest);
        for i = 1:nKernel
            Trainnorm = diag(Xtrain(:,:,i));
            D(1:nTrain, nTrain + (1:nTest)) = D(1:nTrain, nTrain + (1:nTest)) ...
                + bsxfun(@plus, Trainnorm, bsxfun(@plus, Testnorm(:,i)', -2 * Xtest(:,:,i)));
        end
    end
end