function Ypredict = soft_classify(W, test_k, Xtrain, Ytrain, Xtest, Testnorm)
% Ypredict = soft_classify(W, test_k, Xtrain, Ytrain, Xtest, Testnorm)
%
%   W        = d-by-d positive semi-definite matrix (or [] for raw
%              Euclidean distances; see mlr_test_distance below for the
%              other supported shapes)
%   test_k   = k-value to use for KNN
%   Xtrain   = d-by-n matrix of training data
%   Ytrain   = n-by-1 vector of training labels
%   Xtest    = d-by-m matrix of testing data
%   Testnorm = m-by-#kernels matrix of k(i,i) for each test point i
%              (kernel mode only; omit or pass [] otherwise)
%
%   Ypredict = #classes-by-m matrix of soft label scores: Ypredict(c,j) is
%              the number of the test_k nearest training points to test
%              point j that carry the c-th label (in unique(Ytrain) order)
%

    addpath('cuttingPlane', 'distance', 'feasible', 'initialize', 'loss', ...
            'metricPsi', 'regularize', 'separationOracle', 'util');

    [d, nTrain, nKernel] = size(Xtrain);
    nTest  = size(Xtest, 2);
    test_k = min(test_k, nTrain);

    if nargin < 6
        % Testnorm was not supplied: not in kernel mode
        Testnorm = [];
    end

    % Build the train-to-test distance matrix and sort neighbors
    [D, I] = mlr_test_distance(W, Xtrain, Xtest, Testnorm);

    % Compute label agreement: count how many of the test_k nearest
    % neighbors of each test point carry each label
    Ypredict = histc(Ytrain(I(1:test_k, :)), unique(Ytrain)');

end


function [D, I] = mlr_test_distance(W, Xtrain, Xtest, Testnorm)

    % CASES:
    %   Raw:                        W = []
    %
    %   Linear, full:               W = d-by-d
    %   Single Kernel, full:        W = n-by-n
    %   MKL, full:                  W = n-by-n-by-m
    %
    %   Linear, diagonal:           W = d-by-1
    %   Single Kernel, diagonal:    W = n-by-1
    %   MKL, diag:                  W = n-by-m
    %   MKL, diag-off-diag:         W = m-by-m-by-n

    [d, nTrain, nKernel] = size(Xtrain);
    nTest = size(Xtest, 2);

    if isempty(W)
        % W = [] => native Euclidean distances
        D = mlr_test_distance_raw(Xtrain, Xtest, Testnorm);

    elseif size(W,1) == d && size(W,2) == d
        % Full-projection case
        D = setDistanceFullMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

    elseif size(W,1) == d && size(W,2) == nKernel
        % Simple diagonal case
        D = setDistanceDiagMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

    elseif size(W,1) == nKernel && size(W,2) == nKernel && size(W,3) == nTrain
        % Diag-off-diag (DOD) case
        D = setDistanceDODMKL([Xtrain Xtest], W, nTrain + (1:nTest), 1:nTrain);

    else
        error('Cannot determine metric mode.');
    end

    % Keep only train-to-test distances and sort each column so that
    % I(r,j) is the index of the r-th nearest training point to test point j
    D      = full(D(1:nTrain, nTrain + (1:nTest)));
    [~, I] = sort(D, 1);
end


function D = mlr_test_distance_raw(Xtrain, Xtest, Testnorm)

    [d, nTrain, nKernel] = size(Xtrain);
    nTest = size(Xtest, 2);

    if isempty(Testnorm)
        % Not in kernel mode: compute distances directly, summed over
        % kernels/feature maps
        D = 0;
        for i = 1:nKernel
            D = D + setDistanceDiag([Xtrain(:,:,i) Xtest(:,:,i)], ones(d,1), ...
                                    nTrain + (1:nTest), 1:nTrain);
        end
    else
        % Kernel mode: d(a,b) = k(a,a) + k(b,b) - 2*k(a,b), summed over kernels.
        % Xtrain(:,:,i) is the train kernel matrix, Xtest(:,:,i) the
        % train-by-test cross-kernel, and Testnorm(:,i) holds k(b,b) for each
        % test point b.
        D = sparse(nTrain + nTest, nTrain + nTest);
        for i = 1:nKernel
            Trainnorm = diag(Xtrain(:,:,i));
            D(1:nTrain, nTrain + (1:nTest)) = D(1:nTrain, nTrain + (1:nTest)) ...
                + bsxfun(@plus, Trainnorm, ...
                         bsxfun(@plus, Testnorm(:,i)', -2 * Xtest(:,:,i)));
        end
    end
end
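% ------------------------------------------------------------------------
% Example usage (a minimal sketch, not part of the toolbox): this assumes
% the helper functions called above (setDistanceFullMKL, setDistanceDiag,
% etc.) are on the path, and exercises the linear, full-matrix case with an
% identity metric; all variable names and sizes below are illustrative.
%
%     d = 10;  n = 200;  m = 50;
%     Xtrain = randn(d, n);               % d-by-n training features
%     Ytrain = randi(3, n, 1);            % n-by-1 labels in {1,2,3}
%     Xtest  = randn(d, m);               % d-by-m test features
%     W      = eye(d);                    % identity metric => plain Euclidean KNN
%
%     votes = soft_classify(W, 5, Xtrain, Ytrain, Xtest);
%     % votes is #classes-by-m: votes(c,j) counts how many of the 5 nearest
%     % training points to test point j carry the c-th label
%     [~, Yhard] = max(votes, [], 1);     % hard decision per test point, if desired
% ------------------------------------------------------------------------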