annotate toolboxes/distance_learning/mlr/separationOracle/separationOracleAUC.m @ 0:e9a9cd732c1e tip

first hg version after svn
author wolffd
date Tue, 10 Feb 2015 15:05:51 +0000
function [Y, Loss] = separationOracleAUC(q, D, pos, neg, k)
%
% [Y, Loss] = separationOracleAUC(q, D, pos, neg, k)
%
% q   = index of the query point
% D   = the current distance matrix
% pos = indices of relevant results for q
% neg = indices of irrelevant results for q
% k   = length of the list to consider (unused in AUC)
%
% Y is a permutation of 1:n corresponding to the maximally
% violated constraint
%
% Loss is the loss for Y, in this case 1 - AUC(Y)
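%
% The sketch below is an illustrative call, not part of the original file:
% the data and index values are invented, and D is built as a plain squared
% Euclidean distance matrix purely for demonstration (in MLR, D would be the
% learned distance under the current metric W).
%
%   X         = randn(5, 10);                            % 10 points, 5 dims
%   G         = X' * X;
%   D         = bsxfun(@plus, diag(G), diag(G)') - 2*G;  % squared distances
%   q         = 1;                                       % query point
%   pos       = [2 3 4];                                 % relevant results
%   neg       = [5 6 7 8 9 10];                          % irrelevant results
%   [Y, Loss] = separationOracleAUC(q, D, pos, neg, 3);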


% First, sort the documents in descending order of W'Phi(q,x)
% Phi = - (X(q) - X(x)) * (X(q) - X(x))'

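% A short derivation of the score, spelled out here for clarity (it follows
% from the definition of Phi above): since Phi(q,x) is the negated outer
% product of (X(q) - X(x)) with itself,
%   <W, Phi(q,x)> = -(X(q) - X(x))' * W * (X(q) - X(x)) = -D(x,q),
% so sorting by W'Phi is the same as sorting by the negated distances below.
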
% Sort the positive documents
ScorePos = - D(pos,q);
[Vpos, Ipos] = sort(full(ScorePos'), 'descend');
Ipos = pos(Ipos);

% Sort the negative documents
ScoreNeg = - D(neg,q);
[Vneg, Ineg] = sort(full(ScoreNeg'), 'descend');
Ineg = neg(Ineg);


% How many pos and neg documents are we using here?
numPos = length(pos);
numNeg = length(neg);
n = numPos + numNeg;


% For each positive document, count how many negatives the adversary can
% place ahead of it: negative j precedes positive i whenever the margin is
% violated, i.e. Vpos(i) < Vneg(j) + 0.5
NegsBefore = sum(bsxfun(@lt, Vpos, Vneg' + 0.5), 1);

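% A tiny worked example (values invented here for illustration): with sorted
% scores Vpos = [3 1] and Vneg = [2 0], only the second positive fails to
% beat the first negative by the 0.5 margin, so NegsBefore = [0 1]. The
% construction below then yields the ordering [pos1, neg1, pos2, neg2],
% and Loss = 1 / (2*2) = 0.25.
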
% Construct Y from NegsBefore: positive i lands at position i + NegsBefore(i),
% and the remaining slots are filled with the sorted negatives
Y = nan * ones(n,1);
Y((1:numPos) + NegsBefore) = Ipos;
Y(isnan(Y)) = Ineg;

% Compute the AUC loss for this ranking: the fraction of (pos, neg) pairs
% in which the negative is ranked above the positive, i.e. 1 - AUC(Y)
Loss = sum(NegsBefore) / (numPos * numNeg);
end