function [Y, Loss] = separationOracleMRR(q, D, pos, neg, k)
%
% [Y,Loss] = separationOracleMRR(q, D, pos, neg, k)
%
%   q   = index of the query point
%   D   = the current distance matrix
%   pos = indices of relevant results for q
%   neg = indices of irrelevant results for q
%   k   = length of the list to consider (unused in MRR)
%
%   Y is a permutation 1:n corresponding to the maximally
%   violated constraint
%
%   Loss is the loss for Y, in this case, 1-MRR(Y)


    % First, sort the documents in descending order of W'Phi(q,x)
    % Phi = - (X(q) - X(x)) * (X(q) - X(x))'

    % Sort the positive documents by decreasing discriminant score
    ScorePos        = - D(pos,q);
    [Vpos, Ipos]    = sort(full(ScorePos'), 'descend');
    Ipos            = pos(Ipos);

    % Sort the negative documents by decreasing discriminant score
    ScoreNeg        = -D(neg,q);
    [Vneg, Ineg]    = sort(full(ScoreNeg'), 'descend');
    Ineg            = neg(Ineg);

    % Now, solve the DP for the interleaving

    numPos  = length(pos);
    numNeg  = length(neg);
    n       = numPos + numNeg;

    % Prefix sums of the sorted scores, used to evaluate each
    % candidate interleaving in closed form
    cVpos   = cumsum(Vpos);
    cVneg   = cumsum(Vneg);


    % Algorithm:
    %   For each RR score in 1/1, 1/2, ..., 1/(numNeg+1)
    %       Calculate maximum discriminant score for that precision level
    MRR     = ((1:(numNeg+1)).^-1)';


    Discriminant        = zeros(numNeg+1, 1);
    % Last entry: all negatives ranked ahead of all positives
    Discriminant(end)   = numPos * cVneg(end) - numNeg * cVpos(end);

    % For the rest of the positions, we're interleaving one more negative
    % example into the 2nd-through-last positives
    % NOTE(review): binarysearch is a project-local helper (MEX);
    % presumably it returns, for each element of Vneg, its insertion
    % index into the descending-sorted Vpos(2:end) -- confirm against
    % the toolbox's binarysearch implementation.
    offsets             = 1 + binarysearch(Vneg, Vpos(2:end));

    % How many of the remaining positives go before Vneg(a)?
    NegsBefore          = -bsxfun(@ge, offsets, (1:length(Vpos))');

    % For the last position, all negatives come before all positives
    NegsBefore(:,numNeg+1)  = numNeg;

    % Incremental cost of interleaving each successive negative, then
    % accumulate via suffix sums to get the total score per RR level
    Discriminant(1:numNeg)  = -2 * (offsets .* Vneg - cVpos(offsets));
    Discriminant            = sum(Discriminant) - cumsum(Discriminant) + Discriminant;


    % Normalize discriminant scores
    Discriminant    = Discriminant / (numPos * numNeg);

    % Most-violated constraint: maximize (discriminant - MRR) over the
    % numNeg+1 candidate rankings
    [s, x]          = max(Discriminant - MRR);

    % Now we know that there are x-1 relevant docs in the max ranking
    % Construct Y from NegsBefore(x,:)

    Y               = nan * ones(n,1);
    % Place each sorted positive at its rank plus the number of
    % negatives interleaved before it in scenario x
    Y((1:numPos)' + sum(NegsBefore(:,x:end),2))     = Ipos;
    % Remaining slots are filled with the sorted negatives, in order
    Y(isnan(Y))     = Ineg;

    % Compute loss for this list
    Loss        = 1 - MRR(x);
end