function mixgauss = mixgauss_classifier_train(trainFeatures, trainLabels, nc, varargin)
% MIXGAUSS_CLASSIFIER_TRAIN Fit a mixture-of-Gaussians classifier for binary labels.
% mixgauss = mixgauss_classifier_train(trainFeatures, trainLabels, nclusters, varargin)
% trainFeatures(:,i) is the i'th example
% trainLabels should be 0,1
% To evaluate performance on a test set, use
% mixgauss = mixgauss_classifier_train(trainFeatures, trainLabels, nc, 'testFeatures', tf, 'testLabels', tl)

[testFeatures, testLabels, max_iter, thresh, cov_type, mu, Sigma, priorC, method, ...
 cov_prior, verbose, prune_thresh] = process_options(...
    varargin, 'testFeatures', [], 'testLabels', [], ...
    'max_iter', 10, 'thresh', 0.01, 'cov_type', 'diag', ...
    'mu', [], 'Sigma', [], 'priorC', [], 'method', 'kmeans', ...
    'cov_prior', [], 'verbose', 0, 'prune_thresh', 0);

Nclasses = 2; % max([trainLabels testLabels]) + 1;

pos = find(trainLabels == 1);
neg = find(trainLabels == 0);

% Fit a separate mixture of nc Gaussians to each class by EM
if verbose, fprintf('fitting pos\n'); end
[mixgauss.pos.mu, mixgauss.pos.Sigma, mixgauss.pos.prior] = ...
    mixgauss_em(trainFeatures(:, pos), nc, varargin{:});

if verbose, fprintf('fitting neg\n'); end
[mixgauss.neg.mu, mixgauss.neg.Sigma, mixgauss.neg.prior] = ...
    mixgauss_em(trainFeatures(:, neg), nc, varargin{:});

% Class prior: use the user-supplied priorC if given, otherwise
% estimate it from the empirical class frequencies
if ~isempty(priorC)
  mixgauss.priorC = priorC;
else
  mixgauss.priorC = normalize([length(pos) length(neg)]);
end
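
% Example usage (a minimal sketch added for illustration, not part of the
% original toolbox code): the data X, y below are made up, and the helpers
% mixgauss_em, process_options, and normalize are assumed to be on the path
% from the surrounding toolbox. The call simply exercises the interface
% documented in the header comment above.
%
%   X = randn(10, 100);               % 10-dimensional features, 100 examples
%   y = [zeros(1,50) ones(1,50)];     % binary labels in {0,1}
%   model = mixgauss_classifier_train(X, y, 3, 'verbose', 1);
%   % model.pos / model.neg hold the per-class mixture parameters
%   % (mu, Sigma, prior); model.priorC holds the class prior.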