annotate toolboxes/SVM-light/src/svm_learn_main.c @ 0:e9a9cd732c1e tip

first hg version after svn
author wolffd
date Tue, 10 Feb 2015 15:05:51 +0000
parents
children
rev   line source
wolffd@0 1 /***********************************************************************/
wolffd@0 2 /* */
wolffd@0 3 /* svm_learn_main.c */
wolffd@0 4 /* */
wolffd@0 5 /* Command line interface to the learning module of the */
wolffd@0 6 /* Support Vector Machine. */
wolffd@0 7 /* */
wolffd@0 8 /* Author: Thorsten Joachims */
wolffd@0 9 /* Date: 02.07.02 */
wolffd@0 10 /* */
wolffd@0 11 /* Copyright (c) 2000 Thorsten Joachims - All rights reserved */
wolffd@0 12 /* */
wolffd@0 13 /* This software is available for non-commercial use only. It must */
wolffd@0 14 /* not be modified and distributed without prior permission of the */
wolffd@0 15 /* author. The author is not responsible for implications from the */
wolffd@0 16 /* use of this software. */
wolffd@0 17 /* */
wolffd@0 18 /***********************************************************************/
wolffd@0 19
wolffd@0 20
wolffd@0 21 /* uncomment, if you want to use svm-learn out of C++ */
wolffd@0 22 /* extern "C" { */
wolffd@0 23 # include "svm_common.h"
wolffd@0 24 # include "svm_learn.h"
wolffd@0 25 /* } */
wolffd@0 26
wolffd@0 27 char docfile[200]; /* file with training examples */
wolffd@0 28 char modelfile[200]; /* file for resulting classifier */
wolffd@0 29 char restartfile[200]; /* file with initial alphas */
wolffd@0 30
wolffd@0 31 void read_input_parameters(int, char **, char *, char *, char *, long *,
wolffd@0 32 LEARN_PARM *, KERNEL_PARM *);
wolffd@0 33 void wait_any_key();
wolffd@0 34 void print_help();
wolffd@0 35
wolffd@0 36
wolffd@0 37
wolffd@0 38 int main (int argc, char* argv[])
wolffd@0 39 {
wolffd@0 40 DOC **docs; /* training examples */
wolffd@0 41 long totwords,totdoc,i;
wolffd@0 42 double *target;
wolffd@0 43 double *alpha_in=NULL;
wolffd@0 44 KERNEL_CACHE *kernel_cache;
wolffd@0 45 LEARN_PARM learn_parm;
wolffd@0 46 KERNEL_PARM kernel_parm;
wolffd@0 47 MODEL *model=(MODEL *)my_malloc(sizeof(MODEL));
wolffd@0 48
wolffd@0 49 read_input_parameters(argc,argv,docfile,modelfile,restartfile,&verbosity,
wolffd@0 50 &learn_parm,&kernel_parm);
wolffd@0 51 read_documents(docfile,&docs,&target,&totwords,&totdoc);
wolffd@0 52 if(restartfile[0]) alpha_in=read_alphas(restartfile,totdoc);
wolffd@0 53
wolffd@0 54 if(kernel_parm.kernel_type == LINEAR) { /* don't need the cache */
wolffd@0 55 kernel_cache=NULL;
wolffd@0 56 }
wolffd@0 57 else {
wolffd@0 58 /* Always get a new kernel cache. It is not possible to use the
wolffd@0 59 same cache for two different training runs */
wolffd@0 60 kernel_cache=kernel_cache_init(totdoc,learn_parm.kernel_cache_size);
wolffd@0 61 }
wolffd@0 62
wolffd@0 63 if(learn_parm.type == CLASSIFICATION) {
wolffd@0 64 svm_learn_classification(docs,target,totdoc,totwords,&learn_parm,
wolffd@0 65 &kernel_parm,kernel_cache,model,alpha_in);
wolffd@0 66 }
wolffd@0 67 else if(learn_parm.type == REGRESSION) {
wolffd@0 68 svm_learn_regression(docs,target,totdoc,totwords,&learn_parm,
wolffd@0 69 &kernel_parm,&kernel_cache,model);
wolffd@0 70 }
wolffd@0 71 else if(learn_parm.type == RANKING) {
wolffd@0 72 svm_learn_ranking(docs,target,totdoc,totwords,&learn_parm,
wolffd@0 73 &kernel_parm,&kernel_cache,model);
wolffd@0 74 }
wolffd@0 75 else if(learn_parm.type == OPTIMIZATION) {
wolffd@0 76 svm_learn_optimization(docs,target,totdoc,totwords,&learn_parm,
wolffd@0 77 &kernel_parm,kernel_cache,model,alpha_in);
wolffd@0 78 }
wolffd@0 79
wolffd@0 80 if(kernel_cache) {
wolffd@0 81 /* Free the memory used for the cache. */
wolffd@0 82 kernel_cache_cleanup(kernel_cache);
wolffd@0 83 }
wolffd@0 84
wolffd@0 85 /* Warning: The model contains references to the original data 'docs'.
wolffd@0 86 If you want to free the original data, and only keep the model, you
wolffd@0 87 have to make a deep copy of 'model'. */
wolffd@0 88 /* deep_copy_of_model=copy_model(model); */
wolffd@0 89 write_model(modelfile,model);
wolffd@0 90
wolffd@0 91 free(alpha_in);
wolffd@0 92 free_model(model,0);
wolffd@0 93 for(i=0;i<totdoc;i++)
wolffd@0 94 free_example(docs[i],1);
wolffd@0 95 free(docs);
wolffd@0 96 free(target);
wolffd@0 97
wolffd@0 98 return(0);
wolffd@0 99 }
wolffd@0 100
wolffd@0 101 /*---------------------------------------------------------------------------*/
wolffd@0 102
wolffd@0 103 void read_input_parameters(int argc,char *argv[],char *docfile,char *modelfile,
wolffd@0 104 char *restartfile,long *verbosity,
wolffd@0 105 LEARN_PARM *learn_parm,KERNEL_PARM *kernel_parm)
wolffd@0 106 {
wolffd@0 107 long i;
wolffd@0 108 char type[100];
wolffd@0 109
wolffd@0 110 /* set default */
wolffd@0 111 strcpy (modelfile, "svm_model");
wolffd@0 112 strcpy (learn_parm->predfile, "trans_predictions");
wolffd@0 113 strcpy (learn_parm->alphafile, "");
wolffd@0 114 strcpy (restartfile, "");
wolffd@0 115 (*verbosity)=1;
wolffd@0 116 learn_parm->biased_hyperplane=1;
wolffd@0 117 learn_parm->sharedslack=0;
wolffd@0 118 learn_parm->remove_inconsistent=0;
wolffd@0 119 learn_parm->skip_final_opt_check=0;
wolffd@0 120 learn_parm->svm_maxqpsize=10;
wolffd@0 121 learn_parm->svm_newvarsinqp=0;
wolffd@0 122 learn_parm->svm_iter_to_shrink=-9999;
wolffd@0 123 learn_parm->maxiter=100000;
wolffd@0 124 learn_parm->kernel_cache_size=40;
wolffd@0 125 learn_parm->svm_c=0.0;
wolffd@0 126 learn_parm->eps=0.1;
wolffd@0 127 learn_parm->transduction_posratio=-1.0;
wolffd@0 128 learn_parm->svm_costratio=1.0;
wolffd@0 129 learn_parm->svm_costratio_unlab=1.0;
wolffd@0 130 learn_parm->svm_unlabbound=1E-5;
wolffd@0 131 learn_parm->epsilon_crit=0.001;
wolffd@0 132 learn_parm->epsilon_a=1E-15;
wolffd@0 133 learn_parm->compute_loo=0;
wolffd@0 134 learn_parm->rho=1.0;
wolffd@0 135 learn_parm->xa_depth=0;
wolffd@0 136 kernel_parm->kernel_type=0;
wolffd@0 137 kernel_parm->poly_degree=3;
wolffd@0 138 kernel_parm->rbf_gamma=1.0;
wolffd@0 139 kernel_parm->coef_lin=1;
wolffd@0 140 kernel_parm->coef_const=1;
wolffd@0 141 strcpy(kernel_parm->custom,"empty");
wolffd@0 142 strcpy(type,"c");
wolffd@0 143
wolffd@0 144 for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
wolffd@0 145 switch ((argv[i])[1])
wolffd@0 146 {
wolffd@0 147 case '?': print_help(); exit(0);
wolffd@0 148 case 'z': i++; strcpy(type,argv[i]); break;
wolffd@0 149 case 'v': i++; (*verbosity)=atol(argv[i]); break;
wolffd@0 150 case 'b': i++; learn_parm->biased_hyperplane=atol(argv[i]); break;
wolffd@0 151 case 'i': i++; learn_parm->remove_inconsistent=atol(argv[i]); break;
wolffd@0 152 case 'f': i++; learn_parm->skip_final_opt_check=!atol(argv[i]); break;
wolffd@0 153 case 'q': i++; learn_parm->svm_maxqpsize=atol(argv[i]); break;
wolffd@0 154 case 'n': i++; learn_parm->svm_newvarsinqp=atol(argv[i]); break;
wolffd@0 155 case '#': i++; learn_parm->maxiter=atol(argv[i]); break;
wolffd@0 156 case 'h': i++; learn_parm->svm_iter_to_shrink=atol(argv[i]); break;
wolffd@0 157 case 'm': i++; learn_parm->kernel_cache_size=atol(argv[i]); break;
wolffd@0 158 case 'c': i++; learn_parm->svm_c=atof(argv[i]); break;
wolffd@0 159 case 'w': i++; learn_parm->eps=atof(argv[i]); break;
wolffd@0 160 case 'p': i++; learn_parm->transduction_posratio=atof(argv[i]); break;
wolffd@0 161 case 'j': i++; learn_parm->svm_costratio=atof(argv[i]); break;
wolffd@0 162 case 'e': i++; learn_parm->epsilon_crit=atof(argv[i]); break;
wolffd@0 163 case 'o': i++; learn_parm->rho=atof(argv[i]); break;
wolffd@0 164 case 'k': i++; learn_parm->xa_depth=atol(argv[i]); break;
wolffd@0 165 case 'x': i++; learn_parm->compute_loo=atol(argv[i]); break;
wolffd@0 166 case 't': i++; kernel_parm->kernel_type=atol(argv[i]); break;
wolffd@0 167 case 'd': i++; kernel_parm->poly_degree=atol(argv[i]); break;
wolffd@0 168 case 'g': i++; kernel_parm->rbf_gamma=atof(argv[i]); break;
wolffd@0 169 case 's': i++; kernel_parm->coef_lin=atof(argv[i]); break;
wolffd@0 170 case 'r': i++; kernel_parm->coef_const=atof(argv[i]); break;
wolffd@0 171 case 'u': i++; strcpy(kernel_parm->custom,argv[i]); break;
wolffd@0 172 case 'l': i++; strcpy(learn_parm->predfile,argv[i]); break;
wolffd@0 173 case 'a': i++; strcpy(learn_parm->alphafile,argv[i]); break;
wolffd@0 174 case 'y': i++; strcpy(restartfile,argv[i]); break;
wolffd@0 175 default: printf("\nUnrecognized option %s!\n\n",argv[i]);
wolffd@0 176 print_help();
wolffd@0 177 exit(0);
wolffd@0 178 }
wolffd@0 179 }
wolffd@0 180 if(i>=argc) {
wolffd@0 181 printf("\nNot enough input parameters!\n\n");
wolffd@0 182 wait_any_key();
wolffd@0 183 print_help();
wolffd@0 184 exit(0);
wolffd@0 185 }
wolffd@0 186 strcpy (docfile, argv[i]);
wolffd@0 187 if((i+1)<argc) {
wolffd@0 188 strcpy (modelfile, argv[i+1]);
wolffd@0 189 }
wolffd@0 190 if(learn_parm->svm_iter_to_shrink == -9999) {
wolffd@0 191 if(kernel_parm->kernel_type == LINEAR)
wolffd@0 192 learn_parm->svm_iter_to_shrink=2;
wolffd@0 193 else
wolffd@0 194 learn_parm->svm_iter_to_shrink=100;
wolffd@0 195 }
wolffd@0 196 if(strcmp(type,"c")==0) {
wolffd@0 197 learn_parm->type=CLASSIFICATION;
wolffd@0 198 }
wolffd@0 199 else if(strcmp(type,"r")==0) {
wolffd@0 200 learn_parm->type=REGRESSION;
wolffd@0 201 }
wolffd@0 202 else if(strcmp(type,"p")==0) {
wolffd@0 203 learn_parm->type=RANKING;
wolffd@0 204 }
wolffd@0 205 else if(strcmp(type,"o")==0) {
wolffd@0 206 learn_parm->type=OPTIMIZATION;
wolffd@0 207 }
wolffd@0 208 else if(strcmp(type,"s")==0) {
wolffd@0 209 learn_parm->type=OPTIMIZATION;
wolffd@0 210 learn_parm->sharedslack=1;
wolffd@0 211 }
wolffd@0 212 else {
wolffd@0 213 printf("\nUnknown type '%s': Valid types are 'c' (classification), 'r' regession, and 'p' preference ranking.\n",type);
wolffd@0 214 wait_any_key();
wolffd@0 215 print_help();
wolffd@0 216 exit(0);
wolffd@0 217 }
wolffd@0 218 if((learn_parm->skip_final_opt_check)
wolffd@0 219 && (kernel_parm->kernel_type == LINEAR)) {
wolffd@0 220 printf("\nIt does not make sense to skip the final optimality check for linear kernels.\n\n");
wolffd@0 221 learn_parm->skip_final_opt_check=0;
wolffd@0 222 }
wolffd@0 223 if((learn_parm->skip_final_opt_check)
wolffd@0 224 && (learn_parm->remove_inconsistent)) {
wolffd@0 225 printf("\nIt is necessary to do the final optimality check when removing inconsistent \nexamples.\n");
wolffd@0 226 wait_any_key();
wolffd@0 227 print_help();
wolffd@0 228 exit(0);
wolffd@0 229 }
wolffd@0 230 if((learn_parm->svm_maxqpsize<2)) {
wolffd@0 231 printf("\nMaximum size of QP-subproblems not in valid range: %ld [2..]\n",learn_parm->svm_maxqpsize);
wolffd@0 232 wait_any_key();
wolffd@0 233 print_help();
wolffd@0 234 exit(0);
wolffd@0 235 }
wolffd@0 236 if((learn_parm->svm_maxqpsize<learn_parm->svm_newvarsinqp)) {
wolffd@0 237 printf("\nMaximum size of QP-subproblems [%ld] must be larger than the number of\n",learn_parm->svm_maxqpsize);
wolffd@0 238 printf("new variables [%ld] entering the working set in each iteration.\n",learn_parm->svm_newvarsinqp);
wolffd@0 239 wait_any_key();
wolffd@0 240 print_help();
wolffd@0 241 exit(0);
wolffd@0 242 }
wolffd@0 243 if(learn_parm->svm_iter_to_shrink<1) {
wolffd@0 244 printf("\nMaximum number of iterations for shrinking not in valid range: %ld [1,..]\n",learn_parm->svm_iter_to_shrink);
wolffd@0 245 wait_any_key();
wolffd@0 246 print_help();
wolffd@0 247 exit(0);
wolffd@0 248 }
wolffd@0 249 if(learn_parm->svm_c<0) {
wolffd@0 250 printf("\nThe C parameter must be greater than zero!\n\n");
wolffd@0 251 wait_any_key();
wolffd@0 252 print_help();
wolffd@0 253 exit(0);
wolffd@0 254 }
wolffd@0 255 if(learn_parm->transduction_posratio>1) {
wolffd@0 256 printf("\nThe fraction of unlabeled examples to classify as positives must\n");
wolffd@0 257 printf("be less than 1.0 !!!\n\n");
wolffd@0 258 wait_any_key();
wolffd@0 259 print_help();
wolffd@0 260 exit(0);
wolffd@0 261 }
wolffd@0 262 if(learn_parm->svm_costratio<=0) {
wolffd@0 263 printf("\nThe COSTRATIO parameter must be greater than zero!\n\n");
wolffd@0 264 wait_any_key();
wolffd@0 265 print_help();
wolffd@0 266 exit(0);
wolffd@0 267 }
wolffd@0 268 if(learn_parm->epsilon_crit<=0) {
wolffd@0 269 printf("\nThe epsilon parameter must be greater than zero!\n\n");
wolffd@0 270 wait_any_key();
wolffd@0 271 print_help();
wolffd@0 272 exit(0);
wolffd@0 273 }
wolffd@0 274 if(learn_parm->rho<0) {
wolffd@0 275 printf("\nThe parameter rho for xi/alpha-estimates and leave-one-out pruning must\n");
wolffd@0 276 printf("be greater than zero (typically 1.0 or 2.0, see T. Joachims, Estimating the\n");
wolffd@0 277 printf("Generalization Performance of an SVM Efficiently, ICML, 2000.)!\n\n");
wolffd@0 278 wait_any_key();
wolffd@0 279 print_help();
wolffd@0 280 exit(0);
wolffd@0 281 }
wolffd@0 282 if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) {
wolffd@0 283 printf("\nThe parameter depth for ext. xi/alpha-estimates must be in [0..100] (zero\n");
wolffd@0 284 printf("for switching to the conventional xa/estimates described in T. Joachims,\n");
wolffd@0 285 printf("Estimating the Generalization Performance of an SVM Efficiently, ICML, 2000.)\n");
wolffd@0 286 wait_any_key();
wolffd@0 287 print_help();
wolffd@0 288 exit(0);
wolffd@0 289 }
wolffd@0 290 }
wolffd@0 291
/* Prompt with "(more)" and block until one character (or EOF) arrives
   on stdin — used to paginate the long help text. */
void wait_any_key()
{
  fputs("\n(more)\n", stdout);
  (void)getchar();
}
wolffd@0 297
wolffd@0 298 void print_help()
wolffd@0 299 {
wolffd@0 300 printf("\nSVM-light %s: Support Vector Machine, learning module %s\n",VERSION,VERSION_DATE);
wolffd@0 301 copyright_notice();
wolffd@0 302 printf(" usage: svm_learn [options] example_file model_file\n\n");
wolffd@0 303 printf("Arguments:\n");
wolffd@0 304 printf(" example_file-> file with training data\n");
wolffd@0 305 printf(" model_file -> file to store learned decision rule in\n");
wolffd@0 306
wolffd@0 307 printf("General options:\n");
wolffd@0 308 printf(" -? -> this help\n");
wolffd@0 309 printf(" -v [0..3] -> verbosity level (default 1)\n");
wolffd@0 310 printf("Learning options:\n");
wolffd@0 311 printf(" -z {c,r,p} -> select between classification (c), regression (r),\n");
wolffd@0 312 printf(" and preference ranking (p) (default classification)\n");
wolffd@0 313 printf(" -c float -> C: trade-off between training error\n");
wolffd@0 314 printf(" and margin (default [avg. x*x]^-1)\n");
wolffd@0 315 printf(" -w [0..] -> epsilon width of tube for regression\n");
wolffd@0 316 printf(" (default 0.1)\n");
wolffd@0 317 printf(" -j float -> Cost: cost-factor, by which training errors on\n");
wolffd@0 318 printf(" positive examples outweight errors on negative\n");
wolffd@0 319 printf(" examples (default 1) (see [4])\n");
wolffd@0 320 printf(" -b [0,1] -> use biased hyperplane (i.e. x*w+b>0) instead\n");
wolffd@0 321 printf(" of unbiased hyperplane (i.e. x*w>0) (default 1)\n");
wolffd@0 322 printf(" -i [0,1] -> remove inconsistent training examples\n");
wolffd@0 323 printf(" and retrain (default 0)\n");
wolffd@0 324 printf("Performance estimation options:\n");
wolffd@0 325 printf(" -x [0,1] -> compute leave-one-out estimates (default 0)\n");
wolffd@0 326 printf(" (see [5])\n");
wolffd@0 327 printf(" -o ]0..2] -> value of rho for XiAlpha-estimator and for pruning\n");
wolffd@0 328 printf(" leave-one-out computation (default 1.0) (see [2])\n");
wolffd@0 329 printf(" -k [0..100] -> search depth for extended XiAlpha-estimator \n");
wolffd@0 330 printf(" (default 0)\n");
wolffd@0 331 printf("Transduction options (see [3]):\n");
wolffd@0 332 printf(" -p [0..1] -> fraction of unlabeled examples to be classified\n");
wolffd@0 333 printf(" into the positive class (default is the ratio of\n");
wolffd@0 334 printf(" positive and negative examples in the training data)\n");
wolffd@0 335 printf("Kernel options:\n");
wolffd@0 336 printf(" -t int -> type of kernel function:\n");
wolffd@0 337 printf(" 0: linear (default)\n");
wolffd@0 338 printf(" 1: polynomial (s a*b+c)^d\n");
wolffd@0 339 printf(" 2: radial basis function exp(-gamma ||a-b||^2)\n");
wolffd@0 340 printf(" 3: sigmoid tanh(s a*b + c)\n");
wolffd@0 341 printf(" 4: user defined kernel from kernel.h\n");
wolffd@0 342 printf(" -d int -> parameter d in polynomial kernel\n");
wolffd@0 343 printf(" -g float -> parameter gamma in rbf kernel\n");
wolffd@0 344 printf(" -s float -> parameter s in sigmoid/poly kernel\n");
wolffd@0 345 printf(" -r float -> parameter c in sigmoid/poly kernel\n");
wolffd@0 346 printf(" -u string -> parameter of user defined kernel\n");
wolffd@0 347 printf("Optimization options (see [1]):\n");
wolffd@0 348 printf(" -q [2..] -> maximum size of QP-subproblems (default 10)\n");
wolffd@0 349 printf(" -n [2..q] -> number of new variables entering the working set\n");
wolffd@0 350 printf(" in each iteration (default n = q). Set n<q to prevent\n");
wolffd@0 351 printf(" zig-zagging.\n");
wolffd@0 352 printf(" -m [5..] -> size of cache for kernel evaluations in MB (default 40)\n");
wolffd@0 353 printf(" The larger the faster...\n");
wolffd@0 354 printf(" -e float -> eps: Allow that error for termination criterion\n");
wolffd@0 355 printf(" [y [w*x+b] - 1] >= eps (default 0.001)\n");
wolffd@0 356 printf(" -y [0,1] -> restart the optimization from alpha values in file\n");
wolffd@0 357 printf(" specified by -a option. (default 0)\n");
wolffd@0 358 printf(" -h [5..] -> number of iterations a variable needs to be\n");
wolffd@0 359 printf(" optimal before considered for shrinking (default 100)\n");
wolffd@0 360 printf(" -f [0,1] -> do final optimality check for variables removed\n");
wolffd@0 361 printf(" by shrinking. Although this test is usually \n");
wolffd@0 362 printf(" positive, there is no guarantee that the optimum\n");
wolffd@0 363 printf(" was found if the test is omitted. (default 1)\n");
wolffd@0 364 printf(" -y string -> if option is given, reads alphas from file with given\n");
wolffd@0 365 printf(" and uses them as starting point. (default 'disabled')\n");
wolffd@0 366 printf(" -# int -> terminate optimization, if no progress after this\n");
wolffd@0 367 printf(" number of iterations. (default 100000)\n");
wolffd@0 368 printf("Output options:\n");
wolffd@0 369 printf(" -l string -> file to write predicted labels of unlabeled\n");
wolffd@0 370 printf(" examples into after transductive learning\n");
wolffd@0 371 printf(" -a string -> write all alphas to this file after learning\n");
wolffd@0 372 printf(" (in the same order as in the training set)\n");
wolffd@0 373 wait_any_key();
wolffd@0 374 printf("\nMore details in:\n");
wolffd@0 375 printf("[1] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n");
wolffd@0 376 printf(" Kernel Methods - Support Vector Learning, B. Schölkopf and C. Burges and\n");
wolffd@0 377 printf(" A. Smola (ed.), MIT Press, 1999.\n");
wolffd@0 378 printf("[2] T. Joachims, Estimating the Generalization performance of an SVM\n");
wolffd@0 379 printf(" Efficiently. International Conference on Machine Learning (ICML), 2000.\n");
wolffd@0 380 printf("[3] T. Joachims, Transductive Inference for Text Classification using Support\n");
wolffd@0 381 printf(" Vector Machines. International Conference on Machine Learning (ICML),\n");
wolffd@0 382 printf(" 1999.\n");
wolffd@0 383 printf("[4] K. Morik, P. Brockhausen, and T. Joachims, Combining statistical learning\n");
wolffd@0 384 printf(" with a knowledge-based approach - A case study in intensive care \n");
wolffd@0 385 printf(" monitoring. International Conference on Machine Learning (ICML), 1999.\n");
wolffd@0 386 printf("[5] T. Joachims, Learning to Classify Text Using Support Vector\n");
wolffd@0 387 printf(" Machines: Methods, Theory, and Algorithms. Dissertation, Kluwer,\n");
wolffd@0 388 printf(" 2002.\n\n");
wolffd@0 389 }
wolffd@0 390
wolffd@0 391