@InProceedings{Franc-LOO-ICML08,
  IS = { checked 31 Dec 2009 },
  UPDATE  = { 2009-12-31 },
   author = {Franc, V. and Laskov, P. and M{\"u}ller, K.-R.},
   title = {Stopping conditions for exact computation of leave-one-out 
            error in support vector machines},
   booktitle = {Proceedings of the 25th International Conference 
                on Machine Learning (ICML 2008)},
   editor   = {Andrew McCallum and Sam Roweis},
   year     = {2008},
   pages    = {328--335},
   day      = {5--9},
   month    = {July},
   venue    = {Helsinki, Finland},
   publisher = {ACM},
   address  = {New York, USA},
   isbn     = {978-1-59593-793-3},
   book_pages = {1203},
   keywords = {support vector machines, optimal stopping conditions, 
               leave-one-out},
   psurl = {[Franc-LOO-ICML08.pdf]},
   annote ={We propose a new stopping condition for a Support Vector
      Machine (SVM) solver which precisely reflects the objective of
      the Leave-One-Out (LOO) error computation. The stopping condition
      guarantees that the output of an intermediate SVM solution is
      identical to the output of the optimal SVM solution with one data
      point excluded from the training set. A simple augmentation of a
      general SVM training algorithm allows one to use a stopping
      criterion equivalent to the proposed sufficient condition. A
      comprehensive experimental evaluation shows a consistent speedup
      of the exact LOO computation, up to a factor of 13 for the linear
      kernel. The new algorithm can be seen as an example of
      constructive guidance of an optimization algorithm towards
      achieving the best attainable expected risk at optimal
      computational cost.},
}