@inproceedings{Franc-OCAS-ICML08,
  IS = { checked 31 Dec 2009 },
  UPDATE  = { 2009-12-31 },
   author = {Franc, V. and Sonnenburg, S.},
   title = {Optimized cutting plane algorithm for support vector machines},
   booktitle = {Proceedings of the 25th Annual International 
                Conference on Machine Learning (ICML 2008)},
   venue = {Helsinki, Finland},
   editor = {Andrew McCallum and Sam Roweis},
   year = {2008},
   pages = {320--327},
   month = {July},
   annote = {We have developed a new linear Support Vector Machine
     (SVM) training algorithm called OCAS. Its computational effort
     scales linearly with the sample size. In an extensive empirical
     evaluation, OCAS significantly outperforms current state-of-the-art
     SVM solvers such as SVMLight, SVMPerf and BMRM, achieving speedups
     of over 1,000 over SVMLight and 20 over SVMPerf on some datasets,
     while obtaining the same precise support vector solution. Even in
     the early optimization steps, OCAS often converges faster than
     SGD and Pegasos, the approximative methods prevailing in this
     domain so far. By effectively parallelizing OCAS, we were able to
     train on a dataset of 15 million examples (about 32GB in size) in
     just 671 seconds; a competing string kernel SVM required 97,484
     seconds to train on 10 million examples sub-sampled from this
     dataset.},
   psurl = {[Franc-OCAS-ICML08.pdf]},
   day =         {5--9},
   publisher =   {ACM},
   address =     {New York, USA},
   isbn =        {978-1-59593-793-3},
   book_pages=   {1203},
   keywords =    {support vector machines, large scale learning, 
                  cutting plane algorithm},
   note =        {electronic},
}