@Article{franc-prj03,
  IS = { checked 07 Dec 2003 },
  UPDATE =       { 2003-08-22 },
  author =       {Franc, Vojt{\v e}ch and Hlav{\'a}{\v c}, V{\' a}clav},
  title =        {An iterative algorithm learning the maximal margin 
                  classifier},
  journal =      {Pattern Recognition},
  year =         {2003},
  volume =       {36},
  number =       {9},
  pages =        {1985--1996},
  month =        {September},
  project =      {CTU 0208313, GACR 102/00/1679, IST-2001-32184 ActIPret, 
                  IST-2001-33266 ISAAC, MSM 212300013},
  publisher =    {Elsevier Science},
  issn =         {0031-3203},
  address =      {Amsterdam, The Netherlands},
  keywords =     {Pattern recognition, Linear classifier, Supervised learning,
                  Support Vector Machines, Kernel functions},
  annote =       {
   A simple learning algorithm for maximal margin classifiers (also
   known as support vector machines with a quadratic cost function) is
   proposed. We build our iterative algorithm on top of the
   Schlesinger--Kozinec algorithm (S-K-algorithm) from 1981, which
   finds a maximal margin hyperplane with a given precision for
   separable data. We suggest a generalization of the S-K-algorithm
   (i) to the non-linear case using kernel functions and (ii) to
   non-separable data. The memory requirement of the algorithm is
   linear in the number of training data, which allows it to be used
   for large training problems.
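   For orientation, the quadratic cost function mentioned above refers
   to the standard L2-penalized SVM primal (a textbook formulation,
   not quoted from the paper):
   \[
     \min_{w,\,b,\,\xi}\; \tfrac{1}{2}\|w\|^2 + \tfrac{C}{2}\sum_i \xi_i^2
     \quad \text{s.t.}\quad y_i\bigl(\langle w, x_i\rangle + b\bigr) \ge 1 - \xi_i ,
   \]
   which for separable data reduces to finding the maximal margin
   hyperplane.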
   The resulting algorithm is simple to implement and, as the
   experiments showed, competitive with state-of-the-art algorithms.
   A Matlab implementation of the algorithm is available.
   We tested the algorithm on the problem of recognizing poor quality
   numerals.
   },
psurl       = { [Franc-PR2003.pdf] },
}