IS = { zkontrolovano 07 Dec 2003 },
  UPDATE  = { 2003-09-05 },
  author =       {Franc, Vojt{\v e}ch and Hlav{\'a}{\v c}, V{\' a}clav},
  title =        {Greedy Algorithm for a Training Set Reduction in the 
                  Kernel Methods},
  booktitle =    {CAIP 2003: Computer Analysis of Images and Patterns},
  pages =        {426--433},
  book_pages =   {777},
  year =         {2003},
  editor =       {Petkov, Nikolai and Westenberg, Michel A.},
  venue =        {Groningen, The Netherlands},
  address =      {Berlin, Germany},
  day =          {25--27},
  keywords =     {kernel methods, PCA, training set reduction},
  month =        aug,
  publisher =    {Springer},
  project =      {MIRACLE ICA1-CT-2000-70002, IST-2001-32184 ActIPret, 
                  MSM 212300013, GACR 102/03/0440},
  isbn =         {3-540-40730-8},
  annote =       {We propose a technique for a training set approximation 
   and its usage in kernel methods. The approach aims to represent data 
   in a low dimensional space with possibly minimal representation error 
   which is similar to the Principal Component Analysis (PCA). In contrast 
   to the PCA, the basis vectors of the low dimensional space used for 
   data representation are properly selected vectors from the training 
   set and not as their linear combinations. The basis vectors can 
   be selected by a simple algorithm which has low computational 
   requirements and allows on-line processing of huge data sets. 
   The proposed method was used to approximate training sets of the 
   Support Vector Machines and Kernel Fisher Linear Discriminant which 
   are known method for learning classifiers. The experiments show that 
   the proposed approximation can significantly reduce the complexity of 
   the found classifiers (the number of the support vectors) while 
   retaining their accuracy.},
  psurl =        {[franc-caip03.pdf]},