@InProceedings{Cai-ICVS-2013,
  IS = { checked 24 Jan 2014 },
  UPDATE  = { 2013-08-02 },
  author =     {Cai, Hongping and Werner, Tom{\'a}{\v s} and 
    Matas, Ji{\v r}{\'\i}},
  title =      {Fast Detection of Multiple Textureless {3-D} Objects},
  year =       {2013},
  pages =      {103--112},
  editor    = {Chen, Mei and Leibe, Bastian and Neumann, Bernd},
  booktitle = {Computer Vision Systems - 9th International Conference,
               ICVS 2013, St. Petersburg, Russian Federation, July 16-18, 2013. Proceedings},
  publisher = {Springer},
  series    = {Lecture Notes in Computer Science},
  volume    = {7963},
  isbn      = {978-3-642-39401-0},
  issn      = {0302-9743},
  ee        = {http://dx.doi.org/10.1007/978-3-642-39402-7},
  address   =  {Heidelberg, Germany},
  book_pages = {366},
  month =      {July},
  day =        {16-18},
  venue =      {St. Petersburg, Russian Federation},
  prestige =   {international},
  annote = {We propose a fast edge-based approach for detection and
     approximate pose estimation of multiple textureless objects in a
     single image.  The objects are trained from a set of edge maps,
     each showing one object in one pose. To each scanning window in
     the input image, the nearest neighbor is found among these
     training templates by a two-level cascade. The first cascade
     level, based on a novel edge-based sparse image descriptor and
     fast search by index table, prunes the majority of background
     windows. The second level verifies the surviving detection
     hypotheses by oriented chamfer matching, improved by selecting
      discriminative edges and by compensating for a bias towards simple
      objects. The method outperforms the state-of-the-art approach~by
      Damen et\,al.~(2012). The processing is near real-time, ranging
      from 2 to 4 frames per second for a training set of size $10^4$.},
  keywords = {visual recognition, scanning window, textureless objects},
  psurl = {PDF},
  project = {FP7-ICT-270138 DARWIN, TACR TE01020415 V3C},
}