@InProceedings{Havlena-et-al-EGOVIS-2009,
  IS = { checked 29 Jan 2010 },
  UPDATE  = { 2009-09-21 },
  author =      {Havlena, Michal and Ess, Andreas and Moreau, Wim and 
                 Torii, Akihiko and Jan{\v c}o{\v s}ek, Michal and 
                 Pajdla, Tom{\'a}{\v s} and Van Gool, Luc},
  title =       {{AWEAR} 2.0 System: Omni-directional Audio-Visual 
                 Data Acquisition and Processing},
  year =        {2009},
  pages =       {49--56},
  booktitle =   {EGOVIS 2009: Proceedings of the First Workshop on 
                 Egocentric Vision},
  publisher =   {Omnipress},
  address =     {Madison, USA},
  isbn =        {978-1-4244-3993-5},
  book_pages =  {62},
  month =       {June},
  day =         {20},
  venue =       {Miami, USA},
  annote = {We present a wearable audio-visual capturing system,
    termed AWEAR 2.0, along with its underlying vision components that
    allow robust self-localization, multi-body pedestrian tracking,
    and dense scene reconstruction. Designed as a backpack, the system
    is aimed at supporting the cognitive abilities of the wearer. In
    this paper, we focus on the design issues for the hardware
    platform and on the performance of current state-of-the-art
    computer vision methods on the acquired sequences. We describe the
    calibration procedure of the two omni-directional cameras present
    in the system as well as a Structure-from-Motion pipeline that
    allows for stable multi-body tracking even from rather shaky video
    sequences thanks to ground plane stabilization. Furthermore, we
    show how a dense scene reconstruction can be obtained from the
    data acquired with the platform.},
  keywords =    {Omni-directional Vision, Audio-Visual Data Acquisition},
  authorship =  {15-15-14-14-14-14-14},
  note =        {CD-ROM},
  project =     {FP6-IST-027787 DIRAC, GACR 201/07/1136, MSM6840770038, 
                 CTU0908713, CTU0921413},
  psurl =       {[10.1109/CVPR.2009.5204361.pdf]},
}