IS = { zkontrolovano 14 Jan 2013 },
  UPDATE  = { 2012-12-27 },
  key =         {Cerman-Hlavac-ICPR-2012},
  author =      {Cerman, Luk{\'a}{\v s} and Hlav{\'a}{\v c}, V{\'a}clav},
  title =       {Tracking with Context as a Semi-supervised Learning and Labeling Problem},
  year =        {2012},
  pages =       {2124--2127},
  booktitle =   {ICPR '12: Proceedings of 21st International Conference on Pattern Recognition},
  publisher =   {IEEE},
  address =     {New York, USA},
  isbn =        {978-4-9906441-0-9},
  book_pages =  {3768},
  month =       nov,
  day =         {11--15},
  venue =       {Tsukuba International Congress Center, Tsukuba, Japan},
  organization ={IAPR},
  annote =      {It is suggested how a Markov random field can be used
    for object tracking with context information. The tracking is
    formulated as a two layer process. In the first phase, the image
    is represented by a set of feature points which are tracked by a
    standard tracker. In the second phase, the proposed
    semi-supervised learning and labeling algorithm is used to label
    the points to three classes -- object, background and companion.
    The object state (pose) is defined by the set of points labeled as
    the object. The companion represents the object context and
    contains non-object points with a motion similar to the motion of
    the object.  As initialization, labels of the object points only
    are provided by a user in the very first frame. The appearance and
    motion models of the three classes and the labels of the remaining
    points in the whole video sequence are estimated in a GrabCut
    fashion. We show that the use of the companion class together with
    a 3D (space-time) Markov random field helps to identify object
    points behind full occlusions or under strong appearance changes.},
  keywords =    {Motion, Tracking and Video Analysis, Classification and Clustering},
  prestige =    {international},
  note =        {CD-ROM},
  project =     {FP7-ICT-247525 HUMAVIPS, FP7-ICT-247870 NIFTi, TACR TE01020197},