@article{Kubelka-JFR2014,
  IS            = {zkontrolovano 19 May 2015},
  UPDATE        = {2015-05-19},
  internal-note = {NOTE(review): key says JFR2014 but year is 2015 -- key kept
                   unchanged so existing \cite commands still resolve; confirm
                   whether 2014 refers to the online-first publication date},
  author        = {Kubelka, Vladim{\'\i}r and Oswald, Lorenz and Pomerleau, Fran{\c c}ois and Colas, Francis and Svoboda, Tom{\'a}{\v s} and Reinstein, Michal},
  affiliation   = {13133-NULL-NULL-NULL-13133-13133},
  authorship    = {16.7-16.7-16.7-16.7-16.7-16.7},
  title         = {Robust Data Fusion of Multi-modal Sensory Information for Mobile Robots},
  journal       = {Journal of Field Robotics},
  publisher     = {John Wiley and Sons},
  address       = {111 River Street, Hoboken, US},
  year          = {2015},
  month         = jun,
  volume        = {32},
  number        = {4},
  pages         = {447--473},
  issn          = {1556-4959},
  annote        = {Urban Search and Rescue missions for mobile robots
                   require reliable state estimation systems resilient
                   to conditions given by the dynamically changing
                   environment.  We design and evaluate a data fusion
                   system for localization of a mobile skid-steer robot
                   intended for USAR missions.  We exploit a rich
                   sensor suite including both proprioceptive (inertial
                   measurement unit and tracks odometry) and
                   exteroceptive sensors (omnidirectional camera and
                   rotating laser rangefinder).  To cope with the
                   specificities of each sensing modality (such as
                   significantly differing sampling frequencies), we
                   introduce a novel fusion scheme based on Extended
                   Kalman filter for 6DOF orientation and position
                   estimation.  We demonstrate the performance on field
                   tests of more than 4.4km driven under standard USAR
                   conditions.  Part of our datasets include ground
                   truth positioning; indoor with a Vicon motion
                   capture system and outdoor with a Leica theodolite
                   tracker.  The overall median accuracy of
                   localization - achieved by combining all the four
                   modalities - was 1.2{\%} and 1.4{\%} of the total
                   distance traveled, for indoor and outdoor
                   environments respectively.  To identify the true
                   limits of the proposed data fusion we propose and
                   employ a novel experimental evaluation procedure
                   based on failure case scenarios.  This way we
                   address the common issues like: slippage, reduced
                   camera field of view, limited laser rangefinder
                   range, together with moving obstacles spoiling the
                   metric map.  We believe such characterization of the
                   failure cases is a first step towards identifying
                   the behavior of state estimation under such
                   conditions.  We release all our datasets to the
                   robotics community for possible benchmarking.},
  keywords      = {perception, position estimation, exploration},
  project       = {FP7-ICT-247870 NIFTi, FP7-ICT-609763 TRADR, GACR 14-13876S},
  doi           = {10.1002/rob.21535},
  ut_isi        = {},
  scopus        = {},
  www           = {https://sites.google.com/site/kubelvla/public-datasets/nifti-zurich-2013},
}