@InProceedings{Werner-ICML07,
  IS = { checked 13 Dec 2007 },
  author =     {Werner, Tom{\'a}{\v s}},
  title =      {What Is Decreased by the Max-sum 
                Arc Consistency Algorithm?},
  booktitle =  {ICML 2007: Proceedings of the 24th International 
                Conference on Machine Learning},
  venue =      {Corvallis, Oregon, USA},
  pages =      {1007--1014},
  editor =     {Zoubin Ghahramani},
  year =       {2007},
  month =      {June},
  day =        {20--24},
  publisher =  {ACM},
  address =    {New York, USA},
  isbn =       {978-1-59593-793-3},
  book_pages = {1203},
  annote = {Inference tasks in Markov random fields (MRFs) are closely
    related to the constraint satisfaction problem (CSP) and its soft
    generalizations. In particular, MAP inference in an MRF is
    equivalent to the weighted (max-sum) CSP. Well-known tools to
    tackle CSPs are arc consistency algorithms, a.k.a. relaxation
    labeling. A promising approach to MAP inference in MRFs is linear
    programming relaxation solved by sequential tree-reweighted
    message passing (TRW-S). A less widely known algorithm equivalent
    to TRW-S is max-sum diffusion, which is slower but very simple.
    We give two theoretical results. First, we show that arc
    consistency algorithms and max-sum diffusion become the same thing
    if formulated in an abstract-algebraic way. Thus, we argue that
    ``max-sum arc consistency'' or ``max-sum relaxation labeling'' is
    a more suitable name for max-sum diffusion. Second, we give a
    criterion that strictly decreases during these algorithms.  It
    turns out that every class of equivalent problems contains a
    unique problem that is minimal with respect to this criterion.},
  keywords = {constraint satisfaction and optimisation, arc consistency, 
    Markov random field, undirected graphical model},
  psurl = {paper and slides [PDF]},
  project =    {IST-004176 COSPAL, MSM6840770038},
  authorship = {100},
}