OpenGM  2.3.x
Discrete Graphical Model Library
fusion_based_inf.hxx
Go to the documentation of this file.
1 #pragma once
2 #ifndef OPENGM_FUSION_BASED_INF_HXX
3 #define OPENGM_FUSION_BASED_INF_HXX
4 
5 #include <vector>
6 #include <string>
7 #include <iostream>
8 
9 #include "opengm/opengm.hxx"
13 
14 // Fusion Move Solver (they solve binary problems)
19 
20 #ifdef WITH_CPLEX
22 #endif
23 #ifdef WITH_QPBO
24 #include "QPBO.h"
27 #endif
28 #ifdef WITH_AD3
30 #endif
31 
32 #include <stdlib.h> /* srand, rand */
33 
34 
36 
37 // fusion move model generator
39 
40 
41 
42 
43 
44 
45 
46 
47 
48 namespace opengm
49 {
50 
51 
52 
53 
54 
55 
56 
57 
58 
59 
60 
61 namespace proposal_gen{
62 
63 
64 
65 
// ---------------------------------------------------------------------
// Alpha-expansion proposal generator: each call proposes the single
// label currentAlpha_ for every variable whose label space contains it,
// keeping the current label elsewhere.  Alpha cycles through
// [0, maxLabel_).  (The class-name line is elided in this extraction;
// per the cross references the class is AlphaExpansionGen.)
// ---------------------------------------------------------------------
66  template<class GM, class ACC>
68  {
69  public:
70  typedef ACC AccumulationType;
71  typedef GM GraphicalModelType;
73  struct Parameter
74  {
76  };
// One scan over the model records the largest label count (maxLabel_).
77  AlphaExpansionGen(const GM &gm, const Parameter &param)
78  : gm_(gm),
79  param_(param),
80  currentAlpha_(0)
81  {
82  maxLabel_ =0;
83  for(size_t i=0; i<gm.numberOfVariables();++i){
84  if(gm.numberOfLabels(i)>maxLabel_){
85  maxLabel_ = gm.numberOfLabels(i);
86  }
87  }
88  }
89  void reset()
90  {
91  currentAlpha_ = 0;
92  }
93 
// One full sweep proposes each label exactly once.
94  size_t defaultNumStopIt() {return maxLabel_;}
95 
// Propose currentAlpha_ where valid, then advance alpha cyclically.
96  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
97  {
98  for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
99  {
// only expand to alpha if this variable actually has that label
100  if (gm_.numberOfLabels(vi) > currentAlpha_ )
101  {
102  proposal[vi] = currentAlpha_;
103  }
104  else
105  {
106  proposal[vi] = current[vi];
107  }
108  }
109  ++currentAlpha_;
110  if(currentAlpha_>=maxLabel_){
111  currentAlpha_ = 0;
112  }
113  }
114  LabelType currentAlpha(){return currentAlpha_;}
115  private:
116  const GM &gm_;
117  Parameter param_;
118  LabelType maxLabel_;
119  LabelType currentAlpha_;
120  };
121 
122 
123 
// ---------------------------------------------------------------------
// UpDownGen: per-variable "label +/- 1" proposal generator.  Each
// variable carries a direction (+1/-1); the proposal moves its label
// one step in that direction, bouncing off the ends of the label range.
// ---------------------------------------------------------------------
124  template<class GM, class ACC>
125  class UpDownGen
126  {
127  public:
128  typedef ACC AccumulationType;
129  typedef GM GraphicalModelType;
131  struct Parameter
132  {
// (constructor opening line elided in this extraction)
// startDirection: "up", "down", or "random" initial per-variable direction
134  const std::string startDirection = std::string("up")
135  )
136  : startDirection_(startDirection)
137  {
138 
139  }
140  std::string startDirection_;
141  };
142  UpDownGen(const GM &gm, const Parameter &param)
143  : gm_(gm),
144  param_(param),
145  argBuffer_(gm.numberOfVariables(),0),
146  direction_(gm.numberOfVariables())
147  {
148  this->reset();
149  }
// Initialize all directions from the parameter string; throws on an
// unknown direction name.  NOTE(review): uses rand() without srand().
150  void reset()
151  {
152  if(param_.startDirection_== std::string("random")){
153  for(size_t i=0; i<gm_.numberOfVariables();++i){
// NOTE: direction_ has element type LabelType; if LabelType is
// unsigned, -1 is stored wrapped.  The d==1 / else test below still
// toggles correctly, but this relies on wraparound — confirm intended.
154  direction_[i]=rand()%2 == 0 ? -1:1;
155  }
156  }
157  else if(param_.startDirection_== std::string("up")){
158  for(size_t i=0; i<gm_.numberOfVariables();++i){
159  direction_[i]=1;
160  }
161  }
162  else if(param_.startDirection_== std::string("down")){
163  for(size_t i=0; i<gm_.numberOfVariables();++i){
164  direction_[i]=-1;
165  }
166  }
167  else{
168  throw opengm::RuntimeError("wrong starting direction for UpDownGen");
169  }
170  }
171 
172  size_t defaultNumStopIt() {return 2;}
173 
174  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
175  {
176  for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
177  {
178  const size_t numL = gm_.numberOfLabels(vi);
179 
180  const LabelType ol = argBuffer_[vi];
181  const LabelType cl = current[vi];
182 
// NOTE(review): this full copy executes on EVERY loop iteration, so for
// all vi >= 1 argBuffer_[vi] already equals current[vi] when 'ol' is
// read, forcing ol==cl and flipping the direction every sweep.  The
// copy was presumably meant to run once, after the loop — confirm.
183  std::copy(current.begin(), current.end(), argBuffer_.begin());
184 
185  // flip direction?
186  if(ol == cl){
187  direction_[vi]*=-1;
188  }
189  const LabelType d = direction_[vi];
190  if(d==1){
191 
192  if(cl+1<numL){
193  proposal[vi] = cl +1;
194  }
195  else{
196  direction_[vi] = -1;
// NOTE(review): if numL==1, cl==0 and cl-1 underflows an unsigned
// LabelType — single-label variables are not handled; confirm callers
// never pass such models.
197  proposal[vi] = cl - 1 ;
198  }
199  }
200  else{
201  if(cl>=1){
202  proposal[vi] = cl - 1;
203  }
204  else{
205  direction_[vi] = 1;
206  proposal[vi] = cl + 1 ;
207  }
208  }
209  }
210  }
211  private:
212  const GM &gm_;
213  Parameter param_;
// last labeling seen by getProposal (old argument buffer)
214  std::vector<LabelType> argBuffer_;
// per-variable step direction, +1 or -1 (stored in LabelType)
215  std::vector<LabelType> direction_;
// NOTE(review): jumpSize_ is never used in the visible code.
216  std::vector<LabelType> jumpSize_;
217  };
218 
219 
// ---------------------------------------------------------------------
// Alpha-beta-swap proposal generator: walks over all ordered (alpha,
// beta) label pairs (via a 2D ShapeWalker over {maxLabel_ x maxLabel_})
// and proposes swapping the two labels wherever valid.  (Class-name
// line elided in this extraction; cross references name it
// AlphaBetaSwapGen.)
// ---------------------------------------------------------------------
220  template<class GM, class ACC>
222  {
223  public:
224  typedef ACC AccumulationType;
225  typedef GM GraphicalModelType;
227  struct Parameter
228  {
230  };
231  private:
// largest label count over all variables (used as walker shape)
232  static size_t getMaxLabel(const GM &gm){
233  size_t maxLabel = 0;
234  for(size_t i=0; i<gm.numberOfVariables();++i){
235  if(gm.numberOfLabels(i)>maxLabel ){
236  maxLabel = gm.numberOfLabels(i);
237  }
238  }
239  return maxLabel;
240  }
241  public:
242  AlphaBetaSwapGen(const GM &gm, const Parameter &param)
243  : gm_(gm),
244  param_(param),
245  maxLabel_(getMaxLabel(gm)),
246  abShape_(2, maxLabel_),
247  abWalker_(abShape_.begin(), 2)
248  {
249  // ++abWalker_;
250  }
251  void reset()
252  {
253  abWalker_.reset();
254  }
255 
// number of unordered label pairs: maxLabel choose 2
256  size_t defaultNumStopIt() {return (maxLabel_*maxLabel_-maxLabel_)/2;}
257 
// Advance to the next (alpha, beta) pair (skipping the alpha==beta
// diagonal) and swap the two labels in the proposal where both are valid.
258  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
259  {
// fewer than two labels: nothing to swap, pass the state through
260  if( maxLabel_<2){
261  for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
262  proposal[vi] = current[vi];
263  }else{
264  ++abWalker_;
// NOTE(review): the wrap-around test runs BEFORE the diagonal-skip
// loop below, so the walker state at the end of a cycle may differ
// from what this test assumes — confirm the intended cycle length.
265  if(currentAlpha()+1 == maxLabel_ && currentBeta()+1== maxLabel_){
266  reset();
267  }
268  while (abWalker_.coordinateTuple()[0] == abWalker_.coordinateTuple()[1])
269  {
270  ++abWalker_;
271  }
272 
273  const LabelType alpha = abWalker_.coordinateTuple()[0];
274  const LabelType beta = abWalker_.coordinateTuple()[1];
275 
276  for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
277  {
278  if ( current[vi] == alpha && gm_.numberOfLabels(vi) > beta )
279  {
280  proposal[vi] = beta;
281  }
282  else if ( current[vi] == beta && gm_.numberOfLabels(vi) > alpha )
283  {
284  proposal[vi] = alpha;
285  }
286  else
287  {
288  proposal[vi] = current[vi];
289  }
290  }
291  }
292  }
293 
// (accessor signature lines elided: these are currentAlpha() and
// currentBeta(), returning the walker's current coordinates)
295  {
296  return abWalker_.coordinateTuple()[0];
297  }
299  {
300  return abWalker_.coordinateTuple()[1];
301  }
302  private:
303 
304  const GM &gm_;
305  Parameter param_;
306  LabelType maxLabel_;
307  std::vector<LabelType> abShape_;
308  ShapeWalker<typename std::vector<LabelType>::const_iterator> abWalker_;
309 
310  };
311 
// ---------------------------------------------------------------------
// RandomGen: proposes an independent uniformly random label for every
// variable, reseeding per variable with currentStep_+vi.
// ---------------------------------------------------------------------
312  template<class GM, class ACC>
313  class RandomGen
314  {
315  public:
316  typedef ACC AccumulationType;
317  typedef GM GraphicalModelType;
319  struct Parameter
320  {
322  };
323  RandomGen(const GM &gm, const Parameter &param)
324  : gm_(gm),
325  param_(param),
326  currentStep_(0)
327  {
328  }
329  void reset()
330  {
331  currentStep_ = 0;
332  }
333  size_t defaultNumStopIt() {return 10;}
334  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
335  {
336  for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
337  // draw label
// NOTE(review): seed currentStep_+vi overlaps across calls (variable
// vi at step s+1 reuses the seed of vi+1 at step s) — confirm the
// resulting correlation between proposals is acceptable.
338  opengm::RandomUniform<size_t> randomLabel(0, gm_.numberOfLabels(vi),currentStep_+vi);
339  proposal[vi] = randomLabel();
340  }
341  ++currentStep_;
342  }
343  private:
344  const GM &gm_;
345  Parameter param_;
// call counter used as a seed base (typed LabelType, not size_t)
346  LabelType currentStep_;
347  };
348 
349 
// ---------------------------------------------------------------------
// Binary random proposal generator (class-name line elided; cross
// references name it Random2Gen): each variable is proposed label 0 or
// 1, where min(rand{0,1,2}, 1) yields 1 with probability 2/3.
// NOTE(review): this assumes every variable has at least 2 labels;
// label counts are not checked — confirm against callers.
// ---------------------------------------------------------------------
350  template<class GM, class ACC>
352  {
353  public:
354  typedef ACC AccumulationType;
355  typedef GM GraphicalModelType;
357  struct Parameter
358  {
360  };
361  Random2Gen(const GM &gm, const Parameter &param)
362  : gm_(gm),
363  param_(param),
364  currentStep_(0)
365  {
366  }
367  void reset()
368  {
369  currentStep_ = 0;
370  }
371  size_t defaultNumStopIt() {return 4;}
372  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
373  {
374  for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
375  // draw label
376  opengm::RandomUniform<size_t> randomLabel(0,3,currentStep_+vi);
// clamp {0,1,2} down to {0,1}: P(0)=1/3, P(1)=2/3
377  proposal[vi] = std::min(randomLabel(),size_t(1));
378  }
379  ++currentStep_;
380  }
381  private:
382  const GM &gm_;
383  Parameter param_;
384  LabelType currentStep_;
385  };
386 
387 
388 
// ---------------------------------------------------------------------
// RandomLFGen (class-name line elided): draws a uniformly random
// labeling, then locally improves it with one LazyFlipper run
// (max subgraph size 1) before returning it as the proposal.
// ---------------------------------------------------------------------
389  template<class GM, class ACC>
391  {
392  public:
393  typedef ACC AccumulationType;
394  typedef GM GraphicalModelType;
396  struct Parameter
397  {
399  };
400  RandomLFGen(const GM &gm, const Parameter &param)
401  : gm_(gm),
402  param_(param),
403  currentStep_(0)
404  {
405  }
406  void reset()
407  {
408  currentStep_ = 0;
409  }
410  size_t defaultNumStopIt() {return 10;}
411  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
412  {
413  for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
414  // draw label
415  opengm::RandomUniform<size_t> randomLabel(0, gm_.numberOfLabels(vi),currentStep_+vi);
416  proposal[vi] = randomLabel();
417  }
// polish the random labeling: LazyFlipper with maxSubgraphSize=1,
// started from the random proposal, overwrites 'proposal' with its arg
418  typename opengm::LazyFlipper<GM,ACC>::Parameter para(1,proposal.begin(),proposal.end());
419  opengm::LazyFlipper<GM,ACC> lf(gm_,para);
420  lf.infer();
421  lf.arg(proposal);
422  ++currentStep_;
423  }
424  private:
425  const GM &gm_;
426  Parameter param_;
427  LabelType currentStep_;
428  };
429 
430 
// ---------------------------------------------------------------------
// NonUniformRandomGen (class-name line elided): samples each variable's
// label from a softmin distribution over its unary energies,
// exp(-temp * (E_l - min_l E_l)); variables without a unary factor get
// a uniform distribution.
// ---------------------------------------------------------------------
431  template<class GM, class ACC>
433  {
434  public:
435  typedef ACC AccumulationType;
436  typedef GM GraphicalModelType;
438  struct Parameter
439  {
// temp: inverse-temperature of the softmin; larger = more peaked
440  Parameter(const float temp=1.0)
441  : temp_(temp){
442  }
443  float temp_;
444  };
445 
446  NonUniformRandomGen(const GM &gm, const Parameter &param)
447  : gm_(gm),
448  param_(param),
449  currentStep_(0),
450  randomGens_(gm.numberOfVariables())
451  {
452  std::vector<bool> hasUnary(gm.numberOfVariables(),false);
453 
// build one weighted sampler per variable from its unary factor
454  for(IndexType fi=0; fi<gm_.numberOfFactors(); ++fi){
455 
456  if(gm_[fi].numberOfVariables()==1){
457 
458  const IndexType vi = gm_[fi].variableIndex(0);
459  const LabelType numLabels = gm_.numberOfLabels(vi);
460  std::vector<ValueType> weights(numLabels);
461  gm_[fi].copyValues(&weights[0]);
// shift so the minimum energy is 0 before exponentiating (stability)
462  const ValueType minValue = *std::min_element(weights.begin(),weights.end());
463  for(LabelType l=0; l<numLabels; ++l){
464  weights[l]-= minValue;
465  }
466  for(LabelType l=0; l<numLabels; ++l){
467  //OPENGM_CHECK_OP(weights[l],>=,0.0, "NonUniformRandomGen allows only positive unaries");
468  weights[l]=std::exp(-1.0*param_.temp_*weights[l]);
469  }
// NOTE(review): if a variable has several unary factors, the last one
// visited silently wins — confirm models have at most one unary each.
470  randomGens_[vi]=GenType(weights.begin(),weights.end());
471  hasUnary[vi]=true;
472  }
473  }
// fall back to a uniform distribution where no unary was found
474  for(IndexType vi=0 ;vi<gm_.numberOfVariables(); ++vi){
475  if(!hasUnary[vi]){
476  const LabelType numLabels = gm_.numberOfLabels(vi);
477  std::vector<ValueType> weights(numLabels,1.0);
478  randomGens_[vi]=GenType(weights.begin(),weights.end());
479  }
480  }
481 
482  }
483 
484  void reset()
485  {
486  currentStep_ = 0;
487  }
488 
489  size_t defaultNumStopIt() {
490  return 10;
491  }
492  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
493  {
494  for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
495  proposal[vi]=randomGens_[vi]();
496  }
// NOTE(review): currentStep_ is incremented but never read — the
// samplers keep their own state; confirm the counter can be dropped.
497  ++currentStep_;
498  }
499  private:
500  const GM &gm_;
501  Parameter param_;
502  LabelType currentStep_;
503 
504  typedef RandomDiscreteWeighted<LabelType,ValueType> GenType;
505 
506  std::vector < RandomDiscreteWeighted<LabelType,ValueType> > randomGens_;
507  };
508 
509 
510  template<class GM, class ACC>
511  class BlurGen
512  {
513  public:
514  typedef ACC AccumulationType;
515  typedef GM GraphicalModelType;
517  struct Parameter
518  {
519  Parameter(double sigma = 20.0) : sigma_(sigma)
520  {
521  }
522  double sigma_;
523  };
524  BlurGen(const GM &gm, const Parameter &param)
525  : gm_(gm),
526  param_(param),
527  currentStep_(0)
528  {
529  const double pi = 3.1416;
530  const double oneOverSqrt2PiSigmaSquared = 1.0 / (std::sqrt(2.0 * pi) * param_.sigma_);
531  const double oneOverTwoSigmaSquared = 1.0 / (2.0* param_.sigma_ * param_.sigma_);
532  const size_t kradius = std::ceil(3*param_.sigma_);
533  kernel_.resize(2*kradius + 1);
534  double sum = 0;
535  for(double i = 0; i <= kradius ; ++i) {
536  double value = oneOverSqrt2PiSigmaSquared * std::exp(-(i*i)*oneOverTwoSigmaSquared);
537  kernel_[kradius+i] = value;
538  kernel_[kradius-i] = value;
539  sum += 2*value;
540  }
541  for(double i = 0; i <= kradius ; ++i) {
542  kernel_[kradius+i] /= sum;
543  kernel_[kradius-i] /= sum;
544  }
545 
546  size_t N = gm_.numberOfFactors(0);
547  for(size_t i=1; i<gm_.numberOfVariables(); ++i){
548  if(N==gm_.numberOfFactors(i)){
549  height_ = i+1;
550  break;
551  }
552  }
553 
554  width_ = gm_.numberOfVariables()/height_;
555 
556  OPENGM_ASSERT(height_*width_ == gm_.numberOfVariables());
557 
558  //Generate blured label
559  bluredLabel_.resize(gm_.numberOfVariables(),0);
560  std::vector<double> temp(gm_.numberOfVariables(),0.0);
561  std::vector<LabelType> localLabel(gm_.numberOfVariables(),0);
562  for (size_t i=0; i<gm_.numberOfVariables(); ++i){
563  for(typename GM::ConstFactorIterator it=gm_.factorsOfVariableBegin(i); it!=gm_.factorsOfVariableEnd(i);++it){
564  if(gm_[*it].numberOfVariables() == 1){
565  ValueType v;
566  ACC::neutral(v);
567  for(LabelType l=0; l<gm_.numberOfLabels(i); ++l){
568  if(ACC::bop(gm_[*it](&l),v)){
569  v=gm_[*it](&l);
570  localLabel[i]=l;
571  }
572  }
573  continue;
574  }
575  }
576  }
577  const int radius = (kernel_.size()-1)/2;
578  const int h = height_-1;
579  const int w = width_ -1;
580  for (int i = 0; i < height_; ++i) {
581  for (int j = 0; j < width_; ++j) {
582  double val = 0.0;
583  for (int k = 0; k < 2*radius+1; ++k) {
584  int i2 = std::min( h,std::max(0,i-radius+k));
585  val += kernel_[k] * localLabel[ind(i2,j)];
586  }
587  temp[ind(i,j)] = val;
588  }
589  }
590  for (int i = 0; i < height_; ++i) {
591  for (int j = 0; j < width_; ++j) {
592  double val = 0.0;
593  for (int k = 0; k < 2*radius+1; ++k) {
594  int j2 = std::min(w,std::max(0,i-radius+k));
595  val += kernel_[k] * temp[ind(i, j2)];
596  }
597  bluredLabel_[ind(i,j)] = std::min(double(gm_.numberOfLabels(ind(i,j))),(std::max(0.0,val)));
598  }
599  }
600  }
601 
602  void reset(){}
603  size_t defaultNumStopIt() {return 10;}
604 
605  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
606  {
607  if ((currentStep_ % 2) == 0){
608  for (int i = 0; i < height_; ++i) {
609  for (int j = 0; j < width_; ++j) {
610  const size_t var = ind(i,j);
611  opengm::RandomUniform<size_t> randomLabel(0, gm_.numberOfLabels(var),currentStep_+i+j);
612  proposal[var] = (LabelType)(randomLabel());
613  }
614  }
615  }else{
616  proposal.resize(gm_.numberOfVariables(),0.0);
617  opengm::RandomUniform<double> randomLabel(-param_.sigma_*1.5, param_.sigma_*1.5,currentStep_);
618  for(size_t i=0; i<proposal.size();++i){
619  proposal[i] = std::min(gm_.numberOfLabels(i), (LabelType)(std::max(0.0,bluredLabel_[i] + randomLabel())));
620  }
621  }
622  ++currentStep_;
623  }
624  private:
625  size_t ind(int i, int j){ return i+j*height_;}
626  const GM &gm_;
627  Parameter param_;
628  size_t height_;
629  size_t width_;
630  std::vector<double> kernel_;
631  std::vector<double> bluredLabel_;
632  LabelType currentStep_;
633  };
634 
635 
636  template<class GM, class ACC>
638  {
639  public:
640  typedef ACC AccumulationType;
641  typedef GM GraphicalModelType;
643  struct Parameter
644  {
645  Parameter(double sigma = 20.0, bool useLocalMargs = false, double temp=1) : sigma_(sigma), useLocalMargs_(useLocalMargs), temp_(temp)
646  {
647  }
648  double sigma_;
650  double temp_;
651 
652  };
653  EnergyBlurGen(const GM &gm, const Parameter &param)
654  : gm_(gm),
655  param_(param),
656  currentStep_(0)
657  {
658  const double pi = 3.1416;
659  const double oneOverSqrt2PiSigmaSquared = 1.0 / (std::sqrt(2.0 * pi) * param_.sigma_);
660  const double oneOverTwoSigmaSquared = 1.0 / (2.0* param_.sigma_ * param_.sigma_);
661  const size_t kradius = std::ceil(3*param_.sigma_);
662  std::vector<double> kernel;
663  kernel.resize(2*kradius + 1);
664  double sum = 0;
665  for(double i = 0; i <= kradius ; ++i) {
666  double value = oneOverSqrt2PiSigmaSquared * std::exp(-(i*i)*oneOverTwoSigmaSquared);
667  kernel[kradius+i] = value;
668  kernel[kradius-i] = value;
669  sum += 2*value;
670  }
671  for(double i = 0; i <= kradius ; ++i) {
672  kernel[kradius+i] /= sum;
673  kernel[kradius-i] /= sum;
674  }
675 
676  size_t N = gm_.numberOfFactors(0);
677  for(size_t i=1; i<gm_.numberOfVariables(); ++i){
678  if(N==gm_.numberOfFactors(i)){
679  height_ = i+1;
680  break;
681  }
682  }
683 
684  width_ = gm_.numberOfVariables()/height_;
685 
686  OPENGM_ASSERT(height_*width_ == gm_.numberOfVariables());
687 
688  //Generate energy-blured label
689  size_t numLabels =gm_.numberOfLabels(0);
690  std::vector<double> temp(gm_.numberOfVariables(),0.0);
691  std::vector<double> bluredEnergy(gm_.numberOfVariables(),1000000000000.0);
692  std::vector<double> bluredOpt(gm_.numberOfVariables(),0);
693  std::vector<double> energy(gm_.numberOfVariables(),0.0);
694  std::vector<IndexType> unaries(gm_.numberOfVariables());
695  std::vector<std::vector<double> > margs;;
696  if(param_.useLocalMargs_)
697  margs.resize(gm_.numberOfVariables(),std::vector<double>(numLabels));
698 
699  for (size_t i=0; i<gm_.numberOfVariables(); ++i){
700  bool found = false;
701  for(typename GM::ConstFactorIterator it=gm_.factorsOfVariableBegin(i); it!=gm_.factorsOfVariableEnd(i);++it){
702  if(gm_[*it].numberOfVariables() == 1){
703  unaries[i] = *it;
704  found = true;
705  if(gm_[*it].numberOfLabels(0) != numLabels)
706  throw RuntimeError("number of labels are not equal for all variables");
707  continue;
708  }
709  }
710  if(!found)
711  throw RuntimeError("missing unary");
712  }
713 
714 
715  for(size_t l=0; l<numLabels; ++l){
716  for (int i = 0; i < height_; ++i) {
717  for (int j = 0; j < width_; ++j) {
718  const size_t var = ind(i, j);
719  energy[var] =gm_[unaries[ind(i, j)]](&l);
720  }
721  }
722 
723  const int radius = (kernel.size()-1)/2;
724  const int h = height_-1;
725  const int w = width_ -1;
726  for (int i = 0; i < height_; ++i) {
727  for (int j = 0; j < width_; ++j) {
728  double val = 0.0;
729  const size_t var = ind(i, j);
730  for (int k = 0; k < 2*radius+1; ++k) {
731  int i2 = std::min( h,std::max(0,i-radius+k));
732  val += kernel[k] * energy[ind(i2,j)];
733  }
734  temp[var] = val;
735  }
736  }
737  for (int i = 0; i < height_; ++i) {
738  for (int j = 0; j < width_; ++j) {
739  double val = 0.0;
740  const size_t var = ind(i, j);
741  for (int k = 0; k < 2*radius+1; ++k) {
742  int j2 = std::min(w,std::max(0,i-radius+k));
743  val += kernel[k] * temp[ind(i, j2)];
744  }
745  if(param_.useLocalMargs_){
746  margs[var][l]=val;
747  }else{
748  if(val < bluredEnergy[var]){
749  bluredEnergy[var] = val;
750  bluredOpt[var] = l;
751  }
752  }
753  }
754  }
755  }
756  if(param_.useLocalMargs_){
757  localMargGens_.reserve(bluredOpt.size());
758  for(size_t var=0 ; var<bluredOpt.size(); ++var){
759  const ValueType minValue = *std::min_element(margs[var].begin(),margs[var].end());
760  for(LabelType l=0; l<numLabels; ++l){
761  margs[var][l]-= minValue;
762  }
763  for(LabelType l=0; l<numLabels; ++l){
764  margs[var][l]=std::exp(-1.0*param_.temp_*margs[var][l]);
765  }
766  localMargGens_[var]=opengm::RandomDiscreteWeighted<LabelType,ValueType>(margs[var].begin(),margs[var].end(),var);
767  }
768  }else{
769  uniformGens_.reserve(bluredOpt.size());
770  for(size_t var=0 ; var<bluredOpt.size(); ++var){
771  LabelType minVal = (LabelType)(std::max((double)(0) , bluredOpt[var]-param_.sigma_*1.5));
772  LabelType maxVal = (LabelType)(std::min((double)(numLabels) , bluredOpt[var]+param_.sigma_*1.5));
773  uniformGens_[var] = opengm::RandomUniform<LabelType>(minVal, maxVal+1, var);
774  }
775  }
776  }
777 
778  void reset(){}
779  size_t defaultNumStopIt() {return 10;}
780 
781  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
782  {
783  proposal.resize(gm_.numberOfVariables());
784  if(param_.useLocalMargs_){
785  for(size_t i=0; i<proposal.size();++i){
786  proposal[i] = localMargGens_[i]();
787  }
788  }
789  else{
790  opengm::RandomUniform<LabelType> randomLabel(0, gm_.numberOfLabels(0),currentStep_);
791  if ((currentStep_ % 2) == 0){
792  for(size_t i=0; i<proposal.size();++i){
793  proposal[i] = randomLabel();
794  }
795  }else{
796  for(size_t i=0; i<proposal.size();++i){
797  proposal[i] = uniformGens_[i]();
798  }
799  }
800  }
801  ++currentStep_;
802  }
803  private:
804  size_t ind(int i, int j){ return i+j*height_;}
805  const GM &gm_;
806  Parameter param_;
807  size_t height_;
808  size_t width_;
809  LabelType currentStep_;
810 
811  // Random Generators
812  std::vector<opengm::RandomDiscreteWeighted<LabelType,ValueType> > localMargGens_;
813  std::vector<opengm::RandomUniform<LabelType> > uniformGens_;
814  };
815 
816 
// ---------------------------------------------------------------------
// DynamincGen [sic]: runtime-dispatching facade over all proposal
// generators above.  An enum (its body, lines 822-830, is elided in
// this extraction; only the last enumerator "EnergyBlur" is visible)
// selects which concrete generator param_.gen_ refers to.
// NOTE(review): the generator pointers below are never allocated or
// initialized in the visible code — presumably the elided lines (or a
// separate setter) do so; confirm before use, otherwise every dispatch
// dereferences an indeterminate pointer.
// ---------------------------------------------------------------------
817  template<class GM, class ACC>
818  class DynamincGen{
819  public:
820  typedef ACC AccumulationType;
821  typedef GM GraphicalModelType;
831  EnergyBlur
832  };
833 
834  struct Parameter{
836  };
837 
838  DynamincGen(const GM & gm, const Parameter & param)
839  :
840  gm_(gm),
841  param_(param){
842  }
843 
// Forward reset() to the selected generator.
844  void reset(){
845  if(param_.gen_ == AlphaExpansion)
846  alphaExpansionGen_->reset();
847  else if(param_.gen_ == AlphaBetaSwap)
848  alphaBetaSwapGen_->reset();
849  else if(param_.gen_ == UpDown)
850  upDownGen_->reset();
851  else if(param_.gen_ == Random)
852  randomGen_->reset();
853  else if(param_.gen_ == RandomLF)
854  randomLFGen_->reset();
855  else if(param_.gen_ == NonUniformRandom)
856  nonUniformRandomGen_->reset();
857  else if(param_.gen_ == Blur)
858  blurGen_->reset();
859  else if(param_.gen_ == EnergyBlur)
860  energyBlurGen_->reset();
861  else{
862  throw RuntimeError("unknown generator type");
863  }
864  }
// Forward defaultNumStopIt() to the selected generator.
865  size_t defaultNumStopIt() {
866  if(param_.gen_ == AlphaExpansion)
867  return alphaExpansionGen_->defaultNumStopIt();
868  else if(param_.gen_ == AlphaBetaSwap)
869  return alphaBetaSwapGen_->defaultNumStopIt();
870  else if(param_.gen_ == UpDown)
871  return upDownGen_->defaultNumStopIt();
872  else if(param_.gen_ == Random)
873  return randomGen_->defaultNumStopIt();
874  else if(param_.gen_ == RandomLF)
875  return randomLFGen_->defaultNumStopIt();
876  else if(param_.gen_ == NonUniformRandom)
877  return nonUniformRandomGen_->defaultNumStopIt();
878  else if(param_.gen_ == Blur)
879  return blurGen_->defaultNumStopIt();
880  else if(param_.gen_ == EnergyBlur)
881  return energyBlurGen_->defaultNumStopIt();
882  else{
883  throw RuntimeError("unknown generator type");
884  }
885  }
// Forward getProposal() to the selected generator.
886  void getProposal(const std::vector<LabelType> &current , std::vector<LabelType> &proposal){
887  if(param_.gen_ == AlphaExpansion)
888  return alphaExpansionGen_->getProposal(current, proposal);
889  else if(param_.gen_ == AlphaBetaSwap)
890  return alphaBetaSwapGen_->getProposal(current, proposal);
891  else if(param_.gen_ == UpDown)
892  return upDownGen_->getProposal(current, proposal);
893  else if(param_.gen_ == Random)
894  return randomGen_->getProposal(current, proposal);
895  else if(param_.gen_ == RandomLF)
896  return randomLFGen_->getProposal(current, proposal);
897  else if(param_.gen_ == NonUniformRandom)
898  return nonUniformRandomGen_->getProposal(current, proposal);
899  else if(param_.gen_ == Blur)
900  return blurGen_->getProposal(current, proposal);
901  else if(param_.gen_ == EnergyBlur)
902  return energyBlurGen_->getProposal(current, proposal);
903  else{
904  throw RuntimeError("unknown generator type");
905  }
906  }
907  private:
908  const GM & gm_;
909  Parameter param_;
910 
911  // generators
912  AlphaExpansionGen<GM, ACC> * alphaExpansionGen_;
913  AlphaBetaSwapGen <GM, ACC> * alphaBetaSwapGen_;
914  UpDownGen<GM, ACC> * upDownGen_;
915  RandomGen<GM, ACC> * randomGen_;
916  RandomLFGen<GM, ACC> * randomLFGen_;
917  NonUniformRandomGen<GM, ACC> * nonUniformRandomGen_;
918  BlurGen<GM, ACC> * blurGen_;
919  EnergyBlurGen<GM, ACC> * energyBlurGen_;
920  };
921 
922 
923 }
924 
925 
// =====================================================================
// FusionBasedInf: fusion-move inference.  Repeatedly asks PROPOSAL_GEN
// for a candidate labeling and fuses it with the current best labeling
// via a FusionMoverType (see infer()).  Several typedef lines (e.g.
// OPENGM_GM_TYPE_TYPEDEFS, visitor typedefs, FusionMoverType /
// FusionParameter) are elided in this extraction — see the trailing
// cross references.
// =====================================================================
926 template<class GM, class PROPOSAL_GEN>
927 class FusionBasedInf : public Inference<GM, typename PROPOSAL_GEN::AccumulationType>
928 {
929 public:
930  typedef PROPOSAL_GEN ProposalGen;
931  typedef typename ProposalGen::AccumulationType AccumulationType;
932  typedef AccumulationType ACC;
933  typedef GM GraphicalModelType;
935 
939 
940 
941 
944 
945  typedef typename ProposalGen::Parameter ProposalParameter;
947 
// rebind helpers: re-instantiate this solver for another GM (and ACC)
948  template<class _GM>
949  struct RebindGm{
950  typedef typename PROPOSAL_GEN:: template RebindGm<_GM>::type _P;
952  };
953 
954  template<class _GM,class _ACC>
956  typedef typename PROPOSAL_GEN:: template RebindGmAndAcc<_GM, _ACC>::type _P;
958  };
959 
960 
// Aggregates the generator's and the fusion mover's parameters plus the
// iteration budget (numIt_) and the convergence window (numStopIt_;
// 0 means "use the generator's defaultNumStopIt()", see infer()).
961  class Parameter
962  {
963  public:
965  const ProposalParameter & proposalParam = ProposalParameter(),
966  const FusionParameter & fusionParam = FusionParameter(),
967  const size_t numIt=1000,
968  const size_t numStopIt = 0
969  )
970  : proposalParam_(proposalParam),
971  fusionParam_(fusionParam),
972  numIt_(numIt),
973  numStopIt_(numStopIt)
974  {
975 
976  }
977 
// converting constructor from any parameter type with the same fields
978  template<class P>
979  Parameter(const P & p)
980  : proposalParam_(p.proposalParam_),
981  fusionParam_(p.fusionParam_),
982  numIt_(p.numIt_),
983  numStopIt_(p.numStopIt_){
984  }
985 
986 
987  ProposalParameter proposalParam_;
988  FusionParameter fusionParam_;
989  size_t numIt_;
990  size_t numStopIt_;
991 
992 
993  };
994 
995 
996  FusionBasedInf(const GraphicalModelType &, const Parameter & = Parameter() );
997  ~FusionBasedInf();
998  std::string name() const;
999  const GraphicalModelType &graphicalModel() const;
1001  void reset();
1002  template<class VisitorType>
1003  InferenceTermination infer(VisitorType &);
1004  void setStartingPoint(typename std::vector<LabelType>::const_iterator);
1005  virtual InferenceTermination arg(std::vector<LabelType> &, const size_t = 1) const ;
1006  virtual ValueType value()const {return bestValue_;}
1007 private:
1008 
1009 
1010  const GraphicalModelType &gm_;
1011  Parameter param_;
1012 
// owned; allocated in the constructor, freed in the destructor
1013  FusionMoverType * fusionMover_;
1014 
1015  PROPOSAL_GEN proposalGen_;
// best objective value / labeling found so far
1016  ValueType bestValue_;
1017  std::vector<LabelType> bestArg_;
1018  size_t maxOrder_;
1019 };
1020 
1021 
1022 
1023 
// Constructor (signature lines 1025-1026 elided in this extraction):
// allocates the fusion mover and initializes the starting point with
// each variable's unary-optimal label (greedy per-variable argmin/max
// under ACC); variables without a unary factor keep label 0.
1024 template<class GM, class PROPOSAL_GEN>
1027  const GraphicalModelType &gm,
1028  const Parameter &parameter
1029 )
1030  : gm_(gm),
1031  param_(parameter),
1032  fusionMover_(NULL),
1033  proposalGen_(gm_, parameter.proposalParam_),
1034  bestValue_(),
1035  bestArg_(gm_.numberOfVariables(), 0),
1036  maxOrder_(gm.factorOrder())
1037 {
1038  ACC::neutral(bestValue_);
1039  fusionMover_ = new FusionMoverType(gm_,parameter.fusionParam_);
1040  //set default starting point
1041  std::vector<LabelType> conf(gm_.numberOfVariables(),0);
1042  for (size_t i=0; i<gm_.numberOfVariables(); ++i){
1043  for(typename GM::ConstFactorIterator it=gm_.factorsOfVariableBegin(i); it!=gm_.factorsOfVariableEnd(i);++it){
1044  if(gm_[*it].numberOfVariables() == 1){
1045  ValueType v;
1046  ACC::neutral(v);
1047  for(LabelType l=0; l<gm_.numberOfLabels(i); ++l){
1048  if(ACC::bop(gm_[*it](&l),v)){
1049  v=gm_[*it](&l);
1050  conf[i]=l;
1051  }
1052  }
// NOTE: 'continue' at the end of the loop body is a no-op; the scan
// deliberately visits every factor, so a later unary overrides conf[i].
1053  continue;
1054  }
1055  }
1056  }
1057  setStartingPoint(conf.begin());
1058 }
// Destructor (signature line 1060 elided): releases the fusion mover
// allocated in the constructor.
1059 template<class GM, class PROPOSAL_GEN>
1061 {
1062  delete fusionMover_;
1063 }
1064 
1065 
// reset() (name line 1068 elided): intentionally unimplemented — always
// throws.  Callers must construct a fresh solver instead.
1066 template<class GM, class PROPOSAL_GEN>
1067 inline void
1069 {
1070  throw RuntimeError("not implemented yet");
1071 }
1072 
// setStartingPoint (name lines 1075-1076 elided): copies one label per
// variable from [begin, begin+numberOfVariables) into bestArg_ and
// immediately evaluates it.  NOTE: labels are not range-checked here.
1073 template<class GM, class PROPOSAL_GEN>
1074 inline void
1077  typename std::vector<typename FusionBasedInf<GM, PROPOSAL_GEN>::LabelType>::const_iterator begin
1078 )
1079 {
1080  std::copy(begin, begin + gm_.numberOfVariables(), bestArg_.begin());
1081  bestValue_ = gm_.evaluate(bestArg_.begin());
1082 }
1083 
// name(): human-readable solver identifier (signature line 1086 elided).
1084 template<class GM, class PROPOSAL_GEN>
1085 inline std::string
1087 {
1088  return "FusionBasedInf";
1089 }
1090 
// graphicalModel(): reference to the model passed at construction
// (return-type/name lines 1092-1093 elided).
1091 template<class GM, class PROPOSAL_GEN>
1094 {
1095  return gm_;
1096 }
1097 
// infer() convenience overload (name line 1100 elided): runs the
// visitor-based infer() with an EmptyVisitorType.
1098 template<class GM, class PROPOSAL_GEN>
1099 inline InferenceTermination
1101 {
1102  EmptyVisitorType v;
1103  return infer(v);
1104 }
1105 
1106 
// Main fusion-move loop (signature lines 1109-1110 elided): for up to
// numIt_ rounds, draw a proposal, fuse it with the current best
// labeling, and stop after numStopIt_ consecutive rounds without
// improvement.  The visitor can abort the loop by returning non-zero.
1107 template<class GM, class PROPOSAL_GEN>
1108 template<class VisitorType>
1111  VisitorType &visitor
1112 )
1113 {
1114  // evaluate the current best state
1115  bestValue_ = gm_.evaluate(bestArg_.begin());
1116 
1117  visitor.begin(*this);
1118 
1119 
// 0 means "ask the generator for its natural sweep length"
1120  if(param_.numStopIt_ == 0){
1121  param_.numStopIt_ = proposalGen_.defaultNumStopIt();
1122  }
1123 
1124  std::vector<LabelType> proposedState(gm_.numberOfVariables());
1125  std::vector<LabelType> fusedState(gm_.numberOfVariables());
1126 
1127  size_t countRoundsWithNoImprovement = 0;
1128 
1129  for(size_t iteration=0; iteration<param_.numIt_; ++iteration){
1130  // store initial value before one proposal round
1131  const ValueType valueBeforeRound = bestValue_;
1132 
1133  proposalGen_.getProposal(bestArg_,proposedState);
1134 
// full model evaluation per round is O(|factors|) — the comment below
// is original and flags exactly this cost
1135  // this might be to expensive
1136  ValueType proposalValue = gm_.evaluate(proposedState);
1137  //ValueType proposalValue = 100000000000000000000000.0;
1138 
// fuse() writes the fused labeling into fusedState and the fused value
// into bestValue_ (passed as both input and output argument)
1139  const bool anyVar = fusionMover_->fuse(bestArg_,proposedState, fusedState,
1140  bestValue_, proposalValue, bestValue_);
1141 
1142 
1143  if(anyVar){
1144  if( !ACC::bop(bestValue_, valueBeforeRound)){
1145  ++countRoundsWithNoImprovement;
1146  }
1147  else{
1148  // Improvement
1149  countRoundsWithNoImprovement = 0;
// NOTE(review): bestArg_ is updated only on STRICT improvement, while
// bestValue_ was already overwritten by fuse(); an equal-value fused
// state is discarded — confirm bestValue_ always matches bestArg_ here.
1150  bestArg_ = fusedState;
1151  }
1152  if(visitor(*this)!=0){
1153  break;
1154  }
1155  }
1156  else{
1157  ++countRoundsWithNoImprovement;
1158  }
// check if converged or done (numStopIt_ != 0 is always true here,
// since 0 was replaced by the generator default above)
1159  // check if converged or done
1160  if(countRoundsWithNoImprovement==param_.numStopIt_ && param_.numStopIt_ !=0 )
1161  break;
1162  }
1163  visitor.end(*this);
1164  return NORMAL;
1165 }
1166 
1167 
1168 
1169 
// arg() (signature lines 1172-1173 elided): copies the best labeling
// into x.  Only the first solution (N==1) is supported; any other N
// yields UNKNOWN.
1170 template<class GM, class PROPOSAL_GEN>
1171 inline InferenceTermination
1174  std::vector<LabelType> &x,
1175  const size_t N
1176 ) const
1177 {
1178  if (N == 1)
1179  {
1180  x.resize(gm_.numberOfVariables());
1181  for (size_t j = 0; j < x.size(); ++j)
1182  {
1183  x[j] = bestArg_[j];
1184  }
1185  return NORMAL;
1186  }
1187  else
1188  {
1189  return UNKNOWN;
1190  }
1191 }
1192 
1193 } // namespace opengm
1194 
1195 #endif // #ifndef OPENGM_FUSION_BASED_INF_HXX
RandomGen(const GM &gm, const Parameter &param)
AlphaBetaSwapGen(const GM &gm, const Parameter &param)
EnergyBlurGen(const GM &gm, const Parameter &param)
The OpenGM namespace.
Definition: config.hxx:43
opengm::visitors::VerboseVisitor< FusionBasedInf< GM, PROPOSAL_GEN > > VerboseVisitorType
Parameter(const ProposalParameter &proposalParam=ProposalParameter(), const FusionParameter &fusionParam=FusionParameter(), const size_t numIt=1000, const size_t numStopIt=0)
void infer(const typename INF::GraphicalModelType &gm, const typename INF::Parameter &param, std::vector< typename INF::LabelType > &conf)
Definition: inference.hxx:34
virtual InferenceTermination arg(std::vector< LabelType > &, const size_t=1) const
UpDownGen(const GM &gm, const Parameter &param)
FusionBasedInf< _GM, _P > type
#define OPENGM_ASSERT(expression)
Definition: opengm.hxx:77
ProposalGen::AccumulationType AccumulationType
InferenceTermination arg(std::vector< LabelType > &, const size_t=1) const
output a solution
AlphaExpansionGen(const GM &gm, const Parameter &param)
opengm::visitors::EmptyVisitor< FusionBasedInf< GM, PROPOSAL_GEN > > EmptyVisitorType
RandomLFGen(const GM &gm, const Parameter &param)
NonUniformRandomGen(const GM &gm, const Parameter &param)
std::string name() const
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
Alpha-Beta-Swap Algorithm.
PROPOSAL_GEN::template RebindGmAndAcc< _GM, _ACC >::type _P
FusionMoverType::Parameter FusionParameter
Inference algorithm interface.
Definition: inference.hxx:43
void setStartingPoint(typename std::vector< LabelType >::const_iterator)
opengm::visitors::TimingVisitor< FusionBasedInf< GM, PROPOSAL_GEN > > TimingVisitorType
DynamincGen(const GM &gm, const Parameter &param)
Alpha-Expansion Algorithm.
HlFusionMover< GraphicalModelType, AccumulationType > FusionMover
Random2Gen(const GM &gm, const Parameter &param)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
const GraphicalModelType & graphicalModel() const
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
virtual ValueType value() const
return the solution (value)
Parameter(double sigma=20.0, bool useLocalMargs=false, double temp=1)
InferenceTermination infer()
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
BlurGen(const GM &gm, const Parameter &param)
PROPOSAL_GEN::template RebindGm< _GM >::type _P
HlFusionMover< GraphicalModelType, AccumulationType > FusionMoverType
Parameter(const std::string startDirection=std::string("up"))
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
A generalization of ICM B. Andres, J. H. Kappes, U. Koethe and Hamprecht F. A., The Lazy Flipper: MA...
FusionBasedInf(const GraphicalModelType &, const Parameter &=Parameter())
ProposalGen::Parameter ProposalParameter
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
OpenGM runtime error.
Definition: opengm.hxx:100
InferenceTermination infer()
start the algorithm
InferenceTermination
Definition: inference.hxx:24
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)