OpenGM  2.3.x
Discrete Graphical Model Library
mqpbo.hxx
Go to the documentation of this file.
1 #pragma once
2 #ifndef OPENGM_MQPBO_HXX
3 #define OPENGM_MQPBO_HXX
4 
#include <vector>
#include <string>
#include <iostream>
#include <algorithm>
#include <cstdlib>
#include <ctime>

#include "opengm/opengm.hxx"
#include "opengm/inference/inference.hxx"
#include "opengm/inference/visitors/visitors.hxx"
#include "opengm/functions/view_fix_variables_function.hxx"
#include "opengm/inference/messagepassing/messagepassing.hxx"
#include "QPBO.h"

//#define MQPBOHotFixOutPutPartialOPtimalityMap
#ifdef MQPBOHotFixOutPutPartialOPtimalityMap
#include "opengm/datastructures/marray/marray_hdf5.hxx"
#endif
21 
23 
24 namespace opengm {
25 
36  template<class GM, class ACC>
37  class MQPBO : public Inference<GM, ACC>
38  {
39  public:
40  typedef ACC AccumulationType;
41  typedef GM GmType;
42  typedef GM GraphicalModelType;
48 
49  template<class _GM>
50  struct RebindGm{
52  };
53 
54  template<class _GM,class _ACC>
57  };
58 
59 
60 
62 
63  class Parameter{
64  public:
65  Parameter(): useKovtunsMethod_(true), probing_(false), strongPersistency_(false), rounds_(0), permutationType_(NONE) {};
66 
67 
68  template<class P>
69  Parameter(const P & p)
70  : label_(p.label_),
71  useKovtunsMethod_(p.useKovtunsMethod_),
72  probing_(p.probing_),
73  strongPersistency_(p.strongPersistency_),
74  rounds_(p.rounds_),
75  permutationType_(p.permutationType_){
76 
77  }
78 
79  std::vector<LabelType> label_;
81  const bool probing_; //do not use this!
83  size_t rounds_;
85  };
86 
87  MQPBO(const GmType&, const Parameter& = Parameter());
88  ~MQPBO();
89  std::string name() const;
90  const GmType& graphicalModel() const;
92  void reset();
93  typename GM::ValueType bound() const;
94  typename GM::ValueType value() const;
95  template<class VisitorType>
96  InferenceTermination infer(VisitorType&);
97  InferenceTermination testQuess(std::vector<LabelType> &guess);
99  void setStartingPoint(typename std::vector<typename GM::LabelType>::const_iterator);
100  virtual InferenceTermination arg(std::vector<LabelType>&, const size_t = 1) const ;
101 
102  const std::vector<opengm::Tribool>& partialOptimality(IndexType var) const {return partialOptimality_[var];}
103  bool partialOptimality(IndexType var, LabelType& l) const {l=label_[var]; return optimal_[var];}
104 
105  double optimalityV() const;
106  double optimality() const;
107  private:
109  void AddUnaryTerm(int var, ValueType v0, ValueType v1);
110  void AddPairwiseTerm(int var0, int var1,ValueType v00,ValueType v01,ValueType v10,ValueType v11);
111 
112  const GmType& gm_;
113  Parameter param_;
114 
115  kolmogorov::qpbo::QPBO<GraphValueType>* qpbo_;
116  ValueType constTerm_;
117  ValueType bound_;
118  //int* label_;
119  //int* defaultLabel_;
120 
121  std::vector<std::vector<LabelType> > permutation_; // org -> new
122  std::vector<std::vector<LabelType> > inversePermutation_; // new -> org
123  std::vector<std::vector<opengm::Tribool> > partialOptimality_;
124  std::vector<bool> optimal_;
125  std::vector<LabelType> label_;
126  std::vector<size_t> variableOffset_;
127 
128  size_t numNodes_;
129  size_t numEdges_;
130 
131  GraphValueType scale;
132 
133  };
135 
136 
137  template<class GM, class ACC>
139  (
140  const GmType& gm,
141  const Parameter& parameter
142  )
143  : gm_(gm),
144  param_(parameter),
145  scale(1)
146  {
147  for(size_t j = 0; j < gm_.numberOfFactors(); ++j) {
148  if(gm_[j].numberOfVariables() > 2) {
149  throw RuntimeError("This implementation of MQPBO supports only factors of order <= 2.");
150  }
151  }
152 
153  //Allocate Memory for Permutations
154  permutation_.resize(gm_.numberOfVariables());
155  inversePermutation_.resize(gm_.numberOfVariables());
156  for(IndexType var=0; var<gm_.numberOfVariables(); ++var){
157  permutation_[var].resize(gm_.numberOfLabels(var));
158  inversePermutation_[var].resize(gm_.numberOfLabels(var));
159  }
160 
161  //Set Default Optimality
162  partialOptimality_.resize(gm_.numberOfVariables());
163  optimal_.resize(gm_.numberOfVariables(),false);
164  label_.resize(gm_.numberOfVariables());
165  for(IndexType var=0; var<gm_.numberOfVariables(); ++var){
166  partialOptimality_[var].resize(gm_.numberOfLabels(var),opengm::Tribool::Maybe);
167  }
168 
169  //Calculated number of nodes and edges
170  numNodes_=0;
171  numEdges_=0;
172  size_t numSOF=0;
173  variableOffset_.resize(gm_.numberOfVariables(),0);
174  for(IndexType var=0; var<gm_.numberOfVariables(); ++var){
175  variableOffset_[var] = numNodes_;
176  numNodes_ += gm_.numberOfLabels(var)-1;
177  }
178  for(IndexType var=1; var<gm_.numberOfVariables(); ++var){
179  OPENGM_ASSERT( variableOffset_[var-1]< variableOffset_[var]);
180  }
181 
182  for(IndexType f=0; f<gm_.numberOfFactors(); ++f){
183  if(gm_[f].numberOfVariables()==1)
184  numEdges_ += gm_[f].numberOfLabels(0)-2;
185  if(gm_[f].numberOfVariables()==2){
186  numEdges_ += (gm_[f].numberOfLabels(0)-1);//*(gm_[f].numberOfLabels(1)-1);
187  ++numSOF;
188  }
189  }
190 
191  if(param_.rounds_>0){
192  //std::cout << "Large" <<std::endl;
193  qpbo_ = new kolmogorov::qpbo::QPBO<GraphValueType > (numNodes_, numEdges_); // max number of nodes & edges
194  qpbo_->AddNode(numNodes_);
195  }
196  else{
197  //std::cout << "Small" <<std::endl;
198  qpbo_ = new kolmogorov::qpbo::QPBO<GraphValueType > (gm_.numberOfVariables(), numSOF); // max number of nodes & edges
199  qpbo_->AddNode(gm_.numberOfVariables());
200  }
201  }
202 
203  template<class GM, class ACC>
205  (
206  )
207  {
208  delete qpbo_;
209  }
210 
213  template<class GM, class ACC>
214  inline void
216  {
218  }
219 
221  template<class GM, class ACC>
222  inline void
224  (
225  typename std::vector<typename GM::LabelType>::const_iterator begin
226  )
227  {
229  }
230 
231  template<class GM, class ACC>
232  inline std::string
234  {
235  return "MQPBO";
236  }
237 
238  template<class GM, class ACC>
239  inline const typename MQPBO<GM,ACC>::GmType&
241  {
242  return gm_;
243  }
244 
245 
246  template<class GM, class ACC>
247  inline void
249  qpbo_->AddUnaryTerm(var, scale*v0, scale*v1);
250  }
251 
252  template<class GM, class ACC>
253  inline void
254  MQPBO<GM,ACC>::AddPairwiseTerm(int var0, int var1,ValueType v00,ValueType v01,ValueType v10,ValueType v11){
255  qpbo_->AddPairwiseTerm(var0, var1,scale*v00,scale*v01,scale*v10,scale*v11);
256  }
257 
258  template<class GM, class ACC>
259  inline InferenceTermination
261  {
262  qpbo_->Reset();
263  qpbo_->AddNode(gm_.numberOfVariables());
264  for(size_t f = 0; f < gm_.numberOfFactors(); ++f) {
265  if(gm_[f].numberOfVariables() == 0) {
266  ;
267  }
268  else if(gm_[f].numberOfVariables() == 1) {
269  const LabelType numLabels = gm_[f].numberOfLabels(0);
270  const IndexType var = gm_[f].variableIndex(0);
271 
272  ValueType v0 = gm_[f](&guess);
273  ValueType v1; ACC::neutral(v1);
274  for(LabelType i=0; i<guess; ++i)
275  ACC::op(gm_[f](&i),v1);
276  for(LabelType i=guess+1; i<numLabels; ++i)
277  ACC::op(gm_[f](&i),v1);
278  AddUnaryTerm(var, v0, v1);
279  }
280  else if(gm_[f].numberOfVariables() == 2) {
281  const IndexType var0 = gm_[f].variableIndex(0);
282  const IndexType var1 = gm_[f].variableIndex(1);
283 
284  LabelType c[2] = {guess,guess};
285  LabelType c2[2] = {0,1};
286 
287  ValueType v00 = gm_[f](c);
288  ValueType v01 = gm_[f](c2);
289  ValueType v10 = v01;
290  ValueType v11 = std::min(v00,v01);
291  AddPairwiseTerm(var0, var1,v00,v01,v10,v11);
292  }
293  }
294  qpbo_->MergeParallelEdges();
295  qpbo_->Solve();
296  for(IndexType var=0; var<gm_.numberOfVariables();++var){
297  if(qpbo_->GetLabel(var)==0){
298  for(LabelType l=0; l<gm_.numberOfLabels(var); ++l){
299  partialOptimality_[var][l] =opengm::Tribool::False;
300  }
301  partialOptimality_[var][guess] =opengm::Tribool::True;
302  optimal_[var]=true;
303  label_[var]=guess;
304  }
305  }
306  return NORMAL;
307  }
308 
309 
310  template<class GM, class ACC>
311  inline InferenceTermination
312  MQPBO<GM,ACC>::testQuess(std::vector<LabelType> &guess)
313  {
314  qpbo_->Reset();
315  qpbo_->AddNode(gm_.numberOfVariables());
316 
317  for(size_t var=0; var<gm_.numberOfVariables(); ++var){
318  std::vector<ValueType> v(gm_.numberOfLabels(var),0);
319  for(size_t i=0; i<gm_.numberOfFactors(var); ++i){
320  size_t f = gm_.factorOfVariable(var, i);
321  if(gm_[f].numberOfVariables()==1){
322  for(size_t j=0; j<v.size(); ++j){
323  v[j] += gm_[f](&j);
324  }
325 
326  }
327  else if(gm_[f].numberOfVariables() == 2) {
328  LabelType c[] = {guess[gm_[f].variableIndex(0)],guess[gm_[f].variableIndex(1)]};
329  if(gm_[f].variableIndex(0)==var){
330  for(c[0]=0; c[0]<guess[var]; ++c[0]){
331  v[c[0]] += gm_[f](c);
332  }
333  for(c[0]=guess[var]+1; c[0]<v.size(); ++c[0]){
334  v[c[0]] += gm_[f](c);
335  }
336  }
337  else if(gm_[f].variableIndex(1)==var){
338  for(c[1]=0; c[1]<guess[var]; ++c[1]){
339  v[c[1]] += gm_[f](c);
340  }
341  for(c[1]=guess[var]+1; c[1]<v.size(); ++c[1]){
342  v[c[1]] += gm_[f](c);
343  }
344  }
345  else{
346  OPENGM_ASSERT(false);
347  }
348  }
349  }
350  ValueType v0 = v[guess[var]];
351  ValueType v1; ACC::neutral(v1);
352  for(size_t j=0; j<guess[var]; ++j){
353  ACC::op(v[j],v1);
354  }
355  for(size_t j=guess[var]+1; j<v.size(); ++j){
356  ACC::op(v[j],v1);
357  }
358  AddUnaryTerm(var, v0, v1);
359  }
360 
361 
362  for(size_t f = 0; f < gm_.numberOfFactors(); ++f) {
363  if(gm_[f].numberOfVariables() < 2) {
364  continue;
365  }
366  else if(gm_[f].numberOfVariables() == 2) {
367  const IndexType var0 = gm_[f].variableIndex(0);
368  const IndexType var1 = gm_[f].variableIndex(1);
369 
370  LabelType c[2] = {guess[var0],guess[var1]};
371  LabelType c0[2] = {guess[var0],guess[var1]};
372  LabelType c1[2] = {guess[var0],guess[var1]};
373  ValueType v00 = gm_[f](c);
374  ValueType v01 = 0;
375  ValueType v10 = 0;
376  ValueType v11; ACC::neutral(v11);
377 
378  for(c[0]=0; c[0]<gm_[f].numberOfLabels(0); ++c[0]){
379  for(c[1]=0; c[1]<gm_[f].numberOfLabels(1); ++c[1]){
380  if(c[0]==guess[var0] || c[1]==guess[var1]){
381  continue;
382  }
383  else{
384  c0[0]=c[0];
385  c1[1]=c[1];
386  ValueType v = gm_[f](c) - gm_[f](c0) - gm_[f](c1);
387  ACC::op(v,v11);
388  }
389  }
390  }
391  AddPairwiseTerm(var0, var1,v00,v01,v10,v11);
392  }
393  }
394  qpbo_->MergeParallelEdges();
395  qpbo_->Solve();
396  for(IndexType var=0; var<gm_.numberOfVariables();++var){
397  if(qpbo_->GetLabel(var)==0){
398  for(LabelType l=0; l<gm_.numberOfLabels(var); ++l){
399  partialOptimality_[var][l] =opengm::Tribool::False;
400  }
401  partialOptimality_[var][guess[var]] =opengm::Tribool::True;
402  optimal_[var]=true;
403  label_[var]=guess[var];
404  }
405  }
406  return NORMAL;
407  }
408 
409  template<class GM, class ACC>
410  inline InferenceTermination
412  {
413  //Set up MQPBO for current partial optimality
414  std::vector<IndexType> var2VarR(gm_.numberOfVariables());
415  std::vector<IndexType> varR2Var;
416  std::vector<size_t> varROffset;
417  size_t numBVar=0;
418  for(size_t var = 0; var < gm_.numberOfVariables(); ++var) {
419  if(optimal_[var]){
420  ;//do nothing
421  }
422  else{
423  varROffset.push_back(numBVar);
424  numBVar = numBVar + gm_.numberOfLabels(var)-1;
425  var2VarR[var]=varR2Var.size();
426  varR2Var.push_back(var);
427  }
428  }
429  std::cout << varR2Var.size() <<" / "<<gm_.numberOfVariables()<<std::endl;
430 
431  //Find Permutation
432  if(permutationType==NONE){
433  for(IndexType var=0; var<gm_.numberOfVariables(); ++var){
434  for(LabelType l=0; l<gm_.numberOfLabels(var); ++l){
435  permutation_[var][l]=l;
436  }
437  }
438  }
439  else if(permutationType==RANDOM){
440  srand ( unsigned ( time (NULL) ) );
441  for(IndexType var=0; var<gm_.numberOfVariables(); ++var){
442  LabelType numStates = gm_.numberOfLabels(var);
443  //IDENTYTY PERMUTATION
444  for(LabelType i=0; i<numStates;++i){
445  permutation_[var][i]=i;
446  }
447  //SHUFFEL PERMUTATION
448  std::random_shuffle(permutation_[var].begin(),permutation_[var].end());
449  }
450  }
451  else if(permutationType==MINMARG){
452 
454 
455 
456 
457  std::vector<LabelType> numberOfLabels(varR2Var.size());
458  for(size_t i=0; i<varR2Var.size(); ++i)
459  numberOfLabels[i] = gm_.numberOfLabels(varR2Var[i]);
460  typename SUBGM::SpaceType subspace(numberOfLabels.begin(),numberOfLabels.end());
461  SUBGM gm(subspace);
462  for(IndexType f=0; f<gm_.numberOfFactors();++f){
463  std::vector<PositionAndLabel<IndexType, LabelType> > fixed;
464  std::vector<IndexType> vars;
465  for(IndexType i=0; i<gm_[f].numberOfVariables();++i){
466  const IndexType var = gm_[f].variableIndex(i);
467  if(optimal_[var]){
468  fixed.push_back(PositionAndLabel<IndexType, LabelType>(i,label_[var]));
469  }
470  else{
471  vars.push_back(var2VarR[var]);
472  }
473  }
474  opengm::ViewFixVariablesFunction<GM> func(gm_[f], fixed);
475  gm.addFactor(gm.addFunction(func),vars.begin(),vars.end());
476  }
477 
479  typename LBP::Parameter para;
480  para.maximumNumberOfSteps_ = 100;
481  para.damping_ = 0.5;
482  LBP bp(gm,para);
483  bp.infer();
484 
485  //for(IndexType var=0; var<gm_.numberOfVariables(); ++var){
486  for(IndexType varR=0; varR<gm.numberOfVariables(); ++varR){
487  IndexType var = varR2Var[varR];
488  LabelType numStates = gm_.numberOfLabels(var);
489  typename GM::IndependentFactorType marg;
490  bp.marginal(varR, marg);
491 
492  //SHUFFEL PERMUTATION
493  std::vector<LabelType> list(numStates);
494  for(LabelType i=0; i<numStates;++i){
495  list[i]=i;
496  }
497  LabelType t;
498  for(LabelType i=0; i<numStates;++i){
499  for(LabelType j=i+1; i<numStates;++i){
500  if(marg(&list[j])<marg(&list[i])){
501  t = list[i];
502  list[i]=list[j];
503  list[j]=t;
504  }
505  }
506  }
507  for(LabelType i=0; i<numStates;++i){
508  permutation_[var][i] = list[i];
509  }
510  }
511  }
512  else{
513  throw RuntimeError("Error: Unknown Permutation!");
514  }
515  //CALCULATE INVERSE PERMUTATION
516  for(IndexType var=0; var<gm_.numberOfVariables(); ++var){
517  for(LabelType l=0; l<gm_.numberOfLabels(var); ++l){
518  inversePermutation_[var][permutation_[var][l]]=l;
519  }
520  }
521 
522 
523  //Build Graph
524  ValueType constValue = 0;
525  qpbo_->Reset();
526  qpbo_->AddNode(numBVar);
527  //qpbo_->AddNode(numNodes_);
528 
529  for(IndexType varR = 0; varR < varR2Var.size(); ++varR) {
530  IndexType var = varR2Var[varR];
531  for(size_t l = 0; l+1<gm_.numberOfLabels(var); ++l){
532  AddUnaryTerm((int) (varROffset[varR]+l), 0.0, 0.0);
533  }
534  for(LabelType l=1; l+1<gm_.numberOfLabels(var); ++l){
535  AddPairwiseTerm((int) (varROffset[varR]+l-1), (int) (varROffset[varR]+l), 0.0, 1e30, 0.0, 0.0);
536  }
537  }
538  /*
539  for(size_t var = 0; var < gm_.numberOfVariables(); ++var) {
540  for(size_t l = 0; l+1<gm_.numberOfLabels(var); ++l){
541  AddUnaryTerm((int) (variableOffset_[var]+l), 0.0, 0.0);
542  }
543  for(LabelType l=1; l+1<gm_.numberOfLabels(var); ++l){
544  AddPairwiseTerm((int) (variableOffset_[var]+l-1), (int) (variableOffset_[var]+l), 0.0, 1000000.0, 0.0, 0.0);
545  }
546  }
547  */
548 
549  for(size_t f = 0; f < gm_.numberOfFactors(); ++f) {
550  if(gm_[f].numberOfVariables() == 0) {
551  const LabelType l = 0;
552  constValue += gm_[f](&l);
553  }
554  else if(gm_[f].numberOfVariables() == 1) {
555  const LabelType numLabels = gm_[f].numberOfLabels(0);
556  const IndexType var = gm_[f].variableIndex(0);
557  if(optimal_[var]){
558  constValue += gm_[f](&(label_[var]));
559  }
560  else{
561  LabelType l0 = inversePermutation_[var][0];
562  LabelType l1;
563  constValue += gm_[f](&l0);
564  const IndexType varR = var2VarR[var];
565  for(LabelType i=1 ; i<numLabels; ++i){
566  l0 = inversePermutation_[var][i-1];
567  l1 = inversePermutation_[var][i];
568  AddUnaryTerm((int) (varROffset[varR]+i-1), 0.0, gm_[f](&l1)-gm_[f](&l0));
569  //AddUnaryTerm((int) (variableOffset_[var]+i-1), 0.0, gm_[f](&l1)-gm_[f](&l0));
570  }
571  }
572  }
573  else if(gm_[f].numberOfVariables() == 2) {
574  const IndexType var0 = gm_[f].variableIndex(0);
575  const IndexType var1 = gm_[f].variableIndex(1);
576  const IndexType varR0 = var2VarR[var0];
577  const IndexType varR1 = var2VarR[var1];
578 
579  if(optimal_[var0]&&optimal_[var1]){
580  LabelType l[2] = { label_[var0], label_[var1]};
581  constValue += gm_[f](l);
582  }
583  else if(optimal_[var0]){
584  const LabelType numLabels = gm_[f].numberOfLabels(1);
585  LabelType l0[2] = { label_[var0], inversePermutation_[var1][0]};
586  LabelType l1[2] = { label_[var0], 0};
587  constValue += gm_[f](l0);
588  for(LabelType i=1 ; i<numLabels; ++i){
589  l0[1] = inversePermutation_[var1][i-1];
590  l1[1] = inversePermutation_[var1][i];
591  //AddUnaryTerm((int) (variableOffset_[var1]+i-1), 0.0, gm_[f](l1)-gm_[f](l0));
592  AddUnaryTerm((int) (varROffset[varR1]+i-1), 0.0, gm_[f](l1)-gm_[f](l0));
593  }
594  }
595  else if(optimal_[var1]){
596  const LabelType numLabels = gm_[f].numberOfLabels(0);
597  LabelType l0[2] = { inversePermutation_[var0][0], label_[var1]};
598  LabelType l1[2] = { 0, label_[var1]};
599  constValue += gm_[f](l0);
600  for(LabelType i=1 ; i<numLabels; ++i){
601  l0[0] = inversePermutation_[var0][i-1];
602  l1[0] = inversePermutation_[var0][i];
603  AddUnaryTerm((int) (varROffset[varR0]+i-1), 0.0, gm_[f](l1)-gm_[f](l0));
604  //AddUnaryTerm((int) (variableOffset_[var0]+i-1), 0.0, gm_[f](l1)-gm_[f](l0));
605  }
606  }
607  else{
608  {
609  const LabelType l[2]={inversePermutation_[var0][0],inversePermutation_[var1][0]};
610  constValue += gm_[f](l);
611  }
612  for(size_t i=1; i<gm_[f].numberOfLabels(0);++i){
613  const LabelType l1[2]={inversePermutation_[var0][i] ,inversePermutation_[var1][0]};
614  const LabelType l2[2]={inversePermutation_[var0][i-1],inversePermutation_[var1][0]};
615  AddUnaryTerm((int) (varROffset[varR0]+i-1), 0.0, gm_[f](l1)-gm_[f](l2));
616  //AddUnaryTerm((int) (variableOffset_[var0]+i-1), 0.0, gm_[f](l1)-gm_[f](l2));
617  }
618  for(size_t i=1; i<gm_[f].numberOfLabels(1);++i){
619  const LabelType l1[2]={inversePermutation_[var0][0],inversePermutation_[var1][i]};
620  const LabelType l2[2]={inversePermutation_[var0][0],inversePermutation_[var1][i-1]};
621  AddUnaryTerm((int) (varROffset[varR1]+i-1), 0.0, gm_[f](l1)-gm_[f](l2));
622  //AddUnaryTerm((int) (variableOffset_[var1]+i-1), 0.0, gm_[f](l1)-gm_[f](l2));
623  }
624  for(size_t i=1; i<gm_[f].numberOfLabels(0);++i){
625  for(size_t j=1; j<gm_[f].numberOfLabels(1);++j){
626  const int node0 = varROffset[varR0]+i-1;
627  const int node1 = varROffset[varR1]+j-1;
628  //const int node0 = variableOffset_[var0]+i-1;
629  //const int node1 = variableOffset_[var1]+j-1;
630  ValueType v = 0;
631  int l[2] = {(int)inversePermutation_[var0][i],(int)inversePermutation_[var1][j]}; v += gm_[f](l);
632  l[0]=inversePermutation_[var0][i-1]; v -= gm_[f](l);
633  l[1]=inversePermutation_[var1][j-1]; v += gm_[f](l);
634  l[0]=inversePermutation_[var0][i]; v -= gm_[f](l);
635  if(v!=0.0)
636  AddPairwiseTerm(node0, node1,0.0,0.0,0.0,v);
637  }
638  }
639  }
640  }
641  }
642  qpbo_->MergeParallelEdges();
643 
644  //Optimize
645 
646  qpbo_->Solve();
647  if(!param_.strongPersistency_)
648  qpbo_->ComputeWeakPersistencies();
649  // if(!parameter_.strongPersistency_) {
650  // qpbo_->ComputeWeakPersistencies();
651  // }
652 
653  bound_ = constValue + 0.5 * qpbo_->ComputeTwiceLowerBound();
654 
655  /*PROBEING*/
656  if(param_.probing_) {
657  std::cout << "Start Probing ..."<<std::endl;
658  // Initialize mapping for probe
659  int *mapping = new int[numBVar];
660  //int *mapping = new int[numNodes_];
661  for(int i = 0; i < static_cast<int>(numBVar); ++i) {
662  //for(int i = 0; i < static_cast<int>(numNodes_); ++i) {
663  qpbo_->SetLabel(i, qpbo_->GetLabel(i));
664  mapping[i] = i * 2;
665  }
666  typename kolmogorov::qpbo::QPBO<GraphValueType>::ProbeOptions options;
667  options.C = 1000000000;
668  if(!param_.strongPersistency_)
669  options.weak_persistencies = 1;
670  else
671  options.weak_persistencies = 0;
672  qpbo_->Probe(mapping, options);
673  if(!param_.strongPersistency_)
674  qpbo_->ComputeWeakPersistencies();
675 
676  for(IndexType var=0; var<gm_.numberOfVariables();++var){
677  if(optimal_[var]) continue;
678  IndexType varR = var2VarR[var];
679  //Lable==0
680  {
681  int l = qpbo_->GetLabel(mapping[varROffset[varR]]/2);
682  if(l>=0) l = (l + mapping[varROffset[varR]]) % 2;
683  //int l = qpbo_->GetLabel(mapping[variableOffset_[var]]/2);
684  //if(l>=0) l = (l + mapping[variableOffset_[var]]) % 2;
685  if(l==0) {partialOptimality_[var][inversePermutation_[var][0]]&=opengm::Tribool::True;}
686  else if(l==1){partialOptimality_[var][inversePermutation_[var][0]]&=opengm::Tribool::False;}
687  else {partialOptimality_[var][inversePermutation_[var][0]]&=opengm::Tribool::Maybe;}
688  }
689  //Label==max
690  {
691  int l = qpbo_->GetLabel(mapping[varROffset[varR]+gm_.numberOfLabels(var)-2]/2);
692  if(l>=0) l = (l + mapping[varROffset[varR]+gm_.numberOfLabels(var)-2]) % 2;
693  //int l = qpbo_->GetLabel(mapping[variableOffset_[var]+gm_.numberOfLabels(var)-2]/2);
694  //if(l>=0) l = (l + mapping[variableOffset_[var]+gm_.numberOfLabels(var)-2]) % 2;
695  if(l==0) {partialOptimality_[var][inversePermutation_[var][gm_.numberOfLabels(var)-1]]&=opengm::Tribool::False;}
696  else if(l==1){partialOptimality_[var][inversePermutation_[var][gm_.numberOfLabels(var)-1]]&=opengm::Tribool::True;}
697  else {partialOptimality_[var][inversePermutation_[var][gm_.numberOfLabels(var)-1]]&=opengm::Tribool::Maybe;}
698  }
699  //ELSE
700 
701  for(LabelType l=1; l+1<gm_.numberOfLabels(var);++l)
702  {
703  int l1 = qpbo_->GetLabel(mapping[varROffset[varR]+l-1]/2);
704  int l2 = qpbo_->GetLabel(mapping[varROffset[varR]+l]/2);
705  if(l1>=0) l1 = (l1 + mapping[varROffset[varR]+l-1]) % 2;
706  if(l2>=0) l2 = (l2 + mapping[varROffset[varR]+l]) % 2;
707  //int l1 = qpbo_->GetLabel(mapping[variableOffset_[var]+l-1]/2);
708  //int l2 = qpbo_->GetLabel(mapping[variableOffset_[var]+l]/2);
709  //if(l1>=0) l1 = (l1 + mapping[variableOffset_[var]+l-1]) % 2;
710  //if(l2>=0) l2 = (l2 + mapping[variableOffset_[var]+l]) % 2;
711 
712  OPENGM_ASSERT(!(l1==0 && l2==1));
713  if(l1==1 && l2==0) {partialOptimality_[var][inversePermutation_[var][l]]&=opengm::Tribool::True;}
714  else if(l2==1) {partialOptimality_[var][inversePermutation_[var][l]]&=opengm::Tribool::False;}
715  else if(l1==0) {partialOptimality_[var][inversePermutation_[var][l]]&=opengm::Tribool::False;}
716  //else {partialOptimality_[var][inversePermutation_[var][l]]&=opengm::Tribool::Maybe;}
717  }
718  }
719  delete mapping;
720  }
721  else{
722  for(IndexType var=0; var<gm_.numberOfVariables();++var){
723  if(optimal_[var]) continue;
724  IndexType varR = var2VarR[var];
725  //Lable==0
726  {
727  int l = qpbo_->GetLabel(varROffset[varR]);
728  //int l = qpbo_->GetLabel(variableOffset_[var]);
729  if(l==0){
730  OPENGM_ASSERT(!(partialOptimality_[var][inversePermutation_[var][0]]==opengm::Tribool::False));
731  partialOptimality_[var][inversePermutation_[var][0]]&=opengm::Tribool::True;
732  }
733  else if(l==1){
734  OPENGM_ASSERT(!(partialOptimality_[var][inversePermutation_[var][0]]==opengm::Tribool::True));
735  partialOptimality_[var][inversePermutation_[var][0]]&=opengm::Tribool::False;
736  }
737  // else {partialOptimality_[var][permutation_[var][0]]&=opengm::Tribool::Maybe;}
738  }
739  //Label==max
740  {
741  int l = qpbo_->GetLabel(varROffset[varR]+gm_.numberOfLabels(var)-2);
742  //int l = qpbo_->GetLabel(variableOffset_[var]+gm_.numberOfLabels(var)-2);
743  if(l==0){
744  OPENGM_ASSERT(!(partialOptimality_[var][inversePermutation_[var][gm_.numberOfLabels(var)-1]]==opengm::Tribool::True));
745  partialOptimality_[var][inversePermutation_[var][gm_.numberOfLabels(var)-1]]&=opengm::Tribool::False;
746  }
747  else if(l==1){
748  OPENGM_ASSERT(!(partialOptimality_[var][inversePermutation_[var][gm_.numberOfLabels(var)-1]]==opengm::Tribool::False));
749  partialOptimality_[var][inversePermutation_[var][gm_.numberOfLabels(var)-1]]&=opengm::Tribool::True;
750  }
751  //else {partialOptimality_[var][permutation_[var][gm_.numberOfLabels(var)-1]]&=opengm::Tribool::Maybe;}
752  }
753  //ELSE
754 
755  for(LabelType l=1; l+1<gm_.numberOfLabels(var);++l)
756  {
757  int l1 = qpbo_->GetLabel(varROffset[varR]+l-1);
758  int l2 = qpbo_->GetLabel(varROffset[varR]+l);
759  //int l1 = qpbo_->GetLabel(variableOffset_[var]+l-1);
760  //int l2 = qpbo_->GetLabel(variableOffset_[var]+l);
761  OPENGM_ASSERT(!(l1==0 && l2==1));
762  if(l1==1 && l2==0) {
763  OPENGM_ASSERT(!(partialOptimality_[var][inversePermutation_[var][l]]==opengm::Tribool::False));
764  partialOptimality_[var][inversePermutation_[var][l]]&=opengm::Tribool::True;
765  }
766  else if(l2==1){
767  OPENGM_ASSERT(!(partialOptimality_[var][inversePermutation_[var][l]]==opengm::Tribool::True));
768  partialOptimality_[var][inversePermutation_[var][l]]&=opengm::Tribool::False;
769  }
770  else if(l1==0){
771  OPENGM_ASSERT(!(partialOptimality_[var][inversePermutation_[var][l]]==opengm::Tribool::True));
772  partialOptimality_[var][inversePermutation_[var][l]]&=opengm::Tribool::False;
773  }
774  //else{
775  // partialOptimality_[var][permutation_[var][l]]&=opengm::Tribool::Maybe;
776  //}
777  }
778  }
779  }
780  for(IndexType var=0; var<gm_.numberOfVariables();++var){
781  if(optimal_[var]) continue;
782  LabelType countTRUE = 0;
783  LabelType countFALSE = 0;
784  for(LabelType l=1; l+1<gm_.numberOfLabels(var);++l){
785  if(partialOptimality_[var][l]==opengm::Tribool::True)
786  ++countTRUE;
787  if(partialOptimality_[var][l]==opengm::Tribool::False)
788  ++countFALSE;
789  }
790  if(countTRUE==1){
791  optimal_[var]=true;
792  for(LabelType l=1; l+1<gm_.numberOfLabels(var);++l){
793  if(partialOptimality_[var][l]==opengm::Tribool::True)
794  label_[var]=l;
795  else
796  partialOptimality_[var][l]=opengm::Tribool::False;
797  }
798  }
799  if(countFALSE+1==gm_.numberOfLabels(var)){
800  optimal_[var]=true;
801  for(LabelType l=1; l+1<gm_.numberOfLabels(var);++l){
802  if(partialOptimality_[var][l]==opengm::Tribool::Maybe){
803  label_[var]=l;
804  partialOptimality_[var][l]=opengm::Tribool::True;
805  }
806  }
807  }
808  }
809  return NORMAL;
810  }
811 
812  template<class GM, class ACC>
814  ()
815  {
816  EmptyVisitorType visitor;
817  return infer(visitor);
818  }
819 
820  template<class GM, class ACC>
821  template<class VisitorType>
823  (
824  VisitorType& visitor
825  )
826  {
827  visitor.addLog("optimality");
828  visitor.addLog("optimalityV");
829  if(param_.rounds_>1 && param_.strongPersistency_==false)
830  std::cout << "WARNING: Using weak persistency and several rounds may lead to wrong results if solution is not unique!"<<std::endl;
831 
832  LabelType maxNumberOfLabels = 0;
833  for(IndexType var=0; var<gm_.numberOfVariables();++var){
834  maxNumberOfLabels = std::max(maxNumberOfLabels, gm_.numberOfLabels(var));
835  }
836  bool isPotts = true;
837 
838  for(IndexType f=0; f< gm_.numberOfFactors(); ++f){
839  if(gm_[f].numberOfVariables()<2) continue;
840  isPotts &= gm_[f].isPotts();
841  if(!isPotts) break;
842  }
843 
844  visitor.begin(*this);
845 
846  if(param_.useKovtunsMethod_){
847  if(isPotts){
848  //std::cout << "Use Kovtuns method for potts"<<std::endl;
849  for(LabelType l=0; l<maxNumberOfLabels; ++l) {
850  testQuess(l);
851  double xoptimality = optimality();
852  double xoptimalityV = optimalityV();
853  visitor(*this);
854  visitor.log("optimality",xoptimality);
855  visitor.log("optimalityV",xoptimalityV);
856 
857  //std::cout << "partialOptimality : " << optimality() << std::endl;
858  }
859  }
860  else{
861  std::cout << "Use Kovtuns method for non-potts is not supported yet"<<std::endl;
862  /*
863  for(LabelType l=0; l<maxNumberOfLabels; ++l){
864  std::vector<LabelType> guess(gm_.numberOfVariables(),l);
865  for(IndexType var=0; var<gm_.numberOfVariables();++var){
866  if(l>=gm_.numberOfLabels(var)){
867  guess[var]=l-1;
868  }
869  }
870  testQuess(guess);
871  double xoptimality = optimality();
872  visitor(*this,this->value(),bound(),"partialOptimality",xoptimality);
873  //std::cout << "partialOptimality : " << optimality() << std::endl;
874  }
875  */
876  }
877  }
878 
879  if(param_.rounds_>0){
880  std::cout << "Start "<<param_.rounds_ << " of multilabel QPBO for different permutations" <<std::endl;
881  for(size_t rr=0; rr<param_.rounds_;++rr){
882  testPermutation(param_.permutationType_);
883  double xoptimality = optimality();
884  double xoptimalityV = optimalityV();
885  visitor(*this);
886  visitor.log("optimality",xoptimality);
887  visitor.log("optimalityV",xoptimalityV);
888 
889  //std::cout << "partialOptimality : " << optimality() << std::endl;
890  }
891  }
892 
893 #ifdef MQPBOHotFixOutPutPartialOPtimalityMap
894  hid_t fid = marray::hdf5::createFile("mqpbotmp.h5");
895  std::vector<double> optimal;
896  for(size_t i=0; i<optimal_.size();++i)
897  optimal.push_back((double)(optimal_[i]));
898  marray::hdf5::save(fid, "popt", optimal);
900 #endif
901 
902  visitor.end(*this);
903 
904  return NORMAL;
905  }
906 
907  template<class GM, class ACC>
908  double
910  () const
911  {
912  size_t labeled = 0;
913  size_t unlabeled = 0;
914  for(IndexType var=0; var<gm_.numberOfVariables();++var){
915  for(LabelType l=0; l<gm_.numberOfLabels(var);++l){
916  if(partialOptimality_[var][l]==opengm::Tribool::Maybe)
917  ++unlabeled;
918  else
919  ++labeled;
920  }
921  }
922  return labeled*1.0/(labeled+unlabeled);
923  }
924 
925  template<class GM, class ACC>
926  double
928  () const
929  {
930  size_t labeled = 0;
931  for(IndexType var=0; var<gm_.numberOfVariables();++var){
932  for(LabelType l=0; l<gm_.numberOfLabels(var);++l){
933  if(partialOptimality_[var][l]==opengm::Tribool::True){
934  ++labeled;
935  continue;
936  }
937  }
938  }
939  return labeled*1.0/gm_.numberOfVariables();
940  }
941 
942  template<class GM, class ACC>
943  typename GM::ValueType
945  () const
946  {
947  return bound_;
948  }
949 
950  template<class GM, class ACC>
951  typename GM::ValueType MQPBO<GM,ACC>::value() const {
952  std::vector<LabelType> states;
953  arg(states);
954  return gm_.evaluate(states);
955  }
956 
957  template<class GM, class ACC>
958  inline InferenceTermination
960  (
961  std::vector<LabelType>& x,
962  const size_t N
963  ) const
964  {
965  if(N==1){
966  x.resize(gm_.numberOfVariables(),0);
967 
968  for(IndexType var=0; var<gm_.numberOfVariables(); ++var){
969  size_t countTrue = 0;
970  size_t countFalse = 0;
971  size_t countMaybe = 0;
972  x[var]=0;
973  for(LabelType l=0; l<gm_.numberOfLabels(var);++l){
974  if(partialOptimality_[var][l]==opengm::Tribool::Maybe){
975  x[var] = l;
976  ++countMaybe;
977  }
978  if(partialOptimality_[var][l]==opengm::Tribool::True){
979  x[var] = l;
980  ++countTrue;
981  }
982  if(partialOptimality_[var][l]==opengm::Tribool::False){
983  ++countFalse;
984  }
985  }
986  OPENGM_ASSERT(countTrue+countFalse+countMaybe == gm_.numberOfLabels(var));
987  OPENGM_ASSERT(countTrue<2);
988  OPENGM_ASSERT(countFalse<gm_.numberOfLabels(var));
989  }
990  return NORMAL;
991  }
992  else {
993  return UNKNOWN;
994  }
995  }
996 } // namespace opengm
997 
998 #endif // #ifndef OPENGM_MQPBO_HXX
ACC AccumulationType
Definition: mqpbo.hxx:40
GM::ValueType value() const
return the solution (value)
Definition: mqpbo.hxx:951
void setStartingPoint(typename std::vector< typename GM::LabelType >::const_iterator)
set starting point
Definition: mqpbo.hxx:224
The OpenGM namespace.
Definition: config.hxx:43
std::string name() const
Definition: mqpbo.hxx:233
hid_t createFile(const std::string &, HDF5Version=DEFAULT_HDF5_VERSION)
Create an HDF5 file.
Parameter(const P &p)
Definition: mqpbo.hxx:69
Discrete space in which variables can have differently many labels.
void reset()
reset assumes that the structure of the graphical model has not changed
Definition: mqpbo.hxx:215
double optimalityV() const
Definition: mqpbo.hxx:928
A framework for message passing algorithms Cf. F. R. Kschischang, B. J. Frey and H...
InferenceTermination testPermutation(PermutationType permutationType)
Definition: mqpbo.hxx:411
[class mqpbo] Multilabel QPBO (MQPBO) Implements the algorithms described in i) Ivan Kovtun: Partial ...
Definition: mqpbo.hxx:37
#define OPENGM_ASSERT(expression)
Definition: opengm.hxx:77
bool partialOptimality(IndexType var, LabelType &l) const
Definition: mqpbo.hxx:103
ValueType GraphValueType
Definition: mqpbo.hxx:47
OPENGM_GM_TYPE_TYPEDEFS
Definition: mqpbo.hxx:43
MQPBO< _GM, ACC > type
Definition: mqpbo.hxx:51
LabelType numberOfLabels(const IndexType) const
GraphicalModelType::IndexType IndexType
Definition: inference.hxx:49
MQPBO(const GmType &, const Parameter &=Parameter())
[class mqpbo]
Definition: mqpbo.hxx:139
GM::ValueType bound() const
return a bound on the solution
Definition: mqpbo.hxx:945
GraphicalModelType::ValueType ValueType
Definition: inference.hxx:50
Inference algorithm interface.
Definition: inference.hxx:43
visitors::VerboseVisitor< MQPBO< GM, ACC > > VerboseVisitorType
Definition: mqpbo.hxx:44
MQPBO< _GM, _ACC > type
Definition: mqpbo.hxx:56
virtual InferenceTermination arg(std::vector< LabelType > &, const size_t=1) const
output a solution
Definition: mqpbo.hxx:960
void closeFile(const hid_t &)
Close an HDF5 file.
void save(const hid_t &, const std::string &, const Marray< T > &)
Save an Marray as an HDF5 dataset.
InferenceTermination testQuess(std::vector< LabelType > &guess)
Definition: mqpbo.hxx:312
const bool probing_
Definition: mqpbo.hxx:81
double optimality() const
Definition: mqpbo.hxx:910
Funcion that refers to a factor of another GraphicalModel in which some variables are fixed...
std::vector< LabelType > label_
Definition: mqpbo.hxx:79
GraphicalModelType::LabelType LabelType
Definition: inference.hxx:48
visitors::TimingVisitor< MQPBO< GM, ACC > > TimingVisitorType
Definition: mqpbo.hxx:46
const GmType & graphicalModel() const
Definition: mqpbo.hxx:240
visitors::EmptyVisitor< MQPBO< GM, ACC > > EmptyVisitorType
Definition: mqpbo.hxx:45
const std::vector< opengm::Tribool > & partialOptimality(IndexType var) const
Definition: mqpbo.hxx:102
GM GraphicalModelType
Definition: mqpbo.hxx:42
PermutationType permutationType_
Definition: mqpbo.hxx:84
OpenGM runtime error.
Definition: opengm.hxx:100
InferenceTermination infer()
Definition: mqpbo.hxx:814
InferenceTermination
Definition: inference.hxx:24