2 #ifndef OPENGM_FUSION_BASED_INF_HXX 3 #define OPENGM_FUSION_BASED_INF_HXX 61 namespace proposal_gen{
66 template<
class GM,
class ACC>
83 for(
size_t i=0; i<gm.numberOfVariables();++i){
84 if(gm.numberOfLabels(i)>maxLabel_){
85 maxLabel_ = gm.numberOfLabels(i);
96 void getProposal(
const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
98 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
100 if (gm_.numberOfLabels(vi) > currentAlpha_ )
102 proposal[vi] = currentAlpha_;
106 proposal[vi] = current[vi];
110 if(currentAlpha_>=maxLabel_){
124 template<
class GM,
class ACC>
134 const std::string startDirection = std::string(
"up")
136 : startDirection_(startDirection)
145 argBuffer_(gm.numberOfVariables(),0),
146 direction_(gm.numberOfVariables())
152 if(param_.startDirection_== std::string(
"random")){
153 for(
size_t i=0; i<gm_.numberOfVariables();++i){
154 direction_[i]=rand()%2 == 0 ? -1:1;
157 else if(param_.startDirection_== std::string(
"up")){
158 for(
size_t i=0; i<gm_.numberOfVariables();++i){
162 else if(param_.startDirection_== std::string(
"down")){
163 for(
size_t i=0; i<gm_.numberOfVariables();++i){
174 void getProposal(
const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
176 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
178 const size_t numL = gm_.numberOfLabels(vi);
183 std::copy(current.begin(), current.end(), argBuffer_.begin());
193 proposal[vi] = cl +1;
197 proposal[vi] = cl - 1 ;
202 proposal[vi] = cl - 1;
206 proposal[vi] = cl + 1 ;
214 std::vector<LabelType> argBuffer_;
215 std::vector<LabelType> direction_;
216 std::vector<LabelType> jumpSize_;
220 template<
class GM,
class ACC>
232 static size_t getMaxLabel(
const GM &gm){
234 for(
size_t i=0; i<gm.numberOfVariables();++i){
235 if(gm.numberOfLabels(i)>maxLabel ){
236 maxLabel = gm.numberOfLabels(i);
245 maxLabel_(getMaxLabel(gm)),
246 abShape_(2, maxLabel_),
247 abWalker_(abShape_.begin(), 2)
258 void getProposal(
const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
261 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
262 proposal[vi] = current[vi];
265 if(
currentAlpha()+1 == maxLabel_ && currentBeta()+1== maxLabel_){
268 while (abWalker_.coordinateTuple()[0] == abWalker_.coordinateTuple()[1])
273 const LabelType alpha = abWalker_.coordinateTuple()[0];
274 const LabelType beta = abWalker_.coordinateTuple()[1];
276 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi)
278 if ( current[vi] == alpha && gm_.numberOfLabels(vi) > beta )
282 else if ( current[vi] == beta && gm_.numberOfLabels(vi) > alpha )
284 proposal[vi] = alpha;
288 proposal[vi] = current[vi];
296 return abWalker_.coordinateTuple()[0];
300 return abWalker_.coordinateTuple()[1];
307 std::vector<LabelType> abShape_;
308 ShapeWalker<typename std::vector<LabelType>::const_iterator> abWalker_;
312 template<
class GM,
class ACC>
334 void getProposal(
const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
336 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
338 opengm::RandomUniform<size_t> randomLabel(0, gm_.numberOfLabels(vi),currentStep_+vi);
339 proposal[vi] = randomLabel();
350 template<
class GM,
class ACC>
372 void getProposal(
const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
374 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
376 opengm::RandomUniform<size_t> randomLabel(0,3,currentStep_+vi);
377 proposal[vi] = std::min(randomLabel(),
size_t(1));
389 template<
class GM,
class ACC>
411 void getProposal(
const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
413 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
415 opengm::RandomUniform<size_t> randomLabel(0, gm_.numberOfLabels(vi),currentStep_+vi);
416 proposal[vi] = randomLabel();
431 template<
class GM,
class ACC>
450 randomGens_(gm.numberOfVariables())
452 std::vector<bool> hasUnary(gm.numberOfVariables(),
false);
454 for(IndexType fi=0; fi<gm_.numberOfFactors(); ++fi){
456 if(gm_[fi].numberOfVariables()==1){
458 const IndexType vi = gm_[fi].variableIndex(0);
459 const LabelType numLabels = gm_.numberOfLabels(vi);
460 std::vector<ValueType> weights(numLabels);
461 gm_[fi].copyValues(&weights[0]);
462 const ValueType minValue = *std::min_element(weights.begin(),weights.end());
464 weights[l]-= minValue;
468 weights[l]=std::exp(-1.0*param_.temp_*weights[l]);
470 randomGens_[vi]=GenType(weights.begin(),weights.end());
474 for(IndexType vi=0 ;vi<gm_.numberOfVariables(); ++vi){
476 const LabelType numLabels = gm_.numberOfLabels(vi);
477 std::vector<ValueType> weights(numLabels,1.0);
478 randomGens_[vi]=GenType(weights.begin(),weights.end());
492 void getProposal(
const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
494 for (IndexType vi = 0; vi < gm_.numberOfVariables(); ++vi){
495 proposal[vi]=randomGens_[vi]();
504 typedef RandomDiscreteWeighted<LabelType,ValueType> GenType;
506 std::vector < RandomDiscreteWeighted<LabelType,ValueType> > randomGens_;
510 template<
class GM,
class ACC>
529 const double pi = 3.1416;
530 const double oneOverSqrt2PiSigmaSquared = 1.0 / (std::sqrt(2.0 * pi) * param_.sigma_);
531 const double oneOverTwoSigmaSquared = 1.0 / (2.0* param_.sigma_ * param_.sigma_);
532 const size_t kradius = std::ceil(3*param_.sigma_);
533 kernel_.resize(2*kradius + 1);
535 for(
double i = 0; i <= kradius ; ++i) {
536 double value = oneOverSqrt2PiSigmaSquared * std::exp(-(i*i)*oneOverTwoSigmaSquared);
537 kernel_[kradius+i] = value;
538 kernel_[kradius-i] = value;
541 for(
double i = 0; i <= kradius ; ++i) {
542 kernel_[kradius+i] /= sum;
543 kernel_[kradius-i] /= sum;
546 size_t N = gm_.numberOfFactors(0);
547 for(
size_t i=1; i<gm_.numberOfVariables(); ++i){
548 if(N==gm_.numberOfFactors(i)){
554 width_ = gm_.numberOfVariables()/height_;
559 bluredLabel_.resize(gm_.numberOfVariables(),0);
560 std::vector<double> temp(gm_.numberOfVariables(),0.0);
561 std::vector<LabelType> localLabel(gm_.numberOfVariables(),0);
562 for (
size_t i=0; i<gm_.numberOfVariables(); ++i){
563 for(
typename GM::ConstFactorIterator it=gm_.factorsOfVariableBegin(i); it!=gm_.factorsOfVariableEnd(i);++it){
564 if(gm_[*it].numberOfVariables() == 1){
567 for(
LabelType l=0; l<gm_.numberOfLabels(i); ++l){
568 if(ACC::bop(gm_[*it](&l),v)){
577 const int radius = (kernel_.size()-1)/2;
578 const int h = height_-1;
579 const int w = width_ -1;
580 for (
int i = 0; i < height_; ++i) {
581 for (
int j = 0; j < width_; ++j) {
583 for (
int k = 0; k < 2*radius+1; ++k) {
584 int i2 = std::min( h,std::max(0,i-radius+k));
585 val += kernel_[k] * localLabel[ind(i2,j)];
587 temp[ind(i,j)] = val;
590 for (
int i = 0; i < height_; ++i) {
591 for (
int j = 0; j < width_; ++j) {
593 for (
int k = 0; k < 2*radius+1; ++k) {
594 int j2 = std::min(w,std::max(0,i-radius+k));
595 val += kernel_[k] * temp[ind(i, j2)];
597 bluredLabel_[ind(i,j)] = std::min(
double(gm_.numberOfLabels(ind(i,j))),(std::max(0.0,val)));
605 void getProposal(
const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
607 if ((currentStep_ % 2) == 0){
608 for (
int i = 0; i < height_; ++i) {
609 for (
int j = 0; j < width_; ++j) {
610 const size_t var = ind(i,j);
611 opengm::RandomUniform<size_t> randomLabel(0, gm_.numberOfLabels(var),currentStep_+i+j);
612 proposal[var] = (
LabelType)(randomLabel());
616 proposal.resize(gm_.numberOfVariables(),0.0);
617 opengm::RandomUniform<double> randomLabel(-param_.sigma_*1.5, param_.sigma_*1.5,currentStep_);
618 for(
size_t i=0; i<proposal.size();++i){
619 proposal[i] = std::min(gm_.numberOfLabels(i), (
LabelType)(std::max(0.0,bluredLabel_[i] + randomLabel())));
625 size_t ind(
int i,
int j){
return i+j*height_;}
630 std::vector<double> kernel_;
631 std::vector<double> bluredLabel_;
636 template<
class GM,
class ACC>
645 Parameter(
double sigma = 20.0,
bool useLocalMargs =
false,
double temp=1) : sigma_(sigma), useLocalMargs_(useLocalMargs), temp_(temp)
658 const double pi = 3.1416;
659 const double oneOverSqrt2PiSigmaSquared = 1.0 / (std::sqrt(2.0 * pi) * param_.sigma_);
660 const double oneOverTwoSigmaSquared = 1.0 / (2.0* param_.sigma_ * param_.sigma_);
661 const size_t kradius = std::ceil(3*param_.sigma_);
662 std::vector<double> kernel;
663 kernel.resize(2*kradius + 1);
665 for(
double i = 0; i <= kradius ; ++i) {
666 double value = oneOverSqrt2PiSigmaSquared * std::exp(-(i*i)*oneOverTwoSigmaSquared);
667 kernel[kradius+i] = value;
668 kernel[kradius-i] = value;
671 for(
double i = 0; i <= kradius ; ++i) {
672 kernel[kradius+i] /= sum;
673 kernel[kradius-i] /= sum;
676 size_t N = gm_.numberOfFactors(0);
677 for(
size_t i=1; i<gm_.numberOfVariables(); ++i){
678 if(N==gm_.numberOfFactors(i)){
684 width_ = gm_.numberOfVariables()/height_;
689 size_t numLabels =gm_.numberOfLabels(0);
690 std::vector<double> temp(gm_.numberOfVariables(),0.0);
691 std::vector<double> bluredEnergy(gm_.numberOfVariables(),1000000000000.0);
692 std::vector<double> bluredOpt(gm_.numberOfVariables(),0);
693 std::vector<double> energy(gm_.numberOfVariables(),0.0);
694 std::vector<IndexType> unaries(gm_.numberOfVariables());
695 std::vector<std::vector<double> > margs;;
696 if(param_.useLocalMargs_)
697 margs.resize(gm_.numberOfVariables(),std::vector<double>(numLabels));
699 for (
size_t i=0; i<gm_.numberOfVariables(); ++i){
701 for(
typename GM::ConstFactorIterator it=gm_.factorsOfVariableBegin(i); it!=gm_.factorsOfVariableEnd(i);++it){
702 if(gm_[*it].numberOfVariables() == 1){
705 if(gm_[*it].numberOfLabels(0) != numLabels)
706 throw RuntimeError(
"number of labels are not equal for all variables");
715 for(
size_t l=0; l<numLabels; ++l){
716 for (
int i = 0; i < height_; ++i) {
717 for (
int j = 0; j < width_; ++j) {
718 const size_t var = ind(i, j);
719 energy[var] =gm_[unaries[ind(i, j)]](&l);
723 const int radius = (kernel.size()-1)/2;
724 const int h = height_-1;
725 const int w = width_ -1;
726 for (
int i = 0; i < height_; ++i) {
727 for (
int j = 0; j < width_; ++j) {
729 const size_t var = ind(i, j);
730 for (
int k = 0; k < 2*radius+1; ++k) {
731 int i2 = std::min( h,std::max(0,i-radius+k));
732 val += kernel[k] * energy[ind(i2,j)];
737 for (
int i = 0; i < height_; ++i) {
738 for (
int j = 0; j < width_; ++j) {
740 const size_t var = ind(i, j);
741 for (
int k = 0; k < 2*radius+1; ++k) {
742 int j2 = std::min(w,std::max(0,i-radius+k));
743 val += kernel[k] * temp[ind(i, j2)];
745 if(param_.useLocalMargs_){
748 if(val < bluredEnergy[var]){
749 bluredEnergy[var] = val;
756 if(param_.useLocalMargs_){
757 localMargGens_.reserve(bluredOpt.size());
758 for(
size_t var=0 ; var<bluredOpt.size(); ++var){
759 const ValueType minValue = *std::min_element(margs[var].begin(),margs[var].end());
761 margs[var][l]-= minValue;
764 margs[var][l]=std::exp(-1.0*param_.temp_*margs[var][l]);
766 localMargGens_[var]=opengm::RandomDiscreteWeighted<LabelType,ValueType>(margs[var].begin(),margs[var].end(),var);
769 uniformGens_.reserve(bluredOpt.size());
770 for(
size_t var=0 ; var<bluredOpt.size(); ++var){
771 LabelType minVal = (
LabelType)(std::max((
double)(0) , bluredOpt[var]-param_.sigma_*1.5));
772 LabelType maxVal = (
LabelType)(std::min((
double)(numLabels) , bluredOpt[var]+param_.sigma_*1.5));
773 uniformGens_[var] = opengm::RandomUniform<LabelType>(minVal, maxVal+1, var);
781 void getProposal(
const std::vector<LabelType> &current , std::vector<LabelType> &proposal)
783 proposal.resize(gm_.numberOfVariables());
784 if(param_.useLocalMargs_){
785 for(
size_t i=0; i<proposal.size();++i){
786 proposal[i] = localMargGens_[i]();
790 opengm::RandomUniform<LabelType> randomLabel(0, gm_.numberOfLabels(0),currentStep_);
791 if ((currentStep_ % 2) == 0){
792 for(
size_t i=0; i<proposal.size();++i){
793 proposal[i] = randomLabel();
796 for(
size_t i=0; i<proposal.size();++i){
797 proposal[i] = uniformGens_[i]();
804 size_t ind(
int i,
int j){
return i+j*height_;}
812 std::vector<opengm::RandomDiscreteWeighted<LabelType,ValueType> > localMargGens_;
813 std::vector<opengm::RandomUniform<LabelType> > uniformGens_;
817 template<
class GM,
class ACC>
846 alphaExpansionGen_->reset();
848 alphaBetaSwapGen_->reset();
849 else if(param_.gen_ == UpDown)
851 else if(param_.gen_ == Random)
853 else if(param_.gen_ == RandomLF)
854 randomLFGen_->reset();
855 else if(param_.gen_ == NonUniformRandom)
856 nonUniformRandomGen_->reset();
857 else if(param_.gen_ == Blur)
859 else if(param_.gen_ == EnergyBlur)
860 energyBlurGen_->reset();
867 return alphaExpansionGen_->defaultNumStopIt();
869 return alphaBetaSwapGen_->defaultNumStopIt();
870 else if(param_.gen_ == UpDown)
871 return upDownGen_->defaultNumStopIt();
872 else if(param_.gen_ == Random)
873 return randomGen_->defaultNumStopIt();
874 else if(param_.gen_ == RandomLF)
875 return randomLFGen_->defaultNumStopIt();
876 else if(param_.gen_ == NonUniformRandom)
877 return nonUniformRandomGen_->defaultNumStopIt();
878 else if(param_.gen_ == Blur)
879 return blurGen_->defaultNumStopIt();
880 else if(param_.gen_ == EnergyBlur)
881 return energyBlurGen_->defaultNumStopIt();
886 void getProposal(
const std::vector<LabelType> &current , std::vector<LabelType> &proposal){
888 return alphaExpansionGen_->getProposal(current, proposal);
890 return alphaBetaSwapGen_->getProposal(current, proposal);
891 else if(param_.gen_ == UpDown)
892 return upDownGen_->getProposal(current, proposal);
893 else if(param_.gen_ == Random)
894 return randomGen_->getProposal(current, proposal);
895 else if(param_.gen_ == RandomLF)
896 return randomLFGen_->getProposal(current, proposal);
897 else if(param_.gen_ == NonUniformRandom)
898 return nonUniformRandomGen_->getProposal(current, proposal);
899 else if(param_.gen_ == Blur)
900 return blurGen_->getProposal(current, proposal);
901 else if(param_.gen_ == EnergyBlur)
902 return energyBlurGen_->getProposal(current, proposal);
926 template<
class GM,
class PROPOSAL_GEN>
932 typedef AccumulationType
ACC;
954 template<
class _GM,
class _ACC>
965 const ProposalParameter & proposalParam = ProposalParameter(),
966 const FusionParameter & fusionParam = FusionParameter(),
967 const size_t numIt=1000,
968 const size_t numStopIt = 0
970 : proposalParam_(proposalParam),
971 fusionParam_(fusionParam),
973 numStopIt_(numStopIt)
980 : proposalParam_(p.proposalParam_),
981 fusionParam_(p.fusionParam_),
983 numStopIt_(p.numStopIt_){
998 std::string name()
const;
999 const GraphicalModelType &graphicalModel()
const;
1002 template<
class VisitorType>
1004 void setStartingPoint(
typename std::vector<LabelType>::const_iterator);
1006 virtual ValueType
value()
const {
return bestValue_;}
1010 const GraphicalModelType &gm_;
1013 FusionMoverType * fusionMover_;
1015 PROPOSAL_GEN proposalGen_;
1016 ValueType bestValue_;
1017 std::vector<LabelType> bestArg_;
1024 template<
class GM,
class PROPOSAL_GEN>
1027 const GraphicalModelType &gm,
1035 bestArg_(gm_.numberOfVariables(), 0),
1036 maxOrder_(gm.factorOrder())
1038 ACC::neutral(bestValue_);
1039 fusionMover_ =
new FusionMoverType(gm_,parameter.
fusionParam_);
1041 std::vector<LabelType> conf(gm_.numberOfVariables(),0);
1042 for (
size_t i=0; i<gm_.numberOfVariables(); ++i){
1043 for(
typename GM::ConstFactorIterator it=gm_.factorsOfVariableBegin(i); it!=gm_.factorsOfVariableEnd(i);++it){
1044 if(gm_[*it].numberOfVariables() == 1){
1047 for(
LabelType l=0; l<gm_.numberOfLabels(i); ++l){
1048 if(ACC::bop(gm_[*it](&l),v)){
1057 setStartingPoint(conf.begin());
1059 template<
class GM,
class PROPOSAL_GEN>
1062 delete fusionMover_;
1066 template<
class GM,
class PROPOSAL_GEN>
1073 template<
class GM,
class PROPOSAL_GEN>
1080 std::copy(begin, begin + gm_.numberOfVariables(), bestArg_.begin());
1081 bestValue_ = gm_.evaluate(bestArg_.begin());
1084 template<
class GM,
class PROPOSAL_GEN>
1088 return "FusionBasedInf";
1091 template<
class GM,
class PROPOSAL_GEN>
1098 template<
class GM,
class PROPOSAL_GEN>
1107 template<
class GM,
class PROPOSAL_GEN>
1108 template<
class VisitorType>
1111 VisitorType &visitor
1115 bestValue_ = gm_.evaluate(bestArg_.begin());
1117 visitor.begin(*
this);
1120 if(param_.numStopIt_ == 0){
1121 param_.numStopIt_ = proposalGen_.defaultNumStopIt();
1124 std::vector<LabelType> proposedState(gm_.numberOfVariables());
1125 std::vector<LabelType> fusedState(gm_.numberOfVariables());
1127 size_t countRoundsWithNoImprovement = 0;
1129 for(
size_t iteration=0; iteration<param_.numIt_; ++iteration){
1131 const ValueType valueBeforeRound = bestValue_;
1133 proposalGen_.getProposal(bestArg_,proposedState);
1136 ValueType proposalValue = gm_.evaluate(proposedState);
1139 const bool anyVar = fusionMover_->fuse(bestArg_,proposedState, fusedState,
1140 bestValue_, proposalValue, bestValue_);
1144 if( !ACC::bop(bestValue_, valueBeforeRound)){
1145 ++countRoundsWithNoImprovement;
1149 countRoundsWithNoImprovement = 0;
1150 bestArg_ = fusedState;
1152 if(visitor(*
this)!=0){
1157 ++countRoundsWithNoImprovement;
1160 if(countRoundsWithNoImprovement==param_.numStopIt_ && param_.numStopIt_ !=0 )
1170 template<
class GM,
class PROPOSAL_GEN>
1174 std::vector<LabelType> &x,
1180 x.resize(gm_.numberOfVariables());
1181 for (
size_t j = 0; j < x.size(); ++j)
1195 #endif // #ifndef OPENGM_FUSION_BASED_INF_HXX
RandomGen(const GM &gm, const Parameter &param)
AlphaBetaSwapGen(const GM &gm, const Parameter &param)
EnergyBlurGen(const GM &gm, const Parameter &param)
size_t defaultNumStopIt()
FusionParameter fusionParam_
opengm::visitors::VerboseVisitor< FusionBasedInf< GM, PROPOSAL_GEN > > VerboseVisitorType
Parameter(double sigma=20.0)
Parameter(const ProposalParameter &proposalParam=ProposalParameter(), const FusionParameter &fusionParam=FusionParameter(), const size_t numIt=1000, const size_t numStopIt=0)
void infer(const typename INF::GraphicalModelType &gm, const typename INF::Parameter &param, std::vector< typename INF::LabelType > &conf)
virtual InferenceTermination arg(std::vector< LabelType > &, const size_t=1) const
UpDownGen(const GM &gm, const Parameter &param)
FusionBasedInf< _GM, _P > type
#define OPENGM_ASSERT(expression)
ProposalGen::AccumulationType AccumulationType
InferenceTermination arg(std::vector< LabelType > &, const size_t=1) const
output a solution
size_t defaultNumStopIt()
AlphaExpansionGen(const GM &gm, const Parameter &param)
opengm::visitors::EmptyVisitor< FusionBasedInf< GM, PROPOSAL_GEN > > EmptyVisitorType
RandomLFGen(const GM &gm, const Parameter &param)
std::string startDirection_
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
Alpha-Beta-Swap Algorithm.
PROPOSAL_GEN::template RebindGmAndAcc< _GM, _ACC >::type _P
ProposalParameter proposalParam_
FusionMoverType::Parameter FusionParameter
Inference algorithm interface.
void setStartingPoint(typename std::vector< LabelType >::const_iterator)
size_t defaultNumStopIt()
opengm::visitors::TimingVisitor< FusionBasedInf< GM, PROPOSAL_GEN > > TimingVisitorType
DynamincGen(const GM &gm, const Parameter &param)
size_t defaultNumStopIt()
Alpha-Expansion Algorithm.
FusionBasedInf< _GM, _P > type
HlFusionMover< GraphicalModelType, AccumulationType > FusionMover
Random2Gen(const GM &gm, const Parameter &param)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
const GraphicalModelType & graphicalModel() const
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
virtual ValueType value() const
return the solution (value)
Parameter(double sigma=20.0, bool useLocalMargs=false, double temp=1)
size_t defaultNumStopIt()
InferenceTermination infer()
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
BlurGen(const GM &gm, const Parameter &param)
size_t defaultNumStopIt()
PROPOSAL_GEN::template RebindGm< _GM >::type _P
HlFusionMover< GraphicalModelType, AccumulationType > FusionMoverType
Parameter(const std::string startDirection=std::string("up"))
GraphicalModelType::LabelType LabelType
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
A generalization of ICM B. Andres, J. H. Kappes, U. Koethe and Hamprecht F. A., The Lazy Flipper: MA...
size_t defaultNumStopIt()
FusionBasedInf(const GraphicalModelType &, const Parameter &=Parameter())
ProposalGen::Parameter ProposalParameter
size_t defaultNumStopIt()
void getProposal(const std::vector< LabelType > &current, std::vector< LabelType > &proposal)
InferenceTermination infer()
start the algorithm
size_t defaultNumStopIt()