Commits

Ruben Martinez-Cantin committed 0fbc49b

More robust default parameters. Cleaned posterior models.

Files changed (19)

   ./src/bayesoptdisc.cpp
   ./src/bayesoptbase.cpp
   ./src/posteriormodel.cpp
-  ./src/empiricalbayes.cpp
-  ./src/mcmc.cpp
+  ./src/posterior_fixed.cpp
+  ./src/posterior_empirical.cpp
+  ./src/posterior_mcmc.cpp
   ./src/inneroptimization.cpp
   ./src/dataset.cpp
   ./src/nonparametricprocess.cpp
 # with spaces.
 
 INPUT                  = @DOXYFILE_SOURCE_DIRS@
+USE_MDFILE_AS_MAINPAGE = README.md
 
 # This tag can be used to specify the character encoding of the source files
 # that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is
-BayesOpt: A Bayesian optimization toolbox            {#mainpage}
+BayesOpt: A Bayesian optimization toolbox
 =========================================
 
 BayesOpt is a free, efficient implementation of the Bayesian

include/empiricalbayes.hpp

-/**  \file empiricalbayes.hpp \brief Empirical Bayesian estimator */
-/*
--------------------------------------------------------------------------
-   This file is part of BayesOpt, an efficient C++ library for 
-   Bayesian optimization.
-
-   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
- 
-   BayesOpt is free software: you can redistribute it and/or modify it 
-   under the terms of the GNU General Public License as published by
-   the Free Software Foundation, either version 3 of the License, or
-   (at your option) any later version.
-
-   BayesOpt is distributed in the hope that it will be useful, but 
-   WITHOUT ANY WARRANTY; without even the implied warranty of
-   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-   GNU General Public License for more details.
-
-   You should have received a copy of the GNU General Public License
-   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
-------------------------------------------------------------------------
-*/
-
-
-#ifndef  _EMPIRICALBAYES_HPP_
-#define  _EMPIRICALBAYES_HPP_
-
-#include <boost/scoped_ptr.hpp>
-#include "inneroptimization.hpp"
-#include "criteria_functors.hpp"
-#include "posteriormodel.hpp"
-
-namespace bayesopt {
-
-  /** \addtogroup BayesOpt
-   *  \brief Main module for Bayesian optimization
-   */
-  /*@{*/
-
- /**
-   * \brief Bayesian optimization using different non-parametric 
-   * processes as distributions over surrogate functions. 
-   */
-  class EmpiricalBayes: public PosteriorModel
-  {
-  public:
-    /** 
-     * Constructor
-     * @param params set of parameters (see parameters.h)
-     */
-    EmpiricalBayes(size_t dim, bopt_params params, randEngine& eng);
-
-    /** 
-     * Default destructor
-     */
-    virtual ~EmpiricalBayes();
-
-    void updateHyperParameters();
-    void fitSurrogateModel();
-    void updateSurrogateModel();
-    double evaluateCriteria(const vectord& query);
-
-    bool criteriaRequiresComparison();
-    void setFirstCriterium();
-    bool setNextCriterium(const vectord& prevResult);
-    std::string getBestCriteria(vectord& best);
-
-    ProbabilityDistribution* getPrediction(const vectord& query);
-
-  private:
-
-    EmpiricalBayes();
-
-    void setSurrogateModel(randEngine& eng);    
-    void setCriteria(randEngine& eng);
-
-  private:  // Members
-    boost::scoped_ptr<NonParametricProcess> mGP; ///< Pointer to surrogate model
-    boost::scoped_ptr<Criteria> mCrit;                   ///< Metacriteria model
-
-    boost::scoped_ptr<NLOPT_Optimization> kOptimizer;
-  };
-
-  /**@}*/
-
-  inline void EmpiricalBayes::fitSurrogateModel()
-  { mGP->fitSurrogateModel(); };
-
-  inline void EmpiricalBayes::updateSurrogateModel()
-  { mGP->updateSurrogateModel(); };
-
-  inline double EmpiricalBayes::evaluateCriteria(const vectord& query)
-  { return (*mCrit)(query); };
-
-  inline bool EmpiricalBayes::criteriaRequiresComparison()
-  {return mCrit->requireComparison(); };
-    
-  inline void EmpiricalBayes::setFirstCriterium()
-  { mCrit->initialCriteria(); };
-
-  inline bool EmpiricalBayes::setNextCriterium(const vectord& prevResult)
-  { return false; };
-
-  inline std::string EmpiricalBayes::getBestCriteria(vectord& best)
-  { return mCrit->name(); };
-
-  inline  ProbabilityDistribution* EmpiricalBayes::getPrediction(const vectord& query)
-  { return mGP->prediction(query); };
-
-
-} //namespace bayesopt
-
-
-#endif

include/kernels/kernel_atomic.hpp

     {
       if(theta.size() != n_params)
 	{
-	  FILE_LOG(logERROR) << "Wrong number of kernel hyperparameters"; 
 	  throw std::invalid_argument("Wrong number of kernel hyperparameters");
 	}
      params = theta; //TODO: Preallocate the space in advance to make this more efficient.

include/mcmc.hpp

-/**  \file mcmcm.hpp \brief Markov Chain Monte Carlo algorithms */
-/*
--------------------------------------------------------------------------
-   This file is part of BayesOpt, an efficient C++ library for 
-   Bayesian optimization.
-
-   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
- 
-   BayesOpt is free software: you can redistribute it and/or modify it 
-   under the terms of the GNU General Public License as published by
-   the Free Software Foundation, either version 3 of the License, or
-   (at your option) any later version.
-
-   BayesOpt is distributed in the hope that it will be useful, but 
-   WITHOUT ANY WARRANTY; without even the implied warranty of
-   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-   GNU General Public License for more details.
-
-   You should have received a copy of the GNU General Public License
-   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
-------------------------------------------------------------------------
-*/
-
-
-#ifndef  _MCMC_HPP_
-#define  _MCMC_HPP_
-
-#include <boost/ptr_container/ptr_vector.hpp>
-#include "randgen.hpp"
-#include "optimizable.hpp"
-#include "criteria_functors.hpp"
-#include "posteriormodel.hpp"
-
-namespace bayesopt {
-
-  // We plan to add more in the future 
-  typedef enum {
-    SLICE_MCMC           ///< Slice sampling
-  } McmcAlgorithms;
-
-
-  /**
-   * \brief Markov Chain Monte Carlo sampler
-   *
-   * It generates a set of particles that are distributed according to
-   * an arbitrary pdf. IMPORTANT: As it should be a replacement for
-   * the optimization (ML or MAP) estimation, it also assumes a
-   * NEGATIVE LOG PDF.
-   *
-   * @see NLOPT_Optimization
-   */
-  class MCMCSampler
-  {
-  public:
-    /** 
-     * \brief Constructor (Note: default constructor is private)
-     * 
-     * @param rbo point to RBOptimizable type of object with the PDF
-     *            to sample from. IMPORTANT: We assume that the 
-     *            evaluation of rbo is the NEGATIVE LOG PDF.  
-     * @param dim number of input dimensions
-     * @param eng random number generation engine (boost)
-     */
-    MCMCSampler(RBOptimizable* rbo, size_t dim, randEngine& eng);
-    virtual ~MCMCSampler();
-
-    /** Sets the sampling algorithm (slice, MH, etc.) */
-    void setAlgorithm(McmcAlgorithms newAlg);
-
-    /** Sets the number of particles that are stored */
-    void setNParticles(size_t nParticles);
-
-    /**Usually, the initial samples of any MCMC method are biased and
-     *	they are discarded. This phase is called the burnout. This
-     *	method sets the number of particles to be discarded 
-     */
-    void setNBurnOut(size_t nParticles);
-
-    /** Compute the set of particles according to the target PDF.
-     * @param Xnext input: initial point of the Markov Chain, 
-     *              output: last point of the Markov Chain
-     */
-    void run(vectord &Xnext);
-
-    vectord getParticle(size_t i);
-
-    void printParticles();
-
-  private:
-    void randomJump(vectord &x);
-    void burnOut(vectord &x);
-    void sliceSample(vectord &x);
-
-    boost::scoped_ptr<RBOptimizableWrapper> obj;
-
-    McmcAlgorithms mAlg;
-    size_t mDims;
-    size_t nBurnOut;
-    size_t nSamples;
-    bool mStepOut;
-
-    vectord mSigma;
-    vecOfvec mParticles;
-    randEngine mtRandom;
-
-  private: //Forbidden
-    MCMCSampler();
-    MCMCSampler(MCMCSampler& copy);
-  };
-
-  inline void MCMCSampler::setAlgorithm(McmcAlgorithms newAlg)
-  { mAlg = newAlg; };
-
-  inline void MCMCSampler::setNParticles(size_t nParticles)
-  { nSamples = nParticles; };
-
-  inline void MCMCSampler::setNBurnOut(size_t nParticles)
-  { nBurnOut = nParticles; };
-
-  inline vectord MCMCSampler::getParticle(size_t i)
-  { return mParticles[i]; };
-
-  inline void MCMCSampler::printParticles()
-  {
-    for(size_t i=0; i<mParticles.size(); ++i)
-      { 
-	FILE_LOG(logDEBUG) << i << "->" << mParticles[i] 
-			   << " | Log-lik " << -obj->evaluate(mParticles[i]);
-      }
-  }
-
-
-
-  /**
-   * \brief Posterior model of nonparametric processes/criteria based
-   * on MCMC samples.
-   *
-   * For computational reasons we store a copy of each conditional
-   * models with the corresponding particle generated by MCMC. That is
-   * to avoid costly operations like matrix inversions for every
-   * kernel parameter in a GP prediction. Thus, we assume that the
-   * number of particles is not very large.
-   */
-  class MCMCModel: public PosteriorModel
-  {
-  public:
-
-    typedef boost::ptr_vector<NonParametricProcess>  GPVect;
-    typedef boost::ptr_vector<Criteria>  CritVect;
-
-    /** 
-     * \brief Constructor (Note: default constructor is private)
-     * 
-     * @param dim number of input dimensions
-     * @param params configuration parameters (see parameters.h)
-     * @param eng random number generation engine (boost)
-     */
-    MCMCModel(size_t dim, bopt_params params, randEngine& eng);
-
-    virtual ~MCMCModel();
-
-    void updateHyperParameters();
-    void fitSurrogateModel();
-    void updateSurrogateModel();
-    double evaluateCriteria(const vectord& query);
-
-    bool criteriaRequiresComparison();
-    void setFirstCriterium();
-    bool setNextCriterium(const vectord& prevResult);
-    std::string getBestCriteria(vectord& best);
-
-    ProbabilityDistribution* getPrediction(const vectord& query);
-   
-  private:
-    void setSurrogateModel(randEngine& eng);    
-    void setCriteria(randEngine& eng);
-
-  private:  // Members
-    size_t nParticles;
-    GPVect mGP;                ///< Pointer to surrogate model
-    CritVect mCrit;                    ///< Metacriteria model
-
-    boost::scoped_ptr<MCMCSampler> kSampler;
-
-  private: //Forbidden
-    MCMCModel();
-    MCMCModel(MCMCModel& copy);
-  };
-
-  /**@}*/
-
-  inline void MCMCModel::fitSurrogateModel()
-  { 
-    for(GPVect::iterator it=mGP.begin(); it != mGP.end(); ++it)
-      it->fitSurrogateModel(); 
-  };
-
-  inline void MCMCModel::updateSurrogateModel()
-  {     
-    for(GPVect::iterator it=mGP.begin(); it != mGP.end(); ++it)
-      it->updateSurrogateModel(); 
-  };
-
-  inline double MCMCModel::evaluateCriteria(const vectord& query)
-  { 
-    double sum = 0.0;
-    for(CritVect::iterator it=mCrit.begin(); it != mCrit.end(); ++it)
-      {
-	sum += it->evaluate(query); 
-      }
-    return sum/static_cast<double>(nParticles);
-  };
-
-  inline bool MCMCModel::criteriaRequiresComparison()
-  {return mCrit[0].requireComparison(); };
-    
-  inline void MCMCModel::setFirstCriterium()
-  { 
-    for(CritVect::iterator it=mCrit.begin(); it != mCrit.end(); ++it)
-      {
-	it->initialCriteria();
-      }
-  };
-
-  // Although we change the criteria for all MCMC particles, we use
-  // only the first element to compute de Hedge algorithm, because it
-  // should be based on the average result, thus being common for all
-  // the particles.
-  inline bool MCMCModel::setNextCriterium(const vectord& prevResult)
-  { 
-    bool rotated;
-    mCrit[0].pushResult(prevResult);
-    for(CritVect::iterator it=mCrit.begin(); it != mCrit.end(); ++it)
-      {
-	rotated = it->rotateCriteria();
-      }
-    return rotated; 
-  };
-
-  inline std::string MCMCModel::getBestCriteria(vectord& best)
-  { return mCrit[0].getBestCriteria(best); };
-
-  inline ProbabilityDistribution* MCMCModel::getPrediction(const vectord& query)
-  { return mGP[0].prediction(query); };
-
-
-
-} //namespace bayesopt
-
-
-#endif

include/posterior_empirical.hpp

+/**  \file posterior_empirical.hpp \brief Empirical Bayesian estimator */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+
+#ifndef  _POSTERIOR_EMPIRICAL_HPP_
+#define  _POSTERIOR_EMPIRICAL_HPP_
+
+#include <boost/scoped_ptr.hpp>
+#include "criteria_functors.hpp"
+#include "posteriormodel.hpp"
+
+namespace bayesopt {
+
+  /** \addtogroup BayesOpt
+   *  \brief Main module for Bayesian optimization
+   */
+  /*@{*/
+
+  class NLOPT_Optimization;
+
+ /**
+   * \brief Posterior model that learns the hyperparameters by empirical
+   * Bayes, i.e., a point estimate (ML or MAP) obtained by optimization.
+   */
+  class EmpiricalBayes: public PosteriorModel
+  {
+  public:
+    /** 
+     * Constructor
+     * @param params set of parameters (see parameters.h)
+     */
+    EmpiricalBayes(size_t dim, bopt_params params, randEngine& eng);
+
+    /** 
+     * Default destructor
+     */
+    virtual ~EmpiricalBayes();
+
+    void updateHyperParameters();
+    void fitSurrogateModel();
+    void updateSurrogateModel();
+    double evaluateCriteria(const vectord& query);
+
+    bool criteriaRequiresComparison();
+    void setFirstCriterium();
+    bool setNextCriterium(const vectord& prevResult);
+    std::string getBestCriteria(vectord& best);
+
+    ProbabilityDistribution* getPrediction(const vectord& query);
+
+  private:
+    EmpiricalBayes();
+
+    void setSurrogateModel(randEngine& eng);    
+    void setCriteria(randEngine& eng);
+
+  private:  // Members
+    boost::scoped_ptr<NonParametricProcess> mGP; ///< Pointer to surrogate model
+    boost::scoped_ptr<Criteria> mCrit;                   ///< Metacriteria model
+
+    boost::scoped_ptr<NLOPT_Optimization> kOptimizer;
+  };
+
+  /**@}*/
+
+  inline void EmpiricalBayes::fitSurrogateModel()
+  { mGP->fitSurrogateModel(); };
+
+  inline void EmpiricalBayes::updateSurrogateModel()
+  { mGP->updateSurrogateModel(); };
+
+  inline double EmpiricalBayes::evaluateCriteria(const vectord& query)
+  { return (*mCrit)(query); };
+
+  inline bool EmpiricalBayes::criteriaRequiresComparison()
+  {return mCrit->requireComparison(); };
+    
+  inline void EmpiricalBayes::setFirstCriterium()
+  { mCrit->initialCriteria(); };
+
+  inline bool EmpiricalBayes::setNextCriterium(const vectord& prevResult)
+  { return false; };
+
+  inline std::string EmpiricalBayes::getBestCriteria(vectord& best)
+  { return mCrit->name(); };
+
+  inline  ProbabilityDistribution* EmpiricalBayes::getPrediction(const vectord& query)
+  { return mGP->prediction(query); };
+
+
+} //namespace bayesopt
+
+
+#endif
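
A minimal usage sketch of the empirical-Bayes flow declared above; the
surrounding objects (dim, params, eng, query) and the exact call order are
illustrative assumptions, not part of this commit:

    // Fit the surrogate, point-estimate the kernel hyperparameters
    // (ML or MAP, depending on params.sc_type), then refit and query
    // the acquisition criterion.
    bayesopt::EmpiricalBayes model(dim, params, eng);
    model.fitSurrogateModel();
    model.updateHyperParameters();   // NLOPT search in log-hyperparameter space
    model.fitSurrogateModel();       // refit with the optimized hyperparameters
    double score = model.evaluateCriteria(query);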

include/posterior_fixed.hpp

+/**  \file posterior_fixed.hpp \brief Posterior model based on fixed kernel parameters */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+
+#ifndef  _POSTERIOR_FIXED_HPP_
+#define  _POSTERIOR_FIXED_HPP_
+
+#include <boost/scoped_ptr.hpp>
+#include "inneroptimization.hpp"
+#include "criteria_functors.hpp"
+#include "posteriormodel.hpp"
+
+namespace bayesopt {
+
+  /** \addtogroup BayesOpt
+   *  \brief Main module for Bayesian optimization
+   */
+  /*@{*/
+
+ /**
+   * \brief Posterior model with fixed hyperparameters. No learning is
+   * performed; the model only needs to be fit to the data.
+   */
+  class PosteriorFixed: public PosteriorModel
+  {
+  public:
+    /** 
+     * Constructor
+     * @param params set of parameters (see parameters.h)
+     */
+    PosteriorFixed(size_t dim, bopt_params params, randEngine& eng);
+
+    /** 
+     * Default destructor
+     */
+    virtual ~PosteriorFixed();
+
+    void updateHyperParameters();
+    void fitSurrogateModel();
+    void updateSurrogateModel();
+    double evaluateCriteria(const vectord& query);
+
+    bool criteriaRequiresComparison();
+    void setFirstCriterium();
+    bool setNextCriterium(const vectord& prevResult);
+    std::string getBestCriteria(vectord& best);
+
+    ProbabilityDistribution* getPrediction(const vectord& query);
+
+  private:
+    PosteriorFixed();
+
+    void setSurrogateModel(randEngine& eng);    
+    void setCriteria(randEngine& eng);
+
+  private:  // Members
+    boost::scoped_ptr<NonParametricProcess> mGP; ///< Pointer to surrogate model
+    boost::scoped_ptr<Criteria> mCrit;                   ///< Metacriteria model
+  };
+
+  /**@}*/
+
+  inline void PosteriorFixed::updateHyperParameters()
+  { /* Fixed model. Does not require updates */ };
+
+  inline void PosteriorFixed::fitSurrogateModel()
+  { mGP->fitSurrogateModel(); };
+
+  inline void PosteriorFixed::updateSurrogateModel()
+  { mGP->updateSurrogateModel(); };
+
+  inline double PosteriorFixed::evaluateCriteria(const vectord& query)
+  { return (*mCrit)(query); };
+
+  inline bool PosteriorFixed::criteriaRequiresComparison()
+  {return mCrit->requireComparison(); };
+    
+  inline void PosteriorFixed::setFirstCriterium()
+  { mCrit->initialCriteria(); };
+
+  inline bool PosteriorFixed::setNextCriterium(const vectord& prevResult)
+  { return false; };
+
+  inline std::string PosteriorFixed::getBestCriteria(vectord& best)
+  { return mCrit->name(); };
+
+  inline  ProbabilityDistribution* PosteriorFixed::getPrediction(const vectord& query)
+  { return mGP->prediction(query); };
+
+
+} //namespace bayesopt
+
+
+#endif

include/posterior_mcmc.hpp

+/**  \file posterior_mcmc.hpp \brief Markov Chain Monte Carlo algorithms */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+
+#ifndef  _POSTERIOR_MCMC_HPP_
+#define  _POSTERIOR_MCMC_HPP_
+
+#include <boost/ptr_container/ptr_vector.hpp>
+#include "randgen.hpp"
+#include "optimizable.hpp"
+#include "criteria_functors.hpp"
+#include "posteriormodel.hpp"
+
+namespace bayesopt {
+
+  // We plan to add more in the future 
+  typedef enum {
+    SLICE_MCMC           ///< Slice sampling
+  } McmcAlgorithms;
+
+
+  /**
+   * \brief Markov Chain Monte Carlo sampler
+   *
+   * It generates a set of particles distributed according to an
+   * arbitrary pdf. IMPORTANT: since it is intended as a drop-in
+   * replacement for optimization-based (ML or MAP) estimation, it
+   * assumes the objective is a NEGATIVE LOG PDF.
+   *
+   * @see NLOPT_Optimization
+   */
+  class MCMCSampler
+  {
+  public:
+    /** 
+     * \brief Constructor (Note: default constructor is private)
+     * 
+     * @param rbo pointer to an RBOptimizable object with the PDF
+     *            to sample from. IMPORTANT: We assume that the 
+     *            evaluation of rbo is the NEGATIVE LOG PDF.  
+     * @param dim number of input dimensions
+     * @param eng random number generation engine (boost)
+     */
+    MCMCSampler(RBOptimizable* rbo, size_t dim, randEngine& eng);
+    virtual ~MCMCSampler();
+
+    /** Sets the sampling algorithm (slice, MH, etc.) */
+    void setAlgorithm(McmcAlgorithms newAlg);
+
+    /** Sets the number of particles that are stored */
+    void setNParticles(size_t nParticles);
+
+    /** Usually, the initial samples of any MCMC method are biased
+     *  and must be discarded. This phase is commonly known as the
+     *  burn-in (called burn-out here). This method sets the number
+     *  of samples to be discarded.
+     */
+    void setNBurnOut(size_t nParticles);
+
+    /** Compute the set of particles according to the target PDF.
+     * @param Xnext input: initial point of the Markov Chain, 
+     *              output: last point of the Markov Chain
+     */
+    void run(vectord &Xnext);
+
+    vectord getParticle(size_t i);
+
+    void printParticles();
+
+  private:
+    void randomJump(vectord &x);
+    void burnOut(vectord &x);
+    void sliceSample(vectord &x);
+
+    boost::scoped_ptr<RBOptimizableWrapper> obj;
+
+    McmcAlgorithms mAlg;
+    size_t mDims;
+    size_t nBurnOut;
+    size_t nSamples;
+    bool mStepOut;
+
+    vectord mSigma;
+    vecOfvec mParticles;
+    randEngine mtRandom;
+
+  private: //Forbidden
+    MCMCSampler();
+    MCMCSampler(MCMCSampler& copy);
+  };
+
+  inline void MCMCSampler::setAlgorithm(McmcAlgorithms newAlg)
+  { mAlg = newAlg; };
+
+  inline void MCMCSampler::setNParticles(size_t nParticles)
+  { nSamples = nParticles; };
+
+  inline void MCMCSampler::setNBurnOut(size_t nParticles)
+  { nBurnOut = nParticles; };
+
+  inline vectord MCMCSampler::getParticle(size_t i)
+  { return mParticles[i]; };
+
+  inline void MCMCSampler::printParticles()
+  {
+    for(size_t i=0; i<mParticles.size(); ++i)
+      { 
+	FILE_LOG(logDEBUG) << i << "->" << mParticles[i] 
+			   << " | Log-lik " << -obj->evaluate(mParticles[i]);
+      }
+  }
+
+
+
+  /**
+   * \brief Posterior model of nonparametric processes/criteria based
+   * on MCMC samples.
+   *
+   * For computational reasons we store a copy of each conditional
+   * model together with the corresponding particle generated by MCMC.
+   * This avoids costly operations, such as recomputing the matrix
+   * inversion for every kernel parameter, at each GP prediction.
+   * Thus, we assume that the number of particles is not very large.
+   */
+  class MCMCModel: public PosteriorModel
+  {
+  public:
+
+    typedef boost::ptr_vector<NonParametricProcess>  GPVect;
+    typedef boost::ptr_vector<Criteria>  CritVect;
+
+    /** 
+     * \brief Constructor (Note: default constructor is private)
+     * 
+     * @param dim number of input dimensions
+     * @param params configuration parameters (see parameters.h)
+     * @param eng random number generation engine (boost)
+     */
+    MCMCModel(size_t dim, bopt_params params, randEngine& eng);
+
+    virtual ~MCMCModel();
+
+    void updateHyperParameters();
+    void fitSurrogateModel();
+    void updateSurrogateModel();
+    double evaluateCriteria(const vectord& query);
+
+    bool criteriaRequiresComparison();
+    void setFirstCriterium();
+    bool setNextCriterium(const vectord& prevResult);
+    std::string getBestCriteria(vectord& best);
+
+    ProbabilityDistribution* getPrediction(const vectord& query);
+   
+  private:
+    void setSurrogateModel(randEngine& eng);    
+    void setCriteria(randEngine& eng);
+
+  private:  // Members
+    size_t nParticles;
+    GPVect mGP;                ///< Vector of surrogate models (one per particle)
+    CritVect mCrit;            ///< Vector of criteria (one per particle)
+
+    boost::scoped_ptr<MCMCSampler> kSampler;
+
+  private: //Forbidden
+    MCMCModel();
+    MCMCModel(MCMCModel& copy);
+  };
+
+  /**@}*/
+
+  inline void MCMCModel::fitSurrogateModel()
+  { 
+    for(GPVect::iterator it=mGP.begin(); it != mGP.end(); ++it)
+      it->fitSurrogateModel(); 
+  };
+
+  inline void MCMCModel::updateSurrogateModel()
+  {     
+    for(GPVect::iterator it=mGP.begin(); it != mGP.end(); ++it)
+      it->updateSurrogateModel(); 
+  };
+
+  inline double MCMCModel::evaluateCriteria(const vectord& query)
+  { 
+    double sum = 0.0;
+    for(CritVect::iterator it=mCrit.begin(); it != mCrit.end(); ++it)
+      {
+	sum += it->evaluate(query); 
+      }
+    return sum/static_cast<double>(nParticles);
+  };
+
+  inline bool MCMCModel::criteriaRequiresComparison()
+  {return mCrit[0].requireComparison(); };
+    
+  inline void MCMCModel::setFirstCriterium()
+  { 
+    for(CritVect::iterator it=mCrit.begin(); it != mCrit.end(); ++it)
+      {
+	it->initialCriteria();
+      }
+  };
+
+  // Although we rotate the criteria for all MCMC particles, we use
+  // only the first element to update the Hedge algorithm, because it
+  // should be based on the average result and is therefore common to
+  // all the particles.
+  inline bool MCMCModel::setNextCriterium(const vectord& prevResult)
+  { 
+    bool rotated = false;
+    mCrit[0].pushResult(prevResult);
+    for(CritVect::iterator it=mCrit.begin(); it != mCrit.end(); ++it)
+      {
+	rotated = it->rotateCriteria();
+      }
+    return rotated; 
+  };
+
+  inline std::string MCMCModel::getBestCriteria(vectord& best)
+  { return mCrit[0].getBestCriteria(best); };
+
+  inline ProbabilityDistribution* MCMCModel::getPrediction(const vectord& query)
+  { return mGP[0].prediction(query); };
+
+
+
+} //namespace bayesopt
+
+
+#endif
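
For reference, MCMCModel::evaluateCriteria above is the standard Monte
Carlo approximation of the marginalized criterion: given particles
theta_1, ..., theta_N drawn from the hyperparameter posterior,

    c(x) ~= (1/N) * sum_{i=1..N} c(x; theta_i)

so the acquisition value averages over hyperparameter uncertainty
instead of relying on a single point estimate.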
 clear all, close all
 addpath('testfunctions')
 
-params.n_iterations = 100;
-params.n_init_samples = 5;
+params.n_iterations = 190;
+params.n_init_samples = 10;
 params.crit_name = 'cEI';
 params.surr_name = 'sStudentTProcessNIG';
 params.noise = 1e-6;
 params.kernel_name = 'kMaternARD5';
-params.kernel_hp_mean = [1 1];
-params.kernel_hp_std = [10 10];
+params.kernel_hp_mean = [1];
+params.kernel_hp_std = [10];
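+% A scalar prior is now enough even for ARD kernels: the value is
+% replicated across all input dimensions (see kernel_functors.cpp below).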
 params.verbose_level = 1;
 params.log_filename = 'matbopt.log';
 
-% n = 5;
-% lb = ones(n,1)*pi/2;
-% ub = ones(n,1)*pi;
-% fun = 'michalewicz';
 
-n = 2;
-lb = zeros(n,1);
-ub = ones(n,1);
-fun = 'branin';
-
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 disp('Continuous optimization');
-tic;
-bayesoptcont(fun,n,params,lb,ub)
-toc;
-pause;
-disp('Discrete optimization');
-% The set of points must be numDimension x numPoints.
-np = 100;
-xset = repmat((ub-lb),1,np) .* rand(n,np) - repmat(lb,1,np);
-
-tic;
-bayesoptdisc(fun,xset, params);
-toc;
-pause;
-
-fun = 'hartmann';
-params.kernel_hp_mean = ones(1,6);
-params.kernel_hp_std = ones(1,6)*10;
-n = 6;
+fun = 'branin'; n = 2;
 lb = zeros(n,1);
 ub = ones(n,1);
 
 tic;
 bayesoptcont(fun,n,params,lb,ub)
 toc;
+pause;
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+disp('Discrete optimization');
+% We still use branin
+% The set of points must be numDimension x numPoints.
+np = 100;
+xset = repmat((ub-lb),1,np) .* rand(n,np) + repmat(lb,1,np);
+
+tic;
+bayesoptdisc(fun, xset, params);
+toc;
+yset = zeros(np,1);
+for i=1:np
+    yset(i) = feval(fun,xset(:,i));
+end;
+[y_min,id] = min(yset);
+disp(xset(:,id));
+disp(y_min);
+pause;
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+disp('Continuous optimization');
+fun = 'hartmann'; n = 6;
+lb = zeros(n,1);
+ub = ones(n,1);
+
+tic;
+bayesoptcont(fun,n,params,lb,ub)
+toc;

src/empiricalbayes.cpp

-/*
--------------------------------------------------------------------------
-   This file is part of BayesOpt, an efficient C++ library for 
-   Bayesian optimization.
-
-   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
- 
-   BayesOpt is free software: you can redistribute it and/or modify it 
-   under the terms of the GNU General Public License as published by
-   the Free Software Foundation, either version 3 of the License, or
-   (at your option) any later version.
-
-   BayesOpt is distributed in the hope that it will be useful, but 
-   WITHOUT ANY WARRANTY; without even the implied warranty of
-   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-   GNU General Public License for more details.
-
-   You should have received a copy of the GNU General Public License
-   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
-------------------------------------------------------------------------
-*/
-
-#include "log.hpp"
-#include "empiricalbayes.hpp"
-
-namespace bayesopt
-{
-
-
-  EmpiricalBayes::EmpiricalBayes(size_t dim, bopt_params parameters, 
-				 randEngine& eng):
-    PosteriorModel(dim,parameters,eng)
-  {
-    // Configure Surrogate and Criteria Functions
-    setSurrogateModel(eng);
-    setCriteria(eng);
-
-    // Seting kernel optimization
-    size_t nhp = mGP->nHyperParameters();
-    kOptimizer.reset(new NLOPT_Optimization(mGP.get(),nhp));
-
-    //TODO: Generalize
-    if (mParameters.sc_type == SC_ML)
-      {
-	kOptimizer->setAlgorithm(BOBYQA);    // local search to avoid underfitting
-      }
-    else
-      {
-	kOptimizer->setAlgorithm(COMBINED);
-      }
-    // kOptimizer->setLimits(svectord(nhp,1e-10),svectord(nhp,100.));
-    //Limits in log space
-    kOptimizer->setLimits(svectord(nhp,-6.0),svectord(nhp,1.0));
-  } // __init__
-
-  EmpiricalBayes::~EmpiricalBayes()
-  { } // Default destructor
-
-
-  void EmpiricalBayes::updateHyperParameters()
-  {
-    FILE_LOG(logDEBUG) << "------ Optimizing hyperparameters ------";
-    vectord optimalTheta = mGP->getHyperParameters();
-
-    FILE_LOG(logDEBUG) << "Initial hyper parameters: " << optimalTheta;
-    kOptimizer->run(optimalTheta);
-    mGP->setHyperParameters(optimalTheta);
-    FILE_LOG(logDEBUG) << "Final hyper parameters: " << optimalTheta;	
-  };
-
-
-  void EmpiricalBayes::setSurrogateModel(randEngine& eng)
-  {
-    mGP.reset(NonParametricProcess::create(mDims,mParameters,
-					   mData,mMean,eng));
-  } // setSurrogateModel
-
-  void EmpiricalBayes::setCriteria(randEngine& eng)
-  {
-    CriteriaFactory mCFactory;
-
-    mCrit.reset(mCFactory.create(mParameters.crit_name,mGP.get()));
-    mCrit->setRandomEngine(eng);
-
-    if (mCrit->nParameters() == mParameters.n_crit_params)
-      {
-	mCrit->setParameters(utils::array2vector(mParameters.crit_params,
-					       mParameters.n_crit_params));
-      }
-    else // If the number of paramerters is different, use default.
-      {
-	if (mParameters.n_crit_params != 0)
-	  {
-	    FILE_LOG(logERROR) << "Expected " << mCrit->nParameters() 
-			       << " parameters. Got " 
-			       << mParameters.n_crit_params << " instead.";
-	  }
-	FILE_LOG(logINFO) << "Usign default parameters for criteria.";
-      }
-  } // setCriteria
-
-
-} //namespace bayesopt
-

src/kernel_functors.cpp

     KernelFactory mKFactory;
 
     mKernel.reset(mKFactory.create(k_name, dim));
-    setKernelPrior(thetav,stheta);
-    mKernel->setHyperParameters(thetav);
+
+    if ((thetav.size() == 1) && (stheta.size() == 1) && (mKernel->nHyperParameters() != 1))
+      {
+	// We assume isotropic prior, so we replicate the vectors for all dimensions
+	size_t n = mKernel->nHyperParameters();
+
+	FILE_LOG(logINFO) << "Expected " << n << " hyperparameters."
+			  << " Replicating given prior.";
+
+	vectord newthetav = svectord(n,thetav(0));
+	vectord newstheta = svectord(n,stheta(0));
+
+	setKernelPrior(newthetav,newstheta);
+	mKernel->setHyperParameters(newthetav);
+      }
+    else
+      {
+	setKernelPrior(thetav,stheta);
+	mKernel->setHyperParameters(thetav);
+      }
   }
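
The replication rule above, restated as a self-contained sketch; the
helper name and the minimal typedefs are illustrative, not library API:

    #include <boost/numeric/ublas/vector.hpp>

    typedef boost::numeric::ublas::vector<double>        vectord;
    typedef boost::numeric::ublas::scalar_vector<double> svectord;

    // If a single prior value is given but the kernel expects n
    // hyperparameters, assume an isotropic prior and copy the value
    // to every dimension; otherwise use the prior as given.
    vectord replicatePrior(const vectord& theta, size_t n)
    {
      if ((theta.size() == 1) && (n != 1)) return svectord(n, theta(0));
      return theta;
    }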
 
   void KernelModel::setKernel (kernel_parameters kernel, 

src/mcmc.cpp

-/*
--------------------------------------------------------------------------
-   This file is part of BayesOpt, an efficient C++ library for 
-   Bayesian optimization.
-
-   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
- 
-   BayesOpt is free software: you can redistribute it and/or modify it 
-   under the terms of the GNU General Public License as published by
-   the Free Software Foundation, either version 3 of the License, or
-   (at your option) any later version.
-
-   BayesOpt is distributed in the hope that it will be useful, but 
-   WITHOUT ANY WARRANTY; without even the implied warranty of
-   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-   GNU General Public License for more details.
-
-   You should have received a copy of the GNU General Public License
-   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
-------------------------------------------------------------------------
-*/
-#include "log.hpp"
-#include "lhs.hpp"
-#include "mcmc.hpp"
-
-namespace bayesopt
-{
-  MCMCSampler::MCMCSampler(RBOptimizable* rbo, size_t dim, randEngine& eng):
-    mtRandom(eng), obj(new RBOptimizableWrapper(rbo))
-  {
-    mAlg = SLICE_MCMC;
-    mDims = dim;
-    nBurnOut = 500;
-    nSamples = 100;
-    mStepOut = true;
-    mSigma = svectord(dim,6);
-  };
-
-  MCMCSampler::~MCMCSampler()
-  {};
-
-  void MCMCSampler::randomJump(vectord &x)
-  {
-    randNFloat sample( mtRandom, normalDist(0,1) );
-    FILE_LOG(logERROR) << "Doing random jump.";
-    for(vectord::iterator it = x.begin(); it != x.end(); ++it)
-      {
-	*it = sample()*6;
-      }
-    FILE_LOG(logERROR) << "Likelihood." << x << " | " << obj->evaluate(x);
-  }
-
-  //TODO: Include new algorithms when we add them.
-  void MCMCSampler::burnOut(vectord &x)
-  {
-    for(size_t i=0; i<nBurnOut; ++i)  
-      {
-	try
-	  {
-	    sliceSample(x);
-	  }
-	catch(std::runtime_error& e)
-	  {
-	    FILE_LOG(logERROR) << e.what();
-	    randomJump(x);
-	  }
-      }
-  }
-
-
-  void MCMCSampler::sliceSample(vectord &x)
-  {
-    randFloat sample( mtRandom, realUniformDist(0,1) );
-    size_t n = x.size();
-
-    std::vector<int> perms = utils::return_index_vector(0,n);
-
-    utils::randomPerms(perms, mtRandom);
-
-    for (size_t i = 0; i<n; ++i)
-      {
-	const size_t ind = perms[i];
-	const double sigma = mSigma(ind);
-
-	const double y_max = -obj->evaluate(x);
-	const double y = y_max+std::log(sample());  
-	//y = y_max * sample(), but we are in negative log space
-	//std::cout << y_max << "|||" << y << std::endl;
-
-	if (y == 0.0) 
-	  {
-	    throw std::runtime_error("Error in MCMC: Initial point"
-				     " out of support region."); 
-	  }
-
-	// Step out
-	const double x_cur = x(ind);
-	const double r = sample();
-	double xl = x_cur - r * sigma;
-	double xr = x_cur + (1-r)*sigma;
-
-	if (mStepOut)
-	  {
-	    x(ind) = xl;
-	    while (-obj->evaluate(x) > y) { x(ind) -= sigma; }
-	    xl = x(ind);
-
-	    x(ind) = xr;
-	    while (-obj->evaluate(x) > y) { x(ind) += sigma; }
-	    xr = x(ind);
-	  }
-
-	//Shrink
-	bool on_slice = false;
-	while (!on_slice)
-	  {
-	    x(ind) = (xr-xl) * sample() + xl;
-	    if (-obj->evaluate(x) < y)
-	      {
-		if      (x(ind) > x_cur)  xr = x(ind);
-		else if (x(ind) < x_cur)  xl = x(ind);
-		else throw std::runtime_error("Error in MCMC. Slice colapsed.");
-	      }
-	    else
-	      {
-		on_slice = true;
-	      }
-	  }
-      }
-  }
-
-  //TODO: Include new algorithms when we add them.
-  void MCMCSampler::run(vectord &Xnext)
-  {
-    randFloat sample( mtRandom, realUniformDist(0,1) );
-    if (nBurnOut>0) burnOut(Xnext);
-
-    mParticles.clear();
-    for(size_t i=0; i<nSamples; ++i)  
-      {
-	try
-	  {
-	    sliceSample(Xnext);
-	  }
-	catch(std::runtime_error& e)
-	  {
-	    FILE_LOG(logERROR) << e.what();
-	    randomJump(Xnext);
-	  }
-	mParticles.push_back(Xnext);
-      }
-    printParticles();
-  }
-
-  //////////////////////////////////////////////////////////////////////
-  MCMCModel::MCMCModel(size_t dim, bopt_params parameters, 
-		       randEngine& eng):
-    PosteriorModel(dim,parameters,eng), nParticles(10)
-  {
-    //TODO: Take nParticles from parameters
-    
-    // Configure Surrogate and Criteria Functions
-    setSurrogateModel(eng);
-    setCriteria(eng);
-
-    // Seting MCMC for kernel hyperparameters...
-    // We use the first GP as the "walker" to get the particles. Then,
-    // we will use a whole vector of GPs to avoid recomputing the
-    // kernel matrices after every data point.
-    size_t nhp = mGP[0].nHyperParameters();
-    kSampler.reset(new MCMCSampler(&mGP[0],nhp,eng));
-
-    kSampler->setNParticles(nParticles);
-    kSampler->setNBurnOut(300);
-  }
-
-  MCMCModel::~MCMCModel()
-  { } // Default destructor
-
-
-  void MCMCModel::updateHyperParameters()
-  {
-    // We take the initial point as the last particle from previous update.
-    size_t last = mGP.size()-1;
-    vectord lastTheta = mGP[last].getHyperParameters();
-
-    FILE_LOG(logDEBUG) << "Initial kernel parameters: " << lastTheta;
-    kSampler->run(lastTheta);
-    for(size_t i = 0; i<nParticles; ++i)
-      {
-	mGP[i].setHyperParameters(kSampler->getParticle(i));
-      }
-    FILE_LOG(logDEBUG) << "Final kernel parameters: " << lastTheta;	
-  };
-
-
-  void MCMCModel::setSurrogateModel(randEngine& eng)
-  {
-    for(size_t i = 0; i<nParticles; ++i)
-      {
-	mGP.push_back(NonParametricProcess::create(mDims,mParameters,
-						   mData,mMean,eng));
-      } 
-  } // setSurrogateModel
-
-  void MCMCModel::setCriteria(randEngine& eng)
-  {
-    CriteriaFactory mCFactory;
-
-    for(size_t i = 0; i<nParticles; ++i)
-      {
-	mCrit.push_back(mCFactory.create(mParameters.crit_name,&mGP[i]));
-	mCrit[i].setRandomEngine(eng);
-
-	if (mCrit[i].nParameters() == mParameters.n_crit_params)
-	  {
-	    mCrit[i].setParameters(utils::array2vector(mParameters.crit_params,
-						       mParameters.n_crit_params));
-	  }
-	else // If the number of paramerters is different, use default.
-	  {
-	    if (mParameters.n_crit_params != 0)
-	      {
-		FILE_LOG(logERROR) << "Expected " << mCrit[i].nParameters() 
-				   << " parameters. Got " 
-				   << mParameters.n_crit_params << " instead.";
-	      }
-	    FILE_LOG(logINFO) << "Usign default parameters for criteria.";
-	  }
-      }
-  } // setCriteria
-
-
-
-
-}// namespace bayesopt

src/parameters.cpp

 const double DEFAULT_NOISE   = 1e-4;
 
 /* Algorithm parameters */
-const size_t DEFAULT_ITERATIONS  = 300;
-const size_t DEFAULT_SAMPLES     = 30;
-const size_t DEFAULT_VERBOSE     = 1;
+const size_t DEFAULT_ITERATIONS         = 190;
+const size_t DEFAULT_INIT_SAMPLES       = 10;
+const size_t DEFAULT_ITERATIONS_RELEARN = 50;
+const size_t DEFAULT_INNER_EVALUATIONS  = 500;   /**< Used per dimension */
 
-/* Algorithm limits */
-const size_t MAX_ITERATIONS  = 1000;        /**< Used if n_iterations <0 */
+const size_t DEFAULT_VERBOSE           = 1;
 
-/* INNER Optimizer default values */
-const size_t MAX_INNER_EVALUATIONS = 500;   /**< Used per dimmension */
 
 
 learning_type str2learn(const char* name)
   else return L_ERROR;
 }
 
-
-
-
 const char* learn2str(learning_type name)
 {
   switch(name)
     }
 }
 
-
 score_type str2score(const char* name)
 {
   if      (!strcmp(name,"SC_MTL")   || !strcmp(name,"mtl"))   return SC_MTL;
   else return SC_ERROR;
 }
 
-
-
-
 const char* score2str(score_type name)
 {
   switch(name)
 {
   kernel_parameters kernel;
   kernel.name = new char[128];
-  strcpy(kernel.name,"kMaternISO3");
+  strcpy(kernel.name,"kMaternARD5");
 
   kernel.hp_mean[0] = KERNEL_THETA;
-  kernel.hp_std[0] = KERNEL_SIGMA;
-  kernel.n_hp = 1;
+  kernel.hp_std[0]  = KERNEL_SIGMA;
+  kernel.n_hp       = 1;
   
   mean_parameters mean;
   mean.name = new char[128];
   strcpy(mean.name,"mConst");
 
   mean.coef_mean[0] = MEAN_MU;
-  mean.coef_std[0] = MEAN_SIGMA;
-  mean.n_coef = 1;
+  mean.coef_std[0]  = MEAN_SIGMA;
+  mean.n_coef       = 1;
   
 
   bopt_params params;
-  params.n_iterations =   DEFAULT_ITERATIONS;
-  params.n_inner_iterations = MAX_INNER_EVALUATIONS;
-  params.n_init_samples = DEFAULT_SAMPLES;
-  params.n_iter_relearn = 0;
 
-  params.init_method = 1;
-  params.use_random_seed = 1;
+  params.n_iterations       = DEFAULT_ITERATIONS;
+  params.n_inner_iterations = DEFAULT_INNER_EVALUATIONS;
+  params.n_init_samples     = DEFAULT_INIT_SAMPLES;
+  params.n_iter_relearn     = DEFAULT_ITERATIONS_RELEARN;
+
+  params.init_method      = 1;
+  params.use_random_seed  = 1;
 
   params.verbose_level = DEFAULT_VERBOSE;
-  params.log_filename = new char[128];
+  params.log_filename  = new char[128];
   strcpy(params.log_filename,"bayesopt.log");
 
   params.load_save_flag = 0;
   strcpy(params.log_filename,"bayesopt.dat");
 
   params.surr_name = new char[128];
-  strcpy(params.surr_name,"sGaussianProcess");
+  strcpy(params.surr_name,"sStudentTProcessJef");
 
   params.sigma_s = DEFAULT_SIGMA;
-  params.noise = DEFAULT_NOISE;
-  params.alpha = PRIOR_ALPHA;
-  params.beta = PRIOR_BETA;
-  params.l_all = 0;
-  params.l_type = L_EMPIRICAL;
+  params.noise   = DEFAULT_NOISE;
+  params.alpha   = PRIOR_ALPHA;
+  params.beta    = PRIOR_BETA;
+
+  params.l_all   = 0;
+  params.l_type  = L_EMPIRICAL;
   params.sc_type = SC_MAP;
   params.epsilon = 0.0;
   
 
   params.kernel = kernel;
   params.mean = mean;
+
   return params;
 }
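
A sketch of picking up these defaults from user code, assuming the
function shown above is the factory declared in parameters.h (commonly
initialize_parameters_to_default(); the name is not visible in this hunk):

    bopt_params params = initialize_parameters_to_default();
    // Defaults after this commit: 190 iterations, 10 initial samples,
    // relearning every 50 iterations, a kMaternARD5 kernel and a
    // Student-t process surrogate (sStudentTProcessJef).
    params.l_type = L_MCMC;   // switch from the default L_EMPIRICAL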

src/posterior_empirical.cpp

+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#include "log.hpp"
+#include "inneroptimization.hpp"
+#include "posterior_empirical.hpp"
+
+namespace bayesopt
+{
+
+
+  EmpiricalBayes::EmpiricalBayes(size_t dim, bopt_params parameters, 
+				 randEngine& eng):
+    PosteriorModel(dim,parameters,eng)
+  {
+    // Configure Surrogate and Criteria Functions
+    setSurrogateModel(eng);
+    setCriteria(eng);
+
+    // Setting kernel optimization
+    size_t nhp = mGP->nHyperParameters();
+    kOptimizer.reset(new NLOPT_Optimization(mGP.get(),nhp));
+
+    //TODO: Generalize
+    if (mParameters.sc_type == SC_ML)
+      {
+	kOptimizer->setAlgorithm(BOBYQA);    // local search to avoid underfitting
+      }
+    else
+      {
+	kOptimizer->setAlgorithm(COMBINED);
+      }
+    // kOptimizer->setLimits(svectord(nhp,1e-10),svectord(nhp,100.));
+    //Limits in log space
+    kOptimizer->setLimits(svectord(nhp,-6.0),svectord(nhp,1.0));
+  } 
+
+  EmpiricalBayes::~EmpiricalBayes()
+  { } // Default destructor
+
+
+  void EmpiricalBayes::updateHyperParameters()
+  {
+    FILE_LOG(logDEBUG) << "------ Optimizing hyperparameters ------";
+    vectord optimalTheta = mGP->getHyperParameters();
+
+    FILE_LOG(logDEBUG) << "Initial hyper parameters: " << optimalTheta;
+    kOptimizer->run(optimalTheta);
+    mGP->setHyperParameters(optimalTheta);
+    FILE_LOG(logDEBUG) << "Final hyper parameters: " << optimalTheta;	
+  };
+
+
+  void EmpiricalBayes::setSurrogateModel(randEngine& eng)
+  {
+    mGP.reset(NonParametricProcess::create(mDims,mParameters,
+					   mData,mMean,eng));
+  } // setSurrogateModel
+
+  void EmpiricalBayes::setCriteria(randEngine& eng)
+  {
+    CriteriaFactory mCFactory;
+
+    mCrit.reset(mCFactory.create(mParameters.crit_name,mGP.get()));
+    mCrit->setRandomEngine(eng);
+
+    if (mCrit->nParameters() == mParameters.n_crit_params)
+      {
+	mCrit->setParameters(utils::array2vector(mParameters.crit_params,
+					       mParameters.n_crit_params));
+      }
+    else // If the number of parameters is different, use default.
+      {
+	if (mParameters.n_crit_params != 0)
+	  {
+	    FILE_LOG(logERROR) << "Expected " << mCrit->nParameters() 
+			       << " parameters. Got " 
+			       << mParameters.n_crit_params << " instead.";
+	  }
+        FILE_LOG(logINFO) << "Using default parameters for criteria.";
+      }
+  } // setCriteria
+
+
+} //namespace bayesopt
+

src/posterior_fixed.cpp

+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#include "log.hpp"
+#include "posterior_fixed.hpp"
+
+namespace bayesopt
+{
+
+
+  PosteriorFixed::PosteriorFixed(size_t dim, bopt_params parameters, 
+				 randEngine& eng):
+    PosteriorModel(dim,parameters,eng)
+  {
+    // Configure Surrogate and Criteria Functions
+    setSurrogateModel(eng);
+    setCriteria(eng);
+  }
+
+  PosteriorFixed::~PosteriorFixed()
+  { } // Default destructor
+
+
+  void PosteriorFixed::setSurrogateModel(randEngine& eng)
+  {
+    mGP.reset(NonParametricProcess::create(mDims,mParameters,
+					   mData,mMean,eng));
+  } // setSurrogateModel
+
+  void PosteriorFixed::setCriteria(randEngine& eng)
+  {
+    CriteriaFactory mCFactory;
+
+    mCrit.reset(mCFactory.create(mParameters.crit_name,mGP.get()));
+    mCrit->setRandomEngine(eng);
+
+    if (mCrit->nParameters() == mParameters.n_crit_params)
+      {
+	mCrit->setParameters(utils::array2vector(mParameters.crit_params,
+					       mParameters.n_crit_params));
+      }
+    else // If the number of parameters is different, use default.
+      {
+	if (mParameters.n_crit_params != 0)
+	  {
+	    FILE_LOG(logERROR) << "Expected " << mCrit->nParameters() 
+			       << " parameters. Got " 
+			       << mParameters.n_crit_params << " instead.";
+	  }
+        FILE_LOG(logINFO) << "Using default parameters for criteria.";
+      }
+  } // setCriteria
+
+
+} //namespace bayesopt
+

src/posterior_mcmc.cpp

+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+#include "log.hpp"
+#include "lhs.hpp"
+#include "posterior_mcmc.hpp"
+
+namespace bayesopt
+{
+  MCMCSampler::MCMCSampler(RBOptimizable* rbo, size_t dim, randEngine& eng):
+    mtRandom(eng), obj(new RBOptimizableWrapper(rbo))
+  {
+    mAlg = SLICE_MCMC;
+    mDims = dim;
+    nBurnOut = 500;
+    nSamples = 100;
+    mStepOut = true;
+    mSigma = svectord(dim,6);
+  };
+
+  MCMCSampler::~MCMCSampler()
+  {};
+
+  void MCMCSampler::randomJump(vectord &x)
+  {
+    randNFloat sample( mtRandom, normalDist(0,1) );
+    FILE_LOG(logERROR) << "Doing random jump.";
+    for(vectord::iterator it = x.begin(); it != x.end(); ++it)
+      {
+	*it = sample()*6;
+      }
+    FILE_LOG(logERROR) << "Likelihood: " << x << " | " << obj->evaluate(x);
+  }
+
+  //TODO: Include new algorithms when we add them.
+  void MCMCSampler::burnOut(vectord &x)
+  {
+    for(size_t i=0; i<nBurnOut; ++i)  
+      {
+	try
+	  {
+	    sliceSample(x);
+	  }
+	catch(std::runtime_error& e)
+	  {
+	    FILE_LOG(logERROR) << e.what();
+	    randomJump(x);
+	  }
+      }
+  }
+
+
+  void MCMCSampler::sliceSample(vectord &x)
+  {
+    randFloat sample( mtRandom, realUniformDist(0,1) );
+    size_t n = x.size();
+
+    std::vector<int> perms = utils::return_index_vector(0,n);
+
+    utils::randomPerms(perms, mtRandom);
+
+    for (size_t i = 0; i<n; ++i)
+      {
+	const size_t ind = perms[i];
+	const double sigma = mSigma(ind);
+
+	const double y_max = -obj->evaluate(x);
+	const double y = y_max+std::log(sample());  
+	//y = y_max * sample(), but we are in negative log space
+	//std::cout << y_max << "|||" << y << std::endl;
+
+	if (y == 0.0) 
+	  {
+	    throw std::runtime_error("Error in MCMC: Initial point"
+				     " out of support region."); 
+	  }
+
+	// Step out
+	const double x_cur = x(ind);
+	const double r = sample();
+	double xl = x_cur - r * sigma;
+	double xr = x_cur + (1-r)*sigma;
+
+	if (mStepOut)
+	  {
+	    x(ind) = xl;
+	    while (-obj->evaluate(x) > y) { x(ind) -= sigma; }
+	    xl = x(ind);
+
+	    x(ind) = xr;
+	    while (-obj->evaluate(x) > y) { x(ind) += sigma; }
+	    xr = x(ind);
+	  }
+
+	//Shrink
+	bool on_slice = false;
+	while (!on_slice)
+	  {
+	    x(ind) = (xr-xl) * sample() + xl;
+	    if (-obj->evaluate(x) < y)
+	      {
+		if      (x(ind) > x_cur)  xr = x(ind);
+		else if (x(ind) < x_cur)  xl = x(ind);
+                else throw std::runtime_error("Error in MCMC. Slice collapsed.");
+	      }
+	    else
+	      {
+		on_slice = true;
+	      }
+	  }
+      }
+  }
+
+  //TODO: Include new algorithms when we add them.
+  void MCMCSampler::run(vectord &Xnext)
+  {
+    randFloat sample( mtRandom, realUniformDist(0,1) );
+    if (nBurnOut>0) burnOut(Xnext);
+
+    mParticles.clear();
+    for(size_t i=0; i<nSamples; ++i)  
+      {
+	try
+	  {
+	    sliceSample(Xnext);
+	  }
+	catch(std::runtime_error& e)
+	  {
+	    FILE_LOG(logERROR) << e.what();
+	    randomJump(Xnext);
+	  }
+	mParticles.push_back(Xnext);
+      }
+    printParticles();
+  }
+
+  //////////////////////////////////////////////////////////////////////
+  MCMCModel::MCMCModel(size_t dim, bopt_params parameters, 
+		       randEngine& eng):
+    PosteriorModel(dim,parameters,eng), nParticles(10)
+  {
+    //TODO: Take nParticles from parameters
+    
+    // Configure Surrogate and Criteria Functions
+    setSurrogateModel(eng);
+    setCriteria(eng);
+
+    // Setting MCMC for kernel hyperparameters...
+    // We use the first GP as the "walker" to get the particles. Then,
+    // we will use a whole vector of GPs to avoid recomputing the
+    // kernel matrices after every data point.
+    size_t nhp = mGP[0].nHyperParameters();
+    kSampler.reset(new MCMCSampler(&mGP[0],nhp,eng));
+
+    kSampler->setNParticles(nParticles);
+    kSampler->setNBurnOut(300);
+  }
+
+  MCMCModel::~MCMCModel()
+  { } // Default destructor
+
+
+  void MCMCModel::updateHyperParameters()
+  {
+    // We take the initial point as the last particle from previous update.
+    size_t last = mGP.size()-1;
+    vectord lastTheta = mGP[last].getHyperParameters();
+
+    FILE_LOG(logDEBUG) << "Initial kernel parameters: " << lastTheta;
+    kSampler->run(lastTheta);
+    for(size_t i = 0; i<nParticles; ++i)
+      {
+	mGP[i].setHyperParameters(kSampler->getParticle(i));
+      }
+    FILE_LOG(logDEBUG) << "Final kernel parameters: " << lastTheta;	
+  };
+
+
+  void MCMCModel::setSurrogateModel(randEngine& eng)
+  {
+    for(size_t i = 0; i<nParticles; ++i)
+      {
+	mGP.push_back(NonParametricProcess::create(mDims,mParameters,
+						   mData,mMean,eng));
+      } 
+  } // setSurrogateModel
+
+  void MCMCModel::setCriteria(randEngine& eng)
+  {
+    CriteriaFactory mCFactory;
+
+    for(size_t i = 0; i<nParticles; ++i)
+      {
+	mCrit.push_back(mCFactory.create(mParameters.crit_name,&mGP[i]));
+	mCrit[i].setRandomEngine(eng);
+
+	if (mCrit[i].nParameters() == mParameters.n_crit_params)
+	  {
+	    mCrit[i].setParameters(utils::array2vector(mParameters.crit_params,
+						       mParameters.n_crit_params));
+	  }
+        else // If the number of parameters is different, use default.
+	  {
+	    if (mParameters.n_crit_params != 0)
+	      {
+		FILE_LOG(logERROR) << "Expected " << mCrit[i].nParameters() 
+				   << " parameters. Got " 
+				   << mParameters.n_crit_params << " instead.";
+	      }
+            FILE_LOG(logINFO) << "Using default parameters for criteria.";
+	  }
+      }
+  } // setCriteria
+
+
+
+
+}// namespace bayesopt
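
A condensed sketch of how MCMCModel::updateHyperParameters drives the
sampler above; gps stands in for the mGP particle vector and eng for the
random engine, both hypothetical here:

    // One GP acts as the walker: its negative log posterior is the target pdf.
    bayesopt::MCMCSampler sampler(&gps[0], gps[0].nHyperParameters(), eng);
    sampler.setNParticles(10);
    sampler.setNBurnOut(300);

    vectord theta = gps.back().getHyperParameters(); // start from the last particle
    sampler.run(theta);              // burn-in, then draw 10 slice samples
    for (size_t i = 0; i < 10; ++i)
      gps[i].setHyperParameters(sampler.getParticle(i));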

src/posteriormodel.cpp

 
 #include "log.hpp"
 #include "posteriormodel.hpp"
-#include "empiricalbayes.hpp"
-#include "mcmc.hpp"
+#include "posterior_fixed.hpp"
+#include "posterior_empirical.hpp"
+#include "posterior_mcmc.hpp"
 
 namespace bayesopt
 {
   {
     switch (params.l_type)
       {
-      case L_FIXED: // TODO:return new
+      case L_FIXED: return new PosteriorFixed(dim,params,eng);
       case L_EMPIRICAL: return new EmpiricalBayes(dim,params,eng);
       case L_DISCRETE: // TODO:return new
       case L_MCMC: return new MCMCModel(dim,params,eng);
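
With this commit the factory covers three of the four learning types
(L_DISCRETE is still a TODO). A sketch of the selection, assuming the
switch above lives in the usual PosteriorModel::create factory:

    bopt_params params = initialize_parameters_to_default();
    params.l_type = L_FIXED;         // PosteriorFixed: no hyperparameter learning
    // params.l_type = L_EMPIRICAL;  // EmpiricalBayes: ML/MAP point estimate
    // params.l_type = L_MCMC;       // MCMCModel: slice sampling over particles
    PosteriorModel* model = PosteriorModel::create(dim, params, eng);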

tests/testmcmc.cpp

 #include <boost/numeric/ublas/lu.hpp>
 #include "specialtypes.hpp"
 #include "cholesky.hpp"
-#include "mcmc.hpp"
+#include "posterior_mcmc.hpp"
 
 namespace bnu = boost::numeric::ublas;