1. Ruben Martinez-Cantin
  2. BayesOpt

Commits

Ruben Martinez-Cantin  committed df86915

Changing name of empirical bayes. Adding slice sampling.

  • Participants
  • Parent commits 1b3e1d6
  • Branches default

Comments (0)

Files changed (10)

File include/empiricalbayes.hpp

View file
+/** \file empiricalbayes.hpp
+    \brief Implements an empirical Bayesian nonparametric process with a 
+    ML, MAP or similar estimate of kernel parameters. */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#ifndef _EMPIRICAL_BAYES_HPP_
+#define _EMPIRICAL_BAYES_HPP_
+
+namespace bayesopt
+{
+
+  /** \addtogroup  LearningMethods */
+  /**@{*/
+
+
+  /**
+   * \brief Empirical Bayesian NonParametric process.
+   */
+  class ConditionalBayesProcess: public KernelRegressor, RBOptimizable
+  {
+  public:
+  
+
+} // namespace bayesopt
+
+#endif

File include/empiricalbayesprocess.hpp

View file
   /**
    * \brief Empirical Bayesian NonParametric process.
    */
-  class EmpiricalBayesProcess: public KernelRegressor, RBOptimizable
+  class ConditionalBayesProcess: public KernelRegressor, RBOptimizable
   {
   public:
-    EmpiricalBayesProcess(size_t dim, bopt_params parameters,
+    ConditionalBayesProcess(size_t dim, bopt_params parameters,
 			  const Dataset& data);
-    virtual ~EmpiricalBayesProcess();
+    virtual ~ConditionalBayesProcess();
 
     /** 
      * \brief Function that returns the prediction of the GP for a query point
 
 
 
-  inline double EmpiricalBayesProcess::evaluate(const vectord& x)
+  inline double ConditionalBayesProcess::evaluate(const vectord& x)
   { 
     mKernel.setHyperParameters(x);
     return evaluateKernelParams();

File include/gaussian_process.hpp

View file
   /**
    * \brief Standard zero mean gaussian process with noisy observations.
    */
-  class GaussianProcess: public EmpiricalBayesProcess
+  class GaussianProcess: public ConditionalBayesProcess
   {
   public:
     GaussianProcess(size_t dim, bopt_params params, const Dataset& data);

File include/hierarchical_gaussian_process.hpp

View file
   /**
    * \brief Virtual class for hierarchical Gaussian processes.
    */
-  class HierarchicalGaussianProcess: public EmpiricalBayesProcess
+  class HierarchicalGaussianProcess: public ConditionalBayesProcess
   {
   public:
     HierarchicalGaussianProcess(size_t dim, bopt_params params, const Dataset& data);

File include/mcmc.hpp

View file
     MCMCSampler(size_t n_samples = 500);
     virtual ~MCMCSampler();
 
-    newParticle(const vectord& past);
+    void burnOut(vectord &x);
+    void sliceSample(vectord &x);
+    void sampleParticles(const vectord &initial, bool burnout);
 
   private:
-    vecOfvec particles;
+    RBOptimizable* obj;
+    size_t mDims;
+    size_t nBurnOut;
+    size_t nSamples;
+    bool mStepOut;
+    vectord mSigma;
+    vecOfvec mParticles;
+  };
+
+  inline void MCMCSampler::burnOut(vectord &x)
+  {
+    for(size_t i=0; i<nBurnOut; ++i)  sliceSample(x);
   }
 
+  inline void MCMCSampler::sampleParticles(const vectord &initX, bool burnout)
+  {
+    vectord x = initX;
+    if (burnout) burnOut(x);
+
+    mParticles.clear();
+    for(size_t i=0; i<nSamples; ++i)  
+      {
+	sliceSample(x);
+	mParticles.push_back(x);
+      }
+    
+  }
+
+
 } //namespace bayesopt
 
 

File include/optimizekernel.hpp

View file
   class OptimizeKernel: public NLOPT_Optimization
   {
   public:
-    explicit OptimizeKernel(EmpiricalBayesProcess* npp):
+    explicit OptimizeKernel(ConditionalBayesProcess* npp):
       NLOPT_Optimization(), npp_(npp) {};
     virtual ~OptimizeKernel(){};
 
     }
     
   private:
-    EmpiricalBayesProcess* npp_;
+    ConditionalBayesProcess* npp_;
   };
 
 }

File src/empiricalbayesprocess.cpp

View file
 
 namespace bayesopt
 {
-  EmpiricalBayesProcess::EmpiricalBayesProcess(size_t dim, bopt_params parameters, 
+  ConditionalBayesProcess::ConditionalBayesProcess(size_t dim, bopt_params parameters, 
 					       const Dataset& data):
     KernelRegressor(dim,parameters,data)
   { 
     kOptimizer->setLimits(svectord(nhp,1e-10),svectord(nhp,100.));
   }
 
-  EmpiricalBayesProcess::~EmpiricalBayesProcess()
+  ConditionalBayesProcess::~ConditionalBayesProcess()
   {
     delete kOptimizer;
   }
 
 
-  void EmpiricalBayesProcess::updateKernelParameters()
+  void ConditionalBayesProcess::updateKernelParameters()
   {
     if (mLearnType == L_FIXED)
       {
       }
   };
 
-  double EmpiricalBayesProcess::evaluateKernelParams()
+  double ConditionalBayesProcess::evaluateKernelParams()
   { 
     switch(mLearnType)
       {
   }
 
 
-  double EmpiricalBayesProcess::negativeCrossValidation()
+  double ConditionalBayesProcess::negativeCrossValidation()
   {
    // This is a highly inefficient implementation, for comparison purposes.
     Dataset data(mData);

File src/gaussian_process.cpp

View file
   namespace ublas = boost::numeric::ublas;
 
   GaussianProcess::GaussianProcess(size_t dim, bopt_params params, const Dataset& data):
-    EmpiricalBayesProcess(dim, params, data)
+    ConditionalBayesProcess(dim, params, data)
   {
     mSigma = params.sigma_s;
     d_ = new GaussianDistribution();

File src/hierarchical_gaussian_process.cpp

View file
   HierarchicalGaussianProcess::HierarchicalGaussianProcess(size_t dim, 
 							   bopt_params params, 
 							   const Dataset& data):
-    EmpiricalBayesProcess(dim, params, data) {};
+    ConditionalBayesProcess(dim, params, data) {};
 
   double HierarchicalGaussianProcess::negativeTotalLogLikelihood()
   {

File src/mcmc.cpp

View file
+
+
+
+void MCMCSampler::sliceSample(vectord &x)
+{
+  randFloat sample( mtRandom, realUniformDist(0,1) );
+  size_t n = x.size();
+
+  std::vector<int> perms = utils::returnIndexVector(n);
+  utils::randomPerms(perms, mtRandom);
+
+  for (size_t i = 0; i<n; ++i)
+    {
+      size_t ind = perms[i];
+      double sigma = mSigma(ind);
+
+      double y_max = obj->evaluate(x);
+      double y = sample()*y_max;
+
+      // Step out
+      double x_cur = x(ind);
+      double r = sample();
+      double xl = x_cur - r * sigma;
+      double xr = x_cur + (1-r)*sigma;
+
+      if (mStepOut)
+	{
+	  x(ind) = xl;
+	  while (obj->evaluate(x) > y) x(ind) -= sigma;
+	  xl = x(ind);
+
+	  x(ind) = xr;
+	  while (obj->evaluate(x) > y) x(ind) += sigma;
+	  xr = x(ind);
+	}
+
+      //Shrink
+      bool on_slice = false;
+      while (!on_slice)
+	{
+	  x(ind) = (xr-xl) * sample() + xl;
+	  if (obj->evaluate(x) < y)
+	    {
+	      if      (x(ind) > x_cur)  xr = x(ind);
+	      else if (x(ind) < x_cur)  xl = x(ind);
+	      else throw std::runtime_error("Error in MCMC. Slice colapsed.");
+	    }
+	  else
+	    {
+	      on_slice = true;
+	    }
+	}
+    }
+}
+
+
+