Commits

Ruben Martinez-Cantin  committed 83f7745

Adding missing files

  • Participants
  • Parent commits 4bcd2ae

Comments (0)

Files changed (4)

 cmake_install.cmake
 Doxyfile
 install_manifest.txt
+nlopt/config.h
+GTAGS
+GPATH
+GRTAGS
 syntax:glob
 *.a
 *.o

File include/empiricalbayesprocess.hpp

+/** \file empiricalbayesprocess.hpp
+    \brief Implements an empirical Bayesian nonparametric process with a 
+    ML, MAP or similar estimate of kernel parameters. */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+
+#ifndef  _EMPIRICAL_BAYES_PROCESS_HPP_
+#define  _EMPIRICAL_BAYES_PROCESS_HPP_
+
+#include "nonparametricprocess.hpp"
+#include "inneroptimization.hpp"
+
+namespace bayesopt
+{
+
+  /** \addtogroup  NonParametricProcesses */
+  /**@{*/
+
+
+  /**
+   * \brief Empirical Bayesian NonParametric process.
+   */
+  class EmpiricalBayesProcess: public NonParametricProcess
+  {
+  public:
+    EmpiricalBayesProcess(size_t dim, bopt_params parameters);
+    virtual ~EmpiricalBayesProcess();
+
+    /** 
+     * \brief Function that returns the prediction of the GP for a query point
+     * in the hypercube [0,1].
+     * 
+     * @param query in the hypercube [0,1] to evaluate the Gaussian process
+     * @return pointer to the probability distribution.
+     */	
+    virtual ProbabilityDistribution* prediction(const vectord &query) = 0;
+		 		 
+    int updateKernelParameters();
+
+    /** 
+     * \brief Computes the score (eg:likelihood) of the kernel
+     * parameters.
+     * @param query set of parameters.
+     * @return score
+     */
+    double evaluateKernelParams(const vectord& query);
+
+
+  protected:
+    /** 
+     * \brief Computes the negative log likelihood of the data for all
+     * the parameters.
+     * @return value negative log likelihood
+     */
+    virtual double negativeTotalLogLikelihood() = 0;
+
+
+    /** 
+     * \brief Computes the negative log likelihood of the data for the
+     * kernel hyperparameters.
+     * @return value negative log likelihood
+     */
+    virtual double negativeLogLikelihood() = 0;
+
+  private:
+    /**
+     * Computes the negative score of the data using cross validation.
+     * @return negative score
+     */
+    double negativeCrossValidation();
+
+  private:
+    InnerOptimization* kOptimizer;
+  };
+
+  /**@}*/
+  
+} //namespace bayesopt
+
+#endif
+

File include/fullbayesprocess.hpp

-/** \file FullBayesProcess.hpp
+/** \file fullbayesprocess.hpp
     \brief Implements a fully Bayesian nonparametric process with a 
     sampling distribution over kernel parameters. */
 /*

File src/empiricalbayesprocess.cpp

+
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+
+#include "empiricalbayesprocess.hpp"
+#include "log.hpp"
+#include "optimizekernel.hpp"	
+
+
+namespace bayesopt
+{
+  EmpiricalBayesProcess::EmpiricalBayesProcess(size_t dim, bopt_params parameters):
+    NonParametricProcess(dim,parameters)
+  { 
+    kOptimizer = new OptimizeKernel(this);
+
+    //TODO: Generalize
+    if (parameters.l_type == L_ML)
+      {
+	kOptimizer->setAlgorithm(BOBYQA);    // local search to avoid underfitting
+      }
+    else
+      {
+	kOptimizer->setAlgorithm(COMBINED);
+      }
+    kOptimizer->setLimits(1e-10,100.);
+  }
+
+  EmpiricalBayesProcess::~EmpiricalBayesProcess()
+  {
+    delete kOptimizer;
+  }
+
+
+  int EmpiricalBayesProcess::updateKernelParameters()
+  {
+    int error = -1;
+    vectord optimalTheta = mKernel.getHyperParameters();
+
+    FILE_LOG(logDEBUG) << "Computing kernel parameters. Seed: " 
+		       << optimalTheta;
+
+    kOptimizer->run(optimalTheta);
+    error = mKernel.setHyperParameters(optimalTheta);
+
+    if (error)
+      {
+	FILE_LOG(logERROR) << "Error updating kernel parameters.";
+	exit(EXIT_FAILURE);
+      }   
+
+    FILE_LOG(logDEBUG) << "Final kernel parameters: " << optimalTheta;	
+    return error;
+  };
+
+  double EmpiricalBayesProcess::evaluateKernelParams(const vectord& query)
+  { 
+    int error = mKernel.setHyperParameters(query);
+    if (error) 
+      {
+	FILE_LOG(logERROR) << "Problem optimizing kernel parameters."; 
+	exit(EXIT_FAILURE);	
+      }
+
+    double result;
+    switch(mLearnType)
+      {
+      case L_ML:
+	result = negativeTotalLogLikelihood(); break;
+      case L_MAP:
+	// It is a minus because the prior is the positive and we want the negative.
+	result = negativeLogLikelihood()-mKernel.kernelLogPrior();
+	break;
+      case L_LOO:
+	result = negativeCrossValidation(); break;
+      default:
+	FILE_LOG(logERROR) << "Learning type not supported";
+      }	  
+    return result;
+  }
+
+
+  double EmpiricalBayesProcess::negativeCrossValidation()
+  {
+    // This is highly ineffient implementation for comparison purposes.
+    size_t n = mGPXX.size();
+    size_t last = n-1;
+    int error = 0;
+    double sum = 0.0;
+    vecOfvec tempXX(mGPXX);
+    vectord tempY(mGPY);
+    vectord tempM(mMeanV);
+    matrixd tempF(mFeatM);
+    for(size_t i = 0; i<n; ++i)
+      {
+	vectord x = mGPXX[0];  double y = mGPY(0);
+	double m = mMeanV(0);
+
+	mGPXX.erase(mGPXX.begin()); 
+	utils::erase(mGPY,mGPY.begin());
+	utils::erase(mMeanV,mMeanV.begin());
+	utils::erase_column(mFeatM,0);
+
+	precomputeSurrogate();
+	ProbabilityDistribution* pd = prediction(x);
+	sum += log(pd->pdf(y));
+	mGPXX.push_back(x);     
+	mGPY.resize(mGPY.size()+1);  mGPY(mGPY.size()-1) = y;
+	mMeanV.resize(mGPY.size());  mMeanV(mGPY.size()-1) = m;
+	mFeatM.resize(mFeatM.size1(),mFeatM.size2()+1);  
+	mFeatM = tempF;
+      }
+      std::cout << "End" << mGPY.size();
+    return -sum;
+  }
+
+} // namespace bayesopt