Commits

Ruben Martinez-Cantin committed e58ad58

Cleaning files. Unifying interface.

Comments (0)

Files changed (61)

 set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} 
 		      ${CMAKE_CURRENT_SOURCE_DIR}/cmake)
 
-set(CMAKE_LIBRARY_PATH ${CMAKE_LIBRARY_PATH} /opt/local/lib /opt/local/Library)
-set(CMAKE_INCLUDE_PATH ${CMAKE_INCLUDE_PATH} /opt/local/include
-                                             /opt/local/Library)
-set(CMAKE_PROGRAM_PATH ${CMAKE_PROGRAM_PATH} /opt/local/bin/ /opt/local/Library)
+if(APPLE)
+  # For macports or similar systems
+  set(CMAKE_LIBRARY_PATH ${CMAKE_LIBRARY_PATH} /opt/local/lib /opt/local/Library)
+  set(CMAKE_INCLUDE_PATH ${CMAKE_INCLUDE_PATH} /opt/local/include
+                                               /opt/local/Library)
+  set(CMAKE_PROGRAM_PATH ${CMAKE_PROGRAM_PATH} /opt/local/bin/ /opt/local/Library)
+endif()
 
 
 if(NOT CMAKE_BUILD_TYPE)
 endif(BAYESOPT_BUILD_EXAMPLES)
 
 INSTALL(FILES 
-  ./include/bayesoptcont.hpp 
-  ./include/bayesoptdisc.hpp 
-  ./wrappers/bayesoptwpr.h 
+  ./include/bayesopt.hpp 
+  ./wrappers/bayesopt.h 
   DESTINATION include
 )
 
   ARCHIVE DESTINATION lib
 )
 
+
 IF(BAYESOPT_PYTHON_INTERFACE)
   INCLUDE(PythonMagic)
+#  INCLUDE(FindPython)
   INCLUDE_DIRECTORIES(${PYTHON_INCLUDE_PATH})
 
   ADD_LIBRARY(esopt MODULE ${BAYESOPT_SRCS}   
   IF(NOT WIN32)
     # Kind of a hack but it works
     SET_TARGET_PROPERTIES(esopt PROPERTIES PREFIX "bay" SUFFIX ".so")
+  ELSE()
+    SET_TARGET_PROPERTIES(esopt PROPERTIES PREFIX "libbay" SUFFIX ".dll")
   ENDIF()
 
   INSTALL(

cmake/FindPython.cmake

 if (NOT PYTHON_EXEC)
     find_program(PYTHON_EXEC "python${Python_FIND_VERSION}"
         PATHS
-        [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.1\\InstallPath]
-        [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.0\\InstallPath]
+        # [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.1\\InstallPath]
+        # [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\3.0\\InstallPath]
         [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.7\\InstallPath]
         [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.6\\InstallPath]
         [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.5\\InstallPath]

devel/bayesoptcont.hpp

+/**  \file bayesoptcont.hpp \brief Continuous Bayesian optimization */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#ifndef  _BAYESOPTCONT_HPP_
+#define  _BAYESOPTCONT_HPP_
+
+#include "boundingbox.hpp"
+#include "bayesoptbase.hpp"
+#include "inneroptimization.hpp"
+
+namespace bayesopt  {
+
+  /** \addtogroup BayesOpt */
+  /**@{*/
+
+  /**
+   * \brief Bayesian optimization using different non-parametric
+   * processes as distributions over surrogate functions. The
+   * exploration space is assumed to be continuous and box-bounded.
+   */
+  class BAYESOPT_API ContinuousModel: public BayesOptBase
+  {
+  public:
+   
+    /** Default constructor */
+    ContinuousModel();
+
+    /** 
+     * Constructor
+     * @param dim number of input dimensions
+     * @param params set of parameters (see parameters.h)
+     */
+    ContinuousModel(size_t dim, bopt_params params);
+
+    /**  Default destructor  */
+    virtual ~ContinuousModel();
+  
+    /** Initialize the optimization process.  */
+    void initializeOptimization();
+
+    /** 
+     * Once the optimization has been performed, return the optimal
+     * point.
+     */
+    vectord getFinalResult();
+
+    /** 
+     * \brief Sets the bounding box. 
+     *
+     * @param lowerBound vector with the lower bounds of the hypercube
+     * @param upperBound vector with the upper bounds of the hypercube
+     * 
+     * @return 0 if terminate successfully, nonzero otherwise
+     */
+    int setBoundingBox( const vectord &lowerBound,
+			const vectord &upperBound);
+
+
+  protected:
+
+    /** 
+     * \brief Print data for every step according to the verbose level
+     * 
+     * @param iteration iteration number 
+     * @param xNext next point
+     * @param yNext function value at next point
+     */
+    void plotStepData(size_t iteration, const vectord& xNext,
+		      double yNext);
+
+    /** Selects the initial set of points to build the surrogate model. */
+    void sampleInitialPoints();
+
+    /** Sample a single point in the input space. Used for epsilon greedy exploration. */
+    vectord samplePoint();
+
+    /** 
+     * \brief Returns the corresponding criteria of a series of queries
+     * in the hypercube [0,1] in order to choose the best point to try
+     * the next iteration.
+     * 
+     * @param query point in the hypercube [0,1] to evaluate the
+     * Gaussian process
+     * 
+     * @return negative criteria (Expected Improvement, LCB,
+     * I-optimality, etc.).
+     */	
+    //double evaluate( const vectord &query )
+    //{  return evaluateCriteria(query); }; 
+
+    
+    /** 
+     * \brief Wrapper for the target function normalized in the hypercube
+     * [0,1]
+     * @param query point to evaluate in [0,1] hypercube
+     * @return actual return value of the target function
+     */
+    double evaluateSampleInternal( const vectord &query );
+    
+    /** 
+     * \brief Wrapper of the innerOptimization class to find the optimal
+     * point according to the criteria.
+     * @param xOpt optimal point
+     */
+    void findOptimal(vectord &xOpt);
+
+  private:
+    utils::BoundingBox<vectord> *mBB;      ///< Bounding Box (input space limits)
+    NLOPT_Optimization* cOptimizer;
+  };
+  
+  /**@}*/
+
+
+  inline double ContinuousModel::evaluateSampleInternal( const vectord &query )
+  { 
+    return evaluateSample(mBB->unnormalizeVector(query));
+  }; // evaluateSampleInternal
+
+  inline void ContinuousModel::findOptimal(vectord &xOpt)
+  { cOptimizer->run(xOpt); };
+
+  inline vectord ContinuousModel::samplePoint()
+  {	    
+    randFloat drawSample(mEngine,realUniformDist(0,1));
+    vectord Xnext(mDims);    
+    for(vectord::iterator x = Xnext.begin(); x != Xnext.end(); ++x)
+      {
+	*x = drawSample();
+      }
+    
+    // for (size_t i = 0; i<Xnext.size(); ++i)
+    //   {
+    // 	 Xnext(i) = drawSample();
+    //   } 
+
+    return Xnext;
+};
+
+}  //namespace bayesopt
+
+
+#endif

devel/bayesoptdisc.hpp

+/**  \file bayesoptdisc.hpp \brief Discrete Bayesian optimization */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#ifndef  _BAYESOPTDISC_HPP_
+#define  _BAYESOPTDISC_HPP_
+
+#include "bayesoptbase.hpp"
+
+namespace bayesopt
+{
+  /** \addtogroup BayesOpt */
+  /**@{*/
+
+
+  /**
+   * \brief Sequential Kriging Optimization using different non-parametric 
+   * processes as surrogate (kriging) functions. 
+   */
+  class BAYESOPT_API DiscreteModel : public BayesOptBase
+  {
+  public:
+
+    /** 
+     * Constructor
+     * @param validSet  Set of potential inputs
+     */
+    DiscreteModel(const vecOfvec &validSet );
+
+    /** 
+     * Constructor
+     * @param validSet  Set of potential inputs
+     * @param params set of parameters (see parameters.h)
+     */
+    DiscreteModel(const vecOfvec &validSet, bopt_params params);
+    
+    /** Default destructor  */
+    virtual ~DiscreteModel();
+
+    /** Initialize the optimization process. */
+    void initializeOptimization();
+
+    /** Once the optimization has been performed, return the optimal point. */
+    vectord getFinalResult();
+
+    
+  protected:
+    
+    
+    /** Print data for every step according to the verbose level */
+    void plotStepData(size_t iteration, const vectord& xNext,
+		     double yNext);
+
+    /** Selects the initial set of points to build the surrogate model. */
+    void sampleInitialPoints();
+
+    /** Sample a single point in the input space. Used for epsilon greedy exploration. */
+    vectord samplePoint();
+
+    /** 
+     * \brief Wrapper for the target function normalized in the hypercube
+     * [0,1]
+     * @param query point to evaluate in [0,1] hypercube
+     * @return actual return value of the target function
+     */
+    double evaluateSampleInternal( const vectord &query ); 
+
+    void findOptimal(vectord &xOpt);
+
+  protected:
+    vecOfvec mInputSet;               ///< List of input points
+  };
+
+
+  /**@}*/
+  
+  inline vectord DiscreteModel::samplePoint()
+  {   
+    randInt sample(mEngine, intUniformDist(0,mInputSet.size()-1));
+    return mInputSet[sample()];
+  };
+
+  inline double DiscreteModel::evaluateSampleInternal( const vectord &query )
+  { return evaluateSample(query); }; 
+
+} //namespace bayesopt
+
+
+#endif

devel/empiricalbayes.hpp

+/** \file empiricalbayes.hpp
+    \brief Implements an empirical Bayesian nonparametric process with a 
+    ML, MAP or similar estimate of kernel parameters. */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#ifndef _EMPIRICAL_BAYES_HPP_
+#define _EMPIRICAL_BAYES_HPP_
+
+namespace bayesopt
+{
+
+  /** \addtogroup  LearningMethods */
+  /**@{*/
+
+
+  /**
+   * \brief Empirical Bayesian NonParametric process.
+   */
+  class ConditionalBayesProcess: public KernelRegressor, RBOptimizable
+  {
+  public:
+  
+  };  // NOTE(review): class body truncated in this commit — incomplete WIP
+
+  /**@}*/
+
+} // namespace bayesopt
+
+#endif

devel/empiricalbayesprocess.cpp

+
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+
+#include "empiricalbayesprocess.hpp"
+#include "log.hpp"
+//#include "optimizekernel.hpp"	
+
+
+namespace bayesopt
+{
+  ConditionalBayesProcess::ConditionalBayesProcess(size_t dim, bopt_params parameters, 
+						   const Dataset& data, randEngine& eng):
+    KernelRegressor(dim,parameters,data,eng)
+  { 
+    // if (mLearnType == L_BAYES)
+    //   {
+    // 	FILE_LOG(logERROR) << "Empirical Bayes model and full Bayes learning are incompatible.";
+    // 	throw std::invalid_argument("Trying full Bayes learning for an empirical Bayes model.");
+    //   }
+
+    size_t nhp = mKernel.nHyperParameters();
+    kOptimizer = new NLOPT_Optimization(this,nhp);
+
+    //TODO: Generalize
+    if (parameters.sc_type == SC_ML)
+      {
+	kOptimizer->setAlgorithm(BOBYQA);    // local search to avoid underfitting
+      }
+    else
+      {
+	kOptimizer->setAlgorithm(COMBINED);
+      }
+    kOptimizer->setLimits(svectord(nhp,1e-10),svectord(nhp,100.));
+  }
+
+  ConditionalBayesProcess::~ConditionalBayesProcess()
+  {
+    delete kOptimizer;
+  }
+
+
+  void ConditionalBayesProcess::updateKernelParameters()
+  {
+    // if (mLearnType == L_FIXED)
+    //   {
+    // 	FILE_LOG(logDEBUG) << "Fixed hyperparameters. Not learning";
+    //   }
+    // else
+    //   {
+	vectord optimalTheta = mKernel.getHyperParameters();
+	
+	FILE_LOG(logDEBUG) << "Initial kernel parameters: " << optimalTheta;
+	kOptimizer->run(optimalTheta);
+	mKernel.setHyperParameters(optimalTheta);
+	FILE_LOG(logDEBUG) << "Final kernel parameters: " << optimalTheta;	
+      // }
+  };
+
+  double ConditionalBayesProcess::evaluateKernelParams()
+  { 
+    switch(mScoreType)
+      {
+      case SC_MTL:
+	return negativeTotalLogLikelihood();
+      case SC_ML:
+	return negativeLogLikelihood();
+      case SC_MAP:
+	// It is a minus because the prior is the positive and we want
+	// the negative.
+	return negativeLogLikelihood()-mKernel.kernelLogPrior();
+      case SC_LOOCV:
+	return negativeCrossValidation(); 
+      default:
+	FILE_LOG(logERROR) << "Learning type not supported";
+	throw std::invalid_argument("Learning type not supported");
+      }	  
+  }
+
+
+  double ConditionalBayesProcess::negativeCrossValidation()
+  {
+    // This is a highly inefficient implementation for comparison purposes.
+    Dataset data(mData);
+
+    size_t n = data.getNSamples();
+    size_t last = n-1;
+    int error = 0;
+    double sum = 0.0;
+
+    matrixd tempF(mMean.mFeatM);
+
+
+    // We take the first element, use it for validation and then paste
+    // it at the end. Thus, after every iteration, the first element
+    // is different and, at the end, all the elements should have
+    // rotated.
+    for(size_t i = 0; i<n; ++i)
+      {
+	// Take the first element
+	const double y = data.getSampleY(0);
+	const vectord x = data.getSampleX(0);
+
+	// Remove it for cross validation
+	data.mX.erase(data.mX.begin()); 
+	utils::erase(data.mY,data.mY.begin());
+	utils::erase_column(mMean.mFeatM,0);
+
+	// Compute the cross validation
+	computeCholeskyCorrelation();
+	precomputePrediction(); 
+	ProbabilityDistribution* pd = prediction(x);
+	sum += log(pd->pdf(y));
+
+	//Paste it back at the end
+	data.addSample(x,y);
+	mMean.mFeatM.resize(mMean.mFeatM.size1(),mMean.mFeatM.size2()+1);  
+	mMean.mFeatM = tempF;
+      }
+    std::cout << "End" << data.getNSamples();
+    return -sum;   //Because we are minimizing.
+  }
+
+} // namespace bayesopt

devel/empiricalbayesprocess.hpp

+/** \file empiricalbayesprocess.hpp
+    \brief Implements an empirical Bayesian nonparametric process with a 
+    ML, MAP or similar estimate of kernel parameters. */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+
+#ifndef  _EMPIRICAL_BAYES_PROCESS_HPP_
+#define  _EMPIRICAL_BAYES_PROCESS_HPP_
+
+#include "kernelregressor.hpp"
+#include "inneroptimization.hpp"
+
+namespace bayesopt
+{
+
+  /** \addtogroup  NonParametricProcesses */
+  /**@{*/
+
+
+  /**
+   * \brief Empirical Bayesian NonParametric process.
+   */
+  class ConditionalBayesProcess: public RBOptimizable
+  {
+  public:
+    ConditionalBayesProcess(size_t dim, bopt_params parameters,
+			  const Dataset& data, randEngine& eng);
+    virtual ~ConditionalBayesProcess();
+
+    /** 
+     * \brief Function that returns the prediction of the GP for a query point
+     * in the hypercube [0,1].
+     * 
+     * @param query in the hypercube [0,1] to evaluate the Gaussian process
+     * @return pointer to the probability distribution.
+     */	
+    virtual ProbabilityDistribution* prediction(const vectord &query) = 0;
+		 		 
+    /** 
+     * \brief Updates the kernel parameters according to a point
+     * estimate (ML, MAP, etc.)
+     */
+    void updateKernelParameters();
+
+    /** 
+     * \brief Computes the score (eg:likelihood) of the kernel
+     * parameters.  
+     * Warning: To evaluate the score, it is necessary to change the parameters
+     * @param x set of parameters.  
+     * @return score
+     */
+    double evaluate(const vectord &x);
+
+    /** 
+     * \brief Computes the score (eg:likelihood) of the current kernel
+     * parameters.
+     * @param query set of parameters.
+     * @return score
+     */
+    double evaluateKernelParams();
+
+
+  protected:
+    /** 
+     * \brief Computes the negative log likelihood of the data for all
+     * the parameters.
+     * @return value negative log likelihood
+     */
+    virtual double negativeTotalLogLikelihood() = 0;
+
+
+    /** 
+     * \brief Computes the negative log likelihood of the data for the
+     * kernel hyperparameters.
+     * @return value negative log likelihood
+     */
+    virtual double negativeLogLikelihood() = 0;
+
+  private:
+    /**
+     * Computes the negative score of the data using cross validation.
+     * @return negative score
+     */
+    double negativeCrossValidation();
+
+  private:
+    NLOPT_Optimization* kOptimizer;
+  };
+
+
+
+  inline double ConditionalBayesProcess::evaluate(const vectord& x)
+  { 
+    mKernel.setHyperParameters(x);
+    return evaluateKernelParams();
+  };
+
+
+  /**@}*/
+  
+} //namespace bayesopt
+
+#endif
+

devel/fullbayesprocess.cpp

+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#include "log.hpp"
+#include "fullbayesprocess.hpp"
+
+namespace bayesopt
+{
+
+  namespace ublas = boost::numeric::ublas; 
+
+  FullBayesProcess::FullBayesProcess(size_t dim, bopt_params params, const Dataset& data, randEngine& eng):
+    KernelRegressor(dim,params,data),mGeneralParams(params),mWeights(N_PROC)
+  {
+    d_ = new MixtureDistribution(N_PROC);
+    initializeKernelParameters();
+  };
+
+  FullBayesProcess::~FullBayesProcess()
+  {
+    delete d_;
+  };
+
+  ProbabilityDistribution* FullBayesProcess::prediction(const vectord &query)
+  {
+    for(size_t i=0;i<N_PROC;++i)
+      {
+	ProbabilityDistribution* pd = mVProc[i]->prediction(query);
+	d_->setComponent(i,pd,mWeights(i));
+      }
+    return d_;
+  }
+
+  void FullBayesProcess::precomputePrediction()
+  {
+    updateKernelParameters();
+    for(size_t i=0;i<N_PROC;++i)
+      {
+	mVProc[i]->precomputePrediction();
+      }
+  }
+
+
+  //   MixtureDistribution* dist(N_PROC);
+
+  //   for (size_t ii = 0; ii < N_PROC; ++ii)
+  //     { 
+	
+  // 	vectord th = column(kTheta,ii);
+  // 	std::copy(th.begin(),th.end(),newParams.kernel.hp_mean);
+  // 	mVProc.push_back(KernelRegressor::create(dim_,newParams));
+  //     }
+    
+  //   //Sum of Gaussians?
+  // };
+
+  int FullBayesProcess::initializeKernelParameters()
+  {
+    double w = 1.0/static_cast<double>(N_PROC);
+    mWeights = svectord(N_PROC,w);
+
+    //All the inner processes share the same parameters except the
+    //kernel parameters and the learning type.
+    bopt_params newParams = mGeneralParams;
+    newParams.learning_type = L_FIXED;
+    size_t nhp = mGeneralParams.kernel.n_hp;
+    matrixd kTheta(nhp,N_PROC);
+    randEngine reng(200u);
+    lhs(kTheta,reng);
+
+    for (size_t ii = 0; ii < N_PROC; ++ii)
+      { 
+	vectord th = column(kTheta,ii);
+	std::copy(th.begin(),th.end(),newParams.kernel.hp_mean);
+	mVProc.push_back(NonParametricProcess::create(dim_,newParams));
+      }
+    
+    return 0;
+  }
+
+  void FullBayesProcess::updateKernelParameters()
+  {
+    double sum = 0.0;
+    for (size_t ii = 0; ii < N_PROC; ++ii)
+      { 
+	double lik = mVProc[ii]->evaluateKernelParams();
+	mWeights(ii) *= lik;
+	sum += mWeights(ii);
+      }
+    mWeights /= sum;  //Normalization
+  };

devel/fullbayesprocess.hpp

+/** \file fullbayesprocess.hpp
+    \brief Implements a fully Bayesian nonparametric process with a 
+    sampling distribution over kernel parameters. */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+
+#ifndef  _FULL_BAYES_PROCESS_HPP_
+#define  _FULL_BAYES_PROCESS_HPP_
+
+namespace bayesopt
+{
+
+  /** \addtogroup  NonParametricProcesses */
+  /**@{*/
+
+
+  /**
+   * \brief Full Bayesian NonParametric process.
+   */
+  class FullBayesProcess: public KernelRegressor
+  {
+  public:
+    static const size_t N_PROC = 10;
+
+    FullBayesProcess(size_t dim, bopt_params params, const Dataset& data, randEngine& eng);
+    virtual ~FullBayesProcess();
+
+    /** 
+     * \brief Function that returns the prediction of the GP for a query point
+     * in the hypercube [0,1].
+     * 
+     * @param query in the hypercube [0,1] to evaluate the Gaussian process
+     * @return pointer to the probability distribution.
+     */	
+    ProbabilityDistribution* prediction(const vectord &query);
+
+    /** 
+     * \brief Updates the kernel parameters according to a Bayesian
+     * estimate (grid sampling, MCMC, etc.)
+     */
+    void updateKernelParameters();
+
+  private:
+    std::vector<KernelRegressor*>   mVProc;
+    vectord                            mWeights;
+    
+    MixtureDistribution* d_;      //!< Predictive distributions
+  };
+
+
+  /**@}*/
+
+} //namespace bayesopt
+
+
+#endif
+
+
+
+void MCMCSampler::sliceSample(vectord &x)
+{
+  randFloat sample( mtRandom, realUniformDist(0,1) );
+  size_t n = x.size();
+
+  std::vector<int> perms = utils::return_index_vector(n);
+  utils::randomPerms(perms, mtRandom);
+
+  for (size_t i = 0; i<n; ++i)
+    {
+      size_t ind = perms[i];
+      double sigma = mSigma(ind);
+
+      double y_max = obj->evaluate(x);
+      double y = sample()*y_max;
+
+      // Step out
+      double x_cur = x(ind);
+      double r = sample();
+      double xl = x_cur - r * sigma;
+      double xr = x_cur + (1-r)*sigma;
+
+      if (mStepOut)
+	{
+	  x(ind) = xl;
+	  while (obj->evaluate(x) > y) x(ind) -= sigma;
+	  xl = x(ind);
+
+	  x(ind) = xr;
+	  while (obj->evaluate(x) > y) x(ind) += sigma;
+	  xr = x(ind);
+	}
+
+      //Shrink
+      bool on_slice = false;
+      while (!on_slice)
+	{
+	  x(ind) = (xr-xl) * sample() + xl;
+	  if (obj->evaluate(x) < y)
+	    {
+	      if      (x(ind) > x_cur)  xr = x(ind);
+	      else if (x(ind) < x_cur)  xl = x(ind);
+	      else throw std::runtime_error("Error in MCMC. Slice collapsed.");
+	    }
+	  else
+	    {
+	      on_slice = true;
+	    }
+	}
+    }
+}
+
+
+void MCMCSampler::sampleParticles(const vectord &initX, bool burnout)
+{
+  vectord x = initX;
+  if (burnout) burnOut(x);
+  
+  mParticles.clear();
+  for(size_t i=0; i<nSamples; ++i)  
+    {
+      sliceSample(x);
+      mParticles.push_back(x);
+    }
+}
+
+/**  \file mcmc.hpp \brief Markov Chain Monte Carlo sampling module */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+
+#ifndef  _MCMC_HPP_
+#define  _MCMC_HPP_
+
+
+namespace bayesopt {
+
+  class MCMCSampler
+  {
+  public:
+    MCMCSampler(size_t n_samples = 500);
+    virtual ~MCMCSampler();
+
+    void burnOut(vectord &x);
+    void sliceSample(vectord &x);
+    void sampleParticles(const vectord &initial, bool burnout);
+
+  private:
+    RBOptimizable* obj;
+    size_t mDims;
+    size_t nBurnOut;
+    size_t nSamples;
+    bool mStepOut;
+    vectord mSigma;
+    vecOfvec mParticles;
+  };
+
+  inline void MCMCSampler::burnOut(vectord &x)
+  {
+    for(size_t i=0; i<nBurnOut; ++i)  sliceSample(x);
+  }
+
+} //namespace bayesopt
+
+
+#endif

devel/mixture_distribution.cpp

+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#include "mixture_distribution.hpp"
+
+MixtureDistribution::MixtureDistribution(size_t n):
+  ProbabilityDistribution(),mPD(n,NULL),mW(n)
+{
+  //TODO: Fix this hack
+};
+
+MixtureDistribution::~MixtureDistribution(){};
+
+double MixtureDistribution::pdf(double x)
+{
+  size_t n = mW.size();
+  vectord res(n);
+  for(size_t i=0;i<n;++i)
+    {
+      res(i) = mPD[i]->pdf(x);
+    }
+  return inner_prod(mW,res);
+};
+
+double MixtureDistribution::negativeExpectedImprovement(double min,size_t g)
+{
+  size_t n = mW.size();
+  vectord res(n);
+  for(size_t i=0;i<n;++i)
+    {
+      res(i) = mPD[i]->negativeExpectedImprovement(min,g);
+    }
+  return inner_prod(mW,res);
+};
+
+double MixtureDistribution::lowerConfidenceBound(double beta)
+{
+  size_t n = mW.size();
+  vectord res(n);
+  for(size_t i=0;i<n;++i)
+    {
+      res(i) = mPD[i]->lowerConfidenceBound(beta);
+    }
+  return inner_prod(mW,res);
+};
+
+double MixtureDistribution::negativeProbabilityOfImprovement(double min,
+							     double epsilon)
+{
+  size_t n = mW.size();
+  vectord res(n);
+  for(size_t i=0;i<n;++i)
+    {
+      res(i) = mPD[i]->negativeProbabilityOfImprovement(min,epsilon);
+    }
+  return inner_prod(mW,res);
+};
+
+double MixtureDistribution::sample_query(randEngine& eng)
+{
+  size_t n = mW.size();
+  vectord cumw(n);
+  
+  std::partial_sum(mW.begin(), mW.end(), cumw.begin(), 
+		   std::plus<double>());
+  
+  randFloat sampleUniform(eng, realUniformDist(0,1));
+  double u = sampleUniform();
+
+  for (size_t i=0; i < cumw.size(); ++i)
+    {
+      if (u < cumw(i))
+	return mPD[i]->sample_query(eng);
+    }
+  return mPD[0]->sample_query(eng); //just in case...
+};
+
+double MixtureDistribution::getMean()
+{
+  size_t n = mW.size();
+  vectord res(n);
+  for(size_t i=0;i<n;++i)
+    {
+      res(i) = mPD[i]->getMean();
+    }
+  return inner_prod(mW,res);
+};
+
+double MixtureDistribution::getStd()
+{
+  size_t n = mW.size();
+  vectord res(n);
+  for(size_t i=0;i<n;++i)
+    {
+      res(i) = mPD[i]->getStd();
+    }
+  return inner_prod(mW,res);
+};
+
+double MixtureDistribution::getGaussianStd()
+{
+  // Spread of a single Gaussian approximating the mixture: combines
+  // each component's spread with the squared distance of its mean
+  // from the overall mixture mean.
+  double totalMean = getMean();
+  size_t n = mW.size();
+  vectord res(n);
+  for(size_t i=0;i<n;++i)
+    {
+      double sigma = mPD[i]->getStd();
+      double mm = mPD[i]->getMean() - totalMean; 
+      // NOTE(review): exact moment matching would use sigma*sigma
+      // (the variance) here, and the final result is returned without
+      // a sqrt despite the function name -- confirm whether callers
+      // expect a std or a variance-like quantity.
+      res(i) = mm*mm + sigma; 
+    }
+  return inner_prod(mW,res) ; 
+}

devel/mixture_distribution.hpp

+/** \file mixture_distribution.hpp 
+    \brief Mixture of gaussians probability distribution */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+
+#ifndef __MIXTURE_DISTRIBUTION_HPP__
+#define __MIXTURE_DISTRIBUTION_HPP__
+
+#include "prob_distribution.hpp" 
+
+/** \brief Weighted mixture of probability distributions.
+ *  Components are stored as raw pointers and are NOT owned by the
+ *  mixture; the caller manages their lifetime. */
+class MixtureDistribution: public ProbabilityDistribution
+{
+public:
+  MixtureDistribution(size_t n);
+  virtual ~MixtureDistribution();
+
+  /** Sets component i, keeping its current weight. */
+  void setComponent(size_t i, ProbabilityDistribution* d)
+  { mPD[i] = d;};
+  
+  /** Sets component i together with its mixture weight.
+   *  (Fixed: the declaration previously ended with a stray ';'
+   *  before the inline body, which did not compile.) */
+  void setComponent(size_t i, ProbabilityDistribution* d,
+		    double w)
+  {mPD[i] = d;  mW[i] = w;};
+
+  /** Replaces the whole weight vector (weights should sum to 1). */
+  void setWeights(const vectord& w) {mW = w;};
+  /** 
+   * \brief Probability density function
+   * @param x query point
+   * @return probability
+   */
+  double pdf(double x);
+
+  /** 
+   * \brief Expected Improvement algorithm for minimization
+   * @param min  minimum value found
+   * @param g exponent (used for annealing)
+   *
+   * @return negative value of the expected improvement
+   */
+  double negativeExpectedImprovement(double min, size_t g);
+
+  /** 
+   * \brief Lower confidence bound. Can be seen as the inverse of the Upper 
+   * confidence bound
+   * @param beta std coefficient (used for annealing)
+   * @return value of the lower confidence bound
+   */
+  double lowerConfidenceBound(double beta);
+
+  /** 
+   * Probability of improvement algorithm for minimization
+   * @param min  minimum value found
+   * @param epsilon minimum improvement margin
+   * 
+   * @return negative value of the probability of improvement
+   */
+  double negativeProbabilityOfImprovement(double min,
+					  double epsilon);
+
+  /** 
+   * Sample outcome according to the marginal distribution at the query point.
+   * @param eng boost.random engine
+   * 
+   * @return outcome
+   */
+  double sample_query(randEngine& eng);
+
+  double getMean();
+  double getStd();
+  double getGaussianStd();
+
+private:
+  std::vector<ProbabilityDistribution*> mPD;   ///< Mixture components (not owned)
+  vectord mW;                                  ///< Component weights
+};
+
+
+
+#endif

devel/nlopt_optimization.cpp

+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+#include <cmath>
+#include <nlopt.h>
+#include "nloptwpr.h"
+#include "parameters.h"
+#include "log.hpp"
+#include "inneroptimization.hpp"
+
+namespace bayesopt
+{
+  /** Logs a descriptive message for NLOPT failure codes.
+   *  Success codes (>= 0) and unknown codes are silently ignored. */
+  void checkNLOPTerror(nlopt_result errortype)
+  {
+    switch(errortype)
+      {
+      case -1: FILE_LOG(logERROR) << "NLOPT: General failure"; break;
+      case -2: FILE_LOG(logERROR) << "NLOPT: Invalid arguments. Check bounds."; break;
+      case -3: FILE_LOG(logERROR) << "NLOPT: Out of memory"; break;
+      case -4: FILE_LOG(logERROR) << "NLOPT Warning: Potential roundoff error. " 
+				  << "In general, this can be ignored."; break;
+      case -5: FILE_LOG(logERROR) << "NLOPT: Force stop."; break;
+      default: ;
+      }
+  }
+
+  /** Default setup: global DIRECT search on the unit hypercube. */
+  NLOPT_Optimization::NLOPT_Optimization()
+  { 
+    alg = DIRECT;    mDown = 0.;    mUp = 1.;    maxEvals = MAX_INNER_EVALUATIONS;
+    rbobj = NULL;  // was left uninitialized; run() asserts on it, so make the check meaningful
+  };
+
+
+  /** Optimizes the wrapped objective starting from Xnext.
+   *  @param Xnext input: initial guess; output: optimum found
+   *  @return NLOPT result code cast to int (negative on failure)
+   */
+  int NLOPT_Optimization::run(vectord &Xnext)
+  {   
+    void *objPointer = static_cast<void *>(rbobj);
+    int n = static_cast<int>(Xnext.size());
+    int error;
+
+    // The objective (rbobj) must have been set before calling run().
+    assert(objPointer != NULL);
+    error = send_to_nlopt_optimize(&Xnext(0), n, objPointer);
+
+    return error;
+  } // run (uBlas)
+
+
+
+  /** Runs the selected NLOPT algorithm on a plain C array.
+   *  @param x input: initial point; output: optimum (size n)
+   *  @param n problem dimension (at most 128, see bounds arrays)
+   *  @param objPointer opaque pointer forwarded to the NLOPT callbacks
+   *  @return NLOPT result code cast to int (negative on failure)
+   */
+  int NLOPT_Optimization::send_to_nlopt_optimize(double* x, int n, void* objPointer)
+  {
+    double u[128], l[128];
+    double fmin = 1;
+    int maxf = maxEvals*n;    
+    int ierror;
+
+    // Bounds live in fixed-size stack arrays; reject larger problems
+    // instead of overflowing them.
+    if (n > 128)
+      {
+	FILE_LOG(logERROR) << "NLOPT: problem dimension above 128 not supported";
+	return -2;
+      }
+
+    for (int i = 0; i < n; ++i) 
+      {
+	l[i] = mDown;	
+	u[i] = mUp;
+      
+	if (x[i] < l[i] || x[i] > u[i])
+	  {
+	    x[i]=(l[i]+u[i])/2.0;  
+	    //nlopt requires x to have a valid initial value even for algorithms that do
+	    //not need it
+	  }
+      }
+    
+    nlopt_opt opt;
+    double (*fpointer)(unsigned int, const double *, double *, void *);
+    double coef;  //Percentage of resources used in global optimization
+
+    /* algorithm and dims */
+    if (alg == LBFGS)                                     //Require gradient
+      fpointer = &(NLOPT_WPR::evaluate_nlopt_grad);
+    else                                           //Do not require gradient
+      fpointer = &(NLOPT_WPR::evaluate_nlopt);
+
+    // COMBINED reserves 20% of the evaluation budget for local refinement.
+    if (alg == COMBINED)  
+      coef = 0.8;
+    else
+      coef = 1.0;
+
+    switch(alg)
+      {
+      case DIRECT:      /* same as combined */
+      case COMBINED: 	opt = nlopt_create(NLOPT_GN_DIRECT_L, n); break;
+      case BOBYQA: 	opt = nlopt_create(NLOPT_LN_BOBYQA, n); break;
+      case LBFGS:       opt = nlopt_create(NLOPT_LD_LBFGS, n); break;
+      default: FILE_LOG(logERROR) << "Algorithm not supported"; return -1;
+      }
+
+    nlopt_set_lower_bounds(opt, l);
+    nlopt_set_upper_bounds(opt, u);
+    nlopt_set_min_objective(opt, fpointer, objPointer);
+    int nfeval = static_cast<int>(static_cast<double>(maxf)*coef);
+    nlopt_set_maxeval(opt, nfeval) ;
+
+
+    nlopt_result errortype = nlopt_optimize(opt, x, &fmin);
+    checkNLOPTerror(errortype);
+
+    // Local refinement with the remaining evaluation budget.
+    if ((alg == COMBINED) && (coef < 1)) 
+      {
+	nlopt_destroy(opt);  // Destroy previous one
+	opt = nlopt_create(NLOPT_LN_SBPLX, n); /* algorithm and dims */
+	nlopt_set_lower_bounds(opt, l);
+	nlopt_set_upper_bounds(opt, u);
+	nlopt_set_min_objective(opt, fpointer, objPointer);
+	nlopt_set_maxeval(opt, maxf-nfeval);
+	
+	errortype = nlopt_optimize(opt, x, &fmin);
+	checkNLOPTerror(errortype);
+      }
+      
+    nlopt_destroy(opt);  // Destroy opt
+    
+    ierror = static_cast<int>(errortype);
+    return ierror;
+
+  } // send_to_nlopt_optimize (C array)
+
+
+}// namespace bayesopt
+

devel/nlopt_optimization.hpp

+/** \file inneroptimization.hpp 
+    \brief C++ wrapper of the NLOPT library */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+
+#ifndef __NLOPT_OPTIMIZATION_HPP__
+#define __NLOPT_OPTIMIZATION_HPP__
+
+#include "optimizable.hpp"
+#include "nlopt.hpp"
+
+
+namespace bayesopt {
+
+  // We plan to add more in the future since nlopt actually support many of them
+  typedef enum {
+    DIRECT,    ///< Global optimization
+    LBFGS,     ///< Local, derivative based
+    BOBYQA,    ///< Local, derivative free
+    COMBINED   ///< Global exploration, local refinement (hand tuned)
+  } OptAlgorithms;
+
+
+  /** \brief C++ wrapper around the NLOPT C optimizers.  Minimizes an
+   *  RBOptimizable objective inside the box [mDown,mUp]^n. */
+  class NLOPT_Optimization //: public Optimization
+  {
+  public:
+    NLOPT_Optimization();
+    virtual ~NLOPT_Optimization(){};
+
+    /** Sets the optimization algorithm (was declared with the
+     *  undefined type name innerOptAlgorithms). */
+    void setAlgorithm(OptAlgorithms newAlg);
+
+    /** Sets the maximum number of evaluations (per dimension). */
+    void setMaxEvals(size_t meval);
+
+    /** Limits of the hypercube: the same scalar bound is applied to
+     *  every dimension (matches the double members and the inline
+     *  definition below; the previous vectord declaration had no
+     *  matching definition). */
+    void setLimits(double down, double up);
+
+    /** Compute the inner optimization algorithm
+     * @param Xnext input: initial guess, output: result
+     * @return error_code
+     */
+    int run(vectord &Xnext);
+    
+    /** Objective bridge used by NLOPT-style callbacks; data must
+     *  point to this NLOPT_Optimization instance. */
+    double evaluate(const std::vector<double> &x, std::vector<double> &grad, void *data);
+
+  private:
+
+    /** C-array entry point actually passed to NLOPT (defined in the
+     *  .cpp; it was commented out here, leaving it undeclared). */
+    int send_to_nlopt_optimize(double* x, int n, void* objPointer);	
+
+    RBOptimizable *rbobj;      ///< Objective; not owned. Must be set before run().
+
+    OptAlgorithms alg;         ///< Selected NLOPT algorithm
+    double mDown;              ///< Lower bound (all dimensions)
+    double mUp;                ///< Upper bound (all dimensions)
+    size_t maxEvals;           ///< Max evaluations per dimension
+  };
+
+
+  inline void NLOPT_Optimization::setAlgorithm(OptAlgorithms newAlg)
+  { alg = newAlg; }
+
+  inline void NLOPT_Optimization::setMaxEvals(size_t meval)
+  { maxEvals = meval; }
+
+  inline void NLOPT_Optimization::setLimits(double down, double up)
+  { mDown = down;   mUp = up; }
+
+  inline double NLOPT_Optimization::evaluate(const std::vector<double> &x, std::vector<double> &grad, void *data)
+  { 
+    vectord query(x.size());
+    std::copy(x.begin(),x.end(),query.begin());              // begin() was missing its parentheses
+    NLOPT_Optimization* ptr = static_cast<NLOPT_Optimization*>(data);  // cast was written backwards
+    return ptr->rbobj->evaluate(query); 
+  }
+
+}//namespace bayesopt
+
+#endif

devel/nloptwpr.cpp

+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+// BOOST Libraries
+#include <boost/numeric/ublas/matrix.hpp>
+#include <boost/numeric/ublas/matrix_proxy.hpp>
+#include <boost/numeric/ublas/vector.hpp>
+
+#include "nloptwpr.h"
+#include "inneroptimization.hpp"
+
+namespace NLOPT_WPR
+{
+
+  namespace ublas = boost::numeric::ublas;
+  using bayesopt::RBOptimizableWrapper;
+  using bayesopt::RGBOptimizableWrapper;
+
+  double evaluate_nlopt (unsigned int n, const double *x,
+			 double *grad, void *my_func_data)
+
+  {
+    // Copy the raw C array into a uBlas vector for the C++ objective.
+    ublas::vector<double> query(n);
+    for (unsigned int i = 0; i < n; ++i)
+      query(i) = x[i];
+
+    // The opaque pointer carries the (gradient-free) objective wrapper.
+    RBOptimizableWrapper* wrapper =
+      static_cast<RBOptimizableWrapper*>(my_func_data);
+
+    return wrapper->evaluate(query);
+  } /* evaluate_criteria_nlopt */
+
+
+  double evaluate_nlopt_grad (unsigned int n, const double *x,
+			      double *grad, void *my_func_data)
+
+  {
+    // Copy the raw C array into a uBlas vector for the C++ objective.
+    ublas::vector<double> query(n);
+    for (unsigned int i = 0; i < n; ++i)
+      query(i) = x[i];
+
+    // The opaque pointer carries the gradient-capable objective wrapper.
+    RGBOptimizableWrapper* wrapper =
+      static_cast<RGBOptimizableWrapper*>(my_func_data);
+
+    ublas::vector<double> vgrad = ublas::zero_vector<double>(n);
+    double f = wrapper->evaluate(query, vgrad);
+
+    // NLOPT passes grad == NULL for algorithms that do not use it.
+    if (grad != NULL)
+      {
+	for (unsigned int i = 0; i < n; ++i)
+	  grad[i] = vgrad(i);
+      }
+
+    return f;
+  } /* evaluate_criteria_nlopt */
+}

devel/nloptwpr.h

+/**  \file nloptwpr.h \brief Simple wrapper to optimize object methods with NLOPT */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+#ifndef _NLOPTWPR_HPP_
+#define _NLOPTWPR_HPP_
+
+
+namespace NLOPT_WPR
+{
+  // NOTE(review): extern "C" here only disables name mangling; the
+  // declarations still live inside a C++ namespace, so this header is
+  // not directly usable from plain C.
+  extern "C" {
+
+  /** 
+   * Wrapper of inner optimization to be evaluated by NLOPT
+   * 
+   * @param n # of dimensions
+   * @param x input point
+   * @param grad (only for compatibily, not used or changed)
+   * @param my_func_data pointer to the NLOPT_Optimization object
+   * 
+   * @return function evaluation
+   */  
+  double evaluate_nlopt (unsigned int n, const double *x,
+			 double *grad, void *my_func_data);
+
+  /** 
+   * Wrapper of inner optimization to be evaluated by NLOPT
+   * 
+   * @param n # of dimensions
+   * @param x input point
+   * @param grad returns gradient evaluation
+   * @param my_func_data pointer to the NLOPT_Optimization object
+   * 
+   * @return function evaluation
+   */  
+  double evaluate_nlopt_grad (unsigned int n, const double *x,
+			      double *grad, void *my_func_data);
+
+  
+  }
+}
+
+#endif

devel/optimization.hpp

+/** \file optimization.hpp 
+    \brief Abstract class for optimization algorithms */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#ifndef __OPTIMIZATION_HPP__
+#define __OPTIMIZATION_HPP__
+
+#include "specialtypes.hpp"
+
+namespace bayesopt {
+
+  /** \brief Abstract interface for optimization algorithms: run()
+   *  searches for the point minimizing evaluate(). */
+  class Optimization
+  {
+  public:
+    Optimization(){};
+    virtual ~Optimization(){};
+
+    /** Runs the optimizer; result holds the minimizer on return. */
+    virtual int run(vectord& result) = 0;
+    /** Objective function value at the query point. */
+    virtual double evaluate(const vectord& query) = 0;
+  };
+
+  /** \brief Exhaustive minimization of evaluate() over a finite set
+   *  of candidate input points. The candidate set is not owned. */
+  class DiscreteOptimization: public Optimization
+  {
+  public:
+    // Initialize the pointer so run() can detect a missing set.
+    DiscreteOptimization(): mInputSet(NULL) {};
+
+    DiscreteOptimization(vecOfvec *validSet): 
+      mInputSet(validSet){};
+
+    virtual ~DiscreteOptimization(){};
+
+    void setValidSet(vecOfvec* input)
+    { mInputSet = input; }
+
+    /** Linear scan over the candidate set keeping the best point.
+     * @param result output: minimizer found
+     * @return 0 on success, -1 if the candidate set is missing or empty
+     */
+    int run(vectord& result)
+    {
+      // Guard: dereferencing begin() of an empty set is undefined.
+      if ((mInputSet == NULL) || mInputSet->empty())
+	return -1;
+
+      vecOfvecIterator it = mInputSet->begin();
+      result = *it;
+      double min = evaluate(result);
+
+      // Start after the first element; it was already evaluated above
+      // (the previous version evaluated it twice).
+      for(++it; it != mInputSet->end(); ++it)
+	{
+	  double current = evaluate(*it);
+	  if (current < min)
+	    {
+	      result = *it;  
+	      min = current;
+	    }
+	}
+      return 0;
+    }
+
+  protected:
+    vecOfvec* mInputSet;               ///< List of input points (not owned)
+  };
+
+}
+
+
+#endif

devel/optimizecriteria.hpp

+/** \file optimizecriteria.hpp 
+    \brief Class to continuous optimize criteria parameters */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#ifndef __OPTIMIZECRITERIA_HPP__
+#define __OPTIMIZECRITERIA_HPP__
+
+#include "inneroptimization.hpp"
+#include "bayesopt.hpp"
+
+namespace bayesopt {
+
+  /** \brief Continuous optimization of a Criteria functor using the
+   *  NLOPT wrapper. The criterion object is not owned. */
+  class OptimizeCriteria: public NLOPT_Optimization
+  {
+  public:
+    explicit OptimizeCriteria(Criteria* crit):
+      NLOPT_Optimization(), mCrit(crit) {};
+    virtual ~OptimizeCriteria(){};
+
+    /** Criterion value at the query point (functor call). */
+    double evaluate(const vectord& query)
+    {  return (*mCrit)(query);  }
+    
+  private:
+    Criteria* mCrit;   ///< Criterion to optimize; not owned.
+  };
+
+  /** \brief Continuous optimization of the criteria, delegated
+   *  through the ContinuousModel interface. The model is not owned. */
+  class OptimizeCriteriaRestricted: public NLOPT_Optimization
+  {
+  public:
+    explicit OptimizeCriteriaRestricted(ContinuousModel* model):
+      NLOPT_Optimization(), model_(model) {};
+    virtual ~OptimizeCriteriaRestricted(){};
+
+    /** Criterion value at the query point, delegated to the model. */
+    double evaluate(const vectord& query)
+    {  return model_->evaluateCriteria(query);  }
+    
+  private:
+    ContinuousModel* model_;   ///< Model queried for the criteria; not owned.
+  };
+
+
+}
+
+
+#endif

devel/optimizekernel.hpp

+/** \file optimizekernel.hpp 
+    \brief Class to continuous optimize kernel parameters */
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#ifndef __OPTIMIZEKERNEL_HPP__
+#define __OPTIMIZEKERNEL_HPP__
+
+#include "inneroptimization.hpp"
+#include "conditionalbayesprocess.hpp"
+
+namespace bayesopt {
+
+  /** \brief Continuous optimization of the kernel hyperparameters of
+   *  a conditional Bayes process. The process is not owned. */
+  class OptimizeKernel: public NLOPT_Optimization
+  {
+  public:
+    explicit OptimizeKernel(ConditionalBayesProcess* npp):
+      NLOPT_Optimization(), npp_(npp) {};
+    virtual ~OptimizeKernel(){};
+
+    /** Score of the kernel parameters in query, delegated to the process. */
+    double evaluate(const vectord& query)
+    {
+      return npp_->evaluateKernelParams(query);
+    }
+    
+  private:
+    ConditionalBayesProcess* npp_;   ///< Process being tuned; not owned.
+  };
+
+}
+
+
+#endif

doxygen/reference.dox

 for other languages such as Fortran, Ada, etc.
 
 The function to optimize must agree with the template provided in
-bayesoptwpr.h
+bayesopt.h
 
 \code{.c}
 double my_function (unsigned int n, const double *x, double *gradient, void *func_data);

examples/bo_branin.cpp

 #include <cmath>
 #include <algorithm>
 //#include <valarray>
-#include "bayesoptcont.hpp"
+#include "bayesopt.hpp"
 
 #ifndef M_PI
 #define M_PI           3.14159265358979323846

examples/bo_cont.cpp

 */
 
 #include <ctime>
-#include "bayesoptwpr.h"                 // For the C API
-#include "bayesoptcont.hpp"              // For the C++ API
+#include "bayesopt.h"                 // For the C API
+#include "bayesopt.hpp"               // For the C++ API
 
 
 /* Function to be used for C-API testing */

examples/bo_disc.cpp

 ------------------------------------------------------------------------
 */
 
-#include "bayesoptwpr.h"               // For the C API
-#include "bayesoptdisc.hpp"            // For the C++ API
+#include "bayesopt.h"               // For the C API
+#include "bayesopt.hpp"             // For the C++ API
 #include "lhs.hpp"
 
 

examples/bo_display.cpp

 */
 
 #include <valarray>
-#include "bayesoptcont.hpp"
+#include "bayesopt.hpp"
 
 class ExampleOneD: public bayesopt::ContinuousModel
 {

examples/bo_oned.cpp

 */
 
 #include <valarray>
-#include "bayesoptcont.hpp"
+#include "bayesopt.hpp"
 
 class ExampleOneD: public bayesopt::ContinuousModel
 {

include/bayesopt.hpp

+/**  \file bayesopt.hpp \brief Bayesian optimization C++-API*/
+/*
+-------------------------------------------------------------------------
+   This file is part of BayesOpt, an efficient C++ library for 
+   Bayesian optimization.
+
+   Copyright (C) 2011-2014 Ruben Martinez-Cantin <rmcantin@unizar.es>
+ 
+   BayesOpt is free software: you can redistribute it and/or modify it 
+   under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   BayesOpt is distributed in the hope that it will be useful, but 
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+------------------------------------------------------------------------
+*/
+
+#ifndef  _BAYESOPTAPI_HPP_
+#define  _BAYESOPTAPI_HPP_
+
+#include "boundingbox.hpp"
+#include "bayesoptbase.hpp"
+#include "inneroptimization.hpp"
+
+namespace bayesopt  {
+
+  /** \addtogroup BayesOpt */
+  /**@{*/
+
+  /**
+   * \brief Bayesian optimization using different non-parametric
+   * processes as distributions over surrogate functions. The
+   * exploration spaces is assumed to be continous and box-bounded.
+   */
+  class BAYESOPT_API ContinuousModel: public BayesOptBase
+  {
+  public:
+   
+    /** Default constructor */
+    ContinuousModel();
+
+    /** 
+     * Constructor
+     * @param dim number of input dimensions
+     * @param params set of parameters (see parameters.h)
+     */
+    ContinuousModel(size_t dim, bopt_params params);
+
+    /**  Default destructor  */
+    virtual ~ContinuousModel();
+  
+    /** Initialize the optimization process.  */
+    void initializeOptimization();
+
+    /** 
+     * Once the optimization has been perfomed, return the optimal
+     * point.
+     */
+    vectord getFinalResult();
+
+    /** 
+     * \brief Sets the bounding box. 
+     *
+     * @param lowerBound vector with the lower bounds of the hypercube
+     * @param upperBound vector with the upper bounds of the hypercube
+     * 
+     * @return 0 if terminate successfully, nonzero otherwise
+     */
+    int setBoundingBox( const vectord &lowerBound,
+			const vectord &upperBound);
+
+
+  protected:
+
+    /** 
+     * \brief Print data for every step according to the verbose level
+     * 
+     * @param iteration iteration number 
+     * @param xNext next point
+     * @param yNext function value at next point
+     */
+    void plotStepData(size_t iteration, const vectord& xNext,
+		      double yNext);
+
+    /** Selects the initial set of points to build the surrogate model. */
+    void sampleInitialPoints();
+
+    /** Sample a single point in the input space. Used for epsilon greedy exploration. */
+    vectord samplePoint();
+
+    /** 
+     * \brief Wrapper for the target function normalize in the hypercube
+     * [0,1]
+     * @param query point to evaluate in [0,1] hypercube
+     * @return actual return value of the target function
+     */
+    double evaluateSampleInternal( const vectord &query );
+    
+    /** 
+     * \brief Wrapper of the innerOptimization class to find the optimal
+     * point acording to the criteria.
+     * @param xOpt optimal point
+     */
+    void findOptimal(vectord &xOpt);
+
+  private:
+    utils::BoundingBox<vectord> *mBB;      ///< Bounding Box (input space limits)
+    NLOPT_Optimization* cOptimizer;        ///< Inner optimizer over the criteria
+  };
+  
+
+  /**
+   * \brief Sequential Kriging Optimization using different non-parametric 
+   * processes as surrogate (kriging) functions. 
+   */
+  class BAYESOPT_API DiscreteModel : public BayesOptBase
+  {
+  public:
+
+    /** 
+     * Constructor
+     * @param validSet  Set of potential inputs
+     */
+    DiscreteModel(const vecOfvec &validSet );
+
+    /** 
+     * Constructor
+     * @param validSet  Set of potential inputs
+     * @param params set of parameters (see parameters.h)
+     */
+    DiscreteModel(const vecOfvec &validSet, bopt_params params);
+    
+    /** Default destructor  */
+    virtual ~DiscreteModel();
+
+    /** Initialize the optimization process. */
+    void initializeOptimization();
+
+    /** Once the optimization has been perfomed, return the optimal point. */
+    vectord getFinalResult();
+
+    
+  protected:
+    
+    
+    /** Print data for every step according to the verbose level */
+    void plotStepData(size_t iteration, const vectord& xNext,
+		     double yNext);
+
+    /** Selects the initial set of points to build the surrogate model. */
+    void sampleInitialPoints();
+
+    /** Sample a single point in the input space. Used for epsilon greedy exploration. */
+    vectord samplePoint();
+
+    /** 
+     * \brief Wrapper for the target function normalize in the hypercube
+     * [0,1]
+     * @param query point to evaluate in [0,1] hypercube
+     * @return actual return value of the target function
+     */
+    double evaluateSampleInternal( const vectord &query ); 
+
+    void findOptimal(vectord &xOpt);
+
+  protected: