Commits

Ruben Martinez-Cantin  committed 59442d0 Merge

Merging

  • Participants
  • Parent commits 5851494, c3d378a

Comments (0)

Files changed (57)

 323dd8f1963e16c1a227b6e4cd7877933d352c17 v4.1
 0000000000000000000000000000000000000000 v4.1
 4f88d930aee4d993ff8256586a5d811b1d9a3130 v0.4.1
+c20fdd2c37dfcb0c9f425f0e821a5b1d046098f9 v0.5
+71d10a5c8d7ed3d944f9ffbc1473ce1402e5bf0a v0.5.1

File CMakeLists.txt

+# -------------------------------------------------------------------------
+#    This file is part of BayesOpt, an efficient C++ library for 
+#    Bayesian optimization.
+#
+#    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+# 
+#    BayesOpt is free software: you can redistribute it and/or modify it 
+#    under the terms of the GNU General Public License as published by
+#    the Free Software Foundation, either version 3 of the License, or
+#    (at your option) any later version.
+#
+#    BayesOpt is distributed in the hope that it will be useful, but 
+#    WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+# ------------------------------------------------------------------------
+
 PROJECT(BayesOpt CXX)
 CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
 
 
 set(CMAKE_LIBRARY_PATH ${CMAKE_LIBRARY_PATH} /opt/local/lib /opt/local/Library)
 set(CMAKE_INCLUDE_PATH ${CMAKE_INCLUDE_PATH} /opt/local/include
-/opt/local/Library)
+                                             /opt/local/Library)
 set(CMAKE_PROGRAM_PATH ${CMAKE_PROGRAM_PATH} /opt/local/bin/ /opt/local/Library)
 
 
       FORCE)
 endif(NOT CMAKE_BUILD_TYPE)
 
-#SET(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS} -Wall -Wextra -Wdouble-promotion -Wpedantic -Wfloat-equal -Wno-endif-labels -Wshadow")
-#SET(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS} -Wall -Wextra -Wdouble-promotion -Wpedantic -Wfloat-equal -Wno-endif-labels -Wshadow")
-
-# SET(CMAKE_CXX_FLAGS_RELEASE "-O3 -DNDEBUG ${CMAKE_CXX_FLAGS}")
-# SET(CMAKE_C_FLAGS_RELEASE "-O3 -DNDEBUG ${CMAKE_C_FLAGS}")
-
-# SET(CMAKE_CXX_FLAGS_DEBUG "-g -pg ${CMAKE_CXX_FLAGS}")
-# SET(CMAKE_C_FLAGS_DEBUG "-g -pg ${CMAKE_C_FLAGS}")
-  
-# enable warnings
-# ADD_DEFINITIONS("-Wall")
-# ADD_DEFINITIONS("-Wextra -Wdouble-promotion -pedantic -Wfloat-equal -Wno-endif-labels -Wshadow")
-
 SET(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/lib)
 SET(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/lib)
 SET(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/bin)
 
-option(BUILD_EXAMPLES "Build examples and demos?" ON)
-option(BUILD_PYTHON_INTERFACE "Build Python interface?" OFF)
-option(MATLAB_COMPATIBLE "Build library compatible with Matlab?" ON)
-
-if(BUILD_PYTHON_INTERFACE)
-  INCLUDE(PythonMagic)
-  SET(PYTHON_LIB  ${PYTHON_LIBRARIES} )
-  INCLUDE_DIRECTORIES(${PYTHON_INCLUDE_PATH})
-  SET(BUILD_SHARED_LIBS ON CACHE BOOL "Build shared libraries?" FORCE)
-ELSE()
-  SET(BUILD_SHARED_LIBS OFF CACHE BOOL "Build shared libraries?")
-ENDIF(BUILD_PYTHON_INTERFACE)
+option(BAYESOPT_BUILD_EXAMPLES "Build examples and demos?" ON)
+option(BAYESOPT_PYTHON_INTERFACE "Build Python interface?" OFF)
+option(BAYESOPT_MATLAB_COMPATIBLE "Build library compatible with Matlab?" ON)
+option(BAYESOPT_BUILD_SOBOL "Build support for Sobol sequences?" ON)
+option(BAYESOPT_BUILD_SHARED "Build BayesOpt as a shared library?" OFF)
 
 find_package( Boost REQUIRED )
 if(Boost_FOUND)
 endif()
 
 FIND_LIBRARY(NLOPT nlopt)
-
 IF(NLOPT MATCHES NLOPT-NOTFOUND)
-  SET(BUILD_NLOPT ON CACHE BOOL "Build included version of NLOPT?")
+  SET(NLOPT_BUILD ON CACHE BOOL "Build included version of NLOPT?")
 ELSE(NLOPT MATCHES NLOPT-NOTFOUND)
-  SET(BUILD_NLOPT OFF CACHE BOOL "Build included version of NLOPT?")
+  SET(NLOPT_BUILD OFF CACHE BOOL "Build included version of NLOPT?")
 ENDIF(NLOPT MATCHES NLOPT-NOTFOUND)
 
-#SET(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR})
 INCLUDE(UseDoxygen)
 
+IF(BAYESOPT_BUILD_SOBOL)
+  ADD_DEFINITIONS(-DUSE_SOBOL)	
+  SET(SOBOL_SRC
+    ./sobol/sobol.cpp
+    ./sobol/sobol_i4.cpp
+    ./sobol/sobol_i8.cpp
+    )
+ELSE(BAYESOPT_BUILD_SOBOL)
+  SET(SOBOL_SRC )
+ENDIF(BAYESOPT_BUILD_SOBOL)
+
+
 SET( BAYESOPT_SRCS
   ./src/bayesoptcont.cpp
   ./src/bayesoptdisc.cpp
   ./src/gaussian_process_normal.cpp
   ./src/student_t_process_jef.cpp
   ./src/student_t_process_nig.cpp
-#  ./src/gaussian_process_ign.cpp
-#  ./src/student_t_process.cpp
   ./src/parameters.cpp
   ./src/kernel_functors.cpp
   ./src/criteria_functors.cpp
 SET(UTILS_SRC
   ./utils/parser.cpp
   ./utils/ublas_extra.cpp
-  ./sobol/sobol.cpp
-  ./sobol/sobol_i4.cpp
-  ./sobol/sobol_i8.cpp
+  ${SOBOL_SRC}
   )
 
 SET(WRAPPPERS_SRC 
   ./wrappers/bayesoptwpr.cpp
   )
 
-IF(BUILD_PYTHON_INTERFACE)
-  SET(PYTHON_SRC  ./python/bayesopt.cpp)
-ELSE(BUILD_PYTHON_INTERFACE)
-  SET(PYTHON_SRC )
-ENDIF(BUILD_PYTHON_INTERFACE)
-
 
 INCLUDE_DIRECTORIES( ${CMAKE_SOURCE_DIR}/include 
                      ${CMAKE_SOURCE_DIR}/wrappers
 
 LINK_DIRECTORIES( ${CMAKE_SOURCE_DIR}/lib )
 
-# IF(BUILD_NLOPT)
-#   ADD_SUBDIRECTORY(nlopt)
-#   include_directories(${CMAKE_SOURCE_DIR}/nlopt/api)
-# ENDIF(BUILD_NLOPT)
+IF(NLOPT_BUILD)
+  ADD_SUBDIRECTORY(nlopt)
+  include_directories(${CMAKE_SOURCE_DIR}/nlopt/api)
+  SET(EXT_LIBS nlopt)
+ELSE(NLOPT_BUILD)
+  SET(EXT_LIBS ${NLOPT})
+ENDIF(NLOPT_BUILD)
 
-IF(BUILD_SHARED_LIBS)
+
+IF(BAYESOPT_BUILD_SHARED)
   ADD_LIBRARY(bayesopt SHARED ${BAYESOPT_SRCS}   
-    ${WRAPPPERS_SRC} ${UTILS_SRC} ${PYTHON_SRC} )
+    ${WRAPPPERS_SRC} ${UTILS_SRC} )
   IF(WIN32)
     ADD_DEFINITIONS(-DBAYESOPT_DLL)	
     # In new versions of CMAKE they use a different system and the
     # symbol is not defined
     ADD_DEFINITIONS(-Dbayesopt_EXPORT )
-  ELSE()
-    SET_TARGET_PROPERTIES(bayesopt PROPERTIES PREFIX "" SUFFIX ".so")
   ENDIF()
 ELSE()
   ADD_LIBRARY(bayesopt STATIC ${BAYESOPT_SRCS}   
-    ${WRAPPPERS_SRC} ${UTILS_SRC} ${PYTHON_SRC} )
+    ${WRAPPPERS_SRC} ${UTILS_SRC} )
 ENDIF()
 
-IF((BUILD_SHARED_LIBS OR MATLAB_COMPATIBLE) AND NOT WIN32)
+
+IF(NLOPT_BUILD)
+  add_dependencies(bayesopt nlopt)
+ENDIF(NLOPT_BUILD)
+
+
+IF((BAYESOPT_BUILD_SHARED OR BAYESOPT_MATLAB_COMPATIBLE) AND NOT WIN32)
   SET_TARGET_PROPERTIES(bayesopt PROPERTIES COMPILE_FLAGS "-fPIC")
 ENDIF()
   
+TARGET_LINK_LIBRARIES(bayesopt ${EXT_LIBS})
 
-IF(BUILD_NLOPT)
-  ADD_SUBDIRECTORY(nlopt)
-  include_directories(${CMAKE_SOURCE_DIR}/nlopt/api)
-  add_dependencies(bayesopt nlopt)
-  SET(EXT_LIBS nlopt)
-ELSE(BUILD_NLOPT)
-  SET(EXT_LIBS ${NLOPT})
-ENDIF(BUILD_NLOPT)
-
-TARGET_LINK_LIBRARIES(bayesopt
-  ${EXT_LIBS} ${PYTHON_LIB} )
-
-
-IF(BUILD_EXAMPLES)
+IF(BAYESOPT_BUILD_EXAMPLES)
   ADD_SUBDIRECTORY(examples)
-endif(BUILD_EXAMPLES)
+endif(BAYESOPT_BUILD_EXAMPLES)
 
 INSTALL(FILES 
   ./include/bayesoptcont.hpp 
   LIBRARY DESTINATION lib
   ARCHIVE DESTINATION lib
 )
+
+IF(BAYESOPT_PYTHON_INTERFACE)
+  INCLUDE(PythonMagic)
+  INCLUDE_DIRECTORIES(${PYTHON_INCLUDE_PATH})
+
+  ADD_LIBRARY(esopt MODULE ${BAYESOPT_SRCS}   
+    ${WRAPPPERS_SRC} ${UTILS_SRC} ./python/bayesopt.cpp)
+
+  IF(NLOPT_BUILD)
+    add_dependencies(esopt nlopt)
+  ENDIF(NLOPT_BUILD)
+
+  TARGET_LINK_LIBRARIES(esopt ${EXT_LIBS} ${PYTHON_LIBRARIES} )
+
+  IF(NOT WIN32)
+    # Kind of a hack but it works
+    SET_TARGET_PROPERTIES(esopt PROPERTIES PREFIX "bay" SUFFIX ".so")
+  ENDIF()
+
+  INSTALL(
+    TARGETS esopt
+    LIBRARY DESTINATION lib
+    ARCHIVE DESTINATION lib
+  )
+ENDIF(BAYESOPT_PYTHON_INTERFACE)

File cmake/PythonMagic.cmake

 
 include(FindPythonInterp)
 ## check python
-find_package(PythonLibs 2 EXACT) # using PYTHON_INCLUDE_PATH instead of PYTHON_INCLUDE_DIR
+find_package(PythonLibs 2) # using PYTHON_INCLUDE_PATH instead of PYTHON_INCLUDE_DIR
 if( NOT PYTHON_EXECUTABLE )
   # look specifically for 2.7
   find_program(PYTHON_EXECUTABLE NAMES python2.7 python27 python PATHS [HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\2.7\\InstallPath])

File cmake/UseDoxygen.cmake

 		ADDITIONAL_MAKE_CLEAN_FILES
 		"${DOXYFILE_OUTPUT_DIR}/${DOXYFILE_LATEX_DIR}")
 
-	if(DOXYFILE_LATEX STREQUAL "ON")
+	if(DOXYFILE_LATEX)
 		set(DOXYFILE_GENERATE_LATEX "YES")
 		find_package(LATEX)
 		find_program(DOXYFILE_MAKE make)

File doxygen/contribute.dox

 used in the library. Python would only find the library if it is
 called exactly \c bayesopt.so
 
+\section relsoft Related software
+
+\li Spearmint (Python): A library based on \cite Snoek2012. It is more oriented to cluster computing https://github.com/JasperSnoek/spearmint
+\li Hyperopt (Python): A library mainly based on \cite Bergstra2011. It uses different models for the surrogate model https://github.com/jaberg/hyperopt
+\li Perk (Fortran+Shell): A library for surrogate modelling. It was mainly used in \cite Santner03. http://www.stat.osu.edu/~comp_exp/book.html
+\li SUMO (Matlab): A library for surrogate modelling. Its main purpose is not optimization, but it also includes the Expected Improvement algorithm. http://www.sumowiki.intec.ugent.be/Main_Page
+\li GPML (Matlab): The most popular library for Gaussian Process. BayesOpt uses some of its design principles. http://www.gaussianprocess.org/gpml/code/matlab/doc/
+\li NLOPT (C-Many languages): One of the best libraries for general purpose nonlinear optimization. http://ab-initio.mit.edu/wiki/index.php/NLopt
+
 */

File doxygen/demos.dox

 f(x,y) = \left(y-\frac{5.1}{4\pi^2}x^2 + \frac{5}{\pi}x-6\right)^2 + 10\left(1-\frac{1}{8\pi}\right) \cos(x) + 10
 \f]
 
-with a search domain \f$5 \leq x \leq 10\f$, \f$0 \leq y \leq 15\f$.
+with a search domain \f$-5 \leq x \leq 10\f$, \f$0 \leq y \leq 15\f$.
 
 \image html doxygen/branin.jpg
 

File doxygen/install.dox

 /*! \page install Installing BayesOpt
 \tableofcontents
 
-The core of BayesOpt uses standard C/C++ code (C++98) and it can be
-compiled in different platforms and used from different languages
-thanks to the wrappers provided.
+The core of BayesOpt uses standard C/C++ code (C++98) so it can be
+compiled from many C++ compilers (gcc, clang, MSVC...). The library
+also includes wrappers for Python, Matlab and Octave interfaces which require
+extra dependencies or compilation steps. Note that the
+Python or Matlab/Octave interfaces are not included by default.
 
 \section unixinst Installing in Linux/MacOS:
 
-The compilation is very similar in any *nix system. Note that the
-Python or Matlab interfaces are not included by default.
+The compilation is very similar in any *nix system. Following these
+instructions, the library will be compiled using the default
+configuration. You can modify that easily as explained in 
+\ref confinst
 
 \subsection getDepend Getting dependencies:
 
 >> sudo apt-get install python-dev python-numpy
 \endverbatim
 
-If you want the Octave interface:
+If you want the Octave interface (note that the \a octave package does not include all the necessary files):
 \verbatim
 >> sudo apt-get install octave-headers
 \endverbatim
 This section assumes \b macports is installed. Similar packages can be
 found in \b fink or \b homebrew. For the minimal install, run:
 \verbatim
->> sudo port install boost gcc46 cmake
+>> sudo port install boost gcc47 cmake
 \endverbatim
 
 If you want the Python interface:
 
 Again, for all dependencies:
 \verbatim
->> sudo port install boost python27 py27-numpy gcc46 cmake py27-cython octave
+>> sudo port install boost python27 py27-numpy gcc47 cmake py27-cython octave
 \endverbatim
 
 
 \subsection compile Compile the library:
-In order to compile the source code in a *nix system, run this from a terminal.
+In order to compile the source code in a *nix system, run this from a
+terminal.
 \verbatim
 >> cmake . 
 >> make
 \endverbatim
 
 \b Important: If you use \b ccmake instead of \b cmake you will access a graphical
-interface to select features such as the include the Python and Matlab
-interfaces, debug/release mode or if you want to use shared libraries
-or not. \b Shared libraries are required to run the Python interface.
-
-\subsubsection instpath Install the library in a different path
-
-CMake allows to select the install path during compilation. You just
-need to change the CMAKE_INSTALL_PREFIX variable.  This can be done
-directly from the command line with the -D option, like this:
-
-\verbatim
->> cmake -DCMAKE_INSTALL_PREFIX=/your/desired/path .
-\endverbatim
-
-or if you use ccmake, just modify the value of the variable with the
-corresponding path.
+interface to select features such as the include the Python and Matlab interfaces,
+debug/release mode or if you want to use shared libraries or not. More details about how
+to configure it can be found in \ref confinst
 
 \subsubsection docbuild Building the documentation
 
 \endverbatim
 This documentation will appear in the "doc" subdirectory.
 
+
+
 \subsection instpython Python interface:
 
 Both Python development files (Python.h) and Numpy are needed if you
 and 2.7. The interface relies on Numpy arrays. If we want to select
 the option to compile the Python interface we can just run:
 \verbatim
+>> cmake -DBAYESOPT_PYTHON_INTERFACE=ON . 
+\endverbatim
+or 
+\verbatim
 >> ccmake . 
->> make
->> sudo make install
 \endverbatim
+and select the corresponding option.
 
-\b Important: Python requires bayesopt to be a \b shared library.
+\b Important: Python requires a special module with shared access and nonstandard
+name. Thus, it will build a separate module called "bayesopt.so". This module can be
+accessed from Python provided that it is in the PYTHONPATH or sys.path. It cannot be
+linked to any executable or other libraries. Use libbayesopt.* instead.
 
 \subsection instmatlab MATLAB/Octave interface:
 
 Make sure the library is compiled with the MATLAB_COMPATIBLE option
-using ccmake and configure Matlab/Octave to compile mex files. For
-example, in Matlab you can run to check the supported compilers:
+using ccmake. Under Mac OS they must be shared. Also, configure 
+Matlab/Octave to compile mex files. For example, in Matlab you can run
+to check the supported compilers:
 \verbatim
 >> mex -setup
 \endverbatim
 are. If the install path is the default, you can execute the
 exportlocalpath.sh script before calling MATLAB.
 
-
+On MacOS there are known issues both in Matlab and Octave about the compiler linking with
+the wrong std++ library for different reasons. See:
+\li http://www.mathworks.com/matlabcentral/newsreader/view_thread/291752
+\li https://mailman.cae.wisc.edu/pipermail/octave-maintainers/2012-January/026341.html
 
 <HR>
 
 can also add an entry named BOOST_ROOT in CMake with the corresponding
 path to the library.
 
-\li MinGW: http://www.mingw.org
+\li MinGW(optional): http://www.mingw.org
 
 If you do not have a C++ compiler, we recomend MinGW+MSYS. Then, you
 just need to compile from the command line with:
 >> mingw32-make
 \endverbatim
 
-\subsection instpythonwin Python interface:
-
-The Python interface has not been tested in \b Windows because getting
-the correct dependencies is highly involved. You might need to
-download and install:
-\li Python (binary and \b sources): http://www.python.org
-\li Numpy: http://new.scipy.org/download.html 
-
-Also, read this article about how to link everything:
-http://docs.python.org/2/extending/windows.html#building-on-windows
-
+The most important options/variables are explained in \ref confinst.
 
 \subsection instmatlabwin MATLAB/Octave interface:
 
 modify the PATH variable or copy the dll files in the same folder as
 the generated mexfile.
 
+\b Important: It is strongly recommended to compile bayesopt with
+exactly the same compiler that was selected for mex files. For a list of
+the supported compilers for your Matlab version, you can check the
+online docs at mathworks. 
+
+\subsubsection matlabmingw MATLAB and MinGW
+
+Unfortunately, MinGW has never been supported by Matlab. Thus I have
+also included a Makefile to generate the mex files outside Matlab. You
+might need to change the \c MATLABROOT path with the root folder of
+your Matlab install and copy the dlls. Then, run \c mingw32-make. Note
+that \c mingw32-make only supports 32 bits libraries, so you need a 32
+bit version of Matlab. There is a fork of MinGW with 64 bit support
+under development, but it has not been tested here.
+
+\subsection instpythonwin Python interface:
+
+The Python interface has not been tested in \b Windows because getting
+the correct dependencies is highly involved. You might need to
+download and install:
+\li Python (binary and \b sources): http://www.python.org
+\li Numpy: http://new.scipy.org/download.html 
+
+Also, read this article about how to link everything:
+http://docs.python.org/2/extending/windows.html#building-on-windows
+
+
+<HR>
+
+\section confinst Configure the compilation/install
+
+CMake allows you to configure the compilation using some variables (see
+for example how to compile the Python module in Linux). These
+variables can be set in Linux/MacOS from the command line with the -D
+flag:
+\verbatim
+>> cmake -DVARIABLE=VALUE .
+\endverbatim
+For example
+\verbatim
+>> cmake -DCMAKE_BUILD_TYPE=Debug .
+\endverbatim
+
+If you use ccmake in Linux/MacOS or CMake for Windows, you can access
+to a list of all the variables and their values. Just modify the value
+of the desired variable.
+
+\subsection instshared Compile as shared libraries
+
+We can select if we want BayesOpt and NLOPT compiled as shared libraries
+\verbatim
+BAYESOPT_BUILD_SHARED=ON 
+NLOPT_BUILD_SHARED=ON
+\endverbatim
+In this case, we also need to force rebuild NLOPT (by default it is
+not compiled if it is found in the system).
+
+\subsection instpath Install the library in a different path
+
+CMake allows you to select the install path before
+compilation. You just need to change the CMAKE_INSTALL_PREFIX
+variable.
+\verbatim
+CMAKE_INSTALL_PREFIX=/your/desired/path
+\endverbatim
+
+
+\subsection mininst Minimal installation (fast compilation)
+
+Sobol sequences can be used for the initial design (see \ref
+initpar). In many cases, the performance is similar to latin hypercube
+sampling, however including the Sobol components increases
+considerably the library size and the compilation time. Thus, it can
+be removed from compilation:
+\verbatim
+BAYESOPT_BUILD_SOBOL=OFF
+\endverbatim
+Similarly, we can avoid to compile the example files and demos:
+\verbatim
+BAYESOPT_BUILD_EXAMPLES=OFF
+\endverbatim
+
 
  */

File doxygen/models.dox

 /*! \page modelopt Models and functions
+\tableofcontents
 
 This library was originally developed for as part of a robotics
 research project \cite MartinezCantin09AR \cite MartinezCantin07RSS,
 
 "cHedge(cSum(cEI,cDistance),cLCB,cPOI,cOptimisticSampling)"
 
-\subsection learnmod Methods for learning the kernel parameters  
+\section learnmod Methods for learning the kernel parameters  
 
 As commented before, we consider that the prior of the kernel
 hyperparameters \f$\theta\f$ --if available-- is independent of other
 assume no prior. Since we assume that the hyperparameters are independent,
 we can apply priors selectively only to a small set.
 
+\section initdes Initial design methods
+
+In order to build a suitable surrogate function, we need a
+preliminary set of samples. In Bayesian optimization this is typically
+performed using alternative experimental design criteria. In this
+first step, usually the main criterion is space filling. Thus, we have
+implemented the following designs:
+
+\li Latin hypercube sampling: Each dimension of the space is divided
+in several intervals. Samples are then taken according to a
+generalization of the Latin square
+scheme. http://en.wikipedia.org/wiki/Latin_hypercube_sampling
+
+\li Sobol sequences: It is a set of quasi-random low-discrepancy
+sequences. Thus the space is sampled more evenly than with uniform
+sampling. http://en.wikipedia.org/wiki/Sobol_sequence
+
+\li Uniform sampling: The search space is sampled uniformly.
+
+Note: Since we do not assume any structure in the set of discrete
+points during discrete optimization, only uniform sampling of the
+discrete set is available in that case.
+
 */

File doxygen/reference.dox

 not included by default in the linker, Python or Matlab paths by
 default. This is specially critical when building shared libraries
 (mandatory for Python usage). The script \em exportlocalpaths.sh makes
-sure that the folder is included in all the necessary paths.
+sure that the folder with the libraries is included in all the
+necessary paths.
 
 After that, there are 3 steps that should be follow:
 \li Define the function to optimize.
 \section params Understanding the parameters
 
 BayesOpt relies on a complex and highly configurable mathematical
-model. Also, the key to nonlinear optimization is to include as much
-knowledge as possible about the target function or about the
-problem. Or, if the knowledge is not available, keep the model as
-general as possible (to avoid bias).
+model. In theory, it should work reasonably well for many problems in
+its default configuration. However, Bayesian optimization shines when
+we can include as much knowledge as possible about the target function
+or about the problem. Or, if the knowledge is not available, keep the
+model as general as possible (to avoid bias). In this part, knowledge
+about Gaussian process or nonparametric models in general might be
+useful. 
+
+For example, with the parameters we can select the kind of kernel,
+mean or surrogate model that we want to use. With the kernel we can
+play with the smoothness of the function and its derivatives. The
+mean function can be used to model the overall trend (is it flat?
+linear?). If we know the overall signal variance we better use a
+Gaussian process, if we don't, we should use a Student's t process
+instead.
 
 For that reason, the parameters are bundled in a structure or
 dictionary, depending on the API that we use. This is a brief
 parameters. If it is set to 0, they are only learned after the initial
 set of samples. [Default 0]
 
+\subsection initpar Initialization parameters
+
+\li \b init_method: (unsigned integer value) For continuous
+optimization, we can choose among different strategies for the initial
+design (1-Latin Hypercube Sampling, 2-Sobol sequences (if available,
+see \ref mininst), Other-Uniform Sampling).
+
+
 \subsection logpar Logging parameters
 
-\li \b verbose_level: (integer value) Verbose level 0,3 -> warnings,
+\li \b verbose_level: (unsigned integer value) Verbose level 0,3 -> warnings,
 1,4 -> general information, 2,5 -> debug information, any other value
 -> only errors. Levels < 3 send the messages to stdout. Levels > 4
 send them to a log file. [Default 1].
 functions like "cHedge(cEI,cLCB,cPOI,cThompsonSampling)". See section
 critmod for the different possibilities. [Default: "cEI"]
 \li \b crit_params, \b n_crit_params: Array with the set of parameters
-for the selected criteria. If there are more than one, the parameters
-are split among them according to the number of parameters required
-for each criterion. [Default: 1, 1]
+for the selected criteria. If there is more than one criterion, the
+parameters are split among them according to the number of parameters
+required for each criterion. If n_crit_params is 0, then the default
+parameter is selected for each criterion. [Default: n_crit_params = 0]
 
 \subsection kernelpar Kernel parameters
 

File examples/bo_branin.cpp

 
 #define _USE_MATH_DEFINES
 #include <cmath>
-#include <valarray>
+#include <algorithm>
+//#include <valarray>
 #include "bayesoptcont.hpp"
 
 #ifndef M_PI
   par.n_init_samples = 50;
   par.kernel.hp_mean[0] = 1.0;
   par.kernel.n_hp = 1;
-  par.crit_name = "cHedge(cEI,cLCB,cPOI)";
+  par.crit_name = "cHedge(cLCB,cEI,cPOI)";
+  double cParams[] = {5.0, 1.0, 0.01};
+  std::copy(cParams, cParams+3, par.crit_params);
+  par.n_crit_params = 3;
   
   ExampleBranin branin(2,par);
   vectord result(2);

File examples/bo_display.cpp

  
   //plot
   subplot(2,1,1);
-  title("press r to run and stop");
+  title("Press r to run and stop, s to run a step and q to quit.");
   plot(x,y); set(3);
   plot(lx,ly);set("k");set("*");
   plot(x,su);set("g"); set(2);

File examples/bo_oned.cpp

   bopt_params parameters = initialize_parameters_to_default();
   parameters.n_init_samples = 10;
   parameters.n_iterations = 300;
-  parameters.surr_name = "sGaussianProcessML";
-  /*  parameters.kernel.hp_mean[0] = 1.0;
+  parameters.surr_name = "sGaussianProcess";
+  parameters.kernel.hp_mean[0] = 1.0;
   parameters.kernel.hp_std[0] = 100.0;
   parameters.kernel.n_hp = 1;
-  parameters.crit_name = "cHedge(cEI,cLCB,cExpReturn,cOptimisticSampling)";
+  /*  parameters.crit_name = "cHedge(cEI,cLCB,cExpReturn,cOptimisticSampling)";
   parameters.epsilon = 0.0;*/
 
   ExampleOneD opt(dim,parameters);

File examples/testrand.cpp

   for (int i = 0; i<1000; i++)
     std::cout << sample() << std::endl;
 
-  return 1;
+  return 0;
 }

File exportlocalpaths.sh

 #!/bin/bash
 
-export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/lib
-export PYTHONPATH=${PYTHONPATH}:/usr/local/lib
+BAYESOPT_PATH=$PWD
+export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${BAYESOPT_PATH}/lib
+export PYTHONPATH=${PYTHONPATH}:${BAYESOPT_PATH}/lib

File include/bayesoptbase.hpp

      */
     virtual vectord getFinalResult() = 0;
 
+    /** 
+     * Once the optimization has been perfomed, return the value of
+     * the optimal point.
+     */
+    double getMinimumValue()
+    { return mGP->getValueAtMinimum(); };
+
 
     /** 
      * \brief Function that defines the actual function to be optimized.

File include/dll_stuff.h

    This file is part of BayesOpt, an efficient C++ library for 
    Bayesian optimization.
 
-   Copyright (C) 2011-2012 Ruben Martinez-Cantin <rmcantin@unizar.es>
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
  
    BayesOpt is free software: you can redistribute it and/or modify it 
    under the terms of the GNU General Public License as published by
 #ifndef  _DLL_STUFF_HPP_
 #define  _DLL_STUFF_HPP_
 
+
 /* WINDOWS DLLs stuff */
-#if defined (_WIN32) && defined (BAYESOPT_DLL)
+#if defined (BAYESOPT_DLL) && (defined(_WIN32) || defined(__WIN32__)) && !defined(__LCC__)
   #if defined(bayesopt_EXPORTS)
     #define  BAYESOPT_API __declspec(dllexport)
   #else

File include/kernel_atomic.hpp

     {
       if(theta.size() != n_params)
 	{
-	  FILE_LOG(logERROR) << "Wrong number of hyperparameters"; 
+	  FILE_LOG(logERROR) << "Wrong number of kernel hyperparameters"; 
 	  return -1; 
 	}
       params = theta;

File include/kernel_combined.hpp

       size_t n_rhs = right->nHyperParameters();
       if (theta.size() != n_lhs + n_rhs)
 	{
-	  FILE_LOG(logERROR) << "Wrong number of hyperparameters"; 
+	  FILE_LOG(logERROR) << "Wrong number of kernel hyperparameters"; 
 	  return -1; 
 	}
       left->setHyperParameters(subrange(theta,0,n_lhs));

File include/mean_atomic.hpp

       n_inputs = input_dim;
       return 0;
     };
-    void setParameters(const vectord &theta) 
+    int setParameters(const vectord &theta) 
     {
-      //      assert(theta.size() == n_params);
+      if(theta.size() != n_params)
+	{
+	  FILE_LOG(logERROR) << "Wrong number of mean function parameters"; 
+	  return -1; 
+	}
+   
       mParameters = theta;
+      return 0;
     };
     vectord getParameters() {return mParameters;};
     size_t nParameters() {return n_params;};
     int init(size_t input_dim)
     {
       n_inputs = input_dim;
-      n_params = 0;
+      n_params = 1;
       n_features = 1;
       return 0;
     };
     int init(size_t input_dim)
     {
       n_inputs = input_dim;
-      n_params = 0;
+      n_params = 1;
       n_features = 1;
       return 0;
     };
       n_features = input_dim + 1;
       return 0;
     };
-    void setParameters(const vectord& params)
+    int setParameters(const vectord& params)
     { 
+      if(params.size() != n_params)
+	{
+	  FILE_LOG(logERROR) << "Wrong number of mean function parameters"; 
+	  return -1; 
+	}
+
       mConstParam = params(0);
       mParameters = boost::numeric::ublas::project(params, 
 						   boost::numeric::ublas::range(1, params.size())); 
+      return 0;
     };
   
     double getMean (const vectord& x)

File include/mean_combined.hpp

       this->right = right;
       return 0;
     };
-    void setParameters(const vectord &theta) 
+    int setParameters(const vectord &theta) 
     {
       using boost::numeric::ublas::subrange;
 
       size_t n_lhs = left->nParameters();
       size_t n_rhs = right->nParameters();
-      //assert(theta.size() == n_lhs + n_rhs);
+      if (theta.size() != n_lhs + n_rhs)
+	{
+	  FILE_LOG(logERROR) << "Wrong number of mean function parameters"; 
+	  return -1; 
+	}
+
       left->setParameters(subrange(theta,0,n_lhs));
       right->setParameters(subrange(theta,n_lhs,n_lhs+n_rhs));
+
+      return 0;
     };
 
     vectord getParameters() 

File include/mean_functors.hpp

     virtual int init(size_t input_dim, ParametricFunction* left, 
 		     ParametricFunction* right) {return 0;};
 
-    virtual void setParameters(const vectord& params) = 0;
+    virtual int setParameters(const vectord& params) = 0;
     virtual vectord getParameters() = 0;
     virtual size_t nParameters() = 0;
 

File include/nonparametricprocess.hpp

File contents unchanged.

File include/parameters.h

   /*** Type definitions                                       **/
   /*************************************************************/
   
-  typedef enum {  
-    S_GAUSSIAN_PROCESS,
-    S_GAUSSIAN_PROCESS_ML,
-    S_STUDENT_T_PROCESS_JEFFREYS,
-    S_STUDENT_T_PROCESS_NORMAL_INV_GAMMA,
-    S_ERROR = -1
-  } surrogate_name;
-
   typedef enum {
     L_ML,
     L_MAP,
     size_t n_inner_iterations;   /**< Maximum inner optimizer evaluations */
     size_t n_init_samples;       /**< Number of samples before optimization */
     size_t n_iter_relearn;       /**< Number of samples before relearn kernel */
+    size_t init_method;   /**< Sampling method for initial set 1-LHS, 2-Sobol (if available), other uniform */
 
     size_t verbose_level;        /**< 1-Error,2-Warning,3-Info. 4-6 log file*/
     char* log_filename;          /**< Log file path (if applicable) */
   /* These functions are added to simplify wrapping code       */
   /*************************************************************/
   /* surrogate_name str2surrogate (const char* name); */
-  learning_type  str2learn     (const char* name);
+  BAYESOPT_API learning_type str2learn(const char* name);
 
   /* BAYESOPT_API const char* surrogate2str(surrogate_name name); */
   BAYESOPT_API const char* learn2str(learning_type name);

File matlab/Makefile

+# -------------------------------------------------------------------------
+#    This file is part of BayesOpt, an efficient C++ library for 
+#    Bayesian optimization.
+#
+#    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+#
+#    BayesOpt is free software: you can redistribute it and/or modify it 
+#    under the terms of the GNU General Public License as published by
+#    the Free Software Foundation, either version 3 of the License, or
+#    (at your option) any later version.
+#
+#    BayesOpt is distributed in the hope that it will be useful, but 
+#    WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+# ------------------------------------------------------------------------
+
+
+MATLABROOT= C:\Program Files (x86)\MATLAB\R2010a
+CC=mingw32-c++
+INCLUDES=-I"$(MATLABROOT)\extern\include" -I"." -I"..\include" -I"..\wrappers"
+LIBS=-L"$(MATLABROOT)\bin\win32" -lmex -lmx -leng -lmat
+FLAGS=-shared -DMATLAB_MEX_FILE -Wl,--export-all-symbols
+BOPT=libbayesopt.dll
+
+all: disc cont
+
+cont:
+	$(CC) $(FLAGS) $(INCLUDES) -o bayesoptcont.mexw32  bayesoptmex.c $(BOPT) $(LIBS)
+
+disc:
+	$(CC) $(FLAGS) $(INCLUDES) -o bayesoptdisc.mexw32  bayesoptdiscmex.c $(BOPT) $(LIBS)
+
+
+
+
+
+
+
+
+
+

File matlab/bayesopt.m

-% BAYESOPT Optimization (minimization) of target function using bayesian
-% optimization.
-%
-% Usage: [xmin, fmin] = bayesopt(@function_handler, nDimensions, params)
-%        [xmin, fmin] = bayesopt(@function_handler, nDimensions, params,
-%                                lowerBound, upperBound) 
-%
-%        [xmin, fmin] = bayesopt('function_name', nDimensions, params)
-%        [xmin, fmin] = bayesopt('function_name', nDimensions, params,
-%                                lowerBound, upperBound) 
-%
-%        [xmin, fmin] = bayesoptdisc(@function_handler, validset, params)
-%        [xmin, fmin] = bayesoptdisc('function_name', validset, params)
-%
-%
-% nDimensions is the number of dimensions (d) of the query vector.
-%
-% Params is a struct which have the same fields as the C/C++ interface 
-%   (see include/parameters.h)
-%
-% lowerBound and upperBound should be a d x 1 or 1 x d vectors with
-%      the lower and upper bound for each component. (optional, default 0-1)
-%
-% validset is the set of discrete points for discrete optimization,
-%      stacked in a single matrix. Thus, it must be a d x n matrix.
-%
-% 
-% -------------------------------------------------------------------------
-%    This file is part of BayesOpt, an efficient C++ library for 
-%    Bayesian optimization.
-%
-%    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
-%
-%    BayesOpt is free software: you can redistribute it and/or modify it 
-%    under the terms of the GNU General Public License as published by
-%    the Free Software Foundation, either version 3 of the License, or
-%    (at your option) any later version.
-%
-%    BayesOpt is distributed in the hope that it will be useful, but 
-%    WITHOUT ANY WARRANTY; without even the implied warranty of
-%    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-%    GNU General Public License for more details.
-%
-%    You should have received a copy of the GNU General Public License
-%    along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
-% ------------------------------------------------------------------------
-%
-

File matlab/bayesoptcont.m

+% BAYESOPTCONT Optimization (minimization) of continuous target function 
+% using Bayesian optimization.
+%
+% Usage: [xmin, fmin] = bayesopt(@function_handler, nDimensions, params)
+%        [xmin, fmin] = bayesopt(@function_handler, nDimensions, params,
+%                                lowerBound, upperBound) 
+%
+%        [xmin, fmin] = bayesopt('function_name', nDimensions, params)
+%        [xmin, fmin] = bayesopt('function_name', nDimensions, params,
+%                                lowerBound, upperBound) 
+%
+% nDimensions is the number of dimensions (d) of the query vector.
+%
+% params is a struct which has the same fields as the C/C++ interface 
+%   (see include/parameters.h)
+%
+% lowerBound and upperBound should be a d x 1 or 1 x d vectors with
+%      the lower and upper bound for each component. (optional, default 0-1)
+% 
+% -------------------------------------------------------------------------
+%    This file is part of BayesOpt, an efficient C++ library for 
+%    Bayesian optimization.
+%
+%    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+%
+%    BayesOpt is free software: you can redistribute it and/or modify it 
+%    under the terms of the GNU General Public License as published by
+%    the Free Software Foundation, either version 3 of the License, or
+%    (at your option) any later version.
+%
+%    BayesOpt is distributed in the hope that it will be useful, but 
+%    WITHOUT ANY WARRANTY; without even the implied warranty of
+%    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+%    GNU General Public License for more details.
+%
+%    You should have received a copy of the GNU General Public License
+%    along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+% ------------------------------------------------------------------------
+%
+

File matlab/bayesoptdisc.m

+% BAYESOPTDISC Optimization (minimization) of discrete target function 
+% using Bayesian optimization.
+%
+% Usage: [xmin, fmin] = bayesoptdisc(@function_handler, validset, params)
+%        [xmin, fmin] = bayesoptdisc('function_name', validset, params)
+%
+%
+% params is a struct which has the same fields as the C/C++ interface 
+%   (see include/parameters.h)
+%
+% validset is the set of discrete points for discrete optimization,
+%      stacked in a single matrix. Thus, it must be a d x n matrix.
+%
+% 
+% -------------------------------------------------------------------------
+%    This file is part of BayesOpt, an efficient C++ library for 
+%    Bayesian optimization.
+%
+%    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+%
+%    BayesOpt is free software: you can redistribute it and/or modify it 
+%    under the terms of the GNU General Public License as published by
+%    the Free Software Foundation, either version 3 of the License, or
+%    (at your option) any later version.
+%
+%    BayesOpt is distributed in the hope that it will be useful, but 
+%    WITHOUT ANY WARRANTY; without even the implied warranty of
+%    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+%    GNU General Public License for more details.
+%
+%    You should have received a copy of the GNU General Public License
+%    along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+% ------------------------------------------------------------------------
+%
+

File matlab/bayesoptdiscmex.c

    This file is part of BayesOpt, an efficient C++ library for 
    Bayesian optimization.
 
-   Copyright (C) 2011-2012 Ruben Martinez-Cantin <rmcantin@unizar.es>
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
  
    BayesOpt is free software: you can redistribute it and/or modify it 
    under the terms of the GNU General Public License as published by

File matlab/bayesoptextras.h

 #include <math.h>
 #include <mex.h>
 
+#include "bayesoptwpr.h"
 #include "parameters.h"
-#include "bayesoptwpr.h"
+
 
 #define CHECK0(cond, msg) if (!(cond)) mexErrMsgTxt(msg);
 
 static void struct_string(const mxArray *s, const char *name, char* result)
 {
   mxArray *val = mxGetField(s, 0, name);
-  char *valstr;
 
   if (val) {
     if( mxIsChar(val) ) {
-      if ( mxGetString(val, result, 1+(mxGetM(val) * mxGetN(val)))) {
-	  mexErrMsgTxt("Error loading string.");
-	}
+      result = mxArrayToString(val);
     } else {
       mexErrMsgTxt("Method name must be a string");
     }
   
   /* See parameters.h for the available options */
   
-  char log_str[100], k_s_str[100];
   char l_str[100];
   size_t n_hp_test, n_coef_test;
 
   struct_size(params,"n_inner_iterations", &parameters.n_inner_iterations);
   struct_size(params, "n_init_samples", &parameters.n_init_samples);
   struct_size(params, "n_iter_relearn", &parameters.n_iter_relearn);
+  struct_size(params, "init_method", &parameters.init_method);
   
   struct_size(params, "verbose_level", &parameters.verbose_level);
   struct_string(params, "log_filename", parameters.log_filename);
-
+  
   struct_string(params, "surr_name", parameters.surr_name);
 
   struct_value(params, "sigma_s", &parameters.sigma_s);

File matlab/bayesoptmex.c

    This file is part of BayesOpt, an efficient C++ library for 
    Bayesian optimization.
 
-   Copyright (C) 2011-2012 Ruben Martinez-Cantin <rmcantin@unizar.es>
+   Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
  
    BayesOpt is free software: you can redistribute it and/or modify it 
    under the terms of the GNU General Public License as published by
     {
       params = mxCreateStructMatrix(1,1,0,NULL);
     }
-
   parameters = load_parameters(params);
 
   if(nrhs == 5)
     {
       /* Load bounds */
+      mexPrintf("Loading bounds...");
       CHECK0(mxIsDouble(prhs[3]) && !mxIsComplex(prhs[3])
 	     && (mxGetM(prhs[3]) == 1    || mxGetN(prhs[3]) == 1)
 	     && (mxGetM(prhs[3]) == nDim || mxGetN(prhs[3]) == nDim),
 	     "lowerBound must be real row or column vector");
 
       lb = mxGetPr(prhs[3]);
-      mexPrintf("Loading bounds \n");
+
 
       CHECK0(mxIsDouble(prhs[4]) && !mxIsComplex(prhs[4])
 	     && (mxGetM(prhs[4]) == 1    || mxGetN(prhs[4]) == 1)
 	     "upperBound must be real row or column vector");
 
       ub = mxGetPr(prhs[4]);
+      mexPrintf("done. \n");
     }
   else
     {
-      lb = mxCalloc(nDim,sizeof(double));
-      ub = mxCalloc(nDim,sizeof(double));
+      lb = (double*)(mxCalloc(nDim,sizeof(double)));
+      ub = (double*)(mxCalloc(nDim,sizeof(double)));
 	 
 
       

File matlab/compile_matlab.m

+% 
+% -------------------------------------------------------------------------
+%    This file is part of BayesOpt, an efficient C++ library for 
+%    Bayesian optimization.
+%
+%    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+%
+%    BayesOpt is free software: you can redistribute it and/or modify it 
+%    under the terms of the GNU General Public License as published by
+%    the Free Software Foundation, either version 3 of the License, or
+%    (at your option) any later version.
+%
+%    BayesOpt is distributed in the hope that it will be useful, but 
+%    WITHOUT ANY WARRANTY; without even the implied warranty of
+%    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+%    GNU General Public License for more details.
+%
+%    You should have received a copy of the GNU General Public License
+%    along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+% ------------------------------------------------------------------------
+%
+
 % You can also change ../lib for the corresponding install path
 % MATLAB
 if (ispc)
-    mex -output bayesopt bayesoptmex.c ..\lib\Release\bayesopt.lib ...
-        ..\lib\Release\nlopt.lib ...
-        -I..\include -I..\wrappers -I..\nlopt\api 
+    if exist('../bin/Release/bayesopt.dll','file')
+        disp('Compiling dynamic library');
+        mex -DBAYESOPT_DLL -output bayesoptcont bayesoptmex.c ...
+            -L..\lib\Release -L. -lbayesopt ...
+            -I..\include -I..\wrappers
+        mex -DBAYESOPT_DLL -output bayesoptdisc bayesoptdiscmex.c ...
+            -L..\lib\Release -L. -lbayesopt ...
+            -I..\include -I..\wrappers
+    else
+        disp('Compiling static library');
+        mex -output bayesoptcont bayesoptmex.c ...
+            -L../lib/Release -lbayesopt -lnlopt ...
+            -I../include -I../wrappers
+        
+        mex -output bayesoptdisc bayesoptdiscmex.c ...
+            -L../lib/Release -lbayesopt -lnlopt ...
+            -I../include -I../wrappers
+    end
+else
+    mex -output bayesoptcont bayesoptmex.c -L../lib -lbayesopt ...
+        -lnlopt -I../include -I../wrappers -I../nlopt/api 
 
-   mex -output bayesoptdisc bayesoptdiscmex.c ../lib/Release/bayesopt.lib ...
-        ../lib/Release/nlopt.lib -I../include -I../wrappers -I../nlopt/api 
-else
-    if exist('../lib/libbayesopt.a','file')
-        disp('Compiling static library');
-        mex -output bayesopt bayesoptmex.c ../lib/libbayesopt.a ...
-        ../lib/libnlopt.a -I../include -I../wrappers -I../nlopt/api 
-
-        mex -output bayesoptdisc bayesoptdiscmex.c ../lib/libbayesopt.a ...
-            ../lib/libnlopt.a -I../include -I../wrappers -I../nlopt/api 
-    else
-        if exist('../lib/bayesopt.so','file')
-            disp('Compiling dynamic library');
-            mex -output bayesopt bayesoptmex.c ../lib/bayesopt.so ../lib/libnlopt.so ...
-                -I../include -I../wrappers -I../nlopt/api 
-
-            mex -output bayesoptdisc bayesoptdiscmex.c ../lib/bayesopt.so ...
-                ../lib/libnlopt.so -I../include -I../wrappers -I../nlopt/api ...
-                
-        else
-            disp('Error: File not found');
-        end
-    end
+    mex -output bayesoptdisc bayesoptdiscmex.c -L../lib -lbayesopt ...
+        -lnlopt -I../include -I../wrappers -I../nlopt/api 
 end

File matlab/compile_octave.m

+% 
+% -------------------------------------------------------------------------
+%    This file is part of BayesOpt, an efficient C++ library for 
+%    Bayesian optimization.
+%
+%    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+%
+%    BayesOpt is free software: you can redistribute it and/or modify it 
+%    under the terms of the GNU General Public License as published by
+%    the Free Software Foundation, either version 3 of the License, or
+%    (at your option) any later version.
+%
+%    BayesOpt is distributed in the hope that it will be useful, but 
+%    WITHOUT ANY WARRANTY; without even the implied warranty of
+%    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+%    GNU General Public License for more details.
+%
+%    You should have received a copy of the GNU General Public License
+%    along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+% ------------------------------------------------------------------------
+%
+
 % You can also change ../lib for the corresponding install path
 % Octave
-mkoctfile -L../lib -l:bayesopt.so -lnlopt -I../include -I../wrappers ...
-    -I../nlopt/api --mex "-Wl,-rpath=../lib" --output bayesopt.mex ...
-    bayesoptmex.c
+mkoctfile -L../lib -lbayesopt -lnlopt -I../include -I../wrappers ...
+    --mex --output bayesoptcont.mex bayesoptmex.c
 
-mkoctfile -L../lib -l:bayesopt.so -lnlopt -I../include -I../wrappers ...
-    -I../nlopt/api --mex "-Wl,-rpath=../lib" --output bayesoptdisc.mex ...
-    bayesoptdiscmex.c
+mkoctfile -L../lib -lbayesopt -lnlopt -I../include -I../wrappers ...
+    --mex --output bayesoptdisc.mex bayesoptdiscmex.c
 
-    

File matlab/demo_rembo.m

 addpath('testfunctions')
 
 params.n_iterations = 300;
-params.n_init_iterations = 50;
+params.n_init_samples = 50;
 params.crit_name = 'cEI';
 params.surr_name = 'sGaussianProcessNormal';
 params.noise = 0.005;
     disp('Continuous optimization');
     MATRIX_A = randn(nh,n);
     tic;
-    result = bayesopt(fun,n,params,lb,ub);
+    result = bayesoptcont(fun,n,params,lb,ub);
     toc;
 
     values(i) = braninhighdim(result);

File matlab/runtest.m

 addpath('testfunctions')
 
 params.n_iterations = 100;
-params.n_init_iterations = 50;
+params.n_init_samples = 5;
 params.crit_name = 'cEI';
 params.surr_name = 'sGaussianProcessNormal';
 params.noise = 0.005;
 params.kernel_name = 'kMaternISO3';
 params.kernel_hp_mean = [0.5];
 params.kernel_hp_std = [10];
-params.verbose_level = 0;
+params.verbose_level = 1;
 params.log_filename = 'matbopt.log';
 
 % n = 5;
 
 disp('Continuous optimization');
 tic;
-bayesopt(fun,n,params,lb,ub)
+bayesoptcont(fun,n,params,lb,ub)
 toc;
 
 disp('Discrete optimization');
-% The set of points must be nDim x nPoints.
-xset = repmat((ub-lb),1,100) .* rand(n,100) - repmat(lb,1,100);
+% The set of points must be numDimension x numPoints.
+np = 100;
+xset = repmat((ub-lb),1,np) .* rand(n,np) - repmat(lb,1,np);
 
 tic;
 bayesoptdisc(fun,xset, params);

File matplotpp/matplotpp.h

 Author: Yuichi Katori (yuichi.katori@gmail.com)
 Project:MATPLOT++ (MATLAB-like plotting tool in C++).
 Version:0.3.13
+
+Modified: Ruben Martinez-Cantin (2013)
+    - Fixed std namespace issue
+    - Fixed bugs
 ****************************************************************************/
 
 #include <GL/glut.h>

File nlopt/CMakeLists.txt

 	stogo/global.cc stogo/linalg.cc stogo/local.cc stogo/stogo.cc stogo/tools.cc stogo/global.h stogo/linalg.h stogo/local.h stogo/stogo_config.h stogo/stogo.h stogo/tools.h 
         )
 
-OPTION(NLOPT_BUILD_SHARED "Build NLOPT as a shared library" ON )
+# IF(BUILD_SHARED_LIBS)
+#   SET(NLOPT_BUILD_SHARED ON CACHE BOOL "Build NLOPT as a shared library")
+# ELSE(BUILD_SHARED_LIBS)
+#   SET(NLOPT_BUILD_SHARED OFF CACHE BOOL "Build NLOPT as a shared library")
+# ENDIF(BUILD_SHARED_LIBS)
+
+OPTION(NLOPT_BUILD_SHARED "Build NLOPT as a shared library" OFF )
 
 IF(NLOPT_BUILD_SHARED)
   ADD_DEFINITIONS(-DNLOPT_DLL)
   ADD_LIBRARY (nlopt STATIC ${NLOPT_SOURCES} )
 ENDIF(NLOPT_BUILD_SHARED)
 
-IF((BUILD_SHARED_LIBS OR MATLAB_COMPATIBLE) AND NOT WIN32)
+IF((BAYESOPT_BUILD_SHARED OR BAYESOPT_MATLAB_COMPATIBLE) AND NOT WIN32)
   SET_TARGET_PROPERTIES(nlopt PROPERTIES COMPILE_FLAGS "-fPIC")
 ENDIF()
 

File optimization.bib

   timestamp = {2013.02.22}
 }
 
+@INPROCEEDINGS{Bergstra2011,
+  author = {James Bergstra and R{\'e}mi Bardenet and Yoshua Bengio and Bal{\'a}zs K{\'e}gl},
+  title = {Algorithms for Hyper-parameter Optimization},
+  booktitle = {Advances in Neural Information Processing Systems},
+  year = {2011},
+  pages = {2546--2554},
+  owner = {rmcantin},
+  timestamp = {2013.07.24}
+}
+
 @ARTICLE{Song2012,
   author = {Le Song and Alex Smola and Arthur Gretton and Justin Bedo and Karsten
 	Borgwardt},

File python/bayesopt.cpp

File contents unchanged.

File python/bayesopt.pyx

     ctypedef struct bopt_params:
         unsigned int n_iterations, n_inner_iterations,
         unsigned int n_init_samples, n_iter_relearn,
+        unsigned int init_method
         unsigned int verbose_level
         char* log_filename
         char* surr_name
+        double sigma_s
         double noise
         double alpha, beta
         learning_type l_type
     params = initialize_parameters_to_default()
 
     params.n_iterations = dparams.get('n_iterations',params.n_iterations)
+    params.n_inner_iterations = dparams.get('n_inner_iterations',
+                                            params.n_inner_iterations)
     params.n_init_samples = dparams.get('n_init_samples',params.n_init_samples)
+    params.n_iter_relearn = dparams.get('n_iter_relearn',params.n_iter_relearn)
+    params.init_method = dparams.get('init_method',params.init_method)
+
     params.verbose_level = dparams.get('verbose_level',params.verbose_level)
-
     logname = dparams.get('log_filename',params.log_filename)
     params.log_filename = logname
 
     sname = dparams.get('surr_name',params.surr_name)
     params.surr_name = sname;
+    params.sigma_s = dparams.get('sigma_s',params.sigma_s)
+    params.noise = dparams.get('noise',params.noise)
+    params.alpha = dparams.get('alpha',params.alpha)
+    params.beta = dparams.get('beta',params.beta)
 
     learning = dparams.get('learning_type', None)
     if learning is not None:
         params.l_type = str2learn(learning)
 
+    params.epsilon = dparams.get('epsilon',params.epsilon)
 
-    params.alpha = dparams.get('alpha',params.alpha)
-    params.beta = dparams.get('beta',params.beta)
-    params.noise = dparams.get('noise',params.noise)
+    kname = dparams.get('kernel_name',params.kernel.name)
+    params.kernel.name = kname;
 
-    theta = dparams.get('theta',None)
-    stheta = dparams.get('s_theta',None)
+    theta = dparams.get('kernel_hp_mean',None)
+    stheta = dparams.get('kernel_hp_std',None)
     if theta is not None and stheta is not None:
         params.kernel.n_hp = len(theta)
         for i in range(0,params.kernel.n_hp):
             params.kernel.hp_mean[i] = theta[i]
             params.kernel.hp_std[i] = stheta[i]
 
+    mname = dparams.get('mean_name',params.mean.name)
+    params.mean.name = mname
 
-    mu = dparams.get('mu',None)
-    smu = dparams.get('s_mu',None)
+    mu = dparams.get('mean_coef_mean',None)
+    smu = dparams.get('mean_coef_std',None)
     if mu is not None and smu is not None:
         params.mean.n_coef = len(mu)
         for i in range(0,params.mean.n_coef):
             params.mean.coef_mean[i] = mu[i]
             params.mean.coef_std[i] = smu[i]
 
+    cname = dparams.get('crit_name',params.crit_name)
+    params.crit_name = cname
+
     cp = dparams.get('crit_params',None)
     if cp is not None:
         params.n_crit_params = len(cp)
         for i in range(0,params.n_crit_params):
             params.crit_params[i] = cp[i]
 
-    kname = dparams.get('kernel_name',params.kernel.name)
-    params.kernel.name = kname;
-
-    mname = dparams.get('mean_name',params.mean.name)
-    params.mean.name = mname
-
-    cname = dparams.get('crit_name',params.crit_name)
-    params.crit_name = cname
-
     return params
 
 cdef double callback(unsigned int n, const_double_ptr x,
     x_np = np.zeros(n)
     for i in range(0,n):
         x_np[i] = <double>x[i]
-        result = (<object>func_data)(x_np)
+    result = (<object>func_data)(x_np)
     return result
 
 
         "n_iterations"   : 300,
         "n_inner_iterations" : 500,
         "n_init_samples" : 30,
-        "n_iter_relearn" : 30,
+        "n_iter_relearn" : 0,
+        "init_method" : 1,
         "verbose_level"  : 1,
         "log_filename"   : "bayesopt.log" ,
         "surr_name" : "sGaussianProcess" ,
         "epsilon" : 0.0,
         "kernel_name" : "kMaternISO3",
         "kernel_hp_mean"  : [1.0],
-        "kernel_hp_std": [1.0],
-        "mean_name" : "mOne",
+        "kernel_hp_std": [100.0],
+        "mean_name" : "mConst",
         "mean_coef_mean"     : [1.0],
-        "mean_coef_std"   : [1.0],
+        "mean_coef_std"   : [1000.0],
         "crit_name" : "cEI",
         "crit_params" : [1.0],
         }

File python/bayesoptmodule.py

 #    This file is part of BayesOptimization, an efficient C++ library for 
 #    Bayesian optimization.
 #
-#    Copyright (C) 2011-2012 Ruben Martinez-Cantin <rmcantin@unizar.es>
+#    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
 #
 #    BayesOptimization is free software: you can redistribute it and/or modify
 #    it under the terms of the GNU General Public License as published by

File python/demo_dimscaling.py

 #!/usr/bin/env python
 # -------------------------------------------------------------------------
-#    This file is part of BayesOpt, an efficient C++ library for 
+#    This file is part of BayesOpt, an efficient C++ library for
 #    Bayesian optimization.
 #
 #    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
-# 
-#    BayesOpt is free software: you can redistribute it and/or modify it 
+#
+#    BayesOpt is free software: you can redistribute it and/or modify it
 #    under the terms of the GNU General Public License as published by
 #    the Free Software Foundation, either version 3 of the License, or
 #    (at your option) any later version.
 #
-#    BayesOpt is distributed in the hope that it will be useful, but 
+#    BayesOpt is distributed in the hope that it will be useful, but
 #    WITHOUT ANY WARRANTY; without even the implied warranty of
 #    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 #    GNU General Public License for more details.
 # This example was provided by Janto Dreijer <jantod@gmail.com>
 
 import sys
+#Assume default install.
 sys.path.append('/usr/local/lib')
 
+import math
 import numpy as np
 import bayesopt
 
+def quad(x,mu):
+    return ((np.asarray(x) - mu)**2).mean()
+
 def func(x):
-	#print "x", x
-	#~ target = np.ones(len(x))*0.3
-	target = np.arange(1,1+len(x))
-	#print "target", target
-	e = ((np.asarray(x) - target)**2).mean()
-	#print "e", e
-	return e
+    #print "x", x
+    #~ target = np.ones(len(x))*0.3
+    target = np.arange(1,1+len(x))
+    target2 = np.ones(len(x))*10
+    #print "target", target
+    e = quad(x,target)
+    return e
 
 # Initialize the parameters by default
 params = bayesopt.initialize_params()
 
 # We decided to change some of them
 params['n_init_samples'] = 150
+params['n_iter_relearn'] = 20
 #params['noise'] = 0.01
-#params['kernel_name'] = "kMaternISO3"
-params['surr_name'] = "sStudentTProcessJef"
+params['kernel_name'] = "kMaternISO3"
+params['kernel_hp_mean'] = [1]
+params['kernel_hp_std'] = [5]
+params['surr_name'] = "sStudentTProcessNIG"
 
 dim = 20
 lb = np.ones((dim,))*0
 
 mvalue, x_out, error = bayesopt.optimize(func, dim, lb, ub, params)
 
-print mvalue, x_out, error
+print "Result", mvalue, x_out
+
+print "Global optimal", 0, np.arange(1,1+dim)
+
+print "Distance", math.sqrt(mvalue*dim)

File python/demo_quad.py

 params = bayesopt.initialize_params()
 params['n_iterations'] = 50
 params['n_init_samples'] = 20
-#params['surr_name'] = "GAUSSIAN_PROCESS_INV_GAMMA_NORMAL"
 params['crit_name'] = "cEI"
 params['kernel_name'] = "kMaternISO3"
+
+
 print "Callback implementation"
 
 n = 5                     # n dimensions
 ub = np.ones((n,))
 
 start = clock()
-
 mvalue, x_out, error = bayesopt.optimize(testfunc, n, lb, ub, params)
 
-print "Result", x_out
+print "Result", mvalue, "at", x_out
 print "Seconds", clock() - start
-
+raw_input('Press INTRO to continue')
 
 print "OO implementation"
 bo_test = BayesOptTest()
 bo_test.params = params
-bo_test.n = n
-bo_test.lb = lb
-bo_test.ub = ub
+bo_test.n_dim = n
+bo_test.lower_bound = lb
+bo_test.upper_bound = ub
 
 start = clock()
 mvalue, x_out, error = bo_test.optimize()
 
-print "Result", x_out
+print "Result", mvalue, "at", x_out
 print "Seconds", clock() - start
-
+raw_input('Press INTRO to continue')
 
 print "Callback discrete implementation"
 x_set = np.random.rand(100,n)
 
 mvalue, x_out, error = bayesopt.optimize_discrete(testfunc, x_set, params)
 
-print "Result", x_out
+print "Result", mvalue, "at", x_out
 print "Seconds", clock() - start
 
 value = np.array([testfunc(i) for i in x_set])

File src/bayesoptbase.cpp

 ------------------------------------------------------------------------
 */
 
+#include <cstdlib>
 #include "log.hpp"
 #include "bayesoptbase.hpp"
 
     if (mGP == NULL) 
       {
 	FILE_LOG(logERROR) << "Error setting the surrogate function"; 
-	return -1;
+	exit(EXIT_FAILURE);
       } 
     return 0;
   } // setSurrogateModel
   int BayesOptBase::setCriteria()
   {
     mCrit.reset(mCFactory.create(mParameters.crit_name,mGP.get()));
-    if ((mCrit == NULL) || (mCrit->nParameters() != mParameters.n_crit_params))
+    if (mCrit == NULL)
       {
 	FILE_LOG(logERROR) << "Error in criterium"; 
-	if (mCrit->nParameters() != mParameters.n_crit_params)
+	exit(EXIT_FAILURE);
+      }
+    
+    if (mCrit->nParameters() != mParameters.n_crit_params)
+      {
+	if (mParameters.n_crit_params != 0)
 	  {
 	    FILE_LOG(logERROR) << "Expected " << mCrit->nParameters() 
 			       << " parameters. Got " 
 			       << mParameters.n_crit_params << " instead.";
 	  }
-	return -1;
+	FILE_LOG(logINFO) << "Usign default parameters for criteria.";
+	return 0;
       }
+      
+    // If we have the correct number of parameters.
     vectord critParams = utils::array2vector(mParameters.crit_params,
-					     mParameters.n_crit_params);
+					       mParameters.n_crit_params);
     mCrit->setParameters(critParams);
     return 0;
   } // setCriteria

File src/bayesoptcont.cpp

   {
     
     size_t nSamples = mParameters.n_init_samples;
-    int useLatinBox = 2;
     
     matrixd xPoints(nSamples,mDims);
     vectord yPoints(nSamples);
     vectord sample(mDims);
-    randEngine mtRandom;
     
-    if (useLatinBox == 1)           utils::lhs(xPoints, mtRandom);
-    else if (useLatinBox == 2)           utils::sobol(xPoints, 0);
-    else                utils::uniformSampling(xPoints, mtRandom);
-    
+    utils::samplePoints(xPoints,mParameters.init_method);
+
     for(size_t i = 0; i < nSamples; i++)
       {
 	sample = row(xPoints,i);

File src/bayesoptdisc.cpp

 	FILE_LOG(logINFO) << "Best found at: " << mGP->getPointAtMinimum() ; 
 	FILE_LOG(logINFO) << "Best outcome: " <<  mGP->getValueAtMinimum() ;    
       }
-    return 1;
+    return 0;
   }
 
 
 	      }
 	  }  
       }
-    return 1;
+    return 0;
   } // sampleInitialPoints
   
 
 	    min = current;
 	  }
       }
-    return 1;
+    return 0;
   }
 
 }  // namespace bayesopt

File src/criteria_combined.cpp

     else
       {
 	int optIndex = update_hedge();
-	name = mCriteriaList[optIndex]->name();
-      
 	if (optIndex >= 0)
 	  {
-	    best = mBestLists[optIndex];
+	    name = mCriteriaList[optIndex]->name();
+      	    best = mBestLists[optIndex];
 	    error_code = 0;
 	  }
 	else
 	  {
+	    name = mCriteriaList[0]->name();
+      	    best = mBestLists[0];
+	    FILE_LOG(logERROR) << "Error updating Hedge algorithm. Selecting " << name;
 	    error_code = optIndex; 
 	  }
 	return true;	
 
   int GP_Hedge::update_hedge()
   {
-    double max_g = *std::max_element(gain_.begin(),gain_.end());
-    double min_g = *std::min_element(gain_.begin(),gain_.end());
+    // We just care about the differences
     double max_l = *std::max_element(loss_.begin(),loss_.end());
-
-    // We just care about the differences
     loss_ += svectord(loss_.size(),max_l);
 
     // To avoid overflow
-    if (std::abs(max_g) > std::abs(min_g))
-      gain_ -= svectord(gain_.size(),max_g);
-    else
-      gain_ -= svectord(gain_.size(),min_g);
+    double mean_g = std::accumulate(gain_.begin(),gain_.end(),0.0) 
+      / static_cast<double>(gain_.size());
+    gain_ -= svectord(gain_.size(),mean_g);
 
     // Optimal eta according to Shapire
-    max_g = *std::max_element(gain_.begin(),gain_.end());
+    double max_g = *std::max_element(gain_.begin(),gain_.end());
     double eta = (std::min)(10.0,sqrt(2.0*log(3.0)/max_g));
+    
+    // Compute probabilities
     std::transform(gain_.begin(), gain_.end(), prob_.begin(),
-		   boost::bind(softmax,_1,eta));       
-    
+		   boost::bind(softmax,_1,eta));
+       
     //Normalize
-    double sum_p =std::accumulate(prob_.begin(),prob_.end(),0);
+    double sum_p =std::accumulate(prob_.begin(),prob_.end(),0.0);
     prob_ /= sum_p;
 
     //Update bandits gain

File src/gaussian_process.cpp

     mAlphaV = mGPY-prod(mMu,mFeatM);
     inplace_solve(mL,mAlphaV,ublas::lower_tag());
 
-    return 1; 
+    return 0; 
   }
 	
 } //namespace bayesopt

File src/gaussian_process_ml.cpp

     inplace_solve(mL,mAlphaF,ublas::lower_tag());
     mSigma = inner_prod(mAlphaF,mAlphaF)/(n-p);
   
-    return 1;
+    return 0;
   }
 
 } //namespace bayesopt

File src/gaussian_process_normal.cpp

 ------------------------------------------------------------------------
 */
 
+#include <cstdlib>
 #include <boost/math/special_functions/fpclassify.hpp>
 #include <boost/numeric/ublas/banded.hpp>
 #include "log.hpp"
     if ((boost::math::isnan(yPred)) || (boost::math::isnan(sPred)))
       {
 	FILE_LOG(logERROR) << "Error in prediction. NaN found.";
-	throw 1;
+	exit(EXIT_FAILURE);
       }
 					
 
     if (boost::math::isnan(mWMap(0)))
       {
 	FILE_LOG(logERROR) << "Error in precomputed prediction. NaN found.";
-	throw 1;
+	return -1;
       }
     return 0;
   }

File src/nonparametricprocess.cpp

+
 /*
 -------------------------------------------------------------------------
    This file is part of BayesOpt, an efficient C++ library for 
 
 
 #include <cstdio>
+#include <cstdlib>
 #include "nonparametricprocess.hpp"
 #include "log.hpp"
 #include "cholesky.hpp"
       }
     kOptimizer->setLimits(1e-10,100.);
     setLearnType(parameters.l_type);
-    setKernel(parameters.kernel,dim);
-    setMean(parameters.mean,dim);
+    int errorK = setKernel(parameters.kernel,dim);
+    int errorM = setMean(parameters.mean,dim);
+    if (errorK || errorM)
+      {
+	FILE_LOG(logERROR) << "Error initializing nonparametric process.";
+	exit(EXIT_FAILURE);
+      }
   }
 
   NonParametricProcess::~NonParametricProcess()
 	FILE_LOG(logDEBUG) << "Computing kernel parameters. Seed: " 
 			   << optimalTheta;
 	kOptimizer->run(optimalTheta);
-	mKernel->setHyperParameters(optimalTheta);
+	error = mKernel->setHyperParameters(optimalTheta);
+
+	if (error)
+	  {
+	    FILE_LOG(logERROR) << "Error updating kernel parameters.";
+	    exit(EXIT_FAILURE);
+	  }   
+
 	FILE_LOG(logDEBUG) << "Final kernel parameters: " << optimalTheta;	
       }
 
     error = computeCholeskyCorrelation();