Commits

Ruben Martinez-Cantin committed 95af9d9

Solved a bug in the Hedging algorithm. Improved numerical stability (under/overflow). On error, a message is generated and the default option is selected.

Comments (0)

Files changed (7)

examples/bo_branin.cpp

 
 #define _USE_MATH_DEFINES
 #include <cmath>
-#include <valarray>
+#include <algorithm>
+//#include <valarray>
 #include "bayesoptcont.hpp"
 
 #ifndef M_PI
   par.n_init_samples = 50;
   par.kernel.hp_mean[0] = 1.0;
   par.kernel.n_hp = 1;
-  par.crit_name = "cHedge(cEI,cLCB,cPOI)";
+  par.crit_name = "cHedge(cLCB,cEI,cPOI)";
+  double cParams[] = {5.0, 1.0, 0.01};
+  std::copy(cParams, cParams+3, par.crit_params);
+  par.n_crit_params = 3;
   
   ExampleBranin branin(2,par);
   vectord result(2);

matlab/compile_matlab.m

+% 
+% -------------------------------------------------------------------------
+%    This file is part of BayesOpt, an efficient C++ library for 
+%    Bayesian optimization.
+%
+%    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+%
+%    BayesOpt is free software: you can redistribute it and/or modify it 
+%    under the terms of the GNU General Public License as published by
+%    the Free Software Foundation, either version 3 of the License, or
+%    (at your option) any later version.
+%
+%    BayesOpt is distributed in the hope that it will be useful, but 
+%    WITHOUT ANY WARRANTY; without even the implied warranty of
+%    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+%    GNU General Public License for more details.
+%
+%    You should have received a copy of the GNU General Public License
+%    along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+% ------------------------------------------------------------------------
+%
+
 % You can also change ../lib for the corresponding install path
 % MATLAB
 if (ispc)
 
     mex -output bayesoptdisc bayesoptdiscmex.c -L../lib -lbayesopt ...
         -lnlopt -I../include -I../wrappers -I../nlopt/api 
-
-    % if exist('../lib/libbayesopt.a','file')
-    %     disp('Compiling static library');
-    %     mex -output bayesoptcont bayesoptmex.c ../lib/libbayesopt.a ...
-    %     ../lib/libnlopt.a -I../include -I../wrappers -I../nlopt/api 
-
-    %     mex -output bayesoptdisc bayesoptdiscmex.c ../lib/libbayesopt.a ...
-    %         ../lib/libnlopt.a -I../include -I../wrappers -I../nlopt/api 
-    % else
-    %     if exist('../lib/bayesopt.so','file')
-    %         disp('Compiling dynamic library');
-    %         mex -g -output bayesoptcont bayesoptmex.c ../lib/bayesopt.so ...
-    %             -I../include -I../wrappers
-
-    %         mex -g -output bayesoptdisc bayesoptdiscmex.c ../lib/bayesopt.so ...
-    %             -I../include -I../wrappers
-                
-    %     else
-    %         disp('Error: File not found');
-    %     end
-    % end
 end

matlab/compile_octave.m

+% 
+% -------------------------------------------------------------------------
+%    This file is part of BayesOpt, an efficient C++ library for 
+%    Bayesian optimization.
+%
+%    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
+%
+%    BayesOpt is free software: you can redistribute it and/or modify it 
+%    under the terms of the GNU General Public License as published by
+%    the Free Software Foundation, either version 3 of the License, or
+%    (at your option) any later version.
+%
+%    BayesOpt is distributed in the hope that it will be useful, but 
+%    WITHOUT ANY WARRANTY; without even the implied warranty of
+%    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+%    GNU General Public License for more details.
+%
+%    You should have received a copy of the GNU General Public License
+%    along with BayesOpt.  If not, see <http://www.gnu.org/licenses/>.
+% ------------------------------------------------------------------------
+%
+
 % You can also change ../lib for the corresponding install path
 % Octave
 mkoctfile -L../lib -lbayesopt -lnlopt -I../include -I../wrappers ...
 mkoctfile -L../lib -lbayesopt -lnlopt -I../include -I../wrappers ...
     --mex --output bayesoptdisc.mex bayesoptdiscmex.c
 
-%     if exist('../lib/libbayesopt.a','file')
-%     disp('Compiling static library');
-%      mkoctfile -L../lib -lbayesopt -lnlopt -I../include -I../wrappers ...
-%         --mex --output bayesoptcont.mex bayesoptmex.c
-
-%     mkoctfile -L../lib -lbayesopt -lnlopt -I../include -I../wrappers ...
-%         --mex --output bayesoptdisc.mex bayesoptdiscmex.c
-% else % TODO: Does not work in MacOS
-%     if (~ismac)
-%         disp('Compiling dynamic library');
-%         mkoctfile -L../lib -l:bayesopt.so -lnlopt -I../include -I../wrappers ...
-%             --mex --output bayesoptcont.mex bayesoptmex.c
-
-%         mkoctfile -L../lib -l:bayesopt.so -lnlopt -I../include -I../wrappers ...
-%             --mex --output bayesoptdisc.mex bayesoptdiscmex.c
-%     else
-%         disp('Dynamic library not supported in MacOS');
-%     end
-% end
-    
 toc;
 
 disp('Discrete optimization');
-% The set of points must be nDim x nPoints.
-xset = repmat((ub-lb),1,100) .* rand(n,100) - repmat(lb,1,100);
+% The set of points must be numDimension x numPoints.
+np = 100;
+xset = repmat((ub-lb),1,np) .* rand(n,np) - repmat(lb,1,np);
 
 tic;
 bayesoptdisc(fun,xset, params);

python/demo_dimscaling.py

 #!/usr/bin/env python
 # -------------------------------------------------------------------------
-#    This file is part of BayesOpt, an efficient C++ library for 
+#    This file is part of BayesOpt, an efficient C++ library for
 #    Bayesian optimization.
 #
 #    Copyright (C) 2011-2013 Ruben Martinez-Cantin <rmcantin@unizar.es>
-# 
-#    BayesOpt is free software: you can redistribute it and/or modify it 
+#
+#    BayesOpt is free software: you can redistribute it and/or modify it
 #    under the terms of the GNU General Public License as published by
 #    the Free Software Foundation, either version 3 of the License, or
 #    (at your option) any later version.
 #
-#    BayesOpt is distributed in the hope that it will be useful, but 
+#    BayesOpt is distributed in the hope that it will be useful, but
 #    WITHOUT ANY WARRANTY; without even the implied warranty of
 #    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 #    GNU General Public License for more details.
 # This example was provided by Janto Dreijer <jantod@gmail.com>
 
 import sys
+#Assume default install.
 sys.path.append('/usr/local/lib')
 
 import numpy as np
 
 mvalue, x_out, error = bayesopt.optimize(func, dim, lb, ub, params)
 
-print mvalue, x_out, error
+print "Result", mvalue, x_out, error
+
+print "Optimal", 0, np.arange(1,1+dim)

python/demo_quad.py

 params = bayesopt.initialize_params()
 params['n_iterations'] = 50
 params['n_init_samples'] = 20
-#params['surr_name'] = "GAUSSIAN_PROCESS_INV_GAMMA_NORMAL"
 params['crit_name'] = "cEI"
 params['kernel_name'] = "kMaternISO3"
+
+
 print "Callback implementation"
 
 n = 5                     # n dimensions
 ub = np.ones((n,))
 
 start = clock()
-
 mvalue, x_out, error = bayesopt.optimize(testfunc, n, lb, ub, params)
 
-print "Result", x_out
+print "Result", mvalue, "at", x_out
 print "Seconds", clock() - start
-
+raw_input('Press INTRO to continue')
 
 print "OO implementation"
 bo_test = BayesOptTest()
 start = clock()
 mvalue, x_out, error = bo_test.optimize()
 
-print "Result", x_out
+print "Result", mvalue, "at", x_out
 print "Seconds", clock() - start
-
+raw_input('Press INTRO to continue')
 
 print "Callback discrete implementation"
 x_set = np.random.rand(100,n)
 
 mvalue, x_out, error = bayesopt.optimize_discrete(testfunc, x_set, params)
 
-print "Result", x_out
+print "Result", mvalue, "at", x_out
 print "Seconds", clock() - start
 
 value = np.array([testfunc(i) for i in x_set])

src/criteria_combined.cpp

     else
       {
 	int optIndex = update_hedge();
-	name = mCriteriaList[optIndex]->name();
-      
 	if (optIndex >= 0)
 	  {
-	    best = mBestLists[optIndex];
+	    name = mCriteriaList[optIndex]->name();
+      	    best = mBestLists[optIndex];
 	    error_code = 0;
 	  }
 	else
 	  {
+	    name = mCriteriaList[0]->name();
+      	    best = mBestLists[0];
+	    FILE_LOG(logERROR) << "Error updating Hedge algorithm. Selecting " << name;
 	    error_code = optIndex; 
 	  }
 	return true;	
 
   int GP_Hedge::update_hedge()
   {
-    double max_g = *std::max_element(gain_.begin(),gain_.end());
-    double min_g = *std::min_element(gain_.begin(),gain_.end());
+    // We just care about the differences
     double max_l = *std::max_element(loss_.begin(),loss_.end());
-
-    // We just care about the differences
     loss_ += svectord(loss_.size(),max_l);
 
     // To avoid overflow
-    if (std::abs(max_g) > std::abs(min_g))
-      gain_ -= svectord(gain_.size(),max_g);
-    else
-      gain_ -= svectord(gain_.size(),min_g);
+    double mean_g = std::accumulate(gain_.begin(),gain_.end(),0.0) 
+      / static_cast<double>(gain_.size());
+    gain_ -= svectord(gain_.size(),mean_g);
 
     // Optimal eta according to Schapire
-    max_g = *std::max_element(gain_.begin(),gain_.end());
+    double max_g = *std::max_element(gain_.begin(),gain_.end());
     double eta = (std::min)(10.0,sqrt(2.0*log(3.0)/max_g));
+    
+    // Compute probabilities
     std::transform(gain_.begin(), gain_.end(), prob_.begin(),
-		   boost::bind(softmax,_1,eta));       
-    
+		   boost::bind(softmax,_1,eta));
+       
     //Normalize
-    double sum_p =std::accumulate(prob_.begin(),prob_.end(),0);
+    double sum_p =std::accumulate(prob_.begin(),prob_.end(),0.0);
     prob_ /= sum_p;
 
     //Update bandits gain