Commits

Ruben Martinez-Cantin committed 8da4fb0

Removing dependency on nloptwpr. Integrated in nlopt_optimization

Comments (0)

Files changed (3)

   )
 
 SET(WRAPPPERS_SRC 
-  ./wrappers/nloptwpr.cpp
   ./wrappers/bayesoptwpr.cpp
   )
 

include/inneroptimization.hpp

      * @return error_code
      */
     int run(vectord &Xnext);
-    
 
-    /** Dummy function to be overriden by the actual function to be
-     * evaluated.  
-     * Note: it is not pure virtual because we might want
-     * to use the other evaluate method
-     * @param query input point
-     * @return function value at query point
-     */
-    //    virtual double evaluate(const vectord& query) = 0;
+    /** 
+     * Wrapper of inner optimization to be evaluated by NLOPT
+     * 
+     * @param n # of dimensions
+     * @param x input point
+     * @param grad (NOT USED. Only for compatibility with NLOPT template, see evaluate_nlopt_grad)
+     * @param my_func_data pointer to the NLOPT_Optimization object
+     * 
+     * @return function evaluation
+     */  
+    static double evaluate_nlopt (unsigned int n, const double *x,
+				  double *grad, void *my_func_data);
 
-    /** Dummy function to be overriden by the actual function to be evaluated
-     * Note: it is not pure virtual because we might want
-     * to use the other evaluate method
-     * @param query input point
-     * @param grad output gradient at query point
-     * @return function value at query point
-     */
-    // virtual double evaluate(const vectord& query, 
-    // 				 vectord& grad)  {return 0.0;};
-
+    /** 
+     * Wrapper of inner optimization to be evaluated by NLOPT
+     * 
+     * @param n # of dimensions
+     * @param x input point
+     * @param grad returns gradient evaluation
+     * @param my_func_data pointer to the NLOPT_Optimization object
+     * 
+     * @return function evaluation
+     */  
+    static double evaluate_nlopt_grad (unsigned int n, const double *x,
+				       double *grad, void *my_func_data);
 
   private:
-
-    //    int send_to_nlopt_optimize(double* x, int n, void* objPointer);	
-    // TODO: Consider adding a container object to avoid casting to and from a polymorphic object
     RBOptimizableWrapper *rbobj;
     RGBOptimizableWrapper *rgbobj;
 

src/inneroptimization.cpp

 */
 #include <cmath>
 #include <nlopt.h>
-#include "nloptwpr.h"
 #include "parameters.h"
 #include "log.hpp"
 #include "inneroptimization.hpp"
       {
       case DIRECT: // Pure global. No gradient
 	opt = nlopt_create(NLOPT_GN_DIRECT_L, n); 
-	fpointer = &(NLOPT_WPR::evaluate_nlopt);
+	fpointer = &(NLOPT_Optimization::evaluate_nlopt);
 	objPointer = static_cast<void *>(rbobj);
 	break;
       case COMBINED: // Combined local-global (80% DIRECT -> 20% BOBYQA). No gradient
 	opt = nlopt_create(NLOPT_GN_DIRECT_L, n); 
 	maxf2 = static_cast<int>(static_cast<double>(maxf1)*coef_local);
 	maxf1 -= maxf2;  // That way, the number of evaluations is the same in all methods.
-	fpointer = &(NLOPT_WPR::evaluate_nlopt);
+	fpointer = &(NLOPT_Optimization::evaluate_nlopt);
 	objPointer = static_cast<void *>(rbobj);
 	break;
       case BOBYQA:  // Pure local. No gradient
 	opt = nlopt_create(NLOPT_LN_BOBYQA, n); 
-	fpointer = &(NLOPT_WPR::evaluate_nlopt);
+	fpointer = &(NLOPT_Optimization::evaluate_nlopt);
 	objPointer = static_cast<void *>(rbobj);
 	break;
       case LBFGS:  // Pure local. Gradient based
 	opt = nlopt_create(NLOPT_LD_LBFGS, n); 	
-	fpointer = &(NLOPT_WPR::evaluate_nlopt_grad);
+	fpointer = &(NLOPT_Optimization::evaluate_nlopt_grad);
 	objPointer = static_cast<void *>(rgbobj);
 	break;
       default: 
     return ierror;
   } // innerOptimize (uBlas)
 
+  double NLOPT_Optimization::evaluate_nlopt (unsigned int n, const double *x,
+					     double *grad, void *my_func_data)
 
-  // {   
-  //   void *objPointer = static_cast<void *>(rbobj);
-  //   int n = static_cast<int>(Xnext.size());
-  //   int error;
+  {
+    vectord vx(n);
+    std::copy(x,x+n,vx.begin());
 
-  //   assert(objPointer != NULL);
-  //   error = send_to_nlopt_optimize(&Xnext(0), n, objPointer);
+    void *objPointer = my_func_data;
+    RBOptimizableWrapper* OPTIMIZER = static_cast<RBOptimizableWrapper*>(objPointer);
+    
+    return OPTIMIZER->evaluate(vx);
+  } /* evaluate_nlopt */
 
-  //   return error;
-  // } // run (uBlas)
 
+  double NLOPT_Optimization::evaluate_nlopt_grad (unsigned int n, const double *x,
+						  double *grad, void *my_func_data)
 
+  {
+    vectord vx(n);
+    std::copy(x,x+n,vx.begin());
+    
+    void *objPointer = my_func_data;
+    RGBOptimizableWrapper* OPTIMIZER = static_cast<RGBOptimizableWrapper*>(objPointer);
+    
 
-  // int NLOPT_Optimization::send_to_nlopt_optimize(double* x, int n, void* objPointer)
-  // {
-  //   double u[128], l[128];
-  //   double fmin = 1;
-  //   int maxf = maxEvals*n;    
-  //   int ierror;
+    vectord vgrad = zvectord(n);
+    double f =  OPTIMIZER->evaluate(vx,vgrad);
+    if (grad && n)  std::copy(vgrad.begin(),vgrad.end(),grad);
 
-  //   for (int i = 0; i < n; ++i) 
-  //     {
-  // 	l[i] = mDown;	
-  // 	u[i] = mUp;
-      
-  // 	if (x[i] < l[i] || x[i] > u[i])
-  // 	  {
-  // 	    x[i]=(l[i]+u[i])/2.0;  
-  // 	    //nlopt requires x to have a valid initial value even for algorithms that do
-  // 	    //not need it
-  // 	  }
-  //     }
-    
-  //   nlopt_opt opt;
-  //   double (*fpointer)(unsigned int, const double *, double *, void *);
-  //   double coef;  //Percentaje of resources used in local optimization
+    return f;
+  } /* evaluate_nlopt_grad */
 
-  //   /* algorithm and dims */
-  //   if (alg == LBFGS)                                     //Require gradient
-  //     fpointer = &(NLOPT_WPR::evaluate_nlopt_grad);
-  //   else                                           //Do not require gradient
-  //     fpointer = &(NLOPT_WPR::evaluate_nlopt);
-
-  //   if (alg == COMBINED)  
-  //     coef = 0.8;
-  //   else
-  //     coef = 1.0;
-
-  //   switch(alg)
-  //     {
-  //     case DIRECT:      /* same as combined */
-  //     case COMBINED: 	opt = nlopt_create(NLOPT_GN_DIRECT_L, n); break;
-  //     case BOBYQA: 	opt = nlopt_create(NLOPT_LN_BOBYQA, n); break;
-  //     case LBFGS:       opt = nlopt_create(NLOPT_LD_LBFGS, n); break;
-  //     default: FILE_LOG(logERROR) << "Algorithm not supported"; return -1;
-  //     }
-
-  //   nlopt_set_lower_bounds(opt, l);
-  //   nlopt_set_upper_bounds(opt, u);
-  //   nlopt_set_min_objective(opt, fpointer, objPointer);
-  //   int nfeval = static_cast<int>(static_cast<double>(maxf)*coef);
-  //   nlopt_set_maxeval(opt, nfeval) ;
-
-
-  //   nlopt_result errortype = nlopt_optimize(opt, x, &fmin);
-  //   checkNLOPTerror(errortype);
-
-  //   // Local refinement
-  //   if ((alg == COMBINED) && (coef < 1)) 
-  //     {
-  // 	nlopt_destroy(opt);  // Destroy previous one
-  // 	opt = nlopt_create(NLOPT_LN_SBPLX, n); /* algorithm and dims */
-  // 	nlopt_set_lower_bounds(opt, l);
-  // 	nlopt_set_upper_bounds(opt, u);
-  // 	nlopt_set_min_objective(opt, fpointer, objPointer);
-  // 	nlopt_set_maxeval(opt, maxf-nfeval);
-	
-  // 	errortype = nlopt_optimize(opt, x, &fmin);
-  // 	checkNLOPTerror(errortype);
-  //     }
-      
-  //   nlopt_destroy(opt);  // Destroy opt
-    
-  //   ierror = static_cast<int>(errortype);
-  //   return ierror;
-
-  // } // send_to_nlopt_optimize (C array)
 
 
 }// namespace bayesopt