SHOGUN  4.2.0
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Modules Pages
NLOPTMinimizer.cpp
Go to the documentation of this file.
1  /*
2  * Copyright (c) The Shogun Machine Learning Toolbox
3  * Written (w) 2015 Wu Lin
4  * All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions are met:
8  *
9  * 1. Redistributions of source code must retain the above copyright notice, this
10  * list of conditions and the following disclaimer.
11  * 2. Redistributions in binary form must reproduce the above copyright notice,
12  * this list of conditions and the following disclaimer in the documentation
13  * and/or other materials provided with the distribution.
14  *
15  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
16  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
17  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
18  * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
19  * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
20  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
21  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
22  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
24  * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25  *
26  * The views and conclusions contained in the software and documentation are those
27  * of the authors and should not be interpreted as representing official policies,
28  * either expressed or implied, of the Shogun Development Team.
29  *
30  */
31 
34 #include <shogun/base/Parameter.h>
35 
36 using namespace shogun;
37 #ifdef USE_GPL_SHOGUN
/** Default constructor: delegates all member setup to init().
 *
 * NOTE(review): this rendering elides source line 39, which presumably is
 * the base-class initializer list (e.g. :FirstOrderMinimizer()) — confirm
 * against the repository before editing.
 */
CNLOPTMinimizer::CNLOPTMinimizer()
{
	init();
}
43 
/** Destructor: intentionally empty — this class holds no resources that
 * require explicit release here. */
CNLOPTMinimizer::~CNLOPTMinimizer()
{
}
47 
/** Constructor with the cost function to be minimized.
 *
 * @param fun cost function; not referenced in the visible body — presumably
 * forwarded to the base class via an initializer list that this rendering
 * elides at source line 49 (TODO confirm against the repository).
 */
CNLOPTMinimizer::CNLOPTMinimizer(FirstOrderCostFunction *fun)
{
	init();
}
53 
/** Initialize members and register them with Shogun's parameter framework.
 *
 * Resets the target variable to an empty vector, applies the default NLopt
 * settings via set_nlopt_parameters(), and registers the NLopt-related
 * members with SG_ADD (serialization; model selection disabled via
 * MS_NOT_AVAILABLE). A no-op when NLopt support is not compiled in.
 */
void CNLOPTMinimizer::init()
{
#ifdef HAVE_NLOPT
	m_target_variable=SGVector<float64_t>();
	// apply default algorithm/iteration/tolerance settings
	set_nlopt_parameters();
	SG_ADD(&m_max_iterations, "CNLOPTMinimizer__m_max_iterations",
		"max_iterations in CNLOPTMinimizer", MS_NOT_AVAILABLE);
	SG_ADD(&m_variable_tolerance, "CNLOPTMinimizer__m_variable_tolerance",
		"variable_tolerance in CNLOPTMinimizer", MS_NOT_AVAILABLE);
	SG_ADD(&m_function_tolerance, "CNLOPTMinimizer__m_function_tolerance",
		"function_tolerance in CNLOPTMinimizer", MS_NOT_AVAILABLE);
	SG_ADD(&m_nlopt_algorithm_id, "CNLOPTMinimizer__m_nlopt_algorithm_id",
		"nlopt_algorithm_id in CNLOPTMinimizer", MS_NOT_AVAILABLE);
#endif
}
69 
70 float64_t CNLOPTMinimizer::minimize()
71 {
72 #ifdef HAVE_NLOPT
73  init_minimization();
74 
75  nlopt_opt opt=nlopt_create(NLOPTAlgorithmHelper::get_nlopt_algorithm(m_nlopt_algorithm_id),
76  m_target_variable.vlen);
77 
78  //add bound constraints
79  FirstOrderBoundConstraintsCostFunction* bound_constraints_fun
80  =dynamic_cast<FirstOrderBoundConstraintsCostFunction *>(m_fun);
81  if(bound_constraints_fun)
82  {
83  SGVector<float64_t> bound=bound_constraints_fun->get_lower_bound();
84  if(bound.vlen==1)
85  {
86  nlopt_set_lower_bounds1(opt, bound[0]);
87  }
88  else if (bound.vlen>1)
89  {
90  REQUIRE(bound.vlen==m_target_variable.vlen,
91  "The length of target variable (%d) and the length of lower bound (%d) do not match\n",
92  m_target_variable.vlen, bound.vlen);
93  nlopt_set_lower_bounds(opt, bound.vector);
94  }
95 
96  bound=bound_constraints_fun->get_upper_bound();
97  if(bound.vlen==1)
98  {
99  nlopt_set_upper_bounds1(opt, bound[0]);
100  }
101  else if (bound.vlen>1)
102  {
103  REQUIRE(bound.vlen==m_target_variable.vlen,
104  "The length of target variable (%d) and the length of upper bound (%d) do not match\n",
105  m_target_variable.vlen, bound.vlen);
106  nlopt_set_upper_bounds(opt, bound.vector);
107  }
108 
109  }
110  // set maximum number of evaluations
111  nlopt_set_maxeval(opt, m_max_iterations);
112  // set absolute argument tolearance
113  nlopt_set_xtol_abs1(opt, m_variable_tolerance);
114  nlopt_set_ftol_abs(opt, m_function_tolerance);
115 
116  nlopt_set_min_objective(opt, CNLOPTMinimizer::nlopt_function, this);
117 
118 #endif
119  // the minimum objective value, upon return
120  double cost=0.0;
121 
122 #ifdef HAVE_NLOPT
123  // optimize our function
124  nlopt_result error_code=nlopt_optimize(opt, m_target_variable.vector, &cost);
125  if(error_code!=1)
126  {
127  SG_SWARNING("Error(s) happened and NLopt failed during minimization (error code:%d)\n",
128  error_code);
129  }
130 
131  // clean up
132  nlopt_destroy(opt);
133 #endif
134 
135  return cost;
136 }
137 
138 #ifdef HAVE_NLOPT
139 void CNLOPTMinimizer::set_nlopt_parameters(nlopt_algorithm algorithm,
140  float64_t max_iterations,
141  float64_t variable_tolerance,
142  float64_t function_tolerance)
143 {
144  m_nlopt_algorithm_id=NLOPTAlgorithmHelper::get_nlopt_algorithm_id(algorithm);
145  m_max_iterations=max_iterations;
146  m_variable_tolerance=variable_tolerance;
147  m_function_tolerance=function_tolerance;
148 };
149 
150 double CNLOPTMinimizer::nlopt_function(unsigned dim, const double* variable, double* gradient,
151  void* func_data)
152 {
153  CNLOPTMinimizer* obj_prt=static_cast<CNLOPTMinimizer *>(func_data);
154  REQUIRE(obj_prt, "The instance object passed to NLopt optimizer should not be NULL\n");
155 
156  double cost=obj_prt->m_fun->get_cost();
157 
158  //get the gradient wrt variable_new
159  SGVector<float64_t> grad=obj_prt->m_fun->get_gradient();
160 
161  REQUIRE(grad.vlen==(index_t)dim,
162  "The length of gradient (%d) and the length of variable (%d) do not match\n",
163  grad.vlen,dim);
164 
165  std::copy(grad.vector,grad.vector+dim,gradient);
166  return cost;
167 }
168 
/** Prepare for a minimization run.
 *
 * Requires a cost function to be set, obtains a reference to its target
 * variable (which minimize() then optimizes in place), and checks that the
 * variable is non-empty.
 */
void CNLOPTMinimizer::init_minimization()
{
	REQUIRE(m_fun, "Cost function not set!\n");
	m_target_variable=m_fun->obtain_variable_reference();
	REQUIRE(m_target_variable.vlen>0,"Target variable from cost function must not empty!\n");
}
175 #endif
176 
177 #endif //USE_GPL_SHOGUN
virtual SGVector< float64_t > get_upper_bound()=0
int32_t index_t
Definition: common.h:62
#define SG_SWARNING(...)
Definition: SGIO.h:178
#define REQUIRE(x,...)
Definition: SGIO.h:206
The first order cost function base class with bound constraints.
index_t vlen
Definition: SGVector.h:494
double float64_t
Definition: common.h:50
The first order cost function base class.
all of classes and functions are contained in the shogun namespace
Definition: class_list.h:18
virtual SGVector< float64_t > get_lower_bound()=0
#define SG_ADD(...)
Definition: SGObject.h:84
The first order minimizer base class.

SHOGUN Machine Learning Toolbox - Documentation