21 using namespace shogun;
23 #ifndef DOXYGEN_SHOULD_SKIP_THIS
// NLopt objective callback: evaluates the model-selection objective at the
// point x supplied by the optimizer and, when grad != NULL, fills grad with
// the derivative w.r.t. every flattened parameter.
// NOTE(review): fragmented excerpt — interior lines are missing and original
// source line numbers (51, 53, ...) are fused into the text; code left as-is.
51 double nlopt_function(
unsigned n,
const double* x,
double* grad,
void* func_data)
// Recover the typed parameter bundle passed through NLopt's opaque void*.
53 nlopt_params* params=(nlopt_params*)func_data;
58 bool print_state=params->print_state;
// Walk the parameter dictionary; each node maps a TParameter* to the
// CSGObject that owns it.
65 CMapNode<TParameter*, CSGObject*>*
node=parameter_dictionary->
get_node_ptr(i);
// Vector-valued parameter: iterate over its m_length_y elements.
76 for (
index_t j=0; j<*(param->m_datatype.m_length_y); j++)
81 REQUIRE(result,
"Parameter %s not found in combination tree\n",
89 REQUIRE(result,
"Parameter %s not found in combination tree\n",
// Match each dictionary entry against the gradient map by owner object
// identity and parameter name (strcmp on m_name) to locate its derivative.
122 CMapNode<TParameter*, CSGObject*>* node=parameter_dictionary->
get_node_ptr(i);
128 CMapNode<TParameter*, CSGObject*>* gradient_node=
131 if (gradient_node->data==node->data &&
132 !strcmp(gradient_node->key->m_name, node->key->m_name))
134 derivative=gradient->
get_element(gradient_node->key);
// A zero-length derivative vector means no gradient was found for this
// parameter — treated as a hard error.
138 REQUIRE(derivative.
vlen,
"Can't find gradient wrt %s parameter!\n",
// Copy this parameter's derivative into the flat gradient buffer NLopt
// expects, then advance offset past the vlen doubles just written.
141 memcpy(grad+offset, derivative.
vector,
sizeof(
double)*derivative.
vlen);
143 offset+=derivative.
vlen;
// Member initialisation; SG_REF takes a Shogun reference on
// current_combination so it outlives this scope (Shogun's manual refcounting).
// NOTE(review): fragmented excerpt — most of the body is not visible here.
170 void CGradientModelSelection::init()
188 SG_REF(current_combination);
// NOTE(review): fragmented excerpt of the model-selection routine — interior
// lines are missing; code left byte-identical.
// Flatten the current parameter values (one SGVector<float64_t> per map node)
// into the contiguous x buffer NLopt optimizes over; offset tracks the write
// position in doubles.
210 CMapNode<TParameter*, SGVector<float64_t> >* node=argument->
get_node_ptr(i);
211 memcpy(x.
vector+offset, node->data.vector,
sizeof(
double)*node->data.vlen);
212 offset+=node->data.vlen;
// Create the optimizer using NLopt's gradient-based MMA algorithm over all
// flattened scalar variables.
219 nlopt_opt opt=nlopt_create(NLOPT_LD_MMA, total_variables);
// Box constraints for every variable.
230 nlopt_set_lower_bounds(opt, lower_bound.
vector);
231 nlopt_set_upper_bounds(opt, upper_bound.
vector);
// Bundle the state the nlopt_function callback will unpack from func_data.
248 params.current_combination=current_combination;
250 params.print_state=print_state;
251 params.parameter_dictionary=parameter_dictionary;
// Register nlopt_function as the objective, minimizing or maximizing
// depending on the direction branch (condition not visible in this excerpt).
// BUGFIX: the third argument had been corrupted to the pilcrow sequence
// "¶ms" — an HTML-entity mangling of "&para" + "ms" — restoring "&params"
// so NLopt receives the address of the callback state struct, as required by
// nlopt_set_min_objective(opt, f, f_data) / nlopt_set_max_objective.
257 SG_PRINT(
"Minimizing objective function:\n");
259 nlopt_set_min_objective(opt, nlopt_function, &params);
264 SG_PRINT(
"Maximizing objective function:\n");
266 nlopt_set_max_objective(opt, nlopt_function, &params);
// Run the optimization: x is updated in place with the best point found and
// minf receives the corresponding objective value.
273 nlopt_result result=nlopt_optimize(opt, x.
vector, &minf);
// NLopt reports failure with a non-positive result code.
275 REQUIRE(result>0,
"NLopt failed while optimizing objective function!\n");
// Hand back the tuned parameter combination to the caller.
288 return current_combination;