25 #ifndef DOXYGEN_SHOULD_SKIP_THIS
53 double nlopt_function(
unsigned n,
const double* x,
double* grad,
void* func_data)
55 nlopt_params* params=(nlopt_params*)func_data;
60 bool print_state=params->print_state;
67 CMapNode<TParameter*, CSGObject*>*
node=parameter_dictionary->
get_node_ptr(i);
83 REQUIRE(result,
"Parameter %s not found in combination tree\n",
91 REQUIRE(result,
"Parameter %s not found in combination tree\n",
130 CMapNode<TParameter*, CSGObject*>* node=parameter_dictionary->
get_node_ptr(i);
136 CMapNode<TParameter*, CSGObject*>* gradient_node=
139 if (gradient_node->data==node->data &&
140 !strcmp(gradient_node->key->m_name, node->key->m_name))
142 derivative=gradient->
get_element(gradient_node->key);
146 REQUIRE(derivative.
vlen,
"Can't find gradient wrt %s parameter!\n",
149 memcpy(grad+offset, derivative.
vector,
sizeof(
double)*derivative.
vlen);
151 offset+=derivative.
vlen;
174 CGradientModelSelection::~CGradientModelSelection()
178 void CGradientModelSelection::init()
180 m_max_evaluations=1000;
181 m_grad_tolerance=1e-6;
183 SG_ADD(&m_grad_tolerance,
"gradient_tolerance",
"Gradient tolerance",
185 SG_ADD(&m_max_evaluations,
"max_evaluations",
"Maximum number of evaluations",
191 if (!m_model_parameters)
193 CMachine* machine=m_machine_eval->get_machine();
196 SG_REF(current_combination);
218 CMapNode<TParameter*, SGVector<float64_t> >* node=argument->
get_node_ptr(i);
219 memcpy(x.vector+offset, node->data.vector,
sizeof(
double)*node->data.vlen);
220 offset+=node->data.vlen;
227 nlopt_opt opt=nlopt_create(NLOPT_LD_MMA, total_variables);
244 nlopt_set_maxeval(opt, m_max_evaluations);
247 nlopt_set_xtol_abs1(opt, m_grad_tolerance);
248 nlopt_set_ftol_abs(opt, m_grad_tolerance);
258 params.current_combination=current_combination;
259 params.machine_eval=m_machine_eval;
260 params.print_state=print_state;
261 params.parameter_dictionary=parameter_dictionary;
264 if (m_machine_eval->get_evaluation_direction()==
ED_MINIMIZE)
267 SG_PRINT(
"Minimizing objective function:\n");
269 nlopt_set_min_objective(opt, nlopt_function, &params);
274 SG_PRINT(
"Maximizing objective function:\n");
276 nlopt_set_max_objective(opt, nlopt_function, &params);
283 nlopt_result result=nlopt_optimize(opt, x.vector, &minf);
285 REQUIRE(result>0,
"NLopt failed while optimizing objective function!\n");
298 return current_combination;
309 #endif //USE_GPL_SHOGUN
bool set_parameter(const char *name, T value, CSGObject *parent, index_t index=-1)
T get_element(const K &key)
#define SG_NOTIMPLEMENTED
int64_t get_num_elements()
void print_tree(int prefix_num=0) const
A generic learning machine interface.
Class to select parameters and their ranges for model selection. The structure is organized as a tree...
virtual void print_result()
CMapNode< K, T > * get_node_ptr(int32_t index)
virtual CEvaluationResult * evaluate()=0
Abstract base class for model selection.
static CGradientResult * obtain_from_generic(CEvaluationResult *eval_result)
Class SGObject is the base class of all shogun objects.
CMachine * get_machine() const
virtual CMap< TParameter *, CSGObject * > * get_paramter_dictionary()
Abstract class that contains the result generated by the MachineEvaluation class. ...
Machine Evaluation is an abstract class that evaluates a machine according to some criterion...
virtual CMap< TParameter *, SGVector< float64_t > > * get_gradient()
static T sum(T *vec, int32_t len)
Return sum(vec)
Class that holds ONE combination of parameters for a learning machine. The structure is organized as ...
virtual void build_parameter_parent_map(CMap< TParameter *, CSGObject * > *parent_map)
all of classes and functions are contained in the shogun namespace
Container class that returns results from GradientEvaluation. It contains the function value as well ...
virtual SGVector< float64_t > get_value()
virtual uint32_t get_parameters_length()
virtual void build_parameter_values_map(CMap< TParameter *, SGVector< float64_t > > *values_map)
int32_t get_num_elements() const
void apply_to_machine(CMachine *machine) const
the class CMap, a map based on the hash-table. w: http://en.wikipedia.org/wiki/Hash_table ...