23 #ifndef DOXYGEN_SHOULD_SKIP_THIS
51 double nlopt_function(
unsigned n,
const double* x,
double* grad,
void* func_data)
53 nlopt_params* params=(nlopt_params*)func_data;
58 bool print_state=params->print_state;
65 CMapNode<TParameter*, CSGObject*>*
node=parameter_dictionary->
get_node_ptr(i);
81 REQUIRE(result,
"Parameter %s not found in combination tree\n",
89 REQUIRE(result,
"Parameter %s not found in combination tree\n",
128 CMapNode<TParameter*, CSGObject*>* node=parameter_dictionary->
get_node_ptr(i);
134 CMapNode<TParameter*, CSGObject*>* gradient_node=
137 if (gradient_node->data==node->data &&
138 !strcmp(gradient_node->key->m_name, node->key->m_name))
140 derivative=gradient->
get_element(gradient_node->key);
144 REQUIRE(derivative.
vlen,
"Can't find gradient wrt %s parameter!\n",
147 memcpy(grad+offset, derivative.
vector,
sizeof(
double)*derivative.
vlen);
149 offset+=derivative.
vlen;
176 void CGradientModelSelection::init()
194 SG_REF(current_combination);
216 CMapNode<TParameter*, SGVector<float64_t> >* node=argument->
get_node_ptr(i);
217 memcpy(x.
vector+offset, node->data.vector,
sizeof(
double)*node->data.vlen);
218 offset+=node->data.vlen;
225 nlopt_opt opt=nlopt_create(NLOPT_LD_MMA, total_variables);
256 params.current_combination=current_combination;
258 params.print_state=print_state;
259 params.parameter_dictionary=parameter_dictionary;
265 SG_PRINT(
"Minimizing objective function:\n");
267 nlopt_set_min_objective(opt, nlopt_function, &params);
272 SG_PRINT(
"Maximizing objective function:\n");
274 nlopt_set_max_objective(opt, nlopt_function, &params);
281 nlopt_result result=nlopt_optimize(opt, x.
vector, &minf);
283 REQUIRE(result>0,
"NLopt failed while optimizing objective function!\n");
296 return current_combination;
virtual CParameterCombination * select_model(bool print_state=false)
virtual ~CGradientModelSelection()
bool set_parameter(const char *name, T value, CSGObject *parent, index_t index=-1)
T get_element(const K &key)
#define SG_NOTIMPLEMENTED
int64_t get_num_elements()
void print_tree(int prefix_num=0) const
A generic learning machine interface.
Class to select parameters and their ranges for model selection. The structure is organized as a tree...
virtual void print_result()
CMapNode< K, T > * get_node_ptr(int32_t index)
virtual CEvaluationResult * evaluate()=0
Abstract base class for model selection.
CModelSelectionParameters * m_model_parameters
static CGradientResult * obtain_from_generic(CEvaluationResult *eval_result)
CMachineEvaluation * m_machine_eval
float64_t m_grad_tolerance
Class SGObject is the base class of all shogun objects.
CMachine * get_machine() const
virtual CMap< TParameter *, CSGObject * > * get_paramter_dictionary()
Abstract class that contains the result generated by the MachineEvaluation class. ...
Machine Evaluation is an abstract class that evaluates a machine according to some criterion...
CGradientModelSelection()
virtual CMap< TParameter *, SGVector< float64_t > > * get_gradient()
static T sum(T *vec, int32_t len)
Return sum(vec)
Class that holds ONE combination of parameters for a learning machine. The structure is organized as ...
EEvaluationDirection get_evaluation_direction()
virtual void build_parameter_parent_map(CMap< TParameter *, CSGObject * > *parent_map)
all classes and functions are contained in the shogun namespace
Container class that returns results from GradientEvaluation. It contains the function value as well ...
uint32_t m_max_evaluations
virtual SGVector< float64_t > get_value()
virtual uint32_t get_parameters_length()
virtual void build_parameter_values_map(CMap< TParameter *, SGVector< float64_t > > *values_map)
int32_t get_num_elements() const
void apply_to_machine(CMachine *machine) const
the class CMap, a map based on the hash-table. w: http://en.wikipedia.org/wiki/Hash_table ...