46 using namespace Eigen;
61 void CSingleSparseInferenceBase::init()
73 "max number of iterations used in inducing features optimization",
MS_NOT_AVAILABLE);
77 "opt_inducing_features",
"whether optimize inducing features",
MS_NOT_AVAILABLE);
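The two fragments above are continuation lines of SG_ADD parameter-registration calls inside init(). As a rough sketch of the likely full form, assuming Shogun's SG_ADD(&member, name, description, availability) macro and the member names declared further below (a reconstruction, not the verbatim source):

SG_ADD(&m_max_ind_iterations, "max_ind_iterations",
	"max number of iterations used in inducing features optimization",
	MS_NOT_AVAILABLE);
SG_ADD(&m_opt_inducing_features, "opt_inducing_features",
	"whether to optimize inducing features", MS_NOT_AVAILABLE);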
103 SG_WARNING("The provided kernel does not support optimizing inducing features\n");
112 REQUIRE(param, "Param not set\n");
114 && strcmp(param->m_name, "log_inducing_noise")
115 && strcmp(param->m_name, "inducing_features")),
116 "Can't compute derivative of"
117 " the negative log marginal likelihood wrt %s.%s parameter\n",
120 if (!strcmp(param->m_name, "log_inducing_noise"))
124 else if (!strcmp(param->m_name, "inducing_features"))
132 SG_WARNING("Derivative wrt %s cannot be computed since the kernel does not support fully sparse inference\n",
163 REQUIRE(param, "Param not set\n");
165 int64_t len=const_cast<TParameter *>(param)->m_datatype.get_num_elements();
208 "The length of inducing features (%dx%d)",
209 " and the length of bound constraints (%d) are different\n",
212 else if(bound.
vlen==1)
214 SG_WARNING(
"All inducing_features (%dx%d) are constrainted by the single value (%f) in the %s bound\n",
232 REQUIRE(it>0, "Iteration (%d) must be positive\n", it);
238 REQUIRE(tol>0, "Tolerance (%f) must be positive\n", tol);
241 double CSingleSparseInferenceBase::nlopt_function(unsigned n, const double* x, double* grad, void* func_data)
244 REQUIRE(object, "func_data must be a SingleSparseInferenceBase pointer\n");
246 double nlz=object->get_negative_log_marginal_likelihood();
247 object->compute_gradient();
249 TParameter* param=object->m_gradient_parameters->get_parameter("inducing_features");
252 std::copy(derivatives.vector, derivatives.vector+n, grad);
290 nlopt_set_lower_bounds(opt, lower_bound.vector);
304 nlopt_set_upper_bounds(opt, upper_bound.vector);
314 nlopt_set_min_objective(opt, CSingleSparseInferenceBase::nlopt_function, this);
320 nlopt_result result=nlopt_optimize(opt, x.vector, &minf);
321 REQUIRE(result>0, "NLopt failed while optimizing objective function!\n");
326 SG_PRINT("This functionality requires the NLOPT library\n");
SGVector< float64_t > m_ktrtr_diag
virtual SGVector< float64_t > get_derivative_wrt_inducing_noise(const TParameter *param)=0
bool m_opt_inducing_features
SGMatrix< float64_t > m_kuu
SGVector< float64_t > m_upper_bound
virtual SGVector< float64_t > get_derivative_wrt_inference_method(const TParameter *param)
SGMatrix< float64_t > m_ktru
virtual ~CSingleSparseInferenceBase()
SGMatrix< float64_t > m_inducing_features
float64_t m_ind_tolerance
virtual void set_tolearance_for_inducing_features(float64_t tol)
CSingleSparseInferenceBase()
SGVector< float64_t > m_lower_bound
virtual void enable_optimizing_inducing_features(bool is_optmization)
virtual void set_upper_bound_of_inducing_features(SGVector< float64_t > bound)
virtual void set_kernel(CKernel *kern)
The sparse inference base class for classification and regression for 1-D labels (1D regression and binary classification).
virtual void set_max_iterations_for_inducing_features(int32_t it)
virtual void check_fully_sparse()
virtual CFeatures * get_inducing_features()
virtual SGVector< float64_t > get_derivative_wrt_kernel(const TParameter *param)
virtual const char * get_name() const
virtual SGMatrix< float64_t > get_parameter_gradient(const TParameter *param, index_t index=-1)
virtual SGVector< float64_t > get_derivative_wrt_inducing_features(const TParameter *param)=0
virtual void check_bound(SGVector< float64_t > bound, const char *name)
virtual void set_lower_bound_of_inducing_features(SGVector< float64_t > bound)
virtual float64_t get_derivative_related_cov(SGVector< float64_t > ddiagKi, SGMatrix< float64_t > dKuui, SGMatrix< float64_t > dKui)=0
virtual void optimize_inducing_features()
virtual SGVector< float64_t > get_parameter_gradient_diagonal(const TParameter *param, index_t index=-1)
float64_t m_max_ind_iterations
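A minimal usage sketch tying the members above together: switch on inducing-feature optimization for an already constructed and fully configured inference object (any concrete subclass of CSingleSparseInferenceBase). The header path is an assumption and the snippet is illustrative rather than taken from Shogun's examples:

#include <shogun/machine/gp/SingleSparseInferenceBase.h>

using namespace shogun;

void configure_inducing_optimization(CSingleSparseInferenceBase* inf)
{
	inf->set_max_iterations_for_inducing_features(100);  // must be positive (line 232)
	inf->set_tolearance_for_inducing_features(1e-4);     // must be positive (line 238)
	inf->enable_optimizing_inducing_features(true);      // may warn if the kernel lacks fully sparse support

	// With NLopt available this runs the bounded optimization shown above;
	// otherwise it only prints that the NLOPT library is required (line 326).
	inf->optimize_inducing_features();
}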