44 #ifndef DOXYGEN_SHOULD_SKIP_THIS
45 struct GRADIENT_THREAD_PARAM
67 REQUIRE(scale>0,
"Scale (%f) must be positive", scale);
101 void CInference::init()
130 REQUIRE(minimizer,
"Minimizer must set\n");
140 int32_t num_importance_samples,
float64_t ridge_size)
147 cov(i,i)+=ridge_size;
173 scaled_kernel(i,i)+=ridge_size;
186 ASSERT(log_likelihood.
vlen==num_importance_samples);
187 ASSERT(log_likelihood.
vlen==log_pdf_prior.vlen);
191 sum[i]=log_likelihood[i]+log_pdf_prior[i]-log_pdf_post_approx[i];
221 for (
index_t i=0; i<num_deriv; i++)
225 GRADIENT_THREAD_PARAM thread_params;
227 thread_params.inf=
this;
228 thread_params.obj=node->data;
229 thread_params.param=node->key;
230 thread_params.grad=result;
231 thread_params.lock=&lock;
239 pthread_t* threads=SG_MALLOC(pthread_t, num_deriv);
240 GRADIENT_THREAD_PARAM* thread_params=SG_MALLOC(GRADIENT_THREAD_PARAM,
243 for (
index_t t=0; t<num_deriv; t++)
247 thread_params[t].inf=
this;
248 thread_params[t].obj=node->data;
249 thread_params[t].param=node->key;
250 thread_params[t].grad=result;
251 thread_params[t].lock=&lock;
254 (
void*)&thread_params[t]);
257 for (
index_t t=0; t<num_deriv; t++)
258 pthread_join(threads[t], NULL);
260 SG_FREE(thread_params);
270 GRADIENT_THREAD_PARAM* thread_param=(GRADIENT_THREAD_PARAM*)p;
276 CLock* lock=thread_param->lock;
278 REQUIRE(param,
"Parameter should not be NULL\n");
279 REQUIRE(obj,
"Object of the parameter should not be NULL\n");
298 else if (obj==inf->
m_mean)
305 SG_SERROR(
"Can't compute derivative of negative log marginal "
310 grad->
add(param, gradient);
326 "Number of training features must be greater than zero\n")
329 "Number of labels must be greater than zero\n")
331 "Number of training vectors (%d) must match number of labels (%d)\n",
virtual const char * get_name() const =0
virtual bool init(CFeatures *lhs, CFeatures *rhs)
virtual void update_train_kernel()
The class Labels models labels, i.e. class assignments of objects.
virtual int32_t get_num_labels() const =0
static void * get_derivative_helper(void *p)
virtual int32_t get_num_vectors() const =0
virtual void set_scale(float64_t scale)
virtual SGVector< float64_t > get_mean_vector(const CFeatures *features) const =0
SGMatrix< float64_t > m_E
An abstract class of the mean function.
SGMatrix< float64_t > get_kernel_matrix()
virtual void set_labels(CLabels *lab)
SGMatrix< float64_t > m_ktrtr
virtual SGVector< float64_t > get_derivative_wrt_mean(const TParameter *param)=0
virtual SGMatrix< float64_t > get_posterior_covariance()=0
CMapNode< K, T > * get_node_ptr(int32_t index)
int32_t add(const K &key, const T &data)
virtual SGVector< float64_t > get_log_probability_fmatrix(const CLabels *lab, SGMatrix< float64_t > F) const
virtual SGMatrix< float64_t > get_multiclass_E()
Class SGObject is the base class of all shogun objects.
Class Lock used for synchronization in concurrent programs.
virtual float64_t get_scale() const
virtual void compute_gradient()
virtual void set_model(CLikelihoodModel *mod)
virtual void set_kernel(CKernel *kern)
virtual SGVector< float64_t > get_derivative_wrt_inference_method(const TParameter *param)=0
SGMatrix< float64_t > m_L
virtual SGVector< float64_t > get_posterior_mean()=0
virtual SGVector< float64_t > log_pdf_multiple(SGMatrix< float64_t > samples) const
virtual void register_minimizer(Minimizer *minimizer)
virtual void set_features(CFeatures *feat)
virtual SGVector< float64_t > get_derivative_wrt_kernel(const TParameter *param)=0
All classes and functions are contained in the shogun namespace.
Dense version of the well-known Gaussian probability distribution, defined as \( \mathcal{N}(x;\mu,\Sigma) = \frac{1}{\sqrt{(2\pi)^d |\Sigma|}} \exp\left(-\frac{1}{2}(x-\mu)^\top \Sigma^{-1}(x-\mu)\right) \).
The Inference Method base class.
float64_t get_marginal_likelihood_estimate(int32_t num_importance_samples=1, float64_t ridge_size=1e-15)
The class Features is the base class of all feature objects.
void scale(Matrix A, Matrix B, typename Matrix::Scalar alpha)
static float64_t exp(float64_t x)
static float64_t log(float64_t v)
static T log_mean_exp(SGVector< T > values)
virtual CMap< TParameter *, SGVector< float64_t > > * get_negative_log_marginal_likelihood_derivatives(CMap< TParameter *, CSGObject * > *parameters)
int32_t get_num_elements() const
The minimizer base class.
CLikelihoodModel * m_model
virtual bool parameter_hash_changed()
virtual void set_mean(CMeanFunction *m)
The Likelihood model base class.
SGVector< float64_t > m_alpha
The class CMap, a map based on a hash table. See: http://en.wikipedia.org/wiki/Hash_table
virtual SGMatrix< float64_t > sample(int32_t num_samples, SGMatrix< float64_t > pre_samples=SGMatrix< float64_t >()) const
virtual void check_members() const
virtual SGVector< float64_t > get_derivative_wrt_likelihood_model(const TParameter *param)=0