45 #ifndef DOXYGEN_SHOULD_SKIP_THIS
46 struct GRADIENT_THREAD_PARAM
68 REQUIRE(scale>0,
"Scale (%f) must be positive", scale);
101 void CInferenceMethod::init()
127 int32_t num_importance_samples,
float64_t ridge_size)
134 cov(i,i)+=ridge_size;
160 scaled_kernel(i,i)+=ridge_size;
173 ASSERT(log_likelihood.
vlen==num_importance_samples);
174 ASSERT(log_likelihood.
vlen==log_pdf_prior.vlen);
178 sum[i]=log_likelihood[i]+log_pdf_prior[i]-log_pdf_post_approx[i];
208 for (
index_t i=0; i<num_deriv; i++)
212 GRADIENT_THREAD_PARAM thread_params;
214 thread_params.inf=
this;
215 thread_params.obj=node->data;
216 thread_params.param=node->key;
217 thread_params.grad=result;
218 thread_params.lock=&lock;
226 pthread_t* threads=SG_MALLOC(pthread_t, num_deriv);
227 GRADIENT_THREAD_PARAM* thread_params=SG_MALLOC(GRADIENT_THREAD_PARAM,
230 for (
index_t t=0; t<num_deriv; t++)
234 thread_params[t].inf=
this;
235 thread_params[t].obj=node->data;
236 thread_params[t].param=node->key;
237 thread_params[t].grad=result;
238 thread_params[t].lock=&lock;
241 (
void*)&thread_params[t]);
244 for (
index_t t=0; t<num_deriv; t++)
245 pthread_join(threads[t], NULL);
247 SG_FREE(thread_params);
257 GRADIENT_THREAD_PARAM* thread_param=(GRADIENT_THREAD_PARAM*)p;
263 CLock* lock=thread_param->lock;
265 REQUIRE(param,
"Parameter should not be NULL\n");
266 REQUIRE(obj,
"Object of the parameter should not be NULL\n");
285 else if (obj==inf->
m_mean)
292 SG_SERROR(
"Can't compute derivative of negative log marginal "
297 grad->
add(param, gradient);
313 "Number of training features must be greater than zero\n")
316 "Number of labels must be greater than zero\n")
318 "Number of training vectors (%d) must match number of labels (%d)\n",
virtual void set_labels(CLabels *lab)
virtual const char * get_name() const =0
virtual void set_model(CLikelihoodModel *mod)
virtual bool init(CFeatures *lhs, CFeatures *rhs)
SGVector< float64_t > m_alpha
The Inference Method base class.
virtual void set_features(CFeatures *feat)
The class Labels models labels, i.e. class assignments of objects.
virtual int32_t get_num_labels() const =0
virtual ~CInferenceMethod()
virtual int32_t get_num_vectors() const =0
virtual SGVector< float64_t > get_mean_vector(const CFeatures *features) const =0
virtual void set_scale(float64_t scale)
An abstract class of the mean function.
virtual SGVector< float64_t > get_derivative_wrt_likelihood_model(const TParameter *param)=0
SGMatrix< float64_t > get_kernel_matrix()
CMapNode< K, T > * get_node_ptr(int32_t index)
int32_t add(const K &key, const T &data)
SGMatrix< float64_t > m_L
virtual SGVector< float64_t > get_log_probability_fmatrix(const CLabels *lab, SGMatrix< float64_t > F) const
Class SGObject is the base class of all shogun objects.
Class Lock used for synchronization in concurrent programs.
virtual SGMatrix< float64_t > get_multiclass_E()
virtual void compute_gradient()
SGMatrix< float64_t > m_E
virtual SGVector< float64_t > get_derivative_wrt_inference_method(const TParameter *param)=0
virtual void update_train_kernel()
virtual SGVector< float64_t > get_derivative_wrt_kernel(const TParameter *param)=0
virtual SGVector< float64_t > log_pdf_multiple(SGMatrix< float64_t > samples) const
virtual void set_kernel(CKernel *kern)
float64_t get_marginal_likelihood_estimate(int32_t num_importance_samples=1, float64_t ridge_size=1e-15)
All classes and functions are contained in the shogun namespace.
Dense version of the well-known Gaussian probability distribution, defined as \( \mathcal{N}(\mathbf{x};\boldsymbol{\mu},\boldsymbol{\Sigma}) = \frac{1}{\sqrt{(2\pi)^d \det(\boldsymbol{\Sigma})}} \exp\!\left(-\frac{1}{2}(\mathbf{x}-\boldsymbol{\mu})^\top \boldsymbol{\Sigma}^{-1}(\mathbf{x}-\boldsymbol{\mu})\right) \).
virtual SGVector< float64_t > get_derivative_wrt_mean(const TParameter *param)=0
virtual void set_mean(CMeanFunction *m)
virtual SGMatrix< float64_t > get_posterior_covariance()=0
The class Features is the base class of all feature objects.
void scale(Matrix A, Matrix B, typename Matrix::Scalar alpha)
static float64_t exp(float64_t x)
static float64_t log(float64_t v)
virtual void check_members() const
virtual SGVector< float64_t > get_posterior_mean()=0
static T log_mean_exp(SGVector< T > values)
int32_t get_num_elements() const
virtual bool parameter_hash_changed()
The Likelihood model base class.
SGMatrix< float64_t > m_ktrtr
virtual CMap< TParameter *, SGVector< float64_t > > * get_negative_log_marginal_likelihood_derivatives(CMap< TParameter *, CSGObject * > *parameters)
CLikelihoodModel * m_model
virtual float64_t get_scale() const
The class CMap, a map based on a hash table. See: http://en.wikipedia.org/wiki/Hash_table
virtual SGMatrix< float64_t > sample(int32_t num_samples, SGMatrix< float64_t > pre_samples=SGMatrix< float64_t >()) const
static void * get_derivative_helper(void *p)