50 using namespace Eigen;
67 void CKLCovarianceInferenceMethod::init()
70 "V is L'*V=diag(sW)*K",
73 "A is A=I-K*diag(sW)*inv(L)'*inv(L)*diag(sW)",
79 "Square root of noise matrix W",
82 "the gradient of the variational expection wrt sigma2",
85 "the gradient of the variational expection wrt mu",
108 eigen_result=eigen_alpha;
124 SG_SERROR(
"Provided inference is not of type CKLCovarianceInferenceMethod!\n")
149 eigen_W=(2.0*eigen_log_neg_lambda.array().exp()).matrix();
152 eigen_sW=eigen_W.array().sqrt().matrix();
159 eigen_V=eigen_L.triangularView<Upper>().adjoint().solve(eigen_sW.asDiagonal()*eigen_K*
CMath::exp(
m_log_scale*2.0));
163 eigen_s2=(eigen_K.diagonal().array()*
CMath::exp(
m_log_scale*2.0)-(eigen_V.array().pow(2).colwise().sum().transpose())).abs().matrix();
173 "The length of gradients (%d) should the same as the length of parameters (%d)\n",
198 MatrixXd eigen_U=eigen_L.triangularView<Upper>().adjoint().solve(
MatrixXd(eigen_sW.asDiagonal()));
201 eigen_A=MatrixXd::Identity(len, len)-eigen_V.transpose()*eigen_U;
215 eigen_dnlz_log_neg_lambda=(eigen_Sigma.array().pow(2)*2.0).matrix()*eigen_dv+eigen_s2;
216 eigen_dnlz_log_neg_lambda=eigen_dnlz_log_neg_lambda-(eigen_Sigma.array()*eigen_A.array()).rowwise().sum().matrix();
217 eigen_dnlz_log_neg_lambda=(eigen_log_neg_lambda.array().exp()*eigen_dnlz_log_neg_lambda.array()).matrix();
237 MatrixXd eigen_t=eigen_L.triangularView<Upper>().adjoint().solve(MatrixXd::Identity(eigen_L.rows(),eigen_L.cols()));
239 for(
index_t idx=0; idx<eigen_t.rows(); idx++)
240 trace +=(eigen_t.col(idx).array().pow(2)).sum();
243 float64_t result=-a+eigen_L.diagonal().array().log().sum();
244 result+=0.5*(-eigen_K.rows()+eigen_alpha.dot(eigen_mu-eigen_mean)+trace);
266 VectorXd z=AdK.diagonal()+(eigen_A.array()*AdK.array()).rowwise().sum().matrix()
267 -(eigen_A.transpose().array()*AdK.array()).colwise().sum().transpose().matrix();
270 return eigen_alpha.dot(eigen_dK*(eigen_alpha/2.0-eigen_df))-z.dot(eigen_dv);
286 MatrixXd::Identity(eigen_K.rows(), eigen_K.cols()));
288 MatrixXd tt=LL.triangularView<Upper>().adjoint().solve(MatrixXd::Identity(LL.rows(),LL.cols()));
290 for(
index_t idx=0; idx<tt.rows(); idx++)
291 trace+=(tt.col(idx).array().pow(2)).sum();
296 eigen_s2=(eigen_K.diagonal().array()*
CMath::exp(
m_log_scale*2.0)-(eigen_V.array().pow(2).colwise().sum().transpose())).abs().matrix();
303 nlml_def=-a+LL.diagonal().array().log().sum();
304 nlml_def+=0.5*(-eigen_K.rows()+trace);
306 if (nlml_new<=nlml_def)
virtual bool set_variational_distribution(SGVector< float64_t > mu, SGVector< float64_t > s2, const CLabels *lab)
virtual void update_approx_cov()
SGVector< float64_t > m_alpha
The Inference Method base class.
virtual SGVector< float64_t > get_variational_first_derivative(const TParameter *param) const =0
static SGMatrix< float64_t > get_choleksy(SGVector< float64_t > W, SGVector< float64_t > sW, SGMatrix< float64_t > kernel, float64_t scale)
virtual void update_deriv()
virtual void get_gradient_of_nlml_wrt_parameters(SGVector< float64_t > gradient)
The class Labels models labels, i.e. class assignments of objects.
virtual int32_t get_num_labels() const =0
virtual float64_t get_derivative_related_cov(SGMatrix< float64_t > dK)
The variational Gaussian Likelihood base class. The variational distribution is Gaussian.
static SGMatrix< float64_t > get_inverse(SGMatrix< float64_t > L, SGMatrix< float64_t > kernel, SGVector< float64_t > sW, SGMatrix< float64_t > V, float64_t scale)
TParameter * get_parameter(int32_t idx)
virtual SGVector< float64_t > get_mean_vector(const CFeatures *features) const =0
static CKLCovarianceInferenceMethod * obtain_from_generic(CInferenceMethod *inference)
An abstract base class for mean functions.
SGMatrix< float64_t > m_Sigma
virtual ~CKLCovarianceInferenceMethod()
SGMatrix< float64_t > m_L
virtual float64_t lbfgs_optimization()
static T sum(T *vec, int32_t len)
Return sum(vec)
virtual SGVector< float64_t > get_variational_expection()=0
Matrix< float64_t,-1,-1, 0,-1,-1 > MatrixXd
The KL approximation inference method class.
The KL approximation inference method class.
All classes and functions are contained in the shogun namespace.
The class Features is the base class of all feature objects.
static float64_t exp(float64_t x)
virtual void update_alpha()
SGVector< float64_t > m_mu
SGVector< float64_t > m_s2
static float64_t log(float64_t v)
virtual EInferenceType get_inference_type() const
virtual SGVector< float64_t > get_diagonal_vector()
virtual CVariationalGaussianLikelihood * get_variational_likelihood() const
virtual SGVector< float64_t > get_alpha()
CKLCovarianceInferenceMethod()
virtual float64_t get_negative_log_marginal_likelihood_helper()
virtual bool parameter_hash_changed()
The Likelihood model base class.
SGMatrix< float64_t > m_ktrtr
virtual void update_chol()
virtual bool lbfgs_precompute()