16 #ifdef HAVE_LINALG_LIB 
   31 void CGaussianARDKernel::init()
 
   34 #ifdef HAVE_LINALG_LIB 
   45 #ifdef HAVE_LINALG_LIB 
   47     REQUIRE(rhs, "Right features (rhs) not set!\n")

   49     if (lhs==rhs && idx_a==idx_b)
 
   62         result=compute_helper(avec, avec);
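
For orientation, a minimal usage sketch of how these entry points are reached: kernel(i, j) drives compute(), which evaluates the (weighted) distance and exponentiates it. The data values and the raw-pointer setup below are illustrative assumptions, not code from this file.

    #include <shogun/base/init.h>
    #include <shogun/features/DenseFeatures.h>
    #include <shogun/kernel/GaussianARDKernel.h>

    using namespace shogun;

    int main()
    {
        init_shogun_with_defaults();

        // 2 dimensions x 3 vectors, column-major as SGMatrix expects
        SGMatrix<float64_t> data(2, 3);
        data(0,0)=1; data(1,0)=2;
        data(0,1)=3; data(1,1)=4;
        data(0,2)=5; data(1,2)=6;

        CDenseFeatures<float64_t>* feats=new CDenseFeatures<float64_t>(data);
        CGaussianARDKernel* kernel=new CGaussianARDKernel();
        kernel->init(feats, feats);          // lhs==rhs, so k(i,i) hits the early exit above
        float64_t k01=kernel->kernel(0, 1);  // off-diagonal entry

        SG_UNREF(kernel);
        exit_shogun();
        return k01>0.0 ? 0 : 1;
    }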
 
   68 #ifdef HAVE_LINALG_LIB 
   84     bool status=CExponentialARDKernel::init(l,r);
 
   94     REQUIRE(df, "Features not set\n")
 
   95     int32_t num_vec=df->get_num_vectors();
 
   97     for (int32_t i=0; i<num_vec; i++)
 
   98         sq[i]=df->dot(i, df, i);
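
Caching sq[i]=x_i·x_i up front means each pairwise squared distance later costs a single dot product, via the standard expansion such a cache enables. A standalone sketch (plain C++ with hypothetical names, not Shogun code):

    #include <cstddef>
    #include <vector>

    // ||x_i - x_j||^2 = s_i + s_j - 2*(x_i . x_j), with s_i = x_i . x_i cached.
    static double squared_distance(const std::vector<double>& xi,
                                   const std::vector<double>& xj,
                                   double si, double sj)
    {
        double dot=0.0;
        for (std::size_t d=0; d<xi.size(); ++d)
            dot+=xi[d]*xj[d];
        return si+sj-2.0*dot;
    }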
 
  119         SG_SERROR("Provided kernel is not of type CGaussianARDKernel!\n");
 
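Line 119's error is raised from obtain_from_generic(). A hedged reconstruction of the surrounding guard, modeled on the same helper in other Shogun kernels; the exact body here is an assumption:

    CKernel* CGaussianARDKernel::obtain_from_generic(CSGObject* kernel)
    {
        if (kernel==NULL)
            return NULL;

        // reject anything whose runtime kernel type does not match
        if (((CKernel*)kernel)->get_kernel_type()!=K_GAUSSIANARD)
            SG_SERROR("Provided kernel is not of type CGaussianARDKernel!\n");

        SG_REF(kernel);
        return (CGaussianARDKernel*)kernel;
    }
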
  139         left_transpose=get_weighted_vector(avec);
 
  146     return res[0]*scalar_weight;
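
The scalar_weight factor at line 146 is one of three ARD parameterizations selected by m_ARD_type in the base class. Below is a hypothetical illustration of how the weighting differs per type; the KT_* enum names follow Shogun's EARDKernelType, but this is not the actual get_weighted_vector():

    #include <shogun/kernel/ExponentialARDKernel.h>
    #include <shogun/mathematics/Math.h>

    using namespace shogun;

    // Apply exp(log_weights) to one feature vector under each ARD type.
    SGVector<float64_t> weight_vector(SGVector<float64_t> x,
        SGVector<float64_t> log_w, EARDKernelType type)
    {
        SGVector<float64_t> out(x.vlen);
        switch (type)
        {
        case KT_SCALAR:  // one shared weight for every dimension
            for (index_t i=0; i<x.vlen; i++)
                out[i]=CMath::exp(log_w[0])*x[i];
            break;
        case KT_DIAG:    // one weight per dimension
            for (index_t i=0; i<x.vlen; i++)
                out[i]=CMath::exp(log_w[i])*x[i];
            break;
        default:         // KT_FULL: packed lower-triangular matrix-vector product, omitted
            break;
        }
        return out;
    }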
 
  164             res=linalg::matrix_product(left, right);
 
  170             int32_t col_index=index;
 
  172             int32_t total_offset=0;
 
  173             while (col_index>=offset && offset>0)
 
  176                 total_offset+=offset;
 
  180             col_index+=row_index;
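
Lines 170-180 decode a flat parameter index into (row, col) coordinates of the packed lower-triangular weight matrix. A self-contained sketch of that decoding; the helper name is hypothetical, and the column-by-column packing with shrinking column heights is inferred from the offset loop above:

    #include <cstdint>
    #include <utility>

    // Column c of a packed num_dims x num_dims lower triangle holds
    // (num_dims - c) entries; walk columns until the index falls inside one.
    static std::pair<int32_t, int32_t> unpack_tril_index(int32_t index, int32_t num_dims)
    {
        int32_t col=0;
        int32_t offset=num_dims;  // entries remaining in the current column
        while (index>=offset && offset>0)
        {
            index-=offset;
            offset--;
            col++;
        }
        return std::make_pair(col+index, col);  // rows run from the diagonal down
    }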
 
  188             res=linalg::matrix_product(left, row_vec_r);
 
  189             result=res[0]*bvec[col_index];
 
  194             res=linalg::matrix_product(row_vec_l, right);
 
  195             result+=res[0]*avec[col_index];
 
  197             if (row_index==col_index)
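
The two matrix_product calls at lines 188-195 pair a row of the weighted vector with a single component of avec/bvec, which matches the standard matrix-calculus identity for the cross term of a weighted squared distance (W the full weight matrix, a and b the two feature vectors; the connection to this exact code path is an inference from the fragment):

    \frac{\partial}{\partial W_{ij}} \left( a^\top W^\top W b \right)
        = (W a)_i \, b_j + (W b)_i \, a_j

Here res[0] extracts the row-i products, bvec[col_index] and avec[col_index] supply the j-components, and the diagonal entries (row_index==col_index) take their own branch, presumably because the diagonal weights are log-parameterized.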
 
  214     REQUIRE(param, "Param not set\n");

  220         if (!strcmp(param->m_name, "log_weights"))
 
  231         check_weight_gradient_index(index);
 
  232         for (index_t j=0; j<length; j++)

  234             if (!strcmp(param->m_name, "log_weights"))
 
  245                     derivative[j]=get_parameter_gradient_helper(param,index,j,j,avec,bvec);
 
  253     SG_ERROR("Can't compute derivative wrt %s parameter\n", param->m_name);
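
Callers typically reach get_parameter_gradient() through a TParameter looked up on the object itself. A sketch, assuming Shogun's m_gradient_parameters registry and an already-initialized kernel; the surrounding setup is illustrative:

    #include <shogun/kernel/GaussianARDKernel.h>

    using namespace shogun;

    // 'kernel' is assumed to be an initialized CGaussianARDKernel*
    // (features already attached via kernel->init(lhs, rhs)).
    void query_gradients(CGaussianARDKernel* kernel)
    {
        TParameter* log_weights=
            kernel->m_gradient_parameters->get_parameter("log_weights");
        REQUIRE(log_weights, "log_weights not registered for gradients\n");

        // derivative of the full kernel matrix wrt weight entry 0
        SGMatrix<float64_t> dK=kernel->get_parameter_gradient(log_weights, 0);

        // diagonal-only variant (one value per k(x_i, x_i))
        SGVector<float64_t> dK_diag=
            kernel->get_parameter_gradient_diagonal(log_weights, 0);
    }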
 
  258 float64_t CGaussianARDKernel::get_parameter_gradient_helper(
 
  262     REQUIRE(param, "Param not set\n");

  264     if (!strcmp(param->m_name, "log_weights"))
 
  268         return compute_gradient_helper(bvec, bvec, scale, index);
 
  272         SG_ERROR("Can't compute derivative wrt %s parameter\n", param->m_name);
 
  280     REQUIRE(param, "Param not set\n");

  284     if (!strcmp(param->m_name, "log_weights"))
 
  287         check_weight_gradient_index(index);
 
  296                     derivative(j,k)=CMath::exp(-dist)*(-dist*2.0);
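
The factor at line 296 is the chain rule for the scalar-weight case. Assuming m_log_weights stores \theta = \log w and the scalar ARD distance scales as d = e^{2\theta} r with r independent of the weight:

    k = e^{-d}, \qquad
    \frac{\partial d}{\partial \theta} = 2 e^{2\theta} r = 2d, \qquad
    \frac{\partial k}{\partial \theta} = -e^{-d}\,\frac{\partial d}{\partial \theta}
        = e^{-d}\,(-2d)

which is exactly CMath::exp(-dist)*(-dist*2.0).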
 
  301                     derivative(j,k)=get_parameter_gradient_helper(param,index,j,k,avec,bvec);
 
  309         SG_ERROR("Can't compute derivative wrt %s parameter\n", param->m_name);
 