22 using namespace Eigen;
67 SG_ERROR(
"Specified features are not of type CDotFeatures\n")
76 ->get_feature_matrix();
77 int32_t num_feat=feature_matrix.
num_rows;
78 int32_t num_vec=feature_matrix.
num_cols;
79 REQUIRE(num_vec==train_labels.
vlen,
"Number of training examples(%d) should be "
80 "equal to number of labels specified(%d)!\n", num_vec, train_labels.
vlen);
89 for(i=0; i<train_labels.
vlen; i++)
91 if (train_labels.
vector[i]==-1)
92 classidx_neg[num_neg++]=i;
94 else if(train_labels.
vector[i]==+1)
95 classidx_pos[num_pos++]=i;
101 VectorXd mean_neg(num_feat);
102 mean_neg=VectorXd::Zero(num_feat);
103 VectorXd mean_pos(num_feat);
104 mean_pos=VectorXd::Zero(num_feat);
107 for(i=0; i<num_neg; i++)
108 mean_neg+=fmatrix.col(classidx_neg[i]);
112 for(i=0; i<num_neg; i++)
113 fmatrix.col(classidx_neg[i])-=mean_neg;
116 for(i=0; i<num_pos; i++)
117 mean_pos+=fmatrix.col(classidx_pos[i]);
121 for(i=0; i<num_pos; i++)
122 fmatrix.col(classidx_pos[i])-=mean_pos;
130 MatrixXd cov_mat(num_feat, num_feat);
131 cov_mat=fmatrix*fmatrix.transpose();
132 scatter=cov_mat/(num_vec-1);
136 scatter.diagonal()+=VectorXd::Constant(num_feat, trace*
m_gamma/num_feat);
146 LLT<MatrixXd> decomposition(scatter);
147 x=decomposition.solve(mean_pos-mean_neg);
150 VectorXd w_neg=decomposition.solve(mean_neg);
151 VectorXd w_pos=decomposition.solve(mean_pos);
154 bias=0.5*(w_neg.dot(mean_neg)-w_pos.dot(mean_pos));
169 for(i=0; i<num_pos;i++)
170 cen_pos.col(i)=fmatrix.col(classidx_pos[i]);
172 for(i=0; i<num_neg;i++)
173 cen_neg.col(i)=fmatrix.col(classidx_neg[i]);
176 cen_pos=cen_pos*cen_pos.transpose()/(
float64_t(num_pos-1));
179 cen_neg=cen_neg*cen_neg.transpose()/(
float64_t(num_neg-1));
182 MatrixXd Sw= num_pos*cen_pos+num_neg*cen_neg;
186 Sw.diagonal()+=VectorXd::Constant(num_feat, trace*
m_gamma/num_feat);
189 VectorXd mean_total=(num_pos*mean_pos+num_neg*mean_neg)/(
float64_t)num_vec;
193 Sb.col(0)=sqrt(num_pos)*(mean_pos-mean_total);
194 Sb.col(1)=sqrt(num_neg)*(mean_neg-mean_total);
196 JacobiSVD<MatrixXd> svd(fmatrix1, ComputeThinU);
201 Sb=Q.transpose()*(Sb*(Sb.transpose()))*Q;
204 Sw=Q.transpose()*Sw*Q;
209 HouseholderQR<MatrixXd> decomposition(Sw.llt().matrixU().transpose());
215 JacobiSVD<MatrixXd> svd2(decomposition.solve((decomposition.solve(Sb))
216 .transpose()).transpose(), ComputeThinU);
221 x=Q*(svd2.matrixU().col(0));
223 bias=(x.transpose()*mean_total);
virtual const char * get_name() const =0
virtual bool train_machine(CFeatures *data=NULL)
virtual ELabelType get_label_type() const =0
The class Labels models labels, i.e. class assignments of objects.
virtual void set_features(CDotFeatures *feat)
Features that support dot products among other operations.
CLDA(float64_t gamma=0, ELDAMethod method=AUTO_LDA)
Class LinearMachine is a generic interface for all kinds of linear machines like classifiers.
All classes and functions are contained in the shogun namespace.
The class Features is the base class of all feature objects.
Binary Labels for binary classification.
bool has_property(EFeatureProperty p) const
virtual void set_labels(CLabels *lab)