// Default constructor.
// NOTE(review): the base-class initializer list, opening brace and the
// expected init() call (original lines 23-25) are elided in this excerpt;
// only the registration call is visible.
22 CFeatureBlockLogisticRegression::CFeatureBlockLogisticRegression() :
26 register_parameters();
// Constructor taking the regularization coefficient z, training features,
// binary training labels and the feature-block relation; delegates to the
// corresponding setters.
// NOTE(review): the base-class initializer list and any set_z(z) call
// (original lines 32-36) are elided in this excerpt — confirm against the
// full source that z is actually stored.
29 CFeatureBlockLogisticRegression::CFeatureBlockLogisticRegression(
30 float64_t z, CDotFeatures* train_features,
31 CBinaryLabels* train_labels, CIndexBlockRelation* feature_relation) :
35 set_feature_relation(feature_relation);
37 set_features(train_features);
38 set_labels(train_labels);
39 register_parameters();
// init(): resets members to their defaults. Only the relation-pointer reset
// is visible here; the remaining member defaults (original lines 45+) are
// elided in this excerpt.
42 void CFeatureBlockLogisticRegression::init()
44 m_feature_relation=NULL;
// Destructor.
// NOTE(review): body not visible in this excerpt; since the getter SG_REFs
// m_feature_relation, the destructor is expected to SG_UNREF it — confirm
// against the full source.
53 CFeatureBlockLogisticRegression::~CFeatureBlockLogisticRegression()
// Registers the model's parameters with the parameter framework.
// NOTE(review): body not visible in this excerpt.
58 void CFeatureBlockLogisticRegression::register_parameters()
69 CIndexBlockRelation* CFeatureBlockLogisticRegression::get_feature_relation()
const
71 SG_REF(m_feature_relation);
72 return m_feature_relation;
75 void CFeatureBlockLogisticRegression::set_feature_relation(CIndexBlockRelation* feature_relation)
79 m_feature_relation = feature_relation;
82 int32_t CFeatureBlockLogisticRegression::get_max_iter()
const
87 int32_t CFeatureBlockLogisticRegression::get_regularization()
const
89 return m_regularization;
92 int32_t CFeatureBlockLogisticRegression::get_termination()
const
97 float64_t CFeatureBlockLogisticRegression::get_tolerance()
const
102 float64_t CFeatureBlockLogisticRegression::get_z()
const
107 float64_t CFeatureBlockLogisticRegression::get_q()
const
112 void CFeatureBlockLogisticRegression::set_max_iter(int32_t max_iter)
115 m_max_iter = max_iter;
118 void CFeatureBlockLogisticRegression::set_regularization(int32_t regularization)
120 ASSERT(regularization==0 || regularization==1)
121 m_regularization = regularization;
124 void CFeatureBlockLogisticRegression::set_termination(int32_t termination)
126 ASSERT(termination>=0 && termination<=4)
127 m_termination = termination;
130 void CFeatureBlockLogisticRegression::set_tolerance(
float64_t tolerance)
133 m_tolerance = tolerance;
136 void CFeatureBlockLogisticRegression::set_z(
float64_t z)
141 void CFeatureBlockLogisticRegression::set_q(
float64_t q)
// Trains the model with the SLEP solver, dispatching on the type of the
// feature-block relation (group vs. tree).
// NOTE(review): this excerpt is missing several original lines — braces,
// the declaration of y, the `case` labels of the switch, the set_w(new_w)
// and return statements, and the default case body — so the comments below
// describe only what is visible.
146 bool CFeatureBlockLogisticRegression::train_machine(CFeatures* data)
// Optionally replace the stored features with the ones passed in.
// NOTE(review): `data && (CDotFeatures*)data` is redundant — a C-style cast
// of a non-NULL pointer is never NULL; a dynamic_cast check was probably
// intended.
148 if (data && (CDotFeatures*)data)
149 set_features((CDotFeatures*)data);
// Copy the binary labels into a dense vector y (its declaration, original
// line 155, is elided in this excerpt).
154 int32_t n_vecs = m_labels->get_num_labels();
156 for (int32_t i=0; i<n_vecs; i++)
157 y[i] = ((CBinaryLabels*)m_labels)->get_label(i);
// Configure the SLEP solver from this model's hyper-parameters.
159 slep_options options = slep_options::default_options();
161 options.regularization = m_regularization;
162 options.termination = m_termination;
163 options.tolerance = m_tolerance;
164 options.max_iter = m_max_iter;
165 options.loss = LOGISTIC;
// Dispatch on how the feature blocks are related.
167 EIndexBlockRelationType relation_type = m_feature_relation->get_relation_type();
168 switch (relation_type)
// --- Group relation case (case label elided in excerpt) ---
172 CIndexBlockGroup* feature_group = (CIndexBlockGroup*)m_feature_relation;
// SLEP group encoding: ind has n_feature_blocks+1 boundary indices.
173 SGVector<index_t> ind = feature_group->get_SLEP_ind();
174 options.ind = ind.vector;
175 options.n_feature_blocks = ind.vlen-1;
// The last boundary must not exceed the feature-space dimension.
176 if (ind[ind.vlen-1] > features->get_dim_feature_space())
177 SG_ERROR(
"Group of features covers more features than available\n")
// Uniform per-group weights.
179 options.gWeight = SG_MALLOC(
double, options.n_feature_blocks);
180 for (int32_t i=0; i<options.n_feature_blocks; i++)
181 options.gWeight[i] = 1.0;
182 options.mode = FEATURE_GROUP;
183 options.loss = LOGISTIC;
// Solve, then release the temporary group weights.
185 slep_result_t result = slep_solver(features, y.vector, m_z, options);
187 SG_FREE(options.gWeight);
// Copy the learned weights and bias out of the solver result.
// NOTE(review): the set_w(new_w) call expected after this copy is elided.
188 int32_t n_feats = features->get_dim_feature_space();
189 SGVector<float64_t> new_w(n_feats);
190 for (
int i=0; i<n_feats; i++)
191 new_w[i] = result.w[i];
192 set_bias(result.c[0]);
// --- Tree relation case (case label elided in excerpt) ---
199 CIndexBlockTree* feature_tree = (CIndexBlockTree*)m_feature_relation;
// SLEP tree encoding: ind_t holds 3 values per node.
201 SGVector<float64_t> ind_t = feature_tree->get_SLEP_ind_t();
202 SGVector<float64_t> G;
// General trees additionally supply the G index structure.
203 if (feature_tree->is_general())
205 G = feature_tree->get_SLEP_G();
206 options.general =
true;
208 options.ind_t = ind_t.vector;
209 options.G = G.vector;
210 options.n_nodes = ind_t.vlen/3;
211 options.n_feature_blocks = ind_t.vlen/3;
212 options.mode = FEATURE_TREE;
213 options.loss = LOGISTIC;
215 slep_result_t result = slep_solver(features, y.vector, m_z, options);
// Copy the learned weights and bias out of the solver result.
// NOTE(review): the set_w(new_w) call expected after this copy is elided.
217 int32_t n_feats = features->get_dim_feature_space();
218 SGVector<float64_t> new_w(n_feats);
219 for (
int i=0; i<n_feats; i++)
220 new_w[i] = result.w[i];
222 set_bias(result.c[0]);
// --- default case (label elided): unsupported relation types abort ---
228 SG_ERROR(
"Not supported feature relation type\n")
234 float64_t CFeatureBlockLogisticRegression::apply_one(int32_t vec_idx)
236 return CMath::exp(-(features->dense_dot(vec_idx, w.vector, w.vlen) + bias));
// Computes model outputs for every vector: each margin m is squashed to
// 2/(1+exp(-m)) - 1, i.e. into (-1,1).
// NOTE(review): several original lines are elided in this excerpt — the
// `if (data)` guard, the `if (!features)` early-out, and the SG_MALLOC
// allocation of `out` (original line ~256) — comments describe only what
// is visible.
239 SGVector<float64_t> CFeatureBlockLogisticRegression::apply_get_outputs(CFeatures* data)
// Incoming features must support dot-products.
243 if (!data->has_property(
FP_DOT))
244 SG_ERROR(
"Specified features are not of type CDotFeatures\n")
246 set_features((CDotFeatures*) data);
// Without features there is nothing to score: return an empty vector.
250 return SGVector<float64_t>();
252 int32_t num=features->get_num_vectors();
// The weight vector must match the feature-space dimensionality.
254 ASSERT(w.vlen==features->get_dim_feature_space())
// Batch dot-products plus bias into `out` (allocation elided in excerpt).
257 features->dense_dot_range(out, 0, num, NULL, w.vector, w.vlen, bias);
// Squash each margin into (-1,1).
258 for (int32_t i=0; i<num; i++)
259 out[i] = 2.0/(1.0+CMath::exp(-out[i])) - 1.0;
// The returned SGVector takes ownership of `out`.
260 return SGVector<float64_t>(out,num);
264 #endif //USE_GPL_SHOGUN
All of the classes and functions above are contained in the `shogun` namespace; `CMath` provides `static float64_t exp(float64_t x)`, which is used by `apply_one` and `apply_get_outputs`.