26 model(NULL), norm_wc(NULL), norm_wcw(NULL), rho(0), m_num_classes(0)
33 norm_wc(NULL), norm_wcw(NULL), rho(0), m_num_classes(0)
39 norm_wc(NULL), norm_wcw(NULL), rho(0), m_num_classes(0)
60 SG_ERROR(
"Number of training vectors does not match number of labels\n")
67 for (int32_t i=0; i<num_vectors; i++)
71 int32_t Nmin=num_vectors;
88 result=train_no_bias_libsvm();
93 result=train_no_bias_svmlight();
101 SG_INFO(
"valid nu interval [%f ... %f]\n", nu_min, nu_max)
104 SG_ERROR(
"nu out of valid range [%f ... %f]\n", nu_min, nu_max)
106 result=train_testrule12();
114 bool CScatterSVM::train_no_bias_libsvm()
116 struct svm_node* x_space;
123 x_space=SG_MALLOC(struct svm_node, 2*
problem.l);
125 for (int32_t i=0; i<
problem.l; i++)
129 x_space[2*i].index=i;
130 x_space[2*i+1].index=-1;
133 int32_t weights_label[2]={-1,+1};
139 param.svm_type=C_SVC;
140 param.kernel_type = LINEAR;
155 param.weight_label = weights_label;
156 param.weight = weights;
160 const char* error_msg = svm_check_parameter(&
problem,&
param);
166 m_kernel->set_normalizer(prev_normalizer);
171 ASSERT((model->l==0) || (model->l>0 && model->SV && model->sv_coef && model->sv_coef))
181 for (int32_t i=0; i<m_num_classes; i++)
183 int32_t num_sv=model->nSV[i];
187 norm_wcw[i]=model->normwcw[i];
190 for (int32_t j=0; j<num_sv; j++)
204 SG_FREE(model->SV[i]);
207 svm_destroy_model(model);
220 bool CScatterSVM::train_no_bias_svmlight()
224 m_num_classes-1, -1,
m_labels, prev_normalizer);
233 norm_wcw = SG_MALLOC(float64_t, m_num_classes);
238 for (int32_t i=0; i<num_sv; i++)
247 #endif //USE_SVMLIGHT
249 bool CScatterSVM::train_testrule12()
251 struct svm_node* x_space;
253 SG_INFO(
"%d trainlabels\n", problem.l)
255 problem.y=SG_MALLOC(float64_t, problem.l);
256 problem.x=SG_MALLOC(struct svm_node*, problem.l);
257 x_space=SG_MALLOC(struct svm_node, 2*problem.l);
259 for (int32_t i=0; i<problem.l; i++)
262 problem.x[i]=&x_space[2*i];
263 x_space[2*i].index=i;
264 x_space[2*i+1].index=-1;
267 int32_t weights_label[2]={-1,+1};
273 param.svm_type=NU_MULTICLASS_SVC;
274 param.kernel_type = LINEAR;
286 param.weight_label = weights_label;
287 param.weight = weights;
291 const char* error_msg = svm_check_parameter(&problem,¶m);
296 model = svm_train(&problem, ¶m);
300 ASSERT((model->l==0) || (model->l>0 && model->SV && model->sv_coef && model->sv_coef))
302 ASSERT(model->nr_class==m_num_classes)
308 norm_wcw = SG_MALLOC(float64_t,
m_machines->get_num_elements());
310 for (int32_t i=0; i<m_num_classes; i++)
312 int32_t num_sv=model->nSV[i];
316 norm_wcw[i]=model->normwcw[i];
319 for (int32_t j=0; j<num_sv; j++)
333 SG_FREE(model->SV[i]);
336 svm_destroy_model(model);
348 void CScatterSVM::compute_norm_wc()
361 for (int32_t i=0; i<num_sv; i++)
364 for (int32_t j=0; j<num_sv; j++)
383 SG_ERROR(
"SVM can not proceed without kernel!\n")
398 for (int32_t i=0; i<num_vectors; i++)
404 float64_t* outputs=SG_MALLOC(float64_t, num_vectors*m_num_classes);
407 for (int32_t i=0; i<num_vectors; i++)
415 float64_t s= (label==c) ? (m_num_classes-1) : (-1);
421 for (int32_t i=0; i<num_vectors; i++)
424 float64_t max_out=outputs[i*m_num_classes+0];
428 float64_t out=outputs[i*m_num_classes+j];
442 #endif //USE_SVMLIGHT
456 outputs[i]=svm->
apply();
460 for (int32_t i=0; i<num_vectors; i++)
516 outputs[j]/=norm_wcw[j];
518 float64_t max_out=outputs[0];
521 if (outputs[j]>max_out)
533 #endif //USE_SVMLIGHT
541 if (outputs[i]>max_out)
virtual float64_t apply_one(int32_t num)
virtual bool init(CFeatures *lhs, CFeatures *rhs)
int32_t get_num_support_vectors()
virtual bool train_machine(CFeatures *data=NULL)
static void fill_vector(T *vec, int32_t len, T value)
virtual ELabelType get_label_type() const =0
Real Labels are real-valued labels.
virtual float64_t apply_one(int32_t num)
virtual int32_t get_num_labels() const
The class Labels models labels, i.e. class assignments of objects.
virtual int32_t get_num_labels() const =0
multi-class labels 0,1,...
virtual bool set_normalizer(CKernelNormalizer *normalizer)
CDynamicObjectArray * m_machines
virtual int32_t get_num_vectors() const =0
Trains a one class C SVM.
float64_t kernel(int32_t idx_a, int32_t idx_b)
int32_t get_num_elements() const
virtual int32_t get_num_vec_lhs()
int32_t cache_size
cache_size in MB
bool set_label(int32_t idx, float64_t label)
void display_vector(const char *name="vector", const char *prefix="") const
Multiclass Labels for multi-class classification.
virtual CKernelNormalizer * get_normalizer()
void set_bias(float64_t bias)
CMulticlassStrategy * m_multiclass_strategy
bool set_alpha(int32_t idx, float64_t val)
SCATTER_TYPE scatter_type
float64_t get_alpha(int32_t idx)
the scatter kernel normalizer
bool set_support_vector(int32_t idx, int32_t val)
The class Kernel Normalizer defines a function to post-process kernel values.
int32_t get_support_vector(int32_t idx)
virtual int32_t get_num_vec_rhs()
virtual bool init_normalizer()
int32_t get_num_classes() const
All classes and functions are contained in the shogun namespace.
training with bias using test rule 2
The class Features is the base class of all feature objects.
training with bias using test rule 1
bool create_multiclass_svm(int32_t num_classes)
virtual bool train(CFeatures *data=NULL)
A generic Support Vector Machine Interface.
void set_linadd_enabled(bool enable)
void set_kernel(CKernel *k)
multiclass one vs rest strategy used to train generic multiclass machines for K-class problems with b...
bool set_svm(int32_t num, CSVM *svm)
static float32_t sqrt(float32_t x)
virtual CLabels * classify_one_vs_rest()
virtual bool has_features()
virtual void set_labels(CLabels *lab)
#define SG_UNSTABLE(func,...)
bool create_new_model(int32_t num)
CSVM * get_svm(int32_t num)
virtual CLabels * apply(CFeatures *data=NULL)