SHOGUN 4.2.0
GUIClassifier.cpp
1 /*
2  * This program is free software; you can redistribute it and/or modify
3  * it under the terms of the GNU General Public License as published by
4  * the Free Software Foundation; either version 3 of the License, or
5  * (at your option) any later version.
6  *
7  * Written (W) 1999-2009 Soeren Sonnenburg
8  * Written (W) 1999-2008 Gunnar Raetsch
9  * Copyright (C) 1999-2009 Fraunhofer Institute FIRST and Max-Planck-Society
10  */
12 #include <shogun/ui/SGInterface.h>
13 
14 #include <shogun/lib/config.h>
15 #include <shogun/io/SGIO.h>
16 
19 #include <shogun/labels/Labels.h>
20 
22 
23 #include <shogun/multiclass/KNN.h>
27 
28 #include <shogun/classifier/LDA.h>
29 #include <shogun/classifier/LPM.h>
32 
34 
35 #ifdef USE_SVMLIGHT
39 #endif //USE_SVMLIGHT
40 
47 #ifdef USE_GPL_SHOGUN
49 #endif //USE_GPL_SHOGUN
52 
55 
61 
66 
68 
69 using namespace shogun;
70 
71 CGUIClassifier::CGUIClassifier(CSGInterface* ui_)
72 : CSGObject(), ui(ui_)
73 {
75  classifier=NULL;
77 
78  // Perceptron parameters
80  perceptron_maxiter=1000;
81 
82  // SVM parameters
83  svm_qpsize=41;
84  svm_bufsize=3000;
85  svm_max_qpsize=1000;
86  mkl_norm=1;
87  ent_lambda=0;
89  svm_C1=1;
90  svm_C2=1;
91  C_mkl=0;
93  svm_weight_epsilon=1e-5;
94  svm_epsilon=1e-5;
95  svm_tube_epsilon=1e-2;
96  svm_nu=0.5;
97  svm_use_shrinking = true ;
98 
99  svm_use_bias = true;
101  svm_use_linadd = true ;
102  svm_do_auc_maximization = false ;
103 
104  // KRR parameters
105  krr_tau=1;
106 
108 }
109 
110 CGUIClassifier::~CGUIClassifier()
111 {
114 }
115 
116 bool CGUIClassifier::new_classifier(char* name, int32_t d, int32_t from_d)
117 {
118  if (strcmp(name,"LIBSVM_ONECLASS")==0)
119  {
121  classifier = new CLibSVMOneClass();
122  SG_INFO("created SVMlibsvm object for oneclass\n")
123  }
124  else if (strcmp(name,"LIBSVM_MULTICLASS")==0)
125  {
127  classifier= new CMulticlassLibSVM();
128  SG_INFO("created SVMlibsvm object for multiclass\n")
129  }
130  else if (strcmp(name,"LIBSVM_NUMULTICLASS")==0)
131  {
133  classifier= new CMulticlassLibSVM(LIBSVM_NU_SVC);
134  SG_INFO("created SVMlibsvm object for multiclass\n")
135  }
136 #ifdef USE_SVMLIGHT
137  else if (strcmp(name,"SCATTERSVM_NO_BIAS_SVMLIGHT")==0)
138  {
141  SG_INFO("created ScatterSVM NO BIAS SVMLIGHT object\n")
142  }
143 #endif //USE_SVMLIGHT
144  else if (strcmp(name,"SCATTERSVM_NO_BIAS_LIBSVM")==0)
145  {
148  SG_INFO("created ScatterSVM NO BIAS LIBSVM object\n")
149  }
150  else if (strcmp(name,"SCATTERSVM_TESTRULE1")==0)
151  {
154  SG_INFO("created ScatterSVM TESTRULE1 object\n")
155  }
156  else if (strcmp(name,"SCATTERSVM_TESTRULE2")==0)
157  {
160  SG_INFO("created ScatterSVM TESTRULE2 object\n")
161  }
162  else if (strcmp(name,"LIBSVM_NU")==0)
163  {
165  classifier= new CLibSVM(LIBSVM_NU_SVC);
166  SG_INFO("created SVMlibsvm object\n")
167  }
168  else if (strcmp(name,"LIBSVM")==0)
169  {
171  classifier= new CLibSVM();
172  SG_INFO("created SVMlibsvm object\n")
173  }
174  else if (strcmp(name,"LARANK")==0)
175  {
177  classifier= new CLaRank();
178  SG_INFO("created LaRank object\n")
179  }
180 #ifdef USE_SVMLIGHT
181  else if ((strcmp(name,"LIGHT")==0) || (strcmp(name,"SVMLIGHT")==0))
182  {
184  classifier= new CSVMLight();
185  SG_INFO("created SVMLight object\n")
186  }
187  else if (strcmp(name,"SVMLIGHT_ONECLASS")==0)
188  {
190  classifier= new CSVMLightOneClass();
191  SG_INFO("created SVMLightOneClass object\n")
192  }
193  else if (strcmp(name,"SVRLIGHT")==0)
194  {
196  classifier= new CSVRLight();
197  SG_INFO("created SVRLight object\n")
198  }
199 #endif //USE_SVMLIGHT
200 #ifdef USE_GPL_SHOGUN
201  else if (strcmp(name,"GPBTSVM")==0)
202  {
204  classifier= new CGPBTSVM();
205  SG_INFO("created GPBT-SVM object\n")
206  }
207 #endif //USE_GPL_SHOGUN
208  else if (strcmp(name,"MPDSVM")==0)
209  {
211  classifier= new CMPDSVM();
212  SG_INFO("created MPD-SVM object\n")
213  }
214  else if (strcmp(name,"GNPPSVM")==0)
215  {
217  classifier= new CGNPPSVM();
218  SG_INFO("created GNPP-SVM object\n")
219  }
220  else if (strcmp(name,"GMNPSVM")==0)
221  {
223  classifier= new CGMNPSVM();
224  SG_INFO("created GMNP-SVM object\n")
225  }
226  else if (strcmp(name,"LIBSVR")==0)
227  {
229  classifier= new CLibSVR();
230  SG_INFO("created SVRlibsvm object\n")
231  }
232 #ifdef HAVE_LAPACK
233  else if (strcmp(name, "KERNELRIDGEREGRESSION")==0)
234  {
236  classifier=new CKernelRidgeRegression(krr_tau, ui->ui_kernel->get_kernel(),
237  ui->ui_labels->get_train_labels());
238  SG_INFO("created KernelRidgeRegression object %p\n", classifier)
239  }
240 #endif //HAVE_LAPACK
241  else if (strcmp(name,"PERCEPTRON")==0)
242  {
244  classifier= new CPerceptron();
245  SG_INFO("created Perceptron object\n")
246  }
247 #ifdef HAVE_LAPACK
248  else if (strncmp(name,"LIBLINEAR",9)==0)
249  {
250  LIBLINEAR_SOLVER_TYPE st=L2R_LR;
251 
252  if (strcmp(name,"LIBLINEAR_L2R_LR")==0)
253  {
254  st=L2R_LR;
255  SG_INFO("created LibLinear l2 regularized logistic regression object\n")
256  }
257  else if (strcmp(name,"LIBLINEAR_L2R_L2LOSS_SVC_DUAL")==0)
258  {
259  st=L2R_L2LOSS_SVC_DUAL;
260  SG_INFO("created LibLinear l2 regularized l2 loss SVM dual object\n")
261  }
262  else if (strcmp(name,"LIBLINEAR_L2R_L2LOSS_SVC")==0)
263  {
264  st=L2R_L2LOSS_SVC;
265  SG_INFO("created LibLinear l2 regularized l2 loss SVM primal object\n")
266  }
267  else if (strcmp(name,"LIBLINEAR_L1R_L2LOSS_SVC")==0)
268  {
269  st=L1R_L2LOSS_SVC;
270  SG_INFO("created LibLinear l1 regularized l2 loss SVM primal object\n")
271  }
272  else if (strcmp(name,"LIBLINEAR_L2R_L1LOSS_SVC_DUAL")==0)
273  {
274  st=L2R_L1LOSS_SVC_DUAL;
275  SG_INFO("created LibLinear l2 regularized l1 loss dual SVM object\n")
276  }
277  else
278  SG_ERROR("unknown liblinear type\n")
279 
281  classifier= new CLibLinear(st);
282  ((CLibLinear*) classifier)->set_C(svm_C1, svm_C2);
283  ((CLibLinear*) classifier)->set_epsilon(svm_epsilon);
284  ((CLibLinear*) classifier)->set_bias_enabled(svm_use_bias);
285  }
286 #endif //HAVE_LAPACK
287  else if (strcmp(name,"LDA")==0)
288  {
290  classifier= new CLDA();
291  SG_INFO("created LDA object\n")
292  }
293 #ifdef USE_CPLEX
294  else if (strcmp(name,"LPM")==0)
295  {
297  classifier= new CLPM();
298  ((CLPM*) classifier)->set_C(svm_C1, svm_C2);
299  ((CLPM*) classifier)->set_epsilon(svm_epsilon);
300  ((CLPM*) classifier)->set_bias_enabled(svm_use_bias);
301  ((CLPM*) classifier)->set_max_train_time(max_train_time);
302  SG_INFO("created LPM object\n")
303  }
304  else if (strcmp(name,"LPBOOST")==0)
305  {
307  classifier= new CLPBoost();
308  ((CLPBoost*) classifier)->set_C(svm_C1, svm_C2);
309  ((CLPBoost*) classifier)->set_epsilon(svm_epsilon);
310  ((CLPBoost*) classifier)->set_bias_enabled(svm_use_bias);
311  ((CLPBoost*) classifier)->set_max_train_time(max_train_time);
312  SG_INFO("created LPBoost object\n")
313  }
314 #endif //USE_CPLEX
315  else if (strncmp(name,"KNN", strlen("KNN"))==0)
316  {
318  classifier= new CKNN();
319  SG_INFO("created KNN object\n")
320  }
321  else if (strncmp(name,"KMEANS", strlen("KMEANS"))==0)
322  {
324  classifier= new CKMeans();
325  SG_INFO("created KMeans object\n")
326  }
327  else if (strncmp(name,"HIERARCHICAL", strlen("HIERARCHICAL"))==0)
328  {
330  classifier= new CHierarchical();
331  SG_INFO("created Hierarchical clustering object\n")
332  }
333  else if (strcmp(name,"SVMLIN")==0)
334  {
336  classifier= new CSVMLin();
337  ((CSVMLin*) classifier)->set_C(svm_C1, svm_C2);
338  ((CSVMLin*) classifier)->set_epsilon(svm_epsilon);
339  ((CSVMLin*) classifier)->set_bias_enabled(svm_use_bias);
340  SG_INFO("created SVMLin object\n")
341  }
342 #ifdef USE_GPL_SHOGUN
343  else if (strncmp(name,"WDSVMOCAS", strlen("WDSVMOCAS"))==0)
344  {
346  classifier= new CWDSVMOcas(SVM_OCAS);
347 
348  ((CWDSVMOcas*) classifier)->set_bias_enabled(svm_use_bias);
349  ((CWDSVMOcas*) classifier)->set_degree(d, from_d);
350  ((CWDSVMOcas*) classifier)->set_C(svm_C1, svm_C2);
351  ((CWDSVMOcas*) classifier)->set_epsilon(svm_epsilon);
352  ((CWDSVMOcas*) classifier)->set_bufsize(svm_bufsize);
353  SG_INFO("created Weighted Degree Kernel SVM Ocas(OCAS) object of order %d (from order:%d)\n", d, from_d)
354  }
355  else if (strcmp(name,"SVMOCAS")==0)
356  {
358  classifier= new CSVMOcas(SVM_OCAS);
359 
360  ((CSVMOcas*) classifier)->set_C(svm_C1, svm_C2);
361  ((CSVMOcas*) classifier)->set_epsilon(svm_epsilon);
362  ((CSVMOcas*) classifier)->set_bufsize(svm_bufsize);
363  ((CSVMOcas*) classifier)->set_bias_enabled(svm_use_bias);
364  SG_INFO("created SVM Ocas(OCAS) object\n")
365  }
366 #endif //USE_GPL_SHOGUN
367  else if (strcmp(name,"SVMSGD")==0)
368  {
370  classifier= new CSVMSGD(svm_C1);
371  ((CSVMSGD*) classifier)->set_bias_enabled(svm_use_bias);
372  SG_INFO("created SVM SGD object\n")
373  }
374 #ifdef USE_GPL_SHOGUN
375  else if (strcmp(name,"SVMBMRM")==0 || (strcmp(name,"SVMPERF")==0))
376  {
378  classifier= new CSVMOcas(SVM_BMRM);
379 
380  ((CSVMOcas*) classifier)->set_C(svm_C1, svm_C2);
381  ((CSVMOcas*) classifier)->set_epsilon(svm_epsilon);
382  ((CSVMOcas*) classifier)->set_bufsize(svm_bufsize);
383  ((CSVMOcas*) classifier)->set_bias_enabled(svm_use_bias);
384  SG_INFO("created SVM Ocas(BMRM/PERF) object\n")
385  }
386 #endif
387  else if (strcmp(name,"MKL_CLASSIFICATION")==0)
388  {
391  }
392  else if (strcmp(name,"MKL_ONECLASS")==0)
393  {
395  classifier= new CMKLOneClass();
396  }
397  else if (strcmp(name,"MKL_MULTICLASS")==0)
398  {
400  classifier= new CMKLMulticlass();
401  }
402  else if (strcmp(name,"MKL_REGRESSION")==0)
403  {
405  classifier= new CMKLRegression();
406  }
407  else
408  {
409  SG_ERROR("Unknown classifier %s.\n", name)
410  return false;
411  }
413 
414  return (classifier!=NULL);
415 }
416 
417 bool CGUIClassifier::train_mkl_multiclass()
418 {
419  CMKLMulticlass* mkl= (CMKLMulticlass*) classifier;
420  if (!mkl)
421  SG_ERROR("No MKL available.\n")
422 
423  CLabels* trainlabels=ui->ui_labels->get_train_labels();
424  if (!trainlabels)
425  SG_ERROR("No trainlabels available.\n")
426 
427  CKernel* kernel=ui->ui_kernel->get_kernel();
428  if (!kernel)
429  SG_ERROR("No kernel available.\n")
430 
431  bool success=ui->ui_kernel->init_kernel("TRAIN");
432 
433  if (!success || !ui->ui_kernel->is_initialized() || !kernel->has_features())
434  SG_ERROR("Kernel not initialized / no train features available.\n")
435 
436  int32_t num_vec=kernel->get_num_vec_lhs();
437  if (trainlabels->get_num_labels() != num_vec)
438  SG_ERROR("Number of train labels (%d) and training vectors (%d) differs!\n", trainlabels->get_num_labels(), num_vec)
439 
440  SG_INFO("Starting MC-MKL training on %ld vectors using C1=%lf C2=%lf epsilon=%lf\n", num_vec, svm_C1, svm_C2, svm_epsilon)
441 
443  mkl->set_mkl_norm(mkl_norm);
444  //mkl->set_max_num_mkliters(-1);
447  mkl->set_epsilon(svm_epsilon);
450  mkl->set_nu(svm_nu);
451  mkl->set_C(svm_C1);
452  mkl->set_qpsize(svm_qpsize);
456 
457  ((CKernelMulticlassMachine*) mkl)->set_labels(trainlabels);
458  ((CKernelMulticlassMachine*) mkl)->set_kernel(kernel);
459 
460  return mkl->train();
461 }
462 
463 bool CGUIClassifier::train_mkl()
464 {
465  CMKL* mkl= (CMKL*) classifier;
466  if (!mkl)
467  SG_ERROR("No SVM available.\n")
468 
469  bool oneclass=(mkl->get_classifier_type()==CT_LIBSVMONECLASS);
470  CLabels* trainlabels=NULL;
471  if(!oneclass)
472  trainlabels=ui->ui_labels->get_train_labels();
473  else
474  SG_INFO("Training one class mkl.\n")
475  if (!trainlabels && !oneclass)
476  SG_ERROR("No trainlabels available.\n")
477 
478  CKernel* kernel=ui->ui_kernel->get_kernel();
479  if (!kernel)
480  SG_ERROR("No kernel available.\n")
481 
482  bool success=ui->ui_kernel->init_kernel("TRAIN");
483  if (!success || !ui->ui_kernel->is_initialized() || !kernel->has_features())
484  SG_ERROR("Kernel not initialized.\n")
485 
486  int32_t num_vec=kernel->get_num_vec_lhs();
487  if (!oneclass && trainlabels->get_num_labels() != num_vec)
488  SG_ERROR("Number of train labels (%d) and training vectors (%d) differs!\n", trainlabels->get_num_labels(), num_vec)
489 
490  SG_INFO("Starting SVM training on %ld vectors using C1=%lf C2=%lf epsilon=%lf\n", num_vec, svm_C1, svm_C2, svm_epsilon)
491 
496  mkl->set_epsilon(svm_epsilon);
499  mkl->set_nu(svm_nu);
500  mkl->set_C(svm_C1, svm_C2);
501  mkl->set_qpsize(svm_qpsize);
506  mkl->set_mkl_norm(mkl_norm);
509  mkl->set_C_mkl(C_mkl);
511 
512  if (svm_do_auc_maximization)
513  {
514  CAUCKernel* auc_kernel = new CAUCKernel(10, kernel);
515  CLabels* auc_labels= auc_kernel->setup_auc_maximization(trainlabels);
516  ((CKernelMachine*) mkl)->set_labels(auc_labels);
517  ((CKernelMachine*) mkl)->set_kernel(auc_kernel);
518  SG_UNREF(auc_labels);
519  }
520  else
521  {
522  if(!oneclass)
523  ((CKernelMachine*) mkl)->set_labels(trainlabels);
524  ((CKernelMachine*) mkl)->set_kernel(kernel);
525  }
526 
527  bool result=mkl->train();
528 
529  return result;
530 }
531 
532 bool CGUIClassifier::train_svm()
533 {
534  EMachineType type = classifier->get_classifier_type();
535 
536  if (!classifier)
537  SG_ERROR("No SVM available.\n")
538 
539  bool oneclass=(type==CT_LIBSVMONECLASS);
540  CLabels* trainlabels=NULL;
541  if(!oneclass)
542  trainlabels=ui->ui_labels->get_train_labels();
543  else
544  SG_INFO("Training one class svm.\n")
545  if (!trainlabels && !oneclass)
546  SG_ERROR("No trainlabels available.\n")
547 
548  CKernel* kernel=ui->ui_kernel->get_kernel();
549  if (!kernel)
550  SG_ERROR("No kernel available.\n")
551 
552  bool success=ui->ui_kernel->init_kernel("TRAIN");
553 
554  if (!success || !ui->ui_kernel->is_initialized() || !kernel->has_features())
555  SG_ERROR("Kernel not initialized / no train features available.\n")
556 
557  int32_t num_vec=kernel->get_num_vec_lhs();
558  if (!oneclass && trainlabels->get_num_labels() != num_vec)
559  SG_ERROR("Number of train labels (%d) and training vectors (%d) differs!\n", trainlabels->get_num_labels(), num_vec)
560 
561  SG_INFO("Starting SVM training on %ld vectors using C1=%lf C2=%lf epsilon=%lf\n", num_vec, svm_C1, svm_C2, svm_epsilon)
562 
563  if (type==CT_LARANK || type==CT_GMNPSVM || type==CT_LIBSVMMULTICLASS)
564  {
565  CMulticlassSVM* svm = (CMulticlassSVM*) classifier;
568  svm->set_epsilon(svm_epsilon);
571  svm->set_nu(svm_nu);
572  svm->set_C(svm_C1);
573  svm->set_qpsize(svm_qpsize);
577  }
578  else
579  {
580  CSVM* svm = (CSVM*)classifier;
583  svm->set_epsilon(svm_epsilon);
586  svm->set_nu(svm_nu);
587  svm->set_C(svm_C1, svm_C2);
588  svm->set_qpsize(svm_qpsize);
592  }
593 
594  if (type==CT_MKLMULTICLASS)
595  {
596  ((CMKLMulticlass *)classifier)->set_mkl_epsilon(svm_weight_epsilon);
597  }
598 
599  if (svm_do_auc_maximization)
600  {
601  CAUCKernel* auc_kernel = new CAUCKernel(10, kernel);
602  CLabels* auc_labels = auc_kernel->setup_auc_maximization(trainlabels);
603  ((CKernelMachine*)classifier)->set_labels(auc_labels);
604  ((CKernelMachine*)classifier)->set_kernel(auc_kernel);
605  SG_UNREF(auc_labels);
606  }
607  else
608  {
609  if (type==CT_LARANK || type==CT_GMNPSVM || type==CT_LIBSVMMULTICLASS)
610  {
611  ((CKernelMulticlassMachine*)classifier)->set_labels(trainlabels);
612  ((CKernelMulticlassMachine*)classifier)->set_kernel(kernel);
613  }
614  else
615  {
616  if(!oneclass)
617  ((CKernelMachine*)classifier)->set_labels(trainlabels);
618 
619  ((CKernelMachine*)classifier)->set_kernel(kernel);
620  }
621  }
622 
623  bool result = classifier->train();
624 
625  return result;
626 }
627 
628 bool CGUIClassifier::train_clustering(int32_t k, int32_t max_iter)
629 {
630  bool result=false;
631  CDistance* distance=ui->ui_distance->get_distance();
632 
633  if (!distance)
634  SG_ERROR("No distance available\n")
635 
636  if (!ui->ui_distance->init_distance("TRAIN"))
637  SG_ERROR("Initializing distance with train features failed.\n")
638 
639  ((CDistanceMachine*) classifier)->set_distance(distance);
640 
641  EMachineType type=classifier->get_classifier_type();
642  switch (type)
643  {
644  case CT_KMEANS:
645  {
646  ((CKMeans*) classifier)->set_k(k);
647  ((CKMeans*) classifier)->set_max_iter(max_iter);
648  result=((CKMeans*) classifier)->train();
649  break;
650  }
651  case CT_HIERARCHICAL:
652  {
653  ((CHierarchical*) classifier)->set_merges(k);
654  result=((CHierarchical*) classifier)->train();
655  break;
656  }
657  default:
658  SG_ERROR("Unknown clustering type %d\n", type)
659  }
660 
661  return result;
662 }
663 
664 bool CGUIClassifier::train_knn(int32_t k)
665 {
666  CLabels* trainlabels=ui->ui_labels->get_train_labels();
667  CDistance* distance=ui->ui_distance->get_distance();
668 
669  bool result=false;
670 
671  if (trainlabels)
672  {
673  if (distance)
674  {
675  if (!ui->ui_distance->init_distance("TRAIN"))
676  SG_ERROR("Initializing distance with train features failed.\n")
677  ((CKNN*) classifier)->set_labels(trainlabels);
678  ((CKNN*) classifier)->set_distance(distance);
679  ((CKNN*) classifier)->set_k(k);
680  result=((CKNN*) classifier)->train();
681  }
682  else
683  SG_ERROR("No distance available.\n")
684  }
685  else
686  SG_ERROR("No labels available\n")
687 
688  return result;
689 }
690 
691 bool CGUIClassifier::train_krr()
692 {
693 #ifdef HAVE_LAPACK
694  CKernelRidgeRegression* krr = (CKernelRidgeRegression*) classifier;
695  if (!krr)
696  SG_ERROR("No SVM available.\n")
697 
698  CLabels* trainlabels=NULL;
699  trainlabels=ui->ui_labels->get_train_labels();
700  if (!trainlabels)
701  SG_ERROR("No trainlabels available.\n")
702 
703  CKernel* kernel=ui->ui_kernel->get_kernel();
704  if (!kernel)
705  SG_ERROR("No kernel available.\n")
706 
707  bool success=ui->ui_kernel->init_kernel("TRAIN");
708 
709  if (!success || !ui->ui_kernel->is_initialized() || !kernel->has_features())
710  SG_ERROR("Kernel not initialized / no train features available.\n")
711 
712  int32_t num_vec=kernel->get_num_vec_lhs();
713  if (trainlabels->get_num_labels() != num_vec)
714  SG_ERROR("Number of train labels (%d) and training vectors (%d) differs!\n", trainlabels->get_num_labels(), num_vec)
715 
716 
717  // Set training labels and kernel
718  krr->set_labels(trainlabels);
719  krr->set_kernel(kernel);
720 
721  bool result=krr->train();
722  return result;
723 #else
724  return false;
725 #endif
726 }
727 
728 bool CGUIClassifier::train_linear(float64_t gamma)
729 {
730  EMachineType ctype = classifier->get_classifier_type();
732  CFeatures* trainfeatures=ui->ui_features->get_train_features();
733  CLabels* trainlabels=ui->ui_labels->get_train_labels();
734  bool result=false;
735 
736  if (!trainfeatures)
737  SG_ERROR("No trainfeatures available.\n")
738 
739  if (!trainfeatures->has_property(FP_DOT))
740  SG_ERROR("Trainfeatures not based on DotFeatures.\n")
741 
742  if (!trainlabels)
743  SG_ERROR("No labels available\n")
744 
745  if (ctype==CT_PERCEPTRON)
746  {
747  ((CPerceptron*) classifier)->set_learn_rate(perceptron_learnrate);
748  ((CPerceptron*) classifier)->set_max_iter(perceptron_maxiter);
749  }
750 
751  if (ctype==CT_LDA)
752  {
753  if (trainfeatures->get_feature_type()!=F_DREAL ||
754  trainfeatures->get_feature_class()!=C_DENSE)
755  SG_ERROR("LDA requires train features of class SIMPLE type REAL.\n")
756  ((CLDA*) classifier)->set_gamma(gamma);
757  }
758  if (ctype==CT_SVMLIN)
759  ((CSVMLin*) classifier)->set_C(svm_C1, svm_C2);
760 #ifdef USE_GPL_SHOGUN
761  else if (ctype==CT_SVMOCAS)
762  ((CSVMOcas*) classifier)->set_C(svm_C1, svm_C2);
763 #endif
764 #ifdef HAVE_LAPACK
765  else if (ctype==CT_LIBLINEAR)
766  ((CLibLinear*) classifier)->set_C(svm_C1, svm_C2);
767 #endif
768  else if (ctype==CT_SVMSGD)
769  ((CSVMSGD*) classifier)->set_C(svm_C1, svm_C2);
770  else if (ctype==CT_LPM || ctype==CT_LPBOOST)
771  {
772  if (trainfeatures->get_feature_class()!=C_SPARSE ||
773  trainfeatures->get_feature_type()!=F_DREAL)
774  SG_ERROR("LPM and LPBOOST require trainfeatures of class SPARSE type REAL.\n")
775  }
776 
777  ((CLinearMachine*) classifier)->set_labels(trainlabels);
778  ((CLinearMachine*) classifier)->set_features((CDenseFeatures<float64_t>*) trainfeatures);
779  result=((CLinearMachine*) classifier)->train();
780 
781  return result;
782 }
783 
784 #ifdef USE_GPL_SHOGUN
785 bool CGUIClassifier::train_wdocas()
786 {
787  CFeatures* trainfeatures=ui->ui_features->get_train_features();
788  CLabels* trainlabels=ui->ui_labels->get_train_labels();
789 
790  bool result=false;
791 
792  if (!trainfeatures)
793  SG_ERROR("No trainfeatures available.\n")
794 
795  if (trainfeatures->get_feature_class()!=C_STRING ||
796  trainfeatures->get_feature_type()!=F_BYTE )
797  SG_ERROR("Trainfeatures are not of class STRING type BYTE.\n")
798 
799  if (!trainlabels)
800  SG_ERROR("No labels available.\n")
801 
802  ((CWDSVMOcas*) classifier)->set_labels(trainlabels);
803  ((CWDSVMOcas*) classifier)->set_features((CStringFeatures<uint8_t>*) trainfeatures);
804  result=((CWDSVMOcas*) classifier)->train();
805 
806  return result;
807 }
808 #endif //USE_GPL_SHOGUN
809 
810 bool CGUIClassifier::load(char* filename, char* type)
811 {
812  bool result=false;
813 
814  if (new_classifier(type))
815  {
816  FILE* model_file=fopen(filename, "r");
817  REQUIRE(model_file != NULL, "SVM/Classifier loading failed on file %s.\n", filename);
818 
819  CSerializableAsciiFile* ascii_file = new CSerializableAsciiFile(model_file,'r');
820 
821  if (ascii_file)
822  {
823  if (classifier && classifier->load_serializable(ascii_file))
824  {
825  SG_DEBUG("file successfully read.\n")
826  result=true;
827  }
828  else
829  SG_ERROR("SVM/Classifier creation/loading failed on file %s.\n", filename)
830 
831  delete ascii_file;
832  }
833  else
834  SG_ERROR("Opening file %s failed.\n", filename)
835 
836  return result;
837  }
838  else
839  SG_ERROR("Type %s of SVM/Classifier unknown.\n", type)
840 
841  return false;
842 }
843 
844 bool CGUIClassifier::save(char* param)
845 {
846  bool result=false;
847  param=SGIO::skip_spaces(param);
848 
849  if (classifier)
850  {
851  FILE* file=fopen(param, "w");
852  CSerializableAsciiFile* ascii_file = new CSerializableAsciiFile(file,'w');
853 
854  if ((!ascii_file) || (!classifier->save_serializable(ascii_file)))
855  printf("writing to file %s failed!\n", param);
856  else
857  {
858  printf("successfully written classifier into \"%s\" !\n", param);
859  result=true;
860  }
861 
862  if (ascii_file)
863  delete ascii_file;
864  }
865  else
866  SG_ERROR("create classifier first\n")
867 
868  return result;
869 }
870 
871 bool CGUIClassifier::set_perceptron_parameters(
872  float64_t learnrate, int32_t maxiter)
873 {
874  if (learnrate<=0)
876  else
877  perceptron_learnrate=learnrate;
878 
879  if (maxiter<=0)
880  perceptron_maxiter=1000;
881  else
882  perceptron_maxiter=maxiter;
883  SG_INFO("Setting to perceptron parameters (learnrate %f and maxiter: %d\n", perceptron_learnrate, perceptron_maxiter)
884 
885  return true;
886 }
887 
888 bool CGUIClassifier::set_svm_epsilon(float64_t epsilon)
889 {
890  if (epsilon<0)
891  svm_epsilon=1e-4;
892  else
893  svm_epsilon=epsilon;
894  SG_INFO("Set to svm_epsilon=%f.\n", svm_epsilon)
895 
896  return true;
897 }
898 
899 bool CGUIClassifier::set_max_train_time(float64_t max)
900 {
901  if (max>0)
902  {
903  max_train_time=max;
904  SG_INFO("Set to max_train_time=%f.\n", max_train_time)
905  }
906  else
907  SG_INFO("Disabling max_train_time.\n")
908 
909  return true;
910 }
911 
912 bool CGUIClassifier::set_svr_tube_epsilon(float64_t tube_epsilon)
913 {
914  if (!classifier)
915  SG_ERROR("No regression method allocated\n")
916 
920  {
921  SG_ERROR("Underlying method not capable of SV-regression\n")
922  }
923 
924  if (tube_epsilon<0)
925  svm_tube_epsilon=1e-2;
926  svm_tube_epsilon=tube_epsilon;
927 
928  ((CSVM*) classifier)->set_tube_epsilon(svm_tube_epsilon);
929  SG_INFO("Set to svr_tube_epsilon=%f.\n", svm_tube_epsilon)
930 
931  return true;
932 }
933 
934 bool CGUIClassifier::set_svm_nu(float64_t nu)
935 {
936  if (nu<0 || nu>1)
937  nu=0.5;
938 
939  svm_nu=nu;
940  SG_INFO("Set to nu=%f.\n", svm_nu)
941 
942  return true;
943 }
944 
945 bool CGUIClassifier::set_svm_mkl_parameters(
946  float64_t weight_epsilon, float64_t C, float64_t norm)
947 {
948  if (weight_epsilon<0)
949  weight_epsilon=1e-4;
950  if (C<0)
951  C=0;
952  if (norm<0)
953  SG_ERROR("MKL norm >= 0\n")
954 
955  svm_weight_epsilon=weight_epsilon;
956  C_mkl=C;
957  mkl_norm=norm;
958 
959  SG_INFO("Set to weight_epsilon=%f.\n", svm_weight_epsilon)
960  SG_INFO("Set to C_mkl=%f.\n", C_mkl)
961  SG_INFO("Set to mkl_norm=%f.\n", mkl_norm)
962 
963  return true;
964 }
965 
966 bool CGUIClassifier::set_elasticnet_lambda(float64_t lambda)
967 {
968  if (lambda<0 || lambda>1)
969  SG_ERROR("0 <= ent_lambda <= 1\n")
970 
971  ent_lambda = lambda;
972  return true;
973 }
974 
975 bool CGUIClassifier::set_mkl_block_norm(float64_t mkl_bnorm)
976 {
977  if (mkl_bnorm<1)
978  SG_ERROR("1 <= mkl_block_norm <= inf\n")
979 
980  mkl_block_norm=mkl_bnorm;
981  return true;
982 }
983 
984 
985 bool CGUIClassifier::set_svm_C(float64_t C1, float64_t C2)
986 {
987  if (C1<0)
988  svm_C1=1.0;
989  else
990  svm_C1=C1;
991 
992  if (C2<0)
993  svm_C2=svm_C1;
994  else
995  svm_C2=C2;
996 
997  SG_INFO("Set to C1=%f C2=%f.\n", svm_C1, svm_C2)
998 
999  return true;
1000 }
1001 
1002 bool CGUIClassifier::set_svm_qpsize(int32_t qpsize)
1003 {
1004  if (qpsize<2)
1005  svm_qpsize=41;
1006  else
1007  svm_qpsize=qpsize;
1008  SG_INFO("Set qpsize to svm_qpsize=%d.\n", svm_qpsize)
1009 
1010  return true;
1011 }
1012 
1013 bool CGUIClassifier::set_svm_max_qpsize(int32_t max_qpsize)
1014 {
1015  if (max_qpsize<50)
1016  svm_max_qpsize=50;
1017  else
1018  svm_max_qpsize=max_qpsize;
1019  SG_INFO("Set max qpsize to svm_max_qpsize=%d.\n", svm_max_qpsize)
1020 
1021  return true;
1022 }
1023 
1024 bool CGUIClassifier::set_svm_bufsize(int32_t bufsize)
1025 {
1026  if (svm_bufsize<0)
1027  svm_bufsize=3000;
1028  else
1029  svm_bufsize=bufsize;
1030  SG_INFO("Set bufsize to svm_bufsize=%d.\n", svm_bufsize)
1031 
1032  return true ;
1033 }
1034 
1035 bool CGUIClassifier::set_svm_shrinking_enabled(bool enabled)
1036 {
1037  svm_use_shrinking=enabled;
1038  if (svm_use_shrinking)
1039  SG_INFO("Enabling shrinking optimization.\n")
1040  else
1041  SG_INFO("Disabling shrinking optimization.\n")
1042 
1043  return true;
1044 }
1045 
1046 bool CGUIClassifier::set_svm_batch_computation_enabled(bool enabled)
1047 {
1048  svm_use_batch_computation=enabled;
1049  if (svm_use_batch_computation)
1050  SG_INFO("Enabling batch computation.\n")
1051  else
1052  SG_INFO("Disabling batch computation.\n")
1053 
1054  return true;
1055 }
1056 
1057 bool CGUIClassifier::set_svm_linadd_enabled(bool enabled)
1058 {
1059  svm_use_linadd=enabled;
1060  if (svm_use_linadd)
1061  SG_INFO("Enabling LINADD optimization.\n")
1062  else
1063  SG_INFO("Disabling LINADD optimization.\n")
1064 
1065  return true;
1066 }
1067 
1068 bool CGUIClassifier::set_svm_bias_enabled(bool enabled)
1069 {
1070  svm_use_bias=enabled;
1071  if (svm_use_bias)
1072  SG_INFO("Enabling svm bias.\n")
1073  else
1074  SG_INFO("Disabling svm bias.\n")
1075 
1076  return true;
1077 }
1078 
1079 bool CGUIClassifier::set_mkl_interleaved_enabled(bool enabled)
1080 {
1081  mkl_use_interleaved=enabled;
1082  if (mkl_use_interleaved)
1083  SG_INFO("Enabling mkl interleaved optimization.\n")
1084  else
1085  SG_INFO("Disabling mkl interleaved optimization.\n")
1086 
1087  return true;
1088 }
1089 
1090 bool CGUIClassifier::set_do_auc_maximization(bool do_auc)
1091 {
1092  svm_do_auc_maximization=do_auc;
1093 
1094  if (svm_do_auc_maximization)
1095  SG_INFO("Enabling AUC maximization.\n")
1096  else
1097  SG_INFO("Disabling AUC maximization.\n")
1098 
1099  return true;
1100 }
1101 
1102 
1103 CLabels* CGUIClassifier::classify()
1104 {
1106 
1107  switch (classifier->get_classifier_type())
1108  {
1109  case CT_LIGHT:
1110  case CT_LIGHTONECLASS:
1111  case CT_LIBSVM:
1112  case CT_SCATTERSVM:
1113  case CT_MPD:
1114  case CT_GPBT:
1115  case CT_CPLEXSVM:
1116  case CT_GMNPSVM:
1117  case CT_GNPPSVM:
1118  case CT_LIBSVR:
1119  case CT_LIBSVMMULTICLASS:
1120  case CT_LIBSVMONECLASS:
1121  case CT_SVRLIGHT:
1122  case CT_MKLCLASSIFICATION:
1123  case CT_MKLMULTICLASS:
1124  case CT_MKLREGRESSION:
1125  case CT_MKLONECLASS:
1127  return classify_kernelmachine();
1128  case CT_KNN:
1129  return classify_distancemachine();
1130  case CT_PERCEPTRON:
1131  case CT_LDA:
1132  return classify_linear();
1133  case CT_SVMLIN:
1134  case CT_SVMPERF:
1135  case CT_SVMOCAS:
1136  case CT_SVMSGD:
1137  case CT_LPM:
1138  case CT_LPBOOST:
1139  case CT_LIBLINEAR:
1140  return classify_linear();
1141 #ifdef USE_GPL_SHOGUN
1142  case CT_WDSVMOCAS:
1143  return classify_byte_linear();
1144 #endif
1145  default:
1146  SG_ERROR("unknown classifier type\n")
1147  break;
1148  };
1149 
1150  return NULL;
1151 }
1152 
1153 CLabels* CGUIClassifier::classify_kernelmachine()
1154 {
1155  CFeatures* trainfeatures=ui->ui_features->get_train_features();
1156  CFeatures* testfeatures=ui->ui_features->get_test_features();
1157 
1158  if (!classifier)
1159  SG_ERROR("No kernelmachine available.\n")
1160 
1161  bool success=true;
1162 
1163  REQUIRE(ui->ui_kernel->get_kernel(), "No kernel set");
1164  if (ui->ui_kernel->get_kernel()->get_kernel_type()!=K_CUSTOM)
1165  {
1166  if (ui->ui_kernel->get_kernel()->get_kernel_type()==K_COMBINED
1167  && ( !trainfeatures || !testfeatures ))
1168  {
1169  SG_DEBUG("skipping initialisation of combined kernel "
1170  "as train/test features are unavailable\n")
1171  }
1172  else
1173  {
1174  if (!trainfeatures)
1175  SG_ERROR("No training features available.\n")
1176  if (!testfeatures)
1177  SG_ERROR("No test features available.\n")
1178 
1179  success=ui->ui_kernel->init_kernel("TEST");
1180  }
1181  }
1182 
1183  if (!success || !ui->ui_kernel->is_initialized())
1184  SG_ERROR("Kernel not initialized.\n")
1185 
1186  EMachineType type=classifier->get_classifier_type();
1187  if (type==CT_LARANK || type==CT_GMNPSVM || type==CT_LIBSVMMULTICLASS ||
1188  type==CT_MKLMULTICLASS)
1189  {
1190  CKernelMulticlassMachine* kmcm= (CKernelMulticlassMachine*) classifier;
1191  kmcm->set_kernel(ui->ui_kernel->get_kernel());
1192  }
1193  else
1194  {
1195  CKernelMachine* km= (CKernelMachine*) classifier;
1196  km->set_kernel(ui->ui_kernel->get_kernel());
1198  }
1199 
1200  SG_INFO("Starting kernel machine testing.\n")
1201  return classifier->apply();
1202 }
1203 
1204 bool CGUIClassifier::get_trained_classifier(
1205  float64_t* &weights, int32_t &rows, int32_t &cols, float64_t*& bias,
1206  int32_t& brows, int32_t& bcols,
1207  int32_t idx) // which SVM for Multiclass
1208 {
1210 
1211  switch (classifier->get_classifier_type())
1212  {
1213  case CT_SCATTERSVM:
1214  case CT_GNPPSVM:
1215  case CT_LIBSVMMULTICLASS:
1216  case CT_LIGHT:
1217  case CT_LIGHTONECLASS:
1218  case CT_LIBSVM:
1219  case CT_MPD:
1220  case CT_GPBT:
1221  case CT_CPLEXSVM:
1222  case CT_GMNPSVM:
1223  case CT_LIBSVR:
1224  case CT_LIBSVMONECLASS:
1225  case CT_SVRLIGHT:
1226  case CT_MKLCLASSIFICATION:
1227  case CT_MKLREGRESSION:
1228  case CT_MKLONECLASS:
1229  case CT_MKLMULTICLASS:
1231  return get_svm(weights, rows, cols, bias, brows, bcols, idx);
1232  break;
1233  case CT_PERCEPTRON:
1234  case CT_LDA:
1235  case CT_LPM:
1236  case CT_LPBOOST:
1237  case CT_SVMOCAS:
1238  case CT_SVMSGD:
1239  case CT_SVMLIN:
1240  case CT_SVMPERF:
1241  case CT_LIBLINEAR:
1242  return get_linear(weights, rows, cols, bias, brows, bcols);
1243  break;
1244  case CT_KMEANS:
1245  case CT_HIERARCHICAL:
1246  return get_clustering(weights, rows, cols, bias, brows, bcols);
1247  break;
1248  case CT_KNN:
1249  SG_ERROR("not implemented")
1250  break;
1251  default:
1252  SG_ERROR("unknown classifier type\n")
1253  break;
1254  };
1255  return false;
1256 }
1257 
1258 
1259 int32_t CGUIClassifier::get_num_svms()
1260 {
1262  return ((CMulticlassSVM*) classifier)->get_num_machines();
1263 }
1264 
1265 bool CGUIClassifier::get_svm(
1266  float64_t* &weights, int32_t& rows, int32_t& cols, float64_t*& bias,
1267  int32_t& brows, int32_t& bcols, int32_t idx)
1268 {
1269  CSVM* svm=(CSVM*) classifier;
1270 
1271  if (idx>-1) // should be MulticlassSVM
1272  svm=((CMulticlassSVM*) svm)->get_svm(idx);
1273 
1274  if (svm)
1275  {
1276  brows=1;
1277  bcols=1;
1278  bias=SG_MALLOC(float64_t, 1);
1279  *bias=svm->get_bias();
1280 
1281  rows=svm->get_num_support_vectors();
1282  cols=2;
1283  weights=SG_MALLOC(float64_t, rows*cols);
1284 
1285  for (int32_t i=0; i<rows; i++)
1286  {
1287  weights[i]=svm->get_alpha(i);
1288  weights[i+rows]=svm->get_support_vector(i);
1289  }
1290 
1291  return true;
1292  }
1293 
1294  return false;
1295 }
1296 
1297 bool CGUIClassifier::get_clustering(
1298  float64_t* &centers, int32_t& rows, int32_t& cols, float64_t*& radi,
1299  int32_t& brows, int32_t& bcols)
1300 {
1301  if (!classifier)
1302  return false;
1303 
1304  switch (classifier->get_classifier_type())
1305  {
1306  case CT_KMEANS:
1307  {
1308  CKMeans* clustering=(CKMeans*) classifier;
1309 
1310  bcols=1;
1311  SGVector<float64_t> r=clustering->get_radiuses();
1312  brows=r.vlen;
1313  radi=SG_MALLOC(float64_t, brows);
1314  memcpy(radi, r.vector, sizeof(float64_t)*brows);
1315 
1316  cols=1;
1317  SGMatrix<float64_t> c=clustering->get_cluster_centers();
1318  rows=c.num_rows;
1319  cols=c.num_cols;
1320  centers=SG_MALLOC(float64_t, rows*cols);
1321  memcpy(centers, c.matrix, sizeof(float64_t)*rows*cols);
1322  break;
1323  }
1324 
1325  case CT_HIERARCHICAL:
1326  {
1327  CHierarchical* clustering=(CHierarchical*) classifier;
1328 
1329  // radi == merge_distances, centers == pairs
1330  bcols=1;
1331  SGVector<float64_t> r=clustering->get_merge_distances();
1332  brows=r.vlen;
1333  radi=SG_MALLOC(float64_t, brows);
1334  memcpy(radi, r.vector, sizeof(float64_t)*brows);
1335 
1336  SGMatrix<int32_t> p=clustering->get_cluster_pairs();
1337  rows=p.num_rows;
1338  cols=p.num_cols;
1339  centers=SG_MALLOC(float64_t, rows*cols);
1340  for (int32_t i=0; i<rows*cols; i++)
1341  centers[i]=(float64_t) p.matrix[i];
1342 
1343  break;
1344  }
1345 
1346  default:
1347  SG_ERROR("internal error - unknown clustering type\n")
1348  }
1349 
1350  return true;
1351 }
1352 
1353 bool CGUIClassifier::get_linear(
1354  float64_t* &weights, int32_t& rows, int32_t& cols, float64_t*& bias,
1355  int32_t& brows, int32_t& bcols)
1356 {
1357  CLinearMachine* linear= (CLinearMachine*) classifier;
1358 
1359  if (!linear)
1360  return false;
1361 
1362  bias=SG_MALLOC(float64_t, 1);
1363  *bias=linear->get_bias();
1364  brows=1;
1365  bcols=1;
1366 
1367  SGVector<float64_t> w=linear->get_w();
1368  cols=1;
1369  rows=w.vlen;
1370 
1371  weights= SG_MALLOC(float64_t, w.vlen);
1372  memcpy(weights, w.vector, sizeof(float64_t)*w.vlen);
1373 
1374  return true;
1375 }
1376 
1377 CLabels* CGUIClassifier::classify_distancemachine()
1378 {
1379  CFeatures* trainfeatures=ui->ui_features->get_train_features();
1380  CFeatures* testfeatures=ui->ui_features->get_test_features();
1381 
1382  if (!classifier)
1383  {
1384  SG_ERROR("no kernelmachine available\n")
1385  return NULL;
1386  }
1387  if (!trainfeatures)
1388  {
1389  SG_ERROR("no training features available\n")
1390  return NULL;
1391  }
1392 
1393  if (!testfeatures)
1394  {
1395  SG_ERROR("no test features available\n")
1396  return NULL;
1397  }
1398 
1399  bool success=ui->ui_distance->init_distance("TEST");
1400 
1401  if (!success || !ui->ui_distance->is_initialized())
1402  {
1403  SG_ERROR("distance not initialized\n")
1404  return NULL;
1405  }
1406 
1407  ((CDistanceMachine*) classifier)->set_distance(
1408  ui->ui_distance->get_distance());
1409  SG_INFO("starting distance machine testing\n")
1410  return classifier->apply();
1411 }
1412 
1413 
1414 CLabels* CGUIClassifier::classify_linear()
1415 {
1416  CFeatures* testfeatures=ui->ui_features->get_test_features();
1417 
1418  if (!classifier)
1419  {
1420  SG_ERROR("no classifier available\n")
1421  return NULL;
1422  }
1423  if (!testfeatures)
1424  {
1425  SG_ERROR("no test features available\n")
1426  return NULL;
1427  }
1428  if (!(testfeatures->has_property(FP_DOT)))
1429  {
1430  SG_ERROR("testfeatures not based on DotFeatures\n")
1431  return NULL;
1432  }
1433 
1434  ((CLinearMachine*) classifier)->set_features((CDotFeatures*) testfeatures);
1435  SG_INFO("starting linear classifier testing\n")
1436  return classifier->apply();
1437 }
1438 
1439 #ifdef USE_GPL_SHOGUN
1440 CLabels* CGUIClassifier::classify_byte_linear()
1441 {
1442  CFeatures* testfeatures=ui->ui_features->get_test_features();
1443 
1444  if (!classifier)
1445  {
1446  SG_ERROR("no svm available\n")
1447  return NULL;
1448  }
1449  if (!testfeatures)
1450  {
1451  SG_ERROR("no test features available\n")
1452  return NULL;
1453  }
1454  if (testfeatures->get_feature_class() != C_STRING ||
1455  testfeatures->get_feature_type() != F_BYTE )
1456  {
1457  SG_ERROR("testfeatures not of class STRING type BYTE\n")
1458  return NULL;
1459  }
1460 
1461  ((CWDSVMOcas*) classifier)->set_features((CStringFeatures<uint8_t>*) testfeatures);
1462  SG_INFO("starting linear classifier testing\n")
1463  return classifier->apply();
1464 }
1465 #endif //USE_GPL_SHOGUN
1466 
1467 bool CGUIClassifier::classify_example(int32_t idx, float64_t &result)
1468 {
1469  CFeatures* trainfeatures=ui->ui_features->get_train_features();
1470  CFeatures* testfeatures=ui->ui_features->get_test_features();
1471 
1472  if (!classifier)
1473  {
1474  SG_ERROR("no svm available\n")
1475  return false;
1476  }
1477 
1478  if (!ui->ui_kernel->is_initialized())
1479  {
1480  SG_ERROR("kernel not initialized\n")
1481  return false;
1482  }
1483 
1484  if (!ui->ui_kernel->get_kernel() ||
1485  ui->ui_kernel->get_kernel()->get_kernel_type()!=K_CUSTOM)
1486  {
1487  if (!trainfeatures)
1488  {
1489  SG_ERROR("no training features available\n")
1490  return false;
1491  }
1492 
1493  if (!testfeatures)
1494  {
1495  SG_ERROR("no test features available\n")
1496  return false;
1497  }
1498  }
1499 
1500  ((CKernelMachine*) classifier)->set_kernel(
1501  ui->ui_kernel->get_kernel());
1502 
1503  result=((CKernelMachine*)classifier)->apply_one(idx);
1504  return true ;
1505 }
1506 
1507 
1508 bool CGUIClassifier::set_krr_tau(float64_t tau)
1509 {
1510 #ifdef HAVE_LAPACK
1511  krr_tau=tau;
1512  ((CKernelRidgeRegression*) classifier)->set_tau(krr_tau);
1513  SG_INFO("Set to krr_tau=%f.\n", krr_tau)
1514 
1515  return true;
1516 #else
1517  return false;
1518 #endif
1519 }
1520 
1521 bool CGUIClassifier::set_solver(char* solver)
1522 {
1523  ESolverType s=ST_AUTO;
1524 
1525  if (strncmp(solver,"NEWTON", 6)==0)
1526  {
1527  SG_INFO("Using NEWTON solver.\n")
1528  s=ST_NEWTON;
1529  }
1530  else if (strncmp(solver,"DIRECT", 6)==0)
1531  {
1532  SG_INFO("Using DIRECT solver\n")
1533  s=ST_DIRECT;
1534  }
1535  else if (strncmp(solver,"BLOCK_NORM", 9)==0)
1536  {
1537  SG_INFO("Using BLOCK_NORM solver\n")
1538  s=ST_BLOCK_NORM;
1539  }
1540  else if (strncmp(solver,"ELASTICNET", 10)==0)
1541  {
1542  SG_INFO("Using ELASTICNET solver\n")
1543  s=ST_ELASTICNET;
1544  }
1545  else if (strncmp(solver,"AUTO", 4)==0)
1546  {
1547  SG_INFO("Automagically determining solver.\n")
1548  s=ST_AUTO;
1549  }
1550 #ifdef USE_CPLEX
1551  else if (strncmp(solver, "CPLEX", 5)==0)
1552  {
1553  SG_INFO("USING CPLEX METHOD selected\n")
1554  s=ST_CPLEX;
1555  }
1556 #endif
1557 #ifdef USE_GLPK
1558  else if (strncmp(solver,"GLPK", 4)==0)
1559  {
1560  SG_INFO("Using GLPK solver\n")
1561  s=ST_GLPK;
1562  }
1563 #endif
1564  else
1565  SG_ERROR("Unknown solver type, %s (not compiled in?)\n", solver)
1566 
1567 
1568  solver_type=s;
1569  return true;
1570 }
1571 
1572 bool CGUIClassifier::set_constraint_generator(char* name)
1573 {
1574  if (strcmp(name,"LIBSVM_ONECLASS")==0)
1575  {
1577  constraint_generator= new CLibSVMOneClass();
1578  SG_INFO("created SVMlibsvm object for oneclass\n")
1579  }
1580  else if (strcmp(name,"LIBSVM_NU")==0)
1581  {
1583  constraint_generator= new CLibSVM(LIBSVM_NU_SVC);
1584  SG_INFO("created SVMlibsvm object\n")
1585  }
1586  else if (strcmp(name,"LIBSVM")==0)
1587  {
1589  constraint_generator= new CLibSVM();
1590  SG_INFO("created SVMlibsvm object\n")
1591  }
1592 #ifdef USE_SVMLIGHT
1593  else if ((strcmp(name,"LIGHT")==0) || (strcmp(name,"SVMLIGHT")==0))
1594  {
1596  constraint_generator= new CSVMLight();
1597  SG_INFO("created SVMLight object\n")
1598  }
1599  else if (strcmp(name,"SVMLIGHT_ONECLASS")==0)
1600  {
1602  constraint_generator= new CSVMLightOneClass();
1603  SG_INFO("created SVMLightOneClass object\n")
1604  }
1605  else if (strcmp(name,"SVRLIGHT")==0)
1606  {
1608  constraint_generator= new CSVRLight();
1609  SG_INFO("created SVRLight object\n")
1610  }
1611 #endif //USE_SVMLIGHT
1612 #ifdef USE_GPL_SHOGUN
1613  else if (strcmp(name,"GPBTSVM")==0)
1614  {
1616  constraint_generator= new CGPBTSVM();
1617  SG_INFO("created GPBT-SVM object\n")
1618  }
1619 #endif //USE_GPL_SHOGUN
1620  else if (strcmp(name,"MPDSVM")==0)
1621  {
1623  constraint_generator= new CMPDSVM();
1624  SG_INFO("created MPD-SVM object\n")
1625  }
1626  else if (strcmp(name,"GNPPSVM")==0)
1627  {
1629  constraint_generator= new CGNPPSVM();
1630  SG_INFO("created GNPP-SVM object\n")
1631  }
1632  else if (strcmp(name,"LIBSVR")==0)
1633  {
1635  constraint_generator= new CLibSVR();
1636  SG_INFO("created SVRlibsvm object\n")
1637  }
1638  else
1639  {
1640  SG_ERROR("Unknown SV-classifier %s.\n", name)
1641  return false;
1642  }
1644 
1645  return (constraint_generator!=NULL);
1646 }