SHOGUN 4.2.0
KNN.cpp
/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * Written (W) 2006 Christian Gehl
 * Written (W) 2006-2009 Soeren Sonnenburg
 * Written (W) 2011 Sergey Lisitsyn
 * Written (W) 2012 Fernando José Iglesias García, cover tree support
 * Copyright (C) 2011 Berlin Institute of Technology and Max-Planck-Society
 */

#include <shogun/multiclass/KNN.h>
#include <shogun/labels/Labels.h>
#include <shogun/labels/MulticlassLabels.h>
#include <shogun/mathematics/Math.h>
#include <shogun/lib/Signal.h>
#include <shogun/lib/JLCoverTree.h>
#include <shogun/lib/Time.h>
#include <shogun/base/Parameter.h>
#include <shogun/multiclass/tree/KDTree.h>
#include <shogun/mathematics/eigen3.h>

#ifdef HAVE_CXX11
#include <shogun/lib/external/falconn/lsh_nn_table.h>
#endif

//#define DEBUG_KNN

using namespace shogun;
using namespace Eigen;

CKNN::CKNN()
: CDistanceMachine()
{
	init();
}

CKNN::CKNN(int32_t k, CDistance* d, CLabels* trainlab, KNN_SOLVER knn_solver)
: CDistanceMachine()
{
	init();

	m_k=k;

	ASSERT(d)
	ASSERT(trainlab)

	set_distance(d);
	set_labels(trainlab);
	m_knn_solver=knn_solver;
}

void CKNN::init()
{
	/* do not store model features by default (CDistanceMachine::apply(...) is
	 * overwritten) */
	set_store_model_features(false);

	m_k=3;
	m_q=1.0;
	m_num_classes=0;
	m_leaf_size=1;
	m_knn_solver=KNN_BRUTE;
#ifdef HAVE_CXX11
	m_lsh_l = 0;
	m_lsh_t = 0;
#endif

	/* use the method classify_for_multiple_k to experiment with different
	 * values of k */
	SG_ADD(&m_k, "m_k", "Parameter k", MS_NOT_AVAILABLE);
	SG_ADD(&m_q, "m_q", "Parameter q", MS_AVAILABLE);
	SG_ADD(&m_num_classes, "m_num_classes", "Number of classes", MS_NOT_AVAILABLE);
	SG_ADD(&m_leaf_size, "m_leaf_size", "Leaf size for KDTree", MS_NOT_AVAILABLE);
	SG_ADD((machine_int_t*) &m_knn_solver, "m_knn_solver", "Algorithm to solve knn", MS_NOT_AVAILABLE);
}

CKNN::~CKNN()
{
}

bool CKNN::train_machine(CFeatures* data)
{
	ASSERT(m_labels)
	ASSERT(distance)

	if (data)
	{
		if (m_labels->get_num_labels() != data->get_num_vectors())
			SG_ERROR("Number of training vectors does not match number of labels\n")
		distance->init(data, data);
	}

	SGVector<int32_t> lab=((CMulticlassLabels*) m_labels)->get_int_labels();
	m_train_labels=lab.clone();
	ASSERT(m_train_labels.vlen>0)

	int32_t max_class=m_train_labels[0];
	int32_t min_class=m_train_labels[0];

	for (int32_t i=1; i<m_train_labels.vlen; i++)
	{
		max_class=CMath::max(max_class, m_train_labels[i]);
		min_class=CMath::min(min_class, m_train_labels[i]);
	}

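	// shift the training labels so the smallest becomes 0; e.g. labels
	// {-1,+1} are stored as {0,2}, and m_num_classes below becomes 3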
	for (int32_t i=0; i<m_train_labels.vlen; i++)
		m_train_labels[i]-=min_class;

	m_min_label=min_class;
	m_num_classes=max_class-min_class+1;

	SG_INFO("m_num_classes: %d (%+d to %+d) num_train: %d\n", m_num_classes,
			min_class, max_class, m_train_labels.vlen);

	return true;
}

SGMatrix<index_t> CKNN::nearest_neighbors()
{
	//number of examples to which kNN is applied
	int32_t n=distance->get_num_vec_rhs();
	//distances to train data
	float64_t* dists=SG_MALLOC(float64_t, m_train_labels.vlen);
	//indices to train data
	index_t* train_idxs=SG_MALLOC(index_t, m_train_labels.vlen);
	//pre-allocation of the nearest neighbors
	SGMatrix<index_t> NN(m_k, n);

	distance->precompute_lhs();

	//for each test example
	for (int32_t i=0; i<n && (!CSignal::cancel_computations()); i++)
	{
		SG_PROGRESS(i, 0, n)

		//lhs idx 0..num train examples-1 (i.e., all train examples) and rhs idx i
		distances_lhs(dists,0,m_train_labels.vlen-1,i);

		//fill in an array with 0..num train examples-1
		for (int32_t j=0; j<m_train_labels.vlen; j++)
			train_idxs[j]=j;

		//sort the distance vector between test example i and all train examples
		CMath::qsort_index(dists, train_idxs, m_train_labels.vlen);

#ifdef DEBUG_KNN
		SG_PRINT("\nQuick sort query %d\n", i)
		for (int32_t j=0; j<m_k; j++)
			SG_PRINT("%d ", train_idxs[j])
		SG_PRINT("\n")
#endif

		//fill in the output with the indices of the nearest neighbors
		for (int32_t j=0; j<m_k; j++)
			NN(j,i) = train_idxs[j];
	}

	distance->reset_precompute();

	SG_FREE(train_idxs);
	SG_FREE(dists);

	return NN;
}
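
/* The matrix NN returned above is m_k x n, one column per query vector:
 * NN(0,i) is the index of the training vector closest to test vector i,
 * NN(1,i) the second closest, and so on. A minimal caller-side sketch
 * (knn is an assumed, already-trained CKNN whose distance holds the test
 * vectors on its right-hand side; nearest_neighbors() is public):
 *
 *   SGMatrix<index_t> NN = knn->nearest_neighbors();
 *   for (index_t i = 0; i < NN.num_cols; ++i)
 *       SG_SPRINT("test %d -> train %d\n", i, NN(0, i));
 */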

CMulticlassLabels* CKNN::apply_multiclass(CFeatures* data)
{
	if (data)
		init_distance(data);

	//redirecting to fast (without sorting) classify if k==1
	if (m_k == 1)
		return classify_NN();

	ASSERT(m_num_classes>0)
	ASSERT(distance)
	ASSERT(distance->get_num_vec_rhs())

	int32_t num_lab=distance->get_num_vec_rhs();
	ASSERT(m_k<=distance->get_num_vec_lhs())

	CMulticlassLabels* output=new CMulticlassLabels(num_lab);

	//labels of the k nearest neighbors
	int32_t* train_lab=SG_MALLOC(int32_t, m_k);

	SG_INFO("%d test examples\n", num_lab)
	CSignal::clear_cancel();

	//histogram of classes and returned output
	float64_t* classes=SG_MALLOC(float64_t, m_num_classes);

	switch (m_knn_solver)
	{
	case KNN_BRUTE:
	{
		//get the k nearest neighbors of each example
		SGMatrix<index_t> NN = nearest_neighbors();

		//from the indices to the nearest neighbors, compute the class labels
		for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
		{
			//write the labels of the k nearest neighbors from their indices
			for (int32_t j=0; j<m_k; j++)
				train_lab[j] = m_train_labels[ NN(j,i) ];

			//get the index of the 'nearest' class
			int32_t out_idx = choose_class(classes, train_lab);
			//write the label of 'nearest' in the output
			output->set_label(i, out_idx + m_min_label);
		}

		break;
	}
	case KNN_COVER_TREE: // Use cover tree
	{
		// m_q != 1.0 not supported with cover tree because the neighbors
		// are not retrieved in increasing order of distance to the query
		float64_t old_q = m_q;
		if ( old_q != 1.0 )
			SG_INFO("q != 1.0 not supported with cover tree, using q = 1\n")

		// From the sets of features (lhs and rhs) stored in distance,
		// build arrays of cover tree points
		v_array< CJLCoverTreePoint > set_of_points  =
			parse_points(distance, FC_LHS);
		v_array< CJLCoverTreePoint > set_of_queries =
			parse_points(distance, FC_RHS);

		// Build the cover trees, one for the test vectors (rhs features)
		// and another for the training vectors (lhs features)
		CFeatures* r = distance->replace_rhs( distance->get_lhs() );
		node< CJLCoverTreePoint > top = batch_create(set_of_points);
		CFeatures* l = distance->replace_lhs(r);
		distance->replace_rhs(r);
		node< CJLCoverTreePoint > top_query = batch_create(set_of_queries);

		// Get the k nearest neighbors to all the test vectors (batch method)
		distance->replace_lhs(l);
		v_array< v_array< CJLCoverTreePoint > > res;
		k_nearest_neighbor(top, top_query, res, m_k);

#ifdef DEBUG_KNN
		SG_PRINT("\nJL Results:\n")
		for ( int32_t i = 0 ; i < res.index ; ++i )
		{
			for ( int32_t j = 0 ; j < res[i].index ; ++j )
			{
				printf("%d ", res[i][j].m_index);
			}
			printf("\n");
		}
		SG_PRINT("\n")
#endif

		for ( int32_t i = 0 ; i < res.index ; ++i )
		{
			// Translate from indices to labels of the nearest neighbors
			for ( int32_t j = 0; j < m_k; ++j )
				// The first index in res[i] points to the test vector
				train_lab[j] = m_train_labels.vector[ res[i][j+1].m_index ];

			// Get the index of the 'nearest' class
			int32_t out_idx = choose_class(classes, train_lab);
			output->set_label(res[i][0].m_index, out_idx+m_min_label);
		}

		m_q = old_q;

		break;
	}
	case KNN_KDTREE:
	{
		CFeatures* lhs = distance->get_lhs();
		CKDTree* kd_tree = new CKDTree(m_leaf_size);
		kd_tree->build_tree(dynamic_cast<CDenseFeatures<float64_t>*>(lhs));
		SG_UNREF(lhs);

		CFeatures* query = distance->get_rhs();
		kd_tree->query_knn(dynamic_cast<CDenseFeatures<float64_t>*>(query), m_k);
		SGMatrix<index_t> NN = kd_tree->get_knn_indices();
		for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
		{
			//write the labels of the k nearest neighbors from their indices
			for (int32_t j=0; j<m_k; j++)
				train_lab[j] = m_train_labels[ NN(j,i) ];

			//get the index of the 'nearest' class
			int32_t out_idx = choose_class(classes, train_lab);
			//write the label of 'nearest' in the output
			output->set_label(i, out_idx + m_min_label);
		}
		SG_UNREF(query);
		break;
	}
#ifdef HAVE_CXX11
	case KNN_LSH:
	{
		CDenseFeatures<float64_t>* features =
			dynamic_cast<CDenseFeatures<float64_t>*>(distance->get_lhs());
		std::vector<falconn::DenseVector<double>> feats;
		for(int32_t i=0; i < features->get_num_vectors(); i++)
		{
			int32_t len;
			bool free;
			float64_t* vec = features->get_feature_vector(i, len, free);
			falconn::DenseVector<double> temp = Map<VectorXd> (vec, len);
			feats.push_back(temp);
		}

		falconn::LSHConstructionParameters params
			= falconn::get_default_parameters<falconn::DenseVector<double>>(features->get_num_vectors(),
					features->get_num_features(),
					falconn::DistanceFunction::EuclideanSquared,
					true);
		SG_UNREF(features);
		if (m_lsh_l && m_lsh_t)
			params.l = m_lsh_l;

		auto lsh_table = falconn::construct_table<falconn::DenseVector<double>>(feats, params);
		if (m_lsh_t)
			lsh_table->set_num_probes(m_lsh_t);

		CDenseFeatures<float64_t>* query_features =
			dynamic_cast<CDenseFeatures<float64_t>*>(distance->get_rhs());
		std::vector<falconn::DenseVector<double>> query_feats;

		SGMatrix<index_t> NN (m_k, query_features->get_num_vectors());
		for(int32_t i=0; i < query_features->get_num_vectors(); i++)
		{
			int32_t len;
			bool free;
			float64_t* vec = query_features->get_feature_vector(i, len, free);
			falconn::DenseVector<double> temp = Map<VectorXd> (vec, len);
			auto indices = new std::vector<int32_t> ();
			lsh_table->find_k_nearest_neighbors(temp, (int_fast64_t)m_k, indices);
			memcpy(NN.get_column_vector(i), indices->data(), sizeof(int32_t)*m_k);
			delete indices;
		}

		for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
		{
			//write the labels of the k nearest neighbors from their indices
			for (int32_t j=0; j<m_k; j++)
				train_lab[j] = m_train_labels[ NN(j,i) ];

			//get the index of the 'nearest' class
			int32_t out_idx = choose_class(classes, train_lab);
			//write the label of 'nearest' in the output
			output->set_label(i, out_idx + m_min_label);
		}
		SG_UNREF(query_features);
		break;
	}
#endif /* HAVE_CXX11 */
	}

	SG_FREE(classes);
	SG_FREE(train_lab);

	return output;
}

CMulticlassLabels* CKNN::classify_NN()
{
	ASSERT(distance)
	ASSERT(m_num_classes>0)

	int32_t num_lab = distance->get_num_vec_rhs();
	ASSERT(num_lab)

	CMulticlassLabels* output = new CMulticlassLabels(num_lab);
	float64_t* distances = SG_MALLOC(float64_t, m_train_labels.vlen);

	SG_INFO("%d test examples\n", num_lab)
	CSignal::clear_cancel();

	distance->precompute_lhs();

	// for each test example
	for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
	{
		SG_PROGRESS(i,0,num_lab)

		// get distances from i-th test example to 0..num_m_train_labels-1 train examples
		distances_lhs(distances,0,m_train_labels.vlen-1,i);
		int32_t j;

		// assuming 0th train example as nearest to i-th test example
		int32_t out_idx = 0;
		float64_t min_dist = distances[0];

		// searching for nearest neighbor by comparing distances
		for (j=0; j<m_train_labels.vlen; j++)
		{
			if (distances[j]<min_dist)
			{
				min_dist = distances[j];
				out_idx = j;
			}
		}

		// label i-th test example with label of nearest neighbor with out_idx index
		output->set_label(i,m_train_labels.vector[out_idx]+m_min_label);
	}

	distance->reset_precompute();

	SG_FREE(distances);
	return output;
}

SGMatrix<int32_t> CKNN::classify_for_multiple_k()
{
	ASSERT(m_num_classes>0)
	ASSERT(distance)
	ASSERT(distance->get_num_vec_rhs())

	int32_t num_lab=distance->get_num_vec_rhs();
	ASSERT(m_k<=num_lab)

	int32_t* output=SG_MALLOC(int32_t, m_k*num_lab);

	//working buffer of m_train_labels
	int32_t* train_lab=SG_MALLOC(int32_t, m_k);

	//histogram of classes and returned output
	int32_t* classes=SG_MALLOC(int32_t, m_num_classes);

	SG_INFO("%d test examples\n", num_lab)
	CSignal::clear_cancel();

	switch (m_knn_solver)
	{
	case KNN_COVER_TREE: // Use cover tree
	{
		//allocation for distances to nearest neighbors
		float64_t* dists=SG_MALLOC(float64_t, m_k);

		// From the sets of features (lhs and rhs) stored in distance,
		// build arrays of cover tree points
		v_array< CJLCoverTreePoint > set_of_points  =
			parse_points(distance, FC_LHS);
		v_array< CJLCoverTreePoint > set_of_queries =
			parse_points(distance, FC_RHS);

		// Build the cover trees, one for the test vectors (rhs features)
		// and another for the training vectors (lhs features)
		CFeatures* r = distance->replace_rhs( distance->get_lhs() );
		node< CJLCoverTreePoint > top = batch_create(set_of_points);
		CFeatures* l = distance->replace_lhs(r);
		distance->replace_rhs(r);
		node< CJLCoverTreePoint > top_query = batch_create(set_of_queries);

		// Get the k nearest neighbors to all the test vectors (batch method)
		distance->replace_lhs(l);
		v_array< v_array< CJLCoverTreePoint > > res;
		k_nearest_neighbor(top, top_query, res, m_k);

		for ( int32_t i = 0 ; i < res.index ; ++i )
		{
			// Handle the fact that cover tree doesn't return neighbors
			// ordered by distance

			for ( int32_t j = 0 ; j < m_k ; ++j )
			{
				// The first index in res[i] points to the test vector
				dists[j] = distance->distance(res[i][j+1].m_index,
						res[i][0].m_index);
				train_lab[j] = m_train_labels.vector[
					res[i][j+1].m_index ];
			}

			// Now we get the indices to the neighbors sorted by distance
			CMath::qsort_index(dists, train_lab, m_k);

			choose_class_for_multiple_k(output+res[i][0].m_index, classes,
					train_lab, num_lab);
		}

		SG_FREE(dists);
		break;
	}
	case KNN_KDTREE:
	{
		//allocation for distances to nearest neighbors
		float64_t* dists=SG_MALLOC(float64_t, m_k);

		CFeatures* lhs = distance->get_lhs();
		CKDTree* kd_tree = new CKDTree(m_leaf_size);
		kd_tree->build_tree(dynamic_cast<CDenseFeatures<float64_t>*>(lhs));
		SG_UNREF(lhs);

		CFeatures* data = distance->get_rhs();
		kd_tree->query_knn(dynamic_cast<CDenseFeatures<float64_t>*>(data), m_k);
		SGMatrix<index_t> NN = kd_tree->get_knn_indices();
		for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
		{
			//write the labels of the k nearest neighbors from their indices
			for (int32_t j=0; j<m_k; j++)
			{
				train_lab[j] = m_train_labels[ NN(j,i) ];
				dists[j] = distance->distance(i, NN(j,i));
			}
			CMath::qsort_index(dists, train_lab, m_k);

			choose_class_for_multiple_k(output+i, classes, train_lab, num_lab);
		}
		// release the query features and the distance buffer (they were
		// leaked in the original version of this case)
		SG_UNREF(data);
		SG_FREE(dists);
		break;
	}
	default:
	{
		//get the k nearest neighbors of each example
		SGMatrix<index_t> NN = nearest_neighbors();

		for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
		{
			//write the labels of the k nearest neighbors from their indices
			for (int32_t j=0; j<m_k; j++)
				train_lab[j] = m_train_labels[ NN(j,i) ];

			choose_class_for_multiple_k(output+i, classes, train_lab, num_lab);
		}

	}

	}

	SG_FREE(train_lab);
	SG_FREE(classes);

	return SGMatrix<int32_t>(output,num_lab,m_k,true);
}
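
/* A hedged model-selection sketch (caller-side, not part of this file):
 * the matrix returned above is num_lab x m_k, and entry (i, j) is the
 * predicted label of test vector i when j+1 neighbors vote. Assuming a
 * trained CKNN* knn whose distance already holds the validation vectors
 * on its right-hand side, and an SGVector<int32_t> val_lab of true
 * labels, one could pick k like this:
 *
 *   SGMatrix<int32_t> multi = knn->classify_for_multiple_k();
 *   int32_t best_k = 1;
 *   float64_t best_acc = -1;
 *   for (index_t j = 0; j < multi.num_cols; ++j)   // j+1 = neighbors used
 *   {
 *       int32_t correct = 0;
 *       for (index_t i = 0; i < multi.num_rows; ++i)
 *           correct += (multi(i, j) == val_lab[i]);
 *       float64_t acc = float64_t(correct) / multi.num_rows;
 *       if (acc > best_acc)
 *       {
 *           best_acc = acc;
 *           best_k = j + 1;
 *       }
 *   }
 */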

void CKNN::init_distance(CFeatures* data)
{
	if (!distance)
		SG_ERROR("No distance assigned!\n")
	CFeatures* lhs=distance->get_lhs();
	if (!lhs || !lhs->get_num_vectors())
	{
		SG_UNREF(lhs);
		SG_ERROR("No vectors on left hand side\n")
	}
	distance->init(lhs, data);
	SG_UNREF(lhs);
}

bool CKNN::load(FILE* srcfile)
{
	SG_SET_LOCALE_C;
	SG_RESET_LOCALE;
	return false;
}

bool CKNN::save(FILE* dstfile)
{
	SG_SET_LOCALE_C;
	SG_RESET_LOCALE;
	return false;
}

void CKNN::store_model_features()
{
	CFeatures* d_lhs=distance->get_lhs();
	CFeatures* d_rhs=distance->get_rhs();

	/* copy lhs of underlying distance */
	distance->init(d_lhs->duplicate(), d_rhs);

	SG_UNREF(d_lhs);
	SG_UNREF(d_rhs);
}

int32_t CKNN::choose_class(float64_t* classes, int32_t* train_lab)
{
	memset(classes, 0, sizeof(float64_t)*m_num_classes);

	float64_t multiplier = m_q;
	for (int32_t j=0; j<m_k; j++)
	{
		classes[train_lab[j]]+= multiplier;
		multiplier*= multiplier;
	}

	//choose the class that got 'outputted' most often
	int32_t out_idx=0;
	float64_t out_max=0;

	for (int32_t j=0; j<m_num_classes; j++)
	{
		if (out_max< classes[j])
		{
			out_idx= j;
			out_max= classes[j];
		}
	}

	return out_idx;
}
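
/* Note on the weighting above: multiplier starts at m_q and is squared on
 * every iteration, so the j-th nearest neighbor (j = 0, 1, 2, ...) votes
 * with weight q^(2^j), i.e. q, q^2, q^4, ... With the default q = 1.0 all
 * k neighbors vote equally. A standalone sketch of the decay for q = 0.5:
 *
 *   double q = 0.5, multiplier = q;
 *   for (int j = 0; j < 5; ++j)
 *   {
 *       printf("neighbor %d: weight %g\n", j, multiplier);
 *       multiplier *= multiplier;  // q^(2^j) -> q^(2^(j+1))
 *   }
 */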

void CKNN::choose_class_for_multiple_k(int32_t* output, int32_t* classes, int32_t* train_lab, int32_t step)
{
	//compute histogram of class outputs of the first k nearest neighbours
	memset(classes, 0, sizeof(int32_t)*m_num_classes);

	for (int32_t j=0; j<m_k; j++)
	{
		classes[train_lab[j]]++;

		//choose the class that got 'outputted' most often
		int32_t out_idx=0;
		int32_t out_max=0;

		for (int32_t c=0; c<m_num_classes; c++)
		{
			if (out_max< classes[c])
			{
				out_idx= c;
				out_max= classes[c];
			}
		}

		output[j*step]=out_idx+m_min_label;
	}
}
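
/* For context, a minimal end-to-end usage sketch against the Shogun 4.2
 * API (toy data and variable names are illustrative, not from this file):
 *
 *   #include <shogun/base/init.h>
 *   #include <shogun/features/DenseFeatures.h>
 *   #include <shogun/distance/EuclideanDistance.h>
 *   #include <shogun/labels/MulticlassLabels.h>
 *   #include <shogun/multiclass/KNN.h>
 *
 *   using namespace shogun;
 *
 *   int main()
 *   {
 *       init_shogun_with_defaults();
 *
 *       // 2-dimensional toy data: 4 training vectors in 2 clusters
 *       SGMatrix<float64_t> X(2, 4);
 *       const float64_t xv[] = {0,0, 0,1, 5,5, 5,6};
 *       for (int32_t i = 0; i < 8; ++i)
 *           X.matrix[i] = xv[i];
 *       SGVector<float64_t> y(4);
 *       y[0] = 0; y[1] = 0; y[2] = 1; y[3] = 1;
 *
 *       CDenseFeatures<float64_t>* train = new CDenseFeatures<float64_t>(X);
 *       CMulticlassLabels* labels = new CMulticlassLabels(y);
 *       CKNN* knn = new CKNN(3, new CEuclideanDistance(train, train), labels);
 *       knn->train();
 *
 *       // classify one test vector lying near the second cluster
 *       SGMatrix<float64_t> T(2, 1);
 *       T(0,0) = 5; T(1,0) = 5.5;
 *       CDenseFeatures<float64_t>* test = new CDenseFeatures<float64_t>(T);
 *       CMulticlassLabels* pred = knn->apply_multiclass(test);
 *       SG_SPRINT("predicted class: %f\n", pred->get_label(0));  // 1.0
 *
 *       SG_UNREF(pred);
 *       SG_UNREF(knn);
 *       exit_shogun();
 *       return 0;
 *   }
 */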