SHOGUN 4.2.0
MulticlassOneVsOneStrategy.cpp
/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * Written (W) 2012 Chiyuan Zhang
 * Written (W) 2013 Shell Hu and Heiko Strathmann
 * Copyright (C) 2012 Chiyuan Zhang
 */

#include <shogun/multiclass/MulticlassOneVsOneStrategy.h>
#include <shogun/labels/MulticlassLabels.h>
#include <shogun/labels/BinaryLabels.h>
#include <shogun/mathematics/Math.h>

using namespace shogun;

CMulticlassOneVsOneStrategy::CMulticlassOneVsOneStrategy()
	:CMulticlassStrategy(), m_num_machines(0), m_num_samples(SGVector<int32_t>())
{
	register_parameters();
}

CMulticlassOneVsOneStrategy::CMulticlassOneVsOneStrategy(EProbHeuristicType prob_heuris)
	:CMulticlassStrategy(prob_heuris), m_num_machines(0), m_num_samples(SGVector<int32_t>())
{
	register_parameters();
}

void CMulticlassOneVsOneStrategy::register_parameters()
{
	//SG_ADD(&m_num_samples, "num_samples", "Number of samples in each training machine", MS_NOT_AVAILABLE);
	SG_WARNING("%s::CMulticlassOneVsOneStrategy(): register parameters!\n", get_name());
}
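
// train_start(): prepare one-vs-one training. With m_num_classes classes,
// one binary submachine is trained per unordered pair of classes, i.e.
// m_num_machines = m_num_classes*(m_num_classes-1)/2, and the pair cursor
// (m_train_pair_idx_1, m_train_pair_idx_2) starts at the first pair (0, 1).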
void CMulticlassOneVsOneStrategy::train_start(CMulticlassLabels *orig_labels, CBinaryLabels *train_labels)
{
	CMulticlassStrategy::train_start(orig_labels, train_labels);
	m_num_machines=m_num_classes*(m_num_classes-1)/2;

	m_train_pair_idx_1=0;
	m_train_pair_idx_2=1;

	m_num_samples.resize_vector(m_num_machines);
}

bool CMulticlassOneVsOneStrategy::train_has_more()
{
	return m_train_iter < m_num_machines;
}
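
// train_prepare_next(): build the training subset for the current class pair.
// Samples labelled m_train_pair_idx_1 become +1, samples labelled
// m_train_pair_idx_2 become -1, and all other samples are excluded. With
// 3 classes, for example, the submachines see the pairs (0,1), (0,2), (1,2)
// in that order. The base class is assumed to advance m_train_iter before
// this point, which is why the sample count is stored at index m_train_iter-1.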
SGVector<int32_t> CMulticlassOneVsOneStrategy::train_prepare_next()
{
	CMulticlassStrategy::train_prepare_next();

	SGVector<int32_t> subset(m_orig_labels->get_num_labels());
	int32_t tot=0;
	for (int32_t k=0; k < m_orig_labels->get_num_labels(); ++k)
	{
		if (((CMulticlassLabels*) m_orig_labels)->get_int_label(k)==m_train_pair_idx_1)
		{
			((CBinaryLabels*) m_train_labels)->set_label(k, +1.0);
			subset[tot]=k;
			tot++;
		}
		else if (((CMulticlassLabels*) m_orig_labels)->get_int_label(k)==m_train_pair_idx_2)
		{
			((CBinaryLabels*) m_train_labels)->set_label(k, -1.0);
			subset[tot]=k;
			tot++;
		}
	}

	// advance to the next class pair
	m_train_pair_idx_2++;
	if (m_train_pair_idx_2 >= m_num_classes)
	{
		m_train_pair_idx_1++;
		m_train_pair_idx_2=m_train_pair_idx_1+1;
	}

	// record the number of samples used to train this submachine
	m_num_samples[m_train_iter-1] = tot;

	subset.resize_vector(tot);
	return subset;
}
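
// decide_label(): if a probability heuristic was applied (see rescale_outputs()
// below), outputs already holds one posterior per class and its argmax is
// returned. Otherwise outputs holds one value per submachine, enumerated in the
// same pair order as above: each pairwise output casts a vote for the winning
// class, and ties in the vote count are broken by the larger accumulated
// absolute decision value.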
int32_t CMulticlassOneVsOneStrategy::decide_label(SGVector<float64_t> outputs)
{
	// if OVO with prob outputs, find max posterior
	if (outputs.vlen==m_num_classes)
		return CMath::arg_max(outputs.vector, 1, outputs.vlen);

	int32_t s=0;
	SGVector<int32_t> votes(m_num_classes);
	SGVector<float64_t> dec_vals(m_num_classes);
	votes.zero();
	dec_vals.zero();

	for (int32_t i=0; i<m_num_classes; i++)
	{
		for (int32_t j=i+1; j<m_num_classes; j++)
		{
			if (outputs[s]>0)
			{
				votes[i]++;
				dec_vals[i] += CMath::abs(outputs[s]);
			}
			else
			{
				votes[j]++;
				dec_vals[j] += CMath::abs(outputs[s]);
			}
			s++;
		}
	}

	int32_t i_max=0;
	int32_t vote_max=-1;
	float64_t dec_val_max=-1;

	for (int32_t i=0; i < m_num_classes; ++i)
	{
		if (votes[i] > vote_max)
		{
			i_max = i;
			vote_max = votes[i];
			dec_val_max = dec_vals[i];
		}
		else if (votes[i] == vote_max)
		{
			if (dec_vals[i] > dec_val_max)
			{
				i_max = i;
				dec_val_max = dec_vals[i];
			}
		}
	}

	return i_max;
}
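
// rescale_outputs(): turn the m_num_machines pairwise outputs, treated as
// pairwise probabilities r_ij = P(class i | class i or j), into m_num_classes
// class posteriors using the heuristic selected by get_prob_heuris_type().
// The enum names suggest the schemes of Price et al., Hastie & Tibshirani
// (pairwise coupling) and Hamamura et al.; see the comment above each helper below.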
void CMulticlassOneVsOneStrategy::rescale_outputs(SGVector<float64_t> outputs)
{
	if (m_num_machines < 1)
		return;

	SGVector<int32_t> indx1(m_num_machines);
	SGVector<int32_t> indx2(m_num_machines);

	int32_t tot = 0;
	for (int32_t j=0; j<m_num_classes; j++)
	{
		for (int32_t k=j+1; k<m_num_classes; k++)
		{
			indx1[tot] = j;
			indx2[tot] = k;
			tot++;
		}
	}

	if (tot!=m_num_machines)
		SG_ERROR("%s::rescale_outputs(): number of enumerated class pairs (%d) does not match m_num_machines (%d).\n",
			get_name(), tot, m_num_machines);

	switch (get_prob_heuris_type())
	{
	case OVO_PRICE:
		rescale_heuris_price(outputs,indx1,indx2);
		break;
	case OVO_HASTIE:
		rescale_heuris_hastie(outputs,indx1,indx2);
		break;
	case OVO_HAMAMURA:
		rescale_heuris_hamamura(outputs,indx1,indx2);
		break;
	case PROB_HEURIS_NONE:
		break;
	default:
		SG_ERROR("%s::rescale_outputs(): Unknown OVO probability heuristic type!\n", get_name());
		break;
	}
}
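
// rescale_heuris_price(): as implemented below, the estimate for class j is
//   p_j = 1 / ( sum_{i != j} 1/r_ji - (K-2) ),
// where r_ji is the pairwise output in favour of class j against class i and
// K = m_num_classes; the 1E-12 terms guard against division by zero. The
// estimates are then normalised to sum to one.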
void CMulticlassOneVsOneStrategy::rescale_heuris_price(SGVector<float64_t> outputs,
	const SGVector<int32_t> indx1, const SGVector<int32_t> indx2)
{
	if (m_num_machines != outputs.vlen)
	{
		SG_ERROR("%s::rescale_heuris_price(): size(outputs) = %d != m_num_machines = %d\n",
			get_name(), outputs.vlen, m_num_machines);
	}

	SGVector<float64_t> new_outputs(m_num_classes);
	new_outputs.zero();

	for (int32_t j=0; j<m_num_classes; j++)
	{
		for (int32_t m=0; m<m_num_machines; m++)
		{
			if (indx1[m]==j)
				new_outputs[j] += 1.0 / (outputs[m]+1E-12);
			if (indx2[m]==j)
				new_outputs[j] += 1.0 / (1.0-outputs[m]+1E-12);
		}

		new_outputs[j] = 1.0 / (new_outputs[j] - m_num_classes + 2);
	}

	//outputs.resize_vector(m_num_classes);

	float64_t norm = SGVector<float64_t>::sum(new_outputs);
	for (int32_t i=0; i<new_outputs.vlen; i++)
		outputs[i] = new_outputs[i] / norm;
}
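
// rescale_heuris_hastie(): iterative pairwise coupling in the spirit of
// Hastie & Tibshirani. The posteriors are initialised as 2/(K*(K-1)) times the
// sum of the pairwise outputs in favour of each class. Each sweep then computes
// the implied pairwise probabilities mu_m = p_i / (p_i + p_j) for the pair
// (i, j) handled by machine m, multiplies p_j by the ratio of sample-weighted
// observed over implied pairwise probabilities for the machines involving
// class j (weights n_m = m_num_samples[m]), and renormalises. Iteration stops
// once consecutive estimates differ by less than 1E-12 in squared Euclidean norm.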
void CMulticlassOneVsOneStrategy::rescale_heuris_hastie(SGVector<float64_t> outputs,
	const SGVector<int32_t> indx1, const SGVector<int32_t> indx2)
{
	if (m_num_machines != outputs.vlen)
	{
		SG_ERROR("%s::rescale_heuris_hastie(): size(outputs) = %d != m_num_machines = %d\n",
			get_name(), outputs.vlen, m_num_machines);
	}

	SGVector<float64_t> new_outputs(m_num_classes);
	new_outputs.zero();

	for (int32_t j=0; j<m_num_classes; j++)
	{
		for (int32_t m=0; m<m_num_machines; m++)
		{
			if (indx1[m]==j)
				new_outputs[j] += outputs[m];
			if (indx2[m]==j)
				new_outputs[j] += 1.0-outputs[m];
		}

		new_outputs[j] *= 2.0 / (m_num_classes * (m_num_classes - 1));
		new_outputs[j] += 1E-10;
	}

	SGVector<float64_t> mu(m_num_machines);
	SGVector<float64_t> prev_outputs(m_num_classes);
	float64_t gap = 1.0;

	while (gap > 1E-12)
	{
		prev_outputs = new_outputs.clone();

		for (int32_t m=0; m<m_num_machines; m++)
			mu[m] = new_outputs[indx1[m]] / (new_outputs[indx1[m]] + new_outputs[indx2[m]]);

		for (int32_t j=0; j<m_num_classes; j++)
		{
			float64_t numerator = 0.0;
			float64_t denominator = 0.0;
			for (int32_t m=0; m<m_num_machines; m++)
			{
				if (indx1[m]==j)
				{
					numerator += m_num_samples[m] * outputs[m];
					denominator += m_num_samples[m] * mu[m];
				}

				if (indx2[m]==j)
				{
					numerator += m_num_samples[m] * (1.0-outputs[m]);
					denominator += m_num_samples[m] * (1.0-mu[m]);
				}
			}

			// update posterior
			new_outputs[j] *= numerator / denominator;
		}

		float64_t norm = SGVector<float64_t>::sum(new_outputs);
		for (int32_t i=0; i<new_outputs.vlen; i++)
			new_outputs[i] /= norm;

		// gap is the squared Euclidean distance between consecutive estimates
		for (int32_t i=0; i<new_outputs.vlen; i++)
			prev_outputs[i] -= new_outputs[i];

		gap = SGVector<float64_t>::qsq(prev_outputs.vector, prev_outputs.vlen, 2);
		SG_DEBUG("[Hastie's heuristic] gap = %.12f\n", gap);
	}

	for (int32_t i=0; i<new_outputs.vlen; i++)
		outputs[i] = new_outputs[i];
}
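
// rescale_heuris_hamamura(): each class posterior is proportional to the
// product of the pairwise outputs in its favour, p_j ~ prod_{i != j} r_ji,
// with a small 1E-10 offset to avoid all-zero products before normalisation.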
void CMulticlassOneVsOneStrategy::rescale_heuris_hamamura(SGVector<float64_t> outputs,
	const SGVector<int32_t> indx1, const SGVector<int32_t> indx2)
{
	if (m_num_machines != outputs.vlen)
	{
		SG_ERROR("%s::rescale_heuris_hamamura(): size(outputs) = %d != m_num_machines = %d\n",
			get_name(), outputs.vlen, m_num_machines);
	}

	SGVector<float64_t> new_outputs(m_num_classes);
	SGVector<float64_t>::fill_vector(new_outputs.vector, new_outputs.vlen, 1.0);

	for (int32_t j=0; j<m_num_classes; j++)
	{
		for (int32_t m=0; m<m_num_machines; m++)
		{
			if (indx1[m]==j)
				new_outputs[j] *= outputs[m];
			if (indx2[m]==j)
				new_outputs[j] *= 1-outputs[m];
		}

		new_outputs[j] += 1E-10;
	}

	float64_t norm = SGVector<float64_t>::sum(new_outputs);

	for (int32_t i=0; i<new_outputs.vlen; i++)
		outputs[i] = new_outputs[i] / norm;
}
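
The strategy above is not used on its own; it is passed to a multiclass machine
that trains one binary submachine per class pair. The following is a minimal,
self-contained usage sketch, not part of this file: the toy data, the LibLinear
base learner and the exact header paths are illustrative assumptions based on
the Shogun 4.x API.

#include <shogun/base/init.h>
#include <shogun/io/SGIO.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/labels/MulticlassLabels.h>
#include <shogun/classifier/svm/LibLinear.h>
#include <shogun/machine/LinearMulticlassMachine.h>
#include <shogun/multiclass/MulticlassOneVsOneStrategy.h>

using namespace shogun;

int main()
{
	init_shogun_with_defaults();

	// toy data: 30 two-dimensional points in 3 well-separated clusters
	const int32_t num=30, dim=2, num_classes=3;
	SGMatrix<float64_t> data(dim, num);
	SGVector<float64_t> lab(num);
	for (int32_t i=0; i<num; i++)
	{
		int32_t c=i%num_classes;
		data(0, i)=10.0*c + 0.01*i;
		data(1, i)=10.0*c - 0.01*i;
		lab[i]=c;
	}

	CDenseFeatures<float64_t>* features=new CDenseFeatures<float64_t>(data);
	CMulticlassLabels* labels=new CMulticlassLabels(lab);
	CLibLinear* base_learner=new CLibLinear(L2R_L2LOSS_SVC);

	// one-vs-one strategy: trains num_classes*(num_classes-1)/2 = 3 submachines
	CLinearMulticlassMachine* machine=new CLinearMulticlassMachine(
		new CMulticlassOneVsOneStrategy(), (CDotFeatures*) features, base_learner, labels);

	machine->train();
	CMulticlassLabels* pred=machine->apply_multiclass();
	SG_SPRINT("first prediction: %d\n", pred->get_int_label(0));

	SG_UNREF(pred);
	SG_UNREF(machine);
	exit_shogun();
	return 0;
}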