20 using namespace shogun;
// Initializes internal ShareBoost state before training.
// NOTE(review): this excerpt is missing the original lines 35-40; the
// `return m_activeset.clone();` below presumably belongs to a separate
// accessor (e.g. an active-set getter), not to init_sb_params — confirm
// against the full source file.
34 void CShareBoost::init_sb_params()
41 return m_activeset.
clone();
// Training entry point (signature not visible in this excerpt — presumably
// CShareBoost::train_machine; confirm against the full file).
//
// Validates preconditions, then runs m_nonzero_feas boosting rounds: each
// round picks one new feature (choose_feature), grows the active set, and
// re-optimizes the coefficients (optimize_coefficients).
// Fail fast if no features were supplied.
50 SG_ERROR(
"No features given for training\n")
// Fail fast if no labels were supplied.
52 SG_ERROR(
"No labels given for training\n")
// m_nonzero_feas is the number of features ShareBoost will select;
// it must be strictly positive.
54 if (m_nonzero_feas <= 0)
55 SG_ERROR(
"Set a valid (> 0) number of non-zero features to seek before training\n")
// Selecting as many (or more) features than the data has would make the
// sparsity constraint meaningless.
57 SG_ERROR(
"It doesn't make sense to use ShareBoost with num non-zero features >= num features in the data\n")
// One boosting round per feature to be selected.
75 for (int32_t t=0; t < m_nonzero_feas; ++t)
// Greedily pick the next feature to add to the active set.
79 int32_t i_fea = choose_feature();
// Grow the active set by one slot. NOTE(review): bumping vlen directly
// assumes the underlying buffer was pre-allocated to full capacity —
// confirm in the (not visible) allocation code.
81 m_activeset.
vlen += 1;
// Refit coefficients over the enlarged active set.
84 optimize_coefficients();
// Per-round timing diagnostics (prediction+selection time, optimization
// time); t_* timers are maintained in lines not visible in this excerpt.
87 SG_SDEBUG(
" SB[round %03d]: (%8.4f + %8.4f) sec.\n", t,
88 t_compute_pred + t_choose_feature, t_optimize);
// Computes predictions using the model's current coefficients
// (body not visible in this excerpt).
106 void CShareBoost::compute_pred()
// Overload: computes predictions from an externally supplied flat
// coefficient array W, laid out row-major with one w_len-sized slice
// per class (see the std::copy below).
123 void CShareBoost::compute_pred(
const float64_t *W)
// Each class's weight vector has one entry per active feature.
125 int32_t w_len = m_activeset.
vlen;
// Copy the i-th class's slice W[i*w_len, (i+1)*w_len) into the local
// weight vector w. NOTE(review): loop over i and the declaration of w
// are in lines not visible here.
131 std::copy(W + i*w_len, W + (i+1)*w_len, w.vector);
// Computes the rho matrix (per-class, per-sample weights) from the current
// predictions, plus per-sample normalizers m_rho_norm.
138 void CShareBoost::compute_rho()
// Fill m_rho: rows index classes (i), columns index samples (j).
141 for (int32_t i=0; i < m_rho.
num_rows; ++i)
143 for (int32_t j=0; j < m_rho.
num_cols; ++j)
// Exponential weight: exp([label==i] - pred(j,label) + pred(j,i)).
// NOTE(review): `label` is read per-sample in lines not visible here —
// presumably the true class of sample j; confirm.
147 m_rho(i,j) =
CMath::exp((label == i) - m_pred(j, label) + m_pred(j, i));
// Accumulate each column's (per-sample) normalizer by summing over classes.
152 for (int32_t j=0; j < m_rho.
num_cols; ++j)
155 for (int32_t i=0; i < m_rho.
num_rows; ++i)
156 m_rho_norm[j] += m_rho(i,j);
// Greedily selects the next feature to add to the active set, returning its
// index. Scans all features, skipping those already active, and scores each
// candidate by an absolute sum over samples (the full scoring and argmax
// logic lies in lines not visible in this excerpt).
160 int32_t CShareBoost::choose_feature()
// Iterate over candidate features (rows of m_fea).
163 for (int32_t j=0; j < m_fea.
num_rows; ++j)
// Skip feature j if it is already in the active set (linear search over
// the first vlen entries of m_activeset).
165 if (std::find(&m_activeset[0], &m_activeset[m_activeset.
vlen], j) !=
166 &m_activeset[m_activeset.
vlen])
// Accumulate the score over samples (columns of m_fea).
177 for (int32_t ii=0; ii < m_fea.
num_cols; ++ii)
// Weighted contribution of sample ii for class k, using normalized rho.
// NOTE(review): the expression continues on a line not visible here.
179 abssum += m_fea(j, ii)*(m_rho(k, ii)/m_rho_norm[ii] -
// Re-optimizes the model coefficients over the current active set by
// delegating to an optimizer object (constructed in lines not visible in
// this excerpt — presumably a ShareBoost-specific optimizer; confirm).
191 void CShareBoost::optimize_coefficients()
194 optimizer.optimize();
// Feature-type guard (enclosing function signature not visible in this
// excerpt — presumably a set_features-style setter): ShareBoost requires
// dense float64 features and rejects anything else.
201 SG_ERROR(
"Require DenseFeatures<float64_t>\n")