18 index_t num_vec,
bool ref_counting) :
20 num_vectors(num_vec), num_features(num_feat),
28 num_vectors(num_vec), num_features(num_feat)
51 template <>
template <>
57 "Dimension mismatch! %d vs %d\n",
58 v.
vlen, num_features);
59 for (
index_t i=0; i<num_vectors; ++i)
60 result[i]=sparse_matrix[i].dense_dot(v);
64 template <>
template <>
70 "Dimension mismatch! %d vs %d\n",
71 v.
vlen, num_features);
72 for (
index_t i=0; i<num_vectors; ++i)
73 result[i]=sparse_matrix[i].dense_dot(v);
77 template <>
template <>
83 "Dimension mismatch! %d vs %d\n",
84 v.
vlen, num_features);
85 for (
index_t i=0; i<num_vectors; ++i)
86 result[i]=sparse_matrix[i].dense_dot(v);
104 SG_SERROR(
"SGSparseMatrix::load():: Not supported for complex128_t");
117 if (do_sort_features)
139 SG_SERROR(
"SGSparseMatrix::save():: Not supported for complex128_t");
146 int32_t num=labels.
vlen;
169 sparse_matrix = NULL;
177 SG_FREE(sparse_matrix);
186 int32_t* hist=SG_CALLOC(int32_t, num_features);
189 for (int32_t v=0; v<num_vectors; v++)
197 for (int32_t v=0; v<num_features; v++)
202 int32_t* index=SG_CALLOC(int32_t, num_features);
205 for (int32_t v=0; v<num_vectors; v++)
211 int32_t vidx=sv.
features[i].feat_index;
213 sfm[vidx].features[index[vidx]].feat_index=fidx;
214 sfm[vidx].features[index[vidx]].entry=sv.
features[i].entry;
226 for (int32_t i=0; i<num_vectors; i++)
238 REQUIRE(num_vec>0,
"Matrix should have > 0 vectors!\n");
240 SG_SINFO(
"converting dense feature matrix to sparse one\n")
241 int32_t* num_feat_entries=SG_MALLOC(
int, num_vec);
244 int64_t num_total_entries=0;
247 for (int32_t i=0; i<num_vec; i++)
249 num_feat_entries[i]=0;
250 for (int32_t j=0; j<num_feat; j++)
252 if (src[i*((int64_t) num_feat) + j] != static_cast<T>(0))
253 num_feat_entries[i]++;
257 num_features=num_feat;
261 for (int32_t i=0; i< num_vec; i++)
264 int32_t sparse_feat_idx=0;
266 for (int32_t j=0; j< num_feat; j++)
268 int64_t pos= i*num_feat + j;
270 if (src[pos] != static_cast<T>(0))
272 sparse_matrix[i].
features[sparse_feat_idx].entry=src[pos];
273 sparse_matrix[i].features[sparse_feat_idx].feat_index=j;
280 SG_SINFO(
"sparse feature matrix has %ld entries (full matrix had %ld, sparsity %2.2f%%)\n",
281 num_total_entries, int64_t(num_feat)*num_vec, (100.0*num_total_entries)/(int64_t(num_feat)*num_vec));
282 SG_FREE(num_feat_entries);
virtual void set_sparse_matrix(const SGSparseVector< bool > *matrix, int32_t num_feat, int32_t num_vec)
template class SGSparseMatrix
virtual void set_sparse_matrix(const SGSparseVector< bool > *matrix, int32_t num_feat, int32_t num_vec)
Shogun reference-count-managed data.
A File access base class.
void from_dense(SGMatrix< T > full)
SGSparseVector< T > * sparse_matrix
array of sparse vectors of size num_vectors
SGSparseVectorEntry< T > * features
const SGVector< T > operator*(SGVector< T > v) const
virtual void copy_data(const SGReferencedData &orig)
All classes and functions are contained in the shogun namespace.
Read sparse real-valued features in SVM-light format, e.g. "-1 1:10.0 2:100.2 1000:1.3", where "-1" is an (optional) label and the entries mean: dim 1 → value 10.0, dim 2 → value 100.2, dim 1000 → value 1.3.
template class SGSparseVector The assumption is that the stored SGSparseVectorEntry* vector is orde...
virtual void get_sparse_matrix(SGSparseVector< bool > *&matrix, int32_t &num_feat, int32_t &num_vec)
virtual void get_sparse_matrix(SGSparseVector< bool > *&matrix, int32_t &num_feat, int32_t &num_vec)
index_t num_vectors
total number of vectors
virtual ~SGSparseMatrix()