Code excerpts referenced on this page (parameter-gradient computation, max-norm enforcement and the contraction term):

  // gradient computation: iterate over the layer's input layers
  for (int32_t i=0; i<input_indices.vlen; i++)

  // bias gradients: sum the local gradients over the batch
  BG = LG.rowwise().sum();

  // map the raw weight-gradient buffer (located at weights_index_offset) as a matrix
  EMappedMatrix WG(weight_gradients, ...);

  // weight gradients and accumulated gradients w.r.t. the layer's inputs
  WG = LG*X.transpose();
  IG += W.transpose()*LG;

  // max-norm enforcement: rescale a weight vector whose norm exceeds max_norm
  for (int32_t i=0; i<length; i+=m_input_sizes[l])
      for (int32_t j=0; j<m_input_sizes[l]; j++)
          weights[i+j] *= multiplier;

  // contraction term: sum of squared parameters
  contraction_term += parameters[i]*parameters[i];
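The excerpts above compute the bias, weight and input gradients of a linear layer. A minimal standalone sketch of the same computation in plain Eigen (the function name and the column-wise batch layout are illustrative assumptions, not Shogun's mapped-matrix code):

  #include <Eigen/Dense>

  typedef Eigen::MatrixXd Mat;
  typedef Eigen::VectorXd Vec;

  // Backpropagation through a linear layer A = W*X + b, with the batch stored
  // column-wise (one example per column).
  void linear_layer_backprop(const Mat& W,   // num_neurons x num_inputs
                             const Mat& X,   // num_inputs  x batch_size, layer inputs
                             const Mat& LG,  // num_neurons x batch_size, local gradients
                             Mat& WG,        // gradient w.r.t. the weights
                             Vec& BG,        // gradient w.r.t. the biases
                             Mat& IG)        // gradient w.r.t. the inputs (accumulated)
  {
      BG = LG.rowwise().sum();     // sum local gradients over the batch
      WG = LG * X.transpose();     // outer products of local gradients and inputs
      IG += W.transpose() * LG;    // propagate local gradients to the previous layer
  }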
static T twonorm(const T *x, int32_t len)
Returns || x ||_2, the 2-norm (Euclidean length) of the vector x.
SGVector< int32_t > m_input_sizes
static float32_t normal_random(float32_t mean, float32_t std_dev)
virtual SGMatrix< float64_t > get_activation_gradients()
SGMatrix< float64_t > m_activations
virtual void initialize_neural_layer(CDynamicObjectArray *layers, SGVector< int32_t > input_indices)
virtual int32_t get_num_neurons()
SGVector< int32_t > m_input_indices
Base class for neural network layers.
SGMatrix< float64_t > m_activation_gradients
virtual SGMatrix< float64_t > get_activations()
SGMatrix< float64_t > m_local_gradients
virtual void initialize_parameters(SGVector< float64_t > parameters, SGVector< bool > parameter_regularizable, float64_t sigma)
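A standalone sketch of what Gaussian initialization with standard deviation sigma typically looks like for such a layer (illustrative names, not the class's implementation): weights are drawn from a zero-mean Gaussian and marked regularizable, biases start at zero.

  #include <random>
  #include <vector>

  // Initialize a flat parameter vector: the first num_weights entries are weights,
  // the remaining entries are biases.
  void init_parameters(std::vector<double>& parameters,
                       std::vector<bool>& regularizable,
                       int num_weights, double sigma)
  {
      std::mt19937 rng(std::random_device{}());
      std::normal_distribution<double> gauss(0.0, sigma);

      for (std::size_t i = 0; i < parameters.size(); i++)
      {
          bool is_weight = i < static_cast<std::size_t>(num_weights);
          parameters[i] = is_weight ? gauss(rng) : 0.0;  // weights ~ N(0, sigma^2), biases 0
          regularizable[i] = is_weight;                  // biases are typically not regularized
      }
  }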
virtual void compute_local_gradients(SGMatrix< float64_t > targets)
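When the layer is used as an output layer trained with squared error, the local gradients reduce to the difference between activations and targets averaged over the batch; a minimal sketch under that assumption (illustrative code, not the general hidden-layer case):

  #include <Eigen/Dense>

  // Local gradients of 1/(2*batch) * ||A - T||^2 w.r.t. the activations A,
  // for a batch stored one example per column.
  Eigen::MatrixXd local_gradients_squared_error(const Eigen::MatrixXd& activations,
                                                const Eigen::MatrixXd& targets)
  {
      return (activations - targets) / static_cast<double>(activations.cols());
  }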
CSGObject * element(int32_t idx1, int32_t idx2=0, int32_t idx3=0)
virtual float64_t compute_contraction_term(SGVector< float64_t > parameters)
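Consistent with the excerpt above, the contraction term of a layer with identity activation reduces to the contraction coefficient times the sum of squared weights; a standalone sketch (the exclusion of biases and all names are assumptions):

  #include <vector>

  // Contraction term: coefficient * sum_i w_i^2 over the layer's weights
  // (biases at the end of the parameter vector are excluded).
  double contraction_term(const std::vector<double>& parameters,
                          int num_weights, double contraction_coefficient)
  {
      double sum = 0.0;
      for (int i = 0; i < num_weights; i++)
          sum += parameters[i] * parameters[i];
      return contraction_coefficient * sum;
  }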
Dynamic array class for CSGObject pointers that creates an array that can be used like a list or an array.
virtual void enforce_max_norm(SGVector< float64_t > parameters, float64_t max_norm)
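A standalone sketch of the rescaling shown in the excerpts above: each contiguous block of weights is treated as one vector and scaled down whenever its 2-norm exceeds max_norm (the block layout and names are assumptions):

  #include <cmath>
  #include <vector>

  // Rescale each contiguous block of `vector_len` weights so that its 2-norm
  // does not exceed max_norm.
  void enforce_max_norm(std::vector<double>& weights, int vector_len, double max_norm)
  {
      for (std::size_t i = 0; i + vector_len <= weights.size(); i += vector_len)
      {
          double norm_sq = 0.0;
          for (int j = 0; j < vector_len; j++)
              norm_sq += weights[i + j] * weights[i + j];

          double norm = std::sqrt(norm_sq);
          if (norm > max_norm)
          {
              double multiplier = max_norm / norm;
              for (int j = 0; j < vector_len; j++)
                  weights[i + j] *= multiplier;
          }
      }
  }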
virtual void compute_activations(SGVector< float64_t > parameters, CDynamicObjectArray *layers)
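For a linear layer this amounts to A = W*X + b over a column-wise batch; a minimal Eigen sketch under that assumption (activation functions and Shogun's mapped-matrix types are omitted):

  #include <Eigen/Dense>

  // Forward pass of a linear layer: one activation column per example in the batch.
  Eigen::MatrixXd linear_activations(const Eigen::MatrixXd& W,  // num_neurons x num_inputs
                                     const Eigen::VectorXd& b,  // num_neurons
                                     const Eigen::MatrixXd& X)  // num_inputs x batch_size
  {
      return (W * X).colwise() + b;
  }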
All classes and functions are contained in the shogun namespace.
virtual float64_t compute_error(SGMatrix< float64_t > targets)
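Assuming squared-error training of an output layer, compute_error corresponds to the following (a sketch; other layer types would use other losses):

  #include <Eigen/Dense>

  // Mean squared error over a column-wise batch: 1/(2*batch) * sum((A - T)^2).
  double squared_error(const Eigen::MatrixXd& activations,
                       const Eigen::MatrixXd& targets)
  {
      double batch_size = static_cast<double>(activations.cols());
      return (activations - targets).squaredNorm() / (2.0 * batch_size);
  }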
virtual void compute_gradients(SGVector< float64_t > parameters, SGMatrix< float64_t > targets, CDynamicObjectArray *layers, SGVector< float64_t > parameter_gradients)
virtual void compute_contraction_term_gradients(SGVector< float64_t > parameters, SGVector< float64_t > gradients)
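Because the contraction term sketched above is quadratic in the weights, its gradient contribution is 2 * coefficient * w_i per weight; a matching sketch (same assumptions as above):

  #include <vector>

  // Add the gradient of coefficient * sum_i w_i^2 to the existing weight gradients.
  void add_contraction_gradients(const std::vector<double>& parameters,
                                 std::vector<double>& gradients,
                                 int num_weights, double contraction_coefficient)
  {
      for (int i = 0; i < num_weights; i++)
          gradients[i] += 2.0 * contraction_coefficient * parameters[i];
  }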
SGMatrix< bool > m_dropout_mask
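The dropout mask has the same shape as the activations; during training, activations are zeroed wherever the mask is false. A standalone sketch with an assumed keep probability and in-place application:

  #include <Eigen/Dense>
  #include <random>

  // Sample a dropout mask with keep probability keep_prob and apply it to the activations.
  void apply_dropout(Eigen::MatrixXd& activations,
                     Eigen::Matrix<bool, Eigen::Dynamic, Eigen::Dynamic>& mask,
                     double keep_prob, std::mt19937& rng)
  {
      std::bernoulli_distribution keep(keep_prob);
      mask.resize(activations.rows(), activations.cols());

      for (Eigen::Index j = 0; j < activations.cols(); j++)
          for (Eigen::Index i = 0; i < activations.rows(); i++)
          {
              mask(i, j) = keep(rng);
              if (!mask(i, j))
                  activations(i, j) = 0.0;  // drop this unit for this example
          }
  }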
float64_t contraction_coefficient