int32_t radius_x, int32_t radius_y,
int32_t pooling_width, int32_t pooling_height,
int32_t stride_x, int32_t stride_y,
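These parameters belong to the CNeuralConvolutionalLayer constructor. A minimal usage sketch, assuming the argument order (activation function, number of maps, radius_x/radius_y, pooling_width/pooling_height, stride_x/stride_y) suggested by the fragment above; the concrete values and the CMAF_RECTIFIED_LINEAR choice are illustrative, not prescribed by the listing:

#include <shogun/neuralnets/NeuralConvolutionalLayer.h>

using namespace shogun;

// 16 feature maps, 5x5 receptive fields (radius 2), 2x2 max-pooling, stride 1x1
CNeuralConvolutionalLayer* make_conv_layer()
{
    return new CNeuralConvolutionalLayer(
        CMAF_RECTIFIED_LINEAR, // activation function for the maps
        16,                    // num_maps
        2, 2,                  // radius_x, radius_y
        2, 2,                  // pooling_width, pooling_height
        1, 1);                 // stride_x, stride_y
}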
for (int32_t l=0; l<input_indices.vlen; l++)
int32_t num_parameters_per_map =
bool* map_param_regularizable = parameter_regularizable.vector+m*num_parameters_per_map;
for (int32_t i=0; i<num_parameters_per_map; i++)
map_param_regularizable[i] = (i != 0);
map_param_regularizable[i] = 0;
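The fragments above are from initialize_parameters(): parameters are laid out as one contiguous block per feature map, and the regularization mask either excludes only the bias at index 0 (the (i != 0) assignment) or disables regularization for the whole block (the = 0 assignment). A standalone sketch of the bias-excluded case, using hypothetical names (init_map_parameters, num_maps) rather than the Shogun API:

#include <random>
#include <vector>

// parameters and regularizable are assumed to be pre-sized to
// num_maps * num_parameters_per_map entries.
void init_map_parameters(std::vector<double>& parameters,
                         std::vector<bool>& regularizable,
                         int num_maps, int num_parameters_per_map, double sigma)
{
    std::mt19937 rng(0);
    std::normal_distribution<double> normal(0.0, sigma);

    for (int m = 0; m < num_maps; m++)
    {
        int offset = m * num_parameters_per_map; // start of this map's block
        for (int i = 0; i < num_parameters_per_map; i++)
        {
            parameters[offset + i] = normal(rng);  // Gaussian initialization
            regularizable[offset + i] = (i != 0);  // bias (i == 0) is not regularized
        }
    }
}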
int32_t num_parameters_per_map =
parameters.vector+m*num_parameters_per_map, num_parameters_per_map, false);
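Both here in compute_activations() and in the compute_gradients() fragments below, each map's slice of the flat parameter vector is wrapped in a non-owning SGVector view. A sketch of that pattern, assuming the SGVector(T* origin, index_t len, bool ref_counting) constructor; the surrounding loop is illustrative rather than a verbatim copy of the source:

#include <shogun/lib/SGVector.h>

using namespace shogun;

void slice_map_parameters(SGVector<float64_t> parameters,
                          int32_t num_maps, int32_t num_parameters_per_map)
{
    for (int32_t m = 0; m < num_maps; m++)
    {
        // false: wrap the existing memory, do not copy or take ownership
        SGVector<float64_t> map_parameters(
            parameters.vector + m*num_parameters_per_map,
            num_parameters_per_map,
            false);

        // map_parameters would then be handed to the m-th feature map
    }
}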
for (int32_t i=0; i<length; i++)
for (int32_t i=0; i<len; i++)
int32_t num_parameters_per_map =
parameters.vector+m*num_parameters_per_map, num_parameters_per_map, false);
parameter_gradients.vector+m*num_parameters_per_map, num_parameters_per_map, false);
for (int32_t i=0; i<length; i++)
for (int32_t offset=1; offset<parameters.vlen; offset+=num_parameters_per_map)
for (int32_t i=0; i<num_weights; i++)
    weights[i] *= multiplier;
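These fragments are from enforce_max_norm(): offset starts at 1 to skip the first map's bias, and each step of num_parameters_per_map jumps to the next map's weight block. Together with the referenced CMath::twonorm helper, they suggest the usual max-norm constraint: rescale a weight block whenever its l2-norm exceeds max_norm. A standalone sketch of that logic, not a verbatim copy of the Shogun source:

#include <cmath>

void enforce_max_norm_sketch(double* parameters, int params_len,
                             int num_parameters_per_map, double max_norm)
{
    int num_weights = num_parameters_per_map - 1; // everything except the bias

    // offset=1 skips the first bias; each step lands on the next map's weights
    for (int offset = 1; offset < params_len; offset += num_parameters_per_map)
    {
        double* weights = parameters + offset;

        double norm = 0.0;
        for (int i = 0; i < num_weights; i++)
            norm += weights[i]*weights[i];
        norm = std::sqrt(norm);

        if (norm > max_norm)
        {
            double multiplier = max_norm / norm;
            for (int i = 0; i < num_weights; i++)
                weights[i] *= multiplier;
        }
    }
}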
void CNeuralConvolutionalLayer::init()
CNeuralConvolutionalLayer()
static T twonorm(const T *x, int32_t len)
Returns ||x||_2, the l2-norm of x.
EConvMapActivationFunction m_activation_function
virtual void compute_activations(SGVector< float64_t > parameters, CDynamicObjectArray *layers)
static float32_t normal_random(float32_t mean, float32_t std_dev)
virtual void set_batch_size(int32_t batch_size)
SGMatrix< float64_t > m_activations
virtual void initialize_neural_layer(CDynamicObjectArray *layers, SGVector< int32_t > input_indices)
EInitializationMode m_initialization_mode
virtual void enforce_max_norm(SGVector< float64_t > parameters, float64_t max_norm)
SGMatrix< float64_t > m_convolution_output_gradients
virtual void compute_gradients(SGVector< float64_t > parameters, SGMatrix< float64_t > targets, CDynamicObjectArray *layers, SGVector< float64_t > parameter_gradients)
virtual int32_t get_num_neurons()
SGVector< int32_t > m_input_indices
Base class for neural network layers.
SGMatrix< float64_t > m_activation_gradients
Handles convolution and gradient calculation for a single feature map in a convolutional neural network.
virtual int32_t get_height()
CSGObject * element(int32_t idx1, int32_t idx2=0, int32_t idx3=0)
ENLAutoencoderPosition autoencoder_position
void compute_activations(SGVector< float64_t > parameters, CDynamicObjectArray *layers, SGVector< int32_t > input_indices, SGMatrix< float64_t > activations)
virtual void initialize_parameters(SGVector< float64_t > parameters, SGVector< bool > parameter_regularizable, float64_t sigma)
Dynamic array class for CSGObject pointers that creates an array that can be used like a list or an array.
virtual int32_t get_width()
All classes and functions are contained in the shogun namespace.
SGMatrix< float64_t > m_max_indices
virtual float64_t compute_error(SGMatrix< float64_t > targets)
EConvMapActivationFunction
Determines the activation function for neurons in a convolutional feature map.
int32_t m_input_num_channels
SGMatrix< float64_t > m_convolution_output
static float32_t sqrt(float32_t x)
void compute_gradients(SGVector< float64_t > parameters, SGMatrix< float64_t > activations, SGMatrix< float64_t > activation_gradients, CDynamicObjectArray *layers, SGVector< int32_t > input_indices, SGVector< float64_t > parameter_gradients)
SGMatrix< bool > m_dropout_mask
void pool_activations(SGMatrix< float64_t > activations, int32_t pooling_width, int32_t pooling_height, SGMatrix< float64_t > pooled_activations, SGMatrix< float64_t > max_indices)
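An illustrative sketch (not the Shogun implementation) of what the pool_activations() contract above implies: each pooling_width x pooling_height block of a feature map is reduced to its maximum, and max_indices records where that maximum came from so compute_gradients() can route gradients back through the pooling stage. One column of a column-major activations matrix is processed here, assuming the usual one-example-per-column layout of m_activations:

void max_pool_column(const double* activations, int width, int height,
                     int pooling_width, int pooling_height,
                     double* pooled, double* max_indices)
{
    int out_width  = width / pooling_width;
    int out_height = height / pooling_height;

    for (int ox = 0; ox < out_width; ox++)
    {
        for (int oy = 0; oy < out_height; oy++)
        {
            double best = -1e300;
            int best_index = -1;

            // scan the pooling block belonging to output cell (ox, oy)
            for (int dx = 0; dx < pooling_width; dx++)
            {
                for (int dy = 0; dy < pooling_height; dy++)
                {
                    int x = ox*pooling_width + dx;
                    int y = oy*pooling_height + dy;
                    int index = x*height + y; // column-major index within the map

                    if (activations[index] > best)
                    {
                        best = activations[index];
                        best_index = index;
                    }
                }
            }

            pooled[ox*out_height + oy] = best;
            max_indices[ox*out_height + oy] = best_index;
        }
    }
}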