SHOGUN  4.2.0
CNeuralSoftmaxLayer Class Reference

Detailed Description

Neural layer with linear neurons and a softmax activation function. Can only be used as an output layer. Cross-entropy is used as the error measure.

Definition at line 48 of file NeuralSoftmaxLayer.h.

Inheritance diagram for CNeuralSoftmaxLayer: CSGObject -> CNeuralLayer -> CNeuralLinearLayer -> CNeuralSoftmaxLayer
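
A minimal usage sketch, assuming the companion classes CNeuralInputLayer and CNeuralNetwork from the same neural-network module; the construction and initialization calls (quick_connect, initialize_neural_network) are assumptions and may differ between Shogun versions:

    #include <shogun/neuralnets/NeuralNetwork.h>
    #include <shogun/neuralnets/NeuralInputLayer.h>
    #include <shogun/neuralnets/NeuralSoftmaxLayer.h>

    using namespace shogun;

    // Build a tiny classifier: 10 input features, 3-class softmax output.
    // CNeuralSoftmaxLayer can only be used as the last (output) layer.
    CDynamicObjectArray* layers = new CDynamicObjectArray();
    layers->append_element(new CNeuralInputLayer(10));
    layers->append_element(new CNeuralSoftmaxLayer(3));

    CNeuralNetwork* network = new CNeuralNetwork(layers);
    network->quick_connect();              // assumed helper that chains the layers sequentially
    network->initialize_neural_network();  // assumed initialization call; name may differ by version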

Public Member Functions

 CNeuralSoftmaxLayer ()
 
 CNeuralSoftmaxLayer (int32_t num_neurons)
 
virtual ~CNeuralSoftmaxLayer ()
 
virtual void compute_activations (SGVector< float64_t > parameters, CDynamicObjectArray *layers)
 
virtual void compute_local_gradients (SGMatrix< float64_t > targets)
 
virtual float64_t compute_error (SGMatrix< float64_t > targets)
 
virtual const char * get_name () const
 
virtual void initialize_neural_layer (CDynamicObjectArray *layers, SGVector< int32_t > input_indices)
 
virtual void initialize_parameters (SGVector< float64_t > parameters, SGVector< bool > parameter_regularizable, float64_t sigma)
 
virtual void compute_activations (SGMatrix< float64_t > inputs)
 
virtual void compute_gradients (SGVector< float64_t > parameters, SGMatrix< float64_t > targets, CDynamicObjectArray *layers, SGVector< float64_t > parameter_gradients)
 
virtual void enforce_max_norm (SGVector< float64_t > parameters, float64_t max_norm)
 
virtual float64_t compute_contraction_term (SGVector< float64_t > parameters)
 
virtual void compute_contraction_term_gradients (SGVector< float64_t > parameters, SGVector< float64_t > gradients)
 
virtual void set_batch_size (int32_t batch_size)
 
virtual bool is_input ()
 
virtual void dropout_activations ()
 
virtual int32_t get_num_neurons ()
 
virtual int32_t get_width ()
 
virtual int32_t get_height ()
 
virtual void set_num_neurons (int32_t num_neurons)
 
virtual int32_t get_num_parameters ()
 
virtual SGMatrix< float64_t > get_activations ()
 
virtual SGMatrix< float64_t > get_activation_gradients ()
 
virtual SGMatrix< float64_t > get_local_gradients ()
 
virtual SGVector< int32_t > get_input_indices ()
 
virtual CSGObject * shallow_copy () const
 
virtual CSGObject * deep_copy () const
 
virtual bool is_generic (EPrimitiveType *generic) const
 
template<class T >
void set_generic ()
 
template<>
void set_generic () (explicit specializations for each supported primitive type)
 
void unset_generic ()
 
virtual void print_serializable (const char *prefix="")
 
virtual bool save_serializable (CSerializableFile *file, const char *prefix="")
 
virtual bool load_serializable (CSerializableFile *file, const char *prefix="")
 
void set_global_io (SGIO *io)
 
SGIO * get_global_io ()
 
void set_global_parallel (Parallel *parallel)
 
Parallel * get_global_parallel ()
 
void set_global_version (Version *version)
 
Version * get_global_version ()
 
SGStringList< char > get_modelsel_names ()
 
void print_modsel_params ()
 
char * get_modsel_param_descr (const char *param_name)
 
index_t get_modsel_param_index (const char *param_name)
 
void build_gradient_parameter_dictionary (CMap< TParameter *, CSGObject * > *dict)
 
bool has (const std::string &name) const
 
template<typename T >
bool has (const Tag< T > &tag) const
 
template<typename T , typename U = void>
bool has (const std::string &name) const
 
template<typename T >
void set (const Tag< T > &_tag, const T &value)
 
template<typename T , typename U = void>
void set (const std::string &name, const T &value)
 
template<typename T >
T get (const Tag< T > &_tag) const
 
template<typename T , typename U = void>
T get (const std::string &name) const
 
virtual void update_parameter_hash ()
 
virtual bool parameter_hash_changed ()
 
virtual bool equals (CSGObject *other, float64_t accuracy=0.0, bool tolerant=false)
 
virtual CSGObject * clone ()
 

Public Attributes

bool is_training
 
float64_t dropout_prop
 
float64_t contraction_coefficient
 
ENLAutoencoderPosition autoencoder_position
 
SGIO * io
 
Parallel * parallel
 
Version * version
 
Parameter * m_parameters
 
Parameter * m_model_selection_parameters
 
Parameter * m_gradient_parameters
 
uint32_t m_hash
 

Protected Member Functions

virtual void load_serializable_pre () throw (ShogunException)
 
virtual void load_serializable_post () throw (ShogunException)
 
virtual void save_serializable_pre () throw (ShogunException)
 
virtual void save_serializable_post () throw (ShogunException)
 
template<typename T >
void register_param (Tag< T > &_tag, const T &value)
 
template<typename T >
void register_param (const std::string &name, const T &value)
 

Protected Attributes

int32_t m_num_neurons
 
int32_t m_width
 
int32_t m_height
 
int32_t m_num_parameters
 
SGVector< int32_t > m_input_indices
 
SGVector< int32_t > m_input_sizes
 
int32_t m_batch_size
 
SGMatrix< float64_t > m_activations
 
SGMatrix< float64_t > m_activation_gradients
 
SGMatrix< float64_t > m_local_gradients
 
SGMatrix< bool > m_dropout_mask
 

Constructor & Destructor Documentation

CNeuralSoftmaxLayer ( )

default constructor

Definition at line 40 of file NeuralSoftmaxLayer.cpp.

CNeuralSoftmaxLayer ( int32_t  num_neurons)

Constructor

Parameters
num_neurons: Number of neurons in this layer

Definition at line 44 of file NeuralSoftmaxLayer.cpp.

virtual ~CNeuralSoftmaxLayer ( )
virtual

Definition at line 60 of file NeuralSoftmaxLayer.h.

Member Function Documentation

void build_gradient_parameter_dictionary ( CMap< TParameter *, CSGObject * > *  dict)
inherited

Builds a dictionary of all parameters in SGObject as well as those of SGObjects that are parameters of this object. The dictionary maps parameters to the objects that own them.

Parameters
dict: dictionary of parameters to be built.

Definition at line 630 of file SGObject.cpp.

CSGObject * clone ( )
virtual inherited

Creates a clone of the current object. This is done via recursively traversing all parameters, which corresponds to a deep copy. Calling equals on the cloned object always returns true although none of the memory of both objects overlaps.

Returns
an identical copy of the given object, which is disjoint in memory. NULL if the clone fails. Note that the returned object is SG_REF'ed

Definition at line 747 of file SGObject.cpp.

void compute_activations ( SGVector< float64_t > parameters,
CDynamicObjectArray *  layers 
)
virtual

Computes the activations of the neurons in this layer, results should be stored in m_activations. To be used only with non-input layers

Parameters
parameters: Vector of size get_num_parameters(), contains the parameters of the layer
layers: Array of layers that form the network that this layer is being used with

Reimplemented from CNeuralLinearLayer.

Definition at line 49 of file NeuralSoftmaxLayer.cpp.
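
Since the layer has linear neurons with a softmax activation, the activations stored in m_activations follow the standard softmax of the linear pre-activations \( z = Wx + b \) (stated here for reference, using the usual definition rather than restating the implementation):

\[ a_j = \frac{\exp(z_j)}{\sum_{k} \exp(z_k)} \]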

virtual void compute_activations ( SGMatrix< float64_t > inputs)
virtual inherited

Computes the activations of the neurons in this layer, results should be stored in m_activations. To be used only with input layers

Parameters
inputs: activations of the neurons in the previous layer, matrix of size previous_layer_num_neurons * batch_size

Reimplemented in CNeuralInputLayer.

Definition at line 153 of file NeuralLayer.h.

float64_t compute_contraction_term ( SGVector< float64_t > parameters)
virtual inherited

Computes

\[ \frac{\lambda}{N} \sum_{k=0}^{N-1} \left \| J(x_k) \right \|^2_F \]

where \( \left \| J(x_k)) \right \|^2_F \) is the Frobenius norm of the Jacobian of the activations of the hidden layer with respect to its inputs, \( N \) is the batch size, and \( \lambda \) is the contraction coefficient.

Should be implemented by layers that support being used as a hidden layer in a contractive autoencoder.

Parameters
parameters: Vector of size get_num_parameters(), contains the parameters of the layer

Reimplemented from CNeuralLayer.

Reimplemented in CNeuralLogisticLayer, and CNeuralRectifiedLinearLayer.

Definition at line 220 of file NeuralLinearLayer.cpp.

void compute_contraction_term_gradients ( SGVector< float64_t > parameters,
SGVector< float64_t > gradients 
)
virtual inherited

Adds the gradients of

\[ \frac{\lambda}{N} \sum_{k=0}^{N-1} \left \| J(x_k) \right \|^2_F \]

to the gradients vector, where \( \left \| J(x_k)) \right \|^2_F \) is the Frobenius norm of the Jacobian of the activations of the hidden layer with respect to its inputs, \( N \) is the batch size, and \( \lambda \) is the contraction coefficient.

Should be implemented by layers that support being used as a hidden layer in a contractive autoencoder.

Parameters
parameters: Vector of size get_num_parameters(), contains the parameters of the layer
gradients: Vector of size get_num_parameters(). Gradients of the contraction term will be added to it

Reimplemented in CNeuralLogisticLayer, and CNeuralRectifiedLinearLayer.

Definition at line 229 of file NeuralLinearLayer.cpp.

float64_t compute_error ( SGMatrix< float64_t > targets)
virtual

Computes the error between the layer's current activations and the given target activations. Should only be used with output layers

Parameters
targets: desired values for the layer's activations, matrix of size num_neurons*batch_size

Reimplemented from CNeuralLinearLayer.

Definition at line 88 of file NeuralSoftmaxLayer.cpp.
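
With softmax activations \( a_{ij} \) and targets \( t_{ij} \) (neuron \( i \), example \( j \) in a batch of size \( N \)), cross-entropy has the standard form below; the exact normalization applied by the implementation is not restated here:

\[ E = -\sum_{j=1}^{N} \sum_{i} t_{ij} \log(a_{ij}) \]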

void compute_gradients ( SGVector< float64_t > parameters,
SGMatrix< float64_t > targets,
CDynamicObjectArray *  layers,
SGVector< float64_t > parameter_gradients 
)
virtual inherited

Computes the gradients that are relevant to this layer:

  • The gradients of the error with respect to the layer's parameters
  • The gradients of the error with respect to the layer's inputs

Input gradients for a layer i that connects into this layer as input are added to m_layers.element(i).get_activation_gradients()

Deriving classes should make sure to account for dropout [Hinton, 2012] during gradient computations

Parameters
parameters: Vector of size get_num_parameters(), contains the parameters of the layer
targets: a matrix of size num_neurons*batch_size. If the layer is being used as an output layer, targets is the desired values for the layer's activations, otherwise it's an empty matrix
layers: Array of layers that form the network that this layer is being used with
parameter_gradients: Vector of size get_num_parameters(). To be filled with gradients of the error with respect to each parameter of the layer

Reimplemented from CNeuralLayer.

Definition at line 106 of file NeuralLinearLayer.cpp.
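
As background, for a linear layer with input matrix \( X \) (one column per example) and local gradients \( \frac{\partial E}{\partial z} \), standard backpropagation yields the parameter gradients below; this is a generic statement, not a line-by-line restatement of NeuralLinearLayer.cpp:

\[ \frac{\partial E}{\partial W} = \frac{\partial E}{\partial z} X^{\top}, \qquad \frac{\partial E}{\partial b} = \sum_{j=1}^{N} \frac{\partial E}{\partial z_{\cdot j}} \]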

void compute_local_gradients ( SGMatrix< float64_t > targets)
virtual

Computes the gradients of the error with respect to this layer's pre-activations. Results are stored in m_local_gradients.

This is used by compute_gradients() and can be overridden to implement layers with different activation functions

Parameters
targets: a matrix of size num_neurons*batch_size. If is_output is true, targets is the desired values for the layer's activations, otherwise it's an empty matrix

Reimplemented from CNeuralLinearLayer.

Definition at line 76 of file NeuralSoftmaxLayer.cpp.
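
A well-known consequence of pairing softmax activations with cross-entropy error is that the gradient with respect to the pre-activations takes the simple form below (up to the batch normalization factor used by the implementation):

\[ \frac{\partial E}{\partial z_{ij}} = a_{ij} - t_{ij} \]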

CSGObject * deep_copy ( ) const
virtual inherited

A deep copy. All the instance variables will also be copied.

Definition at line 231 of file SGObject.cpp.

void dropout_activations ( )
virtual inherited

Applies dropout [Hinton, 2012] to the activations of the layer

If is_training is true, fills m_dropout_mask with random values (according to dropout_prop) and multiplies it into the activations, otherwise, multiplies the activations by (1-dropout_prop) to compensate for using dropout during training

Definition at line 90 of file NeuralLayer.cpp.
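
In other words (summarizing the description above): during training \( a \leftarrow a \odot m \) with \( m_{ij} \sim \mathrm{Bernoulli}(1 - dropout\_prop) \), and outside training \( a \leftarrow (1 - dropout\_prop)\, a \).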

void enforce_max_norm ( SGVector< float64_t > parameters,
float64_t  max_norm 
)
virtual inherited

Constrains the weights of each neuron in the layer to have an L2 norm of at most max_norm

Parameters
parameters: pointer to the layer's parameters, array of size get_num_parameters()
max_norm: maximum allowable norm for a neuron's weights

Reimplemented from CNeuralLayer.

Definition at line 196 of file NeuralLinearLayer.cpp.
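
The constraint amounts to the usual max-norm projection applied to each neuron's incoming weight vector \( w_i \) (a generic sketch, not the literal implementation):

\[ w_i \leftarrow w_i \cdot \frac{max\_norm}{\left \| w_i \right \|_2} \quad \text{if } \left \| w_i \right \|_2 > max\_norm \]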

bool equals ( CSGObject * other,
float64_t  accuracy = 0.0,
bool  tolerant = false 
)
virtual inherited

Recursively compares the current SGObject to another one. Compares all registered numerical parameters, recursion upon complex (SGObject) parameters. Does not compare pointers!

May be overridden, but please do so with care! Should not be necessary in most cases.

Parameters
other: object to compare with
accuracy: accuracy to use for comparison (optional)
tolerant: allows lenient check on float equality (within accuracy)
Returns
true if all parameters were equal, false if not

Definition at line 651 of file SGObject.cpp.

T get ( const Tag< T > &  _tag) const
inherited

Getter for a class parameter, identified by a Tag. Throws an exception if the class does not have such a parameter.

Parameters
_tag: name and type information of parameter
Returns
value of the parameter identified by the input tag

Definition at line 367 of file SGObject.h.

T get ( const std::string &  name) const
inherited

Getter for a class parameter, identified by a name. Throws an exception if the class does not have such a parameter.

Parameters
name: name of the parameter
Returns
value of the parameter corresponding to the input name and type

Definition at line 388 of file SGObject.h.

virtual SGMatrix<float64_t> get_activation_gradients ( )
virtual inherited

Gets the layer's activation gradients, a matrix of size num_neurons * batch_size

Returns
layer's activation gradients

Definition at line 294 of file NeuralLayer.h.

virtual SGMatrix<float64_t> get_activations ( )
virtual inherited

Gets the layer's activations, a matrix of size num_neurons * batch_size

Returns
layer's activations

Definition at line 287 of file NeuralLayer.h.

SGIO * get_global_io ( )
inherited

get the io object

Returns
io object

Definition at line 268 of file SGObject.cpp.

Parallel * get_global_parallel ( )
inherited

get the parallel object

Returns
parallel object

Definition at line 310 of file SGObject.cpp.

Version * get_global_version ( )
inherited

get the version object

Returns
version object

Definition at line 323 of file SGObject.cpp.

virtual int32_t get_height ( )
virtual inherited

Returns the height assuming that the layer's activations are interpreted as images (i.e. for convolutional nets)

Returns
Height

Definition at line 265 of file NeuralLayer.h.

virtual SGVector<int32_t> get_input_indices ( )
virtual inherited

Gets the indices of the layers that are connected to this layer as input

Returns
layer's input indices

Definition at line 313 of file NeuralLayer.h.

virtual SGMatrix<float64_t> get_local_gradients ( )
virtual inherited

Gets the layer's local gradients, a matrix of size num_neurons * batch_size

Returns
layer's local gradients

Definition at line 304 of file NeuralLayer.h.

SGStringList< char > get_modelsel_names ( )
inherited
Returns
vector of names of all parameters which are registered for model selection

Definition at line 531 of file SGObject.cpp.

char * get_modsel_param_descr ( const char *  param_name)
inherited

Returns description of a given parameter string, if it exists. SG_ERROR otherwise

Parameters
param_name: name of the parameter
Returns
description of the parameter

Definition at line 555 of file SGObject.cpp.

index_t get_modsel_param_index ( const char *  param_name)
inherited

Returns index of model selection parameter with provided name

Parameters
param_name: name of model selection parameter
Returns
index of model selection parameter with provided name, -1 if there is no such

Definition at line 568 of file SGObject.cpp.

virtual const char* get_name ( ) const
virtual

Returns the name of the SGSerializable instance. It MUST BE the CLASS NAME without the prefixed `C'.

Returns
name of the SGSerializable

Reimplemented from CNeuralLinearLayer.

Definition at line 94 of file NeuralSoftmaxLayer.h.

virtual int32_t get_num_neurons ( )
virtual inherited

Gets the number of neurons in the layer

Returns
number of neurons in the layer

Definition at line 251 of file NeuralLayer.h.

virtual int32_t get_num_parameters ( )
virtual inherited

Gets the number of parameters used in this layer

Returns
number of parameters used in this layer

Definition at line 281 of file NeuralLayer.h.

virtual int32_t get_width ( )
virtual inherited

Returns the width assuming that the layer's activations are interpreted as images (i.e. for convolutional nets)

Returns
Width

Definition at line 258 of file NeuralLayer.h.

bool has ( const std::string &  name) const
inherited

Checks if object has a class parameter identified by a name.

Parameters
name: name of the parameter
Returns
true if the parameter exists with the input name

Definition at line 289 of file SGObject.h.

bool has ( const Tag< T > &  tag) const
inherited

Checks if object has a class parameter identified by a Tag.

Parameters
tag: tag of the parameter containing name and type information
Returns
true if the parameter exists with the input tag

Definition at line 301 of file SGObject.h.

bool has ( const std::string &  name) const
inherited

Checks if a type exists for a class parameter identified by a name.

Parameters
name: name of the parameter
Returns
true if the parameter exists with the input name and type

Definition at line 312 of file SGObject.h.

void initialize_neural_layer ( CDynamicObjectArray *  layers,
SGVector< int32_t >  input_indices 
)
virtual inherited

Initializes the layer, computes the number of parameters needed for the layer

Parameters
layers: Array of layers that form the network that this layer is being used with
input_indices: Indices of the layers that are connected to this layer as input

Reimplemented from CNeuralLayer.

Definition at line 51 of file NeuralLinearLayer.cpp.

void initialize_parameters ( SGVector< float64_t > parameters,
SGVector< bool >  parameter_regularizable,
float64_t  sigma 
)
virtual inherited

Initializes the layer's parameters. The layer should fill the given arrays with the initial value for its parameters

Parameters
parameters: Vector of size get_num_parameters()
parameter_regularizable: Vector of size get_num_parameters(). This controls which of the layer's parameters are subject to regularization, i.e. to turn off regularization for parameter i, set parameter_regularizable[i] = false. This is usually used to turn off regularization for bias parameters.
sigma: standard deviation of the gaussian used to randomly initialize the parameters

Reimplemented from CNeuralLayer.

Definition at line 61 of file NeuralLinearLayer.cpp.
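
In other words, each parameter is drawn from a zero-mean Gaussian, \( \theta_i \sim \mathcal{N}(0, \sigma^2) \), with parameter_regularizable marking which entries are later subject to regularization.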

bool is_generic ( EPrimitiveType *  generic) const
virtual inherited

If the SGSerializable is a class template then TRUE will be returned and GENERIC is set to the type of the generic.

Parameters
genericset to the type of the generic if returning TRUE
Returns
TRUE if a class template.

Definition at line 329 of file SGObject.cpp.

virtual bool is_input ( )
virtual inherited

returns true if the layer is an input layer. Input layers are the root layers of a network, that is, they don't receive signals from other layers; they receive signals from the input features to the network.

Local and activation gradients are not computed for input layers

Reimplemented in CNeuralInputLayer.

Definition at line 127 of file NeuralLayer.h.

bool load_serializable ( CSerializableFile *  file,
const char *  prefix = "" 
)
virtual inherited

Load this object from file. If loading fails (returning FALSE), this object will contain inconsistent data and should not be used!

Parameters
file: where to load from
prefix: prefix for members
Returns
TRUE if done, otherwise FALSE

Definition at line 402 of file SGObject.cpp.

void load_serializable_post ( ) throw (ShogunException)
protected virtual inherited

Can (optionally) be overridden to post-initialize some member variables which are not PARAMETER::ADD'ed. Make sure that at first the overridden method BASE_CLASS::LOAD_SERIALIZABLE_POST is called.

Exceptions
ShogunException: will be thrown if an error occurs.

Reimplemented in CKernel, CWeightedDegreePositionStringKernel, CList, CAlphabet, CLinearHMM, CGaussianKernel, CInverseMultiQuadricKernel, CCircularKernel, and CExponentialKernel.

Definition at line 459 of file SGObject.cpp.

void load_serializable_pre ( ) throw (ShogunException)
protected virtual inherited

Can (optionally) be overridden to pre-initialize some member variables which are not PARAMETER::ADD'ed. Make sure that at first the overridden method BASE_CLASS::LOAD_SERIALIZABLE_PRE is called.

Exceptions
ShogunException: will be thrown if an error occurs.

Reimplemented in CDynamicArray< T >, CDynamicArray< float64_t >, CDynamicArray< float32_t >, CDynamicArray< int32_t >, CDynamicArray< char >, CDynamicArray< bool >, and CDynamicObjectArray.

Definition at line 454 of file SGObject.cpp.

bool parameter_hash_changed ( )
virtual inherited
Returns
whether parameter combination has changed since last update

Definition at line 295 of file SGObject.cpp.

void print_modsel_params ( )
inherited

prints all parameter registered for model selection and their type

Definition at line 507 of file SGObject.cpp.

void print_serializable ( const char *  prefix = "")
virtual inherited

prints registered parameters out

Parameters
prefix: prefix for members

Definition at line 341 of file SGObject.cpp.

void register_param ( Tag< T > &  _tag,
const T &  value 
)
protected inherited

Registers a class parameter which is identified by a tag. This enables the parameter to be modified by set() and retrieved by get(). Parameters can be registered in the constructor of the class.

Parameters
_tag: name and type information of parameter
value: value of the parameter

Definition at line 439 of file SGObject.h.

void register_param ( const std::string &  name,
const T &  value 
)
protected inherited

Registers a class parameter which is identified by a name. This enables the parameter to be modified by set() and retrieved by get(). Parameters can be registered in the constructor of the class.

Parameters
name: name of the parameter
value: value of the parameter along with type information

Definition at line 452 of file SGObject.h.

bool save_serializable ( CSerializableFile *  file,
const char *  prefix = "" 
)
virtual inherited

Save this object to file.

Parameters
file: where to save the object; will be closed on return if prefix is an empty string.
prefix: prefix for members
Returns
TRUE if done, otherwise FALSE

Definition at line 347 of file SGObject.cpp.

void save_serializable_post ( ) throw (ShogunException)
protected virtual inherited

Can (optionally) be overridden to post-initialize some member variables which are not PARAMETER::ADD'ed. Make sure that at first the overridden method BASE_CLASS::SAVE_SERIALIZABLE_POST is called.

Exceptions
ShogunException: will be thrown if an error occurs.

Reimplemented in CKernel.

Definition at line 469 of file SGObject.cpp.

void save_serializable_pre ( ) throw (ShogunException)
protected virtual inherited

Can (optionally) be overridden to pre-initialize some member variables which are not PARAMETER::ADD'ed. Make sure that at first the overridden method BASE_CLASS::SAVE_SERIALIZABLE_PRE is called.

Exceptions
ShogunExceptionwill be thrown if an error occurs.

Reimplemented in CKernel, CDynamicArray< T >, CDynamicArray< float64_t >, CDynamicArray< float32_t >, CDynamicArray< int32_t >, CDynamicArray< char >, CDynamicArray< bool >, and CDynamicObjectArray.

Definition at line 464 of file SGObject.cpp.

void set ( const Tag< T > &  _tag,
const T &  value 
)
inherited

Setter for a class parameter, identified by a Tag. Throws an exception if the class does not have such a parameter.

Parameters
_tag: name and type information of parameter
value: value of the parameter

Definition at line 328 of file SGObject.h.

void set ( const std::string &  name,
const T &  value 
)
inherited

Setter for a class parameter, identified by a name. Throws an exception if the class does not have such a parameter.

Parameters
name: name of the parameter
value: value of the parameter along with type information

Definition at line 354 of file SGObject.h.
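
A minimal sketch of the name-based parameter interface; the parameter name "num_neurons" is used purely for illustration and may not be the actual registered name:

    CNeuralSoftmaxLayer* layer = new CNeuralSoftmaxLayer(3);

    if (layer->has("num_neurons"))                       // hypothetical parameter name
    {
        int32_t n = layer->get<int32_t>("num_neurons");  // read the current value
        layer->set("num_neurons", n);                    // write it back unchanged
    }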

void set_batch_size ( int32_t  batch_size)
virtual inherited

Sets the batch_size and allocates memory for m_activations and m_input_gradients accordingly. Must be called before forward or backward propagation is performed

Parameters
batch_size: number of training/test cases the network is currently working with

Reimplemented in CNeuralConvolutionalLayer.

Definition at line 75 of file NeuralLayer.cpp.

void set_generic ( )
inherited

set generic type to T

Explicit specializations for each supported primitive type are defined at lines 74-144 of SGObject.cpp.

void set_global_io ( SGIO *  io)
inherited

set the io object

Parameters
io: io object to use

Definition at line 261 of file SGObject.cpp.

void set_global_parallel ( Parallel *  parallel)
inherited

set the parallel object

Parameters
parallel: parallel object to use

Definition at line 274 of file SGObject.cpp.

void set_global_version ( Version *  version)
inherited

set the version object

Parameters
version: version object to use

Definition at line 316 of file SGObject.cpp.

virtual void set_num_neurons ( int32_t  num_neurons)
virtual inherited

Sets the number of neurons in the layer

Parameters
num_neurons: number of neurons in the layer

Definition at line 271 of file NeuralLayer.h.

CSGObject * shallow_copy ( ) const
virtual inherited

A shallow copy. All the SGObject instance variables will be simply assigned and SG_REF-ed.

Reimplemented in CGaussianKernel.

Definition at line 225 of file SGObject.cpp.

void unset_generic ( )
inherited

unset generic type

this has to be called in classes specializing a template class

Definition at line 336 of file SGObject.cpp.

void update_parameter_hash ( )
virtual inherited

Updates the hash of current parameter combination

Definition at line 281 of file SGObject.cpp.

Member Data Documentation

ENLAutoencoderPosition autoencoder_position
inherited

For autoencoders, specifies the position of the layer in the autoencoder, i.e. an encoding layer or a decoding layer. Default value is NLAP_NONE

Definition at line 343 of file NeuralLayer.h.

float64_t contraction_coefficient
inherited

For hidden layers in contractive autoencoders [Rifai, 2011] a term:

\[ \frac{\lambda}{N} \sum_{k=0}^{N-1} \left \| J(x_k) \right \|^2_F \]

is added to the error, where \( \left \| J(x_k)) \right \|^2_F \) is the Frobenius norm of the Jacobian of the activations of the hidden layer with respect to its inputs, \( N \) is the batch size, and \( \lambda \) is the contraction coefficient.

Default value is 0.0.

Definition at line 338 of file NeuralLayer.h.

float64_t dropout_prop
inherited

probability of dropping out a neuron in the layer

Definition at line 327 of file NeuralLayer.h.

SGIO* io
inherited

io

Definition at line 537 of file SGObject.h.

bool is_training
inherited

Should be true if the layer is currently being used during training. Initial value is false

Definition at line 324 of file NeuralLayer.h.

SGMatrix<float64_t> m_activation_gradients
protected inherited

gradients of the error with respect to the layer's inputs, size previous_layer_num_neurons * batch_size

Definition at line 381 of file NeuralLayer.h.

SGMatrix<float64_t> m_activations
protected inherited

activations of the neurons in this layer, size num_neurons * batch_size

Definition at line 376 of file NeuralLayer.h.

int32_t m_batch_size
protected inherited

number of training/test cases the network is currently working with

Definition at line 371 of file NeuralLayer.h.

SGMatrix<bool> m_dropout_mask
protected inherited

binary mask that determines whether a neuron will be kept or dropped out during the current iteration of training, size num_neurons * batch_size

Definition at line 393 of file NeuralLayer.h.

Parameter* m_gradient_parameters
inherited

parameters wrt which we can compute gradients

Definition at line 552 of file SGObject.h.

uint32_t m_hash
inherited

Hash of parameter values

Definition at line 555 of file SGObject.h.

int32_t m_height
protected inherited

Height of the image (if the layer's activations are to be interpreted as images). Default value is 1

Definition at line 357 of file NeuralLayer.h.

SGVector<int32_t> m_input_indices
protected inherited

Indices of the layers that are connected to this layer as input

Definition at line 363 of file NeuralLayer.h.

SGVector<int32_t> m_input_sizes
protected inherited

Number of neurons in the layers that are connected to this layer as input

Definition at line 368 of file NeuralLayer.h.

SGMatrix<float64_t> m_local_gradients
protected inherited

gradients of the error with respect to the layer's pre-activations; usually used as a buffer when computing the input gradients, size num_neurons * batch_size

Definition at line 387 of file NeuralLayer.h.

Parameter* m_model_selection_parameters
inherited

model selection parameters

Definition at line 549 of file SGObject.h.

int32_t m_num_neurons
protected inherited

Number of neurons in this layer

Definition at line 347 of file NeuralLayer.h.

int32_t m_num_parameters
protected inherited

Number of parameters in this layer

Definition at line 360 of file NeuralLayer.h.

Parameter* m_parameters
inherited

parameters

Definition at line 546 of file SGObject.h.

int32_t m_width
protected inherited

Width of the image (if the layer's activations are to be interpreted as images). Default value is m_num_neurons

Definition at line 352 of file NeuralLayer.h.

Parallel* parallel
inherited

parallel

Definition at line 540 of file SGObject.h.

Version* version
inherited

version

Definition at line 543 of file SGObject.h.


The documentation for this class was generated from the following files:

NeuralSoftmaxLayer.h
NeuralSoftmaxLayer.cpp