SHOGUN  4.1.0
NeuralLinearLayer.h
/*
 * Copyright (c) 2014, Shogun Toolbox Foundation
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * 3. Neither the name of the copyright holder nor the names of its
 * contributors may be used to endorse or promote products derived from this
 * software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 * Written (W) 2014 Khaled Nasr
 */

#ifndef __NEURALLINEARLAYER_H__
#define __NEURALLINEARLAYER_H__

#include <shogun/lib/common.h>
#include <shogun/neuralnets/NeuralLayer.h>

namespace shogun
{

/** @brief Neural layer with linear neurons, with an identity activation
 * function. Can be used as a hidden layer or as an output layer.
 */
class CNeuralLinearLayer : public CNeuralLayer
{
public:
	/** default constructor */
	CNeuralLinearLayer();

	/** Constructs the layer
	 * @param num_neurons number of neurons in this layer
	 */
	CNeuralLinearLayer(int32_t num_neurons);

	virtual ~CNeuralLinearLayer() {}

	/** Initializes the layer and specifies, via input_indices, which layers
	 * in the network it takes its input from
	 */
	virtual void initialize_neural_layer(CDynamicObjectArray* layers,
		SGVector<int32_t> input_indices);

	/** Initializes the layer's parameters (weights and biases) */
	virtual void initialize_parameters(SGVector<float64_t> parameters,
		SGVector<bool> parameter_regularizable,
		float64_t sigma);

	/** Computes the layer's activations from the activations of its input layers */
	virtual void compute_activations(SGVector<float64_t> parameters,
		CDynamicObjectArray* layers);

	/** Computes the gradients of the error with respect to the layer's
	 * parameters and propagates gradients to its input layers
	 */
	virtual void compute_gradients(SGVector<float64_t> parameters,
		SGMatrix<float64_t> targets,
		CDynamicObjectArray* layers,
		SGVector<float64_t> parameter_gradients);

	/** Computes the error between the layer's activations and the given targets */
	virtual float64_t compute_error(SGMatrix<float64_t> targets);

	/** Constrains the incoming weights of each neuron so that their norm
	 * does not exceed max_norm
	 */
	virtual void enforce_max_norm(SGVector<float64_t> parameters,
		float64_t max_norm);

	/** Computes the contraction regularization term */
	virtual float64_t compute_contraction_term(SGVector<float64_t> parameters);

	/** Adds the gradients of the contraction term to the given gradients */
	virtual void compute_contraction_term_gradients(
		SGVector<float64_t> parameters, SGVector<float64_t> gradients);

	/** Computes the local gradients used during backpropagation */
	virtual void compute_local_gradients(SGMatrix<float64_t> targets);

	virtual const char* get_name() const { return "NeuralLinearLayer"; }
};

}
#endif
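
The defining property of this layer is its identity activation: compute_activations() applies an affine map to the activations of the layers it takes input from, which is why the same class can serve as a hidden layer or as a regression output layer. The following stand-alone sketch only illustrates that computation. linear_forward() is a hypothetical helper written for this page, not part of the toolbox, and the real layer reads its weights and biases out of the flat parameters vector rather than taking them as separate arguments.

#include <shogun/lib/common.h>
#include <shogun/lib/SGMatrix.h>
#include <shogun/lib/SGVector.h>

using namespace shogun;

/* Hypothetical helper, for illustration only: the affine map a linear layer applies.
 * W: num_neurons x num_inputs, b: num_neurons, X: num_inputs x batch_size. */
SGMatrix<float64_t> linear_forward(SGMatrix<float64_t> W,
	SGVector<float64_t> b, SGMatrix<float64_t> X)
{
	SGMatrix<float64_t> A(W.num_rows, X.num_cols);
	for (index_t j = 0; j < X.num_cols; j++)        // over examples in the batch
	{
		for (index_t i = 0; i < W.num_rows; i++)    // over neurons in the layer
		{
			float64_t sum = b[i];
			for (index_t k = 0; k < W.num_cols; k++)
				sum += W(i, k) * X(k, j);
			A(i, j) = sum;                          // identity activation: a = W*x + b
		}
	}
	return A;
}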
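
For orientation, here is a minimal sketch of how a linear layer is typically assembled into a network with the toolbox's neural-network classes. The network-level calls (CNeuralNetwork, CNeuralInputLayer, quick_connect(), initialize_neural_network()) are assumed to match this release, and the zero-filled toy data is purely illustrative.

#include <shogun/base/init.h>
#include <shogun/features/DenseFeatures.h>
#include <shogun/labels/RegressionLabels.h>
#include <shogun/lib/DynamicObjectArray.h>
#include <shogun/neuralnets/NeuralInputLayer.h>
#include <shogun/neuralnets/NeuralLinearLayer.h>
#include <shogun/neuralnets/NeuralNetwork.h>

using namespace shogun;

int main()
{
	init_shogun_with_defaults();

	// Illustrative toy data: 100 examples with 4 features and scalar targets.
	SGMatrix<float64_t> X(4, 100);
	SGVector<float64_t> y(100);
	X.zero();
	y.zero();

	CDenseFeatures<float64_t>* features = new CDenseFeatures<float64_t>(X);
	CRegressionLabels* labels = new CRegressionLabels(y);

	// Stack the layers: input -> hidden linear layer -> linear output layer.
	CDynamicObjectArray* layers = new CDynamicObjectArray();
	layers->append_element(new CNeuralInputLayer(4));
	layers->append_element(new CNeuralLinearLayer(8));
	layers->append_element(new CNeuralLinearLayer(1));

	CNeuralNetwork* network = new CNeuralNetwork(layers);
	network->quick_connect();                  // connect each layer to the one before it
	network->initialize_neural_network(0.01);  // assumed name in this release; Gaussian init with sigma=0.01

	network->set_labels(labels);
	network->train(features);

	SG_UNREF(network);
	exit_shogun();
	return 0;
}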
