AdamUpdater.h
/*
 * Copyright (c) The Shogun Machine Learning Toolbox
 * Written (w) 2015 Wu Lin
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * The views and conclusions contained in the software and documentation are those
 * of the authors and should not be interpreted as representing official policies,
 * either expressed or implied, of the Shogun Development Team.
 *
 */

#ifndef AdamUpdater_H
#define AdamUpdater_H

#include <shogun/lib/config.h>
#include <shogun/optimization/DescendUpdaterWithCorrection.h>

namespace shogun
{
/* The class implements the Adam method.
 *
 * Reference: Kingma, Diederik, and Jimmy Ba. "Adam: A method for
 * stochastic optimization." arXiv preprint arXiv:1412.6980 (2014).
 */
class AdamUpdater: public DescendUpdaterWithCorrection
{
public:
	/* Constructor */
	AdamUpdater();

	/* Constructor
	 * @param learning_rate learning rate
	 * @param epsilon regularization term used to avoid division by zero
	 * @param first_moment_decay_factor decay factor of the first moment
	 * @param second_moment_decay_factor decay factor of the second moment
	 */
	AdamUpdater(float64_t learning_rate, float64_t epsilon,
		float64_t first_moment_decay_factor,
		float64_t second_moment_decay_factor);

	/* Destructor */
	virtual ~AdamUpdater();

	/* Set the learning rate */
	virtual void set_learning_rate(float64_t learning_rate);

	/* Set epsilon */
	virtual void set_epsilon(float64_t epsilon);

	/* Set the decay factor of the first moment */
	virtual void set_first_moment_decay_factor(float64_t decay_factor);

	/* Set the decay factor of the second moment */
	virtual void set_second_moment_decay_factor(float64_t decay_factor);

	/* Return the name of the class */
	virtual const char* get_name() const { return "AdamUpdater"; }

	/* Update the given variable in place, using the raw negative descend
	 * direction (for example, the gradient of the cost function)
	 */
	virtual void update_variable(SGVector<float64_t> variable_reference,
		SGVector<float64_t> raw_negative_descend_direction, float64_t learning_rate);

protected:
	/* Get the negative descend direction for the variable at index idx */
	virtual float64_t get_negative_descend_direction(float64_t variable,
		float64_t gradient, index_t idx, float64_t learning_rate);

	/* learning_rate at iteration */
	float64_t m_log_learning_rate;

	/* epsilon */
	float64_t m_epsilon;

	/* counter of iteration */
	int64_t m_iteration_counter;

	/* decay_factor in first moment */
	float64_t m_decay_factor_first_moment;

	/* decay_factor in second moment */
	float64_t m_decay_factor_second_moment;

	/* weighted factor in logarithmic domain */
	float64_t m_log_scale_pre_iteration;

	/* first moment of gradient */
	SGVector<float64_t> m_gradient_first_moment;

	/* second moment of gradient */
	SGVector<float64_t> m_gradient_second_moment;

private:
	/* Init */
	void init();
};

}
#endif
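
For reference, the update rule behind this class is the Adam step of Kingma & Ba (2014). The standalone sketch below shows one such step over a dense parameter vector; all names (adam_step, theta, m, v) are illustrative and do not mirror the class internals, which track the bias-corrected step size in the log domain (see m_log_scale_pre_iteration).

#include <cmath>
#include <cstddef>
#include <vector>

// One Adam step over a parameter vector. beta1/beta2 correspond to the
// first/second moment decay factors of AdamUpdater; t is the 1-based
// iteration counter. This is a plain restatement of the published
// algorithm, not the class implementation.
void adam_step(std::vector<double>& theta, const std::vector<double>& grad,
	std::vector<double>& m, std::vector<double>& v, long t,
	double alpha = 0.001, double beta1 = 0.9, double beta2 = 0.999,
	double eps = 1e-8)
{
	// Bias-corrected step size, computed once per iteration.
	const double alpha_t =
		alpha * std::sqrt(1.0 - std::pow(beta2, t)) / (1.0 - std::pow(beta1, t));
	for (std::size_t i = 0; i < theta.size(); ++i)
	{
		m[i] = beta1 * m[i] + (1.0 - beta1) * grad[i];           // first moment
		v[i] = beta2 * v[i] + (1.0 - beta2) * grad[i] * grad[i]; // second moment
		theta[i] -= alpha_t * m[i] / (std::sqrt(v[i]) + eps);    // descend step
	}
}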
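A minimal usage sketch, assuming Shogun 4.2's optimization headers and that update_variable expects the raw gradient of the cost as its second argument. The toy objective f(w) = 0.5 * ||w||^2, whose gradient is w itself, is chosen purely for illustration.

#include <shogun/base/init.h>
#include <shogun/lib/SGVector.h>
#include <shogun/optimization/AdamUpdater.h>

using namespace shogun;

int main()
{
	init_shogun_with_defaults();

	SGVector<float64_t> w(2);
	w.set_const(5.0);

	// learning_rate, epsilon, first and second moment decay factors
	AdamUpdater updater(0.01, 1e-8, 0.9, 0.999);

	for (int64_t iter = 0; iter < 1000; iter++)
	{
		// Gradient of f(w) = 0.5 * ||w||^2 is w itself.
		SGVector<float64_t> grad = w.clone();
		// Assumed semantics: the variable is updated in place by
		// subtracting the Adam-corrected descend direction.
		updater.update_variable(w, grad, 0.01);
	}

	exit_shogun();
	return 0;
}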