SHOGUN
4.1.0
Main Page
Related Pages
Modules
Classes
Files
File List
File Members
All
Classes
Namespaces
Files
Functions
Variables
Typedefs
Enumerations
Enumerator
Friends
Macros
Modules
Pages
src
shogun
optimization
AdaGradUpdater.h
Go to the documentation of this file.
1
/*
2
* Copyright (c) The Shogun Machine Learning Toolbox
3
* Written (w) 2015 Wu Lin
4
* All rights reserved.
5
*
6
* Redistribution and use in source and binary forms, with or without
7
* modification, are permitted provided that the following conditions are met:
8
*
9
* 1. Redistributions of source code must retain the above copyright notice, this
10
* list of conditions and the following disclaimer.
11
* 2. Redistributions in binary form must reproduce the above copyright notice,
12
* this list of conditions and the following disclaimer in the documentation
13
* and/or other materials provided with the distribution.
14
*
15
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
16
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
17
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
18
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
19
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
20
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
21
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
22
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
24
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25
*
26
* The views and conclusions contained in the software and documentation are those
27
* of the authors and should not be interpreted as representing official policies,
28
* either expressed or implied, of the Shogun Development Team.
29
*
30
*/
31
32
#ifndef ADAGRADUPDATER_H
#define ADAGRADUPDATER_H

#include <shogun/optimization/DescendUpdaterWithCorrection.h>
#include <shogun/optimization/LearningRate.h>

namespace shogun
{

/** The class implements the AdaGrad update method for gradient-based
 * optimization (adaptive, per-coordinate step sizes derived from
 * accumulated gradient information).
 *
 * NOTE(review): the actual update rule is implemented in
 * AdaGradUpdater.cpp, which is not visible here; comments on members
 * below are hedged accordingly.
 */
class AdaGradUpdater: public DescendUpdaterWithCorrection
{
public:
	/** Default constructor */
	AdaGradUpdater();

	/** Constructor
	 *
	 * @param learning_rate the built-in learning rate of the updater
	 * @param epsilon small positive constant — presumably added to the
	 * accumulated gradient term for numerical stability; confirm in
	 * AdaGradUpdater.cpp
	 */
	AdaGradUpdater(float64_t learning_rate, float64_t epsilon);

	/** Destructor (virtual: the class is used polymorphically via
	 * DescendUpdaterWithCorrection) */
	virtual ~AdaGradUpdater();

	/** Set the built-in learning rate used by this updater
	 *
	 * @param learning_rate the learning rate to use
	 */
	virtual void set_learning_rate(float64_t learning_rate);

	/** Set the epsilon constant
	 *
	 * @param epsilon the epsilon value to use
	 */
	virtual void set_epsilon(float64_t epsilon);

	/** Serialize this updater's state into the given context
	 * (see CMinimizerContext, which handles variable (de)serialization
	 * for the optimization framework)
	 *
	 * @param context context to store variables into
	 */
	virtual void update_context(CMinimizerContext* context);

	/** Restore this updater's state from the given context
	 *
	 * @param context context to load variables from
	 */
	virtual void load_from_context(CMinimizerContext* context);

	/** Update the target variable in place given a raw negative descend
	 * direction and a learning rate
	 *
	 * @param variable_reference reference to the variable being optimized
	 * (updated in place)
	 * @param raw_negative_descend_direction raw negative descend direction
	 * (e.g. the gradient, before AdaGrad scaling)
	 * @param learning_rate learning rate for this update step
	 */
	virtual void update_variable(SGVector<float64_t> variable_reference,
		SGVector<float64_t> raw_negative_descend_direction,
		float64_t learning_rate);

protected:
	/** Compute the negative descend direction for a single coordinate
	 *
	 * @param variable current value of the coordinate
	 * @param gradient gradient at that coordinate
	 * @param idx index of the coordinate
	 * @param learning_rate learning rate for this step
	 * @return the negative descend direction for coordinate idx
	 */
	virtual float64_t get_negative_descend_direction(float64_t variable,
		float64_t gradient, index_t idx, float64_t learning_rate);

	/** Built-in learning rate of the updater
	 * (name kept as-is: "build_in" is an existing protected member —
	 * renaming would break subclasses) */
	float64_t m_build_in_learning_rate;

	/** Epsilon constant — presumably a stability term in the AdaGrad
	 * denominator; confirm in AdaGradUpdater.cpp */
	float64_t m_epsilon;

	/** Per-coordinate accumulated gradient statistics — presumably the
	 * running sum of squared gradients that AdaGrad divides by; confirm
	 * in AdaGradUpdater.cpp */
	SGVector<float64_t> m_gradient_accuracy;

private:
	/** Initialize members to their default values */
	void init();
};

}
#endif
shogun::AdaGradUpdater
The class implements the AdaGrad method.
Definition:
AdaGradUpdater.h:56
shogun::AdaGradUpdater::load_from_context
virtual void load_from_context(CMinimizerContext *context)
Definition:
AdaGradUpdater.cpp:87
index_t
int32_t index_t
Definition:
common.h:62
shogun::CMinimizerContext
The class is used to serialize and deserialize variables for the optimization framework.
Definition:
MinimizerContext.h:45
DescendUpdaterWithCorrection.h
shogun::AdaGradUpdater::m_epsilon
float64_t m_epsilon
Definition:
AdaGradUpdater.h:137
shogun::AdaGradUpdater::AdaGradUpdater
AdaGradUpdater()
Definition:
AdaGradUpdater.cpp:36
shogun::epsilon
static const float64_t epsilon
Definition:
libbmrm.cpp:25
shogun::AdaGradUpdater::get_negative_descend_direction
virtual float64_t get_negative_descend_direction(float64_t variable, float64_t gradient, index_t idx, float64_t learning_rate)
Definition:
AdaGradUpdater.cpp:98
shogun::SGVector< float64_t >
float64_t
double float64_t
Definition:
common.h:50
shogun::AdaGradUpdater::update_variable
virtual void update_variable(SGVector< float64_t > variable_reference, SGVector< float64_t > raw_negative_descend_direction, float64_t learning_rate)
Definition:
AdaGradUpdater.cpp:108
shogun::AdaGradUpdater::~AdaGradUpdater
virtual ~AdaGradUpdater()
Definition:
AdaGradUpdater.cpp:64
shogun::AdaGradUpdater::m_gradient_accuracy
SGVector< float64_t > m_gradient_accuracy
Definition:
AdaGradUpdater.h:140
shogun::AdaGradUpdater::m_build_in_learning_rate
float64_t m_build_in_learning_rate
Definition:
AdaGradUpdater.h:134
shogun
All classes and functions are contained in the shogun namespace
Definition:
class_list.h:18
LearningRate.h
shogun::DescendUpdaterWithCorrection
This is a base class for descend update with descend based correction.
Definition:
DescendUpdaterWithCorrection.h:52
shogun::AdaGradUpdater::set_learning_rate
virtual void set_learning_rate(float64_t learning_rate)
Definition:
AdaGradUpdater.cpp:50
shogun::AdaGradUpdater::set_epsilon
virtual void set_epsilon(float64_t epsilon)
Definition:
AdaGradUpdater.cpp:57
shogun::AdaGradUpdater::update_context
virtual void update_context(CMinimizerContext *context)
Definition:
AdaGradUpdater.cpp:75
SHOGUN
Machine Learning Toolbox - Documentation