SHOGUN
4.1.0
Main Page
Related Pages
Modules
Classes
Files
File List
File Members
All
Classes
Namespaces
Files
Functions
Variables
Typedefs
Enumerations
Enumerator
Friends
Macros
Modules
Pages
src
shogun
optimization
AdaDeltaUpdater.h
Go to the documentation of this file.
1
/*
2
* Copyright (c) The Shogun Machine Learning Toolbox
3
* Written (w) 2015 Wu Lin
4
* All rights reserved.
5
*
6
* Redistribution and use in source and binary forms, with or without
7
* modification, are permitted provided that the following conditions are met:
8
*
9
* 1. Redistributions of source code must retain the above copyright notice, this
10
* list of conditions and the following disclaimer.
11
* 2. Redistributions in binary form must reproduce the above copyright notice,
12
* this list of conditions and the following disclaimer in the documentation
13
* and/or other materials provided with the distribution.
14
*
15
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
16
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
17
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
18
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
19
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
20
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
21
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
22
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
23
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
24
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25
*
26
* The views and conclusions contained in the software and documentation are those
27
* of the authors and should not be interpreted as representing official policies,
28
* either expressed or implied, of the Shogun Development Team.
29
*
30
*/
31
32
#ifndef ADADELTAUPDATER_H
33
#define ADADELTAUPDATER_H
34
#include <
shogun/optimization/DescendUpdaterWithCorrection.h
>
35
#include <
shogun/optimization/LearningRate.h
>
36
namespace
shogun
37
{
58
class
AdaDeltaUpdater
:
public
DescendUpdaterWithCorrection
59
{
60
public
:
61
/* Constructor */
62
AdaDeltaUpdater
();
63
70
AdaDeltaUpdater
(
float64_t
learning_rate,
float64_t
epsilon
,
float64_t
decay_factor);
71
72
/* Destructor */
73
virtual
~AdaDeltaUpdater
();
74
79
virtual
void
set_learning_rate
(
float64_t
learning_rate);
80
85
virtual
void
set_epsilon
(
float64_t
epsilon);
86
91
virtual
void
set_decay_factor
(
float64_t
decay_factor);
92
100
virtual
void
update_context
(
CMinimizerContext
* context);
101
110
virtual
void
load_from_context
(
CMinimizerContext
* context);
111
121
virtual
void
update_variable
(
SGVector<float64_t>
variable_reference,
122
SGVector<float64_t>
raw_negative_descend_direction,
float64_t
learning_rate);
123
124
protected
:
137
virtual
float64_t
get_negative_descend_direction
(
float64_t
variable,
138
float64_t
gradient,
index_t
idx,
float64_t
learning_rate);
139
141
float64_t
m_build_in_learning_rate
;
142
144
float64_t
m_epsilon
;
145
147
float64_t
m_decay_factor
;
148
150
SGVector<float64_t>
m_gradient_accuracy
;
151
153
SGVector<float64_t>
m_gradient_delta_accuracy
;
154
private
:
156
void
init();
157
};
158
159
}
160
#endif
shogun::AdaDeltaUpdater::set_decay_factor
virtual void set_decay_factor(float64_t decay_factor)
Definition:
AdaDeltaUpdater.cpp:65
shogun::AdaDeltaUpdater::m_gradient_delta_accuracy
SGVector< float64_t > m_gradient_delta_accuracy
Definition:
AdaDeltaUpdater.h:153
index_t
int32_t index_t
Definition:
common.h:62
shogun::CMinimizerContext
The class is used to serialize and deserialize variables for the optimization framework.
Definition:
MinimizerContext.h:45
shogun::AdaDeltaUpdater::load_from_context
virtual void load_from_context(CMinimizerContext *context)
Definition:
AdaDeltaUpdater.cpp:106
DescendUpdaterWithCorrection.h
shogun::AdaDeltaUpdater::AdaDeltaUpdater
AdaDeltaUpdater()
Definition:
AdaDeltaUpdater.cpp:36
shogun::epsilon
static const float64_t epsilon
Definition:
libbmrm.cpp:25
shogun::AdaDeltaUpdater::m_epsilon
float64_t m_epsilon
Definition:
AdaDeltaUpdater.h:144
shogun::SGVector< float64_t >
float64_t
double float64_t
Definition:
common.h:50
shogun::AdaDeltaUpdater::~AdaDeltaUpdater
virtual ~AdaDeltaUpdater()
Definition:
AdaDeltaUpdater.cpp:73
shogun::AdaDeltaUpdater::m_decay_factor
float64_t m_decay_factor
Definition:
AdaDeltaUpdater.h:147
shogun::AdaDeltaUpdater::update_context
virtual void update_context(CMinimizerContext *context)
Definition:
AdaDeltaUpdater.cpp:86
shogun::AdaDeltaUpdater::set_epsilon
virtual void set_epsilon(float64_t epsilon)
Definition:
AdaDeltaUpdater.cpp:58
shogun
All classes and functions are contained in the shogun namespace.
Definition:
class_list.h:18
LearningRate.h
shogun::DescendUpdaterWithCorrection
This is a base class for descend update with descend based correction.
Definition:
DescendUpdaterWithCorrection.h:52
shogun::AdaDeltaUpdater::m_build_in_learning_rate
float64_t m_build_in_learning_rate
Definition:
AdaDeltaUpdater.h:141
shogun::AdaDeltaUpdater::update_variable
virtual void update_variable(SGVector< float64_t > variable_reference, SGVector< float64_t > raw_negative_descend_direction, float64_t learning_rate)
Definition:
AdaDeltaUpdater.cpp:140
shogun::AdaDeltaUpdater::get_negative_descend_direction
virtual float64_t get_negative_descend_direction(float64_t variable, float64_t gradient, index_t idx, float64_t learning_rate)
Definition:
AdaDeltaUpdater.cpp:124
shogun::AdaDeltaUpdater::m_gradient_accuracy
SGVector< float64_t > m_gradient_accuracy
Definition:
AdaDeltaUpdater.h:150
shogun::AdaDeltaUpdater
The class implements the AdaDelta update method.
Definition:
AdaDeltaUpdater.h:58
shogun::AdaDeltaUpdater::set_learning_rate
virtual void set_learning_rate(float64_t learning_rate)
Definition:
AdaDeltaUpdater.cpp:51
SHOGUN
Machine Learning Toolbox - Documentation