52 REQUIRE(learning_rate>0,
"Learning_rate (%f) must be positive\n",
59 REQUIRE(epsilon>=0,
"Epsilon (%f) must be non-negative\n",
68 void AdaGradUpdater::init()
78 REQUIRE(context,
"Context must set\n");
83 std::string key=
"AdaGradUpdater::m_gradient_accuracy";
90 REQUIRE(context,
"Context must set\n");
91 std::string key=
"AdaGradUpdater::m_gradient_accuracy";
112 "The length of variable (%d) and the length of negative descend direction (%d) do not match\n",
113 variable_reference.
vlen, raw_negative_descend_direction.
vlen);
120 raw_negative_descend_direction, learning_rate);
virtual void save_data(const std::string &key, SGVector< float64_t > value)
virtual void load_from_context(CMinimizerContext *context)
The class is used to serialize and deserialize variables for the optimization framework.
virtual SGVector< float64_t > get_data_sgvector_float64(const std::string &key)
static const float64_t epsilon
virtual float64_t get_negative_descend_direction(float64_t variable, float64_t gradient, index_t idx, float64_t learning_rate)
virtual void update_variable(SGVector< float64_t > variable_reference, SGVector< float64_t > raw_negative_descend_direction, float64_t learning_rate)
virtual ~AdaGradUpdater()
SGVector< float64_t > m_gradient_accuracy
float64_t m_build_in_learning_rate
virtual void load_from_context(CMinimizerContext *context)
virtual void update_variable(SGVector< float64_t > variable_reference, SGVector< float64_t > raw_negative_descend_direction, float64_t learning_rate)
All classes and functions are contained in the shogun namespace.
This is a base class for descend updaters that apply a descend-based correction.
void scale(Matrix A, Matrix B, typename Matrix::Scalar alpha)
static float32_t sqrt(float32_t x)
virtual void set_learning_rate(float64_t learning_rate)
void set_const(T const_elem)
virtual void set_epsilon(float64_t epsilon)
virtual void update_context(CMinimizerContext *context)
virtual void update_context(CMinimizerContext *context)