92 virtual const char*
get_name()
const {
return "AdamUpdater"; }
SGVector< float64_t > m_gradient_first_moment
virtual void set_second_moment_decay_factor(float64_t decay_factor)
float64_t m_decay_factor_first_moment
float64_t m_log_learning_rate
This class implements the Adam optimization method.
virtual void set_learning_rate(float64_t learning_rate)
virtual const char * get_name() const
SGVector< float64_t > m_gradient_second_moment
float64_t m_log_scale_pre_iteration
virtual float64_t get_negative_descend_direction(float64_t variable, float64_t gradient, index_t idx, float64_t learning_rate)
All classes and functions are contained in the shogun namespace.
This is a base class for descend updates with a descend-based correction.
virtual void update_variable(SGVector< float64_t > variable_reference, SGVector< float64_t > raw_negative_descend_direction, float64_t learning_rate)
virtual void set_first_moment_decay_factor(float64_t decay_factor)
virtual void set_epsilon(float64_t epsilon)
int64_t m_iteration_counter
float64_t m_decay_factor_second_moment