Excerpts from SVRGMinimizer.cpp:

    void SVRGMinimizer::init()

    SG_ADD(&m_previous_variable, "SVRGMinimizer__m_previous_variable", ...);

    // the following check appears twice in the implementation:
    REQUIRE(fun,
        "the cost function must be a stochastic average gradient cost function\n");
virtual SGVector< float64_t > get_gradient()=0
The base class of stochastic cost functions for stochastic average gradient (SAG) minimizers.
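Together with begin_sample() and next_sample() below, the pure virtuals define a simple sampling protocol that a minimizer drives; a sketch, with fun standing for any concrete cost function:

    fun->begin_sample();            // rewind to before the first sample
    while (fun->next_sample())      // false once the pass is exhausted
    {
        SGVector<float64_t> var  = fun->obtain_variable_reference();
        SGVector<float64_t> grad = fun->get_gradient(); // gradient at the current sample
        // a minimizer updates var here using grad
    }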
virtual float64_t minimize()
DescendUpdater * m_gradient_updater
virtual void set_learning_rate(LearningRate *learning_rate)
float64_t m_penalty_weight
virtual void init_minimization()
virtual void begin_sample()=0
virtual void set_penalty_type(Penalty *penalty_type)
FirstOrderCostFunction * m_fun
virtual void update_variable(SGVector< float64_t > variable_reference, SGVector< float64_t > negative_descend_direction, float64_t learning_rate)=0
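As an illustration of this contract, a bare-bones updater (hypothetical name VanillaUpdater; Shogun's GradientDescendUpdater fills this role) subtracts the scaled direction in place. Note that the argument is the negative descend direction, i.e. the gradient:

    void VanillaUpdater::update_variable(SGVector<float64_t> variable_reference,
        SGVector<float64_t> negative_descend_direction, float64_t learning_rate)
    {
        // variable <- variable - learning_rate * gradient (in-place update)
        for (index_t i = 0; i < variable_reference.vlen; ++i)
            variable_reference[i] -= learning_rate * negative_descend_direction[i];
    }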
virtual float64_t minimize()
The base class for stochastic first-order gradient-based minimizers.
virtual void set_penalty_weight(float64_t penalty_weight)
virtual void init_minimization()
virtual void update_gradient(SGVector< float64_t > gradient, SGVector< float64_t > var)
LearningRate * m_learning_rate
SGVector< float64_t > m_previous_variable
SGVector< float64_t > m_average_gradient
virtual void set_number_passes(int32_t num_passes)
virtual void do_proximal_operation(SGVector< float64_t > variable_reference)
virtual float64_t get_penalty(SGVector< float64_t > var)
virtual void set_gradient_updater(DescendUpdater *gradient_updater)
virtual float64_t get_cost()=0
virtual SGVector< float64_t > get_average_gradient()=0
All classes and functions are contained in the shogun namespace.
virtual bool next_sample()=0
virtual float64_t get_learning_rate(int32_t iter_counter)=0
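Concrete learning rates typically decay the step size with the iteration counter; a sketch with a hypothetical class name and the schedule eta_t = eta_0 / (1 + t):

    float64_t DecayingLearningRate::get_learning_rate(int32_t iter_counter)
    {
        // m_initial_rate is a hypothetical member holding eta_0
        return m_initial_rate / (1.0 + iter_counter);
    }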
virtual SGVector< float64_t > obtain_variable_reference()=0
The class implements the stochastic gradient descent (SGD) minimizer.
virtual int32_t get_iteration_counter()
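Putting the pieces together, a minimizer is configured through the setters listed above and then run. A minimal sketch, assuming GradientDescendUpdater, ConstLearningRate, and L2Penalty are available as in Shogun's optimization framework (header paths may differ by version), with my_cost_fun standing for a user-supplied cost function:

    #include <shogun/optimization/SGDMinimizer.h>
    #include <shogun/optimization/GradientDescendUpdater.h>
    #include <shogun/optimization/ConstLearningRate.h>
    #include <shogun/optimization/L2Penalty.h>

    using namespace shogun;

    float64_t run_sgd(FirstOrderStochasticCostFunction* my_cost_fun)
    {
        SGDMinimizer minimizer(my_cost_fun);

        // update rule and step-size policy
        GradientDescendUpdater updater;
        ConstLearningRate rate;
        rate.set_const_learning_rate(0.01);
        minimizer.set_gradient_updater(&updater);
        minimizer.set_learning_rate(&rate);
        minimizer.set_number_passes(10);

        // optional regularization through the penalty interface
        L2Penalty penalty;
        minimizer.set_penalty_type(&penalty);
        minimizer.set_penalty_weight(0.01);

        return minimizer.minimize();
    }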