SVRGMinimizer.cpp
/*
 * Copyright (c) The Shogun Machine Learning Toolbox
 * Written (w) 2015 Wu Lin
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * The views and conclusions contained in the software and documentation are those
 * of the authors and should not be interpreted as representing official policies,
 * either expressed or implied, of the Shogun Development Team.
 *
 */
#include <shogun/optimization/SVRGMinimizer.h>
#include <shogun/base/Parameter.h>
using namespace shogun;

SVRGMinimizer::SVRGMinimizer()
    :SGDMinimizer()
{
    init();
}

SVRGMinimizer::~SVRGMinimizer()
{
}

SVRGMinimizer::SVRGMinimizer(FirstOrderSAGCostFunction *fun)
    :SGDMinimizer(fun)
{
    init();
}

void SVRGMinimizer::init()
{
    // Negative defaults force the user to set these before minimization
    // (see the REQUIRE checks in init_minimization())
    m_num_sgd_passes=-1;
    m_svrg_interval=-1;
    m_average_gradient=SGVector<float64_t>();
    m_previous_variable=SGVector<float64_t>();

    SG_ADD(&m_num_sgd_passes, "SVRGMinimizer__m_num_sgd_passes",
        "num_sgd_passes in SVRGMinimizer", MS_NOT_AVAILABLE);
    SG_ADD(&m_svrg_interval, "SVRGMinimizer__m_svrg_interval",
        "svrg_interval in SVRGMinimizer", MS_NOT_AVAILABLE);
    SG_ADD(&m_average_gradient, "SVRGMinimizer__m_average_gradient",
        "average_gradient in SVRGMinimizer", MS_NOT_AVAILABLE);
    SG_ADD(&m_previous_variable, "SVRGMinimizer__m_previous_variable",
        "previous_variable in SVRGMinimizer", MS_NOT_AVAILABLE);
}

void SVRGMinimizer::init_minimization()
{
    SGDMinimizer::init_minimization();
    REQUIRE(m_num_sgd_passes>=0, "sgd_passes must be set\n");
    REQUIRE(m_svrg_interval>0, "svrg_interval must be set\n");
    FirstOrderSAGCostFunction *fun=dynamic_cast<FirstOrderSAGCostFunction *>(m_fun);
    REQUIRE(fun, "the cost function must be a stochastic average gradient cost function\n");
    if (m_num_sgd_passes>0)
    {
        // Warm start: run m_num_sgd_passes passes of plain SGD before
        // switching to variance-reduced (SVRG) updates
        SGDMinimizer sgd(fun);
        sgd.set_gradient_updater(m_gradient_updater);
        sgd.set_penalty_weight(m_penalty_weight);
        sgd.set_penalty_type(m_penalty_type);
        sgd.set_number_passes(m_num_sgd_passes);
        sgd.set_learning_rate(m_learning_rate);
        sgd.minimize();
        m_iter_counter=sgd.get_iteration_counter();
    }
}
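
// Editorial note (not part of the original source): the inner loop of
// minimize() below performs the standard SVRG update of Johnson and Zhang
// (2013). With snapshot \tilde{w} (m_previous_variable) and full gradient
// \tilde{\mu} = (1/n) \sum_i \nabla f_i(\tilde{w}) (m_average_gradient),
// each sampled index i applies
//
//     w \leftarrow w - \eta ( \nabla f_i(w) - \nabla f_i(\tilde{w}) + \tilde{\mu} )
//
// which is exactly what the loop over grad_new assembles before
// m_gradient_updater->update_variable() takes the step.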

float64_t SVRGMinimizer::minimize()
{
    init_minimization();

    SGVector<float64_t> variable_reference=m_fun->obtain_variable_reference();
    FirstOrderSAGCostFunction *fun=dynamic_cast<FirstOrderSAGCostFunction *>(m_fun);
    REQUIRE(fun, "the cost function must be a stochastic average gradient cost function\n");
    for(int32_t j=0; j<m_num_passes; j++)
    {
        if (j%m_svrg_interval==0)
        {
            // Take a snapshot of the current variable and cache the full
            // (average) gradient evaluated at the snapshot
            if (m_previous_variable.vlen==0)
                m_previous_variable=SGVector<float64_t>(variable_reference.vlen);

            std::copy(variable_reference.vector, variable_reference.vector+variable_reference.vlen, m_previous_variable.vector);
            m_average_gradient=fun->get_average_gradient();
        }
        fun->begin_sample();
        while(fun->next_sample())
        {
            m_iter_counter++;
            float64_t learning_rate=1.0;
            if(m_learning_rate)
                learning_rate=m_learning_rate->get_learning_rate(m_iter_counter);

            // Stochastic gradient of the current sample at the current variable
            SGVector<float64_t> grad_new=m_fun->get_gradient();
            SGVector<float64_t> var(variable_reference.vlen);
            std::copy(variable_reference.vector, variable_reference.vector+variable_reference.vlen, var.vector);

            // Gradient of the same sample at the snapshot variable
            std::copy(m_previous_variable.vector, m_previous_variable.vector+m_previous_variable.vlen, variable_reference.vector);
            SGVector<float64_t> grad_old=m_fun->get_gradient();

            std::copy(var.vector, var.vector+var.vlen, variable_reference.vector);
            // Variance-reduced gradient: grad_new - grad_old + average_gradient
            for(index_t idx=0; idx<grad_new.vlen; idx++)
                grad_new[idx]+=(m_average_gradient[idx]-grad_old[idx]);

            update_gradient(grad_new, variable_reference);
            m_gradient_updater->update_variable(variable_reference, grad_new, learning_rate);

            do_proximal_operation(variable_reference);
        }
    }
    float64_t cost=m_fun->get_cost();
    return cost+get_penalty(variable_reference);
}
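
// A minimal usage sketch (editorial, not from this file). It assumes a
// user-defined FirstOrderSAGCostFunction subclass supplied by the caller,
// and that GradientDescendUpdater and ConstLearningRate from the Shogun
// optimization framework are available. The two SVRG-specific setters are
// hypothetical names for the knobs validated in init_minimization() above;
// consult SVRGMinimizer.h for the exact API.
//
// #include <shogun/optimization/SVRGMinimizer.h>
// #include <shogun/optimization/GradientDescendUpdater.h>
// #include <shogun/optimization/ConstLearningRate.h>
//
// using namespace shogun;
//
// float64_t run_svrg(FirstOrderSAGCostFunction* fun)
// {
//     SVRGMinimizer opt(fun);
//
//     // Vanilla descend updates driven by the variance-reduced gradient
//     GradientDescendUpdater updater;
//     opt.set_gradient_updater(&updater);
//
//     ConstLearningRate rate;
//     rate.set_const_learning_rate(0.01);
//     opt.set_learning_rate(&rate);
//
//     opt.set_number_passes(20);       // outer passes over the data
//
//     // Hypothetical setter names; m_num_sgd_passes and m_svrg_interval
//     // must be set before minimize() will run:
//     // opt.set_sgd_number_passes(1); // warm-start SGD passes
//     // opt.set_svrg_interval(1);     // snapshot every pass
//
//     return opt.minimize();           // cost plus penalty at the solution
// }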