itk::AdaptiveStochasticGradientDescentOptimizer Class Reference

#include <itkAdaptiveStochasticGradientDescentOptimizer.h>

Inheritance diagram for itk::AdaptiveStochasticGradientDescentOptimizer (diagram omitted).
Collaboration diagram for itk::AdaptiveStochasticGradientDescentOptimizer (diagram omitted).

Public Types

typedef SmartPointer< const Self > ConstPointer
typedef Superclass::CostFunctionType CostFunctionType
typedef Superclass::DerivativeType DerivativeType
typedef Superclass::MeasureType MeasureType
typedef Superclass::ParametersType ParametersType
typedef SmartPointer< Self > Pointer
typedef Superclass::ScaledCostFunctionPointer ScaledCostFunctionPointer
typedef Superclass::ScaledCostFunctionType ScaledCostFunctionType
typedef Superclass::ScalesType ScalesType
typedef AdaptiveStochasticGradientDescentOptimizer Self
typedef Superclass::StopConditionType StopConditionType
typedef StandardGradientDescentOptimizer Superclass
- Public Types inherited from itk::StandardGradientDescentOptimizer
typedef SmartPointer< const Self > ConstPointer
typedef Superclass::CostFunctionType CostFunctionType
typedef Superclass::DerivativeType DerivativeType
typedef Superclass::MeasureType MeasureType
typedef Superclass::ParametersType ParametersType
typedef SmartPointer< Self > Pointer
typedef Superclass::ScaledCostFunctionPointer ScaledCostFunctionPointer
typedef Superclass::ScaledCostFunctionType ScaledCostFunctionType
typedef Superclass::ScalesType ScalesType
typedef StandardGradientDescentOptimizer Self
typedef Superclass::StopConditionType StopConditionType
typedef GradientDescentOptimizer2 Superclass
- Public Types inherited from itk::GradientDescentOptimizer2
typedef SmartPointer< const Self > ConstPointer
typedef Superclass::CostFunctionType CostFunctionType
typedef Superclass::DerivativeType DerivativeType
typedef Superclass::MeasureType MeasureType
typedef Superclass::ParametersType ParametersType
typedef SmartPointer< Self > Pointer
typedef Superclass::ScaledCostFunctionPointer ScaledCostFunctionPointer
typedef Superclass::ScaledCostFunctionType ScaledCostFunctionType
typedef Superclass::ScalesType ScalesType
typedef GradientDescentOptimizer2 Self
enum StopConditionType { MaximumNumberOfIterations, MetricError, MinimumStepSize }
typedef ScaledSingleValuedNonLinearOptimizer Superclass
- Public Types inherited from itk::ScaledSingleValuedNonLinearOptimizer
typedef SmartPointer< const Self > ConstPointer
typedef Superclass::CostFunctionType CostFunctionType
typedef Superclass::DerivativeType DerivativeType
typedef Superclass::MeasureType MeasureType
typedef Superclass::ParametersType ParametersType
typedef SmartPointer< Self > Pointer
typedef ScaledCostFunctionType::Pointer ScaledCostFunctionPointer
typedef ScaledSingleValuedCostFunction ScaledCostFunctionType
typedef NonLinearOptimizer::ScalesType ScalesType
typedef ScaledSingleValuedNonLinearOptimizer Self
typedef SingleValuedNonLinearOptimizer Superclass

Public Member Functions

virtual const char * GetClassName () const
virtual double GetSigmoidMax () const
virtual double GetSigmoidMin () const
virtual double GetSigmoidScale () const
virtual bool GetUseAdaptiveStepSizes () const
virtual void SetSigmoidMax (double _arg)
virtual void SetSigmoidMin (double _arg)
virtual void SetSigmoidScale (double _arg)
virtual void SetUseAdaptiveStepSizes (bool _arg)
- Public Member Functions inherited from itk::StandardGradientDescentOptimizer
virtual void AdvanceOneStep (void)
virtual double GetCurrentTime () const
virtual double GetInitialTime () const
virtual double GetParam_a () const
virtual double GetParam_A () const
virtual double GetParam_alpha () const
virtual void ResetCurrentTimeToInitialTime (void)
virtual void SetInitialTime (double _arg)
virtual void SetParam_a (double _arg)
virtual void SetParam_A (double _arg)
virtual void SetParam_alpha (double _arg)
virtual void StartOptimization (void)
- Public Member Functions inherited from itk::GradientDescentOptimizer2
virtual unsigned int GetCurrentIteration () const
virtual const DerivativeType & GetGradient ()
virtual const double & GetLearningRate ()
virtual const unsigned long & GetNumberOfIterations ()
virtual const StopConditionType & GetStopCondition ()
virtual const double & GetValue ()
virtual void MetricErrorResponse (ExceptionObject &err)
virtual void ResumeOptimization (void)
virtual void SetLearningRate (double _arg)
virtual void SetNumberOfIterations (unsigned long _arg)
virtual void StopOptimization (void)
- Public Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer
virtual const ParametersType & GetCurrentPosition (void) const
virtual bool GetMaximize () const
virtual const ScaledCostFunctionType * GetScaledCostFunction ()
virtual const ParametersType & GetScaledCurrentPosition ()
bool GetUseScales (void) const
virtual void InitializeScales (void)
virtual void MaximizeOff ()
virtual void MaximizeOn ()
virtual void SetCostFunction (CostFunctionType *costFunction)
virtual void SetMaximize (bool _arg)
virtual void SetUseScales (bool arg)

Static Public Member Functions

static Pointer New ()

Protected Member Functions

 AdaptiveStochasticGradientDescentOptimizer ()
virtual void UpdateCurrentTime (void)
virtual ~AdaptiveStochasticGradientDescentOptimizer ()
- Protected Member Functions inherited from itk::StandardGradientDescentOptimizer
virtual double Compute_a (double k) const
 StandardGradientDescentOptimizer ()
virtual ~StandardGradientDescentOptimizer ()
- Protected Member Functions inherited from itk::GradientDescentOptimizer2
 GradientDescentOptimizer2 ()
void PrintSelf (std::ostream &os, Indent indent) const
virtual ~GradientDescentOptimizer2 ()
- Protected Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer
virtual void GetScaledDerivative (const ParametersType &parameters, DerivativeType &derivative) const
virtual MeasureType GetScaledValue (const ParametersType &parameters) const
virtual void GetScaledValueAndDerivative (const ParametersType &parameters, MeasureType &value, DerivativeType &derivative) const
 ScaledSingleValuedNonLinearOptimizer ()
virtual void SetCurrentPosition (const ParametersType &param)
virtual void SetScaledCurrentPosition (const ParametersType &parameters)
virtual ~ScaledSingleValuedNonLinearOptimizer ()

Protected Attributes

DerivativeType m_PreviousGradient
- Protected Attributes inherited from itk::StandardGradientDescentOptimizer
double m_CurrentTime
- Protected Attributes inherited from itk::GradientDescentOptimizer2
DerivativeType m_Gradient
double m_LearningRate
StopConditionType m_StopCondition
- Protected Attributes inherited from itk::ScaledSingleValuedNonLinearOptimizer
ScaledCostFunctionPointer m_ScaledCostFunction
ParametersType m_ScaledCurrentPosition

Private Member Functions

 AdaptiveStochasticGradientDescentOptimizer (const Self &)
void operator= (const Self &)

Private Attributes

double m_SigmoidMax
double m_SigmoidMin
double m_SigmoidScale
bool m_UseAdaptiveStepSizes

Detailed Description

This class implements a gradient descent optimizer with adaptive gain.

If $C(x)$ is a cost function that has to be minimised, the following iterative algorithm is used to find the optimal parameters $x$:

\[ x(k+1) = x(k) - a(t_k) dC/dx \]

The gain $a(t_k)$ at each iteration $k$ is defined by:

\[ a(t_k) = a / (A + t_k + 1)^\alpha. \]

And the time $t_k$ is updated according to:

\[ t_{k+1} = \left[ t_k + \mathrm{sigmoid}\left( -g_k^T g_{k-1} \right) \right]^+ \]

where $g_k$ equals $dC/dx$ at iteration $k$. For $t_0$ the InitialTime is used, which is defined in the superclass (StandardGradientDescentOptimizer). Whereas this parameter is superfluous in the superclass, it plays a meaningful role in this class.
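
To make these update rules concrete, the following minimal sketch implements one iteration of the scheme in plain C++. It is an illustration only, not the class's actual implementation: the helper names are invented, and the exact sigmoid parametrisation (here a logistic curve rescaled to the range [SigmoidMin, SigmoidMax]) is an assumption.

// Illustrative sketch of the adaptive gain scheme; not the ITK/elastix code.
#include <algorithm>
#include <cmath>
#include <cstddef>
#include <numeric>
#include <vector>

// Assumed sigmoid shape: a logistic curve rescaled to [sMin, sMax],
// with 'scale' controlling its width.
double Sigmoid(double x, double sMin, double sMax, double scale)
{
  return sMin + (sMax - sMin) / (1.0 + std::exp(-x / scale));
}

// One iteration: x <- x - a(t_k) g_k, followed by the time update
// t_{k+1} = [ t_k + sigmoid(-g_k^T g_{k-1}) ]^+.
void Step(std::vector<double>& x, const std::vector<double>& g,
          std::vector<double>& gPrev, double& t,
          double a, double A, double alpha,
          double sMin, double sMax, double scale)
{
  const double gain = a / std::pow(A + t + 1.0, alpha);  // a(t_k)
  for (std::size_t i = 0; i < x.size(); ++i)
  {
    x[i] -= gain * g[i];
  }
  const double inprod =
    std::inner_product(g.begin(), g.end(), gPrev.begin(), 0.0);  // g_k^T g_{k-1}
  t = std::max(0.0, t + Sigmoid(-inprod, sMin, sMax, scale));
  gPrev = g;
}

The intuition behind the time update: as long as successive gradients point in similar directions (positive inner product), the sigmoid output is negative, the time hardly advances, and the gain stays large; oscillating gradients drive the time up and thus shrink the gain.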

This method is described in the following references:

[1] P. Cruz, "Almost sure convergence and asymptotical normality of a generalization of Kesten's stochastic approximation algorithm for multidimensional case." Technical Report, 2005. http://hdl.handle.net/2052/74

[2] S. Klein, J.P.W. Pluim, M. Staring, and M.A. Viergever, "Adaptive stochastic gradient descent optimisation for image registration," International Journal of Computer Vision, vol. 81, no. 3, pp. 227-239, 2009. http://dx.doi.org/10.1007/s11263-008-0168-y

This optimizer is very suitable for use in combination with a stochastic estimate of the gradient $dC/dx$. For example, in image registration problems it is often advantageous to compute the metric derivative ($dC/dx$) on a new set of randomly selected image samples in each iteration. You may set the parameter NewSamplesEveryIteration to "true" to achieve this effect. For more information on this strategy, have a look at the references listed above.
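
As a hedged illustration, the snippet below shows how the optimizer could be configured through the Set methods documented on this page. The numeric values are placeholders rather than recommended settings, and the metric setup is omitted.

#include "itkAdaptiveStochasticGradientDescentOptimizer.h"

// Hypothetical configuration; values are placeholders, not recommendations.
itk::AdaptiveStochasticGradientDescentOptimizer::Pointer optimizer =
  itk::AdaptiveStochasticGradientDescentOptimizer::New();
optimizer->SetUseAdaptiveStepSizes( true );  // default: true
optimizer->SetSigmoidMax( 1.0 );             // default: 1.0, should be > 0
optimizer->SetSigmoidMin( -0.8 );            // default: -0.8, should be < 0
optimizer->SetSigmoidScale( 1e-8 );          // default: 1e-8, should be > 0
optimizer->SetParam_a( 400.0 );              // gain parameters a, A, alpha,
optimizer->SetParam_A( 50.0 );               // inherited from the superclass
optimizer->SetParam_alpha( 0.602 );
optimizer->SetNumberOfIterations( 500 );
// optimizer->SetCostFunction( metric );     // set a metric before starting
// optimizer->StartOptimization();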

See Also
AdaptiveStochasticGradientDescent, StandardGradientDescentOptimizer

Definition at line 68 of file itkAdaptiveStochasticGradientDescentOptimizer.h.

Member Typedef Documentation

Typedefs inherited from the superclass.

Definition at line 85 of file itkAdaptiveStochasticGradientDescentOptimizer.h.

Standard ITK.

Definition at line 74 of file itkAdaptiveStochasticGradientDescentOptimizer.h.

Constructor & Destructor Documentation

itk::AdaptiveStochasticGradientDescentOptimizer::AdaptiveStochasticGradientDescentOptimizer ( )
protected
virtual itk::AdaptiveStochasticGradientDescentOptimizer::~AdaptiveStochasticGradientDescentOptimizer ( )
inlineprotectedvirtual
itk::AdaptiveStochasticGradientDescentOptimizer::AdaptiveStochasticGradientDescentOptimizer ( const Self & )
private

Member Function Documentation

virtual const char* itk::AdaptiveStochasticGradientDescentOptimizer::GetClassName ( ) const
virtual

Run-time type information (and related methods).

Reimplemented from itk::StandardGradientDescentOptimizer.

Reimplemented in elastix::AdaptiveStochasticGradientDescent< TElastix >.

virtual double itk::AdaptiveStochasticGradientDescentOptimizer::GetSigmoidMax ( ) const
virtual
virtual double itk::AdaptiveStochasticGradientDescentOptimizer::GetSigmoidMin ( ) const
virtual
virtual double itk::AdaptiveStochasticGradientDescentOptimizer::GetSigmoidScale ( ) const
virtual
virtual bool itk::AdaptiveStochasticGradientDescentOptimizer::GetUseAdaptiveStepSizes ( ) const
virtual
static Pointer itk::AdaptiveStochasticGradientDescentOptimizer::New ( )
static

Method for creation through the object factory.

Reimplemented from itk::StandardGradientDescentOptimizer.

Reimplemented in elastix::AdaptiveStochasticGradientDescent< TElastix >.

void itk::AdaptiveStochasticGradientDescentOptimizer::operator= ( const Self & )
private
virtual void itk::AdaptiveStochasticGradientDescentOptimizer::SetSigmoidMax ( double  _arg)
virtual

Set/Get the maximum of the sigmoid. Should be >0. Default: 1.0

virtual void itk::AdaptiveStochasticGradientDescentOptimizer::SetSigmoidMin ( double  _arg)
virtual

Set/Get the minimum of the sigmoid. Should be <0. Default: -0.8

virtual void itk::AdaptiveStochasticGradientDescentOptimizer::SetSigmoidScale ( double  _arg)
virtual

Set/Get the scaling of the sigmoid width. Large values result in a wider sigmoid. Should be >0. Default: 1e-8.

virtual void itk::AdaptiveStochasticGradientDescentOptimizer::SetUseAdaptiveStepSizes ( bool  _arg)
virtual

Set/Get whether the adaptive step size mechanism is desired. Default: true

virtual void itk::AdaptiveStochasticGradientDescentOptimizer::UpdateCurrentTime ( void  )
protectedvirtual

Function to update the current time. If UseAdaptiveStepSizes is false, this function simply increments the CurrentTime by $E_0 = (\mathrm{sigmoid}_{\max} + \mathrm{sigmoid}_{\min})/2$. Otherwise, the CurrentTime is updated according to:
\[ time = \max\left[ 0, time + \mathrm{sigmoid}\left( -g_k^T g_{k-1} \right) \right] \]
where $g_k$ is the current gradient and $g_{k-1}$ the previous one. In that case, the m_PreviousGradient is also updated.

Reimplemented from itk::StandardGradientDescentOptimizer.
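
For concreteness, the following sketch paraphrases the two branches described above. It is not the library's actual code; in particular, the sigmoid parametrisation (the same rescaled logistic curve as in the sketch under Detailed Description) is an assumption.

// Sketch of the documented behaviour of UpdateCurrentTime(); not the real code.
#include <algorithm>
#include <cmath>
#include <numeric>
#include <vector>

void UpdateCurrentTimeSketch(double& time,
                             std::vector<double>& previousGradient,
                             const std::vector<double>& gradient,
                             bool useAdaptiveStepSizes,
                             double sigmoidMin, double sigmoidMax,
                             double sigmoidScale)
{
  if (!useAdaptiveStepSizes)
  {
    // Plain increment by E_0 = (sigmoid_max + sigmoid_min) / 2.
    time += (sigmoidMax + sigmoidMin) / 2.0;
    return;
  }
  // Adaptive branch: time = max(0, time + sigmoid(-g_k^T g_{k-1})).
  const double inprod = std::inner_product(
    gradient.begin(), gradient.end(), previousGradient.begin(), 0.0);
  const double s = sigmoidMin + (sigmoidMax - sigmoidMin) /
                   (1.0 + std::exp(inprod / sigmoidScale));  // sigmoid(-inprod)
  time = std::max(0.0, time + s);
  previousGradient = gradient;  // remember g_k for the next call
}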

Field Documentation

DerivativeType itk::AdaptiveStochasticGradientDescentOptimizer::m_PreviousGradient
protected

The PreviousGradient, necessary for the Cruz acceleration (see reference [1] above).

Definition at line 131 of file itkAdaptiveStochasticGradientDescentOptimizer.h.

double itk::AdaptiveStochasticGradientDescentOptimizer::m_SigmoidMax
private
double itk::AdaptiveStochasticGradientDescentOptimizer::m_SigmoidMin
private
double itk::AdaptiveStochasticGradientDescentOptimizer::m_SigmoidScale
private
bool itk::AdaptiveStochasticGradientDescentOptimizer::m_UseAdaptiveStepSizes
private

Settings

Definition at line 139 of file itkAdaptiveStochasticGradientDescentOptimizer.h.


