From 2c7add1681a2a05a3187e79d68b55c66e42babe4 Mon Sep 17 00:00:00 2001 From: sherm1 Date: Fri, 16 Aug 2013 13:28:11 -0700 Subject: [PATCH] Fixed doxygen documentation to make it clear that you have to call Optimizer::setDifferentiatorMethod() *before* calling useNumericalGradient() or useNumericalJacobian() or it won't take effect. It would be better to fix the API to make this less confusing, but at least this is an improvement. --- SimTKmath/Optimizers/src/OptimizerRep.cpp | 10 +++--- SimTKmath/include/simmath/Optimizer.h | 41 ++++++++++++++++------- 2 files changed, 35 insertions(+), 16 deletions(-) diff --git a/SimTKmath/Optimizers/src/OptimizerRep.cpp b/SimTKmath/Optimizers/src/OptimizerRep.cpp index b0db6f308..a0533fa79 100644 --- a/SimTKmath/Optimizers/src/OptimizerRep.cpp +++ b/SimTKmath/Optimizers/src/OptimizerRep.cpp @@ -109,6 +109,12 @@ bool Optimizer::OptimizerRep::getAdvancedBoolOption( const std::string &option, return getAdvancedOptionHelper(advancedBoolOptions, option, value); } +// TODO: this only works if called *prior* to the routines below. +void Optimizer::OptimizerRep:: +setDifferentiatorMethod(Differentiator::Method method) { + diffMethod = method; +} + void Optimizer::OptimizerRep:: useNumericalGradient(bool flag, Real objEstAccuracy) { objectiveEstimatedAccuracy = @@ -135,10 +141,6 @@ useNumericalJacobian(bool flag, Real consEstAccuracy) { numericalJacobian = flag; } -void Optimizer::OptimizerRep:: -setDifferentiatorMethod(Differentiator::Method method) { - diffMethod = method; -} int Optimizer::OptimizerRep::objectiveFuncWrapper (int n, const Real* x, int newX, Real* f, void* vrep) diff --git a/SimTKmath/include/simmath/Optimizer.h b/SimTKmath/include/simmath/Optimizer.h index 0bc2682b7..5a2bed756 100644 --- a/SimTKmath/include/simmath/Optimizer.h +++ b/SimTKmath/include/simmath/Optimizer.h @@ -277,13 +277,34 @@ class SimTK_SIMMATH_EXPORT Optimizer { /// Set the value of an advanced option specified by an boolean value. 
bool setAdvancedBoolOption( const char *option, const bool value ); + + /// Set which numerical differentiation algorithm is to be used for the next + /// useNumericalGradient() or useNumericalJacobian() call. Choices are + /// Differentiator::ForwardDifference (first order) or + /// Differentiator::CentralDifference (second order) with central the + /// default. + /// @warning This has no effect if you have already called + /// useNumericalGradient() or useNumericalJacobian(). Those must be called + /// \e after setDifferentiatorMethod(). + /// @see SimTK::Differentiator + void setDifferentiatorMethod(Differentiator::Method method); + /// Return the differentiation method last supplied in a call to + /// setDifferentiatorMethod(), \e not necessarily the method currently + /// in use. See setDifferentiatorMethod() for more information. + /// @see SimTK::Differentiator + Differentiator::Method getDifferentiatorMethod() const; + /// Enable numerical calculation of gradient, with optional estimation of /// the accuracy to which the objective function is calculated. For example, /// if you are calculate about 6 significant digits, supply the estimated /// accuracy as 1e-6. Providing the estimated accuracy improves the quality /// of the calculated derivative. If no accuracy is provided we'll assume - /// the objective is calculated to near machine precision. See - /// SimTK::Differentiator for more information. + /// the objective is calculated to near machine precision. The method used + /// for calculating the derivative will be whatever was \e previously + /// supplied in a call to setDifferentiatorMethod(), or the default which + /// is to use central differencing (two function evaluations per + /// gradient entry). See SimTK::Differentiator for more information. 
+ /// @see setDifferentiatorMethod(), SimTK::Differentiator void useNumericalGradient(bool flag, Real estimatedAccuracyOfObjective = SignificantReal); /// Enable numerical calculation of the constraint Jacobian, with optional @@ -292,16 +313,15 @@ class SimTK_SIMMATH_EXPORT Optimizer { /// digits, supply the estimated accuracy as 1e-6. Providing the estimated /// accuracy improves the quality of the calculated derivative. If no /// accuracy is provided we'll assume the constraints are calculated to near - /// machine precision. See SimTK::Differentiator for more information. + /// machine precision. The method used for calculating the derivative will + /// be whatever was \e previously supplied in a call to + /// setDifferentiatorMethod(), or the default which is to use central + /// differencing (two function evaluations per Jacobian column). See + /// SimTK::Differentiator for more information. + /// @see setDifferentiatorMethod(), SimTK::Differentiator void useNumericalJacobian(bool flag, Real estimatedAccuracyOfConstraints = SignificantReal); - /// Set which numerical gradient algorithm is used when numerical gradient - /// or Jacobian is being used. Choices are Differentiator::ForwardDifference - /// (first order) or Differentiator::CentralDifference (second order) with - /// central the default. - void setDifferentiatorMethod(Differentiator::Method method); - /// Compute optimization. Real optimize(Vector&); @@ -312,9 +332,6 @@ class SimTK_SIMMATH_EXPORT Optimizer { bool isUsingNumericalGradient() const; /// Indicate whether the Optimizer is currently set to use a numerical Jacobian. bool isUsingNumericalJacobian() const; - /// Return the differentiation method that will be used if numerical - /// gradient or Jacobian are required. - Differentiator::Method getDifferentiatorMethod() const; /// Return the estimated accuracy last specified in useNumericalGradient(). 
Real getEstimatedAccuracyOfObjective() const; /// Return the estimated accuracy last specified in useNumericalJacobian().