Skip to content

Commit

Permalink
Merge branch 'cg-methods' into rosenbrock
Browse files Browse the repository at this point in the history
  • Loading branch information
varunagrawal committed Oct 19, 2024
2 parents 893e69d + 07b11bc commit 3c2ddc8
Show file tree
Hide file tree
Showing 3 changed files with 78 additions and 49 deletions.
40 changes: 0 additions & 40 deletions gtsam/nonlinear/NonlinearConjugateGradientOptimizer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -28,46 +28,6 @@ namespace gtsam {

typedef internal::NonlinearOptimizerState State;

/* ************************************************************************* */
/// @brief Fletcher-Reeves rule for the conjugate-gradient coefficient β.
/// @param currentGradient gradient g_n at the current iterate
/// @param prevGradient gradient g_{n-1} at the previous iterate
/// @return β = (g_n·g_n)/(g_{n-1}·g_{n-1}), clamped below at 0
double FletcherReeves(const VectorValues& currentGradient,
const VectorValues& prevGradient) {
// Fletcher-Reeves: beta = g_n'*g_n/g_n-1'*g_n-1
const double beta = std::max(0.0, currentGradient.dot(currentGradient) /
prevGradient.dot(prevGradient));
return beta;
}

/* ************************************************************************* */
/// @brief Polak-Ribiere rule for the conjugate-gradient coefficient β.
/// @param currentGradient gradient g_n at the current iterate
/// @param prevGradient gradient g_{n-1} at the previous iterate
/// @return β = g_n·(g_n - g_{n-1})/(g_{n-1}·g_{n-1}), clamped below at 0
/// (the clamp acts as an automatic direction reset when β would go negative)
double PolakRibiere(const VectorValues& currentGradient,
const VectorValues& prevGradient) {
// Polak-Ribiere: beta = g_n'*(g_n-g_n-1)/g_n-1'*g_n-1
const double beta =
std::max(0.0, currentGradient.dot(currentGradient - prevGradient) /
prevGradient.dot(prevGradient));
return beta;
}

/* ************************************************************************* */
/// @brief Hestenes-Stiefel rule for the conjugate-gradient coefficient β.
/// @param currentGradient gradient g_n at the current iterate
/// @param prevGradient gradient g_{n-1} at the previous iterate
/// @param direction previous search direction s_{n-1}
/// @return β = g_n·(g_n - g_{n-1}) / (-s_{n-1}·(g_n - g_{n-1})), clamped at 0
double HestenesStiefel(const VectorValues& currentGradient,
const VectorValues& prevGradient,
const VectorValues& direction) {
// Hestenes-Stiefel: beta = g_n'*(g_n-g_n-1)/(-s_n-1')*(g_n-g_n-1)
VectorValues d = currentGradient - prevGradient;
const double beta = std::max(0.0, currentGradient.dot(d) / -direction.dot(d));
return beta;
}

/* ************************************************************************* */
/// @brief Dai-Yuan rule for the conjugate-gradient coefficient β.
/// @param currentGradient gradient g_n at the current iterate
/// @param prevGradient gradient g_{n-1} at the previous iterate
/// @param direction previous search direction s_{n-1}
/// @return β = (g_n·g_n) / (-s_{n-1}·(g_n - g_{n-1})), clamped below at 0
double DaiYuan(const VectorValues& currentGradient,
const VectorValues& prevGradient,
const VectorValues& direction) {
// Dai-Yuan: beta = g_n'*g_n/(-s_n-1')*(g_n-g_n-1)
const double beta =
std::max(0.0, currentGradient.dot(currentGradient) /
-direction.dot(currentGradient - prevGradient));
return beta;
}

/**
* @brief Return the gradient vector of a nonlinear factor graph
* @param nfg the graph
Expand Down
44 changes: 35 additions & 9 deletions gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -24,22 +24,48 @@
namespace gtsam {

/// Fletcher-Reeves formula for computing β, the direction of steepest descent.
template <typename Gradient>
double FletcherReeves(const Gradient &currentGradient,
                      const Gradient &prevGradient) {
  // Fletcher-Reeves: beta = (g_n . g_n) / (g_{n-1} . g_{n-1}).
  // Both dot products are squared norms, hence non-negative, so the
  // ratio never needs clamping at zero.
  const double numerator = currentGradient.dot(currentGradient);
  const double denominator = prevGradient.dot(prevGradient);
  return numerator / denominator;
}

/// Polak-Ribiere formula for computing β, the direction of steepest descent.
template <typename Gradient>
double PolakRibiere(const Gradient &currentGradient,
                    const Gradient &prevGradient) {
  // Polak-Ribiere: beta = g_n'*(g_n - g_{n-1}) / (g_{n-1}'*g_{n-1}),
  // clamped at zero so a negative β resets the search direction.
  const double numerator = currentGradient.dot(currentGradient - prevGradient);
  const double denominator = prevGradient.dot(prevGradient);
  return std::max(0.0, numerator / denominator);
}

/// The Hestenes-Stiefel formula for computing β,
/// the direction of steepest descent.
/// @param currentGradient gradient g_n at the current iterate
/// @param prevGradient gradient g_{n-1} at the previous iterate
/// @param direction previous search direction s_{n-1}
/// @return β clamped below at 0
template <typename Gradient>
double HestenesStiefel(const Gradient &currentGradient,
                       const Gradient &prevGradient,
                       const Gradient &direction) {
  // Hestenes-Stiefel: beta = g_n'*(g_n-g_n-1)/(-s_n-1')*(g_n-g_n-1)
  // Use the Gradient template type for the difference; the previous
  // hard-coded VectorValues broke instantiation for other gradient types.
  const Gradient d = currentGradient - prevGradient;
  const double beta = std::max(0.0, currentGradient.dot(d) / -direction.dot(d));
  return beta;
}

/// The Dai-Yuan formula for computing β, the direction of steepest descent.
/// @param currentGradient gradient g_n at the current iterate
/// @param prevGradient gradient g_{n-1} at the previous iterate
/// @param direction previous search direction s_{n-1}
/// @return β clamped below at 0
template <typename Gradient>
double DaiYuan(const Gradient &currentGradient, const Gradient &prevGradient,
               const Gradient &direction) {
  // Dai-Yuan: beta = g_n'*g_n/(-s_n-1')*(g_n-g_n-1)
  // `direction` now uses the Gradient template type (was VectorValues) so the
  // formula instantiates for any gradient type, matching the other β rules.
  // VectorValues callers are unaffected: all three arguments deduce the same
  // type as before.
  const double beta =
      std::max(0.0, currentGradient.dot(currentGradient) /
                        -direction.dot(currentGradient - prevGradient));
  return beta;
}

enum class DirectionMethod {
FletcherReeves,
Expand Down
43 changes: 43 additions & 0 deletions gtsam/nonlinear/tests/testNonlinearConjugateGradientOptimizer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -239,6 +239,49 @@ TEST(NonlinearConjugateGradientOptimizer, Optimization) {
EXPECT(assert_equal(expected, result, 1e-1));
}

/* ************************************************************************* */
/// Test different direction methods
TEST(NonlinearConjugateGradientOptimizer, DirectionMethods) {
  const auto [graph, initialEstimate] = generateProblem();

  NonlinearOptimizerParams param;
  param.maxIterations =
      500; /* requires a larger number of iterations to converge */
  param.verbosity = NonlinearOptimizerParams::SILENT;

  // Every β-update formula should drive the error to (near) zero.
  for (const DirectionMethod method :
       {DirectionMethod::FletcherReeves, DirectionMethod::PolakRibiere,
        DirectionMethod::HestenesStiefel, DirectionMethod::DaiYuan}) {
    NonlinearConjugateGradientOptimizer optimizer(graph, initialEstimate,
                                                  param, method);
    const Values result = optimizer.optimize();

    EXPECT_DOUBLES_EQUAL(0.0, graph.error(result), 1e-4);
  }
}
/* ************************************************************************* */
int main() {
TestResult tr;
Expand Down

0 comments on commit 3c2ddc8

Please sign in to comment.