// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2023 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)

#include "ceres/line_search_direction.h"

#include <memory>

#include "ceres/internal/eigen.h"
#include "ceres/internal/export.h"
#include "ceres/line_search_minimizer.h"
#include "ceres/low_rank_inverse_hessian.h"
#include "glog/logging.h"

namespace ceres::internal {

class CERES_NO_EXPORT SteepestDescent final : public LineSearchDirection {
 public:
  bool NextDirection(const LineSearchMinimizer::State& /*previous*/,
                     const LineSearchMinimizer::State& current,
                     Vector* search_direction) override {
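    // Steepest descent ignores all curvature information and simply steps
    // along the negative gradient.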
    *search_direction = -current.gradient;
    return true;
  }
};

class CERES_NO_EXPORT NonlinearConjugateGradient final
    : public LineSearchDirection {
 public:
  NonlinearConjugateGradient(const NonlinearConjugateGradientType type,
                             const double function_tolerance)
      : type_(type), function_tolerance_(function_tolerance) {}

  bool NextDirection(const LineSearchMinimizer::State& previous,
                     const LineSearchMinimizer::State& current,
                     Vector* search_direction) override {
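    // All nonlinear conjugate gradient variants compute the direction as
    //
    //   d_k = -g_k + beta_k * d_{k-1},
    //
    // and differ only in the choice of beta_k, handled by the switch below.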
    double beta = 0.0;
    Vector gradient_change;
    switch (type_) {
      case FLETCHER_REEVES:
        beta = current.gradient_squared_norm / previous.gradient_squared_norm;
        break;
      case POLAK_RIBIERE:
        gradient_change = current.gradient - previous.gradient;
        beta = (current.gradient.dot(gradient_change) /
                previous.gradient_squared_norm);
        break;
      case HESTENES_STIEFEL:
        gradient_change = current.gradient - previous.gradient;
        beta = (current.gradient.dot(gradient_change) /
                previous.search_direction.dot(gradient_change));
        break;
      default:
        LOG(FATAL) << "Unknown nonlinear conjugate gradient type: " << type_;
    }

    *search_direction = -current.gradient + beta * previous.search_direction;
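    // If d_k is not a sufficiently strong descent direction (its directional
    // derivative g_k' * d_k is not clearly negative), the conjugacy
    // information is unlikely to be useful, so restart with steepest descent.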
    const double directional_derivative =
        current.gradient.dot(*search_direction);
    if (directional_derivative > -function_tolerance_) {
      LOG(WARNING) << "Restarting non-linear conjugate gradients: "
                   << directional_derivative;
      *search_direction = -current.gradient;
    }

    return true;
  }

 private:
  const NonlinearConjugateGradientType type_;
  const double function_tolerance_;
};

class CERES_NO_EXPORT LBFGS final : public LineSearchDirection {
 public:
  LBFGS(const int num_parameters,
        const int max_lbfgs_rank,
        const bool use_approximate_eigenvalue_bfgs_scaling)
      : low_rank_inverse_hessian_(num_parameters,
                                  max_lbfgs_rank,
                                  use_approximate_eigenvalue_bfgs_scaling),
        is_positive_definite_(true) {}

  bool NextDirection(const LineSearchMinimizer::State& previous,
                     const LineSearchMinimizer::State& current,
                     Vector* search_direction) override {
    CHECK(is_positive_definite_)
        << "Ceres bug: NextDirection() called on L-BFGS after inverse Hessian "
        << "approximation has become indefinite, please contact the "
        << "developers!";

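    // Update the inverse Hessian approximation with the most recent secant
    // pair: s_k = step_size * search_direction (the step actually taken) and
    // y_k = gradient_{k+1} - gradient_k.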
    low_rank_inverse_hessian_.Update(
        previous.search_direction * previous.step_size,
        current.gradient - previous.gradient);

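    // Compute d_k = -H_k * g_k: RightMultiplyAndAccumulate() accumulates
    // H_k * g_k into search_direction, which is then negated in place.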
    search_direction->setZero();
    low_rank_inverse_hessian_.RightMultiplyAndAccumulate(
        current.gradient.data(), search_direction->data());
    *search_direction *= -1.0;

    if (search_direction->dot(current.gradient) >= 0.0) {
      LOG(WARNING) << "Numerical failure in L-BFGS update: inverse Hessian "
                   << "approximation is not positive definite, and thus "
                   << "initial gradient for search direction is positive: "
                   << search_direction->dot(current.gradient);
      is_positive_definite_ = false;
      return false;
    }

    return true;
  }

 private:
  LowRankInverseHessian low_rank_inverse_hessian_;
  bool is_positive_definite_;
};

class CERES_NO_EXPORT BFGS final : public LineSearchDirection {
 public:
  BFGS(const int num_parameters, const bool use_approximate_eigenvalue_scaling)
      : num_parameters_(num_parameters),
        use_approximate_eigenvalue_scaling_(use_approximate_eigenvalue_scaling),
        initialized_(false),
        is_positive_definite_(true) {
    if (num_parameters_ >= 1000) {
      LOG(WARNING) << "BFGS line search being created with: " << num_parameters_
                   << " parameters, this will allocate a dense approximate "
                   << "inverse Hessian of size: " << num_parameters_ << " x "
                   << num_parameters_
                   << ", consider using the L-BFGS memory-efficient line "
                   << "search direction instead.";
    }
    // Construct inverse_hessian_ after logging the warning about size, so
    // that if the allocation crashes us, the log will highlight what the
    // issue likely was.
    inverse_hessian_ = Matrix::Identity(num_parameters, num_parameters);
  }

  bool NextDirection(const LineSearchMinimizer::State& previous,
                     const LineSearchMinimizer::State& current,
                     Vector* search_direction) override {
    CHECK(is_positive_definite_)
        << "Ceres bug: NextDirection() called on BFGS after inverse Hessian "
        << "approximation has become indefinite, please contact the "
        << "developers!";

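    // Form the secant pair for this iteration: s_k (delta_x) is the step
    // actually taken, and y_k (delta_gradient) is the corresponding change
    // in the gradient.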
    const Vector delta_x = previous.search_direction * previous.step_size;
    const Vector delta_gradient = current.gradient - previous.gradient;
    const double delta_x_dot_delta_gradient = delta_x.dot(delta_gradient);

    // The (L)BFGS algorithm explicitly requires that the secant equation:
    //
    //   B_{k+1} * s_k = y_k
    //
    // is satisfied at each iteration, where B_{k+1} is the approximated
    // Hessian at the (k+1)-th iteration, s_k = (x_{k+1} - x_k) and
    // y_k = (grad_{k+1} - grad_k). As the approximated Hessian must be
    // positive definite, this is equivalent to the condition:
    //
    //   s_k^T * y_k > 0    [s_k^T * B_{k+1} * s_k = s_k^T * y_k > 0]
    //
    // This condition would always be satisfied if the function were strictly
    // convex; alternatively, it is always satisfied provided that a Wolfe
    // line search is used (even if the function is not strictly convex). See
    // [1] (p138) for a proof.
    //
    // Although Ceres will always use a Wolfe line search when using (L)BFGS,
    // practical implementation considerations mean that the line search
    // may return a point that satisfies only the Armijo condition, and thus
    // could violate the secant equation. As such, we will only use a step
    // to update the Hessian approximation if:
    //
    //   s_k^T * y_k > tolerance
    //
    // It is important that tolerance is very small (and >= 0), as otherwise
    // we might skip the update too often and fail to capture important
    // curvature information in the Hessian. For example, going from
    // 1e-10 -> 1e-14 improves the NIST benchmark score from 43/54 to 53/54.
    //
    // [1] Nocedal J., Wright S., Numerical Optimization, 2nd Ed. Springer,
    //     1999.
    //
    // TODO(alexs.mac): Consider using a Damped BFGS update instead of
    // skipping the update.
    const double kBFGSSecantConditionHessianUpdateTolerance = 1e-14;
    if (delta_x_dot_delta_gradient <=
        kBFGSSecantConditionHessianUpdateTolerance) {
      VLOG(2) << "Skipping BFGS Update, delta_x_dot_delta_gradient too "
              << "small: " << delta_x_dot_delta_gradient
              << ", tolerance: " << kBFGSSecantConditionHessianUpdateTolerance
              << " (Secant condition).";
    } else {
      // Update dense inverse Hessian approximation.

      if (!initialized_ && use_approximate_eigenvalue_scaling_) {
        // Rescale the initial inverse Hessian approximation (H_0) to be
        // iteratively updated so that it is of similar 'size' to the true
        // inverse Hessian at the start point. As shown in [1]:
        //
        //   \gamma = (delta_gradient_{0}' * delta_x_{0}) /
        //            (delta_gradient_{0}' * delta_gradient_{0})
        //
        // satisfies:
        //
        //   (1 / \lambda_m) <= \gamma <= (1 / \lambda_1)
        //
        // where \lambda_1 & \lambda_m are the smallest and largest
        // eigenvalues of the true initial Hessian (not the inverse)
        // respectively. Thus, \gamma is an approximate eigenvalue of the
        // true inverse Hessian, and choosing H_0 = I * \gamma will yield a
        // starting point that has a similar scale to the true inverse
        // Hessian. This technique is widely reported to often improve
        // convergence; however, this is not universally true, particularly
        // if there are errors in the initial gradients, or if there are
        // significant differences in the sensitivity of the problem to the
        // parameters (i.e. the range of the magnitudes of the components of
        // the gradient is large).
        //
        // The origin of this rescaling trick is somewhat unclear; the
        // earliest reference appears to be Oren [1], however it is widely
        // discussed without specific attribution in various texts including
        // [2] (p143).
        //
        // [1] Oren S.S., Self-scaling variable metric (SSVM) algorithms
        //     Part II: Implementation and experiments, Management Science,
        //     20(5), 863-874, 1974.
        // [2] Nocedal J., Wright S., Numerical Optimization, Springer, 1999.
        const double approximate_eigenvalue_scale =
            delta_x_dot_delta_gradient / delta_gradient.dot(delta_gradient);
        inverse_hessian_ *= approximate_eigenvalue_scale;

        VLOG(4) << "Applying approximate_eigenvalue_scale: "
                << approximate_eigenvalue_scale << " to initial inverse "
                << "Hessian approximation.";
      }
      initialized_ = true;

      // Efficient O(num_parameters^2) BFGS update [2].
      //
      // Starting from the dense BFGS update detailed in Nocedal [2]
      // (p140/177) and using y_k = delta_gradient, s_k = delta_x:
      //
      //   \rho_k = 1.0 / (s_k' * y_k)
      //   V_k = I - \rho_k * y_k * s_k'
      //   H_k = (V_k' * H_{k-1} * V_k) + (\rho_k * s_k * s_k')
      //
      // This update involves matrix-matrix products, which are naively
      // O(N^3); however, we can exploit our knowledge that H_k is positive
      // definite, and thus by definition symmetric, to reduce the cost of
      // the update.
      //
      // Expanding the update above yields:
      //
      //   H_k = H_{k-1} +
      //         \rho_k * ( (1.0 + \rho_k * y_k' * H_k * y_k) * s_k * s_k' -
      //                    (s_k * y_k' * H_k + H_k * y_k * s_k') )
      //
      // Using A = (s_k * y_k' * H_k) and the knowledge that H_k = H_k', the
      // last term simplifies to (A + A'). Note that although A is not
      // symmetric, (A + A') is. For ease of construction we also define
      // B = (1 + \rho_k * y_k' * H_k * y_k) * s_k * s_k', which is by
      // definition symmetric due to its construction from s_k * s_k'.
      //
      // Now we can write the BFGS update as:
      //
      //   H_k = H_{k-1} + \rho_k * (B - (A + A'))

      // For efficiency, as H_k is by definition symmetric, we will only
      // maintain the *lower* triangle of H_k (and all intermediary terms).

      const double rho_k = 1.0 / delta_x_dot_delta_gradient;

      // Calculate: A = s_k * y_k' * H_k
      Matrix A = delta_x * (delta_gradient.transpose() *
                            inverse_hessian_.selfadjointView<Eigen::Lower>());

      // Calculate scalar: (1 + \rho_k * y_k' * H_k * y_k)
      const double delta_x_times_delta_x_transpose_scale_factor =
          (1.0 +
           (rho_k * delta_gradient.transpose() *
            inverse_hessian_.selfadjointView<Eigen::Lower>() * delta_gradient));
      // Calculate: B = (1 + \rho_k * y_k' * H_k * y_k) * s_k * s_k'
      Matrix B = Matrix::Zero(num_parameters_, num_parameters_);
      B.selfadjointView<Eigen::Lower>().rankUpdate(
          delta_x, delta_x_times_delta_x_transpose_scale_factor);

      // Finally, update the inverse Hessian approximation according to
      // H_k = H_{k-1} + \rho_k * (B - (A + A')). Note that (A + A') is
      // symmetric, even though A is not.
      inverse_hessian_.triangularView<Eigen::Lower>() +=
          rho_k * (B - A - A.transpose());
    }

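    // Compute d_k = -H_k * g_k using only the stored lower triangle of the
    // inverse Hessian approximation.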
    *search_direction = inverse_hessian_.selfadjointView<Eigen::Lower>() *
                        (-1.0 * current.gradient);

    if (search_direction->dot(current.gradient) >= 0.0) {
      LOG(WARNING) << "Numerical failure in BFGS update: inverse Hessian "
                   << "approximation is not positive definite, and thus "
                   << "initial gradient for search direction is positive: "
                   << search_direction->dot(current.gradient);
      is_positive_definite_ = false;
      return false;
    }

    return true;
  }

 private:
  const int num_parameters_;
  const bool use_approximate_eigenvalue_scaling_;
  Matrix inverse_hessian_;
  bool initialized_;
  bool is_positive_definite_;
};

LineSearchDirection::~LineSearchDirection() = default;

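// Factory mapping LineSearchDirection::Options to the concrete
// implementations above. A minimal usage sketch (illustrative only; the
// remaining Options fields and their defaults are declared in
// line_search_direction.h):
//
//   LineSearchDirection::Options options;
//   options.type = ceres::LBFGS;
//   options.num_parameters = num_effective_parameters;  // hypothetical value
//   std::unique_ptr<LineSearchDirection> direction =
//       LineSearchDirection::Create(options);
//   CHECK(direction != nullptr);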
std::unique_ptr<LineSearchDirection> LineSearchDirection::Create(
    const LineSearchDirection::Options& options) {
  if (options.type == STEEPEST_DESCENT) {
    return std::make_unique<SteepestDescent>();
  }

  if (options.type == NONLINEAR_CONJUGATE_GRADIENT) {
    return std::make_unique<NonlinearConjugateGradient>(
        options.nonlinear_conjugate_gradient_type, options.function_tolerance);
  }

  if (options.type == ceres::LBFGS) {
    return std::make_unique<ceres::internal::LBFGS>(
        options.num_parameters,
        options.max_lbfgs_rank,
        options.use_approximate_eigenvalue_bfgs_scaling);
  }

  if (options.type == ceres::BFGS) {
    return std::make_unique<ceres::internal::BFGS>(
        options.num_parameters,
        options.use_approximate_eigenvalue_bfgs_scaling);
  }

  LOG(ERROR) << "Unknown line search direction type: " << options.type;
  return nullptr;
}

}  // namespace ceres::internal