// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2018 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: mierle@gmail.com (Keir Mierle)

#include "ceres/evaluation_callback.h"

#include <cmath>
#include <cstdint>
#include <limits>
#include <memory>
#include <vector>

#include "ceres/autodiff_cost_function.h"
#include "ceres/problem.h"
#include "ceres/problem_impl.h"
#include "ceres/sized_cost_function.h"
#include "ceres/solver.h"
#include "gtest/gtest.h"

namespace ceres {
namespace internal {

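// The tests below fingerprint a parameter block by hashing its raw bytes;
// comparing hashes across calls detects whether Ceres has pushed new
// parameter values into the user-owned block.
//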
// Use an inline hash function to avoid portability wrangling. Algorithm from
// Daniel Bernstein, known as the "djb2" hash.
template <typename T>
uint64_t Djb2Hash(const T* data, const int size) {
  uint64_t hash = 5381;
  const uint8_t* data_as_bytes = reinterpret_cast<const uint8_t*>(data);
  for (int i = 0; i < sizeof(*data) * size; ++i) {
    hash = hash * 33 + data_as_bytes[i];
  }
  return hash;
}

const double kUninitialized = 0;

// Generally multiple inheritance is a terrible idea, but in this (test)
// case it makes for a relatively elegant test implementation.
struct WigglyBowlCostFunctionAndEvaluationCallback : SizedCostFunction<2, 2>,
                                                     EvaluationCallback {
  explicit WigglyBowlCostFunctionAndEvaluationCallback(double* parameter)
      : EvaluationCallback(),
        user_parameter_block(parameter),
        prepare_num_calls(0),
        prepare_requested_jacobians(false),
        prepare_new_evaluation_point(false),
        prepare_parameter_hash(kUninitialized),
        evaluate_num_calls(0),
        evaluate_last_parameter_hash(kUninitialized) {}

  virtual ~WigglyBowlCostFunctionAndEvaluationCallback() {}

  // Evaluation callback interface. This checks that all the preconditions are
  // met at the point that Ceres calls into it.
  void PrepareForEvaluation(bool evaluate_jacobians,
                            bool new_evaluation_point) final {
    // At this point, the incoming parameters have been implicitly pushed by
    // Ceres into the user parameter blocks, unlike in Evaluate().
    uint64_t incoming_parameter_hash = Djb2Hash(user_parameter_block, 2);

    // Check: Prepare() & Evaluate() come in pairs, in that order. Since this
    // call has not been counted yet, the two call counts should still match.
    EXPECT_EQ(prepare_num_calls, evaluate_num_calls);

    // Check: new_evaluation_point indicates that the parameter has changed.
    if (new_evaluation_point) {
      // If it's a new evaluation point, then the parameter should have
      // changed. Technically, a change is not required, but in practice it
      // happens, and that helps with testing.
      EXPECT_NE(evaluate_last_parameter_hash, incoming_parameter_hash);
      EXPECT_NE(prepare_parameter_hash, incoming_parameter_hash);
    } else {
      // If this is the same evaluation point as last time, ensure that the
      // parameters match the previous evaluate, the previous prepare, and
      // the current prepare.
      EXPECT_EQ(evaluate_last_parameter_hash, prepare_parameter_hash);
      EXPECT_EQ(evaluate_last_parameter_hash, incoming_parameter_hash);
    }

    // Save details to check at the next call to Evaluate().
    prepare_num_calls++;
    prepare_requested_jacobians = evaluate_jacobians;
    prepare_new_evaluation_point = new_evaluation_point;
    prepare_parameter_hash = incoming_parameter_hash;
  }

  // Cost function interface. This checks that preconditions that were
  // set as part of the PrepareForEvaluation() call are met in this one.
  bool Evaluate(double const* const* parameters,
                double* residuals,
                double** jacobians) const final {
    // Cost function implementation of the "Wiggly Bowl" function:
    //
    //   1/2 * [(y - a*sin(x))^2 + x^2],
    //
    // expressed as a Ceres cost function with two residuals:
    //
    //   r[0] = y - a*sin(x)
    //   r[1] = x.
    //
    // This is harder to optimize than the Rosenbrock function because the
    // minimizer has to navigate a sine-shaped valley while descending the 1D
    // parabola formed along the y axis. Note that the "a" needs to be more
    // than 5 to get a strong enough wiggle effect in the cost surface to
    // trigger failed iterations in the optimizer.
    const double a = 10.0;
    double x = (*parameters)[0];
    double y = (*parameters)[1];
    residuals[0] = y - a * sin(x);
    residuals[1] = x;
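    // Jacobian of the two residuals with respect to the parameter block
    // (x, y), stored row-major in the single 2x2 jacobian block below:
    //
    //   dr[0]/dx = -a*cos(x)    dr[0]/dy = 1
    //   dr[1]/dx = 1            dr[1]/dy = 0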
    if (jacobians != NULL) {
      (*jacobians)[2 * 0 + 0] = -a * cos(x);  // df1/dx
      (*jacobians)[2 * 0 + 1] = 1.0;          // df1/dy
      (*jacobians)[2 * 1 + 0] = 1.0;          // df2/dx
      (*jacobians)[2 * 1 + 1] = 0.0;          // df2/dy
    }

    uint64_t incoming_parameter_hash = Djb2Hash(*parameters, 2);

    // Check: PrepareForEvaluation() & Evaluate() come in pairs, in that order.
    EXPECT_EQ(prepare_num_calls, evaluate_num_calls + 1);

    // Check: if new_evaluation_point indicates that the parameter has
    // changed, it has changed; otherwise it is the same.
    if (prepare_new_evaluation_point) {
      EXPECT_NE(evaluate_last_parameter_hash, incoming_parameter_hash);
    } else {
      EXPECT_NE(evaluate_last_parameter_hash, kUninitialized);
      EXPECT_EQ(evaluate_last_parameter_hash, incoming_parameter_hash);
    }

    // Check: parameter values match those seen during the last prepare.
    EXPECT_EQ(prepare_parameter_hash, incoming_parameter_hash);

    // Check: jacobians are requested if they were in PrepareForEvaluation().
    EXPECT_EQ(prepare_requested_jacobians, jacobians != NULL);

    evaluate_num_calls++;
    evaluate_last_parameter_hash = incoming_parameter_hash;
    return true;
  }

  // Pointer to the parameter block associated with this cost function.
  // Contents should get set by Ceres before calls to PrepareForEvaluation()
  // and Evaluate().
  double* user_parameter_block;

  // Track state: PrepareForEvaluation().
  //
  // These track details from the PrepareForEvaluation() call (hence the
  // "prepare_" prefix), which are checked for consistency in Evaluate().
  int prepare_num_calls;
  bool prepare_requested_jacobians;
  bool prepare_new_evaluation_point;
  uint64_t prepare_parameter_hash;

  // Track state: Evaluate().
  //
  // These track details from the Evaluate() call (hence the "evaluate_"
  // prefix), which are then checked for consistency in the calls to
  // PrepareForEvaluation(). Mutable is reasonable for this case.
  mutable int evaluate_num_calls;
  mutable uint64_t evaluate_last_parameter_hash;
};

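// In the trust region and line search tests below, a single instance of the
// struct above serves both as the Problem's evaluation_callback and as the
// cost function of its only residual block, which lets it cross-check state
// between the two interfaces.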
TEST(EvaluationCallback, WithTrustRegionMinimizer) {
  double parameters[2] = {50.0, 50.0};
  const uint64_t original_parameters_hash = Djb2Hash(parameters, 2);

  WigglyBowlCostFunctionAndEvaluationCallback cost_function(parameters);
  Problem::Options problem_options;
  problem_options.evaluation_callback = &cost_function;
  problem_options.cost_function_ownership = DO_NOT_TAKE_OWNERSHIP;
  Problem problem(problem_options);
  problem.AddResidualBlock(&cost_function, NULL, parameters);

  Solver::Options options;
  options.linear_solver_type = DENSE_QR;
  options.max_num_iterations = 50;

  // Run the solve. Checking is done inside the cost function / callback.
  Solver::Summary summary;
  Solve(options, &problem, &summary);

  // Ensure that this was a hard cost function (not all steps succeed).
  EXPECT_GT(summary.num_successful_steps, 10);
  EXPECT_GT(summary.num_unsuccessful_steps, 10);

  // Ensure PrepareForEvaluation() is called the appropriate number of times.
  EXPECT_EQ(
      cost_function.prepare_num_calls,
      // Unsuccessful steps are evaluated only once (no jacobians).
      summary.num_unsuccessful_steps +
          // Successful steps are evaluated twice: with and without jacobians.
          2 * summary.num_successful_steps
          // Final iteration doesn't re-evaluate the jacobian.
          // Note: This may be sensitive to tweaks to the TR algorithm; if
          // this becomes too brittle, remove this EXPECT_EQ() entirely.
          - 1);

  // Ensure the callback calls ran a reasonable number of times.
  EXPECT_GT(cost_function.prepare_num_calls, 0);
  EXPECT_GT(cost_function.evaluate_num_calls, 0);
  EXPECT_EQ(cost_function.prepare_num_calls, cost_function.evaluate_num_calls);

  // Ensure that the parameters did actually change.
  EXPECT_NE(Djb2Hash(parameters, 2), original_parameters_hash);
}

// r = 1 - x
struct LinearResidual {
  template <typename T>
  bool operator()(const T* x, T* residuals) const {
    residuals[0] = 1.0 - x[0];
    return true;
  }

  static CostFunction* Create() {
    return new AutoDiffCostFunction<LinearResidual, 1, 1>(new LinearResidual);
  }
};

// Increments a counter every time PrepareForEvaluation is called.
class IncrementingEvaluationCallback : public EvaluationCallback {
 public:
  void PrepareForEvaluation(bool evaluate_jacobians,
                            bool new_evaluation_point) final {
    (void)evaluate_jacobians;
    (void)new_evaluation_point;
    counter_ += 1.0;
  }

  double counter() const { return counter_; }

 private:
  double counter_ = -1;
};

// r = IncrementingEvaluationCallback::counter - x
struct EvaluationCallbackResidual {
  explicit EvaluationCallbackResidual(
      const IncrementingEvaluationCallback& callback)
      : callback(callback) {}

  template <typename T>
  bool operator()(const T* x, T* residuals) const {
    residuals[0] = callback.counter() - x[0];
    return true;
  }

  const IncrementingEvaluationCallback& callback;

  static CostFunction* Create(IncrementingEvaluationCallback& callback) {
    return new AutoDiffCostFunction<EvaluationCallbackResidual, 1, 1>(
        new EvaluationCallbackResidual(callback));
  }
};

// The following test constructs a problem containing a residual block whose
// parameters are all constant; such blocks are evaluated once outside the
// Minimizer to compute Solver::Summary::fixed_cost.
//
// The cost function for this residual block depends on
// IncrementingEvaluationCallback::counter_, so by checking the value of the
// fixed cost we can verify that the IncrementingEvaluationCallback was called
// before the fixed cost was evaluated.
TEST(EvaluationCallback, EvaluationCallbackIsCalledBeforeFixedCostIsEvaluated) {
  double x = 1;
  double y = 2;
  std::unique_ptr<IncrementingEvaluationCallback> callback(
      new IncrementingEvaluationCallback);
  Problem::Options problem_options;
  problem_options.evaluation_callback = callback.get();
  Problem problem(problem_options);
  problem.AddResidualBlock(LinearResidual::Create(), nullptr, &x);
  problem.AddResidualBlock(
      EvaluationCallbackResidual::Create(*callback), nullptr, &y);
  problem.SetParameterBlockConstant(&y);

  Solver::Options options;
  options.linear_solver_type = DENSE_QR;
  Solver::Summary summary;
  Solve(options, &problem, &summary);
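  // counter_ starts at -1, so assuming PrepareForEvaluation() has run exactly
  // once by the time the fixed cost is computed, the constant block's residual
  // is 0 - 2 and fixed_cost = 1/2 * (-2)^2 = 2. The free residual 1 - x is
  // driven to zero at x = 1, so the final cost equals the fixed cost.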
  EXPECT_EQ(summary.fixed_cost, 2.0);
  EXPECT_EQ(summary.final_cost, summary.fixed_cost);
  EXPECT_GT(callback->counter(), 0);
}

static void WithLineSearchMinimizerImpl(
    LineSearchType line_search,
    LineSearchDirectionType line_search_direction,
    LineSearchInterpolationType line_search_interpolation) {
  double parameters[2] = {50.0, 50.0};
  const uint64_t original_parameters_hash = Djb2Hash(parameters, 2);

  WigglyBowlCostFunctionAndEvaluationCallback cost_function(parameters);
  Problem::Options problem_options;
  problem_options.evaluation_callback = &cost_function;
  problem_options.cost_function_ownership = DO_NOT_TAKE_OWNERSHIP;
  Problem problem(problem_options);
  problem.AddResidualBlock(&cost_function, NULL, parameters);

  Solver::Options options;
  options.linear_solver_type = DENSE_QR;
  options.max_num_iterations = 50;
  options.minimizer_type = ceres::LINE_SEARCH;

  options.line_search_type = line_search;
  options.line_search_direction_type = line_search_direction;
  options.line_search_interpolation_type = line_search_interpolation;

  // Run the solve. Checking is done inside the cost function / callback.
  Solver::Summary summary;
  Solve(options, &problem, &summary);

  // Ensure the callback calls ran a reasonable number of times.
  EXPECT_GT(summary.num_line_search_steps, 10);
  EXPECT_GT(cost_function.prepare_num_calls, 30);
  EXPECT_EQ(cost_function.prepare_num_calls, cost_function.evaluate_num_calls);

  // Ensure that the parameters did actually change.
  EXPECT_NE(Djb2Hash(parameters, 2), original_parameters_hash);
}

// Note: These tests omit combinations of Wolfe line search with bisection.
// Due to an implementation quirk in Wolfe line search with bisection, there
// are calls to re-evaluate an existing point with new_point = true. That
// causes the (overly) strict tests to break, since they check the new_point
// preconditions in an if-and-only-if way. Strictly speaking, if new_point =
// true, the interface does not *require* that the point has changed; only that
// if new_point = false, the same point is reused.
//
// Since the strict checking is useful to verify that there aren't missed
// optimizations, omit tests of the Wolfe with bisection cases.

// Wolfe with L-BFGS.
TEST(EvaluationCallback, WithLineSearchMinimizerWolfeLbfgsCubic) {
  WithLineSearchMinimizerImpl(WOLFE, LBFGS, CUBIC);
}

TEST(EvaluationCallback, WithLineSearchMinimizerWolfeLbfgsQuadratic) {
  WithLineSearchMinimizerImpl(WOLFE, LBFGS, QUADRATIC);
}

// Wolfe with full BFGS.
TEST(EvaluationCallback, WithLineSearchMinimizerWolfeBfgsCubic) {
  WithLineSearchMinimizerImpl(WOLFE, BFGS, CUBIC);
}

TEST(EvaluationCallback, WithLineSearchMinimizerWolfeBfgsQuadratic) {
  WithLineSearchMinimizerImpl(WOLFE, BFGS, QUADRATIC);
}

// Armijo with nonlinear conjugate gradient.
TEST(EvaluationCallback, WithLineSearchMinimizerArmijoCubic) {
  WithLineSearchMinimizerImpl(ARMIJO, NONLINEAR_CONJUGATE_GRADIENT, CUBIC);
}

TEST(EvaluationCallback, WithLineSearchMinimizerArmijoBisection) {
  WithLineSearchMinimizerImpl(ARMIJO, NONLINEAR_CONJUGATE_GRADIENT, BISECTION);
}

TEST(EvaluationCallback, WithLineSearchMinimizerArmijoQuadratic) {
  WithLineSearchMinimizerImpl(ARMIJO, NONLINEAR_CONJUGATE_GRADIENT, QUADRATIC);
}

}  // namespace internal
}  // namespace ceres