blob: 2a20470112af83a234612c6ffb39f315e296b548 [file] [log] [blame]
Austin Schuh70cc9552019-01-21 19:46:48 -08001// Ceres Solver - A fast non-linear least squares minimizer
Austin Schuh3de38b02024-06-25 18:25:10 -07002// Copyright 2023 Google Inc. All rights reserved.
Austin Schuh70cc9552019-01-21 19:46:48 -08003// http://ceres-solver.org/
4//
5// Redistribution and use in source and binary forms, with or without
6// modification, are permitted provided that the following conditions are met:
7//
8// * Redistributions of source code must retain the above copyright notice,
9// this list of conditions and the following disclaimer.
10// * Redistributions in binary form must reproduce the above copyright notice,
11// this list of conditions and the following disclaimer in the documentation
12// and/or other materials provided with the distribution.
13// * Neither the name of Google Inc. nor the names of its contributors may be
14// used to endorse or promote products derived from this software without
15// specific prior written permission.
16//
17// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
18// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
20// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
21// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
24// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
25// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
26// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
27// POSSIBILITY OF SUCH DAMAGE.
28//
29// Author: wjr@google.com (William Rucklidge)
30//
31// This file contains tests for the GradientChecker class.
32
#include "ceres/gradient_checker.h"

#include <cmath>
#include <limits>
#include <map>
#include <random>
#include <utility>
#include <vector>

#include "ceres/cost_function.h"
#include "ceres/manifold.h"
#include "ceres/problem.h"
#include "ceres/solver.h"
#include "ceres/test_util.h"
#include "glog/logging.h"
#include "gtest/gtest.h"
46
Austin Schuh3de38b02024-06-25 18:25:10 -070047namespace ceres::internal {
Austin Schuh70cc9552019-01-21 19:46:48 -080048
Austin Schuh1d1e6ea2020-12-23 21:56:30 -080049const double kTolerance = 1e-12;
Austin Schuh70cc9552019-01-21 19:46:48 -080050
// We pick a (non-quadratic) function whose derivatives are easy:
//
// f = exp(- a' x).
// df = - f a.
//
// where 'a' is a vector of the same size as 'x'. In the block
// version, they are both block vectors, of course.
58class GoodTestTerm : public CostFunction {
59 public:
Austin Schuh3de38b02024-06-25 18:25:10 -070060 template <class UniformRandomFunctor>
61 GoodTestTerm(int arity, int const* dim, UniformRandomFunctor&& randu)
62 : arity_(arity), return_value_(true) {
63 std::uniform_real_distribution<double> distribution(-1.0, 1.0);
Austin Schuh70cc9552019-01-21 19:46:48 -080064 // Make 'arity' random vectors.
65 a_.resize(arity_);
66 for (int j = 0; j < arity_; ++j) {
67 a_[j].resize(dim[j]);
68 for (int u = 0; u < dim[j]; ++u) {
Austin Schuh3de38b02024-06-25 18:25:10 -070069 a_[j][u] = randu();
Austin Schuh70cc9552019-01-21 19:46:48 -080070 }
71 }
72
73 for (int i = 0; i < arity_; i++) {
74 mutable_parameter_block_sizes()->push_back(dim[i]);
75 }
76 set_num_residuals(1);
77 }
78
79 bool Evaluate(double const* const* parameters,
80 double* residuals,
Austin Schuh3de38b02024-06-25 18:25:10 -070081 double** jacobians) const override {
Austin Schuh70cc9552019-01-21 19:46:48 -080082 if (!return_value_) {
83 return false;
84 }
85 // Compute a . x.
86 double ax = 0;
87 for (int j = 0; j < arity_; ++j) {
88 for (int u = 0; u < parameter_block_sizes()[j]; ++u) {
89 ax += a_[j][u] * parameters[j][u];
90 }
91 }
92
93 // This is the cost, but also appears as a factor
94 // in the derivatives.
95 double f = *residuals = exp(-ax);
96
97 // Accumulate 1st order derivatives.
98 if (jacobians) {
99 for (int j = 0; j < arity_; ++j) {
100 if (jacobians[j]) {
101 for (int u = 0; u < parameter_block_sizes()[j]; ++u) {
102 // See comments before class.
103 jacobians[j][u] = -f * a_[j][u];
104 }
105 }
106 }
107 }
108
109 return true;
110 }
111
112 void SetReturnValue(bool return_value) { return_value_ = return_value; }
113
114 private:
115 int arity_;
116 bool return_value_;
Austin Schuh3de38b02024-06-25 18:25:10 -0700117 std::vector<std::vector<double>> a_; // our vectors.
Austin Schuh70cc9552019-01-21 19:46:48 -0800118};
119
120class BadTestTerm : public CostFunction {
121 public:
Austin Schuh3de38b02024-06-25 18:25:10 -0700122 template <class UniformRandomFunctor>
123 BadTestTerm(int arity, int const* dim, UniformRandomFunctor&& randu)
124 : arity_(arity) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800125 // Make 'arity' random vectors.
126 a_.resize(arity_);
127 for (int j = 0; j < arity_; ++j) {
128 a_[j].resize(dim[j]);
129 for (int u = 0; u < dim[j]; ++u) {
Austin Schuh3de38b02024-06-25 18:25:10 -0700130 a_[j][u] = randu();
Austin Schuh70cc9552019-01-21 19:46:48 -0800131 }
132 }
133
134 for (int i = 0; i < arity_; i++) {
135 mutable_parameter_block_sizes()->push_back(dim[i]);
136 }
137 set_num_residuals(1);
138 }
139
140 bool Evaluate(double const* const* parameters,
141 double* residuals,
Austin Schuh3de38b02024-06-25 18:25:10 -0700142 double** jacobians) const override {
Austin Schuh70cc9552019-01-21 19:46:48 -0800143 // Compute a . x.
144 double ax = 0;
145 for (int j = 0; j < arity_; ++j) {
146 for (int u = 0; u < parameter_block_sizes()[j]; ++u) {
147 ax += a_[j][u] * parameters[j][u];
148 }
149 }
150
151 // This is the cost, but also appears as a factor
152 // in the derivatives.
153 double f = *residuals = exp(-ax);
154
155 // Accumulate 1st order derivatives.
156 if (jacobians) {
157 for (int j = 0; j < arity_; ++j) {
158 if (jacobians[j]) {
159 for (int u = 0; u < parameter_block_sizes()[j]; ++u) {
160 // See comments before class.
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800161 jacobians[j][u] = -f * a_[j][u] + kTolerance;
Austin Schuh70cc9552019-01-21 19:46:48 -0800162 }
163 }
164 }
165 }
166
167 return true;
168 }
169
170 private:
171 int arity_;
Austin Schuh3de38b02024-06-25 18:25:10 -0700172 std::vector<std::vector<double>> a_; // our vectors.
Austin Schuh70cc9552019-01-21 19:46:48 -0800173};
174
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800175static void CheckDimensions(const GradientChecker::ProbeResults& results,
176 const std::vector<int>& parameter_sizes,
177 const std::vector<int>& local_parameter_sizes,
178 int residual_size) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800179 CHECK_EQ(parameter_sizes.size(), local_parameter_sizes.size());
180 int num_parameters = parameter_sizes.size();
181 ASSERT_EQ(residual_size, results.residuals.size());
182 ASSERT_EQ(num_parameters, results.local_jacobians.size());
183 ASSERT_EQ(num_parameters, results.local_numeric_jacobians.size());
184 ASSERT_EQ(num_parameters, results.jacobians.size());
185 ASSERT_EQ(num_parameters, results.numeric_jacobians.size());
186 for (int i = 0; i < num_parameters; ++i) {
187 EXPECT_EQ(residual_size, results.local_jacobians.at(i).rows());
188 EXPECT_EQ(local_parameter_sizes[i], results.local_jacobians.at(i).cols());
189 EXPECT_EQ(residual_size, results.local_numeric_jacobians.at(i).rows());
190 EXPECT_EQ(local_parameter_sizes[i],
191 results.local_numeric_jacobians.at(i).cols());
192 EXPECT_EQ(residual_size, results.jacobians.at(i).rows());
193 EXPECT_EQ(parameter_sizes[i], results.jacobians.at(i).cols());
194 EXPECT_EQ(residual_size, results.numeric_jacobians.at(i).rows());
195 EXPECT_EQ(parameter_sizes[i], results.numeric_jacobians.at(i).cols());
196 }
197}
198
199TEST(GradientChecker, SmokeTest) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800200 // Test with 3 blocks of size 2, 3 and 4.
201 int const num_parameters = 3;
202 std::vector<int> parameter_sizes(3);
203 parameter_sizes[0] = 2;
204 parameter_sizes[1] = 3;
205 parameter_sizes[2] = 4;
206
207 // Make a random set of blocks.
208 FixedArray<double*> parameters(num_parameters);
Austin Schuh3de38b02024-06-25 18:25:10 -0700209 std::mt19937 prng;
210 std::uniform_real_distribution<double> distribution(-1.0, 1.0);
211 auto randu = [&prng, &distribution] { return distribution(prng); };
Austin Schuh70cc9552019-01-21 19:46:48 -0800212 for (int j = 0; j < num_parameters; ++j) {
213 parameters[j] = new double[parameter_sizes[j]];
214 for (int u = 0; u < parameter_sizes[j]; ++u) {
Austin Schuh3de38b02024-06-25 18:25:10 -0700215 parameters[j][u] = randu();
Austin Schuh70cc9552019-01-21 19:46:48 -0800216 }
217 }
218
219 NumericDiffOptions numeric_diff_options;
220 GradientChecker::ProbeResults results;
221
222 // Test that Probe returns true for correct Jacobians.
Austin Schuh3de38b02024-06-25 18:25:10 -0700223 GoodTestTerm good_term(num_parameters, parameter_sizes.data(), randu);
224 std::vector<const Manifold*>* manifolds = nullptr;
225 GradientChecker good_gradient_checker(
226 &good_term, manifolds, numeric_diff_options);
227 EXPECT_TRUE(
228 good_gradient_checker.Probe(parameters.data(), kTolerance, nullptr));
Austin Schuh70cc9552019-01-21 19:46:48 -0800229 EXPECT_TRUE(
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800230 good_gradient_checker.Probe(parameters.data(), kTolerance, &results))
Austin Schuh70cc9552019-01-21 19:46:48 -0800231 << results.error_log;
232
233 // Check that results contain sensible data.
234 ASSERT_EQ(results.return_value, true);
235 ASSERT_EQ(results.residuals.size(), 1);
236 CheckDimensions(results, parameter_sizes, parameter_sizes, 1);
237 EXPECT_GE(results.maximum_relative_error, 0.0);
238 EXPECT_TRUE(results.error_log.empty());
239
240 // Test that if the cost function return false, Probe should return false.
241 good_term.SetReturnValue(false);
Austin Schuh70cc9552019-01-21 19:46:48 -0800242 EXPECT_FALSE(
Austin Schuh3de38b02024-06-25 18:25:10 -0700243 good_gradient_checker.Probe(parameters.data(), kTolerance, nullptr));
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800244 EXPECT_FALSE(
245 good_gradient_checker.Probe(parameters.data(), kTolerance, &results))
Austin Schuh70cc9552019-01-21 19:46:48 -0800246 << results.error_log;
247
248 // Check that results contain sensible data.
249 ASSERT_EQ(results.return_value, false);
250 ASSERT_EQ(results.residuals.size(), 1);
251 CheckDimensions(results, parameter_sizes, parameter_sizes, 1);
252 for (int i = 0; i < num_parameters; ++i) {
253 EXPECT_EQ(results.local_jacobians.at(i).norm(), 0);
254 EXPECT_EQ(results.local_numeric_jacobians.at(i).norm(), 0);
255 }
256 EXPECT_EQ(results.maximum_relative_error, 0.0);
257 EXPECT_FALSE(results.error_log.empty());
258
259 // Test that Probe returns false for incorrect Jacobians.
Austin Schuh3de38b02024-06-25 18:25:10 -0700260 BadTestTerm bad_term(num_parameters, parameter_sizes.data(), randu);
261 GradientChecker bad_gradient_checker(
262 &bad_term, manifolds, numeric_diff_options);
263 EXPECT_FALSE(
264 bad_gradient_checker.Probe(parameters.data(), kTolerance, nullptr));
Austin Schuh70cc9552019-01-21 19:46:48 -0800265 EXPECT_FALSE(
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800266 bad_gradient_checker.Probe(parameters.data(), kTolerance, &results));
Austin Schuh70cc9552019-01-21 19:46:48 -0800267
268 // Check that results contain sensible data.
269 ASSERT_EQ(results.return_value, true);
270 ASSERT_EQ(results.residuals.size(), 1);
271 CheckDimensions(results, parameter_sizes, parameter_sizes, 1);
272 EXPECT_GT(results.maximum_relative_error, kTolerance);
273 EXPECT_FALSE(results.error_log.empty());
274
275 // Setting a high threshold should make the test pass.
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800276 EXPECT_TRUE(bad_gradient_checker.Probe(parameters.data(), 1.0, &results));
Austin Schuh70cc9552019-01-21 19:46:48 -0800277
278 // Check that results contain sensible data.
279 ASSERT_EQ(results.return_value, true);
280 ASSERT_EQ(results.residuals.size(), 1);
281 CheckDimensions(results, parameter_sizes, parameter_sizes, 1);
282 EXPECT_GT(results.maximum_relative_error, 0.0);
283 EXPECT_TRUE(results.error_log.empty());
284
285 for (int j = 0; j < num_parameters; j++) {
286 delete[] parameters[j];
287 }
288}
289
290/**
291 * Helper cost function that multiplies the parameters by the given jacobians
292 * and adds a constant offset.
293 */
294class LinearCostFunction : public CostFunction {
295 public:
Austin Schuh3de38b02024-06-25 18:25:10 -0700296 explicit LinearCostFunction(Vector residuals_offset)
297 : residuals_offset_(std::move(residuals_offset)) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800298 set_num_residuals(residuals_offset_.size());
299 }
300
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800301 bool Evaluate(double const* const* parameter_ptrs,
302 double* residuals_ptr,
303 double** residual_J_params) const final {
Austin Schuh70cc9552019-01-21 19:46:48 -0800304 CHECK_GE(residual_J_params_.size(), 0.0);
305 VectorRef residuals(residuals_ptr, residual_J_params_[0].rows());
306 residuals = residuals_offset_;
307
308 for (size_t i = 0; i < residual_J_params_.size(); ++i) {
309 const Matrix& residual_J_param = residual_J_params_[i];
310 int parameter_size = residual_J_param.cols();
311 ConstVectorRef param(parameter_ptrs[i], parameter_size);
312
313 // Compute residual.
314 residuals += residual_J_param * param;
315
316 // Return Jacobian.
Austin Schuh3de38b02024-06-25 18:25:10 -0700317 if (residual_J_params != nullptr && residual_J_params[i] != nullptr) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800318 Eigen::Map<Matrix> residual_J_param_out(residual_J_params[i],
319 residual_J_param.rows(),
320 residual_J_param.cols());
321 if (jacobian_offsets_.count(i) != 0) {
322 residual_J_param_out = residual_J_param + jacobian_offsets_.at(i);
323 } else {
324 residual_J_param_out = residual_J_param;
325 }
326 }
327 }
328 return true;
329 }
330
331 void AddParameter(const Matrix& residual_J_param) {
332 CHECK_EQ(num_residuals(), residual_J_param.rows());
333 residual_J_params_.push_back(residual_J_param);
334 mutable_parameter_block_sizes()->push_back(residual_J_param.cols());
335 }
336
337 /// Add offset to the given Jacobian before returning it from Evaluate(),
Austin Schuh3de38b02024-06-25 18:25:10 -0700338 /// thus introducing an error in the computation.
Austin Schuh70cc9552019-01-21 19:46:48 -0800339 void SetJacobianOffset(size_t index, Matrix offset) {
340 CHECK_LT(index, residual_J_params_.size());
341 CHECK_EQ(residual_J_params_[index].rows(), offset.rows());
342 CHECK_EQ(residual_J_params_[index].cols(), offset.cols());
343 jacobian_offsets_[index] = offset;
344 }
345
346 private:
347 std::vector<Matrix> residual_J_params_;
348 std::map<int, Matrix> jacobian_offsets_;
349 Vector residuals_offset_;
350};
351
Austin Schuh70cc9552019-01-21 19:46:48 -0800352// Helper function to compare two Eigen matrices (used in the test below).
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800353static void ExpectMatricesClose(Matrix p, Matrix q, double tolerance) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800354 ASSERT_EQ(p.rows(), q.rows());
355 ASSERT_EQ(p.cols(), q.cols());
356 ExpectArraysClose(p.size(), p.data(), q.data(), tolerance);
357}
358
Austin Schuh3de38b02024-06-25 18:25:10 -0700359// Helper manifold that multiplies the delta vector by the given
360// jacobian and adds it to the parameter.
361class MatrixManifold : public Manifold {
362 public:
363 bool Plus(const double* x,
364 const double* delta,
365 double* x_plus_delta) const final {
366 VectorRef(x_plus_delta, AmbientSize()) =
367 ConstVectorRef(x, AmbientSize()) +
368 (global_to_local_ * ConstVectorRef(delta, TangentSize()));
369 return true;
370 }
371
372 bool PlusJacobian(const double* /*x*/, double* jacobian) const final {
373 MatrixRef(jacobian, AmbientSize(), TangentSize()) = global_to_local_;
374 return true;
375 }
376
377 bool Minus(const double* y, const double* x, double* y_minus_x) const final {
378 LOG(FATAL) << "Should not be called";
379 return true;
380 }
381
382 bool MinusJacobian(const double* x, double* jacobian) const final {
383 LOG(FATAL) << "Should not be called";
384 return true;
385 }
386
387 int AmbientSize() const final { return global_to_local_.rows(); }
388 int TangentSize() const final { return global_to_local_.cols(); }
389
390 Matrix global_to_local_;
391};
392
393TEST(GradientChecker, TestCorrectnessWithManifolds) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800394 // Create cost function.
395 Eigen::Vector3d residual_offset(100.0, 200.0, 300.0);
396 LinearCostFunction cost_function(residual_offset);
397 Eigen::Matrix<double, 3, 3, Eigen::RowMajor> j0;
398 j0.row(0) << 1.0, 2.0, 3.0;
399 j0.row(1) << 4.0, 5.0, 6.0;
400 j0.row(2) << 7.0, 8.0, 9.0;
401 Eigen::Matrix<double, 3, 2, Eigen::RowMajor> j1;
402 j1.row(0) << 10.0, 11.0;
403 j1.row(1) << 12.0, 13.0;
404 j1.row(2) << 14.0, 15.0;
405
406 Eigen::Vector3d param0(1.0, 2.0, 3.0);
407 Eigen::Vector2d param1(4.0, 5.0);
408
409 cost_function.AddParameter(j0);
410 cost_function.AddParameter(j1);
411
412 std::vector<int> parameter_sizes(2);
413 parameter_sizes[0] = 3;
414 parameter_sizes[1] = 2;
Austin Schuh3de38b02024-06-25 18:25:10 -0700415 std::vector<int> tangent_sizes(2);
416 tangent_sizes[0] = 2;
417 tangent_sizes[1] = 2;
Austin Schuh70cc9552019-01-21 19:46:48 -0800418
419 // Test cost function for correctness.
420 Eigen::Matrix<double, 3, 3, Eigen::RowMajor> j1_out;
421 Eigen::Matrix<double, 3, 2, Eigen::RowMajor> j2_out;
422 Eigen::Vector3d residual;
423 std::vector<const double*> parameters(2);
424 parameters[0] = param0.data();
425 parameters[1] = param1.data();
426 std::vector<double*> jacobians(2);
427 jacobians[0] = j1_out.data();
428 jacobians[1] = j2_out.data();
429 cost_function.Evaluate(parameters.data(), residual.data(), jacobians.data());
430
431 Matrix residual_expected = residual_offset + j0 * param0 + j1 * param1;
432
433 ExpectMatricesClose(j1_out, j0, std::numeric_limits<double>::epsilon());
434 ExpectMatricesClose(j2_out, j1, std::numeric_limits<double>::epsilon());
435 ExpectMatricesClose(residual, residual_expected, kTolerance);
436
Austin Schuh3de38b02024-06-25 18:25:10 -0700437 // Create manifold.
438 Eigen::Matrix<double, 3, 2, Eigen::RowMajor> global_to_local;
439 global_to_local.row(0) << 1.5, 2.5;
440 global_to_local.row(1) << 3.5, 4.5;
441 global_to_local.row(2) << 5.5, 6.5;
Austin Schuh70cc9552019-01-21 19:46:48 -0800442
Austin Schuh3de38b02024-06-25 18:25:10 -0700443 MatrixManifold manifold;
444 manifold.global_to_local_ = global_to_local;
Austin Schuh70cc9552019-01-21 19:46:48 -0800445
Austin Schuh3de38b02024-06-25 18:25:10 -0700446 // Test manifold for correctness.
Austin Schuh70cc9552019-01-21 19:46:48 -0800447 Eigen::Vector3d x(7.0, 8.0, 9.0);
448 Eigen::Vector2d delta(10.0, 11.0);
449
Austin Schuh3de38b02024-06-25 18:25:10 -0700450 Eigen::Matrix<double, 3, 2, Eigen::RowMajor> global_to_local_out;
451 manifold.PlusJacobian(x.data(), global_to_local_out.data());
452 ExpectMatricesClose(global_to_local_out,
453 global_to_local,
Austin Schuh70cc9552019-01-21 19:46:48 -0800454 std::numeric_limits<double>::epsilon());
455
456 Eigen::Vector3d x_plus_delta;
Austin Schuh3de38b02024-06-25 18:25:10 -0700457 manifold.Plus(x.data(), delta.data(), x_plus_delta.data());
458 Eigen::Vector3d x_plus_delta_expected = x + (global_to_local * delta);
Austin Schuh70cc9552019-01-21 19:46:48 -0800459 ExpectMatricesClose(x_plus_delta, x_plus_delta_expected, kTolerance);
460
461 // Now test GradientChecker.
Austin Schuh3de38b02024-06-25 18:25:10 -0700462 std::vector<const Manifold*> manifolds(2);
463 manifolds[0] = &manifold;
464 manifolds[1] = nullptr;
Austin Schuh70cc9552019-01-21 19:46:48 -0800465 NumericDiffOptions numeric_diff_options;
466 GradientChecker::ProbeResults results;
467 GradientChecker gradient_checker(
Austin Schuh3de38b02024-06-25 18:25:10 -0700468 &cost_function, &manifolds, numeric_diff_options);
Austin Schuh70cc9552019-01-21 19:46:48 -0800469
470 Problem::Options problem_options;
471 problem_options.cost_function_ownership = DO_NOT_TAKE_OWNERSHIP;
Austin Schuh3de38b02024-06-25 18:25:10 -0700472 problem_options.manifold_ownership = DO_NOT_TAKE_OWNERSHIP;
Austin Schuh70cc9552019-01-21 19:46:48 -0800473 Problem problem(problem_options);
474 Eigen::Vector3d param0_solver;
475 Eigen::Vector2d param1_solver;
Austin Schuh3de38b02024-06-25 18:25:10 -0700476 problem.AddParameterBlock(param0_solver.data(), 3, &manifold);
Austin Schuh70cc9552019-01-21 19:46:48 -0800477 problem.AddParameterBlock(param1_solver.data(), 2);
478 problem.AddResidualBlock(
Austin Schuh3de38b02024-06-25 18:25:10 -0700479 &cost_function, nullptr, param0_solver.data(), param1_solver.data());
Austin Schuh70cc9552019-01-21 19:46:48 -0800480
481 // First test case: everything is correct.
Austin Schuh3de38b02024-06-25 18:25:10 -0700482 EXPECT_TRUE(gradient_checker.Probe(parameters.data(), kTolerance, nullptr));
Austin Schuh70cc9552019-01-21 19:46:48 -0800483 EXPECT_TRUE(gradient_checker.Probe(parameters.data(), kTolerance, &results))
484 << results.error_log;
485
486 // Check that results contain correct data.
487 ASSERT_EQ(results.return_value, true);
488 ExpectMatricesClose(
489 results.residuals, residual, std::numeric_limits<double>::epsilon());
Austin Schuh3de38b02024-06-25 18:25:10 -0700490 CheckDimensions(results, parameter_sizes, tangent_sizes, 3);
Austin Schuh70cc9552019-01-21 19:46:48 -0800491 ExpectMatricesClose(
Austin Schuh3de38b02024-06-25 18:25:10 -0700492 results.local_jacobians.at(0), j0 * global_to_local, kTolerance);
Austin Schuh70cc9552019-01-21 19:46:48 -0800493 ExpectMatricesClose(results.local_jacobians.at(1),
494 j1,
495 std::numeric_limits<double>::epsilon());
496 ExpectMatricesClose(
Austin Schuh3de38b02024-06-25 18:25:10 -0700497 results.local_numeric_jacobians.at(0), j0 * global_to_local, kTolerance);
Austin Schuh70cc9552019-01-21 19:46:48 -0800498 ExpectMatricesClose(results.local_numeric_jacobians.at(1), j1, kTolerance);
499 ExpectMatricesClose(
500 results.jacobians.at(0), j0, std::numeric_limits<double>::epsilon());
501 ExpectMatricesClose(
502 results.jacobians.at(1), j1, std::numeric_limits<double>::epsilon());
503 ExpectMatricesClose(results.numeric_jacobians.at(0), j0, kTolerance);
504 ExpectMatricesClose(results.numeric_jacobians.at(1), j1, kTolerance);
505 EXPECT_GE(results.maximum_relative_error, 0.0);
506 EXPECT_TRUE(results.error_log.empty());
507
508 // Test interaction with the 'check_gradients' option in Solver.
Austin Schuh3de38b02024-06-25 18:25:10 -0700509 Solver::Options solver_options;
510 solver_options.linear_solver_type = DENSE_QR;
511 solver_options.check_gradients = true;
512 solver_options.initial_trust_region_radius = 1e10;
513 Solver solver;
514 Solver::Summary summary;
515
Austin Schuh70cc9552019-01-21 19:46:48 -0800516 param0_solver = param0;
517 param1_solver = param1;
518 solver.Solve(solver_options, &problem, &summary);
519 EXPECT_EQ(CONVERGENCE, summary.termination_type);
520 EXPECT_LE(summary.final_cost, 1e-12);
521
522 // Second test case: Mess up reported derivatives with respect to 3rd
523 // component of 1st parameter. Check should fail.
524 Eigen::Matrix<double, 3, 3, Eigen::RowMajor> j0_offset;
525 j0_offset.setZero();
526 j0_offset.col(2).setConstant(0.001);
527 cost_function.SetJacobianOffset(0, j0_offset);
Austin Schuh3de38b02024-06-25 18:25:10 -0700528 EXPECT_FALSE(gradient_checker.Probe(parameters.data(), kTolerance, nullptr));
Austin Schuh70cc9552019-01-21 19:46:48 -0800529 EXPECT_FALSE(gradient_checker.Probe(parameters.data(), kTolerance, &results))
530 << results.error_log;
531
532 // Check that results contain correct data.
533 ASSERT_EQ(results.return_value, true);
534 ExpectMatricesClose(
535 results.residuals, residual, std::numeric_limits<double>::epsilon());
Austin Schuh3de38b02024-06-25 18:25:10 -0700536 CheckDimensions(results, parameter_sizes, tangent_sizes, 3);
Austin Schuh70cc9552019-01-21 19:46:48 -0800537 ASSERT_EQ(results.local_jacobians.size(), 2);
538 ASSERT_EQ(results.local_numeric_jacobians.size(), 2);
539 ExpectMatricesClose(results.local_jacobians.at(0),
Austin Schuh3de38b02024-06-25 18:25:10 -0700540 (j0 + j0_offset) * global_to_local,
Austin Schuh70cc9552019-01-21 19:46:48 -0800541 kTolerance);
542 ExpectMatricesClose(results.local_jacobians.at(1),
543 j1,
544 std::numeric_limits<double>::epsilon());
545 ExpectMatricesClose(
Austin Schuh3de38b02024-06-25 18:25:10 -0700546 results.local_numeric_jacobians.at(0), j0 * global_to_local, kTolerance);
Austin Schuh70cc9552019-01-21 19:46:48 -0800547 ExpectMatricesClose(results.local_numeric_jacobians.at(1), j1, kTolerance);
548 ExpectMatricesClose(results.jacobians.at(0), j0 + j0_offset, kTolerance);
549 ExpectMatricesClose(
550 results.jacobians.at(1), j1, std::numeric_limits<double>::epsilon());
551 ExpectMatricesClose(results.numeric_jacobians.at(0), j0, kTolerance);
552 ExpectMatricesClose(results.numeric_jacobians.at(1), j1, kTolerance);
553 EXPECT_GT(results.maximum_relative_error, 0.0);
554 EXPECT_FALSE(results.error_log.empty());
555
556 // Test interaction with the 'check_gradients' option in Solver.
557 param0_solver = param0;
558 param1_solver = param1;
559 solver.Solve(solver_options, &problem, &summary);
560 EXPECT_EQ(FAILURE, summary.termination_type);
561
Austin Schuh3de38b02024-06-25 18:25:10 -0700562 // Now, zero out the manifold Jacobian with respect to the 3rd component of
563 // the 1st parameter. This makes the combination of cost function and manifold
564 // return correct values again.
565 manifold.global_to_local_.row(2).setZero();
Austin Schuh70cc9552019-01-21 19:46:48 -0800566
567 // Verify that the gradient checker does not treat this as an error.
568 EXPECT_TRUE(gradient_checker.Probe(parameters.data(), kTolerance, &results))
569 << results.error_log;
570
571 // Check that results contain correct data.
572 ASSERT_EQ(results.return_value, true);
573 ExpectMatricesClose(
574 results.residuals, residual, std::numeric_limits<double>::epsilon());
Austin Schuh3de38b02024-06-25 18:25:10 -0700575 CheckDimensions(results, parameter_sizes, tangent_sizes, 3);
Austin Schuh70cc9552019-01-21 19:46:48 -0800576 ASSERT_EQ(results.local_jacobians.size(), 2);
577 ASSERT_EQ(results.local_numeric_jacobians.size(), 2);
578 ExpectMatricesClose(results.local_jacobians.at(0),
Austin Schuh3de38b02024-06-25 18:25:10 -0700579 (j0 + j0_offset) * manifold.global_to_local_,
Austin Schuh70cc9552019-01-21 19:46:48 -0800580 kTolerance);
581 ExpectMatricesClose(results.local_jacobians.at(1),
582 j1,
583 std::numeric_limits<double>::epsilon());
584 ExpectMatricesClose(results.local_numeric_jacobians.at(0),
Austin Schuh3de38b02024-06-25 18:25:10 -0700585 j0 * manifold.global_to_local_,
Austin Schuh70cc9552019-01-21 19:46:48 -0800586 kTolerance);
587 ExpectMatricesClose(results.local_numeric_jacobians.at(1), j1, kTolerance);
588 ExpectMatricesClose(results.jacobians.at(0), j0 + j0_offset, kTolerance);
589 ExpectMatricesClose(
590 results.jacobians.at(1), j1, std::numeric_limits<double>::epsilon());
591 ExpectMatricesClose(results.numeric_jacobians.at(0), j0, kTolerance);
592 ExpectMatricesClose(results.numeric_jacobians.at(1), j1, kTolerance);
593 EXPECT_GE(results.maximum_relative_error, 0.0);
594 EXPECT_TRUE(results.error_log.empty());
595
596 // Test interaction with the 'check_gradients' option in Solver.
597 param0_solver = param0;
598 param1_solver = param1;
599 solver.Solve(solver_options, &problem, &summary);
600 EXPECT_EQ(CONVERGENCE, summary.termination_type);
601 EXPECT_LE(summary.final_cost, 1e-12);
602}
Austin Schuh3de38b02024-06-25 18:25:10 -0700603} // namespace ceres::internal