// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2023 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: keir@google.com (Keir Mierle)
//
// Tests shared across evaluators. The tests try all combinations of linear
// solver and num_eliminate_blocks (for Schur-based solvers).

#include "ceres/evaluator.h"

#include <memory>
#include <string>
#include <vector>

#include "ceres/casts.h"
#include "ceres/cost_function.h"
#include "ceres/crs_matrix.h"
#include "ceres/evaluator_test_utils.h"
#include "ceres/internal/eigen.h"
#include "ceres/manifold.h"
#include "ceres/problem_impl.h"
#include "ceres/program.h"
#include "ceres/sized_cost_function.h"
#include "ceres/sparse_matrix.h"
#include "ceres/stringprintf.h"
#include "ceres/types.h"
#include "gtest/gtest.h"

namespace ceres {
namespace internal {

// TODO(keir): Consider pushing this into a common test utils file.
template <int kFactor, int kNumResiduals, int... Ns>
class ParameterIgnoringCostFunction
    : public SizedCostFunction<kNumResiduals, Ns...> {
  using Base = SizedCostFunction<kNumResiduals, Ns...>;

 public:
  explicit ParameterIgnoringCostFunction(bool succeeds = true)
      : succeeds_(succeeds) {}

  bool Evaluate(double const* const* parameters,
                double* residuals,
                double** jacobians) const final {
    for (int i = 0; i < Base::num_residuals(); ++i) {
      residuals[i] = i + 1;
    }
    if (jacobians) {
      for (int k = 0; k < Base::parameter_block_sizes().size(); ++k) {
        // The jacobians here are full sized, but they are transformed in the
        // evaluator into the "local" jacobian. In the tests, the "subset
        // constant" manifold is used, which should pick out columns from these
        // jacobians. Put values in the jacobian that make this obvious; in
        // particular, make the jacobians like this:
        //
        //   1 2 3 4 ...
        //   1 2 3 4 ...   .*  kFactor
        //   1 2 3 4 ...
        //
        // where the multiplication by kFactor makes it easier to distinguish
        // between Jacobians of different residuals for the same parameter.
        if (jacobians[k] != nullptr) {
          MatrixRef jacobian(jacobians[k],
                             Base::num_residuals(),
                             Base::parameter_block_sizes()[k]);
          for (int j = 0; j < Base::parameter_block_sizes()[k]; ++j) {
            jacobian.col(j).setConstant(kFactor * (j + 1));
          }
        }
      }
    }
    return succeeds_;
  }

 private:
  bool succeeds_;
};
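
// For concreteness, here is what one instantiation used below produces; the
// values can be read directly off Evaluate() above and match the expected
// jacobians asserted in the tests. ParameterIgnoringCostFunction<2, 3, 2, 4>
// (kFactor = 2, three residuals, parameter blocks of sizes 2 and 4) always
// returns
//
//   residuals = [1 2 3]
//
// and, when requested, the block jacobians
//
//   d r / d block0 = [2 4]      d r / d block1 = [2 4 6 8]
//                    [2 4]                       [2 4 6 8]
//                    [2 4]                       [2 4 6 8]
//
// i.e. column j of block k is kFactor * (j + 1), independent of the state.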

struct EvaluatorTestOptions {
  EvaluatorTestOptions(LinearSolverType linear_solver_type,
                       int num_eliminate_blocks,
                       bool dynamic_sparsity = false)
      : linear_solver_type(linear_solver_type),
        num_eliminate_blocks(num_eliminate_blocks),
        dynamic_sparsity(dynamic_sparsity) {}

  LinearSolverType linear_solver_type;
  int num_eliminate_blocks;
  bool dynamic_sparsity;
};

struct EvaluatorTest : public ::testing::TestWithParam<EvaluatorTestOptions> {
  std::unique_ptr<Evaluator> CreateEvaluator(Program* program) {
    // This program is straight from the ProblemImpl, and so has no index/offset
    // yet; compute it here as required by the evaluator implementations.
    program->SetParameterOffsetsAndIndex();

    if (VLOG_IS_ON(1)) {
      std::string report;
      StringAppendF(&report,
                    "Creating evaluator with type: %d",
                    GetParam().linear_solver_type);
      if (GetParam().linear_solver_type == SPARSE_NORMAL_CHOLESKY) {
        StringAppendF(
            &report, ", dynamic_sparsity: %d", GetParam().dynamic_sparsity);
      }
      StringAppendF(&report,
                    " and num_eliminate_blocks: %d",
                    GetParam().num_eliminate_blocks);
      VLOG(1) << report;
    }
    Evaluator::Options options;
    options.linear_solver_type = GetParam().linear_solver_type;
    options.num_eliminate_blocks = GetParam().num_eliminate_blocks;
    options.dynamic_sparsity = GetParam().dynamic_sparsity;
    options.context = problem.context();
    std::string error;
    return Evaluator::Create(options, program, &error);
  }

  void EvaluateAndCompare(ProblemImpl* problem,
                          int expected_num_rows,
                          int expected_num_cols,
                          double expected_cost,
                          const double* expected_residuals,
                          const double* expected_gradient,
                          const double* expected_jacobian) {
    std::unique_ptr<Evaluator> evaluator =
        CreateEvaluator(problem->mutable_program());
    int num_residuals = expected_num_rows;
    int num_parameters = expected_num_cols;

    double cost = -1;

    Vector residuals(num_residuals);
    residuals.setConstant(-2000);

    Vector gradient(num_parameters);
    gradient.setConstant(-3000);

    std::unique_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());

    ASSERT_EQ(expected_num_rows, evaluator->NumResiduals());
    ASSERT_EQ(expected_num_cols, evaluator->NumEffectiveParameters());
    ASSERT_EQ(expected_num_rows, jacobian->num_rows());
    ASSERT_EQ(expected_num_cols, jacobian->num_cols());

    std::vector<double> state(evaluator->NumParameters());

    // clang-format off
    ASSERT_TRUE(evaluator->Evaluate(
        &state[0],
        &cost,
        expected_residuals != nullptr ? &residuals[0]  : nullptr,
        expected_gradient  != nullptr ? &gradient[0]   : nullptr,
        expected_jacobian  != nullptr ? jacobian.get() : nullptr));
    // clang-format on

    Matrix actual_jacobian;
    if (expected_jacobian != nullptr) {
      jacobian->ToDenseMatrix(&actual_jacobian);
    }

    CompareEvaluations(expected_num_rows,
                       expected_num_cols,
                       expected_cost,
                       expected_residuals,
                       expected_gradient,
                       expected_jacobian,
                       cost,
                       &residuals[0],
                       &gradient[0],
                       actual_jacobian.data());
  }

  // Try all combinations of outputs for the evaluator. Each bit of i selects
  // one optional output: bit 0 requests residuals, bit 1 the gradient, and
  // bit 2 the jacobian, so all 8 subsets are exercised.
  void CheckAllEvaluationCombinations(const ExpectedEvaluation& expected) {
    for (int i = 0; i < 8; ++i) {
      EvaluateAndCompare(&problem,
                         expected.num_rows,
                         expected.num_cols,
                         expected.cost,
                         (i & 1) ? expected.residuals : nullptr,
                         (i & 2) ? expected.gradient : nullptr,
                         (i & 4) ? expected.jacobian : nullptr);
    }
  }

  // The values are ignored completely by the cost function.
  double x[2];
  double y[3];
  double z[4];

  ProblemImpl problem;
};

static void SetSparseMatrixConstant(SparseMatrix* sparse_matrix, double value) {
  VectorRef(sparse_matrix->mutable_values(), sparse_matrix->num_nonzeros())
      .setConstant(value);
}

TEST_P(EvaluatorTest, SingleResidualProblem) {
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<1, 3, 2, 3, 4>, nullptr, x, y, z);

  // clang-format off
  ExpectedEvaluation expected = {
    // Rows/columns
    3, 9,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 6.0, 12.0,              // x
      6.0, 12.0, 18.0,        // y
      6.0, 12.0, 18.0, 24.0,  // z
    },
    // Jacobian
    //   x          y             z
    { 1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4
    }
  };
  // clang-format on
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, SingleResidualProblemWithPermutedParameters) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x, 2);
  problem.AddParameterBlock(y, 3);
  problem.AddParameterBlock(z, 4);

  // Then use a cost function which is similar to the others, but swap around
  // the ordering of the parameters to the cost function. This shouldn't affect
  // the jacobian evaluation, but requires explicit handling in the evaluators.
  // At one point the compressed row evaluator had a bug that went undetected
  // for a long time, since by chance most users added parameters to the problem
  // in the same order that they occurred as parameters to a cost function.
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<1, 3, 4, 3, 2>, nullptr, z, y, x);

  // clang-format off
  ExpectedEvaluation expected = {
    // Rows/columns
    3, 9,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 6.0, 12.0,              // x
      6.0, 12.0, 18.0,        // y
      6.0, 12.0, 18.0, 24.0,  // z
    },
    // Jacobian
    //   x          y             z
    { 1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4
    }
  };
  // clang-format on
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, SingleResidualProblemWithNuisanceParameters) {
  // These parameters are not used.
  double a[2];
  double b[1];
  double c[1];
  double d[3];

  // Add the parameters in a mixed order so the Jacobian is "checkered" with the
  // values from the other parameters.
  problem.AddParameterBlock(a, 2);
  problem.AddParameterBlock(x, 2);
  problem.AddParameterBlock(b, 1);
  problem.AddParameterBlock(y, 3);
  problem.AddParameterBlock(c, 1);
  problem.AddParameterBlock(z, 4);
  problem.AddParameterBlock(d, 3);

  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<1, 3, 2, 3, 4>, nullptr, x, y, z);

  // clang-format off
  ExpectedEvaluation expected = {
    // Rows/columns
    3, 16,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 0.0, 0.0,               // a
      6.0, 12.0,              // x
      0.0,                    // b
      6.0, 12.0, 18.0,        // y
      0.0,                    // c
      6.0, 12.0, 18.0, 24.0,  // z
      0.0, 0.0, 0.0,          // d
    },
    // Jacobian
    //   a       x     b        y     c           z          d
    { 0, 0,   1, 2,    0,   1, 2, 3,  0,   1, 2, 3, 4,   0, 0, 0,
      0, 0,   1, 2,    0,   1, 2, 3,  0,   1, 2, 3, 4,   0, 0, 0,
      0, 0,   1, 2,    0,   1, 2, 3,  0,   1, 2, 3, 4,   0, 0, 0
    }
  };
  // clang-format on
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualProblem) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x, 2);
  problem.AddParameterBlock(y, 3);
  problem.AddParameterBlock(z, 4);

  // f(x, y) in R^2
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<1, 2, 2, 3>, nullptr, x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<2, 3, 2, 4>, nullptr, x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<3, 4, 3, 4>, nullptr, y, z);

  // clang-format off
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 9,
    // Cost
    //   f       g          h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,               // x
      33.0, 66.0, 99.0,         // y
      42.0, 84.0, 126.0, 168.0  // z
    },
    // Jacobian
    //                x        y            z
    { /* f(x, y) */ 1, 2,   1, 2, 3,   0, 0, 0, 0,
                    1, 2,   1, 2, 3,   0, 0, 0, 0,

      /* g(x, z) */ 2, 4,   0, 0, 0,   2, 4, 6, 8,
                    2, 4,   0, 0, 0,   2, 4, 6, 8,
                    2, 4,   0, 0, 0,   2, 4, 6, 8,

      /* h(y, z) */ 0, 0,   3, 6, 9,   3, 6, 9, 12,
                    0, 0,   3, 6, 9,   3, 6, 9, 12,
                    0, 0,   3, 6, 9,   3, 6, 9, 12,
                    0, 0,   3, 6, 9,   3, 6, 9, 12
    }
  };
  // clang-format on
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualsWithManifolds) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x, 2);

  // Fix y's first dimension.
  std::vector<int> y_fixed;
  y_fixed.push_back(0);
  problem.AddParameterBlock(y, 3, new SubsetManifold(3, y_fixed));

  // Fix z's second dimension.
  std::vector<int> z_fixed;
  z_fixed.push_back(1);
  problem.AddParameterBlock(z, 4, new SubsetManifold(4, z_fixed));

  // f(x, y) in R^2
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<1, 2, 2, 3>, nullptr, x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<2, 3, 2, 4>, nullptr, x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<3, 4, 3, 4>, nullptr, y, z);

  // clang-format off
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 7,
    // Cost
    //   f       g          h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,         // x
      66.0, 99.0,         // y
      42.0, 126.0, 168.0  // z
    },
    // Jacobian
    //                x       y        z
    { /* f(x, y) */ 1, 2,   2, 3,   0, 0, 0,
                    1, 2,   2, 3,   0, 0, 0,

      /* g(x, z) */ 2, 4,   0, 0,   2, 6, 8,
                    2, 4,   0, 0,   2, 6, 8,
                    2, 4,   0, 0,   2, 6, 8,

      /* h(y, z) */ 0, 0,   6, 9,   3, 9, 12,
                    0, 0,   6, 9,   3, 9, 12,
                    0, 0,   6, 9,   3, 9, 12,
                    0, 0,   6, 9,   3, 9, 12
    }
  };
  // clang-format on
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualProblemWithSomeConstantParameters) {
  // The values are ignored completely by the cost function.
  double x[2];
  double y[3];
  double z[4];

  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x, 2);
  problem.AddParameterBlock(y, 3);
  problem.AddParameterBlock(z, 4);

  // f(x, y) in R^2
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<1, 2, 2, 3>, nullptr, x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<2, 3, 2, 4>, nullptr, x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<3, 4, 3, 4>, nullptr, y, z);

  // For this test, "z" is constant.
  problem.SetParameterBlockConstant(z);

  // Create the reduced program which is missing the fixed "z" variable.
  // Normally, the preprocessing of the program that happens in solver_impl
  // takes care of this, but we don't want to invoke the solver here.
  Program reduced_program;
  std::vector<ParameterBlock*>* parameter_blocks =
      problem.mutable_program()->mutable_parameter_blocks();

  // "z" is the last parameter; save it for later and pop it off temporarily.
  // Note that "z" will still get read during evaluation, so it cannot be
  // deleted at this point.
  ParameterBlock* parameter_block_z = parameter_blocks->back();
  parameter_blocks->pop_back();

  // clang-format off
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 5,
    // Cost
    //   f       g          h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,        // x
      33.0, 66.0, 99.0,  // y
    },
    // Jacobian
    //                x        y
    { /* f(x, y) */ 1, 2,   1, 2, 3,
                    1, 2,   1, 2, 3,

      /* g(x, z) */ 2, 4,   0, 0, 0,
                    2, 4,   0, 0, 0,
                    2, 4,   0, 0, 0,

      /* h(y, z) */ 0, 0,   3, 6, 9,
                    0, 0,   3, 6, 9,
                    0, 0,   3, 6, 9,
                    0, 0,   3, 6, 9
    }
  };
  // clang-format on
  CheckAllEvaluationCombinations(expected);

  // Restore parameter block z, so it will get freed in a consistent way.
  parameter_blocks->push_back(parameter_block_z);
}

TEST_P(EvaluatorTest, EvaluatorAbortsForResidualsThatFailToEvaluate) {
  // Switch the return value to failure.
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<20, 3, 2, 3, 4>(false),
      nullptr,
      x,
      y,
      z);

  // The values are ignored.
  double state[9];

  std::unique_ptr<Evaluator> evaluator =
      CreateEvaluator(problem.mutable_program());
  std::unique_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());
  double cost;
  EXPECT_FALSE(evaluator->Evaluate(state, &cost, nullptr, nullptr, nullptr));
}

// In the pairs, the first argument is the linear solver type, and the second
// argument is num_eliminate_blocks. Changing the num_eliminate_blocks only
// makes sense for the Schur-based solvers.
//
// Try all values of num_eliminate_blocks that make sense given that in the
// tests a maximum of 4 parameter blocks are present.
INSTANTIATE_TEST_SUITE_P(
    LinearSolvers,
    EvaluatorTest,
    ::testing::Values(EvaluatorTestOptions(DENSE_QR, 0),
                      EvaluatorTestOptions(DENSE_SCHUR, 0),
                      EvaluatorTestOptions(DENSE_SCHUR, 1),
                      EvaluatorTestOptions(DENSE_SCHUR, 2),
                      EvaluatorTestOptions(DENSE_SCHUR, 3),
                      EvaluatorTestOptions(DENSE_SCHUR, 4),
                      EvaluatorTestOptions(SPARSE_SCHUR, 0),
                      EvaluatorTestOptions(SPARSE_SCHUR, 1),
                      EvaluatorTestOptions(SPARSE_SCHUR, 2),
                      EvaluatorTestOptions(SPARSE_SCHUR, 3),
                      EvaluatorTestOptions(SPARSE_SCHUR, 4),
                      EvaluatorTestOptions(ITERATIVE_SCHUR, 0),
                      EvaluatorTestOptions(ITERATIVE_SCHUR, 1),
                      EvaluatorTestOptions(ITERATIVE_SCHUR, 2),
                      EvaluatorTestOptions(ITERATIVE_SCHUR, 3),
                      EvaluatorTestOptions(ITERATIVE_SCHUR, 4),
                      EvaluatorTestOptions(SPARSE_NORMAL_CHOLESKY, 0, false),
                      EvaluatorTestOptions(SPARSE_NORMAL_CHOLESKY, 0, true)));

// Simple cost function used to check if the evaluator is sensitive to
// state changes.
class ParameterSensitiveCostFunction : public SizedCostFunction<2, 2> {
 public:
  bool Evaluate(double const* const* parameters,
                double* residuals,
                double** jacobians) const final {
    double x1 = parameters[0][0];
    double x2 = parameters[0][1];
    residuals[0] = x1 * x1;
    residuals[1] = x2 * x2;

    if (jacobians != nullptr) {
      double* jacobian = jacobians[0];
      if (jacobian != nullptr) {
        jacobian[0] = 2.0 * x1;
        jacobian[1] = 0.0;
        jacobian[2] = 0.0;
        jacobian[3] = 2.0 * x2;
      }
    }
    return true;
  }
};
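
// For reference, with the state (x1, x2) = (2, 3) used in the test below this
// cost function yields residuals (4, 9), cost 0.5 * (16 + 81) = 48.5, and the
// row-major jacobian [4 0; 0 6], which is exactly what the test asserts.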

TEST(Evaluator, EvaluatorRespectsParameterChanges) {
  ProblemImpl problem;

  double x[2];
  x[0] = 1.0;
  x[1] = 1.0;

  problem.AddResidualBlock(new ParameterSensitiveCostFunction(), nullptr, x);
  Program* program = problem.mutable_program();
  program->SetParameterOffsetsAndIndex();

  Evaluator::Options options;
  options.linear_solver_type = DENSE_QR;
  options.num_eliminate_blocks = 0;
  options.context = problem.context();
  std::string error;
  std::unique_ptr<Evaluator> evaluator(
      Evaluator::Create(options, program, &error));
  std::unique_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());

  ASSERT_EQ(2, jacobian->num_rows());
  ASSERT_EQ(2, jacobian->num_cols());

  double state[2];
  state[0] = 2.0;
  state[1] = 3.0;

  // The original state of a residual block comes from the user's
  // state. So the original state is 1.0, 1.0, and the only way we get
  // the 2.0, 3.0 results in the following tests is if it respects the
  // values in the state vector.

  // Cost only; no residuals and no jacobian.
  {
    double cost = -1;
    ASSERT_TRUE(evaluator->Evaluate(state, &cost, nullptr, nullptr, nullptr));
    EXPECT_EQ(48.5, cost);
  }

  // Cost and residuals, no jacobian.
  {
    double cost = -1;
    double residuals[2] = {-2, -2};
    ASSERT_TRUE(evaluator->Evaluate(state, &cost, residuals, nullptr, nullptr));
    EXPECT_EQ(48.5, cost);
    EXPECT_EQ(4, residuals[0]);
    EXPECT_EQ(9, residuals[1]);
  }

  // Cost, residuals, and jacobian.
  {
    double cost = -1;
    double residuals[2] = {-2, -2};
    SetSparseMatrixConstant(jacobian.get(), -1);
    ASSERT_TRUE(
        evaluator->Evaluate(state, &cost, residuals, nullptr, jacobian.get()));
    EXPECT_EQ(48.5, cost);
    EXPECT_EQ(4, residuals[0]);
    EXPECT_EQ(9, residuals[1]);
    Matrix actual_jacobian;
    jacobian->ToDenseMatrix(&actual_jacobian);

    Matrix expected_jacobian(2, 2);
    expected_jacobian << 2 * state[0], 0, 0, 2 * state[1];

    EXPECT_TRUE((actual_jacobian.array() == expected_jacobian.array()).all())
        << "Actual:\n"
        << actual_jacobian << "\nExpected:\n"
        << expected_jacobian;
  }
}
678
Austin Schuh3de38b02024-06-25 18:25:10 -0700679class HugeCostFunction : public SizedCostFunction<46341, 46345> {
680 bool Evaluate(double const* const* parameters,
681 double* residuals,
682 double** jacobians) const override {
683 return true;
684 }
685};
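
// The dimensions are presumably chosen so that the dense jacobian size
// overflows a 32-bit int: 46341 is the smallest integer whose square exceeds
// 2^31, so a 46341 x 46345 jacobian has more than 2^31 entries. The two tests
// below expect CreateJacobian() to fail gracefully by returning nullptr for
// such oversized problems rather than crashing.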

TEST(Evaluator, LargeProblemDoesNotCauseCrashBlockJacobianWriter) {
  ProblemImpl problem;
  std::vector<double> x(46345);

  problem.AddResidualBlock(new HugeCostFunction, nullptr, x.data());
  Evaluator::Options options;
  options.linear_solver_type = SPARSE_NORMAL_CHOLESKY;
  options.context = problem.context();
  options.num_eliminate_blocks = 0;
  options.dynamic_sparsity = false;
  std::string error;
  auto program = problem.mutable_program();
  program->SetParameterOffsetsAndIndex();
  auto evaluator = Evaluator::Create(options, program, &error);
  auto jacobian = evaluator->CreateJacobian();
  EXPECT_EQ(jacobian, nullptr);
}

TEST(Evaluator, LargeProblemDoesNotCauseCrashCompressedRowJacobianWriter) {
  ProblemImpl problem;
  std::vector<double> x(46345);

  problem.AddResidualBlock(new HugeCostFunction, nullptr, x.data());
  Evaluator::Options options;
  // CGNR on CUDA_SPARSE is the only combination that triggers a
  // CompressedRowJacobianWriter.
  options.linear_solver_type = CGNR;
  options.sparse_linear_algebra_library_type = CUDA_SPARSE;
  options.context = problem.context();
  options.num_eliminate_blocks = 0;
  std::string error;
  auto program = problem.mutable_program();
  program->SetParameterOffsetsAndIndex();
  auto evaluator = Evaluator::Create(options, program, &error);
  auto jacobian = evaluator->CreateJacobian();
  EXPECT_EQ(jacobian, nullptr);
}
724
Austin Schuh70cc9552019-01-21 19:46:48 -0800725} // namespace internal
726} // namespace ceres