blob: 5d549a7b6dc51704ea0eb389e113cc8f46756137 [file] [log] [blame]
Austin Schuh70cc9552019-01-21 19:46:48 -08001// Ceres Solver - A fast non-linear least squares minimizer
Austin Schuh3de38b02024-06-25 18:25:10 -07002// Copyright 2023 Google Inc. All rights reserved.
Austin Schuh70cc9552019-01-21 19:46:48 -08003// http://ceres-solver.org/
4//
5// Redistribution and use in source and binary forms, with or without
6// modification, are permitted provided that the following conditions are met:
7//
8// * Redistributions of source code must retain the above copyright notice,
9// this list of conditions and the following disclaimer.
10// * Redistributions in binary form must reproduce the above copyright notice,
11// this list of conditions and the following disclaimer in the documentation
12// and/or other materials provided with the distribution.
13// * Neither the name of Google Inc. nor the names of its contributors may be
14// used to endorse or promote products derived from this software without
15// specific prior written permission.
16//
17// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
18// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
20// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
21// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
24// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
25// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
26// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
27// POSSIBILITY OF SUCH DAMAGE.
28//
29// Author: keir@google.com (Keir Mierle)
30//
31// The ProgramEvaluator runs the cost functions contained in each residual block
32// and stores the result into a jacobian. The particular type of jacobian is
33// abstracted out using two template parameters:
34//
35// - An "EvaluatePreparer" that is responsible for creating the array with
36// pointers to the jacobian blocks where the cost function evaluates to.
37// - A "JacobianWriter" that is responsible for storing the resulting
38// jacobian blocks in the passed sparse matrix.
39//
40// This abstraction affords an efficient evaluator implementation while still
41// supporting writing to multiple sparse matrix formats. For example, when the
42// ProgramEvaluator is parameterized for writing to block sparse matrices, the
43// residual jacobians are written directly into their final position in the
44// block sparse matrix by the user's CostFunction; there is no copying.
45//
Austin Schuh3de38b02024-06-25 18:25:10 -070046// The evaluation is threaded with C++ threads.
Austin Schuh70cc9552019-01-21 19:46:48 -080047//
48// The EvaluatePreparer and JacobianWriter interfaces are as follows:
49//
50// class EvaluatePreparer {
51// // Prepare the jacobians array for use as the destination of a call to
52// // a cost function's evaluate method.
53// void Prepare(const ResidualBlock* residual_block,
54// int residual_block_index,
55// SparseMatrix* jacobian,
56// double** jacobians);
57// }
58//
59// class JacobianWriter {
60// // Create a jacobian that this writer can write. Same as
61// // Evaluator::CreateJacobian.
Austin Schuh3de38b02024-06-25 18:25:10 -070062// std::unique_ptr<SparseMatrix> CreateJacobian() const;
Austin Schuh70cc9552019-01-21 19:46:48 -080063//
// // Create num_threads evaluate preparers. Resulting preparers are valid
65// // while *this is.
66//
67// std::unique_ptr<EvaluatePreparer[]> CreateEvaluatePreparers(
68// int num_threads);
Austin Schuh70cc9552019-01-21 19:46:48 -080069//
70// // Write the block jacobians from a residual block evaluation to the
71// // larger sparse jacobian.
72// void Write(int residual_id,
73// int residual_offset,
74// double** jacobians,
75// SparseMatrix* jacobian);
76// }
77//
78// Note: The ProgramEvaluator is not thread safe, since internally it maintains
79// some per-thread scratch space.
80
81#ifndef CERES_INTERNAL_PROGRAM_EVALUATOR_H_
82#define CERES_INTERNAL_PROGRAM_EVALUATOR_H_
83
84// This include must come before any #ifndef check on Ceres compile options.
Austin Schuh1d1e6ea2020-12-23 21:56:30 -080085// clang-format off
Austin Schuh3de38b02024-06-25 18:25:10 -070086#include "ceres/internal/config.h"
Austin Schuh1d1e6ea2020-12-23 21:56:30 -080087// clang-format on
Austin Schuh70cc9552019-01-21 19:46:48 -080088
#include <atomic>
#include <cmath>
#include <map>
#include <memory>
#include <string>
#include <vector>
94
95#include "ceres/evaluation_callback.h"
96#include "ceres/execution_summary.h"
97#include "ceres/internal/eigen.h"
98#include "ceres/parallel_for.h"
Austin Schuh3de38b02024-06-25 18:25:10 -070099#include "ceres/parallel_vector_ops.h"
Austin Schuh70cc9552019-01-21 19:46:48 -0800100#include "ceres/parameter_block.h"
101#include "ceres/program.h"
102#include "ceres/residual_block.h"
103#include "ceres/small_blas.h"
104
105namespace ceres {
106namespace internal {
107
108struct NullJacobianFinalizer {
Austin Schuh3de38b02024-06-25 18:25:10 -0700109 void operator()(SparseMatrix* /*jacobian*/, int /*num_parameters*/) {}
Austin Schuh70cc9552019-01-21 19:46:48 -0800110};
111
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800112template <typename EvaluatePreparer,
113 typename JacobianWriter,
114 typename JacobianFinalizer = NullJacobianFinalizer>
Austin Schuh3de38b02024-06-25 18:25:10 -0700115class ProgramEvaluator final : public Evaluator {
Austin Schuh70cc9552019-01-21 19:46:48 -0800116 public:
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800117 ProgramEvaluator(const Evaluator::Options& options, Program* program)
Austin Schuh70cc9552019-01-21 19:46:48 -0800118 : options_(options),
119 program_(program),
120 jacobian_writer_(options, program),
Austin Schuh3de38b02024-06-25 18:25:10 -0700121 evaluate_preparers_(std::move(
122 jacobian_writer_.CreateEvaluatePreparers(options.num_threads))),
123 num_parameters_(program->NumEffectiveParameters()) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800124 BuildResidualLayout(*program, &residual_layout_);
Austin Schuh3de38b02024-06-25 18:25:10 -0700125 evaluate_scratch_ = std::move(CreateEvaluatorScratch(
126 *program, static_cast<unsigned>(options.num_threads)));
Austin Schuh70cc9552019-01-21 19:46:48 -0800127 }
128
129 // Implementation of Evaluator interface.
Austin Schuh3de38b02024-06-25 18:25:10 -0700130 std::unique_ptr<SparseMatrix> CreateJacobian() const final {
Austin Schuh70cc9552019-01-21 19:46:48 -0800131 return jacobian_writer_.CreateJacobian();
132 }
133
134 bool Evaluate(const Evaluator::EvaluateOptions& evaluate_options,
135 const double* state,
136 double* cost,
137 double* residuals,
138 double* gradient,
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800139 SparseMatrix* jacobian) final {
Austin Schuh70cc9552019-01-21 19:46:48 -0800140 ScopedExecutionTimer total_timer("Evaluator::Total", &execution_summary_);
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800141 ScopedExecutionTimer call_type_timer(
142 gradient == nullptr && jacobian == nullptr ? "Evaluator::Residual"
143 : "Evaluator::Jacobian",
144 &execution_summary_);
Austin Schuh70cc9552019-01-21 19:46:48 -0800145
146 // The parameters are stateful, so set the state before evaluating.
147 if (!program_->StateVectorToParameterBlocks(state)) {
148 return false;
149 }
150
151 // Notify the user about a new evaluation point if they are interested.
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800152 if (options_.evaluation_callback != nullptr) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800153 program_->CopyParameterBlockStateToUserState();
154 options_.evaluation_callback->PrepareForEvaluation(
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800155 /*jacobians=*/(gradient != nullptr || jacobian != nullptr),
Austin Schuh70cc9552019-01-21 19:46:48 -0800156 evaluate_options.new_evaluation_point);
157 }
158
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800159 if (residuals != nullptr) {
Austin Schuh3de38b02024-06-25 18:25:10 -0700160 ParallelSetZero(options_.context,
161 options_.num_threads,
162 residuals,
163 program_->NumResiduals());
Austin Schuh70cc9552019-01-21 19:46:48 -0800164 }
165
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800166 if (jacobian != nullptr) {
Austin Schuh3de38b02024-06-25 18:25:10 -0700167 jacobian->SetZero(options_.context, options_.num_threads);
Austin Schuh70cc9552019-01-21 19:46:48 -0800168 }
169
170 // Each thread gets it's own cost and evaluate scratch space.
171 for (int i = 0; i < options_.num_threads; ++i) {
172 evaluate_scratch_[i].cost = 0.0;
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800173 if (gradient != nullptr) {
Austin Schuh3de38b02024-06-25 18:25:10 -0700174 ParallelSetZero(options_.context,
175 options_.num_threads,
176 evaluate_scratch_[i].gradient.get(),
177 num_parameters_);
Austin Schuh70cc9552019-01-21 19:46:48 -0800178 }
179 }
180
181 const int num_residual_blocks = program_->NumResidualBlocks();
182 // This bool is used to disable the loop if an error is encountered without
183 // breaking out of it. The remaining loop iterations are still run, but with
184 // an empty body, and so will finish quickly.
185 std::atomic_bool abort(false);
186 ParallelFor(
187 options_.context,
188 0,
189 num_residual_blocks,
190 options_.num_threads,
191 [&](int thread_id, int i) {
192 if (abort) {
193 return;
194 }
195
196 EvaluatePreparer* preparer = &evaluate_preparers_[thread_id];
197 EvaluateScratch* scratch = &evaluate_scratch_[thread_id];
198
199 // Prepare block residuals if requested.
200 const ResidualBlock* residual_block = program_->residual_blocks()[i];
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800201 double* block_residuals = nullptr;
202 if (residuals != nullptr) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800203 block_residuals = residuals + residual_layout_[i];
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800204 } else if (gradient != nullptr) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800205 block_residuals = scratch->residual_block_residuals.get();
206 }
207
208 // Prepare block jacobians if requested.
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800209 double** block_jacobians = nullptr;
210 if (jacobian != nullptr || gradient != nullptr) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800211 preparer->Prepare(residual_block,
212 i,
213 jacobian,
214 scratch->jacobian_block_ptrs.get());
215 block_jacobians = scratch->jacobian_block_ptrs.get();
216 }
217
218 // Evaluate the cost, residuals, and jacobians.
219 double block_cost;
220 if (!residual_block->Evaluate(
221 evaluate_options.apply_loss_function,
222 &block_cost,
223 block_residuals,
224 block_jacobians,
225 scratch->residual_block_evaluate_scratch.get())) {
226 abort = true;
227 return;
228 }
229
230 scratch->cost += block_cost;
231
232 // Store the jacobians, if they were requested.
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800233 if (jacobian != nullptr) {
234 jacobian_writer_.Write(
235 i, residual_layout_[i], block_jacobians, jacobian);
Austin Schuh70cc9552019-01-21 19:46:48 -0800236 }
237
238 // Compute and store the gradient, if it was requested.
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800239 if (gradient != nullptr) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800240 int num_residuals = residual_block->NumResiduals();
241 int num_parameter_blocks = residual_block->NumParameterBlocks();
242 for (int j = 0; j < num_parameter_blocks; ++j) {
243 const ParameterBlock* parameter_block =
244 residual_block->parameter_blocks()[j];
245 if (parameter_block->IsConstant()) {
246 continue;
247 }
248
249 MatrixTransposeVectorMultiply<Eigen::Dynamic, Eigen::Dynamic, 1>(
250 block_jacobians[j],
251 num_residuals,
Austin Schuh3de38b02024-06-25 18:25:10 -0700252 parameter_block->TangentSize(),
Austin Schuh70cc9552019-01-21 19:46:48 -0800253 block_residuals,
254 scratch->gradient.get() + parameter_block->delta_offset());
255 }
256 }
257 });
258
Austin Schuh3de38b02024-06-25 18:25:10 -0700259 if (abort) {
260 return false;
261 }
Austin Schuh70cc9552019-01-21 19:46:48 -0800262
Austin Schuh3de38b02024-06-25 18:25:10 -0700263 // Sum the cost and gradient (if requested) from each thread.
264 (*cost) = 0.0;
265 if (gradient != nullptr) {
266 auto gradient_vector = VectorRef(gradient, num_parameters_);
267 ParallelSetZero(options_.context, options_.num_threads, gradient_vector);
268 }
269
270 for (int i = 0; i < options_.num_threads; ++i) {
271 (*cost) += evaluate_scratch_[i].cost;
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800272 if (gradient != nullptr) {
Austin Schuh3de38b02024-06-25 18:25:10 -0700273 auto gradient_vector = VectorRef(gradient, num_parameters_);
274 ParallelAssign(
275 options_.context,
276 options_.num_threads,
277 gradient_vector,
278 gradient_vector + VectorRef(evaluate_scratch_[i].gradient.get(),
279 num_parameters_));
Austin Schuh70cc9552019-01-21 19:46:48 -0800280 }
281 }
Austin Schuh3de38b02024-06-25 18:25:10 -0700282
283 // It is possible that after accumulation that the cost has become infinite
284 // or a nan.
285 if (!std::isfinite(*cost)) {
286 LOG(ERROR) << "Accumulated cost = " << *cost
287 << " is not a finite number. Evaluation failed.";
288 return false;
289 }
290
291 // Finalize the Jacobian if it is available.
292 // `num_parameters` is passed to the finalizer so that additional
293 // storage can be reserved for additional diagonal elements if
294 // necessary.
295 if (jacobian != nullptr) {
296 JacobianFinalizer f;
297 f(jacobian, num_parameters_);
298 }
299
300 return true;
Austin Schuh70cc9552019-01-21 19:46:48 -0800301 }
302
303 bool Plus(const double* state,
304 const double* delta,
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800305 double* state_plus_delta) const final {
Austin Schuh3de38b02024-06-25 18:25:10 -0700306 return program_->Plus(
307 state, delta, state_plus_delta, options_.context, options_.num_threads);
Austin Schuh70cc9552019-01-21 19:46:48 -0800308 }
309
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800310 int NumParameters() const final { return program_->NumParameters(); }
311 int NumEffectiveParameters() const final {
Austin Schuh70cc9552019-01-21 19:46:48 -0800312 return program_->NumEffectiveParameters();
313 }
314
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800315 int NumResiduals() const final { return program_->NumResiduals(); }
Austin Schuh70cc9552019-01-21 19:46:48 -0800316
Austin Schuh1d1e6ea2020-12-23 21:56:30 -0800317 std::map<std::string, CallStatistics> Statistics() const final {
Austin Schuh70cc9552019-01-21 19:46:48 -0800318 return execution_summary_.statistics();
319 }
320
321 private:
322 // Per-thread scratch space needed to evaluate and store each residual block.
323 struct EvaluateScratch {
324 void Init(int max_parameters_per_residual_block,
325 int max_scratch_doubles_needed_for_evaluate,
326 int max_residuals_per_residual_block,
327 int num_parameters) {
Austin Schuh3de38b02024-06-25 18:25:10 -0700328 residual_block_evaluate_scratch =
329 std::make_unique<double[]>(max_scratch_doubles_needed_for_evaluate);
330 gradient = std::make_unique<double[]>(num_parameters);
Austin Schuh70cc9552019-01-21 19:46:48 -0800331 VectorRef(gradient.get(), num_parameters).setZero();
Austin Schuh3de38b02024-06-25 18:25:10 -0700332 residual_block_residuals =
333 std::make_unique<double[]>(max_residuals_per_residual_block);
334 jacobian_block_ptrs =
335 std::make_unique<double*[]>(max_parameters_per_residual_block);
Austin Schuh70cc9552019-01-21 19:46:48 -0800336 }
337
338 double cost;
339 std::unique_ptr<double[]> residual_block_evaluate_scratch;
Austin Schuh3de38b02024-06-25 18:25:10 -0700340 // The gradient on the manifold.
Austin Schuh70cc9552019-01-21 19:46:48 -0800341 std::unique_ptr<double[]> gradient;
342 // Enough space to store the residual for the largest residual block.
343 std::unique_ptr<double[]> residual_block_residuals;
344 std::unique_ptr<double*[]> jacobian_block_ptrs;
345 };
346
347 static void BuildResidualLayout(const Program& program,
348 std::vector<int>* residual_layout) {
349 const std::vector<ResidualBlock*>& residual_blocks =
350 program.residual_blocks();
351 residual_layout->resize(program.NumResidualBlocks());
352 int residual_pos = 0;
353 for (int i = 0; i < residual_blocks.size(); ++i) {
354 const int num_residuals = residual_blocks[i]->NumResiduals();
355 (*residual_layout)[i] = residual_pos;
356 residual_pos += num_residuals;
357 }
358 }
359
360 // Create scratch space for each thread evaluating the program.
Austin Schuh3de38b02024-06-25 18:25:10 -0700361 static std::unique_ptr<EvaluateScratch[]> CreateEvaluatorScratch(
362 const Program& program, unsigned num_threads) {
Austin Schuh70cc9552019-01-21 19:46:48 -0800363 int max_parameters_per_residual_block =
364 program.MaxParametersPerResidualBlock();
365 int max_scratch_doubles_needed_for_evaluate =
366 program.MaxScratchDoublesNeededForEvaluate();
367 int max_residuals_per_residual_block =
368 program.MaxResidualsPerResidualBlock();
369 int num_parameters = program.NumEffectiveParameters();
370
Austin Schuh3de38b02024-06-25 18:25:10 -0700371 auto evaluate_scratch = std::make_unique<EvaluateScratch[]>(num_threads);
Austin Schuh70cc9552019-01-21 19:46:48 -0800372 for (int i = 0; i < num_threads; i++) {
373 evaluate_scratch[i].Init(max_parameters_per_residual_block,
374 max_scratch_doubles_needed_for_evaluate,
375 max_residuals_per_residual_block,
376 num_parameters);
377 }
378 return evaluate_scratch;
379 }
380
381 Evaluator::Options options_;
382 Program* program_;
383 JacobianWriter jacobian_writer_;
384 std::unique_ptr<EvaluatePreparer[]> evaluate_preparers_;
385 std::unique_ptr<EvaluateScratch[]> evaluate_scratch_;
386 std::vector<int> residual_layout_;
Austin Schuh3de38b02024-06-25 18:25:10 -0700387 int num_parameters_;
Austin Schuh70cc9552019-01-21 19:46:48 -0800388 ::ceres::internal::ExecutionSummary execution_summary_;
389};
390
391} // namespace internal
392} // namespace ceres
393
394#endif // CERES_INTERNAL_PROGRAM_EVALUATOR_H_