Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 1 | // Ceres Solver - A fast non-linear least squares minimizer |
| 2 | // Copyright 2015 Google Inc. All rights reserved. |
| 3 | // http://ceres-solver.org/ |
| 4 | // |
| 5 | // Redistribution and use in source and binary forms, with or without |
| 6 | // modification, are permitted provided that the following conditions are met: |
| 7 | // |
| 8 | // * Redistributions of source code must retain the above copyright notice, |
| 9 | // this list of conditions and the following disclaimer. |
| 10 | // * Redistributions in binary form must reproduce the above copyright notice, |
| 11 | // this list of conditions and the following disclaimer in the documentation |
| 12 | // and/or other materials provided with the distribution. |
| 13 | // * Neither the name of Google Inc. nor the names of its contributors may be |
| 14 | // used to endorse or promote products derived from this software without |
| 15 | // specific prior written permission. |
| 16 | // |
| 17 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" |
| 18 | // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
| 19 | // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE |
| 20 | // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE |
| 21 | // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
| 22 | // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
| 23 | // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
| 24 | // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
| 25 | // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
| 26 | // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
| 27 | // POSSIBILITY OF SUCH DAMAGE. |
| 28 | // |
| 29 | // Author: keir@google.com (Keir Mierle) |
| 30 | // |
| 31 | // The ProgramEvaluator runs the cost functions contained in each residual block |
| 32 | // and stores the result into a jacobian. The particular type of jacobian is |
| 33 | // abstracted out using two template parameters: |
| 34 | // |
| 35 | // - An "EvaluatePreparer" that is responsible for creating the array with |
| 36 | // pointers to the jacobian blocks where the cost function evaluates to. |
| 37 | // - A "JacobianWriter" that is responsible for storing the resulting |
| 38 | // jacobian blocks in the passed sparse matrix. |
| 39 | // |
| 40 | // This abstraction affords an efficient evaluator implementation while still |
| 41 | // supporting writing to multiple sparse matrix formats. For example, when the |
| 42 | // ProgramEvaluator is parameterized for writing to block sparse matrices, the |
| 43 | // residual jacobians are written directly into their final position in the |
| 44 | // block sparse matrix by the user's CostFunction; there is no copying. |
| 45 | // |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 46 | // The evaluation is threaded with OpenMP or C++ threads. |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 47 | // |
| 48 | // The EvaluatePreparer and JacobianWriter interfaces are as follows: |
| 49 | // |
| 50 | // class EvaluatePreparer { |
| 51 | // // Prepare the jacobians array for use as the destination of a call to |
| 52 | // // a cost function's evaluate method. |
| 53 | // void Prepare(const ResidualBlock* residual_block, |
| 54 | // int residual_block_index, |
| 55 | // SparseMatrix* jacobian, |
| 56 | // double** jacobians); |
| 57 | // } |
| 58 | // |
| 59 | // class JacobianWriter { |
| 60 | // // Create a jacobian that this writer can write. Same as |
| 61 | // // Evaluator::CreateJacobian. |
| 62 | // SparseMatrix* CreateJacobian() const; |
| 63 | // |
| 64 | // // Create num_threads evaluate preparers. Caller owns result which must |
| 65 | // // be freed with delete[]. Resulting preparers are valid while *this is. |
| 66 | // EvaluatePreparer* CreateEvaluatePreparers(int num_threads); |
| 67 | // |
| 68 | // // Write the block jacobians from a residual block evaluation to the |
| 69 | // // larger sparse jacobian. |
| 70 | // void Write(int residual_id, |
| 71 | // int residual_offset, |
| 72 | // double** jacobians, |
| 73 | // SparseMatrix* jacobian); |
| 74 | // } |
| 75 | // |
| 76 | // Note: The ProgramEvaluator is not thread safe, since internally it maintains |
| 77 | // some per-thread scratch space. |
| 78 | |
| 79 | #ifndef CERES_INTERNAL_PROGRAM_EVALUATOR_H_ |
| 80 | #define CERES_INTERNAL_PROGRAM_EVALUATOR_H_ |
| 81 | |
| 82 | // This include must come before any #ifndef check on Ceres compile options. |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 83 | // clang-format off |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 84 | #include "ceres/internal/port.h" |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 85 | // clang-format on |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 86 | |
| 87 | #include <atomic> |
| 88 | #include <map> |
| 89 | #include <memory> |
| 90 | #include <string> |
| 91 | #include <vector> |
| 92 | |
| 93 | #include "ceres/evaluation_callback.h" |
| 94 | #include "ceres/execution_summary.h" |
| 95 | #include "ceres/internal/eigen.h" |
| 96 | #include "ceres/parallel_for.h" |
| 97 | #include "ceres/parameter_block.h" |
| 98 | #include "ceres/program.h" |
| 99 | #include "ceres/residual_block.h" |
| 100 | #include "ceres/small_blas.h" |
| 101 | |
| 102 | namespace ceres { |
| 103 | namespace internal { |
| 104 | |
// Default JacobianFinalizer used by ProgramEvaluator: a no-op for jacobian
// types that need no post-processing after all residual blocks are written.
// num_parameters is accepted (and ignored) to match the finalizer contract.
struct NullJacobianFinalizer {
  void operator()(SparseMatrix* jacobian, int num_parameters) {}
};
| 108 | |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 109 | template <typename EvaluatePreparer, |
| 110 | typename JacobianWriter, |
| 111 | typename JacobianFinalizer = NullJacobianFinalizer> |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 112 | class ProgramEvaluator : public Evaluator { |
| 113 | public: |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 114 | ProgramEvaluator(const Evaluator::Options& options, Program* program) |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 115 | : options_(options), |
| 116 | program_(program), |
| 117 | jacobian_writer_(options, program), |
| 118 | evaluate_preparers_( |
| 119 | jacobian_writer_.CreateEvaluatePreparers(options.num_threads)) { |
| 120 | #ifdef CERES_NO_THREADS |
| 121 | if (options_.num_threads > 1) { |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 122 | LOG(WARNING) << "No threading support is compiled into this binary; " |
| 123 | << "only options.num_threads = 1 is supported. Switching " |
| 124 | << "to single threaded mode."; |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 125 | options_.num_threads = 1; |
| 126 | } |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 127 | #endif // CERES_NO_THREADS |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 128 | |
| 129 | BuildResidualLayout(*program, &residual_layout_); |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 130 | evaluate_scratch_.reset( |
| 131 | CreateEvaluatorScratch(*program, options.num_threads)); |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 132 | } |
| 133 | |
| 134 | // Implementation of Evaluator interface. |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 135 | SparseMatrix* CreateJacobian() const final { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 136 | return jacobian_writer_.CreateJacobian(); |
| 137 | } |
| 138 | |
| 139 | bool Evaluate(const Evaluator::EvaluateOptions& evaluate_options, |
| 140 | const double* state, |
| 141 | double* cost, |
| 142 | double* residuals, |
| 143 | double* gradient, |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 144 | SparseMatrix* jacobian) final { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 145 | ScopedExecutionTimer total_timer("Evaluator::Total", &execution_summary_); |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 146 | ScopedExecutionTimer call_type_timer( |
| 147 | gradient == nullptr && jacobian == nullptr ? "Evaluator::Residual" |
| 148 | : "Evaluator::Jacobian", |
| 149 | &execution_summary_); |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 150 | |
| 151 | // The parameters are stateful, so set the state before evaluating. |
| 152 | if (!program_->StateVectorToParameterBlocks(state)) { |
| 153 | return false; |
| 154 | } |
| 155 | |
| 156 | // Notify the user about a new evaluation point if they are interested. |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 157 | if (options_.evaluation_callback != nullptr) { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 158 | program_->CopyParameterBlockStateToUserState(); |
| 159 | options_.evaluation_callback->PrepareForEvaluation( |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 160 | /*jacobians=*/(gradient != nullptr || jacobian != nullptr), |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 161 | evaluate_options.new_evaluation_point); |
| 162 | } |
| 163 | |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 164 | if (residuals != nullptr) { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 165 | VectorRef(residuals, program_->NumResiduals()).setZero(); |
| 166 | } |
| 167 | |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 168 | if (jacobian != nullptr) { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 169 | jacobian->SetZero(); |
| 170 | } |
| 171 | |
| 172 | // Each thread gets it's own cost and evaluate scratch space. |
| 173 | for (int i = 0; i < options_.num_threads; ++i) { |
| 174 | evaluate_scratch_[i].cost = 0.0; |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 175 | if (gradient != nullptr) { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 176 | VectorRef(evaluate_scratch_[i].gradient.get(), |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 177 | program_->NumEffectiveParameters()) |
| 178 | .setZero(); |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 179 | } |
| 180 | } |
| 181 | |
| 182 | const int num_residual_blocks = program_->NumResidualBlocks(); |
| 183 | // This bool is used to disable the loop if an error is encountered without |
| 184 | // breaking out of it. The remaining loop iterations are still run, but with |
| 185 | // an empty body, and so will finish quickly. |
| 186 | std::atomic_bool abort(false); |
| 187 | ParallelFor( |
| 188 | options_.context, |
| 189 | 0, |
| 190 | num_residual_blocks, |
| 191 | options_.num_threads, |
| 192 | [&](int thread_id, int i) { |
| 193 | if (abort) { |
| 194 | return; |
| 195 | } |
| 196 | |
| 197 | EvaluatePreparer* preparer = &evaluate_preparers_[thread_id]; |
| 198 | EvaluateScratch* scratch = &evaluate_scratch_[thread_id]; |
| 199 | |
| 200 | // Prepare block residuals if requested. |
| 201 | const ResidualBlock* residual_block = program_->residual_blocks()[i]; |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 202 | double* block_residuals = nullptr; |
| 203 | if (residuals != nullptr) { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 204 | block_residuals = residuals + residual_layout_[i]; |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 205 | } else if (gradient != nullptr) { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 206 | block_residuals = scratch->residual_block_residuals.get(); |
| 207 | } |
| 208 | |
| 209 | // Prepare block jacobians if requested. |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 210 | double** block_jacobians = nullptr; |
| 211 | if (jacobian != nullptr || gradient != nullptr) { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 212 | preparer->Prepare(residual_block, |
| 213 | i, |
| 214 | jacobian, |
| 215 | scratch->jacobian_block_ptrs.get()); |
| 216 | block_jacobians = scratch->jacobian_block_ptrs.get(); |
| 217 | } |
| 218 | |
| 219 | // Evaluate the cost, residuals, and jacobians. |
| 220 | double block_cost; |
| 221 | if (!residual_block->Evaluate( |
| 222 | evaluate_options.apply_loss_function, |
| 223 | &block_cost, |
| 224 | block_residuals, |
| 225 | block_jacobians, |
| 226 | scratch->residual_block_evaluate_scratch.get())) { |
| 227 | abort = true; |
| 228 | return; |
| 229 | } |
| 230 | |
| 231 | scratch->cost += block_cost; |
| 232 | |
| 233 | // Store the jacobians, if they were requested. |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 234 | if (jacobian != nullptr) { |
| 235 | jacobian_writer_.Write( |
| 236 | i, residual_layout_[i], block_jacobians, jacobian); |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 237 | } |
| 238 | |
| 239 | // Compute and store the gradient, if it was requested. |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 240 | if (gradient != nullptr) { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 241 | int num_residuals = residual_block->NumResiduals(); |
| 242 | int num_parameter_blocks = residual_block->NumParameterBlocks(); |
| 243 | for (int j = 0; j < num_parameter_blocks; ++j) { |
| 244 | const ParameterBlock* parameter_block = |
| 245 | residual_block->parameter_blocks()[j]; |
| 246 | if (parameter_block->IsConstant()) { |
| 247 | continue; |
| 248 | } |
| 249 | |
| 250 | MatrixTransposeVectorMultiply<Eigen::Dynamic, Eigen::Dynamic, 1>( |
| 251 | block_jacobians[j], |
| 252 | num_residuals, |
| 253 | parameter_block->LocalSize(), |
| 254 | block_residuals, |
| 255 | scratch->gradient.get() + parameter_block->delta_offset()); |
| 256 | } |
| 257 | } |
| 258 | }); |
| 259 | |
| 260 | if (!abort) { |
| 261 | const int num_parameters = program_->NumEffectiveParameters(); |
| 262 | |
| 263 | // Sum the cost and gradient (if requested) from each thread. |
| 264 | (*cost) = 0.0; |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 265 | if (gradient != nullptr) { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 266 | VectorRef(gradient, num_parameters).setZero(); |
| 267 | } |
| 268 | for (int i = 0; i < options_.num_threads; ++i) { |
| 269 | (*cost) += evaluate_scratch_[i].cost; |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 270 | if (gradient != nullptr) { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 271 | VectorRef(gradient, num_parameters) += |
| 272 | VectorRef(evaluate_scratch_[i].gradient.get(), num_parameters); |
| 273 | } |
| 274 | } |
| 275 | |
| 276 | // Finalize the Jacobian if it is available. |
| 277 | // `num_parameters` is passed to the finalizer so that additional |
| 278 | // storage can be reserved for additional diagonal elements if |
| 279 | // necessary. |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 280 | if (jacobian != nullptr) { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 281 | JacobianFinalizer f; |
| 282 | f(jacobian, num_parameters); |
| 283 | } |
| 284 | } |
| 285 | return !abort; |
| 286 | } |
| 287 | |
| 288 | bool Plus(const double* state, |
| 289 | const double* delta, |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 290 | double* state_plus_delta) const final { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 291 | return program_->Plus(state, delta, state_plus_delta); |
| 292 | } |
| 293 | |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 294 | int NumParameters() const final { return program_->NumParameters(); } |
| 295 | int NumEffectiveParameters() const final { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 296 | return program_->NumEffectiveParameters(); |
| 297 | } |
| 298 | |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 299 | int NumResiduals() const final { return program_->NumResiduals(); } |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 300 | |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 301 | std::map<std::string, CallStatistics> Statistics() const final { |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 302 | return execution_summary_.statistics(); |
| 303 | } |
| 304 | |
| 305 | private: |
| 306 | // Per-thread scratch space needed to evaluate and store each residual block. |
| 307 | struct EvaluateScratch { |
| 308 | void Init(int max_parameters_per_residual_block, |
| 309 | int max_scratch_doubles_needed_for_evaluate, |
| 310 | int max_residuals_per_residual_block, |
| 311 | int num_parameters) { |
| 312 | residual_block_evaluate_scratch.reset( |
| 313 | new double[max_scratch_doubles_needed_for_evaluate]); |
| 314 | gradient.reset(new double[num_parameters]); |
| 315 | VectorRef(gradient.get(), num_parameters).setZero(); |
| 316 | residual_block_residuals.reset( |
| 317 | new double[max_residuals_per_residual_block]); |
Austin Schuh | 1d1e6ea | 2020-12-23 21:56:30 -0800 | [diff] [blame^] | 318 | jacobian_block_ptrs.reset(new double*[max_parameters_per_residual_block]); |
Austin Schuh | 70cc955 | 2019-01-21 19:46:48 -0800 | [diff] [blame] | 319 | } |
| 320 | |
| 321 | double cost; |
| 322 | std::unique_ptr<double[]> residual_block_evaluate_scratch; |
| 323 | // The gradient in the local parameterization. |
| 324 | std::unique_ptr<double[]> gradient; |
| 325 | // Enough space to store the residual for the largest residual block. |
| 326 | std::unique_ptr<double[]> residual_block_residuals; |
| 327 | std::unique_ptr<double*[]> jacobian_block_ptrs; |
| 328 | }; |
| 329 | |
| 330 | static void BuildResidualLayout(const Program& program, |
| 331 | std::vector<int>* residual_layout) { |
| 332 | const std::vector<ResidualBlock*>& residual_blocks = |
| 333 | program.residual_blocks(); |
| 334 | residual_layout->resize(program.NumResidualBlocks()); |
| 335 | int residual_pos = 0; |
| 336 | for (int i = 0; i < residual_blocks.size(); ++i) { |
| 337 | const int num_residuals = residual_blocks[i]->NumResiduals(); |
| 338 | (*residual_layout)[i] = residual_pos; |
| 339 | residual_pos += num_residuals; |
| 340 | } |
| 341 | } |
| 342 | |
| 343 | // Create scratch space for each thread evaluating the program. |
| 344 | static EvaluateScratch* CreateEvaluatorScratch(const Program& program, |
| 345 | int num_threads) { |
| 346 | int max_parameters_per_residual_block = |
| 347 | program.MaxParametersPerResidualBlock(); |
| 348 | int max_scratch_doubles_needed_for_evaluate = |
| 349 | program.MaxScratchDoublesNeededForEvaluate(); |
| 350 | int max_residuals_per_residual_block = |
| 351 | program.MaxResidualsPerResidualBlock(); |
| 352 | int num_parameters = program.NumEffectiveParameters(); |
| 353 | |
| 354 | EvaluateScratch* evaluate_scratch = new EvaluateScratch[num_threads]; |
| 355 | for (int i = 0; i < num_threads; i++) { |
| 356 | evaluate_scratch[i].Init(max_parameters_per_residual_block, |
| 357 | max_scratch_doubles_needed_for_evaluate, |
| 358 | max_residuals_per_residual_block, |
| 359 | num_parameters); |
| 360 | } |
| 361 | return evaluate_scratch; |
| 362 | } |
| 363 | |
| 364 | Evaluator::Options options_; |
| 365 | Program* program_; |
| 366 | JacobianWriter jacobian_writer_; |
| 367 | std::unique_ptr<EvaluatePreparer[]> evaluate_preparers_; |
| 368 | std::unique_ptr<EvaluateScratch[]> evaluate_scratch_; |
| 369 | std::vector<int> residual_layout_; |
| 370 | ::ceres::internal::ExecutionSummary execution_summary_; |
| 371 | }; |
| 372 | |
| 373 | } // namespace internal |
| 374 | } // namespace ceres |
| 375 | |
| 376 | #endif // CERES_INTERNAL_PROGRAM_EVALUATOR_H_ |