// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2023 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)

#include "ceres/gradient_problem_solver.h"

#include <map>
#include <memory>
#include <string>

#include "ceres/callbacks.h"
#include "ceres/gradient_problem.h"
#include "ceres/gradient_problem_evaluator.h"
#include "ceres/internal/eigen.h"
#include "ceres/internal/export.h"
#include "ceres/map_util.h"
#include "ceres/minimizer.h"
#include "ceres/solver.h"
#include "ceres/solver_utils.h"
#include "ceres/stringprintf.h"
#include "ceres/types.h"
#include "ceres/wall_time.h"

namespace ceres {
using internal::StringAppendF;
using internal::StringPrintf;

namespace {

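// Maps the line-search related subset of GradientProblemSolver::Options onto
// a Solver::Options so that the generic LINE_SEARCH minimizer machinery can
// be reused. Options that only apply to trust region solves keep their
// Solver::Options defaults.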
Solver::Options GradientProblemSolverOptionsToSolverOptions(
    const GradientProblemSolver::Options& options) {
#define COPY_OPTION(x) solver_options.x = options.x

  Solver::Options solver_options;
  solver_options.minimizer_type = LINE_SEARCH;
  COPY_OPTION(line_search_direction_type);
  COPY_OPTION(line_search_type);
  COPY_OPTION(nonlinear_conjugate_gradient_type);
  COPY_OPTION(max_lbfgs_rank);
  COPY_OPTION(use_approximate_eigenvalue_bfgs_scaling);
  COPY_OPTION(line_search_interpolation_type);
  COPY_OPTION(min_line_search_step_size);
  COPY_OPTION(line_search_sufficient_function_decrease);
  COPY_OPTION(max_line_search_step_contraction);
  COPY_OPTION(min_line_search_step_contraction);
  COPY_OPTION(max_num_line_search_step_size_iterations);
  COPY_OPTION(max_num_line_search_direction_restarts);
  COPY_OPTION(line_search_sufficient_curvature_decrease);
  COPY_OPTION(max_line_search_step_expansion);
  COPY_OPTION(max_num_iterations);
  COPY_OPTION(max_solver_time_in_seconds);
  COPY_OPTION(parameter_tolerance);
  COPY_OPTION(function_tolerance);
  COPY_OPTION(gradient_tolerance);
  COPY_OPTION(logging_type);
  COPY_OPTION(minimizer_progress_to_stdout);
  COPY_OPTION(callbacks);
  return solver_options;
#undef COPY_OPTION
}

}  // namespace

bool GradientProblemSolver::Options::IsValid(std::string* error) const {
  const Solver::Options solver_options =
      GradientProblemSolverOptionsToSolverOptions(*this);
  return solver_options.IsValid(error);
}

GradientProblemSolver::~GradientProblemSolver() = default;

void GradientProblemSolver::Solve(const GradientProblemSolver::Options& options,
                                  const GradientProblem& problem,
                                  double* parameters_ptr,
                                  GradientProblemSolver::Summary* summary) {
  using internal::CallStatistics;
  using internal::GradientProblemEvaluator;
  using internal::GradientProblemSolverStateUpdatingCallback;
  using internal::LoggingCallback;
  using internal::Minimizer;
  using internal::SetSummaryFinalCost;
  using internal::WallTimeInSeconds;

  double start_time = WallTimeInSeconds();

  CHECK(summary != nullptr);
  *summary = Summary();
  // clang-format off
  summary->num_parameters = problem.NumParameters();
  summary->num_tangent_parameters = problem.NumTangentParameters();
  summary->line_search_direction_type = options.line_search_direction_type;  // NOLINT
  summary->line_search_interpolation_type = options.line_search_interpolation_type;  // NOLINT
  summary->line_search_type = options.line_search_type;
  summary->max_lbfgs_rank = options.max_lbfgs_rank;
  summary->nonlinear_conjugate_gradient_type = options.nonlinear_conjugate_gradient_type;  // NOLINT
  // clang-format on

  // Check validity
  if (!options.IsValid(&summary->message)) {
    LOG(ERROR) << "Terminating: " << summary->message;
    return;
  }

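  // Minimize over a local copy of the parameter block; the caller's memory is
  // written back only when the solve produces a usable solution (or on every
  // iteration when update_state_every_iteration is set, via the callback
  // below).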
  VectorRef parameters(parameters_ptr, problem.NumParameters());
  Vector solution(problem.NumParameters());
  solution = parameters;

  // TODO(sameeragarwal): This is a bit convoluted; we should be able to
  // convert to minimizer options directly, but this will do for now.
  Minimizer::Options minimizer_options =
      Minimizer::Options(GradientProblemSolverOptionsToSolverOptions(options));
  minimizer_options.evaluator =
      std::make_unique<GradientProblemEvaluator>(problem);

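  // Iteration callbacks run in insertion order, so the internal logging and
  // state-updating callbacks below are prepended and therefore execute before
  // any user-supplied callbacks.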
  std::unique_ptr<IterationCallback> logging_callback;
  if (options.logging_type != SILENT) {
    logging_callback = std::make_unique<LoggingCallback>(
        LINE_SEARCH, options.minimizer_progress_to_stdout);
    minimizer_options.callbacks.insert(minimizer_options.callbacks.begin(),
                                       logging_callback.get());
  }

  std::unique_ptr<IterationCallback> state_updating_callback;
  if (options.update_state_every_iteration) {
    state_updating_callback =
        std::make_unique<GradientProblemSolverStateUpdatingCallback>(
            problem.NumParameters(), solution.data(), parameters_ptr);
    minimizer_options.callbacks.insert(minimizer_options.callbacks.begin(),
                                       state_updating_callback.get());
  }

  std::unique_ptr<Minimizer> minimizer(Minimizer::Create(LINE_SEARCH));

  Solver::Summary solver_summary;
  solver_summary.fixed_cost = 0.0;
  solver_summary.preprocessor_time_in_seconds = 0.0;
  solver_summary.postprocessor_time_in_seconds = 0.0;
  solver_summary.line_search_polynomial_minimization_time_in_seconds = 0.0;

  minimizer->Minimize(minimizer_options, solution.data(), &solver_summary);

  // clang-format off
  summary->termination_type = solver_summary.termination_type;
  summary->message = solver_summary.message;
  summary->initial_cost = solver_summary.initial_cost;
  summary->final_cost = solver_summary.final_cost;
  summary->iterations = solver_summary.iterations;
  // clang-format on
  summary->line_search_polynomial_minimization_time_in_seconds =
      solver_summary.line_search_polynomial_minimization_time_in_seconds;

  if (summary->IsSolutionUsable()) {
    parameters = solution;
    SetSummaryFinalCost(summary);
  }

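  // The GradientProblemEvaluator records its call statistics under the
  // generic evaluator keys: "Evaluator::Residual" corresponds to cost-only
  // evaluations and "Evaluator::Jacobian" to combined cost-and-gradient
  // evaluations.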
  const std::map<std::string, CallStatistics>& evaluator_statistics =
      minimizer_options.evaluator->Statistics();
  {
    const CallStatistics& call_stats = FindWithDefault(
        evaluator_statistics, "Evaluator::Residual", CallStatistics());
    summary->cost_evaluation_time_in_seconds = call_stats.time;
    summary->num_cost_evaluations = call_stats.calls;
  }

  {
    const CallStatistics& call_stats = FindWithDefault(
        evaluator_statistics, "Evaluator::Jacobian", CallStatistics());
    summary->gradient_evaluation_time_in_seconds = call_stats.time;
    summary->num_gradient_evaluations = call_stats.calls;
  }

  summary->total_time_in_seconds = WallTimeInSeconds() - start_time;
}

bool GradientProblemSolver::Summary::IsSolutionUsable() const {
  return internal::IsSolutionUsable(*this);
}

std::string GradientProblemSolver::Summary::BriefReport() const {
  return StringPrintf(
      "Ceres GradientProblemSolver Report: "
      "Iterations: %d, "
      "Initial cost: %e, "
      "Final cost: %e, "
      "Termination: %s",
      static_cast<int>(iterations.size()),
      initial_cost,
      final_cost,
      TerminationTypeToString(termination_type));
}

std::string GradientProblemSolver::Summary::FullReport() const {
  using internal::VersionString;

  // NOTE operator+ is not usable for concatenating a string and a string_view.
  std::string report =
      std::string{"\nSolver Summary (v "}.append(VersionString()) + ")\n\n";

  StringAppendF(&report, "Parameters % 25d\n", num_parameters);
  if (num_tangent_parameters != num_parameters) {
    StringAppendF(
        &report, "Tangent parameters % 25d\n", num_tangent_parameters);
  }

  std::string line_search_direction_string;
  if (line_search_direction_type == LBFGS) {
    line_search_direction_string = StringPrintf("LBFGS (%d)", max_lbfgs_rank);
  } else if (line_search_direction_type == NONLINEAR_CONJUGATE_GRADIENT) {
    line_search_direction_string = NonlinearConjugateGradientTypeToString(
        nonlinear_conjugate_gradient_type);
  } else {
    line_search_direction_string =
        LineSearchDirectionTypeToString(line_search_direction_type);
  }

  StringAppendF(&report,
                "Line search direction %19s\n",
                line_search_direction_string.c_str());

  const std::string line_search_type_string = StringPrintf(
      "%s %s",
      LineSearchInterpolationTypeToString(line_search_interpolation_type),
      LineSearchTypeToString(line_search_type));
  StringAppendF(&report,
                "Line search type %19s\n",
                line_search_type_string.c_str());
  StringAppendF(&report, "\n");

  StringAppendF(&report, "\nCost:\n");
  StringAppendF(&report, "Initial % 30e\n", initial_cost);
  if (termination_type != FAILURE && termination_type != USER_FAILURE) {
    StringAppendF(&report, "Final % 30e\n", final_cost);
    StringAppendF(&report, "Change % 30e\n", initial_cost - final_cost);
  }

  StringAppendF(&report,
                "\nMinimizer iterations % 16d\n",
                static_cast<int>(iterations.size()));

  StringAppendF(&report, "\nTime (in seconds):\n");
  StringAppendF(&report,
                "\n Cost evaluation %23.6f (%d)\n",
                cost_evaluation_time_in_seconds,
                num_cost_evaluations);
  StringAppendF(&report,
                " Gradient & cost evaluation %16.6f (%d)\n",
                gradient_evaluation_time_in_seconds,
                num_gradient_evaluations);
  StringAppendF(&report,
                " Polynomial minimization %17.6f\n",
                line_search_polynomial_minimization_time_in_seconds);
  StringAppendF(
      &report, "Total %25.6f\n\n", total_time_in_seconds);

  StringAppendF(&report,
                "Termination: %25s (%s)\n",
                TerminationTypeToString(termination_type),
                message.c_str());
  return report;
}

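// Usage sketch (illustrative only, kept as a comment): a caller wraps a
// first-order objective in a GradientProblem and hands it to the free Solve()
// defined below. The Rosenbrock functor here is a hypothetical user-defined
// FirstOrderFunction, and a GradientProblem constructor taking a raw owning
// pointer is assumed; the exact constructor signature may differ between
// Ceres versions.
//
//   class Rosenbrock final : public ceres::FirstOrderFunction {
//    public:
//     // f(x, y) = (1 - x)^2 + 100 (y - x^2)^2
//     bool Evaluate(const double* x,
//                   double* cost,
//                   double* gradient) const override {
//       cost[0] = (1.0 - x[0]) * (1.0 - x[0]) +
//                 100.0 * (x[1] - x[0] * x[0]) * (x[1] - x[0] * x[0]);
//       if (gradient != nullptr) {
//         gradient[0] =
//             -2.0 * (1.0 - x[0]) - 400.0 * x[0] * (x[1] - x[0] * x[0]);
//         gradient[1] = 200.0 * (x[1] - x[0] * x[0]);
//       }
//       return true;
//     }
//     int NumParameters() const override { return 2; }
//   };
//
//   double parameters[2] = {-1.2, 1.0};
//   ceres::GradientProblem problem(new Rosenbrock);
//   ceres::GradientProblemSolver::Options options;
//   options.minimizer_progress_to_stdout = true;
//   ceres::GradientProblemSolver::Summary summary;
//   ceres::Solve(options, problem, parameters, &summary);
//   std::cout << summary.FullReport() << "\n";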
void Solve(const GradientProblemSolver::Options& options,
           const GradientProblem& problem,
           double* parameters,
           GradientProblemSolver::Summary* summary) {
  GradientProblemSolver solver;
  solver.Solve(options, problem, parameters, summary);
}

}  // namespace ceres