// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2015 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)

#include "ceres/gradient_problem_solver.h"

#include <memory>

#include "ceres/callbacks.h"
#include "ceres/gradient_problem.h"
#include "ceres/gradient_problem_evaluator.h"
#include "ceres/internal/eigen.h"
#include "ceres/internal/port.h"
#include "ceres/map_util.h"
#include "ceres/minimizer.h"
#include "ceres/solver.h"
#include "ceres/solver_utils.h"
#include "ceres/stringprintf.h"
#include "ceres/types.h"
#include "ceres/wall_time.h"

namespace ceres {
using internal::StringAppendF;
using internal::StringPrintf;
using std::string;

namespace {

Solver::Options GradientProblemSolverOptionsToSolverOptions(
    const GradientProblemSolver::Options& options) {
#define COPY_OPTION(x) solver_options.x = options.x

  Solver::Options solver_options;
  solver_options.minimizer_type = LINE_SEARCH;
  COPY_OPTION(line_search_direction_type);
  COPY_OPTION(line_search_type);
  COPY_OPTION(nonlinear_conjugate_gradient_type);
  COPY_OPTION(max_lbfgs_rank);
  COPY_OPTION(use_approximate_eigenvalue_bfgs_scaling);
  COPY_OPTION(line_search_interpolation_type);
  COPY_OPTION(min_line_search_step_size);
  COPY_OPTION(line_search_sufficient_function_decrease);
  COPY_OPTION(max_line_search_step_contraction);
  COPY_OPTION(min_line_search_step_contraction);
  COPY_OPTION(max_num_line_search_step_size_iterations);
  COPY_OPTION(max_num_line_search_direction_restarts);
  COPY_OPTION(line_search_sufficient_curvature_decrease);
  COPY_OPTION(max_line_search_step_expansion);
  COPY_OPTION(max_num_iterations);
  COPY_OPTION(max_solver_time_in_seconds);
  COPY_OPTION(parameter_tolerance);
  COPY_OPTION(function_tolerance);
  COPY_OPTION(gradient_tolerance);
  COPY_OPTION(logging_type);
  COPY_OPTION(minimizer_progress_to_stdout);
  COPY_OPTION(callbacks);
  return solver_options;
#undef COPY_OPTION
}

}  // namespace

bool GradientProblemSolver::Options::IsValid(std::string* error) const {
  const Solver::Options solver_options =
      GradientProblemSolverOptionsToSolverOptions(*this);
  return solver_options.IsValid(error);
}
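
// Example (illustrative sketch, not part of this file's implementation):
// validating options before calling Solve(). The option field used below is a
// real GradientProblemSolver::Options member; the surrounding snippet is
// hypothetical.
//
//   GradientProblemSolver::Options options;
//   options.max_num_iterations = 100;
//   std::string error;
//   if (!options.IsValid(&error)) {
//     LOG(ERROR) << "Invalid solver options: " << error;
//   }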

GradientProblemSolver::~GradientProblemSolver() {}

void GradientProblemSolver::Solve(const GradientProblemSolver::Options& options,
                                  const GradientProblem& problem,
                                  double* parameters_ptr,
                                  GradientProblemSolver::Summary* summary) {
  using internal::CallStatistics;
  using internal::GradientProblemEvaluator;
  using internal::GradientProblemSolverStateUpdatingCallback;
  using internal::LoggingCallback;
  using internal::Minimizer;
  using internal::SetSummaryFinalCost;
  using internal::WallTimeInSeconds;

  double start_time = WallTimeInSeconds();

  CHECK(summary != nullptr);
  *summary = Summary();
  // clang-format off
  summary->num_parameters                    = problem.NumParameters();
  summary->num_local_parameters              = problem.NumLocalParameters();
  summary->line_search_direction_type        = options.line_search_direction_type;         // NOLINT
  summary->line_search_interpolation_type    = options.line_search_interpolation_type;     // NOLINT
  summary->line_search_type                  = options.line_search_type;
  summary->max_lbfgs_rank                    = options.max_lbfgs_rank;
  summary->nonlinear_conjugate_gradient_type = options.nonlinear_conjugate_gradient_type;  // NOLINT
  // clang-format on

  // Check validity
  if (!options.IsValid(&summary->message)) {
    LOG(ERROR) << "Terminating: " << summary->message;
    return;
  }

  VectorRef parameters(parameters_ptr, problem.NumParameters());
  Vector solution(problem.NumParameters());
  solution = parameters;

  // TODO(sameeragarwal): This is a bit convoluted, we should be able
  // to convert to minimizer options directly, but this will do for
  // now.
  Minimizer::Options minimizer_options =
      Minimizer::Options(GradientProblemSolverOptionsToSolverOptions(options));
  minimizer_options.evaluator.reset(new GradientProblemEvaluator(problem));
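
  // The logging and state-updating callbacks below are inserted at the front
  // of the callback list, so they run before any user-supplied callbacks on
  // each iteration.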
  std::unique_ptr<IterationCallback> logging_callback;
  if (options.logging_type != SILENT) {
    logging_callback.reset(
        new LoggingCallback(LINE_SEARCH, options.minimizer_progress_to_stdout));
    minimizer_options.callbacks.insert(minimizer_options.callbacks.begin(),
                                       logging_callback.get());
  }

  std::unique_ptr<IterationCallback> state_updating_callback;
  if (options.update_state_every_iteration) {
    state_updating_callback.reset(
        new GradientProblemSolverStateUpdatingCallback(
            problem.NumParameters(), solution.data(), parameters_ptr));
    minimizer_options.callbacks.insert(minimizer_options.callbacks.begin(),
                                       state_updating_callback.get());
  }

  std::unique_ptr<Minimizer> minimizer(Minimizer::Create(LINE_SEARCH));

  Solver::Summary solver_summary;
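  // A gradient problem has no preprocessing/postprocessing stage and no fixed
  // (constant parameter block) cost, so initialize the corresponding summary
  // fields explicitly before running the minimizer.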
  solver_summary.fixed_cost = 0.0;
  solver_summary.preprocessor_time_in_seconds = 0.0;
  solver_summary.postprocessor_time_in_seconds = 0.0;
  solver_summary.line_search_polynomial_minimization_time_in_seconds = 0.0;

  minimizer->Minimize(minimizer_options, solution.data(), &solver_summary);

  // clang-format off
  summary->termination_type = solver_summary.termination_type;
  summary->message          = solver_summary.message;
  summary->initial_cost     = solver_summary.initial_cost;
  summary->final_cost       = solver_summary.final_cost;
  summary->iterations       = solver_summary.iterations;
  // clang-format on
  summary->line_search_polynomial_minimization_time_in_seconds =
      solver_summary.line_search_polynomial_minimization_time_in_seconds;

  if (summary->IsSolutionUsable()) {
    parameters = solution;
    SetSummaryFinalCost(summary);
  }

  const std::map<string, CallStatistics>& evaluator_statistics =
      minimizer_options.evaluator->Statistics();
  {
    const CallStatistics& call_stats = FindWithDefault(
        evaluator_statistics, "Evaluator::Residual", CallStatistics());
    summary->cost_evaluation_time_in_seconds = call_stats.time;
    summary->num_cost_evaluations = call_stats.calls;
  }

  {
    const CallStatistics& call_stats = FindWithDefault(
        evaluator_statistics, "Evaluator::Jacobian", CallStatistics());
    summary->gradient_evaluation_time_in_seconds = call_stats.time;
    summary->num_gradient_evaluations = call_stats.calls;
  }

  summary->total_time_in_seconds = WallTimeInSeconds() - start_time;
}

bool GradientProblemSolver::Summary::IsSolutionUsable() const {
  return internal::IsSolutionUsable(*this);
}

string GradientProblemSolver::Summary::BriefReport() const {
  return StringPrintf(
      "Ceres GradientProblemSolver Report: "
      "Iterations: %d, "
      "Initial cost: %e, "
      "Final cost: %e, "
      "Termination: %s",
      static_cast<int>(iterations.size()),
      initial_cost,
      final_cost,
      TerminationTypeToString(termination_type));
}

string GradientProblemSolver::Summary::FullReport() const {
  using internal::VersionString;

  string report = string("\nSolver Summary (v " + VersionString() + ")\n\n");

  StringAppendF(&report, "Parameters % 25d\n", num_parameters);
  if (num_local_parameters != num_parameters) {
    StringAppendF(&report, "Local parameters % 25d\n", num_local_parameters);
  }

  string line_search_direction_string;
  if (line_search_direction_type == LBFGS) {
    line_search_direction_string = StringPrintf("LBFGS (%d)", max_lbfgs_rank);
  } else if (line_search_direction_type == NONLINEAR_CONJUGATE_GRADIENT) {
    line_search_direction_string = NonlinearConjugateGradientTypeToString(
        nonlinear_conjugate_gradient_type);
  } else {
    line_search_direction_string =
        LineSearchDirectionTypeToString(line_search_direction_type);
  }

  StringAppendF(&report,
                "Line search direction %19s\n",
                line_search_direction_string.c_str());

  const string line_search_type_string = StringPrintf(
      "%s %s",
      LineSearchInterpolationTypeToString(line_search_interpolation_type),
      LineSearchTypeToString(line_search_type));
  StringAppendF(&report,
                "Line search type %19s\n",
                line_search_type_string.c_str());
  StringAppendF(&report, "\n");

  StringAppendF(&report, "\nCost:\n");
  StringAppendF(&report, "Initial % 30e\n", initial_cost);
  if (termination_type != FAILURE && termination_type != USER_FAILURE) {
    StringAppendF(&report, "Final % 30e\n", final_cost);
    StringAppendF(&report, "Change % 30e\n", initial_cost - final_cost);
  }

  StringAppendF(&report,
                "\nMinimizer iterations % 16d\n",
                static_cast<int>(iterations.size()));

  StringAppendF(&report, "\nTime (in seconds):\n");
  StringAppendF(&report,
                "\n Cost evaluation %23.6f (%d)\n",
                cost_evaluation_time_in_seconds,
                num_cost_evaluations);
  StringAppendF(&report,
                " Gradient & cost evaluation %16.6f (%d)\n",
                gradient_evaluation_time_in_seconds,
                num_gradient_evaluations);
  StringAppendF(&report,
                " Polynomial minimization %17.6f\n",
                line_search_polynomial_minimization_time_in_seconds);
  StringAppendF(
      &report, "Total %25.6f\n\n", total_time_in_seconds);

  StringAppendF(&report,
                "Termination: %25s (%s)\n",
                TerminationTypeToString(termination_type),
                message.c_str());
  return report;
}

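// Usage sketch for the free Solve() function below (illustrative only, not
// compiled as part of this file). It relies on the public FirstOrderFunction /
// GradientProblem API; the Rosenbrock objective and starting point are example
// choices rather than anything this file defines.
//
//   class Rosenbrock final : public ceres::FirstOrderFunction {
//    public:
//     bool Evaluate(const double* parameters,
//                   double* cost,
//                   double* gradient) const override {
//       const double x = parameters[0];
//       const double y = parameters[1];
//       *cost = (1.0 - x) * (1.0 - x) + 100.0 * (y - x * x) * (y - x * x);
//       if (gradient != nullptr) {
//         gradient[0] = -2.0 * (1.0 - x) - 400.0 * x * (y - x * x);
//         gradient[1] = 200.0 * (y - x * x);
//       }
//       return true;
//     }
//     int NumParameters() const override { return 2; }
//   };
//
//   double parameters[2] = {-1.2, 1.0};
//   ceres::GradientProblem problem(new Rosenbrock);
//   ceres::GradientProblemSolver::Options options;
//   options.minimizer_progress_to_stdout = true;
//   ceres::GradientProblemSolver::Summary summary;
//   ceres::Solve(options, problem, parameters, &summary);
//   std::cout << summary.FullReport() << "\n";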
void Solve(const GradientProblemSolver::Options& options,
           const GradientProblem& problem,
           double* parameters,
           GradientProblemSolver::Summary* summary) {
  GradientProblemSolver solver;
  solver.Solve(options, problem, parameters, summary);
}

}  // namespace ceres