// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2015 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)

#include "ceres/gradient_problem_solver.h"

#include <map>
#include <memory>
#include <string>

#include "ceres/callbacks.h"
#include "ceres/gradient_problem.h"
#include "ceres/gradient_problem_evaluator.h"
#include "ceres/internal/eigen.h"
#include "ceres/internal/port.h"
#include "ceres/map_util.h"
#include "ceres/minimizer.h"
#include "ceres/solver.h"
#include "ceres/solver_utils.h"
#include "ceres/stringprintf.h"
#include "ceres/types.h"
#include "ceres/wall_time.h"

namespace ceres {
using internal::StringPrintf;
using internal::StringAppendF;
using std::string;

namespace {

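// Maps the GradientProblemSolver options onto an equivalent Solver::Options:
// the minimizer type is forced to LINE_SEARCH and only the options that are
// meaningful for a line search minimizer are copied over.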
Solver::Options GradientProblemSolverOptionsToSolverOptions(
    const GradientProblemSolver::Options& options) {
#define COPY_OPTION(x) solver_options.x = options.x

  Solver::Options solver_options;
  solver_options.minimizer_type = LINE_SEARCH;
  COPY_OPTION(line_search_direction_type);
  COPY_OPTION(line_search_type);
  COPY_OPTION(nonlinear_conjugate_gradient_type);
  COPY_OPTION(max_lbfgs_rank);
  COPY_OPTION(use_approximate_eigenvalue_bfgs_scaling);
  COPY_OPTION(line_search_interpolation_type);
  COPY_OPTION(min_line_search_step_size);
  COPY_OPTION(line_search_sufficient_function_decrease);
  COPY_OPTION(max_line_search_step_contraction);
  COPY_OPTION(min_line_search_step_contraction);
  COPY_OPTION(max_num_line_search_step_size_iterations);
  COPY_OPTION(max_num_line_search_direction_restarts);
  COPY_OPTION(line_search_sufficient_curvature_decrease);
  COPY_OPTION(max_line_search_step_expansion);
  COPY_OPTION(max_num_iterations);
  COPY_OPTION(max_solver_time_in_seconds);
  COPY_OPTION(parameter_tolerance);
  COPY_OPTION(function_tolerance);
  COPY_OPTION(gradient_tolerance);
  COPY_OPTION(logging_type);
  COPY_OPTION(minimizer_progress_to_stdout);
  COPY_OPTION(callbacks);
  return solver_options;
#undef COPY_OPTION
}

}  // namespace

bool GradientProblemSolver::Options::IsValid(std::string* error) const {
  const Solver::Options solver_options =
      GradientProblemSolverOptionsToSolverOptions(*this);
  return solver_options.IsValid(error);
}

GradientProblemSolver::~GradientProblemSolver() {
}

void GradientProblemSolver::Solve(const GradientProblemSolver::Options& options,
                                  const GradientProblem& problem,
                                  double* parameters_ptr,
                                  GradientProblemSolver::Summary* summary) {
  using internal::CallStatistics;
  using internal::GradientProblemEvaluator;
  using internal::GradientProblemSolverStateUpdatingCallback;
  using internal::LoggingCallback;
  using internal::Minimizer;
  using internal::SetSummaryFinalCost;
  using internal::WallTimeInSeconds;

  double start_time = WallTimeInSeconds();

  CHECK(summary != nullptr);
  *summary = Summary();
  summary->num_parameters = problem.NumParameters();
  summary->num_local_parameters = problem.NumLocalParameters();
  summary->line_search_direction_type = options.line_search_direction_type;  // NOLINT
  summary->line_search_interpolation_type = options.line_search_interpolation_type;  // NOLINT
  summary->line_search_type = options.line_search_type;
  summary->max_lbfgs_rank = options.max_lbfgs_rank;
  summary->nonlinear_conjugate_gradient_type = options.nonlinear_conjugate_gradient_type;  // NOLINT

  // Check validity
  if (!options.IsValid(&summary->message)) {
    LOG(ERROR) << "Terminating: " << summary->message;
    return;
  }

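  // Minimization runs on a local copy of the parameters. The user-supplied
  // block is written back when the solve produces a usable solution, or on
  // every iteration if update_state_every_iteration is set (see the state
  // updating callback below).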
  VectorRef parameters(parameters_ptr, problem.NumParameters());
  Vector solution(problem.NumParameters());
  solution = parameters;

  // TODO(sameeragarwal): This is a bit convoluted, we should be able
  // to convert to minimizer options directly, but this will do for
  // now.
  Minimizer::Options minimizer_options =
      Minimizer::Options(GradientProblemSolverOptionsToSolverOptions(options));
  minimizer_options.evaluator.reset(new GradientProblemEvaluator(problem));

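  // Internal callbacks are prepended so that they run before any
  // user-supplied callbacks.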
  std::unique_ptr<IterationCallback> logging_callback;
  if (options.logging_type != SILENT) {
    logging_callback.reset(
        new LoggingCallback(LINE_SEARCH, options.minimizer_progress_to_stdout));
    minimizer_options.callbacks.insert(minimizer_options.callbacks.begin(),
                                       logging_callback.get());
  }

  std::unique_ptr<IterationCallback> state_updating_callback;
  if (options.update_state_every_iteration) {
    state_updating_callback.reset(
        new GradientProblemSolverStateUpdatingCallback(
            problem.NumParameters(), solution.data(), parameters_ptr));
    minimizer_options.callbacks.insert(minimizer_options.callbacks.begin(),
                                       state_updating_callback.get());
  }

  std::unique_ptr<Minimizer> minimizer(Minimizer::Create(LINE_SEARCH));

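  // There is no preprocessing or postprocessing phase for a gradient problem
  // solve, and the line search minimizer accumulates into the polynomial
  // minimization time, so initialize these fields to well defined values.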
  Solver::Summary solver_summary;
  solver_summary.fixed_cost = 0.0;
  solver_summary.preprocessor_time_in_seconds = 0.0;
  solver_summary.postprocessor_time_in_seconds = 0.0;
  solver_summary.line_search_polynomial_minimization_time_in_seconds = 0.0;

  minimizer->Minimize(minimizer_options, solution.data(), &solver_summary);

  summary->termination_type = solver_summary.termination_type;
  summary->message = solver_summary.message;
  summary->initial_cost = solver_summary.initial_cost;
  summary->final_cost = solver_summary.final_cost;
  summary->iterations = solver_summary.iterations;
  summary->line_search_polynomial_minimization_time_in_seconds =
      solver_summary.line_search_polynomial_minimization_time_in_seconds;

  if (summary->IsSolutionUsable()) {
    parameters = solution;
    SetSummaryFinalCost(summary);
  }

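  // The evaluator records per-call statistics under the residual/Jacobian
  // labels used by the least squares solver; for a gradient problem these
  // correspond to cost and gradient evaluations respectively.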
  const std::map<string, CallStatistics>& evaluator_statistics =
      minimizer_options.evaluator->Statistics();
  {
    const CallStatistics& call_stats = FindWithDefault(
        evaluator_statistics, "Evaluator::Residual", CallStatistics());
    summary->cost_evaluation_time_in_seconds = call_stats.time;
    summary->num_cost_evaluations = call_stats.calls;
  }

  {
    const CallStatistics& call_stats = FindWithDefault(
        evaluator_statistics, "Evaluator::Jacobian", CallStatistics());
    summary->gradient_evaluation_time_in_seconds = call_stats.time;
    summary->num_gradient_evaluations = call_stats.calls;
  }

  summary->total_time_in_seconds = WallTimeInSeconds() - start_time;
}

bool GradientProblemSolver::Summary::IsSolutionUsable() const {
  return internal::IsSolutionUsable(*this);
}

string GradientProblemSolver::Summary::BriefReport() const {
  return StringPrintf("Ceres GradientProblemSolver Report: "
                      "Iterations: %d, "
                      "Initial cost: %e, "
                      "Final cost: %e, "
                      "Termination: %s",
                      static_cast<int>(iterations.size()),
                      initial_cost,
                      final_cost,
                      TerminationTypeToString(termination_type));
}

string GradientProblemSolver::Summary::FullReport() const {
  using internal::VersionString;

  string report = string("\nSolver Summary (v " + VersionString() + ")\n\n");

  StringAppendF(&report, "Parameters % 25d\n", num_parameters);
  if (num_local_parameters != num_parameters) {
    StringAppendF(&report, "Local parameters % 25d\n",
                  num_local_parameters);
  }

  string line_search_direction_string;
  if (line_search_direction_type == LBFGS) {
    line_search_direction_string = StringPrintf("LBFGS (%d)", max_lbfgs_rank);
  } else if (line_search_direction_type == NONLINEAR_CONJUGATE_GRADIENT) {
    line_search_direction_string =
        NonlinearConjugateGradientTypeToString(
            nonlinear_conjugate_gradient_type);
  } else {
    line_search_direction_string =
        LineSearchDirectionTypeToString(line_search_direction_type);
  }

  StringAppendF(&report, "Line search direction %19s\n",
                line_search_direction_string.c_str());

  const string line_search_type_string =
      StringPrintf("%s %s",
                   LineSearchInterpolationTypeToString(
                       line_search_interpolation_type),
                   LineSearchTypeToString(line_search_type));
  StringAppendF(&report, "Line search type %19s\n",
                line_search_type_string.c_str());
  StringAppendF(&report, "\n");

  StringAppendF(&report, "\nCost:\n");
  StringAppendF(&report, "Initial % 30e\n", initial_cost);
  if (termination_type != FAILURE &&
      termination_type != USER_FAILURE) {
    StringAppendF(&report, "Final % 30e\n", final_cost);
    StringAppendF(&report, "Change % 30e\n",
                  initial_cost - final_cost);
  }

  StringAppendF(&report, "\nMinimizer iterations % 16d\n",
                static_cast<int>(iterations.size()));

  StringAppendF(&report, "\nTime (in seconds):\n");
  StringAppendF(&report, "\n Cost evaluation %23.6f (%d)\n",
                cost_evaluation_time_in_seconds,
                num_cost_evaluations);
  StringAppendF(&report, " Gradient & cost evaluation %16.6f (%d)\n",
                gradient_evaluation_time_in_seconds,
                num_gradient_evaluations);
  StringAppendF(&report, " Polynomial minimization %17.6f\n",
                line_search_polynomial_minimization_time_in_seconds);
  StringAppendF(&report, "Total %25.6f\n\n",
                total_time_in_seconds);

  StringAppendF(&report, "Termination: %25s (%s)\n",
                TerminationTypeToString(termination_type), message.c_str());
  return report;
}

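// Illustrative usage sketch (not part of this file): minimizing a caller
// defined first order function via the convenience Solve() overload below.
// "Rosenbrock" stands in for a hypothetical ceres::FirstOrderFunction
// implementation provided by the caller; GradientProblem takes ownership of
// the pointer passed to it.
//
//   ceres::GradientProblemSolver::Options options;
//   options.minimizer_progress_to_stdout = true;
//
//   ceres::GradientProblem problem(new Rosenbrock);
//   double parameters[2] = {-1.2, 1.0};
//
//   ceres::GradientProblemSolver::Summary summary;
//   ceres::Solve(options, problem, parameters, &summary);
//   std::cout << summary.FullReport() << "\n";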
void Solve(const GradientProblemSolver::Options& options,
           const GradientProblem& problem,
           double* parameters,
           GradientProblemSolver::Summary* summary) {
  GradientProblemSolver solver;
  solver.Solve(options, problem, parameters, summary);
}

}  // namespace ceres