// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2015 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)
//         keir@google.com (Keir Mierle)

#ifndef CERES_INTERNAL_EVALUATOR_H_
#define CERES_INTERNAL_EVALUATOR_H_

#include <map>
#include <string>
#include <vector>

#include "ceres/context_impl.h"
#include "ceres/execution_summary.h"
#include "ceres/internal/port.h"
#include "ceres/types.h"

namespace ceres {

struct CRSMatrix;
class EvaluationCallback;

namespace internal {

class Program;
class SparseMatrix;

// The Evaluator interface offers a way to interact with a least squares cost
// function that is useful for an optimizer that wants to minimize the least
// squares objective. This insulates the optimizer from issues like Jacobian
// storage, parameterization, etc.
class Evaluator {
 public:
  virtual ~Evaluator();

  struct Options {
    int num_threads = 1;
    int num_eliminate_blocks = -1;
    LinearSolverType linear_solver_type = DENSE_QR;
    bool dynamic_sparsity = false;
    ContextImpl* context = nullptr;
    EvaluationCallback* evaluation_callback = nullptr;
  };

  static Evaluator* Create(const Options& options,
                           Program* program,
                           std::string* error);
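
  // A minimal construction sketch (illustrative only, not part of the
  // interface contract; the reduced_program and context variables, and the
  // particular option values, are assumptions made for the example):
  //
  //   Evaluator::Options options;
  //   options.linear_solver_type = SPARSE_NORMAL_CHOLESKY;
  //   options.num_threads = 4;
  //   options.context = context;  // a ContextImpl owned by the caller
  //
  //   std::string error;
  //   std::unique_ptr<Evaluator> evaluator(
  //       Evaluator::Create(options, reduced_program, &error));
  //   if (evaluator == nullptr) {
  //     // Creation failed; "error" describes why.
  //   }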

  // Build and return a sparse matrix for storing and working with the Jacobian
  // of the objective function. The Jacobian has dimensions NumResiduals() by
  // NumEffectiveParameters(), and is typically extremely sparse. Since the
  // sparsity pattern of the Jacobian remains constant over the lifetime of the
  // optimization problem, this method is used to instantiate a SparseMatrix
  // object with the appropriate sparsity structure (which can be an expensive
  // operation); the resulting matrix is then reused by the optimization
  // algorithm and the various linear solvers.
  //
  // The classes implementing this interface are expected to be aware of their
  // client's requirements for the kind of sparse matrix storage and layout
  // that is needed for an efficient implementation. For example,
  // CompressedRowOptimizationProblem creates a compressed row representation
  // of the Jacobian for use with CHOLMOD, whereas BlockOptimizationProblem
  // creates a BlockSparseMatrix representation of the Jacobian for use in
  // Schur complement based methods.
  virtual SparseMatrix* CreateJacobian() const = 0;
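
  // An illustrative sketch (not part of the interface contract; "evaluator"
  // is assumed to be an Evaluator obtained from Create() above):
  //
  //   std::unique_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());
  //   // The same matrix object is then reused for every Evaluate() call;
  //   // only its values change, never its sparsity structure.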

  // Options struct to control Evaluator::Evaluate().
  struct EvaluateOptions {
    // If false, the loss function correction is not applied to the
    // residual blocks.
    bool apply_loss_function = true;

    // If false, this evaluation point is the same as the last one.
    bool new_evaluation_point = true;
  };

  // Evaluate the cost function for the given state. Returns the cost,
  // residuals, and jacobian in the corresponding arguments. Both residuals and
  // jacobian are optional; to avoid computing them, pass NULL.
  //
  // If non-NULL, the Jacobian must have a suitable sparsity pattern; only the
  // values array of the jacobian is modified.
  //
  // state is an array of size NumParameters(), cost is a pointer to a single
  // double, and residuals is an array of doubles of size NumResiduals().
  // gradient, if non-NULL, is an array of size NumEffectiveParameters() that
  // receives the gradient of the objective at state.
  virtual bool Evaluate(const EvaluateOptions& evaluate_options,
                        const double* state,
                        double* cost,
                        double* residuals,
                        double* gradient,
                        SparseMatrix* jacobian) = 0;
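
  // A usage sketch (illustrative only; the vector sizes follow the comment
  // above, and "evaluator"/"jacobian" are assumed to come from Create() and
  // CreateJacobian() respectively):
  //
  //   std::vector<double> state(evaluator->NumParameters());
  //   std::vector<double> residuals(evaluator->NumResiduals());
  //   std::vector<double> gradient(evaluator->NumEffectiveParameters());
  //   double cost = 0.0;
  //
  //   EvaluateOptions evaluate_options;
  //   evaluate_options.new_evaluation_point = true;
  //   if (!evaluator->Evaluate(evaluate_options,
  //                            state.data(),
  //                            &cost,
  //                            residuals.data(),
  //                            gradient.data(),
  //                            jacobian.get())) {
  //     // Evaluation failed, e.g. a residual or jacobian could not be
  //     // computed at this state.
  //   }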

  // Variant of Evaluator::Evaluate where the user wishes to use the
  // default EvaluateOptions struct. This is mostly here as a
  // convenience method.
  bool Evaluate(const double* state,
                double* cost,
                double* residuals,
                double* gradient,
                SparseMatrix* jacobian) {
    return Evaluate(EvaluateOptions(),
                    state,
                    cost,
                    residuals,
                    gradient,
                    jacobian);
  }

  // Make a change delta (of size NumEffectiveParameters()) to state (of size
  // NumParameters()) and store the result in state_plus_delta.
  //
  // When no parameterizations are used, this is equivalent to
  //
  //   state_plus_delta[i] = state[i] + delta[i];
  //
  // however, the mapping is more complicated for parameterizations like
  // quaternions. This is the same as the "Plus()" operation in
  // local_parameterization.h, but operating over the entire state vector for a
  // problem.
  virtual bool Plus(const double* state,
                    const double* delta,
                    double* state_plus_delta) const = 0;
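
  // A minimal sketch of the sizes involved (illustrative only; it assumes a
  // problem whose single parameter block is a unit quaternion with a
  // 3-dimensional local parameterization, and an Evaluator* named
  // "evaluator"):
  //
  //   double state[4] = {1.0, 0.0, 0.0, 0.0};  // NumParameters() == 4
  //   double delta[3] = {0.1, 0.0, 0.0};       // NumEffectiveParameters() == 3
  //   double state_plus_delta[4];
  //   evaluator->Plus(state, delta, state_plus_delta);  // quaternion Plus()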

  // The number of parameters in the optimization problem.
  virtual int NumParameters() const = 0;

  // The effective number of parameters that the optimizer may adjust. Local
  // parameterizations on some of the parameter blocks can make this smaller
  // than NumParameters().
  virtual int NumEffectiveParameters() const = 0;
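
  // For example (an illustrative assumption, not something defined in this
  // header): a problem with one quaternion block (4 parameters, 3-dimensional
  // local parameterization) and one unconstrained 3-dimensional point block
  // has NumParameters() == 7 and NumEffectiveParameters() == 6.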

  // The number of residuals in the optimization problem.
  virtual int NumResiduals() const = 0;

  // The following method returns a copy instead of a reference so that the
  // base class implementation does not have to worry about lifetime issues.
  // Further, this call is not expected to be frequent or performance
  // sensitive.
  virtual std::map<std::string, CallStatistics> Statistics() const {
    return std::map<std::string, CallStatistics>();
  }
};

}  // namespace internal
}  // namespace ceres

#endif  // CERES_INTERNAL_EVALUATOR_H_