blob: 2af44e1147336662b62c7d402d646e6a452b9914 [file] [log] [blame]
Austin Schuh70cc9552019-01-21 19:46:48 -08001.. highlight:: c++
2
3.. default-domain:: cpp
4
Austin Schuh3de38b02024-06-25 18:25:10 -07005.. cpp:namespace:: ceres
6
Austin Schuh70cc9552019-01-21 19:46:48 -08007.. _chapter-gradient_tutorial:
8
9==================================
10General Unconstrained Minimization
11==================================
12
Austin Schuh3de38b02024-06-25 18:25:10 -070013Ceres Solver besides being able to solve non-linear least squares
14problem can also solve general unconstrained problems using just their
15objective function value and gradients. In this chapter we will see
16how to do this.
Austin Schuh70cc9552019-01-21 19:46:48 -080017
18Rosenbrock's Function
19=====================
20
Austin Schuh3de38b02024-06-25 18:25:10 -070021Consider minimizing the famous `Rosenbrock's function
Austin Schuh70cc9552019-01-21 19:46:48 -080022<http://en.wikipedia.org/wiki/Rosenbrock_function>`_ [#f1]_.
23
Austin Schuh3de38b02024-06-25 18:25:10 -070024The simplest way to minimize is to define a templated functor to
25evaluate the objective value of this function and then use Ceres
26Solver's automatic differentiation to compute its derivatives.
27
28We begin by defining a templated functor and then using
29``AutoDiffFirstOrderFunction`` to construct an instance of the
30``FirstOrderFunction`` interface. This is the object that is
31responsible for computing the objective function value and the
32gradient (if required). This is the analog of the
33:class:`CostFunction` when defining non-linear least squares problems
34in Ceres.
Austin Schuh70cc9552019-01-21 19:46:48 -080035
36.. code::
37
Austin Schuh3de38b02024-06-25 18:25:10 -070038 // f(x,y) = (1-x)^2 + 100(y - x^2)^2;
39 struct Rosenbrock {
40 template <typename T>
41 bool operator()(const T* parameters, T* cost) const {
42 const T x = parameters[0];
43 const T y = parameters[1];
Austin Schuh70cc9552019-01-21 19:46:48 -080044 cost[0] = (1.0 - x) * (1.0 - x) + 100.0 * (y - x * x) * (y - x * x);
Austin Schuh70cc9552019-01-21 19:46:48 -080045 return true;
46 }
47
Austin Schuh3de38b02024-06-25 18:25:10 -070048 static ceres::FirstOrderFunction* Create() {
49 constexpr int kNumParameters = 2;
50 return new ceres::AutoDiffFirstOrderFunction<Rosenbrock, kNumParameters>();
51 }
Austin Schuh70cc9552019-01-21 19:46:48 -080052 };
53
54
55Minimizing it then is a straightforward matter of constructing a
56:class:`GradientProblem` object and calling :func:`Solve` on it.
57
58.. code::
59
60 double parameters[2] = {-1.2, 1.0};
61
Austin Schuh3de38b02024-06-25 18:25:10 -070062 ceres::GradientProblem problem(Rosenbrock::Create());
Austin Schuh70cc9552019-01-21 19:46:48 -080063
64 ceres::GradientProblemSolver::Options options;
65 options.minimizer_progress_to_stdout = true;
66 ceres::GradientProblemSolver::Summary summary;
67 ceres::Solve(options, problem, parameters, &summary);
68
69 std::cout << summary.FullReport() << "\n";
70
71Executing this code solves the problem using the limited memory
72`BFGS
73<http://en.wikipedia.org/wiki/Broyden%E2%80%93Fletcher%E2%80%93Goldfarb%E2%80%93Shanno_algorithm>`_
74algorithm.
75
76.. code-block:: bash
77
Austin Schuh3de38b02024-06-25 18:25:10 -070078 0: f: 2.420000e+01 d: 0.00e+00 g: 2.16e+02 h: 0.00e+00 s: 0.00e+00 e: 0 it: 1.19e-05 tt: 1.19e-05
79 1: f: 4.280493e+00 d: 1.99e+01 g: 1.52e+01 h: 2.01e-01 s: 8.62e-04 e: 2 it: 7.30e-05 tt: 1.72e-04
80 2: f: 3.571154e+00 d: 7.09e-01 g: 1.35e+01 h: 3.78e-01 s: 1.34e-01 e: 3 it: 1.60e-05 tt: 1.93e-04
81 3: f: 3.440869e+00 d: 1.30e-01 g: 1.73e+01 h: 1.36e-01 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 1.97e-04
82 4: f: 3.213597e+00 d: 2.27e-01 g: 1.55e+01 h: 1.06e-01 s: 4.59e-01 e: 1 it: 1.19e-06 tt: 2.00e-04
83 5: f: 2.839723e+00 d: 3.74e-01 g: 1.05e+01 h: 1.34e-01 s: 5.24e-01 e: 1 it: 9.54e-07 tt: 2.03e-04
84 6: f: 2.448490e+00 d: 3.91e-01 g: 1.29e+01 h: 3.04e-01 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 2.05e-04
85 7: f: 1.943019e+00 d: 5.05e-01 g: 4.00e+00 h: 8.81e-02 s: 7.43e-01 e: 1 it: 9.54e-07 tt: 2.08e-04
86 8: f: 1.731469e+00 d: 2.12e-01 g: 7.36e+00 h: 1.71e-01 s: 4.60e-01 e: 2 it: 2.15e-06 tt: 2.11e-04
87 9: f: 1.503267e+00 d: 2.28e-01 g: 6.47e+00 h: 8.66e-02 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 2.14e-04
88 10: f: 1.228331e+00 d: 2.75e-01 g: 2.00e+00 h: 7.70e-02 s: 7.90e-01 e: 1 it: 0.00e+00 tt: 2.16e-04
89 11: f: 1.016523e+00 d: 2.12e-01 g: 5.15e+00 h: 1.39e-01 s: 3.76e-01 e: 2 it: 1.91e-06 tt: 2.25e-04
90 12: f: 9.145773e-01 d: 1.02e-01 g: 6.74e+00 h: 7.98e-02 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 2.28e-04
91 13: f: 7.508302e-01 d: 1.64e-01 g: 3.88e+00 h: 5.76e-02 s: 4.93e-01 e: 1 it: 9.54e-07 tt: 2.30e-04
92 14: f: 5.832378e-01 d: 1.68e-01 g: 5.56e+00 h: 1.42e-01 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 2.33e-04
93 15: f: 3.969581e-01 d: 1.86e-01 g: 1.64e+00 h: 1.17e-01 s: 1.00e+00 e: 1 it: 1.19e-06 tt: 2.36e-04
94 16: f: 3.171557e-01 d: 7.98e-02 g: 3.84e+00 h: 1.18e-01 s: 3.97e-01 e: 2 it: 1.91e-06 tt: 2.39e-04
95 17: f: 2.641257e-01 d: 5.30e-02 g: 3.27e+00 h: 6.14e-02 s: 1.00e+00 e: 1 it: 1.19e-06 tt: 2.42e-04
96 18: f: 1.909730e-01 d: 7.32e-02 g: 5.29e-01 h: 8.55e-02 s: 6.82e-01 e: 1 it: 9.54e-07 tt: 2.45e-04
97 19: f: 1.472012e-01 d: 4.38e-02 g: 3.11e+00 h: 1.20e-01 s: 3.47e-01 e: 2 it: 1.91e-06 tt: 2.49e-04
98 20: f: 1.093558e-01 d: 3.78e-02 g: 2.97e+00 h: 8.43e-02 s: 1.00e+00 e: 1 it: 2.15e-06 tt: 2.52e-04
99 21: f: 6.710346e-02 d: 4.23e-02 g: 1.42e+00 h: 9.64e-02 s: 8.85e-01 e: 1 it: 8.82e-06 tt: 2.81e-04
100 22: f: 3.993377e-02 d: 2.72e-02 g: 2.30e+00 h: 1.29e-01 s: 4.63e-01 e: 2 it: 7.87e-06 tt: 2.96e-04
101 23: f: 2.911794e-02 d: 1.08e-02 g: 2.55e+00 h: 6.55e-02 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 3.00e-04
102 24: f: 1.457683e-02 d: 1.45e-02 g: 2.77e-01 h: 6.37e-02 s: 6.14e-01 e: 1 it: 1.19e-06 tt: 3.03e-04
103 25: f: 8.577515e-03 d: 6.00e-03 g: 2.86e+00 h: 1.40e-01 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 3.06e-04
104 26: f: 3.486574e-03 d: 5.09e-03 g: 1.76e-01 h: 1.23e-02 s: 1.00e+00 e: 1 it: 1.19e-06 tt: 3.09e-04
105 27: f: 1.257570e-03 d: 2.23e-03 g: 1.39e-01 h: 5.08e-02 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 3.12e-04
106 28: f: 2.783568e-04 d: 9.79e-04 g: 6.20e-01 h: 6.47e-02 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 3.15e-04
107 29: f: 2.533399e-05 d: 2.53e-04 g: 1.68e-02 h: 1.98e-03 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 3.17e-04
108 30: f: 7.591572e-07 d: 2.46e-05 g: 5.40e-03 h: 9.27e-03 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 3.20e-04
109 31: f: 1.902460e-09 d: 7.57e-07 g: 1.62e-03 h: 1.89e-03 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 3.23e-04
110 32: f: 1.003030e-12 d: 1.90e-09 g: 3.50e-05 h: 3.52e-05 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 3.26e-04
111 33: f: 4.835994e-17 d: 1.00e-12 g: 1.05e-07 h: 1.13e-06 s: 1.00e+00 e: 1 it: 1.19e-06 tt: 3.34e-04
112 34: f: 1.885250e-22 d: 4.84e-17 g: 2.69e-10 h: 1.45e-08 s: 1.00e+00 e: 1 it: 9.54e-07 tt: 3.37e-04
Austin Schuh70cc9552019-01-21 19:46:48 -0800113
Austin Schuh3de38b02024-06-25 18:25:10 -0700114 Solver Summary (v 2.2.0-eigen-(3.4.0)-lapack-suitesparse-(7.1.0)-metis-(5.1.0)-acceleratesparse-eigensparse)
Austin Schuh70cc9552019-01-21 19:46:48 -0800115
Austin Schuh3de38b02024-06-25 18:25:10 -0700116 Parameters 2
117 Line search direction LBFGS (20)
118 Line search type CUBIC WOLFE
Austin Schuh70cc9552019-01-21 19:46:48 -0800119
120
Austin Schuh3de38b02024-06-25 18:25:10 -0700121 Cost:
122 Initial 2.420000e+01
123 Final 1.955192e-27
124 Change 2.420000e+01
Austin Schuh70cc9552019-01-21 19:46:48 -0800125
Austin Schuh3de38b02024-06-25 18:25:10 -0700126 Minimizer iterations 36
Austin Schuh70cc9552019-01-21 19:46:48 -0800127
Austin Schuh3de38b02024-06-25 18:25:10 -0700128 Time (in seconds):
Austin Schuh70cc9552019-01-21 19:46:48 -0800129
Austin Schuh3de38b02024-06-25 18:25:10 -0700130 Cost evaluation 0.000000 (0)
131 Gradient & cost evaluation 0.000000 (44)
132 Polynomial minimization 0.000061
133 Total 0.000438
Austin Schuh70cc9552019-01-21 19:46:48 -0800134
Austin Schuh3de38b02024-06-25 18:25:10 -0700135 Termination: CONVERGENCE (Parameter tolerance reached. Relative step_norm: 1.890726e-11 <= 1.000000e-08.)
136
137 Initial x: -1.2 y: 1
138 Final x: 1 y: 1
139
140
141
142
143If you are unable to use automatic differentiation for some reason
144(say because you need to call an external library), then you can
145use numeric differentiation. In that case the functor is defined as
146follows [#f2]_.
147
148.. code::
149
150 // f(x,y) = (1-x)^2 + 100(y - x^2)^2;
151 struct Rosenbrock {
152 bool operator()(const double* parameters, double* cost) const {
153 const double x = parameters[0];
154 const double y = parameters[1];
155 cost[0] = (1.0 - x) * (1.0 - x) + 100.0 * (y - x * x) * (y - x * x);
156 return true;
157 }
158
159 static ceres::FirstOrderFunction* Create() {
160 constexpr int kNumParameters = 2;
161 return new ceres::NumericDiffFirstOrderFunction<Rosenbrock,
162 ceres::CENTRAL,
163 kNumParameters>();
164 }
165 };
166
167And finally, if you would rather compute the derivatives by hand (say
168because the size of the parameter vector is too large to be
169automatically differentiated), then you should define an instance of
170`FirstOrderFunction`, which is the analog of :class:`CostFunction` for
171non-linear least squares problems [#f3]_.
172
173.. code::
174
175 // f(x,y) = (1-x)^2 + 100(y - x^2)^2;
176 class Rosenbrock final : public ceres::FirstOrderFunction {
177 public:
178 bool Evaluate(const double* parameters,
179 double* cost,
180 double* gradient) const override {
181 const double x = parameters[0];
182 const double y = parameters[1];
183
184 cost[0] = (1.0 - x) * (1.0 - x) + 100.0 * (y - x * x) * (y - x * x);
185 if (gradient) {
186 gradient[0] = -2.0 * (1.0 - x) - 200.0 * (y - x * x) * 2.0 * x;
187 gradient[1] = 200.0 * (y - x * x);
188 }
189 return true;
190 }
191
192 int NumParameters() const override { return 2; }
193 };
Austin Schuh70cc9552019-01-21 19:46:48 -0800194
195.. rubric:: Footnotes
196
197.. [#f1] `examples/rosenbrock.cc
198 <https://ceres-solver.googlesource.com/ceres-solver/+/master/examples/rosenbrock.cc>`_
Austin Schuh3de38b02024-06-25 18:25:10 -0700199
200.. [#f2] `examples/rosenbrock_numeric_diff.cc
201 <https://ceres-solver.googlesource.com/ceres-solver/+/master/examples/rosenbrock_numeric_diff.cc>`_
202
203.. [#f3] `examples/rosenbrock_analytic_diff.cc
204 <https://ceres-solver.googlesource.com/ceres-solver/+/master/examples/rosenbrock_analytic_diff.cc>`_