// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2012 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)

#ifndef CERES_INTERNAL_DOGLEG_STRATEGY_H_
#define CERES_INTERNAL_DOGLEG_STRATEGY_H_

#include "ceres/linear_solver.h"
#include "ceres/trust_region_strategy.h"

namespace ceres {
namespace internal {

// Dogleg step computation and trust region sizing strategy based
// on "Methods for Nonlinear Least Squares" by K. Madsen, H.B. Nielsen
// and O. Tingleff, available for download at
//
// http://www2.imm.dtu.dk/pubdb/views/edoc_download.php/3215/pdf/imm3215.pdf
//
// One minor modification is that instead of computing the pure
// Gauss-Newton step, we compute a regularized version of it. This is
// because the Jacobian is often rank-deficient and in such cases
// using a direct solver leads to numerical failure.
//
// If SUBSPACE is passed as the type argument to the constructor, the
// DoglegStrategy follows the approach of Shultz, Schnabel and Byrd,
// which finds the exact optimum of the local model over the
// two-dimensional subspace spanned by the two Dogleg vectors.
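//
// For reference, the traditional Dogleg step described in the report
// above chooses between three cases. This is only a sketch of the
// textbook method; ComputeTraditionalDoglegStep in the .cc file is
// the authoritative implementation:
//
//   if ||gauss_newton_step|| <= radius:
//     step = gauss_newton_step
//   else if ||cauchy_step|| >= radius:
//     step = (radius / ||cauchy_step||) * cauchy_step
//   else:
//     // Walk along the segment from the Cauchy point towards the
//     // Gauss-Newton point until the trust region boundary is hit:
//     step = cauchy_step + beta * (gauss_newton_step - cauchy_step),
//     with beta in (0, 1) chosen so that ||step|| = radius.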
class DoglegStrategy : public TrustRegionStrategy {
 public:
  explicit DoglegStrategy(const TrustRegionStrategy::Options& options);
  virtual ~DoglegStrategy() {}

  // TrustRegionStrategy interface
  virtual Summary ComputeStep(const PerSolveOptions& per_solve_options,
                              SparseMatrix* jacobian,
                              const double* residuals,
                              double* step);
  virtual void StepAccepted(double step_quality);
  virtual void StepRejected(double step_quality);
  virtual void StepIsInvalid();

  virtual double Radius() const;

  // These functions are predominantly for testing.
  Vector gradient() const { return gradient_; }
  Vector gauss_newton_step() const { return gauss_newton_step_; }
  Matrix subspace_basis() const { return subspace_basis_; }
  Vector subspace_g() const { return subspace_g_; }
  Matrix subspace_B() const { return subspace_B_; }

 private:
  typedef Eigen::Matrix<double, 2, 1, Eigen::DontAlign> Vector2d;
  typedef Eigen::Matrix<double, 2, 2, Eigen::DontAlign> Matrix2d;

  LinearSolver::Summary ComputeGaussNewtonStep(
      const PerSolveOptions& per_solve_options,
      SparseMatrix* jacobian,
      const double* residuals);
  void ComputeCauchyPoint(SparseMatrix* jacobian);
  void ComputeGradient(SparseMatrix* jacobian, const double* residuals);
  void ComputeTraditionalDoglegStep(double* step);
  bool ComputeSubspaceModel(SparseMatrix* jacobian);
  void ComputeSubspaceDoglegStep(double* step);

  bool FindMinimumOnTrustRegionBoundary(Vector2d* minimum) const;
  Vector MakePolynomialForBoundaryConstrainedProblem() const;
  Vector2d ComputeSubspaceStepFromRoot(double lambda) const;
  double EvaluateSubspaceModel(const Vector2d& x) const;

  LinearSolver* linear_solver_;
  double radius_;
  const double max_radius_;

  const double min_diagonal_;
  const double max_diagonal_;

  // mu is used to scale the diagonal matrix used to make the
  // Gauss-Newton solve full rank. In each solve, the strategy starts
  // out with mu = min_mu, and tries values up to max_mu. If the user
  // reports an invalid step, the value of mu_ is increased so that
  // the next solve starts with a stronger regularization.
  //
  // If a successful step is reported, then the value of mu_ is
  // decreased with a lower bound of min_mu_.
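  //
  // Concretely, the regularized solve has the Levenberg-Marquardt-like
  // form
  //
  //   (J^T J + mu D^T D) step = -J^T f,
  //
  // where D is a diagonal scaling derived from diagonal_ (see below),
  // clamped to [min_diagonal_, max_diagonal_]. This is only a sketch
  // of the idea; ComputeGaussNewtonStep in the .cc file is the
  // authoritative version of the scaling actually used.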
  double mu_;
  const double min_mu_;
  const double max_mu_;
  const double mu_increase_factor_;
  const double increase_threshold_;
  const double decrease_threshold_;

  Vector diagonal_;  // sqrt(diag(J^T J))
  Vector lm_diagonal_;

  Vector gradient_;
  Vector gauss_newton_step_;

  // cauchy_step = alpha * gradient
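  // (For the quadratic Gauss-Newton model, the minimizing step length
  // along the gradient direction is the textbook Cauchy point formula
  // alpha = ||g||^2 / ||J g||^2; see ComputeCauchyPoint in the .cc
  // file for the expression actually used.)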
  double alpha_;
  double dogleg_step_norm_;

  // When ComputeStep is called, reuse_ indicates whether the
  // Gauss-Newton and Cauchy steps from the last call to ComputeStep
  // can be reused or not.
  //
  // If the user called StepAccepted, then it is expected that the
  // user has recomputed the Jacobian matrix and a new Gauss-Newton
  // solve is needed, so reuse_ is set to false.
  //
  // If the user called StepRejected, then it is expected that the
  // user wants to solve the trust region problem with the same matrix
  // but a different trust region radius, and the Gauss-Newton and
  // Cauchy steps can be reused to compute the Dogleg step, so reuse_
  // is set to true.
  //
  // If the user called StepIsInvalid, then there was a numerical
  // problem with the step computed in the last call to ComputeStep,
  // the regularization used for the Gauss-Newton solve is increased,
  // and a new solve is done when ComputeStep is called again, so
  // reuse_ is set to false. (See the illustrative call sequence after
  // the class declaration below.)
  bool reuse_;

  // The dogleg type determines how the minimum of the local
  // quadratic model is found.
  DoglegType dogleg_type_;

  // If the type is SUBSPACE_DOGLEG, the two-dimensional
  // model 1/2 x^T B x + g^T x has to be computed and stored.
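  // Roughly speaking, B and g are the Gauss-Newton model matrix and
  // gradient projected onto an orthonormal basis of the subspace
  // spanned by the gradient and the Gauss-Newton step, i.e.
  //
  //   B = subspace_basis^T (J^T J) subspace_basis,
  //   g = subspace_basis^T gradient.
  //
  // This is only a sketch; ComputeSubspaceModel in the .cc file is
  // the authoritative construction, including the handling of the
  // case where the two vectors are (nearly) collinear and the
  // subspace degenerates to one dimension.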
  bool subspace_is_one_dimensional_;
  Matrix subspace_basis_;
  Vector2d subspace_g_;
  Matrix2d subspace_B_;
};
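
// A hypothetical sketch of how a trust region loop might drive this
// strategy. The names trust_region_options, per_solve_options,
// jacobian, residuals, step, step_is_valid and step_quality are
// placeholders, not part of this interface; the real driver lives in
// the trust region minimizer.
//
//   DoglegStrategy strategy(trust_region_options);
//   TrustRegionStrategy::Summary summary =
//       strategy.ComputeStep(per_solve_options, jacobian, residuals, step);
//   // Evaluate the step, e.g. the ratio of actual to predicted
//   // cost reduction, then report the outcome back to the strategy.
//   if (!step_is_valid) {
//     strategy.StepIsInvalid();            // Increase the regularization.
//   } else if (step_quality > 0.0) {
//     strategy.StepAccepted(step_quality); // Jacobian will be re-evaluated.
//   } else {
//     strategy.StepRejected(step_quality); // Retry with a smaller radius.
//   }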

}  // namespace internal
}  // namespace ceres

#endif  // CERES_INTERNAL_DOGLEG_STRATEGY_H_