// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2010, 2011, 2012 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)
//         keir@google.com (Keir Mierle)

#ifndef CERES_INTERNAL_EVALUATOR_H_
#define CERES_INTERNAL_EVALUATOR_H_

#include <map>
#include <string>
#include <vector>

#include "ceres/execution_summary.h"
#include "ceres/internal/port.h"
#include "ceres/types.h"

namespace ceres {

struct CRSMatrix;

namespace internal {

class Program;
class SparseMatrix;

// The Evaluator interface offers a way to interact with a least squares cost
// function that is useful for an optimizer that wants to minimize the least
// squares objective. This insulates the optimizer from issues like Jacobian
// storage, parameterization, etc.
class Evaluator {
 public:
  virtual ~Evaluator();

  struct Options {
    Options()
        : num_threads(1),
          num_eliminate_blocks(-1),
          linear_solver_type(DENSE_QR),
          dynamic_sparsity(false) {}

    int num_threads;
    int num_eliminate_blocks;
    LinearSolverType linear_solver_type;
    bool dynamic_sparsity;
  };

  static Evaluator* Create(const Options& options,
                           Program* program,
                           string* error);

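  // Example (an illustrative sketch only; "program" and "error" below are
  // hypothetical locals, not part of this interface):
  //
  //   Evaluator::Options options;
  //   options.linear_solver_type = SPARSE_NORMAL_CHOLESKY;
  //   options.num_eliminate_blocks = 0;
  //   string error;
  //   Evaluator* evaluator = Evaluator::Create(options, program, &error);
  //   if (evaluator == NULL) {
  //     // Creation failed (illustrative handling); "error" describes why.
  //   }
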
  // This is used for computing the cost, residual and Jacobian for
  // returning to the user. For actually solving the optimization
  // problem, the optimization algorithm uses the ProgramEvaluator
  // objects directly.
  //
  // The residuals, gradient and jacobian pointers can be NULL, in
  // which case they will not be evaluated. cost cannot be NULL.
  //
  // The parallelism of the evaluator is controlled by num_threads; it
  // should be at least 1.
  //
  // Note that this function does not take a parameter vector as
  // input. The parameter blocks are evaluated on the values contained
  // in the arrays pointed to by their user_state pointers.
  //
  // Also worth noting is that this function mutates program by
  // calling Program::SetParameterOffsetsAndIndex() on it so that an
  // evaluator object can be constructed.
  static bool Evaluate(Program* program,
                       int num_threads,
                       double* cost,
                       vector<double>* residuals,
                       vector<double>* gradient,
                       CRSMatrix* jacobian);

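  // Example (an illustrative sketch only; "program" below is a hypothetical
  // pointer to an already constructed Program):
  //
  //   double cost = 0.0;
  //   vector<double> residuals;
  //   if (!Evaluator::Evaluate(program, 1, &cost, &residuals, NULL, NULL)) {
  //     // Evaluation failed (illustrative handling).
  //   }
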
  // Build and return a sparse matrix for storing and working with the Jacobian
  // of the objective function. The jacobian has dimensions NumResiduals() by
  // NumEffectiveParameters(), and is typically extremely sparse. Since the
  // sparsity pattern of the Jacobian remains constant over the lifetime of the
  // optimization problem, this method is used to instantiate a SparseMatrix
  // object with the appropriate sparsity structure (which can be an expensive
  // operation) and then reused by the optimization algorithm and the various
  // linear solvers.
  //
  // It is expected that the classes implementing this interface will be aware
  // of their client's requirements for the kind of sparse matrix storage and
  // layout that is needed for an efficient implementation. For example
  // CompressedRowOptimizationProblem creates a compressed row representation
  // of the jacobian for use with CHOLMOD, whereas BlockOptimizationProblem
  // creates a BlockSparseMatrix representation of the jacobian for use in the
  // Schur complement based methods.
  virtual SparseMatrix* CreateJacobian() const = 0;
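
  // Example (an illustrative sketch only; "evaluator" is a hypothetical
  // Evaluator instance): the matrix is created once and its values array is
  // then overwritten by each subsequent Evaluate() call.
  //
  //   SparseMatrix* jacobian = evaluator->CreateJacobian();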

  // Options struct to control Evaluator::Evaluate().
  struct EvaluateOptions {
    EvaluateOptions()
        : apply_loss_function(true) {
    }

    // If false, the loss function correction is not applied to the
    // residual blocks.
    bool apply_loss_function;
  };

  // Evaluate the cost function for the given state. Returns the cost,
  // residuals, and jacobian in the corresponding arguments. The residuals,
  // gradient and jacobian are optional; to avoid computing them, pass NULL.
  //
  // If non-NULL, the jacobian must have a suitable sparsity pattern; only the
  // values array of the jacobian is modified.
  //
  // state is an array of size NumParameters(), cost is a pointer to a single
  // double, and residuals is an array of doubles of size NumResiduals().
  virtual bool Evaluate(const EvaluateOptions& evaluate_options,
                        const double* state,
                        double* cost,
                        double* residuals,
                        double* gradient,
                        SparseMatrix* jacobian) = 0;

  // Variant of Evaluator::Evaluate where the user wishes to use the
  // default EvaluateOptions struct. This is mostly here as a
  // convenience method.
  bool Evaluate(const double* state,
                double* cost,
                double* residuals,
                double* gradient,
                SparseMatrix* jacobian) {
    return Evaluate(EvaluateOptions(),
                    state,
                    cost,
                    residuals,
                    gradient,
                    jacobian);
  }

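  // Example (an illustrative sketch only; "evaluator", "state", "residuals"
  // and "jacobian" are hypothetical, pre-allocated objects of the sizes
  // described above):
  //
  //   double cost = 0.0;
  //   if (!evaluator->Evaluate(state, &cost, residuals, NULL, jacobian)) {
  //     // Evaluation failed (illustrative handling); outputs are not valid.
  //   }
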
  // Make a change delta (of size NumEffectiveParameters()) to state (of size
  // NumParameters()) and store the result in state_plus_delta.
  //
  // When no local parameterizations are used, this is equivalent to
  //
  //   state_plus_delta[i] = state[i] + delta[i];
  //
  // however, the mapping is more complicated in the case of parameterizations
  // like quaternions. This is the same as the "Plus()" operation in
  // local_parameterization.h, but operating over the entire state vector for a
  // problem.
  virtual bool Plus(const double* state,
                    const double* delta,
                    double* state_plus_delta) const = 0;

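  // Example (illustrative; "evaluator" is a hypothetical Evaluator instance):
  // for a problem with a single quaternion parameter block using a quaternion
  // local parameterization, state and state_plus_delta have size
  // NumParameters() == 4, while delta has size NumEffectiveParameters() == 3.
  //
  //   evaluator->Plus(state, delta, state_plus_delta);
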
  // The number of parameters in the optimization problem.
  virtual int NumParameters() const = 0;

  // The effective number of parameters that the optimizer may adjust. This
  // may be smaller than NumParameters() when local parameterizations are used
  // on some of the parameter blocks.
  virtual int NumEffectiveParameters() const = 0;

  // The number of residuals in the optimization problem.
  virtual int NumResiduals() const = 0;

  // The following two methods return copies instead of references so
  // that the base class implementation does not have to worry about
  // lifetime issues. Further, these calls are not expected to be
  // frequent or performance sensitive.
  virtual map<string, int> CallStatistics() const {
    return map<string, int>();
  }

  virtual map<string, double> TimeStatistics() const {
    return map<string, double>();
  }
};

}  // namespace internal
}  // namespace ceres

#endif  // CERES_INTERNAL_EVALUATOR_H_