// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2010, 2011, 2012 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: keir@google.com (Keir Mierle)
//
// Tests shared across evaluators. The tests try all combinations of linear
// solver and num_eliminate_blocks (for schur-based solvers).

#include "ceres/evaluator.h"

#include "ceres/casts.h"
#include "ceres/cost_function.h"
#include "ceres/crs_matrix.h"
#include "ceres/evaluator_test_utils.h"
#include "ceres/internal/eigen.h"
#include "ceres/internal/scoped_ptr.h"
#include "ceres/local_parameterization.h"
#include "ceres/problem_impl.h"
#include "ceres/program.h"
#include "ceres/sized_cost_function.h"
#include "ceres/sparse_matrix.h"
#include "ceres/stringprintf.h"
#include "ceres/types.h"
#include "gtest/gtest.h"

namespace ceres {
namespace internal {

// TODO(keir): Consider pushing this into a common test utils file.
template<int kFactor, int kNumResiduals,
         int N0 = 0, int N1 = 0, int N2 = 0, bool kSucceeds = true>
class ParameterIgnoringCostFunction
    : public SizedCostFunction<kNumResiduals, N0, N1, N2> {
  typedef SizedCostFunction<kNumResiduals, N0, N1, N2> Base;
 public:
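  // Evaluates to fixed residuals 1, 2, ..., kNumResiduals and a fixed
  // Jacobian, independent of the actual parameter values.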
  virtual bool Evaluate(double const* const* parameters,
                        double* residuals,
                        double** jacobians) const {
    for (int i = 0; i < Base::num_residuals(); ++i) {
      residuals[i] = i + 1;
    }
    if (jacobians) {
      for (int k = 0; k < Base::parameter_block_sizes().size(); ++k) {
        // The jacobians here are full sized, but they are transformed in the
        // evaluator into the "local" jacobian. In the tests, the "subset
        // constant" parameterization is used, which should pick out columns
        // from these jacobians. Put values in the jacobian that make this
        // obvious; in particular, make the jacobians like this:
        //
        //   1 2 3 4 ...
        //   1 2 3 4 ...   .*  kFactor
        //   1 2 3 4 ...
        //
        // where the multiplication by kFactor makes it easier to distinguish
        // between Jacobians of different residuals for the same parameter.
        if (jacobians[k] != NULL) {
          MatrixRef jacobian(jacobians[k],
                             Base::num_residuals(),
                             Base::parameter_block_sizes()[k]);
          for (int j = 0; j < Base::parameter_block_sizes()[k]; ++j) {
            jacobian.col(j).setConstant(kFactor * (j + 1));
          }
        }
      }
    }
    return kSucceeds;
  }
};

struct EvaluatorTestOptions {
  EvaluatorTestOptions(LinearSolverType linear_solver_type,
                       int num_eliminate_blocks,
                       bool dynamic_sparsity = false)
    : linear_solver_type(linear_solver_type),
      num_eliminate_blocks(num_eliminate_blocks),
      dynamic_sparsity(dynamic_sparsity) {}

  LinearSolverType linear_solver_type;
  int num_eliminate_blocks;
  bool dynamic_sparsity;
};

struct EvaluatorTest
    : public ::testing::TestWithParam<EvaluatorTestOptions> {
  Evaluator* CreateEvaluator(Program* program) {
    // This program comes straight from the ProblemImpl, and so has no
    // index/offset information yet; compute it here as required by the
    // evaluator implementations.
    program->SetParameterOffsetsAndIndex();

    if (VLOG_IS_ON(1)) {
      string report;
      StringAppendF(&report, "Creating evaluator with type: %d",
                    GetParam().linear_solver_type);
      if (GetParam().linear_solver_type == SPARSE_NORMAL_CHOLESKY) {
        StringAppendF(&report, ", dynamic_sparsity: %d",
                      GetParam().dynamic_sparsity);
      }
      StringAppendF(&report, " and num_eliminate_blocks: %d",
                    GetParam().num_eliminate_blocks);
      VLOG(1) << report;
    }
    Evaluator::Options options;
    options.linear_solver_type = GetParam().linear_solver_type;
    options.num_eliminate_blocks = GetParam().num_eliminate_blocks;
    options.dynamic_sparsity = GetParam().dynamic_sparsity;
    string error;
    return Evaluator::Create(options, program, &error);
  }

  void EvaluateAndCompare(ProblemImpl *problem,
                          int expected_num_rows,
                          int expected_num_cols,
                          double expected_cost,
                          const double* expected_residuals,
                          const double* expected_gradient,
                          const double* expected_jacobian) {
    scoped_ptr<Evaluator> evaluator(
        CreateEvaluator(problem->mutable_program()));
    int num_residuals = expected_num_rows;
    int num_parameters = expected_num_cols;

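    // Pre-fill the outputs with sentinel values so that any entries the
    // evaluator fails to overwrite are easy to spot.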
    double cost = -1;

    Vector residuals(num_residuals);
    residuals.setConstant(-2000);

    Vector gradient(num_parameters);
    gradient.setConstant(-3000);

    scoped_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());

    ASSERT_EQ(expected_num_rows, evaluator->NumResiduals());
    ASSERT_EQ(expected_num_cols, evaluator->NumEffectiveParameters());
    ASSERT_EQ(expected_num_rows, jacobian->num_rows());
    ASSERT_EQ(expected_num_cols, jacobian->num_cols());

    vector<double> state(evaluator->NumParameters());

    ASSERT_TRUE(evaluator->Evaluate(
          &state[0],
          &cost,
          expected_residuals != NULL ? &residuals[0]  : NULL,
          expected_gradient  != NULL ? &gradient[0]   : NULL,
          expected_jacobian  != NULL ? jacobian.get() : NULL));

    Matrix actual_jacobian;
    if (expected_jacobian != NULL) {
      jacobian->ToDenseMatrix(&actual_jacobian);
    }

    CompareEvaluations(expected_num_rows,
                       expected_num_cols,
                       expected_cost,
                       expected_residuals,
                       expected_gradient,
                       expected_jacobian,
                       cost,
                       &residuals[0],
                       &gradient[0],
                       actual_jacobian.data());
  }

  // Try all combinations of parameters for the evaluator.
  void CheckAllEvaluationCombinations(const ExpectedEvaluation &expected) {
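    // The three low bits of i select whether the residuals, the gradient, and
    // the Jacobian are requested, so all eight subsets of outputs are tested.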
    for (int i = 0; i < 8; ++i) {
      EvaluateAndCompare(&problem,
                         expected.num_rows,
                         expected.num_cols,
                         expected.cost,
                         (i & 1) ? expected.residuals : NULL,
                         (i & 2) ? expected.gradient  : NULL,
                         (i & 4) ? expected.jacobian  : NULL);
    }
  }

  // The values are ignored completely by the cost function.
  double x[2];
  double y[3];
  double z[4];

  ProblemImpl problem;
};

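// Fills every stored nonzero of the matrix with the given value. Used below to
// poison a Jacobian before evaluation so that stale entries are easy to spot.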
void SetSparseMatrixConstant(SparseMatrix* sparse_matrix, double value) {
  VectorRef(sparse_matrix->mutable_values(),
            sparse_matrix->num_nonzeros()).setConstant(value);
}

TEST_P(EvaluatorTest, SingleResidualProblem) {
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 3, 2, 3, 4>,
                           NULL,
                           x, y, z);

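  // With residuals (1, 2, 3) the cost is 0.5 * (1 + 4 + 9) = 7, and each
  // gradient entry is the corresponding Jacobian column dotted with the
  // residuals, e.g. 1 * 1 + 1 * 2 + 1 * 3 = 6 for the first entry of x.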
  ExpectedEvaluation expected = {
    // Rows/columns
    3, 9,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 6.0, 12.0,              // x
      6.0, 12.0, 18.0,        // y
      6.0, 12.0, 18.0, 24.0,  // z
    },
    // Jacobian
    //   x          y             z
    { 1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, SingleResidualProblemWithPermutedParameters) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);
  problem.AddParameterBlock(y,  3);
  problem.AddParameterBlock(z,  4);

  // Then use a cost function which is similar to the others, but swap around
  // the ordering of the parameters to the cost function. This shouldn't affect
  // the jacobian evaluation, but requires explicit handling in the evaluators.
  // At one point the compressed row evaluator had a bug that went undetected
  // for a long time, since by chance most users added parameters to the problem
  // in the same order that they occurred as parameters to a cost function.
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 3, 4, 3, 2>,
                           NULL,
                           z, y, x);

  ExpectedEvaluation expected = {
    // Rows/columns
    3, 9,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 6.0, 12.0,              // x
      6.0, 12.0, 18.0,        // y
      6.0, 12.0, 18.0, 24.0,  // z
    },
    // Jacobian
    //   x          y             z
    { 1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, SingleResidualProblemWithNuisanceParameters) {
  // These parameters are not used.
  double a[2];
  double b[1];
  double c[1];
  double d[3];

  // Add the parameters in a mixed order so the Jacobian is "checkered" with the
  // values from the other parameters.
  problem.AddParameterBlock(a, 2);
  problem.AddParameterBlock(x, 2);
  problem.AddParameterBlock(b, 1);
  problem.AddParameterBlock(y, 3);
  problem.AddParameterBlock(c, 1);
  problem.AddParameterBlock(z, 4);
  problem.AddParameterBlock(d, 3);

  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 3, 2, 3, 4>,
                           NULL,
                           x, y, z);

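  // The unused blocks contribute zero columns to the Jacobian and zero entries
  // to the gradient, giving 2 + 2 + 1 + 3 + 1 + 4 + 3 = 16 columns in total.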
  ExpectedEvaluation expected = {
    // Rows/columns
    3, 16,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 0.0, 0.0,               // a
      6.0, 12.0,              // x
      0.0,                    // b
      6.0, 12.0, 18.0,        // y
      0.0,                    // c
      6.0, 12.0, 18.0, 24.0,  // z
      0.0, 0.0, 0.0,          // d
    },
    // Jacobian
    //   a        x     b           y     c              z           d
    { 0, 0,    1, 2,    0,    1, 2, 3,    0,    1, 2, 3, 4,    0, 0, 0,
      0, 0,    1, 2,    0,    1, 2, 3,    0,    1, 2, 3, 4,    0, 0, 0,
      0, 0,    1, 2,    0,    1, 2, 3,    0,    1, 2, 3, 4,    0, 0, 0
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualProblem) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);
  problem.AddParameterBlock(y,  3);
  problem.AddParameterBlock(z,  4);

  // f(x, y) in R^2
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 2, 2, 3>,
                           NULL,
                           x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<2, 3, 2, 4>,
                           NULL,
                           x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<3, 4, 3, 4>,
                           NULL,
                           y, z);

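  // The gradient accumulates J^T r over all three residual blocks; for example
  // the first entry of x gets (1 + 2) from f plus 2 * (1 + 2 + 3) from g = 15.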
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 9,
    // Cost
    // f       g           h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,               // x
      33.0, 66.0, 99.0,         // y
      42.0, 84.0, 126.0, 168.0  // z
    },
    // Jacobian
    //                x        y           z
    {   /* f(x, y) */ 1, 2,    1, 2, 3,    0, 0, 0, 0,
                      1, 2,    1, 2, 3,    0, 0, 0, 0,

        /* g(x, z) */ 2, 4,    0, 0, 0,    2, 4, 6, 8,
                      2, 4,    0, 0, 0,    2, 4, 6, 8,
                      2, 4,    0, 0, 0,    2, 4, 6, 8,

        /* h(y, z) */ 0, 0,    3, 6, 9,    3, 6, 9, 12,
                      0, 0,    3, 6, 9,    3, 6, 9, 12,
                      0, 0,    3, 6, 9,    3, 6, 9, 12,
                      0, 0,    3, 6, 9,    3, 6, 9, 12
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualsWithLocalParameterizations) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);

  // Fix y's first dimension.
  vector<int> y_fixed;
  y_fixed.push_back(0);
  problem.AddParameterBlock(y, 3, new SubsetParameterization(3, y_fixed));

  // Fix z's second dimension.
  vector<int> z_fixed;
  z_fixed.push_back(1);
  problem.AddParameterBlock(z, 4, new SubsetParameterization(4, z_fixed));

  // f(x, y) in R^2
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 2, 2, 3>,
                           NULL,
                           x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<2, 3, 2, 4>,
                           NULL,
                           x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<3, 4, 3, 4>,
                           NULL,
                           y, z);

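  // Holding y[0] and z[1] constant removes the corresponding Jacobian columns
  // and gradient entries, leaving 2 + 2 + 3 = 7 effective parameters.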
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 7,
    // Cost
    // f       g           h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,         // x
      66.0, 99.0,         // y
      42.0, 126.0, 168.0  // z
    },
    // Jacobian
    //                x        y           z
    {   /* f(x, y) */ 1, 2,    2, 3,    0, 0, 0,
                      1, 2,    2, 3,    0, 0, 0,

        /* g(x, z) */ 2, 4,    0, 0,    2, 6, 8,
                      2, 4,    0, 0,    2, 6, 8,
                      2, 4,    0, 0,    2, 6, 8,

        /* h(y, z) */ 0, 0,    6, 9,    3, 9, 12,
                      0, 0,    6, 9,    3, 9, 12,
                      0, 0,    6, 9,    3, 9, 12,
                      0, 0,    6, 9,    3, 9, 12
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualProblemWithSomeConstantParameters) {
  // The values are ignored completely by the cost function.
  double x[2];
  double y[3];
  double z[4];

  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);
  problem.AddParameterBlock(y,  3);
  problem.AddParameterBlock(z,  4);

  // f(x, y) in R^2
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 2, 2, 3>,
                           NULL,
                           x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<2, 3, 2, 4>,
                           NULL,
                           x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<3, 4, 3, 4>,
                           NULL,
                           y, z);

  // For this test, "z" is constant.
  problem.SetParameterBlockConstant(z);

  // Create the reduced program which is missing the fixed "z" variable.
  // Normally, the preprocessing of the program that happens in solver_impl
  // takes care of this, but we don't want to invoke the solver here.
  Program reduced_program;
  vector<ParameterBlock*>* parameter_blocks =
      problem.mutable_program()->mutable_parameter_blocks();

  // "z" is the last parameter; save it for later and pop it off temporarily.
  // Note that "z" will still get read during evaluation, so it cannot be
  // deleted at this point.
  ParameterBlock* parameter_block_z = parameter_blocks->back();
  parameter_blocks->pop_back();

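  // With z removed from the program only the 5 columns for x and y remain;
  // g(x, z) and h(y, z) still contribute rows through x and y.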
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 5,
    // Cost
    // f       g           h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,        // x
      33.0, 66.0, 99.0,  // y
    },
    // Jacobian
    //                x        y
    {   /* f(x, y) */ 1, 2,    1, 2, 3,
                      1, 2,    1, 2, 3,

        /* g(x, z) */ 2, 4,    0, 0, 0,
                      2, 4,    0, 0, 0,
                      2, 4,    0, 0, 0,

        /* h(y, z) */ 0, 0,    3, 6, 9,
                      0, 0,    3, 6, 9,
                      0, 0,    3, 6, 9,
                      0, 0,    3, 6, 9
    }
  };
  CheckAllEvaluationCombinations(expected);

  // Restore parameter block z, so it will get freed in a consistent way.
  parameter_blocks->push_back(parameter_block_z);
}

TEST_P(EvaluatorTest, EvaluatorAbortsForResidualsThatFailToEvaluate) {
  // Switch the return value to failure.
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<20, 3, 2, 3, 4, false>, NULL, x, y, z);

  // The values are ignored.
  double state[9];

  scoped_ptr<Evaluator> evaluator(CreateEvaluator(problem.mutable_program()));
  scoped_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());
  double cost;
  EXPECT_FALSE(evaluator->Evaluate(state, &cost, NULL, NULL, NULL));
}

// Each EvaluatorTestOptions specifies the linear solver type, the number of
// eliminate blocks, and optionally whether dynamic sparsity is used. Changing
// num_eliminate_blocks only makes sense for the schur-based solvers.
//
// Try all values of num_eliminate_blocks that make sense given that the tests
// use at most 4 parameter blocks.
INSTANTIATE_TEST_CASE_P(
    LinearSolvers,
    EvaluatorTest,
    ::testing::Values(
      EvaluatorTestOptions(DENSE_QR, 0),
      EvaluatorTestOptions(DENSE_SCHUR, 0),
      EvaluatorTestOptions(DENSE_SCHUR, 1),
      EvaluatorTestOptions(DENSE_SCHUR, 2),
      EvaluatorTestOptions(DENSE_SCHUR, 3),
      EvaluatorTestOptions(DENSE_SCHUR, 4),
      EvaluatorTestOptions(SPARSE_SCHUR, 0),
      EvaluatorTestOptions(SPARSE_SCHUR, 1),
      EvaluatorTestOptions(SPARSE_SCHUR, 2),
      EvaluatorTestOptions(SPARSE_SCHUR, 3),
      EvaluatorTestOptions(SPARSE_SCHUR, 4),
      EvaluatorTestOptions(ITERATIVE_SCHUR, 0),
      EvaluatorTestOptions(ITERATIVE_SCHUR, 1),
      EvaluatorTestOptions(ITERATIVE_SCHUR, 2),
      EvaluatorTestOptions(ITERATIVE_SCHUR, 3),
      EvaluatorTestOptions(ITERATIVE_SCHUR, 4),
      EvaluatorTestOptions(SPARSE_NORMAL_CHOLESKY, 0, false),
      EvaluatorTestOptions(SPARSE_NORMAL_CHOLESKY, 0, true)));

// Simple cost function used to check if the evaluator is sensitive to
// state changes.
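// The residuals are (x1^2, x2^2) and the Jacobian is diag(2 * x1, 2 * x2).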
class ParameterSensitiveCostFunction : public SizedCostFunction<2, 2> {
 public:
  virtual bool Evaluate(double const* const* parameters,
                        double* residuals,
                        double** jacobians) const {
    double x1 = parameters[0][0];
    double x2 = parameters[0][1];
    residuals[0] = x1 * x1;
    residuals[1] = x2 * x2;

    if (jacobians != NULL) {
      double* jacobian = jacobians[0];
      if (jacobian != NULL) {
        jacobian[0] = 2.0 * x1;
        jacobian[1] = 0.0;
        jacobian[2] = 0.0;
        jacobian[3] = 2.0 * x2;
      }
    }
    return true;
  }
};

TEST(Evaluator, EvaluatorRespectsParameterChanges) {
  ProblemImpl problem;

  double x[2];
  x[0] = 1.0;
  x[1] = 1.0;

  problem.AddResidualBlock(new ParameterSensitiveCostFunction(), NULL, x);
  Program* program = problem.mutable_program();
  program->SetParameterOffsetsAndIndex();

  Evaluator::Options options;
  options.linear_solver_type = DENSE_QR;
  options.num_eliminate_blocks = 0;
  string error;
  scoped_ptr<Evaluator> evaluator(Evaluator::Create(options, program, &error));
  scoped_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());

  ASSERT_EQ(2, jacobian->num_rows());
  ASSERT_EQ(2, jacobian->num_cols());

  double state[2];
  state[0] = 2.0;
  state[1] = 3.0;

  // The original state of a residual block comes from the user's state, which
  // is (1.0, 1.0) here. The only way the following checks can see results
  // based on (2.0, 3.0) is if the evaluator respects the values passed in via
  // the state vector.
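  // With state (2, 3) the residuals are (4, 9), so the expected cost is
  // 0.5 * (16 + 81) = 48.5.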

  // Cost only; no residuals and no jacobian.
  {
    double cost = -1;
    ASSERT_TRUE(evaluator->Evaluate(state, &cost, NULL, NULL, NULL));
    EXPECT_EQ(48.5, cost);
  }

  // Cost and residuals, no jacobian.
  {
    double cost = -1;
    double residuals[2] = { -2, -2 };
    ASSERT_TRUE(evaluator->Evaluate(state, &cost, residuals, NULL, NULL));
    EXPECT_EQ(48.5, cost);
    EXPECT_EQ(4, residuals[0]);
    EXPECT_EQ(9, residuals[1]);
  }

  // Cost, residuals, and jacobian.
  {
    double cost = -1;
    double residuals[2] = { -2, -2 };
    SetSparseMatrixConstant(jacobian.get(), -1);
    ASSERT_TRUE(evaluator->Evaluate(state,
                                    &cost,
                                    residuals,
                                    NULL,
                                    jacobian.get()));
    EXPECT_EQ(48.5, cost);
    EXPECT_EQ(4, residuals[0]);
    EXPECT_EQ(9, residuals[1]);
    Matrix actual_jacobian;
    jacobian->ToDenseMatrix(&actual_jacobian);

    Matrix expected_jacobian(2, 2);
    expected_jacobian
        << 2 * state[0], 0,
           0, 2 * state[1];

    EXPECT_TRUE((actual_jacobian.array() == expected_jacobian.array()).all())
        << "Actual:\n" << actual_jacobian
        << "\nExpected:\n" << expected_jacobian;
  }
}

}  // namespace internal
}  // namespace ceres