evaluator_test.cc revision 0ae28bd5885b5daa526898fcf7c323dc2c3e1963
// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2010, 2011, 2012 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: keir@google.com (Keir Mierle)
//
// Tests shared across evaluators. The tests try all combinations of linear
// solver and num_eliminate_blocks (for Schur-based solvers).

#include "ceres/evaluator.h"

#include "ceres/casts.h"
#include "ceres/cost_function.h"
#include "ceres/crs_matrix.h"
#include "ceres/internal/eigen.h"
#include "ceres/internal/scoped_ptr.h"
#include "ceres/local_parameterization.h"
#include "ceres/problem_impl.h"
#include "ceres/program.h"
#include "ceres/sized_cost_function.h"
#include "ceres/sparse_matrix.h"
#include "ceres/types.h"
#include "gtest/gtest.h"

namespace ceres {
namespace internal {

// TODO(keir): Consider pushing this into a common test utils file.
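// A cost function that ignores its parameter values entirely: it always
// returns residuals (1, 2, ..., kNumResiduals), and jacobian blocks whose
// column j is constant and equal to kFactor * (j + 1). For example, the
// instantiation ParameterIgnoringCostFunction<2, 3, 2, 4> used for g(x, z)
// below yields residuals (1, 2, 3), a 3x2 jacobian block with columns (2, 4),
// and a 3x4 jacobian block with columns (2, 4, 6, 8).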
template<int kFactor, int kNumResiduals,
         int N0 = 0, int N1 = 0, int N2 = 0, bool kSucceeds = true>
class ParameterIgnoringCostFunction
    : public SizedCostFunction<kNumResiduals, N0, N1, N2> {
  typedef SizedCostFunction<kNumResiduals, N0, N1, N2> Base;
 public:
  virtual bool Evaluate(double const* const* parameters,
                        double* residuals,
                        double** jacobians) const {
    for (int i = 0; i < Base::num_residuals(); ++i) {
      residuals[i] = i + 1;
    }
    if (jacobians) {
      for (int k = 0; k < Base::parameter_block_sizes().size(); ++k) {
        // The jacobians here are full sized, but they are transformed in the
        // evaluator into the "local" jacobian. In the tests, the "subset
        // constant" parameterization is used, which should pick out columns
        // from these jacobians. Put values in the jacobian that make this
        // obvious; in particular, make the jacobians like this:
        //
        //   1 2 3 4 ...
        //   1 2 3 4 ...   .*  kFactor
        //   1 2 3 4 ...
        //
        // where the multiplication by kFactor makes it easier to distinguish
        // between Jacobians of different residuals for the same parameter.
        if (jacobians[k] != NULL) {
          MatrixRef jacobian(jacobians[k],
                             Base::num_residuals(),
                             Base::parameter_block_sizes()[k]);
          for (int j = 0; j < Base::parameter_block_sizes()[k]; ++j) {
            jacobian.col(j).setConstant(kFactor * (j + 1));
          }
        }
      }
    }
    return kSucceeds;
  }
};

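// Expected results of a single evaluation. The fields are related by the
// least squares identities exercised throughout these tests:
//
//   cost     = 1/2 * ||residuals||^2
//   gradient = jacobian^T * residuals
//
// with the jacobian written out densely, one row per residual.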
struct ExpectedEvaluation {
  int num_rows;
  int num_cols;
  double cost;
  const double residuals[50];
  const double gradient[50];
  const double jacobian[200];
};

void CompareEvaluations(int expected_num_rows,
                        int expected_num_cols,
                        double expected_cost,
                        const double* expected_residuals,
                        const double* expected_gradient,
                        const double* expected_jacobian,
                        const double actual_cost,
                        const double* actual_residuals,
                        const double* actual_gradient,
                        const double* actual_jacobian) {
  EXPECT_EQ(expected_cost, actual_cost);

  if (expected_residuals != NULL) {
    ConstVectorRef expected_residuals_vector(expected_residuals,
                                             expected_num_rows);
    ConstVectorRef actual_residuals_vector(actual_residuals,
                                           expected_num_rows);
    EXPECT_TRUE((actual_residuals_vector.array() ==
                 expected_residuals_vector.array()).all())
        << "Actual:\n" << actual_residuals_vector
        << "\nExpected:\n" << expected_residuals_vector;
  }

  if (expected_gradient != NULL) {
    ConstVectorRef expected_gradient_vector(expected_gradient,
                                            expected_num_cols);
    ConstVectorRef actual_gradient_vector(actual_gradient,
                                          expected_num_cols);

    EXPECT_TRUE((actual_gradient_vector.array() ==
                 expected_gradient_vector.array()).all())
        << "Actual:\n" << actual_gradient_vector.transpose()
        << "\nExpected:\n" << expected_gradient_vector.transpose();
  }

  if (expected_jacobian != NULL) {
    ConstMatrixRef expected_jacobian_matrix(expected_jacobian,
                                            expected_num_rows,
                                            expected_num_cols);
    ConstMatrixRef actual_jacobian_matrix(actual_jacobian,
                                          expected_num_rows,
                                          expected_num_cols);
    EXPECT_TRUE((actual_jacobian_matrix.array() ==
                 expected_jacobian_matrix.array()).all())
        << "Actual:\n" << actual_jacobian_matrix
        << "\nExpected:\n" << expected_jacobian_matrix;
  }
}


struct EvaluatorTest
    : public ::testing::TestWithParam<pair<LinearSolverType, int> > {
  Evaluator* CreateEvaluator(Program* program) {
    // This program is straight from the ProblemImpl, and so has no index/offset
    // yet; compute it here as required by the evaluator implementations.
    program->SetParameterOffsetsAndIndex();

    VLOG(1) << "Creating evaluator with type: " << GetParam().first
            << " and num_eliminate_blocks: " << GetParam().second;
    Evaluator::Options options;
    options.linear_solver_type = GetParam().first;
    options.num_eliminate_blocks = GetParam().second;
    string error;
    return Evaluator::Create(options, program, &error);
  }

  void EvaluateAndCompare(ProblemImpl *problem,
                          int expected_num_rows,
                          int expected_num_cols,
                          double expected_cost,
                          const double* expected_residuals,
                          const double* expected_gradient,
                          const double* expected_jacobian) {
    scoped_ptr<Evaluator> evaluator(
        CreateEvaluator(problem->mutable_program()));
    int num_residuals = expected_num_rows;
    int num_parameters = expected_num_cols;

    double cost = -1;

    Vector residuals(num_residuals);
    residuals.setConstant(-2000);

    Vector gradient(num_parameters);
    gradient.setConstant(-3000);

    scoped_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());

    ASSERT_EQ(expected_num_rows, evaluator->NumResiduals());
    ASSERT_EQ(expected_num_cols, evaluator->NumEffectiveParameters());
    ASSERT_EQ(expected_num_rows, jacobian->num_rows());
    ASSERT_EQ(expected_num_cols, jacobian->num_cols());

    vector<double> state(evaluator->NumParameters());

    ASSERT_TRUE(evaluator->Evaluate(
          &state[0],
          &cost,
          expected_residuals != NULL ? &residuals[0]  : NULL,
          expected_gradient  != NULL ? &gradient[0]   : NULL,
          expected_jacobian  != NULL ? jacobian.get() : NULL));

    Matrix actual_jacobian;
    if (expected_jacobian != NULL) {
      jacobian->ToDenseMatrix(&actual_jacobian);
    }

    CompareEvaluations(expected_num_rows,
                       expected_num_cols,
                       expected_cost,
                       expected_residuals,
                       expected_gradient,
                       expected_jacobian,
                       cost,
                       &residuals[0],
                       &gradient[0],
                       actual_jacobian.data());
  }

  // Try all combinations of parameters for the evaluator.
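  // The three low-order bits of the loop index select which of the residual,
  // gradient, and jacobian outputs are requested, so all 2^3 combinations of
  // requested outputs get exercised.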
  void CheckAllEvaluationCombinations(const ExpectedEvaluation &expected) {
    for (int i = 0; i < 8; ++i) {
      EvaluateAndCompare(&problem,
                         expected.num_rows,
                         expected.num_cols,
                         expected.cost,
                         (i & 1) ? expected.residuals : NULL,
                         (i & 2) ? expected.gradient  : NULL,
                         (i & 4) ? expected.jacobian  : NULL);
    }
  }

  // The values are ignored completely by the cost function.
  double x[2];
  double y[3];
  double z[4];

  ProblemImpl problem;
};

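// Overwrite every stored value of a sparse matrix with the given constant.
// The test below fills the jacobian with -1 before evaluation, so that any
// entries not written by Evaluate would show up in the comparison.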
void SetSparseMatrixConstant(SparseMatrix* sparse_matrix, double value) {
  VectorRef(sparse_matrix->mutable_values(),
            sparse_matrix->num_nonzeros()).setConstant(value);
}

TEST_P(EvaluatorTest, SingleResidualProblem) {
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 3, 2, 3, 4>,
                           NULL,
                           x, y, z);

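  // With kFactor = 1, the residuals are (1, 2, 3), so
  //   cost = 0.5 * (1 + 4 + 9) = 7,
  // and since column j of each jacobian block is the constant (j + 1),
  //   gradient = jacobian^T * residuals
  // has entries (j + 1) * (1 + 2 + 3) = 6 * (j + 1) within each block.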
  ExpectedEvaluation expected = {
    // Rows/columns
    3, 9,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 6.0, 12.0,              // x
      6.0, 12.0, 18.0,        // y
      6.0, 12.0, 18.0, 24.0,  // z
    },
    // Jacobian
    //   x          y             z
    { 1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, SingleResidualProblemWithPermutedParameters) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);
  problem.AddParameterBlock(y,  3);
  problem.AddParameterBlock(z,  4);

  // Then use a cost function which is similar to the others, but swap around
  // the ordering of the parameters to the cost function. This shouldn't affect
  // the jacobian evaluation, but requires explicit handling in the evaluators.
  // At one point the compressed row evaluator had a bug that went undetected
  // for a long time, since by chance most users added parameters to the problem
  // in the same order that they occurred as parameters to a cost function.
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 3, 4, 3, 2>,
                           NULL,
                           z, y, x);

  ExpectedEvaluation expected = {
    // Rows/columns
    3, 9,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 6.0, 12.0,              // x
      6.0, 12.0, 18.0,        // y
      6.0, 12.0, 18.0, 24.0,  // z
    },
    // Jacobian
    //   x          y             z
    { 1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4,
      1, 2,   1, 2, 3,   1, 2, 3, 4
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, SingleResidualProblemWithNuisanceParameters) {
  // These parameters are not used.
  double a[2];
  double b[1];
  double c[1];
  double d[3];

  // Add the parameters in a mixed order so the Jacobian is "checkered" with the
  // values from the other parameters.
  problem.AddParameterBlock(a, 2);
  problem.AddParameterBlock(x, 2);
  problem.AddParameterBlock(b, 1);
  problem.AddParameterBlock(y, 3);
  problem.AddParameterBlock(c, 1);
  problem.AddParameterBlock(z, 4);
  problem.AddParameterBlock(d, 3);

  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 3, 2, 3, 4>,
                           NULL,
                           x, y, z);

  ExpectedEvaluation expected = {
    // Rows/columns
    3, 16,
    // Cost
    7.0,
    // Residuals
    { 1.0, 2.0, 3.0 },
    // Gradient
    { 0.0, 0.0,               // a
      6.0, 12.0,              // x
      0.0,                    // b
      6.0, 12.0, 18.0,        // y
      0.0,                    // c
      6.0, 12.0, 18.0, 24.0,  // z
      0.0, 0.0, 0.0,          // d
    },
    // Jacobian
    //   a        x     b           y     c              z           d
    { 0, 0,    1, 2,    0,    1, 2, 3,    0,    1, 2, 3, 4,    0, 0, 0,
      0, 0,    1, 2,    0,    1, 2, 3,    0,    1, 2, 3, 4,    0, 0, 0,
      0, 0,    1, 2,    0,    1, 2, 3,    0,    1, 2, 3, 4,    0, 0, 0
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualProblem) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);
  problem.AddParameterBlock(y,  3);
  problem.AddParameterBlock(z,  4);

  // f(x, y) in R^2
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 2, 2, 3>,
                           NULL,
                           x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<2, 3, 2, 4>,
                           NULL,
                           x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<3, 4, 3, 4>,
                           NULL,
                           y, z);

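  // The gradient is the sum of jacobian^T * residuals over the three residual
  // blocks. For x, f contributes (1, 2) * (1 + 2) = (3, 6) and g contributes
  // (2, 4) * (1 + 2 + 3) = (12, 24), giving (15, 30); the y and z entries
  // accumulate analogously from f, h and g, h respectively.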
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 9,
    // Cost
    // f       g           h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,               // x
      33.0, 66.0, 99.0,         // y
      42.0, 84.0, 126.0, 168.0  // z
    },
    // Jacobian
    //                x        y           z
    {   /* f(x, y) */ 1, 2,    1, 2, 3,    0, 0, 0, 0,
                      1, 2,    1, 2, 3,    0, 0, 0, 0,

        /* g(x, z) */ 2, 4,    0, 0, 0,    2, 4, 6, 8,
                      2, 4,    0, 0, 0,    2, 4, 6, 8,
                      2, 4,    0, 0, 0,    2, 4, 6, 8,

        /* h(y, z) */ 0, 0,    3, 6, 9,    3, 6, 9, 12,
                      0, 0,    3, 6, 9,    3, 6, 9, 12,
                      0, 0,    3, 6, 9,    3, 6, 9, 12,
                      0, 0,    3, 6, 9,    3, 6, 9, 12
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualsWithLocalParameterizations) {
  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);

  // Fix y's first dimension.
  vector<int> y_fixed;
  y_fixed.push_back(0);
  problem.AddParameterBlock(y, 3, new SubsetParameterization(3, y_fixed));

  // Fix z's second dimension.
  vector<int> z_fixed;
  z_fixed.push_back(1);
  problem.AddParameterBlock(z, 4, new SubsetParameterization(4, z_fixed));

  // f(x, y) in R^2
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 2, 2, 3>,
                           NULL,
                           x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<2, 3, 2, 4>,
                           NULL,
                           x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<3, 4, 3, 4>,
                           NULL,
                           y, z);

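  // The subset parameterizations fix y[0] and z[1], so the evaluator works
  // with a 9x7 "local" jacobian in which the corresponding columns (and the
  // matching gradient entries) are simply dropped from the full 9x9 problem
  // of the previous test.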
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 7,
    // Cost
    // f       g           h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,         // x
      66.0, 99.0,         // y
      42.0, 126.0, 168.0  // z
    },
    // Jacobian
    //                x        y           z
    {   /* f(x, y) */ 1, 2,    2, 3,    0, 0, 0,
                      1, 2,    2, 3,    0, 0, 0,

        /* g(x, z) */ 2, 4,    0, 0,    2, 6, 8,
                      2, 4,    0, 0,    2, 6, 8,
                      2, 4,    0, 0,    2, 6, 8,

        /* h(y, z) */ 0, 0,    6, 9,    3, 9, 12,
                      0, 0,    6, 9,    3, 9, 12,
                      0, 0,    6, 9,    3, 9, 12,
                      0, 0,    6, 9,    3, 9, 12
    }
  };
  CheckAllEvaluationCombinations(expected);
}

TEST_P(EvaluatorTest, MultipleResidualProblemWithSomeConstantParameters) {
  // The values are ignored completely by the cost function.
  double x[2];
  double y[3];
  double z[4];

  // Add the parameters in explicit order to force the ordering in the program.
  problem.AddParameterBlock(x,  2);
  problem.AddParameterBlock(y,  3);
  problem.AddParameterBlock(z,  4);

  // f(x, y) in R^2
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<1, 2, 2, 3>,
                           NULL,
                           x, y);

  // g(x, z) in R^3
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<2, 3, 2, 4>,
                           NULL,
                           x, z);

  // h(y, z) in R^4
  problem.AddResidualBlock(new ParameterIgnoringCostFunction<3, 4, 3, 4>,
                           NULL,
                           y, z);

  // For this test, "z" is constant.
  problem.SetParameterBlockConstant(z);

  // Create the reduced program which is missing the fixed "z" variable.
  // Normally, the preprocessing of the program that happens in solver_impl
  // takes care of this, but we don't want to invoke the solver here.
  Program reduced_program;
  vector<ParameterBlock*>* parameter_blocks =
      problem.mutable_program()->mutable_parameter_blocks();

  // "z" is the last parameter; save it for later and pop it off temporarily.
  // Note that "z" will still get read during evaluation, so it cannot be
  // deleted at this point.
  ParameterBlock* parameter_block_z = parameter_blocks->back();
  parameter_blocks->pop_back();

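  // With "z" removed from the program, the evaluator sees only 5 effective
  // parameters (x and y); the residuals and cost are unchanged because the
  // cost functions ignore the parameter values, but z's jacobian columns and
  // gradient entries disappear.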
  ExpectedEvaluation expected = {
    // Rows/columns
    9, 5,
    // Cost
    // f       g           h
    (  1 + 4 + 1 + 4 + 9 + 1 + 4 + 9 + 16) / 2.0,
    // Residuals
    { 1.0, 2.0,           // f
      1.0, 2.0, 3.0,      // g
      1.0, 2.0, 3.0, 4.0  // h
    },
    // Gradient
    { 15.0, 30.0,        // x
      33.0, 66.0, 99.0,  // y
    },
    // Jacobian
    //                x        y
    {   /* f(x, y) */ 1, 2,    1, 2, 3,
                      1, 2,    1, 2, 3,

        /* g(x, z) */ 2, 4,    0, 0, 0,
                      2, 4,    0, 0, 0,
                      2, 4,    0, 0, 0,

        /* h(y, z) */ 0, 0,    3, 6, 9,
                      0, 0,    3, 6, 9,
                      0, 0,    3, 6, 9,
                      0, 0,    3, 6, 9
    }
  };
  CheckAllEvaluationCombinations(expected);

  // Restore parameter block z, so it will get freed in a consistent way.
  parameter_blocks->push_back(parameter_block_z);
}

TEST_P(EvaluatorTest, EvaluatorAbortsForResidualsThatFailToEvaluate) {
  // Switch the return value to failure.
  problem.AddResidualBlock(
      new ParameterIgnoringCostFunction<20, 3, 2, 3, 4, false>, NULL, x, y, z);

  // The values are ignored.
  double state[9];

  scoped_ptr<Evaluator> evaluator(CreateEvaluator(problem.mutable_program()));
  scoped_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());
  double cost;
  EXPECT_FALSE(evaluator->Evaluate(state, &cost, NULL, NULL, NULL));
}

// In the pairs, the first argument is the linear solver type, and the second
// argument is num_eliminate_blocks. Changing the num_eliminate_blocks only
// makes sense for the Schur-based solvers.
//
// Try all values of num_eliminate_blocks that make sense given that in the
// tests a maximum of 4 parameter blocks are present.
INSTANTIATE_TEST_CASE_P(
    LinearSolvers,
    EvaluatorTest,
    ::testing::Values(make_pair(DENSE_QR, 0),
                      make_pair(DENSE_SCHUR, 0),
                      make_pair(DENSE_SCHUR, 1),
                      make_pair(DENSE_SCHUR, 2),
                      make_pair(DENSE_SCHUR, 3),
                      make_pair(DENSE_SCHUR, 4),
                      make_pair(SPARSE_SCHUR, 0),
                      make_pair(SPARSE_SCHUR, 1),
                      make_pair(SPARSE_SCHUR, 2),
                      make_pair(SPARSE_SCHUR, 3),
                      make_pair(SPARSE_SCHUR, 4),
                      make_pair(ITERATIVE_SCHUR, 0),
                      make_pair(ITERATIVE_SCHUR, 1),
                      make_pair(ITERATIVE_SCHUR, 2),
                      make_pair(ITERATIVE_SCHUR, 3),
                      make_pair(ITERATIVE_SCHUR, 4),
                      make_pair(SPARSE_NORMAL_CHOLESKY, 0)));

// Simple cost function used to check if the evaluator is sensitive to
// state changes.
class ParameterSensitiveCostFunction : public SizedCostFunction<2, 2> {
 public:
  virtual bool Evaluate(double const* const* parameters,
                        double* residuals,
                        double** jacobians) const {
    double x1 = parameters[0][0];
    double x2 = parameters[0][1];
    residuals[0] = x1 * x1;
    residuals[1] = x2 * x2;

    if (jacobians != NULL) {
      double* jacobian = jacobians[0];
      if (jacobian != NULL) {
        jacobian[0] = 2.0 * x1;
        jacobian[1] = 0.0;
        jacobian[2] = 0.0;
        jacobian[3] = 2.0 * x2;
      }
    }
    return true;
  }
};

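// At the state (2, 3) used below, ParameterSensitiveCostFunction produces
// residuals (4, 9), so the expected cost is 0.5 * (16 + 81) = 48.5 and the
// expected jacobian is diag(2 * 2, 2 * 3) = diag(4, 6).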
TEST(Evaluator, EvaluatorRespectsParameterChanges) {
  ProblemImpl problem;

  double x[2];
  x[0] = 1.0;
  x[1] = 1.0;

  problem.AddResidualBlock(new ParameterSensitiveCostFunction(), NULL, x);
  Program* program = problem.mutable_program();
  program->SetParameterOffsetsAndIndex();

  Evaluator::Options options;
  options.linear_solver_type = DENSE_QR;
  options.num_eliminate_blocks = 0;
  string error;
  scoped_ptr<Evaluator> evaluator(Evaluator::Create(options, program, &error));
  scoped_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());

  ASSERT_EQ(2, jacobian->num_rows());
  ASSERT_EQ(2, jacobian->num_cols());

  double state[2];
  state[0] = 2.0;
  state[1] = 3.0;

  // The original state of a residual block comes from the user's
  // state. So the original state is 1.0, 1.0, and the only way we get
  // the 2.0, 3.0 results in the following tests is if it respects the
  // values in the state vector.

  // Cost only; no residuals and no jacobian.
  {
    double cost = -1;
    ASSERT_TRUE(evaluator->Evaluate(state, &cost, NULL, NULL, NULL));
    EXPECT_EQ(48.5, cost);
  }

  // Cost and residuals, no jacobian.
  {
    double cost = -1;
    double residuals[2] = { -2, -2 };
    ASSERT_TRUE(evaluator->Evaluate(state, &cost, residuals, NULL, NULL));
    EXPECT_EQ(48.5, cost);
    EXPECT_EQ(4, residuals[0]);
    EXPECT_EQ(9, residuals[1]);
  }

  // Cost, residuals, and jacobian.
  {
    double cost = -1;
    double residuals[2] = { -2, -2 };
    SetSparseMatrixConstant(jacobian.get(), -1);
    ASSERT_TRUE(evaluator->Evaluate(state,
                                    &cost,
                                    residuals,
                                    NULL,
                                    jacobian.get()));
    EXPECT_EQ(48.5, cost);
    EXPECT_EQ(4, residuals[0]);
    EXPECT_EQ(9, residuals[1]);
    Matrix actual_jacobian;
    jacobian->ToDenseMatrix(&actual_jacobian);

    Matrix expected_jacobian(2, 2);
    expected_jacobian
        << 2 * state[0], 0,
           0, 2 * state[1];

    EXPECT_TRUE((actual_jacobian.array() == expected_jacobian.array()).all())
        << "Actual:\n" << actual_jacobian
        << "\nExpected:\n" << expected_jacobian;
  }
}

// Simple cost function used for testing Evaluator::Evaluate.
//
// r_i = i - sum_j (j + 1) * x_ij^2, where x_ij is the i-th entry of the j-th
// parameter block.
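//
// For example, QuadraticCostFunction<2, 2> evaluated at the parameter blocks
// x = (1, 2) and y = (3, 4) (the values used in StaticEvaluateTest below)
// gives r = (0 - 1 - 2 * 9, 1 - 4 - 2 * 16) = (-19, -35), with jacobian
// blocks diag(-2 * 1, -2 * 2) = diag(-2, -4) and diag(-4 * 3, -4 * 4) =
// diag(-12, -16).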
template <int kNumResiduals, int kNumParameterBlocks>
class QuadraticCostFunction : public CostFunction {
 public:
  QuadraticCostFunction() {
    CHECK_GT(kNumResiduals, 0);
    CHECK_GT(kNumParameterBlocks, 0);
    set_num_residuals(kNumResiduals);
    for (int i = 0; i < kNumParameterBlocks; ++i) {
      mutable_parameter_block_sizes()->push_back(kNumResiduals);
    }
  }

  virtual bool Evaluate(double const* const* parameters,
                        double* residuals,
                        double** jacobians) const {
    for (int i = 0; i < kNumResiduals; ++i) {
      residuals[i] = i;
      for (int j = 0; j < kNumParameterBlocks; ++j) {
        residuals[i] -= (j + 1.0) * parameters[j][i] * parameters[j][i];
      }
    }

    if (jacobians == NULL) {
      return true;
    }

    for (int j = 0; j < kNumParameterBlocks; ++j) {
      if (jacobians[j] != NULL) {
        MatrixRef(jacobians[j], kNumResiduals, kNumResiduals) =
            (-2.0 * (j + 1.0) *
             ConstVectorRef(parameters[j], kNumResiduals)).asDiagonal();
      }
    }

    return true;
  }
};

// Convert a CRSMatrix to a dense Eigen matrix.
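// In a CRSMatrix, rows has num_rows + 1 entries, and the column indices and
// values of row r are stored at positions rows[r] through rows[r + 1] - 1 of
// cols and values.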
void CRSToDenseMatrix(const CRSMatrix& input, Matrix* output) {
  Matrix& m = *CHECK_NOTNULL(output);
  m.resize(input.num_rows, input.num_cols);
  m.setZero();
  for (int row = 0; row < input.num_rows; ++row) {
    for (int j = input.rows[row]; j < input.rows[row + 1]; ++j) {
      const int col = input.cols[j];
      m(row, col) = input.values[j];
    }
  }
}


class StaticEvaluateTest : public ::testing::Test {
 protected:
  void SetUp() {
    for (int i = 0; i < 6; ++i) {
      parameters_[i] = static_cast<double>(i + 1);
    }

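    // With the initialization above, the three parameter blocks used below
    // are x = (1, 2), y = (3, 4) and z = (5, 6).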
    CostFunction* cost_function = new QuadraticCostFunction<2, 2>;

    // f(x, y)
    problem_.AddResidualBlock(cost_function,
                              NULL,
                              parameters_,
                              parameters_ + 2);
    // g(y, z)
    problem_.AddResidualBlock(cost_function,
                              NULL, parameters_ + 2,
                              parameters_ + 4);
    // h(z, x)
    problem_.AddResidualBlock(cost_function,
                              NULL,
                              parameters_ + 4,
                              parameters_);
  }

  void EvaluateAndCompare(const int expected_num_rows,
                          const int expected_num_cols,
                          const double expected_cost,
                          const double* expected_residuals,
                          const double* expected_gradient,
                          const double* expected_jacobian) {
    double cost;
    vector<double> residuals;
    vector<double> gradient;
    CRSMatrix jacobian;

    EXPECT_TRUE(Evaluator::Evaluate(
                    problem_.mutable_program(),
                    1,
                    &cost,
                    expected_residuals != NULL ? &residuals : NULL,
                    expected_gradient != NULL ? &gradient : NULL,
                    expected_jacobian != NULL ? &jacobian : NULL));

    if (expected_residuals != NULL) {
      EXPECT_EQ(residuals.size(), expected_num_rows);
    }

    if (expected_gradient != NULL) {
      EXPECT_EQ(gradient.size(), expected_num_cols);
    }

    if (expected_jacobian != NULL) {
      EXPECT_EQ(jacobian.num_rows, expected_num_rows);
      EXPECT_EQ(jacobian.num_cols, expected_num_cols);
    }

    Matrix dense_jacobian;
    if (expected_jacobian != NULL) {
      CRSToDenseMatrix(jacobian, &dense_jacobian);
    }

    CompareEvaluations(expected_num_rows,
                       expected_num_cols,
                       expected_cost,
                       expected_residuals,
                       expected_gradient,
                       expected_jacobian,
                       cost,
                       residuals.size() > 0 ? &residuals[0] : NULL,
                       gradient.size() > 0 ? &gradient[0] : NULL,
                       dense_jacobian.data());
  }

  void CheckAllEvaluationCombinations(const ExpectedEvaluation& expected) {
    for (int i = 0; i < 8; ++i) {
      EvaluateAndCompare(expected.num_rows,
                         expected.num_cols,
                         expected.cost,
                         (i & 1) ? expected.residuals : NULL,
                         (i & 2) ? expected.gradient  : NULL,
                         (i & 4) ? expected.jacobian  : NULL);
    }

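    // Then evaluate the same combinations again after overwriting the
    // internal parameter block states with zeros; the expected values should
    // still be produced, which indicates that Evaluate works off the
    // user-owned parameter values rather than the previously set state.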
    double new_parameters[6];
    for (int i = 0; i < 6; ++i) {
      new_parameters[i] = 0.0;
    }

    problem_.mutable_program()->StateVectorToParameterBlocks(new_parameters);

    for (int i = 0; i < 8; ++i) {
      EvaluateAndCompare(expected.num_rows,
                         expected.num_cols,
                         expected.cost,
                         (i & 1) ? expected.residuals : NULL,
                         (i & 2) ? expected.gradient  : NULL,
                         (i & 4) ? expected.jacobian  : NULL);
    }
  }

  ProblemImpl problem_;
  double parameters_[6];
};


TEST_F(StaticEvaluateTest, MultipleParameterAndResidualBlocks) {
  ExpectedEvaluation expected = {
    // Rows/columns
    6, 6,
    // Cost
    7607.0,
    // Residuals
    { -19.0, -35.0,  // f
      -59.0, -87.0,  // g
      -27.0, -43.0   // h
    },
    // Gradient
    {  146.0,  484.0,   // x
       582.0, 1256.0,   // y
      1450.0, 2604.0,   // z
    },
    // Jacobian
    //                       x             y             z
    { /* f(x, y) */ -2.0,  0.0, -12.0,   0.0,   0.0,   0.0,
                     0.0, -4.0,   0.0, -16.0,   0.0,   0.0,
      /* g(y, z) */  0.0,  0.0,  -6.0,   0.0, -20.0,   0.0,
                     0.0,  0.0,   0.0,  -8.0,   0.0, -24.0,
      /* h(z, x) */ -4.0,  0.0,   0.0,   0.0, -10.0,   0.0,
                     0.0, -8.0,   0.0,   0.0,   0.0, -12.0
    }
  };

  CheckAllEvaluationCombinations(expected);
}

TEST_F(StaticEvaluateTest, ConstantParameterBlock) {
  ExpectedEvaluation expected = {
    // Rows/columns
    6, 6,
    // Cost
    7607.0,
    // Residuals
    { -19.0, -35.0,  // f
      -59.0, -87.0,  // g
      -27.0, -43.0   // h
    },

    // Gradient
    {  146.0,  484.0,  // x
         0.0,    0.0,  // y
      1450.0, 2604.0,  // z
    },

    // Jacobian
    //                       x             y             z
    { /* f(x, y) */ -2.0,  0.0,   0.0,   0.0,   0.0,   0.0,
                     0.0, -4.0,   0.0,   0.0,   0.0,   0.0,
      /* g(y, z) */  0.0,  0.0,   0.0,   0.0, -20.0,   0.0,
                     0.0,  0.0,   0.0,   0.0,   0.0, -24.0,
      /* h(z, x) */ -4.0,  0.0,   0.0,   0.0, -10.0,   0.0,
                     0.0, -8.0,   0.0,   0.0,   0.0, -12.0
    }
  };

  problem_.SetParameterBlockConstant(parameters_ + 2);
  CheckAllEvaluationCombinations(expected);
}

TEST_F(StaticEvaluateTest, LocalParameterization) {
  ExpectedEvaluation expected = {
    // Rows/columns
    6, 5,
    // Cost
    7607.0,
    // Residuals
    { -19.0, -35.0,  // f
      -59.0, -87.0,  // g
      -27.0, -43.0   // h
    },
    // Gradient
    {  146.0,  484.0,  // x
      1256.0,          // y with SubsetParameterization
      1450.0, 2604.0,  // z
    },
    // Jacobian
    //                       x      y             z
    { /* f(x, y) */ -2.0,  0.0,   0.0,   0.0,   0.0,
                     0.0, -4.0, -16.0,   0.0,   0.0,
      /* g(y, z) */  0.0,  0.0,   0.0, -20.0,   0.0,
                     0.0,  0.0,  -8.0,   0.0, -24.0,
      /* h(z, x) */ -4.0,  0.0,   0.0, -10.0,   0.0,
                     0.0, -8.0,   0.0,   0.0, -12.0
    }
  };

  vector<int> constant_parameters;
  constant_parameters.push_back(0);
  problem_.SetParameterization(parameters_ + 2,
                               new SubsetParameterization(2,
                                                          constant_parameters));

  CheckAllEvaluationCombinations(expected);
}

}  // namespace internal
}  // namespace ceres