// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2010, 2011, 2012 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)

// This include must come before any #ifndef check on Ceres compile options.
#include "ceres/internal/port.h"

#ifndef CERES_NO_SUITESPARSE

#include "ceres/visibility_based_preconditioner.h"

#include <algorithm>
#include <functional>
#include <iterator>
#include <set>
#include <utility>
#include <vector>
#include "Eigen/Dense"
#include "ceres/block_random_access_sparse_matrix.h"
#include "ceres/block_sparse_matrix.h"
#include "ceres/canonical_views_clustering.h"
#include "ceres/collections_port.h"
#include "ceres/graph.h"
#include "ceres/graph_algorithms.h"
#include "ceres/internal/scoped_ptr.h"
#include "ceres/linear_solver.h"
#include "ceres/schur_eliminator.h"
#include "ceres/single_linkage_clustering.h"
#include "ceres/visibility.h"
#include "glog/logging.h"

namespace ceres {
namespace internal {

// TODO(sameeragarwal): Currently these are magic weights for the
// preconditioner construction. Move these higher up into the Options
// struct and provide some guidelines for choosing them.
//
// This will require some more work on the clustering algorithm and
// possibly some more refactoring of the code.
static const double kCanonicalViewsSizePenaltyWeight = 3.0;
static const double kCanonicalViewsSimilarityPenaltyWeight = 0.0;
static const double kSingleLinkageMinSimilarity = 0.9;

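// Build the preconditioner: determine its block sparsity structure
// (CLUSTER_JACOBI or CLUSTER_TRIDIAGONAL), allocate storage for it and
// set up the Schur eliminator that will later fill in its values.
//
// Rough usage sketch (illustrative only; num_points, num_cameras, the
// Jacobian A and the vectors x, y are placeholders, not names defined
// in this file):
//
//   Preconditioner::Options options;
//   options.type = CLUSTER_TRIDIAGONAL;
//   options.elimination_groups.push_back(num_points);
//   options.elimination_groups.push_back(num_cameras);
//   VisibilityBasedPreconditioner preconditioner(*A.block_structure(),
//                                                options);
//   preconditioner.Update(A, NULL);
//   preconditioner.RightMultiply(x, y);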
VisibilityBasedPreconditioner::VisibilityBasedPreconditioner(
    const CompressedRowBlockStructure& bs,
    const Preconditioner::Options& options)
    : options_(options),
      num_blocks_(0),
      num_clusters_(0),
      factor_(NULL) {
  CHECK_GT(options_.elimination_groups.size(), 1);
  CHECK_GT(options_.elimination_groups[0], 0);
  CHECK(options_.type == CLUSTER_JACOBI ||
        options_.type == CLUSTER_TRIDIAGONAL)
      << "Unknown preconditioner type: " << options_.type;
  num_blocks_ = bs.cols.size() - options_.elimination_groups[0];
  CHECK_GT(num_blocks_, 0)
      << "Jacobian should have at least 1 f_block for "
      << "visibility based preconditioning.";

  // Vector of camera block sizes
  block_size_.resize(num_blocks_);
  for (int i = 0; i < num_blocks_; ++i) {
    block_size_[i] = bs.cols[i + options_.elimination_groups[0]].size;
  }

  const time_t start_time = time(NULL);
  switch (options_.type) {
    case CLUSTER_JACOBI:
      ComputeClusterJacobiSparsity(bs);
      break;
    case CLUSTER_TRIDIAGONAL:
      ComputeClusterTridiagonalSparsity(bs);
      break;
    default:
      LOG(FATAL) << "Unknown preconditioner type";
  }
  const time_t structure_time = time(NULL);
  InitStorage(bs);
  const time_t storage_time = time(NULL);
  InitEliminator(bs);
  const time_t eliminator_time = time(NULL);

  // Allocate temporary storage for a vector used during
  // RightMultiply.
  tmp_rhs_ = CHECK_NOTNULL(ss_.CreateDenseVector(NULL,
                                                 m_->num_rows(),
                                                 m_->num_rows()));
  const time_t init_time = time(NULL);
  VLOG(2) << "init time: "
          << init_time - start_time
          << " structure time: " << structure_time - start_time
          << " storage time:" << storage_time - structure_time
          << " eliminator time: " << eliminator_time - storage_time;
}

VisibilityBasedPreconditioner::~VisibilityBasedPreconditioner() {
  if (factor_ != NULL) {
    ss_.Free(factor_);
    factor_ = NULL;
  }
  if (tmp_rhs_ != NULL) {
    ss_.Free(tmp_rhs_);
    tmp_rhs_ = NULL;
  }
}

// Determine the sparsity structure of the CLUSTER_JACOBI
// preconditioner. It clusters cameras using their scene
// visibility. The clusters form the diagonal blocks of the
// preconditioner matrix.
void VisibilityBasedPreconditioner::ComputeClusterJacobiSparsity(
    const CompressedRowBlockStructure& bs) {
  vector<set<int> > visibility;
  ComputeVisibility(bs, options_.elimination_groups[0], &visibility);
  CHECK_EQ(num_blocks_, visibility.size());
  ClusterCameras(visibility);
  cluster_pairs_.clear();
  for (int i = 0; i < num_clusters_; ++i) {
    cluster_pairs_.insert(make_pair(i, i));
  }
}

// Determine the sparsity structure of the CLUSTER_TRIDIAGONAL
// preconditioner. It clusters cameras using the scene
// visibility and then finds the strongly interacting pairs of
// clusters by constructing another graph with the clusters as
// vertices and approximating it with a degree-2 maximum spanning
// forest. The set of edges in this forest are the cluster pairs.
void VisibilityBasedPreconditioner::ComputeClusterTridiagonalSparsity(
    const CompressedRowBlockStructure& bs) {
  vector<set<int> > visibility;
  ComputeVisibility(bs, options_.elimination_groups[0], &visibility);
  CHECK_EQ(num_blocks_, visibility.size());
  ClusterCameras(visibility);

  // Construct a weighted graph on the set of clusters, where the
  // edges are the number of 3D points/e_blocks visible in both the
  // clusters at the ends of the edge. Return an approximate degree-2
  // maximum spanning forest of this graph.
  vector<set<int> > cluster_visibility;
  ComputeClusterVisibility(visibility, &cluster_visibility);
  scoped_ptr<Graph<int> > cluster_graph(
      CHECK_NOTNULL(CreateClusterGraph(cluster_visibility)));
  scoped_ptr<Graph<int> > forest(
      CHECK_NOTNULL(Degree2MaximumSpanningForest(*cluster_graph)));
  ForestToClusterPairs(*forest, &cluster_pairs_);
}

// Allocate storage for the preconditioner matrix.
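// The preconditioner is represented as a BlockRandomAccessSparseMatrix
// with one cell for every block pair selected by
// ComputeBlockPairsInPreconditioner; only these cells are filled in by
// the Schur eliminator during Update.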
void VisibilityBasedPreconditioner::InitStorage(
    const CompressedRowBlockStructure& bs) {
  ComputeBlockPairsInPreconditioner(bs);
  m_.reset(new BlockRandomAccessSparseMatrix(block_size_, block_pairs_));
}

// Cluster the cameras based on their visibility sets, using the
// clustering algorithm selected by options_.visibility_clustering_type
// (canonical views or single linkage). The visibility set of a camera
// is the set of e_blocks/3D points in the scene that are seen by it.
//
// The cluster_membership_ vector is updated to indicate cluster
// memberships for each camera block.
void VisibilityBasedPreconditioner::ClusterCameras(
    const vector<set<int> >& visibility) {
  scoped_ptr<Graph<int> > schur_complement_graph(
      CHECK_NOTNULL(CreateSchurComplementGraph(visibility)));

  HashMap<int, int> membership;

  if (options_.visibility_clustering_type == CANONICAL_VIEWS) {
    vector<int> centers;
    CanonicalViewsClusteringOptions clustering_options;
    clustering_options.size_penalty_weight =
        kCanonicalViewsSizePenaltyWeight;
    clustering_options.similarity_penalty_weight =
        kCanonicalViewsSimilarityPenaltyWeight;
    ComputeCanonicalViewsClustering(clustering_options,
                                    *schur_complement_graph,
                                    &centers,
                                    &membership);
    num_clusters_ = centers.size();
  } else if (options_.visibility_clustering_type == SINGLE_LINKAGE) {
    SingleLinkageClusteringOptions clustering_options;
    clustering_options.min_similarity =
        kSingleLinkageMinSimilarity;
    num_clusters_ = ComputeSingleLinkageClustering(clustering_options,
                                                   *schur_complement_graph,
                                                   &membership);
  } else {
    LOG(FATAL) << "Unknown visibility clustering algorithm.";
  }

  CHECK_GT(num_clusters_, 0);
  VLOG(2) << "num_clusters: " << num_clusters_;
  FlattenMembershipMap(membership, &cluster_membership_);
}

// Compute the block sparsity structure of the Schur complement
// matrix. For each pair of cameras contributing a non-zero cell to
// the Schur complement, determine if that cell is present in the
// preconditioner or not.
//
// A pair of cameras contributes a cell to the preconditioner if they
// are part of the same cluster or if the two clusters that they
// belong to have an edge connecting them in the degree-2 maximum
// spanning forest.
//
// For example, consider a camera pair (i,j) where i belongs to
// cluster1 and j belongs to cluster2 (assume that cluster1 < cluster2).
//
// The cell corresponding to (i,j) is present in the preconditioner
// if cluster1 == cluster2 or the pair (cluster1, cluster2) is
// connected by an edge in the degree-2 maximum spanning forest.
//
// Since we have already expanded the forest into a set of cluster
// pairs/edges, including self edges, the check can be reduced to
// checking membership of (cluster1, cluster2) in cluster_pairs_.
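//
// For instance (hypothetical values), with cluster_pairs_ =
// {(0,0), (1,1), (2,2), (0,1)}, a camera pair whose clusters are
// (1,0) is reordered to (0,1) and gets a cell, while a pair whose
// clusters are (0,2) does not.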
void VisibilityBasedPreconditioner::ComputeBlockPairsInPreconditioner(
    const CompressedRowBlockStructure& bs) {
  block_pairs_.clear();
  for (int i = 0; i < num_blocks_; ++i) {
    block_pairs_.insert(make_pair(i, i));
  }

  int r = 0;
  const int num_row_blocks = bs.rows.size();
  const int num_eliminate_blocks = options_.elimination_groups[0];

  // Iterate over each row of the matrix. The block structure of the
  // matrix is assumed to be sorted in order of the e_blocks/point
  // blocks. Thus all row blocks containing an e_block/point occur
  // contiguously. Further, if present, an e_block is always the first
  // parameter block in each row block.  These structural assumptions
  // are common to all Schur complement based solvers in Ceres.
  //
  // For each e_block/point block we identify the set of cameras
  // seeing it. The cross product of this set with itself is the set
  // of non-zero cells contributed by this e_block.
  //
  // The time complexity of this is O(nm^2), where n is the number of
  // 3D points and m is the maximum number of cameras seeing any
  // point, which for most scenes is a fairly small number.
  while (r < num_row_blocks) {
    int e_block_id = bs.rows[r].cells.front().block_id;
    if (e_block_id >= num_eliminate_blocks) {
      // Skip the rows whose first block is an f_block.
      break;
    }

    set<int> f_blocks;
    for (; r < num_row_blocks; ++r) {
      const CompressedRow& row = bs.rows[r];
      if (row.cells.front().block_id != e_block_id) {
        break;
      }

      // Iterate over the blocks in the row, ignoring the first block
      // since it is the one to be eliminated and adding the rest to
      // the list of f_blocks associated with this e_block.
      for (int c = 1; c < row.cells.size(); ++c) {
        const Cell& cell = row.cells[c];
        const int f_block_id = cell.block_id - num_eliminate_blocks;
        CHECK_GE(f_block_id, 0);
        f_blocks.insert(f_block_id);
      }
    }

    for (set<int>::const_iterator block1 = f_blocks.begin();
         block1 != f_blocks.end();
         ++block1) {
      set<int>::const_iterator block2 = block1;
      ++block2;
      for (; block2 != f_blocks.end(); ++block2) {
        if (IsBlockPairInPreconditioner(*block1, *block2)) {
          block_pairs_.insert(make_pair(*block1, *block2));
        }
      }
    }
  }

  // The remaining rows which do not contain any e_blocks.
  for (; r < num_row_blocks; ++r) {
    const CompressedRow& row = bs.rows[r];
    CHECK_GE(row.cells.front().block_id, num_eliminate_blocks);
    for (int i = 0; i < row.cells.size(); ++i) {
      const int block1 = row.cells[i].block_id - num_eliminate_blocks;
      for (int j = 0; j < row.cells.size(); ++j) {
        const int block2 = row.cells[j].block_id - num_eliminate_blocks;
        if (block1 <= block2) {
          if (IsBlockPairInPreconditioner(block1, block2)) {
            block_pairs_.insert(make_pair(block1, block2));
          }
        }
      }
    }
  }

  VLOG(1) << "Block pair stats: " << block_pairs_.size();
}

// Initialize the SchurEliminator.
void VisibilityBasedPreconditioner::InitEliminator(
    const CompressedRowBlockStructure& bs) {
  LinearSolver::Options eliminator_options;
  eliminator_options.elimination_groups = options_.elimination_groups;
  eliminator_options.num_threads = options_.num_threads;
  eliminator_options.e_block_size = options_.e_block_size;
  eliminator_options.f_block_size = options_.f_block_size;
  eliminator_options.row_block_size = options_.row_block_size;
  eliminator_.reset(SchurEliminatorBase::Create(eliminator_options));
  eliminator_->Init(eliminator_options.elimination_groups[0], &bs);
}

// Update the values of the preconditioner matrix and factorize it.
bool VisibilityBasedPreconditioner::UpdateImpl(const BlockSparseMatrix& A,
                                               const double* D) {
  const time_t start_time = time(NULL);
  const int num_rows = m_->num_rows();
  CHECK_GT(num_rows, 0);

  // We need a dummy rhs vector and a dummy b vector since the Schur
  // eliminator combines the computation of the reduced camera matrix
  // with the computation of the right hand side of that linear
  // system.
  //
  // TODO(sameeragarwal): Perhaps it's worth refactoring the
  // SchurEliminator::Eliminate function to allow NULL for the rhs. As
  // of now it does not seem to be worth the effort.
  Vector rhs = Vector::Zero(m_->num_rows());
  Vector b = Vector::Zero(A.num_rows());

  // Compute a subset of the entries of the Schur complement.
  eliminator_->Eliminate(&A, b.data(), D, m_.get(), rhs.data());

  // Try factorizing the matrix. For CLUSTER_JACOBI, this should
  // always succeed modulo some numerical/conditioning problems. For
  // CLUSTER_TRIDIAGONAL, in general the preconditioner matrix as
  // constructed is not positive definite. However, we will go ahead
  // and try factorizing it. If it works, great, otherwise we scale
  // all the cells in the preconditioner corresponding to the edges in
  // the degree-2 forest and that guarantees positive
  // definiteness. The proof of this fact can be found in Lemma 1 in
  // "Visibility Based Preconditioning for Bundle Adjustment".
  //
  // Doing the factorization like this saves us matrix mass when
  // scaling is not needed, which is quite often in our experience.
  LinearSolverTerminationType status = Factorize();

  if (status == LINEAR_SOLVER_FATAL_ERROR) {
    return false;
  }

  // The scaling only affects the tri-diagonal case, since
  // ScaleOffDiagonalCells only pays attention to the cells that
  // belong to the edges of the degree-2 forest. In the CLUSTER_JACOBI
  // case, the preconditioner is guaranteed to be positive
  // semidefinite.
  if (status == LINEAR_SOLVER_FAILURE && options_.type == CLUSTER_TRIDIAGONAL) {
    VLOG(1) << "Unscaled factorization failed. Retrying with off-diagonal "
            << "scaling";
    ScaleOffDiagonalCells();
    status = Factorize();
  }

  VLOG(2) << "Compute time: " << time(NULL) - start_time;
  return (status == LINEAR_SOLVER_SUCCESS);
}

// Consider the preconditioner matrix as a meta-block matrix whose
// blocks correspond to the clusters. Then cluster pairs corresponding
// to edges in the degree-2 forest are off-diagonal entries of this
// matrix. Scaling these off-diagonal entries by 1/2 forces this
// matrix to be positive definite.
void VisibilityBasedPreconditioner::ScaleOffDiagonalCells() {
  for (set< pair<int, int> >::const_iterator it = block_pairs_.begin();
       it != block_pairs_.end();
       ++it) {
    const int block1 = it->first;
    const int block2 = it->second;
    if (!IsBlockPairOffDiagonal(block1, block2)) {
      continue;
    }

    int r, c, row_stride, col_stride;
    CellInfo* cell_info = m_->GetCell(block1, block2,
                                      &r, &c,
                                      &row_stride, &col_stride);
    CHECK(cell_info != NULL)
        << "Cell missing for block pair (" << block1 << "," << block2 << ")"
        << " cluster pair (" << cluster_membership_[block1]
        << " " << cluster_membership_[block2] << ")";

    // Ah the magic of tri-diagonal matrices and diagonal
    // dominance. See Lemma 1 in "Visibility Based Preconditioning
    // For Bundle Adjustment".
    MatrixRef m(cell_info->values, row_stride, col_stride);
    m.block(r, c, block_size_[block1], block_size_[block2]) *= 0.5;
  }
}

// Compute the sparse Cholesky factorization of the preconditioner
// matrix.
LinearSolverTerminationType VisibilityBasedPreconditioner::Factorize() {
  // Extract the TripletSparseMatrix that is used for actually storing
  // S and convert it into a cholmod_sparse object.
  cholmod_sparse* lhs = ss_.CreateSparseMatrix(
      down_cast<BlockRandomAccessSparseMatrix*>(
          m_.get())->mutable_matrix());

  // The matrix is symmetric, and the upper triangular part of the
  // matrix contains the values.
  lhs->stype = 1;

  // TODO(sameeragarwal): Refactor to pipe this up and out.
  string status;

  // Symbolic factorization is computed if we don't already have one handy.
  if (factor_ == NULL) {
    factor_ = ss_.BlockAnalyzeCholesky(lhs, block_size_, block_size_, &status);
  }

  const LinearSolverTerminationType termination_type =
      (factor_ != NULL)
      ? ss_.Cholesky(lhs, factor_, &status)
      : LINEAR_SOLVER_FATAL_ERROR;

  ss_.Free(lhs);
  return termination_type;
}

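// Apply the preconditioner to x, i.e. compute y = M^-1 x, by solving
// with the sparse Cholesky factorization computed in Factorize().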
void VisibilityBasedPreconditioner::RightMultiply(const double* x,
                                                  double* y) const {
  CHECK_NOTNULL(x);
  CHECK_NOTNULL(y);
  SuiteSparse* ss = const_cast<SuiteSparse*>(&ss_);

  const int num_rows = m_->num_rows();
  memcpy(CHECK_NOTNULL(tmp_rhs_)->x, x, m_->num_rows() * sizeof(*x));
  // TODO(sameeragarwal): Better error handling.
  string status;
  cholmod_dense* solution =
      CHECK_NOTNULL(ss->Solve(factor_, tmp_rhs_, &status));
  memcpy(y, solution->x, sizeof(*y) * num_rows);
  ss->Free(solution);
}

int VisibilityBasedPreconditioner::num_rows() const {
  return m_->num_rows();
}

// Classify camera/f_block pairs as in and out of the preconditioner,
// based on whether the cluster pair that they belong to is in the
// preconditioner or not.
bool VisibilityBasedPreconditioner::IsBlockPairInPreconditioner(
    const int block1,
    const int block2) const {
  int cluster1 = cluster_membership_[block1];
  int cluster2 = cluster_membership_[block2];
  if (cluster1 > cluster2) {
    std::swap(cluster1, cluster2);
  }
  return (cluster_pairs_.count(make_pair(cluster1, cluster2)) > 0);
}

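// A block pair is off-diagonal at the cluster level if the two
// f_blocks belong to different clusters.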
bool VisibilityBasedPreconditioner::IsBlockPairOffDiagonal(
    const int block1,
    const int block2) const {
  return (cluster_membership_[block1] != cluster_membership_[block2]);
}

// Convert a graph into a list of edges that includes self edges for
// each vertex.
void VisibilityBasedPreconditioner::ForestToClusterPairs(
    const Graph<int>& forest,
    HashSet<pair<int, int> >* cluster_pairs) const {
  CHECK_NOTNULL(cluster_pairs)->clear();
  const HashSet<int>& vertices = forest.vertices();
  CHECK_EQ(vertices.size(), num_clusters_);

  // Add all the cluster pairs corresponding to the edges in the
  // forest.
  for (HashSet<int>::const_iterator it1 = vertices.begin();
       it1 != vertices.end();
       ++it1) {
    const int cluster1 = *it1;
    cluster_pairs->insert(make_pair(cluster1, cluster1));
    const HashSet<int>& neighbors = forest.Neighbors(cluster1);
    for (HashSet<int>::const_iterator it2 = neighbors.begin();
         it2 != neighbors.end();
         ++it2) {
      const int cluster2 = *it2;
      if (cluster1 < cluster2) {
        cluster_pairs->insert(make_pair(cluster1, cluster2));
      }
    }
  }
}

// The visibility set of a cluster is the union of the visibility sets
// of all its cameras. In other words, the set of points visible to
// any camera in the cluster.
void VisibilityBasedPreconditioner::ComputeClusterVisibility(
    const vector<set<int> >& visibility,
    vector<set<int> >* cluster_visibility) const {
  CHECK_NOTNULL(cluster_visibility)->resize(0);
  cluster_visibility->resize(num_clusters_);
  for (int i = 0; i < num_blocks_; ++i) {
    const int cluster_id = cluster_membership_[i];
    (*cluster_visibility)[cluster_id].insert(visibility[i].begin(),
                                             visibility[i].end());
  }
}

// Construct a graph whose vertices are the clusters, and the edge
// weights are the number of 3D points visible to cameras in both the
// vertices.
Graph<int>* VisibilityBasedPreconditioner::CreateClusterGraph(
    const vector<set<int> >& cluster_visibility) const {
  Graph<int>* cluster_graph = new Graph<int>;

  for (int i = 0; i < num_clusters_; ++i) {
    cluster_graph->AddVertex(i);
  }

  for (int i = 0; i < num_clusters_; ++i) {
    const set<int>& cluster_i = cluster_visibility[i];
    for (int j = i+1; j < num_clusters_; ++j) {
      vector<int> intersection;
      const set<int>& cluster_j = cluster_visibility[j];
      set_intersection(cluster_i.begin(), cluster_i.end(),
                       cluster_j.begin(), cluster_j.end(),
                       back_inserter(intersection));

      if (intersection.size() > 0) {
        // Clusters interact strongly when they share a large number
        // of 3D points. The degree-2 maximum spanning forest
        // algorithm iterates over the edges in decreasing order of
        // their weight, which is the number of points shared by the
        // two clusters that the edge connects.
        cluster_graph->AddEdge(i, j, intersection.size());
      }
    }
  }
  return cluster_graph;
}

// Canonical views clustering returns a HashMap from vertices to
// cluster ids. Convert this into a flat array for quick lookup. It is
// possible that some of the vertices may not be associated with any
// cluster. In that case, randomly assign them to one of the clusters.
//
// The cluster ids can be non-contiguous integers. So as we flatten
// the membership_map, we also map the cluster ids to a contiguous set
// of integers so that the cluster ids are in [0, num_clusters_).
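//
// For example (hypothetical ids), the map {0 -> 7, 1 -> 3, 2 -> 7}
// flattens to the vector [0, 1, 0], with cluster id 7 mapped to 0 and
// cluster id 3 mapped to 1.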
void VisibilityBasedPreconditioner::FlattenMembershipMap(
    const HashMap<int, int>& membership_map,
    vector<int>* membership_vector) const {
  CHECK_NOTNULL(membership_vector)->resize(0);
  membership_vector->resize(num_blocks_, -1);

  HashMap<int, int> cluster_id_to_index;
  // Iterate over the cluster membership map and update the
  // cluster_membership_ vector assigning arbitrary cluster ids to
  // the few cameras that have not been clustered.
  for (HashMap<int, int>::const_iterator it = membership_map.begin();
       it != membership_map.end();
       ++it) {
    const int camera_id = it->first;
    int cluster_id = it->second;

    // If the view was not clustered, randomly assign it to one of the
    // clusters. This preserves the mathematical correctness of the
    // preconditioner. If there are too many views which are not
    // clustered, it may lead to some quality degradation though.
    //
    // TODO(sameeragarwal): Check if a large number of views have not
    // been clustered and deal with it?
    if (cluster_id == -1) {
      cluster_id = camera_id % num_clusters_;
    }

    const int index = FindWithDefault(cluster_id_to_index,
                                      cluster_id,
                                      cluster_id_to_index.size());

    if (index == cluster_id_to_index.size()) {
      cluster_id_to_index[cluster_id] = index;
    }

    CHECK_LT(index, num_clusters_);
    membership_vector->at(camera_id) = index;
  }
}

}  // namespace internal
}  // namespace ceres

#endif  // CERES_NO_SUITESPARSE
