// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2010, 2011, 2012 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)
//
// Various algorithms that operate on undirected graphs.
32 33#ifndef CERES_INTERNAL_GRAPH_ALGORITHMS_H_ 34#define CERES_INTERNAL_GRAPH_ALGORITHMS_H_ 35 36#include <algorithm> 37#include <vector> 38#include <utility> 39#include "ceres/collections_port.h" 40#include "ceres/graph.h" 41#include "glog/logging.h" 42 43namespace ceres { 44namespace internal { 45 46// Compare two vertices of a graph by their degrees, if the degrees 47// are equal then order them by their ids. 48template <typename Vertex> 49class VertexTotalOrdering { 50 public: 51 explicit VertexTotalOrdering(const Graph<Vertex>& graph) 52 : graph_(graph) {} 53 54 bool operator()(const Vertex& lhs, const Vertex& rhs) const { 55 if (graph_.Neighbors(lhs).size() == graph_.Neighbors(rhs).size()) { 56 return lhs < rhs; 57 } 58 return graph_.Neighbors(lhs).size() < graph_.Neighbors(rhs).size(); 59 } 60 61 private: 62 const Graph<Vertex>& graph_; 63}; 64 65template <typename Vertex> 66class VertexDegreeLessThan { 67 public: 68 explicit VertexDegreeLessThan(const Graph<Vertex>& graph) 69 : graph_(graph) {} 70 71 bool operator()(const Vertex& lhs, const Vertex& rhs) const { 72 return graph_.Neighbors(lhs).size() < graph_.Neighbors(rhs).size(); 73 } 74 75 private: 76 const Graph<Vertex>& graph_; 77}; 78 79// Order the vertices of a graph using its (approximately) largest 80// independent set, where an independent set of a graph is a set of 81// vertices that have no edges connecting them. The maximum 82// independent set problem is NP-Hard, but there are effective 83// approximation algorithms available. The implementation here uses a 84// breadth first search that explores the vertices in order of 85// increasing degree. The same idea is used by Saad & Li in "MIQR: A 86// multilevel incomplete QR preconditioner for large sparse 87// least-squares problems", SIMAX, 2007. 88// 89// Given a undirected graph G(V,E), the algorithm is a greedy BFS 90// search where the vertices are explored in increasing order of their 91// degree. 
The output vector ordering contains elements of S in 92// increasing order of their degree, followed by elements of V - S in 93// increasing order of degree. The return value of the function is the 94// cardinality of S. 95template <typename Vertex> 96int IndependentSetOrdering(const Graph<Vertex>& graph, 97 vector<Vertex>* ordering) { 98 const HashSet<Vertex>& vertices = graph.vertices(); 99 const int num_vertices = vertices.size(); 100 101 CHECK_NOTNULL(ordering); 102 ordering->clear(); 103 ordering->reserve(num_vertices); 104 105 // Colors for labeling the graph during the BFS. 106 const char kWhite = 0; 107 const char kGrey = 1; 108 const char kBlack = 2; 109 110 // Mark all vertices white. 111 HashMap<Vertex, char> vertex_color; 112 vector<Vertex> vertex_queue; 113 for (typename HashSet<Vertex>::const_iterator it = vertices.begin(); 114 it != vertices.end(); 115 ++it) { 116 vertex_color[*it] = kWhite; 117 vertex_queue.push_back(*it); 118 } 119 120 121 sort(vertex_queue.begin(), vertex_queue.end(), 122 VertexTotalOrdering<Vertex>(graph)); 123 124 // Iterate over vertex_queue. Pick the first white vertex, add it 125 // to the independent set. Mark it black and its neighbors grey. 126 for (int i = 0; i < vertex_queue.size(); ++i) { 127 const Vertex& vertex = vertex_queue[i]; 128 if (vertex_color[vertex] != kWhite) { 129 continue; 130 } 131 132 ordering->push_back(vertex); 133 vertex_color[vertex] = kBlack; 134 const HashSet<Vertex>& neighbors = graph.Neighbors(vertex); 135 for (typename HashSet<Vertex>::const_iterator it = neighbors.begin(); 136 it != neighbors.end(); 137 ++it) { 138 vertex_color[*it] = kGrey; 139 } 140 } 141 142 int independent_set_size = ordering->size(); 143 144 // Iterate over the vertices and add all the grey vertices to the 145 // ordering. At this stage there should only be black or grey 146 // vertices in the graph. 
147 for (typename vector<Vertex>::const_iterator it = vertex_queue.begin(); 148 it != vertex_queue.end(); 149 ++it) { 150 const Vertex vertex = *it; 151 DCHECK(vertex_color[vertex] != kWhite); 152 if (vertex_color[vertex] != kBlack) { 153 ordering->push_back(vertex); 154 } 155 } 156 157 CHECK_EQ(ordering->size(), num_vertices); 158 return independent_set_size; 159} 160 161// Same as above with one important difference. The ordering parameter 162// is an input/output parameter which carries an initial ordering of 163// the vertices of the graph. The greedy independent set algorithm 164// starts by sorting the vertices in increasing order of their 165// degree. The input ordering is used to stabilize this sort, i.e., if 166// two vertices have the same degree then they are ordered in the same 167// order in which they occur in "ordering". 168// 169// This is useful in eliminating non-determinism from the Schur 170// ordering algorithm over all. 171template <typename Vertex> 172int StableIndependentSetOrdering(const Graph<Vertex>& graph, 173 vector<Vertex>* ordering) { 174 CHECK_NOTNULL(ordering); 175 const HashSet<Vertex>& vertices = graph.vertices(); 176 const int num_vertices = vertices.size(); 177 CHECK_EQ(vertices.size(), ordering->size()); 178 179 // Colors for labeling the graph during the BFS. 180 const char kWhite = 0; 181 const char kGrey = 1; 182 const char kBlack = 2; 183 184 vector<Vertex> vertex_queue(*ordering); 185 186 stable_sort(vertex_queue.begin(), vertex_queue.end(), 187 VertexDegreeLessThan<Vertex>(graph)); 188 189 // Mark all vertices white. 190 HashMap<Vertex, char> vertex_color; 191 for (typename HashSet<Vertex>::const_iterator it = vertices.begin(); 192 it != vertices.end(); 193 ++it) { 194 vertex_color[*it] = kWhite; 195 } 196 197 ordering->clear(); 198 ordering->reserve(num_vertices); 199 // Iterate over vertex_queue. Pick the first white vertex, add it 200 // to the independent set. Mark it black and its neighbors grey. 
201 for (int i = 0; i < vertex_queue.size(); ++i) { 202 const Vertex& vertex = vertex_queue[i]; 203 if (vertex_color[vertex] != kWhite) { 204 continue; 205 } 206 207 ordering->push_back(vertex); 208 vertex_color[vertex] = kBlack; 209 const HashSet<Vertex>& neighbors = graph.Neighbors(vertex); 210 for (typename HashSet<Vertex>::const_iterator it = neighbors.begin(); 211 it != neighbors.end(); 212 ++it) { 213 vertex_color[*it] = kGrey; 214 } 215 } 216 217 int independent_set_size = ordering->size(); 218 219 // Iterate over the vertices and add all the grey vertices to the 220 // ordering. At this stage there should only be black or grey 221 // vertices in the graph. 222 for (typename vector<Vertex>::const_iterator it = vertex_queue.begin(); 223 it != vertex_queue.end(); 224 ++it) { 225 const Vertex vertex = *it; 226 DCHECK(vertex_color[vertex] != kWhite); 227 if (vertex_color[vertex] != kBlack) { 228 ordering->push_back(vertex); 229 } 230 } 231 232 CHECK_EQ(ordering->size(), num_vertices); 233 return independent_set_size; 234} 235 236// Find the connected component for a vertex implemented using the 237// find and update operation for disjoint-set. Recursively traverse 238// the disjoint set structure till you reach a vertex whose connected 239// component has the same id as the vertex itself. Along the way 240// update the connected components of all the vertices. This updating 241// is what gives this data structure its efficiency. 242template <typename Vertex> 243Vertex FindConnectedComponent(const Vertex& vertex, 244 HashMap<Vertex, Vertex>* union_find) { 245 typename HashMap<Vertex, Vertex>::iterator it = union_find->find(vertex); 246 DCHECK(it != union_find->end()); 247 if (it->second != vertex) { 248 it->second = FindConnectedComponent(it->second, union_find); 249 } 250 251 return it->second; 252} 253 254// Compute a degree two constrained Maximum Spanning Tree/forest of 255// the input graph. Caller owns the result. 
256// 257// Finding degree 2 spanning tree of a graph is not always 258// possible. For example a star graph, i.e. a graph with n-nodes 259// where one node is connected to the other n-1 nodes does not have 260// a any spanning trees of degree less than n-1.Even if such a tree 261// exists, finding such a tree is NP-Hard. 262 263// We get around both of these problems by using a greedy, degree 264// constrained variant of Kruskal's algorithm. We start with a graph 265// G_T with the same vertex set V as the input graph G(V,E) but an 266// empty edge set. We then iterate over the edges of G in decreasing 267// order of weight, adding them to G_T if doing so does not create a 268// cycle in G_T} and the degree of all the vertices in G_T remains 269// bounded by two. This O(|E|) algorithm results in a degree-2 270// spanning forest, or a collection of linear paths that span the 271// graph G. 272template <typename Vertex> 273Graph<Vertex>* 274Degree2MaximumSpanningForest(const Graph<Vertex>& graph) { 275 // Array of edges sorted in decreasing order of their weights. 276 vector<pair<double, pair<Vertex, Vertex> > > weighted_edges; 277 Graph<Vertex>* forest = new Graph<Vertex>(); 278 279 // Disjoint-set to keep track of the connected components in the 280 // maximum spanning tree. 281 HashMap<Vertex, Vertex> disjoint_set; 282 283 // Sort of the edges in the graph in decreasing order of their 284 // weight. Also add the vertices of the graph to the Maximum 285 // Spanning Tree graph and set each vertex to be its own connected 286 // component in the disjoint_set structure. 
287 const HashSet<Vertex>& vertices = graph.vertices(); 288 for (typename HashSet<Vertex>::const_iterator it = vertices.begin(); 289 it != vertices.end(); 290 ++it) { 291 const Vertex vertex1 = *it; 292 forest->AddVertex(vertex1, graph.VertexWeight(vertex1)); 293 disjoint_set[vertex1] = vertex1; 294 295 const HashSet<Vertex>& neighbors = graph.Neighbors(vertex1); 296 for (typename HashSet<Vertex>::const_iterator it2 = neighbors.begin(); 297 it2 != neighbors.end(); 298 ++it2) { 299 const Vertex vertex2 = *it2; 300 if (vertex1 >= vertex2) { 301 continue; 302 } 303 const double weight = graph.EdgeWeight(vertex1, vertex2); 304 weighted_edges.push_back(make_pair(weight, make_pair(vertex1, vertex2))); 305 } 306 } 307 308 // The elements of this vector, are pairs<edge_weight, 309 // edge>. Sorting it using the reverse iterators gives us the edges 310 // in decreasing order of edges. 311 sort(weighted_edges.rbegin(), weighted_edges.rend()); 312 313 // Greedily add edges to the spanning tree/forest as long as they do 314 // not violate the degree/cycle constraint. 315 for (int i =0; i < weighted_edges.size(); ++i) { 316 const pair<Vertex, Vertex>& edge = weighted_edges[i].second; 317 const Vertex vertex1 = edge.first; 318 const Vertex vertex2 = edge.second; 319 320 // Check if either of the vertices are of degree 2 already, in 321 // which case adding this edge will violate the degree 2 322 // constraint. 323 if ((forest->Neighbors(vertex1).size() == 2) || 324 (forest->Neighbors(vertex2).size() == 2)) { 325 continue; 326 } 327 328 // Find the id of the connected component to which the two 329 // vertices belong to. If the id is the same, it means that the 330 // two of them are already connected to each other via some other 331 // vertex, and adding this edge will create a cycle. 
332 Vertex root1 = FindConnectedComponent(vertex1, &disjoint_set); 333 Vertex root2 = FindConnectedComponent(vertex2, &disjoint_set); 334 335 if (root1 == root2) { 336 continue; 337 } 338 339 // This edge can be added, add an edge in either direction with 340 // the same weight as the original graph. 341 const double edge_weight = graph.EdgeWeight(vertex1, vertex2); 342 forest->AddEdge(vertex1, vertex2, edge_weight); 343 forest->AddEdge(vertex2, vertex1, edge_weight); 344 345 // Connected the two connected components by updating the 346 // disjoint_set structure. Always connect the connected component 347 // with the greater index with the connected component with the 348 // smaller index. This should ensure shallower trees, for quicker 349 // lookup. 350 if (root2 < root1) { 351 std::swap(root1, root2); 352 }; 353 354 disjoint_set[root2] = root1; 355 } 356 return forest; 357} 358 359} // namespace internal 360} // namespace ceres 361 362#endif // CERES_INTERNAL_GRAPH_ALGORITHMS_H_ 363