// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2011 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_CONJUGATE_GRADIENT_H
#define EIGEN_CONJUGATE_GRADIENT_H

namespace Eigen {

namespace internal {

/** \internal Low-level conjugate gradient algorithm
  * \param mat The matrix A
  * \param rhs The right hand side vector b
  * \param x On input and initial solution, on output the computed solution.
  * \param precond A preconditioner being able to efficiently solve for an
  *                approximation of Ax=b (regardless of b)
  * \param iters On input the max number of iteration, on output the number of performed iterations.
  * \param tol_error On input the tolerance error, on output an estimation of the relative error.
  */
template<typename MatrixType, typename Rhs, typename Dest, typename Preconditioner>
EIGEN_DONT_INLINE
void conjugate_gradient(const MatrixType& mat, const Rhs& rhs, Dest& x,
                        const Preconditioner& precond, int& iters,
                        typename Dest::RealScalar& tol_error)
{
  using std::sqrt;
  using std::abs;
  typedef typename Dest::RealScalar RealScalar;
  typedef typename Dest::Scalar Scalar;
  typedef Matrix<Scalar,Dynamic,1> VectorType;

  RealScalar tol = tol_error;
  int maxIters = iters;

  int n = mat.cols();

  VectorType residual = rhs - mat * x; //initial residual

  // Trivial right hand side: x = 0 is the exact solution; bail out early.
  RealScalar rhsNorm2 = rhs.squaredNorm();
  if(rhsNorm2 == 0)
  {
    x.setZero();
    iters = 0;
    tol_error = 0;
    return;
  }
  // Convergence test is |r|^2 < (tol*|b|)^2, keeping square roots out of the loop.
  RealScalar threshold = tol*tol*rhsNorm2;
  RealScalar residualNorm2 = residual.squaredNorm();
  if (residualNorm2 < threshold)
  {
    // The initial guess already satisfies the requested tolerance.
    iters = 0;
    tol_error = sqrt(residualNorm2 / rhsNorm2);
    return;
  }

  VectorType p(n);
  p = precond.solve(residual);      //initial search direction

  VectorType z(n), tmp(n);
  RealScalar absNew = numext::real(residual.dot(p));  // the square of the absolute value of r scaled by invM
  int i = 0;
  while(i < maxIters)
  {
    tmp.noalias() = mat * p;              // the bottleneck of the algorithm

    Scalar alpha = absNew / p.dot(tmp);   // the amount we travel on dir
    x += alpha * p;                       // update solution
    residual -= alpha * tmp;              // update residue

    residualNorm2 = residual.squaredNorm();
    if(residualNorm2 < threshold)
      break;

    z = precond.solve(residual);          // approximately solve for "A z = residual"

    RealScalar absOld = absNew;
    absNew = numext::real(residual.dot(z));  // update the absolute value of r
    RealScalar beta = absNew / absOld;       // calculate the Gram-Schmidt value used to create the new search direction
    p = z + beta * p;                        // update search direction
    i++;
  }
  // Report the achieved relative residual and the number of iterations performed.
  tol_error = sqrt(residualNorm2 / rhsNorm2);
  iters = i;
}

}

// Forward declaration with the default template arguments (Lower triangle,
// Jacobi/diagonal preconditioner); the definition follows below.
template< typename _MatrixType, int _UpLo=Lower,
          typename _Preconditioner = DiagonalPreconditioner<typename _MatrixType::Scalar> >
class ConjugateGradient;

namespace internal {

template< typename _MatrixType, int _UpLo, typename _Preconditioner>
struct traits<ConjugateGradient<_MatrixType,_UpLo,_Preconditioner> >
{
  typedef _MatrixType MatrixType;
  typedef _Preconditioner Preconditioner;
};

}

/** \ingroup IterativeLinearSolvers_Module
  * \brief A conjugate gradient solver for sparse self-adjoint problems
  *
  * This class allows to solve for A.x = b sparse linear problems using a conjugate gradient algorithm.
  * The sparse matrix A must be selfadjoint. The vectors x and b can be either dense or sparse.
  *
  * \tparam _MatrixType the type of the sparse matrix A, can be a dense or a sparse matrix.
  * \tparam _UpLo the triangular part that will be used for the computations. It can be Lower
  *               or Upper. Default is Lower.
  * \tparam _Preconditioner the type of the preconditioner. Default is DiagonalPreconditioner
  *
  * The maximal number of iterations and tolerance value can be controlled via the setMaxIterations()
  * and setTolerance() methods. The defaults are the size of the problem for the maximal number of iterations
  * and NumTraits<Scalar>::epsilon() for the tolerance.
  *
  * This class can be used as the direct solver classes. Here is a typical usage example:
  * \code
  * int n = 10000;
  * VectorXd x(n), b(n);
  * SparseMatrix<double> A(n,n);
  * // fill A and b
  * ConjugateGradient<SparseMatrix<double> > cg;
  * cg.compute(A);
  * x = cg.solve(b);
  * std::cout << "#iterations:     " << cg.iterations() << std::endl;
  * std::cout << "estimated error: " << cg.error()      << std::endl;
  * // update b, and solve again
  * x = cg.solve(b);
  * \endcode
  *
  * By default the iterations start with x=0 as an initial guess of the solution.
  * One can control the start using the solveWithGuess() method.
  * Here is a step by step execution example starting with a random guess and printing the evolution
  * of the estimated error:
  * \code
  * x = VectorXd::Random(n);
  * cg.setMaxIterations(1);
  * int i = 0;
  * do {
  *   x = cg.solveWithGuess(b,x);
  *   std::cout << i << " : " << cg.error() << std::endl;
  *   ++i;
  * } while (cg.info()!=Success && i<100);
  * \endcode
  * Note that such a step by step execution is slightly slower.
  *
  * \sa class SimplicialCholesky, DiagonalPreconditioner, IdentityPreconditioner
  */
template< typename _MatrixType, int _UpLo, typename _Preconditioner>
class ConjugateGradient : public IterativeSolverBase<ConjugateGradient<_MatrixType,_UpLo,_Preconditioner> >
{
  typedef IterativeSolverBase<ConjugateGradient> Base;
  // State inherited from IterativeSolverBase and mutated by the const solve
  // methods below (they are mutable/pointer members in the base class).
  using Base::mp_matrix;
  using Base::m_error;
  using Base::m_iterations;
  using Base::m_info;
  using Base::m_isInitialized;
public:
  typedef _MatrixType MatrixType;
  typedef typename MatrixType::Scalar Scalar;
  typedef typename MatrixType::Index Index;
  typedef typename MatrixType::RealScalar RealScalar;
  typedef _Preconditioner Preconditioner;

  enum {
    UpLo = _UpLo  // which triangular part of A participates in the products
  };

public:

  /** Default constructor. */
  ConjugateGradient() : Base() {}

  /** Initialize the solver with matrix \a A for further \c Ax=b solving.
    *
    * This constructor is a shortcut for the default constructor followed
    * by a call to compute().
    *
    * \warning this class stores a reference to the matrix A as well as some
    * precomputed values that depend on it. Therefore, if \a A is changed
    * this class becomes invalid. Call compute() to update it with the new
    * matrix A, or modify a copy of A.
    */
  ConjugateGradient(const MatrixType& A) : Base(A) {}

  ~ConjugateGradient() {}

  /** \returns the solution x of \f$ A x = b \f$ using the current decomposition of A
    * \a x0 as an initial solution.
    *
    * \sa compute()
    */
  template<typename Rhs,typename Guess>
  inline const internal::solve_retval_with_guess<ConjugateGradient, Rhs, Guess>
  solveWithGuess(const MatrixBase<Rhs>& b, const Guess& x0) const
  {
    eigen_assert(m_isInitialized && "ConjugateGradient is not initialized.");
    eigen_assert(Base::rows()==b.rows()
              && "ConjugateGradient::solve(): invalid number of rows of the right hand side matrix b");
    // Returns a lazy expression; the actual solve runs when it is assigned
    // (evalTo ends up calling _solveWithGuess below).
    return internal::solve_retval_with_guess
            <ConjugateGradient, Rhs, Guess>(*this, b.derived(), x0);
  }

  /** \internal */
  template<typename Rhs,typename Dest>
  void _solveWithGuess(const Rhs& b, Dest& x) const
  {
    m_iterations = Base::maxIterations();
    m_error = Base::m_tolerance;

    // Solve each right-hand-side column independently. m_iterations/m_error
    // are re-initialized per column (the assignments above are redundant with
    // the ones inside the loop), so after the loop they report the stats of
    // the LAST column only.
    for(int j=0; j<b.cols(); ++j)
    {
      m_iterations = Base::maxIterations();
      m_error = Base::m_tolerance;

      typename Dest::ColXpr xj(x,j);
      internal::conjugate_gradient(mp_matrix->template selfadjointView<UpLo>(), b.col(j), xj,
                                   Base::m_preconditioner, m_iterations, m_error);
    }

    m_isInitialized = true;
    m_info = m_error <= Base::m_tolerance ? Success : NoConvergence;
  }

  /** \internal */
  template<typename Rhs,typename Dest>
  void _solve(const Rhs& b, Dest& x) const
  {
    // NOTE(review): the class documentation above says iterations start from
    // x=0, but the guess used here is all-ones — presumably a deliberate
    // workaround (later Eigen versions use setZero here); confirm before
    // relying on either behavior.
    x.setOnes();
    _solveWithGuess(b,x);
  }

protected:

};

namespace internal {

// Glue for the Eigen 3.2 solver API: makes `x = cg.solve(b)` evaluate lazily
// by forwarding evalTo() to ConjugateGradient::_solve().
template<typename _MatrixType, int _UpLo, typename _Preconditioner, typename Rhs>
struct solve_retval<ConjugateGradient<_MatrixType,_UpLo,_Preconditioner>, Rhs>
  : solve_retval_base<ConjugateGradient<_MatrixType,_UpLo,_Preconditioner>, Rhs>
{
  typedef ConjugateGradient<_MatrixType,_UpLo,_Preconditioner> Dec;
  EIGEN_MAKE_SOLVE_HELPERS(Dec,Rhs)

  template<typename Dest> void evalTo(Dest& dst) const
  {
    dec()._solve(rhs(),dst);
  }
};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_CONJUGATE_GRADIENT_H