function pcgdemo1(A)
%PCGDEMO1 simple demonstration comparing SD, CG, and PCG
%
%   A simple demonstration comparing the convergence of
%   four iterative methods: steepest descents, conjugate gradients,
%   diagonally preconditioned conjugate gradients, and conjugate
%   gradients preconditioned with incomplete Cholesky factorization.
%
%   The argument, if given, should be a sparse SPD matrix (e.g.,
%   gallery('poisson',50) ).  If the first argument is omitted,
%   a previously computed finite element stiffness matrix is used
%   (loaded from StiffnessMatrix.mat, which must define A).
%
%   Douglas N. Arnold, 2009-10-01

if nargin == 0
    load StiffnessMatrix.mat          % defines the matrix A
end
n = length(A);
b = ones(n,1);
clf

% number of iterations
niter = 249;

%%% Steepest descent
ressd = runsd(A, b, niter);

%%% Conjugate gradients (identity preconditioner reproduces plain CG exactly)
rescg = runpcg(A, b, niter, @(r) r);

%%% Preconditioned conjugate gradients, diagonal (Jacobi) preconditioner
M = diag(diag(A));
respcgd = runpcg(A, b, niter, @(r) M\r);

%%% Preconditioned conjugate gradients, incomplete Cholesky preconditioner
if exist('ichol','file')
    % ichol replaces cholinc(A,'0'), which was removed in R2013a.
    % ichol returns lower triangular L with A ~ L*L' (no fill-in),
    % so L is the transpose of cholinc's upper triangular factor.
    L = ichol(A);
    icsolve = @(r) L'\(L\r);
else
    % fallback for MATLAB releases that predate ichol
    U = cholinc(A,'0');               % A ~ U'*U, U upper triangular
    icsolve = @(r) U\(U'\r);
end
respcgic = runpcg(A, b, niter, icsolve);

% plot norms of residuals: linear convergence means a straight line
p = semilogy(ressd,'r');
set(p,'color',[.75,.25,.75],'linewidth',2,'marker','o')
hold on
p = semilogy(rescg);
set(p,'color',[.75,.25,.25],'linewidth',2,'marker','o')
p = semilogy(respcgd);
set(p,'color',[.25,.75,.25],'linewidth',2,'marker','o')
p = semilogy(respcgic,'m');
set(p,'color',[.25,.25,.75],'linewidth',2,'marker','o')
grid on
set(gca,'fontsize',20)
xlabel('iterations')
ylabel('norm of residual')
legend('SD','CG','CG (diag)','CG (IC)','Location','SouthWest')

% compute observed rates of convergence by linear fit of log of norm of residuals
lfit = polyfit(1:niter+1, log(ressd)', 1);
ratesd = exp(lfit(1));
lfit = polyfit(1:niter+1, log(rescg)', 1);
ratecg = exp(lfit(1));
lfit = polyfit(1:niter+1, log(respcgd)', 1);
ratepcgd = exp(lfit(1));
lfit = polyfit(1:niter+1, log(respcgic)', 1);
ratepcgic = exp(lfit(1));
fprintf('Observed rates of linear convergence\n')
fprintf('SD: %6.4f\nCG: %6.5f\nPCG (diag): %6.4f\nPCG (IC): %6.4f\n',...
    ratesd,ratecg,ratepcgd,ratepcgic)
end

function res = runsd(A, b, niter)
%RUNSD steepest descent from a zero initial guess; returns the 2-norm
%   of the residual at each of iterations 0..niter as a column vector.
x = zeros(size(b));
res = zeros(niter+1,1);
r = b - A*x;
r2 = r'*r;
res(1) = sqrt(r2);
for i = 1:niter
    p = A*r;
    lambda = r2/(r'*p);   % exact line search along the residual direction
    x = x + lambda*r;
    r = r - lambda*p;
    r2 = r'*r;
    res(i+1) = sqrt(r2);
end
end

function res = runpcg(A, b, niter, prec)
%RUNPCG preconditioned conjugate gradients from a zero initial guess.
%   prec is a function handle applying the inverse of the preconditioner
%   (prec = @(r) r gives unpreconditioned CG).  Returns the preconditioned
%   residual norm sqrt(r'*prec(r)) at each of iterations 0..niter.
x = zeros(size(b));
res = zeros(niter+1,1);
r = b - A*x;
z = prec(r);
s = z;                    % first search direction: preconditioned residual
r2 = r'*z;
res(1) = sqrt(r2);
for i = 1:niter
    t = A*s;
    lambda = r2/(s'*t);   % step length minimizing the A-norm of the error
    x = x + lambda*s;
    r = r - lambda*t;
    r2old = r2;
    z = prec(r);
    r2 = r'*z;
    s = z + (r2/r2old)*s; % A-conjugate update of the search direction
    res(i+1) = sqrt(r2);
end
end