// /home/couturie/work/petsc-3.5.1_old/arch-linux2-c-debug/bin/mpirun -np 4 -machinefile archi ./ex15 -m 1000 -n 1000 -ksp_type fgmres -pc_type mg
// /home/couturie/work/petsc-3.5.1/arch-linux2-c-debug/bin/mpirun -np 4 ./ex15 -m 400 -n 400 -ksp_type fgmres
static char help[] = "Solves a linear system in parallel with KSP. Also\n\
illustrates setting a user-defined shell preconditioner and using the\n\
macro __FUNCT__ to define routine names for use in error handling.\n\
Input parameters include:\n\
  -user_defined_pc : Activate a user-defined preconditioner\n\n";
   Concepts: KSP^basic parallel example
   Concepts: PC^setting a user-defined shell preconditioner
   Concepts: error handling^Using the macro __FUNCT__ to define routine names;
   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines   petscvec.h - vectors
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners

/* Define context for user-provided preconditioner */
   User-defined routines.  Note that immediately before each routine below,
   we define the macro __FUNCT__ to be a string containing the routine name.
   If defined, this macro is used in the PETSc error handlers to provide a
   complete traceback of routine names.  All PETSc library routines use this
   macro, and users can optionally employ it as well in their application
   codes.  Note that users can get a traceback of PETSc errors regardless of
   whether they define __FUNCT__ in application codes; this macro merely
   provides the added traceback detail of the application routine names.
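
   For example, the usual PETSc convention (shown here only as an
   illustration; the routine name is one of those defined further below) is
   to undefine and redefine the macro immediately before each routine:

      #undef __FUNCT__
      #define __FUNCT__ "KrylovMinimize"

   so that an error raised inside that routine is reported under its own name
   in the traceback.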
#define __FUNCT__ "main"

int KrylovMinimize(Mat A, Vec b, Vec x) {

  PetscScalar  gamma, alpha, oldgamma, beta;
  PetscReal    norm=20, Eprecision=1e-3, cgprec=1e-40;
  PetscInt     giter=0, ColS=12, col=0, Emaxiter=50000000, iter=0, iterations=15, Iiter=0;
  Vec          Alpha, p, ss, vect, r, q, Ax;
  ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
  ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);

  ierr = PetscPrintf(PETSC_COMM_WORLD, "Size of vector %D\n", size); CHKERRQ(ierr);

  MatGetOwnershipRange(A,&aa,&bb);

  //  ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%D %D\n", aa,bb); CHKERRQ(ierr);
  //  PetscSynchronizedFlush(PETSC_COMM_WORLD);
  ierr = MatCreate(PETSC_COMM_WORLD, &S);CHKERRQ(ierr);
  ierr = MatSetSizes(S, bb-aa, PETSC_DECIDE, size, ColS); CHKERRQ(ierr);
  ierr = MatSetType(S, MATMPIDENSE); CHKERRQ(ierr);
  ierr = MatSetUp(S); CHKERRQ(ierr);

  ierr = MatGetOwnershipRange(S, &Istart, &Iend); CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_WORLD, &Alpha); CHKERRQ(ierr);
  ierr = VecSetSizes(Alpha, PETSC_DECIDE, ColS); CHKERRQ(ierr);
  ierr = VecSetFromOptions(Alpha); CHKERRQ(ierr);
  ierr = VecDuplicate(Alpha, &vect); CHKERRQ(ierr);
  ierr = VecDuplicate(Alpha, &p); CHKERRQ(ierr);
  ierr = VecDuplicate(Alpha, &ss); CHKERRQ(ierr);
  ierr = VecDuplicate(b, &r); CHKERRQ(ierr);
  ierr = VecDuplicate(b, &q); CHKERRQ(ierr);
  ierr = VecDuplicate(b, &Ax); CHKERRQ(ierr);

  ierr = VecDuplicate(b,&x_old);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&residu);CHKERRQ(ierr);
  // global indices of the rows owned by this process
  ind_row = (PetscInt*)malloc(sizeof(PetscInt)*(Iend-Istart));
  for(i=0; i<Iend-Istart; i++) ind_row[i] = i + Istart;
  //  ierr = KSPGMRESSetRestart(ksp, 16); CHKERRQ(ierr);
  ierr = KSPSetTolerances(ksp, 1e-13, 1e-13, PETSC_DEFAULT, 30); CHKERRQ(ierr);
  ierr = KSPSetInitialGuessNonzero(ksp, PETSC_TRUE); CHKERRQ(ierr);

  //GMRES WITH MINIMIZATION
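  /*
     Sketch of the scheme coded below: each pass of the inner loop performs
     one (restarted) KSP solve and stores the resulting iterate x as one
     column of the dense matrix S.  Once ColS columns have been collected (or
     the outer tolerance Eprecision is reached), the residual
     ||b - A*S*alpha||_2 is minimized over alpha by a least-squares solve,
     and x = S*alpha becomes the initial guess of the next outer iteration.
  */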
  ierr = KSPSetUp(ksp); CHKERRQ(ierr);
  while(giter<Emaxiter && norm>Eprecision ){
    for(col=0; col<ColS && norm>Eprecision; col++){

      ierr = KSPSolve(ksp, b, x); CHKERRQ(ierr);

      ierr = KSPGetIterationNumber(ksp, &its); CHKERRQ(ierr);
      total += its; Iiter ++;

      ierr = VecGetArray(x, &array);
      ierr = MatSetValues(S, Iend-Istart, ind_row, 1, &col, array, INSERT_VALUES);
      VecRestoreArray(x, &array);

      KSPGetResidualNorm(ksp,&norm);

      ierr = VecCopy(x, residu); CHKERRQ(ierr);
      ierr = VecAXPY(residu, -1, x_old); CHKERRQ(ierr);
      ierr = VecNorm(residu, NORM_INFINITY, &norm); CHKERRQ(ierr);

      ierr = PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g, outer iteration %D\n", norm, giter); CHKERRQ(ierr);
      ierr = VecCopy(x, x_old); CHKERRQ(ierr);
    if( norm>Eprecision) {

      MatAssemblyBegin(S, MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(S, MAT_FINAL_ASSEMBLY);

      MatMatMult(A,S, MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AS);

      MatMatMult(A,S, MAT_REUSE_MATRIX,PETSC_DEFAULT,&AS);

      //Minimization with CGLS
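      /*
         CGLS solves the small least-squares problem
             min_alpha || b - (A*S) * alpha ||_2
         by applying conjugate gradients to the normal equations
             (A*S)^T (A*S) alpha = (A*S)^T b
         without forming (A*S)^T (A*S): each iteration below needs one
         product with AS and one with its transpose.
      */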
      MatMult(AS, Alpha, r);  VecAYPX(r, -1, b); //r_0 = b-AS*x_0

      MatMultTranspose(AS, r, p); //p_0 = AS'*r_0

      ierr = VecCopy(p, ss); CHKERRQ(ierr); //s_0 = p_0
      VecNorm(ss, NORM_2, &gamma); gamma = gamma * gamma; //gamma = norm2(s)^2
      while(gamma>cgprec && iter<iterations){
        MatMult(AS, p, q); //q = AS*p
        VecNorm(q, NORM_2, &alpha); alpha = alpha * alpha; //alpha = norm2(q)^2
        alpha = gamma / alpha;
        VecAXPY(Alpha, alpha, p); //Alpha += alpha*p
        VecAXPY(r, -alpha, q); //r -= alpha*q
        MatMultTranspose(AS, r, ss); // ss = AS'*r
        oldgamma = gamma;
        VecNorm(ss, NORM_2, &gamma); gamma = gamma * gamma; //gamma = norm2(s)^2
        beta = gamma / oldgamma;
        VecAYPX(p, beta, ss); //p = s+beta*p;

      MatMult(S, Alpha, x); //x = S*Alpha;
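      /* The minimized combination x = S*Alpha becomes the initial guess of
         the next outer iteration (KSPSetInitialGuessNonzero was set above). */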
  ierr = PetscPrintf(PETSC_COMM_WORLD, "\t\t\t -- Execution time : %g (s)\n", T2-T1); CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD, "\t\t\t -- Total number of iterations : %D\n", total); CHKERRQ(ierr);
int KrylovMinimizeLSQR(Mat A, Vec b, Vec x) {

  PetscScalar  alpha, beta;
  PetscReal    norm=20, Eprecision=1e-3, tol=1e-40;
  PetscInt     giter=0, ColS=12, col=0, Emaxiter=50000000, iter=0, iterations=20, Iiter=0;
  PetscInt     Istart,Iend;
  PetscScalar  norm_ksp;
  Vec          u,v,d,uu,vt,zero_long,zero_short,x_lsqr;
  ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
  ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);

  ierr = PetscPrintf(PETSC_COMM_WORLD, "Size of vector %D\n", size); CHKERRQ(ierr);

  MatGetOwnershipRange(A,&aa,&bb);

  //  ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%D %D\n", aa,bb); CHKERRQ(ierr);
  //  PetscSynchronizedFlush(PETSC_COMM_WORLD);
  ierr = MatCreate(PETSC_COMM_WORLD, &S);CHKERRQ(ierr);
  ierr = MatSetSizes(S, bb-aa, PETSC_DECIDE, size, ColS); CHKERRQ(ierr);
  ierr = MatSetType(S, MATMPIDENSE); CHKERRQ(ierr);
  ierr = MatSetUp(S); CHKERRQ(ierr);

  ierr = MatGetOwnershipRange(S, &Istart, &Iend); CHKERRQ(ierr);
  ierr = VecDuplicate(b, &Ax); CHKERRQ(ierr);

  ierr = VecDuplicate(b,&x_old);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&residu);CHKERRQ(ierr);

  ierr = VecDuplicate(b,&u);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&uu);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&zero_long);CHKERRQ(ierr);
  ierr = VecSet(zero_long,0);CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_WORLD, &v); CHKERRQ(ierr);
  ierr = VecSetSizes(v, PETSC_DECIDE, ColS); CHKERRQ(ierr);
  ierr = VecSetFromOptions(v); CHKERRQ(ierr);
  ierr = VecDuplicate(v,&zero_short);CHKERRQ(ierr);
  ierr = VecSet(zero_short,0);CHKERRQ(ierr);
  ierr = VecDuplicate(v,&d);CHKERRQ(ierr);
  ierr = VecDuplicate(v,&vt);CHKERRQ(ierr);
  ierr = VecDuplicate(v,&x_lsqr);CHKERRQ(ierr);
  // global indices of the rows owned by this process
  ind_row = (PetscInt*)malloc(sizeof(PetscInt)*(Iend-Istart));
  for(i=0; i<Iend-Istart; i++) ind_row[i] = i + Istart;
  //  ierr = KSPGMRESSetRestart(ksp, 16); CHKERRQ(ierr);
  ierr = KSPSetTolerances(ksp, 1e-13, 1e-13, PETSC_DEFAULT, 30); CHKERRQ(ierr);
  ierr = KSPSetInitialGuessNonzero(ksp, PETSC_TRUE); CHKERRQ(ierr);

  //GMRES WITH MINIMIZATION
  ierr = KSPSetUp(ksp); CHKERRQ(ierr);
  while(giter<Emaxiter && norm>Eprecision ){
    for(col=0; col<ColS && norm>Eprecision; col++){

      ierr = KSPSolve(ksp, b, x); CHKERRQ(ierr);

      ierr = KSPGetIterationNumber(ksp, &its); CHKERRQ(ierr);
      total += its; Iiter ++;

      ierr = VecGetArray(x, &array);
      ierr = MatSetValues(S, Iend-Istart, ind_row, 1, &col, array, INSERT_VALUES);
      VecRestoreArray(x, &array);

      KSPGetResidualNorm(ksp,&norm);

      ierr = VecCopy(x, residu); CHKERRQ(ierr);
      ierr = VecAXPY(residu, -1, x_old); CHKERRQ(ierr);
      ierr = VecNorm(residu, NORM_INFINITY, &norm); CHKERRQ(ierr);

      ierr = PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g, outer iteration %D\n", norm, giter); CHKERRQ(ierr);
      ierr = VecCopy(x, x_old); CHKERRQ(ierr);
    if( norm>Eprecision) {

      MatAssemblyBegin(S, MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(S, MAT_FINAL_ASSEMBLY);

      MatMatMult(A,S, MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AS);

      MatMatMult(A,S, MAT_REUSE_MATRIX,PETSC_DEFAULT,&AS);
      PetscScalar n2b,tolb,normr,c,s,phibar,normar,norma,thet,rhot,rho,phi;

      VecNorm(b, NORM_2, &n2b);              //n2b = norm(b);
      ierr = VecCopy(b, u); CHKERRQ(ierr);   //u=b
      VecNorm(u, NORM_2, &beta);             // beta=norm(u)
      VecAYPX(u,1/beta,zero_long);           // u = u / beta;

      MatMultTranspose(AS, u, v);            //v=A'*u
      ierr = VecSet(x_lsqr,0);CHKERRQ(ierr);
      VecNorm(v, NORM_2, &alpha);            // alpha=norm(v)
      VecAYPX(v,1/alpha,zero_short);         // v = v / alpha;

      ierr = VecSet(d,0);CHKERRQ(ierr);
      normar = alpha * beta;
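      /*
         Main LSQR loop (a condensed transcription of the usual LSQR
         recurrence): each pass extends the Golub-Kahan bidiagonalization of
         AS (vectors u and v), applies a Givens rotation (c, s) to the
         bidiagonal factor, and updates the search direction d and the
         reduced solution x_lsqr; normr and normar track ||b - AS*x|| and
         ||AS'*(b - AS*x)|| for the stopping tests below.
      */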
      for(i=0;i<iterations;i++) {
        MatMult(AS, v, uu);                  //uu=A*v
        VecAYPX(u, -alpha, uu);              //u = uu-alpha*u;
        VecNorm(u, NORM_2, &beta);           // beta=norm(u)
        VecAYPX(u,1/beta,zero_long);         // u = u / beta;
        norma=sqrt(norma*norma+alpha*alpha+beta*beta); // norma = norm([norma alpha beta]);

        rho = sqrt(rhot*rhot + beta*beta);

        if (phi == 0) { // stagnation of the method

        VecAYPX(d,-thet,v);                  //d = (v - thet * d);
        VecAYPX(d,1/rho,zero_short);         //d = d / rho;

        if (normar/(norma*normr) <= tol) { // check for convergence in min{|b-A*x|}

        if (normr <= tolb) { // check for convergence in A*x=b

        VecAXPY(x_lsqr,phi,d);               // x_lsqr = x_lsqr + phi*d
        normr = PetscAbsScalar(s) * normr;
        MatMultTranspose(AS, u, vt);         //vt=A'*u;
        VecAYPX(v,-beta,vt);                 // v = vt - beta * v;
        VecNorm(v, NORM_2, &alpha);          // alpha=norm(v)
        VecAYPX(v,1/alpha,zero_short);       // v = v / alpha;
        normar = alpha * PetscAbsScalar( s * phi);
      MatMult(S, x_lsqr, x); //x = S*x_small;

  ierr = PetscPrintf(PETSC_COMM_WORLD, "\t\t\t -- Execution time LSQR : %g (s)\n", T2-T1); CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD, "\t\t\t -- Total number of iterations LSQR : %D\n", total); CHKERRQ(ierr);
int main(int argc,char **args)
{
  Vec            x,b,u;   /* approx solution, RHS, exact solution */
  Mat            A;       /* linear system matrix */
  KSP            ksp;     /* linear solver context */
  PC             pc;      /* preconditioner context */
  PetscReal      norm;    /* norm of solution error */
  PetscScalar    v,one = 1.0;
  PetscInt       i,j,Ii,J,Istart,Iend,m = 8,n = 7,its;
  PetscInitialize(&argc,&args,(char*)0,help);
  ierr = PetscOptionsGetInt(NULL,"-m",&m,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,"-n",&n,NULL);CHKERRQ(ierr);

  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscPrintf(PETSC_COMM_WORLD,"Number of processors = %d\n",size);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
  ierr = MatSetUp(A);CHKERRQ(ierr);

     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.

  ierr = MatGetOwnershipRange(A,&Istart,&Iend);CHKERRQ(ierr);
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
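      - For a grid point in row i and column j (0 <= i < m, 0 <= j < n), the
        corresponding global matrix row is Ii = i*n + j; its stencil
        neighbours are rows Ii-n and Ii+n (vertical) and Ii-1 and Ii+1
        (horizontal), which is exactly the index arithmetic used in the loop
        below.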
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    if (i<m-1) {J = Ii + n; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    if (j>0)   {J = Ii - 1; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    if (j<n-1) {J = Ii + 1; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    v = 4.0; ierr = MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);CHKERRQ(ierr);
  }
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.

  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.

  ierr = VecCreate(PETSC_COMM_WORLD,&u);CHKERRQ(ierr);
  ierr = VecSetSizes(u,PETSC_DECIDE,m*n);CHKERRQ(ierr);
  ierr = VecSetFromOptions(u);CHKERRQ(ierr);
  ierr = VecDuplicate(u,&b);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&x);CHKERRQ(ierr);
     Set exact solution; then compute right-hand-side vector.

  ierr = VecSet(u,one);CHKERRQ(ierr);
  ierr = MatMult(A,u,b);CHKERRQ(ierr);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

     Create linear solver context

  ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);

     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.

  ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);

     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines
       to set various options.

  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
  ierr = KSPSetTolerances(ksp,4e-6,4e-6,PETSC_DEFAULT,500000);CHKERRQ(ierr);

  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);

  PCGetType(pc, &type);
  PetscPrintf(PETSC_COMM_WORLD, "PC TYPE %s \n", type);
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization routines.

  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* PetscScalar T1,T2; */
  /* T1 = MPI_Wtime(); */
  /* ierr = KSPSetUp(ksp); CHKERRQ(ierr); */
  /* ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr); */
  /* T2 = MPI_Wtime(); */

  /* /\* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*    Check solution and clean up */
  /*    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - *\/ */

  /* Check the error */

  /* VecDuplicate(b,&sol); */
  /* MatMult(A,x,sol); */
  /* VecAXPY(sol,-1,b); */
  /* VecNorm(sol, NORM_2, &norm); */
  /* ierr = KSPGetIterationNumber(ksp,&its);CHKERRQ(ierr); */
  /* ierr = PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);CHKERRQ(ierr); */
  /* ierr = PetscPrintf(PETSC_COMM_WORLD, "\t\t\t -- Execution time : %g (s)\n\n\n", T2-T1); CHKERRQ(ierr); */
  // reference solve: plain KSP (GMRES) used to check the error

  VecDuplicate(b,&sol);

  PetscReal norm=100;

  ierr = KSPSetTolerances(ksp,1e-10,1e-10,PETSC_DEFAULT,30);CHKERRQ(ierr);
  ierr = KSPSetInitialGuessNonzero(ksp, PETSC_TRUE); CHKERRQ(ierr);

  ierr = KSPSolve(ksp,b,x2);CHKERRQ(ierr);
  KSPGetResidualNorm(ksp,&norm);
  ierr = KSPGetIterationNumber(ksp, &its); CHKERRQ(ierr);

  ierr = PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g\n", norm); CHKERRQ(ierr);

  VecNorm(sol, NORM_2, &norm);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"Computed norm of error %g iterations %D\n",(double)norm,total);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD, "\t\t\t -- Execution time NORMAL GMRES : %g (s)\n\n\n", T2-T1); CHKERRQ(ierr);
  VecDuplicate(b,&sol);

  KrylovMinimize(A, b, x2);

  VecNorm(sol, NORM_2, &norm);
  ierr = PetscPrintf(PETSC_COMM_WORLD, "\t\t\t -- Error Krylov Minimization %g\n",norm);

  VecDuplicate(b,&sol);

  KrylovMinimizeLSQR(A, b, x2);

  VecNorm(sol, NORM_2, &norm);
  ierr = PetscPrintf(PETSC_COMM_WORLD, "\t\t\t -- Error Krylov Minimization LSQR %g\n",norm);
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.

  ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
  ierr = VecDestroy(&u);CHKERRQ(ierr); ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr); ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = PetscFinalize();