Actual source code: ex7.c

/*$Id: ex7.c,v 1.58 2001/08/07 21:30:54 bsmith Exp $*/

static char help[] = "Block Jacobi preconditioner for solving a linear system in parallel with KSP.\n\
The code indicates the\n\
procedures for setting the particular block sizes and for using different\n\
linear solvers on the individual blocks.\n\n";

/*
   Note:  This example focuses on ways to customize the block Jacobi
   preconditioner. See ex1.c and ex2.c for more detailed comments on
   the basic usage of KSP (including working with matrices and vectors).

   Recall: The block Jacobi method is equivalent to the ASM preconditioner
   with zero overlap.
 */
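/*
   For example, based on the note above, the following two sets of runtime
   options should behave identically (an illustration only; not exercised
   by this example):
       -pc_type bjacobi
       -pc_type asm -pc_asm_overlap 0
*/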

/*T
   Concepts: KSP^customizing the block Jacobi preconditioner
   Processors: n
T*/

/* 
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscksp.h"

int main(int argc,char **args)
{
  Vec          x,b,u;        /* approx solution, RHS, exact solution */
  Mat          A;            /* linear system matrix */
  KSP          ksp;          /* KSP context */
  KSP          *subksp;      /* array of local KSP contexts on this processor */
  PC           pc;           /* PC context */
  PC           subpc;        /* PC context for subdomain */
  PetscReal    norm;         /* norm of solution error */
  int          i,j,I,J,*blks,m = 8,n;
  int          rank,size,its,nlocal,first,Istart,Iend;
  PetscScalar  v,one = 1.0,none = -1.0;
  PetscTruth   isbjacobi,flg;

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  n = m+2;

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /* 
     Create and assemble parallel matrix
  */
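  /*
     Each global row I corresponds to the grid point (i,j) with i = I/n and
     j = I - i*n; the loop below assembles the standard 5-point stencil
     (4 on the diagonal, -1 for each existing neighbor), with each process
     setting only the rows it owns.
  */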
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetFromOptions(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,ADD_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create parallel vectors
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  VecSet(&one,u);
  MatMult(A,u,b);

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /* 
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);

  /*
     Set the default preconditioner for this program to be block Jacobi.
     This choice can be overridden at runtime with the option
        -pc_type <type>
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCBJACOBI);


  /* -------------------------------------------------------------------
                   Define the problem decomposition
     ------------------------------------------------------------------- */

  /* 
     Call PCBJacobiSetTotalBlocks() to set the size of each block in the
     preconditioner individually.  This could also be done with the
     runtime option
         -pc_bjacobi_blocks <blocks>
     Also, see PCBJacobiSetLocalBlocks() to set the blocks for each
     processor.

      Note: The default decomposition is 1 block per processor.
  */
  PetscMalloc(m*sizeof(int),&blks);
  for (i=0; i<m; i++) blks[i] = n;
  PCBJacobiSetTotalBlocks(pc,m,blks);
  PetscFree(blks);
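  /*
     Here m blocks of size n are requested, one block per grid row of the
     m x n problem.  Since all m blocks have the same size, the runtime
     option
         -pc_bjacobi_blocks 8
     should give an equivalent decomposition for the default m = 8 (an
     illustration only; not exercised by this example).
  */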


  /* -------------------------------------------------------------------
               Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
       Basic method, which should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 

     By default, the block Jacobi method uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     prepend the prefix -sub_ to the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
  */
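  /*
     For example (an illustration only; not exercised by this example):
         mpirun -np 4 ./ex7 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
     runs ILU-preconditioned GMRES with a relative tolerance of 1.e-4 on
     every block.
  */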

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block. The following section of code is intended
     to be a simple illustration of setting different linear solvers for
     the individual blocks.  These choices are obviously not recommended
     for solving this particular problem.
  */
  PetscTypeCompare((PetscObject)pc,PCBJACOBI,&isbjacobi);
  if (isbjacobi) {
    /* 
       Call KSPSetUp() to set up the block Jacobi data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().
    */
    KSPSetRhs(ksp,b);
    KSPSetSolution(ksp,x);
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.  
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      if (!rank) {
        if (i%2) {
          PCSetType(subpc,PCILU);
        } else {
          PCSetType(subpc,PCNONE);
          KSPSetType(subksp[i],KSPBCGS);
          KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
        }
      } else {
        PCSetType(subpc,PCJACOBI);
        KSPSetType(subksp[i],KSPGMRES);
        KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
      }
    }
  }
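  /*
     With the settings above: on process 0, odd-numbered local blocks use
     ILU with the default KSP, while even-numbered local blocks use
     unpreconditioned BiCGStab with rtol 1.e-6; on all other processes,
     every block uses Jacobi-preconditioned GMRES with rtol 1.e-7.
  */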

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  /* 
    Set runtime options
  */
  KSPSetFromOptions(ksp);

  /*
     Solve the linear system
  */
  KSPSetRhs(ksp,b);
  KSPSetSolution(ksp,x);
  KSPSolve(ksp);

  /*
     View info about the solver
  */
  PetscOptionsHasName(PETSC_NULL,"-nokspview",&flg);
  if (!flg) {
    KSPView(ksp,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* -------------------------------------------------------------------
                      Check solution and clean up
     ------------------------------------------------------------------- */

  /*
     Check the error
  */
  VecAXPY(&none,u,x);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %d\n",norm,its);

  /* 
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(ksp);
  VecDestroy(u);  VecDestroy(x);
  VecDestroy(b);  MatDestroy(A);
  PetscFinalize();
  return 0;
}
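/*
   Example invocation (an illustration only; the MPI launcher varies by
   system):
       mpirun -np 2 ./ex7 -ksp_monitor -nokspview
   This solves the default 8 x 10 grid problem, printing the residual
   history and suppressing the final KSPView() output.
*/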