Actual source code: ex8g.c

static char help[] = "Illustrates use of the preconditioner GASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -M:                           Number of mesh points in the x direction\n\
  -N:                           Number of mesh points in the y direction\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:         Use the user-provided subdomain partitioning routine\n\
With -user_set_subdomains on, the following options are meaningful:\n\
  -Mdomains:                    Number of subdomains in the x direction\n\
  -Ndomains:                    Number of subdomains in the y direction\n\
  -overlap:                     Size of domain overlap in terms of the number of mesh lines in x and y\n\
Generally useful options:\n\
  -pc_gasm_print_subdomains:    Print the index sets defining the subdomains\n\
\n";

/*
   Note:  This example focuses on setting the subdomains for the GASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The GASM preconditioner is fully parallel.  The routine
   PCGASMCreateSubdomains2D that computes the domain decomposition is also
   parallel and can generate subdomains that straddle processors as well as
   multiple subdomains per processor.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the GASM preconditioner.
*/
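
/*
   Example runs (a sketch only: the executable name ex8g and the mpiexec
   launcher are assumptions, and exact output depends on the PETSc version;
   every option shown is read either by this program or, as described in
   the comments below, by the GASM preconditioner):

       mpiexec -n 2 ./ex8g -sub_pc_type ilu -sub_ksp_type gmres
       mpiexec -n 2 ./ex8g -user_set_subdomains -Mdomains 2 -Ndomains 2 -overlap 1 -pc_gasm_print_subdomains
       ./ex8g -user_set_subdomain_solvers -print_error
*/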

/*T
   Concepts: KSP^Additive Schwarz Method (GASM) with user-defined subdomains
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets              petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                 petscpc.h  - preconditioners
*/
#include <petscksp.h>

int main(int argc,char **args)
{
  Vec            x,b,u;                  /* approx solution, RHS, exact solution */
  Mat            A;                      /* linear system matrix */
  KSP            ksp;                    /* linear solver context */
  PC             pc;                     /* PC context */
  IS             *is,*is_local;          /* array of index sets that define the subdomains */
  PetscInt       overlap = 1;            /* width of subdomain overlap */
  PetscInt       Nsub;                   /* number of subdomains */
  PetscInt       m = 15,n = 17;          /* mesh dimensions in x- and y-directions */
  PetscInt       M = 2,N = 1;            /* number of subdomains in x- and y-directions */
  PetscInt       i,j,Ii,J,Istart,Iend;
  PetscMPIInt    size;
  PetscBool      flg;
  PetscBool      user_set_subdomains = PETSC_FALSE;
  PetscScalar    v, one = 1.0;
  PetscReal      e;

  PetscInitialize(&argc,&args,(char *)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(PETSC_NULL,"-M",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-N",&n,PETSC_NULL);
  PetscOptionsGetBool(PETSC_NULL,"-user_set_subdomains",&user_set_subdomains,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-Mdomains",&M,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-Ndomains",&N,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-overlap",&overlap,PETSC_NULL);

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five-point stencil, YET AGAIN
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create and set vectors
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  VecSet(u,one);
  MatMult(A,u,b);

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);

  /*
     Set the default preconditioner for this program to be GASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCGASM);

  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition, via
         PCGASMSetOverlap(pc,overlap);
     Could instead use the option -pc_gasm_overlap <ovl>

     Set the total number of blocks via -pc_gasm_blocks <blks>
     Note:  The GASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks, as in the example below.
  */
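
  /*
     For example, to experiment on a single processor with four blocks and
     an overlap of two mesh lines (a sketch: these option names are taken
     from the comment above, and their availability depends on the PETSc
     version):

         ./ex8g -pc_gasm_blocks 4 -pc_gasm_overlap 2 -ksp_monitor
  */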

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     First, create index sets that define the subdomains.  The utility
     routine PCGASMCreateSubdomains2D() is a simple example, which partitions
     the 2D grid into MxN subdomains with an optional overlap.
     More generally, the user should write a custom routine for a particular
     problem geometry.

     Then call PCGASMSetLocalSubdomains() with the resulting index sets
     to set the subdomains for the GASM preconditioner.
  */

  if (!user_set_subdomains) { /* basic version */
    PCGASMSetOverlap(pc,overlap);
  } else { /* advanced version */
    PCGASMCreateSubdomains2D(pc,m,n,M,N,1,overlap,&Nsub,&is,&is_local);
    PCGASMSetLocalSubdomains(pc,Nsub,is,is_local);
    PCView(pc,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the GASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub_ before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCGASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the GASM method with zero overlap).
  */

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-user_set_subdomain_solvers",&flg,PETSC_NULL);
  if (flg) {
    KSP        *subksp;       /* array of KSP contexts for local subblocks */
    PetscInt   nlocal,first;  /* number of local subblocks, first local subblock */
    PC         subpc;         /* PC context for subblock */
    PetscBool  isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if the PC type was changed via the runtime options
    */
    PetscTypeCompare((PetscObject)pc,PCGASM,&isasm);
    if (!isasm) SETERRQ(PETSC_COMM_WORLD,1,"Cannot change the PCTYPE when manually changing the subdomain solver settings");

    /*
       Call KSPSetUp() to set up the GASM data structures (including
       the creation of an internal KSP context for each subdomain).

       Note: KSPSetUp() MUST be called before PCGASMGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCGASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSolve(ksp,b,x);

  /* -------------------------------------------------------------------
                  Compare result to the exact solution
     ------------------------------------------------------------------- */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_INFINITY,&e);

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-print_error",&flg,PETSC_NULL);
  if (flg) {
    PetscPrintf(PETSC_COMM_WORLD,"Infinity norm of the error: %G\n",e);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  if (user_set_subdomains) {
    for (i=0; i<Nsub; i++) {
      ISDestroy(&is[i]);
      ISDestroy(&is_local[i]);
    }
    PetscFree(is);
    PetscFree(is_local);
  }
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}
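
/*
   Build sketch (assumes a standard PETSc installation with PETSC_DIR and
   PETSC_ARCH set and the usual PETSc example makefile; the target name
   ex8g is an assumption):

       make ex8g
       mpiexec -n 2 ./ex8g -ksp_monitor
*/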