Actual source code: ex5.c
static char help[] = "Bratu nonlinear PDE in 2d.\n\
We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DMDAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\n";

/*T
   Concepts: SNES^parallel Bratu example
   Concepts: DMDA^using distributed arrays
   Concepts: IS coloring types
   Processors: n
T*/

/* ------------------------------------------------------------------------

    Solid Fuel Ignition (SFI) problem.  This problem is modeled by
    the partial differential equation

            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a nonlinear
    system of equations.

    Program usage:  mpiexec -n <procs> ex5 [-help] [all PETSc options]

    This example shows how geometric multigrid can be run transparently with a nonlinear solver so long
    as SNESSetDM() is provided.  Example usage

      ./ex5 -pc_type mg -ksp_monitor -snes_view -pc_mg_levels 3 -pc_mg_galerkin -da_grid_x 17 -da_grid_y 17
            -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full

    or to run with grid sequencing on the nonlinear problem (note that you do not need to provide the number of
    multigrid levels, it will be determined automatically based on the number of refinements done)

      ./ex5 -pc_type mg -ksp_monitor -snes_view -pc_mg_galerkin -snes_grid_sequence 3
            -mg_levels_ksp_monitor -snes_monitor -mg_levels_pc_type sor -pc_mg_type full

  ------------------------------------------------------------------------- */
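
/*
   For reference, the discrete system solved here: with hx = 1/(Mx-1) and hy = 1/(My-1),
   the 5-point stencil residual assembled in FormFunctionLocal() below is, at interior
   grid points,

      F(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
             + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
             - lambda*hx*hy*exp(u(i,j)) = 0,

   and at boundary points F(i,j) = u(i,j), which enforces the zero Dirichlet condition.
*/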

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscsnes.h" so that we can use SNES solvers.  Note that
   petscsnes.h automatically includes the lower-level PETSc headers it
   needs (petscsys.h, petscvec.h, petscmat.h, petscksp.h).
*/
#include <petscdmda.h>
#include <petscsnes.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobianLocal() and
   FormFunctionLocal().
*/
typedef struct {
  PassiveReal param;          /* test problem parameter */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode FormInitialGuess(DM,AppCtx*,Vec);
extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,AppCtx*);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
extern PetscErrorCode FormFunctionMatlab(SNES,Vec,Vec,void*);
#endif

int main(int argc,char **argv)
{
  SNES      snes;                         /* nonlinear solver */
  Vec       x;                            /* solution vector */
  AppCtx    user;                         /* user-defined work context */
  PetscInt  its;                          /* iterations for convergence */
  PetscReal bratu_lambda_max = 6.81,bratu_lambda_min = 0.;
  PetscBool flg = PETSC_FALSE;
  DM        da;
  PetscBool matlab_function = PETSC_FALSE;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscInitialize(&argc,&argv,(char*)0,help);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  user.param = 6.0;
  PetscOptionsGetReal(PETSC_NULL,"-par",&user.param,PETSC_NULL);
  if (user.param >= bratu_lambda_max || user.param <= bratu_lambda_min) SETERRQ3(PETSC_COMM_SELF,1,"Lambda, %g, is out of range, [%g, %g]",user.param,bratu_lambda_min,bratu_lambda_max);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESCreate(PETSC_COMM_WORLD,&snes);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDACreate2d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,-4,-4,PETSC_DECIDE,PETSC_DECIDE,1,1,PETSC_NULL,PETSC_NULL,&da);
  DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,1.0);
  DMSetApplicationContext(da,&user);
  SNESSetDM(snes,da);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da,&x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set local function evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDASetLocalFunction(da,(DMDALocalFunction1)FormFunctionLocal);

  /* Set the analytic local Jacobian unless -fd is given, in which case SNES
     will approximate the Jacobian by finite differences */
  PetscOptionsGetBool(PETSC_NULL,"-fd",&flg,PETSC_NULL);
  if (!flg) {
    DMDASetLocalJacobian(da,(DMDALocalFunction1)FormJacobianLocal);
  }

  /* Decide which FormFunction to use */
  PetscOptionsGetBool(PETSC_NULL,"-matlab_function",&matlab_function,PETSC_NULL);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  Vec r = PETSC_NULL;
  if (matlab_function) {
    VecDuplicate(x,&r);
    SNESSetFunction(snes,r,FormFunctionMatlab,&user);
  }
#endif

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSetFromOptions(snes);

  FormInitialGuess(da,&user,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSolve(snes,PETSC_NULL,x);
  SNESGetIterationNumber(snes,&its);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Report the number of nonlinear iterations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscPrintf(PETSC_COMM_WORLD,"Number of SNES iterations = %D\n",its);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  if (r) {VecDestroy(&r);}
#endif
  VecDestroy(&x);
  SNESDestroy(&snes);
  DMDestroy(&da);
  PetscFinalize();
  return(0);
}
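
/*
   A minimal sketch of an optional post-solve diagnostic: besides the iteration count,
   one can ask SNES why it stopped.  SNESGetConvergedReason() is standard SNES API; the
   lines below would go inside main() right after SNESSolve().

     SNESConvergedReason reason;
     SNESGetConvergedReason(snes,&reason);
     PetscPrintf(PETSC_COMM_WORLD,"SNES stopped with reason %d after %D iterations\n",(int)reason,its);

   A positive reason value indicates convergence, a negative one divergence.
*/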
/* ------------------------------------------------------------------- */

/*
   FormInitialGuess - Forms initial approximation.

   Input Parameters:
     da   - the DMDA context
     user - user-defined application context

   Output Parameter:
     X    - vector
*/
PetscErrorCode FormInitialGuess(DM da,AppCtx *user,Vec X)
{
  PetscInt    i,j,Mx,My,xs,ys,xm,ym;
  PetscReal   lambda,temp1,temp,hx,hy;
  PetscScalar **x;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,
              PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);
  temp1  = lambda/(lambda + 1.0);

  /*
     Get a pointer to vector data.
       - DMDAVecGetArray() returns an array indexed with the global grid
         indices of the DMDA.
       - You MUST call DMDAVecRestoreArray() when you no longer need access
         to the array.
  */
  DMDAVecGetArray(da,X,&x);

  /*
     Get local grid boundaries (for 2-dimensional DMDA):
       xs, ys - starting grid indices (no ghost points)
       xm, ym - widths of local grid (no ghost points)
  */
  DMDAGetCorners(da,&xs,&ys,PETSC_NULL,&xm,&ym,PETSC_NULL);

  /*
     Compute initial guess over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    temp = (PetscReal)(PetscMin(j,My-j-1))*hy;
    for (i=xs; i<xs+xm; i++) {
      if (i == 0 || j == 0 || i == Mx-1 || j == My-1) {
        /* boundary conditions are all zero Dirichlet */
        x[j][i] = 0.0;
      } else {
        x[j][i] = temp1*sqrt(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,temp));
      }
    }
  }

  /*
     Restore vector
  */
  DMDAVecRestoreArray(da,X,&x);
  return(0);
}
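
/*
   In other words, the initial guess computed above is

      u0(i,j) = (lambda/(lambda+1)) * sqrt( min( min(i,Mx-1-i)*hx, min(j,My-1-j)*hy ) )

   at interior points, i.e. proportional to the square root of the distance to the
   nearest boundary, and zero on the boundary itself.
*/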
/* ------------------------------------------------------------------- */

/*
   FormFunctionLocal - Evaluates the nonlinear function F(x) on the local process patch
*/
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  PetscInt    i,j;
  PetscReal   lambda,hx,hy,hxdhy,hydhx,sc;
  PetscScalar u,uxx,uyy;

  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;
  hxdhy  = hx/hy;
  hydhx  = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        f[j][i] = x[j][i];
      } else {
        u       = x[j][i];
        uxx     = (2.0*u - x[j][i-1] - x[j][i+1])*hydhx;
        uyy     = (2.0*u - x[j-1][i] - x[j+1][i])*hxdhy;
        f[j][i] = uxx + uyy - sc*PetscExpScalar(u);
      }
    }
  }
  PetscLogFlops(11.0*info->ym*info->xm);
  return(0);
}

/*
   FormJacobianLocal - Evaluates the Jacobian matrix on the local process patch
*/
PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,AppCtx *user)
{
  PetscInt    i,j;
  MatStencil  col[5],row;
  PetscScalar lambda,v[5],hx,hy,hxdhy,hydhx,sc;

  lambda = user->param;
  hx     = 1.0/(PetscReal)(info->mx-1);
  hy     = 1.0/(PetscReal)(info->my-1);
  sc     = hx*hy*lambda;
  hxdhy  = hx/hy;
  hydhx  = hy/hx;

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesStencil()
        (as done below, with grid indices) or MatSetValues() (with
        global row/column indices).
  */
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      row.j = j; row.i = i;
      /* boundary points */
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        v[0] = 1.0;
        MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES);
      } else {
        /* interior grid points */
        v[0] = -hxdhy;                                           col[0].j = j - 1; col[0].i = i;
        v[1] = -hydhx;                                           col[1].j = j;     col[1].i = i-1;
        v[2] = 2.0*(hydhx + hxdhy) - sc*PetscExpScalar(x[j][i]); col[2].j = row.j; col[2].i = row.i;
        v[3] = -hydhx;                                           col[3].j = j;     col[3].i = i+1;
        v[4] = -hxdhy;                                           col[4].j = j + 1; col[4].i = i;
        MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES);
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  /*
     Tell the matrix that we will never add a new nonzero location; if
     we do, it will generate an error.
  */
  MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
  return(0);
}
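
/*
   For reference, the Jacobian rows set above: differentiating the residual in
   FormFunctionLocal() with respect to u gives, at an interior point (i,j),

      dF(i,j)/du(i,j)   =  2*(hy/hx + hx/hy) - lambda*hx*hy*exp(u(i,j))
      dF(i,j)/du(i-1,j) = dF(i,j)/du(i+1,j) = -hy/hx
      dF(i,j)/du(i,j-1) = dF(i,j)/du(i,j+1) = -hx/hy

   while a boundary row is simply the identity (a single 1.0 on the diagonal).
*/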

#if defined(PETSC_HAVE_MATLAB_ENGINE)
PetscErrorCode FormFunctionMatlab(SNES snes,Vec X,Vec F,void *ptr)
{
  AppCtx    *user = (AppCtx*)ptr;
  PetscInt  Mx,My;
  PetscReal lambda,hx,hy;
  Vec       localX,localF;
  MPI_Comm  comm;
  DM        da;

  SNESGetDM(snes,&da);
  DMGetLocalVector(da,&localX);
  DMGetLocalVector(da,&localF);
  PetscObjectSetName((PetscObject)localX,"localX");
  PetscObjectSetName((PetscObject)localF,"localF");
  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,
              PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  lambda = user->param;
  hx     = 1.0/(PetscReal)(Mx-1);
  hy     = 1.0/(PetscReal)(My-1);

  PetscObjectGetComm((PetscObject)snes,&comm);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
  DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
  PetscMatlabEnginePut(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localX);
  PetscMatlabEngineEvaluate(PETSC_MATLAB_ENGINE_(comm),"localF=ex5m(localX,%18.16e,%18.16e,%18.16e)",hx,hy,lambda);
  PetscMatlabEngineGet(PETSC_MATLAB_ENGINE_(comm),(PetscObject)localF);

  /*
     Insert values into global vector
  */
  DMLocalToGlobalBegin(da,localF,INSERT_VALUES,F);
  DMLocalToGlobalEnd(da,localF,INSERT_VALUES,F);
  DMRestoreLocalVector(da,&localX);
  DMRestoreLocalVector(da,&localF);
  return(0);
}
#endif