Actual source code: pcis.c
#include <../src/ksp/pc/impls/is/pcis.h>

/* -------------------------------------------------------------------------- */
/*
   PCISSetUp -
*/
PetscErrorCode PCISSetUp(PC pc)
{
  PC_IS     *pcis  = (PC_IS*)(pc->data);
  Mat_IS    *matis = (Mat_IS*)pc->mat->data;
  PetscInt  i;
  PetscBool flg;

  PetscTypeCompare((PetscObject)pc->mat,MATIS,&flg);
  if (!flg) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann-Neumann requires matrix of type MATIS");

  pcis->pure_neumann = matis->pure_neumann;
  /*
     Creating the local vector vec1_N, containing the number of subdomains
     to which each local node (either owned or ghost) pertains. To accomplish
     that, we scatter local vectors of 1's to a global vector (adding the
     values) and scatter the result back to the local vectors. (The reciprocal
     is taken later, when the scaling vector D is built.)
  */
  {
    Vec counter;
    VecDuplicate(matis->x,&pcis->vec1_N);
    MatGetVecs(pc->pmat,&counter,0); /* temporary auxiliary vector */
    VecSet(counter,0.0);
    VecSet(pcis->vec1_N,1.0);
    VecScatterBegin(matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);
    VecScatterEnd  (matis->ctx,pcis->vec1_N,counter,ADD_VALUES,SCATTER_REVERSE);
    VecScatterBegin(matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd  (matis->ctx,counter,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);
    VecDestroy(&counter);
  }
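  /*
     A worked example (not part of the original source): a node lying on the
     face between exactly two subdomains receives 1.0 from each of them in
     the reverse (adding) scatter, so vec1_N holds 2.0 there after the
     forward scatter, while a purely interior node keeps the value 1.0.
  */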
  /*
     Creating local and global index sets for interior and
     interface nodes. Notice that interior nodes have vec1_N[i]==1.0.
  */
  {
    PetscInt    n_I;
    PetscInt    *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
    PetscScalar *array;

    /* Identifying interior and interface nodes, in local numbering */
    VecGetSize(pcis->vec1_N,&pcis->n);
    VecGetArray(pcis->vec1_N,&array);
    PetscMalloc(pcis->n*sizeof(PetscInt),&idx_I_local);
    PetscMalloc(pcis->n*sizeof(PetscInt),&idx_B_local);
    for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
      if (array[i] == 1.0) { idx_I_local[n_I]       = i; n_I++;       }
      else                 { idx_B_local[pcis->n_B] = i; pcis->n_B++; }
    }
    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I;       /* Just avoiding allocating extra memory, since we have vacant space */
    idx_I_global = idx_B_local + pcis->n_B;
    ISLocalToGlobalMappingApply(matis->mapping,pcis->n_B,idx_B_local,idx_B_global);
    ISLocalToGlobalMappingApply(matis->mapping,n_I,      idx_I_local,idx_I_global);
    /* Creating the index sets */
    ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_local, PETSC_COPY_VALUES,&pcis->is_B_local);
    ISCreateGeneral(MPI_COMM_SELF,pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global);
    ISCreateGeneral(MPI_COMM_SELF,n_I,      idx_I_local, PETSC_COPY_VALUES,&pcis->is_I_local);
    ISCreateGeneral(MPI_COMM_SELF,n_I,      idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global);
    /* Freeing memory and restoring arrays */
    PetscFree(idx_B_local);
    PetscFree(idx_I_local);
    VecRestoreArray(pcis->vec1_N,&array);
  }
  /*
     Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
     is such that interior nodes come before the interface ones, we have

             [      |      ]
             [ A_II | A_IB ]
         A = [      |      ]
             [------+------]
             [ A_BI | A_BB ]
  */
  MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_II);
  MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);
  MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);
  MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_BB);
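  /*
     Note (not part of the original source): these four blocks define the
     local Schur complement that PCISApplySchur (below) applies,

         S = A_BB - A_BI * inv(A_II) * A_IB.
  */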
  /*
     Creating work vectors and arrays
  */
  /* pcis->vec1_N has already been created */
  VecDuplicate(pcis->vec1_N,&pcis->vec2_N);
  VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);
  VecDuplicate(pcis->vec1_D,&pcis->vec2_D);
  VecDuplicate(pcis->vec1_D,&pcis->vec3_D);
  VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);
  VecDuplicate(pcis->vec1_B,&pcis->vec2_B);
  VecDuplicate(pcis->vec1_B,&pcis->vec3_B);
  MatGetVecs(pc->pmat,&pcis->vec1_global,0);
  PetscMalloc((pcis->n)*sizeof(PetscScalar),&pcis->work_N);
  /* Creating the scatter contexts */
  VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);
  VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);
  VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);
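  /*
     Summary of the three scatters just created (not part of the original
     source): global_to_D maps the assembled global vector to the local
     interior part, N_to_B maps the full local (interior plus interface)
     vector to its interface part, and global_to_B maps the global vector
     to the local interface part.
  */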
  /* Creating scaling "matrix" D, from information in vec1_N */
  VecDuplicate(pcis->vec1_B,&pcis->D);
  VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd  (pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);
  VecReciprocal(pcis->D);
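  /*
     Note (not part of the original source): after VecReciprocal, D holds 1/k
     at an interface node shared by k subdomains, so the subdomain copies of
     D sum to 1 at every interface node; D is a partition of unity on the
     interface.
  */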
  /* See historical note 01, at the bottom of this file. */

  /*
     Creating the KSP contexts for the local Dirichlet and Neumann problems.
  */
  {
    PC pc_ctx;
    /* Dirichlet */
    KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);
    PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);
    KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II,SAME_PRECONDITIONER);
    KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");
    KSPGetPC(pcis->ksp_D,&pc_ctx);
    PCSetType(pc_ctx,PCLU);
    KSPSetType(pcis->ksp_D,KSPPREONLY);
    KSPSetFromOptions(pcis->ksp_D);
    /* calling KSPSetUp here performs the setup now, rather than at the first solve */
    KSPSetUp(pcis->ksp_D);
    /* Neumann */
    KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);
    PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);
    KSPSetOperators(pcis->ksp_N,matis->A,matis->A,SAME_PRECONDITIONER);
    KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");
    KSPGetPC(pcis->ksp_N,&pc_ctx);
    PCSetType(pc_ctx,PCLU);
    KSPSetType(pcis->ksp_N,KSPPREONLY);
    KSPSetFromOptions(pcis->ksp_N);
    {
      PetscBool damp_fixed                    = PETSC_FALSE,
                remove_nullspace_fixed        = PETSC_FALSE,
                set_damping_factor_floating   = PETSC_FALSE,
                not_damp_floating             = PETSC_FALSE,
                not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor,
                floating_factor;

      PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);
      if (!damp_fixed) { fixed_factor = 0.0; }
      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,PETSC_NULL);

      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,PETSC_NULL);

      PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",
                          &floating_factor,&set_damping_factor_floating);
      if (!set_damping_factor_floating) { floating_factor = 0.0; }
      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,PETSC_NULL);
      if (!set_damping_factor_floating) { floating_factor = 1.e-12; }

      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,PETSC_NULL);

      PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,PETSC_NULL);
      if (pcis->pure_neumann) {  /* floating subdomain */
        if (!(not_damp_floating)) {
          PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);
          PCFactorSetShiftAmount(pc_ctx,floating_factor);
        }
        if (!(not_remove_nullspace_floating)) {
          MatNullSpace nullsp;
          MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);
          KSPSetNullSpace(pcis->ksp_N,nullsp);
          MatNullSpaceDestroy(&nullsp);
        }
      } else {  /* fixed subdomain */
        if (damp_fixed) {
          PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);
          PCFactorSetShiftAmount(pc_ctx,fixed_factor); /* the fixed-subdomain damping factor */
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,PETSC_NULL,&nullsp);
          KSPSetNullSpace(pcis->ksp_N,nullsp);
          MatNullSpaceDestroy(&nullsp);
        }
      }
    }
    /* calling KSPSetUp here performs the setup now, rather than at the first solve */
    KSPSetUp(pcis->ksp_N);
  }
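  /*
     Usage note (not part of the original source): because of the option
     prefixes set above, the local Dirichlet and Neumann solvers can be
     customized at run time, e.g. (illustrative option values):

       -is_localD_ksp_type cg    -is_localD_pc_type icc
       -is_localN_ksp_type gmres -is_localN_pc_type ilu
  */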
  ISLocalToGlobalMappingGetInfo(((Mat_IS*)(pc->mat->data))->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));
  pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_TRUE;
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISDestroy -
*/
PetscErrorCode PCISDestroy(PC pc)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  ISDestroy(&pcis->is_B_local);
  ISDestroy(&pcis->is_I_local);
  ISDestroy(&pcis->is_B_global);
  ISDestroy(&pcis->is_I_global);
  MatDestroy(&pcis->A_II);
  MatDestroy(&pcis->A_IB);
  MatDestroy(&pcis->A_BI);
  MatDestroy(&pcis->A_BB);
  VecDestroy(&pcis->D);
  KSPDestroy(&pcis->ksp_N);
  KSPDestroy(&pcis->ksp_D);
  VecDestroy(&pcis->vec1_N);
  VecDestroy(&pcis->vec2_N);
  VecDestroy(&pcis->vec1_D);
  VecDestroy(&pcis->vec2_D);
  VecDestroy(&pcis->vec3_D);
  VecDestroy(&pcis->vec1_B);
  VecDestroy(&pcis->vec2_B);
  VecDestroy(&pcis->vec3_B);
  VecDestroy(&pcis->vec1_global);
  VecScatterDestroy(&pcis->global_to_D);
  VecScatterDestroy(&pcis->N_to_B);
  VecScatterDestroy(&pcis->global_to_B);
  PetscFree(pcis->work_N);
  if (pcis->ISLocalToGlobalMappingGetInfoWasCalled) {
    ISLocalToGlobalMappingRestoreInfo((ISLocalToGlobalMapping)0,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));
  }
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISCreate -
*/
PetscErrorCode PCISCreate(PC pc)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  pcis->is_B_local  = 0;
  pcis->is_I_local  = 0;
  pcis->is_B_global = 0;
  pcis->is_I_global = 0;
  pcis->A_II        = 0;
  pcis->A_IB        = 0;
  pcis->A_BI        = 0;
  pcis->A_BB        = 0;
  pcis->D           = 0;
  pcis->ksp_N       = 0;
  pcis->ksp_D       = 0;
  pcis->vec1_N      = 0;
  pcis->vec2_N      = 0;
  pcis->vec1_D      = 0;
  pcis->vec2_D      = 0;
  pcis->vec3_D      = 0;
  pcis->vec1_B      = 0;
  pcis->vec2_B      = 0;
  pcis->vec3_B      = 0;
  pcis->vec1_global = 0;
  pcis->work_N      = 0;
  pcis->global_to_D = 0;
  pcis->N_to_B      = 0;
  pcis->global_to_B = 0;
  pcis->ISLocalToGlobalMappingGetInfoWasCalled = PETSC_FALSE;
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCISApplySchur -

   Input parameters:
.  pc - preconditioner context
.  v - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)

   Output parameters:
.  vec1_B - result of the Schur complement applied to v
.  vec2_B - garbage (used as work space), or null (and v is used as work space)
.  vec1_D - garbage (used as work space)
.  vec2_D - garbage (used as work space)
*/
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  if (!vec2_B) { vec2_B = v; }

  MatMult(pcis->A_BB,v,vec1_B);
  MatMult(pcis->A_IB,v,vec1_D);
  KSPSolve(pcis->ksp_D,vec1_D,vec2_D);
  MatMult(pcis->A_BI,vec2_D,vec2_B);
  VecAXPY(vec1_B,-1.0,vec2_B);
  return(0);
}
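/*
   What the sequence above computes (note not part of the original source):

     vec1_B = A_BB v - A_BI inv(A_II) A_IB v = S v,

   i.e. the action of the local Schur complement S on v. A minimal usage
   sketch, reusing the preallocated work vectors of the PC_IS context
   (x_B and y_B are illustrative interface vectors):

     PCISApplySchur(pc,x_B,y_B,pcis->vec2_B,pcis->vec1_D,pcis->vec2_D);
*/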
/* -------------------------------------------------------------------------- */
/*
   PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
   including ghosts) into an interface vector, when in SCATTER_FORWARD mode, or vice versa, when in SCATTER_REVERSE
   mode.

   Input parameters:
.  pc - preconditioner context
.  array_N - [when in SCATTER_FORWARD mode] Array to be scattered into the vector
.  v_B - [when in SCATTER_REVERSE mode] Vector to be scattered into the array

   Output parameters:
.  array_N - [when in SCATTER_REVERSE mode] Array to receive the scattered vector
.  v_B - [when in SCATTER_FORWARD mode] Vector to receive the scattered array

   Notes:
   The entries in the array that do not correspond to interface nodes remain unaltered.
*/
PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc)
{
  PetscInt       i;
  const PetscInt *idex;
  PetscScalar    *array_B;
  PC_IS          *pcis = (PC_IS*)(pc->data);

  VecGetArray(v_B,&array_B);
  ISGetIndices(pcis->is_B_local,&idex);

  if (smode == SCATTER_FORWARD) {
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) { array_B[i] = array_N[idex[i]]; }
    } else {  /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) { array_B[i] += array_N[idex[i]]; }
    }
  } else {  /* SCATTER_REVERSE */
    if (imode == INSERT_VALUES) {
      for (i=0; i<pcis->n_B; i++) { array_N[idex[i]] = array_B[i]; }
    } else {  /* ADD_VALUES */
      for (i=0; i<pcis->n_B; i++) { array_N[idex[i]] += array_B[i]; }
    }
  }
  ISRestoreIndices(pcis->is_B_local,&idex);
  VecRestoreArray(v_B,&array_B);
  return(0);
}
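/*
   A minimal usage sketch (not part of the original source): copying the
   interface entries of the work array work_N into vec1_B, and then adding
   the vector entries back into the array:

     PCISScatterArrayNToVecB(pcis->work_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD,pc);
     PCISScatterArrayNToVecB(pcis->work_N,pcis->vec1_B,ADD_VALUES,SCATTER_REVERSE,pc);
*/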
/* -------------------------------------------------------------------------- */
/*
   PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.
   More precisely, solves the problem:

      [ A_II  A_IB ] [ . ]   [ 0 ]
      [            ] [   ] = [   ]
      [ A_BI  A_BB ] [ x ]   [ b ]

   Input parameters:
.  pc - preconditioner context
.  b - vector of local interface nodes (including ghosts)

   Output parameters:
.  x - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur
      complement to b
.  vec1_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
.  vec2_N - vector of local nodes (interior and interface, including ghosts); returns garbage (used as work space)
*/
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS *pcis = (PC_IS*)(pc->data);

  /*
     Neumann solvers.
     Applying the inverse of the local Schur complement, i.e., solving a Neumann
     problem with zero at the interior nodes of the RHS and extracting the
     interface part of the solution. The inverse of the Schur complement is
     applied to b and the result is stored in x.
  */
  /* Setting the RHS vec1_N */
  VecSet(vec1_N,0.0);
  VecScatterBegin(pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd  (pcis->N_to_B,b,vec1_N,INSERT_VALUES,SCATTER_REVERSE);
  /* Checking for consistency of the RHS */
  {
    PetscBool flg = PETSC_FALSE;
    PetscOptionsGetBool(PETSC_NULL,"-pc_is_check_consistency",&flg,PETSC_NULL);
    if (flg) {
      PetscScalar average;
      PetscViewer viewer;
      PetscViewerASCIIGetStdout(((PetscObject)pc)->comm,&viewer);

      VecSum(vec1_N,&average);
      average = average / ((PetscReal)pcis->n);
      PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
      if (pcis->pure_neumann) {
        PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is floating. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));
      } else {
        PetscViewerASCIISynchronizedPrintf(viewer,"Subdomain %04d is fixed. Average = % 1.14e\n",PetscGlobalRank,PetscAbsScalar(average));
      }
      PetscViewerFlush(viewer);
      PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
    }
  }
  /* Solving the system for vec2_N */
  KSPSolve(pcis->ksp_N,vec1_N,vec2_N);
  /* Extracting the local interface vector out of the solution */
  VecScatterBegin(pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd  (pcis->N_to_B,vec2_N,x,INSERT_VALUES,SCATTER_FORWARD);
  return(0);
}
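/*
   Why this yields x = inv(S) b (note not part of the original source):
   eliminating the interior unknowns y from

      [ A_II  A_IB ] [ y ]   [ 0 ]
      [ A_BI  A_BB ] [ x ] = [ b ]

   gives y = -inv(A_II) A_IB x from the first row, and substituting into the
   second row gives (A_BB - A_BI inv(A_II) A_IB) x = S x = b.
*/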