Actual source code: dagtona.c
/*
   Tools to help solve the coarse grid problem redundantly.
   Provides two scatter contexts that (1) map from the usual global vector
   to a copy of the entire vector, in NATURAL numbering, on every processor
   and (2) extract from that entire vector in natural numbering on each
   processor that processor's piece in GLOBAL numbering.
*/
#include <private/daimpl.h>    /*I  "petscdmda.h"  I*/
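
/*
   Note (illustrative, not from the original source): the NATURAL numbering
   orders grid points lexicographically over the whole domain, while the
   GLOBAL (PETSc) numbering orders each process's subdomain contiguously.
   The AO returned by DMDAGetAO() converts between the two orderings; a
   minimal sketch, assuming a DMDA named da and the usual error checking:

      AO       ao;
      PetscInt idx[2] = {0,5};               [indices in PETSc (global) ordering]
      ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
      ierr = AOPetscToApplication(ao,2,idx);CHKERRQ(ierr);
      [idx[] now holds the corresponding natural-ordering indices]
*/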
/*@
   DMDAGlobalToNaturalAllCreate - Creates a scatter context that maps the entire
     global vector to each processor in natural numbering

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDAGlobalToNaturalAllCreate(DM da,VecScatter *scatter)
{
  PetscErrorCode ierr;
  PetscInt       N;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(scatter,2);
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context */
  ierr = VecCreateMPIWithArray(((PetscObject)da)->comm,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecGetSize(global,&N);CHKERRQ(ierr);
  ierr = ISCreateStride(((PetscObject)da)->comm,N,0,1,&to);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,to);CHKERRQ(ierr);
  ierr = ISCreateStride(((PetscObject)da)->comm,N,0,1,&from);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,N,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecSetBlockSize(tmplocal,dd->w);CHKERRQ(ierr);
  ierr = VecSetBlockSize(global,dd->w);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,tmplocal,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
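
/*
   Example usage (a hedged sketch, not part of the original source): gather an
   entire DMDA global vector, in natural ordering, onto every process.  The
   vector and scatter names are hypothetical; each call should be error-checked
   as above.

      Vec        xglobal,xnatall;
      VecScatter tonatall;
      PetscInt   N;

      ierr = DMCreateGlobalVector(da,&xglobal);CHKERRQ(ierr);
      ierr = VecGetSize(xglobal,&N);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,N,&xnatall);CHKERRQ(ierr);
      ierr = DMDAGlobalToNaturalAllCreate(da,&tonatall);CHKERRQ(ierr);
      ierr = VecScatterBegin(tonatall,xglobal,xnatall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(tonatall,xglobal,xnatall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

   The scatter context and the work vectors should later be released with
   VecScatterDestroy() and VecDestroy().
*/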
/*@
   DMDANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
     of the entire vector on each processor to its local part in the global vector.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDANaturalAllToGlobalCreate(DM da,VecScatter *scatter)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscInt       M,m = dd->Nlocal,start;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(scatter,2);
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context */
  ierr = MPI_Allreduce(&m,&M,1,MPIU_INT,MPI_SUM,((PetscObject)da)->comm);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(((PetscObject)da)->comm,m,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(global,&start,PETSC_NULL);CHKERRQ(ierr);
  ierr = ISCreateStride(((PetscObject)da)->comm,m,start,1,&from);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,from);CHKERRQ(ierr);
  ierr = ISCreateStride(((PetscObject)da)->comm,m,start,1,&to);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,M,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecSetBlockSize(tmplocal,dd->w);CHKERRQ(ierr);
  ierr = VecSetBlockSize(global,dd->w);CHKERRQ(ierr);
  ierr = VecScatterCreate(tmplocal,from,global,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
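
/*
   Example usage (a hedged sketch, not part of the original source): combined
   with DMDAGlobalToNaturalAllCreate() above, this supports solving the coarse
   problem redundantly.  Every process gathers the whole coarse vector in
   natural ordering, solves it locally, and then scatters only its own piece
   back into the global vector.  The names below (coarsesolve, the vectors,
   the scatters) are hypothetical placeholders.

      ierr = DMDAGlobalToNaturalAllCreate(da,&tonatall);CHKERRQ(ierr);
      ierr = DMDANaturalAllToGlobalCreate(da,&fromnatall);CHKERRQ(ierr);

      ierr = VecScatterBegin(tonatall,xglobal,xnatall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(tonatall,xglobal,xnatall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      coarsesolve(xnatall,ynatall);        [redundant local solve on every process]

      ierr = VecScatterBegin(fromnatall,ynatall,yglobal,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(fromnatall,ynatall,yglobal,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      ierr = VecScatterDestroy(&tonatall);CHKERRQ(ierr);
      ierr = VecScatterDestroy(&fromnatall);CHKERRQ(ierr);
*/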