Actual source code: pbvec.c
2: /*
3: This file contains routines for Parallel vector operations.
4: */
5: #include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I "petscvec.h" I*/
9: static PetscErrorCode VecPointwiseMax_Seq(Vec win,Vec xin,Vec yin)
10: {
12: PetscInt n = win->map->n,i;
13: PetscScalar *ww,*xx,*yy; /* cannot make xx or yy const since might be ww */
16: VecGetArrayRead(xin,(const PetscScalar**)&xx);
17: VecGetArrayRead(yin,(const PetscScalar**)&yy);
18: VecGetArray(win,&ww);
19: for (i=0; i<n; i++) {
20: ww[i] = PetscMax(PetscRealPart(xx[i]),PetscRealPart(yy[i]));
21: }
22: VecRestoreArrayRead(xin,(const PetscScalar**)&xx);
23: VecRestoreArrayRead(yin,(const PetscScalar**)&yy);
24: VecRestoreArray(win,&ww);
25: PetscLogFlops(n);
26: return(0);
27: }
31: static PetscErrorCode VecPointwiseMin_Seq(Vec win,Vec xin,Vec yin)
32: {
34: PetscInt n = win->map->n,i;
35: PetscScalar *ww,*xx,*yy; /* cannot make xx or yy const since might be ww */
38: VecGetArrayRead(xin,(const PetscScalar**)&xx);
39: VecGetArrayRead(yin,(const PetscScalar**)&yy);
40: VecGetArray(win,&ww);
41: for (i=0; i<n; i++) {
42: ww[i] = PetscMin(PetscRealPart(xx[i]),PetscRealPart(yy[i]));
43: }
44: VecRestoreArrayRead(xin,(const PetscScalar**)&xx);
45: VecRestoreArrayRead(yin,(const PetscScalar**)&yy);
46: VecRestoreArray(win,&ww);
47: PetscLogFlops(n);
48: return(0);
49: }
53: static PetscErrorCode VecPointwiseMaxAbs_Seq(Vec win,Vec xin,Vec yin)
54: {
56: PetscInt n = win->map->n,i;
57: PetscScalar *ww,*xx,*yy; /* cannot make xx or yy const since might be ww */
60: VecGetArrayRead(xin,(const PetscScalar**)&xx);
61: VecGetArrayRead(yin,(const PetscScalar**)&yy);
62: VecGetArray(win,&ww);
63: for (i=0; i<n; i++) {
64: ww[i] = PetscMax(PetscAbsScalar(xx[i]),PetscAbsScalar(yy[i]));
65: }
66: PetscLogFlops(n);
67: VecRestoreArrayRead(xin,(const PetscScalar**)&xx);
68: VecRestoreArrayRead(yin,(const PetscScalar**)&yy);
69: VecRestoreArray(win,&ww);
70: return(0);
71: }
73: #include <../src/vec/vec/impls/seq/ftn-kernels/fxtimesy.h>
76: static PetscErrorCode VecPointwiseMult_Seq(Vec win,Vec xin,Vec yin)
77: {
79: PetscInt n = win->map->n,i;
80: PetscScalar *ww,*xx,*yy; /* cannot make xx or yy const since might be ww */
83: VecGetArrayRead(xin,(const PetscScalar**)&xx);
84: VecGetArrayRead(yin,(const PetscScalar**)&yy);
85: VecGetArray(win,&ww);
86: if (ww == xx) {
87: for (i=0; i<n; i++) ww[i] *= yy[i];
88: } else if (ww == yy) {
89: for (i=0; i<n; i++) ww[i] *= xx[i];
90: } else {
91: #if defined(PETSC_USE_FORTRAN_KERNEL_XTIMESY)
92: fortranxtimesy_(xx,yy,ww,&n);
93: #else
94: for (i=0; i<n; i++) ww[i] = xx[i] * yy[i];
95: #endif
96: }
97: VecRestoreArrayRead(xin,(const PetscScalar**)&xx);
98: VecRestoreArrayRead(yin,(const PetscScalar**)&yy);
99: VecRestoreArray(win,&ww);
100: PetscLogFlops(n);
101: return(0);
102: }
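The ww == xx and ww == yy branches above exist because the result vector is allowed to be one of the inputs; the separate loops avoid reading through a pointer that is also being written. A hedged fragment of the calling pattern that exercises those branches (illustrative only; assumes x and y are existing, filled vectors and omits error checking):

  Vec x,y;                     /* assumed: previously created and filled */
  VecPointwiseMult(x,x,y);     /* x[i] = x[i]*y[i]; hits the ww == xx branch above */
  VecPointwiseMult(y,x,y);     /* y[i] = x[i]*y[i]; hits the ww == yy branch above */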
106: static PetscErrorCode VecPointwiseDivide_Seq(Vec win,Vec xin,Vec yin)
107: {
109: PetscInt n = win->map->n,i;
110: PetscScalar *ww,*xx,*yy; /* cannot make xx or yy const since might be ww */
113: VecGetArrayRead(xin,(const PetscScalar**)&xx);
114: VecGetArrayRead(yin,(const PetscScalar**)&yy);
115: VecGetArray(win,&ww);
116: for (i=0; i<n; i++) {
117: ww[i] = xx[i] / yy[i];
118: }
119: PetscLogFlops(n);
120: VecRestoreArrayRead(xin,(const PetscScalar**)&xx);
121: VecRestoreArrayRead(yin,(const PetscScalar**)&yy);
122: VecRestoreArray(win,&ww);
123: return(0);
124: }
128: PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
129: {
130: PetscScalar sum,work;
134: VecDot_Seq(xin,yin,&work);
135: MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);
136: *z = sum;
137: return(0);
138: }
142: PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
143: {
144: PetscScalar sum,work;
148: VecTDot_Seq(xin,yin,&work);
149: MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);
150: *z = sum;
151: return(0);
152: }
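Both dot-product kernels above follow the same pattern: each process computes its local contribution with the sequential kernel, and a single MPI_Allreduce combines the partial sums. A minimal, self-contained usage sketch (a toy program, not part of pbvec.c; error checking omitted for brevity):

  #include <petscvec.h>

  int main(int argc,char **argv)
  {
    Vec         x,y;
    PetscScalar dot;

    PetscInitialize(&argc,&argv,PETSC_NULL,PETSC_NULL);
    VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);  /* a VECMPI vector of global length 100 */
    VecDuplicate(x,&y);
    VecSet(x,2.0);
    VecSet(y,3.0);
    VecDot(x,y,&dot);                                    /* for VECMPI this dispatches to VecDot_MPI() */
    VecDestroy(&x);
    VecDestroy(&y);
    PetscFinalize();
    return 0;
  }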
160: PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
161: {
163: Vec_MPI *v = (Vec_MPI *)vin->data;
166: if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
167: v->unplacedarray = v->array; /* save previous array so reset can bring it back */
168: v->array = (PetscScalar *)a;
169: if (v->localrep) {
170: VecPlaceArray(v->localrep,a);
171: }
172: return(0);
173: }
179: static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
180: {
182: Vec_MPI *vw,*w = (Vec_MPI *)win->data;
183: PetscScalar *array;
186: VecCreate(((PetscObject)win)->comm,v);
187: PetscLayoutReference(win->map,&(*v)->map);
189: VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);
190: vw = (Vec_MPI *)(*v)->data;
191: PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));
193: /* save local representation of the parallel vector (and scatter) if it exists */
194: if (w->localrep) {
195: VecGetArray(*v,&array);
196: VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);
197: PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
198: VecRestoreArray(*v,&array);
199: PetscLogObjectParent(*v,vw->localrep);
200: vw->localupdate = w->localupdate;
201: if (vw->localupdate) {
202: PetscObjectReference((PetscObject)vw->localupdate);
203: }
204: }
206: /* New vector should inherit stashing property of parent */
207: (*v)->stash.donotstash = win->stash.donotstash;
208: (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
209:
210: PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);
211: PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);
212: (*v)->map->bs = win->map->bs;
213: (*v)->bstash.bs = win->bstash.bs;
215: return(0);
216: }
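Because VecDuplicate_MPI() copies the operations table, recreates the sequential local representation, and takes a reference on the parent's ghost-update scatter, a duplicate of a ghosted vector is immediately usable for ghost updates. A hedged fragment (assumes x is an existing ghosted VECMPI vector; error checking omitted):

  Vec x;                                                 /* assumed: created with VecCreateGhost() and filled */
  Vec y;
  VecDuplicate(x,&y);                                    /* same layout, local form, and scatter as x */
  VecGhostUpdateBegin(y,INSERT_VALUES,SCATTER_FORWARD);  /* no additional setup needed on y */
  VecGhostUpdateEnd(y,INSERT_VALUES,SCATTER_FORWARD);
  VecDestroy(&y);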
223: static PetscErrorCode VecSetRandom_Seq(Vec xin,PetscRandom r)
224: {
226: PetscInt n = xin->map->n,i;
227: PetscScalar *xx;
230: VecGetArray(xin,&xx);
231: for (i=0; i<n; i++) {PetscRandomGetValue(r,&xx[i]);}
232: VecRestoreArray(xin,&xx);
233: return(0);
234: }
239: static PetscErrorCode VecGetSize_Seq(Vec vin,PetscInt *size)
240: {
242: *size = vin->map->n;
243: return(0);
244: }
248: static PetscErrorCode VecConjugate_Seq(Vec xin)
249: {
250: PetscScalar *x;
251: PetscInt n = xin->map->n;
255: VecGetArray(xin,&x);
256: while (n-->0) {
257: *x = PetscConj(*x);
258: x++;
259: }
260: VecRestoreArray(xin,&x);
261: return(0);
262: }
266: static PetscErrorCode VecCopy_Seq(Vec xin,Vec yin)
267: {
268: PetscScalar *ya;
269: const PetscScalar *xa;
270: PetscErrorCode ierr;
273: if (xin != yin) {
274: VecGetArrayRead(xin,&xa);
275: VecGetArray(yin,&ya);
276: PetscMemcpy(ya,xa,xin->map->n*sizeof(PetscScalar));
277: VecRestoreArrayRead(xin,&xa);
278: VecRestoreArray(yin,&ya);
279: }
280: return(0);
281: }
283: #include <petscblaslapack.h>
286: static PetscErrorCode VecSwap_Seq(Vec xin,Vec yin)
287: {
288: PetscScalar *ya, *xa;
290: PetscBLASInt one = 1,bn = PetscBLASIntCast(xin->map->n);
293: if (xin != yin) {
294: VecGetArray(xin,&xa);
295: VecGetArray(yin,&ya);
296: BLASswap_(&bn,xa,&one,ya,&one);
297: VecRestoreArray(xin,&xa);
298: VecRestoreArray(yin,&ya);
299: }
300: return(0);
301: }
303: static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
304: VecDuplicateVecs_Default,
305: VecDestroyVecs_Default,
306: VecDot_MPI,
307: VecMDot_MPI,
308: VecNorm_MPI,
309: VecTDot_MPI,
310: VecMTDot_MPI,
311: VecScale_Seq,
312: VecCopy_Seq, /* 10 */
313: VecSet_Seq,
314: VecSwap_Seq,
315: VecAXPY_Seq,
316: VecAXPBY_Seq,
317: VecMAXPY_Seq,
318: VecAYPX_Seq,
319: VecWAXPY_Seq,
320: VecAXPBYPCZ_Seq,
321: VecPointwiseMult_Seq,
322: VecPointwiseDivide_Seq,
323: VecSetValues_MPI, /* 20 */
324: VecAssemblyBegin_MPI,
325: VecAssemblyEnd_MPI,
326: 0,
327: VecGetSize_MPI,
328: VecGetSize_Seq,
329: 0,
330: VecMax_MPI,
331: VecMin_MPI,
332: VecSetRandom_Seq,
333: VecSetOption_MPI,
334: VecSetValuesBlocked_MPI,
335: VecDestroy_MPI,
336: VecView_MPI,
337: VecPlaceArray_MPI,
338: VecReplaceArray_Seq,
339: VecDot_Seq,
340: VecTDot_Seq,
341: VecNorm_Seq,
342: VecMDot_Seq,
343: VecMTDot_Seq,
344: VecLoad_Default,
345: VecReciprocal_Default,
346: VecConjugate_Seq,
347: 0,
348: 0,
349: VecResetArray_MPI,
350: 0,
351: VecMaxPointwiseDivide_Seq,
352: VecPointwiseMax_Seq,
353: VecPointwiseMaxAbs_Seq,
354: VecPointwiseMin_Seq,
355: VecGetValues_MPI,
356: 0,
357: 0,
358: 0,
359: 0,
360: 0,
361: 0,
362: VecStrideGather_Default,
363: VecStrideScatter_Default
364: };
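This table is the per-type dispatch for VECMPI: the generic Vec front ends look up the matching slot and call it, which is how, for example, VecDot() on a VECMPI vector reaches VecDot_MPI() while VecCopy() reaches the purely local VecCopy_Seq(). A rough conceptual sketch of the dispatch (simplified; the real front ends also validate arguments and do logging):

  /* inside the generic front end, approximately: */
  (*x->ops->dot)(x,y,&z);        /* for VECMPI, x->ops->dot points at VecDot_MPI() */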
368: /*
369: VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
370: VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
371:   VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
373: If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
374: no space is allocated.
375: */
376: PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
377: {
378: Vec_MPI *s;
383: PetscNewLog(v,Vec_MPI,&s);
384: v->data = (void*)s;
385: PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
386: s->nghost = nghost;
387: v->petscnative = PETSC_TRUE;
389: if (v->map->bs == -1) v->map->bs = 1;
390: PetscLayoutSetUp(v->map);
391: s->array = (PetscScalar *)array;
392: s->array_allocated = 0;
393: if (alloc && !array) {
394: PetscInt n = v->map->n+nghost;
395: PetscMalloc(n*sizeof(PetscScalar),&s->array);
396: PetscLogObjectMemory(v,n*sizeof(PetscScalar));
397: PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));
398: s->array_allocated = s->array;
399: }
401:   /* By default parallel vectors do not have a local representation */
402: s->localrep = 0;
403: s->localupdate = 0;
405: v->stash.insertmode = NOT_SET_VALUES;
406: /* create the stashes. The block-size for bstash is set later when
407: VecSetValuesBlocked is called.
408: */
409: VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);
410: VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);
411:
412: #if defined(PETSC_HAVE_MATLAB_ENGINE)
413: PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
414: PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
415: #endif
416: PetscObjectChangeTypeName((PetscObject)v,VECMPI);
417: return(0);
418: }
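The most common path into this private constructor is the standard creation sequence, with VecSetType() invoking VecCreate_MPI() below. A hedged fragment (error checking omitted):

  Vec v;
  VecCreate(PETSC_COMM_WORLD,&v);
  VecSetSizes(v,PETSC_DECIDE,100);   /* the global length is split across the processes */
  VecSetType(v,VECMPI);              /* calls VecCreate_MPI(), which calls VecCreate_MPI_Private() */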
420: /*MC
421: VECMPI - VECMPI = "mpi" - The basic parallel vector
423: Options Database Keys:
424: . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
426: Level: beginner
428: .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
429: M*/
434: PetscErrorCode VecCreate_MPI(Vec vv)
435: {
439: VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);
440: return(0);
441: }
444: /*MC
445: VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
447: Options Database Keys:
448: . -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()
450: Level: beginner
452: .seealso: VecCreateSeq(), VecCreateMPI()
453: M*/
458: PetscErrorCode VecCreate_Standard(Vec v)
459: {
461: PetscMPIInt size;
464: MPI_Comm_size(((PetscObject)v)->comm,&size);
465: if (size == 1) {
466: VecSetType(v,VECSEQ);
467: } else {
468: VecSetType(v,VECMPI);
469: }
470: return(0);
471: }
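A short fragment showing how VECSTANDARD is usually selected, either explicitly with VecSetType() or via -vec_type standard in the options database (illustrative; error checking omitted):

  Vec v;
  VecCreate(PETSC_COMM_WORLD,&v);
  VecSetSizes(v,PETSC_DECIDE,100);
  VecSetType(v,VECSTANDARD);         /* VECSEQ on one process, VECMPI otherwise, per VecCreate_Standard() */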
477: /*@C
478: VecCreateMPIWithArray - Creates a parallel, array-style vector,
479: where the user provides the array space to store the vector values.
481: Collective on MPI_Comm
483: Input Parameters:
484: + comm - the MPI communicator to use
485: . n - local vector length, cannot be PETSC_DECIDE
486: .  N - global vector length (or PETSC_DECIDE to have it calculated)
487: - array - the user provided array to store the vector values
489: Output Parameter:
490: . vv - the vector
491:
492: Notes:
493: Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
494: same type as an existing vector.
496: If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
497: at a later stage to SET the array for storing the vector values.
499: PETSc does NOT free the array when the vector is destroyed via VecDestroy().
500: The user should not free the array until the vector is destroyed.
502: Level: intermediate
504: Concepts: vectors^creating with array
506: .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
507: VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
509: @*/
510: PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
511: {
515: if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
516: PetscSplitOwnership(comm,&n,&N);
517: VecCreate(comm,vv);
518: VecSetSizes(*vv,n,N);
519: VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);
520: return(0);
521: }
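A minimal sketch of supplying user storage, matching the signature above: each process passes its local length n and an array of at least n entries, and the array must remain valid until the vector is destroyed (error checking omitted):

  PetscScalar storage[5];                       /* user-owned storage for 5 local entries */
  Vec         v;

  VecCreateMPIWithArray(PETSC_COMM_WORLD,5,PETSC_DECIDE,storage,&v);
  /* ... use v ... */
  VecDestroy(&v);                               /* storage is NOT freed; the caller still owns it */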
525: /*@C
526: VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
527: the caller allocates the array space.
529: Collective on MPI_Comm
531: Input Parameters:
532: + comm - the MPI communicator to use
533: . n - local vector length
534: .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
535: . nghost - number of local ghost points
536: . ghosts - global indices of ghost points (or PETSC_NULL if not needed)
537: -  array - the space to store the vector values (must be at least n + nghost entries long)
539: Output Parameter:
540: . vv - the global vector representation (without ghost points as part of vector)
541:
542: Notes:
543: Use VecGhostGetLocalForm() to access the local, ghosted representation
544: of the vector.
546: This also automatically sets the ISLocalToGlobalMapping() for this vector.
548: Level: advanced
550: Concepts: vectors^creating with array
551: Concepts: vectors^ghosted
553: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
554: VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
555: VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
557: @*/
558: PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
559: {
560: PetscErrorCode ierr;
561: Vec_MPI *w;
562: PetscScalar *larray;
563: IS from,to;
564: ISLocalToGlobalMapping ltog;
565: PetscInt rstart,i,*indices;
568: *vv = 0;
570: if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
571: if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
572: if (nghost < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
573: PetscSplitOwnership(comm,&n,&N);
574: /* Create global representation */
575: VecCreate(comm,vv);
576: VecSetSizes(*vv,n,N);
577: VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);
578: w = (Vec_MPI *)(*vv)->data;
579: /* Create local representation */
580: VecGetArray(*vv,&larray);
581: VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
582: PetscLogObjectParent(*vv,w->localrep);
583: VecRestoreArray(*vv,&larray);
585: /*
586: Create scatter context for scattering (updating) ghost values
587: */
588: ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);
589: ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
590: VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
591: PetscLogObjectParent(*vv,w->localupdate);
592: ISDestroy(&to);
593: ISDestroy(&from);
595: /* set local to global mapping for ghosted vector */
596: PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);
597: VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
598: for (i=0; i<n; i++) {
599: indices[i] = rstart + i;
600: }
601: for (i=0; i<nghost; i++) {
602: indices[n+i] = ghosts[i];
603: }
604: ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,<og);
605: VecSetLocalToGlobalMapping(*vv,ltog);
606: ISLocalToGlobalMappingDestroy(<og);
607: return(0);
608: }
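The array-length requirement above means the caller's array backs both the owned and the ghost entries of the local form. A hedged fragment (the ghost indices are application specific and must be filled with valid global indices before the call; error checking omitted):

  PetscInt    n = 4,nghost = 2;
  PetscInt    ghosts[2];                        /* to be filled with global indices of the ghost points */
  PetscScalar storage[6];                       /* n + nghost = 6 user-owned entries */
  Vec         v;

  /* ... fill ghosts[0..1] ... */
  VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,storage,&v);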
612: /*@
613: VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
615: Collective on MPI_Comm
617: Input Parameters:
618: + comm - the MPI communicator to use
619: . n - local vector length
620: .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
621: . nghost - number of local ghost points
622: - ghosts - global indices of ghost points
624: Output Parameter:
625: . vv - the global vector representation (without ghost points as part of vector)
626:
627: Notes:
628: Use VecGhostGetLocalForm() to access the local, ghosted representation
629: of the vector.
631: This also automatically sets the ISLocalToGlobalMapping() for this vector.
633: Level: advanced
635: Concepts: vectors^ghosted
637: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
638: VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
639: VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
640: VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
642: @*/
643: PetscErrorCode VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
644: {
648: VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
649: return(0);
650: }
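A small, self-contained sketch of the intended usage: create the ghosted vector, scatter the owned values into the ghost slots with VecGhostUpdateBegin()/End(), and read owned plus ghost entries through the local form. The neighbor-based ghost index below is just an example layout (4 entries per process, one ghost taken from the next process); error checking omitted:

  PetscMPIInt rank,size;
  PetscInt    ghosts[1];
  Vec         v,vlocal;

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  ghosts[0] = 4*((rank+1)%size);                         /* first entry owned by the next process */
  VecCreateGhost(PETSC_COMM_WORLD,4,PETSC_DECIDE,1,ghosts,&v);
  VecSet(v,(PetscScalar)rank);                           /* sets the owned entries */
  VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);  /* pull ghost values from their owners */
  VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
  VecGhostGetLocalForm(v,&vlocal);                       /* entries 0..3 are owned, entry 4 is the ghost */
  VecGhostRestoreLocalForm(v,&vlocal);
  VecDestroy(&v);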
653: /* ------------------------------------------------------------------------------------------*/
656: /*@C
657: VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
658: the caller allocates the array space. Indices in the ghost region are based on blocks.
660: Collective on MPI_Comm
662: Input Parameters:
663: + comm - the MPI communicator to use
664: . bs - block size
665: . n - local vector length
666: .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
667: . nghost - number of local ghost blocks
668: .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed); counts are by block, not by index
669: -  array - the space to store the vector values (must be at least n + nghost*bs entries long)
671: Output Parameter:
672: . vv - the global vector representation (without ghost points as part of vector)
673:
674: Notes:
675: Use VecGhostGetLocalForm() to access the local, ghosted representation
676: of the vector.
678:    n is the local vector size (the total local size, not the number of blocks), while nghost
679:    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
680:    portion is bs*nghost
682: Level: advanced
684: Concepts: vectors^creating ghosted
685: Concepts: vectors^creating with array
687: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
688: VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
689:           VecCreateGhostWithArray(), VecCreateGhostBlock()
691: @*/
692: PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
693: {
695: Vec_MPI *w;
696: PetscScalar *larray;
697: IS from,to;
698: ISLocalToGlobalMapping ltog;
699: PetscInt rstart,i,nb,*indices;
702: *vv = 0;
704: if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
705: if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
706: if (nghost < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
707: if (n % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
708: PetscSplitOwnership(comm,&n,&N);
709: /* Create global representation */
710: VecCreate(comm,vv);
711: VecSetSizes(*vv,n,N);
712: VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);
713: VecSetBlockSize(*vv,bs);
714: w = (Vec_MPI *)(*vv)->data;
715: /* Create local representation */
716: VecGetArray(*vv,&larray);
717: VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
718: VecSetBlockSize(w->localrep,bs);
719: PetscLogObjectParent(*vv,w->localrep);
720: VecRestoreArray(*vv,&larray);
722: /*
723: Create scatter context for scattering (updating) ghost values
724: */
725: ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);
726: ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
727: VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
728: PetscLogObjectParent(*vv,w->localupdate);
729: ISDestroy(&to);
730: ISDestroy(&from);
732: /* set local to global mapping for ghosted vector */
733: nb = n/bs;
734: PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);
735: VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
736: for (i=0; i<nb; i++) {
737: indices[i] = rstart + i*bs;
738: }
739: for (i=0; i<nghost; i++) {
740: indices[nb+i] = ghosts[i];
741: }
742: ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,<og);
743: VecSetLocalToGlobalMappingBlock(*vv,ltog);
744: ISLocalToGlobalMappingDestroy(<og);
745: return(0);
746: }
750: /*@
751: VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
752:         The indexing of the ghost points is done by blocks.
754: Collective on MPI_Comm
756: Input Parameters:
757: + comm - the MPI communicator to use
758: . bs - the block size
759: . n - local vector length
760: .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
761: . nghost - number of local ghost blocks
762: - ghosts - global indices of ghost blocks, counts are by block, not by individual index
764: Output Parameter:
765: . vv - the global vector representation (without ghost points as part of vector)
766:
767: Notes:
768: Use VecGhostGetLocalForm() to access the local, ghosted representation
769: of the vector.
771:    n is the local vector size (the total local size, not the number of blocks), while nghost
772:    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
773:    portion is bs*nghost
775: Level: advanced
777: Concepts: vectors^ghosted
779: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
780: VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
781: VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
783: @*/
784: PetscErrorCode VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
785: {
789: VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
790: return(0);
791: }
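A hedged fragment emphasizing the block conventions documented above: n counts scalar entries, nghost counts ghost blocks, and ghosts[] holds global block indices. Here each process owns 3 blocks of size 2 and takes one ghost block from the next process (illustrative; error checking omitted):

  PetscMPIInt rank,size;
  PetscInt    bs = 2,n = 6,nghost = 1,ghosts[1];
  Vec         v;

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  ghosts[0] = 3*((rank+1)%size);     /* global block index of the neighbor's first block */
  VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,&v);
  /* the local form has n + bs*nghost = 8 entries */
  VecDestroy(&v);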