Actual source code: mpiaijcusp.cu

  2: #include <../src/mat/impls/aij/mpi/mpiaij.h>   /*I "petscmat.h" I*/

  7: PetscErrorCode  MatMPIAIJSetPreallocation_MPIAIJCUSP(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
  8: {
  9:   Mat_MPIAIJ     *b;
 11:   PetscInt       i;

 14:   if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5;
 15:   if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2;
 16:   if (d_nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz);
 17:   if (o_nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz);

 19:   PetscLayoutSetBlockSize(B->rmap,1);
 20:   PetscLayoutSetBlockSize(B->cmap,1);
 21:   PetscLayoutSetUp(B->rmap);
 22:   PetscLayoutSetUp(B->cmap);
 23:   if (d_nnz) {
 24:     for (i=0; i<B->rmap->n; i++) {
 25:       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
 26:     }
 27:   }
 28:   if (o_nnz) {
 29:     for (i=0; i<B->rmap->n; i++) {
 30:       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
 31:     }
 32:   }
 33:   b = (Mat_MPIAIJ*)B->data;

 35:   if (!B->preallocated) {
 36:     /* Explicitly create 2 MATSEQAIJ matrices. */
 37:     MatCreate(PETSC_COMM_SELF,&b->A);
 38:     MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);
 39:     MatSetType(b->A,MATSEQAIJCUSP);
 40:     PetscLogObjectParent(B,b->A);
 41:     MatCreate(PETSC_COMM_SELF,&b->B);
 42:     MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);
 43:     MatSetType(b->B,MATSEQAIJCUSP);
 44:     PetscLogObjectParent(B,b->B);
 45:   }

 47:   MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);
 48:   MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);
 49:   B->preallocated = PETSC_TRUE;
 50:   return(0);
 51: }
 55: PetscErrorCode  MatGetVecs_MPIAIJCUSP(Mat mat,Vec *right,Vec *left)
 56: {

 60:   if (right) {
 61:     VecCreate(((PetscObject)mat)->comm,right);
 62:     VecSetSizes(*right,mat->cmap->n,PETSC_DETERMINE);
 63:     VecSetBlockSize(*right,mat->rmap->bs);
 64:     VecSetType(*right,VECCUSP);
 65:     PetscLayoutReference(mat->cmap,&(*right)->map);
 66:   }
 67:   if (left) {
 68:     VecCreate(((PetscObject)mat)->comm,left);
 69:     VecSetSizes(*left,mat->rmap->n,PETSC_DETERMINE);
 70:     VecSetBlockSize(*left,mat->rmap->bs);
 71:     VecSetType(*left,VECCUSP);
 72:     PetscLayoutReference(mat->rmap,&(*left)->map);
 73:   }
 74:   return(0);
 75: }

 78: PetscErrorCode  MatCreate_MPIAIJ(Mat);
 80: PetscErrorCode MatSetValuesBatch_MPIAIJCUSP(Mat J, PetscInt Ne, PetscInt Nl, PetscInt *elemRows, const PetscScalar *elemMats);

 85: PetscErrorCode  MatCreate_MPIAIJCUSP(Mat B)
 86: {

 90:   MatCreate_MPIAIJ(B);
 91:   PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C",
 92:                                      "MatMPIAIJSetPreallocation_MPIAIJCUSP",
 93:                                       MatMPIAIJSetPreallocation_MPIAIJCUSP);
 94:   B->ops->getvecs        = MatGetVecs_MPIAIJCUSP;
 95:   B->ops->setvaluesbatch = MatSetValuesBatch_MPIAIJCUSP;
 96:   PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJCUSP);
 97:   return(0);
 98: }


104: PetscErrorCode  MatCreateMPIAIJCUSP(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
105: {
107:   PetscMPIInt    size;

110:   MatCreate(comm,A);
111:   MatSetSizes(*A,m,n,M,N);
112:   MPI_Comm_size(comm,&size);
113:   if (size > 1) {
114:     MatSetType(*A,MATMPIAIJCUSP);
115:     MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);
116:   } else {
117:     MatSetType(*A,MATSEQAIJCUSP);
118:     MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);
119:   }
120:   return(0);
121: }

123: /*MC
124:    MATAIJCUSP - MATAIJCUSP = "aijcusp" - A matrix type to be used for sparse matrices.

126:    This matrix type is identical to MATSEQAIJCUSP when constructed with a single process communicator,
127:    and MATMPIAIJCUSP otherwise.  As a result, for single process communicators, 
128:   MatSeqAIJSetPreallocation is supported, and similarly MatMPIAIJSetPreallocation is supported 
129:   for communicators controlling multiple processes.  It is recommended that you call both of
130:   the above preallocation routines for simplicity.

132:    Options Database Keys:
133: . -mat_type aijcusp - sets the matrix type to "aijcusp" during a call to MatSetFromOptions()

135:   Level: beginner

137: .seealso: MatCreateMPIAIJ,MATSEQAIJ,MATMPIAIJ, MATMPIAIJCUSP, MATSEQAIJCUSP
138: M*/