Actual source code: mpiaij.h
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/sys/ctable.h>

typedef struct {
  Mat           A,B;                 /* local submatrices: A (diag part), B (off-diag part) */
  PetscMPIInt   size;                /* size of communicator */
  PetscMPIInt   rank;                /* rank of proc in communicator */

  /* The following variables are used for matrix assembly */
  PetscBool     donotstash;          /* PETSC_TRUE if off-processor entries are dropped */
  MPI_Request   *send_waits;         /* array of send requests */
  MPI_Request   *recv_waits;         /* array of receive requests */
  PetscInt      nsends,nrecvs;       /* numbers of sends and receives */
  PetscScalar   *svalues,*rvalues;   /* sending and receiving data */
  PetscInt      rmax;                /* maximum message length */
#if defined(PETSC_USE_CTABLE)
  PetscTable    colmap;
#else
  PetscInt      *colmap;             /* local column number of each off-diagonal column */
#endif
  PetscInt      *garray;             /* global indices of all off-processor columns */

  /* The following variables are used for matrix-vector products */
  Vec           lvec;                /* local vector */
  Vec           diag;
  VecScatter    Mvctx;               /* scatter context for vector */
  PetscBool     roworiented;         /* if true, row-oriented input; default is true */

  /* The following variables are for MatGetRow() */
  PetscInt      *rowindices;         /* column indices for the row */
  PetscScalar   *rowvalues;          /* nonzero values in the row */
  PetscBool     getrowactive;        /* indicates a MatGetRow() call that has not been restored */

  /* Used by MatDistribute_MPIAIJ() to allow reuse of previous matrix allocation and nonzero pattern */
  PetscInt      *ld;                 /* number of entries per row left of the diagonal block */
} Mat_MPIAIJ;
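The fields above reflect PETSc's parallel AIJ layout: each rank stores its rows as two sequential AIJ matrices, A holding the diagonal block (columns owned by this rank) and B holding the off-diagonal block with locally renumbered columns, while garray maps those compressed column indices back to global ones. Below is a minimal usage sketch, outside this header, of inspecting that split through the public accessor MatMPIAIJGetSeqAIJ(); the global size and preallocation numbers are placeholders, and the creation call assumes a PETSc version that provides MatCreateAIJ().

#include <petscmat.h>

int main(int argc,char **argv)
{
  Mat            M,Ad,Ao;       /* Ad corresponds to the struct's A (diag block), Ao to B (off-diag block) */
  const PetscInt *garray;       /* global column index of each local column of Ao */
  PetscInt       nloc,noff;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);CHKERRQ(ierr);
  /* placeholder sizes and preallocation, for illustration only */
  ierr = MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,8,8,3,NULL,2,NULL,&M);CHKERRQ(ierr);
  /* ... MatSetValues() on M ... */
  ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Ad contains the locally owned columns; Ao's columns are renumbered 0..noff-1,
     and garray gives their global indices */
  ierr = MatMPIAIJGetSeqAIJ(M,&Ad,&Ao,&garray);CHKERRQ(ierr);
  ierr = MatGetLocalSize(M,&nloc,NULL);CHKERRQ(ierr);
  ierr = MatGetSize(Ao,NULL,&noff);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_SELF,"local rows %D, off-diagonal columns %D\n",nloc,noff);CHKERRQ(ierr);

  ierr = MatDestroy(&M);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}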
typedef struct { /* used by MatMatMult_MPIAIJ_MPIAIJ and MatPtAP_MPIAIJ_MPIAIJ for reusing the symbolic matrix product */
  PetscInt       *startsj,*startsj_r;
  PetscScalar    *bufa;
  IS             isrowa,isrowb,iscolb;
  Mat            *aseq,*bseq,C_seq;  /* A_seq=aseq[0], B_seq=bseq[0] */
  Mat            A_loc,B_seq;
  Mat            B_loc,B_oth;        /* partial B_seq -- intended to replace B_seq */
  PetscInt       brstart;            /* first owned row of B in matrix bseq[0]; brend = brstart + B->m */
  PetscInt       *abi,*abj;          /* symbolic i and j arrays of the local product A_loc*B_seq */
  PetscInt       abnz_max;           /* max(abi[i+1] - abi[i]), maximum number of nonzeros in a row of A_loc*B_seq */
  MatReuse       reuse;
  PetscErrorCode (*destroy)(Mat);
  PetscErrorCode (*duplicate)(Mat,MatDuplicateOption,Mat*);
} Mat_MatMatMultMPI;
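Mat_MatMatMultMPI caches the symbolic product (the abi/abj row structure, communication buffers, and the gathered pieces of B) so that a numerically updated product can skip the symbolic phase. A hedged sketch of the corresponding user-level calling pattern, assuming A and B are assembled MPIAIJ matrices with compatible dimensions:

#include <petscmat.h>

PetscErrorCode product_with_reuse(Mat A,Mat B,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* First call: symbolic + numeric product; the symbolic data is kept with C */
  ierr = MatMatMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,C);CHKERRQ(ierr);

  /* ... update numerical values of A and/or B, same nonzero pattern ... */

  /* Subsequent calls: reuse the cached symbolic product, redo only the numerics */
  ierr = MatMatMult(A,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,C);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

The MAT_REUSE_MATRIX call is valid only while the nonzero patterns of A and B remain unchanged since the MAT_INITIAL_MATRIX call.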
typedef struct { /* used by MatMerge_SeqsToMPI for reusing the merged matrix */
  PetscLayout    rowmap;
  PetscInt       **buf_ri,**buf_rj;
  PetscMPIInt    *len_s,*len_r,*id_r;  /* arrays of length comm->size storing send/recv matrix values */
  PetscMPIInt    nsend,nrecv;
  PetscInt       *bi,*bj;              /* i and j arrays of the local portion of the parallel C = P^T*A*P */
  PetscInt       *owners_co,*coi,*coj; /* i and j arrays of (p->B)^T*A*P - used in the communication */
  PetscErrorCode (*destroy)(Mat);
  PetscErrorCode (*duplicate)(Mat,MatDuplicateOption,Mat*);
} Mat_Merge_SeqsToMPI;
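Mat_Merge_SeqsToMPI plays the analogous caching role for the Galerkin product C = P^T*A*P assembled across ranks. A minimal sketch of the user-facing reuse pattern via MatPtAP(), assuming A and P are assembled MPIAIJ matrices of compatible sizes (e.g. a fine-grid operator and an interpolation):

#include <petscmat.h>

PetscErrorCode galerkin_with_reuse(Mat A,Mat P,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* symbolic + numeric C = P^T*A*P; merged structure is cached with C */
  ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,C);CHKERRQ(ierr);

  /* ... update values of A without changing its nonzero pattern ... */

  /* numeric-only recomputation reusing the cached structure */
  ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,PETSC_DEFAULT,C);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}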
#if !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
#endif

#endif