Actual source code: mpiadj.c
2: /*
3: Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4: */
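/*
   Editorial note (illustrative sketch, not part of mpiadj.c): the MPIAdj format keeps
   each process's local rows in a compressed, CSR-like layout. Assuming the Mat_MPIAdj
   fields used throughout this file, a->i[row] .. a->i[row+1] delimit the entries of a
   local row inside the column-index array a->j (0-based, sorted within each row, with
   no diagonal entry), and a->nz = a->i[nlocalrows]. For example, a process owning the
   three local rows

     row 0: neighbors 1, 4
     row 1: neighbors 0, 2
     row 2: neighbor  5

   would hold

     i[] = {0, 2, 4, 5}       nlocalrows+1 row offsets
     j[] = {1, 4, 0, 2, 5}    i[nlocalrows] = 5 column indices
*/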
5: #include <../src/mat/impls/adj/mpi/mpiadj.h>
9: PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10: {
11: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
12: PetscErrorCode ierr;
13: PetscInt i,j,m = A->rmap->n;
14: const char *name;
15: PetscViewerFormat format;
18: PetscObjectGetName((PetscObject)A,&name);
19: PetscViewerGetFormat(viewer,&format);
20: if (format == PETSC_VIEWER_ASCII_INFO) {
21: return(0);
22: } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23: SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"MATLAB format not supported");
24: } else {
25: PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
26: PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
27: for (i=0; i<m; i++) {
28: PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);
29: for (j=a->i[i]; j<a->i[i+1]; j++) {
30: PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
31: }
32: PetscViewerASCIISynchronizedPrintf(viewer,"\n");
33: }
34: PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
35: PetscViewerFlush(viewer);
36: PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
37: }
38: return(0);
39: }
43: PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
44: {
46: PetscBool iascii;
49: PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
50: if (iascii) {
51: MatView_MPIAdj_ASCII(A,viewer);
52: } else {
53: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
54: }
55: return(0);
56: }
60: PetscErrorCode MatDestroy_MPIAdj(Mat mat)
61: {
62: Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
66: #if defined(PETSC_USE_LOG)
67: PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
68: #endif
69: PetscFree(a->diag);
70: if (a->freeaij) {
71: if (a->freeaijwithfree) {
72: if (a->i) free(a->i);
73: if (a->j) free(a->j);
74: } else {
75: PetscFree(a->i);
76: PetscFree(a->j);
77: PetscFree(a->values);
78: }
79: }
80: PetscFree(mat->data);
81: PetscObjectChangeTypeName((PetscObject)mat,0);
82: PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);
83: return(0);
84: }
88: PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
89: {
90: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
94: switch (op) {
95: case MAT_SYMMETRIC:
96: case MAT_STRUCTURALLY_SYMMETRIC:
97: case MAT_HERMITIAN:
98: a->symmetric = flg;
99: break;
100: case MAT_SYMMETRY_ETERNAL:
101: break;
102: default:
103: PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
104: break;
105: }
106: return(0);
107: }
110: /*
111: Adds diagonal pointers to the sparse matrix structure.
112: */
116: PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
117: {
118: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
120: PetscInt i,j,m = A->rmap->n;
123: PetscMalloc(m*sizeof(PetscInt),&a->diag);
124: PetscLogObjectMemory(A,m*sizeof(PetscInt));
125: for (i=0; i<A->rmap->n; i++) {
126: for (j=a->i[i]; j<a->i[i+1]; j++) {
127: if (a->j[j] == i) {
128: a->diag[i] = j;
129: break;
130: }
131: }
132: }
133: return(0);
134: }
138: PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
139: {
140: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
141: PetscInt *itmp;
144: row -= A->rmap->rstart;
146: if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
148: *nz = a->i[row+1] - a->i[row];
149: if (v) *v = PETSC_NULL;
150: if (idx) {
151: itmp = a->j + a->i[row];
152: if (*nz) {
153: *idx = itmp;
154: }
155: else *idx = 0;
156: }
157: return(0);
158: }
162: PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
163: {
165: return(0);
166: }
170: PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
171: {
172: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
174: PetscBool flag;
177: /* the matrices differ if the local dimensions or the number of nonzeros differ */
178: if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
179: flag = PETSC_FALSE;
180: } else {
181: PetscBool flagi,flagj;
182: /* check that the row offsets a->i and the column indices a->j match */
183: PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flagi);
184: PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flagj);
185: flag = (PetscBool)(flagi && flagj);
186: }
188: MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);
189: return(0);
190: }
194: PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool *done)
195: {
196: PetscInt i;
197: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
200: *m = A->rmap->n;
201: *ia = a->i;
202: *ja = a->j;
203: *done = PETSC_TRUE;
204: if (oshift) {
205: for (i=0; i<(*ia)[*m]; i++) {
206: (*ja)[i]++;
207: }
208: for (i=0; i<=(*m); i++) (*ia)[i]++;
209: }
210: return(0);
211: }
215: PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool *done)
216: {
217: PetscInt i;
218: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
221: if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
222: if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
223: if (oshift) {
224: for (i=0; i<=(*m); i++) (*ia)[i]--;
225: for (i=0; i<(*ia)[*m]; i++) {
226: (*ja)[i]--;
227: }
228: }
229: return(0);
230: }
234: PetscErrorCode MatConvertFrom_MPIAdj(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
235: {
236: Mat B;
237: PetscErrorCode ierr;
238: PetscInt i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
239: const PetscInt *rj;
240: const PetscScalar *ra;
241: MPI_Comm comm;
244: MatGetSize(A,PETSC_NULL,&N);
245: MatGetLocalSize(A,&m,PETSC_NULL);
246: MatGetOwnershipRange(A,&rstart,PETSC_NULL);
247:
248: /* count the number of nonzeros per row */
249: for (i=0; i<m; i++) {
250: MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);
251: for (j=0; j<len; j++) {
252: if (rj[j] == i+rstart) {len--; break;} /* don't count diagonal */
253: }
254: MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);
255: nzeros += len;
256: }
258: /* malloc space for nonzeros */
259: PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);
260: PetscMalloc((N+1)*sizeof(PetscInt),&ia);
261: PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);
263: nzeros = 0;
264: ia[0] = 0;
265: for (i=0; i<m; i++) {
266: MatGetRow(A,i+rstart,&len,&rj,&ra);
267: cnt = 0;
268: for (j=0; j<len; j++) {
269: if (rj[j] != i+rstart) { /* if not diagonal */
270: a[nzeros+cnt] = (PetscInt) PetscAbsScalar(ra[j]);
271: ja[nzeros+cnt++] = rj[j];
272: }
273: }
274: MatRestoreRow(A,i+rstart,&len,&rj,&ra);
275: nzeros += cnt;
276: ia[i+1] = nzeros;
277: }
279: PetscObjectGetComm((PetscObject)A,&comm);
280: MatCreate(comm,&B);
281: MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
282: MatSetType(B,type);
283: MatMPIAdjSetPreallocation(B,ia,ja,a);
285: if (reuse == MAT_REUSE_MATRIX) {
286: MatHeaderReplace(A,B);
287: } else {
288: *newmat = B;
289: }
290: return(0);
291: }
293: /* -------------------------------------------------------------------*/
294: static struct _MatOps MatOps_Values = {0,
295: MatGetRow_MPIAdj,
296: MatRestoreRow_MPIAdj,
297: 0,
298: /* 4*/ 0,
299: 0,
300: 0,
301: 0,
302: 0,
303: 0,
304: /*10*/ 0,
305: 0,
306: 0,
307: 0,
308: 0,
309: /*15*/ 0,
310: MatEqual_MPIAdj,
311: 0,
312: 0,
313: 0,
314: /*20*/ 0,
315: 0,
316: MatSetOption_MPIAdj,
317: 0,
318: /*24*/ 0,
319: 0,
320: 0,
321: 0,
322: 0,
323: /*29*/ 0,
324: 0,
325: 0,
326: 0,
327: 0,
328: /*34*/ 0,
329: 0,
330: 0,
331: 0,
332: 0,
333: /*39*/ 0,
334: 0,
335: 0,
336: 0,
337: 0,
338: /*44*/ 0,
339: 0,
340: 0,
341: 0,
342: 0,
343: /*49*/ 0,
344: MatGetRowIJ_MPIAdj,
345: MatRestoreRowIJ_MPIAdj,
346: 0,
347: 0,
348: /*54*/ 0,
349: 0,
350: 0,
351: 0,
352: 0,
353: /*59*/ 0,
354: MatDestroy_MPIAdj,
355: MatView_MPIAdj,
356: MatConvertFrom_MPIAdj,
357: 0,
358: /*64*/ 0,
359: 0,
360: 0,
361: 0,
362: 0,
363: /*69*/ 0,
364: 0,
365: 0,
366: 0,
367: 0,
368: /*74*/ 0,
369: 0,
370: 0,
371: 0,
372: 0,
373: /*79*/ 0,
374: 0,
375: 0,
376: 0,
377: 0,
378: /*84*/ 0,
379: 0,
380: 0,
381: 0,
382: 0,
383: /*89*/ 0,
384: 0,
385: 0,
386: 0,
387: 0,
388: /*94*/ 0,
389: 0,
390: 0,
391: 0};
396: PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
397: {
398: Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
400: #if defined(PETSC_USE_DEBUG)
401: PetscInt ii;
402: #endif
405: PetscLayoutSetBlockSize(B->rmap,1);
406: PetscLayoutSetBlockSize(B->cmap,1);
407: PetscLayoutSetUp(B->rmap);
408: PetscLayoutSetUp(B->cmap);
410: #if defined(PETSC_USE_DEBUG)
411: if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
412: for (ii=1; ii<B->rmap->n; ii++) {
413: if (i[ii] < 0 || i[ii] < i[ii-1]) {
414: SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
415: }
416: }
417: for (ii=0; ii<i[B->rmap->n]; ii++) {
418: if (j[ii] < 0 || j[ii] >= B->cmap->N) {
419: SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D out of range\n",ii,j[ii]);
420: }
421: }
422: #endif
423: B->preallocated = PETSC_TRUE;
425: b->j = j;
426: b->i = i;
427: b->values = values;
429: b->nz = i[B->rmap->n];
430: b->diag = 0;
431: b->symmetric = PETSC_FALSE;
432: b->freeaij = PETSC_TRUE;
434: MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
435: MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
436: return(0);
437: }
440: /*MC
441: MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
442: intended for use in constructing orderings and partitionings.
444: Level: beginner
446: .seealso: MatCreateMPIAdj
447: M*/
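/*
   Illustrative sketch (not part of mpiadj.c): a MATMPIADJ matrix is usually obtained
   either with MatCreateMPIAdj()/MatMPIAdjSetPreallocation() defined below, or by
   converting an existing assembled matrix, e.g.

     Mat A,adj;
     ...                                           assemble A as usual
     MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);
     ...                                           use adj for ordering or partitioning
     MatDestroy(&adj);

   The names A and adj are arbitrary; error checking (ierr/CHKERRQ) is omitted here,
   as it is throughout this listing.
*/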
452: PetscErrorCode MatCreate_MPIAdj(Mat B)
453: {
454: Mat_MPIAdj *b;
458: PetscNewLog(B,Mat_MPIAdj,&b);
459: B->data = (void*)b;
460: PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
461: B->assembled = PETSC_FALSE;
462:
463: PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
464: "MatMPIAdjSetPreallocation_MPIAdj",
465: MatMPIAdjSetPreallocation_MPIAdj);
466: PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);
467: return(0);
468: }
473: /*@C
474: MatMPIAdjSetPreallocation - Sets the arrays that define the nonzero structure (and optional edge weights) of the matrix
476: Logically Collective on MPI_Comm
478: Input Parameters:
479: + B - the matrix
480: . i - the indices into j for the start of each row
481: . j - the column indices for each row (sorted for each row).
482: The indices in i and j start with zero (NOT with one).
483: - values - [optional] edge weights
485: Level: intermediate
487: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
488: @*/
489: PetscErrorCode MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
490: {
494: PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
495: return(0);
496: }
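/*
   Illustrative sketch (not part of mpiadj.c): using MatMPIAdjSetPreallocation()
   directly. Every process supplies the arrays for its own local rows; they must be
   allocated with PetscMalloc() because the matrix takes ownership and frees them in
   MatDestroy(). The names B, ii, jj, the sizes, and comm (any MPI communicator,
   e.g. PETSC_COMM_WORLD) are made up for the example.

     Mat      B;
     PetscInt *ii,*jj;
     PetscMalloc(3*sizeof(PetscInt),&ii);    2 local rows, so 3 row offsets
     PetscMalloc(3*sizeof(PetscInt),&jj);
     ii[0] = 0; ii[1] = 2; ii[2] = 3;        row 0 has 2 entries, row 1 has 1
     jj[0] = 1; jj[1] = 3; jj[2] = 0;        global column indices, sorted per row
     MatCreate(comm,&B);
     MatSetSizes(B,2,PETSC_DETERMINE,PETSC_DETERMINE,4);
     MatSetType(B,MATMPIADJ);
     MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);
*/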
500: /*@C
501: MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
502: The matrix does not have numerical values associated with it, but is
503: intended for ordering (to reduce bandwidth etc) and partitioning.
505: Collective on MPI_Comm
507: Input Parameters:
508: + comm - MPI communicator
509: . m - number of local rows
510: . N - number of global columns
511: . i - the indices into j for the start of each row
512: . j - the column indices for each row (sorted for each row).
513: The indices in i and j start with zero (NOT with one).
514: - values - [optional] edge weights
516: Output Parameter:
517: . A - the matrix
519: Level: intermediate
521: Notes: This matrix object does not support most matrix operations, including
522: MatSetValues().
523: You must NOT free the i, j, and values arrays yourself; PETSc will free them
524: when the matrix is destroyed. You must allocate them with PetscMalloc(). If you
525: call from Fortran you need not create the arrays with PetscMalloc().
526: The arrays should not include the matrix diagonal entries.
528: If you already have a matrix, you can create its adjacency matrix by a call
529: to MatConvert, specifying a type of MATMPIADJ.
531: Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
533: .seealso: MatCreate(), MatConvert(), MatGetOrdering()
534: @*/
535: PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
536: {
540: MatCreate(comm,A);
541: MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
542: MatSetType(*A,MATMPIADJ);
543: MatMPIAdjSetPreallocation(*A,i,j,values);
544: return(0);
545: }
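/*
   Illustrative sketch (not part of mpiadj.c): the convenience routine above collapses
   the MatCreate/MatSetSizes/MatSetType/MatMPIAdjSetPreallocation sequence into a
   single call. With ii and jj built as in the previous sketch:

     Mat adj;
     MatCreateMPIAdj(comm,2,4,ii,jj,PETSC_NULL,&adj);

   After this call the arrays ii and jj belong to adj and must not be freed by the
   caller; they are released when the matrix is destroyed.
*/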