Actual source code: pastix.c
/*
     Provides an interface to the PaStiX sparse solver
*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>

#if defined(PETSC_HAVE_STDLIB_H)
#include <stdlib.h>
#endif
#if defined(PETSC_HAVE_STRING_H)
#include <string.h>
#endif

#include <pastix.h>
typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;    /* PaStiX data storage structure                         */
  MatStructure   matstruc;
  PetscInt       n;              /* Number of columns in the matrix                       */
  PetscInt      *colptr;         /* Index of first element of each column in row and val  */
  PetscInt      *row;            /* Row of each element of the matrix                     */
  PetscScalar   *val;            /* Value of each element of the matrix                   */
  PetscInt      *perm;           /* Permutation table                                     */
  PetscInt      *invp;           /* Inverse permutation table                             */
  PetscScalar   *rhs;            /* Right-hand side                                       */
  PetscInt       rhsnbr;         /* Number of right-hand sides (must be 1)                */
  PetscInt       iparm[64];      /* Integer parameters                                    */
  double         dparm[64];      /* Floating-point parameters                             */
  MPI_Comm       pastix_comm;    /* PaStiX MPI communicator                               */
  PetscMPIInt    commRank;       /* MPI rank                                              */
  PetscMPIInt    commSize;       /* MPI communicator size                                 */
  PetscBool      CleanUpPastix;  /* Boolean indicating if we call the PaStiX clean step   */
  VecScatter     scat_rhs;
  VecScatter     scat_sol;
  Vec            b_seq;
  PetscBool      isAIJ;
  PetscErrorCode (*Destroy)(Mat);
} Mat_Pastix;

/*
   Convert a PETSc seqaij matrix to CSC: colptr[n], row[nz], val[nz]

   input:
     A       - matrix in seqaij or mpisbaij (bs=1) format
     valOnly - FALSE: allocate the CSC arrays and fill both structure and values
               TRUE:  only fill the values (the structure is reused)

   output:
     n       - size of the matrix
     colptr  - index of the first element of each column in row and val
     row     - row of each element of the matrix
     values  - value of each element of the matrix
*/
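/*
   Worked example (illustrative, not part of the original source): the matrix

        [ 1 0 2 ]
        [ 0 3 0 ]
        [ 4 0 5 ]

   stored in 1-based CSC (the numbering PaStiX expects) gives

        n      = 3
        colptr = { 1, 3, 4, 6 }     column j holds entries colptr[j] .. colptr[j+1]-1 (both 1-based)
        row    = { 1, 3, 2, 1, 3 }
        val    = { 1, 4, 3, 2, 5 }
*/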
PetscErrorCode MatConvertToCSC(Mat A,PetscBool valOnly,PetscInt *n,PetscInt **colptr,PetscInt **row,PetscScalar **values)
{
  Mat_SeqAIJ     *aa      = (Mat_SeqAIJ*)A->data;
  PetscInt       *rowptr  = aa->i;
  PetscInt       *col     = aa->j;
  PetscScalar    *rvalues = aa->a;
  PetscInt        m       = A->rmap->N;
  PetscInt        nnz;
  PetscInt        i,j,k;
  PetscInt        base = 1;      /* PaStiX expects 1-based (Fortran-style) indices */
  PetscInt        idx;
  PetscErrorCode  ierr;
  PetscInt        colidx;
  PetscInt       *colcount;
  PetscBool       isSBAIJ;
  PetscBool       isSeqSBAIJ;
  PetscBool       isMpiSBAIJ;
  PetscBool       isSym;
  MatIsSymmetric(A,0.0,&isSym);
  PetscTypeCompare((PetscObject)A,MATSBAIJ,&isSBAIJ);
  PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  PetscTypeCompare((PetscObject)A,MATMPISBAIJ,&isMpiSBAIJ);

  *n = A->cmap->N;

  /* PaStiX only needs one triangle when the matrix is symmetric. An AIJ matrix
     stores both triangles, so keep half of the off-diagonal entries plus the n
     diagonal entries (assuming all diagonal entries are stored); SBAIJ already
     stores a single triangle. */
  if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) {
    nnz = (aa->nz - *n)/2 + *n;
  } else {
    nnz = aa->nz;
  }
  if (!valOnly){
    PetscMalloc(((*n)+1)*sizeof(PetscInt),colptr);
    PetscMalloc(nnz*sizeof(PetscInt),row);
    PetscMalloc(nnz*sizeof(PetscScalar),values);

    if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
      /* The CSR arrays of the stored triangle, reinterpreted as CSC, describe
         the transposed triangle, which is equivalent for a symmetric matrix;
         copy them and shift to 1-based indexing */
      PetscMemcpy(*colptr,rowptr,((*n)+1)*sizeof(PetscInt));
      for (i = 0; i < *n+1; i++) (*colptr)[i] += base;
      PetscMemcpy(*row,col,nnz*sizeof(PetscInt));
      for (i = 0; i < nnz; i++) (*row)[i] += base;
      PetscMemcpy(*values,rvalues,nnz*sizeof(PetscScalar));
    } else {
      PetscMalloc((*n)*sizeof(PetscInt),&colcount);

      for (i = 0; i < m; i++) colcount[i] = 0;
      /* Fill-in colptr: count the entries of each column */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) colcount[col[j]]++;
        }
      }

      (*colptr)[0] = base;
      for (j = 0; j < *n; j++) {
        (*colptr)[j+1] = (*colptr)[j] + colcount[j];
        /* in the next loop we fill starting from (*colptr)[colidx] - base */
        colcount[j] = -base;
      }

      /* Fill-in rows and values */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) {
            colidx         = col[j];
            idx            = (*colptr)[colidx] + colcount[colidx];
            (*row)[idx]    = i + base;
            (*values)[idx] = rvalues[j];
            colcount[colidx]++;
          }
        }
      }
      PetscFree(colcount);
    }
  } else {
    /* Fill-in only the values */
    for (i = 0; i < m; i++) {
      for (j = rowptr[i]; j < rowptr[i+1]; j++) {
        colidx = col[j];
        if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) || !isSym || col[j] <= i) {
          /* look for the value to fill */
          for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx+1] - base; k++) {
            if (((*row)[k]-base) == i) {
              (*values)[k] = rvalues[j];
              break;
            }
          }
          /* the data structure of the sparse matrix has changed */
          if (k == (*colptr)[colidx+1] - base) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"overflow on k %D",k);
        }
      }
    }
  }
  {
    PetscScalar *tmpvalues;
    PetscInt    *tmprows,*tmpcolptr;

    tmpvalues = (PetscScalar*)malloc(nnz*sizeof(PetscScalar));if (!tmpvalues) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
    tmprows   = (PetscInt*)malloc(nnz*sizeof(PetscInt));      if (!tmprows)   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
    tmpcolptr = (PetscInt*)malloc((*n+1)*sizeof(PetscInt));   if (!tmpcolptr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");

    PetscMemcpy(tmpcolptr,*colptr,(*n+1)*sizeof(PetscInt));
    PetscMemcpy(tmprows,*row,nnz*sizeof(PetscInt));
    PetscMemcpy(tmpvalues,*values,nnz*sizeof(PetscScalar));
    PetscFree(*row);
    PetscFree(*values);

    pastix_checkMatrix(MPI_COMM_WORLD,API_VERBOSE_NO,((isSym != 0) ? API_SYM_YES : API_SYM_NO),API_YES,*n,&tmpcolptr,&tmprows,&tmpvalues,NULL,1);
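
    /* pastix_checkMatrix() verifies the CSC structure and, with the correction
       flag set to API_YES, may fix it in place (e.g. sort column entries or
       merge duplicates), so the nonzero count can change: the corrected count,
       (*colptr)[*n]-1 in 1-based numbering, is used for the copies below. */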
    PetscMemcpy(*colptr,tmpcolptr,(*n+1)*sizeof(PetscInt));
    PetscMalloc(((*colptr)[*n]-1)*sizeof(PetscInt),row);
    PetscMemcpy(*row,tmprows,((*colptr)[*n]-1)*sizeof(PetscInt));
    PetscMalloc(((*colptr)[*n]-1)*sizeof(PetscScalar),values);
    PetscMemcpy(*values,tmpvalues,((*colptr)[*n]-1)*sizeof(PetscScalar));
    free(tmpvalues);
    free(tmprows);
    free(tmpcolptr);
  }
  return(0);
}
/*
   Call the clean step of PaStiX if lu->CleanUpPastix == PETSC_TRUE.
   Free the CSC matrix.
*/
PetscErrorCode MatDestroy_Pastix(Mat A)
{
  Mat_Pastix     *lu   = (Mat_Pastix*)A->spptr;
  PetscErrorCode  ierr;
  PetscMPIInt     size = lu->commSize;
  if (lu && lu->CleanUpPastix) {
    /* Terminate the PaStiX instance and deallocate its memory */
    if (size > 1){
      VecScatterDestroy(&lu->scat_rhs);
      VecDestroy(&lu->b_seq);
      VecScatterDestroy(&lu->scat_sol);
    }

    lu->iparm[IPARM_START_TASK] = API_TASK_CLEAN;
    lu->iparm[IPARM_END_TASK]   = API_TASK_CLEAN;

    pastix((pastix_data_t**)&(lu->pastix_data),
           lu->pastix_comm,
           (pastix_int_t)    lu->n,
           (pastix_int_t*)   lu->colptr,
           (pastix_int_t*)   lu->row,
           (pastix_float_t*) lu->val,
           (pastix_int_t*)   lu->perm,
           (pastix_int_t*)   lu->invp,
           (pastix_float_t*) lu->rhs,
           (pastix_int_t)    lu->rhsnbr,
           (pastix_int_t*)   lu->iparm,
           lu->dparm);

    PetscFree(lu->colptr);
    PetscFree(lu->row);
    PetscFree(lu->val);
    PetscFree(lu->perm);
    PetscFree(lu->invp);
    MPI_Comm_free(&(lu->pastix_comm));
  }
  if (lu && lu->Destroy) {
    (lu->Destroy)(A);
  }
  PetscFree(A->spptr);
  return(0);
}
/*
   Gather the right-hand side.
   Call PaStiX for the solve step.
   Scatter the solution back.
*/
PetscErrorCode MatSolve_PaStiX(Mat A,Vec b,Vec x)
{
  Mat_Pastix     *lu = (Mat_Pastix*)A->spptr;
  PetscScalar    *array;
  Vec             x_seq;
  PetscErrorCode  ierr;
  lu->rhsnbr = 1;
  x_seq      = lu->b_seq;
  if (lu->commSize > 1){
    /* PaStiX only supports a centralized rhs: scatter b into a sequential rhs vector */
    VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&array);
  } else { /* size == 1 */
    VecCopy(b,x);
    VecGetArray(x,&array);
  }
  lu->rhs = array;
  if (lu->commSize == 1){
    VecRestoreArray(x,&array);
  } else {
    VecRestoreArray(x_seq,&array);
  }

  /* solve phase */
  /*-------------*/
  lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
  lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;
  lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;
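  /* Running the tasks from API_TASK_SOLVE through API_TASK_REFINE performs
     the triangular solves followed by iterative refinement; with API_RHS_B
     the solution overwrites the right-hand side array lu->rhs in place. */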

  pastix((pastix_data_t**)&(lu->pastix_data),
         (MPI_Comm)        lu->pastix_comm,
         (pastix_int_t)    lu->n,
         (pastix_int_t*)   lu->colptr,
         (pastix_int_t*)   lu->row,
         (pastix_float_t*) lu->val,
         (pastix_int_t*)   lu->perm,
         (pastix_int_t*)   lu->invp,
         (pastix_float_t*) lu->rhs,
         (pastix_int_t)    lu->rhsnbr,
         (pastix_int_t*)   lu->iparm,
         (double*)         lu->dparm);

  if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %d\n",lu->iparm[IPARM_ERROR_NUMBER]);

  if (lu->commSize == 1){
    VecRestoreArray(x,&(lu->rhs));
  } else {
    VecRestoreArray(x_seq,&(lu->rhs));
  }

  if (lu->commSize > 1) { /* convert the PaStiX centralized solution to a PETSc MPI x */
    VecScatterBegin(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
  }
  return(0);
}
/*
   Numeric factorization using the PaStiX solver.
*/
PetscErrorCode MatFactorNumeric_PaStiX(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_Pastix     *lu = (Mat_Pastix*)(F)->spptr;
  Mat            *tseq;
  PetscErrorCode  ierr;
  PetscInt        icntl;
  PetscInt        M = A->rmap->N;
  PetscBool       valOnly,flg,isSym;
  Mat             F_diag;
  IS              is_iden;
  Vec             b;
  IS              isrow;
  PetscBool       isSeqAIJ,isSeqSBAIJ,isMPIAIJ;

  PetscTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
  PetscTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);
  PetscTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);

  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
    (F)->ops->solve = MatSolve_PaStiX;

    /* Initialize a PaStiX instance */
    MPI_Comm_dup(((PetscObject)A)->comm,&(lu->pastix_comm));
    MPI_Comm_rank(lu->pastix_comm,&lu->commRank);
    MPI_Comm_size(lu->pastix_comm,&lu->commSize);

    /* Set PaStiX options */
    lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
    lu->iparm[IPARM_START_TASK]       = API_TASK_INIT;
    lu->iparm[IPARM_END_TASK]         = API_TASK_INIT;
    lu->rhsnbr                        = 1;

    /* Call PaStiX to set the default options */
    pastix((pastix_data_t**)&(lu->pastix_data),
           (MPI_Comm)        lu->pastix_comm,
           (pastix_int_t)    lu->n,
           (pastix_int_t*)   lu->colptr,
           (pastix_int_t*)   lu->row,
           (pastix_float_t*) lu->val,
           (pastix_int_t*)   lu->perm,
           (pastix_int_t*)   lu->invp,
           (pastix_float_t*) lu->rhs,
           (pastix_int_t)    lu->rhsnbr,
           (pastix_int_t*)   lu->iparm,
           (double*)         lu->dparm);

    PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"PaStiX Options","Mat");

    icntl = -1;
    lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;
    PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",lu->iparm[IPARM_VERBOSE],&icntl,&flg);
    if ((flg && icntl > 0) || PetscLogPrintInfo) {
      lu->iparm[IPARM_VERBOSE] = icntl;
    }
    icntl = -1;
    PetscOptionsInt("-mat_pastix_threadnbr","iparm[IPARM_THREAD_NBR] : Number of threads per MPI node","None",lu->iparm[IPARM_THREAD_NBR],&icntl,&flg);
    if (flg && icntl > 0) {
      lu->iparm[IPARM_THREAD_NBR] = icntl;
    }
    PetscOptionsEnd();
    valOnly = PETSC_FALSE;
  } else {
    if (isSeqAIJ || isMPIAIJ) {
      /* the AIJ path rebuilds the CSC structure each time, so free the previous arrays */
      PetscFree(lu->colptr);
      PetscFree(lu->row);
      PetscFree(lu->val);
      valOnly = PETSC_FALSE;
    } else valOnly = PETSC_TRUE;
  }

  lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;

  /* convert an MPI A to a sequential matrix */
  ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
  MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
  ISDestroy(&isrow);

  MatConvertToCSC(*tseq,valOnly,&lu->n,&lu->colptr,&lu->row,&lu->val);
  MatIsSymmetric(*tseq,0.0,&isSym);
  MatDestroyMatrices(1,&tseq);

  if (!lu->perm) {
    PetscMalloc((lu->n)*sizeof(PetscInt),&(lu->perm));
    PetscMalloc((lu->n)*sizeof(PetscInt),&(lu->invp));
  }
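  /* perm and invp are output arguments: PaStiX fills them with the direct and
     inverse permutations computed by its ordering task. */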

  if (isSym) {
    /* On a symmetric matrix, use LDLt */
    lu->iparm[IPARM_SYM]           = API_SYM_YES;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
  } else {
    /* On an unsymmetric matrix, use LU */
    lu->iparm[IPARM_SYM]           = API_SYM_NO;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  }
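  /* These two settings, chosen from the actual symmetry of the matrix,
     override the values set in the symbolic factorization routines. */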

  /*----------------*/
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN){
    if (!(isSeqAIJ || isSeqSBAIJ)) {
      /* PaStiX only supports a centralized rhs: create the scatter scat_rhs for repeated use in MatSolve() */
      VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);
      ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
      VecCreate(((PetscObject)A)->comm,&b);
      VecSetSizes(b,A->rmap->n,PETSC_DECIDE);
      VecSetFromOptions(b);

      VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);
      VecScatterCreate(lu->b_seq,is_iden,b,is_iden,&lu->scat_sol);
      ISDestroy(&is_iden);
      VecDestroy(&b);
    }
    lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
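    /* a single PaStiX call chains the ordering, symbolic factorization,
       analysis and numerical factorization tasks */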

    pastix((pastix_data_t**)&(lu->pastix_data),
           (MPI_Comm)        lu->pastix_comm,
           (pastix_int_t)    lu->n,
           (pastix_int_t*)   lu->colptr,
           (pastix_int_t*)   lu->row,
           (pastix_float_t*) lu->val,
           (pastix_int_t*)   lu->perm,
           (pastix_int_t*)   lu->invp,
           (pastix_float_t*) lu->rhs,
           (pastix_int_t)    lu->rhsnbr,
           (pastix_int_t*)   lu->iparm,
           (double*)         lu->dparm);
    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  } else {
    lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
    pastix((pastix_data_t**)&(lu->pastix_data),
           (MPI_Comm)        lu->pastix_comm,
           (pastix_int_t)    lu->n,
           (pastix_int_t*)   lu->colptr,
           (pastix_int_t*)   lu->row,
           (pastix_float_t*) lu->val,
           (pastix_int_t*)   lu->perm,
           (pastix_int_t*)   lu->invp,
           (pastix_float_t*) lu->rhs,
           (pastix_int_t)    lu->rhsnbr,
           (pastix_int_t*)   lu->iparm,
           (double*)         lu->dparm);

    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in numerical factorization phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  }

  if (lu->commSize > 1){
    if ((F)->factortype == MAT_FACTOR_LU){
      F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    } else {
      F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    }
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;
  lu->CleanUpPastix = PETSC_TRUE;
  return(0);
}
/* Note: the PETSc r and c permutations are ignored */
PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)F->spptr;

  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  lu->iparm[IPARM_SYM]           = API_SYM_NO;
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  F->ops->lufactornumeric        = MatFactorNumeric_PaStiX;
  return(0);
}
/* Note: the PETSc r permutation is ignored */
PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)(F)->spptr;

  lu->iparm[IPARM_FACTORIZATION]  = API_FACT_LLT;
  lu->iparm[IPARM_SYM]            = API_SYM_YES;
  lu->matstruc                    = DIFFERENT_NONZERO_PATTERN;
  (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
  return(0);
}
PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO){
      Mat_Pastix *lu = (Mat_Pastix*)A->spptr;

      PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");
      PetscViewerASCIIPrintf(viewer,"  Matrix type :                     %s\n",((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric"));
      PetscViewerASCIIPrintf(viewer,"  Level of printing (0,1,2):        %d\n",lu->iparm[IPARM_VERBOSE]);
      PetscViewerASCIIPrintf(viewer,"  Number of refinement iterations : %d\n",lu->iparm[IPARM_NBITER]);
      PetscViewerASCIIPrintf(viewer,"  Relative error :                  %g\n",lu->dparm[DPARM_RELATIVE_ERROR]);
    }
  }
  return(0);
}
/*MC
  MATSOLVERPASTIX - A solver package providing direct solvers (LU and Cholesky) for distributed
  and sequential matrices via the external package PaStiX.

  Use ./configure --download-pastix to have PETSc installed with PaStiX.

  Options Database Keys:
+ -mat_pastix_verbose <0,1,2> - print level
- -mat_pastix_threadnbr <integer> - number of threads per MPI task

  Level: beginner

.seealso: PCFactorSetMatSolverPackage(), MatSolverPackage

M*/
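
/*
   Example usage (illustrative; the application name is hypothetical): once
   PETSc is configured with PaStiX, an LU solve can be routed through it from
   the command line

       ./myapp -ksp_type preonly -pc_type lu -pc_factor_mat_solver_package pastix

   or, for a Cholesky factorization of an SBAIJ matrix,

       ./myapp -mat_type sbaij -ksp_type preonly -pc_type cholesky -pc_factor_mat_solver_package pastix

   The same selection can be made in code with PCFactorSetMatSolverPackage(pc,MATSOLVERPASTIX).
*/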
PetscErrorCode MatGetInfo_PaStiX(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)A->spptr;

  info->block_size        = 1.0;
  info->nz_allocated      = lu->iparm[IPARM_NNZEROS];
  info->nz_used           = lu->iparm[IPARM_NNZEROS];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}
PetscErrorCode MatFactorGetSolverPackage_pastix(Mat A,const MatSolverPackage *type)
{
  *type = MATSOLVERPASTIX;
  return(0);
}
/*
   The seq and mpi versions of this function are the same.
*/
PetscErrorCode MatGetFactor_seqaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat        B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,PETSC_NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factortype = MAT_FACTOR_LU;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}
PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat        B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,PETSC_NULL);
  MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factortype = MAT_FACTOR_LU;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}
PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat        B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
  MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factortype = MAT_FACTOR_CHOLESKY;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_FALSE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}
PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat        B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");

  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
  MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factortype = MAT_FACTOR_CHOLESKY;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_FALSE;
  pastix->scat_rhs      = PETSC_NULL;
  pastix->scat_sol      = PETSC_NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}