Actual source code: ex40.c
static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load. For a 5X5 example of the 5-pt. stencil,\n\
                    use the file petsc/src/mat/examples/matbinary.ex\n\
  -nd <size>      : > 0 number of domains per processor\n\
  -ov <overlap>   : >=0 amount of overlap between domains\n\n";
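/*
   A typical invocation might look like the following (the file name is
   only a placeholder; any matrix stored in PETSc binary format works):

       mpiexec -n 2 ./ex40 -f <input_file> -nd 4 -ov 2
*/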
#include <petscksp.h>
int main(int argc,char **args)
{
  PetscInt    nd = 2,ov = 1,i,start,m,n,end,lsize;
  PetscMPIInt rank;
  PetscBool   flg;
  Mat         A,B;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS          *is1,*is2;
  PetscRandom r;
  PetscScalar rand;

  PetscInitialize(&argc,&args,(char*)0,help);
#if defined(PETSC_USE_COMPLEX)
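  /* The random PetscScalar values drawn below are converted to row
     indices, which only makes sense for real scalars, so complex
     builds are rejected up front */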
  SETERRQ(PETSC_COMM_WORLD,1,"This example does not work with complex numbers");
#else
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetString(PETSC_NULL,"-f",file,PETSC_MAX_PATH_LEN,&flg);
  if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must use -f filename to indicate a file containing a PETSc binary matrix");
  PetscOptionsGetInt(PETSC_NULL,"-nd",&nd,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-ov",&ov,PETSC_NULL);

  /* Read the matrix as a parallel (MPIAIJ) matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetType(A,MATMPIAIJ);
  MatLoad(A,fd);
  PetscViewerDestroy(&fd);

  /* Read the same matrix again, as a sequential matrix on every process */
  PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
  MatCreate(PETSC_COMM_SELF,&B);
  MatSetType(B,MATSEQAIJ);
  MatLoad(B,fd);
  PetscViewerDestroy(&fd);

  /* Create the IS corresponding to subdomains */
  PetscMalloc(nd*sizeof(IS),&is1);
  PetscMalloc(nd*sizeof(IS),&is2);
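  /* is1 and is2 begin as identical copies: is1 will be grown through
     the parallel matrix A, is2 through the sequential matrix B */
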
  /* Create the random index sets */
  MatGetSize(A,&m,&n);
  PetscRandomCreate(PETSC_COMM_SELF,&r);
  PetscRandomSetFromOptions(r);
  for (i=0; i<nd; i++) {
    PetscRandomGetValue(r,&rand);
    start = (PetscInt)(rand*m);
    PetscRandomGetValue(r,&rand);
    end   = (PetscInt)(rand*m);
    lsize = end - start;
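    /* The two random endpoints may come out in either order; make start
       the smaller one so the stride length is nonnegative */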
    if (start > end) { start = end; lsize = -lsize; }
    ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is1+i);
    ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is2+i);
  }

  MatIncreaseOverlap(A,nd,is1,ov);
  MatIncreaseOverlap(B,nd,is2,ov);
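  /* Each index set has now been grown by ov layers of neighboring
     indices; the parallel and sequential results should match exactly */
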
  /* Check that the sequential and parallel cases give the same answer */
  for (i=0; i<nd; ++i) {
    ISEqual(is1[i],is2[i],&flg);
    PetscPrintf(PETSC_COMM_SELF,"proc:[%d], i=%D, flg=%d\n",rank,i,(int)flg);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ISDestroy(&is1[i]);
    ISDestroy(&is2[i]);
  }
  PetscFree(is1);
  PetscFree(is2);
  PetscRandomDestroy(&r);
  MatDestroy(&A);
  MatDestroy(&B);

  PetscFinalize();
#endif
  return 0;
}