/* Actual source code: ex41.c */
/* Usage/help text printed by PetscInitialize() for -help; also documents the
   command-line options this example accepts. */
static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
-f <input_file> : file to load. For a 5X5 example of the 5-pt. stencil,\n\
use the file petsc/src/mat/examples/matbinary.ex\n\
-nd <size> : > 0 no of domains per processor \n\
-ov <overlap> : >=0 amount of overlap between domains\n\n";
#include <petscksp.h>
13: int main(int argc,char **args)
14: {
15: PetscInt nd = 2,ov=1,i,j,m,n,*idx,lsize;
17: PetscMPIInt rank;
18: PetscBool flg;
19: Mat A,B;
20: char file[PETSC_MAX_PATH_LEN];
21: PetscViewer fd;
22: IS *is1,*is2;
23: PetscRandom r;
24: PetscScalar rand;
26: PetscInitialize(&argc,&args,(char *)0,help);
27: #if defined(PETSC_USE_COMPLEX)
28: SETERRQ(PETSC_COMM_WORLD,1,"This example does not work with complex numbers");
29: #else
30:
31: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
32: PetscOptionsGetString(PETSC_NULL,"-f",file,PETSC_MAX_PATH_LEN,PETSC_NULL);
33: PetscOptionsGetInt(PETSC_NULL,"-nd",&nd,PETSC_NULL);
34: PetscOptionsGetInt(PETSC_NULL,"-ov",&ov,PETSC_NULL);
36: /* Read matrix and RHS */
37: PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
38: MatCreate(PETSC_COMM_WORLD,&A);
39: MatSetType(A,MATMPIAIJ);
40: MatLoad(A,fd);
41: PetscViewerDestroy(&fd);
43: /* Read the matrix again as a seq matrix */
44: PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
45: MatCreate(PETSC_COMM_SELF,&B);
46: MatSetType(B,MATSEQAIJ);
47: MatLoad(B,fd);
48: PetscViewerDestroy(&fd);
49:
50: /* Create the Random no generator */
51: MatGetSize(A,&m,&n);
52: PetscRandomCreate(PETSC_COMM_SELF,&r);
53: PetscRandomSetFromOptions(r);
54:
55: /* Create the IS corresponding to subdomains */
56: PetscMalloc(nd*sizeof(IS **),&is1);
57: PetscMalloc(nd*sizeof(IS **),&is2);
58: PetscMalloc(m *sizeof(PetscInt),&idx);
60: /* Create the random Index Sets */
61: for (i=0; i<nd; i++) {
62: for (j=0; j<rank; j++) {
63: PetscRandomGetValue(r,&rand);
64: }
65: PetscRandomGetValue(r,&rand);
66: lsize = (PetscInt)(rand*m);
67: for (j=0; j<lsize; j++) {
68: PetscRandomGetValue(r,&rand);
69: idx[j] = (PetscInt)(rand*m);
70: }
71: ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is1+i);
72: ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,PETSC_COPY_VALUES,is2+i);
73: }
74:
75: MatIncreaseOverlap(A,nd,is1,ov);
76: MatIncreaseOverlap(B,nd,is2,ov);
77:
78: /* Now see if the serial and parallel case have the same answers */
79: for (i=0; i<nd; ++i) {
80: PetscInt sz1,sz2;
81: ISEqual(is1[i],is2[i],&flg);
82: ISGetSize(is1[i],&sz1);
83: ISGetSize(is2[i],&sz2);
84: PetscPrintf(PETSC_COMM_SELF,"[%d], i=%D, flg =%d sz1 = %D sz2 = %D\n",rank,i,(int)flg,sz1,sz2);
85: /* ISView(is1[i],PETSC_VIEWER_STDOUT_SELF);
86: ISView(is2[i],PETSC_VIEWER_STDOUT_SELF); */
87: }
89: /* Free Allocated Memory */
90: for (i=0; i<nd; ++i) {
91: ISDestroy(&is1[i]);
92: ISDestroy(&is2[i]);
93: }
94: PetscRandomDestroy(&r);
95: PetscFree(is1);
96: PetscFree(is2);
97: MatDestroy(&A);
98: MatDestroy(&B);
99: PetscFree(idx);
101: PetscFinalize();
102: #endif
103: return 0;
104: }