Actual source code: ex40.c
static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0 number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";
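/*
   Example invocation, mirroring the tests at the bottom of this file (the matrix
   path is illustrative; any PETSc binary matrix can be used):

     mpiexec -n 3 ./ex40 -f ${PETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -nd 7 -ov 2
*/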
#include <petscmat.h>
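/* Gather a distributed index set onto every process and split the result into one
   sequential IS per rank, in rank order: the i-th output IS contains exactly the
   entries contributed by rank i */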
PetscErrorCode ISAllGatherDisjoint(IS iis, IS **ois)
{
  IS              *is2, is;
  const PetscInt  *idxs;
  PetscInt         i, ls, *sizes;
  PetscMPIInt      size;

  PetscFunctionBegin;
  MPI_Comm_size(PetscObjectComm((PetscObject)iis), &size);
  PetscMalloc1(size, &is2);
  PetscMalloc1(size, &sizes);
  ISGetLocalSize(iis, &ls);
  /* there is no public ISGetLayout, so gather each rank's local size explicitly */
  MPI_Allgather(&ls, 1, MPIU_INT, sizes, 1, MPIU_INT, PetscObjectComm((PetscObject)iis));
  ISAllGather(iis, &is);
  ISGetIndices(is, &idxs);
  for (i = 0, ls = 0; i < size; i++) {
    ISCreateGeneral(PETSC_COMM_SELF, sizes[i], idxs + ls, PETSC_COPY_VALUES, &is2[i]);
    ls += sizes[i];
  }
  ISRestoreIndices(is, &idxs);
  ISDestroy(&is);
  PetscFree(sizes);
  *ois = is2;
  PetscFunctionReturn(0);
}
int main(int argc, char **args)
{
  PetscInt    nd = 2, ov = 1, ndpar, i, start, m, n, end, lsize;
  PetscMPIInt rank;
  PetscBool   flg, useND = PETSC_FALSE;
  Mat         A, B;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS          *is1, *is2;
  PetscRandom r;
  PetscScalar rand;
  PetscInitialize(&argc, &args, (char *)0, help);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
  PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), &flg);
  if (!flg) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_USER, "Must use -f <filename> to indicate a file containing a PETSc binary matrix");
  PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL);
  PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL);
  PetscOptionsGetBool(NULL, NULL, "-nested_dissection", &useND, NULL);
  /* Read the matrix as a parallel (MPIAIJ) matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd);
  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetType(A, MATMPIAIJ);
  MatLoad(A, fd);
  MatSetFromOptions(A);
  PetscViewerDestroy(&fd);
  /* Read the matrix again as a sequential matrix */
  PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd);
  MatCreate(PETSC_COMM_SELF, &B);
  MatSetType(B, MATSEQAIJ);
  MatLoad(B, fd);
  MatSetFromOptions(B);
  PetscViewerDestroy(&fd);
  /* Create the IS corresponding to subdomains */
  if (useND) {
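    /* With nested dissection there is one subdomain per process (nd = number of ranks),
       computed by partitioning A instead of being chosen at random */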
    MatPartitioning part;
    IS              ndmap;
    PetscMPIInt     size;

    ndpar = 1;
    MPI_Comm_size(PETSC_COMM_WORLD, &size);
    nd = (PetscInt)size;
    PetscMalloc1(ndpar, &is1);
    MatPartitioningCreate(PETSC_COMM_WORLD, &part);
    MatPartitioningSetAdjacency(part, A);
    MatPartitioningSetFromOptions(part);
    MatPartitioningApplyND(part, &ndmap);
    MatPartitioningDestroy(&part);
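    /* ndmap lists, for each local row, the rank of the subdomain it was assigned to;
       ISBuildTwoSided inverts that map so is1[0] holds the rows of this rank's
       subdomain, and ISAllGatherDisjoint then gives every process all nd subdomains
       (is2) for the sequential comparison */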
    ISBuildTwoSided(ndmap, NULL, &is1[0]);
    ISDestroy(&ndmap);
    ISAllGatherDisjoint(is1[0], &is2);
  } else {
    /* Create the random index sets */
    PetscMalloc1(nd, &is1);
    PetscMalloc1(nd, &is2);

    MatGetSize(A, &m, &n);
    PetscRandomCreate(PETSC_COMM_SELF, &r);
    PetscRandomSetFromOptions(r);
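    /* Each subdomain is a contiguous range of rows with random endpoints; is1 and is2
       start out identical so the parallel and sequential results can be compared */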
    for (i = 0; i < nd; i++) {
      PetscRandomGetValue(r, &rand);
      start = (PetscInt)(rand * m);
      PetscRandomGetValue(r, &rand);
      end   = (PetscInt)(rand * m);
      lsize = end - start;
      if (start > end) {
        start = end;
        lsize = -lsize;
      }
      ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is1 + i);
      ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is2 + i);
    }
    ndpar = nd;
    PetscRandomDestroy(&r);
  }
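  /* Grow each subdomain by ov levels of overlap: once on the distributed matrix A and
     once on the sequential copy B; both should produce the same enlarged subdomains */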
  MatIncreaseOverlap(A, ndpar, is1, ov);
  MatIncreaseOverlap(B, nd, is2, ov);
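  /* In the nested dissection case the parallel call produced one local IS per rank;
     gather them all so the loop below compares the same nd subdomains everywhere */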
  if (useND) {
    IS *is;

    ISAllGatherDisjoint(is1[0], &is);
    ISDestroy(&is1[0]);
    PetscFree(is1);
    is1 = is;
  }
  /* Check that the sequential and parallel cases produce the same answer */
  for (i = 0; i < nd; ++i) {
    ISEqual(is1[i], is2[i], &flg);
    if (!flg) {
      ISViewFromOptions(is1[i], NULL, "-err_view");
      ISViewFromOptions(is2[i], NULL, "-err_view");
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "proc:[%d], i=%" PetscInt_FMT ", flg=%d", rank, i, (int)flg);
    }
  }
  /* Free allocated memory */
  for (i = 0; i < nd; ++i) {
    ISDestroy(&is1[i]);
    ISDestroy(&is2[i]);
  }
  PetscFree(is1);
  PetscFree(is2);
  MatDestroy(&A);
  MatDestroy(&B);
  PetscFinalize();
  return 0;
}
/*TEST

   build:
      requires: !complex

   testset:
      nsize: 5
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2
      output_file: output/ex40_1.out
      test:
         suffix: 1
         args: -nd 7
      test:
         requires: parmetis
         suffix: 1_nd
         args: -nested_dissection -mat_partitioning_type parmetis

   testset:
      nsize: 3
      requires: double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -mat_increase_overlap_scalable 1 -ov 2
      output_file: output/ex40_1.out
      test:
         suffix: 2
         args: -nd 7
      test:
         requires: parmetis
         suffix: 2_nd
         args: -nested_dissection -mat_partitioning_type parmetis

TEST*/