Actual source code: ex73.c


static char help[] = "Reads a PETSc matrix from a file and partitions it\n\n";

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers

  Example of usage:
    mpiexec -n 3 ex73 -f <matfile> -mat_partitioning_type parmetis/scotch -viewer_binary_skip_info -nox
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  MatType         mtype = MATMPIAIJ;        /* matrix format */
  Mat             A,B;                      /* original and partitioned matrices */
  PetscViewer     fd;                       /* viewer */
  char            file[PETSC_MAX_PATH_LEN]; /* input file name */
  PetscBool       flg,viewMats,viewIS,viewVecs,useND,noVecLoad = PETSC_FALSE;
  PetscInt        *nlocal,m,n;
  PetscMPIInt     rank,size;
  MatPartitioning part;
  IS              is,isn;
  Vec             xin, xout;
  VecScatter      scat;

  PetscInitialize(&argc,&args,(char*)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsHasName(NULL,NULL, "-view_mats", &viewMats);
  PetscOptionsHasName(NULL,NULL, "-view_is", &viewIS);
  PetscOptionsHasName(NULL,NULL, "-view_vecs", &viewVecs);
  PetscOptionsHasName(NULL,NULL, "-use_nd", &useND);
  PetscOptionsHasName(NULL,NULL, "-novec_load", &noVecLoad);
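  /*
     options recognized above:
       -view_mats  : draw the original and partitioned matrices
       -view_is    : print the index sets produced by the partitioning
       -view_vecs  : print the original and mapped vectors
       -use_nd     : partition with nested dissection (MatPartitioningApplyND)
       -novec_load : fill the vector randomly instead of loading it from the file
  */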

  /*
     Determine file from which we read the matrix
  */
  PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),&flg);
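  /* a minimal sketch of a guard, assuming the PetscCheck() macro from
     petscsys.h is available; the listing otherwise never tests flg */
  PetscCheck(flg,PETSC_COMM_WORLD,PETSC_ERR_USER,"Must indicate a binary matrix file with the -f option");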

  /*
     Open the binary file.  Note that we use FILE_MODE_READ to indicate
     reading from this file.
  */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
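  /* the file is expected to contain a matrix (and optionally a vector) in
     PETSc binary format, as written for example by MatView()/VecView() on a
     binary viewer opened with FILE_MODE_WRITE */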

  /*
      Load the matrix and, unless -novec_load was given, the vector;
      then destroy the viewer.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetType(A,mtype);
  MatLoad(A,fd);
  if (!noVecLoad) {
    VecCreate(PETSC_COMM_WORLD,&xin);
    VecLoad(xin,fd);
  } else {
    MatCreateVecs(A,&xin,NULL);
    VecSetRandom(xin,NULL);
  }
  PetscViewerDestroy(&fd);
  if (viewMats) {
    PetscPrintf(PETSC_COMM_WORLD,"Original matrix:\n");
    MatView(A,PETSC_VIEWER_DRAW_WORLD);
  }
  if (viewVecs) {
    PetscPrintf(PETSC_COMM_WORLD,"Original vector:\n");
    VecView(xin,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* Partition the graph of the matrix */
  MatPartitioningCreate(PETSC_COMM_WORLD,&part);
  MatPartitioningSetAdjacency(part,A);
  MatPartitioningSetFromOptions(part);
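  /* equivalently (a sketch; the tests below configure this via the command
     line instead), the partitioner could be set up programmatically:
       MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);
       MatPartitioningSetNParts(part,size);
  */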

  /* get the new owning process of each vertex (matrix row) */
  if (useND) {
    MatPartitioningApplyND(part,&is);
  } else {
    MatPartitioningApply(part,&is);
  }
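  /* MatPartitioningApply() returns, for each local vertex, the rank that will
     own it; MatPartitioningApplyND() computes a nested-dissection ordering
     instead, for which the local sizes of the inverted numbering are chosen
     internally (hence the PETSC_DECIDE below) */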
  if (viewIS) {
    PetscPrintf(PETSC_COMM_WORLD,"IS1 - new processor ownership:\n");
    ISView(is,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* get new global number of each old global number */
  ISPartitioningToNumbering(is,&isn);
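  /* illustration (2 ranks, 4 rows): if IS1 = [1,0,1,0], rows 1 and 3 move to
     rank 0 and rows 0 and 2 to rank 1, so IS2 = [2,0,3,1]: old row 0 becomes
     new row 2, old row 1 becomes new row 0, and so on */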
  if (viewIS) {
    PetscPrintf(PETSC_COMM_WORLD,"IS2 - new global numbering:\n");
    ISView(isn,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* count how many vertices each process receives */
  PetscMalloc1(size,&nlocal);
  ISPartitioningCount(is,size,nlocal);
  ISDestroy(&is);

  /* get the old global number of each new global number; nlocal[rank] gives
     the local size of the result (with nested dissection the sizes are
     decided internally) */
  ISInvertPermutation(isn,useND ? PETSC_DECIDE : nlocal[rank],&is);
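  /* continuing the illustration: inverting IS2 = [2,0,3,1] yields
     IS3 = [1,3,0,2], i.e. new row 0 holds old row 1, new row 1 holds old
     row 3, and so on */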
  if (viewIS) {
    PetscPrintf(PETSC_COMM_WORLD,"IS3=inv(IS2) - old global number of each new global number:\n");
    ISView(is,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* move the matrix rows to the new processes they have been assigned to by the permutation */
  MatCreateSubMatrix(A,is,is,MAT_INITIAL_MATRIX,&B);
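  /* using the same index set for rows and columns makes this a symmetric
     permutation, B = P A P^T, so B has the same sparsity pattern and spectrum
     as A, merely redistributed across the processes */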
  PetscFree(nlocal);
  ISDestroy(&isn);
  MatDestroy(&A);
  MatPartitioningDestroy(&part);
  if (viewMats) {
    PetscPrintf(PETSC_COMM_WORLD,"Partitioned matrix:\n");
    MatView(B,PETSC_VIEWER_DRAW_WORLD);
  }

  /* move the vector entries to the new processes they have been assigned to */
  MatGetLocalSize(B,&m,&n);
  VecCreateMPI(PETSC_COMM_WORLD,m,PETSC_DECIDE,&xout);
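  /* the scatter built next realizes xout[i] = xin[is[i]]: a NULL destination
     index set means the natural ordering 0..m-1, so new local row i receives
     the old entry whose global number is is[i] */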
  VecScatterCreate(xin,is,xout,NULL,&scat);
  VecScatterBegin(scat,xin,xout,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(scat,xin,xout,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterDestroy(&scat);
  if (viewVecs) {
    PetscPrintf(PETSC_COMM_WORLD,"Mapped vector:\n");
    VecView(xout,PETSC_VIEWER_STDOUT_WORLD);
  }
  VecDestroy(&xout);
  ISDestroy(&is);

  {
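    /* this block builds a matrix J twice the size of B in which every nonzero
       of B is expanded into a 2x2 block, i.e. the nonzero structure of a
       Jacobian for a problem with two degrees of freedom per grid point; only
       the structure is assembled, all values are zero */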
    PetscInt          rstart,i,*nzd,*nzo,nzl,nzmax = 0,*ncols,nrow,j;
    Mat               J;
    const PetscInt    *cols;
    const PetscScalar *vals;
    PetscScalar       *nvals;

    MatGetOwnershipRange(B,&rstart,NULL);
    PetscCalloc2(2*m,&nzd,2*m,&nzo);
    /* count, for each pair of new rows, how many entries fall in the diagonal
       block (nzd) and in the off-diagonal block (nzo); each nonzero of B
       contributes a 2x2 block, hence the increments of 2 */
    for (i=0; i<m; i++) {
      MatGetRow(B,i+rstart,&nzl,&cols,NULL);
      for (j=0; j<nzl; j++) {
        if (cols[j] >= rstart && cols[j] < rstart+n) { /* column inside this process's diagonal block */
          nzd[2*i] += 2;
          nzd[2*i+1] += 2;
        } else {
          nzo[2*i] += 2;
          nzo[2*i+1] += 2;
        }
      }
      nzmax = PetscMax(nzmax,nzd[2*i]+nzo[2*i]);
      MatRestoreRow(B,i+rstart,&nzl,&cols,NULL);
    }
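    /* with exact per-row counts in hand, preallocate J; the 0 arguments are
       the default nz values, which are ignored because the nnz arrays are
       given */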
    MatCreateAIJ(PETSC_COMM_WORLD,2*m,2*m,PETSC_DECIDE,PETSC_DECIDE,0,nzd,0,nzo,&J);
    PetscInfo(NULL,"Created empty Jacobian matrix\n");
    PetscFree2(nzd,nzo);
    PetscMalloc2(nzmax,&ncols,nzmax,&nvals);
    PetscArrayzero(nvals,nzmax);
    /* for each row of B, insert two rows of zeros into J, expanding every
       column index c into the pair 2*c, 2*c+1 */
    for (i=0; i<m; i++) {
      MatGetRow(B,i+rstart,&nzl,&cols,&vals);
      for (j=0; j<nzl; j++) {
        ncols[2*j]   = 2*cols[j];
        ncols[2*j+1] = 2*cols[j]+1;
      }
      nrow = 2*(i+rstart);
      MatSetValues(J,1,&nrow,2*nzl,ncols,nvals,INSERT_VALUES);
      nrow = 2*(i+rstart) + 1;
      MatSetValues(J,1,&nrow,2*nzl,ncols,nvals,INSERT_VALUES);
      MatRestoreRow(B,i+rstart,&nzl,&cols,&vals);
    }
    MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);
    if (viewMats) {
      PetscPrintf(PETSC_COMM_WORLD,"Jacobian matrix structure:\n");
      MatView(J,PETSC_VIEWER_DRAW_WORLD);
    }
    MatDestroy(&J);
    PetscFree2(ncols,nvals);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  MatDestroy(&B);
  VecDestroy(&xin);
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      nsize: 3
      requires: parmetis datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      args: -nox -f ${DATAFILESPATH}/matrices/arco1 -mat_partitioning_type parmetis -viewer_binary_skip_info -novec_load

   test:
      requires: parmetis !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_32
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: parmetis !complex double defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_64
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_32
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_64
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

TEST*/