Actual source code: ex37.c


static char help[] = "Test MatGetMultiProcBlock() and MatCreateRedundantMatrix().\n\
Reads a PETSc matrix from a file and solves a linear system on a subcommunicator.\n\n";
/*
  Example:
  mpiexec -n 4 ./ex37 -f <input_file> -nsubcomm 2 -psubcomm_view -subcomm_type <1 or 2>
*/
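/*
  -subcomm_type selects the PetscSubcommType: 0 = PETSC_SUBCOMM_GENERAL,
  1 = PETSC_SUBCOMM_CONTIGUOUS, 2 = PETSC_SUBCOMM_INTERLACED.
*/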

#include <petscksp.h>
#include <petscsys.h>

int main(int argc,char **args)
{
  KSP            subksp;
  Mat            A,subA;
  Vec            x,b,u,subb,subx,subu;
  PetscViewer    fd;
  char           file[PETSC_MAX_PATH_LEN];
  PetscBool      flg;
  PetscInt       i,m,n,its;
  PetscReal      norm;
  PetscMPIInt    rank,size;
  MPI_Comm       comm,subcomm;
  PetscSubcomm   psubcomm;
  PetscInt       nsubcomm=1,id;
  PetscScalar    *barray,*xarray,*uarray,*array,one=1.0;
  PetscInt       type=1;

  PetscInitialize(&argc,&args,(char*)0,help);
  /* Determine the file from which to read the matrix */
  PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),&flg);
  if (!flg) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Must indicate binary file with the -f option");
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);

  /* Load the matrix; then destroy the viewer */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatLoad(A,fd);
  PetscViewerDestroy(&fd);

  PetscObjectGetComm((PetscObject)A,&comm);
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);

  /* Create rhs vector b */
  MatGetLocalSize(A,&m,NULL);
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,m,PETSC_DECIDE);
  VecSetFromOptions(b);
  VecSet(b,one);

  VecDuplicate(b,&x);
  VecDuplicate(b,&u);
  VecSet(x,0.0);

  /* Test MatGetMultiProcBlock() */
  PetscOptionsGetInt(NULL,NULL,"-nsubcomm",&nsubcomm,NULL);
  PetscOptionsGetInt(NULL,NULL,"-subcomm_type",&type,NULL);

  PetscSubcommCreate(comm,&psubcomm);
  PetscSubcommSetNumber(psubcomm,nsubcomm);
  if (type == PETSC_SUBCOMM_GENERAL) { /* user provides color, subrank and duprank */
    PetscMPIInt color,subrank,duprank,subsize;
    duprank = size-1 - rank;
    subsize = size/nsubcomm;
    if (subsize*nsubcomm != size) SETERRQ(comm,PETSC_ERR_SUP,"Number of processes must be divisible by nsubcomm");
    color   = duprank/subsize;
    subrank = duprank - color*subsize;
    PetscSubcommSetTypeGeneral(psubcomm,color,subrank);
  } else if (type == PETSC_SUBCOMM_CONTIGUOUS) {
    PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS);
  } else if (type == PETSC_SUBCOMM_INTERLACED) {
    PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_INTERLACED);
  } else SETERRQ(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %D is not supported yet",type);
  PetscSubcommSetFromOptions(psubcomm);
  subcomm = PetscSubcommChild(psubcomm);
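  /* PetscSubcommChild() returns this process's subcommunicator:
     PETSC_SUBCOMM_CONTIGUOUS groups consecutive ranks together, while
     PETSC_SUBCOMM_INTERLACED deals ranks out to the subcommunicators
     round-robin */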

  /* Test matrix creation and MatMult() on a user-defined subcommunicator */
  if (size > 1) {
    PetscMPIInt subrank=-1,color=-1;
    MPI_Comm    dcomm;

    if (rank == 0) {
      color = 0; subrank = 0;
    } else if (rank == 1) {
      color = 0; subrank = 1;
    } else {
      color = 1; subrank = 0;
    }

    PetscCommDuplicate(PETSC_COMM_WORLD,&dcomm,NULL);
    MPI_Comm_split(dcomm,color,subrank,&subcomm);

    MatCreate(subcomm,&subA);
    MatSetSizes(subA,PETSC_DECIDE,PETSC_DECIDE,10,10);
    MatSetFromOptions(subA);
    MatSetUp(subA);
    MatAssemblyBegin(subA,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(subA,MAT_FINAL_ASSEMBLY);
    MatZeroEntries(subA);

    /* Test MatMult() */
    MatCreateVecs(subA,&subx,&subb);
    VecSet(subx,1.0);
    MatMult(subA,subx,subb);

    VecDestroy(&subx);
    VecDestroy(&subb);
    MatDestroy(&subA);
    PetscCommDestroy(&dcomm);
  }
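  /* Note: when size > 1, subcomm now refers to the communicator produced by
     MPI_Comm_split() above rather than the PetscSubcomm child, so the calls
     below operate on that split communicator */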

  /* Create subA */
  MatGetMultiProcBlock(A,subcomm,MAT_INITIAL_MATRIX,&subA);
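  /* Calling again with MAT_REUSE_MATRIX overwrites the existing subA in place */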
  MatGetMultiProcBlock(A,subcomm,MAT_REUSE_MATRIX,&subA);

  /* Create sub vectors without arrays. Place b's and x's local arrays into subb and subx */
  MatGetLocalSize(subA,&m,&n);
  VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&subb);
  VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&subx);
  VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&subu);
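  /* Each subcommunicator vector has the same local size as its parent vector
     on this process, so the parents' local arrays can be placed directly */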

  VecGetArray(b,&barray);
  VecGetArray(x,&xarray);
  VecGetArray(u,&uarray);
  VecPlaceArray(subb,barray);
  VecPlaceArray(subx,xarray);
  VecPlaceArray(subu,uarray);

  /* Create linear solvers associated with subA */
  KSPCreate(subcomm,&subksp);
  KSPSetOperators(subksp,subA,subA);
  KSPSetFromOptions(subksp);

  /* Solve sub systems */
  KSPSolve(subksp,subb,subx);
  KSPGetIterationNumber(subksp,&its);

  /* Check the residual */
  MatMult(subA,subx,subu);
  VecAXPY(subu,-1.0,subb);
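  /* subu placed u's array, so the residual values are visible through u and
     the norm of u is the residual norm taken over all blocks */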
  VecNorm(u,NORM_2,&norm);
  if (norm > 1.e-4 && rank == 0) {
    PetscPrintf(PETSC_COMM_WORLD,"[%d]  Number of iterations = %3D\n",rank,its);
    PetscPrintf(PETSC_COMM_WORLD,"Error: Residual norm of each block |subb - subA*subx| = %g\n",(double)norm);
  }
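  /* View the subvectors while the placed arrays are still attached; after
     VecResetArray() they no longer reference any data */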
  PetscOptionsGetInt(NULL,NULL,"-subvec_view",&id,&flg);
  if (flg && rank == id) {
    PetscPrintf(PETSC_COMM_SELF,"[%d] subb:\n",rank);
    VecGetArray(subb,&array);
    for (i=0; i<m; i++) PetscPrintf(PETSC_COMM_SELF,"%g\n",(double)PetscRealPart(array[i]));
    VecRestoreArray(subb,&array);
    PetscPrintf(PETSC_COMM_SELF,"[%d] subx:\n",rank);
    VecGetArray(subx,&array);
    for (i=0; i<m; i++) PetscPrintf(PETSC_COMM_SELF,"%g\n",(double)PetscRealPart(array[i]));
    VecRestoreArray(subx,&array);
  }

  VecResetArray(subb);
  VecResetArray(subx);
  VecResetArray(subu);

  VecRestoreArray(x,&xarray);
  VecRestoreArray(b,&barray);
  VecRestoreArray(u,&uarray);
  MatDestroy(&subA);
  VecDestroy(&subb);
  VecDestroy(&subx);
  VecDestroy(&subu);
  KSPDestroy(&subksp);
  PetscSubcommDestroy(&psubcomm);
  if (size > 1) {
    MPI_Comm_free(&subcomm);
  }
  MatDestroy(&A);
  VecDestroy(&b);
  VecDestroy(&u);
  VecDestroy(&x);

  PetscFinalize();
  return 0;
}

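/*
  The block below drives the PETSc test harness: each "test:" entry runs this
  example with the given args on nsize MPI ranks and compares the output with
  output_file; "requires:" gates the test on build features and data files.
*/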
/*TEST

    test:
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 1
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

    test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      nsize: 4
      output_file: output/ex37.out

    test:
      suffix: mumps
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -pc_factor_mat_solver_type mumps -pc_type lu
      requires: datafilespath mumps !complex double !defined(PETSC_USE_64BIT_INDICES)
      nsize: 4
      output_file: output/ex37.out

    test:
      suffix: 3
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 0
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

    test:
      suffix: 4
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 1
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

    test:
      suffix: 5
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 2
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

TEST*/