Actual source code: ex17.c
static const char help[] = "Test PetscSF with MPI large count (more than 2 billion elements in messages)\n\n";

#include <petscsys.h>
#include <petscsf.h>
#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscSF            sf;
  PetscInt           i, nroots, nleaves;
  PetscInt           n = (1ULL << 31) + 1024; /* a little over 2G elements */
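  /* n exceeds 2^31-1, the largest int-sized MPI count, so full-size messages exercise the MPI large-count path */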
  PetscSFNode       *iremote = NULL;
  PetscMPIInt        rank, size;
  char              *rootdata = NULL, *leafdata = NULL;
  Vec                x, y;
  VecScatter         vscat;
  PetscInt           rstart, rend;
  IS                 ix;
  const PetscScalar *xv;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
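  /* the graph and the expected values below assume exactly two ranks (the harness runs this test with nsize: 2) */
  PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This test must be run with exactly 2 MPI ranks");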

  /* Test PetscSF */
  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sf));
  PetscCall(PetscSFSetFromOptions(sf));
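
  /* Star graph: rank 0 owns all n roots and rank 1 owns n leaves, with leaf i pointing at
     root i on rank 0, so every bcast/reduce moves one message of more than 2G elements */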
  if (!rank) {
    nroots  = n;
    nleaves = 0;
  } else {
    nroots  = 0;
    nleaves = n;
    PetscCall(PetscMalloc1(nleaves, &iremote));
    for (i = 0; i < nleaves; i++) {
      iremote[i].rank  = 0;
      iremote[i].index = i;
    }
  }
  PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_COPY_VALUES, iremote, PETSC_COPY_VALUES));
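  /* with PETSC_COPY_VALUES the SF stores its own copy of the graph, which is why iremote can be freed below */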
  PetscCall(PetscMalloc2(nroots, &rootdata, nleaves, &leafdata));
  if (!rank) {
    memset(rootdata, 11, nroots);
    rootdata[nroots - 1] = 12; /* Use a different value at the end */
  }

  PetscCall(PetscSFBcastBegin(sf, MPI_SIGNED_CHAR, rootdata, leafdata, MPI_REPLACE)); /* rank 0->1, bcast rootdata to leafdata */
  PetscCall(PetscSFBcastEnd(sf, MPI_SIGNED_CHAR, rootdata, leafdata, MPI_REPLACE));
  PetscCall(PetscSFReduceBegin(sf, MPI_SIGNED_CHAR, leafdata, rootdata, MPI_SUM)); /* rank 1->0, add leafdata to rootdata */
  PetscCall(PetscSFReduceEnd(sf, MPI_SIGNED_CHAR, leafdata, rootdata, MPI_SUM));
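  /* each root value was copied to its single leaf and summed back, so roots double: 11 -> 22 and the last entry 12 -> 24 */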
  if (!rank) {
    PetscCheck(rootdata[0] == 22 && rootdata[nroots - 1] == 24, PETSC_COMM_SELF, PETSC_ERR_PLIB, "SF: wrong results");
  }

  PetscCall(PetscFree2(rootdata, leafdata));
  PetscCall(PetscFree(iremote));
  PetscCall(PetscSFDestroy(&sf));

  /* Test VecScatter */
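  /* x keeps nearly all entries on rank 0 while y keeps them on rank 1, so the forward
     scatter again sends more than 2^31 scalars from rank 0 to rank 1 */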
  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecCreate(PETSC_COMM_WORLD, &y));
  PetscCall(VecSetSizes(x, rank == 0 ? n : 64, PETSC_DECIDE));
  PetscCall(VecSetSizes(y, rank == 0 ? 64 : n, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecSetFromOptions(y));

  PetscCall(VecGetOwnershipRange(x, &rstart, &rend));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &ix));
  PetscCall(VecScatterCreate(x, ix, y, ix, &vscat));
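  /* ix covers the locally owned range of x and is used as both source and target index
     set: entries keep their global index but generally change owner between x and y */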

  PetscCall(VecSet(x, 3.0));
  PetscCall(VecScatterBegin(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD));

  PetscCall(VecScatterBegin(vscat, y, x, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(vscat, y, x, ADD_VALUES, SCATTER_REVERSE));
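
  /* x started at 3.0, was copied into y, then y was added back into x, so every entry of x should now be 6.0 */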
  PetscCall(VecGetArrayRead(x, &xv));
  PetscCheck(xv[0] == 6.0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "VecScatter: wrong results");
  PetscCall(VecRestoreArrayRead(x, &xv));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(VecScatterDestroy(&vscat));
  PetscCall(ISDestroy(&ix));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
     requires: defined(PETSC_HAVE_MPI_LARGE_COUNT) defined(PETSC_USE_64BIT_INDICES)
     TODO: need a machine with big memory (~150GB) to run the test
     nsize: 2
     args: -sf_type {{basic neighbor}}

TEST*/