/* ex15.c */
1: static char help[]= " Test VecScatterRemap() on various vecscatter. \n\
2: We may do optimization based on index patterns. After index remapping by VecScatterRemap(), we need to \n\
3: make sure the vecscatter works as expected with the optimizaiton. \n\
4: VecScatterRemap() does not support all kinds of vecscatters. In addition, it only supports remapping \n\
5: entries where we read the data (i.e., todata in parallel scatter, fromdata in sequential scatter). This test \n\
6: tests VecScatterRemap on parallel to parallel (PtoP) vecscatter, sequential general to sequential \n\
7: general (SGToSG) vecscatter and sequential general to sequential stride 1 (SGToSS_Stride1) vecscatter.\n\n";
9: #include <petscvec.h>
11: int main(int argc,char **argv)
12: {
13: PetscInt i,n,*ix,*iy,*tomap,start;
14: Vec x,y;
15: PetscMPIInt nproc,rank;
16: IS isx,isy;
17: const PetscInt *ranges;
18: VecScatter vscat;
20: PetscInitialize(&argc,&argv,(char*)0,help);
21: MPI_Comm_size(PETSC_COMM_WORLD,&nproc);
22: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
26: /* ====================================================================
27: (1) test VecScatterRemap on a parallel to parallel (PtoP) vecscatter
28: ====================================================================
29: */
31: n = 64; /* long enough to trigger memcpy optimizations both in local scatter and remote scatter */
33: /* create two MPI vectors x, y of length n=64, N=128 */
34: VecCreateMPI(PETSC_COMM_WORLD,n,PETSC_DECIDE,&x);
35: VecDuplicate(x,&y);
37: /* Initialize x as {0~127} */
38: VecGetOwnershipRanges(x,&ranges);
39: for (i=ranges[rank]; i<ranges[rank+1]; i++) VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES);
40: VecAssemblyBegin(x);
41: VecAssemblyEnd(x);
43: /* create two general index sets isx = {0~127} and isy = {32~63,64~95,96~127,0~31}. isx is sequential, but we use
44: it as general and let PETSc detect the pattern and optimize it. indices in isy are set to make the vecscatter
45: have both local scatter and remote scatter (i.e., MPI communication)
46: */
47: PetscMalloc2(n,&ix,n,&iy);
48: start = ranges[rank];
49: for (i=ranges[rank]; i<ranges[rank+1]; i++) ix[i-start] = i;
50: ISCreateGeneral(PETSC_COMM_WORLD,n,ix,PETSC_COPY_VALUES,&isx);
52: if (rank == 0) { for (i=0; i<n; i++) iy[i] = i+32; }
53: else for (i=0; i<n/2; i++) { iy[i] = i+96; iy[i+n/2] = i; }
55: ISCreateGeneral(PETSC_COMM_WORLD,n,iy,PETSC_COPY_VALUES,&isy);
57: /* create a vecscatter that shifts x to the tail by quater periodically and puts the results in y */
58: VecScatterCreate(x,isx,y,isy,&vscat);
59: VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
60: VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
62: /* view y to check the result. y should be {Q3,Q0,Q1,Q2} of x, that is {96~127,0~31,32~63,64~95} */
63: PetscPrintf(PETSC_COMM_WORLD,"Before VecScatterRemap on PtoP, MPI vector y is:\n");
64: VecView(y,PETSC_VIEWER_STDOUT_WORLD);
66: /* now call the weird subroutine VecScatterRemap to slightly change the vecscatter. It changes where we read vector
67: x entries to send out, but does not change the communication pattern (i.e., send/recv pairs and msg lengths).
69: We create tomap as {32~63,0~31}. Originally, we read from indices {0~64} of the local x to send out. The remap
70: does indices[i] = tomap[indices[i]]. Therefore, after the remap, we read from indices {32~63,0~31} of the local x.
71: isy is unchanged. So, we will shift x to {Q2,Q1,Q0,Q3}, that is {64~95,32~63,0~31,96~127}
72: */
73: PetscMalloc1(n,&tomap);
74: for (i=0; i<n/2; i++) { tomap[i] = i+n/2; tomap[i+n/2] = i; };
75: VecScatterRemap(vscat,tomap,NULL);
76: VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
77: VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
79: /* view y to check the result. y should be {64~95,32~63,0~31,96~127} */
80: PetscPrintf(PETSC_COMM_WORLD,"After VecScatterRemap on PtoP, MPI vector y is:\n");
81: VecView(y,PETSC_VIEWER_STDOUT_WORLD);
83: /* destroy everything before we recreate them in different types */
84: PetscFree2(ix,iy);
85: VecDestroy(&x);
86: VecDestroy(&y);
87: ISDestroy(&isx);
88: ISDestroy(&isy);
89: PetscFree(tomap);
90: VecScatterDestroy(&vscat);
92: /* ==========================================================================================
93: (2) test VecScatterRemap on a sequential general to sequential general (SGToSG) vecscatter
94: ==========================================================================================
95: */
96: n = 64; /* long enough to trigger memcpy optimizations in local scatter */
98: /* create two seq vectors x, y of length n */
99: VecCreateSeq(PETSC_COMM_SELF,n,&x);
100: VecDuplicate(x,&y);
102: /* Initialize x as {0~63} */
103: for (i=0; i<n; i++) VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES);
104: VecAssemblyBegin(x);
105: VecAssemblyEnd(x);
107: /* create two general index sets isx = isy = {0~63}, which are sequential, but we use them as
108: general and let PETSc detect the pattern and optimize it */
109: PetscMalloc2(n,&ix,n,&iy);
110: for (i=0; i<n; i++) ix[i] = i;
111: ISCreateGeneral(PETSC_COMM_SELF,n,ix,PETSC_COPY_VALUES,&isx);
112: ISDuplicate(isx,&isy);
114: /* create a vecscatter that just copies x to y */
115: VecScatterCreate(x,isx,y,isy,&vscat);
116: VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
117: VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
119: /* view y to check the result. y should be {0~63} */
120: PetscPrintf(PETSC_COMM_WORLD,"\nBefore VecScatterRemap on SGToSG, SEQ vector y is:\n");
121: VecView(y,PETSC_VIEWER_STDOUT_WORLD);
123: /* now call the weird subroutine VecScatterRemap to slightly change the vecscatter.
125: Create tomap as {32~63,0~31}. Originally, we read from indices {0~64} of seq x to write to y. The remap
126: does indices[i] = tomap[indices[i]]. Therefore, after the remap, we read from indices{32~63,0~31} of seq x.
127: */
128: PetscMalloc1(n,&tomap);
129: for (i=0; i<n/2; i++) { tomap[i] = i+n/2; tomap[i+n/2] = i; };
130: VecScatterRemap(vscat,tomap,NULL);
131: VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
132: VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
134: /* view y to check the result. y should be {32~63,0~31} */
135: PetscPrintf(PETSC_COMM_WORLD,"After VecScatterRemap on SGToSG, SEQ vector y is:\n");
136: VecView(y,PETSC_VIEWER_STDOUT_WORLD);
138: /* destroy everything before we recreate them in different types */
139: PetscFree2(ix,iy);
140: VecDestroy(&x);
141: VecDestroy(&y);
142: ISDestroy(&isx);
143: ISDestroy(&isy);
144: PetscFree(tomap);
145: VecScatterDestroy(&vscat);
147: /* ===================================================================================================
148: (3) test VecScatterRemap on a sequential general to sequential stride 1 (SGToSS_Stride1) vecscatter
149: ===================================================================================================
150: */
151: n = 64; /* long enough to trigger memcpy optimizations in local scatter */
153: /* create two seq vectors x of length n, and y of length n/2 */
154: VecCreateSeq(PETSC_COMM_SELF,n,&x);
155: VecCreateSeq(PETSC_COMM_SELF,n/2,&y);
157: /* Initialize x as {0~63} */
158: for (i=0; i<n; i++) VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES);
159: VecAssemblyBegin(x);
160: VecAssemblyEnd(x);
162: /* create a general index set isx = {0:63:2}, which actually is a stride IS with first=0, n=32, step=2,
163: but we use it as general and let PETSc detect the pattern and optimize it. */
164: PetscMalloc2(n/2,&ix,n/2,&iy);
165: for (i=0; i<n/2; i++) ix[i] = i*2;
166: ISCreateGeneral(PETSC_COMM_SELF,n/2,ix,PETSC_COPY_VALUES,&isx);
168: /* create a stride1 index set isy = {0~31}. We intentionally set the step to 1 to trigger optimizations */
169: ISCreateStride(PETSC_COMM_SELF,32,0,1,&isy);
171: /* create a vecscatter that just copies even entries of x to y */
172: VecScatterCreate(x,isx,y,isy,&vscat);
173: VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
174: VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
176: /* view y to check the result. y should be {0:63:2} */
177: PetscPrintf(PETSC_COMM_WORLD,"\nBefore VecScatterRemap on SGToSS_Stride1, SEQ vector y is:\n");
178: VecView(y,PETSC_VIEWER_STDOUT_WORLD);
180: /* now call the weird subroutine VecScatterRemap to slightly change the vecscatter.
182: Create tomap as {32~63,0~31}. Originally, we read from indices{0:63:2} of seq x to write to y. The remap
183: does indices[i] = tomap[indices[i]]. Therefore, after the remap, we read from indices{32:63:2,0:31:2} of seq x.
184: */
185: PetscMalloc1(n,&tomap);
186: for (i=0; i<n/2; i++) { tomap[i] = i+n/2; tomap[i+n/2] = i; };
187: VecScatterRemap(vscat,tomap,NULL);
188: VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
189: VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD);
191: /* view y to check the result. y should be {32:63:2,0:31:2} */
192: PetscPrintf(PETSC_COMM_WORLD,"After VecScatterRemap on SGToSS_Stride1, SEQ vector y is:\n");
193: VecView(y,PETSC_VIEWER_STDOUT_WORLD);
195: /* destroy everything before PetscFinalize */
196: PetscFree2(ix,iy);
197: VecDestroy(&x);
198: VecDestroy(&y);
199: ISDestroy(&isx);
200: ISDestroy(&isy);
201: PetscFree(tomap);
202: VecScatterDestroy(&vscat);
204: PetscFinalize();
205: return 0;
206: }
/*TEST

   test:
      suffix: 1
      nsize: 2
      diff_args: -j
      requires: double

TEST*/