Actual source code: ex147.c
/* This program illustrates the use of a parallel real multi-dimensional FFT */
static char help[] = "This program illustrates the use of parallel real multi-dimensional fftw (without the PETSc interface)";

#include <petscmat.h>
#include <fftw3.h>
#include <fftw3-mpi.h>
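
/*
   Typical way to build and run this example (illustrative only; assumes a PETSc
   configuration with FFTW enabled, e.g. --download-fftw, and the usual PETSc
   example makefile):

       make ex147
       mpiexec -n 2 ./ex147
*/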
int main(int argc, char **args)
{
  ptrdiff_t    N0 = 2, N1 = 2, N2 = 2, N3 = 2, dim[4], N, D;
  fftw_plan    bplan, fplan;
  fftw_complex *out;
  double       *in1, *in2;
  ptrdiff_t    alloc_local, local_n0, local_0_start;
  ptrdiff_t    local_n1, local_1_start;
  PetscInt     i, j, indx[100], n1;
  PetscInt     n, *in, N_factor;
  PetscMPIInt  size, rank; /* MPI_Comm_size()/MPI_Comm_rank() expect MPI's plain int type */
  PetscScalar  *data_fin, value1, one = 1.0, zero = 0.0;
  PetscScalar  a, *x_arr, *y_arr, *z_arr, enorm;
  Vec          fin, fout, fout1, x, y;
  PetscRandom  rnd;

  PetscInitialize(&argc, &args, (char *)0, help);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "This example requires real numbers. Your current scalar type is complex");
#endif
  MPI_Comm_size(PETSC_COMM_WORLD, &size);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

  /* FFTW's MPI interface must be initialized after MPI (done inside PetscInitialize())
     and before any other fftw_mpi_* routine is called */
  fftw_mpi_init();

  PetscRandomCreate(PETSC_COMM_WORLD, &rnd);

  D = 4;
  dim[0] = N0; dim[1] = N1; dim[2] = N2; dim[3] = N3/2 + 1;
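
  /* For FFTW's MPI real-data (r2c/c2r) transforms, the local-size query is made with
     the last dimension replaced by the padded complex extent N3/2+1; dim[3] is reset
     to N3 below, before the plans themselves are created. */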
  alloc_local = fftw_mpi_local_size_transposed(D, dim, PETSC_COMM_WORLD, &local_n0, &local_0_start, &local_n1, &local_1_start);

  printf("The value alloc_local is %ld from process %d\n", (long)alloc_local, rank);
  printf("The value local_n0 is %ld from process %d\n", (long)local_n0, rank);
  printf("The value local_0_start is %ld from process %d\n", (long)local_0_start, rank);
  printf("The value local_n1 is %ld from process %d\n", (long)local_n1, rank);
  printf("The value local_1_start is %ld from process %d\n", (long)local_1_start, rank);

  /* Allocate space for the input and output arrays */
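  /* alloc_local is returned in units of complex values, so each real buffer needs
     2*alloc_local doubles; fftw_malloc() provides suitably aligned memory. */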
  in1 = (double *)fftw_malloc(sizeof(double) * alloc_local * 2);
  in2 = (double *)fftw_malloc(sizeof(double) * alloc_local * 2);
  out = (fftw_complex *)fftw_malloc(sizeof(fftw_complex) * alloc_local);

  N        = 2 * N0 * N1 * N2 * (N3/2 + 1);          /* global number of PetscScalars in the padded real layout */
  N_factor = N0 * N1 * N2 * N3;                      /* number of true data points (and the FFTW normalization factor) */
  n        = 2 * local_n0 * N1 * N2 * (N3/2 + 1);    /* local share of the padded real layout */
  n1       = local_n1 * N0 * 2 * N1 * N2;
50: /* printf("The value N is %d from process %d\n",N,rank); */
51: /* printf("The value n is %d from process %d\n",n,rank); */
52: /* printf("The value n1 is %d from process %d\n",n1,rank); */
  /* Create the data vectors on top of the FFTW arrays with VecCreateMPIWithArray() */
  VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, n, N, (PetscScalar *)in1, &fin);
  VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, n, N, (PetscScalar *)out, &fout);
  VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, n, N, (PetscScalar *)in2, &fout1);
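
  /* These vectors wrap the FFTW buffers directly (no copies), so PETSc operations such
     as VecSet() and VecView() act on the same memory the FFTW plans read and write.
     Their global length N includes the r2c padding in the last dimension. */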
58: /* VecGetSize(fin,&size); */
59: /* printf("The size is %d\n",size); */
61: VecSet(fin,one);
62: /* VecAssemblyBegin(fin); */
63: /* VecAssemblyEnd(fin); */
64: /* VecView(fin,PETSC_VIEWER_STDOUT_WORLD); */
66: VecGetArray(fin,&x_arr);
67: VecGetArray(fout1,&z_arr);
68: VecGetArray(fout,&y_arr);
  dim[3] = N3;
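
  /* The plans are created collectively over PETSC_COMM_WORLD directly on the arrays
     backing the vectors. FFTW_ESTIMATE keeps the planner from overwriting the
     already-initialized input; with FFTW_MEASURE or FFTW_PATIENT the input would
     have to be (re)filled after the plans are created. */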
  fplan = fftw_mpi_plan_dft_r2c(D, dim, (double *)x_arr, (fftw_complex *)y_arr, PETSC_COMM_WORLD, FFTW_ESTIMATE);
  bplan = fftw_mpi_plan_dft_c2r(D, dim, (fftw_complex *)y_arr, (double *)z_arr, PETSC_COMM_WORLD, FFTW_ESTIMATE);

  fftw_execute(fplan);
  fftw_execute(bplan);

  VecRestoreArray(fin, &x_arr);
  VecRestoreArray(fout1, &z_arr);
  VecRestoreArray(fout, &y_arr);

  /* a = 1.0/(PetscReal)N_factor; */
  /* VecScale(fout1,a); */
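
  /* FFTW transforms are unnormalized: the forward/backward pair leaves each true data
     entry of fout1 equal to N_factor (= N0*N1*N2*N3) times the original value, while the
     padding entries hold no meaningful data. Uncommenting the scaling above would
     recover the original input values. */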

  VecAssemblyBegin(fout1);
  VecAssemblyEnd(fout1);

  VecView(fout1, PETSC_VIEWER_STDOUT_WORLD);

  fftw_destroy_plan(fplan);
  fftw_destroy_plan(bplan);
  PetscRandomDestroy(&rnd);
  fftw_free(in1); VecDestroy(&fin);
  fftw_free(out); VecDestroy(&fout);
  fftw_free(in2); VecDestroy(&fout1);

  fftw_mpi_cleanup(); /* release FFTW's MPI-related internal data before MPI is finalized */
  PetscFinalize();
  return 0;
}