Actual source code: ex5opt_ic.c
static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

/*
  See ex5.c for details on the equation.
  This code demonstrates the TSAdjoint/TAO interface to solve an inverse initial value problem built on a system of
  time-dependent partial differential equations.
  In this problem, the initial value for the PDE is unknown, but the output (the final solution of the PDE) is known.
  We want to determine the initial value that can produce the given output.
  We formulate the problem as a nonlinear optimization problem that minimizes the discrepancy between the simulated
  result and the given reference solution, calculate the gradient of the objective function with the discrete adjoint
  solver, and solve the optimization problem with TAO.

  Runtime options:
    -forwardonly  - run only the forward simulation
    -implicitform - provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian will be used
*/
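
/*
  A sketch of the discrete optimization problem being solved (notation introduced here for
  explanation only): with U(t_f; U_0) the PDE solution at the final time for initial value U_0
  and U_ref the stored reference solution,

      min_{U_0}  f(U_0) = || U(t_f; U_0) - U_ref ||_2^2 .

  The gradient df/dU_0 is obtained from one discrete adjoint solve whose terminal condition is
  lambda(t_f) = 2 (U(t_f; U_0) - U_ref); see InitializeLambda() and FormFunctionAndGradient() below.
*/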

#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>
#include <petsctao.h>

/* User-defined routines */
extern PetscErrorCode FormFunctionAndGradient(Tao,Vec,PetscReal*,Vec,void*);

/*
  Set terminal condition for the adjoint variable
*/
PetscErrorCode InitializeLambda(DM da,Vec lambda,Vec U,AppCtx *appctx)
{
  char        filename[PETSC_MAX_PATH_LEN] = "";
  PetscViewer viewer;
  Vec         Uob;

  /* Load the reference (observed) solution saved by GenerateOBs() */
  VecDuplicate(U,&Uob);
  PetscSNPrintf(filename,sizeof filename,"ex5opt.ob");
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,filename,FILE_MODE_READ,&viewer);
  VecLoad(Uob,viewer);
  PetscViewerDestroy(&viewer);
  /* lambda += 2*(U - Uob), the derivative of ||U - Uob||^2 with respect to the final state */
  VecAYPX(Uob,-1.,U);
  VecScale(Uob,2.0);
  VecAXPY(lambda,1.,Uob);
  VecDestroy(&Uob);
  return 0;
}

/*
  Set up a viewer that dumps data into a binary file
*/
PetscErrorCode OutputBIN(DM da, const char *filename, PetscViewer *viewer)
{
  PetscViewerCreate(PetscObjectComm((PetscObject)da),viewer);
  PetscViewerSetType(*viewer,PETSCVIEWERBINARY);
  PetscViewerFileSetMode(*viewer,FILE_MODE_WRITE);
  PetscViewerFileSetName(*viewer,filename);
  return 0;
}

/*
  Generate a reference solution and save it to a binary file
*/
PetscErrorCode GenerateOBs(TS ts,Vec U,AppCtx *appctx)
{
  char        filename[PETSC_MAX_PATH_LEN] = "";
  PetscViewer viewer;
  DM          da;

  TSGetDM(ts,&da);
  TSSolve(ts,U);
  PetscSNPrintf(filename,sizeof filename,"ex5opt.ob");
  OutputBIN(da,filename,&viewer);
  VecView(U,viewer);
  PetscViewerDestroy(&viewer);
  return 0;
}
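
/*
  Note: the reference solution is written to the file "ex5opt.ob" in the current working
  directory; FormFunctionAndGradient() and InitializeLambda() later reload it with
  PetscViewerBinaryOpen()/VecLoad() when evaluating the objective and the adjoint terminal
  condition.
*/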

PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  DMDAVecGetArray(da,U,&u);
  /* Get local grid boundaries */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  DMDAVecRestoreArray(da,U,&u);
  return 0;
}

PetscErrorCode PerturbedInitialConditions(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  DMDAVecGetArray(da,U,&u);
  /* Get local grid boundaries */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*0.5*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*0.5*y),2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  DMDAVecRestoreArray(da,U,&u);
  return 0;
}

PetscErrorCode PerturbedInitialConditions2(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  DMDAVecGetArray(da,U,&u);
  /* Get local grid boundaries */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*(x-0.5)),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  DMDAVecRestoreArray(da,U,&u);
  return 0;
}

PetscErrorCode PerturbedInitialConditions3(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  DMDAVecGetArray(da,U,&u);
  /* Get local grid boundaries */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if ((0.5 <= x) && (x <= 2.0) && (0.5 <= y) && (y <= 2.0)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  DMDAVecRestoreArray(da,U,&u);
  return 0;
}

int main(int argc,char **argv)
{
  DM        da;
  AppCtx    appctx;
  PetscBool forwardonly = PETSC_FALSE,implicitform = PETSC_FALSE;
  PetscInt  perturbic = 1;

  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL);
  PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL);
  PetscOptionsGetInt(NULL,NULL,"-perturbic",&perturbic,NULL);

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.aijpc = PETSC_FALSE;

  /* Create distributed array (DMDA) to manage parallel grid and vectors */
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);
  DMSetFromOptions(da);
  DMSetUp(da);
  DMDASetFieldName(da,0,"u");
  DMDASetFieldName(da,1,"v");

  /* Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types */
  DMCreateGlobalVector(da,&appctx.U);

  /* Create timestepping solver context */
  TSCreate(PETSC_COMM_WORLD,&appctx.ts);
  TSSetType(appctx.ts,TSCN);
  TSSetDM(appctx.ts,da);
  TSSetProblemType(appctx.ts,TS_NONLINEAR);
  TSSetEquationType(appctx.ts,TS_EQ_ODE_EXPLICIT); /* fewer Jacobian evaluations when adjoint backward Euler is used; otherwise no effect */
  if (!implicitform) {
    TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx);
    TSSetRHSJacobian(appctx.ts,NULL,NULL,RHSJacobian,&appctx);
  } else {
    TSSetIFunction(appctx.ts,NULL,IFunction,&appctx);
    TSSetIJacobian(appctx.ts,NULL,NULL,IJacobian,&appctx);
  }

  /* Set initial conditions */
  InitialConditions(da,appctx.U);
  TSSetSolution(appctx.ts,appctx.U);

  /* Set solver options */
  TSSetMaxTime(appctx.ts,2000.0);
  TSSetTimeStep(appctx.ts,0.5);
  TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP);
  TSSetFromOptions(appctx.ts);

  GenerateOBs(appctx.ts,appctx.U,&appctx);

  if (!forwardonly) {
    Tao tao;
    Vec P;
    Vec lambda[1];
#if defined(PETSC_USE_LOG)
    PetscLogStage opt_stage;
#endif

    PetscLogStageRegister("Optimization",&opt_stage);
    PetscLogStagePush(opt_stage);
    /* Perturb the initial conditions so that the optimization starts away from the reference solution */
    if (perturbic == 1) {
      PerturbedInitialConditions(da,appctx.U);
    } else if (perturbic == 2) {
      PerturbedInitialConditions2(da,appctx.U);
    } else if (perturbic == 3) {
      PerturbedInitialConditions3(da,appctx.U);
    }

    VecDuplicate(appctx.U,&lambda[0]);
    TSSetCostGradients(appctx.ts,1,lambda,NULL);

    /* Have the TS save its trajectory needed by TSAdjointSolve() */
    TSSetSaveTrajectory(appctx.ts);

    /* Create TAO solver and set desired solution method */
    TaoCreate(PETSC_COMM_WORLD,&tao);
    TaoSetType(tao,TAOBLMVM);

    /* Set initial guess for TAO */
    VecDuplicate(appctx.U,&P);
    VecCopy(appctx.U,P);
    TaoSetSolution(tao,P);

    /* Set routine for function and gradient evaluation */
    TaoSetObjectiveAndGradient(tao,NULL,FormFunctionAndGradient,&appctx);

    /* Check for any TAO command line options */
    TaoSetFromOptions(tao);

    TaoSolve(tao);
    TaoDestroy(&tao);
    VecDestroy(&lambda[0]);
    VecDestroy(&P);
    PetscLogStagePop();
  }

  /* Free work space. All PETSc objects should be destroyed when they
     are no longer needed. */
  VecDestroy(&appctx.U);
  TSDestroy(&appctx.ts);
  DMDestroy(&da);
  PetscFinalize();
  return 0;
}
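
/*
  Example invocations (a sketch; the executable name ex5opt_ic is assumed):

    ./ex5opt_ic -forwardonly
        generate the reference solution ex5opt.ob and stop after the forward simulation

    ./ex5opt_ic -ts_max_steps 5 -ts_type rk -ts_rk_type 3 -ts_trajectory_type memory -tao_monitor -tao_view -tao_gatol 1e-6
        run the inverse problem with the options used in the test below
*/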

/* ------------------------ TAO callbacks ---------------------------- */

/*
  FormFunctionAndGradient - Evaluates the function and corresponding gradient.

  Input Parameters:
  tao - the Tao context
  P   - the input vector
  ctx - optional user-defined context, as set by TaoSetObjectiveAndGradient()

  Output Parameters:
  f - the newly evaluated function
  G - the newly evaluated gradient
*/
PetscErrorCode FormFunctionAndGradient(Tao tao,Vec P,PetscReal *f,Vec G,void *ctx)
{
  AppCtx      *appctx = (AppCtx*)ctx;
  PetscReal   soberr,timestep;
  Vec         *lambda;
  Vec         SDiff;
  DM          da;
  char        filename[PETSC_MAX_PATH_LEN] = "";
  PetscViewer viewer;

  /* Reset the time integrator so that a fresh forward solve starts from t=0 */
  TSSetTime(appctx->ts,0.0);
  TSGetTimeStep(appctx->ts,&timestep);
  if (timestep<0) {
    TSSetTimeStep(appctx->ts,-timestep);
  }
  TSSetStepNumber(appctx->ts,0);
  TSSetFromOptions(appctx->ts);

  VecDuplicate(P,&SDiff);
  VecCopy(P,appctx->U);
  TSGetDM(appctx->ts,&da);
  *f = 0;

  /* Forward solve, then evaluate f = ||U(t_f) - U_ref||^2 using the stored reference solution */
  TSSolve(appctx->ts,appctx->U);
  PetscSNPrintf(filename,sizeof filename,"ex5opt.ob");
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,filename,FILE_MODE_READ,&viewer);
  VecLoad(SDiff,viewer);
  PetscViewerDestroy(&viewer);
  VecAYPX(SDiff,-1.,appctx->U);
  VecDot(SDiff,SDiff,&soberr);
  *f += soberr;

  /* Adjoint solve: the terminal condition set in InitializeLambda() is propagated back to t=0,
     giving the gradient of f with respect to the initial condition */
  TSGetCostGradients(appctx->ts,NULL,&lambda,NULL);
  VecSet(lambda[0],0.0);
  InitializeLambda(da,lambda[0],appctx->U,appctx);
  TSAdjointSolve(appctx->ts);

  VecCopy(lambda[0],G);

  VecDestroy(&SDiff);
  return 0;
}
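
/*
  A quick way to sanity-check the adjoint-based gradient computed above is to compare it against a
  finite-difference gradient; assuming the installed TAO version provides the option, this can be
  requested at runtime without code changes, e.g.

    ./ex5opt_ic -ts_max_steps 5 -ts_type rk -tao_test_gradient

  (-tao_test_gradient is an assumption about the installed PETSc/TAO version, not an option used by
  this example's test.)
*/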

/*TEST

   build:
      depends: reaction_diffusion.c
      requires: !complex !single

   test:
      args: -ts_max_steps 5 -ts_type rk -ts_rk_type 3 -ts_trajectory_type memory -tao_monitor -tao_view -tao_gatol 1e-6
      output_file: output/ex5opt_ic_1.out

TEST*/