Actual source code: ex5adj.c
1: static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
3: /*
4: See ex5.c for details on the equation.
5: This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
6: It computes the sensitivity of a component of the final solution, located at the center of the 2D domain, with respect to the initial conditions.
7: The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.
9: Runtime options:
10: -forwardonly - run the forward simulation without adjoint
11: -implicitform - provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian will be used
12: -aijpc - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL)
13: */
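Relative to the forward-only simulation in ex5.c, the adjoint computation adds only a handful of TS calls, all of which appear in main() below. A condensed sketch of that call sequence (a restatement of this file, with error checking omitted as in the listing):

  TSSetSaveTrajectory(ts);                  /* checkpoint forward states so the adjoint can replay them */
  TSSolve(ts,x);                            /* forward integration to the final time */
  VecDuplicate(x,&lambda[0]);               /* one adjoint vector per cost function */
  InitializeLambda(da,lambda[0],0.5,0.5);   /* seed lambda with dPhi/dy at the final time */
  TSSetCostGradients(ts,1,lambda,NULL);     /* one cost function, no parameter sensitivities */
  TSAdjointSolve(ts);                       /* lambda[0] now holds dPhi/d(initial conditions) */

For example, running with -implicitform 0 -ts_type rk exercises the explicit RHSFunction/RHSJacobian path, while -forwardonly skips the adjoint sweep entirely.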
14: #include "reaction_diffusion.h"
15: #include <petscdm.h>
16: #include <petscdmda.h>
18: PetscErrorCode InitialConditions(DM,Vec);
20: PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
21: {
22: PetscInt i,j,Mx,My,xs,ys,xm,ym;
24: Field **l;
27: DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
28: /* locate the global i index for x and j index for y */
29: i = (PetscInt)(x*(Mx-1));
30: j = (PetscInt)(y*(My-1));
31: DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
33: if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
34: /* the i,j vertex is on this process */
35: DMDAVecGetArray(da,lambda,&l);
36: l[j][i].u = 1.0;
37: l[j][i].v = 1.0;
38: DMDAVecRestoreArray(da,lambda,&l);
39: }
40: return(0);
41: }
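In the TSAdjoint convention, the vectors passed to TSSetCostGradients() must be initialized at the final time with the derivative of the cost functional with respect to the state; TSAdjointSolve() then integrates them backward. Seeding ones into the u and v slots of the vertex nearest (x,y) therefore corresponds (roughly) to the cost functional \Phi = u_c(t_F) + v_c(t_F) at that vertex c, so that

  \lambda(t_F) = \frac{\partial \Phi}{\partial y}\bigg|_{t_F}, \qquad \lambda(t_0) = \frac{\mathrm{d}\Phi}{\mathrm{d}y_0} \quad \text{(returned in lambda[0] by TSAdjointSolve()).}

This is the only place the choice of cost functional enters; everything else is reused from the forward model, as noted in the header comment.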
43: int main(int argc,char **argv)
44: {
45: TS ts; /* ODE integrator */
46: Vec x; /* solution */
47: PetscErrorCode ierr;
48: DM da;
49: AppCtx appctx;
50: Vec lambda[1];
51: PetscBool forwardonly=PETSC_FALSE,implicitform=PETSC_TRUE;
53: ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
54: PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL);
55: PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL);
56: appctx.aijpc = PETSC_FALSE;
57: PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL);
59: appctx.D1 = 8.0e-5;
60: appctx.D2 = 4.0e-5;
61: appctx.gamma = .024;
62: appctx.kappa = .06;
64: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
65: Create distributed array (DMDA) to manage parallel grid and vectors
66: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
67: DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);
68: DMSetFromOptions(da);
69: DMSetUp(da);
70: DMDASetFieldName(da,0,"u");
71: DMDASetFieldName(da,1,"v");
73: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
74: Extract global vectors from DMDA; then duplicate for remaining
75: vectors that are the same types
76: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
77: DMCreateGlobalVector(da,&x);
79: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
80: Create timestepping solver context
81: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
82: TSCreate(PETSC_COMM_WORLD,&ts);
83: TSSetDM(ts,da);
84: TSSetProblemType(ts,TS_NONLINEAR);
85: TSSetEquationType(ts,TS_EQ_ODE_EXPLICIT); /* fewer Jacobian evaluations when adjoint BEuler is used; otherwise no effect */
86: if (!implicitform) {
87: TSSetType(ts,TSRK);
88: TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);
89: TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx);
90: } else {
91: TSSetType(ts,TSCN);
92: TSSetIFunction(ts,NULL,IFunction,&appctx);
93: if (appctx.aijpc) {
94: Mat A,B;
96: DMSetMatType(da,MATSELL);
97: DMCreateMatrix(da,&A);
98: MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B);
99: /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
100: TSSetIJacobian(ts,A,B,IJacobian,&appctx);
101: MatDestroy(&A);
102: MatDestroy(&B);
103: } else {
104: TSSetIJacobian(ts,NULL,NULL,IJacobian,&appctx);
105: }
106: }
108: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
109: Set initial conditions
110: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
111: InitialConditions(da,x);
112: TSSetSolution(ts,x);
114: /*
115: Have the TS save its trajectory so that TSAdjointSolve() may be used
116: */
117: if (!forwardonly) { TSSetSaveTrajectory(ts); }
119: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
120: Set solver options
121: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
122: TSSetMaxTime(ts,200.0);
123: TSSetTimeStep(ts,0.5);
124: TSSetExactFinalTime(ts,TS_EXACTFINALTIME_MATCHSTEP);
125: TSSetFromOptions(ts);
127: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
128: Solve ODE system
129: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
130: TSSolve(ts,x);
131: if (!forwardonly) {
132: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
133: Start the Adjoint model
134: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
135: VecDuplicate(x,&lambda[0]);
136: /* Reset initial conditions for the adjoint integration */
137: InitializeLambda(da,lambda[0],0.5,0.5);
138: TSSetCostGradients(ts,1,lambda,NULL);
139: TSAdjointSolve(ts);
140: VecDestroy(&lambda[0]);
141: }
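  /* Illustrative note, not part of the original example: when TSAdjointSolve() returns,
     lambda[0] contains the sensitivity of the cost functional with respect to the initial
     conditions. To inspect it before the VecDestroy() above, one could add, for instance,
         VecView(lambda[0],PETSC_VIEWER_STDOUT_WORLD);
     or run with -ts_adjoint_monitor_draw_sensi (used in the tests below) to draw the
     sensitivities during the adjoint sweep. */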
142: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
143: Free work space. All PETSc objects should be destroyed when they
144: are no longer needed.
145: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
146: VecDestroy(&x);
147: TSDestroy(&ts);
148: DMDestroy(&da);
149: ierr = PetscFinalize();
150: return ierr;
151: }
153: /* ------------------------------------------------------------------- */
154: PetscErrorCode InitialConditions(DM da,Vec U)
155: {
157: PetscInt i,j,xs,ys,xm,ym,Mx,My;
158: Field **u;
159: PetscReal hx,hy,x,y;
162: DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);
164: hx = 2.5/(PetscReal)Mx;
165: hy = 2.5/(PetscReal)My;
167: /*
168: Get pointers to vector data
169: */
170: DMDAVecGetArray(da,U,&u);
172: /*
173: Get local grid boundaries
174: */
175: DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
177: /*
178: Compute function over the locally owned part of the grid
179: */
180: for (j=ys; j<ys+ym; j++) {
181: y = j*hy;
182: for (i=xs; i<xs+xm; i++) {
183: x = i*hx;
184: if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
185: else u[j][i].v = 0.0;
187: u[j][i].u = 1.0 - 2.0*u[j][i].v;
188: }
189: }
191: /*
192: Restore vectors
193: */
194: DMDAVecRestoreArray(da,U,&u);
195: return(0);
196: }
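Written out, the state produced above (with x = i*hx, y = j*hy on the 2.5 x 2.5 periodic domain) is the localized perturbation used in ex5.c:

  v(x,y,0) = \begin{cases} \tfrac{1}{4}\sin^2(4\pi x)\,\sin^2(4\pi y), & 1 \le x \le 1.5,\ 1 \le y \le 1.5,\\ 0, & \text{otherwise}, \end{cases} \qquad u(x,y,0) = 1 - 2\,v(x,y,0).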
198: /*TEST
200: build:
201: depends: reaction_diffusion.c
202: requires: !complex !single
204: test:
205: args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20
206: output_file: output/ex5adj_1.out
208: test:
209: suffix: 2
210: nsize: 2
211: args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -ts_trajectory_dirname Test-dir -ts_trajectory_file_template test-%06D.cp
213: test:
214: suffix: 3
215: nsize: 2
216: args: -ts_max_steps 10 -ts_dt 10 -ts_adjoint_monitor_draw_sensi
218: test:
219: suffix: 4
220: nsize: 2
221: args: -ts_max_steps 10 -ts_dt 10 -ts_monitor -ts_adjoint_monitor -ksp_monitor_short -da_grid_x 20 -da_grid_y 20 -snes_fd_color
222: output_file: output/ex5adj_2.out
224: test:
225: suffix: 5
226: nsize: 2
227: args: -ts_max_steps 10 -implicitform 0 -ts_type rk -ts_rk_type 4 -ts_monitor -ts_adjoint_monitor -da_grid_x 20 -da_grid_y 20 -snes_fd_color
228: output_file: output/ex5adj_1.out
230: test:
231: suffix: knl
232: args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -malloc_hbw -ts_trajectory_use_dram 1
233: output_file: output/ex5adj_3.out
234: requires: knl
236: test:
237: suffix: sell
238: nsize: 4
239: args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type none
240: output_file: output/ex5adj_sell_1.out
242: test:
243: suffix: aijsell
244: nsize: 4
245: args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type none
246: output_file: output/ex5adj_sell_1.out
248: test:
249: suffix: sell2
250: nsize: 4
251: args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
252: output_file: output/ex5adj_sell_2.out
254: test:
255: suffix: aijsell2
256: nsize: 4
257: args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type sor
258: output_file: output/ex5adj_sell_2.out
260: test:
261: suffix: sell3
262: nsize: 4
263: args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
264: output_file: output/ex5adj_sell_3.out
266: test:
267: suffix: sell4
268: nsize: 4
269: args: -forwardonly -implicitform -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -pc_type mg -pc_mg_levels 2 -mg_coarse_pc_type bjacobi -mg_levels_pc_type bjacobi
270: output_file: output/ex5adj_sell_4.out
272: test:
273: suffix: sell5
274: nsize: 4
275: args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type sell -aijpc
276: output_file: output/ex5adj_sell_5.out
278: test:
279: suffix: aijsell5
280: nsize: 4
281: args: -forwardonly -ts_max_steps 10 -ts_monitor -snes_monitor_short -dm_mat_type aijsell
282: output_file: output/ex5adj_sell_5.out
284: test:
285: suffix: sell6
286: args: -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -dm_mat_type sell -pc_type jacobi
287: output_file: output/ex5adj_sell_6.out
289: test:
290: suffix: gamg1
291: args: -pc_type gamg -pc_mg_levels 2 -mg_levels_pc_type jacobi -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory
292: output_file: output/ex5adj_gamg_1.out
294: test:
295: suffix: gamg2
296: args: -pc_type gamg -pc_mg_levels 2 -mg_levels_pc_type jacobi -ksp_monitor_short -ts_max_steps 2 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ksp_use_explicittranspose
297: output_file: output/ex5adj_gamg_2.out
298: TEST*/