Actual source code: ex5adj_mf.c
static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

/*
  See ex5.c for details on the equation.
  This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
  It computes the sensitivity of a component in the final solution, which is located at the center of the 2D domain, w.r.t. the initial conditions.
  The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.

  Runtime options:
    -forwardonly  - run the forward simulation without adjoint
    -implicitform - provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian will be used
    -aijpc        - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL)
*/

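/*
  Example invocation (illustrative only; the number of MPI ranks and the monitoring
  options are up to the user):
    mpiexec -n 4 ./ex5adj_mf -mf -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor
  With -mf, the Jacobians are applied matrix-free through the MATSHELL callbacks defined below.
*/
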
#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>

/* Matrix-free support: context attached to the MATSHELL Jacobians */
typedef struct {
  PetscReal time;  /* not used in this example */
  Vec       U;     /* state at which the Jacobian action is applied */
  Vec       Udot;  /* not used here (see the commented-out code below) */
  PetscReal shift; /* shift parameter passed to IJacobianShell() */
  AppCtx   *appctx;
  TS        ts;
} MCtx;

/*
   User-defined routines
*/
PetscErrorCode InitialConditions(DM, Vec);
PetscErrorCode RHSJacobianShell(TS, PetscReal, Vec, Mat, Mat, void *);
PetscErrorCode IJacobianShell(TS, PetscReal, Vec, Vec, PetscReal, Mat, Mat, void *);

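/*
  Initialize the adjoint variable, i.e. the gradient of the cost functional with respect
  to the state at the final time: a unit value is placed in both fields u and v at the
  grid point closest to the fractional location (x,y) of the domain.
*/
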
PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y)
{
  PetscInt i, j, Mx, My, xs, ys, xm, ym;
  Field  **l;
  PetscFunctionBegin;

  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  /* locate the global i index for x and the global j index for y */
  i = (PetscInt)(x * (Mx - 1));
  j = (PetscInt)(y * (My - 1));
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
    /* the (i,j) vertex is owned by this process */
    PetscCall(DMDAVecGetArray(da, lambda, &l));
    l[j][i].u = 1.0;
    l[j][i].v = 1.0;
    PetscCall(DMDAVecRestoreArray(da, lambda, &l));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

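/*
  Matrix-free action of the transposed RHS Jacobian: Y = (dF/dU)^T X, where F is the
  reaction-diffusion right-hand side and the linearization point is the state stored in
  mctx->U by RHSJacobianShell(). TSAdjointSolve() applies this operation when an explicit
  method is used.
*/
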
static PetscErrorCode MyRHSMatMultTranspose(Mat A_shell, Vec X, Vec Y)
{
  MCtx       *mctx;
  AppCtx     *appctx;
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
  Field     **u, **x, **y;
  Vec         localX;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell, &mctx));
  appctx = mctx->appctx;
  PetscCall(TSGetDM(mctx->ts, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost points to the local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArrayRead(da, localX, &x));
  PetscCall(DMDAVecGetArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecGetArray(da, Y, &y));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute the transposed Jacobian action over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
      uyy = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
      vyy = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
      y[j][i].u = appctx->D1 * (uxx + uyy) - ucb * (vc * vc + appctx->gamma) + vc * vc * vcb;
      y[j][i].v = appctx->D2 * (vxx + vyy) - 2.0 * uc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
    }
  }
  PetscCall(DMDAVecRestoreArrayRead(da, localX, &x));
  PetscCall(DMDAVecRestoreArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecRestoreArray(da, Y, &y));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscFunctionReturn(PETSC_SUCCESS);
}

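/*
  Matrix-free action of the transposed IJacobian for the implicit form
  F(t,U,Udot) = Udot - RHSFunction(t,U): Y = (shift*I - J)^T X, where J is the RHS Jacobian
  evaluated at the state recorded by IJacobianShell(). TSAdjointSolve() applies this
  operation when -implicitform is used.
*/
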
static PetscErrorCode MyIMatMultTranspose(Mat A_shell, Vec X, Vec Y)
{
  MCtx       *mctx;
  AppCtx     *appctx;
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
  Field     **u, **x, **y;
  Vec         localX;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell, &mctx));
  appctx = mctx->appctx;
  PetscCall(TSGetDM(mctx->ts, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost points to the local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArrayRead(da, localX, &x));
  PetscCall(DMDAVecGetArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecGetArray(da, Y, &y));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute (shift*I - J)^T X over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
      uyy = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
      vyy = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
      y[j][i].u = appctx->D1 * (uxx + uyy) - ucb * (vc * vc + appctx->gamma) + vc * vc * vcb;
      y[j][i].v = appctx->D2 * (vxx + vyy) - 2.0 * uc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
      y[j][i].u = mctx->shift * ucb - y[j][i].u;
      y[j][i].v = mctx->shift * vcb - y[j][i].v;
    }
  }
  PetscCall(DMDAVecRestoreArrayRead(da, localX, &x));
  PetscCall(DMDAVecRestoreArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecRestoreArray(da, Y, &y));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscFunctionReturn(PETSC_SUCCESS);
}

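/*
  Matrix-free action of the (untransposed) IJacobian: Y = (shift*I - J) X. This is the
  operator applied by the nonlinear and linear solvers during the forward implicit
  integration when both -mf and -implicitform are set.
*/
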
static PetscErrorCode MyIMatMult(Mat A_shell, Vec X, Vec Y)
{
  MCtx       *mctx;
  AppCtx     *appctx;
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
  Field     **u, **x, **y;
  Vec         localX;

  PetscFunctionBeginUser;
  PetscCall(MatShellGetContext(A_shell, &mctx));
  appctx = mctx->appctx;
  PetscCall(TSGetDM(mctx->ts, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost points to the local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));

  /* Get pointers to vector data */
  PetscCall(DMDAVecGetArrayRead(da, localX, &x));
  PetscCall(DMDAVecGetArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecGetArray(da, Y, &y));

  /* Get local grid boundaries */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /* Compute (shift*I - J) X over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
      uyy = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
      vyy = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
      y[j][i].u = appctx->D1 * (uxx + uyy) - (vc * vc + appctx->gamma) * ucb - 2.0 * uc * vc * vcb;
      y[j][i].v = appctx->D2 * (vxx + vyy) + vc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
      y[j][i].u = mctx->shift * ucb - y[j][i].u;
      y[j][i].v = mctx->shift * vcb - y[j][i].v;
    }
  }
  PetscCall(DMDAVecRestoreArrayRead(da, localX, &x));
  PetscCall(DMDAVecRestoreArrayRead(da, mctx->U, &u));
  PetscCall(DMDAVecRestoreArray(da, Y, &y));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  TS             ts; /* ODE integrator */
  Vec            x;  /* solution */
  DM             da;
  AppCtx         appctx;
  MCtx           mctx;
  Vec            lambda[1];
  PetscBool      forwardonly = PETSC_FALSE, implicitform = PETSC_TRUE, mf = PETSC_FALSE;
  PetscLogDouble v1, v2;
#if defined(PETSC_USE_LOG)
  PetscLogStage stage;
#endif

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL));
  appctx.aijpc = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-aijpc", &appctx.aijpc, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-mf", &mf, NULL));

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;

  PetscCall(PetscLogStageRegister("MyAdjoint", &stage));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetEquationType(ts, TS_EQ_ODE_EXPLICIT)); /* fewer Jacobian evaluations when the adjoint of backward Euler is used; otherwise no effect */
  if (!implicitform) {
    PetscCall(TSSetType(ts, TSRK));
    PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
    PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobian, &appctx));
  } else {
    PetscCall(TSSetType(ts, TSCN));
    PetscCall(TSSetIFunction(ts, NULL, IFunction, &appctx));
    if (appctx.aijpc) {
      Mat A, B;

      PetscCall(DMSetMatType(da, MATSELL));
      PetscCall(DMCreateMatrix(da, &A));
      PetscCall(MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &B));
      /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
      PetscCall(TSSetIJacobian(ts, A, B, IJacobian, &appctx));
      PetscCall(MatDestroy(&A));
      PetscCall(MatDestroy(&B));
    } else {
      PetscCall(TSSetIJacobian(ts, NULL, NULL, IJacobian, &appctx));
    }
  }

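  /*
     With -mf, the assembled Jacobians are replaced by a MATSHELL whose MatMult and
     MatMultTranspose callbacks (defined above) apply the Jacobian action on the fly;
     RHSJacobianShell()/IJacobianShell() only record the current state (and shift) in the
     shell context.
  */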
  if (mf) {
    PetscInt xm, ym, Mx, My, dof;
    mctx.ts     = ts;
    mctx.appctx = &appctx;
    PetscCall(VecDuplicate(x, &mctx.U));
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Create matrix-free context
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, &dof, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
    PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &xm, &ym, NULL));
    PetscCall(MatCreateShell(PETSC_COMM_WORLD, dof * xm * ym, PETSC_DETERMINE, dof * Mx * My, dof * Mx * My, &mctx, &appctx.A));
    PetscCall(MatShellSetOperation(appctx.A, MATOP_MULT_TRANSPOSE, (void (*)(void))MyRHSMatMultTranspose));
    if (!implicitform) { /* for explicit methods only */
      PetscCall(TSSetRHSJacobian(ts, appctx.A, appctx.A, RHSJacobianShell, &appctx));
    } else {
      /* PetscCall(VecDuplicate(appctx.U,&mctx.Udot)); */
      PetscCall(MatShellSetOperation(appctx.A, MATOP_MULT, (void (*)(void))MyIMatMult));
      PetscCall(MatShellSetOperation(appctx.A, MATOP_MULT_TRANSPOSE, (void (*)(void))MyIMatMultTranspose));
      PetscCall(TSSetIJacobian(ts, appctx.A, appctx.A, IJacobianShell, &appctx));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /*
    Have the TS save its trajectory so that TSAdjointSolve() may be used
  */
  if (!forwardonly) PetscCall(TSSetSaveTrajectory(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts, 200.0));
  PetscCall(TSSetTimeStep(ts, 0.5));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetFromOptions(ts));

  PetscCall(PetscTime(&v1));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the adjoint model
    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    PetscCall(VecDuplicate(x, &lambda[0]));
    /* Set the initial condition (at the final time) for the backward adjoint integration */
    PetscCall(InitializeLambda(da, lambda[0], 0.5, 0.5));
    PetscCall(TSSetCostGradients(ts, 1, lambda, NULL));
    PetscCall(PetscLogStagePush(stage));
    PetscCall(TSAdjointSolve(ts));
    PetscCall(PetscLogStagePop());
    PetscCall(VecDestroy(&lambda[0]));
  }
  PetscCall(PetscTime(&v2));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, " %.3lf ", v2 - v1));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));
  if (mf) {
    PetscCall(VecDestroy(&mctx.U));
    /* PetscCall(VecDestroy(&mctx.Udot)); */
    PetscCall(MatDestroy(&appctx.A));
  }
  PetscCall(PetscFinalize());
  return 0;
}

/* ------------------------------------------------------------------- */
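/*
  The shell "Jacobian evaluation" callbacks below do not assemble a matrix. They only
  record the current state U (and, for the implicit form, the shift a) in the shell
  context so that the MatMult/MatMultTranspose routines above can apply the Jacobian
  at the correct linearization point.
*/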
PetscErrorCode RHSJacobianShell(TS ts, PetscReal t, Vec U, Mat A, Mat BB, void *ctx)
{
  MCtx *mctx;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A, &mctx));
  PetscCall(VecCopy(U, mctx->U));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode IJacobianShell(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat BB, void *ctx)
{
  MCtx *mctx;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A, &mctx));
  PetscCall(VecCopy(U, mctx->U));
  /* PetscCall(VecCopy(Udot,mctx->Udot)); */
  mctx->shift = a;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------------------- */
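/*
  Set the initial conditions used in ex5.c: u = 1 and v = 0 everywhere except inside the
  square [1, 1.5] x [1, 1.5], where v is a smooth bump 0.25*sin^2(4*pi*x)*sin^2(4*pi*y)
  and u = 1 - 2*v.
*/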
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
     depends: reaction_diffusion.c
     requires: !complex !single

   test:
     requires: cams
     args: -mf -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -ts_trajectory_max_units_ram 6 -ts_trajectory_memory_type cams

TEST*/