Actual source code: burgers_spectral.c


static char help[] = "Solves a simple data assimilation problem with the one-dimensional Burgers' equation using TSAdjoint\n\n";

/*

    Not yet tested in parallel

*/

/* ------------------------------------------------------------------------

   This program uses the one-dimensional Burgers' equation
       u_t = mu*u_xx - u u_x,
   on the domain 0 <= x <= L (L = 4 by default), with periodic boundary conditions,

   to demonstrate solving a data assimilation problem: find the initial conditions
   that produce a given solution at a fixed final time.

   The operators are discretized with the spectral element method.

   See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
   by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
   used.
  ------------------------------------------------------------------------- */
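
/*
   For reference, the exact solution implemented by TrueSolution() and
   ComputeObjective() below (see the paper cited above for its derivation) is

       u(x,t) = 2 mu pi exp(-pi^2 mu t) sin(pi x) / (2 + exp(-pi^2 mu t) cos(pi x))

   so the "true" initial condition is u(x,0) and the target profile is u(x,Tend).
*/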

#include <petsctao.h>
#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

typedef struct {
  PetscInt   n;       /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;

typedef struct {
  PetscInt  N;               /* grid points per element */
  PetscInt  E;               /* number of elements */
  PetscReal tol_L2, tol_max; /* error norms */
  PetscInt  steps;           /* number of timesteps */
  PetscReal Tend;            /* endtime */
  PetscReal mu;              /* viscosity */
  PetscReal L;               /* total length of domain */
  PetscReal Le;              /* length of one element, L/E */
  PetscReal Tadj;
} PetscParam;

typedef struct {
  Vec obj;  /* desired end state */
  Vec grid; /* total grid */
  Vec grad;
  Vec ic;
  Vec curr_sol;
  Vec true_solution; /* actual initial conditions for the final solution */
} PetscData;

typedef struct {
  Vec      grid;  /* total grid */
  Vec      mass;  /* mass matrix for total integration */
  Mat      stiff; /* stiffness matrix */
  Mat      keptstiff;
  Mat      grad;
  PetscGLL gll;
} PetscSEMOperators;

typedef struct {
  DM                da; /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode FormFunctionGradient(Tao, Vec, PetscReal *, Vec, void *);
extern PetscErrorCode RHSMatrixLaplaciangllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSMatrixAdvectiongllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode TrueSolution(Vec, AppCtx *);
extern PetscErrorCode ComputeObjective(PetscReal, Vec, AppCtx *);
extern PetscErrorCode MonitorError(Tao, void *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);

int main(int argc, char **argv)
{
  AppCtx       appctx; /* user-defined application context */
  Tao          tao;
  Vec          u; /* approximate solution vector */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal    x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;
  PetscMPIInt  size;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));

  /* initialize parameters */
  appctx.param.N     = 10;   /* order of the spectral element */
  appctx.param.E     = 10;   /* number of elements */
  appctx.param.L     = 4.0;  /* length of the domain */
  appctx.param.mu    = 0.01; /* diffusion coefficient */
  appctx.initial_dt  = 5e-3;
  appctx.param.steps = PETSC_MAX_INT;
  appctx.param.Tend  = 4;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
  appctx.param.Le = appctx.param.L / appctx.param.E;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck((appctx.param.E % size) == 0, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Number of elements must be divisible by number of processes");

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
  PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  appctx.SEMop.gll.n = appctx.param.N;
  lenglob            = appctx.param.E * (appctx.param.N - 1);
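
  /*
     With periodic boundary conditions each element shares its right endpoint with
     its neighbour, so the global grid has E*(N-1) distinct points; for the default
     E = 10, N = 10 this gives lenglob = 90.
  */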

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  With periodic
     boundary conditions there are E*(N-1) total grid values, spread equally
     among all the processors.
  */

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */

  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(VecDuplicate(u, &appctx.dat.ic));
  PetscCall(VecDuplicate(u, &appctx.dat.true_solution));
  PetscCall(VecDuplicate(u, &appctx.dat.obj));
  PetscCall(VecDuplicate(u, &appctx.SEMop.grid));
  PetscCall(VecDuplicate(u, &appctx.SEMop.mass));
  PetscCall(VecDuplicate(u, &appctx.dat.curr_sol));

  PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));

  /* Compute function over the locally owned part of the grid */

  xs = xs / (appctx.param.N - 1);
  xm = xm / (appctx.param.N - 1);

  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */

  for (i = xs; i < xs + xm; i++) {
    for (j = 0; j < appctx.param.N - 1; j++) {
      x             = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
      ind           = i * (appctx.param.N - 1) + j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
      if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
    }
  }
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
  PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
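
  /*
     The "mass" vector assembled above is the diagonal (lumped) SEM mass matrix:
     each GLL weight is scaled by Le/2, the Jacobian of the affine map from the
     reference element [-1,1] to an element of length Le, and a node shared by two
     elements receives both contributions (the j == 0 case, which relies on the
     GLL weights being symmetric and on the periodic wrap-around).
  */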

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   Create matrix data structure; set matrix evaluation routine.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
  PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.grad));
  /*
   For linear problems with a time-dependent f(u,t) in the equation
   u_t = f(u,t), the user provides the discretized right-hand side
   as a time-dependent matrix.  The TS argument is unused by these
   routines, so NULL is passed here (the TS has not been created yet).
   */
  PetscCall(RHSMatrixLaplaciangllDM(NULL, 0.0, u, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
  PetscCall(RHSMatrixAdvectiongllDM(NULL, 0.0, u, appctx.SEMop.grad, appctx.SEMop.grad, &appctx));

  PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));

  /* attach the null space to the matrix; this probably is not needed but does no harm */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.keptstiff, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));
  /* likewise attach a null space to the advection matrix */
  PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
  PetscCall(MatSetNullSpace(appctx.SEMop.grad, nsp));
  PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.grad, NULL));
  PetscCall(MatNullSpaceDestroy(&nsp));

  /* Create the TS solver that solves the ODE and its adjoint; set its options */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
  PetscCall(TSSetProblemType(appctx.ts, TS_NONLINEAR));
  PetscCall(TSSetType(appctx.ts, TSRK));
  PetscCall(TSSetDM(appctx.ts, appctx.da));
  PetscCall(TSSetTime(appctx.ts, 0.0));
  PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
  PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
  PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
  PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
  PetscCall(TSSetFromOptions(appctx.ts));
  /* Need to save the initial timestep the user may have set with -ts_dt so it can be reset for each new TSSolve() */
  PetscCall(TSGetTimeStep(appctx.ts, &appctx.initial_dt));
  PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
  PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, RHSJacobian, &appctx));

  /* Set the objective and initial conditions for the problem and compute the objective function (evolution of true_solution to the final time) */
  PetscCall(InitialConditions(appctx.dat.ic, &appctx));
  PetscCall(TrueSolution(appctx.dat.true_solution, &appctx));
  PetscCall(ComputeObjective(appctx.param.Tend, appctx.dat.obj, &appctx));

  PetscCall(TSSetSaveTrajectory(appctx.ts));
  PetscCall(TSSetFromOptions(appctx.ts));

  /* Create TAO solver and set desired solution method */
  PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
  PetscCall(TaoSetMonitor(tao, MonitorError, &appctx, NULL));
  PetscCall(TaoSetType(tao, TAOBQNLS));
  PetscCall(TaoSetSolution(tao, appctx.dat.ic));
  /* Set routine for function and gradient evaluation */
  PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, (void *)&appctx));
  PetscCall(TaoSetTolerances(tao, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT));
  /* Check for any TAO command line options */
  PetscCall(TaoSetFromOptions(tao));
  PetscCall(TaoSolve(tao));
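
  /*
     The solve above can be exercised by combining the problem options registered
     earlier with standard Tao options, e.g. (illustrative command line, matching
     the tests at the bottom of this file):

         mpiexec -n 2 ./burgers_spectral -E 10 -N 10 -mu 0.01 -tao_max_it 5 -tao_gatol 1.e-4 -tao_monitor
  */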

  PetscCall(TaoDestroy(&tao));
  PetscCall(MatDestroy(&appctx.SEMop.stiff));
  PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
  PetscCall(MatDestroy(&appctx.SEMop.grad));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&appctx.dat.ic));
  PetscCall(VecDestroy(&appctx.dat.true_solution));
  PetscCall(VecDestroy(&appctx.dat.obj));
  PetscCall(VecDestroy(&appctx.SEMop.grid));
  PetscCall(VecDestroy(&appctx.SEMop.mass));
  PetscCall(VecDestroy(&appctx.dat.curr_sol));
  PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(TSDestroy(&appctx.ts));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the initial conditions for the Tao optimization solve (these are also the initial conditions for the first TSSolve()).

                       The routine TrueSolution() computes the initial conditions that actually generate the target profile, i.e. the solution the optimization should recover.

   Input Parameter:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i, xs, xn;

  PetscFunctionBegin;
  PetscCall(DMDAVecGetArray(appctx->da, u, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  for (i = xs; i < xs + xn; i++) s[i] = 2.0 * appctx->param.mu * PETSC_PI * PetscSinScalar(PETSC_PI * xg[i]) / (2.0 + PetscCosScalar(PETSC_PI * xg[i])) + 0.25 * PetscExpReal(-4.0 * PetscPowReal(xg[i] - 2.0, 2.0));
  PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   TrueSolution() computes the initial conditions that actually generate the target profile used by the objective function (the "true" answer the optimization should recover).

             InitialConditions() computes the initial conditions for the beginning of the Tao iterations.

   Input Parameter:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode TrueSolution(Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i, xs, xn;

  PetscFunctionBegin;
  PetscCall(DMDAVecGetArray(appctx->da, u, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  for (i = xs; i < xs + xn; i++) s[i] = 2.0 * appctx->param.mu * PETSC_PI * PetscSinScalar(PETSC_PI * xg[i]) / (2.0 + PetscCosScalar(PETSC_PI * xg[i]));
  PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   Sets the desired profile at the final time

   Input Parameters:
   t - final time
   obj - vector storing the desired profile
   appctx - user-defined application context
*/
PetscErrorCode ComputeObjective(PetscReal t, Vec obj, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i, xs, xn;

  PetscFunctionBegin;
  PetscCall(DMDAVecGetArray(appctx->da, obj, &s));
  PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
  for (i = xs; i < xs + xn; i++) {
    s[i] = 2.0 * appctx->param.mu * PETSC_PI * PetscSinScalar(PETSC_PI * xg[i]) * PetscExpScalar(-PETSC_PI * PETSC_PI * t * appctx->param.mu) / (2.0 + PetscExpScalar(-PETSC_PI * PETSC_PI * t * appctx->param.mu) * PetscCosScalar(PETSC_PI * xg[i]));
  }
  PetscCall(DMDAVecRestoreArray(appctx->da, obj, &s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;

  PetscFunctionBegin;
  PetscCall(MatMult(appctx->SEMop.grad, globalin, globalout)); /* grad u */
  PetscCall(VecPointwiseMult(globalout, globalin, globalout)); /* u grad u */
  PetscCall(VecScale(globalout, -1.0));
  PetscCall(MatMultAdd(appctx->SEMop.keptstiff, globalin, globalout, globalout));
  PetscFunctionReturn(PETSC_SUCCESS);
}
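
/*
   In matrix-vector form, RHSFunction() above evaluates

       f(u) = K u - u .* (G u)

   where K (the "keptstiff" matrix) is the mass-scaled discretization of
   mu d^2/dx^2, G is the mass-scaled discretization of d/dx, and .* is the
   pointwise product computed by VecPointwiseMult().
*/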

/*
      K is the discretization of the Laplacian
      G is the discretization of the gradient

      Computes the Jacobian of   K u - diag(u) G u,   which is given by
              K - diag(u) G - diag(G u)
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;
  Vec     Gglobalin;

  PetscFunctionBegin;
  /*    A = diag(u) G */

  PetscCall(MatCopy(appctx->SEMop.grad, A, SAME_NONZERO_PATTERN));
  PetscCall(MatDiagonalScale(A, globalin, NULL));

  /*    A = A + diag(G u) */
  PetscCall(VecDuplicate(globalin, &Gglobalin));
  PetscCall(MatMult(appctx->SEMop.grad, globalin, Gglobalin));
  PetscCall(MatDiagonalSet(A, Gglobalin, ADD_VALUES));
  PetscCall(VecDestroy(&Gglobalin));

  /*    A = K - A */
  PetscCall(MatScale(A, -1.0));
  PetscCall(MatAXPY(A, 1.0, appctx->SEMop.keptstiff, SAME_NONZERO_PATTERN));
  PetscFunctionReturn(PETSC_SUCCESS);
}
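
/*
   The hand-coded Jacobian above can be compared against a finite-difference
   apply at runtime; a suggested (optional) diagnostic is

       ./burgers_spectral -ts_rhs_jacobian_test_mult

   (see TSRHSJacobianTest()). The example itself does not rely on this.
*/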

/* --------------------------------------------------------------------- */

/*
   RHSMatrixLaplaciangllDM - User-provided routine to compute the right-hand-side
   matrix for the diffusion (Laplacian) term.

   Input Parameters:
   ts - the TS context (unused)
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built
*/
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  PetscReal   vv;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, xs, xn, l, j;
  PetscInt   *rowsDM;

  PetscFunctionBegin;
  /*
   Creates the element stiffness matrix for the given gll
   */
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  /* workaround for clang analyzer warning: Division by zero */
  PetscCheck(appctx->param.N > 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Spectral element order should be > 1");

  /* scale by the size of the element */
  for (i = 0; i < appctx->param.N; i++) {
    vv = -appctx->param.mu * 2.0 / appctx->param.Le;
    for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
  }

  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

  xs = xs / (appctx->param.N - 1);
  xn = xn / (appctx->param.N - 1);

  PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
  /*
   loop over local elements
   */
  for (j = xs; j < xs + xn; j++) {
    for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
    PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
  }
  PetscCall(PetscFree(rowsDM));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
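
  /*
     Scale the assembled matrix by the inverse of the diagonal (lumped) mass
     matrix, A <- M^{-1} A: the mass vector is temporarily replaced by its
     reciprocal for the diagonal scaling and then restored.
  */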
  PetscCall(VecReciprocal(appctx->SEMop.mass));
  PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
  PetscCall(VecReciprocal(appctx->SEMop.mass));

  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   RHSMatrixAdvectiongllDM - User-provided routine to compute the right-hand-side
   matrix for the advection equation.

   Input Parameters:
   ts - the TS context (unused)
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A - Jacobian matrix
   BB - optionally different preconditioning matrix
*/
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    xs, xn, l, j;
  PetscInt   *rowsDM;

  PetscFunctionBegin;
  /*
   Creates the advection matrix for the given gll
   */
  PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));

  PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));

  xs = xs / (appctx->param.N - 1);
  xn = xn / (appctx->param.N - 1);

  PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
  for (j = xs; j < xs + xn; j++) {
    for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
    PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
  }
  PetscCall(PetscFree(rowsDM));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
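  /* As in RHSMatrixLaplaciangllDM(), scale by the inverse lumped mass matrix: A <- M^{-1} A */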

  PetscCall(VecReciprocal(appctx->SEMop.mass));
  PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
  PetscCall(VecReciprocal(appctx->SEMop.mass));
  PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* ------------------------------------------------------------------ */
/*
   FormFunctionGradient - Evaluates the function and corresponding gradient.

   Input Parameters:
   tao - the Tao context
   IC  - the input vector
   ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradient()

   Output Parameters:
   f   - the newly evaluated function
   G   - the newly evaluated gradient

   Notes:

          The forward equation is
              M u_t = F(u)
          which is converted to
                u_t = M^{-1} F(u)
          in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is
                 M^{-1} J
          where J is the Jacobian of F. Now the adjoint equation is
                M v_t = J^T v
          but TSAdjoint does not solve this since it can only solve the transposed system for the
          Jacobian the user provided. Hence TSAdjoint solves
                 w_t = J^T M^{-1} w  (where w = M v)
          since there is no way to indicate the mass matrix as a separate entity to TS. Thus one
          must be careful in initializing the "adjoint equation" and using the result. This is
          why
              G = 2 M (u(T) - u_d)
          below (instead of 2 (u(T) - u_d)) and why the result is
              G = G/appctx->SEMop.mass (that is, G = M^{-1} w)
          below (instead of just the result of the "adjoint solve").
*/
PetscErrorCode FormFunctionGradient(Tao tao, Vec IC, PetscReal *f, Vec G, void *ctx)
{
  AppCtx            *appctx = (AppCtx *)ctx; /* user-defined application context */
  Vec                temp;
  PetscInt           its;
  PetscReal          ff, gnorm, cnorm, xdiff, errex;
  TaoConvergedReason reason;

  PetscFunctionBegin;
  PetscCall(TSSetTime(appctx->ts, 0.0));
  PetscCall(TSSetStepNumber(appctx->ts, 0));
  PetscCall(TSSetTimeStep(appctx->ts, appctx->initial_dt));
  PetscCall(VecCopy(IC, appctx->dat.curr_sol));

  PetscCall(TSSolve(appctx->ts, appctx->dat.curr_sol));

  PetscCall(VecWAXPY(G, -1.0, appctx->dat.curr_sol, appctx->dat.obj));

  /*
     Compute the L2-norm of the objective function, cost function is f
  */
  PetscCall(VecDuplicate(G, &temp));
  PetscCall(VecPointwiseMult(temp, G, G));
  PetscCall(VecDot(temp, appctx->SEMop.mass, f));

  /* local error evaluation */
  PetscCall(VecWAXPY(temp, -1.0, appctx->dat.ic, appctx->dat.true_solution));
  PetscCall(VecPointwiseMult(temp, temp, temp));
  /* for error evaluation */
  PetscCall(VecDot(temp, appctx->SEMop.mass, &errex));
  PetscCall(VecDestroy(&temp));
  errex = PetscSqrtReal(errex);

  /*
     Compute initial conditions for the adjoint integration. See Notes above
  */

  PetscCall(VecScale(G, -2.0));
  PetscCall(VecPointwiseMult(G, G, appctx->SEMop.mass));
  PetscCall(TSSetCostGradients(appctx->ts, 1, &G, NULL));
  PetscCall(TSAdjointSolve(appctx->ts));
  PetscCall(VecPointwiseDivide(G, G, appctx->SEMop.mass));

  PetscCall(TaoGetSolutionStatus(tao, &its, &ff, &gnorm, &cnorm, &xdiff, &reason));
  PetscFunctionReturn(PETSC_SUCCESS);
}
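
/*
   The gradient produced by FormFunctionGradient() can be verified independently
   against a finite-difference gradient with the runtime option -tao_test_gradient
   (a suggested diagnostic only; the example does not depend on it).
*/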

PetscErrorCode MonitorError(Tao tao, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx;
  Vec       temp;
  PetscReal nrm;

  PetscFunctionBegin;
  PetscCall(VecDuplicate(appctx->dat.ic, &temp));
  PetscCall(VecWAXPY(temp, -1.0, appctx->dat.ic, appctx->dat.true_solution));
  PetscCall(VecPointwiseMult(temp, temp, temp));
  PetscCall(VecDot(temp, appctx->SEMop.mass, &nrm));
  PetscCall(VecDestroy(&temp));
  nrm = PetscSqrtReal(nrm);
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error for initial conditions %g\n", (double)nrm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

    build:
      requires: !complex

    test:
      args: -tao_max_it 5 -tao_gatol 1.e-4
      requires: !single

    test:
      suffix: 2
      nsize: 2
      args: -tao_max_it 5 -tao_gatol 1.e-4
      requires: !single

TEST*/