Actual source code: ex50.c
2: static char help[] = "Solves the one-dimensional Burgers equation and compares with the exact solution\n\n";
4: /*
6: Not yet tested in parallel
8: */
10: /* ------------------------------------------------------------------------
12: This program uses the one-dimensional Burgers equation
13: u_t = mu*u_xx - u u_x,
14: on the domain 0 <= x <= L (L = 4 by default), with periodic boundary conditions
16: The operators are discretized with the spectral element method
18: See the paper "PDE-constrained Optimization with Spectral Elements using PETSc and TAO"
19: by Oana Marin, Emil Constantinescu, and Barry Smith for details on the exact solution used
22: See src/tao/unconstrained/tutorials/burgers_spectral.c
24: ------------------------------------------------------------------------- */
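/*
   For reference, the exact solution implemented in TrueSolution() below is

      u(x,t) = 2 mu pi sin(pi x) exp(-mu pi^2 t) / (2 + cos(pi x) exp(-mu pi^2 t)),

   the classical Cole-Hopf solution obtained from phi(x,t) = 2 + cos(pi x) exp(-mu pi^2 t)
   via u = -2 mu phi_x / phi; see the paper cited above for details.
*/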
26: #include <petscts.h>
27: #include <petscdt.h>
28: #include <petscdraw.h>
29: #include <petscdmda.h>
31: /*
32: User-defined application context - contains data needed by the
33: application-provided call-back routines.
34: */
36: typedef struct {
37: PetscInt n; /* number of nodes */
38: PetscReal *nodes; /* GLL nodes */
39: PetscReal *weights; /* GLL weights */
40: } PetscGLL;
42: typedef struct {
43: PetscInt N; /* grid points per element */
44: PetscInt E; /* number of elements */
45: PetscReal tol_L2, tol_max; /* error norms */
46: PetscInt steps; /* number of timesteps */
47: PetscReal Tend; /* end time */
48: PetscReal mu; /* viscosity */
49: PetscReal L; /* total length of domain */
50: PetscReal Le; /* length of a single element (L/E) */
51: PetscReal Tadj;
52: } PetscParam;
54: typedef struct {
55: Vec grid; /* total grid */
56: Vec curr_sol;
57: } PetscData;
59: typedef struct {
60: Vec grid; /* total grid */
61: Vec mass; /* mass matrix for total integration */
62: Mat stiff; /* stiffness matrix */
63: Mat keptstiff;
64: Mat grad;
65: PetscGLL gll;
66: } PetscSEMOperators;
68: typedef struct {
69: DM da; /* distributed array data structure */
70: PetscSEMOperators SEMop;
71: PetscParam param;
72: PetscData dat;
73: TS ts;
74: PetscReal initial_dt;
75: } AppCtx;
77: /*
78: User-defined routines
79: */
80: extern PetscErrorCode RHSMatrixLaplaciangllDM(TS, PetscReal, Vec, Mat, Mat, void *);
81: extern PetscErrorCode RHSMatrixAdvectiongllDM(TS, PetscReal, Vec, Mat, Mat, void *);
82: extern PetscErrorCode TrueSolution(TS, PetscReal, Vec, AppCtx *);
83: extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
84: extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
86: int main(int argc, char **argv)
87: {
88: AppCtx appctx; /* user-defined application context */
89: PetscInt i, xs, xm, ind, j, lenglob;
90: PetscReal x, *wrk_ptr1, *wrk_ptr2;
91: MatNullSpace nsp;
92: PetscMPIInt size;
94: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
95: Initialize program and set problem parameters
96: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
97: PetscFunctionBeginUser;
100: PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
102: /*initialize parameters */
103: appctx.param.N = 10; /* number of GLL points per element (polynomial order + 1) */
104: appctx.param.E = 10; /* number of elements */
105: appctx.param.L = 4.0; /* length of the domain */
106: appctx.param.mu = 0.01; /* diffusion coefficient */
107: appctx.initial_dt = 5e-3;
108: appctx.param.steps = PETSC_MAX_INT;
109: appctx.param.Tend = 4;
111: PetscCall(PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL));
112: PetscCall(PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL));
113: PetscCall(PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL));
114: PetscCall(PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL));
115: appctx.param.Le = appctx.param.L / appctx.param.E;
117: PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
118: PetscCheck((appctx.param.E % size) == 0, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Number of elements must be divisible by number of processes");
120: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
121: Create GLL data structures
122: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
123: PetscCall(PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights));
124: PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
125: appctx.SEMop.gll.n = appctx.param.N;
126: lenglob = appctx.param.E * (appctx.param.N - 1);
128: /*
129: Create distributed array (DMDA) to manage parallel grid and vectors
130: and to set up the ghost point communication pattern. With periodic boundary conditions there are
131: E*(N-1) total grid values, spread equally among the processors (E must be divisible by the number of processes)
132: */
134: PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da));
135: PetscCall(DMSetFromOptions(appctx.da));
136: PetscCall(DMSetUp(appctx.da));
138: /*
139: Extract global and local vectors from DMDA; we use these to store the
140: approximate solution. Then duplicate these for remaining vectors that
141: have the same types.
142: */
144: PetscCall(DMCreateGlobalVector(appctx.da, &appctx.dat.curr_sol));
145: PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.grid));
146: PetscCall(VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.mass));
148: PetscCall(DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL));
149: PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
150: PetscCall(DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
152: /* Convert the local node range to a local element range and build the grid over the locally owned elements */
154: xs = xs / (appctx.param.N - 1);
155: xm = xm / (appctx.param.N - 1);
157: /*
158: Build total grid and mass over entire mesh (multi-elemental)
159: */
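/*
   Element e occupies the interval [e*Le, (e+1)*Le]; each GLL node xi in [-1,1] is mapped to
   the physical coordinate x = (Le/2)*(xi + 1) + Le*e and its quadrature weight is scaled by
   Le/2.  The last node of an element coincides with the first node of the next one, so only
   the first N-1 nodes of each element are stored; because the GLL weights are symmetric,
   adding the first weight twice at that shared point accounts for both elements.
*/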
161: for (i = xs; i < xs + xm; i++) {
162: for (j = 0; j < appctx.param.N - 1; j++) {
163: x = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
164: ind = i * (appctx.param.N - 1) + j;
165: wrk_ptr1[ind] = x;
166: wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
167: if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
168: }
169: }
170: PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1));
171: PetscCall(DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2));
173: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
174: Create matrix data structure; set matrix evaluation routine.
175: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
176: PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE));
177: PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.stiff));
178: PetscCall(DMCreateMatrix(appctx.da, &appctx.SEMop.grad));
179: /*
180: Assemble the (time-independent) Laplacian and gradient matrices used to build the
181: right-hand side and its Jacobian. The TS argument is not used by these routines,
182: so they can be called before the TS is created.
183: */
184: PetscCall(RHSMatrixLaplaciangllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx));
185: PetscCall(RHSMatrixAdvectiongllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.grad, appctx.SEMop.grad, &appctx));
186: /*
187: Keep an unmodified copy of the stiffness matrix; RHSJacobian() overwrites the
188: Jacobian matrix it is given, so the original Laplacian must be preserved.
189: */
192: PetscCall(MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff));
194: /* attach the constant null space to the matrix (with periodic boundary conditions the Laplacian annihilates constants); this probably is not needed but does no harm */
195: PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
196: PetscCall(MatSetNullSpace(appctx.SEMop.stiff, nsp));
197: PetscCall(MatSetNullSpace(appctx.SEMop.keptstiff, nsp));
198: PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL));
199: PetscCall(MatNullSpaceDestroy(&nsp));
200: /* likewise attach the constant null space to the gradient matrix; this probably is not needed but does no harm */
201: PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp));
202: PetscCall(MatSetNullSpace(appctx.SEMop.grad, nsp));
203: PetscCall(MatNullSpaceTest(nsp, appctx.SEMop.grad, NULL));
204: PetscCall(MatNullSpaceDestroy(&nsp));
206: /* Create the TS solver that solves the ODE and its adjoint; set its options */
207: PetscCall(TSCreate(PETSC_COMM_WORLD, &appctx.ts));
208: PetscCall(TSSetProblemType(appctx.ts, TS_NONLINEAR));
209: PetscCall(TSSetType(appctx.ts, TSRK));
210: PetscCall(TSSetDM(appctx.ts, appctx.da));
211: PetscCall(TSSetTime(appctx.ts, 0.0));
212: PetscCall(TSSetTimeStep(appctx.ts, appctx.initial_dt));
213: PetscCall(TSSetMaxSteps(appctx.ts, appctx.param.steps));
214: PetscCall(TSSetMaxTime(appctx.ts, appctx.param.Tend));
215: PetscCall(TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP));
216: PetscCall(TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL));
217: PetscCall(TSSetSaveTrajectory(appctx.ts));
218: PetscCall(TSSetFromOptions(appctx.ts));
219: PetscCall(TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx));
220: PetscCall(TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, RHSJacobian, &appctx));
222: /* Set Initial conditions for the problem */
223: PetscCall(TrueSolution(appctx.ts, 0, appctx.dat.curr_sol, &appctx));
225: PetscCall(TSSetSolutionFunction(appctx.ts, (PetscErrorCode(*)(TS, PetscReal, Vec, void *))TrueSolution, &appctx));
226: PetscCall(TSSetTime(appctx.ts, 0.0));
227: PetscCall(TSSetStepNumber(appctx.ts, 0));
229: PetscCall(TSSolve(appctx.ts, appctx.dat.curr_sol));
231: PetscCall(MatDestroy(&appctx.SEMop.stiff));
232: PetscCall(MatDestroy(&appctx.SEMop.keptstiff));
233: PetscCall(MatDestroy(&appctx.SEMop.grad));
234: PetscCall(VecDestroy(&appctx.SEMop.grid));
235: PetscCall(VecDestroy(&appctx.SEMop.mass));
236: PetscCall(VecDestroy(&appctx.dat.curr_sol));
237: PetscCall(PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights));
238: PetscCall(DMDestroy(&appctx.da));
239: PetscCall(TSDestroy(&appctx.ts));
241: /*
242: Always call PetscFinalize() before exiting a program. This routine
243: - finalizes the PETSc libraries as well as MPI
244: - provides summary and diagnostic information if certain runtime
245: options are chosen (e.g., -log_view).
246: */
247: PetscCall(PetscFinalize());
248: return 0;
249: }
251: /*
252: TrueSolution() computes the exact solution of the PDE at time t; it provides the
253: initial condition and the reference solution used by TSSetSolutionFunction()
254: Input Parameters:
255: t - the current time
256: u - vector to hold the solution (global)
257: appctx - user-defined application context
258: Output Parameter:
259: u - vector with the exact solution at time t (global)
260: */
261: PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u, AppCtx *appctx)
262: {
263: PetscScalar *s;
264: const PetscScalar *xg;
265: PetscInt i, xs, xn;
267: PetscFunctionBeginUser;
268: PetscCall(DMDAVecGetArray(appctx->da, u, &s));
269: PetscCall(DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
270: PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
271: for (i = xs; i < xs + xn; i++) {
272: s[i] = 2.0 * appctx->param.mu * PETSC_PI * PetscSinScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t) / (2.0 + PetscCosScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t));
273: }
274: PetscCall(DMDAVecRestoreArray(appctx->da, u, &s));
275: PetscCall(DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg));
276: PetscFunctionReturn(PETSC_SUCCESS);
277: }
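/*
   RHSFunction - evaluates the discrete right-hand side

      f(u) = K u - u .* (G u)

   where K = SEMop.keptstiff is the mass-scaled stiffness (Laplacian) matrix, G = SEMop.grad
   the mass-scaled gradient matrix assembled in main(), and .* denotes pointwise multiplication.
*/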
279: PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
280: {
281: AppCtx *appctx = (AppCtx *)ctx;
283: PetscFunctionBeginUser;
284: PetscCall(MatMult(appctx->SEMop.grad, globalin, globalout)); /* grad u */
285: PetscCall(VecPointwiseMult(globalout, globalin, globalout)); /* u grad u */
286: PetscCall(VecScale(globalout, -1.0));
287: PetscCall(MatMultAdd(appctx->SEMop.keptstiff, globalin, globalout, globalout));
288: PetscFunctionReturn(PETSC_SUCCESS);
289: }
291: /*
293: K is the discretization of the Laplacian
294: G is the discretization of the gradient
296: Computes the Jacobian of K u - diag(u) G u, which is given by
297: K - diag(u) G - diag(G u)
298: */
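/*
   Writing the advection term as N(u) = diag(u) G u, the product rule gives
   dN/du = diag(u) G + diag(G u); the code below forms diag(u) G, adds diag(G u) to its
   diagonal, negates the result, and finally adds K.
*/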
299: PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx)
300: {
301: AppCtx *appctx = (AppCtx *)ctx;
302: Vec Gglobalin;
304: PetscFunctionBeginUser;
305: /* A = diag(u) G */
307: PetscCall(MatCopy(appctx->SEMop.grad, A, SAME_NONZERO_PATTERN));
308: PetscCall(MatDiagonalScale(A, globalin, NULL));
310: /* A = A + diag(Gu) */
311: PetscCall(VecDuplicate(globalin, &Gglobalin));
312: PetscCall(MatMult(appctx->SEMop.grad, globalin, Gglobalin));
313: PetscCall(MatDiagonalSet(A, Gglobalin, ADD_VALUES));
314: PetscCall(VecDestroy(&Gglobalin));
316: /* A = K - A */
317: PetscCall(MatScale(A, -1.0));
318: PetscCall(MatAXPY(A, 1.0, appctx->SEMop.keptstiff, SAME_NONZERO_PATTERN)); /* add K so that A = K - (diag(u) G + diag(G u)) */
319: PetscFunctionReturn(PETSC_SUCCESS);
320: }
322: /* --------------------------------------------------------------------- */
324: #include "petscblaslapack.h"
325: /*
326: Matrix free operation of 1d Laplacian and Grad for GLL spectral elements
327: */
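/*
   Both shell multiplies follow the same element-by-element pattern: scatter the global vector
   into a local (ghosted) vector, apply the dense N x N element operator to each element's
   nodes with a BLAS gemv call, assemble the element contributions back into the global vector
   with ADD_VALUES, and finally divide pointwise by the global mass vector.
*/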
328: PetscErrorCode MatMult_Laplacian(Mat A, Vec x, Vec y)
329: {
330: AppCtx *appctx;
331: PetscReal **temp, vv;
332: PetscInt i, j, xs, xn;
333: Vec xlocal, ylocal;
334: const PetscScalar *xl;
335: PetscScalar *yl;
336: PetscBLASInt _One = 1, n;
337: PetscScalar _DOne = 1;
339: PetscFunctionBeginUser;
340: PetscCall(MatShellGetContext(A, &appctx));
341: PetscCall(DMGetLocalVector(appctx->da, &xlocal));
342: PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
343: PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
344: PetscCall(DMGetLocalVector(appctx->da, &ylocal));
345: PetscCall(VecSet(ylocal, 0.0));
346: PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
347: for (i = 0; i < appctx->param.N; i++) {
348: vv = -appctx->param.mu * 2.0 / appctx->param.Le;
349: for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
350: }
351: PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
352: PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
353: PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
354: PetscCall(PetscBLASIntCast(appctx->param.N, &n));
355: for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
356: PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
357: PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
358: PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
359: PetscCall(VecSet(y, 0.0));
360: PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
361: PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
362: PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
363: PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
364: PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
365: PetscFunctionReturn(PETSC_SUCCESS);
366: }
368: PetscErrorCode MatMult_Advection(Mat A, Vec x, Vec y)
369: {
370: AppCtx *appctx;
371: PetscReal **temp;
372: PetscInt j, xs, xn;
373: Vec xlocal, ylocal;
374: const PetscScalar *xl;
375: PetscScalar *yl;
376: PetscBLASInt _One = 1, n;
377: PetscScalar _DOne = 1;
379: PetscFunctionBeginUser;
380: PetscCall(MatShellGetContext(A, &appctx));
381: PetscCall(DMGetLocalVector(appctx->da, &xlocal));
382: PetscCall(DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal));
383: PetscCall(DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal));
384: PetscCall(DMGetLocalVector(appctx->da, &ylocal));
385: PetscCall(VecSet(ylocal, 0.0));
386: PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
387: PetscCall(DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl));
388: PetscCall(DMDAVecGetArray(appctx->da, ylocal, &yl));
389: PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
390: PetscCall(PetscBLASIntCast(appctx->param.N, &n));
391: for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
392: PetscCall(DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl));
393: PetscCall(DMDAVecRestoreArray(appctx->da, ylocal, &yl));
394: PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
395: PetscCall(VecSet(y, 0.0));
396: PetscCall(DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y));
397: PetscCall(DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y));
398: PetscCall(DMRestoreLocalVector(appctx->da, &xlocal));
399: PetscCall(DMRestoreLocalVector(appctx->da, &ylocal));
400: PetscCall(VecPointwiseDivide(y, y, appctx->SEMop.mass));
401: PetscCall(VecScale(y, -1.0));
402: PetscFunctionReturn(PETSC_SUCCESS);
403: }
405: /*
406: RHSMatrixLaplacian - User-provided routine to compute the right-hand-side
407: matrix for the Laplacian operator
409: Input Parameters:
410: ts - the TS context
411: t - current time (ignored)
412: X - current solution (ignored)
413: ctx - optional user-defined context, as set by TSSetRHSJacobian()
415: Output Parameters:
416: A - Jacobian matrix
417: BB - optionally different matrix from which the preconditioner is built
420: */
421: PetscErrorCode RHSMatrixLaplaciangllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
422: {
423: PetscReal **temp;
424: PetscReal vv;
425: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
426: PetscInt i, xs, xn, l, j;
427: PetscInt *rowsDM;
428: PetscBool flg = PETSC_FALSE;
430: PetscFunctionBeginUser;
431: PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));
433: if (!flg) {
434: /*
435: Creates the element stiffness matrix for the given gll
436: */
437: PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
438: /* workaround for clang analyzer warning: Division by zero */
439: PetscCheck(appctx->param.N > 1, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Spectral element order should be > 1");
441: /* scale by the size of the element */
442: for (i = 0; i < appctx->param.N; i++) {
443: vv = -appctx->param.mu * 2.0 / appctx->param.Le;
444: for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
445: }
447: PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
448: PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
450: xs = xs / (appctx->param.N - 1);
451: xn = xn / (appctx->param.N - 1);
453: PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
454: /*
455: loop over local elements
456: */
457: for (j = xs; j < xs + xn; j++) {
458: for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
459: PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
460: }
461: PetscCall(PetscFree(rowsDM));
462: PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
463: PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
464: PetscCall(VecReciprocal(appctx->SEMop.mass));
465: PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
466: PetscCall(VecReciprocal(appctx->SEMop.mass));
468: PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
469: } else {
470: PetscCall(MatSetType(A, MATSHELL));
471: PetscCall(MatSetUp(A));
472: PetscCall(MatShellSetContext(A, appctx));
473: PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Laplacian));
474: }
475: PetscFunctionReturn(PETSC_SUCCESS);
476: }
478: /*
479: RHSMatrixAdvection - User-provided routine to compute the right-hand-side
480: matrix for the Advection (gradient) operator.
482: Input Parameters:
483: ts - the TS context
484: t - current time (ignored)
485: X - current solution (ignored)
486: ctx - optional user-defined context, as set by TSSetRHSJacobian()
488: Output Parameters:
489: A - Jacobian matrix
490: BB - optionally different preconditioning matrix
493: */
494: PetscErrorCode RHSMatrixAdvectiongllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
495: {
496: PetscReal **temp;
497: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
498: PetscInt xs, xn, l, j;
499: PetscInt *rowsDM;
500: PetscBool flg = PETSC_FALSE;
502: PetscFunctionBeginUser;
503: PetscCall(PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL));
505: if (!flg) {
506: /*
507: Creates the advection matrix for the given gll
508: */
509: PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
510: PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
511: PetscCall(DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL));
512: xs = xs / (appctx->param.N - 1);
513: xn = xn / (appctx->param.N - 1);
515: PetscCall(PetscMalloc1(appctx->param.N, &rowsDM));
516: for (j = xs; j < xs + xn; j++) {
517: for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
518: PetscCall(MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES));
519: }
520: PetscCall(PetscFree(rowsDM));
521: PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
522: PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
524: PetscCall(VecReciprocal(appctx->SEMop.mass));
525: PetscCall(MatDiagonalScale(A, appctx->SEMop.mass, NULL));
526: PetscCall(VecReciprocal(appctx->SEMop.mass));
528: PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp));
529: } else {
530: PetscCall(MatSetType(A, MATSHELL));
531: PetscCall(MatSetUp(A));
532: PetscCall(MatShellSetContext(A, appctx));
533: PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Advection));
534: }
535: PetscFunctionReturn(PETSC_SUCCESS);
536: }
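/*
   Example run, using the options exercised by the tests below (the executable name and path
   depend on how the example is built):

       ./ex50 -ts_view -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error
*/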
538: /*TEST
540: build:
541: requires: !complex
543: test:
544: suffix: 1
545: requires: !single
547: test:
548: suffix: 2
549: nsize: 5
550: requires: !single
552: test:
553: suffix: 3
554: requires: !single
555: args: -ts_view -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error
557: test:
558: suffix: 4
559: requires: !single
560: args: -ts_view -ts_type beuler -pc_type none -ts_max_steps 5 -ts_monitor_error
562: TEST*/