Actual source code: ex3.c


static char help[] = "Model Equations for Advection-Diffusion\n";

/*
    Page 9, Section 1.2 Model Equations for Advection-Diffusion

          u_t = a u_x + d u_xx

   The initial conditions used here differ from those in the book.

*/

/*
     Helpful runtime linear solver options:
           -pc_type mg -da_refine 2 -snes_monitor -ksp_monitor -ts_view   (geometric multigrid with three levels)

*/
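
/*
     The problem parameters can also be set at runtime.  A sketch of typical
     usage (the executable name ex3 is an assumption from the file name):
           ./ex3 -a 1.0 -d 0.01 -upwind 0 -ts_monitor
     Here -upwind 0 selects centered differencing for the advection term.
*/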

/*
   Include "petscts.h" so that we can use TS solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h - vectors
     petscmat.h    - matrices                petscis.h  - index sets
     petscksp.h    - Krylov subspace methods (the linear solvers)
     petscviewer.h - viewers                 petscpc.h  - preconditioners
     petscsnes.h   - nonlinear solvers
*/

#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  PetscScalar a, d; /* advection and diffusion strength */
  PetscBool   upwind;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode InitialConditions(TS, Vec, AppCtx *);
extern PetscErrorCode RHSMatrixHeat(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode Solution(TS, PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx    appctx; /* user-defined application context */
  TS        ts;     /* timestepping context */
  Vec       U;      /* approximate solution vector */
  PetscReal dt;
  DM        da;
  PetscInt  M;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  appctx.a = 1.0;
  appctx.d = 0.0;
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-a", &appctx.a, NULL));
  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-d", &appctx.d, NULL));
  appctx.upwind = PETSC_TRUE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-upwind", &appctx.upwind, NULL));

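  /*
     Create a 1-D distributed array (DMDA) to manage the parallel grid:
     periodic boundary, 60 grid points, 1 degree of freedom per point,
     stencil width 1.
  */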
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, 60, 1, 1, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create vector data structures for approximate and exact solutions
  */
  PetscCall(DMCreateGlobalVector(da, &U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));

  /*
     For linear problems with a time-dependent f(u,t) in the equation
     u_t = f(u,t), the user provides the discretized right-hand side
     as a time-dependent matrix.
  */
  PetscCall(TSSetRHSFunction(ts, NULL, TSComputeRHSFunctionLinear, &appctx));
  PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSMatrixHeat, &appctx));
  PetscCall(TSSetSolutionFunction(ts, (PetscErrorCode(*)(TS, PetscReal, Vec, void *))Solution, &appctx));
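  /*
     TSComputeRHSFunctionLinear() applies the matrix set with
     TSSetRHSJacobian() to the solution vector, and the exact solution
     registered with TSSetSolutionFunction() lets TS monitors report the
     difference between the computed and exact solutions.
  */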

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &M, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  dt = .48 / (M * M);
  PetscCall(TSSetTimeStep(ts, dt));
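  /*
     Note: dt = .48/M^2 = .48*h^2 is a conservative initial step; it satisfies
     the forward-Euler stability bound dt <= h^2/(2*d) for the diffusive term
     whenever d <= 1 (a rule-of-thumb sketch, not a sharp bound for the
     integrator actually selected at runtime).
  */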
  PetscCall(TSSetMaxSteps(ts, 1000));
  PetscCall(TSSetMaxTime(ts, 100.0));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetType(ts, TSARKIMEX));
  PetscCall(TSSetFromOptions(ts));

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(ts, U, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&U));
  PetscCall(DMDestroy(&da));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   ts - the timestepping context
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(TS ts, Vec U, AppCtx *appctx)
{
  PetscScalar *u, h;
  PetscInt     i, mstart, mend, xm, M;
  DM           da;

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetCorners(da, &mstart, 0, 0, &xm, 0, 0));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &M, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  h    = 1.0 / M;
  mend = mstart + xm;
  /*
    Get a pointer to vector data.
    - For default PETSc vectors, DMDAVecGetArray() returns a pointer to
      the data array, indexed by global grid point.  Otherwise, the routine
      is implementation dependent.
    - You MUST call DMDAVecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface differs from the C version.
      See the users manual for details.
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
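  /* Initial profile: u(x,0) = sin(6*pi*x) + 3*sin(2*pi*x), with x = i*h */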
  for (i = mstart; i < mend; i++) u[i] = PetscSinScalar(PETSC_PI * i * 6. * h) + 3. * PetscSinScalar(PETSC_PI * i * 2. * h);

  /*
     Restore vector
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   Solution - Computes the exact solution at a given time.

   Input Parameters:
   ts - the timestepping context
   t - current time
   U - vector in which the exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   U - vector with the newly computed exact solution
*/
PetscErrorCode Solution(TS ts, PetscReal t, Vec U, AppCtx *appctx)
{
  PetscScalar *u, ex1, ex2, sc1, sc2, h;
  PetscInt     i, mstart, mend, xm, M;
  DM           da;

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetCorners(da, &mstart, 0, 0, &xm, 0, 0));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &M, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  h    = 1.0 / M;
  mend = mstart + xm;
  /*
     Get a pointer to vector data.
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  ex1 = PetscExpScalar(-36. * PETSC_PI * PETSC_PI * appctx->d * t);
  ex2 = PetscExpScalar(-4. * PETSC_PI * PETSC_PI * appctx->d * t);
  sc1 = PETSC_PI * 6. * h;
  sc2 = PETSC_PI * 2. * h;
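  /* Exact solution: u(x,t) = exp(-36*pi^2*d*t) * sin(6*pi*(x + a*t))
                            + 3 * exp(-4*pi^2*d*t) * sin(2*pi*(x + a*t)), x = i*h */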
  for (i = mstart; i < mend; i++) u[i] = PetscSinScalar(sc1 * (PetscReal)i + appctx->a * PETSC_PI * 6. * t) * ex1 + 3. * PetscSinScalar(sc2 * (PetscReal)i + appctx->a * PETSC_PI * 2. * t) * ex2;

  /*
     Restore vector
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the advection-diffusion equation.

   Input Parameters:
   ts - the TS context
   t - current time
   U - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

   Notes:
   Recall that MatSetValues() uses 0-based row and column numbers
   in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts, PetscReal t, Vec U, Mat AA, Mat BB, void *ctx)
{
  Mat         A      = AA;            /* Jacobian matrix */
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    mstart, mend;
  PetscInt    i, idx[3], M, xm;
  PetscScalar v[3], h;
  DM          da;

  PetscFunctionBeginUser;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetInfo(da, 0, &M, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  PetscCall(DMDAGetCorners(da, &mstart, 0, 0, &xm, 0, 0));
  h    = 1.0 / M;
  mend = mstart + xm;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Set matrix rows corresponding to the periodic wrap-around points
  */

  /* diffusion */
  v[0] = appctx->d / (h * h);
  v[1] = -2.0 * appctx->d / (h * h);
  v[2] = appctx->d / (h * h);
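  /* standard second-difference stencil: d*(u[i-1] - 2*u[i] + u[i+1])/h^2 */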
  if (!mstart) {
    idx[0] = M - 1;
    idx[1] = 0;
    idx[2] = 1;
    PetscCall(MatSetValues(A, 1, &mstart, 3, idx, v, INSERT_VALUES));
    mstart++;
  }

  if (mend == M) {
    mend--;
    idx[0] = M - 2;
    idx[1] = M - 1;
    idx[2] = 0;
    PetscCall(MatSetValues(A, 1, &mend, 3, idx, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    PetscCall(MatSetValues(A, 1, &i, 3, idx, v, INSERT_VALUES));
  }
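  /*
     A flush assembly is needed here because the insertion mode switches from
     INSERT_VALUES (diffusion entries above) to ADD_VALUES (advection entries
     below); PETSc does not allow mixing the two modes without it.
  */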
  PetscCall(MatAssemblyBegin(A, MAT_FLUSH_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FLUSH_ASSEMBLY));

  PetscCall(DMDAGetCorners(da, &mstart, 0, 0, &xm, 0, 0));
  mend = mstart + xm;
  if (!appctx->upwind) {
    /* advection -- centered differencing */
    v[0] = -.5 * appctx->a / (h);
    v[1] = .5 * appctx->a / (h);
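    /* the two entries produce a*(u[i+1] - u[i-1])/(2*h) in each row */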
    if (!mstart) {
      idx[0] = M - 1;
      idx[1] = 1;
      PetscCall(MatSetValues(A, 1, &mstart, 2, idx, v, ADD_VALUES));
      mstart++;
    }

    if (mend == M) {
      mend--;
      idx[0] = M - 2;
      idx[1] = 0;
      PetscCall(MatSetValues(A, 1, &mend, 2, idx, v, ADD_VALUES));
    }

    for (i = mstart; i < mend; i++) {
      idx[0] = i - 1;
      idx[1] = i + 1;
      PetscCall(MatSetValues(A, 1, &i, 2, idx, v, ADD_VALUES));
    }
  } else {
    /* advection -- upwinding */
    v[0] = -appctx->a / (h);
    v[1] = appctx->a / (h);
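    /* one-sided difference a*(u[i+1] - u[i])/h: for a > 0 the exact solution
       is u0(x + a*t), so information travels in from the right and the
       forward difference is the upwind side */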
    if (!mstart) {
      idx[0] = 0;
      idx[1] = 1;
      PetscCall(MatSetValues(A, 1, &mstart, 2, idx, v, ADD_VALUES));
      mstart++;
    }

    if (mend == M) {
      mend--;
      idx[0] = M - 1;
      idx[1] = 0;
      PetscCall(MatSetValues(A, 1, &mend, 2, idx, v, ADD_VALUES));
    }

    for (i = mstart; i < mend; i++) {
      idx[0] = i;
      idx[1] = i + 1;
      PetscCall(MatSetValues(A, 1, &i, 2, idx, v, ADD_VALUES));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   test:
      args: -pc_type mg -da_refine 2 -ts_view -ts_monitor -ts_max_time .3 -mg_levels_ksp_max_it 3
      requires: double
      filter: grep -v "total number of"

   test:
      suffix: 2
      args: -pc_type mg -da_refine 2 -ts_view -ts_monitor_draw_solution -ts_monitor -ts_max_time .3 -mg_levels_ksp_max_it 3
      requires: x double
      output_file: output/ex3_1.out
      filter: grep -v "total number of"

TEST*/