Actual source code: ex5.c


static char help[] = "Demonstrates Pattern Formation with Reaction-Diffusion Equations.\n";

/*F
     This example is taken from the book Numerical Solution of Time-Dependent Advection-Diffusion-Reaction Equations by
     W. Hundsdorfer and J.G. Verwer, page 21, Pattern Formation with Reaction-Diffusion Equations:
\begin{eqnarray*}
        u_t = D_1 (u_{xx} + u_{yy}) - u v^2 + \gamma(1 - u) \\
        v_t = D_2 (v_{xx} + v_{yy}) + u v^2 - (\gamma + \kappa)v
\end{eqnarray*}
    Unlike the book, this example uses periodic boundary conditions instead of Neumann boundary conditions
    (since they are easier to implement with finite differences).
F*/

/*
      Helpful runtime monitor options:
           -ts_monitor_draw_solution
           -draw_save -draw_save_movie

      Helpful runtime linear solver options:
           -pc_type mg -pc_mg_galerkin pmat -da_refine 1 -snes_monitor -ksp_monitor -ts_view  (note that these Jacobians are so well-conditioned that multigrid may not be the best solver)

      Point your browser to localhost:8080 to monitor the simulation:
           ./ex5 -ts_view_pre saws -stack_view saws -draw_save -draw_save_single_file -x_virtual -ts_monitor_draw_solution -saws_root .
*/

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS numerical (ODE) integrators.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h  - vectors
     petscmat.h    - matrices                 petscis.h   - index sets
     petscksp.h    - Krylov subspace methods  petscpc.h   - preconditioners
     petscviewer.h - viewers                  petscsnes.h - nonlinear solvers
*/
#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>
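
/*
   reaction_diffusion.h is not reproduced here.  Based on how Field, AppCtx,
   RHSFunction, and RHSJacobian are used below, a minimal sketch of what it is
   assumed to declare is:

     typedef struct {
       PetscScalar u, v;                  // the two species at one grid node
     } Field;

     typedef struct {
       PetscReal D1, D2, gamma, kappa;    // diffusion and reaction coefficients
       PetscBool aijpc;                   // use an AIJ matrix for the preconditioner
     } AppCtx;

     PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
     PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
*/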

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)(Mx);
  hy = 2.5 / (PetscReal)(My);

  /*
     Get pointers to actual vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
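  /*
     The v perturbation below is confined to the square [1, 1.5] x [1, 1.5], and
     u is chosen so that u + 2v = 1 everywhere; (u, v) = (1, 0) is the spatially
     uniform steady state of the reaction terms.
  */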
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
        u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore access to vector
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  TS     ts; /* ODE integrator */
  Vec    x;  /* solution */
  DM     da;
  AppCtx appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.aijpc = PETSC_FALSE;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
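  /* 65 x 65 doubly periodic grid, star stencil of width 1, and 2 degrees of
     freedom per node (u and v); the process layout is left to PETSC_DECIDE */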
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 65, 65, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create global vector from DMDA; this will be used to store the solution
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetType(ts, TSARKIMEX));
  PetscCall(TSARKIMEXSetFullyImplicit(ts, PETSC_TRUE));
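  /* with the fully implicit flag set, ARKIMEX treats the entire right-hand side
     implicitly instead of splitting it into stiff and non-stiff parts */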
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
  PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobian, &appctx));
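
  /*
     RHSFunction and RHSJacobian are implemented in reaction_diffusion.c and are
     not shown here.  For the PDE above, the core of RHSFunction is expected to
     be a 5-point star-stencil sweep over the locally owned grid; a minimal
     sketch (variable names assumed; sx = 1/hx^2, sy = 1/hy^2, u is a ghosted
     local array read from the input vector, f is written to the output) is:

       for (j = ys; j < ys + ym; j++) {
         for (i = xs; i < xs + xm; i++) {
           uc  = u[j][i].u;
           vc  = u[j][i].v;
           uxx = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
           uyy = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
           vxx = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
           vyy = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
           f[j][i].u = appctx->D1 * (uxx + uyy) - uc * vc * vc + appctx->gamma * (1.0 - uc);
           f[j][i].v = appctx->D2 * (vxx + vyy) + uc * vc * vc - (appctx->gamma + appctx->kappa) * vc;
         }
       }
  */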

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts, 2000.0));
  PetscCall(TSSetTimeStep(ts, .0001));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));
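  /* TSSetFromOptions() lets the command line override everything set above,
     e.g. (standard PETSc options, values illustrative):
         ./ex5 -ts_type rosw -ts_max_time 500 -ts_adapt_monitor */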

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}
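
/*
   Example build and run, assuming the usual PETSc tutorial makefile and an MPI
   launcher (option values are illustrative):
       make ex5
       mpiexec -n 4 ./ex5 -ts_max_time 500 -ts_monitor -ts_monitor_draw_solution
*/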

/*TEST

   build:
     depends: reaction_diffusion.c

   test:
      args: -ts_view -ts_monitor -ts_max_time 500
      requires: double
      timeoutfactor: 3

   test:
      suffix: 2
      args: -ts_view -ts_monitor -ts_max_time 500 -ts_monitor_draw_solution
      requires: x double
      output_file: output/ex5_1.out
      timeoutfactor: 3

TEST*/