Actual source code: ex4.c
static char help[] = "Chemo-taxis Problems from Mathematical Biology.\n";

/*
   Page 18, Chemo-taxis Problems from Mathematical Biology

   The equations solved, written as they are encoded in IFunction() below, are

      rho_t = epsilon rho_xx - kappa (c_x rho)_x + mu |rho| (1 - rho) max(0, c - cstar) - beta rho
      c_t   = delta c_xx - lambda c - alpha rho c / (gamma + c)

   Further discussion on Page 134 and in 2d on Page 409
*/

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS solvers.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods (linear solvers)
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/

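/*
   Example command line (options taken from the TEST block at the end of this file;
   the executable name ex4 is assumed):

       ./ex4 -pc_type lu -da_refine 2 -ts_view -ts_monitor -ts_max_time 1
*/
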
#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

/* the two unknowns stored at each grid point */
typedef struct {
  PetscScalar rho, c;
} Field;

/* model parameters; defaults are set in main() */
typedef struct {
  PetscScalar epsilon, delta, alpha, beta, gamma, kappa, lambda, mu, cstar;
  PetscBool   upwind;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode IFunction(TS, PetscReal, Vec, Vec, Vec, void *), InitialConditions(DM, Vec);

int main(int argc, char **argv)
{
  TS     ts; /* timestepping solver */
  Vec    U;  /* solution vector */
  DM     da;
  AppCtx appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  appctx.epsilon = 1.0e-3;
  appctx.delta   = 1.0;
  appctx.alpha   = 10.0;
  appctx.beta    = 4.0;
  appctx.gamma   = 1.0;
  appctx.kappa   = .75;
  appctx.lambda  = 1.0;
  appctx.mu      = 100.;
  appctx.cstar   = .2;
  appctx.upwind  = PETSC_TRUE;

  PetscCall(PetscOptionsGetScalar(NULL, NULL, "-delta", &appctx.delta, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-upwind", &appctx.upwind, NULL));
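  /*
     Only delta and upwind are read from the options database here.  The other
     model parameters could be exposed the same way, e.g. (hypothetical option
     name, not part of the original example):

       PetscCall(PetscOptionsGetScalar(NULL, NULL, "-epsilon", &appctx.epsilon, NULL));
  */
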
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
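  /* 1d grid with 8 global points (refinable at run time with -da_refine), 2 degrees of
     freedom per point (rho and c, matching the Field struct), stencil width 1 for the
     nearest-neighbor differences, and default ownership ranges (NULL) */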
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, 8, 2, 1, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "rho"));
  PetscCall(DMDASetFieldName(da, 1, "c"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract the global solution vector from the DMDA
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetType(ts, TSROSW));
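  /* TSROSW is a linearly implicit Rosenbrock-W integrator; another integrator can be
     selected at run time with -ts_type */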
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetIFunction(ts, NULL, IFunction, &appctx));
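  /* No IJacobian is supplied; since a DM is attached, TS is expected to fall back to a
     finite-difference Jacobian computed with coloring.  An analytic Jacobian could be
     provided with TSSetIJacobian() instead. */
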
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, U));
  PetscCall(TSSetSolution(ts, U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetTimeStep(ts, .0001));
  PetscCall(TSSetMaxTime(ts, 1.0));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
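  /* STEPOVER lets the last step run past the final time instead of shortening it;
     TS_EXACTFINALTIME_MATCHSTEP or TS_EXACTFINALTIME_INTERPOLATE would deliver the
     solution at the final time exactly */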
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, U));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&U));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   IFunction - Evaluates the implicit function F(t,U,Udot) that defines the system.

   Input Parameters:
.  ts - the TS context
.  ftime - the current time
.  U - input vector
.  Udot - time derivative of the input vector
.  ptr - optional user-defined context, as set by TSSetIFunction()

   Output Parameter:
.  F - function vector
*/
PetscErrorCode IFunction(TS ts, PetscReal ftime, Vec U, Vec Udot, Vec F, void *ptr)
{
  AppCtx     *appctx = (AppCtx *)ptr;
  DM          da;
  PetscInt    i, Mx, xs, xm;
  PetscReal   hx, sx;
  PetscScalar rho, c, rhoxx, cxx, cx, rhox, kcxrhox;
  Field      *u, *f, *udot;
  Vec         localU;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 1.0 / (PetscReal)(Mx - 1); /* uniform grid spacing on [0,1] */
  sx = 1.0 / (hx * hx);           /* scaling of the second-difference stencil */

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArrayRead(da, Udot, &udot));
  PetscCall(DMDAVecGetArrayWrite(da, F, &f));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));

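  /*
     Dirichlet boundary conditions: on the physical boundary rows the residual is simply
     udot, which freezes the boundary values at their initial data (rho = 0, c = 1 on the
     left; rho = 1, c = 0 on the right).  The commented-out expressions below show the
     equivalent algebraic form of the same conditions.  The loop bounds xs/xm are shrunk
     so the interior loop does not overwrite these rows.
  */
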
  if (!xs) {
    f[0].rho = udot[0].rho; /* u[0].rho - 0.0; */
    f[0].c   = udot[0].c;   /* u[0].c - 1.0;   */
    xs++;
    xm--;
  }
  if (xs + xm == Mx) {
    f[Mx - 1].rho = udot[Mx - 1].rho; /* u[Mx-1].rho - 1.0; */
    f[Mx - 1].c   = udot[Mx - 1].c;   /* u[Mx-1].c - 0.0;   */
    xm--;
  }

  /*
     Compute function over the locally owned part of the grid
  */
  for (i = xs; i < xs + xm; i++) {
    rho   = u[i].rho;
    rhoxx = (-2.0 * rho + u[i - 1].rho + u[i + 1].rho) * sx;
    c     = u[i].c;
    cxx   = (-2.0 * c + u[i - 1].c + u[i + 1].c) * sx;

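    /*
       Both branches discretize the chemotaxis term kappa*(c_x*rho)_x: the first with
       centered differences of the expanded product c_xx*rho + c_x*rho_x, the second
       (labeled upwind) with the flux form [(c_{i+1}-c_i)*rho_{i+1} - (c_i-c_{i-1})*rho_i]/hx^2.
    */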
    if (!appctx->upwind) {
      rhox    = .5 * (u[i + 1].rho - u[i - 1].rho) / hx;
      cx      = .5 * (u[i + 1].c - u[i - 1].c) / hx;
      kcxrhox = appctx->kappa * (cxx * rho + cx * rhox);
    } else {
      kcxrhox = appctx->kappa * ((u[i + 1].c - u[i].c) * u[i + 1].rho - (u[i].c - u[i - 1].c) * u[i].rho) * sx;
    }

    f[i].rho = udot[i].rho - appctx->epsilon * rhoxx + kcxrhox - appctx->mu * PetscAbsScalar(rho) * (1.0 - rho) * PetscMax(0, PetscRealPart(c - appctx->cstar)) + appctx->beta * rho;
    f[i].c   = udot[i].c - appctx->delta * cxx + appctx->lambda * c + appctx->alpha * rho * c / (appctx->gamma + c);
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMDAVecRestoreArrayRead(da, Udot, &udot));
  PetscCall(DMDAVecRestoreArrayWrite(da, F, &f));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, xs, xm, Mx;
  Field    *u;
  PetscReal hx, x;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 1.0 / (PetscReal)(Mx - 1);

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayWrite(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));

  /*
     Compute the initial conditions over the locally owned part of the grid
  */
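  /* rho starts at zero everywhere except for a unit value at the right end of the domain;
     c = cos(pi*x/2) decays smoothly from 1 at x = 0 to 0 at x = 1 */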
  for (i = xs; i < xs + xm; i++) {
    x = i * hx;
    if (i < Mx - 1) u[i].rho = 0.0;
    else u[i].rho = 1.0;
    u[i].c = PetscCosReal(.5 * PETSC_PI * x);
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayWrite(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   test:
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor -ts_max_time 1
      requires: double

   test:
      suffix: 2
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor_draw_solution -ts_monitor -ts_max_time 1
      requires: x double
      output_file: output/ex4_1.out

TEST*/