Actual source code: biharmonic3.c


static char help[] = "Solves the biharmonic equation in 1d.\n";

/*
  Solves the biharmonic equation in split form

    w = -kappa \Delta u
    u_t =  \Delta w
    -1  <= u <= 1
    Periodic boundary conditions

Evolve the biharmonic heat equation with bounds:  (same as biharmonic)
---------------
./biharmonic3 -ts_monitor -snes_monitor -ts_monitor_draw_solution  -pc_type lu  -draw_pause .1 -snes_converged_reason -ts_type beuler  -da_refine 5 -draw_fields 1 -ts_dt 9.53674e-9

    w = -kappa \Delta u  + u^3  - u
    u_t =  \Delta w
    -1  <= u <= 1
    Periodic boundary conditions

Evolve the Cahn-Hilliard equation:
---------------
./biharmonic3 -ts_monitor -snes_monitor -ts_monitor_draw_solution  -pc_type lu  -draw_pause .1 -snes_converged_reason  -ts_type beuler    -da_refine 6  -draw_fields 1  -kappa .00001 -ts_dt 5.96046e-06 -cahn-hillard

*/
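
/*
   Eliminating w from the split form recovers the biharmonic heat equation
   u_t = -kappa \Delta^2 u (in 1d, u_t = -kappa u_xxxx); the Cahn-Hilliard run
   adds the double-well term u^3 - u to the definition of w.
*/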
#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>
#include <petscdraw.h>

/*
   User-defined routines
*/
extern PetscErrorCode FormFunction(TS, PetscReal, Vec, Vec, Vec, void *), FormInitialSolution(DM, Vec, PetscReal);
typedef struct {
  PetscBool cahnhillard;
  PetscReal kappa;
  PetscInt  energy;
  PetscReal tol;
  PetscReal theta;
  PetscReal theta_c;
} UserCtx;
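
/*
   Runtime options (meanings inferred from how these fields are used below):
     -kappa            coefficient in w = -kappa u_xx
     -cahn-hillard     add a free-energy term to the definition of w
     -energy           1: double well, 2: double obstacle, 3: logarithmic,
                       4: variable-mobility variant with M(u) = 1 - u^2
     -tol              regularization keeping the logarithmic terms bounded near u = +-1
     -theta, -theta_c  parameters of the logarithmic free energy
*/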

int main(int argc, char **argv)
{
  TS            ts;   /* timestepping solver */
  Vec           x, r; /* solution, residual vectors */
  Mat           J;    /* Jacobian matrix */
  PetscInt      steps, Mx;
  DM            da;
  MatFDColoring matfdcoloring;
  ISColoring    iscoloring;
  PetscReal     dt;
  PetscReal     vbounds[] = {-100000, 100000, -1.1, 1.1};
  SNES          snes;
  UserCtx       ctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  ctx.kappa = 1.0;
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-kappa", &ctx.kappa, NULL));
  ctx.cahnhillard = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-cahn-hillard", &ctx.cahnhillard, NULL));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 2, vbounds));
  PetscCall(PetscViewerDrawResize(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 600, 600));
  ctx.energy = 1;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-energy", &ctx.energy, NULL));
  ctx.tol = 1.0e-8;
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-tol", &ctx.tol, NULL));
  ctx.theta   = .001;
  ctx.theta_c = 1.0;
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-theta", &ctx.theta, NULL));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-theta_c", &ctx.theta_c, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, 10, 2, 2, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "Biharmonic heat equation: w = -kappa*u_xx"));
  PetscCall(DMDASetFieldName(da, 1, "Biharmonic heat equation: u"));
  PetscCall(DMDAGetInfo(da, 0, &Mx, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
  dt = 1.0 / (10. * ctx.kappa * Mx * Mx * Mx * Mx);
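  /* This default time step follows the explicit-stability scaling of the fourth-order
     operator, which grows like kappa*Mx^4; -ts_dt on the command line overrides it */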

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));
  PetscCall(VecDuplicate(x, &r));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetIFunction(ts, NULL, FormFunction, &ctx));
  PetscCall(TSSetMaxTime(ts, .02));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
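  /* FormFunction() supplies the residual in the implicit form F(t,x,xdot) = 0,
     so it can be used directly by the implicit backward Euler integrator selected below */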

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set Jacobian evaluation routine

     Set Jacobian matrix data structure and default Jacobian evaluation
     routine. User can override with:
     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
                (unless user explicitly sets preconditioner)
     -snes_mf_operator : form preconditioning matrix as set by the user,
                         but use matrix-free approx for Jacobian-vector
                         products within Newton-Krylov method

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSGetSNES(ts, &snes));
  PetscCall(SNESSetType(snes, SNESVINEWTONRSLS));
  PetscCall(DMCreateColoring(da, IS_COLORING_GLOBAL, &iscoloring));
  PetscCall(DMSetMatType(da, MATAIJ));
  PetscCall(DMCreateMatrix(da, &J));
  PetscCall(MatFDColoringCreate(J, iscoloring, &matfdcoloring));
  PetscCall(MatFDColoringSetFunction(matfdcoloring, (PetscErrorCode(*)(void))SNESTSFormFunction, ts));
  PetscCall(MatFDColoringSetFromOptions(matfdcoloring));
  PetscCall(MatFDColoringSetUp(J, iscoloring, matfdcoloring));
  PetscCall(ISColoringDestroy(&iscoloring));
  PetscCall(SNESSetJacobian(snes, J, J, SNESComputeJacobianDefaultColor, matfdcoloring));
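  /* No analytic Jacobian is coded: SNESComputeJacobianDefaultColor() fills J by finite
     differencing SNESTSFormFunction(), using the DMDA coloring so that many columns of J
     are obtained from each residual evaluation */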

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetType(ts, TSBEULER));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(FormInitialSolution(da, x, ctx.kappa));
  PetscCall(TSSetTimeStep(ts, dt));
  PetscCall(TSSetSolution(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the time-dependent system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));
  PetscCall(TSGetStepNumber(ts, &steps));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(MatDestroy(&J));
  PetscCall(MatFDColoringDestroy(&matfdcoloring));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&r));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}

typedef struct {
  PetscScalar w, u;
} Field;
/* ------------------------------------------------------------------- */
/*
   FormFunction - Evaluates the residual of the implicit form F(t, X, Xdot) = 0.

   Input Parameters:
.  ts - the TS context
.  ftime - the current time
.  X - input vector
.  Xdot - time derivative of the input vector
.  ptr - optional user-defined context, as set by TSSetIFunction()

   Output Parameter:
.  F - residual vector
 */
PetscErrorCode FormFunction(TS ts, PetscReal ftime, Vec X, Vec Xdot, Vec F, void *ptr)
{
  DM          da;
  PetscInt    i, Mx, xs, xm;
  PetscReal   hx, sx;
  PetscScalar r, l;
  Field      *x, *xdot, *f;
  Vec         localX, localXdot;
  UserCtx    *ctx = (UserCtx *)ptr;

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMGetLocalVector(da, &localX));
  PetscCall(DMGetLocalVector(da, &localXdot));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 1.0 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);

  /*
     Scatter ghost points to local vectors, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalBegin(da, Xdot, INSERT_VALUES, localXdot));
  PetscCall(DMGlobalToLocalEnd(da, Xdot, INSERT_VALUES, localXdot));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da, localX, &x));
  PetscCall(DMDAVecGetArrayRead(da, localXdot, &xdot));
  PetscCall(DMDAVecGetArray(da, F, &f));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
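  /*
     The residual has the implicit form expected by TSSetIFunction():
        f.w = w + kappa*u_xx + (a nonlinear term selected by -energy when -cahn-hillard is set)
        f.u = u_t - w_xx
     discretized with second-order centered differences; the ghost values of u and w needed
     by the stencil come from the local vectors scattered above. For -energy 4 the f.u
     equation is replaced below by a variable-mobility form.
  */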
  for (i = xs; i < xs + xm; i++) {
    f[i].w = x[i].w + ctx->kappa * (x[i - 1].u + x[i + 1].u - 2.0 * x[i].u) * sx;
    if (ctx->cahnhillard) {
      switch (ctx->energy) {
      case 1: /* double well */
        f[i].w += -x[i].u * x[i].u * x[i].u + x[i].u;
        break;
      case 2: /* double obstacle */
        f[i].w += x[i].u;
        break;
      case 3: /* logarithmic */
        if (x[i].u < -1.0 + 2.0 * ctx->tol) f[i].w += .5 * ctx->theta * (-PetscLogScalar(ctx->tol) + PetscLogScalar((1.0 - x[i].u) / 2.0)) + ctx->theta_c * x[i].u;
        else if (x[i].u > 1.0 - 2.0 * ctx->tol) f[i].w += .5 * ctx->theta * (-PetscLogScalar((1.0 + x[i].u) / 2.0) + PetscLogScalar(ctx->tol)) + ctx->theta_c * x[i].u;
        else f[i].w += .5 * ctx->theta * (-PetscLogScalar((1.0 + x[i].u) / 2.0) + PetscLogScalar((1.0 - x[i].u) / 2.0)) + ctx->theta_c * x[i].u;
        break;
      case 4: /* variable (degenerate) mobility; handled in the f[i].u equation below */
        break;
      }
    }
    f[i].u = xdot[i].u - (x[i - 1].w + x[i + 1].w - 2.0 * x[i].w) * sx;
    if (ctx->energy == 4) {
      f[i].u = xdot[i].u;
      /* approximation of \grad (M(u) \grad w), where M(u) = (1-u^2):
         r and l are centered approximations of M(u) w_x at i+1 and i-1,
         and their difference over 2*hx gives the divergence on a wide stencil */
      r = (1.0 - x[i + 1].u * x[i + 1].u) * (x[i + 2].w - x[i].w) * .5 / hx;
      l = (1.0 - x[i - 1].u * x[i - 1].u) * (x[i].w - x[i - 2].w) * .5 / hx;
      f[i].u -= (r - l) * .5 / hx;
      f[i].u += 2.0 * ctx->theta_c * x[i].u * (x[i + 1].u - x[i - 1].u) * (x[i + 1].u - x[i - 1].u) * .25 * sx - (ctx->theta - ctx->theta_c * (1 - x[i].u * x[i].u)) * (x[i + 1].u + x[i - 1].u - 2.0 * x[i].u) * sx;
    }
  }

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArrayRead(da, localXdot, &xdot));
  PetscCall(DMDAVecRestoreArrayRead(da, localX, &x));
  PetscCall(DMDAVecRestoreArray(da, F, &f));
  PetscCall(DMRestoreLocalVector(da, &localX));
  PetscCall(DMRestoreLocalVector(da, &localXdot));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------------------- */
PetscErrorCode FormInitialSolution(DM da, Vec X, PetscReal kappa)
{
  PetscInt  i, xs, xm, Mx, xgs, xgm;
  Field    *x;
  PetscReal hx, xx, r, sx;
  Vec       Xg;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 1.0 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);

  /*
     Create a local (ghosted) work vector and get a pointer to its data
  */
  PetscCall(DMCreateLocalVector(da, &Xg));
  PetscCall(DMDAVecGetArray(da, Xg, &x));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, NULL, NULL, &xm, NULL, NULL));
  PetscCall(DMDAGetGhostCorners(da, &xgs, NULL, NULL, &xgm, NULL, NULL));

  /*
     Compute the u function over the locally owned part of the grid, including ghost points
  */
  for (i = xgs; i < xgs + xgm; i++) {
    xx = i * hx;
    r  = PetscSqrtReal((xx - .5) * (xx - .5)); /* r = |xx - .5| */
    if (r < .125) x[i].u = 1.0;
    else x[i].u = -.50;
    /* fill in x[i].w so that valgrind doesn't detect use of uninitialized memory */
    x[i].w = 0;
  }
  for (i = xs; i < xs + xm; i++) x[i].w = -kappa * (x[i - 1].u + x[i + 1].u - 2.0 * x[i].u) * sx;
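  /* On the locally owned points, w is initialized consistently with the first equation
     of the split system, w = -kappa*u_xx, using the ghost values of u set above */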

  /*
     Restore vectors
  */
  PetscCall(DMDAVecRestoreArray(da, Xg, &x));

  /* Grab only the global part of the vector */
  PetscCall(VecSet(X, 0));
  PetscCall(DMLocalToGlobalBegin(da, Xg, ADD_VALUES, X));
  PetscCall(DMLocalToGlobalEnd(da, Xg, ADD_VALUES, X));
  PetscCall(VecDestroy(&Xg));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
     requires: !complex !single

   test:
     args: -ts_monitor -snes_monitor  -pc_type lu   -snes_converged_reason  -ts_type beuler  -da_refine 5 -ts_dt 9.53674e-9 -ts_max_steps 50
     requires: x

TEST*/