Actual source code: ex2.c
static char help[] = "Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox   : Deactivate x-window graphics\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

   on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
   and initial condition
         u(0,x) = 1 + x*x.

   The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

   Note that since the solution is linear in time and quadratic in x,
   the finite difference scheme actually computes the "exact" solution.
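
   (A quick check, added here for clarity: for this u we have u_t = 1 + x*x,
   while u*u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t) * 2*(1 + t) / (2*(t+1)^2)
   = 1 + x*x, so the PDE, boundary, and initial conditions are all satisfied.)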

   We use by default the backward Euler method.

  ------------------------------------------------------------------------- */

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx    appctx;                 /* user-defined application context */
  TS        ts;                     /* timestepping context */
  Mat       A;                      /* Jacobian matrix data structure */
  Vec       u;                      /* approximate solution vector */
  PetscInt  time_steps_max = 100;   /* default max timesteps */
  PetscReal dt;
  PetscReal time_total_max = 100.0; /* default max total time */
  PetscBool mymonitor      = PETSC_FALSE;
  PetscReal bounds[]       = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));

  appctx.h = 1.0 / (appctx.m - 1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are M
     total grid values spread equally among all the processors.
  */
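  /*
     Note (added comment): in DMDACreate1d() below, the two "1" arguments are
     the number of degrees of freedom per grid point and the stencil width; a
     stencil width of 1 gives each process one ghost point on each side of its
     owned range, which is what RHSFunction() and RHSJacobian() rely on when
     they reference localptr[i-1] and localptr[i+1].
  */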
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
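  /*
     Added note: this only sets the initial timestep; it can be overridden at
     runtime with -ts_dt (as done in the tests at the end of this file).
  */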
  dt = appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetType(ts, TSBEULER));
  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
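  /* Added note: TS_EXACTFINALTIME_STEPOVER lets the final step run past the
     requested end time rather than shrinking it to land on it exactly. */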
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&u));
  PetscCall(MatDestroy(&A));
  PetscCall(DMDestroy(&appctx.da));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i; /* current location in global grid */
    u_localptr[i - mybase] = 1.0 + x * x;
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i;
    s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts   - the timestep context
   step - the count of the current step (with 0 meaning the
          initial condition)
   time - the current time
   u    - the solution at this timestep
   ctx  - the user-provided context for this monitoring routine.
          In this case we use the application context which contains
          information about the problem size, workspace and the exact
          solution.
*/
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal en2, en2s, enmax;
  PetscDraw draw;

  PetscFunctionBeginUser;
  /*
     We use the default X Windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator.  This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes the graphics look better.
  */
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep
  */
  PetscCall(ExactSolution(time, appctx->solution, appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution, -1.0, u));
  PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
  en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by sqrt(h) so it approximates the continuous L2 norm */
  PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts        - timestepping context
   t         - current time
   global_in - vector containing the current iterate
   ctx       - (optional) user-provided context for function evaluation.
               In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
{
  AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
  DM                 da        = appctx->da;        /* distributed array */
  Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec                localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt           i, localsize;
  PetscMPIInt        rank, size;
  PetscScalar       *copyptr, sc;
  const PetscScalar *localptr;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Access directly the values in our local INPUT work array
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Access directly the values in our local OUTPUT work array
  */
  PetscCall(VecGetArray(localwork, &copyptr));

  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

  /*
     Evaluate our function on the nodes owned by this processor
  */
  PetscCall(VecGetLocalSize(local_in, &localsize));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
  PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size - 1) copyptr[localsize - 1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
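  /*
     Added for clarity: with sc = 1/(2*h^2*(1+t)^2), the line below is the
     semi-discrete form of the PDE at an interior node,
        du_i/dt = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2).
     The index i runs over the ghosted local array, so i = 1 .. localsize-2
     covers the locally owned points other than the physical boundary entries
     set above, and localptr[i-1], localptr[i+1] may be ghost values supplied
     by the scatter above.
  */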
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);

  /*
     Restore vectors
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));
  PetscCall(VecRestoreArray(localwork, &copyptr));

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
  PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "RHS function vector\n"));
    PetscCall(VecView(global_out, PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global row and columns of matrix entries when
      using MatSetValues().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
{
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;
  const PetscScalar *localptr;
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
  mstart = mstarts;
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
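  /*
     Added for clarity: the RHS at the boundary nodes is constant in time
     (u_t = 1 and u_t = 2) and does not depend on u, so the corresponding
     Jacobian rows are zero; an explicit 0.0 is still inserted on the diagonal
     so those rows have a diagonal entry in the assembled matrix.
  */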
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
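  /*
     Added for clarity: row i of the Jacobian of F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i)
     has the three nonzero entries
        dF_i/du_{i-1} = sc*u_i,
        dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i),
        dF_i/du_{i+1} = sc*u_i,
     which are exactly the values v[0], v[1], v[2] set in the loop below.
  */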
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
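    /* Added comment: "is" converts the global row index i into an index into the
       ghosted local array; the +1 accounts for either the left ghost point or,
       on the first process, for the boundary row skipped by mstart++ above. */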
    is     = i - mstart + 1;
    v[0]   = sc * localptr[is];
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

    test:
      args: -nox -ts_dt 10 -mymonitor
      nsize: 2
      requires: !single

    test:
      suffix: tut_1
      nsize: 1
      args: -ts_max_steps 10 -ts_monitor

    test:
      suffix: tut_2
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

    test:
      suffix: tut_3
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -M 128

TEST*/