Actual source code: ex4.c
static char help[] = "Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
  -m <points>, where <points> = number of grid points\n\
  -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
  -debug              : Activate debugging printouts\n\
  -nox                : Deactivate x-window graphics\n\n";

/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpiexec -n <procs> ex4 -ts_type <timestepping solver>
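   For instance (illustrative invocations that use only standard TS runtime options):
       mpiexec -n 2 ex4 -ts_type beuler -ts_monitor
       mpiexec -n 2 ex4 -ts_type rk -ts_monitor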

   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

   The uniprocessor version of this code is ts/tutorials/ex3.c

  ------------------------------------------------------------------------- */

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs) to manage
   the parallel grid, and "petscts.h" so that we can use TS solvers.
   Note that these files automatically include:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods and linear solvers
     petscviewer.h - viewers
     petscpc.h     - preconditioners
     petscsnes.h   - nonlinear solvers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;             /* communicator */
  DM          da;               /* distributed array data structure */
  Vec         localwork;        /* local ghosted work vector */
  Vec         u_local;          /* local ghosted approximate solution vector */
  Vec         solution;         /* global exact solution vector */
  PetscInt    m;                /* total number of grid points */
  PetscReal   h;                /* mesh width h = 1/(m-1) */
  PetscBool   debug;            /* flag (1 indicates activation of debugging printouts) */
  PetscViewer viewer1, viewer2; /* viewers for the solution and error */
  PetscReal   norm_2, norm_max; /* error norms */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSMatrixHeat(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSFunctionHeat(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx        appctx;               /* user-defined application context */
  TS            ts;                   /* timestepping context */
  Mat           A;                    /* matrix data structure */
  Vec           u;                    /* approximate solution vector */
  PetscReal     time_total_max = 1.0; /* default max total time */
  PetscInt      time_steps_max = 100; /* default max timesteps */
  PetscDraw     draw;                 /* drawing context */
  PetscInt      steps, m;
  PetscMPIInt   size;
  PetscReal     dt, ftime;
  PetscBool     flg;
  TSProblemType tsproblem = TS_LINEAR;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  appctx.comm = PETSC_COMM_WORLD;

  m = 60;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
  appctx.m = m;
  appctx.h = 1.0 / (m - 1.0);
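  /* note: h = 1/(m-1) is the spacing of m equally spaced grid points on [0,1], endpoints included */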
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;

  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Solving a linear TS problem, number of processors = %d\n", size));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are m
     total grid values spread equally among all the processors.
  */

  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, m, 1, 1, NULL, &appctx.da));
  PetscCall(DMSetFromOptions(appctx.da));
  PetscCall(DMSetUp(appctx.da));

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  PetscCall(DMCreateGlobalVector(appctx.da, &u));
  PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
  PetscCall(VecDuplicate(u, &appctx.solution));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 80, 380, 400, 160, &appctx.viewer1));
  PetscCall(PetscViewerDrawGetDraw(appctx.viewer1, 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 80, 0, 400, 160, &appctx.viewer2));
  PetscCall(PetscViewerDrawGetDraw(appctx.viewer2, 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-nonlinear", &flg, NULL));
  /* record the problem type so the -nonlinear setup below is actually triggered */
  tsproblem = flg ? TS_NONLINEAR : TS_LINEAR;
  PetscCall(TSSetProblemType(ts, tsproblem));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m, m));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-time_dependent_rhs", &flg, NULL));
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand side
       as a time-dependent matrix.
    */
    PetscCall(TSSetRHSFunction(ts, NULL, TSComputeRHSFunctionLinear, &appctx));
    PetscCall(TSSetRHSJacobian(ts, A, A, RHSMatrixHeat, &appctx));
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user provides the discretized right-hand side
       as a matrix only once, and then sets the built-in evaluation routine
       TSComputeRHSJacobianConstant(), which simply reuses that matrix.
    */
    PetscCall(RHSMatrixHeat(ts, 0.0, u, A, A, &appctx));
    PetscCall(TSSetRHSFunction(ts, NULL, TSComputeRHSFunctionLinear, &appctx));
    PetscCall(TSSetRHSJacobian(ts, A, A, TSComputeRHSJacobianConstant, &appctx));
  }

  if (tsproblem == TS_NONLINEAR) {
    SNES snes;
    PetscCall(TSSetRHSFunction(ts, NULL, RHSFunctionHeat, &appctx));
    PetscCall(TSGetSNES(ts, &snes));
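    /* SNESComputeJacobianDefault approximates the Jacobian by finite differencing
       the RHS function, so no analytic Jacobian needs to be supplied here */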
    PetscCall(SNESSetJacobian(snes, NULL, NULL, SNESComputeJacobianDefault, NULL));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

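  /* The initial step h^2/2 matches the classical explicit (forward Euler) stability
     limit for this discretization of the heat equation; implicit integrators selected
     at runtime can safely take larger steps, e.g. via -ts_dt */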
  dt = appctx.h * appctx.h / 2.0;
  PetscCall(TSSetTimeStep(ts, dt));
  PetscCall(TSSetSolution(ts, u));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
         -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSSetMaxSteps(ts, time_steps_max));
  PetscCall(TSSetMaxTime(ts, time_total_max));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  PetscCall(InitialConditions(u, &appctx));

  /*
     Run the timestepping solver
  */
  PetscCall(TSSolve(ts, u));
  PetscCall(TSGetSolveTime(ts, &ftime));
  PetscCall(TSGetStepNumber(ts, &steps));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Total timesteps %" PetscInt_FMT ", Final time %g\n", steps, (double)ftime));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Avg. error (2 norm) = %g Avg. error (max norm) = %g\n", (double)(appctx.norm_2 / steps), (double)(appctx.norm_max / steps)));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(TSDestroy(&ts));
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&u));
  PetscCall(PetscViewerDestroy(&appctx.viewer1));
  PetscCall(PetscViewerDestroy(&appctx.viewer2));
  PetscCall(VecDestroy(&appctx.localwork));
  PetscCall(VecDestroy(&appctx.solution));
  PetscCall(VecDestroy(&appctx.u_local));
  PetscCall(DMDestroy(&appctx.da));

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting point of each processor's range of
     grid values.
  */
  PetscCall(VecGetOwnershipRange(u, &mybase, &myend));

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array. Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version. See the users manual for details.
  */
  PetscCall(VecGetArray(u, &u_localptr));

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations. Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
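  /* note: the grid coordinate of global index i is x_i = i*h, so this loop evaluates
     u(0,x_i) = sin(6*pi*x_i) + 3*sin(2*pi*x_i) on the locally owned entries */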
  for (i = mybase; i < myend; i++) u_localptr[i - mybase] = PetscSinScalar(PETSC_PI * i * 6. * h) + 3. * PetscSinScalar(PETSC_PI * i * 2. * h);

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(u, &u_localptr));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, ex1, ex2, sc1, sc2;
  PetscInt     i, mybase, myend;

  PetscFunctionBeginUser;
  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));

  /*
     Get a pointer to vector data.
  */
  PetscCall(VecGetArray(solution, &s_localptr));

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
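  /* ex1, ex2 are the temporal decay factors exp(-(6*pi)^2 t) and exp(-(2*pi)^2 t) of the two
     Fourier modes in the exact solution; sc1, sc2 are their spatial frequencies scaled by h */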
  ex1 = PetscExpReal(-36. * PETSC_PI * PETSC_PI * t);
  ex2 = PetscExpReal(-4. * PETSC_PI * PETSC_PI * t);
  sc1 = PETSC_PI * 6. * h;
  sc2 = PETSC_PI * 2. * h;
  for (i = mybase; i < myend; i++) s_localptr[i - mybase] = PetscSinScalar(sc1 * (PetscReal)i) * ex1 + 3. * PetscSinScalar(sc2 * (PetscReal)i) * ex2;

  /*
     Restore vector
  */
  PetscCall(VecRestoreArray(solution, &s_localptr));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep. This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal norm_2, norm_max;

  PetscFunctionBeginUser;
  /*
     View a graph of the current iterate
  */
  PetscCall(VecView(u, appctx->viewer2));

  /*
     Compute the exact solution
  */
  PetscCall(ExactSolution(time, appctx->solution, appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  PetscCall(VecAXPY(appctx->solution, -1.0, u));
  PetscCall(VecNorm(appctx->solution, NORM_2, &norm_2));
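  /* scale the discrete 2-norm by sqrt(h) so it approximates the continuous L2 norm on [0,1] */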
  norm_2 = PetscSqrtReal(appctx->h) * norm_2;
  PetscCall(VecNorm(appctx->solution, NORM_MAX, &norm_max));
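  /* report errors at roundoff level as exactly zero so the printed values stay reproducible */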
  if (norm_2 < 1e-14) norm_2 = 0;
  if (norm_max < 1e-14) norm_max = 0;

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g max norm error = %g\n", step, (double)time, (double)norm_2, (double)norm_max));
  appctx->norm_2 += norm_2;
  appctx->norm_max += norm_max;

  /*
     View a graph of the error
  */
  PetscCall(VecView(appctx->solution, appctx->viewer1));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}

/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts  - the TS context
   t   - current time
   X   - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built

   Notes:
   RHSMatrixHeat computes entries for the locally owned part of the system.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues(); we could alternatively use MatSetValuesLocal().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts, PetscReal t, Vec X, Mat AA, Mat BB, void *ctx)
{
  Mat         A      = AA;            /* Jacobian matrix */
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, mstart, mend, idx[3];
  PetscScalar v[3], stwo = -2. / (appctx->h * appctx->h), sone = -.5 * stwo;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(MatGetOwnershipRange(A, &mstart, &mend));

  /*
     Set matrix rows corresponding to boundary data
  */

  if (mstart == 0) { /* first processor only */
    v[0] = 1.0;
    PetscCall(MatSetValues(A, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }

  if (mend == appctx->m) { /* last processor only */
    mend--;
    v[0] = 1.0;
    PetscCall(MatSetValues(A, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data. We construct the
     matrix one row at a time.
  */
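  /* each interior row i receives the standard second-difference stencil
     A(i,i-1) = 1/h^2, A(i,i) = -2/h^2, A(i,i+1) = 1/h^2 (here sone = 1/h^2, stwo = -2/h^2) */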
  v[0] = sone;
  v[1] = stwo;
  v[2] = sone;
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    PetscCall(MatSetValues(A, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

  PetscFunctionReturn(PETSC_SUCCESS);
}
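
/* --------------------------------------------------------------------- */
/*
   RHSFunctionHeat - RHS function used on the -nonlinear path: it retrieves the
   RHS Jacobian matrix, refills it at the current time via RHSMatrixHeat(), and
   applies it to the input vector, i.e. it computes f(t,u) = A(t) u.
*/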
PetscErrorCode RHSFunctionHeat(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  Mat A;

  PetscFunctionBeginUser;
  PetscCall(TSGetRHSJacobian(ts, &A, NULL, NULL, &ctx));
  PetscCall(RHSMatrixHeat(ts, t, globalin, A, NULL, ctx));
  /* PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD)); */
  PetscCall(MatMult(A, globalin, globalout));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

    test:
      args: -ts_view -nox

    test:
      suffix: 2
      args: -ts_view -nox
      nsize: 3

    test:
      suffix: 3
      args: -ts_view -nox -nonlinear

    test:
      suffix: 4
      args: -ts_view -nox -nonlinear
      nsize: 3
      timeoutfactor: 3

    test:
      suffix: sundials
      requires: sundials2
      args: -nox -ts_type sundials -ts_max_steps 5 -nonlinear
      nsize: 4

    test:
      suffix: sundials_dense
      requires: sundials2
      args: -nox -ts_type sundials -ts_sundials_use_dense -ts_max_steps 5 -nonlinear
      nsize: 1

TEST*/