Actual source code: ex5f90t.F90
1: !
2: ! Description: Solves a nonlinear system in parallel with SNES.
3: ! We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
4: ! domain, using distributed arrays (DMDAs) to partition the parallel grid.
5: ! The command line options include:
6: ! -par <parameter>, where <parameter> indicates the nonlinearity of the problem
7: ! problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)
8: !
9: !
10: ! --------------------------------------------------------------------------
11: !
12: ! Solid Fuel Ignition (SFI) problem. This problem is modeled by
13: ! the partial differential equation
14: !
15: ! -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
16: !
17: ! with boundary conditions
18: !
19: ! u = 0 for x = 0, x = 1, y = 0, y = 1.
20: !
21: ! A finite difference approximation with the usual 5-point stencil
22: ! is used to discretize the boundary value problem to obtain a nonlinear
23: ! system of equations.
24: !
25: ! The uniprocessor version of this code is snes/tutorials/ex4f.F
26: !
27: ! --------------------------------------------------------------------------
28: ! The following define must be used before including any PETSc include files
29: ! into a module or interface. This is because they can't handle declarations
30: ! in them
31: !
33: module ex5f90tmodule
34: #include <petsc/finclude/petscdm.h>
35: use petscdmdef
!
!  userctx - application context shared by all solver callbacks:
!    da                   - distributed array (DMDA) managing the parallel grid
!    xs,xe,xm / ys,ye,ym  - 1-based locally owned index range and width
!    gxs..gxm / gys..gym  - 1-based ghosted index range and width
!    mx,my                - global grid dimensions
!    rank                 - MPI rank of this process
!    lambda               - Bratu nonlinearity parameter
!  (the 1-based shift is applied in the main program after DMDAGetCorners)
!
36: type userctx
37: type(tDM) da
38: PetscInt xs,xe,xm,gxs,gxe,gxm
39: PetscInt ys,ye,ym,gys,gye,gym
40: PetscInt mx,my
41: PetscMPIInt rank
42: PetscReal lambda
43: end type userctx
45: contains
46: ! ---------------------------------------------------------------------
47: !
48: ! FormFunction - Evaluates nonlinear function, F(x).
49: !
50: ! Input Parameters:
51: ! snes - the SNES context
52: ! X - input vector
53: ! dummy - optional user-defined context, as set by SNESSetFunction()
54: ! (not used here)
55: !
56: ! Output Parameter:
57: ! F - function vector
58: !
59: ! Notes:
60: ! This routine serves as a wrapper for the lower-level routine
61: ! "FormFunctionLocal", where the actual computations are
62: ! done using the standard Fortran style of treating the local
63: ! vector data as a multidimensional array over the local mesh.
64: ! This routine merely handles ghost point scatters and accesses
65: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
66: !
67: subroutine FormFunction(snesIn,X,F,user,ierr)
68: #include <petsc/finclude/petscsnes.h>
69: use petscsnes
71: ! Input/output variables:
72: type(tSNES) snesIn
73: type(tVec) X,F
74: PetscErrorCode ierr
75: type (userctx) user
77: ! Declarations for use with local arrays:
78: PetscScalar,pointer :: lx_v(:),lf_v(:)
79: type(tVec) localX
81: ! Scatter ghost points to local vector, using the 2-step process
82: ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
83: ! By placing code between these two statements, computations can
84: ! be done while messages are in transition.
85: PetscCall(DMGetLocalVector(user%da,localX,ierr))
86: PetscCall(DMGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX,ierr))
87: PetscCall(DMGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr))
89: ! Get a pointer to vector data.
90: ! - VecGetArrayF90() returns a pointer to the data array.
91: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
92: ! the array.
94: PetscCall(VecGetArrayF90(localX,lx_v,ierr))
95: PetscCall(VecGetArrayF90(F,lf_v,ierr))
97: ! Compute function over the locally owned part of the grid
! (lx_v/lf_v are flat 1-D views; FormFunctionLocal remaps them onto the
! 2-D local grid through its dummy-argument array declarations)
98: PetscCall(FormFunctionLocal(lx_v,lf_v,user,ierr))
100: ! Restore vectors
101: PetscCall(VecRestoreArrayF90(localX,lx_v,ierr))
102: PetscCall(VecRestoreArrayF90(F,lf_v,ierr))
104: ! F was written in place through lf_v, so no global scatter is needed;
! just return the ghosted work vector to the DM
106: PetscCall(DMRestoreLocalVector(user%da,localX,ierr))
! Log roughly 11 floating-point operations per locally owned grid point
107: PetscCall(PetscLogFlops(11.0d0*user%ym*user%xm,ierr))
109: ! PetscCall(VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr))
110: ! PetscCall(VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr))
111: return
112: end subroutine formfunction
113: end module ex5f90tmodule
115: module f90moduleinterfacest
116: use ex5f90tmodule
!
!  Explicit interfaces for SNESSetApplicationContext() and
!  SNESGetApplicationContext() so that the user-defined derived type
!  (userctx) can be passed through these PETSc routines with
!  compile-time argument checking.
!
118: Interface SNESSetApplicationContext
119: Subroutine SNESSetApplicationContext(snesIn,ctx,ierr)
120: #include <petsc/finclude/petscsnes.h>
121: use petscsnes
122: use ex5f90tmodule
123: type(tSNES) snesIn
124: type(userctx) ctx
125: PetscErrorCode ierr
126: End Subroutine
127: End Interface SNESSetApplicationContext
!
!  Note: the Get variant returns a POINTER that aliases the context
!  previously stored with SNESSetApplicationContext().
!
129: Interface SNESGetApplicationContext
130: Subroutine SNESGetApplicationContext(snesIn,ctx,ierr)
131: #include <petsc/finclude/petscsnes.h>
132: use petscsnes
133: use ex5f90tmodule
134: type(tSNES) snesIn
135: type(userctx), pointer :: ctx
136: PetscErrorCode ierr
137: End Subroutine
138: End Interface SNESGetApplicationContext
139: end module f90moduleinterfacest
141: program main
142: #include <petsc/finclude/petscdm.h>
143: #include <petsc/finclude/petscsnes.h>
144: use petscdmda
145: use petscdm
146: use petscsnes
147: use ex5f90tmodule
148: use f90moduleinterfacest
149: implicit none
150: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
151: ! Variable declarations
152: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
153: !
154: ! Variables:
155: ! mysnes - nonlinear solver
156: ! x, r - solution, residual vectors
157: ! J - Jacobian matrix
158: ! its - iterations for convergence
159: ! Nx, Ny - number of processors in x- and y- directions
160: ! matrix_free - flag - 1 indicates matrix-free version
161: !
162: type(tSNES) mysnes
163: type(tVec) x,r
164: type(tMat) J
165: PetscErrorCode ierr
166: PetscInt its
167: PetscBool flg,matrix_free,set
168: PetscInt ione,nfour
169: PetscReal lambda_max,lambda_min
170: type(userctx) user
171: type(userctx), pointer:: puser
172: type(tPetscOptions) :: options
174: ! Note: Any user-defined Fortran routines (such as FormJacobian)
175: ! MUST be declared as external.
176: external FormInitialGuess,FormJacobian
178: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
179: ! Initialize program
180: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
181: PetscCallA(PetscInitialize(ierr))
182: PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr))
184: ! Initialize problem parameters
! (options%v = 0 presumably selects the default global options
! database for the queries below - TODO confirm against the PETSc
! tPetscOptions documentation)
185: options%v = 0
186: lambda_max = 6.81
187: lambda_min = 0.0
188: user%lambda = 6.0
189: ione = 1
190: nfour = 4
191: PetscCallA(PetscOptionsGetReal(options,PETSC_NULL_CHARACTER,'-par',user%lambda,flg,ierr))
192: if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) then
193: SETERRA(PETSC_COMM_SELF,PETSC_ERR_USER,'Lambda provided with -par is out of range ')
194: endif
196: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
197: ! Create nonlinear solver context
198: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
199: PetscCallA(SNESCreate(PETSC_COMM_WORLD,mysnes,ierr))
201: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
202: ! Create vector data structures; set function evaluation routine
203: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
205: ! Create distributed array (DMDA) to manage parallel grid and vectors
207: ! This really needs only the star-type stencil, but we use the box
208: ! stencil temporarily.
209: PetscCallA(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,user%da,ierr))
210: PetscCallA(DMSetFromOptions(user%da,ierr))
211: PetscCallA(DMSetUp(user%da,ierr))
212: PetscCallA(DMDAGetInfo(user%da,PETSC_NULL_INTEGER,user%mx,user%my,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,ierr))
214: !
215: ! Visualize the distribution of the array across the processors
216: !
217: ! PetscCallA(DMView(user%da,PETSC_VIEWER_DRAW_WORLD,ierr))
219: ! Extract global and local vectors from DMDA; then duplicate for remaining
220: ! vectors that are the same types
221: PetscCallA(DMCreateGlobalVector(user%da,x,ierr))
222: PetscCallA(VecDuplicate(x,r,ierr))
224: ! Get local grid boundaries (for 2-dimensional DMDA)
225: PetscCallA(DMDAGetCorners(user%da,user%xs,user%ys,PETSC_NULL_INTEGER,user%xm,user%ym,PETSC_NULL_INTEGER,ierr))
226: PetscCallA(DMDAGetGhostCorners(user%da,user%gxs,user%gys,PETSC_NULL_INTEGER,user%gxm,user%gym,PETSC_NULL_INTEGER,ierr))
228: ! Here we shift the starting indices up by one so that we can easily
229: ! use the Fortran convention of 1-based indices (rather than 0-based indices).
230: user%xs = user%xs+1
231: user%ys = user%ys+1
232: user%gxs = user%gxs+1
233: user%gys = user%gys+1
235: user%ye = user%ys+user%ym-1
236: user%xe = user%xs+user%xm-1
237: user%gye = user%gys+user%gym-1
238: user%gxe = user%gxs+user%gxm-1
! Store the fully populated context in the SNES so the callbacks
! (e.g. FormInitialGuess via SNESGetApplicationContext) can retrieve it
240: PetscCallA(SNESSetApplicationContext(mysnes,user,ierr))
242: ! Set function evaluation routine and vector
243: PetscCallA(SNESSetFunction(mysnes,r,FormFunction,user,ierr))
245: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
246: ! Create matrix data structure; set Jacobian evaluation routine
247: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
249: ! Set Jacobian matrix data structure and default Jacobian evaluation
250: ! routine. User can override with:
251: ! -snes_fd : default finite differencing approximation of Jacobian
252: ! -snes_mf : matrix-free Newton-Krylov method with no preconditioning
253: ! (unless user explicitly sets preconditioner)
254: ! -snes_mf_operator : form preconditioning matrix as set by the user,
255: ! but use matrix-free approx for Jacobian-vector
256: ! products within Newton-Krylov method
257: !
258: ! Note: For the parallel case, vectors and matrices MUST be partitioned
259: ! accordingly. When using distributed arrays (DMDAs) to create vectors,
260: ! the DMDAs determine the problem partitioning. We must explicitly
261: ! specify the local matrix dimensions upon its creation for compatibility
262: ! with the vector distribution. Thus, the generic MatCreate() routine
263: ! is NOT sufficient when working with distributed arrays.
264: !
265: ! Note: Here we only approximately preallocate storage space for the
266: ! Jacobian. See the users manual for a discussion of better techniques
267: ! for preallocating matrix memory.
269: PetscCallA(PetscOptionsHasName(options,PETSC_NULL_CHARACTER,'-snes_mf',matrix_free,ierr))
270: if (.not. matrix_free) then
271: PetscCallA(DMSetMatType(user%da,MATAIJ,ierr))
272: PetscCallA(DMCreateMatrix(user%da,J,ierr))
273: PetscCallA(SNESSetJacobian(mysnes,J,J,FormJacobian,user,ierr))
274: endif
276: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
277: ! Customize nonlinear solver; set runtime options
278: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
279: ! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
280: PetscCallA(SNESSetFromOptions(mysnes,ierr))
282: ! Test Fortran90 wrapper for SNESSet/Get ApplicationContext()
283: PetscCallA(PetscOptionsGetBool(options,PETSC_NULL_CHARACTER,'-test_appctx',flg,set,ierr))
284: if (flg) then
285: PetscCallA(SNESGetApplicationContext(mysnes,puser,ierr))
286: endif
288: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
289: ! Evaluate initial guess; then solve nonlinear system.
290: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
291: ! Note: The user should initialize the vector, x, with the initial guess
292: ! for the nonlinear solver prior to calling SNESSolve(). In particular,
293: ! to employ an initial guess of zero, the user should explicitly set
294: ! this vector to zero by calling VecSet().
296: PetscCallA(FormInitialGuess(mysnes,x,ierr))
297: PetscCallA(SNESSolve(mysnes,PETSC_NULL_VEC,x,ierr))
298: PetscCallA(SNESGetIterationNumber(mysnes,its,ierr))
! Only rank 0 reports, so the iteration count is printed once
299: if (user%rank .eq. 0) then
300: write(6,100) its
301: endif
302: 100 format('Number of SNES iterations = ',i5)
304: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
305: ! Free work space. All PETSc objects should be destroyed when they
306: ! are no longer needed.
307: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
308: if (.not. matrix_free) PetscCallA(MatDestroy(J,ierr))
309: PetscCallA(VecDestroy(x,ierr))
310: PetscCallA(VecDestroy(r,ierr))
311: PetscCallA(SNESDestroy(mysnes,ierr))
312: PetscCallA(DMDestroy(user%da,ierr))
314: PetscCallA(PetscFinalize(ierr))
315: end
317: ! ---------------------------------------------------------------------
318: !
319: ! FormInitialGuess - Forms initial approximation.
320: !
321: ! Input Parameters:
322: ! X - vector
323: !
324: ! Output Parameter:
325: ! X - vector
326: !
327: ! Notes:
328: ! This routine serves as a wrapper for the lower-level routine
329: ! "InitialGuessLocal", where the actual computations are
330: ! done using the standard Fortran style of treating the local
331: ! vector data as a multidimensional array over the local mesh.
332: ! This routine merely handles ghost point scatters and accesses
333: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
334: !
335: subroutine FormInitialGuess(mysnes,X,ierr)
336: #include <petsc/finclude/petscsnes.h>
337: use petscsnes
338: use ex5f90tmodule
339: use f90moduleinterfacest
340: ! Input/output variables:
341: type(tSNES) mysnes
342: type(userctx), pointer:: puser
343: type(tVec) X
344: PetscErrorCode ierr
346: ! Declarations for use with local arrays:
347: PetscScalar,pointer :: lx_v(:)
349: ierr = 0
! Retrieve the context stored by the main program; puser aliases it
! NOTE(review): this routine uses PetscCallA while FormFunction uses
! PetscCall for its error checks - confirm which variant is intended
! inside a subroutine.
350: PetscCallA(SNESGetApplicationContext(mysnes,puser,ierr))
351: ! Get a pointer to vector data.
352: ! - VecGetArrayF90() returns a pointer to the data array.
353: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
354: ! the array.
356: PetscCallA(VecGetArrayF90(X,lx_v,ierr))
358: ! Compute initial guess over the locally owned part of the grid
! (lx_v is a flat 1-D view; InitialGuessLocal remaps it onto the 2-D
! owned grid through its dummy-argument array declaration)
359: PetscCallA(InitialGuessLocal(puser,lx_v,ierr))
361: ! Restore vector
362: PetscCallA(VecRestoreArrayF90(X,lx_v,ierr))
364: ! X is a global vector and was filled in place above, so no scatter
! into a global vector is needed here
366: return
367: end
369: ! ---------------------------------------------------------------------
370: !
371: ! InitialGuessLocal - Computes initial approximation, called by
372: ! the higher level routine FormInitialGuess().
373: !
374: ! Input Parameter:
375: ! x - local vector data
376: !
377: ! Output Parameters:
378: ! x - local vector data
379: ! ierr - error code
380: !
381: ! Notes:
382: ! This routine uses standard Fortran-style computations over a 2-dim array.
383: !
subroutine InitialGuessLocal(user,x,ierr)
#include <petsc/finclude/petscsys.h>
  use petscsys
  use ex5f90tmodule
  implicit none

!  Sets the starting iterate on this process's owned portion of the
!  grid: zero on the physical boundary, and a lambda-scaled
!  distance-to-boundary profile in the interior.
!
!  Input/output variables:
  type (userctx) user                              ! grid ranges, dims, lambda
  PetscScalar x(user%xs:user%xe,user%ys:user%ye)   ! owned solution block
  PetscErrorCode ierr                              ! 0 on success

!  Local variables:
  PetscInt ix,jy
  PetscScalar ratio,disty,dx,dy
  PetscScalar one

  ierr = 0
  one = 1.0

!  Uniform mesh spacings on the unit square
  dx = one/(PetscIntToReal(user%mx-1))
  dy = one/(PetscIntToReal(user%my-1))
!  Scaling factor lambda/(lambda+1) applied to the interior profile
  ratio = user%lambda/(user%lambda + one)

  do jy=user%ys,user%ye
!    Distance from row jy to the nearer horizontal boundary
    disty = PetscIntToReal(min(jy-1,user%my-jy))*dy
    do ix=user%xs,user%xe
      if (ix .eq. 1 .or. jy .eq. 1 .or. ix .eq. user%mx .or. jy .eq. user%my) then
!        Homogeneous Dirichlet condition on the boundary
        x(ix,jy) = 0.0
      else
!        NOTE(review): PetscIntToReal is applied to already-real
!        expressions here (min(...)*dx and disty); looks redundant -
!        confirm against the original example.
        x(ix,jy) = ratio * sqrt(min(PetscIntToReal(min(ix-1,user%mx-ix)*dx),PetscIntToReal(disty)))
      endif
    end do
  end do
end subroutine InitialGuessLocal
420: ! ---------------------------------------------------------------------
421: !
422: ! FormFunctionLocal - Computes nonlinear function, called by
423: ! the higher level routine FormFunction().
424: !
425: ! Input Parameter:
426: ! x - local vector data
427: !
428: ! Output Parameters:
429: ! f - local vector data, f(x)
430: ! ierr - error code
431: !
432: ! Notes:
433: ! This routine uses standard Fortran-style computations over a 2-dim array.
434: !
subroutine FormFunctionLocal(x,f,user,ierr)
#include <petsc/finclude/petscsys.h>
  use petscsys
  use ex5f90tmodule
  implicit none

!  Evaluates the scaled 5-point finite-difference Bratu residual over
!  the locally owned grid points; ghost values of x are read where the
!  stencil reaches past the owned block.
!
!  Input/output variables:
  type (userctx) user
  PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)   ! ghosted local data
  PetscScalar f(user%xs:user%xe,user%ys:user%ye)       ! owned residual block
  PetscErrorCode ierr                                  ! 0 on success

!  Local variables:
  PetscScalar two,one,dx,dy,dxdy,dydx,bratu
  PetscScalar uc,uxx,uyy
  PetscInt ix,jy

  ierr = 0
  one = 1.0
  two = 2.0

!  Mesh spacings and stencil weights
  dx = one/PetscIntToReal(user%mx-1)
  dy = one/PetscIntToReal(user%my-1)
  bratu = dx*dy*user%lambda      ! scale of the nonlinear exp(u) term
  dxdy = dx/dy                   ! weight of the y-direction difference
  dydx = dy/dx                   ! weight of the x-direction difference

!  Residual over the locally owned part of the grid
  do jy=user%ys,user%ye
    do ix=user%xs,user%xe
      if (ix .eq. 1 .or. jy .eq. 1 .or. ix .eq. user%mx .or. jy .eq. user%my) then
!        Dirichlet rows: the residual simply enforces u = 0
        f(ix,jy) = x(ix,jy)
      else
        uc = x(ix,jy)
        uxx = dydx * (two*uc - x(ix-1,jy) - x(ix+1,jy))
        uyy = dxdy * (two*uc - x(ix,jy-1) - x(ix,jy+1))
        f(ix,jy) = uxx + uyy - bratu*exp(uc)
      endif
    end do
  end do
end subroutine FormFunctionLocal
476: ! ---------------------------------------------------------------------
477: !
478: ! FormJacobian - Evaluates Jacobian matrix.
479: !
480: ! Input Parameters:
481: ! snes - the SNES context
482: ! x - input vector
483: ! dummy - optional user-defined context, as set by SNESSetJacobian()
484: ! (not used here)
485: !
486: ! Output Parameters:
487: ! jac - Jacobian matrix
488: ! jac_prec - optionally different preconditioning matrix (not used here)
489: ! flag - flag indicating matrix structure
490: !
491: ! Notes:
492: ! This routine serves as a wrapper for the lower-level routine
493: ! "FormJacobianLocal", where the actual computations are
494: ! done using the standard Fortran style of treating the local
495: ! vector data as a multidimensional array over the local mesh.
496: ! This routine merely accesses the local vector data via
497: ! VecGetArrayF90() and VecRestoreArrayF90().
498: !
499: ! Notes:
500: ! Due to grid point reordering with DMDAs, we must always work
501: ! with the local grid points, and then transform them to the new
502: ! global numbering with the "ltog" mapping
503: ! We cannot work directly with the global numbers for the original
504: ! uniprocessor grid!
505: !
506: ! Two methods are available for imposing this transformation
507: ! when setting matrix entries:
508: ! (A) MatSetValuesLocal(), using the local ordering (including
509: ! ghost points!)
510: ! - Set matrix entries using the local ordering
511: ! by calling MatSetValuesLocal()
512: ! (B) MatSetValues(), using the global ordering
513: ! - Use DMGetLocalToGlobalMapping() then
514: ! ISLocalToGlobalMappingGetIndicesF90() to extract the local-to-global map
515: ! - Then apply this map explicitly yourself
516: ! - Set matrix entries using the global ordering by calling
517: ! MatSetValues()
518: ! Option (A) seems cleaner/easier in many cases, and is the procedure
519: ! used in this example.
520: !
521: subroutine FormJacobian(mysnes,X,jac,jac_prec,user,ierr)
522: #include <petsc/finclude/petscsnes.h>
523: use petscsnes
524: use ex5f90tmodule
525: ! Input/output variables:
526: type(tSNES) mysnes
527: type(tVec) X
528: type(tMat) jac,jac_prec
529: type(userctx) user
530: PetscErrorCode ierr
532: ! Declarations for use with local arrays:
533: PetscScalar,pointer :: lx_v(:)
534: type(tVec) localX
536: ! Scatter ghost points to local vector, using the 2-step process
537: ! DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
538: ! Computations can be done while messages are in transition,
539: ! by placing code between these two statements.
! NOTE(review): this routine uses PetscCallA while FormFunction uses
! PetscCall for its error checks - confirm which variant is intended
! inside a subroutine.
541: PetscCallA(DMGetLocalVector(user%da,localX,ierr))
542: PetscCallA(DMGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX,ierr))
543: PetscCallA(DMGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr))
545: ! Get a pointer to vector data
546: PetscCallA(VecGetArrayF90(localX,lx_v,ierr))
548: ! Compute entries for the locally owned part of the Jacobian preconditioner.
549: PetscCallA(FormJacobianLocal(lx_v,jac_prec,user,ierr))
551: ! Assemble matrix, using the 2-step process:
552: ! MatAssemblyBegin(), MatAssemblyEnd()
553: ! Computations can be done while messages are in transition,
554: ! by placing code between these two statements.
! (main passes the same matrix J for both jac and jac_prec, so the
! disabled jac .ne. jac_prec guards below mean the one matrix is
! assembled twice - presumably harmless; confirm if they diverge)
556: PetscCallA(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr))
557: ! if (jac .ne. jac_prec) then
558: PetscCallA(MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr))
559: ! endif
! Overlap communication: restore and return the local work vector
! while the assembly messages are in transit
560: PetscCallA(VecRestoreArrayF90(localX,lx_v,ierr))
561: PetscCallA(DMRestoreLocalVector(user%da,localX,ierr))
562: PetscCallA(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr))
563: ! if (jac .ne. jac_prec) then
564: PetscCallA(MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr))
565: ! endif
567: ! Tell the matrix we will never add a new nonzero location to the
568: ! matrix. If we do it will generate an error.
570: PetscCallA(MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,ierr))
572: return
573: end
575: ! ---------------------------------------------------------------------
576: !
577: ! FormJacobianLocal - Computes Jacobian preconditioner matrix,
578: ! called by the higher level routine FormJacobian().
579: !
580: ! Input Parameters:
581: ! x - local vector data
582: !
583: ! Output Parameters:
584: ! jac_prec - Jacobian preconditioner matrix
585: ! ierr - error code
586: !
587: ! Notes:
588: ! This routine uses standard Fortran-style computations over a 2-dim array.
589: !
590: ! Notes:
591: ! Due to grid point reordering with DMDAs, we must always work
592: ! with the local grid points, and then transform them to the new
593: ! global numbering with the "ltog" mapping
594: ! We cannot work directly with the global numbers for the original
595: ! uniprocessor grid!
596: !
597: ! Two methods are available for imposing this transformation
598: ! when setting matrix entries:
599: ! (A) MatSetValuesLocal(), using the local ordering (including
600: ! ghost points!)
601: ! - Set matrix entries using the local ordering
602: ! by calling MatSetValuesLocal()
603: ! (B) MatSetValues(), using the global ordering
604: ! - Set matrix entries using the global ordering by calling
605: ! MatSetValues()
606: ! Option (A) seems cleaner/easier in many cases, and is the procedure
607: ! used in this example.
608: !
subroutine FormJacobianLocal(x,jac_prec,user,ierr)
#include <petsc/finclude/petscmat.h>
  use petscmat
  use ex5f90tmodule
  implicit none

!  Computes the 5-point-stencil Jacobian of the Bratu residual on the
!  locally owned grid points and inserts the entries into jac_prec
!  using the DMDA's local (ghosted) ordering via MatSetValuesLocal().
!
!  Input parameters:
!    x        - ghosted local solution array
!    user     - grid ranges, global dimensions and lambda
!  Output parameters:
!    jac_prec - Jacobian preconditioner matrix (entries inserted; the
!               caller performs the assembly)
!    ierr     - error code, 0 on success

  type (userctx) user
  PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
  type(tMat) jac_prec
  PetscErrorCode ierr

!  Local variables:
  PetscInt row,col(5),i,j
  PetscInt ione,ifive
  PetscScalar two,one,hx,hy,hxdhy
  PetscScalar hydhx,sc,v(5)

!  Initialize the error code up front: if this process owns no grid
!  points the loops below never execute, and ierr would otherwise be
!  returned undefined (the sibling routines InitialGuessLocal and
!  FormFunctionLocal both set ierr = 0 as well).
  ierr = 0

!  Set parameters
  ione = 1
  ifive = 5
  one = 1.0
  two = 2.0
  hx = one/PetscIntToReal(user%mx-1)
  hy = one/PetscIntToReal(user%my-1)
  sc = hx*hy
  hxdhy = hx/hy
  hydhx = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!  - Each process inserts only rows it owns; any off-process entries
!    are communicated during matrix assembly.
!  - All entries for a row are set at once with MatSetValuesLocal(),
!    which takes 0-based indices in the DMDA's local (ghosted)
!    ordering, in Fortran as well as in C.
!  NOTE(review): PetscCallA is used here while FormFunction uses
!  PetscCall - confirm which variant is intended inside a subroutine.
  do j=user%ys,user%ye
!    Local (ghosted) row index of the point just before (xs,j);
!    advanced by one per column inside the inner loop
    row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
    do i=user%xs,user%xe
      row = row + 1
      if (i .eq. 1 .or. j .eq. 1 .or. i .eq. user%mx .or. j .eq. user%my) then
!        Boundary points: identity row enforcing the Dirichlet condition
        col(1) = row
        v(1) = one
        PetscCallA(MatSetValuesLocal(jac_prec,ione,row,ione,col,v,INSERT_VALUES,ierr))
      else
!        Interior points: 5-point stencil; the diagonal carries the
!        derivative of the -sc*lambda*exp(u) nonlinear term
        v(1) = -hxdhy
        v(2) = -hydhx
        v(3) = two*(hydhx + hxdhy) - sc*user%lambda*exp(x(i,j))
        v(4) = -hydhx
        v(5) = -hxdhy
        col(1) = row - user%gxm
        col(2) = row - 1
        col(3) = row
        col(4) = row + 1
        col(5) = row + user%gxm
        PetscCallA(MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,INSERT_VALUES,ierr))
      endif
    end do
  end do
end subroutine FormJacobianLocal
676: !/*TEST
677: !
678: ! test:
679: ! nsize: 4
680: ! args: -snes_mf -pc_type none -da_processors_x 4 -da_processors_y 1 -snes_monitor_short -ksp_gmres_cgs_refinement_type refine_always
681: !
682: !TEST*/