Actual source code: ex13f90.F90


      module ex13f90module
#include <petsc/finclude/petscksp.h>
        use petscksp
        type User
          Vec x
          Vec b
          Mat A
          KSP ksp
          PetscInt m
          PetscInt n
        end type User
      end module

      program main
      use ex13f90module
      implicit none

!    User-defined context that contains all the data structures used
!    in the linear solution process.

!   Vec    x,b      /* solution vector and right-hand-side vector */
!   Mat    A        /* sparse matrix */
!   KSP    ksp      /* linear solver context */
!   int    m,n      /* grid dimensions */
!
!   Since we cannot store Scalars and integers in the same context,
!   we store the integers/pointers in the user-defined context and
!   carry the scalar values in the common block.
!   The scalar values in this simplistic example could easily
!   be recalculated in each routine where they are needed.
!
!   Scalar hx2,hy2  /* (m+1)*(m+1) and (n+1)*(n+1), i.e., 1/(hx*hx) and 1/(hy*hy) */

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external UserInitializeLinearSolver
      external UserFinalizeLinearSolver
      external UserDoLinearSolver

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      PetscScalar  hx,hy,x,y
      type(User) userctx
      PetscErrorCode ierr
      PetscInt m,n,t,tmax,i,j
      PetscBool  flg
      PetscMPIInt size
      PetscReal  enorm
      PetscScalar cnorm
      PetscScalar,ALLOCATABLE :: userx(:,:)
      PetscScalar,ALLOCATABLE :: userb(:,:)
      PetscScalar,ALLOCATABLE :: solution(:,:)
      PetscScalar,ALLOCATABLE :: rho(:,:)

      PetscReal hx2,hy2
      common /param/ hx2,hy2

      tmax = 2
      m = 6
      n = 7

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      PetscCallA(PetscInitialize(ierr))
      PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))
      if (size .ne. 1) then; SETERRA(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,'This is a uniprocessor example only'); endif

!  The next two statements are for testing only; they allow the user to
!  set the grid size at runtime.

      PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
      PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr))

!  Create the empty sparse matrix and linear solver data structures

      PetscCallA(UserInitializeLinearSolver(m,n,userctx,ierr))

!  Allocate arrays to hold the solution of the linear system.  This
!  approach is not normally taken in PETSc programs, but here we are
!  calling these routines from a non-PETSc program and want to reuse
!  that program's existing data structures.  In the context of a larger
!  application, these arrays would be provided by other (non-PETSc)
!  parts of the application code.

      ALLOCATE (userx(m,n),userb(m,n),solution(m,n))

!  Allocate an array to hold the coefficients in the elliptic operator

      ALLOCATE (rho(m,n))

!  Fill the array rho with the coefficient function rho(x,y) = x; fill the
!  right-hand side userb and the array solution with a known problem for testing.
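!
!  For reference, the right-hand side below follows from applying
!  -div(rho grad u) with rho(x,y) = x to the chosen solution
!  u = sin(2*pi*x)*sin(2*pi*y):
!
!     -d/dx(x du/dx) - d/dy(x du/dy) = -du/dx - x*(d2u/dx2 + d2u/dy2)
!        = -2*pi*cos(2*pi*x)*sin(2*pi*y) + 8*pi*pi*x*sin(2*pi*x)*sin(2*pi*y)
!
!  which is exactly the expression assigned to userb below.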

      hx = 1.0/real(m+1)
      hy = 1.0/real(n+1)
      y  = hy
      do 20 j=1,n
         x = hx
         do 10 i=1,m
            rho(i,j)      = x
            solution(i,j) = sin(2.*PETSC_PI*x)*sin(2.*PETSC_PI*y)
            userb(i,j)    = -2.*PETSC_PI*cos(2.*PETSC_PI*x)*sin(2.*PETSC_PI*y) + 8*PETSC_PI*PETSC_PI*x*sin(2.*PETSC_PI*x)*sin(2.*PETSC_PI*y)
            x = x + hx
 10      continue
         y = y + hy
 20   continue

!  Loop over a number of timesteps, setting up and solving the linear
!  system for each timestep.
!  Note that this loop is somewhat artificial. It is intended to
!  demonstrate how one may reuse the linear solvers in each timestep.

      do 100 t=1,tmax
         PetscCallA(UserDoLinearSolver(rho,userctx,userb,userx,ierr))

!        Compute the error.  Note that this could (and usually should) all
!        be done using the PETSc vector operations. Here we use more
!        standard programming practices to show how they may be mixed with
!        PETSc.
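!        (The double loop below accumulates |solution - userx|**2 over all
!        grid points; the subsequent scaling by hx*hy makes enorm a discrete
!        approximation to the squared L2 norm of the error.)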
         cnorm = 0.0
         do 90 j=1,n
            do 80 i=1,m
              cnorm = cnorm + PetscConj(solution(i,j)-userx(i,j))*(solution(i,j)-userx(i,j))
 80         continue
 90      continue
         enorm = PetscRealPart(cnorm*hx*hy)
         write(6,115) m,n,enorm
 115     format ('m = ',I2,' n = ',I2,' error norm = ',1PE11.4)
 100  continue

!  We are finished solving linear systems, so we clean up the
!  data structures.

      DEALLOCATE (userx,userb,solution,rho)

      PetscCallA(UserFinalizeLinearSolver(userctx,ierr))
      PetscCallA(PetscFinalize(ierr))
      end
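
! A possible way to build and run this example (a sketch assuming the
! standard PETSc makefile conventions; the exact target name is an
! assumption):
!
!    make ex13f90
!    ./ex13f90 -m 19 -n 20 -ksp_monitor
!
! Here -m and -n set the grid dimensions read via PetscOptionsGetInt()
! above, and -ksp_monitor is one of the runtime solver options mentioned
! in the comments below.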

! ----------------------------------------------------------------
      subroutine UserInitializeLinearSolver(m,n,userctx,ierr)
      use ex13f90module
      implicit none

      PetscInt m,n
      PetscErrorCode ierr
      type(User) userctx

      common /param/ hx2,hy2
      PetscReal hx2,hy2

!  Local variable declarations
      Mat     A
      Vec     b,x
      KSP     ksp
      PetscInt Ntot,five,one

!  Here we assume use of a grid of size m x n, with all points on the
!  interior of the domain, i.e., we do not include the points corresponding
!  to homogeneous Dirichlet boundary conditions.  We assume that the domain
!  is [0,1]x[0,1].

      hx2 = (m+1)*(m+1)
      hy2 = (n+1)*(n+1)
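!     (That is, hx2 = 1/(hx*hx) and hy2 = 1/(hy*hy) for the grid spacings
!     hx = 1/(m+1) and hy = 1/(n+1) used in the main program.)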
      Ntot = m*n

      five = 5
      one = 1

!  Create the sparse matrix. Preallocate 5 nonzeros per row.

      PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,Ntot,Ntot,five,PETSC_NULL_INTEGER,A,ierr))
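!  (The preallocation of 5 nonzeros per row matches the 5-point stencil
!  assembled in UserDoLinearSolver(): one diagonal entry plus up to four
!  neighbor couplings.)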
!
!  Create vectors. Here we create vectors with no memory allocated.
!  This way, we can use the data structures already in the program
!  by calling VecPlaceArray() at a later stage.
!
      PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,one,Ntot,PETSC_NULL_SCALAR,b,ierr))
      PetscCall(VecDuplicate(b,x,ierr))

!  Create linear solver context. This will be used repeatedly for all
!  the linear solves needed.

      PetscCall(KSPCreate(PETSC_COMM_SELF,ksp,ierr))

      userctx%x = x
      userctx%b = b
      userctx%A = A
      userctx%ksp = ksp
      userctx%m = m
      userctx%n = n

      return
      end
! -----------------------------------------------------------------------

!   Solves -div (rho grad psi) = F using finite differences.
!   rho is a 2-dimensional array of size m by n, stored in Fortran
!   style by columns. userb is a standard one-dimensional array.

      subroutine UserDoLinearSolver(rho,userctx,userb,userx,ierr)
      use ex13f90module
      implicit none

      PetscErrorCode ierr
      type(User) userctx
      PetscScalar rho(*),userb(*),userx(*)

      common /param/ hx2,hy2
      PetscReal hx2,hy2

      PC   pc
      KSP  ksp
      Vec  b,x
      Mat  A
      PetscInt m,n,one
      PetscInt i,j,II,JJ
      PetscScalar  v

      one  = 1
      x    = userctx%x
      b    = userctx%b
      A    = userctx%A
      ksp  = userctx%ksp
      m    = userctx%m
      n    = userctx%n

!  This is not the most efficient way of generating the matrix,
!  but let's not worry about that here.  Ideally we would have separate
!  code for the four corners, each edge, and the interior, so that the
!  slow if-tests would not appear inside the loop.
!
!  Compute the operator
!          -div rho grad
!  on an m by n grid with zero Dirichlet boundary conditions. The rho
!  is assumed to be given on the same grid to which the finite difference
!  stencil is applied.  For a staggered grid, things would change
!  slightly.
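!
!  Concretely, at grid point (i,j) the loop below inserts the 5-point
!  stencil row
!
!     -0.5*(rho(i,j)+rho(i,j-1))*hy2 * u(i,j-1)
!   + -0.5*(rho(i,j)+rho(i,j+1))*hy2 * u(i,j+1)
!   + -0.5*(rho(i,j)+rho(i-1,j))*hx2 * u(i-1,j)
!   + -0.5*(rho(i,j)+rho(i+1,j))*hx2 * u(i+1,j)
!   +  2*rho(i,j)*(hx2+hy2)          * u(i,j)
!
!  with rho averaged between neighboring grid points; rows next to the
!  boundary simply omit the couplings that fall outside the grid.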

      II = 0
      do 110 j=1,n
         do 100 i=1,m
            if (j .gt. 1) then
               JJ = II - m
               v = -0.5*(rho(II+1) + rho(JJ+1))*hy2
               PetscCall(MatSetValues(A,one,II,one,JJ,v,INSERT_VALUES,ierr))
            endif
            if (j .lt. n) then
               JJ = II + m
               v = -0.5*(rho(II+1) + rho(JJ+1))*hy2
               PetscCall(MatSetValues(A,one,II,one,JJ,v,INSERT_VALUES,ierr))
            endif
            if (i .gt. 1) then
               JJ = II - 1
               v = -0.5*(rho(II+1) + rho(JJ+1))*hx2
               PetscCall(MatSetValues(A,one,II,one,JJ,v,INSERT_VALUES,ierr))
            endif
            if (i .lt. m) then
               JJ = II + 1
               v = -0.5*(rho(II+1) + rho(JJ+1))*hx2
               PetscCall(MatSetValues(A,one,II,one,JJ,v,INSERT_VALUES,ierr))
            endif
            v = 2*rho(II+1)*(hx2+hy2)
            PetscCall(MatSetValues(A,one,II,one,II,v,INSERT_VALUES,ierr))
            II = II+1
 100     continue
 110  continue
!
!     Assemble matrix
!
      PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr))
      PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr))
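!     (The matrix must be fully assembled before it can be used by the
!     KSPSolve() below.)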

!
!     Set operators. Here the matrix that defines the linear system
!     also serves as the preconditioning matrix. Since all the matrices
!     will have the same nonzero pattern here, we indicate this so the
!     linear solvers can take advantage of this.
!
      PetscCall(KSPSetOperators(ksp,A,A,ierr))

!
!     Set linear solver defaults for this problem (optional).
!     - Here we set it to use direct LU factorization for the solution
!
      PetscCall(KSPGetPC(ksp,pc,ierr))
      PetscCall(PCSetType(pc,PCLU,ierr))

!
!     Set runtime options, e.g.,
!        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!     These options will override those specified above as long as
!     KSPSetFromOptions() is called _after_ any other customization
!     routines.
!
!     Run the program with the option -help to see all the possible
!     linear solver options.
!
      PetscCall(KSPSetFromOptions(ksp,ierr))

!
!     This allows the PETSc linear solvers to compute the solution
!     directly in the user's array rather than in the PETSc vector.
!
!     This is essentially a hack and not highly recommended unless you
!     are quite comfortable with using PETSc. In general, users should
!     write their entire application using PETSc vectors rather than
!     arrays.
!
      PetscCall(VecPlaceArray(x,userx,ierr))
      PetscCall(VecPlaceArray(b,userb,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      PetscCall(KSPSolve(ksp,b,x,ierr))
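
!     Disassociate the user arrays from the vectors now that the solve is
!     complete; VecResetArray() undoes the VecPlaceArray() calls above.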

      PetscCall(VecResetArray(x,ierr))
      PetscCall(VecResetArray(b,ierr))
      return
      end

! ------------------------------------------------------------------------

      subroutine UserFinalizeLinearSolver(userctx,ierr)
      use ex13f90module
      implicit none

      PetscErrorCode ierr
      type(User) userctx

!
!     We are all done and don't need to solve any more linear systems, so
!     we free the work space.  All PETSc objects should be destroyed when
!     they are no longer needed.
!
      PetscCall(VecDestroy(userctx%x,ierr))
      PetscCall(VecDestroy(userctx%b,ierr))
      PetscCall(MatDestroy(userctx%A,ierr))
      PetscCall(KSPDestroy(userctx%ksp,ierr))
      return
      end

!
!/*TEST
!
!   test:
!      args: -m 19 -n 20 -ksp_gmres_cgs_refinement_type refine_always
!      output_file: output/ex13f90_1.out
!
!TEST*/