Actual source code: ex62f90.F90
program ex62f90
#include "petsc/finclude/petsc.h"
use petsc
implicit none
#include "exodusII.inc"

! Get the Fortran kind associated with PetscInt and PetscReal so that we can use literal constants.
PetscInt :: dummyPetscInt
PetscReal :: dummyPetscReal
integer,parameter :: kPI = kind(dummyPetscInt)
integer,parameter :: kPR = kind(dummyPetscReal)
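! (These kinds let us write typed literals below, e.g. 0_kPI for a PetscInt
! constant and 1.234_kPR for a PetscReal constant, whatever index width and
! precision PETSc was configured with.)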

type(tDM) :: dm,dmU,dmA,dmS,dmUA,dmUA2,pDM
type(tDM),dimension(:),pointer :: dmList
type(tVec) :: X,U,A,S,UA,UA2
type(tIS) :: isU,isA,isS,isUA
type(tPetscSection) :: section, rootSection, leafSection
PetscInt,dimension(1) :: fieldU = [0]
PetscInt,dimension(1) :: fieldA = [2]
PetscInt,dimension(1) :: fieldS = [1]
PetscInt,dimension(2) :: fieldUA = [0,2]
character(len=PETSC_MAX_PATH_LEN) :: ifilename,ofilename,IOBuffer
integer :: exoid = -1
type(tIS) :: csIS
PetscInt,dimension(:),pointer :: csID
PetscInt,dimension(:),pointer :: pStartDepth,pEndDepth
PetscInt :: order = 1
PetscInt :: sdim,d,pStart,pEnd,p,numCS,set,i,j
PetscMPIInt :: rank,numProc
PetscBool :: flg
PetscErrorCode :: ierr
MPI_Comm :: comm
type(tPetscViewer) :: viewer

character(len=MXSTLN) :: sJunk
PetscInt :: numstep = 3, step
PetscInt :: numNodalVar,numZonalVar
character(len=MXSTLN) :: nodalVarName(4)
character(len=MXSTLN) :: zonalVarName(6)
logical,dimension(:,:),pointer :: truthtable

type(tIS) :: cellIS
PetscInt,dimension(:),pointer :: cellID
PetscInt :: numCells, cell, closureSize
PetscInt,dimension(:),pointer :: closureA,closure

type(tPetscSection) :: sectionUA,coordSection
type(tVec) :: UALoc,coord
PetscScalar,dimension(:),pointer :: cval,xyz
PetscInt :: dofUA,offUA,c

! dof layout for each field, ordered by increasing depth in the DAG:
! vertex, edge, (face,) cell
PetscInt,dimension(3),target :: dofS2D = [0, 0, 3]
PetscInt,dimension(3),target :: dofUP1Tri = [2, 0, 0]
PetscInt,dimension(3),target :: dofAP1Tri = [1, 0, 0]
PetscInt,dimension(3),target :: dofUP2Tri = [2, 2, 0]
PetscInt,dimension(3),target :: dofAP2Tri = [1, 1, 0]
PetscInt,dimension(3),target :: dofUP1Quad = [2, 0, 0]
PetscInt,dimension(3),target :: dofAP1Quad = [1, 0, 0]
PetscInt,dimension(3),target :: dofUP2Quad = [2, 2, 2]
PetscInt,dimension(3),target :: dofAP2Quad = [1, 1, 1]
PetscInt,dimension(4),target :: dofS3D = [0, 0, 0, 6]
PetscInt,dimension(4),target :: dofUP1Tet = [3, 0, 0, 0]
PetscInt,dimension(4),target :: dofAP1Tet = [1, 0, 0, 0]
PetscInt,dimension(4),target :: dofUP2Tet = [3, 3, 0, 0]
PetscInt,dimension(4),target :: dofAP2Tet = [1, 1, 0, 0]
PetscInt,dimension(4),target :: dofUP1Hex = [3, 0, 0, 0]
PetscInt,dimension(4),target :: dofAP1Hex = [1, 0, 0, 0]
PetscInt,dimension(4),target :: dofUP2Hex = [3, 3, 3, 3]
PetscInt,dimension(4),target :: dofAP2Hex = [1, 1, 1, 1]
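! (For instance, dofUP2Tri = [2, 2, 0] is a P2 vector field on triangles:
! two components on each vertex and each edge, none on the cell, while
! dofS2D = [0, 0, 3] keeps the three components of Sigma on the cell only.)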
PetscInt,dimension(:),pointer :: dofU,dofA,dofS

type(tPetscSF) :: migrationSF, natSF, natPointSF, natPointSFInv
PetscPartitioner :: part

type(tVec) :: tmpVec
PetscReal :: norm
PetscReal :: time = 1.234_kPR

PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER,ierr))
if (ierr /= 0) then
  print*,'Unable to initialize PETSc'
  stop
end if

PetscCallA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
PetscCallA(MPI_Comm_size(PETSC_COMM_WORLD,numProc,ierr))
PetscCallA(PetscOptionsGetString(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,"-i",ifilename,flg,ierr))
if (.not. flg) then
  SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"missing input file name -i <input file name>")
end if
PetscCallA(PetscOptionsGetString(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,"-o",ofilename,flg,ierr))
if (.not. flg) then
  SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"missing output file name -o <output file name>")
end if
PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,"-order",order,flg,ierr))
if ((order > 2) .or. (order < 1)) then
  write(IOBuffer,'("Unsupported polynomial order ", I2, " not in [1,2]")') order
  SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,IOBuffer)
end if
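! A typical invocation (the file names are placeholders) would be
!   ./ex62f90 -i input.exo -o output.exo -order 2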

! Read the mesh in any supported format
PetscCallA(DMPlexCreateFromFile(PETSC_COMM_WORLD, ifilename,PETSC_NULL_CHARACTER,PETSC_TRUE,dm,ierr))
PetscCallA(DMPlexDistributeSetDefault(dm,PETSC_FALSE,ierr))
PetscCallA(DMSetFromOptions(dm,ierr))
PetscCallA(DMGetDimension(dm, sdim,ierr))
PetscCallA(DMViewFromOptions(dm, PETSC_NULL_OPTIONS,"-dm_view",ierr))

! Create the exodus result file

! enable exodus debugging information
PetscCallA(exopts(EXVRBS+EXDEBG,ierr))
! Create the exodus file
PetscCallA(PetscViewerExodusIIOpen(PETSC_COMM_WORLD,ofilename,FILE_MODE_WRITE,viewer,ierr))
! The long way would be
!
! PetscCallA(PetscViewerCreate(PETSC_COMM_WORLD,viewer,ierr))
! PetscCallA(PetscViewerSetType(viewer,PETSCVIEWEREXODUSII,ierr))
! PetscCallA(PetscViewerFileSetMode(viewer,FILE_MODE_WRITE,ierr))
! PetscCallA(PetscViewerFileSetName(viewer,ofilename,ierr))

! set the mesh order
PetscCallA(PetscViewerExodusIISetOrder(viewer,order,ierr))
PetscCallA(PetscViewerView(viewer,PETSC_VIEWER_STDOUT_WORLD,ierr))
!
! Notice how the exodus file is actually NOT open at this point (exoid is -1).
! Since we are overwriting the file (mode is FILE_MODE_WRITE), we are going to have to
! write the geometry (the DM), which can only be done on a brand new file.
!

! Save the geometry to the file, erasing all previous content
PetscCallA(DMView(dm,viewer,ierr))
PetscCallA(PetscViewerView(viewer,PETSC_VIEWER_STDOUT_WORLD,ierr))
!
! Note how the exodus file is now open
!
137: ! "Format" the exodus result file, i.e. allocate space for nodal and zonal variables
138: select case(sdim)
139: case(2)
140: numNodalVar = 3
141: nodalVarName(1:numNodalVar) = ["U_x ","U_y ","Alpha"]
142: numZonalVar = 3
143: zonalVarName(1:numZonalVar) = ["Sigma_11","Sigma_22","Sigma_12"]
144: case(3)
145: numNodalVar = 4
146: nodalVarName(1:numNodalVar) = ["U_x ","U_y ","U_z ","Alpha"]
147: numZonalVar = 6
148: zonalVarName(1:numZonalVar) = ["Sigma_11","Sigma_22","Sigma_33","Sigma_23","Sigma_13","Sigma_12"]
149: case default
150: write(IOBuffer,'("No layout for dimension ",I2)') sdim
151: end select
PetscCallA(PetscViewerExodusIIGetId(viewer,exoid,ierr))
PetscCallA(expvp(exoid, "E", numZonalVar,ierr))
PetscCallA(expvan(exoid, "E", numZonalVar, zonalVarName,ierr))
PetscCallA(expvp(exoid, "N", numNodalVar,ierr))
PetscCallA(expvan(exoid, "N", numNodalVar, nodalVarName,ierr))
PetscCallA(exinq(exoid, EX_INQ_ELEM_BLK,numCS,PETSC_NULL_REAL,sJunk,ierr))

! An exodusII truth table specifies which fields are saved at which time step.
! It speeds up I/O by reserving space for fields in the file ahead of time.
allocate(truthtable(numCS,numZonalVar))
truthtable = .true.
PetscCallA(expvtt(exoid, numCS, numZonalVar, truthtable, ierr))
deallocate(truthtable)
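! (An entry could be set to .false. instead, e.g.
!   truthtable(1,1) = .false.
! to skip storage of the first zonal variable on the first cell set; here
! every field is saved on every set.)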

! Writing time step information in the file. Note that this is currently broken in the exodus library for netcdf4 (HDF5-based) files
do step = 1,numstep
  PetscCallA(exptim(exoid,step,real(step,kind=kPR),ierr))
end do

PetscCallA(DMSetUseNatural(dm, PETSC_TRUE, ierr))
PetscCallA(DMPlexGetPartitioner(dm,part,ierr))
PetscCallA(PetscPartitionerSetFromOptions(part,ierr))
PetscCallA(DMPlexDistribute(dm,0_kPI,migrationSF,pdm,ierr))

if (numProc > 1) then
  PetscCallA(DMPlexSetMigrationSF(pdm,migrationSF,ierr))
  PetscCallA(PetscSFDestroy(migrationSF,ierr))
else
  pdm = dm
end if
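! (The second argument of DMPlexDistribute is the overlap. For illustration
! only, one layer of ghost cells would be requested with
!   PetscCallA(DMPlexDistribute(dm,1_kPI,migrationSF,pdm,ierr))
! this example keeps the overlap at 0.)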
PetscCallA(DMViewFromOptions(pdm,PETSC_NULL_OPTIONS,"-dm_view",ierr))

PetscCallA(PetscObjectGetComm(pdm,comm,ierr))
PetscCallA(PetscSectionCreate(comm, section,ierr))
PetscCallA(PetscSectionSetNumFields(section, 3_kPI,ierr))
PetscCallA(PetscSectionSetFieldName(section, fieldU, "U",ierr))
PetscCallA(PetscSectionSetFieldName(section, fieldA, "Alpha",ierr))
PetscCallA(PetscSectionSetFieldName(section, fieldS, "Sigma",ierr))
PetscCallA(DMPlexGetChart(pdm, pStart, pEnd,ierr))
PetscCallA(PetscSectionSetChart(section, pStart, pEnd,ierr))

allocate(pStartDepth(sdim+1))
allocate(pEndDepth(sdim+1))
do d = 1, sdim+1
  PetscCallA(DMPlexGetDepthStratum(pdm, d-1, pStartDepth(d), pEndDepth(d),ierr))
end do
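! (Depth stratum d-1 is stored in slot d, so d = 1 holds the vertices and
! d = sdim+1 the cells, matching the ordering of the dof arrays above.)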

! Vector field U, Scalar field Alpha, Tensor field Sigma
PetscCallA(PetscSectionSetFieldComponents(section, fieldU, sdim,ierr))
PetscCallA(PetscSectionSetFieldComponents(section, fieldA, 1_kPI,ierr))
PetscCallA(PetscSectionSetFieldComponents(section, fieldS, sdim*(sdim+1)/2,ierr))

! Going through cell sets then cells, and setting up storage for the sections
PetscCallA(DMGetLabelSize(pdm, "Cell Sets", numCS, ierr))
PetscCallA(DMGetLabelIdIS(pdm, "Cell Sets", csIS, ierr))
PetscCallA(ISGetIndicesF90(csIS, csID, ierr))
do set = 1,numCS
  PetscCallA(DMGetStratumSize(pdm, "Cell Sets", csID(set), numCells,ierr))
  PetscCallA(DMGetStratumIS(pdm, "Cell Sets", csID(set), cellIS,ierr))
  if (numCells > 0) then
    select case(sdim)
    case(2)
      dofS => dofS2D
    case(3)
      dofS => dofS3D
    case default
      write(IOBuffer,'("No layout for dimension ",I2)') sdim
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE,IOBuffer)
    end select ! sdim

    ! Identify cell type based on closure size only. This works for Tri/Tet/Quad/Hex meshes.
    ! It will not be enough to identify more exotic elements like pyramids or prisms...
    PetscCallA(ISGetIndicesF90(cellIS, cellID,ierr))
    nullify(closureA)
    PetscCallA(DMPlexGetTransitiveClosure(pdm,cellID(1), PETSC_TRUE, closureA,ierr))
    select case(size(closureA)/2)
    case(7) ! Tri
      if (order == 1) then
        dofU => dofUP1Tri
        dofA => dofAP1Tri
      else
        dofU => dofUP2Tri
        dofA => dofAP2Tri
      end if
    case(9) ! Quad
      if (order == 1) then
        dofU => dofUP1Quad
        dofA => dofAP1Quad
      else
        dofU => dofUP2Quad
        dofA => dofAP2Quad
      end if
    case(15) ! Tet
      if (order == 1) then
        dofU => dofUP1Tet
        dofA => dofAP1Tet
      else
        dofU => dofUP2Tet
        dofA => dofAP2Tet
      end if
    case(27) ! Hex
      if (order == 1) then
        dofU => dofUP1Hex
        dofA => dofAP1Hex
      else
        dofU => dofUP2Hex
        dofA => dofAP2Hex
      end if
    case default
      write(IOBuffer,'("Unknown element with closure size ",I2)') size(closureA)/2
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,IOBuffer)
    end select
    PetscCallA(DMPlexRestoreTransitiveClosure(pdm, cellID(1), PETSC_TRUE,closureA,ierr))
    do cell = 1,numCells
      nullify(closure)
      PetscCallA(DMPlexGetTransitiveClosure(pdm, cellID(cell), PETSC_TRUE, closure,ierr))
      do p = 1,size(closure),2
        ! find the depth of p
        do d = 1,sdim+1
          if ((closure(p) >= pStartDepth(d)) .and. (closure(p) < pEndDepth(d))) then
            PetscCallA(PetscSectionSetDof(section, closure(p), dofU(d)+dofA(d)+dofS(d),ierr))
            PetscCallA(PetscSectionSetFieldDof(section, closure(p), fieldU, dofU(d),ierr))
            PetscCallA(PetscSectionSetFieldDof(section, closure(p), fieldA, dofA(d),ierr))
            PetscCallA(PetscSectionSetFieldDof(section, closure(p), fieldS, dofS(d),ierr))
          end if ! closure(p)
        end do ! d
      end do ! p
      PetscCallA(DMPlexRestoreTransitiveClosure(pdm, cellID(cell), PETSC_TRUE, closure,ierr))
    end do ! cell
    PetscCallA(ISRestoreIndicesF90(cellIS, cellID,ierr))
    PetscCallA(ISDestroy(cellIS,ierr))
  end if ! numCells
end do ! set
PetscCallA(ISRestoreIndicesF90(csIS, csID,ierr))
PetscCallA(ISDestroy(csIS,ierr))
PetscCallA(PetscSectionSetUp(section,ierr))
PetscCallA(DMSetLocalSection(pdm, section,ierr))
PetscCallA(PetscObjectViewFromOptions(section, PETSC_NULL_SECTION, "-dm_section_view",ierr))
PetscCallA(PetscSectionDestroy(section,ierr))

! Create the section on the sequential DM and the GlobalToNatural SF
if (numProc > 1) then
  PetscCallA(DMPlexGetMigrationSF(pdm, natPointSF, ierr))
  PetscCallA(DMGetLocalSection(pdm, rootSection, ierr))
  PetscCallA(PetscSFCreateInverseSF(natPointSF, natPointSFInv, ierr))
  PetscCallA(PetscSectionCreate(PETSC_COMM_WORLD, leafSection, ierr))
  PetscCallA(PetscSFDistributeSection(natPointSFInv, rootSection, PETSC_NULL_INTEGER, leafSection, ierr))
  PetscCallA(DMSetLocalSection(dm, leafSection, ierr))
  PetscCallA(DMPlexCreateGlobalToNaturalSF(pdm, leafSection, natPointSF, natSF, ierr))
  PetscCallA(PetscSFDestroy(natPointSFInv, ierr))
  PetscCallA(PetscSectionDestroy(leafSection,ierr))
  PetscCallA(DMSetNaturalSF(pdm, natSF, ierr))
  PetscCallA(PetscObjectDereference(natSF, ierr))
end if

! Get DM and IS for each field of pdm
PetscCallA(DMCreateSubDM(pdm, 1_kPI, fieldU, isU, dmU,ierr))
PetscCallA(DMCreateSubDM(pdm, 1_kPI, fieldA, isA, dmA,ierr))
PetscCallA(DMCreateSubDM(pdm, 1_kPI, fieldS, isS, dmS,ierr))
PetscCallA(DMCreateSubDM(pdm, 2_kPI, fieldUA, isUA, dmUA,ierr))

! Create the super DM combining the U and Alpha sub-DMs
allocate(dmList(2))
dmList(1) = dmU
dmList(2) = dmA
PetscCallA(DMCreateSuperDM(dmList,2_kPI,PETSC_NULL_IS,dmUA2,ierr))
deallocate(dmList)

PetscCallA(DMGetGlobalVector(pdm, X,ierr))
PetscCallA(DMGetGlobalVector(dmU, U,ierr))
PetscCallA(DMGetGlobalVector(dmA, A,ierr))
PetscCallA(DMGetGlobalVector(dmS, S,ierr))
PetscCallA(DMGetGlobalVector(dmUA, UA,ierr))
PetscCallA(DMGetGlobalVector(dmUA2, UA2,ierr))

PetscCallA(PetscObjectSetName(U, "U",ierr))
PetscCallA(PetscObjectSetName(A, "Alpha",ierr))
PetscCallA(PetscObjectSetName(S, "Sigma",ierr))
PetscCallA(PetscObjectSetName(UA, "UAlpha",ierr))
PetscCallA(PetscObjectSetName(UA2, "UAlpha2",ierr))
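! (The exodus viewer matches a Vec to variables in the file through its
! object name, one component at a time: a 2-component "U" is written to the
! nodal variables "U_x" and "U_y" declared above.)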
PetscCallA(VecSet(X, -111.0_kPR,ierr))

! Set U to [x,y,z] and Alpha to x^2+y^2+z^2 by writing into UAlpha, then restricting to U and Alpha
PetscCallA(DMGetLocalSection(dmUA, sectionUA,ierr))
PetscCallA(DMGetLocalVector(dmUA, UALoc,ierr))
PetscCallA(VecGetArrayF90(UALoc, cval,ierr))
PetscCallA(DMGetCoordinateSection(dmUA, coordSection,ierr))
PetscCallA(DMGetCoordinatesLocal(dmUA, coord,ierr))
PetscCallA(DMPlexGetChart(dmUA, pStart, pEnd,ierr))

do p = pStart,pEnd-1
  PetscCallA(PetscSectionGetDof(sectionUA, p, dofUA,ierr))
  if (dofUA > 0) then
    PetscCallA(PetscSectionGetOffset(sectionUA, p, offUA,ierr))
    PetscCallA(DMPlexVecGetClosure(dmUA, coordSection, coord, p, xyz,ierr))
    closureSize = size(xyz)
    do i = 1,sdim
      ! average the i-th coordinate over the vertices in the closure of p
      do j = 0, closureSize-1,sdim
        cval(offUA+i) = cval(offUA+i) + xyz(j+i)
      end do
      cval(offUA+i) = cval(offUA+i) * sdim / closureSize
      cval(offUA+sdim+1) = cval(offUA+sdim+1) + cval(offUA+i)**2
    end do
    PetscCallA(DMPlexVecRestoreClosure(dmUA, coordSection, coord, p, xyz,ierr))
  end if
end do

PetscCallA(VecRestoreArrayF90(UALoc, cval,ierr))
PetscCallA(DMLocalToGlobalBegin(dmUA, UALoc, INSERT_VALUES, UA,ierr))
PetscCallA(DMLocalToGlobalEnd(dmUA, UALoc, INSERT_VALUES, UA,ierr))
PetscCallA(DMRestoreLocalVector(dmUA, UALoc,ierr))

! Update X
PetscCallA(VecISCopy(X, isUA, SCATTER_FORWARD, UA,ierr))
! Restrict to U and Alpha
PetscCallA(VecISCopy(X, isU, SCATTER_REVERSE, U,ierr))
PetscCallA(VecISCopy(X, isA, SCATTER_REVERSE, A,ierr))
PetscCallA(VecViewFromOptions(UA, PETSC_NULL_OPTIONS, "-ua_vec_view",ierr))
PetscCallA(VecViewFromOptions(U, PETSC_NULL_OPTIONS, "-u_vec_view",ierr))
PetscCallA(VecViewFromOptions(A, PETSC_NULL_OPTIONS, "-a_vec_view",ierr))
! Restrict to UA2
PetscCallA(VecISCopy(X, isUA, SCATTER_REVERSE, UA2,ierr))
PetscCallA(VecViewFromOptions(UA2, PETSC_NULL_OPTIONS, "-ua2_vec_view",ierr))
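! (With VecISCopy, SCATTER_FORWARD copies the sub-vector into the entries of
! X selected by the IS, while SCATTER_REVERSE extracts those entries of X
! into the sub-vector.)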

! Write U and Alpha at step 0; with DMSetUseNatural set, the viewer converts
! the vectors back to natural ordering before writing
PetscCallA(DMSetOutputSequenceNumber(dmU, 0_kPI, time, ierr))
PetscCallA(DMSetOutputSequenceNumber(dmA, 0_kPI, time, ierr))

PetscCallA(VecView(U, viewer,ierr))
PetscCallA(VecView(A, viewer,ierr))

! Saving U and Alpha in one shot.
! For this, we need to cheat and change the Vec's name.
! Note that in the end we write variables one component at a time,
! so there is no real value in doing this.
PetscCallA(DMSetOutputSequenceNumber(dmUA,1_kPI,time,ierr))
PetscCallA(DMGetGlobalVector(dmUA, tmpVec,ierr))
PetscCallA(VecCopy(UA, tmpVec,ierr))
PetscCallA(PetscObjectSetName(tmpVec, "U",ierr))
PetscCallA(VecView(tmpVec, viewer,ierr))

! Read the nodal variables back from the Exodus file and compare
PetscCallA(VecSet(tmpVec, -1000.0_kPR,ierr))
PetscCallA(VecLoad(tmpVec, viewer,ierr))
PetscCallA(VecAXPY(UA, -1.0_kPR, tmpVec,ierr))
PetscCallA(VecNorm(UA, NORM_INFINITY, norm,ierr))
if (norm > PETSC_SQRT_MACHINE_EPSILON) then
  write(IOBuffer,'("UAlpha ||Vin - Vout|| = ",ES12.5)') norm
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,IOBuffer)
end if
PetscCallA(DMRestoreGlobalVector(dmUA, tmpVec,ierr))

! Same thing with the UA2 Vec obtained from the super DM
PetscCallA(DMGetGlobalVector(dmUA2, tmpVec,ierr))
PetscCallA(VecCopy(UA2, tmpVec,ierr))
PetscCallA(PetscObjectSetName(tmpVec, "U",ierr))
PetscCallA(DMSetOutputSequenceNumber(dmUA2,2_kPI,time,ierr))
PetscCallA(VecView(tmpVec, viewer,ierr))

! Read the nodal variables back again
PetscCallA(VecSet(tmpVec, -1000.0_kPR,ierr))
PetscCallA(VecLoad(tmpVec,viewer,ierr))
PetscCallA(VecAXPY(UA2, -1.0_kPR, tmpVec,ierr))
PetscCallA(VecNorm(UA2, NORM_INFINITY, norm,ierr))
if (norm > PETSC_SQRT_MACHINE_EPSILON) then
  write(IOBuffer,'("UAlpha2 ||Vin - Vout|| = ",ES12.5)') norm
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,IOBuffer)
end if
PetscCallA(DMRestoreGlobalVector(dmUA2, tmpVec,ierr))

! Building and saving Sigma. We set
!   sigma_0 = rank (to see the partitioning)
!   sigma_1 = cell set ID
!   sigma_2 = x coordinate of the cell center of mass
PetscCallA(DMGetCoordinateSection(dmS, coordSection,ierr))
PetscCallA(DMGetCoordinatesLocal(dmS, coord,ierr))
PetscCallA(DMGetLabelIdIS(dmS, "Cell Sets", csIS,ierr))
PetscCallA(DMGetLabelSize(dmS, "Cell Sets",numCS,ierr))
PetscCallA(ISGetIndicesF90(csIS, csID,ierr))

do set = 1, numCS
  PetscCallA(DMGetStratumIS(dmS, "Cell Sets", csID(set), cellIS,ierr))
  PetscCallA(ISGetIndicesF90(cellIS, cellID,ierr))
  PetscCallA(ISGetSize(cellIS, numCells,ierr))
  do cell = 1,numCells
    PetscCallA(DMPlexVecGetClosure(dmS, PETSC_NULL_SECTION, S, cellID(cell), cval,ierr))
    PetscCallA(DMPlexVecGetClosure(dmS, coordSection, coord, cellID(cell), xyz,ierr))
    cval(1) = rank
    cval(2) = csID(set)
    cval(3) = 0.0_kPR
    do c = 1, size(xyz),sdim
      cval(3) = cval(3) + xyz(c)
    end do
    cval(3) = cval(3) * sdim / size(xyz)
    PetscCallA(DMPlexVecSetClosure(dmS, PETSC_NULL_SECTION, S, cellID(cell), cval, INSERT_ALL_VALUES,ierr))
    PetscCallA(DMPlexVecRestoreClosure(dmS, PETSC_NULL_SECTION, S, cellID(cell), cval,ierr))
    PetscCallA(DMPlexVecRestoreClosure(dmS, coordSection, coord, cellID(cell), xyz,ierr))
  end do
  PetscCallA(ISRestoreIndicesF90(cellIS, cellID,ierr))
  PetscCallA(ISDestroy(cellIS,ierr))
end do
PetscCallA(ISRestoreIndicesF90(csIS, csID,ierr))
PetscCallA(ISDestroy(csIS,ierr))
PetscCallA(VecViewFromOptions(S, PETSC_NULL_OPTIONS, "-s_vec_view",ierr))

! Writing zonal variables in Exodus file
PetscCallA(DMSetOutputSequenceNumber(dmS,0_kPI,time,ierr))
PetscCallA(VecView(S,viewer,ierr))

! Reading zonal variables back from the Exodus file and comparing
PetscCallA(DMGetGlobalVector(dmS, tmpVec,ierr))
PetscCallA(VecSet(tmpVec, -1000.0_kPR,ierr))
PetscCallA(PetscObjectSetName(tmpVec, "Sigma",ierr))
PetscCallA(VecLoad(tmpVec,viewer,ierr))
PetscCallA(VecAXPY(S, -1.0_kPR, tmpVec,ierr))
PetscCallA(VecNorm(S, NORM_INFINITY,norm,ierr))
if (norm > PETSC_SQRT_MACHINE_EPSILON) then
  write(IOBuffer,'("Sigma ||Vin - Vout|| = ",ES12.5)') norm
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,IOBuffer)
end if
PetscCallA(DMRestoreGlobalVector(dmS, tmpVec,ierr))

PetscCallA(DMRestoreGlobalVector(dmUA2, UA2,ierr))
PetscCallA(DMRestoreGlobalVector(dmUA, UA,ierr))
PetscCallA(DMRestoreGlobalVector(dmS, S,ierr))
PetscCallA(DMRestoreGlobalVector(dmA, A,ierr))
PetscCallA(DMRestoreGlobalVector(dmU, U,ierr))
PetscCallA(DMRestoreGlobalVector(pdm, X,ierr))
PetscCallA(DMDestroy(dmU,ierr))
PetscCallA(ISDestroy(isU,ierr))
PetscCallA(DMDestroy(dmA,ierr))
PetscCallA(ISDestroy(isA,ierr))
PetscCallA(DMDestroy(dmS,ierr))
PetscCallA(ISDestroy(isS,ierr))
PetscCallA(DMDestroy(dmUA,ierr))
PetscCallA(ISDestroy(isUA,ierr))
PetscCallA(DMDestroy(dmUA2,ierr))
PetscCallA(DMDestroy(pdm,ierr))
if (numProc > 1) then
  PetscCallA(DMDestroy(dm,ierr))
end if

deallocate(pStartDepth)
deallocate(pEndDepth)

PetscCallA(PetscViewerDestroy(viewer,ierr))
PetscCallA(PetscFinalize(ierr))
end program ex62f90

! /*TEST
!
!   build:
!     requires: exodusii pnetcdf !complex
!
!   # 2D seq
!   test:
!     suffix: 0
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 1
!
!   # TODO: bug: call to NetCDF failed to complete invalid type definition in file id 65536; NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 1
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 1
!
!   test:
!     suffix: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 1
!
!   # TODO: bug: call to NetCDF failed to complete invalid type definition in file id 65536; NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 3
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 4
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 5
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 2
!
!   # 2D par
!   test:
!     suffix: 6
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 1
!
!   # TODO: bug: call to NetCDF failed to complete invalid type definition in file id 65536; NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 7
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 1
!   test:
!     suffix: 8
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 1
!
!   # TODO: bug: call to NetCDF failed to complete invalid type definition in file id 65536; NetCDF: invalid dimension ID or name
!   test:
!     suffix: 9
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 10
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 2
!   test:
!     # Something is now broken with parallel read/write for whatever shape H is
!     TODO: broken
!     suffix: 11
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 2
!
!   # 3D seq
!   test:
!     suffix: 12
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 1
!   test:
!     suffix: 13
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 1
!
!   # TODO: bug: call to NetCDF failed to complete invalid type definition in file id 65536; NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 14
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 15
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 2
!
!   # TODO: bug: call to NetCDF failed to complete invalid type definition in file id 65536; NetCDF: One or more variable sizes violate format constraints
!   # 3D par
!   test:
!     suffix: 16
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 1
!   test:
!     suffix: 17
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 1
!
!   # TODO: bug: call to NetCDF failed to complete invalid type definition in file id 65536; NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 18
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 19
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 2
!
!   # TODO: bug: call to NetCDF failed to complete invalid type definition in file id 65536; NetCDF: One or more variable sizes violate format constraints
!
! TEST*/