Actual source code: ex47.c

static char help[] = "The main goal of this code is to retrieve the original element numbers, as listed in the "
                     "initial partition (sInitialPartition2x5Mesh), after the call to DMPlexDistribute.";

#include <petsc.h>

// Vertex coordinates of the 2x5 mesh (for reference only; this example builds
// topology and never sets coordinates):
PetscReal sCoords2x5Mesh[18][2] = {
  {0.00000000000000000e+00, 0.00000000000000000e+00},
  {2.00000000000000000e+00, 0.00000000000000000e+00},
  {0.00000000000000000e+00, 1.00000000000000000e+00},
  {2.00000000000000000e+00, 1.00000000000000000e+00},
  {9.99999999997387978e-01, 0.00000000000000000e+00},
  {9.99999999997387978e-01, 1.00000000000000000e+00},
  {0.00000000000000000e+00, 2.00000000000000011e-01},
  {0.00000000000000000e+00, 4.00000000000000022e-01},
  {0.00000000000000000e+00, 5.99999999999999978e-01},
  {0.00000000000000000e+00, 8.00000000000000044e-01},
  {2.00000000000000000e+00, 2.00000000000000011e-01},
  {2.00000000000000000e+00, 4.00000000000000022e-01},
  {2.00000000000000000e+00, 5.99999999999999978e-01},
  {2.00000000000000000e+00, 8.00000000000000044e-01},
  {9.99999999997387756e-01, 2.00000000000000011e-01},
  {9.99999999997387978e-01, 4.00000000000000022e-01},
  {9.99999999997387978e-01, 6.00000000000000089e-01},
  {9.99999999997388089e-01, 8.00000000000000044e-01}
};

// Connectivity of a 2x5 rectangular mesh of quads:
const PetscInt sConnectivity2x5Mesh[10][4] = {
  {0,  4,  14, 6 },
  {6,  14, 15, 7 },
  {7,  15, 16, 8 },
  {8,  16, 17, 9 },
  {9,  17, 5,  2 },
  {4,  1,  10, 14},
  {14, 10, 11, 15},
  {15, 11, 12, 16},
  {16, 12, 13, 17},
  {17, 13, 3,  5 }
};
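
// The ten cells tile the rectangle [0,2] x [0,1] as two columns of five
// (cells 0-4 on the left, 5-9 on the right, both numbered bottom to top),
// as can be read off the coordinates and connectivity above:
//
//   y=1.0 +-----+-----+
//         |  4  |  9  |
//   y=0.8 +-----+-----+
//         |  3  |  8  |
//   y=0.6 +-----+-----+
//         |  2  |  7  |
//   y=0.4 +-----+-----+
//         |  1  |  6  |
//   y=0.2 +-----+-----+
//         |  0  |  5  |
//   y=0.0 +-----+-----+
//        x=0   x=1   x=2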

// Initial element partition: rank 0 owns the even-numbered cells, rank 1 the odd-numbered ones
const PetscInt sInitialPartition2x5Mesh[2][5] = {
  {0, 2, 4, 6, 8},
  {1, 3, 5, 7, 9}
};

const PetscInt sNLocalCells2x5Mesh = 5;
const PetscInt sNGlobVerts2x5Mesh  = 18;

int main(int argc, char **argv)
{
  const PetscInt  Nc                 = sNLocalCells2x5Mesh; // Same on each rank for this example
  const PetscInt  Nv                 = sNGlobVerts2x5Mesh;
  const PetscInt *InitPartForRank[2] = {&sInitialPartition2x5Mesh[0][0], &sInitialPartition2x5Mesh[1][0]};
  const PetscInt(*Conn)[4]           = sConnectivity2x5Mesh;

  const PetscInt   Ncor = 4;
  const PetscInt   dim  = 2;
  DM               dm, idm, ddm;
  PetscSF          sfVert, sfMig, sfPart;
  PetscPartitioner part;
  PetscSection     s;
  PetscInt        *cells, c;
  PetscMPIInt      size, rank;
  PetscBool        box = PETSC_FALSE, field = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_SUP, "This example must be run with exactly 2 MPI processes");
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-box", &box, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-field", &field, NULL));

  PetscCall(DMPlexCreate(PETSC_COMM_WORLD, &dm));
  if (box) {
    PetscCall(DMSetType(dm, DMPLEX));
    PetscCall(DMSetFromOptions(dm));
  } else {
    // Build the local cell list from the prescribed initial partition
    PetscCall(PetscMalloc1(Nc * Ncor, &cells));
    for (c = 0; c < Nc; ++c) {
      PetscInt cell = (InitPartForRank[rank])[c], cor;

      for (cor = 0; cor < Ncor; ++cor) cells[c * Ncor + cor] = Conn[cell][cor];
    }
    PetscCall(DMSetDimension(dm, dim));
    PetscCall(DMPlexBuildFromCellListParallel(dm, Nc, PETSC_DECIDE, Nv, Ncor, cells, &sfVert, NULL));
    PetscCall(PetscSFDestroy(&sfVert));
    PetscCall(PetscFree(cells));
    // Interpolate to create the edges of the cell-vertex mesh
    PetscCall(DMPlexInterpolate(dm, &idm));
    PetscCall(DMDestroy(&dm));
    dm = idm;
  }
  // DMSetUseNatural() must be called before DMPlexDistribute() so that the
  // natural (original) ordering is recorded during distribution
  PetscCall(DMSetUseNatural(dm, PETSC_TRUE));
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

  if (field) {
    const PetscInt Nf         = 1;
    const PetscInt numComp[1] = {1};
    const PetscInt numDof[3]  = {0, 0, 1}; // one dof per cell (numDof is indexed by point depth)
    const PetscInt numBC      = 0;

    PetscCall(DMSetNumFields(dm, Nf));
    PetscCall(DMPlexCreateSection(dm, NULL, numComp, numDof, numBC, NULL, NULL, NULL, NULL, &s));
    PetscCall(DMSetLocalSection(dm, s));
    PetscCall(PetscSectionView(s, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSectionDestroy(&s));
  }

  PetscCall(DMPlexGetPartitioner(dm, &part));
  PetscCall(PetscPartitionerSetFromOptions(part));

  PetscCall(DMPlexDistribute(dm, 0, &sfMig, &ddm));
  PetscCall(PetscSFView(sfMig, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(PetscSFCreateInverseSF(sfMig, &sfPart));
  PetscCall(PetscObjectSetName((PetscObject)sfPart, "Inverse Migration SF"));
  PetscCall(PetscSFView(sfPart, PETSC_VIEWER_STDOUT_WORLD));
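
  /* Cross-check (a sketch, not part of the original example): sfMig maps the
     points of the original dm (roots) to the points of ddm (leaves), so
     broadcasting the initial element numbers over it is another way to carry
     them onto the distributed mesh. This assumes the cells of dm are its
     height-0 points, in the order the cell list was given to
     DMPlexBuildFromCellListParallel(). */
  if (!box) {
    PetscInt  pStart, pEnd, dpStart, dpEnd, cStart, cEnd, p;
    PetscInt *orig, *dist;

    PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
    PetscCall(DMPlexGetChart(ddm, &dpStart, &dpEnd));
    PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
    PetscCall(PetscMalloc2(pEnd - pStart, &orig, dpEnd - dpStart, &dist));
    // Original element number on cell points, -1 on every other mesh point
    for (p = pStart; p < pEnd; ++p) orig[p - pStart] = (p >= cStart && p < cEnd) ? (InitPartForRank[rank])[p - cStart] : -1;
    PetscCall(PetscSFBcastBegin(sfMig, MPIU_INT, orig, dist, MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sfMig, MPIU_INT, orig, dist, MPI_REPLACE));
    // dist[p - dpStart] now holds, for each cell point p of ddm, its original element number
    PetscCall(PetscFree2(orig, dist));
  }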

  Vec          lGlobalVec, lNatVec;
  PetscScalar *lNatVecArray;

  {
    PetscSection gs;

    PetscCall(DMGetGlobalSection(dm, &gs));
    PetscCall(PetscSectionView(gs, PETSC_VIEWER_STDOUT_WORLD));
  }
  // A global vector of the original dm serves as the "natural" vector
  PetscCall(DMGetGlobalVector(dm, &lNatVec));
  PetscCall(PetscObjectSetName((PetscObject)lNatVec, "Natural Vector (initial partition)"));

  // Copy the initial element numbers into the "natural" vector:
  PetscCall(VecGetArray(lNatVec, &lNatVecArray));
  for (c = 0; c < Nc; ++c) lNatVecArray[c] = (InitPartForRank[rank])[c];
  PetscCall(VecRestoreArray(lNatVec, &lNatVecArray));

  PetscCall(DMGetGlobalVector(ddm, &lGlobalVec));
  PetscCall(PetscObjectSetName((PetscObject)lGlobalVec, "Global Vector (reordered element numbers in the PETSc distributed order)"));
  PetscCall(VecZeroEntries(lGlobalVec));

  // The call to DMPlexNaturalToGlobalBegin/End does not produce the expected result.
  // In lGlobalVec, we expect to have:
  /*
   * Process [0]
   * 2.
   * 4.
   * 8.
   * 3.
   * 9.
   * Process [1]
   * 1.
   * 5.
   * 7.
   * 0.
   * 6.
   *
   * but we obtain:
   *
   * Process [0]
   * 2.
   * 4.
   * 8.
   * 0.
   * 0.
   * Process [1]
   * 0.
   * 0.
   * 0.
   * 0.
   * 0.
   */

  {
    PetscSF nsf;

    PetscCall(DMPlexGetGlobalToNaturalSF(ddm, &nsf));
    PetscCall(PetscSFView(nsf, NULL));
  }
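
  /* For reference (a sketch, not part of the original example): when
     DMSetUseNatural() is set, DMPlexDistribute() builds the global-to-natural
     SF internally; it could also be constructed explicitly from the original
     local section and the migration SF. */
  if (field) {
    PetscSF      sfNat;
    PetscSection ls;

    PetscCall(DMGetLocalSection(dm, &ls));
    PetscCall(DMPlexCreateGlobalToNaturalSF(ddm, ls, sfMig, &sfNat));
    PetscCall(PetscSFDestroy(&sfNat));
  }
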
  PetscCall(DMPlexNaturalToGlobalBegin(ddm, lNatVec, lGlobalVec));
  PetscCall(DMPlexNaturalToGlobalEnd(ddm, lNatVec, lGlobalVec));

  PetscCall(VecView(lNatVec, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecView(lGlobalVec, PETSC_VIEWER_STDOUT_WORLD));
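
  /* Round-trip check (a sketch, not part of the original example): mapping
     lGlobalVec back with DMPlexGlobalToNaturalBegin/End should reproduce
     lNatVec once NaturalToGlobal behaves as expected. */
  {
    Vec lBackVec;

    PetscCall(DMGetGlobalVector(dm, &lBackVec));
    PetscCall(DMPlexGlobalToNaturalBegin(ddm, lGlobalVec, lBackVec));
    PetscCall(DMPlexGlobalToNaturalEnd(ddm, lGlobalVec, lBackVec));
    PetscCall(DMRestoreGlobalVector(dm, &lBackVec));
  }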

  PetscCall(DMRestoreGlobalVector(dm, &lNatVec));
  PetscCall(DMRestoreGlobalVector(ddm, &lGlobalVec));

  PetscCall(PetscSFDestroy(&sfMig));
  PetscCall(PetscSFDestroy(&sfPart));
  PetscCall(DMDestroy(&dm));
  PetscCall(DMDestroy(&ddm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  testset:
    args: -field -petscpartitioner_type simple
    nsize: 2

    test:
      suffix: 0
      args:

    test:
      suffix: 1
      args: -box -dm_plex_simplex 0 -dm_plex_box_faces 2,5 -dm_distribute

TEST*/