Actual source code: networkview.c

#include <petscconf.h>
// This must be defined ahead of any other includes to make sure mkstemp is actually declared
#if defined(PETSC_HAVE_MKSTEMP)
  #define _XOPEN_SOURCE 600
#endif
#include <petsc/private/dmnetworkimpl.h>
#include <petscdraw.h>

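// Write the local portion of the network in a simple CSV format: a header row followed by
// one "Node" line per vertex (global index and coordinates) and one "Edge" line per edge
// (global index and the global indices of its two endpoint vertices)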
static PetscErrorCode DMView_Network_CSV(DM dm, PetscViewer viewer)
{
  DM              dmcoords;
  PetscInt        nsubnets, i, subnet, nvertices, nedges, vertex, edge;
  PetscInt        vertexOffsets[2], globalEdgeVertices[2];
  PetscScalar     vertexCoords[2];
  const PetscInt *vertices, *edges, *edgeVertices;
  Vec             allVertexCoords;
  PetscMPIInt     rank;
  MPI_Comm        comm;

  PetscFunctionBegin;
  // Get the network containing coordinate information
  PetscCall(DMGetCoordinateDM(dm, &dmcoords));
  // Get the coordinate vector for the network
  PetscCall(DMGetCoordinatesLocal(dm, &allVertexCoords));
  // Get the MPI communicator and this process's rank
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  // Start synchronized printing
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));

  // Write the header
  PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Type,Rank,ID,X,Y,Z,Name,Color\n"));

  // Iterate over each subnetwork (Note: the global number of subnetworks is needed here)
  PetscCall(DMNetworkGetNumSubNetworks(dm, NULL, &nsubnets));
  for (subnet = 0; subnet < nsubnets; subnet++) {
    // Get the subnetwork's vertices and edges
    PetscCall(DMNetworkGetSubnetwork(dm, subnet, &nvertices, &nedges, &vertices, &edges));

    // Write out each vertex
    for (i = 0; i < nvertices; i++) {
      vertex = vertices[i];
      // Get the offset into the coordinate vector for the vertex
      PetscCall(DMNetworkGetLocalVecOffset(dmcoords, vertex, ALL_COMPONENTS, vertexOffsets));
      vertexOffsets[1] = vertexOffsets[0] + 1;
      // Remap vertex to the global value
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, vertex, &vertex));
      // Get the vertex position from the coordinate vector
      PetscCall(VecGetValues(allVertexCoords, 2, vertexOffsets, vertexCoords));

      // TODO: Determine vertex color/name
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Node,%" PetscInt_FMT ",%" PetscInt_FMT ",%lf,%lf,0,%" PetscInt_FMT "\n", (PetscInt)rank, vertex, (double)PetscRealPart(vertexCoords[0]), (double)PetscRealPart(vertexCoords[1]), vertex));
    }

    // Write out each edge
    for (i = 0; i < nedges; i++) {
      edge = edges[i];
      PetscCall(DMNetworkGetConnectedVertices(dm, edge, &edgeVertices));
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, edgeVertices[0], &globalEdgeVertices[0]));
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, edgeVertices[1], &globalEdgeVertices[1]));
      PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edge, &edge));

      // TODO: Determine edge color/name
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Edge,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",0,%" PetscInt_FMT "\n", (PetscInt)rank, edge, globalEdgeVertices[0], globalEdgeVertices[1], edge));
    }
  }
  // End synchronized printing
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

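// Write the network to a temporary CSV file and run ${PETSC_DIR}/share/petsc/bin/dmnetwork_view.py
// on it to produce a matplotlib plot; the temporary file is removed afterwards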
static PetscErrorCode DMView_Network_Matplotlib(DM dm, PetscViewer viewer)
{
  PetscMPIInt rank, size;
  MPI_Comm    comm;
  char        filename[PETSC_MAX_PATH_LEN + 1], proccall[PETSC_MAX_PATH_LEN + 500], scriptFile[PETSC_MAX_PATH_LEN + 1], streamBuffer[256];
  PetscViewer csvViewer;
  FILE       *processFile = NULL;
  PetscBool   isnull;
  PetscDraw   draw;
#if defined(PETSC_HAVE_MKSTEMP)
  PetscBool isSharedTmp;
#endif

  PetscFunctionBegin;
  // Get the PetscDraw from the viewer, record whether it is a null draw, and keep it invisible
  PetscCall(PetscViewerDrawGetDraw(viewer, 1, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  PetscCall(PetscDrawSetVisible(draw, PETSC_FALSE));

  // Clear the file name buffer so all communicated bytes are well-defined
  PetscCall(PetscMemzero(filename, sizeof(filename)));

  // Get the MPI communicator, this process's rank, and the communicator size
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));

#if defined(PETSC_HAVE_MKSTEMP)
  // Determine whether the temporary directory is shared between ranks
  // Note: This must be done collectively on every rank; it cannot be done on a single rank
  PetscCall(PetscSharedTmp(comm, &isSharedTmp));
#endif

  // Generate the temporary file name on rank 0 (it is broadcast to the other ranks below)
  if (rank == 0) {
#if defined(PETSC_HAVE_TMPNAM_S)
    // Acquire a temporary file name to write the CSV output to
    PetscCheck(tmpnam_s(filename, sizeof(filename)) == 0, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#elif defined(PETSC_HAVE_MKSTEMP)
    PetscBool isTmpOverridden;
    size_t    numChars;
    // Same thing, but for POSIX systems on which tmpnam is deprecated
    // Note: Configure may detect mkstemp, but it will not be declared when compiling for plain C99, so check additional defines to see if we can use it
    // mkstemp requires us to specify part of the path explicitly, and some systems may not like putting files in /tmp/, so provide an option to override the directory
    PetscCall(PetscOptionsGetString(NULL, NULL, "-dmnetwork_view_tmpdir", filename, sizeof(filename), &isTmpOverridden));
    // If the directory is not specified by the option, try using a shared tmp on the system
    if (!isTmpOverridden) {
      // Validate that if tmp is not overridden it is at least shared
      PetscCheck(isSharedTmp, comm, PETSC_ERR_SUP_SYS, "Temporary file directory is not shared between ranks, try using -dmnetwork_view_tmpdir to specify a shared directory");
      PetscCall(PetscGetTmp(PETSC_COMM_SELF, filename, sizeof(filename)));
    }
    // Make sure the directory name ends with a '/'
    PetscCall(PetscStrlen(filename, &numChars));
    if (filename[numChars - 1] != '/') {
      filename[numChars]     = '/';
      filename[numChars + 1] = 0;
    }
    // Perform the actual temporary file creation
    PetscCall(PetscStrlcat(filename, "XXXXXX", sizeof(filename)));
    PetscCheck(mkstemp(filename) != -1, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#else
    // Same thing, but for older C versions which don't have the safe form
    PetscCheck(tmpnam(filename) != NULL, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#endif
  }

  // Broadcast the filename to all other MPI ranks
  PetscCallMPI(MPI_Bcast(filename, PETSC_MAX_PATH_LEN, MPI_BYTE, 0, comm));

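  // Open an ASCII viewer on the temporary file and select the CSV output format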
  PetscCall(PetscViewerASCIIOpen(comm, filename, &csvViewer));
  PetscCall(PetscViewerPushFormat(csvViewer, PETSC_VIEWER_ASCII_CSV));

  // Use the CSV viewer to write out the local network
  PetscCall(DMView_Network_CSV(dm, csvViewer));

  // Close the viewer
  PetscCall(PetscViewerDestroy(&csvViewer));

  // Expand ${PETSC_DIR} to get the full path of the dmnetwork_view.py script
  PetscCall(PetscStrreplace(comm, "${PETSC_DIR}/share/petsc/bin/dmnetwork_view.py", scriptFile, sizeof(scriptFile)));
  PetscCall(PetscFixFilename(scriptFile, scriptFile));
  // Build the command to run dmnetwork_view.py on the CSV file (with -tx when the draw is a null draw)
  PetscCall(PetscArrayzero(proccall, sizeof(proccall)));
  PetscCall(PetscSNPrintf(proccall, sizeof(proccall), "%s %s %s %s", PETSC_PYTHON_EXE, scriptFile, (isnull ? "-tx" : ""), filename));

#if defined(PETSC_HAVE_POPEN)
  // Perform the call to run the Python script (Note: while this is called on all ranks, PetscPOpen only runs the command on rank 0)
  PetscCall(PetscPOpen(comm, NULL, proccall, "r", &processFile));
  if (processFile != NULL) {
    while (fgets(streamBuffer, sizeof(streamBuffer), processFile) != NULL) PetscCall(PetscPrintf(comm, "%s", streamBuffer));
  }
  PetscCall(PetscPClose(comm, processFile));
#else
  // Same thing, but using the standard library for systems that don't have POpen/PClose (only run on rank 0)
  if (rank == 0) PetscCheck(system(proccall) == 0, PETSC_COMM_SELF, PETSC_ERR_SYS, "Failed to call viewer script");
  // Barrier so that all ranks wait until the call completes
  PetscCallMPI(MPI_Barrier(comm));
#endif
  // Clean up the temporary file we used (rank 0 only)
  if (rank == 0) PetscCheck(remove(filename) == 0, PETSC_COMM_SELF, PETSC_ERR_SYS, "Failed to delete temporary file");
  PetscFunctionReturn(PETSC_SUCCESS);
}

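// Viewer dispatch for DMNetwork: draw viewers are plotted through the matplotlib script,
// ASCII viewers produce either CSV output or a human-readable summary depending on the format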
PetscErrorCode DMView_Network(DM dm, PetscViewer viewer)
{
  PetscBool         iascii, isdraw;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscViewerGetFormat(viewer, &format));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isdraw) {
    PetscCall(DMView_Network_Matplotlib(dm, viewer));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    const PetscInt *cone, *vtx, *edges;
    PetscInt        vfrom, vto, i, j, nv, ne, nsv, p, nsubnet;
    DM_Network     *network = (DM_Network *)dm->data;
    PetscMPIInt     rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
    if (format == PETSC_VIEWER_ASCII_CSV) {
      PetscCall(DMView_Network_CSV(dm, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

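    /* Print the global counts from rank 0 only */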
    nsubnet = network->cloneshared->Nsubnet; /* num of subnetworks */
    if (!rank) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "  NSubnets: %" PetscInt_FMT "; NEdges: %" PetscInt_FMT "; NVertices: %" PetscInt_FMT "; NSharedVertices: %" PetscInt_FMT ".\n", nsubnet, network->cloneshared->NEdges, network->cloneshared->NVertices,
                            network->cloneshared->Nsvtx));
    }

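    /* Print the local (per-rank) counts using synchronized output */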
    PetscCall(DMNetworkGetSharedVertices(dm, &nsv, NULL));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] nEdges: %" PetscInt_FMT "; nVertices: %" PetscInt_FMT "; nSharedVertices: %" PetscInt_FMT "\n", rank, network->cloneshared->nEdges, network->cloneshared->nVertices, nsv));

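    /* List the edges of each subnetwork with the global indices of their endpoint vertices */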
    for (i = 0; i < nsubnet; i++) {
      PetscCall(DMNetworkGetSubnetwork(dm, i, &nv, &ne, &vtx, &edges));
      if (ne) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     Subnet %" PetscInt_FMT ": nEdges %" PetscInt_FMT ", nVertices(include shared vertices) %" PetscInt_FMT "\n", i, ne, nv));
        for (j = 0; j < ne; j++) {
          p = edges[j];
          PetscCall(DMNetworkGetConnectedVertices(dm, p, &cone));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[0], &vfrom));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[1], &vto));
          PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edges[j], &p));
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       edge %" PetscInt_FMT ": %" PetscInt_FMT " ----> %" PetscInt_FMT "\n", p, vfrom, vto));
        }
      }
    }

    /* Shared vertices */
    PetscCall(DMNetworkGetSharedVertices(dm, NULL, &vtx));
    if (nsv) {
      PetscInt        gidx;
      PetscBool       ghost;
      const PetscInt *sv = NULL;

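      /* For each locally owned (non-ghost) shared vertex, print its global index and the subnetwork vertices it couples */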
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     SharedVertices:\n"));
      for (i = 0; i < nsv; i++) {
        PetscCall(DMNetworkIsGhostVertex(dm, vtx[i], &ghost));
        if (ghost) continue;

        PetscCall(DMNetworkSharedVertexGetInfo(dm, vtx[i], &gidx, &nv, &sv));
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       svtx %" PetscInt_FMT ": global index %" PetscInt_FMT ", subnet[%" PetscInt_FMT "].%" PetscInt_FMT " ---->\n", i, gidx, sv[0], sv[1]));
        for (j = 1; j < nv; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "                                           ----> subnet[%" PetscInt_FMT "].%" PetscInt_FMT "\n", sv[2 * j], sv[2 * j + 1]));
      }
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  } else PetscCheck(iascii, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Viewer type %s not yet supported for DMNetwork writing", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}