Actual source code: networkview.c

#include <petscconf.h>
// We need to define this ahead of any other includes to make sure mkstemp is actually defined
#if defined(PETSC_HAVE_MKSTEMP)
  #define _XOPEN_SOURCE 600
#endif
#include "petsc/private/petscimpl.h"
#include "petscerror.h"
#include "petscis.h"
#include "petscstring.h"
#include "petscsys.h"
#include "petscsystypes.h"
#include <petsc/private/dmnetworkimpl.h>
#include <petscdraw.h>

static PetscErrorCode DMView_Network_CSV(DM dm, PetscViewer viewer)
{
  DM              dmcoords;
  PetscInt        nsubnets, i, subnet, nvertices, nedges, vertex, edge, gidx, ncomp;
  PetscInt        vertexOffsets[2], globalEdgeVertices[2];
  PetscScalar     vertexCoords[2], *color_ptr, color;
  const PetscInt *vertices, *edges, *edgeVertices;
  Vec             allVertexCoords;
  PetscMPIInt     rank;
  MPI_Comm        comm;

  PetscFunctionBegin;
  // Get the coordinate information from dmcoords
  PetscCheck(dm->coordinates[0].dm, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_NULL, "CoordinateDM not created");
  PetscCall(DMGetCoordinateDM(dm, &dmcoords));

  PetscCall(DMGetCoordinateDim(dmcoords, &i));
  PetscCheck(i == 2, PETSC_COMM_WORLD, PETSC_ERR_SUP, "dim %" PetscInt_FMT " != 2 is not supported yet", i);

  // Get the coordinate vector from dm
  PetscCall(DMGetCoordinatesLocal(dm, &allVertexCoords));

  // Get the MPI communicator and this process' rank
  PetscCall(PetscObjectGetComm((PetscObject)dmcoords, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  // Start synchronized printing
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));

  // Write the header
  PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Type,Rank,ID,X,Y,Z,Name,Color\n"));

  // Iterate over each subnetwork (note: this requires the global number of subnetworks)
  PetscCall(DMNetworkGetNumSubNetworks(dmcoords, NULL, &nsubnets));
  for (subnet = 0; subnet < nsubnets; subnet++) {
    // Get the subnetwork's vertices and edges
    PetscCall(DMNetworkGetSubnetwork(dmcoords, subnet, &nvertices, &nedges, &vertices, &edges));

    // Write out each vertex
    for (i = 0; i < nvertices; i++) {
      vertex = vertices[i];

      // Get the offset into the coordinate vector for the vertex
      PetscCall(DMNetworkGetLocalVecOffset(dmcoords, vertex, ALL_COMPONENTS, vertexOffsets));
      vertexOffsets[1] = vertexOffsets[0] + 1;
      // Remap vertex to the global value
      PetscCall(DMNetworkGetGlobalVertexIndex(dmcoords, vertex, &gidx));
      // Get the vertex position from the coordinate vector
      PetscCall(VecGetValues(allVertexCoords, 2, vertexOffsets, vertexCoords));

      // Get vertex color; TODO: name
      PetscCall(DMNetworkGetNumComponents(dmcoords, vertex, &ncomp));
      PetscCheck(ncomp <= 1, PETSC_COMM_WORLD, PETSC_ERR_SUP, "num of components %" PetscInt_FMT " must be <= 1", ncomp);
      color = 0.0;
      if (ncomp == 1) {
        PetscCall(DMNetworkGetComponent(dmcoords, vertex, 0, NULL, (void **)&color_ptr, NULL));
        color = *color_ptr;
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Node,%" PetscInt_FMT ",%" PetscInt_FMT ",%lf,%lf,0,%" PetscInt_FMT ",%lf\n", (PetscInt)rank, gidx, (double)PetscRealPart(vertexCoords[0]), (double)PetscRealPart(vertexCoords[1]), gidx, (double)PetscRealPart(color)));
    }

    // Write out each edge
    for (i = 0; i < nedges; i++) {
      edge = edges[i];
      PetscCall(DMNetworkGetConnectedVertices(dmcoords, edge, &edgeVertices));
      PetscCall(DMNetworkGetGlobalVertexIndex(dmcoords, edgeVertices[0], &globalEdgeVertices[0]));
      PetscCall(DMNetworkGetGlobalVertexIndex(dmcoords, edgeVertices[1], &globalEdgeVertices[1]));
      PetscCall(DMNetworkGetGlobalEdgeIndex(dmcoords, edge, &edge));

      // TODO: Determine edge color/name
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Edge,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",0,%" PetscInt_FMT "\n", (PetscInt)rank, edge, globalEdgeVertices[0], globalEdgeVertices[1], edge));
    }
  }
  // End synchronized printing
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
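
/*
  For illustration only: given the header written above, the CSV emitted by this
  routine looks roughly like the following (values are hypothetical; one "Node"
  row per vertex and one "Edge" row per edge):

    Type,Rank,ID,X,Y,Z,Name,Color
    Node,0,0,0.000000,0.000000,0,0,0.000000
    Node,0,1,1.000000,0.500000,0,1,0.000000
    Edge,0,0,0,1,0,0

  Note that in "Edge" rows the X and Y columns carry the global indices of the
  two connected vertices rather than coordinates, and no Color field is written.
*/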

static PetscErrorCode DMView_Network_Matplotlib(DM dm, PetscViewer viewer)
{
  PetscMPIInt rank, size;
  MPI_Comm    comm;
  char        filename[PETSC_MAX_PATH_LEN + 1], options[512], proccall[PETSC_MAX_PATH_LEN + 512], scriptFile[PETSC_MAX_PATH_LEN + 1], buffer[256], buffer2[256];
  PetscViewer csvViewer;
  FILE       *processFile = NULL;
  PetscBool   isnull, optionShowRanks = PETSC_FALSE, optionRankIsSet = PETSC_FALSE, showNoNodes = PETSC_FALSE, showNoNumbering = PETSC_FALSE, optionShowVertices = PETSC_FALSE, optionViewPadding = PETSC_FALSE;
  PetscDraw   draw;
  DM_Network *network = (DM_Network *)dm->data;
  PetscReal   drawPause, viewPadding = 1.0;
  PetscInt    i;
#if defined(PETSC_HAVE_MKSTEMP)
  PetscBool isSharedTmp;
#endif

  PetscFunctionBegin;
  // Deal with the PetscDraw we are given
  PetscCall(PetscViewerDrawGetDraw(viewer, 1, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  PetscCall(PetscDrawSetVisible(draw, PETSC_FALSE));

  // Clear the file name buffer so all communicated bytes are well-defined
  PetscCall(PetscMemzero(filename, sizeof(filename)));

  // Get the MPI communicator and this process' rank
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));

#if defined(PETSC_HAVE_MKSTEMP)
  // Determine whether the temporary directory is shared
  // Note: This must be done collectively on every rank; it cannot be done on a single rank
  PetscCall(PetscSharedTmp(comm, &isSharedTmp));
#endif

  /* Process Options */
  optionShowRanks = network->vieweroptions.showallranks;
  showNoNodes     = network->vieweroptions.shownovertices;
  showNoNumbering = network->vieweroptions.shownonumbering;

  /*
    TODO: It would be good if the -dmnetwork_view_tmpdir option could also be moved up here.
  */
  PetscOptionsBegin(PetscObjectComm((PetscObject)dm), ((PetscObject)dm)->prefix, "MatPlotLib PetscViewer DMNetwork Options", "PetscViewer");
  PetscCall(PetscOptionsBool("-dmnetwork_view_all_ranks", "View all ranks in the DMNetwork", NULL, optionShowRanks, &optionShowRanks, NULL));
  PetscCall(PetscOptionsString("-dmnetwork_view_rank_range", "Set of ranks to view the DMNetwork on", NULL, buffer, buffer, sizeof(buffer), &optionRankIsSet));
  PetscCall(PetscOptionsBool("-dmnetwork_view_no_vertices", "Do not view vertices", NULL, showNoNodes, &showNoNodes, NULL));
  PetscCall(PetscOptionsBool("-dmnetwork_view_no_numbering", "Do not view edge and vertex numbering", NULL, showNoNumbering, &showNoNumbering, NULL));
  PetscCall(PetscOptionsString("-dmnetwork_view_zoomin_vertices", "Focus the view on the given set of vertices", NULL, buffer2, buffer2, sizeof(buffer2), &optionShowVertices));
  PetscCall(PetscOptionsReal("-dmnetwork_view_zoomin_vertices_padding", "Set the padding when viewing specific vertices", NULL, viewPadding, &viewPadding, &optionViewPadding));
  PetscOptionsEnd();
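
/*
  For reference, the options registered above are supplied on the command line of
  the calling program when the network is viewed with a draw viewer; a sketch
  (the option values here are hypothetical):

    -dmnetwork_view_all_ranks
    -dmnetwork_view_rank_range 0,2
    -dmnetwork_view_zoomin_vertices 3,7 -dmnetwork_view_zoomin_vertices_padding 0.5
*/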

  // Generate and broadcast the temporary file name from rank 0
  if (rank == 0) {
#if defined(PETSC_HAVE_TMPNAM_S)
    // Acquire a temporary file to write to and open an ASCII/CSV viewer
    PetscCheck(tmpnam_s(filename, sizeof(filename)) == 0, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#elif defined(PETSC_HAVE_MKSTEMP)
    PetscBool isTmpOverridden;
    size_t    numChars;
    // Same thing, but for POSIX systems on which tmpnam is deprecated
    // Note: Configure may detect mkstemp but it will not be defined if compiling for C99, so check additional defines to see if we can use it
    // Mkstemp requires us to explicitly specify part of the path, but some systems may not like putting files in /tmp/ so have an option for it
    PetscCall(PetscOptionsGetString(NULL, NULL, "-dmnetwork_view_tmpdir", filename, sizeof(filename), &isTmpOverridden));
    // If not specified by option try using a shared tmp on the system
    if (!isTmpOverridden) {
      // Validate that if tmp is not overridden it is at least shared
      PetscCheck(isSharedTmp, comm, PETSC_ERR_SUP_SYS, "Temporary file directory is not shared between ranks, try using -dmnetwork_view_tmpdir to specify a shared directory");
      PetscCall(PetscGetTmp(PETSC_COMM_SELF, filename, sizeof(filename)));
    }
    // Make sure the filename ends with a '/'
    PetscCall(PetscStrlen(filename, &numChars));
    if (filename[numChars - 1] != '/') {
      filename[numChars]     = '/';
      filename[numChars + 1] = 0;
    }
    // Perform the actual temporary file creation
    PetscCall(PetscStrlcat(filename, "XXXXXX", sizeof(filename)));
    PetscCheck(mkstemp(filename) != -1, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#else
    // Same thing, but for older C versions which don't have the safe form
    PetscCheck(tmpnam(filename) != NULL, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#endif
  }

  // Broadcast the filename to all other MPI ranks
  PetscCallMPI(MPI_Bcast(filename, PETSC_MAX_PATH_LEN, MPI_BYTE, 0, comm));

  PetscCall(PetscViewerASCIIOpen(comm, filename, &csvViewer));
  PetscCall(PetscViewerPushFormat(csvViewer, PETSC_VIEWER_ASCII_CSV));

  // Use the CSV viewer to write out the local network
  PetscCall(DMView_Network_CSV(dm, csvViewer));

  // Close the viewer
  PetscCall(PetscViewerDestroy(&csvViewer));

  // Generate options string
  PetscCall(PetscMemzero(options, sizeof(options)));
  // If the draw is null, run as a "test execute", i.e. do nothing and just test that the script was called correctly
  PetscCall(PetscStrlcat(options, isnull ? " -tx " : " ", sizeof(options)));
  PetscCall(PetscDrawGetPause(draw, &drawPause));
  if (drawPause > 0) {
    char pausebuffer[64];
    PetscCall(PetscSNPrintf(pausebuffer, sizeof(pausebuffer), "%f", (double)drawPause));
    PetscCall(PetscStrlcat(options, " -dt ", sizeof(options)));
    PetscCall(PetscStrlcat(options, pausebuffer, sizeof(options)));
  }
  if (optionShowRanks || optionRankIsSet) {
    // Show all ranks only if the option is set in code or by the user AND not showing specific ranks AND there is more than one process
    if (optionShowRanks && !optionRankIsSet && size != 1) PetscCall(PetscStrlcat(options, " -dar ", sizeof(options)));
    // Do not show the global plot if the user requests it OR if one specific rank is requested
    if (network->vieweroptions.dontshowglobal || optionRankIsSet) PetscCall(PetscStrlcat(options, " -ncp ", sizeof(options)));

    if (optionRankIsSet) {
      // If a range of ranks to draw is specified append it
      PetscCall(PetscStrlcat(options, " -drr ", sizeof(options)));
      PetscCall(PetscStrlcat(options, buffer, sizeof(options)));
    } else {
      // Otherwise, use the options provided in code
      if (network->vieweroptions.viewranks) {
        const PetscInt *viewranks;
        PetscInt        viewrankssize;
        char            rankbuffer[64];
        PetscCall(ISGetTotalIndices(network->vieweroptions.viewranks, &viewranks));
        PetscCall(ISGetSize(network->vieweroptions.viewranks, &viewrankssize));
        PetscCall(PetscStrlcat(options, " -drr ", sizeof(options)));
        for (i = 0; i < viewrankssize; i++) {
          PetscCall(PetscSNPrintf(rankbuffer, sizeof(rankbuffer), "%" PetscInt_FMT, viewranks[i]));
          PetscCall(PetscStrlcat(options, rankbuffer, sizeof(options)));
        }
        PetscCall(ISRestoreTotalIndices(network->vieweroptions.viewranks, &viewranks));
      } // if not provided an IS of viewing ranks, skip viewing
    }
  }
  if (optionShowVertices) {
    // Pass vertices to focus on if defined
    PetscCall(PetscStrlcat(options, " -vsv ", sizeof(options)));
    PetscCall(PetscStrlcat(options, buffer2, sizeof(options)));
    // Pass padding if set
    if (optionViewPadding) {
      PetscCall(PetscSNPrintf(buffer2, sizeof(buffer2), "%f", (double)viewPadding));
      PetscCall(PetscStrlcat(options, " -vp ", sizeof(options)));
      PetscCall(PetscStrlcat(options, buffer2, sizeof(options)));
    }
  }

  // Check the visibility options
  if (showNoNodes) PetscCall(PetscStrlcat(options, " -nn ", sizeof(options)));
  if (showNoNumbering) PetscCall(PetscStrlcat(options, " -nnl -nel ", sizeof(options)));

  // Expand ${PETSC_DIR} to locate the viewer script
  PetscCall(PetscStrreplace(comm, "${PETSC_DIR}/share/petsc/bin/dmnetwork_view.py", scriptFile, sizeof(scriptFile)));
  PetscCall(PetscFixFilename(scriptFile, scriptFile));
  // Generate the system call 'python3 $PETSC_DIR/share/petsc/bin/dmnetwork_view.py <options> <file>'
  PetscCall(PetscArrayzero(proccall, sizeof(proccall)));
  PetscCall(PetscSNPrintf(proccall, sizeof(proccall), "%s %s %s %s", PETSC_PYTHON_EXE, scriptFile, options, filename));
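
/*
  A sketch of the resulting command line; the Python path, options, and temporary
  file name below are hypothetical (PETSC_PYTHON_EXE is fixed at configure time):

    /usr/bin/python3 /path/to/petsc/share/petsc/bin/dmnetwork_view.py  -dt 1.000000 -dar  /tmp/a1b2c3
*/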

#if defined(PETSC_HAVE_POPEN)
  // Perform the call to run the python script (Note: while this is called on all ranks, POpen will only run on rank 0)
  PetscCall(PetscPOpen(comm, NULL, proccall, "r", &processFile));
  if (processFile != NULL) {
    while (fgets(buffer, sizeof(buffer), processFile) != NULL) PetscCall(PetscPrintf(comm, "%s", buffer));
  }
  PetscCall(PetscPClose(comm, processFile));
#else
  // Same thing, but using the standard library for systems that don't have POpen/PClose (only run on rank 0)
  if (rank == 0) PetscCheck(system(proccall) == 0, PETSC_COMM_SELF, PETSC_ERR_SYS, "Failed to call viewer script");
  // Barrier so that all ranks wait until the call completes
  PetscCallMPI(MPI_Barrier(comm));
#endif
  // Clean up the temporary file, using rank 0
  if (rank == 0) PetscCheck(remove(filename) == 0, PETSC_COMM_SELF, PETSC_ERR_SYS, "Failed to delete temporary file");
  PetscFunctionReturn(PETSC_SUCCESS);
}
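
/*
  A minimal sketch of how the matplotlib path above is reached (assumes "dm" is a
  fully set-up DMNETWORK with coordinates attached; error handling elided):

    PetscViewer viewer;

    PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, NULL, "network", 0, 0, 600, 600, &viewer));
    PetscCall(DMView(dm, viewer)); // dispatches to DMView_Network() and then DMView_Network_Matplotlib()
    PetscCall(PetscViewerDestroy(&viewer));
*/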

PetscErrorCode DMView_Network(DM dm, PetscViewer viewer)
{
  PetscBool         iascii, isdraw;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscViewerGetFormat(viewer, &format));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isdraw) {
    PetscCall(DMView_Network_Matplotlib(dm, viewer));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    const PetscInt *cone, *vtx, *edges;
    PetscInt        vfrom, vto, i, j, nv, ne, nsv, p, nsubnet;
    DM_Network     *network = (DM_Network *)dm->data;
    PetscMPIInt     rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
    if (format == PETSC_VIEWER_ASCII_CSV) {
      PetscCall(DMView_Network_CSV(dm, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    nsubnet = network->cloneshared->Nsubnet; /* num of subnetworks */
    if (!rank) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "  NSubnets: %" PetscInt_FMT "; NEdges: %" PetscInt_FMT "; NVertices: %" PetscInt_FMT "; NSharedVertices: %" PetscInt_FMT ".\n", nsubnet, network->cloneshared->NEdges, network->cloneshared->NVertices,
                            network->cloneshared->Nsvtx));
    }

    PetscCall(DMNetworkGetSharedVertices(dm, &nsv, NULL));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] nEdges: %" PetscInt_FMT "; nVertices: %" PetscInt_FMT "; nSharedVertices: %" PetscInt_FMT "\n", rank, network->cloneshared->nEdges, network->cloneshared->nVertices, nsv));

    for (i = 0; i < nsubnet; i++) {
      PetscCall(DMNetworkGetSubnetwork(dm, i, &nv, &ne, &vtx, &edges));
      if (ne) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     Subnet %" PetscInt_FMT ": nEdges %" PetscInt_FMT ", nVertices (including shared vertices) %" PetscInt_FMT "\n", i, ne, nv));
        for (j = 0; j < ne; j++) {
          p = edges[j];
          PetscCall(DMNetworkGetConnectedVertices(dm, p, &cone));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[0], &vfrom));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[1], &vto));
          PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edges[j], &p));
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       edge %" PetscInt_FMT ": %" PetscInt_FMT " ----> %" PetscInt_FMT "\n", p, vfrom, vto));
        }
      }
    }

    /* Shared vertices */
    PetscCall(DMNetworkGetSharedVertices(dm, NULL, &vtx));
    if (nsv) {
      PetscInt        gidx;
      PetscBool       ghost;
      const PetscInt *sv = NULL;

      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     SharedVertices:\n"));
      for (i = 0; i < nsv; i++) {
        PetscCall(DMNetworkIsGhostVertex(dm, vtx[i], &ghost));
        if (ghost) continue;

        PetscCall(DMNetworkSharedVertexGetInfo(dm, vtx[i], &gidx, &nv, &sv));
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       svtx %" PetscInt_FMT ": global index %" PetscInt_FMT ", subnet[%" PetscInt_FMT "].%" PetscInt_FMT " ---->\n", i, gidx, sv[0], sv[1]));
        for (j = 1; j < nv; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "                                           ----> subnet[%" PetscInt_FMT "].%" PetscInt_FMT "\n", sv[2 * j], sv[2 * j + 1]));
      }
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  } else PetscCheck(iascii, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Viewer type %s not yet supported for DMNetwork writing", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}
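
/*
  A minimal sketch of obtaining the CSV output directly through an ASCII viewer
  (assumes "dm" is a set-up DMNETWORK with coordinates; the file name is
  hypothetical):

    PetscViewer viewer;

    PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, "network.csv", &viewer));
    PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_CSV));
    PetscCall(DMView(dm, viewer));
    PetscCall(PetscViewerPopFormat(viewer));
    PetscCall(PetscViewerDestroy(&viewer));
*/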

/*@
  DMNetworkViewSetShowRanks - Sets viewing the `DMNETWORK` on each rank individually.

  Logically Collective

  Input Parameters:
+ dm        - the `DMNETWORK` object
- showranks - `PETSC_TRUE` if viewing each rank's subnetwork individually

  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowRanks(DM dm, PetscBool showranks)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  network->vieweroptions.showallranks = showranks;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMNetworkViewSetShowGlobal - Sets viewing the global network.

  Logically Collective

  Input Parameters:
+ dm         - the `DMNETWORK` object
- showglobal - `PETSC_TRUE` if viewing the global network

  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowGlobal(DM dm, PetscBool showglobal)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  network->vieweroptions.dontshowglobal = (PetscBool)(!showglobal);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMNetworkViewSetShowVertices - Sets whether to display the vertices in viewing routines.

  Logically Collective

  Input Parameters:
+ dm           - the `DMNETWORK` object
- showvertices - `PETSC_TRUE` if visualizing the vertices

  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowNumbering()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowVertices(DM dm, PetscBool showvertices)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  network->vieweroptions.shownovertices = (PetscBool)(!showvertices);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMNetworkViewSetShowNumbering - Sets displaying the numbering of edges and vertices in viewing routines.

  Logically Collective

  Input Parameters:
+ dm            - the `DMNETWORK` object
- shownumbering - `PETSC_TRUE` if displaying the numbering of edges and vertices

  Level: beginner

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetViewRanks()`
@*/
PetscErrorCode DMNetworkViewSetShowNumbering(DM dm, PetscBool shownumbering)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  network->vieweroptions.shownonumbering = (PetscBool)(!shownumbering);
  PetscFunctionReturn(PETSC_SUCCESS);
}
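
/*
  A minimal sketch combining the setters above to configure the view from code
  (assumes "dm" is a DMNETWORK; the flags are consumed by
  DMView_Network_Matplotlib() when a draw viewer is used):

    PetscCall(DMNetworkViewSetShowRanks(dm, PETSC_TRUE));      // draw each rank's subnetwork
    PetscCall(DMNetworkViewSetShowGlobal(dm, PETSC_FALSE));    // skip the combined global plot
    PetscCall(DMNetworkViewSetShowVertices(dm, PETSC_TRUE));   // keep vertices visible
    PetscCall(DMNetworkViewSetShowNumbering(dm, PETSC_FALSE)); // hide vertex/edge numbers
*/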

/*@
  DMNetworkViewSetViewRanks - Sets viewing the `DMNETWORK` on each of the specified ranks individually.

  Collective

  Input Parameters:
+ dm        - the `DMNETWORK` object
- viewranks - set of ranks to view the `DMNETWORK` on individually

  Level: beginner

  Note:
  `DMNetwork` takes its own reference to the input `viewranks` `IS`; the caller should still destroy `viewranks`.

.seealso: `DM`, `DMNETWORK`, `DMNetworkViewSetShowRanks()`, `DMNetworkViewSetShowGlobal()`, `DMNetworkViewSetShowVertices()`, `DMNetworkViewSetShowNumbering()`
@*/
PetscErrorCode DMNetworkViewSetViewRanks(DM dm, IS viewranks)
{
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewranks, IS_CLASSID, 2);
  PetscCheckSameComm(dm, 1, viewranks, 2);
  PetscCall(ISDestroy(&network->vieweroptions.viewranks));
  PetscCall(PetscObjectReference((PetscObject)viewranks));
  network->vieweroptions.viewranks = viewranks;
  PetscFunctionReturn(PETSC_SUCCESS);
}
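
/*
  A minimal sketch of restricting the per-rank plots to ranks 0 and 2 (the choice
  of ranks is hypothetical; assumes "dm" is a DMNETWORK on PETSC_COMM_WORLD):

    IS             viewranks;
    const PetscInt ranks[] = {0, 2};

    PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, 2, ranks, PETSC_COPY_VALUES, &viewranks));
    PetscCall(DMNetworkViewSetViewRanks(dm, viewranks));
    PetscCall(ISDestroy(&viewranks)); // the DM keeps its own reference
*/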