Actual source code: mpiadj.c


  2: /*
  3:     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
  4: */
  5: #include <../src/mat/impls/adj/mpi/mpiadj.h>
  6: #include <petscsf.h>

  8: /*
  9:   The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
 10: */
 11: static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj, IS irows, IS icols, PetscInt **sadj_xadj, PetscInt **sadj_adjncy, PetscInt **sadj_values)
 12: {
 13:   PetscInt        nlrows_is, icols_n, i, j, nroots, nleaves, rlocalindex, *ncols_send, *ncols_recv;
 14:   PetscInt        nlrows_mat, *adjncy_recv, Ncols_recv, Ncols_send, *xadj_recv, *values_recv;
 15:   PetscInt       *ncols_recv_offsets, loc, rnclos, *sadjncy, *sxadj, *svalues;
 16:   const PetscInt *irows_indices, *icols_indices, *xadj, *adjncy;
 17:   PetscMPIInt     owner;
 18:   Mat_MPIAdj     *a = (Mat_MPIAdj *)adj->data;
 19:   PetscLayout     rmap;
 20:   MPI_Comm        comm;
 21:   PetscSF         sf;
 22:   PetscSFNode    *iremote;
 23:   PetscBool       done;

 25:   PetscFunctionBegin;
 26:   PetscCall(PetscObjectGetComm((PetscObject)adj, &comm));
 27:   PetscCall(MatGetLayouts(adj, &rmap, NULL));
 28:   PetscCall(ISGetLocalSize(irows, &nlrows_is));
 29:   PetscCall(ISGetIndices(irows, &irows_indices));
 30:   PetscCall(PetscMalloc1(nlrows_is, &iremote));
 31:   /* construct the SF graph: one leaf per requested row, rooted on the process that owns that row */
 32:   nleaves = nlrows_is;
 33:   for (i = 0; i < nlrows_is; i++) {
 34:     owner       = -1;
 35:     rlocalindex = -1;
 36:     PetscCall(PetscLayoutFindOwnerIndex(rmap, irows_indices[i], &owner, &rlocalindex));
 37:     iremote[i].rank  = owner;
 38:     iremote[i].index = rlocalindex;
 39:   }
 40:   PetscCall(MatGetRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
 41:   PetscCall(PetscCalloc4(nlrows_mat, &ncols_send, nlrows_is, &xadj_recv, nlrows_is + 1, &ncols_recv_offsets, nlrows_is, &ncols_recv));
 42:   nroots = nlrows_mat;
 43:   for (i = 0; i < nlrows_mat; i++) ncols_send[i] = xadj[i + 1] - xadj[i];
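       /* first PetscSF: for each requested row, fetch its length (ncols_send) and its offset in the owner's adjncy[] (xadj) */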
 44:   PetscCall(PetscSFCreate(comm, &sf));
 45:   PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
 46:   PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
 47:   PetscCall(PetscSFSetFromOptions(sf));
 48:   PetscCall(PetscSFBcastBegin(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
 49:   PetscCall(PetscSFBcastEnd(sf, MPIU_INT, ncols_send, ncols_recv, MPI_REPLACE));
 50:   PetscCall(PetscSFBcastBegin(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
 51:   PetscCall(PetscSFBcastEnd(sf, MPIU_INT, xadj, xadj_recv, MPI_REPLACE));
 52:   PetscCall(PetscSFDestroy(&sf));
 53:   Ncols_recv = 0;
 54:   for (i = 0; i < nlrows_is; i++) {
 55:     Ncols_recv += ncols_recv[i];
 56:     ncols_recv_offsets[i + 1] = ncols_recv[i] + ncols_recv_offsets[i];
 57:   }
 58:   Ncols_send = 0;
 59:   for (i = 0; i < nlrows_mat; i++) Ncols_send += ncols_send[i];
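       /* second PetscSF: one leaf per requested nonzero, rooted at the corresponding entries of the owner's adjncy[] (and values[]) */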
 60:   PetscCall(PetscCalloc1(Ncols_recv, &iremote));
 61:   PetscCall(PetscCalloc1(Ncols_recv, &adjncy_recv));
 62:   nleaves    = Ncols_recv;
 63:   Ncols_recv = 0;
 64:   for (i = 0; i < nlrows_is; i++) {
 65:     PetscCall(PetscLayoutFindOwner(rmap, irows_indices[i], &owner));
 66:     for (j = 0; j < ncols_recv[i]; j++) {
 67:       iremote[Ncols_recv].rank    = owner;
 68:       iremote[Ncols_recv++].index = xadj_recv[i] + j;
 69:     }
 70:   }
 71:   PetscCall(ISRestoreIndices(irows, &irows_indices));
 72:   /* allocate a receive buffer for the edge weights if they are in use */
 73:   if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv, &values_recv));
 74:   nroots = Ncols_send;
 75:   PetscCall(PetscSFCreate(comm, &sf));
 76:   PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
 77:   PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
 78:   PetscCall(PetscSFSetFromOptions(sf));
 79:   PetscCall(PetscSFBcastBegin(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
 80:   PetscCall(PetscSFBcastEnd(sf, MPIU_INT, adjncy, adjncy_recv, MPI_REPLACE));
 81:   if (a->useedgeweights) {
 82:     PetscCall(PetscSFBcastBegin(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
 83:     PetscCall(PetscSFBcastEnd(sf, MPIU_INT, a->values, values_recv, MPI_REPLACE));
 84:   }
 85:   PetscCall(PetscSFDestroy(&sf));
 86:   PetscCall(MatRestoreRowIJ(adj, 0, PETSC_FALSE, PETSC_FALSE, &nlrows_mat, &xadj, &adjncy, &done));
 87:   PetscCall(ISGetLocalSize(icols, &icols_n));
 88:   PetscCall(ISGetIndices(icols, &icols_indices));
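       /* drop columns that are not in icols: mark them with -1 and decrement the per-row counts */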
 89:   rnclos = 0;
 90:   for (i = 0; i < nlrows_is; i++) {
 91:     for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
 92:       PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
 93:       if (loc < 0) {
 94:         adjncy_recv[j] = -1;
 95:         if (a->useedgeweights) values_recv[j] = -1;
 96:         ncols_recv[i]--;
 97:       } else {
 98:         rnclos++;
 99:       }
100:     }
101:   }
102:   PetscCall(ISRestoreIndices(icols, &icols_indices));
103:   PetscCall(PetscCalloc1(rnclos, &sadjncy));
104:   if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos, &svalues));
105:   PetscCall(PetscCalloc1(nlrows_is + 1, &sxadj));
106:   rnclos = 0;
107:   for (i = 0; i < nlrows_is; i++) {
108:     for (j = ncols_recv_offsets[i]; j < ncols_recv_offsets[i + 1]; j++) {
109:       if (adjncy_recv[j] < 0) continue;
110:       sadjncy[rnclos] = adjncy_recv[j];
111:       if (a->useedgeweights) svalues[rnclos] = values_recv[j];
112:       rnclos++;
113:     }
114:   }
115:   for (i = 0; i < nlrows_is; i++) sxadj[i + 1] = sxadj[i] + ncols_recv[i];
116:   if (sadj_xadj) {
117:     *sadj_xadj = sxadj;
118:   } else PetscCall(PetscFree(sxadj));
119:   if (sadj_adjncy) {
120:     *sadj_adjncy = sadjncy;
121:   } else PetscCall(PetscFree(sadjncy));
122:   if (sadj_values) {
123:     if (a->useedgeweights) *sadj_values = svalues;
124:     else *sadj_values = NULL;
125:   } else {
126:     if (a->useedgeweights) PetscCall(PetscFree(svalues));
127:   }
128:   PetscCall(PetscFree4(ncols_send, xadj_recv, ncols_recv_offsets, ncols_recv));
129:   PetscCall(PetscFree(adjncy_recv));
130:   if (a->useedgeweights) PetscCall(PetscFree(values_recv));
131:   PetscFunctionReturn(PETSC_SUCCESS);
132: }

134: static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat, PetscInt n, const IS irow[], const IS icol[], PetscBool subcomm, MatReuse scall, Mat *submat[])
135: {
136:   PetscInt        i, irow_n, icol_n, *sxadj, *sadjncy, *svalues;
137:   PetscInt       *indices, nindx, j, k, loc;
138:   PetscMPIInt     issame;
139:   const PetscInt *irow_indices, *icol_indices;
140:   MPI_Comm        scomm_row, scomm_col, scomm_mat;

142:   PetscFunctionBegin;
143:   nindx = 0;
144:   /*
145:    * Estimate an upper bound on the workspace needed (local rows plus columns of any requested sub-matrix) so it can be allocated once
146:    */
147:   for (i = 0; i < n; i++) {
148:     PetscCall(ISGetLocalSize(irow[i], &irow_n));
149:     PetscCall(ISGetLocalSize(icol[i], &icol_n));
150:     nindx = nindx > (irow_n + icol_n) ? nindx : (irow_n + icol_n);
151:   }
152:   PetscCall(PetscMalloc1(nindx, &indices));
153:   /* construct a submat */
154:   //  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

156:   for (i = 0; i < n; i++) {
157:     if (subcomm) {
158:       PetscCall(PetscObjectGetComm((PetscObject)irow[i], &scomm_row));
159:       PetscCall(PetscObjectGetComm((PetscObject)icol[i], &scomm_col));
160:       PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_col, &issame));
161:       PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "row index set must have the same comm as the col index set");
162:       PetscCallMPI(MPI_Comm_compare(scomm_row, PETSC_COMM_SELF, &issame));
163:       PetscCheck(issame != MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "cannot use PETSC_COMM_SELF as the communicator when extracting a parallel submatrix");
164:     } else {
165:       scomm_row = PETSC_COMM_SELF;
166:     }
167:     /* get the sub-matrix data */
168:     sxadj   = NULL;
169:     sadjncy = NULL;
170:     svalues = NULL;
171:     PetscCall(MatCreateSubMatrix_MPIAdj_data(mat, irow[i], icol[i], &sxadj, &sadjncy, &svalues));
172:     PetscCall(ISGetLocalSize(irow[i], &irow_n));
173:     PetscCall(ISGetLocalSize(icol[i], &icol_n));
174:     PetscCall(ISGetIndices(irow[i], &irow_indices));
175:     PetscCall(PetscArraycpy(indices, irow_indices, irow_n));
176:     PetscCall(ISRestoreIndices(irow[i], &irow_indices));
177:     PetscCall(ISGetIndices(icol[i], &icol_indices));
178:     PetscCall(PetscArraycpy(indices + irow_n, icol_indices, icol_n));
179:     PetscCall(ISRestoreIndices(icol[i], &icol_indices));
180:     nindx = irow_n + icol_n;
181:     PetscCall(PetscSortRemoveDupsInt(&nindx, indices));
182:     /* renumber columns */
183:     for (j = 0; j < irow_n; j++) {
184:       for (k = sxadj[j]; k < sxadj[j + 1]; k++) {
185:         PetscCall(PetscFindInt(sadjncy[k], nindx, indices, &loc));
186:         PetscCheck(loc >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "cannot find column %" PetscInt_FMT, sadjncy[k]);
187:         sadjncy[k] = loc;
188:       }
189:     }
190:     if (scall == MAT_INITIAL_MATRIX) {
191:       PetscCall(MatCreateMPIAdj(scomm_row, irow_n, icol_n, sxadj, sadjncy, svalues, submat[i]));
192:     } else {
193:       Mat         sadj = *(submat[i]);
194:       Mat_MPIAdj *sa   = (Mat_MPIAdj *)((sadj)->data);
195:       PetscCall(PetscObjectGetComm((PetscObject)sadj, &scomm_mat));
196:       PetscCallMPI(MPI_Comm_compare(scomm_row, scomm_mat, &issame));
197:       PetscCheck(issame == MPI_IDENT, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "submatrix must have the same comm as the index sets");
198:       PetscCall(PetscArraycpy(sa->i, sxadj, irow_n + 1));
199:       PetscCall(PetscArraycpy(sa->j, sadjncy, sxadj[irow_n]));
200:       if (svalues) PetscCall(PetscArraycpy(sa->values, svalues, sxadj[irow_n]));
201:       PetscCall(PetscFree(sxadj));
202:       PetscCall(PetscFree(sadjncy));
203:       PetscCall(PetscFree(svalues));
204:     }
205:   }
206:   PetscCall(PetscFree(indices));
207:   PetscFunctionReturn(PETSC_SUCCESS);
208: }

210: static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
211: {
212:   /* get sub-matrices across a sub-communicator */
213:   PetscFunctionBegin;
214:   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_TRUE, scall, submat));
215:   PetscFunctionReturn(PETSC_SUCCESS);
216: }

218: static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
219: {
220:   PetscFunctionBegin;
221:   /* get sub-matrices based on PETSC_COMM_SELF */
222:   PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat, n, irow, icol, PETSC_FALSE, scall, submat));
223:   PetscFunctionReturn(PETSC_SUCCESS);
224: }

226: static PetscErrorCode MatView_MPIAdj_ASCII(Mat A, PetscViewer viewer)
227: {
228:   Mat_MPIAdj       *a = (Mat_MPIAdj *)A->data;
229:   PetscInt          i, j, m = A->rmap->n;
230:   const char       *name;
231:   PetscViewerFormat format;

233:   PetscFunctionBegin;
234:   PetscCall(PetscObjectGetName((PetscObject)A, &name));
235:   PetscCall(PetscViewerGetFormat(viewer, &format));
236:   if (format == PETSC_VIEWER_ASCII_INFO) {
237:     PetscFunctionReturn(PETSC_SUCCESS);
238:   } else {
239:     PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MATLAB format not supported");
240:     PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
241:     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
242:     for (i = 0; i < m; i++) {
243:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "row %" PetscInt_FMT ":", i + A->rmap->rstart));
244:       for (j = a->i[i]; j < a->i[i + 1]; j++) {
245:         if (a->values) {
246:           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " (%" PetscInt_FMT ", %" PetscInt_FMT ") ", a->j[j], a->values[j]));
247:         } else {
248:           PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " %" PetscInt_FMT " ", a->j[j]));
249:         }
250:       }
251:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "\n"));
252:     }
253:     PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
254:     PetscCall(PetscViewerFlush(viewer));
255:     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
256:   }
257:   PetscFunctionReturn(PETSC_SUCCESS);
258: }

260: static PetscErrorCode MatView_MPIAdj(Mat A, PetscViewer viewer)
261: {
262:   PetscBool iascii;

264:   PetscFunctionBegin;
265:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
266:   if (iascii) PetscCall(MatView_MPIAdj_ASCII(A, viewer));
267:   PetscFunctionReturn(PETSC_SUCCESS);
268: }

270: static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
271: {
272:   Mat_MPIAdj *a = (Mat_MPIAdj *)mat->data;

274:   PetscFunctionBegin;
275: #if defined(PETSC_USE_LOG)
276:   PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, mat->rmap->n, mat->cmap->n, a->nz));
277: #endif
278:   PetscCall(PetscFree(a->diag));
279:   if (a->freeaij) {
280:     if (a->freeaijwithfree) {
281:       if (a->i) free(a->i);
282:       if (a->j) free(a->j);
283:     } else {
284:       PetscCall(PetscFree(a->i));
285:       PetscCall(PetscFree(a->j));
286:       PetscCall(PetscFree(a->values));
287:     }
288:   }
289:   PetscCall(PetscFree(a->rowvalues));
290:   PetscCall(PetscFree(mat->data));
291:   PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
292:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjSetPreallocation_C", NULL));
293:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjCreateNonemptySubcommMat_C", NULL));
294:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeq_C", NULL));
295:   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIAdjToSeqRankZero_C", NULL));
296:   PetscFunctionReturn(PETSC_SUCCESS);
297: }

299: static PetscErrorCode MatSetOption_MPIAdj(Mat A, MatOption op, PetscBool flg)
300: {
301:   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

303:   PetscFunctionBegin;
304:   switch (op) {
305:   case MAT_SYMMETRIC:
306:   case MAT_STRUCTURALLY_SYMMETRIC:
307:   case MAT_HERMITIAN:
308:   case MAT_SPD:
309:     a->symmetric = flg;
310:     break;
311:   case MAT_SYMMETRY_ETERNAL:
312:   case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
313:   case MAT_SPD_ETERNAL:
314:     break;
315:   default:
316:     PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
317:     break;
318:   }
319:   PetscFunctionReturn(PETSC_SUCCESS);
320: }

322: static PetscErrorCode MatGetRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
323: {
324:   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

326:   PetscFunctionBegin;
327:   row -= A->rmap->rstart;
328:   PetscCheck(row >= 0 && row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row out of range");
329:   *nz = a->i[row + 1] - a->i[row];
330:   if (v) {
331:     PetscInt j;
332:     if (a->rowvalues_alloc < *nz) {
333:       PetscCall(PetscFree(a->rowvalues));
334:       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc * 2, *nz);
335:       PetscCall(PetscMalloc1(a->rowvalues_alloc, &a->rowvalues));
336:     }
337:     for (j = 0; j < *nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row] + j] : 1.0;
338:     *v = (*nz) ? a->rowvalues : NULL;
339:   }
340:   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
341:   PetscFunctionReturn(PETSC_SUCCESS);
342: }

344: static PetscErrorCode MatRestoreRow_MPIAdj(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
345: {
346:   PetscFunctionBegin;
347:   PetscFunctionReturn(PETSC_SUCCESS);
348: }

350: static PetscErrorCode MatEqual_MPIAdj(Mat A, Mat B, PetscBool *flg)
351: {
352:   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
353:   PetscBool   flag;

355:   PetscFunctionBegin;
 356:   /* if the local dimensions or numbers of nonzeros differ, the matrices cannot be equal */
 357:   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) flag = PETSC_FALSE;
 358:   else {
 359:     /* check that the row offsets a->i are the same */
 360:     PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, &flag));

 362:     /* check that the column indices a->j are the same */
 363:     if (flag) PetscCall(PetscArraycmp(a->j, b->j, a->nz, &flag));
 364:   }

365:   PetscCall(MPIU_Allreduce(&flag, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
366:   PetscFunctionReturn(PETSC_SUCCESS);
367: }

369: static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
370: {
371:   PetscInt    i;
372:   Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
373:   PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

375:   PetscFunctionBegin;
376:   *m    = A->rmap->n;
377:   *ia   = a->i;
378:   *ja   = a->j;
379:   *done = PETSC_TRUE;
380:   if (oshift) {
381:     for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]++;
382:     for (i = 0; i <= (*m); i++) (*ia)[i]++;
383:   }
384:   PetscFunctionReturn(PETSC_SUCCESS);
385: }

387: static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool blockcompressed, PetscInt *m, const PetscInt *inia[], const PetscInt *inja[], PetscBool *done)
388: {
389:   PetscInt    i;
390:   Mat_MPIAdj *a  = (Mat_MPIAdj *)A->data;
391:   PetscInt  **ia = (PetscInt **)inia, **ja = (PetscInt **)inja;

393:   PetscFunctionBegin;
394:   PetscCheck(!ia || a->i == *ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ia passed back is not one obtained with MatGetRowIJ()");
395:   PetscCheck(!ja || a->j == *ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "ja passed back is not one obtained with MatGetRowIJ()");
396:   if (oshift) {
397:     PetscCheck(ia, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is used you must pass in the inia[] argument");
398:     PetscCheck(ja, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "If oshift is used you must pass in the inja[] argument");
399:     for (i = 0; i <= (*m); i++) (*ia)[i]--;
400:     for (i = 0; i < (*ia)[*m]; i++) (*ja)[i]--;
401:   }
402:   PetscFunctionReturn(PETSC_SUCCESS);
403: }

405: PetscErrorCode MatConvertFrom_MPIAdj(Mat A, MatType type, MatReuse reuse, Mat *newmat)
406: {
407:   Mat                B;
408:   PetscInt           i, m, N, nzeros = 0, *ia, *ja, len, rstart, cnt, j, *a;
409:   const PetscInt    *rj;
410:   const PetscScalar *ra;
411:   MPI_Comm           comm;

413:   PetscFunctionBegin;
414:   PetscCall(MatGetSize(A, NULL, &N));
415:   PetscCall(MatGetLocalSize(A, &m, NULL));
416:   PetscCall(MatGetOwnershipRange(A, &rstart, NULL));

418:   /* count the number of nonzeros per row */
419:   for (i = 0; i < m; i++) {
420:     PetscCall(MatGetRow(A, i + rstart, &len, &rj, NULL));
421:     for (j = 0; j < len; j++) {
422:       if (rj[j] == i + rstart) {
423:         len--;
424:         break;
425:       } /* don't count diagonal */
426:     }
427:     nzeros += len;
428:     PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, NULL));
429:   }

431:   /* malloc space for nonzeros */
432:   PetscCall(PetscMalloc1(nzeros + 1, &a));
433:   PetscCall(PetscMalloc1(N + 1, &ia));
434:   PetscCall(PetscMalloc1(nzeros + 1, &ja));

436:   nzeros = 0;
437:   ia[0]  = 0;
438:   for (i = 0; i < m; i++) {
439:     PetscCall(MatGetRow(A, i + rstart, &len, &rj, &ra));
440:     cnt = 0;
441:     for (j = 0; j < len; j++) {
442:       if (rj[j] != i + rstart) { /* if not diagonal */
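             /* the edge weight is the magnitude of the matrix entry, truncated to an integer */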
443:         a[nzeros + cnt]    = (PetscInt)PetscAbsScalar(ra[j]);
444:         ja[nzeros + cnt++] = rj[j];
445:       }
446:     }
447:     PetscCall(MatRestoreRow(A, i + rstart, &len, &rj, &ra));
448:     nzeros += cnt;
449:     ia[i + 1] = nzeros;
450:   }

452:   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
453:   PetscCall(MatCreate(comm, &B));
454:   PetscCall(MatSetSizes(B, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
455:   PetscCall(MatSetType(B, type));
456:   PetscCall(MatMPIAdjSetPreallocation(B, ia, ja, a));

458:   if (reuse == MAT_INPLACE_MATRIX) {
459:     PetscCall(MatHeaderReplace(A, &B));
460:   } else {
461:     *newmat = B;
462:   }
463:   PetscFunctionReturn(PETSC_SUCCESS);
464: }

466: PetscErrorCode MatSetValues_MPIAdj(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode im)
467: {
468:   Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;
469:   PetscInt    rStart, rEnd, cStart, cEnd;

471:   PetscFunctionBegin;
472:   PetscCheck(!adj->i, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is already assembled, cannot change its values");
473:   PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
474:   PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
475:   if (!adj->ht) {
476:     PetscCall(PetscHSetIJCreate(&adj->ht));
477:     PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), 1, &A->stash));
478:     PetscCall(PetscLayoutSetUp(A->rmap));
479:     PetscCall(PetscLayoutSetUp(A->cmap));
480:   }
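       /* negative row/column indices and diagonal entries are ignored; rows owned by other processes are stashed until assembly */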
481:   for (PetscInt r = 0; r < m; ++r) {
482:     PetscHashIJKey key;

484:     key.i = rows[r];
485:     if (key.i < 0) continue;
486:     if ((key.i < rStart) || (key.i >= rEnd)) {
487:       PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
488:     } else {
489:       for (PetscInt c = 0; c < n; ++c) {
490:         key.j = cols[c];
491:         if (key.j < 0 || key.i == key.j) continue;
492:         PetscCall(PetscHSetIJAdd(adj->ht, key));
493:       }
494:     }
495:   }
496:   PetscFunctionReturn(PETSC_SUCCESS);
497: }

499: PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
500: {
501:   PetscInt    nstash, reallocs;
502:   Mat_MPIAdj *adj = (Mat_MPIAdj *)A->data;

504:   PetscFunctionBegin;
505:   if (!adj->ht) {
506:     PetscCall(PetscHSetIJCreate(&adj->ht));
507:     PetscCall(PetscLayoutSetUp(A->rmap));
508:     PetscCall(PetscLayoutSetUp(A->cmap));
509:   }
510:   PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
511:   PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
512:   PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
513:   PetscFunctionReturn(PETSC_SUCCESS);
514: }

516: PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
517: {
518:   PetscScalar   *val;
519:   PetscInt      *row, *col, m, rstart, *rowstarts;
520:   PetscInt       i, j, ncols, flg, nz;
521:   PetscMPIInt    n;
522:   Mat_MPIAdj    *adj = (Mat_MPIAdj *)A->data;
523:   PetscHashIter  hi;
524:   PetscHashIJKey key;
525:   PetscHSetIJ    ht = adj->ht;

527:   PetscFunctionBegin;
528:   while (1) {
529:     PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
530:     if (!flg) break;

532:     for (i = 0; i < n;) {
533:       /* Identify the consecutive vals belonging to the same row */
534:       for (j = i, rstart = row[j]; j < n; j++) {
535:         if (row[j] != rstart) break;
536:       }
537:       if (j < n) ncols = j - i;
538:       else ncols = n - i;
539:       /* Set all these values with a single function call */
540:       PetscCall(MatSetValues_MPIAdj(A, 1, row + i, ncols, col + i, val + i, INSERT_VALUES));
541:       i = j;
542:     }
543:   }
544:   PetscCall(MatStashScatterEnd_Private(&A->stash));
545:   PetscCall(MatStashDestroy_Private(&A->stash));

547:   PetscCall(MatGetLocalSize(A, &m, NULL));
548:   PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
549:   PetscCall(PetscCalloc1(m + 1, &rowstarts));
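       /* first pass over the hash set: count the entries in each local row, then prefix-sum to obtain CSR row offsets */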
550:   PetscHashIterBegin(ht, hi);
551:   for (; !PetscHashIterAtEnd(ht, hi);) {
552:     PetscHashIterGetKey(ht, hi, key);
553:     rowstarts[key.i - rstart + 1]++;
554:     PetscHashIterNext(ht, hi);
555:   }
556:   for (i = 1; i < m + 1; i++) rowstarts[i] = rowstarts[i - 1] + rowstarts[i];

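       /* second pass: scatter the column indices into col[], advancing rowstarts[] as a per-row insertion cursor */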
558:   PetscCall(PetscHSetIJGetSize(ht, &nz));
559:   PetscCall(PetscMalloc1(nz, &col));
560:   PetscHashIterBegin(ht, hi);
561:   for (; !PetscHashIterAtEnd(ht, hi);) {
562:     PetscHashIterGetKey(ht, hi, key);
563:     col[rowstarts[key.i - rstart]++] = key.j;
564:     PetscHashIterNext(ht, hi);
565:   }
566:   PetscCall(PetscHSetIJDestroy(&ht));
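       /* the cursors now point at the row ends; shift them back so rowstarts[] again holds the row start offsets */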
567:   for (i = m; i > 0; i--) rowstarts[i] = rowstarts[i - 1];
568:   rowstarts[0] = 0;

570:   for (PetscInt i = 0; i < m; i++) PetscCall(PetscSortInt(rowstarts[i + 1] - rowstarts[i], &col[rowstarts[i]]));

572:   adj->i       = rowstarts;
573:   adj->j       = col;
574:   adj->nz      = rowstarts[m];
575:   adj->freeaij = PETSC_TRUE;
576:   PetscFunctionReturn(PETSC_SUCCESS);
577: }

579: static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
580:                                        MatGetRow_MPIAdj,
581:                                        MatRestoreRow_MPIAdj,
582:                                        NULL,
583:                                        /* 4*/ NULL,
584:                                        NULL,
585:                                        NULL,
586:                                        NULL,
587:                                        NULL,
588:                                        NULL,
589:                                        /*10*/ NULL,
590:                                        NULL,
591:                                        NULL,
592:                                        NULL,
593:                                        NULL,
594:                                        /*15*/ NULL,
595:                                        MatEqual_MPIAdj,
596:                                        NULL,
597:                                        NULL,
598:                                        NULL,
599:                                        /*20*/ MatAssemblyBegin_MPIAdj,
600:                                        MatAssemblyEnd_MPIAdj,
601:                                        MatSetOption_MPIAdj,
602:                                        NULL,
603:                                        /*24*/ NULL,
604:                                        NULL,
605:                                        NULL,
606:                                        NULL,
607:                                        NULL,
608:                                        /*29*/ NULL,
609:                                        NULL,
610:                                        NULL,
611:                                        NULL,
612:                                        NULL,
613:                                        /*34*/ NULL,
614:                                        NULL,
615:                                        NULL,
616:                                        NULL,
617:                                        NULL,
618:                                        /*39*/ NULL,
619:                                        MatCreateSubMatrices_MPIAdj,
620:                                        NULL,
621:                                        NULL,
622:                                        NULL,
623:                                        /*44*/ NULL,
624:                                        NULL,
625:                                        MatShift_Basic,
626:                                        NULL,
627:                                        NULL,
628:                                        /*49*/ NULL,
629:                                        MatGetRowIJ_MPIAdj,
630:                                        MatRestoreRowIJ_MPIAdj,
631:                                        NULL,
632:                                        NULL,
633:                                        /*54*/ NULL,
634:                                        NULL,
635:                                        NULL,
636:                                        NULL,
637:                                        NULL,
638:                                        /*59*/ NULL,
639:                                        MatDestroy_MPIAdj,
640:                                        MatView_MPIAdj,
641:                                        MatConvertFrom_MPIAdj,
642:                                        NULL,
643:                                        /*64*/ NULL,
644:                                        NULL,
645:                                        NULL,
646:                                        NULL,
647:                                        NULL,
648:                                        /*69*/ NULL,
649:                                        NULL,
650:                                        NULL,
651:                                        NULL,
652:                                        NULL,
653:                                        /*74*/ NULL,
654:                                        NULL,
655:                                        NULL,
656:                                        NULL,
657:                                        NULL,
658:                                        /*79*/ NULL,
659:                                        NULL,
660:                                        NULL,
661:                                        NULL,
662:                                        NULL,
663:                                        /*84*/ NULL,
664:                                        NULL,
665:                                        NULL,
666:                                        NULL,
667:                                        NULL,
668:                                        /*89*/ NULL,
669:                                        NULL,
670:                                        NULL,
671:                                        NULL,
672:                                        NULL,
673:                                        /*94*/ NULL,
674:                                        NULL,
675:                                        NULL,
676:                                        NULL,
677:                                        NULL,
678:                                        /*99*/ NULL,
679:                                        NULL,
680:                                        NULL,
681:                                        NULL,
682:                                        NULL,
683:                                        /*104*/ NULL,
684:                                        NULL,
685:                                        NULL,
686:                                        NULL,
687:                                        NULL,
688:                                        /*109*/ NULL,
689:                                        NULL,
690:                                        NULL,
691:                                        NULL,
692:                                        NULL,
693:                                        /*114*/ NULL,
694:                                        NULL,
695:                                        NULL,
696:                                        NULL,
697:                                        NULL,
698:                                        /*119*/ NULL,
699:                                        NULL,
700:                                        NULL,
701:                                        NULL,
702:                                        NULL,
703:                                        /*124*/ NULL,
704:                                        NULL,
705:                                        NULL,
706:                                        NULL,
707:                                        MatCreateSubMatricesMPI_MPIAdj,
708:                                        /*129*/ NULL,
709:                                        NULL,
710:                                        NULL,
711:                                        NULL,
712:                                        NULL,
713:                                        /*134*/ NULL,
714:                                        NULL,
715:                                        NULL,
716:                                        NULL,
717:                                        NULL,
718:                                        /*139*/ NULL,
719:                                        NULL,
720:                                        NULL,
721:                                        NULL,
722:                                        NULL,
723:                                        /*144*/ NULL,
724:                                        NULL,
725:                                        NULL,
726:                                        NULL,
727:                                        NULL,
728:                                        NULL,
729:                                        /*150*/ NULL,
730:                                        NULL};

732: static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
733: {
734:   Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
735:   PetscBool   useedgeweights;

737:   PetscFunctionBegin;
738:   PetscCall(PetscLayoutSetUp(B->rmap));
739:   PetscCall(PetscLayoutSetUp(B->cmap));
740:   if (values) useedgeweights = PETSC_TRUE;
741:   else useedgeweights = PETSC_FALSE;
742:   /* make sure every process agrees on whether edge weights are being used */
743:   PetscCall(MPIU_Allreduce((int *)&useedgeweights, (int *)&b->useedgeweights, 1, MPI_INT, MPI_MAX, PetscObjectComm((PetscObject)B)));

745:   if (PetscDefined(USE_DEBUG)) {
746:     PetscInt ii;

748:     PetscCheck(i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "First i[] index must be zero, instead it is %" PetscInt_FMT, i[0]);
749:     for (ii = 1; ii < B->rmap->n; ii++) {
750:       PetscCheck(i[ii] >= 0 && i[ii] >= i[ii - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i[%" PetscInt_FMT "]=%" PetscInt_FMT " index is out of range: i[%" PetscInt_FMT "]=%" PetscInt_FMT, ii, i[ii], ii - 1, i[ii - 1]);
751:     }
752:     for (ii = 0; ii < i[B->rmap->n]; ii++) PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index %" PetscInt_FMT " out of range %" PetscInt_FMT, ii, j[ii]);
753:   }
754:   b->j      = j;
755:   b->i      = i;
756:   b->values = values;

758:   b->nz        = i[B->rmap->n];
759:   b->diag      = NULL;
760:   b->symmetric = PETSC_FALSE;
761:   b->freeaij   = PETSC_TRUE;

763:   B->ops->assemblybegin = NULL;
764:   B->ops->assemblyend   = NULL;
765:   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
766:   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
767:   PetscCall(MatStashDestroy_Private(&B->stash));
768:   PetscFunctionReturn(PETSC_SUCCESS);
769: }

771: static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A, Mat *B)
772: {
773:   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
774:   const PetscInt *ranges;
775:   MPI_Comm        acomm, bcomm;
776:   MPI_Group       agroup, bgroup;
777:   PetscMPIInt     i, rank, size, nranks, *ranks;

779:   PetscFunctionBegin;
780:   *B = NULL;
781:   PetscCall(PetscObjectGetComm((PetscObject)A, &acomm));
782:   PetscCallMPI(MPI_Comm_size(acomm, &size));
783:   PetscCallMPI(MPI_Comm_rank(acomm, &rank));
784:   PetscCall(MatGetOwnershipRanges(A, &ranges));
785:   for (i = 0, nranks = 0; i < size; i++) {
786:     if (ranges[i + 1] - ranges[i] > 0) nranks++;
787:   }
788:   if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm */
789:     PetscCall(PetscObjectReference((PetscObject)A));
790:     *B = A;
791:     PetscFunctionReturn(PETSC_SUCCESS);
792:   }

794:   PetscCall(PetscMalloc1(nranks, &ranks));
795:   for (i = 0, nranks = 0; i < size; i++) {
796:     if (ranges[i + 1] - ranges[i] > 0) ranks[nranks++] = i;
797:   }
798:   PetscCallMPI(MPI_Comm_group(acomm, &agroup));
799:   PetscCallMPI(MPI_Group_incl(agroup, nranks, ranks, &bgroup));
800:   PetscCall(PetscFree(ranks));
801:   PetscCallMPI(MPI_Comm_create(acomm, bgroup, &bcomm));
802:   PetscCallMPI(MPI_Group_free(&agroup));
803:   PetscCallMPI(MPI_Group_free(&bgroup));
804:   if (bcomm != MPI_COMM_NULL) {
805:     PetscInt    m, N;
806:     Mat_MPIAdj *b;
807:     PetscCall(MatGetLocalSize(A, &m, NULL));
808:     PetscCall(MatGetSize(A, NULL, &N));
809:     PetscCall(MatCreateMPIAdj(bcomm, m, N, a->i, a->j, a->values, B));
810:     b          = (Mat_MPIAdj *)(*B)->data;
811:     b->freeaij = PETSC_FALSE;
812:     PetscCallMPI(MPI_Comm_free(&bcomm));
813:   }
814:   PetscFunctionReturn(PETSC_SUCCESS);
815: }

817: PetscErrorCode MatMPIAdjToSeq_MPIAdj(Mat A, Mat *B)
818: {
819:   PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
820:   PetscInt   *Values = NULL;
821:   Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
822:   PetscMPIInt mnz, mm, *allnz, *allm, size, *dispnz, *dispm;

824:   PetscFunctionBegin;
825:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
826:   PetscCall(MatGetSize(A, &M, &N));
827:   PetscCall(MatGetLocalSize(A, &m, NULL));
828:   nz = adj->nz;
829:   PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " does not match i[m] %" PetscInt_FMT, nz, adj->i[m]);
830:   PetscCall(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

832:   PetscCall(PetscMPIIntCast(nz, &mnz));
833:   PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
834:   PetscCallMPI(MPI_Allgather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
835:   dispnz[0] = 0;
836:   for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
837:   if (adj->values) {
838:     PetscCall(PetscMalloc1(NZ, &Values));
839:     PetscCallMPI(MPI_Allgatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
840:   }
841:   PetscCall(PetscMalloc1(NZ, &J));
842:   PetscCallMPI(MPI_Allgatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, PetscObjectComm((PetscObject)A)));
843:   PetscCall(PetscFree2(allnz, dispnz));
844:   PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
845:   nzstart -= nz;
846:   /* shift the i[] values so they will be correct after being received */
847:   for (i = 0; i < m; i++) adj->i[i] += nzstart;
848:   PetscCall(PetscMalloc1(M + 1, &II));
849:   PetscCall(PetscMPIIntCast(m, &mm));
850:   PetscCall(PetscMalloc2(size, &allm, size, &dispm));
851:   PetscCallMPI(MPI_Allgather(&mm, 1, MPI_INT, allm, 1, MPI_INT, PetscObjectComm((PetscObject)A)));
852:   dispm[0] = 0;
853:   for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
854:   PetscCallMPI(MPI_Allgatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, PetscObjectComm((PetscObject)A)));
855:   PetscCall(PetscFree2(allm, dispm));
856:   II[M] = NZ;
857:   /* shift the i[] values back */
858:   for (i = 0; i < m; i++) adj->i[i] -= nzstart;
859:   PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
860:   PetscFunctionReturn(PETSC_SUCCESS);
861: }

863: PetscErrorCode MatMPIAdjToSeqRankZero_MPIAdj(Mat A, Mat *B)
864: {
865:   PetscInt    M, N, *II, *J, NZ, nz, m, nzstart, i;
866:   PetscInt   *Values = NULL;
867:   Mat_MPIAdj *adj    = (Mat_MPIAdj *)A->data;
868:   PetscMPIInt mnz, mm, *allnz = NULL, *allm, size, *dispnz, *dispm, rank;

870:   PetscFunctionBegin;
871:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
872:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
873:   PetscCall(MatGetSize(A, &M, &N));
874:   PetscCall(MatGetLocalSize(A, &m, NULL));
875:   nz = adj->nz;
876:   PetscCheck(adj->i[m] == nz, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nz %" PetscInt_FMT " does not match i[m] %" PetscInt_FMT, nz, adj->i[m]);
877:   PetscCall(MPIU_Allreduce(&nz, &NZ, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));

879:   PetscCall(PetscMPIIntCast(nz, &mnz));
880:   if (!rank) PetscCall(PetscMalloc2(size, &allnz, size, &dispnz));
881:   PetscCallMPI(MPI_Gather(&mnz, 1, MPI_INT, allnz, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
882:   if (!rank) {
883:     dispnz[0] = 0;
884:     for (i = 1; i < size; i++) dispnz[i] = dispnz[i - 1] + allnz[i - 1];
885:     if (adj->values) {
886:       PetscCall(PetscMalloc1(NZ, &Values));
887:       PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, Values, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
888:     }
889:     PetscCall(PetscMalloc1(NZ, &J));
890:     PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, J, allnz, dispnz, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
891:     PetscCall(PetscFree2(allnz, dispnz));
892:   } else {
893:     if (adj->values) PetscCallMPI(MPI_Gatherv(adj->values, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
894:     PetscCallMPI(MPI_Gatherv(adj->j, mnz, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
895:   }
896:   PetscCallMPI(MPI_Scan(&nz, &nzstart, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)A)));
897:   nzstart -= nz;
898:   /* shift the i[] values so they will be correct after being received */
899:   for (i = 0; i < m; i++) adj->i[i] += nzstart;
900:   PetscCall(PetscMPIIntCast(m, &mm));
901:   if (!rank) {
902:     PetscCall(PetscMalloc1(M + 1, &II));
903:     PetscCall(PetscMalloc2(size, &allm, size, &dispm));
904:     PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, allm, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
905:     dispm[0] = 0;
906:     for (i = 1; i < size; i++) dispm[i] = dispm[i - 1] + allm[i - 1];
907:     PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, II, allm, dispm, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
908:     PetscCall(PetscFree2(allm, dispm));
909:     II[M] = NZ;
910:   } else {
911:     PetscCallMPI(MPI_Gather(&mm, 1, MPI_INT, NULL, 1, MPI_INT, 0, PetscObjectComm((PetscObject)A)));
912:     PetscCallMPI(MPI_Gatherv(adj->i, mm, MPIU_INT, NULL, NULL, NULL, MPIU_INT, 0, PetscObjectComm((PetscObject)A)));
913:   }
914:   /* shift the i[] values back */
915:   for (i = 0; i < m; i++) adj->i[i] -= nzstart;
916:   if (!rank) PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF, M, N, II, J, Values, B));
917:   PetscFunctionReturn(PETSC_SUCCESS);
918: }

920: /*@
921:    MatMPIAdjCreateNonemptySubcommMat - create the same `MATMPIADJ` matrix on a subcommunicator containing only processes owning a positive number of rows

923:    Collective

925:    Input Parameter:
926: .  A - original `MATMPIADJ` matrix

928:    Output Parameter:
929: .  B - matrix on subcommunicator, `NULL` on ranks that owned zero rows of `A`

931:    Level: developer

933:    Note:
934:    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

936:    The matrix `B` should be destroyed with `MatDestroy()`. The arrays are not copied, so `B` should be destroyed before `A` is destroyed.

938: .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreateMPIAdj()`
939: @*/
940: PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A, Mat *B)
941: {
942:   PetscFunctionBegin;
944:   PetscUseMethod(A, "MatMPIAdjCreateNonemptySubcommMat_C", (Mat, Mat *), (A, B));
945:   PetscFunctionReturn(PETSC_SUCCESS);
946: }
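
/*
   A minimal usage sketch of MatMPIAdjCreateNonemptySubcommMat(): the adjacency matrix A is
   assumed to have been created elsewhere (e.g. with MatCreateMPIAdj()); the helper name is
   illustrative.
*/
static PetscErrorCode ExampleNonemptySubcommUsage(Mat A)
{
  Mat B;

  PetscFunctionBegin;
  PetscCall(MatMPIAdjCreateNonemptySubcommMat(A, &B));
  if (B) {
    /* only ranks that own at least one row of A obtain a matrix; use B here, e.g. with a partitioner */
    PetscCall(MatDestroy(&B)); /* destroy B before A, since the two share the i/j/values arrays */
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}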

948: /*MC
949:    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
950:    intended for use in constructing orderings and partitionings.

952:   Level: beginner

954:   Note:
955:     You can provide values to the matrix using `MatMPIAdjSetPreallocation()`, `MatCreateMPIAdj()`, or
956:     by calling `MatSetValues()` and `MatAssemblyBegin()` followed by `MatAssemblyEnd()`.

958: .seealso: [](ch_matrices), `Mat`, `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
959: M*/
960: PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
961: {
962:   Mat_MPIAdj *b;

964:   PetscFunctionBegin;
965:   PetscCall(PetscNew(&b));
966:   B->data = (void *)b;
967:   PetscCall(PetscMemcpy(B->ops, &MatOps_Values, sizeof(struct _MatOps)));
968:   B->assembled    = PETSC_FALSE;
969:   B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

971:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjSetPreallocation_C", MatMPIAdjSetPreallocation_MPIAdj));
972:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjCreateNonemptySubcommMat_C", MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
973:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeq_C", MatMPIAdjToSeq_MPIAdj));
974:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIAdjToSeqRankZero_C", MatMPIAdjToSeqRankZero_MPIAdj));
975:   PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIADJ));
976:   PetscFunctionReturn(PETSC_SUCCESS);
977: }
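
/*
   A minimal sketch of assembling a MATMPIADJ incrementally with MatSetValues(), as described
   in the MATMPIADJ notes above. The helper name is illustrative; it assumes at least two
   global rows and columns and inserts the single undirected edge 0-1 from every rank
   (duplicates and off-process rows are handled by the hash set and the stash).
*/
static PetscErrorCode ExampleAssembleMPIAdjByValues(MPI_Comm comm, PetscInt mlocal, PetscInt Nglobal, Mat *A)
{
  PetscInt    row0 = 0, col0 = 1, row1 = 1, col1 = 0;
  PetscScalar one  = 1.0; /* only the nonzero structure is kept, not the numerical value */

  PetscFunctionBegin;
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, mlocal, PETSC_DETERMINE, PETSC_DETERMINE, Nglobal));
  PetscCall(MatSetType(*A, MATMPIADJ));
  PetscCall(MatSetValues(*A, 1, &row0, 1, &col0, &one, INSERT_VALUES));
  PetscCall(MatSetValues(*A, 1, &row1, 1, &col1, &one, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}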

979: /*@C
980:    MatMPIAdjToSeq - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on each process (needed by sequential partitioners)

982:    Logically Collective

984:    Input Parameter:
985: .  A - the matrix

987:    Output Parameter:
988: .  B - the same matrix on all processes

990:    Level: intermediate

992: .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeqRankZero()`
993: @*/
994: PetscErrorCode MatMPIAdjToSeq(Mat A, Mat *B)
995: {
996:   PetscFunctionBegin;
997:   PetscUseMethod(A, "MatMPIAdjToSeq_C", (Mat, Mat *), (A, B));
998:   PetscFunctionReturn(PETSC_SUCCESS);
999: }
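
/*
   A minimal usage sketch of MatMPIAdjToSeq(): gather the distributed graph onto every
   process, for example before calling a sequential partitioning library. The helper name is
   illustrative.
*/
static PetscErrorCode ExampleGatherAdjOnAllRanks(Mat A)
{
  Mat Aseq;

  PetscFunctionBegin;
  PetscCall(MatMPIAdjToSeq(A, &Aseq));
  /* every rank now holds the complete graph in a MATMPIADJ on PETSC_COMM_SELF */
  PetscCall(MatDestroy(&Aseq));
  PetscFunctionReturn(PETSC_SUCCESS);
}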

1001: /*@C
1002:    MatMPIAdjToSeqRankZero - Converts a parallel `MATMPIADJ` matrix to a complete `MATMPIADJ` matrix on rank zero (needed by sequential partitioners)

1004:    Logically Collective

1006:    Input Parameter:
1007: .  A - the matrix

1009:    Output Parameter:
1010: .  B - the same matrix on rank zero, not set on other ranks

1012:    Level: intermediate

1014:    Note:
1015:      This routine is advantageous on systems with multiple ranks per node since only one copy of the matrix
1016:      is stored (on rank zero), instead of one copy per rank. This allows much larger parallel graphs to be
1017:      partitioned sequentially.

1019: .seealso: [](ch_matrices), `Mat`, `MATMPIADJ`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MatMPIAdjToSeq()`
1020: @*/
1021: PetscErrorCode MatMPIAdjToSeqRankZero(Mat A, Mat *B)
1022: {
1023:   PetscFunctionBegin;
1024:   PetscUseMethod(A, "MatMPIAdjToSeqRankZero_C", (Mat, Mat *), (A, B));
1025:   PetscFunctionReturn(PETSC_SUCCESS);
1026: }
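
/*
   A minimal usage sketch of MatMPIAdjToSeqRankZero(): B is set only on rank zero, so the
   other ranks must not touch it. The helper name is illustrative.
*/
static PetscErrorCode ExampleGatherAdjOnRankZero(Mat A)
{
  Mat         Aseq = NULL;
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
  PetscCall(MatMPIAdjToSeqRankZero(A, &Aseq));
  if (rank == 0) {
    /* partition or analyze the complete graph sequentially here */
    PetscCall(MatDestroy(&Aseq));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}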

1028: /*@C
1029:    MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix structure (and optional edge weights)

1031:    Logically Collective

1033:    Input Parameters:
1034: +  A - the matrix
1035: .  i - the indices into `j` for the start of each row
1036: .  j - the column indices for each row (sorted for each row).
1037:        The indices in `i` and `j` start with zero (NOT with one).
1038: -  values - [use `NULL` if not provided] edge weights

1040:    Level: intermediate

1042: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`, `MatCreateMPIAdj()`
1043: @*/
1044: PetscErrorCode MatMPIAdjSetPreallocation(Mat B, PetscInt *i, PetscInt *j, PetscInt *values)
1045: {
1046:   PetscFunctionBegin;
1047:   PetscTryMethod(B, "MatMPIAdjSetPreallocation_C", (Mat, PetscInt *, PetscInt *, PetscInt *), (B, i, j, values));
1048:   PetscFunctionReturn(PETSC_SUCCESS);
1049: }
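
/*
   A minimal sketch of supplying the CSR-style arrays (with optional integer edge weights)
   through MatMPIAdjSetPreallocation(). The arrays come from PetscMalloc1() and ownership
   passes to the matrix, which frees them when it is destroyed. The helper name is
   illustrative; it assumes exactly two ranks, each owning two vertices of the ring 0-1-2-3-0.
*/
static PetscErrorCode ExampleAdjSetPreallocation(MPI_Comm comm, Mat *A)
{
  PetscInt *ia, *ja, *w;

  PetscFunctionBegin;
  PetscCall(PetscMalloc1(3, &ia)); /* two local rows -> three row offsets */
  PetscCall(PetscMalloc1(4, &ja)); /* two neighbours per local row */
  PetscCall(PetscMalloc1(4, &w));  /* one integer weight per local edge */
  ia[0] = 0; ia[1] = 2; ia[2] = 4;
  /* rows 0,1 (rank 0) and rows 2,3 (rank 1) happen to have the same sorted neighbour lists */
  ja[0] = 1; ja[1] = 3; ja[2] = 0; ja[3] = 2;
  w[0] = w[1] = w[2] = w[3] = 1;
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, 2, PETSC_DETERMINE, PETSC_DETERMINE, 4));
  PetscCall(MatSetType(*A, MATMPIADJ));
  PetscCall(MatMPIAdjSetPreallocation(*A, ia, ja, w));
  PetscFunctionReturn(PETSC_SUCCESS);
}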

1051: /*@C
1052:    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
1053:    The matrix does not have numerical values associated with it, but is
1054:    intended for ordering (to reduce bandwidth, etc.) and partitioning.

1056:    Collective

1058:    Input Parameters:
1059: +  comm - MPI communicator
1060: .  m - number of local rows
1061: .  N - number of global columns
1062: .  i - the indices into `j` for the start of each row
1063: .  j - the column indices for each row (sorted for each row).
1064:        The indices in `i` and `j` start with zero (NOT with one).
1065: -  values -[use `NULL` if not provided] edge weights

1067:    Output Parameter:
1068: .  A - the matrix

1070:    Level: intermediate

1072:    Notes:
1073:    You must NOT free the `i`, `values` and `j` arrays yourself. PETSc will free them
1074:    when the matrix is destroyed; you must allocate them with `PetscMalloc()`. If you
1075:    call from Fortran you need not create the arrays with `PetscMalloc()`.

1077:    You should not include the matrix diagonals.

1079:    If you already have a matrix, you can create its adjacency matrix by a call
1080:    to `MatConvert()`, specifying a type of `MATMPIADJ`.

1082:    Possible values for `MatSetOption()` - `MAT_STRUCTURALLY_SYMMETRIC`

1084: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
1085: @*/
1086: PetscErrorCode MatCreateMPIAdj(MPI_Comm comm, PetscInt m, PetscInt N, PetscInt *i, PetscInt *j, PetscInt *values, Mat *A)
1087: {
1088:   PetscFunctionBegin;
1089:   PetscCall(MatCreate(comm, A));
1090:   PetscCall(MatSetSizes(*A, m, PETSC_DETERMINE, PETSC_DETERMINE, N));
1091:   PetscCall(MatSetType(*A, MATMPIADJ));
1092:   PetscCall(MatMPIAdjSetPreallocation(*A, i, j, values));
1093:   PetscFunctionReturn(PETSC_SUCCESS);
1094: }
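
/*
   A minimal sketch of creating a MATMPIADJ with MatCreateMPIAdj() and handing it to a
   MatPartitioning object. The helper name is illustrative; it assumes exactly two ranks,
   each owning two vertices of the ring 0-1-2-3-0, and the partitioner type is taken from
   the options database (its availability depends on the PETSc configuration).
*/
static PetscErrorCode ExamplePartitionRing(MPI_Comm comm, IS *partitioning)
{
  Mat             adj;
  MatPartitioning part;
  PetscInt       *ia, *ja;

  PetscFunctionBegin;
  PetscCall(PetscMalloc1(3, &ia));
  PetscCall(PetscMalloc1(4, &ja));
  ia[0] = 0; ia[1] = 2; ia[2] = 4;
  ja[0] = 1; ja[1] = 3; ja[2] = 0; ja[3] = 2; /* sorted neighbour lists, no diagonal entries */
  PetscCall(MatCreateMPIAdj(comm, 2, 4, ia, ja, NULL, &adj)); /* adj takes ownership of ia and ja */
  PetscCall(MatPartitioningCreate(comm, &part));
  PetscCall(MatPartitioningSetAdjacency(part, adj));
  PetscCall(MatPartitioningSetFromOptions(part));
  PetscCall(MatPartitioningApply(part, partitioning));
  PetscCall(MatPartitioningDestroy(&part));
  PetscCall(MatDestroy(&adj));
  PetscFunctionReturn(PETSC_SUCCESS);
}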