Actual source code: mmaij.c


/*
   Support for the parallel AIJ matrix vector multiply
*/
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petsc/private/vecimpl.h>
#include <petsc/private/isimpl.h>
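/*
   Hedged usage sketch (not part of mmaij.c): the routines in this file build the
   ghost vector lvec, the local-to-global column map garray, and the scatter
   context Mvctx that the MATMPIAIJ matrix-vector multiply uses internally.
   The minimal driver below only illustrates that code path, assuming a simple
   tridiagonal matrix; it is wrapped in #if 0 so it does not affect this
   translation unit, and the sizes and values are arbitrary.
*/
#if 0
#include <petscmat.h>

int main(int argc, char **argv)
{
  Mat      A;
  Vec      x, y;
  PetscInt i, rstart, rend, N = 100;

  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, N, N));
  PetscCall(MatSetType(A, MATMPIAIJ));
  PetscCall(MatSetUp(A));
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  for (i = rstart; i < rend; i++) {
    PetscCall(MatSetValue(A, i, i, 2.0, INSERT_VALUES));
    if (i > 0) PetscCall(MatSetValue(A, i, i - 1, -1.0, INSERT_VALUES));     /* off-process columns become ghost columns */
    if (i < N - 1) PetscCall(MatSetValue(A, i, i + 1, -1.0, INSERT_VALUES));
  }
  /* for MATMPIAIJ, finishing the assembly calls MatSetUpMultiply_MPIAIJ() below */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatCreateVecs(A, &x, &y));
  PetscCall(VecSet(x, 1.0));
  PetscCall(MatMult(A, x, y)); /* uses lvec and Mvctx built in this file */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}
#endif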

PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data;
  Mat_SeqAIJ *B   = (Mat_SeqAIJ *)(aij->B->data);
  PetscInt    i, j, *aj = B->j, *garray;
  PetscInt    ec = 0; /* Number of nonzero external columns */
  IS          from, to;
  Vec         gvec;
#if defined(PETSC_USE_CTABLE)
  PetscHMapI    gid1_lid1 = NULL;
  PetscHashIter tpos;
  PetscInt      gid, lid;
#else
  PetscInt N = mat->cmap->N, *indices;
#endif

  PetscFunctionBegin;
  if (!aij->garray) {
    PetscCheck(aij->B, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing B mat");
#if defined(PETSC_USE_CTABLE)
    /* use a table */
    PetscCall(PetscHMapICreateWithSize(aij->B->rmap->n, &gid1_lid1));
    for (i = 0; i < aij->B->rmap->n; i++) {
      for (j = 0; j < B->ilen[i]; j++) {
        PetscInt data, gid1 = aj[B->i[i] + j] + 1;
        PetscCall(PetscHMapIGetWithDefault(gid1_lid1, gid1, 0, &data));
        if (!data) {
          /* one-based table: a returned value of 0 means this column has not been seen yet */
          PetscCall(PetscHMapISet(gid1_lid1, gid1, ++ec));
        }
      }
    }
    /* form array of columns we need */
    PetscCall(PetscMalloc1(ec, &garray));
    PetscHashIterBegin(gid1_lid1, tpos);
    while (!PetscHashIterAtEnd(gid1_lid1, tpos)) {
      PetscHashIterGetKey(gid1_lid1, tpos, gid);
      PetscHashIterGetVal(gid1_lid1, tpos, lid);
      PetscHashIterNext(gid1_lid1, tpos);
      gid--;
      lid--;
      garray[lid] = gid;
    }
    PetscCall(PetscSortInt(ec, garray)); /* sort garray, then rebuild the table with the sorted local ids */
    PetscCall(PetscHMapIClear(gid1_lid1));
    for (i = 0; i < ec; i++) PetscCall(PetscHMapISet(gid1_lid1, garray[i] + 1, i + 1));
    /* compact out the extra columns in B */
    for (i = 0; i < aij->B->rmap->n; i++) {
      for (j = 0; j < B->ilen[i]; j++) {
        PetscInt gid1 = aj[B->i[i] + j] + 1;
        PetscCall(PetscHMapIGetWithDefault(gid1_lid1, gid1, 0, &lid));
        lid--;
        aj[B->i[i] + j] = lid;
      }
    }
    PetscCall(PetscLayoutDestroy(&aij->B->cmap));
    PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)aij->B), ec, ec, 1, &aij->B->cmap));
    PetscCall(PetscHMapIDestroy(&gid1_lid1));
#else
    /* Make an array as long as the number of columns */
    /* mark those columns that are in aij->B */
    PetscCall(PetscCalloc1(N, &indices));
    for (i = 0; i < aij->B->rmap->n; i++) {
      for (j = 0; j < B->ilen[i]; j++) {
        if (!indices[aj[B->i[i] + j]]) ec++;
        indices[aj[B->i[i] + j]] = 1;
      }
    }

    /* form array of columns we need */
    PetscCall(PetscMalloc1(ec, &garray));
    ec = 0;
    for (i = 0; i < N; i++) {
      if (indices[i]) garray[ec++] = i;
    }

    /* now make indices point into garray */
    for (i = 0; i < ec; i++) indices[garray[i]] = i;

    /* compact out the extra columns in B */
    for (i = 0; i < aij->B->rmap->n; i++) {
      for (j = 0; j < B->ilen[i]; j++) aj[B->i[i] + j] = indices[aj[B->i[i] + j]];
    }
    PetscCall(PetscLayoutDestroy(&aij->B->cmap));
    PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)aij->B), ec, ec, 1, &aij->B->cmap));
    PetscCall(PetscFree(indices));
#endif
  } else {
    garray = aij->garray;
  }

  if (!aij->lvec) {
    PetscCheck(aij->B, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing B mat");
    PetscCall(MatCreateVecs(aij->B, &aij->lvec, NULL));
  }
  PetscCall(VecGetSize(aij->lvec, &ec));

  /* create two temporary index sets used to build the scatter-gather */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ec, garray, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, ec, 0, 1, &to));

  /* create a temporary global vector to generate the scatter context */
  /* this does not allocate the array's memory, so it is efficient */
  PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)mat), 1, mat->cmap->n, mat->cmap->N, NULL, &gvec));

  /* generate the scatter context */
  PetscCall(VecScatterDestroy(&aij->Mvctx));
  PetscCall(VecScatterCreate(gvec, from, aij->lvec, to, &aij->Mvctx));
  PetscCall(VecScatterViewFromOptions(aij->Mvctx, (PetscObject)mat, "-matmult_vecscatter_view"));
  aij->garray = garray;

  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  PetscCall(VecDestroy(&gvec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
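/*
   Hedged illustration (not part of mmaij.c): the loops above compact the
   off-diagonal block B so that its column ids run over 0..ec-1, with garray[]
   recording the local-to-global column map. The stand-alone sketch below mirrors
   the non-CTABLE branch on toy data; it is wrapped in #if 0 and the numbers are
   arbitrary.
*/
#if 0
#include <stdio.h>

static void CompactColumnsSketch(void)
{
  int aj[4]       = {3, 7, 12, 7}; /* global column ids referenced by the off-diagonal block */
  int N           = 16;            /* total number of global columns */
  int indices[16] = {0};           /* marker array over all global columns */
  int garray[4], ec = 0, i;

  for (i = 0; i < 4; i++) { /* count and mark each referenced global column once */
    if (!indices[aj[i]]) ec++;
    indices[aj[i]] = 1;
  }
  ec = 0;
  for (i = 0; i < N; i++) { /* garray becomes the sorted referenced columns: {3, 7, 12} */
    if (indices[i]) garray[ec++] = i;
  }
  for (i = 0; i < ec; i++) indices[garray[i]] = i; /* indices[] now maps global id -> local id */
  for (i = 0; i < 4; i++) aj[i] = indices[aj[i]];  /* aj becomes {0, 1, 2, 1} */
  for (i = 0; i < 4; i++) printf("%d ", aj[i]);
  printf("\n");
}
#endif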

/* Disassemble the off-diagonal portion B of the MPIAIJXxx matrix.
   In other words, change B from the reduced format that uses local column ids
   to the expanded format that uses global column ids, which makes it easier to
   insert new nonzeros (given with global column ids) into the matrix.
   The off-diagonal B determines the communication in the matrix-vector multiply.
*/
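/*
   Hedged worked example (not from the PETSc source): suppose a rank's garray is
   {3, 7, 12} and one row of the reduced B stores local column ids {0, 2} with
   values {a, b}. Disassembly re-inserts that row into Bnew with the global
   column ids {garray[0], garray[2]} = {3, 12} and the same values, and Bnew's
   column size becomes the full global column count A->cmap->N.
*/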
PetscErrorCode MatDisAssemble_MPIAIJ(Mat A)
{
  Mat_MPIAIJ *aij = (Mat_MPIAIJ *)A->data;
  Mat         B = aij->B, Bnew = NULL;

  PetscFunctionBegin;
  /* free data related to the matrix-vector multiply */
  PetscCall(VecDestroy(&aij->lvec));
  if (aij->colmap) {
#if defined(PETSC_USE_CTABLE)
    PetscCall(PetscHMapIDestroy(&aij->colmap));
#else
    PetscCall(PetscFree(aij->colmap));
#endif
  }

  if (B) {
    Mat_SeqAIJ        *Baij = (Mat_SeqAIJ *)B->data;
    PetscInt           i, j, m = B->rmap->n, n = A->cmap->N, col, ct = 0, *garray = aij->garray, *nz;
    PetscScalar        v;
    const PetscScalar *ba;

    /* make sure that B is assembled so we can access its values */
    PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));

    /* create a new B and copy the entries over */
    PetscCall(PetscMalloc1(m + 1, &nz));
    for (i = 0; i < m; i++) nz[i] = Baij->i[i + 1] - Baij->i[i];
    PetscCall(MatCreate(PETSC_COMM_SELF, &Bnew));
    PetscCall(MatSetSizes(Bnew, m, n, m, n)); /* Bnew now uses A->cmap->N as its column size */
    PetscCall(MatSetBlockSizesFromMats(Bnew, A, A));
    PetscCall(MatSetType(Bnew, ((PetscObject)B)->type_name));
    PetscCall(MatSeqAIJSetPreallocation(Bnew, 0, nz));

    if (Baij->nonew >= 0) { /* Inherit insertion error options (if nonnegative). */
      ((Mat_SeqAIJ *)Bnew->data)->nonew = Baij->nonew;
    }

    /*
     Ensure that B's nonzerostate is monotonically increasing.
     Or should this follow the MatSetValues() loop to preserve B's nonzerostate across a MatDisAssemble() call?
     */
    Bnew->nonzerostate = B->nonzerostate;

    PetscCall(PetscFree(nz));
    PetscCall(MatSeqAIJGetArrayRead(B, &ba));
    for (i = 0; i < m; i++) {
      for (j = Baij->i[i]; j < Baij->i[i + 1]; j++) {
        col = garray[Baij->j[ct]];
        v   = ba[ct++];
        PetscCall(MatSetValues(Bnew, 1, &i, 1, &col, &v, B->insertmode));
      }
    }
    PetscCall(MatSeqAIJRestoreArrayRead(B, &ba));
    PetscCall(MatDestroy(&B));
  }
  PetscCall(PetscFree(aij->garray));

  aij->B           = Bnew;
  A->was_assembled = PETSC_FALSE;
  A->assembled     = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Ugly stuff added for Glenn; someday we should fix this up */

static PetscInt *auglyrmapd = NULL, *auglyrmapo = NULL; /* mapping from the local ordering to the "diagonal" and "off-diagonal" parts of the local matrix */
static Vec       auglydd = NULL, auglyoo = NULL;        /* work vectors used to scale the two parts of the local matrix */
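/*
   Added note (derived from the setup routine below, not from the original source):
   auglyrmapd[j] holds the position, within the matrix's local ordering
   (inA->rmap->mapping), of the owned global column cstart + j, while
   auglyrmapo[k] holds, for each ghost column garray[k] that appears in that
   local ordering, its position in the ordering. The work vectors auglydd and
   auglyoo receive the corresponding entries of the scale vector so that a->A
   and a->B can be column-scaled separately.
*/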

PetscErrorCode MatMPIAIJDiagonalScaleLocalSetUp(Mat inA, Vec scale)
{
  Mat_MPIAIJ *ina = (Mat_MPIAIJ *)inA->data; /* access private part of matrix */
  PetscInt    i, n, nt, cstart, cend, no, *garray = ina->garray, *lindices;
  PetscInt   *r_rmapd, *r_rmapo;

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(inA, &cstart, &cend));
  PetscCall(MatGetSize(ina->A, NULL, &n));
  PetscCall(PetscCalloc1(inA->rmap->mapping->n + 1, &r_rmapd));
  nt = 0;
  for (i = 0; i < inA->rmap->mapping->n; i++) {
    if (inA->rmap->mapping->indices[i] >= cstart && inA->rmap->mapping->indices[i] < cend) {
      nt++;
      r_rmapd[i] = inA->rmap->mapping->indices[i] + 1;
    }
  }
  PetscCheck(nt == n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hmm nt %" PetscInt_FMT " n %" PetscInt_FMT, nt, n);
  PetscCall(PetscMalloc1(n + 1, &auglyrmapd));
  for (i = 0; i < inA->rmap->mapping->n; i++) {
    if (r_rmapd[i]) auglyrmapd[(r_rmapd[i] - 1) - cstart] = i;
  }
  PetscCall(PetscFree(r_rmapd));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, n, &auglydd));

  PetscCall(PetscCalloc1(inA->cmap->N + 1, &lindices));
  for (i = 0; i < ina->B->cmap->n; i++) lindices[garray[i]] = i + 1;
  no = inA->rmap->mapping->n - nt;
  PetscCall(PetscCalloc1(inA->rmap->mapping->n + 1, &r_rmapo));
  nt = 0;
  for (i = 0; i < inA->rmap->mapping->n; i++) {
    if (lindices[inA->rmap->mapping->indices[i]]) {
      nt++;
      r_rmapo[i] = lindices[inA->rmap->mapping->indices[i]];
    }
  }
  PetscCheck(nt <= no, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hmm nt %" PetscInt_FMT " no %" PetscInt_FMT, nt, no);
  PetscCall(PetscFree(lindices));
  PetscCall(PetscMalloc1(nt + 1, &auglyrmapo));
  for (i = 0; i < inA->rmap->mapping->n; i++) {
    if (r_rmapo[i]) auglyrmapo[(r_rmapo[i] - 1)] = i;
  }
  PetscCall(PetscFree(r_rmapo));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, nt, &auglyoo));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatMPIAIJDiagonalScaleLocal(Mat A, Vec scale)
{
  /* This routine should really be abandoned as it duplicates MatDiagonalScaleLocal */

  PetscFunctionBegin;
  PetscTryMethod(A, "MatDiagonalScaleLocal_C", (Mat, Vec), (A, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat A, Vec scale)
{
  Mat_MPIAIJ        *a = (Mat_MPIAIJ *)A->data; /* access private part of matrix */
  PetscInt           n, i;
  PetscScalar       *d, *o;
  const PetscScalar *s;

  PetscFunctionBegin;
  if (!auglyrmapd) PetscCall(MatMPIAIJDiagonalScaleLocalSetUp(A, scale));

  PetscCall(VecGetArrayRead(scale, &s));

  PetscCall(VecGetLocalSize(auglydd, &n));
  PetscCall(VecGetArray(auglydd, &d));
  for (i = 0; i < n; i++) { d[i] = s[auglyrmapd[i]]; /* copy "diagonal" (true local) portion of scale into dd vector */ }
  PetscCall(VecRestoreArray(auglydd, &d));
  /* column scale "diagonal" portion of local matrix */
  PetscCall(MatDiagonalScale(a->A, NULL, auglydd));

  PetscCall(VecGetLocalSize(auglyoo, &n));
  PetscCall(VecGetArray(auglyoo, &o));
  for (i = 0; i < n; i++) { o[i] = s[auglyrmapo[i]]; /* copy "off-diagonal" portion of scale into oo vector */ }
  PetscCall(VecRestoreArrayRead(scale, &s));
  PetscCall(VecRestoreArray(auglyoo, &o));
  /* column scale "off-diagonal" portion of local matrix */
  PetscCall(MatDiagonalScale(a->B, NULL, auglyoo));
  PetscFunctionReturn(PETSC_SUCCESS);
}