Actual source code: ex211.c


static char help[] = "Tests MatCreateSubMatrix() in parallel.";

#include <petscmat.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>

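/*
   Test-local copy of the index-set splitting helper used by MatCreateSubMatrix()
   for MPIAIJ matrices (presumably mirroring the private routine of the same name
   in src/mat/impls/aij/mpi/mpiaij.c): it splits the parallel index sets
   isrow/iscol into sequential index sets for the diagonal block (isrow_d,
   iscol_d) and the off-diagonal block (iscol_o), and returns garray[], which
   maps each selected off-diagonal column to its global column index in the
   submatrix. The caller frees garray with PetscFree().
*/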
PetscErrorCode ISGetSeqIS_SameColDist_Private(Mat mat, IS isrow, IS iscol, IS *isrow_d, IS *iscol_d, IS *iscol_o, const PetscInt *garray[])
{
  Vec             x, cmap;
  const PetscInt *is_idx;
  PetscScalar    *xarray, *cmaparray;
  PetscInt        ncols, isstart, *idx, m, rstart, count;
  Mat_MPIAIJ     *a    = (Mat_MPIAIJ *)mat->data;
  Mat             B    = a->B;
  Vec             lvec = a->lvec, lcmap;
  PetscInt        i, cstart, cend, Bn = B->cmap->N;
  MPI_Comm        comm;
  PetscMPIInt     rank;
  VecScatter      Mvctx;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(ISGetLocalSize(iscol, &ncols));

  /* (1) iscol is a sub-column vector of mat, pad it with '-1.' to form a full vector x */
  PetscCall(MatCreateVecs(mat, &x, NULL));
  PetscCall(VecDuplicate(x, &cmap));
  PetscCall(VecSet(x, -1.0));
  PetscCall(VecSet(cmap, -1.0));

  PetscCall(VecDuplicate(lvec, &lcmap));

  /* Get start indices */
  PetscCallMPI(MPI_Scan(&ncols, &isstart, 1, MPIU_INT, MPI_SUM, comm));
  isstart -= ncols;
  PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
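  /* isstart is now the exclusive prefix sum of the local iscol sizes, i.e. the first
     index this rank's selected columns receive in the submatrix numbering */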

  PetscCall(ISGetIndices(iscol, &is_idx));
  PetscCall(VecGetArray(x, &xarray));
  PetscCall(VecGetArray(cmap, &cmaparray));
  PetscCall(PetscMalloc1(ncols, &idx));
  for (i = 0; i < ncols; i++) {
    xarray[is_idx[i] - cstart]    = (PetscScalar)is_idx[i];
    cmaparray[is_idx[i] - cstart] = (PetscScalar)(i + isstart); /* global index of iscol[i] */
    idx[i]                        = is_idx[i] - cstart;         /* local index of iscol[i]  */
  }
  PetscCall(VecRestoreArray(x, &xarray));
  PetscCall(VecRestoreArray(cmap, &cmaparray));
  PetscCall(ISRestoreIndices(iscol, &is_idx));
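  /* At this point, for every selected global column c owned by this rank,
     x(c) = c and cmap(c) = that column's index in the submatrix; all other
     entries of x and cmap remain -1. */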

  /* Get iscol_d */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ncols, idx, PETSC_OWN_POINTER, iscol_d));
  PetscCall(ISGetBlockSize(iscol, &i));
  PetscCall(ISSetBlockSize(*iscol_d, i));

  /* Get isrow_d */
  PetscCall(ISGetLocalSize(isrow, &m));
  rstart = mat->rmap->rstart;
  PetscCall(PetscMalloc1(m, &idx));
  PetscCall(ISGetIndices(isrow, &is_idx));
  for (i = 0; i < m; i++) idx[i] = is_idx[i] - rstart;
  PetscCall(ISRestoreIndices(isrow, &is_idx));

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, m, idx, PETSC_OWN_POINTER, isrow_d));
  PetscCall(ISGetBlockSize(isrow, &i));
  PetscCall(ISSetBlockSize(*isrow_d, i));

  /* (2) Scatter x and cmap using aij->Mvctx to get their off-process portions (see MatMult_MPIAIJ) */
#if 0
  if (!a->Mvctx_mpi1) {
    /* a->Mvctx causes random 'count' in o-build? See src/mat/tests/runex59_2 */
    a->Mvctx_mpi1_flg = PETSC_TRUE;
    PetscCall(MatSetUpMultiply_MPIAIJ(mat));
  }
  Mvctx = a->Mvctx_mpi1;
#endif
  Mvctx = a->Mvctx;
  PetscCall(VecScatterBegin(Mvctx, x, lvec, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(Mvctx, x, lvec, INSERT_VALUES, SCATTER_FORWARD));

  PetscCall(VecScatterBegin(Mvctx, cmap, lcmap, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(Mvctx, cmap, lcmap, INSERT_VALUES, SCATTER_FORWARD));
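  /* After the two scatters, lvec[i] > -1 exactly when ghost column i of B was
     selected by iscol, and lcmap[i] then holds that column's index in the
     submatrix (see the x/cmap setup above) */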

  /* (3) create sequential iscol_o (a subset of iscol) and garray */
  /* off-process column indices */
  count = 0;
  PetscInt *cmap1;
  PetscCall(PetscMalloc1(Bn, &idx));
  PetscCall(PetscMalloc1(Bn, &cmap1));

  PetscCall(VecGetArray(lvec, &xarray));
  PetscCall(VecGetArray(lcmap, &cmaparray));
  for (i = 0; i < Bn; i++) {
    if (PetscRealPart(xarray[i]) > -1.0) {
      idx[count]   = i;                                       /* local column index in off-diagonal part B */
      cmap1[count] = (PetscInt)(PetscRealPart(cmaparray[i])); /* column index in submat */
      count++;
    }
  }
  printf("[%d] Bn %" PetscInt_FMT ", count %" PetscInt_FMT "\n", rank, Bn, count);
  PetscCall(VecRestoreArray(lvec, &xarray));
  PetscCall(VecRestoreArray(lcmap, &cmaparray));
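  /* For this test (dense matrix, every second column selected) each remote rank
     contributes 3 selected off-process columns, so the hard-coded check below
     appears to assume a run on exactly 3 MPI ranks: count = 3*(size-1) = 6 */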
  if (count != 6) {
    printf("[%d] count %" PetscInt_FMT " != 6 lvec:\n", rank, count);
    PetscCall(VecView(lvec, 0));

    printf("[%d] count %" PetscInt_FMT " != 6 lcmap:\n", rank, count);
    PetscCall(VecView(lcmap, 0));
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "count %" PetscInt_FMT " != 6", count);
  }

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, count, idx, PETSC_COPY_VALUES, iscol_o));
  /* cannot ensure iscol_o has same blocksize as iscol! */

  PetscCall(PetscFree(idx));

  *garray = cmap1;

  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&cmap));
  PetscCall(VecDestroy(&lcmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **args)
{
  Mat         C, A;
  PetscInt    i, j, m = 3, n = 2, rstart, rend;
  PetscMPIInt size, rank;
  PetscScalar v;
  IS          isrow, iscol;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  n = 2 * size;

  PetscCall(MatCreate(PETSC_COMM_WORLD, &C));
  PetscCall(MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));

  /*
     This is JUST to generate a nice test matrix: each process fills every column
     of its locally owned rows, producing a dense matrix. This is not something
     one would ever do in practice.
  */
  PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
  for (i = rstart; i < rend; i++) {
    for (j = 0; j < m * n; j++) {
      v = i + j + 1;
      PetscCall(MatSetValues(C, 1, &i, 1, &j, &v, INSERT_VALUES));
    }
  }
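  /* Every value i + j + 1 is nonzero, so the off-diagonal block B of the
     assembled MPIAIJ matrix references every off-process column; the counting
     logic in the helper above relies on this */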

  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));

  /*
     Generate a new matrix consisting of every second row and column of
     the original matrix
  */
  PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
  /* Create parallel IS with the rows we want on THIS processor */
  PetscCall(ISCreateStride(PETSC_COMM_WORLD, (rend - rstart) / 2, rstart, 2, &isrow));
  /* Create parallel IS with the columns we want on THIS processor (same as the rows for now) */
  PetscCall(ISCreateStride(PETSC_COMM_WORLD, (rend - rstart) / 2, rstart, 2, &iscol));
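  /* With the even split used here each rank owns m*n/size = 6 rows, so every
     rank selects 3 of its rows and 3 of its columns */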

  IS              iscol_d, isrow_d, iscol_o;
  const PetscInt *garray;
  PetscCall(ISGetSeqIS_SameColDist_Private(C, isrow, iscol, &isrow_d, &iscol_d, &iscol_o, &garray));
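  /* The helper returns sequential index sets and a garray allocated with
     PetscMalloc1(); all of them are owned by the caller and released below */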

  PetscCall(ISDestroy(&isrow_d));
  PetscCall(ISDestroy(&iscol_d));
  PetscCall(ISDestroy(&iscol_o));
  PetscCall(PetscFree(garray));

  PetscCall(MatCreateSubMatrix(C, isrow, iscol, MAT_INITIAL_MATRIX, &A));
  PetscCall(MatCreateSubMatrix(C, isrow, iscol, MAT_REUSE_MATRIX, &A));
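  /* The second call exercises the MAT_REUSE_MATRIX path: the submatrix A created
     by the first call is filled again in place rather than recreated */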

  PetscCall(ISDestroy(&isrow));
  PetscCall(ISDestroy(&iscol));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}