Actual source code: ex37.c


static char help[] = "Test MatGetMultiProcBlock() and MatCreateRedundantMatrix() \n\
Reads a PETSc matrix from a file and solves a linear system on subcommunicators.\n\n";
/*
  Example:
  mpiexec -n 4 ./ex37 -f <input_file> -nsubcomm 2 -psubcomm_view -subcomm_type <1 or 2>
*/
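/*
  -subcomm_type corresponds to PetscSubcommType: 0 = PETSC_SUBCOMM_GENERAL,
  1 = PETSC_SUBCOMM_CONTIGUOUS, 2 = PETSC_SUBCOMM_INTERLACED.
*/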

#include <petscksp.h>
#include <petscsys.h>

int main(int argc, char **args)
{
  KSP          subksp;
  Mat          A, subA;
  Vec          x, b, u, subb, subx, subu;
  PetscViewer  fd;
  char         file[PETSC_MAX_PATH_LEN];
  PetscBool    flg;
  PetscInt     i, m, n, its;
  PetscReal    norm;
  PetscMPIInt  rank, size;
  MPI_Comm     comm, subcomm;
  PetscSubcomm psubcomm;
  PetscInt     nsubcomm = 1, id;
  PetscScalar *barray, *xarray, *uarray, *array, one = 1.0;
  PetscInt     type = 1;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  /* Determine the file from which we read the matrix */
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must indicate binary file with the -f option");
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));

  /* Load the matrix; then destroy the viewer. */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatLoad(A, fd));
  PetscCall(PetscViewerDestroy(&fd));

  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* Create rhs vector b */
  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(VecCreate(PETSC_COMM_WORLD, &b));
  PetscCall(VecSetSizes(b, m, PETSC_DECIDE));
  PetscCall(VecSetFromOptions(b));
  PetscCall(VecSet(b, one));

  PetscCall(VecDuplicate(b, &x));
  PetscCall(VecDuplicate(b, &u));
  PetscCall(VecSet(x, 0.0));

  /* Test MatGetMultiProcBlock() */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nsubcomm", &nsubcomm, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-subcomm_type", &type, NULL));

  PetscCall(PetscSubcommCreate(comm, &psubcomm));
  PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
  if (type == PETSC_SUBCOMM_GENERAL) { /* user provides color, subrank and duprank */
    PetscMPIInt color, subrank, duprank, subsize;
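    /* Reverse the parent ranks (duprank = size - 1 - rank) and carve the
       reversed ordering into nsubcomm equal contiguous blocks, simply to
       exercise a user-defined mapping. */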
    duprank = size - 1 - rank;
    subsize = size / nsubcomm;
    PetscCheck(subsize * nsubcomm == size, comm, PETSC_ERR_SUP, "This example requires that nsubcomm %" PetscInt_FMT " divide size %d", nsubcomm, size);
    color   = duprank / subsize;
    subrank = duprank - color * subsize;
    PetscCall(PetscSubcommSetTypeGeneral(psubcomm, color, subrank));
  } else if (type == PETSC_SUBCOMM_CONTIGUOUS) {
    PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
  } else if (type == PETSC_SUBCOMM_INTERLACED) {
    PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_INTERLACED));
  } else SETERRQ(psubcomm->parent, PETSC_ERR_SUP, "PetscSubcommType %" PetscInt_FMT " is not supported yet", type);
  PetscCall(PetscSubcommSetFromOptions(psubcomm));
  subcomm = PetscSubcommChild(psubcomm);
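  /* subcomm is owned by psubcomm and must not be freed directly. When
     size > 1 it is overwritten below by a communicator obtained from
     MPI_Comm_split(), which is why MPI_Comm_free() is called at the end
     only in that case. */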

  /* Create a matrix on a user-defined subcommunicator (ranks 0 and 1 form
     one group, all remaining ranks another) and test MatMult() on it */
  if (size > 1) {
    PetscMPIInt subrank = -1, color = -1;
    MPI_Comm    dcomm;

    if (rank == 0) {
      color   = 0;
      subrank = 0;
    } else if (rank == 1) {
      color   = 0;
      subrank = 1;
    } else {
      color   = 1;
      subrank = 0;
    }
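    /* MPI_Comm_split(): ranks 0 and 1 (color 0) form one subcommunicator;
       all remaining ranks (color 1) form another. The remaining ranks all
       pass subrank 0, and MPI breaks the tie by parent rank order. */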

    PetscCall(PetscCommDuplicate(PETSC_COMM_WORLD, &dcomm, NULL));
    PetscCallMPI(MPI_Comm_split(dcomm, color, subrank, &subcomm));

    PetscCall(MatCreate(subcomm, &subA));
    PetscCall(MatSetSizes(subA, PETSC_DECIDE, PETSC_DECIDE, 10, 10));
    PetscCall(MatSetFromOptions(subA));
    PetscCall(MatSetUp(subA));
    PetscCall(MatAssemblyBegin(subA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(subA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatZeroEntries(subA));
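    /* subA holds only zeros, so the MatMult() below simply exercises the
       multiply code path on the split communicator; subb comes back zero. */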

    /* Test MatMult() */
    PetscCall(MatCreateVecs(subA, &subx, &subb));
    PetscCall(VecSet(subx, 1.0));
    PetscCall(MatMult(subA, subx, subb));

    PetscCall(VecDestroy(&subx));
    PetscCall(VecDestroy(&subb));
    PetscCall(MatDestroy(&subA));
    PetscCall(PetscCommDestroy(&dcomm));
  }

  /* Create subA */
  PetscCall(MatGetMultiProcBlock(A, subcomm, MAT_INITIAL_MATRIX, &subA));
  PetscCall(MatGetMultiProcBlock(A, subcomm, MAT_REUSE_MATRIX, &subA));
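  /* Calling MatGetMultiProcBlock() twice, first with MAT_INITIAL_MATRIX and
     then with MAT_REUSE_MATRIX, exercises both the creation and reuse paths. */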

  /* Create sub vectors without arrays, then place the local arrays of b, x, and u into subb, subx, and subu */
  PetscCall(MatGetLocalSize(subA, &m, &n));
  PetscCall(VecCreateMPIWithArray(subcomm, 1, m, PETSC_DECIDE, NULL, &subb));
  PetscCall(VecCreateMPIWithArray(subcomm, 1, n, PETSC_DECIDE, NULL, &subx));
  PetscCall(VecCreateMPIWithArray(subcomm, 1, n, PETSC_DECIDE, NULL, &subu));
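  /* No data is copied: VecPlaceArray() makes each sub vector operate directly
     on the memory of the corresponding global vector, so anything written to
     subx or subu is visible through x or u. */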

  PetscCall(VecGetArray(b, &barray));
  PetscCall(VecGetArray(x, &xarray));
  PetscCall(VecGetArray(u, &uarray));
  PetscCall(VecPlaceArray(subb, barray));
  PetscCall(VecPlaceArray(subx, xarray));
  PetscCall(VecPlaceArray(subu, uarray));

  /* Create linear solvers associated with subA */
  PetscCall(KSPCreate(subcomm, &subksp));
  PetscCall(KSPSetOperators(subksp, subA, subA));
  PetscCall(KSPSetFromOptions(subksp));

  /* Solve sub systems */
  PetscCall(KSPSolve(subksp, subb, subx));
  PetscCall(KSPGetIterationNumber(subksp, &its));
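  /* Each subcommunicator solves the system for its own block of A (the
     diagonal block spanning the rows owned by its ranks) independently;
     with nsubcomm > 1 the solves proceed concurrently on disjoint ranks. */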

  /* Check the residual; since subu shares u's array, VecNorm(u) returns the
     residual norm accumulated over all blocks */
  PetscCall(MatMult(subA, subx, subu));
  PetscCall(VecAXPY(subu, -1.0, subb));
  PetscCall(VecNorm(u, NORM_2, &norm));
  if (norm > 1.e-4 && rank == 0) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "[%d]  Number of iterations = %3" PetscInt_FMT "\n", rank, its));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Error: Residual norm over all blocks |subb - subA*subx| = %g\n", (double)norm));
  }
  /* Optionally view the sub vectors while they still reference b's and x's
     arrays; after VecResetArray() they would have no backing storage */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-subvec_view", &id, &flg));
  if (flg && rank == id) {
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] subb:\n", rank));
    PetscCall(VecGetArray(subb, &array));
    for (i = 0; i < m; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "%g\n", (double)PetscRealPart(array[i])));
    PetscCall(VecRestoreArray(subb, &array));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d] subx:\n", rank));
    PetscCall(VecGetArray(subx, &array));
    for (i = 0; i < n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "%g\n", (double)PetscRealPart(array[i])));
    PetscCall(VecRestoreArray(subx, &array));
  }

  PetscCall(VecResetArray(subb));
  PetscCall(VecResetArray(subx));
  PetscCall(VecResetArray(subu));

  PetscCall(VecRestoreArray(x, &xarray));
  PetscCall(VecRestoreArray(b, &barray));
  PetscCall(VecRestoreArray(u, &uarray));
  PetscCall(MatDestroy(&subA));
  PetscCall(VecDestroy(&subb));
  PetscCall(VecDestroy(&subx));
  PetscCall(VecDestroy(&subu));
  PetscCall(KSPDestroy(&subksp));
  PetscCall(PetscSubcommDestroy(&psubcomm));
  if (size > 1) PetscCallMPI(MPI_Comm_free(&subcomm));
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&b));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

    test:
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 1
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

    test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      nsize: 4
      output_file: output/ex37.out

    test:
      suffix: mumps
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -pc_factor_mat_solver_type mumps -pc_type lu
      requires: datafilespath mumps !complex double !defined(PETSC_USE_64BIT_INDICES)
      nsize: 4
      output_file: output/ex37.out

    test:
      suffix: 3
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 0
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

    test:
      suffix: 4
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 1
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

    test:
      suffix: 5
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 2
      requires: datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

TEST*/
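/*
  Note: ${DATAFILESPATH}/matrices/small refers to a binary matrix from the
  separate PETSc test-data repository (the path is typically supplied via
  the DATAFILESPATH environment variable or the corresponding configure
  option). Any square matrix written with MatView() to a binary viewer
  should work with -f as well.
*/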