Actual source code: ex9.c


static char help[] = "Tests MPI parallel matrix creation and MatCreateRedundantMatrix().\n\n";
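
/*
  Assembles the (m*n) x (m*n) matrix of a 2D five-point stencil in parallel, applies it to a
  vector, optionally reports matrix statistics via MatGetInfo(), and then tests
  MatCreateRedundantMatrix(), which places a complete copy of the matrix on each of
  nsubcomms subcommunicators.
*/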

#include <petscmat.h>

int main(int argc, char **args)
{
  Mat         C, Credundant;
  MatInfo     info;
  PetscMPIInt rank, size, subsize;
  PetscInt    i, j, m = 3, n = 2, low, high, iglobal;
  PetscInt    Ii, J, ldim, nsubcomms;
  PetscBool   flg_info, flg_mat;
  PetscScalar v, one = 1.0;
  Vec         x, y;
  MPI_Comm    subcomm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  n = 2 * size;
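  /* Grid point (i,j) of the m x n grid maps to row/column j + n*i; each rank generates the
     stencil entries for its own two grid columns (n = 2*size), and values that land in rows
     owned by another rank are communicated during assembly */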

  PetscCall(MatCreate(PETSC_COMM_WORLD, &C));
  PetscCall(MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));

  /* Create the matrix for the five-point stencil, YET AGAIN */
  for (i = 0; i < m; i++) {
    for (j = 2 * rank; j < 2 * rank + 2; j++) {
      v  = -1.0;
      Ii = j + n * i;
      if (i > 0) {
        J = Ii - n;
        PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      if (i < m - 1) {
        J = Ii + n;
        PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      if (j > 0) {
        J = Ii - 1;
        PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      if (j < n - 1) {
        J = Ii + 1;
        PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));
      }
      v = 4.0;
      PetscCall(MatSetValues(C, 1, &Ii, 1, &Ii, &v, INSERT_VALUES));
    }
  }

  /* Add extra elements (to illustrate variants of MatGetInfo) */
  Ii = n;
  J  = n - 2;
  v  = 100.0;
  PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));
  Ii = n - 2;
  J  = n;
  v  = 100.0;
  PetscCall(MatSetValues(C, 1, &Ii, 1, &J, &v, INSERT_VALUES));
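  /* Every rank inserts the same two off-diagonal entries; with INSERT_VALUES duplicate
     insertions are harmless, and entries destined for rows owned by other ranks are
     communicated by the assembly calls below */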

  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));

  /* Form vectors */
  PetscCall(MatCreateVecs(C, &x, &y));
  PetscCall(VecGetLocalSize(x, &ldim));
  PetscCall(VecGetOwnershipRange(x, &low, &high));
  for (i = 0; i < ldim; i++) {
    iglobal = i + low;
    v       = one * ((PetscReal)i) + 100.0 * rank;
    PetscCall(VecSetValues(x, 1, &iglobal, &v, INSERT_VALUES));
  }
  PetscCall(VecAssemblyBegin(x));
  PetscCall(VecAssemblyEnd(x));

  PetscCall(MatMult(C, x, y));

  PetscCall(PetscOptionsHasName(NULL, NULL, "-view_info", &flg_info));
  if (flg_info) {
    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO));
    PetscCall(MatView(C, PETSC_VIEWER_STDOUT_WORLD));
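    /* MAT_GLOBAL_SUM sums the per-rank statistics over all ranks, while MAT_GLOBAL_MAX
       reports the maximum over the ranks; the MatInfo fields are PetscLogDouble, hence
       the casts to PetscInt below */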

    PetscCall(MatGetInfo(C, MAT_GLOBAL_SUM, &info));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "matrix information (global sums):\nnonzeros = %" PetscInt_FMT ", allocated nonzeros = %" PetscInt_FMT "\n", (PetscInt)info.nz_used, (PetscInt)info.nz_allocated));
    PetscCall(MatGetInfo(C, MAT_GLOBAL_MAX, &info));
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "matrix information (global max):\nnonzeros = %" PetscInt_FMT ", allocated nonzeros = %" PetscInt_FMT "\n", (PetscInt)info.nz_used, (PetscInt)info.nz_allocated));
    PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
  }

  PetscCall(PetscOptionsHasName(NULL, NULL, "-view_mat", &flg_mat));
  if (flg_mat) PetscCall(MatView(C, PETSC_VIEWER_STDOUT_WORLD));

  /* Test MatCreateRedundantMatrix() */
  nsubcomms = size;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nsubcomms", &nsubcomms, NULL));
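  /* With MPI_COMM_NULL, MatCreateRedundantMatrix() splits PETSC_COMM_WORLD into nsubcomms
     subcommunicators itself and gives each one a complete copy of C; the second call uses
     MAT_REUSE_MATRIX to refill the matrix created by the first call instead of allocating
     a new one */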
  PetscCall(MatCreateRedundantMatrix(C, nsubcomms, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &Credundant));
  PetscCall(MatCreateRedundantMatrix(C, nsubcomms, MPI_COMM_NULL, MAT_REUSE_MATRIX, &Credundant));

  PetscCall(PetscObjectGetComm((PetscObject)Credundant, &subcomm));
  PetscCallMPI(MPI_Comm_size(subcomm, &subsize));

  if (subsize == 2 && flg_mat) {
    PetscCall(PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(subcomm), "\n[%d] Credundant:\n", rank));
    PetscCall(MatView(Credundant, PETSC_VIEWER_STDOUT_(subcomm)));
  }
  PetscCall(MatDestroy(&Credundant));

  /* Test MatCreateRedundantMatrix() with user-provided subcomm */
  {
    PetscSubcomm psubcomm;

    PetscCall(PetscSubcommCreate(PETSC_COMM_WORLD, &psubcomm));
    PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomms));
    PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
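    /* PETSC_SUBCOMM_CONTIGUOUS groups consecutive ranks into each child communicator;
       PETSC_SUBCOMM_INTERLACED distributes the ranks among the children round-robin */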
    /* allow the psubcomm type to be switched at runtime, e.g., '-psubcomm_type interlaced' */
    PetscCall(PetscSubcommSetFromOptions(psubcomm));
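    /* PetscSubcommChild() returns the MPI communicator of the child subcommunicator this
       rank belongs to; MatCreateRedundantMatrix() then places one copy of C on each child */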

    PetscCall(MatCreateRedundantMatrix(C, nsubcomms, PetscSubcommChild(psubcomm), MAT_INITIAL_MATRIX, &Credundant));
    PetscCall(MatCreateRedundantMatrix(C, nsubcomms, PetscSubcommChild(psubcomm), MAT_REUSE_MATRIX, &Credundant));

    PetscCall(PetscSubcommDestroy(&psubcomm));
    PetscCall(MatDestroy(&Credundant));
  }

  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}
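
/* The block below drives the PETSc test harness: each "test:" entry is one run of this
   example, "nsize" gives the number of MPI ranks, and "args" the command-line options */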

/*TEST

   test:
      nsize: 3
      args: -view_info

   test:
      suffix: 2
      nsize: 3
      args: -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 3
      nsize: 3
      args: -nsubcomms 2 -view_mat -psubcomm_type contiguous

   test:
      suffix: 3_baij
      nsize: 3
      args: -mat_type baij -nsubcomms 2 -view_mat

   test:
      suffix: 3_sbaij
      nsize: 3
      args: -mat_type sbaij -nsubcomms 2 -view_mat

   test:
      suffix: 3_dense
      nsize: 3
      args: -mat_type dense -nsubcomms 2 -view_mat

   test:
      suffix: 4_baij
      nsize: 3
      args: -mat_type baij -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 4_sbaij
      nsize: 3
      args: -mat_type sbaij -nsubcomms 2 -view_mat -psubcomm_type interlaced

   test:
      suffix: 4_dense
      nsize: 3
      args: -mat_type dense -nsubcomms 2 -view_mat -psubcomm_type interlaced

TEST*/