Actual source code: ex167.c


static char help[] = "Extract submatrices using unsorted indices. For SEQSBAIJ either sort both rows and columns, or sort neither.\n\n";

/*
   Take a 4x4 grid and form a 5-point-stencil graph Laplacian over it.
   Partition the grid into two subdomains by splitting it in two in the j-direction (slowest varying).
   Impose an overlap of 1 and order the subdomains with the j-direction varying fastest.
   Extract the subdomain submatrices, one per rank.
*/
/* Results:
    Sequential:
    - seqaij:   errors out if rows or columns are unsorted
    - seqbaij:  extracts submatrices correctly even for unsorted row or column indices
    - seqsbaij: extracts submatrices correctly for unsorted indices, provided rows and columns are either both sorted or both unsorted;
                CANNOT automatically report inversions, because MatGetRow is not available.
    MPI:
    - mpiaij:   errors out if columns are unsorted
    - mpibaij:  errors out if columns are unsorted
    - mpisbaij: errors out if columns are unsorted; with unsorted rows it still produces correct submatrices;
                CANNOT automatically report inversions, because MatGetRow is not available.
*/
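/* Example invocations (a sketch: -sort_rows, -sort_cols, and -show_inversions are the
   options registered below; -mat_type is the standard option consumed by MatSetFromOptions()):

     ./ex167 -mat_type seqbaij -show_inversions
     mpiexec -n 2 ./ex167 -mat_type mpiaij -sort_rows -sort_cols
*/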

#include <petscmat.h>
#include <petscis.h>

int main(int argc, char **args)
{
  Mat             A, *S;
  IS              rowis[2], colis[2];
  PetscInt        n, N, i, j, k, l, nsub, Jlow[2] = {0, 1}, *jlow, Jhigh[2] = {3, 4}, *jhigh, row, col, *subindices, ncols;
  const PetscInt *cols;
  PetscScalar     v;
  PetscMPIInt     rank, size, p, inversions, total_inversions;
  PetscBool       sort_rows, sort_cols, show_inversions;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size < 3, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "A uniprocessor or two-processor example only.");

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  if (size > 1) {
    n = 8;
    N = 16;
  } else {
    n = 16;
    N = 16;
  }
  PetscCall(MatSetSizes(A, n, n, N, N));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));

  /* It is harmless if the same entries are set multiple times by different ranks; INSERT_VALUES simply overwrites. */
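  /* 5-point stencil on the 4x4 grid: point (i,j) has global index row = j*4 + i
     (i varies fastest); each existing neighbor contributes an off-diagonal -1,
     and the diagonal entry is 4. */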
  for (i = 0; i < 4; ++i) {
    for (j = 0; j < 4; ++j) {
      row = j * 4 + i;
      v   = -1.0;
      if (i > 0) {
        col = row - 1;
        PetscCall(MatSetValues(A, 1, &row, 1, &col, &v, INSERT_VALUES));
      }
      if (i < 3) {
        col = row + 1;
        PetscCall(MatSetValues(A, 1, &row, 1, &col, &v, INSERT_VALUES));
      }
      if (j > 0) {
        col = row - 4;
        PetscCall(MatSetValues(A, 1, &row, 1, &col, &v, INSERT_VALUES));
      }
      if (j < 3) {
        col = row + 4;
        PetscCall(MatSetValues(A, 1, &row, 1, &col, &v, INSERT_VALUES));
      }
      v = 4.0;
      PetscCall(MatSetValues(A, 1, &row, 1, &row, &v, INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Original matrix\n"));
  PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));

  if (size > 1) {
    nsub = 1; /* one subdomain per rank */
  } else {
    nsub = 2; /* both subdomains on rank 0 */
  }
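  /* Subdomain l covers grid rows j in [Jlow[l], Jhigh[l]): {0,1,2} and {1,2,3}, i.e.
     the two halves of the grid, each extended by one row of overlap.  With two ranks,
     each rank handles only its own subdomain. */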
  if (rank) {
    jlow  = Jlow + 1;
    jhigh = Jhigh + 1;
  } else {
    jlow  = Jlow;
    jhigh = Jhigh;
  }
  sort_rows = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-sort_rows", &sort_rows, NULL));
  sort_cols = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-sort_cols", &sort_cols, NULL));
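  /* Build each subdomain's indices with j varying fastest, so the global indices
     (j*4 + i) are NOT in ascending order; -sort_rows/-sort_cols optionally restore
     sorted order on the row/column IS afterwards. */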
  for (l = 0; l < nsub; ++l) {
    PetscCall(PetscMalloc1(12, &subindices));
    k = 0;
    for (i = 0; i < 4; ++i) {
      for (j = jlow[l]; j < jhigh[l]; ++j) {
        subindices[k] = j * 4 + i;
        k++;
      }
    }
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 12, subindices, PETSC_OWN_POINTER, rowis + l));
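    /* If exactly one of the two index sets will be sorted, rows and columns need
       distinct IS objects; otherwise they can share a single object (and a single
       sort, if any). */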
    if ((sort_rows && !sort_cols) || (!sort_rows && sort_cols)) {
      PetscCall(ISDuplicate(rowis[l], colis + l));
    } else {
      PetscCall(PetscObjectReference((PetscObject)rowis[l]));
      colis[l] = rowis[l];
    }
    if (sort_rows) PetscCall(ISSort(rowis[l]));
    if (sort_cols) PetscCall(ISSort(colis[l]));
  }

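  /* One sequential submatrix per rowis/colis pair; with MAT_INITIAL_MATRIX the array
     S is allocated by PETSc and must be released with MatDestroySubMatrices() below. */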
  PetscCall(MatCreateSubMatrices(A, nsub, rowis, colis, MAT_INITIAL_MATRIX, &S));

  show_inversions = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-show_inversions", &show_inversions, NULL));

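  /* With -show_inversions, scan each extracted submatrix row by row via MatGetRow()
     and count column indices that appear out of ascending order; this is exactly the
     check that (seq/mpi)sbaij cannot perform, since MatGetRow is unavailable there. */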
  inversions = 0;
  for (p = 0; p < size; ++p) {
    if (p == rank) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d:%d]: Number of subdomains: %" PetscInt_FMT ":\n", rank, size, nsub));
      for (l = 0; l < nsub; ++l) {
        PetscInt i0, i1;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d:%d]: Subdomain row IS %" PetscInt_FMT ":\n", rank, size, l));
        PetscCall(ISView(rowis[l], PETSC_VIEWER_STDOUT_SELF));
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d:%d]: Subdomain col IS %" PetscInt_FMT ":\n", rank, size, l));
        PetscCall(ISView(colis[l], PETSC_VIEWER_STDOUT_SELF));
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "[%d:%d]: Submatrix %" PetscInt_FMT ":\n", rank, size, l));
        PetscCall(MatView(S[l], PETSC_VIEWER_STDOUT_SELF));
        if (show_inversions) {
          PetscCall(MatGetOwnershipRange(S[l], &i0, &i1));
          for (i = i0; i < i1; ++i) {
            PetscCall(MatGetRow(S[l], i, &ncols, &cols, NULL));
            for (j = 1; j < ncols; ++j) {
              if (cols[j] < cols[j - 1]) {
                PetscCall(PetscPrintf(PETSC_COMM_SELF, "***Inversion in row %" PetscInt_FMT ": col[%" PetscInt_FMT "] = %" PetscInt_FMT " < %" PetscInt_FMT " = col[%" PetscInt_FMT "]\n", i, j, cols[j], cols[j - 1], j - 1));
                inversions++;
              }
            }
            PetscCall(MatRestoreRow(S[l], i, &ncols, &cols, NULL));
          }
        }
      }
    }
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
  }
  if (show_inversions) {
    PetscCallMPI(MPI_Reduce(&inversions, &total_inversions, 1, MPI_INT, MPI_SUM, 0, PETSC_COMM_WORLD));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "*Total inversions: %d\n", total_inversions));
  }
  PetscCall(MatDestroy(&A));

  for (l = 0; l < nsub; ++l) {
    PetscCall(ISDestroy(&(rowis[l])));
    PetscCall(ISDestroy(&(colis[l])));
  }
  PetscCall(MatDestroySubMatrices(nsub, &S));
  PetscCall(PetscFinalize());
  return 0;
}