Actual source code: ex42.c
static char help[] = "Tests MatIncreaseOverlap() and MatCreateSubmatrices() for the parallel case.\n\
This example is similar to ex40.c; here the index sets used are random.\n\
Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";
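
/*
   Example invocation (illustrative; it mirrors the first TEST entry at the end of this file):
     mpiexec -n 3 ./ex42 -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2
*/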
#include <petscmat.h>

int main(int argc, char **args)
{
  PetscInt    nd = 2, ov = 1, i, j, lsize, m, n, *idx, bs;
  PetscMPIInt rank, size;
  PetscBool   flg;
  Mat         A, B, *submatA, *submatB;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS         *is1, *is2;
  PetscRandom r;
  PetscBool   test_unsorted = PETSC_FALSE;
  PetscScalar rand;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_unsorted", &test_unsorted, NULL));

  /* Read matrix A (in parallel) */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, MATAIJ));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatLoad(A, fd));
  PetscCall(PetscViewerDestroy(&fd));

  /* Read the same matrix again as a sequential matrix B on each process */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_SELF, &B));
  PetscCall(MatSetType(B, MATSEQAIJ));
  PetscCall(MatSetFromOptions(B));
  PetscCall(MatLoad(B, fd));
  PetscCall(PetscViewerDestroy(&fd));
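
  /* The block size of A determines the granularity of the index sets built below:
     entries are selected bs at a time and the index sets are marked with this block size */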
  PetscCall(MatGetBlockSize(A, &bs));

  /* Create the random number generator */
  PetscCall(MatGetSize(A, &m, &n));
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
  PetscCall(PetscRandomSetFromOptions(r));

  /* Create the IS corresponding to subdomains */
  PetscCall(PetscMalloc1(nd, &is1));
  PetscCall(PetscMalloc1(nd, &is2));
  PetscCall(PetscMalloc1(m, &idx));
  for (i = 0; i < m; i++) idx[i] = i;

  /* Create the random index sets */
  for (i = 0; i < nd; i++) {
    /* Skip a few random values so that the index sets on different processes are different */
    for (j = 0; j < rank; j++) PetscCall(PetscRandomGetValue(r, &rand));
    PetscCall(PetscRandomGetValue(r, &rand));
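    /* pick a random number of blocks (each of size bs) to include in this subdomain */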
    lsize = (PetscInt)(rand * (m / bs));
    /* shuffle */
    for (j = 0; j < lsize; j++) {
      PetscInt k, swap, l;

      PetscCall(PetscRandomGetValue(r, &rand));
      k = j + (PetscInt)(rand * ((m / bs) - j));
      for (l = 0; l < bs; l++) {
        swap            = idx[bs * j + l];
        idx[bs * j + l] = idx[bs * k + l];
        idx[bs * k + l] = swap;
      }
    }
    if (!test_unsorted) PetscCall(PetscSortInt(lsize * bs, idx));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize * bs, idx, PETSC_COPY_VALUES, is1 + i));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize * bs, idx, PETSC_COPY_VALUES, is2 + i));
    PetscCall(ISSetBlockSize(is1[i], bs));
    PetscCall(ISSetBlockSize(is2[i], bs));
  }
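
  /* In the sorted case, grow each subdomain by ov levels of overlap on both the parallel
     matrix A and the sequential matrix B, then re-sort the enlarged index sets */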
  if (!test_unsorted) {
    PetscCall(MatIncreaseOverlap(A, nd, is1, ov));
    PetscCall(MatIncreaseOverlap(B, nd, is2, ov));

    for (i = 0; i < nd; ++i) {
      PetscCall(ISSort(is1[i]));
      PetscCall(ISSort(is2[i]));
    }
  }
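
  /* Extract the same family of (overlapping) submatrices from the parallel matrix A and
     the sequential matrix B */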
  PetscCall(MatCreateSubMatrices(A, nd, is1, is1, MAT_INITIAL_MATRIX, &submatA));
  PetscCall(MatCreateSubMatrices(B, nd, is2, is2, MAT_INITIAL_MATRIX, &submatB));

  /* Now see if the sequential and parallel cases give the same answers */
  for (i = 0; i < nd; ++i) {
    PetscCall(MatEqual(submatA[i], submatB[i], &flg));
    PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "%" PetscInt_FMT "-th parallel submatA != seq submatB", i);
  }

  /* Free allocated memory */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(MatDestroySubMatrices(nd, &submatA));
  PetscCall(MatDestroySubMatrices(nd, &submatB));

  PetscCall(PetscRandomDestroy(&r));
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFree(idx));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
     requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2

   test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -ov 2
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex

   test:
      suffix: unsorted_baij_mpi
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_baij_seq
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_mpi
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

   test:
      suffix: unsorted_seq
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

TEST*/