Actual source code: ex40.c

static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file>    : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>         : > 0 number of domains per processor\n\
  -ov <overlap>      : >= 0 amount of overlap between domains\n\
  -nested_dissection : use subdomains from MatPartitioningApplyND() instead of random index sets\n\n";

#include <petscmat.h>
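
/*
   ISAllGatherDisjoint - gathers the entries of a parallel IS onto every process and splits
   them into one sequential IS per rank, preserving each rank's local size. The caller is
   responsible for destroying the returned index sets and freeing the array.
*/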
PetscErrorCode ISAllGatherDisjoint(IS iis, IS **ois)
{
  IS              *is2, is;
  const PetscInt  *idxs;
  PetscInt         i, ls, *sizes;
  PetscMPIInt      size;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)iis), &size));
  PetscCall(PetscMalloc1(size, &is2));
  PetscCall(PetscMalloc1(size, &sizes));
  PetscCall(ISGetLocalSize(iis, &ls));
  /* there is no public ISGetLayout, so gather the local sizes explicitly */
  PetscCallMPI(MPI_Allgather(&ls, 1, MPIU_INT, sizes, 1, MPIU_INT, PetscObjectComm((PetscObject)iis)));
  PetscCall(ISAllGather(iis, &is));
  PetscCall(ISGetIndices(is, &idxs));
  for (i = 0, ls = 0; i < size; i++) {
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, sizes[i], idxs + ls, PETSC_COPY_VALUES, &is2[i]));
    ls += sizes[i];
  }
  PetscCall(ISRestoreIndices(is, &idxs));
  PetscCall(ISDestroy(&is));
  PetscCall(PetscFree(sizes));
  *ois = is2;
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **args)
{
  PetscInt    nd = 2, ov = 1, ndpar, i, start, m, n, end, lsize;
  PetscMPIInt rank;
  PetscBool   flg, useND = PETSC_FALSE;
  Mat         A, B;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS         *is1, *is2;
  PetscRandom r;
  PetscScalar rand;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER, "Must use -f filename to indicate a file containing a PETSc binary matrix");
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-nested_dissection", &useND, NULL));
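
  /* The test loads the same matrix as a parallel (MPIAIJ) and a sequential (SEQAIJ) matrix,
     increases the overlap of the same subdomains on both, and checks that the resulting
     index sets agree */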

  /* Read matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, MATMPIAIJ));
  PetscCall(MatLoad(A, fd));
  PetscCall(MatSetFromOptions(A));
  PetscCall(PetscViewerDestroy(&fd));

  /* Read the matrix again as a sequential matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_SELF, &B));
  PetscCall(MatSetType(B, MATSEQAIJ));
  PetscCall(MatLoad(B, fd));
  PetscCall(MatSetFromOptions(B));
  PetscCall(PetscViewerDestroy(&fd));
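
  /* every process holds the entire matrix in B, so the overlap can be recomputed
     sequentially and compared with the parallel result */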

  /* Create the IS corresponding to subdomains */
  if (useND) {
    MatPartitioning part;
    IS              ndmap;
    PetscMPIInt     size;
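
    /* with nested dissection there is a single parallel subdomain IS (ndpar = 1); it is
       split into one sequential IS per process (nd = size) for the sequential test */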
    ndpar = 1;
    PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
    nd = (PetscInt)size;
    PetscCall(PetscMalloc1(ndpar, &is1));
    PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
    PetscCall(MatPartitioningSetAdjacency(part, A));
    PetscCall(MatPartitioningSetFromOptions(part));
    PetscCall(MatPartitioningApplyND(part, &ndmap));
    PetscCall(MatPartitioningDestroy(&part));
    PetscCall(ISBuildTwoSided(ndmap, NULL, &is1[0]));
    PetscCall(ISDestroy(&ndmap));
    PetscCall(ISAllGatherDisjoint(is1[0], &is2));
  } else {
    /* Create the random Index Sets */
    PetscCall(PetscMalloc1(nd, &is1));
    PetscCall(PetscMalloc1(nd, &is2));

    PetscCall(MatGetSize(A, &m, &n));
    PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
    PetscCall(PetscRandomSetFromOptions(r));
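
    /* each subdomain is a contiguous random range of rows; identical copies are kept in
       is1 (tested with the parallel matrix A) and is2 (tested with the sequential matrix B) */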
    for (i = 0; i < nd; i++) {
      PetscCall(PetscRandomGetValue(r, &rand));
      start = (PetscInt)(rand * m);
      PetscCall(PetscRandomGetValue(r, &rand));
      end = (PetscInt)(rand * m);
      lsize = end - start;
      if (start > end) {
        start = end;
        lsize = -lsize;
      }
      PetscCall(ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is1 + i));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is2 + i));
    }
    ndpar = nd;
    PetscCall(PetscRandomDestroy(&r));
  }
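
  /* Increase the overlap of the same subdomains on the parallel and on the sequential matrix */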
  PetscCall(MatIncreaseOverlap(A, ndpar, is1, ov));
  PetscCall(MatIncreaseOverlap(B, nd, is2, ov));
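
  /* in the nested dissection case the parallel result is a single IS; gather and split it
     so it can be compared subdomain-by-subdomain with the sequential result */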
  if (useND) {
    IS *is;

    PetscCall(ISAllGatherDisjoint(is1[0], &is));
    PetscCall(ISDestroy(&is1[0]));
    PetscCall(PetscFree(is1));
    is1 = is;
  }
  /* Now check that the serial and parallel cases give the same answers */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISEqual(is1[i], is2[i], &flg));
    if (!flg) {
      PetscCall(ISViewFromOptions(is1[i], NULL, "-err_view"));
      PetscCall(ISViewFromOptions(is2[i], NULL, "-err_view"));
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "proc:[%d], i=%" PetscInt_FMT ", flg =%d", rank, i, (int)flg);
    }
  }

  /* Free allocated memory */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: !complex

   testset:
      nsize: 5
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 1
        args: -nd 7
      test:
        requires: parmetis
        suffix: 1_nd
        args: -nested_dissection -mat_partitioning_type parmetis

   testset:
      nsize: 3
      requires: double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -mat_increase_overlap_scalable 1 -ov 2
      output_file: output/ex40_1.out
      test:
        suffix: 2
        args: -nd 7
      test:
        requires: parmetis
        suffix: 2_nd
        args: -nested_dissection -mat_partitioning_type parmetis

TEST*/
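
/* Example run (assuming the PETSc binary matrix below is installed with your PETSc tree):
     mpiexec -n 3 ./ex40 -f ${PETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -nd 7 -ov 2
*/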