Actual source code: ex41.c

static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0, number of domains per processor\n\
  -ov <overlap>   : >= 0, amount of overlap between domains\n\n";
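
/* Example run, matching the test configuration at the bottom of this file (assumes the
   optional PETSc data files are available under ${DATAFILESPATH}):
     mpiexec -n 3 ./ex41 -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1 */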

#include <petscmat.h>

int main(int argc, char **args)
{
  PetscInt    nd = 2, ov = 1, i, j, m, n, *idx, lsize;
  PetscMPIInt rank;
  PetscBool   flg;
  Mat         A, B;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS         *is1, *is2;
  PetscRandom r;
  PetscScalar rand;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL));

  /* Read the matrix in parallel as an MPIAIJ matrix */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, MATMPIAIJ));
  PetscCall(MatLoad(A, fd));
  PetscCall(PetscViewerDestroy(&fd));

  /* Read the same matrix again as a sequential matrix on every process */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_SELF, &B));
  PetscCall(MatSetType(B, MATSEQAIJ));
  PetscCall(MatLoad(B, fd));
  PetscCall(PetscViewerDestroy(&fd));

  /* Create the random number generator */
  PetscCall(MatGetSize(A, &m, &n));
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
  PetscCall(PetscRandomSetFromOptions(r));

  /* Allocate storage for the index sets corresponding to the subdomains */
  PetscCall(PetscMalloc1(nd, &is1));
  PetscCall(PetscMalloc1(nd, &is2));
  PetscCall(PetscMalloc1(m, &idx));

  /* Create the random index sets */
  for (i = 0; i < nd; i++) {
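    /* Consume 'rank' random values so the subsequent draws differ on each process */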
    for (j = 0; j < rank; j++) PetscCall(PetscRandomGetValue(r, &rand));
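    /* Pick a random subdomain size in [0, m) */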
    PetscCall(PetscRandomGetValue(r, &rand));
    lsize = (PetscInt)(rand * m);
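    /* Fill the subdomain with random row indices in [0, m) */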
    for (j = 0; j < lsize; j++) {
      PetscCall(PetscRandomGetValue(r, &rand));
      idx[j] = (PetscInt)(rand * m);
    }
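    /* Create two copies of the same index set: is1 for the parallel matrix, is2 for the sequential one */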
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize, idx, PETSC_COPY_VALUES, is1 + i));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize, idx, PETSC_COPY_VALUES, is2 + i));
  }
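
  /* Grow each subdomain by 'ov' levels of overlap, once with the parallel matrix and once with the sequential one */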
  PetscCall(MatIncreaseOverlap(A, nd, is1, ov));
  PetscCall(MatIncreaseOverlap(B, nd, is2, ov));

  /* Check that the sequential and parallel cases give the same answer */
  for (i = 0; i < nd; ++i) {
    PetscInt sz1, sz2;
    PetscCall(ISEqual(is1[i], is2[i], &flg));
    PetscCall(ISGetSize(is1[i], &sz1));
    PetscCall(ISGetSize(is2[i], &sz2));
    PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "proc:[%d], i=%" PetscInt_FMT ", flg =%d sz1 = %" PetscInt_FMT " sz2 = %" PetscInt_FMT, rank, i, (int)flg, sz1, sz2);
  }

  /* Free allocated memory */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(PetscRandomDestroy(&r));
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFree(idx));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
     requires: !complex

   test:
     nsize: 3
     requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
     args: -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1

TEST*/