Actual source code: ex53.c
static char help[] = "Tests setup PCFIELDSPLIT with blocked IS.\n\n";

/*
  Contributed by Hoang Giang Bui, June 2017.
*/
#include <petscksp.h>

int main(int argc, char *argv[])
{
  Mat         A;
  KSP         ksp;
  PC          pc;
  PetscInt    Istart, Iend, local_m, local_n, i;
  PetscMPIInt rank;
  PetscInt    method = 2, mat_size = 40, block_size = 2, *A_indices = NULL, *B_indices = NULL, A_size = 0, B_size = 0;
  IS          A_IS, B_IS;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(MPI_COMM_WORLD, &rank));
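
  /* Runtime-configurable parameters: matrix size, splitting method, and IS block size */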
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-mat_size", &mat_size, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-method", &method, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-block_size", &block_size, NULL));

  if (rank == 0) PetscCall(PetscPrintf(PETSC_COMM_SELF, " matrix size = %" PetscInt_FMT ", block size = %" PetscInt_FMT "\n", mat_size, block_size));

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, mat_size, mat_size));
  PetscCall(MatSetType(A, MATMPIAIJ));
  PetscCall(MatSetUp(A));

  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
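
  /* Assemble a tridiagonal (1D Laplacian-like) matrix: 2 on the diagonal, -1 on the sub- and super-diagonals */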
  for (i = Istart; i < Iend; ++i) {
    PetscCall(MatSetValue(A, i, i, 2, INSERT_VALUES));
    if (i < mat_size - 1) PetscCall(MatSetValue(A, i, i + 1, -1, INSERT_VALUES));
    if (i > 0) PetscCall(MatSetValue(A, i, i - 1, -1, INSERT_VALUES));
  }

  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  PetscCall(MatGetLocalSize(A, &local_m, &local_n));

  /* Create the index sets that define the two fields; only the first two ranks contribute entries */
  if (rank == 0) {
    if (method > 1) {
      /* with method > 1, fieldsplit B is empty: all local rows go to fieldsplit A */
      A_size = Iend - Istart;
      B_size = 0;
    } else {
      /* with method = 1, fieldsplits A and B get an equal share of the local rows; note that
         A_size, i.e. N/4, must be divisible by block_size */
      A_size = (Iend - Istart) / 2;
      B_size = (Iend - Istart) / 2;
    }
    PetscCall(PetscCalloc1(A_size, &A_indices));
    PetscCall(PetscCalloc1(B_size, &B_indices));
    for (i = 0; i < A_size; ++i) A_indices[i] = Istart + i;
    for (i = 0; i < B_size; ++i) B_indices[i] = Istart + i + A_size;
  } else if (rank == 1) {
    A_size = (Iend - Istart) / 2;
    B_size = (Iend - Istart) / 2;
    PetscCall(PetscCalloc1(A_size, &A_indices));
    PetscCall(PetscCalloc1(B_size, &B_indices));
    for (i = 0; i < A_size; ++i) A_indices[i] = Istart + i;
    for (i = 0; i < B_size; ++i) B_indices[i] = Istart + i + A_size;
  }
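
  /* PETSC_OWN_POINTER hands the index arrays over to the IS objects, which free them when destroyed;
     ranks other than 0 and 1 pass A_size = B_size = 0 with NULL arrays, i.e. empty local parts */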
  PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, A_size, A_indices, PETSC_OWN_POINTER, &A_IS));
  PetscCall(ISCreateGeneral(PETSC_COMM_WORLD, B_size, B_indices, PETSC_OWN_POINTER, &B_IS));
  PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d]: A_size = %" PetscInt_FMT ", B_size = %" PetscInt_FMT "\n", rank, A_size, B_size));
  PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));

  /* Create the solver; this test exercises only the setup phase, no solve is performed */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetType(ksp, KSPGMRES));
  PetscCall(KSPSetOperators(ksp, A, A));

  /* Define the fieldsplit for the global matrix */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCFIELDSPLIT));
  PetscCall(PCFieldSplitSetIS(pc, "a", A_IS));
  PetscCall(PCFieldSplitSetIS(pc, "b", B_IS));
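
  /* Attach a block size to the index sets after they have been handed to the PC;
     the blocked layout is then seen when PCFIELDSPLIT sets up */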
  PetscCall(ISSetBlockSize(A_IS, block_size));
  PetscCall(ISSetBlockSize(B_IS, block_size));

  PetscCall(KSPSetFromOptions(ksp));
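  /* KSPSetUp triggers the PCFIELDSPLIT setup, which extracts the submatrices defined by the blocked IS */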
  PetscCall(KSPSetUp(ksp));

  PetscCall(ISDestroy(&A_IS));
  PetscCall(ISDestroy(&B_IS));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}
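
/*
   Example usage (a sketch; the MPI launcher name may vary by installation):
     mpiexec -n 2 ./ex53 -method 1
     mpiexec -n 2 ./ex53 -method 2 -block_size 2
*/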

/*TEST

   test:
      nsize: 2
      args: -method 1

   test:
      suffix: 2
      nsize: 2
      args: -method 2

TEST*/