Actual source code: ex28.c
static char help[] = "Tests setting KSP options procedurally or at runtime via KSPSetFromOptions(); tests PCREDUNDANT.\n\n";

#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PC          pc;      /* preconditioner context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i, n = 10, col[3], its, rstart, rend, nlocal;
  PetscScalar one = 1.0, value[3];
  PetscBool   TEST_PROCEDURAL = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-procedural", &TEST_PROCEDURAL, NULL));
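  /* Both options above are read from the command line, e.g. -n 20 -procedural */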

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create vectors. Note that we form 1 vector from scratch and
     then duplicate as needed. For this simple case let PETSc decide how
     many elements of the vector are stored on each processor. The second
     argument to VecSetSizes() below causes PETSc to decide.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecSetSizes(x, PETSC_DECIDE, n));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecDuplicate(x, &b));
  PetscCall(VecDuplicate(x, &u));

  /* Identify the starting and ending mesh points on each
     processor for the interior part of the mesh. We let PETSc decide
     above. */

  PetscCall(VecGetOwnershipRange(x, &rstart, &rend));
  PetscCall(VecGetLocalSize(x, &nlocal));
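  /* This rank now owns rows rstart..rend-1 (nlocal rows in total); passing
     nlocal to MatSetSizes() below makes the matrix row distribution match
     that of the vectors. */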

  /* Create a tridiagonal matrix. See ../tutorials/ex23.c */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, nlocal, nlocal, n, n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  /* Assemble matrix */
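  /* The first and last rows have only two nonzeros, so the ranks owning
     row 0 and row n-1 handle them separately and then shrink their interior
     range (rstart/rend) accordingly. */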
  if (!rstart) {
    rstart   = 1;
    i        = 0;
    col[0]   = 0;
    col[1]   = 1;
    value[0] = 2.0;
    value[1] = -1.0;
    PetscCall(MatSetValues(A, 1, &i, 2, col, value, INSERT_VALUES));
  }
  if (rend == n) {
    rend     = n - 1;
    i        = n - 1;
    col[0]   = n - 2;
    col[1]   = n - 1;
    value[0] = -1.0;
    value[1] = 2.0;
    PetscCall(MatSetValues(A, 1, &i, 2, col, value, INSERT_VALUES));
  }

  /* Set entries corresponding to the mesh interior */
  value[0] = -1.0;
  value[1] = 2.0;
  value[2] = -1.0;
  for (i = rstart; i < rend; i++) {
    col[0] = i - 1;
    col[1] = i;
    col[2] = i + 1;
    PetscCall(MatSetValues(A, 1, &i, 3, col, value, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
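  /* Between MatAssemblyBegin() and MatAssemblyEnd() PETSc communicates any
     entries that were set on a rank that does not own them; here every rank
     sets only its own rows, so assembly completes without extra traffic. */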

  /* Set exact solution; then compute right-hand-side vector. */
  PetscCall(VecSet(u, one));
  PetscCall(MatMult(A, u, b));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetOperators(ksp, A, A));

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following statements are optional; all of these
       parameters could alternatively be specified at runtime via
       KSPSetFromOptions();
  */
  if (TEST_PROCEDURAL) {
    /* Example of runtime options: '-pc_redundant_number 3 -redundant_ksp_type gmres -redundant_pc_type bjacobi' */
    PetscMPIInt size, rank, subsize;
    Mat         A_redundant;
    KSP         innerksp;
    PC          innerpc;
    MPI_Comm    subcomm;

    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PCSetType(pc, PCREDUNDANT));
    PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    PetscCheck(size > 2, PETSC_COMM_SELF, PETSC_ERR_WRONG_MPI_SIZE, "Number of processes %d must be greater than 2", size);
    PetscCall(PCRedundantSetNumber(pc, size - 2));
    PetscCall(KSPSetFromOptions(ksp));
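    /* Command-line options are applied here, after the procedural calls, so
       runtime options such as -pc_redundant_number can still override the
       settings made above. */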

    /* Get subcommunicator and redundant matrix */
    PetscCall(KSPSetUp(ksp));
    PetscCall(PCRedundantGetKSP(pc, &innerksp));
    PetscCall(KSPGetPC(innerksp, &innerpc));
    PetscCall(PCGetOperators(innerpc, NULL, &A_redundant));
    PetscCall(PetscObjectGetComm((PetscObject)A_redundant, &subcomm));
    PetscCallMPI(MPI_Comm_size(subcomm, &subsize));
    if (subsize == 1 && rank == 0) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "A_redundant:\n"));
      PetscCall(MatView(A_redundant, PETSC_VIEWER_STDOUT_SELF));
    }
  } else {
    PetscCall(KSPSetFromOptions(ksp));
  }

  /* Solve linear system */
  PetscCall(KSPSolve(ksp, b, x));
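
  /* Optional sketch, not part of the original example: verify convergence
     explicitly using the standard KSPGetConvergedReason() API (a positive
     reason indicates convergence, a negative one divergence). */
  {
    KSPConvergedReason reason;
    PetscCall(KSPGetConvergedReason(ksp, &reason));
    PetscCheck(reason > 0, PETSC_COMM_WORLD, PETSC_ERR_NOT_CONVERGED, "Linear solve failed to converge: reason %d", (int)reason);
  }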

  /* Check the error */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g, Iterations %" PetscInt_FMT "\n", (double)norm, its));

  /* Free work space. */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 3
      output_file: output/ex28.out

   test:
      suffix: 2
      args: -procedural -pc_redundant_number 3 -redundant_ksp_type gmres -redundant_pc_type bjacobi
      nsize: 3

   test:
      suffix: 3
      args: -procedural -pc_redundant_number 3 -redundant_ksp_type gmres -redundant_pc_type bjacobi
      nsize: 5

TEST*/