Actual source code: ex56.c
static char help[] = "Test the use of MatSetValuesBlocked(), MatZeroRows() for rectangular MatBAIJ matrix, test MatSetValuesBlocked() for MatSBAIJ matrix (-test_mat_sbaij).";

#include <petscmat.h>
int main(int argc, char **args)
{
  Mat         A;
  PetscInt    bs = 3, m = 4, n = 6, i, j, val = 10, row[2], col[3], eval, rstart;
  PetscMPIInt size, rank;
  PetscScalar x[6][9], y[3][3], one = 1.0;
  PetscBool   flg, testsbaij = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-test_mat_sbaij", &testsbaij));
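
  /* Create a rectangular block matrix with block size bs, m block rows and n block columns per process;
     with -test_mat_sbaij a MATSBAIJ matrix is created instead of MATBAIJ */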
  if (testsbaij) {
    PetscCall(MatCreateSBAIJ(PETSC_COMM_WORLD, bs, m * bs, n * bs, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, 1, NULL, &A));
  } else {
    PetscCall(MatCreateBAIJ(PETSC_COMM_WORLD, bs, m * bs, n * bs, PETSC_DECIDE, PETSC_DECIDE, 1, NULL, 1, NULL, &A));
  }
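  /* Only one nonzero block per block row was preallocated above, so allow additional blocks
     to be allocated during insertion without raising an error */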
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE));
  eval = 9;
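
  /* With -ass_extern (and more than one process) each rank targets the block rows owned by the
     next rank, exercising off-process assembly */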
  PetscCall(PetscOptionsHasName(NULL, NULL, "-ass_extern", &flg));
  if (flg && (size != 1)) rstart = m * ((rank + 1) % size);
  else rstart = m * (rank);

  row[0] = rstart + 0;
  row[1] = rstart + 2;
  col[0] = rstart + 0;
  col[1] = rstart + 1;
  col[2] = rstart + 3;
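  /* x holds the 2-by-3 array of bs-by-bs blocks as one dense 6-by-9 array in row-oriented order,
     the default layout expected by MatSetValuesBlocked() */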
  for (i = 0; i < 6; i++) {
    for (j = 0; j < 9; j++) x[i][j] = (PetscScalar)val++;
  }
  PetscCall(MatSetValuesBlocked(A, 2, row, 3, col, &x[0][0], INSERT_VALUES));

  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     This option does not work for rectangular matrices
  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
  */
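
  /* Re-insert the same blocks after assembly; their locations already exist in the nonzero pattern */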
  PetscCall(MatSetValuesBlocked(A, 2, row, 3, col, &x[0][0], INSERT_VALUES));

  /* Do another MatSetValuesBlocked() to test the case when only one local block is specified */
  for (i = 0; i < 3; i++) {
    for (j = 0; j < 3; j++) y[i][j] = (PetscScalar)(10 + i * eval + j);
  }
  PetscCall(MatSetValuesBlocked(A, 1, row, 1, col, &y[0][0], INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
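
  /* With -zero_rows, zero the first three point rows (not block rows) of this rank's range and
     request the value one on their diagonal entries; col[] is reused here to hold point row indices */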
  PetscCall(PetscOptionsHasName(NULL, NULL, "-zero_rows", &flg));
  if (flg) {
    col[0] = rstart * bs + 0;
    col[1] = rstart * bs + 1;
    col[2] = rstart * bs + 2;
    PetscCall(MatZeroRows(A, 3, col, one, 0, 0));
  }

  PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));

  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      filter: grep -v " MPI process"

   test:
      suffix: 4
      nsize: 3
      args: -ass_extern
      filter: grep -v " MPI process"

   test:
      suffix: 5
      nsize: 3
      args: -ass_extern -zero_rows
      filter: grep -v " MPI process"

TEST*/
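
/* A sketch of how the configurations above might be run by hand, assuming the example is built
   as an executable named ex56 in the usual way:
       ./ex56
       mpiexec -n 3 ./ex56 -ass_extern
       mpiexec -n 3 ./ex56 -ass_extern -zero_rows
   the " MPI process" lines are filtered from the output when comparing against the expected results */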