Actual source code: ex80.c
static char help[] = "Partition tiny grid.\n\n";

/*
  Include "petscmat.h" so that we can use matrices. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc, char **args)
{
  Mat             A, At;
  PetscMPIInt     rank, size;
  PetscInt       *ia, *ja, row;
  MatPartitioning part;
  IS              is, isn;
  PetscBool       equal;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCheck(size == 4, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "Must run with 4 processors");
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
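
  /*
     The adjacency lists below describe a 4 x 4 grid of 16 vertices, numbered
     by row:

        0 --  1 --  2 --  3
        |     |     |     |
        4 --  5 --  6 --  7
        |     |     |     |
        8 --  9 -- 10 -- 11
        |     |     |     |
       12 -- 13 -- 14 -- 15

     Each rank owns four consecutive vertices (rank 0 owns 0-3, rank 1 owns
     4-7, and so on) and stores their neighbor lists in compressed sparse row
     (CSR) form: ja holds the concatenated neighbor lists and ia[i] is the
     offset in ja of the first neighbor of local vertex i.
  */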
  PetscCall(PetscMalloc1(5, &ia));
  PetscCall(PetscMalloc1(16, &ja));
  if (rank == 0) {
    /* local vertices 0-3 */
    ja[0] = 1; ja[1] = 4;            /* neighbors of vertex 0 */
    ja[2] = 0; ja[3] = 2; ja[4] = 5; /* neighbors of vertex 1 */
    ja[5] = 1; ja[6] = 3; ja[7] = 6; /* neighbors of vertex 2 */
    ja[8] = 2; ja[9] = 7;            /* neighbors of vertex 3 */
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  } else if (rank == 1) {
    /* local vertices 4-7 */
    ja[0] = 0; ja[1] = 5; ja[2] = 8;              /* neighbors of vertex 4 */
    ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9;   /* neighbors of vertex 5 */
    ja[7] = 2; ja[8] = 5; ja[9] = 7; ja[10] = 10; /* neighbors of vertex 6 */
    ja[11] = 3; ja[12] = 6; ja[13] = 11;          /* neighbors of vertex 7 */
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else if (rank == 2) {
    /* local vertices 8-11 */
    ja[0] = 4; ja[1] = 9; ja[2] = 12;              /* neighbors of vertex 8 */
    ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13;  /* neighbors of vertex 9 */
    ja[7] = 6; ja[8] = 9; ja[9] = 11; ja[10] = 14; /* neighbors of vertex 10 */
    ja[11] = 7; ja[12] = 10; ja[13] = 15;          /* neighbors of vertex 11 */
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
  } else {
    /* local vertices 12-15 */
    ja[0] = 8; ja[1] = 13;              /* neighbors of vertex 12 */
    ja[2] = 9; ja[3] = 12; ja[4] = 14;  /* neighbors of vertex 13 */
    ja[5] = 10; ja[6] = 13; ja[7] = 15; /* neighbors of vertex 14 */
    ja[8] = 11; ja[9] = 14;             /* neighbors of vertex 15 */
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
  }
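
  /*
     MatCreateMPIAdj() takes ownership of ia and ja (the NULL argument means
     there are no edge weights): PETSc frees them when A is destroyed, which
     is why this example never calls PetscFree() on them.
  */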
  PetscCall(MatCreateMPIAdj(PETSC_COMM_WORLD, 4, 16, ia, ja, NULL, &A));
  PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));

  /* Create the same matrix, but this time with MatSetValues() */
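  /*
     MATMPIADJ stores only the nonzero structure of the graph, not numerical
     values, so the values argument of MatSetValues() below is NULL; each rank
     inserts its four rows from the same CSR arrays built above.
  */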
  PetscCall(MatCreate(PETSC_COMM_WORLD, &At));
  PetscCall(MatSetSizes(At, 4, 4, 16, 16));
  PetscCall(MatSetType(At, MATMPIADJ));
  for (PetscInt i = 0; i < 4; i++) {
    row = i + 4 * rank; /* global row index: each rank owns rows 4*rank .. 4*rank+3 */
    PetscCall(MatSetValues(At, 1, &row, ia[i + 1] - ia[i], ja + ia[i], NULL, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(At, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(At, MAT_FINAL_ASSEMBLY));
  PetscCall(MatEqual(A, At, &equal));
  PetscCheck(equal, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Matrices that should be equal are not");
  PetscCall(MatDestroy(&At));

  /*
     Partition the graph of the matrix
  */
  PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
  PetscCall(MatPartitioningSetAdjacency(part, A));
  PetscCall(MatPartitioningSetFromOptions(part));
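  /*
     MatPartitioningSetFromOptions() lets the partitioner be selected at run
     time, e.g. with -mat_partitioning_type parmetis (the package this
     example's test requires).
  */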
  /* get the new owning process of each vertex */
  PetscCall(MatPartitioningApply(part, &is));
  /* get the new global number of each old global number */
  PetscCall(ISPartitioningToNumbering(is, &isn));
  PetscCall(ISView(isn, PETSC_VIEWER_STDOUT_WORLD));
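
  /*
     A minimal sketch (not executed here): the partitioning IS can also be
     reduced to per-part vertex counts, assuming 4 parts as in this example:

       PetscInt counts[4];
       PetscCall(ISPartitioningCount(is, 4, counts));

     counts[p] then holds the number of vertices assigned to part p.
  */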
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatPartitioningDestroy(&part));

  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(MatDestroy(&A));

  PetscCall(PetscFinalize());
  return 0;
}
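
/*
   Example run, matching the test harness entry below (needs a PETSc build
   configured with ParMETIS):

     mpiexec -n 4 ./ex80 -mat_view
*/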

/*TEST

   test:
      requires: parmetis
      args: -mat_view
      nsize: 4

TEST*/