Actual source code: ex11.c
static char help[] = "Tests MatMeshToCellGraph()\n\n";

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc, char **args)
{
  Mat             mesh, dual;
  PetscInt        Nvertices = 6; /* total number of vertices */
  PetscInt        ncells    = 2; /* number of cells on this process */
  PetscInt       *ii, *jj;
  PetscMPIInt     size, rank;
  MatPartitioning part;
  IS              is;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(MPI_COMM_WORLD, &size));
  PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This example is for exactly two processes");
  PetscCallMPI(MPI_Comm_rank(MPI_COMM_WORLD, &rank));
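
  /*
     Describe the local cells in CSR form: ii[] holds the row offsets and
     jj[] the vertex numbers of each cell.  Each process owns two cells
     with three vertices each (e.g. two triangles) drawn from the 6 global
     vertices.
  */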
  PetscCall(PetscMalloc1(3, &ii));
  PetscCall(PetscMalloc1(6, &jj));
  ii[0] = 0;
  ii[1] = 3;
  ii[2] = 6;
  if (rank == 0) {
    jj[0] = 0;
    jj[1] = 1;
    jj[2] = 2;
    jj[3] = 1;
    jj[4] = 2;
    jj[5] = 3;
  } else {
    jj[0] = 1;
    jj[1] = 4;
    jj[2] = 5;
    jj[3] = 1;
    jj[4] = 3;
    jj[5] = 5;
  }
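
  /*
     Create a parallel adjacency matrix (cell -> vertex) from the CSR arrays.
     MatCreateMPIAdj() takes ownership of ii and jj: they must be allocated
     with PetscMalloc() and are freed by PETSc when the matrix is destroyed,
     so they are not freed below.
  */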
  PetscCall(MatCreateMPIAdj(MPI_COMM_WORLD, ncells, Nvertices, ii, jj, NULL, &mesh));
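
  /*
     Build the dual graph of the mesh: each cell becomes a graph vertex and
     two cells are connected when they share at least 2 mesh vertices (the
     second argument).  MatMeshToCellGraph() requires PETSc to be configured
     with ParMETIS.
  */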
  PetscCall(MatMeshToCellGraph(mesh, 2, &dual));
  PetscCall(MatView(dual, PETSC_VIEWER_STDOUT_WORLD));
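
  /*
     Partition the dual graph.  MatPartitioningSetFromOptions() allows the
     partitioner to be selected at runtime (e.g. -mat_partitioning_type
     parmetis), and MatPartitioningApply() returns an index set giving, for
     each local cell, the rank it is assigned to.
  */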
  PetscCall(MatPartitioningCreate(MPI_COMM_WORLD, &part));
  PetscCall(MatPartitioningSetAdjacency(part, dual));
  PetscCall(MatPartitioningSetFromOptions(part));
  PetscCall(MatPartitioningApply(part, &is));
  PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(ISDestroy(&is));
  PetscCall(MatPartitioningDestroy(&part));

  PetscCall(MatDestroy(&mesh));
  PetscCall(MatDestroy(&dual));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
     requires: parmetis

   test:
     nsize: 2
     requires: parmetis

TEST*/