Actual source code: ex73.c


static char help[] = "Reads a PETSc matrix from a file and partitions it\n\n";

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscis.h     - index sets
     petscviewer.h - viewers

  Example of usage:
    mpiexec -n 3 ex73 -f <matfile> -mat_partitioning_type parmetis -viewer_binary_skip_info -nox
    (use -mat_partitioning_type ptscotch to partition with PT-Scotch instead)
*/
#include <petscmat.h>

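/* Overall flow of this example:
     1. load a matrix A (and optionally a vector) from a binary file,
     2. partition the graph of A with ParMETIS or PT-Scotch,
     3. turn the partitioning into a new global numbering and its inverse,
     4. redistribute A (and the vector) according to that numbering, and
     5. build the nonzero structure of a "blown up" matrix with two rows/columns per row of the
        redistributed matrix. */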
int main(int argc, char **args)
{
  MatType         mtype = MATMPIAIJ;        /* matrix format */
  Mat             A, B;                     /* matrix */
  PetscViewer     fd;                       /* viewer */
  char            file[PETSC_MAX_PATH_LEN]; /* input file name */
  PetscBool       flg, viewMats, viewIS, viewVecs, useND, noVecLoad = PETSC_FALSE;
  PetscInt       *nlocal, m, n;
  PetscMPIInt     rank, size;
  MatPartitioning part;
  IS              is, isn;
  Vec             xin, xout;
  VecScatter      scat;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-view_mats", &viewMats));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-view_is", &viewIS));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-view_vecs", &viewVecs));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-use_nd", &useND));
  PetscCall(PetscOptionsHasName(NULL, NULL, "-novec_load", &noVecLoad));

  /*
     Determine file from which we read the matrix
  */
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), &flg));
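  /* Note: 'flg' is not checked, so 'file' stays uninitialized if -f is omitted.  A defensive
     check (not part of the original example) could abort early, e.g.:
       PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER, "Must indicate a binary matrix file with the -f option");
  */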

  /*
       Open binary file.  Note that we use FILE_MODE_READ to indicate
       reading from this file.
  */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));

  /*
      Load the matrix and vector; then destroy the viewer.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, mtype));
  PetscCall(MatLoad(A, fd));
  if (!noVecLoad) {
    PetscCall(VecCreate(PETSC_COMM_WORLD, &xin));
    PetscCall(VecLoad(xin, fd));
  } else {
    PetscCall(MatCreateVecs(A, &xin, NULL));
    PetscCall(VecSetRandom(xin, NULL));
  }
  PetscCall(PetscViewerDestroy(&fd));
  if (viewMats) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Original matrix:\n"));
    PetscCall(MatView(A, PETSC_VIEWER_DRAW_WORLD));
  }
  if (viewVecs) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Original vector:\n"));
    PetscCall(VecView(xin, PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Partition the graph of the matrix */
  PetscCall(MatPartitioningCreate(PETSC_COMM_WORLD, &part));
  PetscCall(MatPartitioningSetAdjacency(part, A));
  PetscCall(MatPartitioningSetFromOptions(part));
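  /* The partitioner is chosen at run time with -mat_partitioning_type parmetis|ptscotch|...
     (see the usage line at the top of this file).  A default could also be set in code, e.g.
       PetscCall(MatPartitioningSetType(part, MATPARTITIONINGPARMETIS));
     placed before MatPartitioningSetFromOptions() so the command line can still override it. */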

  /* get new processor owner number of each vertex */
  if (useND) {
    PetscCall(MatPartitioningApplyND(part, &is));
  } else {
    PetscCall(MatPartitioningApply(part, &is));
  }
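  /* MatPartitioningApply() fills 'is' with, for each local row of A, the rank that row has been
     assigned to; MatPartitioningApplyND() instead computes a nested-dissection style partitioning
     (interior subdomains plus separators). */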
  if (viewIS) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "IS1 - new processor ownership:\n"));
    PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));
  }

  /* get new global number of each old global number */
  PetscCall(ISPartitioningToNumbering(is, &isn));
  if (viewIS) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "IS2 - new global numbering:\n"));
    PetscCall(ISView(isn, PETSC_VIEWER_STDOUT_WORLD));
  }
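  /* At this point, for the default (non-ND) case: the entry of 'is' for old global row i is the
     rank that will own it, and the corresponding entry of 'isn' is the new global index of that
     row once the rows are renumbered contiguously by owning rank. */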

  /* get number of new vertices for each processor */
  PetscCall(PetscMalloc1(size, &nlocal));
  PetscCall(ISPartitioningCount(is, size, nlocal));
  PetscCall(ISDestroy(&is));
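  /* nlocal[r] now holds how many rows the partitioning assigned to rank r; nlocal[rank] is used
     below as the local length of the inverted permutation on this process. */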

  /* get old global number of each new global number */
  PetscCall(ISInvertPermutation(isn, useND ? PETSC_DECIDE : nlocal[rank], &is));
  if (viewIS) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "IS3=inv(IS2) - old global number of each new global number:\n"));
    PetscCall(ISView(is, PETSC_VIEWER_STDOUT_WORLD));
  }
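  /* 'is' is now the inverse of the renumbering: its entry for new global row k gives the old
     global index of that row, distributed so that each rank holds the rows it was assigned. */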

  /* move the matrix rows to the new processes they have been assigned to by the permutation */
  PetscCall(MatCreateSubMatrix(A, is, is, MAT_INITIAL_MATRIX, &B));
  PetscCall(PetscFree(nlocal));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatDestroy(&A));
  PetscCall(MatPartitioningDestroy(&part));
  if (viewMats) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Partitioned matrix:\n"));
    PetscCall(MatView(B, PETSC_VIEWER_DRAW_WORLD));
  }
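  /* Using the same index set for rows and columns makes this a symmetric permutation: B is A with
     rows and columns reordered consistently and redistributed across the ranks. */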

  /* move the vector rows to the new processes they have been assigned to */
  PetscCall(MatGetLocalSize(B, &m, &n));
  PetscCall(VecCreateMPI(PETSC_COMM_WORLD, m, PETSC_DECIDE, &xout));
  PetscCall(VecScatterCreate(xin, is, xout, NULL, &scat));
  PetscCall(VecScatterBegin(scat, xin, xout, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scat, xin, xout, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&scat));
  if (viewVecs) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Mapped vector:\n"));
    PetscCall(VecView(xout, PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscCall(VecDestroy(&xout));
  PetscCall(ISDestroy(&is));
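  /* Passing NULL as the destination index set means "use the natural ordering of xout", so the
     scatter places the entry of xin selected by 'is' into the corresponding position of xout,
     applying the same permutation to the vector that MatCreateSubMatrix() applied to the matrix. */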

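  /* The block below builds, structure only (all inserted values are zero), a matrix J that is B
     "blown up" by a factor of two: every nonzero (i,j) of B becomes a 2x2 block at rows 2*i, 2*i+1
     and columns 2*j, 2*j+1 of J.  This is the pattern one would get for the same graph with two
     degrees of freedom per node; the first pass gathers exact preallocation counts in nzd/nzo. */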
  {
    PetscInt           rstart, i, *nzd, *nzo, nzl, nzmax = 0, *ncols, nrow, j;
    Mat                J;
    const PetscInt    *cols;
    const PetscScalar *vals;
    PetscScalar       *nvals;

    PetscCall(MatGetOwnershipRange(B, &rstart, NULL));
    PetscCall(PetscCalloc2(2 * m, &nzd, 2 * m, &nzo));
    for (i = 0; i < m; i++) {
      PetscCall(MatGetRow(B, i + rstart, &nzl, &cols, NULL));
      for (j = 0; j < nzl; j++) {
        if (cols[j] >= rstart && cols[j] < rstart + n) {
          nzd[2 * i] += 2;
          nzd[2 * i + 1] += 2;
        } else {
          nzo[2 * i] += 2;
          nzo[2 * i + 1] += 2;
        }
      }
      nzmax = PetscMax(nzmax, nzd[2 * i] + nzo[2 * i]);
      PetscCall(MatRestoreRow(B, i + rstart, &nzl, &cols, NULL));
    }
    PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, 2 * m, 2 * m, PETSC_DECIDE, PETSC_DECIDE, 0, nzd, 0, nzo, &J));
    PetscCall(PetscInfo(0, "Created empty Jacobian matrix\n"));
    PetscCall(PetscFree2(nzd, nzo));
    PetscCall(PetscMalloc2(nzmax, &ncols, nzmax, &nvals));
    PetscCall(PetscArrayzero(nvals, nzmax));
    for (i = 0; i < m; i++) {
      PetscCall(MatGetRow(B, i + rstart, &nzl, &cols, &vals));
      for (j = 0; j < nzl; j++) {
        ncols[2 * j]     = 2 * cols[j];
        ncols[2 * j + 1] = 2 * cols[j] + 1;
      }
      nrow = 2 * (i + rstart);
      PetscCall(MatSetValues(J, 1, &nrow, 2 * nzl, ncols, nvals, INSERT_VALUES));
      nrow = 2 * (i + rstart) + 1;
      PetscCall(MatSetValues(J, 1, &nrow, 2 * nzl, ncols, nvals, INSERT_VALUES));
      PetscCall(MatRestoreRow(B, i + rstart, &nzl, &cols, &vals));
    }
    PetscCall(MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY));
    if (viewMats) {
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Jacobian matrix structure:\n"));
      PetscCall(MatView(J, PETSC_VIEWER_DRAW_WORLD));
    }
    PetscCall(MatDestroy(&J));
    PetscCall(PetscFree2(ncols, nvals));
  }

  /*
       Free work space.  All PETSc objects should be destroyed when they
       are no longer needed.
  */
  PetscCall(MatDestroy(&B));
  PetscCall(VecDestroy(&xin));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 3
      requires: parmetis datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      args: -nox -f ${DATAFILESPATH}/matrices/arco1 -mat_partitioning_type parmetis -viewer_binary_skip_info -novec_load

   test:
      requires: parmetis !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_32
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: parmetis !complex double defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_64
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_32
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_64
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

TEST*/