Actual source code: ex19.c


static char help[] = "Tests reusing MPI parallel matrices and MatGetValues().\n\
To test the parallel matrix assembly, this example intentionally lays out\n\
the matrix across processors differently from the way it is assembled.\n\
This example uses bilinear elements on the unit square.  Input arguments are:\n\
  -m <size> : problem size\n\n";

#include <petscmat.h>
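
/*
  FormElementStiffness() fills Ke (4x4, stored row-major) with the bilinear
  element stiffness for the Laplacian; the caller passes H = h*h. Each row of
  Ke sums to zero (H/6 - H/8 + H/12 - H/8 = 0), so the assembled matrix
  annihilates the constant vector, which the error check in main() relies on.
*/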

PetscErrorCode FormElementStiffness(PetscReal H, PetscScalar *Ke)
{
  PetscFunctionBegin;
  Ke[0]  = H / 6.0;
  Ke[1]  = -.125 * H;
  Ke[2]  = H / 12.0;
  Ke[3]  = -.125 * H;
  Ke[4]  = -.125 * H;
  Ke[5]  = H / 6.0;
  Ke[6]  = -.125 * H;
  Ke[7]  = H / 12.0;
  Ke[8]  = H / 12.0;
  Ke[9]  = -.125 * H;
  Ke[10] = H / 6.0;
  Ke[11] = -.125 * H;
  Ke[12] = -.125 * H;
  Ke[13] = H / 12.0;
  Ke[14] = -.125 * H;
  Ke[15] = H / 6.0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **args)
{
  Mat         C;
  Vec         u, b;
  PetscMPIInt size, rank;
  PetscInt    i, m = 5, N, start, end, M, idx[4];
  PetscInt    j, nrsub, ncsub, *rsub, *csub, mystart, myend;
  PetscBool   flg;
  PetscScalar one = 1.0, Ke[16], *vals;
  PetscReal   h, norm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));

  N = (m + 1) * (m + 1); /* dimension of matrix */
  M = m * m;             /* number of elements */
  h = 1.0 / m;           /* mesh width */
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  /* Create stiffness matrix */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &C));
  PetscCall(MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, N, N));
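  /* MatSetFromOptions() allows the matrix type to be chosen at run time
     (e.g. -mat_type), and MatSetUp() provides a default preallocation since
     none is specified here. */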
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatSetUp(C));

  start = rank * (M / size) + ((M % size) < rank ? (M % size) : rank);
  end   = start + M / size + ((M % size) > rank);
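
  /*
    Each rank assembles a contiguous range [start, end) of elements: the
    first M % size ranks get M / size + 1 elements, the rest get M / size.
    For the default m = 5 (M = 25) on 4 ranks the ranges are [0,7), [7,13),
    [13,19), [19,25). This element partition is unrelated to the row
    ownership of C, which is the point of the test.
  */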

  /* Form the element stiffness for the Laplacian */
  PetscCall(FormElementStiffness(h * h, Ke));
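  /*
    Node (x, y) of the (m+1) x (m+1) grid has global index (m+1)*y + x, and
    element i has its lower-left corner at (i % m, i / m). For m = 5,
    element 7 therefore touches nodes 8, 9, 15, 14, listed counterclockwise
    from the lower-left corner.
  */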
  for (i = start; i < end; i++) {
    /* node numbers for the four corners of element i, whose lower-left
       corner is node idx[0] */
    idx[0] = (m + 1) * (i / m) + (i % m);
    idx[1] = idx[0] + 1;
    idx[2] = idx[1] + m + 1;
    idx[3] = idx[2] - 1;
    PetscCall(MatSetValues(C, 4, idx, 4, idx, Ke, ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
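
  /*
    Because the element partition differs from the row ownership of C,
    MatSetValues() stashes entries destined for other ranks and the
    MatAssemblyBegin()/MatAssemblyEnd() pair communicates them to their
    owners.
  */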

  /* Zero the matrix and assemble it again, to test reuse of a previously
     assembled parallel matrix */
  PetscCall(MatZeroEntries(C));
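
  /* MatZeroEntries() keeps the assembled nonzero structure; only the stored
     values are set to zero. */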

  for (i = start; i < end; i++) {
    /* node numbers for the four corners of element i */
    idx[0] = (m + 1) * (i / m) + (i % m);
    idx[1] = idx[0] + 1;
    idx[2] = idx[1] + m + 1;
    idx[3] = idx[2] - 1;
    PetscCall(MatSetValues(C, 4, idx, 4, idx, Ke, ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));

  /* Create test vectors */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, N));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecSet(u, one));
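
  /* Since every row of the element stiffness sums to zero, every row of the
     assembled matrix does too, so C*u with u identically one should vanish
     (up to rounding). */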

  /* Check that C*u is near zero */
  PetscCall(MatMult(C, u, b));
  PetscCall(VecNorm(b, NORM_2, &norm));
  if (norm > PETSC_SQRT_MACHINE_EPSILON) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error b %g should be near 0\n", (double)norm));

  /* Now test MatGetValues() */
  PetscCall(PetscOptionsHasName(NULL, NULL, "-get_values", &flg));
  if (flg) {
    PetscCall(MatGetOwnershipRange(C, &mystart, &myend));
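
    /* MatGetValues() can only retrieve values stored on the calling rank,
       so the rows requested below are restricted to the local ownership
       range [mystart, myend). */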
    nrsub = myend - mystart;
    ncsub = 4;
    PetscCall(PetscMalloc1(nrsub * ncsub, &vals));
    PetscCall(PetscMalloc1(nrsub, &rsub));
    PetscCall(PetscMalloc1(ncsub, &csub));
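
    /* The local rows are requested in reverse order and the four columns at
       descending even offsets mystart+8, ..., mystart+2, presumably to
       exercise MatGetValues() with unsorted index sets */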
    for (i = myend - 1; i >= mystart; i--) rsub[myend - i - 1] = i;
    for (i = 0; i < ncsub; i++) csub[i] = 2 * (ncsub - i) + mystart;
    PetscCall(MatGetValues(C, nrsub, rsub, ncsub, csub, vals));
    PetscCall(MatView(C, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "processor number %d: start=%" PetscInt_FMT ", end=%" PetscInt_FMT ", mystart=%" PetscInt_FMT ", myend=%" PetscInt_FMT "\n", rank, start, end, mystart, myend));
    for (i = 0; i < nrsub; i++) {
      for (j = 0; j < ncsub; j++) {
        if (PetscImaginaryPart(vals[i * ncsub + j]) != 0.0) {
          PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "  C[%" PetscInt_FMT ", %" PetscInt_FMT "] = %g + %g i\n", rsub[i], csub[j], (double)PetscRealPart(vals[i * ncsub + j]), (double)PetscImaginaryPart(vals[i * ncsub + j])));
        } else {
          PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "  C[%" PetscInt_FMT ", %" PetscInt_FMT "] = %g\n", rsub[i], csub[j], (double)PetscRealPart(vals[i * ncsub + j])));
        }
      }
    }
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
    PetscCall(PetscFree(rsub));
    PetscCall(PetscFree(csub));
    PetscCall(PetscFree(vals));
  }

  /* Free data structures */
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 4

TEST*/