Actual source code: ex106.c


static char help[] = "Test repeated LU factorizations. Used for checking for memory leaks\n\
  -m <size> : problem size\n\
  -mat_nonsym : use nonsymmetric matrix (default is symmetric)\n\n";

#include <petscmat.h>
int main(int argc, char **args)
{
  Mat           C, F;    /* matrix */
  Vec           x, u, b; /* approx solution, RHS, exact solution */
  PetscReal     norm;    /* norm of solution error */
  PetscScalar   v, none = -1.0;
  PetscInt      I, J, ldim, low, high, iglobal, Istart, Iend;
  PetscInt      i, j, m = 3, n = 2, its;
  PetscMPIInt   size, rank;
  PetscBool     mat_nonsymmetric;
  PetscInt      its_max;
  MatFactorInfo factinfo;
  IS            perm, iperm;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  n = 2 * size;
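  /* the problem is posed on an m x n grid, so the matrix has global size m*n; n grows with the number of MPI ranks */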

  /*
     Set flag if we are doing a nonsymmetric problem; the default is symmetric.
  */
  PetscCall(PetscOptionsHasName(NULL, NULL, "-mat_nonsym", &mat_nonsymmetric));

  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &C));
  PetscCall(MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n));
  PetscCall(MatSetFromOptions(C));
  PetscCall(MatGetOwnershipRange(C, &Istart, &Iend));

  /*
     Set matrix entries in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
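  /* the loop below assembles the standard 5-point stencil (diagonal 4, off-diagonals -1) over the m x n grid */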
  for (I = Istart; I < Iend; I++) {
    v = -1.0;
    i = I / n;
    j = I - i * n;
    if (i > 0) {
      J = I - n;
      PetscCall(MatSetValues(C, 1, &I, 1, &J, &v, ADD_VALUES));
    }
    if (i < m - 1) {
      J = I + n;
      PetscCall(MatSetValues(C, 1, &I, 1, &J, &v, ADD_VALUES));
    }
    if (j > 0) {
      J = I - 1;
      PetscCall(MatSetValues(C, 1, &I, 1, &J, &v, ADD_VALUES));
    }
    if (j < n - 1) {
      J = I + 1;
      PetscCall(MatSetValues(C, 1, &I, 1, &J, &v, ADD_VALUES));
    }
    v = 4.0;
    PetscCall(MatSetValues(C, 1, &I, 1, &I, &v, ADD_VALUES));
  }

  /*
     Make the matrix nonsymmetric if desired
  */
  if (mat_nonsymmetric) {
    for (I = Istart; I < Iend; I++) {
      v = -1.5;
      i = I / n;
      if (i > 1) {
        J = I - n - 1;
        PetscCall(MatSetValues(C, 1, &I, 1, &J, &v, ADD_VALUES));
      }
    }
  } else {
    PetscCall(MatSetOption(C, MAT_SYMMETRIC, PETSC_TRUE));
    PetscCall(MatSetOption(C, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
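    /* MAT_SYMMETRY_ETERNAL tells PETSc to keep the symmetry flag even if the matrix values are changed later */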
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));

  its_max = 1000;
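  /* note: its_max is reassigned to 2000 below, just before the factorization loop */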
  /*
     Create parallel vectors.
      - When using VecSetSizes(), we specify only the vector's global
        dimension; the parallel partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, m * n));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));

  /*
     Currently, all parallel PETSc vectors are partitioned by
     contiguous chunks across the processors.  Determine which
     range of entries is locally owned.
  */
  PetscCall(VecGetOwnershipRange(x, &low, &high));

  /*
    Set elements within the exact solution vector in parallel.
     - Each processor needs to insert only elements that it owns
       locally (but any non-local entries will be sent to the
       appropriate processor during vector assembly).
     - Always specify global locations of vector entries.
  */
  PetscCall(VecGetLocalSize(x, &ldim));
  for (i = 0; i < ldim; i++) {
    iglobal = i + low;
    v       = (PetscScalar)(i + 100 * rank);
    PetscCall(VecSetValues(u, 1, &iglobal, &v, INSERT_VALUES));
  }
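  /* b and x were created with VecDuplicate() from u, so all three vectors share the same parallel layout; x's ownership range is therefore valid for u as well */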

  /*
     Assemble vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transit,
     by placing code between these two statements.
  */
  PetscCall(VecAssemblyBegin(u));
  PetscCall(VecAssemblyEnd(u));

  /* Compute right-hand-side vector */
  PetscCall(MatMult(C, u, b));
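  /* since b = C*u, the vector u is the exact solution of C x = b and is used below to compute the error */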

  PetscCall(MatGetOrdering(C, MATORDERINGNATURAL, &perm, &iperm));
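  /* natural ordering: perm and iperm are identity row and column permutations, passed to the symbolic factorization */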
  its_max = 2000;
  PetscCall(MatFactorInfoInitialize(&factinfo)); /* set the factorization options in factinfo to their defaults */
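  /*
     Factor and solve repeatedly, destroying the factor each time.
     Each pass creates a factor matrix with MatGetFactor(), performs the
     symbolic and numeric LU factorizations, solves C x = b, and frees
     the factor with MatDestroy(); if all memory is released correctly,
     memory usage should not grow with the iteration count.
  */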
  for (i = 0; i < its_max; i++) {
    PetscCall(MatGetFactor(C, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
    PetscCall(MatLUFactorSymbolic(F, C, perm, iperm, &factinfo));
    for (j = 0; j < 1; j++) PetscCall(MatLUFactorNumeric(F, C, &factinfo));
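    /* the inner bound is 1, so the numeric factorization runs once per symbolic factorization; raising it would repeat only the numeric phase */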
    PetscCall(MatSolve(F, b, x));
    PetscCall(MatDestroy(&F));
  }
  PetscCall(ISDestroy(&perm));
  PetscCall(ISDestroy(&iperm));

  /* Check the error */
  PetscCall(VecAXPY(x, none, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g\n", (double)norm));
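  /* with a direct LU solve the error norm should be near machine precision (scaled by the conditioning of C) */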

  /* Free work space. */
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&C));
  PetscCall(PetscFinalize());
  return 0;
}