Actual source code: ex111.c


static char help[] = "Tests sequential and parallel MatMatMatMult() and MatPtAP(). Modified from ex96.c \n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -ratio <r>, where <r> = ratio of fine to coarse grid lines\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\n";

/*
    Example of usage: mpiexec -n 3 ./ex111 -Mx 10 -My 10 -Mz 10
*/
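/*
    Overview: the test builds a fine-grid operator A, an interpolation matrix P
    from the coarse grid to the fine grid, and its explicit transpose R = P^T.
    It then forms the triple product C = R*A*P with three different APIs (the
    MatProduct developer interface, MatMatMatMult(), and MatPtAP()) and checks
    that all results agree.
*/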

#include <petscdm.h>
#include <petscdmda.h>

/* User-defined application contexts (carried over from ex96.c; not every field is used in this test) */
typedef struct {
  PetscInt mx, my, mz;     /* number of grid points in the x, y and z directions */
  Vec      localX, localF; /* local vectors with ghost region */
  DM       da;
  Vec      x, b, r; /* global vectors */
  Mat      J;       /* Jacobian on grid */
} GridCtx;
typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  PetscInt ratio;
  Mat      Ii; /* interpolation from coarse to fine */
} AppCtx;

#define COARSE_LEVEL 0
#define FINE_LEVEL   1

/*
      ratio - ratio of grid lines between fine and coarse grids.
*/
int main(int argc, char **argv)
{
  AppCtx          user;
  PetscMPIInt     size, rank;
  PetscInt        m, n, M, N, i, nrows;
  PetscScalar     one  = 1.0;
  PetscReal       fill = 2.0; /* expected fill ratio passed to the symbolic products for preallocation */
  Mat             A, P, R, C, PtAP, D;
  PetscScalar    *array;
  PetscRandom     rdm;
  PetscBool       Test_3D = PETSC_FALSE, flg;
  const PetscInt *ia, *ja;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  /* Get size of fine grids and coarse grids */
  user.ratio     = 2;
  user.coarse.mx = 4;
  user.coarse.my = 4;
  user.coarse.mz = 4;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Mx", &user.coarse.mx, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-My", &user.coarse.my, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Mz", &user.coarse.mz, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ratio", &user.ratio, NULL));
  if (user.coarse.mz) Test_3D = PETSC_TRUE; /* the default -Mz 4 selects the 3D test; pass -Mz 0 to run the 2D path */

  /* Refinement by ratio r turns m coarse grid lines into r*(m-1)+1 fine grid lines, so the domain endpoints coincide */
  user.fine.mx = user.ratio * (user.coarse.mx - 1) + 1;
  user.fine.my = user.ratio * (user.coarse.my - 1) + 1;
  user.fine.mz = user.ratio * (user.coarse.mz - 1) + 1;

  if (rank == 0) {
    if (!Test_3D) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "coarse grids: %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT "\n", user.coarse.mx, user.coarse.my, user.fine.mx, user.fine.my));
    } else {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "coarse grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", user.coarse.mx, user.coarse.my, user.coarse.mz, user.fine.mx,
                            user.fine.my, user.fine.mz));
    }
  }

  /* Set up distributed array for fine grid */
  if (!Test_3D) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.fine.mx, user.fine.my, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &user.fine.da));
  } else {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.fine.mx, user.fine.my, user.fine.mz, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, NULL, &user.fine.da));
  }
  PetscCall(DMSetFromOptions(user.fine.da));
  PetscCall(DMSetUp(user.fine.da));

  /* Create the matrix A on the fine grid */
  PetscCall(DMSetMatType(user.fine.da, MATAIJ));
  PetscCall(DMCreateMatrix(user.fine.da, &A));
  PetscCall(MatGetLocalSize(A, &m, &n));
  PetscCall(MatGetSize(A, &M, &N));

  /* Set every stored entry of A to one (these could instead be random values drawn from rdm) */
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rdm));
  PetscCall(PetscRandomSetFromOptions(rdm));
  if (size == 1) {
    PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
    if (flg) {
      PetscCall(MatSeqAIJGetArray(A, &array));
      for (i = 0; i < ia[nrows]; i++) array[i] = one; /* ia[nrows] = total number of stored nonzeros */
      PetscCall(MatSeqAIJRestoreArray(A, &array));
    }
    PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
  } else {
    /* Each process of an MPIAIJ matrix stores a diagonal block AA and an off-diagonal block AB, both in SeqAIJ format, so fill both */
    Mat AA, AB;
    PetscCall(MatMPIAIJGetSeqAIJ(A, &AA, &AB, NULL));
    PetscCall(MatGetRowIJ(AA, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
    if (flg) {
      PetscCall(MatSeqAIJGetArray(AA, &array));
      for (i = 0; i < ia[nrows]; i++) array[i] = one;
      PetscCall(MatSeqAIJRestoreArray(AA, &array));
    }
    PetscCall(MatRestoreRowIJ(AA, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
    PetscCall(MatGetRowIJ(AB, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
    if (flg) {
      PetscCall(MatSeqAIJGetArray(AB, &array));
      for (i = 0; i < ia[nrows]; i++) array[i] = one;
      PetscCall(MatSeqAIJRestoreArray(AB, &array));
    }
    PetscCall(MatRestoreRowIJ(AB, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
  }
  /* Set up distributed array for coarse grid */
  if (!Test_3D) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.coarse.mx, user.coarse.my, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &user.coarse.da));
  } else {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.coarse.mx, user.coarse.my, user.coarse.mz, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, NULL, &user.coarse.da));
  }
  PetscCall(DMSetFromOptions(user.coarse.da));
  PetscCall(DMSetUp(user.coarse.da));

  /* Create interpolation between the fine and coarse grids */
  PetscCall(DMCreateInterpolation(user.coarse.da, user.fine.da, &P, NULL));
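  /* P maps coarse-grid vectors to the fine grid; the optional scaling vector (final argument) is not needed here */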

  /* Get R = P^T */
  PetscCall(MatTranspose(P, MAT_INITIAL_MATRIX, &R));
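  /* R serves as the restriction operator, so R*A*P below is algebraically the same as the P^T*A*P formed by MatPtAP() */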

  /* C = R*A*P */
  /* Developer's API */
  PetscCall(MatProductCreate(R, A, P, &D));
  PetscCall(MatProductSetType(D, MATPRODUCT_ABC));
  PetscCall(MatProductSetFromOptions(D));
  PetscCall(MatProductSymbolic(D));
  PetscCall(MatProductNumeric(D));
  PetscCall(MatProductNumeric(D)); /* Test reuse of the symbolic phase of D */
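  /* MatProductSymbolic() determines the nonzero structure of D once; MatProductNumeric()
     fills in the values and may be called again whenever the values (not the structure)
     of R, A, or P change */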

  /* User's API */
  { /* Test MatMatMatMult_Basic() */
    Mat Adense, Cdense;
    PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense));
    PetscCall(MatMatMatMult(R, Adense, P, MAT_INITIAL_MATRIX, fill, &Cdense));
    PetscCall(MatMatMatMult(R, Adense, P, MAT_REUSE_MATRIX, fill, &Cdense));

    /* MatMultEqual() applies both matrices to 10 random vectors and compares the results */
    PetscCall(MatMultEqual(D, Cdense, 10, &flg));
    PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "D*v != Cdense*v");
    PetscCall(MatDestroy(&Adense));
    PetscCall(MatDestroy(&Cdense));
  }

  PetscCall(MatMatMatMult(R, A, P, MAT_INITIAL_MATRIX, fill, &C));
  PetscCall(MatMatMatMult(R, A, P, MAT_REUSE_MATRIX, fill, &C)); /* reuse the symbolic data of C, recomputing only the values */
  PetscCall(MatProductClear(C));
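  /* MatProductClear() releases the internal product data attached to C; C keeps its
     current values but is no longer associated with R, A, and P */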

  /* Test D == C */
  PetscCall(MatEqual(D, C, &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "D != C");

  /* Test C == PtAP, where MatPtAP() forms P^T*A*P directly */
  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, fill, &PtAP));
  PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, fill, &PtAP));
  PetscCall(MatEqual(C, PtAP, &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "C != PtAP");
  PetscCall(MatDestroy(&PtAP));

  /* Clean up */
  PetscCall(MatDestroy(&A));
  PetscCall(PetscRandomDestroy(&rdm));
  PetscCall(DMDestroy(&user.fine.da));
  PetscCall(DMDestroy(&user.coarse.da));
  PetscCall(MatDestroy(&P));
  PetscCall(MatDestroy(&R));
  PetscCall(MatDestroy(&C));
  PetscCall(MatDestroy(&D));
  PetscCall(PetscFinalize());
  return 0;
}
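
/*
   The blocks below drive the PETSc test harness: the second and third tests run on two
   MPI ranks and select different MatMatMatMult() algorithms via -matmatmatmult_via; the
   third reuses the reference output of the first test (output/ex111_1.out).
*/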

/*TEST

   test:

   test:
      suffix: 2
      nsize: 2
      args: -matmatmatmult_via scalable

   test:
      suffix: 3
      nsize: 2
      args: -matmatmatmult_via nonscalable
      output_file: output/ex111_1.out

TEST*/