Actual source code: ex33.c
static char help[] = "Test memory scalability of MatMatMult() for AIJ and DENSE matrices. \n\
                      Modified from the code contributed by Ian Lin <iancclin@umich.edu> \n\n";

/*
  Example:
    mpiexec -n <np> ./ex33 -mem_view -matmatmult_Bbn <Bbn>
*/

#include <petsc.h>
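
/* Report the maximum and minimum current memory usage, in megabytes, across all MPI processes (printed from rank 0) */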
PetscErrorCode Print_memory(PetscLogDouble mem)
{
  double max_mem, min_mem;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Reduce(&mem, &max_mem, 1, MPI_DOUBLE, MPI_MAX, 0, MPI_COMM_WORLD));
  PetscCallMPI(MPI_Reduce(&mem, &min_mem, 1, MPI_DOUBLE, MPI_MIN, 0, MPI_COMM_WORLD));
  max_mem = max_mem / 1024.0 / 1024.0;
  min_mem = min_mem / 1024.0 / 1024.0;
  PetscCall(PetscPrintf(MPI_COMM_WORLD, " max and min memory across all processors %.4f MB, %.4f MB.\n", (double)max_mem, (double)min_mem));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Illustrate how to use MPI derived data types.
  This can save memory significantly. See MatMPIDenseScatter().
*/
PetscErrorCode TestMPIDerivedDataType()
{
  MPI_Datatype type1, type2, rtype1, rtype2;
  PetscInt     i, j;
  PetscScalar  buffer[24]; /* An array of 4 rows, 6 cols */
  MPI_Status   status;
  PetscMPIInt  rank, size, disp[2];

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_size(MPI_COMM_WORLD, &size));
  PetscCheck(size >= 2, PETSC_COMM_SELF, PETSC_ERR_WRONG_MPI_SIZE, "Must use at least 2 processors");
  PetscCallMPI(MPI_Comm_rank(MPI_COMM_WORLD, &rank));

  if (rank == 0) {
    /* proc[0] sends 2 rows to proc[1] */
    for (i = 0; i < 24; i++) buffer[i] = (PetscScalar)i;

    disp[0] = 0;
    disp[1] = 2;
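    /* type1 picks 2 blocks of 1 scalar at offsets disp[0] = 0 and disp[1] = 2 within a column, i.e. rows 0 and 2 */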
    PetscCallMPI(MPI_Type_create_indexed_block(2, 1, (const PetscMPIInt *)disp, MPIU_SCALAR, &type1));
    /* one column has 4 entries; resizing the extent to one full column lets a send count of 6 step through all 6 columns */
    PetscCallMPI(MPI_Type_create_resized(type1, 0, 4 * sizeof(PetscScalar), &type2));
    PetscCallMPI(MPI_Type_commit(&type2));
    PetscCallMPI(MPI_Send(buffer, 6, type2, 1, 123, MPI_COMM_WORLD));

  } else if (rank == 1) {
    /* proc[1] receives 2 rows from proc[0] and puts them into contiguous rows, starting at row 1 (disp[0]) */
    PetscInt blen = 2;
    for (i = 0; i < 24; i++) buffer[i] = 0.0;

    disp[0] = 1;
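    /* rtype1 is a single block of blen = 2 scalars at offset disp[0] = 1 within a column, i.e. rows 1 and 2 */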
    PetscCallMPI(MPI_Type_create_indexed_block(1, blen, (const PetscMPIInt *)disp, MPIU_SCALAR, &rtype1));
    PetscCallMPI(MPI_Type_create_resized(rtype1, 0, 4 * sizeof(PetscScalar), &rtype2));
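
    /* as on the sender, the extent is resized to one full column (4 scalars) so that a receive count of 6 strides column by column */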
    PetscCallMPI(MPI_Type_commit(&rtype2));
    PetscCallMPI(MPI_Recv(buffer, 6, rtype2, 0, 123, MPI_COMM_WORLD, &status));
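    /* the 4 x 6 buffer is stored column-major; after the receive, rows 1 and 2 of each column hold the incoming values */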
    for (i = 0; i < 4; i++) {
      for (j = 0; j < 6; j++) PetscCall(PetscPrintf(MPI_COMM_SELF, " %g", (double)PetscRealPart(buffer[i + j * 4])));
      PetscCall(PetscPrintf(MPI_COMM_SELF, "\n"));
    }
  }

  if (rank == 0) {
    PetscCallMPI(MPI_Type_free(&type1));
    PetscCallMPI(MPI_Type_free(&type2));
  } else if (rank == 1) {
    PetscCallMPI(MPI_Type_free(&rtype1));
    PetscCallMPI(MPI_Type_free(&rtype2));
  }
  PetscCallMPI(MPI_Barrier(MPI_COMM_WORLD));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **args)
{
  PetscInt       mA = 2700, nX = 80, nz = 40;
  /* PetscInt    mA = 6, nX = 5, nz = 2; // small test */
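  /* mA: global dimension of the square AIJ matrix A; nX: number of columns of the dense matrix X; nz: nonzeros preallocated per row of A */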
  PetscLogDouble mem;
  Mat            A, X, Y;
  PetscBool      flg = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_mpiderivedtype", &flg, NULL));
  if (flg) {
    PetscCall(TestMPIDerivedDataType());
    PetscCall(PetscFinalize());
    return 0;
  }

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-mem_view", &flg, NULL));
  PetscCall(PetscMemoryGetCurrentUsage(&mem));
  if (flg) {
    PetscCall(PetscPrintf(MPI_COMM_WORLD, "Before start,"));
    PetscCall(Print_memory(mem));
  }

  PetscCall(MatCreateAIJ(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, mA, mA, nz, NULL, nz, NULL, &A));
  PetscCall(MatSetRandom(A, NULL));
  PetscCall(PetscMemoryGetCurrentUsage(&mem));
  if (flg) {
    PetscCall(PetscPrintf(MPI_COMM_WORLD, "After creating A,"));
    PetscCall(Print_memory(mem));
  }

  PetscCall(MatCreateDense(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, mA, nX, NULL, &X));
  PetscCall(MatSetRandom(X, NULL));
  PetscCall(PetscMemoryGetCurrentUsage(&mem));
  if (flg) {
    PetscCall(PetscPrintf(MPI_COMM_WORLD, "After creating X,"));
    PetscCall(Print_memory(mem));
  }
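
  /* Y = A*X; the -matmatmult_Bbn <Bbn> option shown in the Example above can be used to tune how many columns of X are processed per block, which affects the memory footprint of the product */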
  PetscCall(MatMatMult(A, X, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Y));
  PetscCall(PetscMemoryGetCurrentUsage(&mem));
  if (flg) {
    PetscCall(PetscPrintf(MPI_COMM_WORLD, "After MatMatMult,"));
    PetscCall(Print_memory(mem));
  }

  /* Test reuse: recompute Y = A*X in place with MAT_REUSE_MATRIX */
  PetscCall(MatMatMult(A, X, MAT_REUSE_MATRIX, PETSC_DEFAULT, &Y));
  PetscCall(PetscMemoryGetCurrentUsage(&mem));
  if (flg) {
    PetscCall(PetscPrintf(MPI_COMM_WORLD, "After reuse MatMatMult,"));
    PetscCall(Print_memory(mem));
  }

  /* Check accuracy */
  PetscCall(MatMatMultEqual(A, X, Y, 10, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_ARG_NOTSAMETYPE, "Error in MatMatMult()");

  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&X));
  PetscCall(MatDestroy(&Y));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      suffix: 1
      nsize: 4
      output_file: output/ex33.out

   test:
      suffix: 2
      nsize: 8
      output_file: output/ex33.out

   test:
      suffix: 3
      nsize: 2
      args: -test_mpiderivedtype
      output_file: output/ex33_3.out

TEST*/