Actual source code: ex14.c
static char help[] = "Tests DMCreateDomainDecomposition.\n\n";

/*
   Use the options
      -da_grid_x <nx>       - number of grid points in x direction, if M < 0
      -da_grid_y <ny>       - number of grid points in y direction, if N < 0
      -da_processors_x <MX> - number of processors in x direction
      -da_processors_y <MY> - number of processors in y direction
*/

#include <petscdm.h>
#include <petscdmda.h>
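
/*
   FillLocalSubdomain - fill a DMDA vector with recognizable values over the locally owned
   grid points: component 0 gets the global i index, component 1 the global j index, and
   component 2 the k index (3d) or the MPI rank (2d), so the scatters exercised below can be
   checked by inspecting the viewer output.
*/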
PetscErrorCode FillLocalSubdomain(DM da, Vec gvec)
{
  DMDALocalInfo info;
  PetscMPIInt   rank;
  PetscInt      i, j, k;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(DMDAGetLocalInfo(da, &info));

  if (info.dim == 3) {
    PetscScalar ***g;
    PetscCall(DMDAVecGetArray(da, gvec, &g));
    /* loop over the locally owned grid points */
    for (k = info.zs; k < info.zs + info.zm; k++) {
      for (j = info.ys; j < info.ys + info.ym; j++) {
        for (i = info.xs; i < info.xs + info.xm; i++) {
          g[k][j][info.dof * i + 0] = i;
          g[k][j][info.dof * i + 1] = j;
          g[k][j][info.dof * i + 2] = k;
        }
      }
    }
    PetscCall(DMDAVecRestoreArray(da, gvec, &g));
  }
  if (info.dim == 2) {
    PetscScalar **g;
    PetscCall(DMDAVecGetArray(da, gvec, &g));
    /* loop over the locally owned grid points */
    for (j = info.ys; j < info.ys + info.ym; j++) {
      for (i = info.xs; i < info.xs + info.xm; i++) {
        g[j][info.dof * i + 0] = i;
        g[j][info.dof * i + 1] = j;
        g[j][info.dof * i + 2] = rank;
      }
    }
    PetscCall(DMDAVecRestoreArray(da, gvec, &g));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  DM            da, *subda;
  PetscInt      i, dim = 3;
  PetscInt      M = 25, N = 25, P = 25;
  PetscMPIInt   size, rank;
  Vec           v;
  Vec           slvec, sgvec;
  IS           *ois, *iis;
  VecScatter    oscata;
  VecScatter   *iscat, *oscat, *gscat;
  DMDALocalInfo info;
  PetscBool     patchis_offproc = PETSC_TRUE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-dim", &dim, NULL));

  /* Create distributed array and get vectors */
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  if (dim == 2) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, M, N, PETSC_DECIDE, PETSC_DECIDE, 3, 1, NULL, NULL, &da));
  } else if (dim == 3) {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, M, N, P, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 3, 1, NULL, NULL, NULL, &da));
  }
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDAGetLocalInfo(da, &info));
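
  /* Create the DMDA's domain decomposition: subdomain DMs (here one per MPI rank) together with
     index sets giving the inner (non-overlapping) and outer (overlapping) region of each subdomain
     in the global numbering of da */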
  PetscCall(DMCreateDomainDecomposition(da, NULL, NULL, &iis, &ois, &subda));
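  /* Build the scatters from the global vector of da to each subdomain:
       iscat - to the interior (non-overlapping) part of the subdomain's global vector
       oscat - to the overlapping part of the subdomain's global vector
       gscat - to the subdomain's ghosted local vector */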
  PetscCall(DMCreateDomainDecompositionScatters(da, 1, subda, &iscat, &oscat, &gscat));

  {
    DMDALocalInfo subinfo;
    MatStencil    lower, upper;
    IS            patchis;
    Vec           smallvec;
    Vec           largevec;
    VecScatter    patchscat;

    PetscCall(DMDAGetLocalInfo(subda[0], &subinfo));
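
    /* the patch corners are this rank's owned subgrid of the original DMDA */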
    lower.i = info.xs;
    lower.j = info.ys;
    lower.k = info.zs;
    upper.i = info.xs + info.xm;
    upper.j = info.ys + info.ym;
    upper.k = info.zs + info.zm;

    /* test the patch IS as a thing to scatter to/from */
    PetscCall(DMDACreatePatchIS(da, &lower, &upper, &patchis, patchis_offproc));
    PetscCall(DMGetGlobalVector(da, &largevec));
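
    /* a sequential vector sized to the patch (dof * number of patch nodes) and a scatter that
       maps its entries onto the patchis-indexed entries of the global vector */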
    PetscCall(VecCreate(PETSC_COMM_SELF, &smallvec));
    PetscCall(VecSetSizes(smallvec, info.dof * (upper.i - lower.i) * (upper.j - lower.j) * (upper.k - lower.k), PETSC_DECIDE));
    PetscCall(VecSetFromOptions(smallvec));
    PetscCall(VecScatterCreate(smallvec, NULL, largevec, patchis, &patchscat));

    PetscCall(FillLocalSubdomain(subda[0], smallvec));
    PetscCall(VecSet(largevec, 0));

    PetscCall(VecScatterBegin(patchscat, smallvec, largevec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(patchscat, smallvec, largevec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(ISView(patchis, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(VecScatterView(patchscat, PETSC_VIEWER_STDOUT_WORLD));

    for (i = 0; i < size; i++) {
      if (i == rank) PetscCall(VecView(smallvec, PETSC_VIEWER_STDOUT_SELF));
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    PetscCall(VecView(largevec, PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(VecDestroy(&smallvec));
    PetscCall(DMRestoreGlobalVector(da, &largevec));
    PetscCall(ISDestroy(&patchis));
    PetscCall(VecScatterDestroy(&patchscat));
  }

  /* view the various parts */
  {
    for (i = 0; i < size; i++) {
      if (i == rank) {
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "Processor %d: \n", i));
        PetscCall(DMView(subda[0], PETSC_VIEWER_STDOUT_SELF));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

    PetscCall(DMGetLocalVector(subda[0], &slvec));
    PetscCall(DMGetGlobalVector(subda[0], &sgvec));
    PetscCall(DMGetGlobalVector(da, &v));

    /* test filling outer between the big DM and the small ones with the IS scatter */
    PetscCall(VecScatterCreate(v, ois[0], sgvec, NULL, &oscata));

    PetscCall(FillLocalSubdomain(subda[0], sgvec));
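
    /* SCATTER_REVERSE pushes the subdomain values in sgvec back into the corresponding
       (overlapping) entries of the full global vector v */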
    PetscCall(VecScatterBegin(oscata, sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscata, sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    /* test the local-to-local scatter */

    /* fill up the local subdomain and then add them together */
    PetscCall(FillLocalSubdomain(da, v));
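
    /* forward scatter with gscat: gather from the global vector v into the subdomain's ghosted local vector */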
    PetscCall(VecScatterBegin(gscat[0], v, slvec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(gscat[0], v, slvec, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test ghost scattering backwards */

    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(gscat[0], slvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(gscat[0], slvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test overlap scattering backwards */
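
    /* assemble the subdomain's ghosted local vector into its global vector, then add the
       overlapping region back into v through oscat */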
    PetscCall(DMLocalToGlobalBegin(subda[0], slvec, ADD_VALUES, sgvec));
    PetscCall(DMLocalToGlobalEnd(subda[0], slvec, ADD_VALUES, sgvec));

    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(oscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test interior scattering backwards */

    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(iscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(iscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test matrix allocation */
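    /* each rank builds the AIJ matrix preallocated for its own subdomain DM and views it */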
    for (i = 0; i < size; i++) {
      if (i == rank) {
        Mat m;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "Processor %d: \n", i));
        PetscCall(DMSetMatType(subda[0], MATAIJ));
        PetscCall(DMCreateMatrix(subda[0], &m));
        PetscCall(MatView(m, PETSC_VIEWER_STDOUT_SELF));
        PetscCall(MatDestroy(&m));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }
    PetscCall(DMRestoreLocalVector(subda[0], &slvec));
    PetscCall(DMRestoreGlobalVector(subda[0], &sgvec));
    PetscCall(DMRestoreGlobalVector(da, &v));
  }
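
  /* the arrays returned by DMCreateDomainDecomposition and DMCreateDomainDecompositionScatters
     are owned by the caller: destroy their entries, then free the arrays themselves */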
  PetscCall(DMDestroy(&subda[0]));
  PetscCall(ISDestroy(&ois[0]));
  PetscCall(ISDestroy(&iis[0]));

  PetscCall(VecScatterDestroy(&iscat[0]));
  PetscCall(VecScatterDestroy(&oscat[0]));
  PetscCall(VecScatterDestroy(&gscat[0]));
  PetscCall(VecScatterDestroy(&oscata));

  PetscCall(PetscFree(iscat));
  PetscCall(PetscFree(oscat));
  PetscCall(PetscFree(gscat));

  PetscCall(PetscFree(subda));
  PetscCall(PetscFree(ois));
  PetscCall(PetscFree(iis));

  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}