Actual source code: ex4.c
static char help[] = "Tests various 2-dimensional DMDA routines.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
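
/*
   Command-line options read below:
     -NX <M>, -NY <N>        global grid dimensions (defaults 10 x 8)
     -m <m>, -n <n>          number of processes in x and y
     -s <s>                  stencil width (default 2)
     -w <w>                  degrees of freedom per node (default 2)
     -xperiodic, -yperiodic  periodic boundaries
     -xghosted, -yghosted    ghosted boundaries
     -star, -box             stencil type
     -testorder              test application/PETSc ordering mappings
     -distribute             prescribe ownership ranges (requires -m and -n)
     -local_print            print each processor's local vector
*/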

int main(int argc, char **argv)
{
  PetscMPIInt     rank;
  PetscInt        M = 10, N = 8, m = PETSC_DECIDE;
  PetscInt        s = 2, w = 2, n = PETSC_DECIDE, nloc, l, i, j, kk;
  PetscInt        Xs, Xm, Ys, Ym, iloc, *iglobal;
  const PetscInt *ltog;
  PetscInt       *lx = NULL, *ly = NULL;
  PetscBool       testorder = PETSC_FALSE, flg;
  DMBoundaryType  bx = DM_BOUNDARY_NONE, by = DM_BOUNDARY_NONE;
  DM              da;
  PetscViewer     viewer;
  Vec             local, global;
  PetscScalar     value;
  DMDAStencilType st = DMDA_STENCIL_BOX;
  AO              ao;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 300, 0, 400, 400, &viewer));

  /* Read options */
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-NX", &M, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-NY", &N, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-s", &s, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-w", &w, NULL));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-xperiodic", &flg, NULL));
  if (flg) bx = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-yperiodic", &flg, NULL));
  if (flg) by = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-xghosted", &flg, NULL));
  if (flg) bx = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-yghosted", &flg, NULL));
  if (flg) by = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-star", &flg, NULL));
  if (flg) st = DMDA_STENCIL_STAR;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-box", &flg, NULL));
  if (flg) st = DMDA_STENCIL_BOX;
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-testorder", &testorder, NULL));
  /*
     Test prescribing the ownership ranges: four nodes in x and two nodes in y
     on each process, except the last process in each direction, which gets the rest.
  */
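  /* For example (a hypothetical run): -distribute -m 3 -NX 10 yields x ownership
     {4, 4, 2}, and -n 4 -NY 8 yields y ownership {2, 2, 2, 2}. */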
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-distribute", &flg, NULL));
  if (flg) {
    PetscCheck(m != PETSC_DECIDE, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must set -m option with -distribute option");
    PetscCall(PetscMalloc1(m, &lx));
    for (i = 0; i < m - 1; i++) lx[i] = 4;
    lx[m - 1] = M - 4 * (m - 1);
    PetscCheck(n != PETSC_DECIDE, PETSC_COMM_WORLD, PETSC_ERR_USER_INPUT, "Must set -n option with -distribute option");
    PetscCall(PetscMalloc1(n, &ly));
    for (i = 0; i < n - 1; i++) ly[i] = 2;
    ly[n - 1] = N - 2 * (n - 1);
  }
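
  /* DMDACreate2d argument order (for reference): communicator, x and y boundary
     types, stencil type, global sizes M and N, process-grid sizes m and n,
     degrees of freedom w, stencil width s, and ownership ranges lx and ly */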

  /* Create distributed array and get vectors */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, bx, by, st, M, N, m, n, w, s, lx, ly, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(PetscFree(lx));
  PetscCall(PetscFree(ly));

  PetscCall(DMView(da, viewer));
  PetscCall(DMCreateGlobalVector(da, &global));
  PetscCall(DMCreateLocalVector(da, &local));

  /* Set global vector; send ghost points to local vectors */
  value = 1;
  PetscCall(VecSet(global, value));
  PetscCall(DMGlobalToLocalBegin(da, global, INSERT_VALUES, local));
  PetscCall(DMGlobalToLocalEnd(da, global, INSERT_VALUES, local));
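  /* (the Begin/End split lets unrelated computation overlap the ghost-point
     communication) */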

  /* Scale local vectors according to processor rank; pass to global vector */
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  value = rank;
  PetscCall(VecScale(local, value));
  PetscCall(DMLocalToGlobalBegin(da, local, INSERT_VALUES, global));
  PetscCall(DMLocalToGlobalEnd(da, local, INSERT_VALUES, global));
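  /* (with INSERT_VALUES each process simply copies its owned values back to the
     global vector; ADD_VALUES would instead accumulate ghost contributions into
     the owning process) */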

  if (!testorder) { /* turn off printing when testing ordering mappings */
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nGlobal Vectors:\n"));
    PetscCall(VecView(global, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\n\n"));
  }

  /* Send ghost points to local vectors */
  PetscCall(DMGlobalToLocalBegin(da, global, INSERT_VALUES, local));
  PetscCall(DMGlobalToLocalEnd(da, global, INSERT_VALUES, local));

  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-local_print", &flg, NULL));
  if (flg) {
    PetscViewer sviewer;

    PetscCall(PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\nLocal Vector: processor %d\n", rank));
    PetscCall(PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &sviewer));
    PetscCall(VecView(local, sviewer));
    PetscCall(PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &sviewer));
    PetscCall(PetscViewerFlush(PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD));
  }
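  /* (the sub-viewer pattern gives each rank a sequential view of its local
     vector so the per-process output is not interleaved) */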

  /* Tests mappings between application/PETSc orderings */
  if (testorder) {
    ISLocalToGlobalMapping ltogm;

    PetscCall(DMGetLocalToGlobalMapping(da, &ltogm));
    PetscCall(ISLocalToGlobalMappingGetSize(ltogm, &nloc));
    PetscCall(ISLocalToGlobalMappingGetIndices(ltogm, &ltog));
    PetscCall(DMDAGetGhostCorners(da, &Xs, &Ys, NULL, &Xm, &Ym, NULL));
    PetscCall(DMDAGetAO(da, &ao));
    PetscCall(PetscMalloc1(nloc, &iglobal));

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
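    /* (iloc below is the first dof index of node (i,j): nodes are numbered
       lexicographically over the ghosted local patch, with w dof per node) */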
    kk = 0;
    for (j = Ys; j < Ys + Ym; j++) {
      for (i = Xs; i < Xs + Xm; i++) {
        iloc = w * ((j - Ys) * Xm + i - Xs);
        for (l = 0; l < w; l++) iglobal[kk++] = ltog[iloc + l];
      }
    }

    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y) */
    PetscCall(AOPetscToApplication(ao, nloc, iglobal));

    /* Then map the application ordering back to the PETSc DMDA ordering */
    PetscCall(AOApplicationToPetsc(ao, nloc, iglobal));
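    /* After the round trip, iglobal should again hold the PETSc (DMDA) ordering,
       so it must match ltog entry by entry */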

    /* Verify the mappings */
    kk = 0;
    for (j = Ys; j < Ys + Ym; j++) {
      for (i = Xs; i < Xs + Xm; i++) {
        iloc = w * ((j - Ys) * Xm + i - Xs);
        for (l = 0; l < w; l++) {
          if (iglobal[kk] != ltog[iloc + l]) {
            PetscCall(PetscFPrintf(PETSC_COMM_SELF, stdout, "[%d] Problem with mapping: j=%" PetscInt_FMT ", i=%" PetscInt_FMT ", l=%" PetscInt_FMT ", petsc1=%" PetscInt_FMT ", petsc2=%" PetscInt_FMT "\n", rank, j, i, l, ltog[iloc + l], iglobal[kk]));
          }
          kk++;
        }
      }
    }
    PetscCall(PetscFree(iglobal));
    PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogm, &ltog));
  }

  /* Free memory */
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(VecDestroy(&local));
  PetscCall(VecDestroy(&global));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}
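
/* Example invocation (hypothetical; adjust the process count and sizes as desired):
     mpiexec -n 4 ./ex4 -NX 16 -NY 12 -m 2 -n 2 -testorder
*/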

/*TEST

   test:
      nsize: 4
      args: -nox
      filter: grep -v -i Object
      requires: x

   test:
      suffix: 2
      args: -testorder -nox
      requires: x

TEST*/