Actual source code: dagtol.c

/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc/private/dmdaimpl.h>

/* Begins the forward scatter of a DMDA global vector into a local (ghosted) vector; completed by DMGlobalToLocalEnd_DA() */
PetscErrorCode DMGlobalToLocalBegin_DA(DM da, Vec g, InsertMode mode, Vec l)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(dd->gtol, g, l, mode, SCATTER_FORWARD));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Completes the global-to-local scatter begun by DMGlobalToLocalBegin_DA() */
PetscErrorCode DMGlobalToLocalEnd_DA(DM da, Vec g, InsertMode mode, Vec l)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCall(VecScatterEnd(dd->gtol, g, l, mode, SCATTER_FORWARD));
  PetscFunctionReturn(PETSC_SUCCESS);
}
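
/*
  Illustrative usage sketch (not part of the library source): the two routines
  above back the DM-level DMGlobalToLocalBegin()/DMGlobalToLocalEnd() interface
  for a DMDA, i.e. the ghost-point update from a global vector into a local
  (ghosted) vector. The helper name below and the assumption that `da` is an
  already-created DMDA are illustrative only; the block is guarded by #if 0 so
  it is not compiled.
*/
#if 0
static PetscErrorCode ExampleGlobalToLocal(DM da)
{
  Vec g, l;

  PetscFunctionBegin;
  PetscCall(DMCreateGlobalVector(da, &g)); /* one owned entry per grid point */
  PetscCall(DMCreateLocalVector(da, &l));  /* owned entries plus ghost points */
  PetscCall(VecSet(g, 1.0));
  /* INSERT_VALUES copies the owned entries and fills the ghost points of l */
  PetscCall(DMGlobalToLocalBegin(da, g, INSERT_VALUES, l));
  PetscCall(DMGlobalToLocalEnd(da, g, INSERT_VALUES, l));
  PetscCall(VecDestroy(&g));
  PetscCall(VecDestroy(&l));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif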

/* Begins the reverse scatter of a local (ghosted) vector into a DMDA global vector; ADD_VALUES accumulates ghost contributions, INSERT_VALUES is supported only in the restricted cases checked below */
PetscErrorCode DMLocalToGlobalBegin_DA(DM da, Vec l, InsertMode mode, Vec g)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  if (mode == ADD_VALUES) {
    PetscCall(VecScatterBegin(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE));
  } else if (mode == INSERT_VALUES) {
    PetscCheck(dd->bx == DM_BOUNDARY_GHOSTED || dd->bx == DM_BOUNDARY_NONE || dd->s <= 0 || dd->m != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in x direction");
    PetscCheck(dd->bx == DM_BOUNDARY_GHOSTED || dd->by == DM_BOUNDARY_NONE || dd->s <= 0 || dd->n != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in y direction");
    PetscCheck(dd->bx == DM_BOUNDARY_GHOSTED || dd->bz == DM_BOUNDARY_NONE || dd->s <= 0 || dd->p != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in z direction");
    PetscCall(VecScatterBegin(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL));
  } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Completes the local-to-global scatter begun by DMLocalToGlobalBegin_DA() */
PetscErrorCode DMLocalToGlobalEnd_DA(DM da, Vec l, InsertMode mode, Vec g)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  if (mode == ADD_VALUES) {
    PetscCall(VecScatterEnd(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE));
  } else if (mode == INSERT_VALUES) {
    PetscCall(VecScatterEnd(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL));
  } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
  PetscFunctionReturn(PETSC_SUCCESS);
}
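
/*
  Illustrative usage sketch (not part of the library source): with ADD_VALUES,
  DMLocalToGlobalBegin()/End() sum the entries of a local (ghosted) vector,
  including ghost contributions, back into the unique global vector, which is
  the usual pattern after element-wise assembly. INSERT_VALUES is supported
  only in the restricted cases checked above. The helper name is an assumption;
  the block is guarded by #if 0 so it is not compiled.
*/
#if 0
static PetscErrorCode ExampleLocalToGlobalAdd(DM da, Vec l, Vec g)
{
  PetscFunctionBegin;
  PetscCall(VecSet(g, 0.0));
  /* every process adds its owned and ghost entries of l into g */
  PetscCall(DMLocalToGlobalBegin(da, l, ADD_VALUES, g));
  PetscCall(DMLocalToGlobalEnd(da, l, ADD_VALUES, g));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif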

extern PetscErrorCode DMDAGetNatural_Private(DM, PetscInt *, IS *);

/*
  DMDAGlobalToNatural_Create - Create the global to natural scatter object

  Collective

  Input Parameter:
. da - the distributed array context

  Level: developer

  Note:
  This is an internal routine called by `DMDAGlobalToNatural()` to
  create the scatter context.

.seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
*/
PetscErrorCode DMDAGlobalToNatural_Create(DM da)
{
  PetscInt m, start, Nlocal;
  IS       from, to;
  Vec      global;
  DM_DA   *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCheck(dd->natural, PetscObjectComm((PetscObject)da), PETSC_ERR_ORDER, "Natural layout vector not yet created; cannot scatter into it");

  /* create the scatter context */
  PetscCall(VecGetLocalSize(dd->natural, &m));
  PetscCall(VecGetOwnershipRange(dd->natural, &start, NULL));

  PetscCall(DMDAGetNatural_Private(da, &Nlocal, &to));
  PetscCheck(Nlocal == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Internal error: Nlocal %" PetscInt_FMT " local vector size %" PetscInt_FMT, Nlocal, m);
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &from));
  PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, dd->Nlocal, PETSC_DETERMINE, NULL, &global));
  PetscCall(VecScatterCreate(global, from, dd->natural, to, &dd->gton));
  PetscCall(VecDestroy(&global));
  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
  in the "natural" grid ordering. Must be followed by
  `DMDAGlobalToNaturalEnd()` to complete the exchange.

  Neighbor-wise Collective

  Input Parameters:
+ da   - the distributed array context
. g    - the global vector
- mode - one of `INSERT_VALUES` or `ADD_VALUES`

  Output Parameter:
. n - the natural ordering values

  Level: advanced

  Notes:
  The global and natural vectors used here need not be the same as those
  obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
  must have the same parallel data layout; they could, for example, be
  obtained with `VecDuplicate()` from the `DMDA` originating vectors.

  You must call `DMDACreateNaturalVector()` before using this routine.

.seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDAGlobalToNaturalBegin(DM da, Vec g, InsertMode mode, Vec n)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  if (!dd->gton) {
    /* create the scatter context */
    PetscCall(DMDAGlobalToNatural_Create(da));
  }
  PetscCall(VecScatterBegin(dd->gton, g, n, mode, SCATTER_FORWARD));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
  in the natural ordering. Must be preceded by `DMDAGlobalToNaturalBegin()`.

  Neighbor-wise Collective

  Input Parameters:
+ da   - the distributed array context
. g    - the global vector
- mode - one of `INSERT_VALUES` or `ADD_VALUES`

  Output Parameter:
. n - the global values in the natural ordering

  Level: advanced

  Notes:
  The global and natural vectors used here need not be the same as those
  obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
  must have the same parallel data layout; they could, for example, be
  obtained with `VecDuplicate()` from the `DMDA` originating vectors.

.seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDAGlobalToNaturalEnd(DM da, Vec g, InsertMode mode, Vec n)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCall(VecScatterEnd(dd->gton, g, n, mode, SCATTER_FORWARD));
  PetscFunctionReturn(PETSC_SUCCESS);
}
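
/*
  Illustrative usage sketch (not part of the library source): map a DMDA global
  vector into the "natural" grid ordering, for example before viewing or
  writing it in a processor-independent layout. As noted above,
  DMDACreateNaturalVector() must be called first. The helper name is an
  assumption; the block is guarded by #if 0 so it is not compiled.
*/
#if 0
static PetscErrorCode ExampleGlobalToNatural(DM da, Vec g)
{
  Vec natural;

  PetscFunctionBegin;
  PetscCall(DMDACreateNaturalVector(da, &natural));
  PetscCall(DMDAGlobalToNaturalBegin(da, g, INSERT_VALUES, natural));
  PetscCall(DMDAGlobalToNaturalEnd(da, g, INSERT_VALUES, natural));
  /* use natural here, e.g. with VecView() */
  PetscCall(VecDestroy(&natural));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif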

/*@
  DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
  to a global vector in the PETSc `DMDA` grid ordering. Must be followed by
  `DMDANaturalToGlobalEnd()` to complete the exchange.

  Neighbor-wise Collective

  Input Parameters:
+ da   - the distributed array context
. n    - the global vector in the natural ordering
- mode - one of `INSERT_VALUES` or `ADD_VALUES`

  Output Parameter:
. g - the values in the `DMDA` ordering

  Level: advanced

  Notes:
  The global and natural vectors used here need not be the same as those
  obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
  must have the same parallel data layout; they could, for example, be
  obtained with `VecDuplicate()` from the `DMDA` originating vectors.

.seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDANaturalToGlobalBegin(DM da, Vec n, InsertMode mode, Vec g)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  if (!dd->gton) {
    /* create the scatter context */
    PetscCall(DMDAGlobalToNatural_Create(da));
  }
  PetscCall(VecScatterBegin(dd->gton, n, g, mode, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
  to a global vector in the PETSc `DMDA` ordering. Must be preceded by `DMDANaturalToGlobalBegin()`.

  Neighbor-wise Collective

  Input Parameters:
+ da   - the distributed array context
. n    - the global vector in the natural ordering
- mode - one of `INSERT_VALUES` or `ADD_VALUES`

  Output Parameter:
. g - the global values in the PETSc `DMDA` ordering

  Level: advanced

  Notes:
  The global and natural vectors used here need not be the same as those
  obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
  must have the same parallel data layout; they could, for example, be
  obtained with `VecDuplicate()` from the `DMDA` originating vectors.

.seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDANaturalToGlobalEnd(DM da, Vec n, InsertMode mode, Vec g)
{
  DM_DA *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscCall(VecScatterEnd(dd->gton, n, g, mode, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
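
/*
  Illustrative usage sketch (not part of the library source): the reverse
  mapping, scattering a vector stored in the natural grid ordering (for
  example, data read in a fixed grid order) back into the parallel PETSc
  DMDA ordering of g. The helper name is an assumption; the block is guarded
  by #if 0 so it is not compiled.
*/
#if 0
static PetscErrorCode ExampleNaturalToGlobal(DM da, Vec natural, Vec g)
{
  PetscFunctionBegin;
  PetscCall(DMDANaturalToGlobalBegin(da, natural, INSERT_VALUES, g));
  PetscCall(DMDANaturalToGlobalEnd(da, natural, INSERT_VALUES, g));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif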