Actual source code: da.c

  1: #include <petsc/private/dmdaimpl.h>

  3: /*@
  4:   DMDASetSizes - Sets the global number of grid points in the three coordinate directions

  6:   Logically Collective

  8:   Input Parameters:
  9: + da - the `DMDA`
 10: . M - the global X size
 11: . N - the global Y size
 12: - P - the global Z size

 14:   Level: intermediate

 16:   Developer Note:
  17:   Since the dimension may not yet have been set, the code cannot error check for non-positive numbers of grid points in the Y and Z directions

 19: .seealso: `DM`, `DMDA`, `PetscSplitOwnership()`
 20: @*/
 21: PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
 22: {
 23:   DM_DA *dd = (DM_DA *)da->data;

 25:   PetscFunctionBegin;
 30:   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
 31:   PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in X direction must be non-negative");
 32:   PetscCheck(N >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Y direction must be non-negative");
 33:   PetscCheck(P >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Z direction must be non-negative");

 35:   dd->M = M;
 36:   dd->N = N;
 37:   dd->P = P;
 38:   PetscFunctionReturn(PETSC_SUCCESS);
 39: }
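/* A minimal usage sketch (sizes and options are illustrative, not part of this source): the setters
   in this file are typically called between DMDACreate() and DMSetUp(), mirroring the sequence used
   internally by DMRefine_DA() further below.

     DM da;
     PetscCall(DMDACreate(PETSC_COMM_WORLD, &da));
     PetscCall(DMSetDimension(da, 2));
     PetscCall(DMDASetSizes(da, 64, 64, 1));
     PetscCall(DMDASetNumProcs(da, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE));
     PetscCall(DMDASetBoundaryType(da, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
     PetscCall(DMDASetDof(da, 1));
     PetscCall(DMDASetStencilType(da, DMDA_STENCIL_STAR));
     PetscCall(DMDASetStencilWidth(da, 1));
     PetscCall(DMSetUp(da));
*/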

 41: /*@
 42:   DMDASetNumProcs - Sets the number of processes in each dimension

 44:   Logically Collective

 46:   Input Parameters:
 47: + da - the `DMDA`
 48: . m - the number of X procs (or `PETSC_DECIDE`)
 49: . n - the number of Y procs (or `PETSC_DECIDE`)
 50: - p - the number of Z procs (or `PETSC_DECIDE`)

 52:   Level: intermediate

 54: .seealso: `DM`, `DMDA`, `DMDASetSizes()`, `PetscSplitOwnership()`
 55: @*/
 56: PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
 57: {
 58:   DM_DA *dd = (DM_DA *)da->data;

 60:   PetscFunctionBegin;
 65:   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
 66:   dd->m = m;
 67:   dd->n = n;
 68:   dd->p = p;
 69:   if (da->dim == 2) {
 70:     PetscMPIInt size;
 71:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
 72:     if ((dd->m > 0) && (dd->n < 0)) {
 73:       dd->n = size / dd->m;
 74:       PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in X direction not divisible into comm size %d", m, size);
 75:     }
 76:     if ((dd->n > 0) && (dd->m < 0)) {
 77:       dd->m = size / dd->n;
 78:       PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in Y direction not divisible into comm size %d", n, size);
 79:     }
 80:   }
 81:   PetscFunctionReturn(PETSC_SUCCESS);
 82: }
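/* For example, on a communicator with 6 ranks and a 2D DMDA, requesting m = 3 and leaving n as
   PETSC_DECIDE gives n = 6/3 = 2; a value of m that does not divide the communicator size is an
   error. A sketch (values illustrative):

     PetscCall(DMDASetNumProcs(da, 3, PETSC_DECIDE, PETSC_DECIDE));
*/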

 84: /*@
 85:   DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.

 87:   Not Collective

 89:   Input Parameters:
 90: + da    - The `DMDA`
  91: - bx, by, bz - the boundary type in each of the x, y, and z directions; each is one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, or `DM_BOUNDARY_PERIODIC`

 93:   Level: intermediate

 95: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMBoundaryType`
 96: @*/
 97: PetscErrorCode DMDASetBoundaryType(DM da, DMBoundaryType bx, DMBoundaryType by, DMBoundaryType bz)
 98: {
 99:   DM_DA *dd = (DM_DA *)da->data;

101:   PetscFunctionBegin;
106:   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
107:   dd->bx = bx;
108:   dd->by = by;
109:   dd->bz = bz;
110:   PetscFunctionReturn(PETSC_SUCCESS);
111: }

113: /*@
114:   DMDASetDof - Sets the number of degrees of freedom per vertex

116:   Not Collective

118:   Input Parameters:
119: + da  - The `DMDA`
120: - dof - Number of degrees of freedom

122:   Level: intermediate

124: .seealso: `DM`, `DMDA`, `DMDAGetDof()`, `DMDACreate()`, `DMDestroy()`
125: @*/
126: PetscErrorCode DMDASetDof(DM da, PetscInt dof)
127: {
128:   DM_DA *dd = (DM_DA *)da->data;

130:   PetscFunctionBegin;
133:   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
134:   dd->w  = dof;
135:   da->bs = dof;
136:   PetscFunctionReturn(PETSC_SUCCESS);
137: }

139: /*@
140:   DMDAGetDof - Gets the number of degrees of freedom per vertex

142:   Not Collective

144:   Input Parameter:
145: . da  - The `DMDA`

147:   Output Parameter:
148: . dof - Number of degrees of freedom

150:   Level: intermediate

152: .seealso: `DM`, `DMDA`, `DMDASetDof()`, `DMDACreate()`, `DMDestroy()`
153: @*/
154: PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
155: {
156:   DM_DA *dd = (DM_DA *)da->data;

158:   PetscFunctionBegin;
161:   *dof = dd->w;
162:   PetscFunctionReturn(PETSC_SUCCESS);
163: }

165: /*@
166:   DMDAGetOverlap - Gets the size of the per-processor overlap.

168:   Not Collective

170:   Input Parameter:
171: . da  - The `DMDA`

173:   Output Parameters:
174: + x   - Overlap in the x direction
175: . y   - Overlap in the y direction
176: - z   - Overlap in the z direction

178:   Level: intermediate

180: .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetOverlap()`
181: @*/
182: PetscErrorCode DMDAGetOverlap(DM da, PetscInt *x, PetscInt *y, PetscInt *z)
183: {
184:   DM_DA *dd = (DM_DA *)da->data;

186:   PetscFunctionBegin;
188:   if (x) *x = dd->xol;
189:   if (y) *y = dd->yol;
190:   if (z) *z = dd->zol;
191:   PetscFunctionReturn(PETSC_SUCCESS);
192: }

194: /*@
195:   DMDASetOverlap - Sets the size of the per-processor overlap.

197:   Not Collective

199:   Input Parameters:
200: + da  - The `DMDA`
201: . x   - Overlap in the x direction
202: . y   - Overlap in the y direction
203: - z   - Overlap in the z direction

205:   Level: intermediate

207: .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetOverlap()`
208: @*/
209: PetscErrorCode DMDASetOverlap(DM da, PetscInt x, PetscInt y, PetscInt z)
210: {
211:   DM_DA *dd = (DM_DA *)da->data;

213:   PetscFunctionBegin;
218:   dd->xol = x;
219:   dd->yol = y;
220:   dd->zol = z;
221:   PetscFunctionReturn(PETSC_SUCCESS);
222: }

224: /*@
225:   DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.

227:   Not Collective

229:   Input Parameter:
230: . da  - The `DMDA`

232:   Output Parameter:
233: . Nsub   - Number of local subdomains created upon decomposition

235:   Level: intermediate

237: .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetNumLocalSubDomains()`
238: @*/
239: PetscErrorCode DMDAGetNumLocalSubDomains(DM da, PetscInt *Nsub)
240: {
241:   DM_DA *dd = (DM_DA *)da->data;

243:   PetscFunctionBegin;
245:   if (Nsub) *Nsub = dd->Nsub;
246:   PetscFunctionReturn(PETSC_SUCCESS);
247: }

249: /*@
250:   DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.

252:   Not Collective

254:   Input Parameters:
255: + da  - The `DMDA`
256: - Nsub - The number of local subdomains requested

258:   Level: intermediate

260: .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetNumLocalSubDomains()`
261: @*/
262: PetscErrorCode DMDASetNumLocalSubDomains(DM da, PetscInt Nsub)
263: {
264:   DM_DA *dd = (DM_DA *)da->data;

266:   PetscFunctionBegin;
269:   dd->Nsub = Nsub;
270:   PetscFunctionReturn(PETSC_SUCCESS);
271: }

273: /*@
 274:   DMDASetOffset - Sets the index offset of the `DMDA`.

276:   Collective

278:   Input Parameters:
279: + da  - The `DMDA`
280: . xo  - The offset in the x direction
281: . yo  - The offset in the y direction
 282: . zo  - The offset in the z direction
 283: - Mo, No, Po - The global sizes in the x, y, and z directions

284:   Level: intermediate

286:   Note:
287:     This is used primarily to overlap a computation on a local `DMDA` with that on a global `DMDA` without
288:   changing boundary conditions or subdomain features that depend upon the global offsets.

290: .seealso: `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
291: @*/
292: PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
293: {
294:   DM_DA *dd = (DM_DA *)da->data;

296:   PetscFunctionBegin;
304:   dd->xo = xo;
305:   dd->yo = yo;
306:   dd->zo = zo;
307:   dd->Mo = Mo;
308:   dd->No = No;
309:   dd->Po = Po;

311:   if (da->coordinates[0].dm) PetscCall(DMDASetOffset(da->coordinates[0].dm, xo, yo, zo, Mo, No, Po));
312:   PetscFunctionReturn(PETSC_SUCCESS);
313: }

315: /*@
316:   DMDAGetOffset - Gets the index offset of the `DMDA`.

318:   Not Collective

320:   Input Parameter:
321: . da  - The `DMDA`

323:   Output Parameters:
324: + xo  - The offset in the x direction
325: . yo  - The offset in the y direction
326: . zo  - The offset in the z direction
327: . Mo  - The global size in the x direction
328: . No  - The global size in the y direction
329: - Po  - The global size in the z direction

331:   Level: intermediate

333: .seealso: `DM`, `DMDA`, `DMDASetOffset()`, `DMDAVecGetArray()`
334: @*/
335: PetscErrorCode DMDAGetOffset(DM da, PetscInt *xo, PetscInt *yo, PetscInt *zo, PetscInt *Mo, PetscInt *No, PetscInt *Po)
336: {
337:   DM_DA *dd = (DM_DA *)da->data;

339:   PetscFunctionBegin;
341:   if (xo) *xo = dd->xo;
342:   if (yo) *yo = dd->yo;
343:   if (zo) *zo = dd->zo;
344:   if (Mo) *Mo = dd->Mo;
345:   if (No) *No = dd->No;
346:   if (Po) *Po = dd->Po;
347:   PetscFunctionReturn(PETSC_SUCCESS);
348: }

350: /*@
351:   DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain `DM`.

353:   Not Collective

355:   Input Parameter:
356: . da  - The `DMDA`

358:   Output Parameters:
359: + xs  - The start of the region in x
360: . ys  - The start of the region in y
361: . zs  - The start of the region in z
 362: . xm  - The size of the region in x
 363: . ym  - The size of the region in y
 364: - zm  - The size of the region in z

366:   Level: intermediate

368: .seealso: `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
369: @*/
370: PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
371: {
372:   DM_DA *dd = (DM_DA *)da->data;

374:   PetscFunctionBegin;
376:   if (xs) *xs = dd->nonxs;
377:   if (ys) *ys = dd->nonys;
378:   if (zs) *zs = dd->nonzs;
379:   if (xm) *xm = dd->nonxm;
380:   if (ym) *ym = dd->nonym;
381:   if (zm) *zm = dd->nonzm;
382:   PetscFunctionReturn(PETSC_SUCCESS);
383: }

385: /*@
386:   DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain `DM`.

388:   Collective

390:   Input Parameters:
391: + da  - The `DMDA`
392: . xs  - The start of the region in x
393: . ys  - The start of the region in y
394: . zs  - The start of the region in z
 395: . xm  - The size of the region in x
 396: . ym  - The size of the region in y
 397: - zm  - The size of the region in z

399:   Level: intermediate

401: .seealso: `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
402: @*/
403: PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
404: {
405:   DM_DA *dd = (DM_DA *)da->data;

407:   PetscFunctionBegin;
415:   dd->nonxs = xs;
416:   dd->nonys = ys;
417:   dd->nonzs = zs;
418:   dd->nonxm = xm;
419:   dd->nonym = ym;
420:   dd->nonzm = zm;

422:   PetscFunctionReturn(PETSC_SUCCESS);
423: }

425: /*@
426:   DMDASetStencilType - Sets the type of the communication stencil

428:   Logically Collective

430:   Input Parameters:
431: + da    - The `DMDA`
432: - stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.

434:   Level: intermediate

436: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
437: @*/
438: PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype)
439: {
440:   DM_DA *dd = (DM_DA *)da->data;

442:   PetscFunctionBegin;
445:   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
446:   dd->stencil_type = stype;
447:   PetscFunctionReturn(PETSC_SUCCESS);
448: }

450: /*@
451:   DMDAGetStencilType - Gets the type of the communication stencil

453:   Not Collective

455:   Input Parameter:
456: . da    - The `DMDA`

458:   Output Parameter:
459: . stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.

461:   Level: intermediate

463: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
464: @*/
465: PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
466: {
467:   DM_DA *dd = (DM_DA *)da->data;

469:   PetscFunctionBegin;
472:   *stype = dd->stencil_type;
473:   PetscFunctionReturn(PETSC_SUCCESS);
474: }

476: /*@
477:   DMDASetStencilWidth - Sets the width of the communication stencil

479:   Logically Collective

481:   Input Parameters:
482: + da    - The `DMDA`
483: - width - The stencil width

485:   Level: intermediate

487: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
488: @*/
489: PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width)
490: {
491:   DM_DA *dd = (DM_DA *)da->data;

493:   PetscFunctionBegin;
496:   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
497:   dd->s = width;
498:   PetscFunctionReturn(PETSC_SUCCESS);
499: }

501: /*@
502:   DMDAGetStencilWidth - Gets the width of the communication stencil

504:   Not Collective

506:   Input Parameter:
507: . da    - The `DMDA`

509:   Output Parameter:
510: . width - The stencil width

512:   Level: intermediate

514: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
515: @*/
516: PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
517: {
518:   DM_DA *dd = (DM_DA *)da->data;

520:   PetscFunctionBegin;
523:   *width = dd->s;
524:   PetscFunctionReturn(PETSC_SUCCESS);
525: }

527: static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da, PetscInt M, PetscInt m, const PetscInt lx[])
528: {
529:   PetscInt i, sum;

531:   PetscFunctionBegin;
532:   PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Global dimension not set");
533:   for (i = sum = 0; i < m; i++) sum += lx[i];
534:   PetscCheck(sum == M, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Ownership ranges sum to %" PetscInt_FMT " but global dimension is %" PetscInt_FMT, sum, M);
535:   PetscFunctionReturn(PETSC_SUCCESS);
536: }

538: /*@
539:   DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process

541:   Logically Collective

543:   Input Parameters:
544: + da - The `DMDA`
 545: . lx - array containing the number of nodes in the X direction on each process, or `NULL`; if non-`NULL`, it must be of length equal to the number of processes in the X direction
 546: . ly - array containing the number of nodes in the Y direction on each process, or `NULL`; if non-`NULL`, it must be of length equal to the number of processes in the Y direction
 547: - lz - array containing the number of nodes in the Z direction on each process, or `NULL`; if non-`NULL`, it must be of length equal to the number of processes in the Z direction

549:   Level: intermediate

 551:   Note: These numbers are NOT multiplied by the number of dof per node.

553: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
554: @*/
555: PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
556: {
557:   DM_DA *dd = (DM_DA *)da->data;

559:   PetscFunctionBegin;
561:   PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
562:   if (lx) {
563:     PetscCheck(dd->m >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs");
564:     PetscCall(DMDACheckOwnershipRanges_Private(da, dd->M, dd->m, lx));
565:     if (!dd->lx) PetscCall(PetscMalloc1(dd->m, &dd->lx));
566:     PetscCall(PetscArraycpy(dd->lx, lx, dd->m));
567:   }
568:   if (ly) {
569:     PetscCheck(dd->n >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs");
570:     PetscCall(DMDACheckOwnershipRanges_Private(da, dd->N, dd->n, ly));
571:     if (!dd->ly) PetscCall(PetscMalloc1(dd->n, &dd->ly));
572:     PetscCall(PetscArraycpy(dd->ly, ly, dd->n));
573:   }
574:   if (lz) {
575:     PetscCheck(dd->p >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs");
576:     PetscCall(DMDACheckOwnershipRanges_Private(da, dd->P, dd->p, lz));
577:     if (!dd->lz) PetscCall(PetscMalloc1(dd->p, &dd->lz));
578:     PetscCall(PetscArraycpy(dd->lz, lz, dd->p));
579:   }
580:   PetscFunctionReturn(PETSC_SUCCESS);
581: }
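/* A minimal sketch (values illustrative): explicitly assign 5 and 4 of the M = 9 grid points in x
   to two ranks; the entries must sum to the global size set with DMDASetSizes().

     PetscInt lx[2] = {5, 4};
     PetscCall(DMDASetNumProcs(da, 2, PETSC_DECIDE, PETSC_DECIDE));
     PetscCall(DMDASetOwnershipRanges(da, lx, NULL, NULL));
*/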

583: /*@
584:        DMDASetInterpolationType - Sets the type of interpolation that will be
585:           returned by `DMCreateInterpolation()`

587:    Logically Collective

589:    Input Parameters:
590: +  da - initial distributed array
591: -  ctype - `DMDA_Q1` and `DMDA_Q0` are currently the only supported forms

593:    Level: intermediate

595:    Note:
596:    You should call this on the coarser of the two `DMDA` you pass to `DMCreateInterpolation()`

598: .seealso: `DM`, `DMDA`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMDestroy()`, `DMDAInterpolationType`
599: @*/
600: PetscErrorCode DMDASetInterpolationType(DM da, DMDAInterpolationType ctype)
601: {
602:   DM_DA *dd = (DM_DA *)da->data;

604:   PetscFunctionBegin;
607:   dd->interptype = ctype;
608:   PetscFunctionReturn(PETSC_SUCCESS);
609: }

611: /*@
612:        DMDAGetInterpolationType - Gets the type of interpolation that will be
613:           used by `DMCreateInterpolation()`

615:    Not Collective

617:    Input Parameter:
618: .  da - distributed array

620:    Output Parameter:
621: .  ctype - interpolation type (`DMDA_Q1` and `DMDA_Q0` are currently the only supported forms)

623:    Level: intermediate

625: .seealso: `DM`, `DMDA`, `DMDAInterpolationType`, `DMDASetInterpolationType()`, `DMCreateInterpolation()`
626: @*/
627: PetscErrorCode DMDAGetInterpolationType(DM da, DMDAInterpolationType *ctype)
628: {
629:   DM_DA *dd = (DM_DA *)da->data;

631:   PetscFunctionBegin;
634:   *ctype = dd->interptype;
635:   PetscFunctionReturn(PETSC_SUCCESS);
636: }

638: /*@C
 639:       DMDAGetNeighbors - Gets an array containing the MPI ranks of all the neighbors
 640:         of the current process.

642:     Not Collective

644:    Input Parameter:
645: .     da - the `DMDA` object

647:    Output Parameter:
 648: .     ranks - the neighbors' ranks, stored with the x index increasing most rapidly;
 649:               this process itself is included in the list

651:    Level: intermediate

653:    Notes:
654:    In 2d the array is of length 9, in 3d of length 27

656:    Not supported in 1d

658:    Do not free the array, it is freed when the `DMDA` is destroyed.

660:    Fortran Note:
 661:     In Fortran you must pass in an array of the appropriate length.

663: .seealso: `DMDA`, `DM`
664: @*/
665: PetscErrorCode DMDAGetNeighbors(DM da, const PetscMPIInt *ranks[])
666: {
667:   DM_DA *dd = (DM_DA *)da->data;

669:   PetscFunctionBegin;
671:   *ranks = dd->neighbors;
672:   PetscFunctionReturn(PETSC_SUCCESS);
673: }
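/* A minimal sketch for a 2D DMDA (the 9-entry length follows from the note above; the printing is
   illustrative):

     const PetscMPIInt *nbrs;
     PetscCall(DMDAGetNeighbors(da, &nbrs));
     for (PetscInt i = 0; i < 9; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "neighbor %" PetscInt_FMT ": %d\n", i, (int)nbrs[i]));
*/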

675: /*@C
676:       DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process

678:     Not Collective

680:    Input Parameter:
681: .     da - the `DMDA` object

683:    Output Parameters:
684: +     lx - ownership along x direction (optional)
685: .     ly - ownership along y direction (optional)
686: -     lz - ownership along z direction (optional)

688:    Level: intermediate

690:     Note:
 691:     These correspond to the optional final arguments passed to `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`

693:     In C you should not free these arrays, nor change the values in them. They will only have valid values while the
694:     `DMDA` they came from still exists (has not been destroyed).

696:     These numbers are NOT multiplied by the number of dof per node.

698:     Fortran Note:
699:     In Fortran one must pass in arrays `lx`, `ly`, and `lz` that are long enough to hold the values; the sixth, seventh and
700:     eighth arguments from `DMDAGetInfo()`

702: .seealso: `DM`, `DMDA`, `DMDAGetCorners()`, `DMDAGetGhostCorners()`, `DMDACreate()`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `VecGetOwnershipRanges()`
703: @*/
704: PetscErrorCode DMDAGetOwnershipRanges(DM da, const PetscInt *lx[], const PetscInt *ly[], const PetscInt *lz[])
705: {
706:   DM_DA *dd = (DM_DA *)da->data;

708:   PetscFunctionBegin;
710:   if (lx) *lx = dd->lx;
711:   if (ly) *ly = dd->ly;
712:   if (lz) *lz = dd->lz;
713:   PetscFunctionReturn(PETSC_SUCCESS);
714: }
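/* A minimal sketch: the length of lx is the number of processes in x, available as the sixth
   argument of DMDAGetInfo() (see the Fortran note above); the printing is illustrative.

     PetscInt        m;
     const PetscInt *lx;
     PetscCall(DMDAGetInfo(da, NULL, NULL, NULL, NULL, &m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
     PetscCall(DMDAGetOwnershipRanges(da, &lx, NULL, NULL));
     for (PetscInt i = 0; i < m; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "rank %" PetscInt_FMT " owns %" PetscInt_FMT " points in x\n", i, lx[i]));
*/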

716: /*@
 717:      DMDASetRefinementFactor - Sets the ratios by which the `DMDA` grid is refined

719:     Logically Collective

721:   Input Parameters:
722: +    da - the `DMDA` object
723: .    refine_x - ratio of fine grid to coarse in x direction (2 by default)
724: .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
725: -    refine_z - ratio of fine grid to coarse in z direction (2 by default)

727:   Options Database Keys:
728: +  -da_refine_x refine_x - refinement ratio in x direction
 729: .  -da_refine_y refine_y - refinement ratio in y direction
 730: .  -da_refine_z refine_z - refinement ratio in z direction
 731: -  -da_refine <n> - refine the `DMDA` object n times when it is created.

733:   Level: intermediate

735:     Note:
736:     Pass `PETSC_IGNORE` to leave a value unchanged

738: .seealso: `DM`, `DMDA`, `DMRefine()`, `DMDAGetRefinementFactor()`
739: @*/
740: PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y, PetscInt refine_z)
741: {
742:   DM_DA *dd = (DM_DA *)da->data;

744:   PetscFunctionBegin;

750:   if (refine_x > 0) dd->refine_x = refine_x;
751:   if (refine_y > 0) dd->refine_y = refine_y;
752:   if (refine_z > 0) dd->refine_z = refine_z;
753:   PetscFunctionReturn(PETSC_SUCCESS);
754: }
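/* A minimal sketch (factors illustrative): refine more aggressively in x than in y and z when the
   next level is generated by DMRefine().

     DM fine;
     PetscCall(DMDASetRefinementFactor(da, 4, 2, 2));
     PetscCall(DMRefine(da, PetscObjectComm((PetscObject)da), &fine));
*/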

756: /*@C
 757:      DMDAGetRefinementFactor - Gets the ratios by which the `DMDA` grid is refined

759:     Not Collective

761:   Input Parameter:
762: .    da - the `DMDA` object

764:   Output Parameters:
765: +    refine_x - ratio of fine grid to coarse in x direction (2 by default)
766: .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
767: -    refine_z - ratio of fine grid to coarse in z direction (2 by default)

769:   Level: intermediate

771:     Note:
772:     Pass `NULL` for values you do not need

774: .seealso: `DM`, `DMDA`, `DMRefine()`, `DMDASetRefinementFactor()`
775: @*/
776: PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y, PetscInt *refine_z)
777: {
778:   DM_DA *dd = (DM_DA *)da->data;

780:   PetscFunctionBegin;
782:   if (refine_x) *refine_x = dd->refine_x;
783:   if (refine_y) *refine_y = dd->refine_y;
784:   if (refine_z) *refine_z = dd->refine_z;
785:   PetscFunctionReturn(PETSC_SUCCESS);
786: }

788: /*@C
789:      DMDASetGetMatrix - Sets the routine used by the `DMDA` to allocate a matrix.

791:     Logically Collective; No Fortran Support

793:   Input Parameters:
794: +    da - the `DMDA` object
 795: -    f - the function that allocates the matrix for that specific `DMDA`

797:   Level: developer

799:    Note:
 800:     See `DMDASetBlockFills()`, which provides a simple way to specify the nonzero structure of
 801:        the diagonal and off-diagonal blocks of the matrix

803: .seealso: `DM`, `DMDA`, `DMCreateMatrix()`, `DMDASetBlockFills()`
804: @*/
805: PetscErrorCode DMDASetGetMatrix(DM da, PetscErrorCode (*f)(DM, Mat *))
806: {
807:   PetscFunctionBegin;
809:   da->ops->creatematrix = f;
810:   PetscFunctionReturn(PETSC_SUCCESS);
811: }
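/* A sketch of a user-supplied matrix-creation routine (the name MyCreateMatrix and the choice of
   MATAIJ are illustrative); after registration, DMCreateMatrix() on this DMDA calls it in place of
   the built-in allocation.

     static PetscErrorCode MyCreateMatrix(DM dm, Mat *A)
     {
       PetscInt n;
       Vec      v;

       PetscFunctionBegin;
       PetscCall(DMGetGlobalVector(dm, &v));
       PetscCall(VecGetLocalSize(v, &n));
       PetscCall(DMRestoreGlobalVector(dm, &v));
       PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), A));
       PetscCall(MatSetSizes(*A, n, n, PETSC_DETERMINE, PETSC_DETERMINE));
       PetscCall(MatSetType(*A, MATAIJ));
       PetscCall(MatSetUp(*A));
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     PetscCall(DMDASetGetMatrix(da, MyCreateMatrix));
*/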

813: /*@
814:    DMDAMapMatStencilToGlobal - Map a list of `MatStencil` on a grid to global indices.

816:    Not Collective

818:    Input Parameters:
819: +  da - the `DMDA` object
820: .  m - number of MatStencils
821: -  idxm - grid points (and component number when dof > 1)

823:    Output Parameter:
824: .  gidxm - global row indices

826:    Level: intermediate

828: .seealso: `DM`, `DMDA`, `MatStencil`
829: @*/
830: PetscErrorCode DMDAMapMatStencilToGlobal(DM da, PetscInt m, const MatStencil idxm[], PetscInt gidxm[])
831: {
832:   const DM_DA           *dd  = (const DM_DA *)da->data;
833:   const PetscInt        *dxm = (const PetscInt *)idxm;
834:   PetscInt               i, j, sdim, tmp, dim;
835:   PetscInt               dims[4], starts[4], dims2[3], starts2[3], dof = dd->w;
836:   ISLocalToGlobalMapping ltog;

838:   PetscFunctionBegin;
839:   if (m <= 0) PetscFunctionReturn(PETSC_SUCCESS);

841:   /* Code adapted from DMDAGetGhostCorners() */
842:   starts2[0] = dd->Xs / dof + dd->xo;
843:   starts2[1] = dd->Ys + dd->yo;
844:   starts2[2] = dd->Zs + dd->zo;
845:   dims2[0]   = (dd->Xe - dd->Xs) / dof;
846:   dims2[1]   = (dd->Ye - dd->Ys);
847:   dims2[2]   = (dd->Ze - dd->Zs);

849:   /* As if we do MatSetStencil() to get dims[]/starts[] of mat->stencil */
850:   dim  = da->dim;                   /* DA dim: 1 to 3 */
851:   sdim = dim + (dof > 1 ? 1 : 0);   /* Dimensions in MatStencil's (k,j,i,c) view */
852:   for (i = 0; i < dim; i++) {       /* Reverse the order and also skip the unused dimensions */
853:     dims[i]   = dims2[dim - i - 1]; /* ex. dims/starts[] are in order of {i} for 1D, {j,i} for 2D and {k,j,i} for 3D */
854:     starts[i] = starts2[dim - i - 1];
855:   }
856:   starts[dim] = 0; /* Append the extra dim for dof (won't be used below if dof=1) */
857:   dims[dim]   = dof;

859:   /* Map stencils to local indices (code adapted from MatSetValuesStencil()) */
860:   for (i = 0; i < m; i++) {
861:     dxm += 3 - dim; /* Input is {k,j,i,c}; move the pointer to the first used index, e.g., j in 2D */
862:     tmp = 0;
863:     for (j = 0; j < sdim; j++) {                                                      /* Iter over, ex. j,i or j,i,c in 2D */
864:       if (tmp < 0 || dxm[j] < starts[j] || dxm[j] >= (starts[j] + dims[j])) tmp = -1; /* Beyond the ghost region, therefore ignored with negative indices */
865:       else tmp = tmp * dims[j] + (dxm[j] - starts[j]);
866:     }
867:     gidxm[i] = tmp;
868:     /* Move to the next MatStencil point */
869:     if (dof > 1) dxm += sdim; /* c is already counted in sdim */
870:     else dxm += sdim + 1;     /* skip the unused c */
871:   }

873:   /* Map local indices to global indices */
874:   PetscCall(DMGetLocalToGlobalMapping(da, &ltog));
875:   PetscCall(ISLocalToGlobalMappingApply(ltog, m, gidxm, gidxm));
876:   PetscFunctionReturn(PETSC_SUCCESS);
877: }
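/* A minimal sketch (indices illustrative, dof = 1 on a 2D DMDA): map one row and one column
   stencil to global indices and insert the value with MatSetValues() rather than
   MatSetValuesStencil().

     MatStencil  row = {0}, col = {0};
     PetscInt    grow, gcol;
     PetscScalar v = 1.0;

     row.i = 3; row.j = 4;
     col.i = 4; col.j = 4;
     PetscCall(DMDAMapMatStencilToGlobal(da, 1, &row, &grow));
     PetscCall(DMDAMapMatStencilToGlobal(da, 1, &col, &gcol));
     PetscCall(MatSetValues(A, 1, &grow, 1, &gcol, &v, INSERT_VALUES));
*/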

879: /*
880:   Creates "balanced" ownership ranges after refinement, constrained by the need for the
881:   fine grid boundaries to fall within one stencil width of the coarse partition.

883:   Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
884: */
885: static PetscErrorCode DMDARefineOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lc[], PetscInt lf[])
886: {
887:   PetscInt i, totalc = 0, remaining, startc = 0, startf = 0;

889:   PetscFunctionBegin;
890:   PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
891:   if (ratio == 1) {
892:     PetscCall(PetscArraycpy(lf, lc, m));
893:     PetscFunctionReturn(PETSC_SUCCESS);
894:   }
895:   for (i = 0; i < m; i++) totalc += lc[i];
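        /* Total fine points: each coarse cell contributes "ratio" fine points and a non-periodic grid
           keeps one extra closing boundary point, e.g. ratio = 2 and totalc = 5 (non-periodic) gives
           1 + 2*(5-1) = 9 fine points, matching the sizes computed in DMRefine_DA(). */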
896:   remaining = (!periodic) + ratio * (totalc - (!periodic));
897:   for (i = 0; i < m; i++) {
898:     PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
899:     if (i == m - 1) lf[i] = want;
900:     else {
901:       const PetscInt nextc = startc + lc[i];
902:       /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
903:        * coarse stencil width of the first coarse node in the next subdomain. */
904:       while ((startf + want) / ratio < nextc - stencil_width) want++;
905:       /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
906:        * coarse stencil width of the last coarse node in the current subdomain. */
907:       while ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width) want--;
908:       /* Make sure all constraints are satisfied */
909:       if (want < 0 || want > remaining || ((startf + want) / ratio < nextc - stencil_width) || ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width))
910:         SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible refined ownership range");
911:     }
912:     lf[i] = want;
913:     startc += lc[i];
914:     startf += lf[i];
915:     remaining -= lf[i];
916:   }
917:   PetscFunctionReturn(PETSC_SUCCESS);
918: }

920: /*
921:   Creates "balanced" ownership ranges after coarsening, constrained by the need for the
922:   fine grid boundaries to fall within one stencil width of the coarse partition.

924:   Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
925: */
926: static PetscErrorCode DMDACoarsenOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lf[], PetscInt lc[])
927: {
928:   PetscInt i, totalf, remaining, startc, startf;

930:   PetscFunctionBegin;
931:   PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
932:   if (ratio == 1) {
933:     PetscCall(PetscArraycpy(lc, lf, m));
934:     PetscFunctionReturn(PETSC_SUCCESS);
935:   }
936:   for (i = 0, totalf = 0; i < m; i++) totalf += lf[i];
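        /* Inverse of the count used for refinement: a non-periodic fine grid of totalf points coarsens
           to 1 + (totalf - 1)/ratio points, e.g. ratio = 2 and totalf = 9 gives 5 coarse points. */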
937:   remaining = (!periodic) + (totalf - (!periodic)) / ratio;
938:   for (i = 0, startc = 0, startf = 0; i < m; i++) {
939:     PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
940:     if (i == m - 1) lc[i] = want;
941:     else {
942:       const PetscInt nextf = startf + lf[i];
943:       /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
944:        * node is within one stencil width. */
945:       while (nextf / ratio < startc + want - stencil_width) want--;
946:       /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
947:        * fine node is within one stencil width. */
948:       while ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width) want++;
949:       if (want < 0 || want > remaining || (nextf / ratio < startc + want - stencil_width) || ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width))
950:         SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible coarsened ownership range");
951:     }
952:     lc[i] = want;
953:     startc += lc[i];
954:     startf += lf[i];
955:     remaining -= lc[i];
956:   }
957:   PetscFunctionReturn(PETSC_SUCCESS);
958: }

960: PetscErrorCode DMRefine_DA(DM da, MPI_Comm comm, DM *daref)
961: {
962:   PetscInt M, N, P, i, dim;
963:   Vec      coordsc, coordsf;
964:   DM       da2;
965:   DM_DA   *dd = (DM_DA *)da->data, *dd2;

967:   PetscFunctionBegin;

971:   PetscCall(DMGetDimension(da, &dim));
972:   if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
973:     M = dd->refine_x * dd->M;
974:   } else {
975:     M = 1 + dd->refine_x * (dd->M - 1);
976:   }
977:   if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
978:     if (dim > 1) {
979:       N = dd->refine_y * dd->N;
980:     } else {
981:       N = 1;
982:     }
983:   } else {
984:     N = 1 + dd->refine_y * (dd->N - 1);
985:   }
986:   if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
987:     if (dim > 2) {
988:       P = dd->refine_z * dd->P;
989:     } else {
990:       P = 1;
991:     }
992:   } else {
993:     P = 1 + dd->refine_z * (dd->P - 1);
994:   }
995:   PetscCall(DMDACreate(PetscObjectComm((PetscObject)da), &da2));
996:   PetscCall(DMSetOptionsPrefix(da2, ((PetscObject)da)->prefix));
997:   PetscCall(DMSetDimension(da2, dim));
998:   PetscCall(DMDASetSizes(da2, M, N, P));
999:   PetscCall(DMDASetNumProcs(da2, dd->m, dd->n, dd->p));
1000:   PetscCall(DMDASetBoundaryType(da2, dd->bx, dd->by, dd->bz));
1001:   PetscCall(DMDASetDof(da2, dd->w));
1002:   PetscCall(DMDASetStencilType(da2, dd->stencil_type));
1003:   PetscCall(DMDASetStencilWidth(da2, dd->s));
1004:   if (dim == 3) {
1005:     PetscInt *lx, *ly, *lz;
1006:     PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
1007:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
1008:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
1009:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_z, dd->p, dd->lz, lz));
1010:     PetscCall(DMDASetOwnershipRanges(da2, lx, ly, lz));
1011:     PetscCall(PetscFree3(lx, ly, lz));
1012:   } else if (dim == 2) {
1013:     PetscInt *lx, *ly;
1014:     PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
1015:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
1016:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
1017:     PetscCall(DMDASetOwnershipRanges(da2, lx, ly, NULL));
1018:     PetscCall(PetscFree2(lx, ly));
1019:   } else if (dim == 1) {
1020:     PetscInt *lx;
1021:     PetscCall(PetscMalloc1(dd->m, &lx));
1022:     PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
1023:     PetscCall(DMDASetOwnershipRanges(da2, lx, NULL, NULL));
1024:     PetscCall(PetscFree(lx));
1025:   }
1026:   dd2 = (DM_DA *)da2->data;

1028:   /* allow overloaded (user replaced) operations to be inherited by refinement clones */
1029:   da2->ops->creatematrix = da->ops->creatematrix;
1030:   /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
1031:   da2->ops->getcoloring = da->ops->getcoloring;
1032:   dd2->interptype       = dd->interptype;

1034:   /* copy fill information if given */
1035:   if (dd->dfill) {
1036:     PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
1037:     PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
1038:   }
1039:   if (dd->ofill) {
1040:     PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
1041:     PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
1042:   }
1043:   /* copy the refine information */
1044:   dd2->coarsen_x = dd2->refine_x = dd->refine_x;
1045:   dd2->coarsen_y = dd2->refine_y = dd->refine_y;
1046:   dd2->coarsen_z = dd2->refine_z = dd->refine_z;

1048:   if (dd->refine_z_hier) {
1049:     if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
1050:     if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
1051:     dd2->refine_z_hier_n = dd->refine_z_hier_n;
1052:     PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
1053:     PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
1054:   }
1055:   if (dd->refine_y_hier) {
1056:     if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
1057:     if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
1058:     dd2->refine_y_hier_n = dd->refine_y_hier_n;
1059:     PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
1060:     PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
1061:   }
1062:   if (dd->refine_x_hier) {
1063:     if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
1064:     if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
1065:     dd2->refine_x_hier_n = dd->refine_x_hier_n;
1066:     PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
1067:     PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
1068:   }

1070:   /* copy vector type information */
1071:   PetscCall(DMSetVecType(da2, da->vectype));

1073:   dd2->lf = dd->lf;
1074:   dd2->lj = dd->lj;

1076:   da2->leveldown = da->leveldown;
1077:   da2->levelup   = da->levelup + 1;

1079:   PetscCall(DMSetUp(da2));

1081:   /* interpolate coordinates if they are set on the coarse grid */
1082:   PetscCall(DMGetCoordinates(da, &coordsc));
1083:   if (coordsc) {
1084:     DM  cdaf, cdac;
1085:     Mat II;

1087:     PetscCall(DMGetCoordinateDM(da, &cdac));
1088:     PetscCall(DMGetCoordinateDM(da2, &cdaf));
1089:     /* force creation of the coordinate vector */
1090:     PetscCall(DMDASetUniformCoordinates(da2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
1091:     PetscCall(DMGetCoordinates(da2, &coordsf));
1092:     PetscCall(DMCreateInterpolation(cdac, cdaf, &II, NULL));
1093:     PetscCall(MatInterpolate(II, coordsc, coordsf));
1094:     PetscCall(MatDestroy(&II));
1095:   }

1097:   for (i = 0; i < da->bs; i++) {
1098:     const char *fieldname;
1099:     PetscCall(DMDAGetFieldName(da, i, &fieldname));
1100:     PetscCall(DMDASetFieldName(da2, i, fieldname));
1101:   }

1103:   *daref = da2;
1104:   PetscFunctionReturn(PETSC_SUCCESS);
1105: }

1107: PetscErrorCode DMCoarsen_DA(DM dmf, MPI_Comm comm, DM *dmc)
1108: {
1109:   PetscInt M, N, P, i, dim;
1110:   Vec      coordsc, coordsf;
1111:   DM       dmc2;
1112:   DM_DA   *dd = (DM_DA *)dmf->data, *dd2;

1114:   PetscFunctionBegin;

1118:   PetscCall(DMGetDimension(dmf, &dim));
1119:   if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1120:     M = dd->M / dd->coarsen_x;
1121:   } else {
1122:     M = 1 + (dd->M - 1) / dd->coarsen_x;
1123:   }
1124:   if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1125:     if (dim > 1) {
1126:       N = dd->N / dd->coarsen_y;
1127:     } else {
1128:       N = 1;
1129:     }
1130:   } else {
1131:     N = 1 + (dd->N - 1) / dd->coarsen_y;
1132:   }
1133:   if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1134:     if (dim > 2) {
1135:       P = dd->P / dd->coarsen_z;
1136:     } else {
1137:       P = 1;
1138:     }
1139:   } else {
1140:     P = 1 + (dd->P - 1) / dd->coarsen_z;
1141:   }
1142:   PetscCall(DMDACreate(PetscObjectComm((PetscObject)dmf), &dmc2));
1143:   PetscCall(DMSetOptionsPrefix(dmc2, ((PetscObject)dmf)->prefix));
1144:   PetscCall(DMSetDimension(dmc2, dim));
1145:   PetscCall(DMDASetSizes(dmc2, M, N, P));
1146:   PetscCall(DMDASetNumProcs(dmc2, dd->m, dd->n, dd->p));
1147:   PetscCall(DMDASetBoundaryType(dmc2, dd->bx, dd->by, dd->bz));
1148:   PetscCall(DMDASetDof(dmc2, dd->w));
1149:   PetscCall(DMDASetStencilType(dmc2, dd->stencil_type));
1150:   PetscCall(DMDASetStencilWidth(dmc2, dd->s));
1151:   if (dim == 3) {
1152:     PetscInt *lx, *ly, *lz;
1153:     PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
1154:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
1155:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
1156:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_z, dd->p, dd->lz, lz));
1157:     PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, lz));
1158:     PetscCall(PetscFree3(lx, ly, lz));
1159:   } else if (dim == 2) {
1160:     PetscInt *lx, *ly;
1161:     PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
1162:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
1163:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
1164:     PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, NULL));
1165:     PetscCall(PetscFree2(lx, ly));
1166:   } else if (dim == 1) {
1167:     PetscInt *lx;
1168:     PetscCall(PetscMalloc1(dd->m, &lx));
1169:     PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
1170:     PetscCall(DMDASetOwnershipRanges(dmc2, lx, NULL, NULL));
1171:     PetscCall(PetscFree(lx));
1172:   }
1173:   dd2 = (DM_DA *)dmc2->data;

1175:   /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
1176:   /* dmc2->ops->createinterpolation = dmf->ops->createinterpolation; copying this one causes trouble for DMSetVI */
1177:   dmc2->ops->creatematrix = dmf->ops->creatematrix;
1178:   dmc2->ops->getcoloring  = dmf->ops->getcoloring;
1179:   dd2->interptype         = dd->interptype;

1181:   /* copy fill information if given */
1182:   if (dd->dfill) {
1183:     PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
1184:     PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
1185:   }
1186:   if (dd->ofill) {
1187:     PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
1188:     PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
1189:   }
1190:   /* copy the refine information */
1191:   dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
1192:   dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
1193:   dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

1195:   if (dd->refine_z_hier) {
1196:     if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 1];
1197:     if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 2];
1198:     dd2->refine_z_hier_n = dd->refine_z_hier_n;
1199:     PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
1200:     PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
1201:   }
1202:   if (dd->refine_y_hier) {
1203:     if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 1];
1204:     if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 2];
1205:     dd2->refine_y_hier_n = dd->refine_y_hier_n;
1206:     PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
1207:     PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
1208:   }
1209:   if (dd->refine_x_hier) {
1210:     if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 1];
1211:     if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 2];
1212:     dd2->refine_x_hier_n = dd->refine_x_hier_n;
1213:     PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
1214:     PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
1215:   }

1217:   /* copy vector type information */
1218:   PetscCall(DMSetVecType(dmc2, dmf->vectype));

1220:   dd2->lf = dd->lf;
1221:   dd2->lj = dd->lj;

1223:   dmc2->leveldown = dmf->leveldown + 1;
1224:   dmc2->levelup   = dmf->levelup;

1226:   PetscCall(DMSetUp(dmc2));

1228:   /* inject coordinates if they are set on the fine grid */
1229:   PetscCall(DMGetCoordinates(dmf, &coordsf));
1230:   if (coordsf) {
1231:     DM         cdaf, cdac;
1232:     Mat        inject;
1233:     VecScatter vscat;

1235:     PetscCall(DMGetCoordinateDM(dmf, &cdaf));
1236:     PetscCall(DMGetCoordinateDM(dmc2, &cdac));
1237:     /* force creation of the coordinate vector */
1238:     PetscCall(DMDASetUniformCoordinates(dmc2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
1239:     PetscCall(DMGetCoordinates(dmc2, &coordsc));

1241:     PetscCall(DMCreateInjection(cdac, cdaf, &inject));
1242:     PetscCall(MatScatterGetVecScatter(inject, &vscat));
1243:     PetscCall(VecScatterBegin(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
1244:     PetscCall(VecScatterEnd(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
1245:     PetscCall(MatDestroy(&inject));
1246:   }

1248:   for (i = 0; i < dmf->bs; i++) {
1249:     const char *fieldname;
1250:     PetscCall(DMDAGetFieldName(dmf, i, &fieldname));
1251:     PetscCall(DMDASetFieldName(dmc2, i, fieldname));
1252:   }

1254:   *dmc = dmc2;
1255:   PetscFunctionReturn(PETSC_SUCCESS);
1256: }

1258: PetscErrorCode DMRefineHierarchy_DA(DM da, PetscInt nlevels, DM daf[])
1259: {
1260:   PetscInt i, n, *refx, *refy, *refz;

1262:   PetscFunctionBegin;
1264:   PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1265:   if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);

1268:   /* Get refinement factors, defaults taken from the coarse DMDA */
1269:   PetscCall(PetscMalloc3(nlevels, &refx, nlevels, &refy, nlevels, &refz));
1270:   for (i = 0; i < nlevels; i++) PetscCall(DMDAGetRefinementFactor(da, &refx[i], &refy[i], &refz[i]));
1271:   n = nlevels;
1272:   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_x", refx, &n, NULL));
1273:   n = nlevels;
1274:   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_y", refy, &n, NULL));
1275:   n = nlevels;
1276:   PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_z", refz, &n, NULL));

1278:   PetscCall(DMDASetRefinementFactor(da, refx[0], refy[0], refz[0]));
1279:   PetscCall(DMRefine(da, PetscObjectComm((PetscObject)da), &daf[0]));
1280:   for (i = 1; i < nlevels; i++) {
1281:     PetscCall(DMDASetRefinementFactor(daf[i - 1], refx[i], refy[i], refz[i]));
1282:     PetscCall(DMRefine(daf[i - 1], PetscObjectComm((PetscObject)da), &daf[i]));
1283:   }
1284:   PetscCall(PetscFree3(refx, refy, refz));
1285:   PetscFunctionReturn(PETSC_SUCCESS);
1286: }

1288: PetscErrorCode DMCoarsenHierarchy_DA(DM da, PetscInt nlevels, DM dac[])
1289: {
1290:   PetscInt i;

1292:   PetscFunctionBegin;
1294:   PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1295:   if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);
1297:   PetscCall(DMCoarsen(da, PetscObjectComm((PetscObject)da), &dac[0]));
1298:   for (i = 1; i < nlevels; i++) PetscCall(DMCoarsen(dac[i - 1], PetscObjectComm((PetscObject)da), &dac[i]));
1299:   PetscFunctionReturn(PETSC_SUCCESS);
1300: }

1302: PetscErrorCode DMDASetGLLCoordinates_1d(DM dm, PetscInt n, PetscReal *nodes)
1303: {
1304:   PetscInt     i, j, xs, xn, q;
1305:   PetscScalar *xx;
1306:   PetscReal    h;
1307:   Vec          x;
1308:   DM_DA       *da = (DM_DA *)dm->data;

1310:   PetscFunctionBegin;
1311:   if (da->bx != DM_BOUNDARY_PERIODIC) {
1312:     PetscCall(DMDAGetInfo(dm, NULL, &q, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
1313:     q = (q - 1) / (n - 1); /* number of spectral elements */
1314:     h = 2.0 / q;
1315:     PetscCall(DMDAGetCorners(dm, &xs, NULL, NULL, &xn, NULL, NULL));
1316:     xs = xs / (n - 1);
1317:     xn = xn / (n - 1);
1318:     PetscCall(DMDASetUniformCoordinates(dm, -1., 1., 0., 0., 0., 0.));
1319:     PetscCall(DMGetCoordinates(dm, &x));
1320:     PetscCall(DMDAVecGetArray(dm, x, &xx));

1322:     /* loop over local spectral elements */
1323:     for (j = xs; j < xs + xn; j++) {
1324:       /*
1325:        Except for the first process, each process starts on the second GLL point of the first element on that process
1326:        */
1327:       for (i = (j == xs && xs > 0) ? 1 : 0; i < n; i++) xx[j * (n - 1) + i] = -1.0 + h * j + h * (nodes[i] + 1.0) / 2.;
1328:     }
1329:     PetscCall(DMDAVecRestoreArray(dm, x, &xx));
1330:   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for periodic");
1331:   PetscFunctionReturn(PETSC_SUCCESS);
1332: }

1334: /*@

1336:      DMDASetGLLCoordinates - Sets the global coordinates, from -1 to 1, to the GLL points of as many GLL elements as fit the number of grid points

1338:    Collective

1340:    Input Parameters:
1341: +   da - the `DMDA` object
1342: .   n - the number of GLL nodes
1343: -   nodes - the GLL nodes

1345:    Level: advanced

1347:    Note:
1348:    The parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
1349:    on each process must be divisible by the number of GLL elements needed per process. This depends on whether the `DM` is
1350:    periodic or not.

1352: .seealso: `DM`, `DMDA`, `DMDACreate()`, `PetscDTGaussLobattoLegendreQuadrature()`, `DMGetCoordinates()`
1353: @*/
1354: PetscErrorCode DMDASetGLLCoordinates(DM da, PetscInt n, PetscReal *nodes)
1355: {
1356:   PetscFunctionBegin;
1357:   if (da->dim == 1) {
1358:     PetscCall(DMDASetGLLCoordinates_1d(da, n, nodes));
1359:   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for 2 or 3d");
1360:   PetscFunctionReturn(PETSC_SUCCESS);
1361: }
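/* A minimal sketch for a 1D non-periodic DMDA (n and the quadrature creation flag are illustrative;
   PetscDTGaussLobattoLegendreQuadrature() is assumed to fill the node and weight arrays as described
   on its manual page): the global grid size must equal q*(n-1) + 1 for an integer number of elements q.

     PetscInt   n = 4;
     PetscReal *nodes, *weights;
     PetscCall(PetscMalloc2(n, &nodes, n, &weights));
     PetscCall(PetscDTGaussLobattoLegendreQuadrature(n, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, nodes, weights));
     PetscCall(DMDASetGLLCoordinates(da, n, nodes));
     PetscCall(PetscFree2(nodes, weights));
*/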

1363: PETSC_INTERN PetscErrorCode DMGetCompatibility_DA(DM da1, DM dm2, PetscBool *compatible, PetscBool *set)
1364: {
1365:   DM_DA    *dd1 = (DM_DA *)da1->data, *dd2;
1366:   DM        da2;
1367:   DMType    dmtype2;
1368:   PetscBool isda, compatibleLocal;
1369:   PetscInt  i;

1371:   PetscFunctionBegin;
1372:   PetscCheck(da1->setupcalled, PetscObjectComm((PetscObject)da1), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on first DM before DMGetCompatibility()");
1373:   PetscCall(DMGetType(dm2, &dmtype2));
1374:   PetscCall(PetscStrcmp(dmtype2, DMDA, &isda));
1375:   if (isda) {
1376:     da2 = dm2;
1377:     dd2 = (DM_DA *)da2->data;
1378:     PetscCheck(da2->setupcalled, PetscObjectComm((PetscObject)da2), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on second DM before DMGetCompatibility()");
1379:     compatibleLocal = (PetscBool)(da1->dim == da2->dim);
1380:     if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->s == dd2->s)); /* Stencil width */
1381:     /*                                                                           Global size              ranks               Boundary type */
1382:     if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->M == dd2->M) && (dd1->m == dd2->m) && (dd1->bx == dd2->bx));
1383:     if (compatibleLocal && da1->dim > 1) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->N == dd2->N) && (dd1->n == dd2->n) && (dd1->by == dd2->by));
1384:     if (compatibleLocal && da1->dim > 2) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->P == dd2->P) && (dd1->p == dd2->p) && (dd1->bz == dd2->bz));
1385:     if (compatibleLocal) {
1386:       for (i = 0; i < dd1->m; ++i) { compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lx[i] == dd2->lx[i])); /* Local size     */ }
1387:     }
1388:     if (compatibleLocal && da1->dim > 1) {
1389:       for (i = 0; i < dd1->n; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->ly[i] == dd2->ly[i]));
1390:     }
1391:     if (compatibleLocal && da1->dim > 2) {
1392:       for (i = 0; i < dd1->p; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lz[i] == dd2->lz[i]));
1393:     }
1394:     *compatible = compatibleLocal;
1395:     *set        = PETSC_TRUE;
1396:   } else {
1397:     /* Decline to determine compatibility with other DM types */
1398:     *set = PETSC_FALSE;
1399:   }
1400:   PetscFunctionReturn(PETSC_SUCCESS);
1401: }