Actual source code: dainterp.c


  2: /*
  3:   Code for interpolating between grids represented by DMDAs
  4: */

  6: /*
  7:       For linear elements there are two branches of code to compute the interpolation; they should, and in current tests do, produce identical results. The "new version"
  8:    does not work for periodic domains, while the old one does, so the old version is compiled in by default. Change NEWVERSION to 1 to compile in the new version instead.
  9:    We are leaving NEWVERSION in the code for now since some consider the new version cleaner; once we are certain the two always produce identical results we will
 10:    merge them or remove the new version.
 11: */
 12: #define NEWVERSION 0

 14: #include <petsc/private/dmdaimpl.h>

 16: /*
 17:    Since the interpolation uses MATMAIJ for dof > 0, we convert requests for non-MATAIJ-based matrices to MATAIJ.
 18:    This is a bit of a hack; the reason for it is partly that -dm_mat_type defines the
 19:    matrix type for both the operator matrices and the interpolation matrices, so that users
 20:    can select MATAIJ-based matrix types for accelerators.
 21: */
 22: static PetscErrorCode ConvertToAIJ(MatType intype, MatType *outtype)
 23: {
 24:   PetscInt    i;
 25:   char const *types[3] = {MATAIJ, MATSEQAIJ, MATMPIAIJ};
 26:   PetscBool   flg;

 28:   PetscFunctionBegin;
 29:   *outtype = MATAIJ;
 30:   for (i = 0; i < 3; i++) {
 31:     PetscCall(PetscStrbeginswith(intype, types[i], &flg));
 32:     if (flg) {
 33:       *outtype = intype;
 34:       PetscFunctionReturn(PETSC_SUCCESS);
 35:     }
 36:   }
 37:   PetscFunctionReturn(PETSC_SUCCESS);
 38: }
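
/*
   Illustrative usage sketch (not part of the original source): how ConvertToAIJ() above maps a few
   requested matrix types. The particular type names are just examples of AIJ-based and non-AIJ types.

     MatType out;
     ConvertToAIJ(MATAIJCUSPARSE, &out);  // "aijcusparse" begins with "aij"            -> out = MATAIJCUSPARSE
     ConvertToAIJ(MATMPIAIJ, &out);       // begins with "mpiaij"                       -> out = MATMPIAIJ
     ConvertToAIJ(MATBAIJ, &out);         // matches none of aij/seqaij/mpiaij prefixes -> out = MATAIJ (fallback)
*/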

 40: PetscErrorCode DMCreateInterpolation_DA_1D_Q1(DM dac, DM daf, Mat *A)
 41: {
 42:   PetscInt               i, i_start, m_f, Mx;
 43:   const PetscInt        *idx_f, *idx_c;
 44:   PetscInt               m_ghost, m_ghost_c;
 45:   PetscInt               row, col, i_start_ghost, mx, m_c, nc, ratio;
 46:   PetscInt               i_c, i_start_c, i_start_ghost_c, cols[2], dof;
 47:   PetscScalar            v[2], x;
 48:   Mat                    mat;
 49:   DMBoundaryType         bx;
 50:   ISLocalToGlobalMapping ltog_f, ltog_c;
 51:   MatType                mattype;

 53:   PetscFunctionBegin;
 54:   PetscCall(DMDAGetInfo(dac, NULL, &Mx, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bx, NULL, NULL, NULL));
 55:   PetscCall(DMDAGetInfo(daf, NULL, &mx, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
 56:   if (bx == DM_BOUNDARY_PERIODIC) {
 57:     ratio = mx / Mx;
 58:     PetscCheck(ratio * Mx == mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: mx/Mx  must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
 59:   } else {
 60:     ratio = (mx - 1) / (Mx - 1);
 61:     PetscCheck(ratio * (Mx - 1) == mx - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
 62:   }

 64:   PetscCall(DMDAGetCorners(daf, &i_start, NULL, NULL, &m_f, NULL, NULL));
 65:   PetscCall(DMDAGetGhostCorners(daf, &i_start_ghost, NULL, NULL, &m_ghost, NULL, NULL));
 66:   PetscCall(DMGetLocalToGlobalMapping(daf, &ltog_f));
 67:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_f, &idx_f));

 69:   PetscCall(DMDAGetCorners(dac, &i_start_c, NULL, NULL, &m_c, NULL, NULL));
 70:   PetscCall(DMDAGetGhostCorners(dac, &i_start_ghost_c, NULL, NULL, &m_ghost_c, NULL, NULL));
 71:   PetscCall(DMGetLocalToGlobalMapping(dac, &ltog_c));
 72:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_c, &idx_c));

 74:   /* create interpolation matrix */
 75:   PetscCall(MatCreate(PetscObjectComm((PetscObject)dac), &mat));
 76: #if defined(PETSC_HAVE_CUDA)
 77:   /*
 78:      Temporary hack: since the MAIJ matrix must be converted to AIJ before being used on the GPU,
 79:      we don't want the original unconverted matrix copied to the GPU.
 80:    */
 81:   if (dof > 1) PetscCall(MatBindToCPU(mat, PETSC_TRUE));
 82: #endif
 83:   PetscCall(MatSetSizes(mat, m_f, m_c, mx, Mx));
 84:   PetscCall(ConvertToAIJ(dac->mattype, &mattype));
 85:   PetscCall(MatSetType(mat, mattype));
 86:   PetscCall(MatSeqAIJSetPreallocation(mat, 2, NULL));
 87:   PetscCall(MatMPIAIJSetPreallocation(mat, 2, NULL, 1, NULL));

 89:   /* loop over local fine grid nodes, setting the interpolation for each of them */
 90:   if (!NEWVERSION) {
 91:     for (i = i_start; i < i_start + m_f; i++) {
 92:       /* convert to local "natural" numbering and then to PETSc global numbering */
 93:       row = idx_f[i - i_start_ghost];

 95:       i_c = (i / ratio); /* coarse grid node to left of fine grid node */
 96:       PetscCheck(i_c >= i_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
 97:                                           i_start %" PetscInt_FMT " i_c %" PetscInt_FMT " i_start_ghost_c %" PetscInt_FMT,
 98:                  i_start, i_c, i_start_ghost_c);

100:       /*
101:        Only include those interpolation points that are truly
102:        nonzero. Note this is very important for the final grid point
103:        in the x direction, since it has no right neighbor.
104:        */
105:       x  = ((PetscReal)(i - i_c * ratio)) / ((PetscReal)ratio);
106:       nc = 0;
107:       /* coarse node to the left; or we are right on it */
108:       col      = (i_c - i_start_ghost_c);
109:       cols[nc] = idx_c[col];
110:       v[nc++]  = -x + 1.0;
111:       /* one right? */
112:       if (i_c * ratio != i) {
113:         cols[nc] = idx_c[col + 1];
114:         v[nc++]  = x;
115:       }
116:       PetscCall(MatSetValues(mat, 1, &row, nc, cols, v, INSERT_VALUES));
117:     }

119:   } else {
120:     PetscScalar *xi;
121:     PetscInt     li, nxi, n;
122:     PetscScalar  Ni[2];

124:     /* compute local coordinate arrays */
125:     nxi = ratio + 1;
126:     PetscCall(PetscMalloc1(nxi, &xi));
127:     for (li = 0; li < nxi; li++) xi[li] = -1.0 + (PetscScalar)li * (2.0 / (PetscScalar)(nxi - 1));

129:     for (i = i_start; i < i_start + m_f; i++) {
130:       /* convert to local "natural" numbering and then to PETSc global numbering */
131:       row = idx_f[(i - i_start_ghost)];

133:       i_c = (i / ratio); /* coarse grid node to left of fine grid node */
134:       PetscCheck(i_c >= i_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
135:                                           i_start %" PetscInt_FMT " i_c %" PetscInt_FMT " i_start_ghost_c %" PetscInt_FMT,
136:                  i_start, i_c, i_start_ghost_c);

138:       /* remainders */
139:       li = i - ratio * (i / ratio);
140:       if (i == mx - 1) li = nxi - 1;

142:       /* corners */
143:       col     = (i_c - i_start_ghost_c);
144:       cols[0] = idx_c[col];
145:       Ni[0]   = 1.0;
146:       if ((li == 0) || (li == nxi - 1)) {
147:         PetscCall(MatSetValue(mat, row, cols[0], Ni[0], INSERT_VALUES));
148:         continue;
149:       }

151:       /* edges + interior */
152:       /* remainders */
153:       if (i == mx - 1) i_c--;

155:       col     = (i_c - i_start_ghost_c);
156:       cols[0] = idx_c[col]; /* coarse node to the left; or we are right on it */
157:       cols[1] = idx_c[col + 1];

159:       Ni[0] = 0.5 * (1.0 - xi[li]);
160:       Ni[1] = 0.5 * (1.0 + xi[li]);
161:       for (n = 0; n < 2; n++) {
162:         if (PetscAbsScalar(Ni[n]) < 1.0e-32) cols[n] = -1;
163:       }
164:       PetscCall(MatSetValues(mat, 1, &row, 2, cols, Ni, INSERT_VALUES));
165:     }
166:     PetscCall(PetscFree(xi));
167:   }
168:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_f, &idx_f));
169:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_c, &idx_c));
170:   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
171:   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
172:   PetscCall(MatCreateMAIJ(mat, dof, A));
173:   PetscCall(MatDestroy(&mat));
174:   PetscFunctionReturn(PETSC_SUCCESS);
175: }
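
/*
   Worked example (illustrative only) of the weights set by the loops above for ratio = 2, i.e. a
   coarse grid of Mx points refined to mx = 2*Mx - 1 points:

     fine node i = 2*k     : i_c = k, x = 0.0 -> one entry,   weight  {1.0}       (coincides with coarse node k)
     fine node i = 2*k + 1 : i_c = k, x = 0.5 -> two entries, weights {0.5, 0.5}  (average of coarse nodes k and k+1)
*/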

177: PetscErrorCode DMCreateInterpolation_DA_1D_Q0(DM dac, DM daf, Mat *A)
178: {
179:   PetscInt               i, i_start, m_f, Mx;
180:   const PetscInt        *idx_f, *idx_c;
181:   ISLocalToGlobalMapping ltog_f, ltog_c;
182:   PetscInt               m_ghost, m_ghost_c;
183:   PetscInt               row, col, i_start_ghost, mx, m_c, nc, ratio;
184:   PetscInt               i_c, i_start_c, i_start_ghost_c, cols[2], dof;
185:   PetscScalar            v[2], x;
186:   Mat                    mat;
187:   DMBoundaryType         bx;
188:   MatType                mattype;

190:   PetscFunctionBegin;
191:   PetscCall(DMDAGetInfo(dac, NULL, &Mx, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bx, NULL, NULL, NULL));
192:   PetscCall(DMDAGetInfo(daf, NULL, &mx, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
193:   if (bx == DM_BOUNDARY_PERIODIC) {
194:     PetscCheck(Mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of x coarse grid points %" PetscInt_FMT " must be positive", Mx);
195:     ratio = mx / Mx;
196:     PetscCheck(ratio * Mx == mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: mx/Mx  must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
197:   } else {
198:     PetscCheck(Mx >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of x coarse grid points %" PetscInt_FMT " must be greater than 1", Mx);
199:     ratio = (mx - 1) / (Mx - 1);
200:     PetscCheck(ratio * (Mx - 1) == mx - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
201:   }

203:   PetscCall(DMDAGetCorners(daf, &i_start, NULL, NULL, &m_f, NULL, NULL));
204:   PetscCall(DMDAGetGhostCorners(daf, &i_start_ghost, NULL, NULL, &m_ghost, NULL, NULL));
205:   PetscCall(DMGetLocalToGlobalMapping(daf, &ltog_f));
206:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_f, &idx_f));

208:   PetscCall(DMDAGetCorners(dac, &i_start_c, NULL, NULL, &m_c, NULL, NULL));
209:   PetscCall(DMDAGetGhostCorners(dac, &i_start_ghost_c, NULL, NULL, &m_ghost_c, NULL, NULL));
210:   PetscCall(DMGetLocalToGlobalMapping(dac, &ltog_c));
211:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_c, &idx_c));

213:   /* create interpolation matrix */
214:   PetscCall(MatCreate(PetscObjectComm((PetscObject)dac), &mat));
215: #if defined(PETSC_HAVE_CUDA)
216:   /*
 217:      Temporary hack: since the MAIJ matrix must be converted to AIJ before being used on the GPU,
 218:      we don't want the original unconverted matrix copied to the GPU.
219:    */
220:   if (dof > 1) PetscCall(MatBindToCPU(mat, PETSC_TRUE));
221: #endif
222:   PetscCall(MatSetSizes(mat, m_f, m_c, mx, Mx));
223:   PetscCall(ConvertToAIJ(dac->mattype, &mattype));
224:   PetscCall(MatSetType(mat, mattype));
225:   PetscCall(MatSeqAIJSetPreallocation(mat, 2, NULL));
226:   PetscCall(MatMPIAIJSetPreallocation(mat, 2, NULL, 0, NULL));

 228:   /* loop over local fine grid nodes, setting the interpolation for each of them */
229:   for (i = i_start; i < i_start + m_f; i++) {
230:     /* convert to local "natural" numbering and then to PETSc global numbering */
231:     row = idx_f[(i - i_start_ghost)];

233:     i_c = (i / ratio); /* coarse grid node to left of fine grid node */

235:     /*
236:          Only include those interpolation points that are truly
237:          nonzero. Note this is very important for the final grid point
238:          in the x direction, since it has no right neighbor.
239:     */
240:     x  = ((PetscReal)(i - i_c * ratio)) / ((PetscReal)ratio);
241:     nc = 0;
242:     /* coarse node to the left; or we are right on it */
243:     col      = (i_c - i_start_ghost_c);
244:     cols[nc] = idx_c[col];
245:     v[nc++]  = -x + 1.0;
246:     /* one right? */
247:     if (i_c * ratio != i) {
248:       cols[nc] = idx_c[col + 1];
249:       v[nc++]  = x;
250:     }
251:     PetscCall(MatSetValues(mat, 1, &row, nc, cols, v, INSERT_VALUES));
252:   }
253:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_f, &idx_f));
254:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_c, &idx_c));
255:   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
256:   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
257:   PetscCall(MatCreateMAIJ(mat, dof, A));
258:   PetscCall(MatDestroy(&mat));
259:   PetscCall(PetscLogFlops(5.0 * m_f));
260:   PetscFunctionReturn(PETSC_SUCCESS);
261: }
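
/*
   Minimal usage sketch (illustrative, with a hypothetical pair of compatible 1d DMDAs dac and daf):
   the Q0 variant above is selected through the DMDA interpolation type before calling DMCreateInterpolation().

     Mat A;
     PetscCall(DMDASetInterpolationType(dac, DMDA_Q0));    // DMDA_Q1 is the usual default
     PetscCall(DMCreateInterpolation(dac, daf, &A, NULL)); // dispatches to DMCreateInterpolation_DA_1D_Q0()
*/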

263: PetscErrorCode DMCreateInterpolation_DA_2D_Q1(DM dac, DM daf, Mat *A)
264: {
265:   PetscInt               i, j, i_start, j_start, m_f, n_f, Mx, My, dof;
266:   const PetscInt        *idx_c, *idx_f;
267:   ISLocalToGlobalMapping ltog_f, ltog_c;
268:   PetscInt               m_ghost, n_ghost, m_ghost_c, n_ghost_c, *dnz, *onz;
269:   PetscInt               row, col, i_start_ghost, j_start_ghost, cols[4], mx, m_c, my, nc, ratioi, ratioj;
270:   PetscInt               i_c, j_c, i_start_c, j_start_c, n_c, i_start_ghost_c, j_start_ghost_c, col_shift, col_scale;
271:   PetscMPIInt            size_c, size_f, rank_f;
272:   PetscScalar            v[4], x, y;
273:   Mat                    mat;
274:   DMBoundaryType         bx, by;
275:   MatType                mattype;

277:   PetscFunctionBegin;
278:   PetscCall(DMDAGetInfo(dac, NULL, &Mx, &My, NULL, NULL, NULL, NULL, NULL, NULL, &bx, &by, NULL, NULL));
279:   PetscCall(DMDAGetInfo(daf, NULL, &mx, &my, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
280:   if (bx == DM_BOUNDARY_PERIODIC) {
281:     PetscCheck(Mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of x coarse grid points %" PetscInt_FMT " must be positive", Mx);
282:     ratioi = mx / Mx;
283:     PetscCheck(ratioi * Mx == mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: mx/Mx  must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
284:   } else {
285:     PetscCheck(Mx >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of x coarse grid points %" PetscInt_FMT " must be greater than 1", Mx);
286:     ratioi = (mx - 1) / (Mx - 1);
287:     PetscCheck(ratioi * (Mx - 1) == mx - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
288:   }
289:   if (by == DM_BOUNDARY_PERIODIC) {
290:     PetscCheck(My, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of y coarse grid points %" PetscInt_FMT " must be positive", My);
291:     ratioj = my / My;
292:     PetscCheck(ratioj * My == my, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: my/My  must be integer: my %" PetscInt_FMT " My %" PetscInt_FMT, my, My);
293:   } else {
294:     PetscCheck(My >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of y coarse grid points %" PetscInt_FMT " must be greater than 1", My);
295:     ratioj = (my - 1) / (My - 1);
296:     PetscCheck(ratioj * (My - 1) == my - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (my - 1)/(My - 1) must be integer: my %" PetscInt_FMT " My %" PetscInt_FMT, my, My);
297:   }

299:   PetscCall(DMDAGetCorners(daf, &i_start, &j_start, NULL, &m_f, &n_f, NULL));
300:   PetscCall(DMDAGetGhostCorners(daf, &i_start_ghost, &j_start_ghost, NULL, &m_ghost, &n_ghost, NULL));
301:   PetscCall(DMGetLocalToGlobalMapping(daf, &ltog_f));
302:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_f, &idx_f));

304:   PetscCall(DMDAGetCorners(dac, &i_start_c, &j_start_c, NULL, &m_c, &n_c, NULL));
305:   PetscCall(DMDAGetGhostCorners(dac, &i_start_ghost_c, &j_start_ghost_c, NULL, &m_ghost_c, &n_ghost_c, NULL));
306:   PetscCall(DMGetLocalToGlobalMapping(dac, &ltog_c));
307:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_c, &idx_c));

309:   /*
 310:    Used for handling a coarse DMDA that lives on 1/4 of the processors of the fine DMDA.
 311:    The coarse vector is then duplicated 4 times (each copy lives on 1/4 of the
 312:    processors). Its effective length is hence 4 times its normal length; this is
 313:    why col_scale multiplies the interpolation matrix column sizes.
 314:    col_shift allows each set of 1/4 of the processors to do its own interpolation using ITS
 315:    copy of the coarse vector. A bit of a hack, but it is hard to do better.

 317:    In the standard case, when size_f == size_c, col_scale == 1 and col_shift == 0.
318:    */
319:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dac), &size_c));
320:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)daf), &size_f));
321:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)daf), &rank_f));
322:   col_scale = size_f / size_c;
323:   col_shift = Mx * My * (rank_f / size_c);
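  /*
     Illustrative numbers (assuming the 4:1 process layout described above): with size_f = 4, size_c = 1
     and a 5 x 5 coarse grid (Mx = My = 5), col_scale = 4 and col_shift = 25 * rank_f, so each fine rank
     addresses its own copy of the length-25 coarse vector. When size_f == size_c this reduces to
     col_scale = 1 and col_shift = 0.
  */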

325:   MatPreallocateBegin(PetscObjectComm((PetscObject)daf), m_f * n_f, col_scale * m_c * n_c, dnz, onz);
326:   for (j = j_start; j < j_start + n_f; j++) {
327:     for (i = i_start; i < i_start + m_f; i++) {
328:       /* convert to local "natural" numbering and then to PETSc global numbering */
329:       row = idx_f[(m_ghost * (j - j_start_ghost) + (i - i_start_ghost))];

331:       i_c = (i / ratioi); /* coarse grid node to left of fine grid node */
332:       j_c = (j / ratioj); /* coarse grid node below fine grid node */

334:       PetscCheck(j_c >= j_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
335:                                           j_start %" PetscInt_FMT " j_c %" PetscInt_FMT " j_start_ghost_c %" PetscInt_FMT,
336:                  j_start, j_c, j_start_ghost_c);
337:       PetscCheck(i_c >= i_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
338:                                           i_start %" PetscInt_FMT " i_c %" PetscInt_FMT " i_start_ghost_c %" PetscInt_FMT,
339:                  i_start, i_c, i_start_ghost_c);

341:       /*
342:        Only include those interpolation points that are truly
343:        nonzero. Note this is very important for the final grid lines
344:        in the x and y directions, since they have no right/top neighbors.
345:        */
346:       nc = 0;
347:       /* one left and below; or we are right on it */
348:       col        = (m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));
349:       cols[nc++] = col_shift + idx_c[col];
350:       /* one right and below */
351:       if (i_c * ratioi != i) cols[nc++] = col_shift + idx_c[col + 1];
352:       /* one left and above */
353:       if (j_c * ratioj != j) cols[nc++] = col_shift + idx_c[col + m_ghost_c];
354:       /* one right and above */
355:       if (i_c * ratioi != i && j_c * ratioj != j) cols[nc++] = col_shift + idx_c[col + (m_ghost_c + 1)];
356:       PetscCall(MatPreallocateSet(row, nc, cols, dnz, onz));
357:     }
358:   }
359:   PetscCall(MatCreate(PetscObjectComm((PetscObject)daf), &mat));
360: #if defined(PETSC_HAVE_CUDA)
361:   /*
 362:      Temporary hack: since the MAIJ matrix must be converted to AIJ before being used on the GPU,
 363:      we don't want the original unconverted matrix copied to the GPU.
364:   */
365:   if (dof > 1) PetscCall(MatBindToCPU(mat, PETSC_TRUE));
366: #endif
367:   PetscCall(MatSetSizes(mat, m_f * n_f, col_scale * m_c * n_c, mx * my, col_scale * Mx * My));
368:   PetscCall(ConvertToAIJ(dac->mattype, &mattype));
369:   PetscCall(MatSetType(mat, mattype));
370:   PetscCall(MatSeqAIJSetPreallocation(mat, 0, dnz));
371:   PetscCall(MatMPIAIJSetPreallocation(mat, 0, dnz, 0, onz));
372:   MatPreallocateEnd(dnz, onz);

 374:   /* loop over local fine grid nodes, setting the interpolation for each of them */
375:   if (!NEWVERSION) {
376:     for (j = j_start; j < j_start + n_f; j++) {
377:       for (i = i_start; i < i_start + m_f; i++) {
378:         /* convert to local "natural" numbering and then to PETSc global numbering */
379:         row = idx_f[(m_ghost * (j - j_start_ghost) + (i - i_start_ghost))];

381:         i_c = (i / ratioi); /* coarse grid node to left of fine grid node */
382:         j_c = (j / ratioj); /* coarse grid node below fine grid node */

384:         /*
385:          Only include those interpolation points that are truly
386:          nonzero. Note this is very important for the final grid lines
387:          in the x and y directions, since they have no right/top neighbors.
388:          */
389:         x = ((PetscReal)(i - i_c * ratioi)) / ((PetscReal)ratioi);
390:         y = ((PetscReal)(j - j_c * ratioj)) / ((PetscReal)ratioj);

392:         nc = 0;
393:         /* one left and below; or we are right on it */
394:         col      = (m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));
395:         cols[nc] = col_shift + idx_c[col];
396:         v[nc++]  = x * y - x - y + 1.0;
397:         /* one right and below */
398:         if (i_c * ratioi != i) {
399:           cols[nc] = col_shift + idx_c[col + 1];
400:           v[nc++]  = -x * y + x;
401:         }
402:         /* one left and above */
403:         if (j_c * ratioj != j) {
404:           cols[nc] = col_shift + idx_c[col + m_ghost_c];
405:           v[nc++]  = -x * y + y;
406:         }
407:         /* one right and above */
408:         if (j_c * ratioj != j && i_c * ratioi != i) {
409:           cols[nc] = col_shift + idx_c[col + (m_ghost_c + 1)];
410:           v[nc++]  = x * y;
411:         }
412:         PetscCall(MatSetValues(mat, 1, &row, nc, cols, v, INSERT_VALUES));
413:       }
414:     }

416:   } else {
417:     PetscScalar  Ni[4];
418:     PetscScalar *xi, *eta;
419:     PetscInt     li, nxi, lj, neta;

421:     /* compute local coordinate arrays */
422:     nxi  = ratioi + 1;
423:     neta = ratioj + 1;
424:     PetscCall(PetscMalloc1(nxi, &xi));
425:     PetscCall(PetscMalloc1(neta, &eta));
426:     for (li = 0; li < nxi; li++) xi[li] = -1.0 + (PetscScalar)li * (2.0 / (PetscScalar)(nxi - 1));
427:     for (lj = 0; lj < neta; lj++) eta[lj] = -1.0 + (PetscScalar)lj * (2.0 / (PetscScalar)(neta - 1));

 429:     /* loop over local fine grid nodes, setting the interpolation for each of them */
430:     for (j = j_start; j < j_start + n_f; j++) {
431:       for (i = i_start; i < i_start + m_f; i++) {
432:         /* convert to local "natural" numbering and then to PETSc global numbering */
433:         row = idx_f[(m_ghost * (j - j_start_ghost) + (i - i_start_ghost))];

435:         i_c = (i / ratioi); /* coarse grid node to left of fine grid node */
436:         j_c = (j / ratioj); /* coarse grid node below fine grid node */

438:         /* remainders */
439:         li = i - ratioi * (i / ratioi);
440:         if (i == mx - 1) li = nxi - 1;
441:         lj = j - ratioj * (j / ratioj);
442:         if (j == my - 1) lj = neta - 1;

444:         /* corners */
445:         col     = (m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));
446:         cols[0] = col_shift + idx_c[col]; /* left, below */
447:         Ni[0]   = 1.0;
448:         if ((li == 0) || (li == nxi - 1)) {
449:           if ((lj == 0) || (lj == neta - 1)) {
450:             PetscCall(MatSetValue(mat, row, cols[0], Ni[0], INSERT_VALUES));
451:             continue;
452:           }
453:         }

455:         /* edges + interior */
456:         /* remainders */
457:         if (i == mx - 1) i_c--;
458:         if (j == my - 1) j_c--;

460:         col     = (m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));
461:         cols[0] = col_shift + idx_c[col];                   /* left, below */
462:         cols[1] = col_shift + idx_c[col + 1];               /* right, below */
463:         cols[2] = col_shift + idx_c[col + m_ghost_c];       /* left, above */
464:         cols[3] = col_shift + idx_c[col + (m_ghost_c + 1)]; /* right, above */

466:         Ni[0] = 0.25 * (1.0 - xi[li]) * (1.0 - eta[lj]);
467:         Ni[1] = 0.25 * (1.0 + xi[li]) * (1.0 - eta[lj]);
468:         Ni[2] = 0.25 * (1.0 - xi[li]) * (1.0 + eta[lj]);
469:         Ni[3] = 0.25 * (1.0 + xi[li]) * (1.0 + eta[lj]);

471:         nc = 0;
472:         if (PetscAbsScalar(Ni[0]) < 1.0e-32) cols[0] = -1;
473:         if (PetscAbsScalar(Ni[1]) < 1.0e-32) cols[1] = -1;
474:         if (PetscAbsScalar(Ni[2]) < 1.0e-32) cols[2] = -1;
475:         if (PetscAbsScalar(Ni[3]) < 1.0e-32) cols[3] = -1;

477:         PetscCall(MatSetValues(mat, 1, &row, 4, cols, Ni, INSERT_VALUES));
478:       }
479:     }
480:     PetscCall(PetscFree(xi));
481:     PetscCall(PetscFree(eta));
482:   }
483:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_f, &idx_f));
484:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_c, &idx_c));
485:   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
486:   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
487:   PetscCall(MatCreateMAIJ(mat, dof, A));
488:   PetscCall(MatDestroy(&mat));
489:   PetscFunctionReturn(PETSC_SUCCESS);
490: }
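
/*
   Worked example (illustrative only) of the bilinear weights set above for ratioi = ratioj = 2; the
   four entries are the tensor product of the 1d weights, i.e. (1-x)(1-y), x(1-y), (1-x)y, x*y:

     fine node on a coarse node          (x = 0.0, y = 0.0) -> one entry,    weight  1.0
     fine node on a coarse edge midpoint (x = 0.5, y = 0.0) -> two entries,  weights 0.5, 0.5
     fine node at a coarse cell center   (x = 0.5, y = 0.5) -> four entries, weights 0.25 each
*/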

492: /*
493:        Contributed by Andrei Draganescu <aidraga@sandia.gov>
494: */
495: PetscErrorCode DMCreateInterpolation_DA_2D_Q0(DM dac, DM daf, Mat *A)
496: {
497:   PetscInt               i, j, i_start, j_start, m_f, n_f, Mx, My, dof;
498:   const PetscInt        *idx_c, *idx_f;
499:   ISLocalToGlobalMapping ltog_f, ltog_c;
500:   PetscInt               m_ghost, n_ghost, m_ghost_c, n_ghost_c, *dnz, *onz;
501:   PetscInt               row, col, i_start_ghost, j_start_ghost, cols[4], mx, m_c, my, nc, ratioi, ratioj;
502:   PetscInt               i_c, j_c, i_start_c, j_start_c, n_c, i_start_ghost_c, j_start_ghost_c, col_shift, col_scale;
503:   PetscMPIInt            size_c, size_f, rank_f;
504:   PetscScalar            v[4];
505:   Mat                    mat;
506:   DMBoundaryType         bx, by;
507:   MatType                mattype;

509:   PetscFunctionBegin;
510:   PetscCall(DMDAGetInfo(dac, NULL, &Mx, &My, NULL, NULL, NULL, NULL, NULL, NULL, &bx, &by, NULL, NULL));
511:   PetscCall(DMDAGetInfo(daf, NULL, &mx, &my, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
512:   PetscCheck(Mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of x coarse grid points %" PetscInt_FMT " must be positive", Mx);
513:   PetscCheck(My, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of y coarse grid points %" PetscInt_FMT " must be positive", My);
514:   ratioi = mx / Mx;
515:   ratioj = my / My;
516:   PetscCheck(ratioi * Mx == mx, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_WRONG, "Fine grid points must be multiple of coarse grid points in x");
517:   PetscCheck(ratioj * My == my, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_WRONG, "Fine grid points must be multiple of coarse grid points in y");
518:   PetscCheck(ratioi == 2, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_WRONG, "Coarsening factor in x must be 2");
519:   PetscCheck(ratioj == 2, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_WRONG, "Coarsening factor in y must be 2");

521:   PetscCall(DMDAGetCorners(daf, &i_start, &j_start, NULL, &m_f, &n_f, NULL));
522:   PetscCall(DMDAGetGhostCorners(daf, &i_start_ghost, &j_start_ghost, NULL, &m_ghost, &n_ghost, NULL));
523:   PetscCall(DMGetLocalToGlobalMapping(daf, &ltog_f));
524:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_f, &idx_f));

526:   PetscCall(DMDAGetCorners(dac, &i_start_c, &j_start_c, NULL, &m_c, &n_c, NULL));
527:   PetscCall(DMDAGetGhostCorners(dac, &i_start_ghost_c, &j_start_ghost_c, NULL, &m_ghost_c, &n_ghost_c, NULL));
528:   PetscCall(DMGetLocalToGlobalMapping(dac, &ltog_c));
529:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_c, &idx_c));

531:   /*
 532:      Used for handling a coarse DMDA that lives on 1/4 of the processors of the fine DMDA.
 533:      The coarse vector is then duplicated 4 times (each copy lives on 1/4 of the
 534:      processors). Its effective length is hence 4 times its normal length; this is
 535:      why col_scale multiplies the interpolation matrix column sizes.
 536:      col_shift allows each set of 1/4 of the processors to do its own interpolation using ITS
 537:      copy of the coarse vector. A bit of a hack, but it is hard to do better.

 539:      In the standard case, when size_f == size_c, col_scale == 1 and col_shift == 0.
540:   */
541:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dac), &size_c));
542:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)daf), &size_f));
543:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)daf), &rank_f));
544:   col_scale = size_f / size_c;
545:   col_shift = Mx * My * (rank_f / size_c);

547:   MatPreallocateBegin(PetscObjectComm((PetscObject)daf), m_f * n_f, col_scale * m_c * n_c, dnz, onz);
548:   for (j = j_start; j < j_start + n_f; j++) {
549:     for (i = i_start; i < i_start + m_f; i++) {
550:       /* convert to local "natural" numbering and then to PETSc global numbering */
551:       row = idx_f[(m_ghost * (j - j_start_ghost) + (i - i_start_ghost))];

553:       i_c = (i / ratioi); /* coarse grid node to left of fine grid node */
554:       j_c = (j / ratioj); /* coarse grid node below fine grid node */

556:       PetscCheck(j_c >= j_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
557:     j_start %" PetscInt_FMT " j_c %" PetscInt_FMT " j_start_ghost_c %" PetscInt_FMT,
558:                  j_start, j_c, j_start_ghost_c);
559:       PetscCheck(i_c >= i_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
560:     i_start %" PetscInt_FMT " i_c %" PetscInt_FMT " i_start_ghost_c %" PetscInt_FMT,
561:                  i_start, i_c, i_start_ghost_c);

563:       /*
564:          Only include those interpolation points that are truly
565:          nonzero. Note this is very important for the final grid lines
566:          in the x and y directions, since they have no right/top neighbors.
567:       */
568:       nc = 0;
569:       /* one left and below; or we are right on it */
570:       col        = (m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));
571:       cols[nc++] = col_shift + idx_c[col];
572:       PetscCall(MatPreallocateSet(row, nc, cols, dnz, onz));
573:     }
574:   }
575:   PetscCall(MatCreate(PetscObjectComm((PetscObject)daf), &mat));
576: #if defined(PETSC_HAVE_CUDA)
577:   /*
 578:      Temporary hack: since the MAIJ matrix must be converted to AIJ before being used on the GPU,
 579:      we don't want the original unconverted matrix copied to the GPU.
580:   */
581:   if (dof > 1) PetscCall(MatBindToCPU(mat, PETSC_TRUE));
582: #endif
583:   PetscCall(MatSetSizes(mat, m_f * n_f, col_scale * m_c * n_c, mx * my, col_scale * Mx * My));
584:   PetscCall(ConvertToAIJ(dac->mattype, &mattype));
585:   PetscCall(MatSetType(mat, mattype));
586:   PetscCall(MatSeqAIJSetPreallocation(mat, 0, dnz));
587:   PetscCall(MatMPIAIJSetPreallocation(mat, 0, dnz, 0, onz));
588:   MatPreallocateEnd(dnz, onz);

 590:   /* loop over local fine grid nodes, setting the interpolation for each of them */
591:   for (j = j_start; j < j_start + n_f; j++) {
592:     for (i = i_start; i < i_start + m_f; i++) {
593:       /* convert to local "natural" numbering and then to PETSc global numbering */
594:       row = idx_f[(m_ghost * (j - j_start_ghost) + (i - i_start_ghost))];

596:       i_c = (i / ratioi); /* coarse grid node to left of fine grid node */
597:       j_c = (j / ratioj); /* coarse grid node below fine grid node */
598:       nc  = 0;
599:       /* one left and below; or we are right on it */
600:       col      = (m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));
601:       cols[nc] = col_shift + idx_c[col];
602:       v[nc++]  = 1.0;

604:       PetscCall(MatSetValues(mat, 1, &row, nc, cols, v, INSERT_VALUES));
605:     }
606:   }
607:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_f, &idx_f));
608:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_c, &idx_c));
609:   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
610:   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
611:   PetscCall(MatCreateMAIJ(mat, dof, A));
612:   PetscCall(MatDestroy(&mat));
613:   PetscCall(PetscLogFlops(13.0 * m_f * n_f));
614:   PetscFunctionReturn(PETSC_SUCCESS);
615: }
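
/*
   Illustrative summary of the piecewise constant (Q0) rule above: with the required ratioi = ratioj = 2,
   each fine node (i, j) receives the value of coarse cell (i/2, j/2) with weight 1.0, i.e. the value of
   coarse cell (i_c, j_c) is copied to the 2 x 2 block of fine nodes {2*i_c, 2*i_c + 1} x {2*j_c, 2*j_c + 1}.
*/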

617: /*
618:        Contributed by Jianming Yang <jianming-yang@uiowa.edu>
619: */
620: PetscErrorCode DMCreateInterpolation_DA_3D_Q0(DM dac, DM daf, Mat *A)
621: {
622:   PetscInt               i, j, l, i_start, j_start, l_start, m_f, n_f, p_f, Mx, My, Mz, dof;
623:   const PetscInt        *idx_c, *idx_f;
624:   ISLocalToGlobalMapping ltog_f, ltog_c;
625:   PetscInt               m_ghost, n_ghost, p_ghost, m_ghost_c, n_ghost_c, p_ghost_c, nc, *dnz, *onz;
626:   PetscInt               row, col, i_start_ghost, j_start_ghost, l_start_ghost, cols[8], mx, m_c, my, n_c, mz, p_c, ratioi, ratioj, ratiol;
627:   PetscInt               i_c, j_c, l_c, i_start_c, j_start_c, l_start_c, i_start_ghost_c, j_start_ghost_c, l_start_ghost_c, col_shift, col_scale;
628:   PetscMPIInt            size_c, size_f, rank_f;
629:   PetscScalar            v[8];
630:   Mat                    mat;
631:   DMBoundaryType         bx, by, bz;
632:   MatType                mattype;

634:   PetscFunctionBegin;
635:   PetscCall(DMDAGetInfo(dac, NULL, &Mx, &My, &Mz, NULL, NULL, NULL, NULL, NULL, &bx, &by, &bz, NULL));
636:   PetscCheck(Mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of x coarse grid points %" PetscInt_FMT " must be positive", Mx);
637:   PetscCheck(My, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of y coarse grid points %" PetscInt_FMT " must be positive", My);
638:   PetscCheck(Mz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of z coarse grid points %" PetscInt_FMT " must be positive", Mz);
639:   PetscCall(DMDAGetInfo(daf, NULL, &mx, &my, &mz, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
640:   ratioi = mx / Mx;
641:   ratioj = my / My;
642:   ratiol = mz / Mz;
643:   PetscCheck(ratioi * Mx == mx, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_WRONG, "Fine grid points must be multiple of coarse grid points in x");
644:   PetscCheck(ratioj * My == my, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_WRONG, "Fine grid points must be multiple of coarse grid points in y");
645:   PetscCheck(ratiol * Mz == mz, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_WRONG, "Fine grid points must be multiple of coarse grid points in z");
646:   PetscCheck(ratioi == 2 || ratioi == 1, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_WRONG, "Coarsening factor in x must be 1 or 2");
647:   PetscCheck(ratioj == 2 || ratioj == 1, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_WRONG, "Coarsening factor in y must be 1 or 2");
648:   PetscCheck(ratiol == 2 || ratiol == 1, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_WRONG, "Coarsening factor in z must be 1 or 2");

650:   PetscCall(DMDAGetCorners(daf, &i_start, &j_start, &l_start, &m_f, &n_f, &p_f));
651:   PetscCall(DMDAGetGhostCorners(daf, &i_start_ghost, &j_start_ghost, &l_start_ghost, &m_ghost, &n_ghost, &p_ghost));
652:   PetscCall(DMGetLocalToGlobalMapping(daf, &ltog_f));
653:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_f, &idx_f));

655:   PetscCall(DMDAGetCorners(dac, &i_start_c, &j_start_c, &l_start_c, &m_c, &n_c, &p_c));
656:   PetscCall(DMDAGetGhostCorners(dac, &i_start_ghost_c, &j_start_ghost_c, &l_start_ghost_c, &m_ghost_c, &n_ghost_c, &p_ghost_c));
657:   PetscCall(DMGetLocalToGlobalMapping(dac, &ltog_c));
658:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_c, &idx_c));

660:   /*
 661:      Used for handling a coarse DMDA that lives on 1/4 of the processors of the fine DMDA.
 662:      The coarse vector is then duplicated 4 times (each copy lives on 1/4 of the
 663:      processors). Its effective length is hence 4 times its normal length; this is
 664:      why col_scale multiplies the interpolation matrix column sizes.
 665:      col_shift allows each set of 1/4 of the processors to do its own interpolation using ITS
 666:      copy of the coarse vector. A bit of a hack, but it is hard to do better.

 668:      In the standard case, when size_f == size_c, col_scale == 1 and col_shift == 0.
669:   */
670:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dac), &size_c));
671:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)daf), &size_f));
672:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)daf), &rank_f));
673:   col_scale = size_f / size_c;
674:   col_shift = Mx * My * Mz * (rank_f / size_c);

676:   MatPreallocateBegin(PetscObjectComm((PetscObject)daf), m_f * n_f * p_f, col_scale * m_c * n_c * p_c, dnz, onz);
677:   for (l = l_start; l < l_start + p_f; l++) {
678:     for (j = j_start; j < j_start + n_f; j++) {
679:       for (i = i_start; i < i_start + m_f; i++) {
680:         /* convert to local "natural" numbering and then to PETSc global numbering */
681:         row = idx_f[(m_ghost * n_ghost * (l - l_start_ghost) + m_ghost * (j - j_start_ghost) + (i - i_start_ghost))];

683:         i_c = (i / ratioi); /* coarse grid node to left of fine grid node */
684:         j_c = (j / ratioj); /* coarse grid node below fine grid node */
685:         l_c = (l / ratiol);

687:         PetscCheck(l_c >= l_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
688:     l_start %" PetscInt_FMT " l_c %" PetscInt_FMT " l_start_ghost_c %" PetscInt_FMT,
689:                    l_start, l_c, l_start_ghost_c);
690:         PetscCheck(j_c >= j_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
691:     j_start %" PetscInt_FMT " j_c %" PetscInt_FMT " j_start_ghost_c %" PetscInt_FMT,
692:                    j_start, j_c, j_start_ghost_c);
693:         PetscCheck(i_c >= i_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
694:     i_start %" PetscInt_FMT " i_c %" PetscInt_FMT " i_start_ghost_c %" PetscInt_FMT,
695:                    i_start, i_c, i_start_ghost_c);

697:         /*
698:            Only include those interpolation points that are truly
699:            nonzero. Note this is very important for the final grid lines
700:            in the x and y directions, since they have no right/top neighbors.
701:         */
702:         nc = 0;
703:         /* one left and below; or we are right on it */
704:         col        = (m_ghost_c * n_ghost_c * (l_c - l_start_ghost_c) + m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));
705:         cols[nc++] = col_shift + idx_c[col];
706:         PetscCall(MatPreallocateSet(row, nc, cols, dnz, onz));
707:       }
708:     }
709:   }
710:   PetscCall(MatCreate(PetscObjectComm((PetscObject)daf), &mat));
711: #if defined(PETSC_HAVE_CUDA)
712:   /*
 713:      Temporary hack: since the MAIJ matrix must be converted to AIJ before being used on the GPU,
 714:      we don't want the original unconverted matrix copied to the GPU.
715:   */
716:   if (dof > 1) PetscCall(MatBindToCPU(mat, PETSC_TRUE));
717: #endif
718:   PetscCall(MatSetSizes(mat, m_f * n_f * p_f, col_scale * m_c * n_c * p_c, mx * my * mz, col_scale * Mx * My * Mz));
719:   PetscCall(ConvertToAIJ(dac->mattype, &mattype));
720:   PetscCall(MatSetType(mat, mattype));
721:   PetscCall(MatSeqAIJSetPreallocation(mat, 0, dnz));
722:   PetscCall(MatMPIAIJSetPreallocation(mat, 0, dnz, 0, onz));
723:   MatPreallocateEnd(dnz, onz);

 725:   /* loop over local fine grid nodes, setting the interpolation for each of them */
726:   for (l = l_start; l < l_start + p_f; l++) {
727:     for (j = j_start; j < j_start + n_f; j++) {
728:       for (i = i_start; i < i_start + m_f; i++) {
729:         /* convert to local "natural" numbering and then to PETSc global numbering */
730:         row = idx_f[(m_ghost * n_ghost * (l - l_start_ghost) + m_ghost * (j - j_start_ghost) + (i - i_start_ghost))];

732:         i_c = (i / ratioi); /* coarse grid node to left of fine grid node */
733:         j_c = (j / ratioj); /* coarse grid node below fine grid node */
734:         l_c = (l / ratiol);
735:         nc  = 0;
736:         /* one left and below; or we are right on it */
737:         col      = (m_ghost_c * n_ghost_c * (l_c - l_start_ghost_c) + m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));
738:         cols[nc] = col_shift + idx_c[col];
739:         v[nc++]  = 1.0;

741:         PetscCall(MatSetValues(mat, 1, &row, nc, cols, v, INSERT_VALUES));
742:       }
743:     }
744:   }
745:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_f, &idx_f));
746:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_c, &idx_c));
747:   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
748:   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
749:   PetscCall(MatCreateMAIJ(mat, dof, A));
750:   PetscCall(MatDestroy(&mat));
751:   PetscCall(PetscLogFlops(13.0 * m_f * n_f * p_f));
752:   PetscFunctionReturn(PETSC_SUCCESS);
753: }

755: PetscErrorCode DMCreateInterpolation_DA_3D_Q1(DM dac, DM daf, Mat *A)
756: {
757:   PetscInt               i, j, i_start, j_start, m_f, n_f, Mx, My, dof, l;
758:   const PetscInt        *idx_c, *idx_f;
759:   ISLocalToGlobalMapping ltog_f, ltog_c;
760:   PetscInt               m_ghost, n_ghost, m_ghost_c, n_ghost_c, Mz, mz;
761:   PetscInt               row, col, i_start_ghost, j_start_ghost, cols[8], mx, m_c, my, nc, ratioi, ratioj, ratiok;
762:   PetscInt               i_c, j_c, i_start_c, j_start_c, n_c, i_start_ghost_c, j_start_ghost_c;
763:   PetscInt               l_start, p_f, l_start_ghost, p_ghost, l_start_c, p_c;
764:   PetscInt               l_start_ghost_c, p_ghost_c, l_c, *dnz, *onz;
765:   PetscScalar            v[8], x, y, z;
766:   Mat                    mat;
767:   DMBoundaryType         bx, by, bz;
768:   MatType                mattype;

770:   PetscFunctionBegin;
771:   PetscCall(DMDAGetInfo(dac, NULL, &Mx, &My, &Mz, NULL, NULL, NULL, NULL, NULL, &bx, &by, &bz, NULL));
772:   PetscCall(DMDAGetInfo(daf, NULL, &mx, &my, &mz, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
773:   if (mx == Mx) {
774:     ratioi = 1;
775:   } else if (bx == DM_BOUNDARY_PERIODIC) {
776:     PetscCheck(Mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of x coarse grid points %" PetscInt_FMT " must be positive", Mx);
777:     ratioi = mx / Mx;
778:     PetscCheck(ratioi * Mx == mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: mx/Mx  must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
779:   } else {
780:     PetscCheck(Mx >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of x coarse grid points %" PetscInt_FMT " must be greater than 1", Mx);
781:     ratioi = (mx - 1) / (Mx - 1);
782:     PetscCheck(ratioi * (Mx - 1) == mx - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
783:   }
784:   if (my == My) {
785:     ratioj = 1;
786:   } else if (by == DM_BOUNDARY_PERIODIC) {
787:     PetscCheck(My, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of y coarse grid points %" PetscInt_FMT " must be positive", My);
788:     ratioj = my / My;
789:     PetscCheck(ratioj * My == my, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: my/My  must be integer: my %" PetscInt_FMT " My %" PetscInt_FMT, my, My);
790:   } else {
791:     PetscCheck(My >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of y coarse grid points %" PetscInt_FMT " must be greater than 1", My);
792:     ratioj = (my - 1) / (My - 1);
793:     PetscCheck(ratioj * (My - 1) == my - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (my - 1)/(My - 1) must be integer: my %" PetscInt_FMT " My %" PetscInt_FMT, my, My);
794:   }
795:   if (mz == Mz) {
796:     ratiok = 1;
797:   } else if (bz == DM_BOUNDARY_PERIODIC) {
798:     PetscCheck(Mz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of z coarse grid points %" PetscInt_FMT " must be positive", Mz);
799:     ratiok = mz / Mz;
800:     PetscCheck(ratiok * Mz == mz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: mz/Mz  must be integer: mz %" PetscInt_FMT " Mz %" PetscInt_FMT, mz, Mz);
801:   } else {
802:     PetscCheck(Mz >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of z coarse grid points %" PetscInt_FMT " must be greater than 1", Mz);
803:     ratiok = (mz - 1) / (Mz - 1);
804:     PetscCheck(ratiok * (Mz - 1) == mz - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (mz - 1)/(Mz - 1) must be integer: mz %" PetscInt_FMT " Mz %" PetscInt_FMT, mz, Mz);
805:   }

807:   PetscCall(DMDAGetCorners(daf, &i_start, &j_start, &l_start, &m_f, &n_f, &p_f));
808:   PetscCall(DMDAGetGhostCorners(daf, &i_start_ghost, &j_start_ghost, &l_start_ghost, &m_ghost, &n_ghost, &p_ghost));
809:   PetscCall(DMGetLocalToGlobalMapping(daf, &ltog_f));
810:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_f, &idx_f));

812:   PetscCall(DMDAGetCorners(dac, &i_start_c, &j_start_c, &l_start_c, &m_c, &n_c, &p_c));
813:   PetscCall(DMDAGetGhostCorners(dac, &i_start_ghost_c, &j_start_ghost_c, &l_start_ghost_c, &m_ghost_c, &n_ghost_c, &p_ghost_c));
814:   PetscCall(DMGetLocalToGlobalMapping(dac, &ltog_c));
815:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_c, &idx_c));

817:   /* create interpolation matrix, determining exact preallocation */
818:   MatPreallocateBegin(PetscObjectComm((PetscObject)dac), m_f * n_f * p_f, m_c * n_c * p_c, dnz, onz);
 819:   /* loop over local fine grid nodes, counting interpolation points for the preallocation */
820:   for (l = l_start; l < l_start + p_f; l++) {
821:     for (j = j_start; j < j_start + n_f; j++) {
822:       for (i = i_start; i < i_start + m_f; i++) {
823:         /* convert to local "natural" numbering and then to PETSc global numbering */
824:         row = idx_f[(m_ghost * n_ghost * (l - l_start_ghost) + m_ghost * (j - j_start_ghost) + (i - i_start_ghost))];
825:         i_c = (i / ratioi);
826:         j_c = (j / ratioj);
827:         l_c = (l / ratiok);
828:         PetscCheck(l_c >= l_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
829:                                             l_start %" PetscInt_FMT " l_c %" PetscInt_FMT " l_start_ghost_c %" PetscInt_FMT,
830:                    l_start, l_c, l_start_ghost_c);
831:         PetscCheck(j_c >= j_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
832:                                             j_start %" PetscInt_FMT " j_c %" PetscInt_FMT " j_start_ghost_c %" PetscInt_FMT,
833:                    j_start, j_c, j_start_ghost_c);
834:         PetscCheck(i_c >= i_start_ghost_c, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
835:                                             i_start %" PetscInt_FMT " i_c %" PetscInt_FMT " i_start_ghost_c %" PetscInt_FMT,
836:                    i_start, i_c, i_start_ghost_c);

838:         /*
839:          Only include those interpolation points that are truly
840:          nonzero. Note this is very important for the final grid lines
841:          in the x, y, and z directions, since they have no right/top/back neighbors.
842:          */
843:         nc         = 0;
844:         col        = (m_ghost_c * n_ghost_c * (l_c - l_start_ghost_c) + m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));
845:         cols[nc++] = idx_c[col];
846:         if (i_c * ratioi != i) cols[nc++] = idx_c[col + 1];
847:         if (j_c * ratioj != j) cols[nc++] = idx_c[col + m_ghost_c];
848:         if (l_c * ratiok != l) cols[nc++] = idx_c[col + m_ghost_c * n_ghost_c];
849:         if (j_c * ratioj != j && i_c * ratioi != i) cols[nc++] = idx_c[col + (m_ghost_c + 1)];
850:         if (j_c * ratioj != j && l_c * ratiok != l) cols[nc++] = idx_c[col + (m_ghost_c * n_ghost_c + m_ghost_c)];
851:         if (i_c * ratioi != i && l_c * ratiok != l) cols[nc++] = idx_c[col + (m_ghost_c * n_ghost_c + 1)];
852:         if (i_c * ratioi != i && l_c * ratiok != l && j_c * ratioj != j) cols[nc++] = idx_c[col + (m_ghost_c * n_ghost_c + m_ghost_c + 1)];
853:         PetscCall(MatPreallocateSet(row, nc, cols, dnz, onz));
854:       }
855:     }
856:   }
857:   PetscCall(MatCreate(PetscObjectComm((PetscObject)dac), &mat));
858: #if defined(PETSC_HAVE_CUDA)
859:   /*
 860:      Temporary hack: since the MAIJ matrix must be converted to AIJ before being used on the GPU,
 861:      we don't want the original unconverted matrix copied to the GPU.
862:   */
863:   if (dof > 1) PetscCall(MatBindToCPU(mat, PETSC_TRUE));
864: #endif
865:   PetscCall(MatSetSizes(mat, m_f * n_f * p_f, m_c * n_c * p_c, mx * my * mz, Mx * My * Mz));
866:   PetscCall(ConvertToAIJ(dac->mattype, &mattype));
867:   PetscCall(MatSetType(mat, mattype));
868:   PetscCall(MatSeqAIJSetPreallocation(mat, 0, dnz));
869:   PetscCall(MatMPIAIJSetPreallocation(mat, 0, dnz, 0, onz));
870:   MatPreallocateEnd(dnz, onz);

 872:   /* loop over local fine grid nodes, setting the interpolation for each of them */
873:   if (!NEWVERSION) {
874:     for (l = l_start; l < l_start + p_f; l++) {
875:       for (j = j_start; j < j_start + n_f; j++) {
876:         for (i = i_start; i < i_start + m_f; i++) {
877:           /* convert to local "natural" numbering and then to PETSc global numbering */
878:           row = idx_f[(m_ghost * n_ghost * (l - l_start_ghost) + m_ghost * (j - j_start_ghost) + (i - i_start_ghost))];

880:           i_c = (i / ratioi);
881:           j_c = (j / ratioj);
882:           l_c = (l / ratiok);

884:           /*
885:            Only include those interpolation points that are truly
886:            nonzero. Note this is very important for the final grid lines
887:            in the x, y, and z directions, since they have no right/top/back neighbors.
888:            */
889:           x = ((PetscReal)(i - i_c * ratioi)) / ((PetscReal)ratioi);
890:           y = ((PetscReal)(j - j_c * ratioj)) / ((PetscReal)ratioj);
891:           z = ((PetscReal)(l - l_c * ratiok)) / ((PetscReal)ratiok);

893:           nc = 0;
894:           /* one left and below; or we are right on it */
895:           col = (m_ghost_c * n_ghost_c * (l_c - l_start_ghost_c) + m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));

897:           cols[nc] = idx_c[col];
898:           v[nc++]  = .125 * (1. - (2.0 * x - 1.)) * (1. - (2.0 * y - 1.)) * (1. - (2.0 * z - 1.));

900:           if (i_c * ratioi != i) {
901:             cols[nc] = idx_c[col + 1];
902:             v[nc++]  = .125 * (1. + (2.0 * x - 1.)) * (1. - (2.0 * y - 1.)) * (1. - (2.0 * z - 1.));
903:           }

905:           if (j_c * ratioj != j) {
906:             cols[nc] = idx_c[col + m_ghost_c];
907:             v[nc++]  = .125 * (1. - (2.0 * x - 1.)) * (1. + (2.0 * y - 1.)) * (1. - (2.0 * z - 1.));
908:           }

910:           if (l_c * ratiok != l) {
911:             cols[nc] = idx_c[col + m_ghost_c * n_ghost_c];
912:             v[nc++]  = .125 * (1. - (2.0 * x - 1.)) * (1. - (2.0 * y - 1.)) * (1. + (2.0 * z - 1.));
913:           }

915:           if (j_c * ratioj != j && i_c * ratioi != i) {
916:             cols[nc] = idx_c[col + (m_ghost_c + 1)];
917:             v[nc++]  = .125 * (1. + (2.0 * x - 1.)) * (1. + (2.0 * y - 1.)) * (1. - (2.0 * z - 1.));
918:           }

920:           if (j_c * ratioj != j && l_c * ratiok != l) {
921:             cols[nc] = idx_c[col + (m_ghost_c * n_ghost_c + m_ghost_c)];
922:             v[nc++]  = .125 * (1. - (2.0 * x - 1.)) * (1. + (2.0 * y - 1.)) * (1. + (2.0 * z - 1.));
923:           }

925:           if (i_c * ratioi != i && l_c * ratiok != l) {
926:             cols[nc] = idx_c[col + (m_ghost_c * n_ghost_c + 1)];
927:             v[nc++]  = .125 * (1. + (2.0 * x - 1.)) * (1. - (2.0 * y - 1.)) * (1. + (2.0 * z - 1.));
928:           }

930:           if (i_c * ratioi != i && l_c * ratiok != l && j_c * ratioj != j) {
931:             cols[nc] = idx_c[col + (m_ghost_c * n_ghost_c + m_ghost_c + 1)];
932:             v[nc++]  = .125 * (1. + (2.0 * x - 1.)) * (1. + (2.0 * y - 1.)) * (1. + (2.0 * z - 1.));
933:           }
934:           PetscCall(MatSetValues(mat, 1, &row, nc, cols, v, INSERT_VALUES));
935:         }
936:       }
937:     }

939:   } else {
940:     PetscScalar *xi, *eta, *zeta;
941:     PetscInt     li, nxi, lj, neta, lk, nzeta, n;
942:     PetscScalar  Ni[8];

944:     /* compute local coordinate arrays */
945:     nxi   = ratioi + 1;
946:     neta  = ratioj + 1;
947:     nzeta = ratiok + 1;
948:     PetscCall(PetscMalloc1(nxi, &xi));
949:     PetscCall(PetscMalloc1(neta, &eta));
950:     PetscCall(PetscMalloc1(nzeta, &zeta));
951:     for (li = 0; li < nxi; li++) xi[li] = -1.0 + (PetscScalar)li * (2.0 / (PetscScalar)(nxi - 1));
952:     for (lj = 0; lj < neta; lj++) eta[lj] = -1.0 + (PetscScalar)lj * (2.0 / (PetscScalar)(neta - 1));
953:     for (lk = 0; lk < nzeta; lk++) zeta[lk] = -1.0 + (PetscScalar)lk * (2.0 / (PetscScalar)(nzeta - 1));

955:     for (l = l_start; l < l_start + p_f; l++) {
956:       for (j = j_start; j < j_start + n_f; j++) {
957:         for (i = i_start; i < i_start + m_f; i++) {
958:           /* convert to local "natural" numbering and then to PETSc global numbering */
959:           row = idx_f[(m_ghost * n_ghost * (l - l_start_ghost) + m_ghost * (j - j_start_ghost) + (i - i_start_ghost))];

961:           i_c = (i / ratioi);
962:           j_c = (j / ratioj);
963:           l_c = (l / ratiok);

965:           /* remainders */
966:           li = i - ratioi * (i / ratioi);
967:           if (i == mx - 1) li = nxi - 1;
968:           lj = j - ratioj * (j / ratioj);
969:           if (j == my - 1) lj = neta - 1;
970:           lk = l - ratiok * (l / ratiok);
971:           if (l == mz - 1) lk = nzeta - 1;

973:           /* corners */
974:           col     = (m_ghost_c * n_ghost_c * (l_c - l_start_ghost_c) + m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));
975:           cols[0] = idx_c[col];
976:           Ni[0]   = 1.0;
977:           if ((li == 0) || (li == nxi - 1)) {
978:             if ((lj == 0) || (lj == neta - 1)) {
979:               if ((lk == 0) || (lk == nzeta - 1)) {
980:                 PetscCall(MatSetValue(mat, row, cols[0], Ni[0], INSERT_VALUES));
981:                 continue;
982:               }
983:             }
984:           }

986:           /* edges + interior */
987:           /* remainders */
988:           if (i == mx - 1) i_c--;
989:           if (j == my - 1) j_c--;
990:           if (l == mz - 1) l_c--;

992:           col     = (m_ghost_c * n_ghost_c * (l_c - l_start_ghost_c) + m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c));
993:           cols[0] = idx_c[col];                   /* one left and below; or we are right on it */
994:           cols[1] = idx_c[col + 1];               /* one right and below */
995:           cols[2] = idx_c[col + m_ghost_c];       /* one left and above */
996:           cols[3] = idx_c[col + (m_ghost_c + 1)]; /* one right and above */

998:           cols[4] = idx_c[col + m_ghost_c * n_ghost_c];                   /* one left and below and front; or we are right on it */
999:           cols[5] = idx_c[col + (m_ghost_c * n_ghost_c + 1)];             /* one right and below, and front */
1000:           cols[6] = idx_c[col + (m_ghost_c * n_ghost_c + m_ghost_c)];     /* one left and above and front */
1001:           cols[7] = idx_c[col + (m_ghost_c * n_ghost_c + m_ghost_c + 1)]; /* one right and above and front */

1003:           Ni[0] = 0.125 * (1.0 - xi[li]) * (1.0 - eta[lj]) * (1.0 - zeta[lk]);
1004:           Ni[1] = 0.125 * (1.0 + xi[li]) * (1.0 - eta[lj]) * (1.0 - zeta[lk]);
1005:           Ni[2] = 0.125 * (1.0 - xi[li]) * (1.0 + eta[lj]) * (1.0 - zeta[lk]);
1006:           Ni[3] = 0.125 * (1.0 + xi[li]) * (1.0 + eta[lj]) * (1.0 - zeta[lk]);

1008:           Ni[4] = 0.125 * (1.0 - xi[li]) * (1.0 - eta[lj]) * (1.0 + zeta[lk]);
1009:           Ni[5] = 0.125 * (1.0 + xi[li]) * (1.0 - eta[lj]) * (1.0 + zeta[lk]);
1010:           Ni[6] = 0.125 * (1.0 - xi[li]) * (1.0 + eta[lj]) * (1.0 + zeta[lk]);
1011:           Ni[7] = 0.125 * (1.0 + xi[li]) * (1.0 + eta[lj]) * (1.0 + zeta[lk]);

1013:           for (n = 0; n < 8; n++) {
1014:             if (PetscAbsScalar(Ni[n]) < 1.0e-32) cols[n] = -1;
1015:           }
1016:           PetscCall(MatSetValues(mat, 1, &row, 8, cols, Ni, INSERT_VALUES));
1017:         }
1018:       }
1019:     }
1020:     PetscCall(PetscFree(xi));
1021:     PetscCall(PetscFree(eta));
1022:     PetscCall(PetscFree(zeta));
1023:   }
1024:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_f, &idx_f));
1025:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_c, &idx_c));
1026:   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
1027:   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));

1029:   PetscCall(MatCreateMAIJ(mat, dof, A));
1030:   PetscCall(MatDestroy(&mat));
1031:   PetscFunctionReturn(PETSC_SUCCESS);
1032: }
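
/*
   Worked example (illustrative only) of the trilinear weights set above: the products
   .125 * (1 +/- (2*x - 1)) * (1 +/- (2*y - 1)) * (1 +/- (2*z - 1)) are exactly the tensor-product
   weights (1-x)(1-y)(1-z), x(1-y)(1-z), ..., x*y*z. For ratioi = ratioj = ratiok = 2, a fine node at a
   coarse cell center (x = y = z = 0.5) gets eight entries of 0.125 each, while a fine node coinciding
   with a coarse node gets the single weight 1.0.
*/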

1034: PetscErrorCode DMCreateInterpolation_DA(DM dac, DM daf, Mat *A, Vec *scale)
1035: {
1036:   PetscInt        dimc, Mc, Nc, Pc, mc, nc, pc, dofc, sc, dimf, Mf, Nf, Pf, mf, nf, pf, doff, sf;
1037:   DMBoundaryType  bxc, byc, bzc, bxf, byf, bzf;
1038:   DMDAStencilType stc, stf;
1039:   DM_DA          *ddc = (DM_DA *)dac->data;

1041:   PetscFunctionBegin;

1047:   PetscCall(DMDAGetInfo(dac, &dimc, &Mc, &Nc, &Pc, &mc, &nc, &pc, &dofc, &sc, &bxc, &byc, &bzc, &stc));
1048:   PetscCall(DMDAGetInfo(daf, &dimf, &Mf, &Nf, &Pf, &mf, &nf, &pf, &doff, &sf, &bxf, &byf, &bzf, &stf));
1049:   PetscCheck(dimc == dimf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Dimensions of DMDA do not match %" PetscInt_FMT " %" PetscInt_FMT, dimc, dimf);
1050:   PetscCheck(dofc == doff, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "DOF of DMDA do not match %" PetscInt_FMT " %" PetscInt_FMT, dofc, doff);
1051:   PetscCheck(sc == sf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Stencil width of DMDA do not match %" PetscInt_FMT " %" PetscInt_FMT, sc, sf);
1052:   PetscCheck(bxc == bxf && byc == byf && bzc == bzf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Boundary type different in two DMDAs");
1053:   PetscCheck(stc == stf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Stencil type different in two DMDAs");
1054:   PetscCheck(Mc >= 2 || Mf <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Coarse grid requires at least 2 points in x direction");
1055:   PetscCheck(dimc <= 1 || Nc >= 2 || Nf <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Coarse grid requires at least 2 points in y direction");
1056:   PetscCheck(dimc <= 2 || Pc >= 2 || Pf <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Coarse grid requires at least 2 points in z direction");

1058:   if (ddc->interptype == DMDA_Q1) {
1059:     if (dimc == 1) {
1060:       PetscCall(DMCreateInterpolation_DA_1D_Q1(dac, daf, A));
1061:     } else if (dimc == 2) {
1062:       PetscCall(DMCreateInterpolation_DA_2D_Q1(dac, daf, A));
1063:     } else if (dimc == 3) {
1064:       PetscCall(DMCreateInterpolation_DA_3D_Q1(dac, daf, A));
1065:     } else SETERRQ(PetscObjectComm((PetscObject)daf), PETSC_ERR_SUP, "No support for this DMDA dimension %" PetscInt_FMT " for interpolation type %d", dimc, (int)ddc->interptype);
1066:   } else if (ddc->interptype == DMDA_Q0) {
1067:     if (dimc == 1) {
1068:       PetscCall(DMCreateInterpolation_DA_1D_Q0(dac, daf, A));
1069:     } else if (dimc == 2) {
1070:       PetscCall(DMCreateInterpolation_DA_2D_Q0(dac, daf, A));
1071:     } else if (dimc == 3) {
1072:       PetscCall(DMCreateInterpolation_DA_3D_Q0(dac, daf, A));
1073:     } else SETERRQ(PetscObjectComm((PetscObject)daf), PETSC_ERR_SUP, "No support for this DMDA dimension %" PetscInt_FMT " for interpolation type %d", dimc, (int)ddc->interptype);
1074:   }
1075:   if (scale) PetscCall(DMCreateInterpolationScale((DM)dac, (DM)daf, *A, scale));
1076:   PetscFunctionReturn(PETSC_SUCCESS);
1077: }
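
/*
   Illustrative usage sketch (not part of this file; error checking omitted): the routine above is the
   DMDA implementation used by the public DMCreateInterpolation() interface, which a user typically
   drives roughly as follows after refining a DMDA:

     DM  dac, daf;
     Mat A;
     Vec scale, vc, vf;

     DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, 5, 5, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &dac);
     DMSetUp(dac);
     DMRefine(dac, PETSC_COMM_WORLD, &daf);        // refined DMDA, roughly twice the resolution
     DMCreateInterpolation(dac, daf, &A, &scale);  // for a DMDA this ends up in DMCreateInterpolation_DA()
     DMCreateGlobalVector(dac, &vc);
     DMCreateGlobalVector(daf, &vf);
     MatInterpolate(A, vc, vf);                    // vf <- A * vc, i.e. vc interpolated onto the fine grid

     VecDestroy(&vf);
     VecDestroy(&vc);
     VecDestroy(&scale);
     MatDestroy(&A);
     DMDestroy(&daf);
     DMDestroy(&dac);
*/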

1079: PetscErrorCode DMCreateInjection_DA_1D(DM dac, DM daf, VecScatter *inject)
1080: {
1081:   PetscInt               i, i_start, m_f, Mx, dof;
1082:   const PetscInt        *idx_f;
1083:   ISLocalToGlobalMapping ltog_f;
1084:   PetscInt               m_ghost, m_ghost_c;
1085:   PetscInt               row, i_start_ghost, mx, m_c, nc, ratioi;
1086:   PetscInt               i_start_c, i_start_ghost_c;
1087:   PetscInt              *cols;
1088:   DMBoundaryType         bx;
1089:   Vec                    vecf, vecc;
1090:   IS                     isf;

1092:   PetscFunctionBegin;
1093:   PetscCall(DMDAGetInfo(dac, NULL, &Mx, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bx, NULL, NULL, NULL));
1094:   PetscCall(DMDAGetInfo(daf, NULL, &mx, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
1095:   if (bx == DM_BOUNDARY_PERIODIC) {
1096:     ratioi = mx / Mx;
1097:     PetscCheck(ratioi * Mx == mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: mx/Mx  must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
1098:   } else {
1099:     ratioi = (mx - 1) / (Mx - 1);
1100:     PetscCheck(ratioi * (Mx - 1) == mx - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
1101:   }

1103:   PetscCall(DMDAGetCorners(daf, &i_start, NULL, NULL, &m_f, NULL, NULL));
1104:   PetscCall(DMDAGetGhostCorners(daf, &i_start_ghost, NULL, NULL, &m_ghost, NULL, NULL));
1105:   PetscCall(DMGetLocalToGlobalMapping(daf, &ltog_f));
1106:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_f, &idx_f));

1108:   PetscCall(DMDAGetCorners(dac, &i_start_c, NULL, NULL, &m_c, NULL, NULL));
1109:   PetscCall(DMDAGetGhostCorners(dac, &i_start_ghost_c, NULL, NULL, &m_ghost_c, NULL, NULL));

1111:   /* loop over the local coarse grid nodes, recording the fine grid node that coincides with each (these define the injection) */
1112:   nc = 0;
1113:   PetscCall(PetscMalloc1(m_f, &cols));

1115:   for (i = i_start_c; i < i_start_c + m_c; i++) {
1116:     PetscInt i_f = i * ratioi;

1118:     PetscCheck(i_f >= i_start_ghost && i_f < i_start_ghost + m_ghost, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\ni_c %" PetscInt_FMT " i_f %" PetscInt_FMT " fine ghost range [%" PetscInt_FMT ",%" PetscInt_FMT "]", i, i_f, i_start_ghost, i_start_ghost + m_ghost);

1120:     row        = idx_f[(i_f - i_start_ghost)];
1121:     cols[nc++] = row;
1122:   }

1124:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_f, &idx_f));
1125:   PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)daf), dof, nc, cols, PETSC_OWN_POINTER, &isf));
1126:   PetscCall(DMGetGlobalVector(dac, &vecc));
1127:   PetscCall(DMGetGlobalVector(daf, &vecf));
1128:   PetscCall(VecScatterCreate(vecf, isf, vecc, NULL, inject));
1129:   PetscCall(DMRestoreGlobalVector(dac, &vecc));
1130:   PetscCall(DMRestoreGlobalVector(daf, &vecf));
1131:   PetscCall(ISDestroy(&isf));
1132:   PetscFunctionReturn(PETSC_SUCCESS);
1133: }

1135: PetscErrorCode DMCreateInjection_DA_2D(DM dac, DM daf, VecScatter *inject)
1136: {
1137:   PetscInt               i, j, i_start, j_start, m_f, n_f, Mx, My, dof;
1138:   const PetscInt        *idx_c, *idx_f;
1139:   ISLocalToGlobalMapping ltog_f, ltog_c;
1140:   PetscInt               m_ghost, n_ghost, m_ghost_c, n_ghost_c;
1141:   PetscInt               row, i_start_ghost, j_start_ghost, mx, m_c, my, nc, ratioi, ratioj;
1142:   PetscInt               i_start_c, j_start_c, n_c, i_start_ghost_c, j_start_ghost_c;
1143:   PetscInt              *cols;
1144:   DMBoundaryType         bx, by;
1145:   Vec                    vecf, vecc;
1146:   IS                     isf;

1148:   PetscFunctionBegin;
1149:   PetscCall(DMDAGetInfo(dac, NULL, &Mx, &My, NULL, NULL, NULL, NULL, NULL, NULL, &bx, &by, NULL, NULL));
1150:   PetscCall(DMDAGetInfo(daf, NULL, &mx, &my, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
1151:   if (bx == DM_BOUNDARY_PERIODIC) {
1152:     ratioi = mx / Mx;
1153:     PetscCheck(ratioi * Mx == mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: mx/Mx  must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
1154:   } else {
1155:     ratioi = (mx - 1) / (Mx - 1);
1156:     PetscCheck(ratioi * (Mx - 1) == mx - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
1157:   }
1158:   if (by == DM_BOUNDARY_PERIODIC) {
1159:     ratioj = my / My;
1160:     PetscCheck(ratioj * My == my, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: my/My  must be integer: my %" PetscInt_FMT " My %" PetscInt_FMT, my, My);
1161:   } else {
1162:     ratioj = (my - 1) / (My - 1);
1163:     PetscCheck(ratioj * (My - 1) == my - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (my - 1)/(My - 1) must be integer: my %" PetscInt_FMT " My %" PetscInt_FMT, my, My);
1164:   }

1166:   PetscCall(DMDAGetCorners(daf, &i_start, &j_start, NULL, &m_f, &n_f, NULL));
1167:   PetscCall(DMDAGetGhostCorners(daf, &i_start_ghost, &j_start_ghost, NULL, &m_ghost, &n_ghost, NULL));
1168:   PetscCall(DMGetLocalToGlobalMapping(daf, &ltog_f));
1169:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_f, &idx_f));

1171:   PetscCall(DMDAGetCorners(dac, &i_start_c, &j_start_c, NULL, &m_c, &n_c, NULL));
1172:   PetscCall(DMDAGetGhostCorners(dac, &i_start_ghost_c, &j_start_ghost_c, NULL, &m_ghost_c, &n_ghost_c, NULL));
1173:   PetscCall(DMGetLocalToGlobalMapping(dac, &ltog_c));
1174:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_c, &idx_c));

1176:   /* loop over the local coarse grid nodes, recording the fine grid node that coincides with each (these define the injection) */
1177:   nc = 0;
1178:   PetscCall(PetscMalloc1(n_f * m_f, &cols));
1179:   for (j = j_start_c; j < j_start_c + n_c; j++) {
1180:     for (i = i_start_c; i < i_start_c + m_c; i++) {
1181:       PetscInt i_f = i * ratioi, j_f = j * ratioj;
1182:       PetscCheck(j_f >= j_start_ghost && j_f < j_start_ghost + n_ghost, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
1183:     j_c %" PetscInt_FMT " j_f %" PetscInt_FMT " fine ghost range [%" PetscInt_FMT ",%" PetscInt_FMT "]",
1184:                  j, j_f, j_start_ghost, j_start_ghost + n_ghost);
1185:       PetscCheck(i_f >= i_start_ghost && i_f < i_start_ghost + m_ghost, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Processor's coarse DMDA must lie over fine DMDA\n\
1186:     i_c %" PetscInt_FMT " i_f %" PetscInt_FMT " fine ghost range [%" PetscInt_FMT ",%" PetscInt_FMT "]",
1187:                  i, i_f, i_start_ghost, i_start_ghost + m_ghost);
1188:       row        = idx_f[(m_ghost * (j_f - j_start_ghost) + (i_f - i_start_ghost))];
1189:       cols[nc++] = row;
1190:     }
1191:   }
1192:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_f, &idx_f));
1193:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_c, &idx_c));

1195:   PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)daf), dof, nc, cols, PETSC_OWN_POINTER, &isf));
1196:   PetscCall(DMGetGlobalVector(dac, &vecc));
1197:   PetscCall(DMGetGlobalVector(daf, &vecf));
1198:   PetscCall(VecScatterCreate(vecf, isf, vecc, NULL, inject));
1199:   PetscCall(DMRestoreGlobalVector(dac, &vecc));
1200:   PetscCall(DMRestoreGlobalVector(daf, &vecf));
1201:   PetscCall(ISDestroy(&isf));
1202:   PetscFunctionReturn(PETSC_SUCCESS);
1203: }

1205: PetscErrorCode DMCreateInjection_DA_3D(DM dac, DM daf, VecScatter *inject)
1206: {
1207:   PetscInt               i, j, k, i_start, j_start, k_start, m_f, n_f, p_f, Mx, My, Mz;
1208:   PetscInt               m_ghost, n_ghost, p_ghost, m_ghost_c, n_ghost_c, p_ghost_c;
1209:   PetscInt               i_start_ghost, j_start_ghost, k_start_ghost;
1210:   PetscInt               mx, my, mz, ratioi, ratioj, ratiok;
1211:   PetscInt               i_start_c, j_start_c, k_start_c;
1212:   PetscInt               m_c, n_c, p_c;
1213:   PetscInt               i_start_ghost_c, j_start_ghost_c, k_start_ghost_c;
1214:   PetscInt               row, nc, dof;
1215:   const PetscInt        *idx_c, *idx_f;
1216:   ISLocalToGlobalMapping ltog_f, ltog_c;
1217:   PetscInt              *cols;
1218:   DMBoundaryType         bx, by, bz;
1219:   Vec                    vecf, vecc;
1220:   IS                     isf;

1222:   PetscFunctionBegin;
1223:   PetscCall(DMDAGetInfo(dac, NULL, &Mx, &My, &Mz, NULL, NULL, NULL, NULL, NULL, &bx, &by, &bz, NULL));
1224:   PetscCall(DMDAGetInfo(daf, NULL, &mx, &my, &mz, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));

1226:   if (bx == DM_BOUNDARY_PERIODIC) {
1227:     ratioi = mx / Mx;
1228:     PetscCheck(ratioi * Mx == mx, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: mx/Mx  must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
1229:   } else {
1230:     ratioi = (mx - 1) / (Mx - 1);
1231:     PetscCheck(ratioi * (Mx - 1) == mx - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (mx - 1)/(Mx - 1) must be integer: mx %" PetscInt_FMT " Mx %" PetscInt_FMT, mx, Mx);
1232:   }
1233:   if (by == DM_BOUNDARY_PERIODIC) {
1234:     ratioj = my / My;
1235:     PetscCheck(ratioj * My == my, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: my/My  must be integer: my %" PetscInt_FMT " My %" PetscInt_FMT, my, My);
1236:   } else {
1237:     ratioj = (my - 1) / (My - 1);
1238:     PetscCheck(ratioj * (My - 1) == my - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (my - 1)/(My - 1) must be integer: my %" PetscInt_FMT " My %" PetscInt_FMT, my, My);
1239:   }
1240:   if (bz == DM_BOUNDARY_PERIODIC) {
1241:     ratiok = mz / Mz;
1242:     PetscCheck(ratiok * Mz == mz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: mz/Mz  must be integer: mz %" PetscInt_FMT " Mz %" PetscInt_FMT, mz, Mz);
1243:   } else {
1244:     ratiok = (mz - 1) / (Mz - 1);
1245:     PetscCheck(ratiok * (Mz - 1) == mz - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Ratio between levels: (mz - 1)/(Mz - 1) must be integer: mz %" PetscInt_FMT " Mz %" PetscInt_FMT, mz, Mz);
1246:   }

1248:   PetscCall(DMDAGetCorners(daf, &i_start, &j_start, &k_start, &m_f, &n_f, &p_f));
1249:   PetscCall(DMDAGetGhostCorners(daf, &i_start_ghost, &j_start_ghost, &k_start_ghost, &m_ghost, &n_ghost, &p_ghost));
1250:   PetscCall(DMGetLocalToGlobalMapping(daf, &ltog_f));
1251:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_f, &idx_f));

1253:   PetscCall(DMDAGetCorners(dac, &i_start_c, &j_start_c, &k_start_c, &m_c, &n_c, &p_c));
1254:   PetscCall(DMDAGetGhostCorners(dac, &i_start_ghost_c, &j_start_ghost_c, &k_start_ghost_c, &m_ghost_c, &n_ghost_c, &p_ghost_c));
1255:   PetscCall(DMGetLocalToGlobalMapping(dac, &ltog_c));
1256:   PetscCall(ISLocalToGlobalMappingGetBlockIndices(ltog_c, &idx_c));

1258:   /* loop over the local coarse grid nodes, recording the fine grid node that coincides with each (these define the injection) */
1259:   nc = 0;
1260:   PetscCall(PetscMalloc1(n_f * m_f * p_f, &cols));
1261:   for (k = k_start_c; k < k_start_c + p_c; k++) {
1262:     for (j = j_start_c; j < j_start_c + n_c; j++) {
1263:       for (i = i_start_c; i < i_start_c + m_c; i++) {
1264:         PetscInt i_f = i * ratioi, j_f = j * ratioj, k_f = k * ratiok;
1265:         PetscCheck(k_f >= k_start_ghost && k_f < k_start_ghost + p_ghost, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,
1266:                    "Processor's coarse DMDA must lie over fine DMDA  "
1267:                    "k_c %" PetscInt_FMT " k_f %" PetscInt_FMT " fine ghost range [%" PetscInt_FMT ",%" PetscInt_FMT "]",
1268:                    k, k_f, k_start_ghost, k_start_ghost + p_ghost);
1269:         PetscCheck(j_f >= j_start_ghost && j_f < j_start_ghost + n_ghost, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,
1270:                    "Processor's coarse DMDA must lie over fine DMDA  "
1271:                    "j_c %" PetscInt_FMT " j_f %" PetscInt_FMT " fine ghost range [%" PetscInt_FMT ",%" PetscInt_FMT "]",
1272:                    j, j_f, j_start_ghost, j_start_ghost + n_ghost);
1273:         PetscCheck(i_f >= i_start_ghost && i_f < i_start_ghost + m_ghost, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,
1274:                    "Processor's coarse DMDA must lie over fine DMDA  "
1275:                    "i_c %" PetscInt_FMT " i_f %" PetscInt_FMT " fine ghost range [%" PetscInt_FMT ",%" PetscInt_FMT "]",
1276:                    i, i_f, i_start_ghost, i_start_ghost + m_ghost);
1277:         row        = idx_f[(m_ghost * n_ghost * (k_f - k_start_ghost) + m_ghost * (j_f - j_start_ghost) + (i_f - i_start_ghost))];
1278:         cols[nc++] = row;
1279:       }
1280:     }
1281:   }
1282:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_f, &idx_f));
1283:   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(ltog_c, &idx_c));

1285:   PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)daf), dof, nc, cols, PETSC_OWN_POINTER, &isf));
1286:   PetscCall(DMGetGlobalVector(dac, &vecc));
1287:   PetscCall(DMGetGlobalVector(daf, &vecf));
1288:   PetscCall(VecScatterCreate(vecf, isf, vecc, NULL, inject));
1289:   PetscCall(DMRestoreGlobalVector(dac, &vecc));
1290:   PetscCall(DMRestoreGlobalVector(daf, &vecf));
1291:   PetscCall(ISDestroy(&isf));
1292:   PetscFunctionReturn(PETSC_SUCCESS);
1293: }

1295: PetscErrorCode DMCreateInjection_DA(DM dac, DM daf, Mat *mat)
1296: {
1297:   PetscInt        dimc, Mc, Nc, Pc, mc, nc, pc, dofc, sc, dimf, Mf, Nf, Pf, mf, nf, pf, doff, sf;
1298:   DMBoundaryType  bxc, byc, bzc, bxf, byf, bzf;
1299:   DMDAStencilType stc, stf;
1300:   VecScatter      inject = NULL;

1302:   PetscFunctionBegin;

1307:   PetscCall(DMDAGetInfo(dac, &dimc, &Mc, &Nc, &Pc, &mc, &nc, &pc, &dofc, &sc, &bxc, &byc, &bzc, &stc));
1308:   PetscCall(DMDAGetInfo(daf, &dimf, &Mf, &Nf, &Pf, &mf, &nf, &pf, &doff, &sf, &bxf, &byf, &bzf, &stf));
1309:   PetscCheck(dimc == dimf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Dimensions of DMDA do not match %" PetscInt_FMT " %" PetscInt_FMT, dimc, dimf);
1310:   PetscCheck(dofc == doff, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "DOF of DMDA do not match %" PetscInt_FMT " %" PetscInt_FMT, dofc, doff);
1311:   PetscCheck(sc == sf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Stencil width of DMDA do not match %" PetscInt_FMT " %" PetscInt_FMT, sc, sf);
1312:   PetscCheck(bxc == bxf && byc == byf && bzc == bzf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Boundary type different in two DMDAs");
1313:   PetscCheck(stc == stf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Stencil type different in two DMDAs");
1314:   PetscCheck(Mc >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Coarse grid requires at least 2 points in x direction");
1315:   PetscCheck(dimc <= 1 || Nc >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Coarse grid requires at least 2 points in y direction");
1316:   PetscCheck(dimc <= 2 || Pc >= 2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Coarse grid requires at least 2 points in z direction");

1318:   if (dimc == 1) {
1319:     PetscCall(DMCreateInjection_DA_1D(dac, daf, &inject));
1320:   } else if (dimc == 2) {
1321:     PetscCall(DMCreateInjection_DA_2D(dac, daf, &inject));
1322:   } else if (dimc == 3) {
1323:     PetscCall(DMCreateInjection_DA_3D(dac, daf, &inject));
1324:   }
1325:   PetscCall(MatCreateScatter(PetscObjectComm((PetscObject)inject), inject, mat));
1326:   PetscCall(VecScatterDestroy(&inject));
1327:   PetscFunctionReturn(PETSC_SUCCESS);
1328: }
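
/*
   Illustrative usage sketch (not part of this file; error checking omitted): DMCreateInjection_DA() is
   the DMDA implementation behind the public DMCreateInjection() interface. The resulting scatter-based
   matrix transfers fine grid values directly onto the coincident coarse grid nodes:

     Mat inject;
     Vec vc, vf;

     DMCreateInjection(dac, daf, &inject);   // dac, daf set up as in the interpolation sketch above
     DMCreateGlobalVector(daf, &vf);
     DMCreateGlobalVector(dac, &vc);
     MatRestrict(inject, vf, vc);            // vc <- pointwise samples of vf at the coarse grid nodes

     VecDestroy(&vc);
     VecDestroy(&vf);
     MatDestroy(&inject);
*/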

1330: /*@
1331:    DMCreateAggregates - Deprecated, use `DMDACreateAggregates()` instead.

1333:    Level: intermediate
1334: @*/
1335: PetscErrorCode DMCreateAggregates(DM dac, DM daf, Mat *mat)
1336: {
1337:   return DMDACreateAggregates(dac, daf, mat);
1338: }

1340: /*@
1341:    DMDACreateAggregates - Gets the aggregates that map between
1342:    grids associated with two `DMDA`s

1344:    Collective

1346:    Input Parameters:
1347: +  dac - the coarse grid `DMDA`
1348: -  daf - the fine grid `DMDA`

1350:    Output Parameter:
1351: .  rest - the restriction matrix (transpose of the projection matrix)

1353:    Level: intermediate

1355:    Note:
1356:    This routine is not used by PETSc.
1357:    It is not clear what its use case is and it may be removed in a future release.
1358:    Users should contact petsc-maint@mcs.anl.gov if they plan to use it.

1360: .seealso: `DMRefine()`, `DMCreateInjection()`, `DMCreateInterpolation()`
1361: @*/
1362: PetscErrorCode DMDACreateAggregates(DM dac, DM daf, Mat *rest)
1363: {
1364:   PetscInt               dimc, Mc, Nc, Pc, mc, nc, pc, dofc, sc;
1365:   PetscInt               dimf, Mf, Nf, Pf, mf, nf, pf, doff, sf;
1366:   DMBoundaryType         bxc, byc, bzc, bxf, byf, bzf;
1367:   DMDAStencilType        stc, stf;
1368:   PetscInt               i, j, l;
1369:   PetscInt               i_start, j_start, l_start, m_f, n_f, p_f;
1370:   PetscInt               i_start_ghost, j_start_ghost, l_start_ghost, m_ghost, n_ghost, p_ghost;
1371:   const PetscInt        *idx_f;
1372:   PetscInt               i_c, j_c, l_c;
1373:   PetscInt               i_start_c, j_start_c, l_start_c, m_c, n_c, p_c;
1374:   PetscInt               i_start_ghost_c, j_start_ghost_c, l_start_ghost_c, m_ghost_c, n_ghost_c, p_ghost_c;
1375:   const PetscInt        *idx_c;
1376:   PetscInt               d;
1377:   PetscInt               a;
1378:   PetscInt               max_agg_size;
1379:   PetscInt              *fine_nodes;
1380:   PetscScalar           *one_vec;
1381:   PetscInt               fn_idx;
1382:   ISLocalToGlobalMapping ltogmf, ltogmc;

1384:   PetscFunctionBegin;

1389:   PetscCall(DMDAGetInfo(dac, &dimc, &Mc, &Nc, &Pc, &mc, &nc, &pc, &dofc, &sc, &bxc, &byc, &bzc, &stc));
1390:   PetscCall(DMDAGetInfo(daf, &dimf, &Mf, &Nf, &Pf, &mf, &nf, &pf, &doff, &sf, &bxf, &byf, &bzf, &stf));
1391:   PetscCheck(dimc == dimf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Dimensions of DMDA do not match %" PetscInt_FMT " %" PetscInt_FMT, dimc, dimf);
1392:   PetscCheck(dofc == doff, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "DOF of DMDA do not match %" PetscInt_FMT " %" PetscInt_FMT, dofc, doff);
1393:   PetscCheck(sc == sf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Stencil width of DMDA do not match %" PetscInt_FMT " %" PetscInt_FMT, sc, sf);
1394:   PetscCheck(bxc == bxf && byc == byf && bzc == bzf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Boundary type different in two DMDAs");
1395:   PetscCheck(stc == stf, PetscObjectComm((PetscObject)daf), PETSC_ERR_ARG_INCOMP, "Stencil type different in two DMDAs");

1397:   PetscCheck(Mf >= Mc, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Coarse grid has more points than fine grid, Mc %" PetscInt_FMT ", Mf %" PetscInt_FMT, Mc, Mf);
1398:   PetscCheck(Nf >= Nc, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Coarse grid has more points than fine grid, Nc %" PetscInt_FMT ", Nf %" PetscInt_FMT, Nc, Nf);
1399:   PetscCheck(Pf >= Pc, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Coarse grid has more points than fine grid, Pc %" PetscInt_FMT ", Pf %" PetscInt_FMT, Pc, Pf);

1401:   if (Pc < 0) Pc = 1;
1402:   if (Pf < 0) Pf = 1;
1403:   if (Nc < 0) Nc = 1;
1404:   if (Nf < 0) Nf = 1;

1406:   PetscCall(DMDAGetCorners(daf, &i_start, &j_start, &l_start, &m_f, &n_f, &p_f));
1407:   PetscCall(DMDAGetGhostCorners(daf, &i_start_ghost, &j_start_ghost, &l_start_ghost, &m_ghost, &n_ghost, &p_ghost));

1409:   PetscCall(DMGetLocalToGlobalMapping(daf, &ltogmf));
1410:   PetscCall(ISLocalToGlobalMappingGetIndices(ltogmf, &idx_f));

1412:   PetscCall(DMDAGetCorners(dac, &i_start_c, &j_start_c, &l_start_c, &m_c, &n_c, &p_c));
1413:   PetscCall(DMDAGetGhostCorners(dac, &i_start_ghost_c, &j_start_ghost_c, &l_start_ghost_c, &m_ghost_c, &n_ghost_c, &p_ghost_c));

1415:   PetscCall(DMGetLocalToGlobalMapping(dac, &ltogmc));
1416:   PetscCall(ISLocalToGlobalMappingGetIndices(ltogmc, &idx_c));

1418:   /*
1419:      The basic idea is as follows. Consider a 2D example and let r_x, r_y be the refinement
1420:      ratios in dimension 1 and 2 respectively.
1421:      For a coarse grid node (i,j), all fine grid nodes with indices in [r_x*i, r_x*(i+1)) and
1422:      [r_y*j, r_y*(j+1)) are grouped into the aggregate owned by that coarse node.
1423:      Each dof on the fine grid is mapped to the matching dof on the coarse grid.
1424:   */
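
  /*
     Concrete 1D illustration (integer arithmetic, as used in the loops below): with Mc = 3 coarse and
     Mf = 7 fine points, coarse node i_c owns the fine nodes in [i_c*Mf/Mc, (i_c+1)*Mf/Mc), giving the
     aggregates {0,1}, {2,3} and {4,5,6}; the leftover point is absorbed by the last aggregate.
  */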

1426:   max_agg_size = (Mf / Mc + 1) * (Nf / Nc + 1) * (Pf / Pc + 1);

1428:   /* create the matrix that will contain the restriction operator */
1429:   PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)daf), m_c * n_c * p_c * dofc, m_f * n_f * p_f * doff, Mc * Nc * Pc * dofc, Mf * Nf * Pf * doff, max_agg_size, NULL, max_agg_size, NULL, rest));

1431:   /* store nodes in the fine grid here */
1432:   PetscCall(PetscMalloc2(max_agg_size, &one_vec, max_agg_size, &fine_nodes));
1433:   for (i = 0; i < max_agg_size; i++) one_vec[i] = 1.0;

1435:   /* loop over all coarse nodes */
1436:   for (l_c = l_start_c; l_c < l_start_c + p_c; l_c++) {
1437:     for (j_c = j_start_c; j_c < j_start_c + n_c; j_c++) {
1438:       for (i_c = i_start_c; i_c < i_start_c + m_c; i_c++) {
1439:         for (d = 0; d < dofc; d++) {
1440:           /* convert to local "natural" numbering and then to PETSc global numbering */
1441:           a = idx_c[dofc * (m_ghost_c * n_ghost_c * (l_c - l_start_ghost_c) + m_ghost_c * (j_c - j_start_ghost_c) + (i_c - i_start_ghost_c))] + d;

1443:           fn_idx = 0;
1444:           /* Corresponding fine points are all points (i_f, j_f, l_f) such that
1445:              i_c*Mf/Mc <= i_f < (i_c+1)*Mf/Mc
1446:              (same for other dimensions)
1447:           */
1448:           for (l = l_c * Pf / Pc; l < PetscMin((l_c + 1) * Pf / Pc, Pf); l++) {
1449:             for (j = j_c * Nf / Nc; j < PetscMin((j_c + 1) * Nf / Nc, Nf); j++) {
1450:               for (i = i_c * Mf / Mc; i < PetscMin((i_c + 1) * Mf / Mc, Mf); i++) {
1451:                 fine_nodes[fn_idx] = idx_f[doff * (m_ghost * n_ghost * (l - l_start_ghost) + m_ghost * (j - j_start_ghost) + (i - i_start_ghost))] + d;
1452:                 fn_idx++;
1453:               }
1454:             }
1455:           }
1456:           /* add all these points to one aggregate */
1457:           PetscCall(MatSetValues(*rest, 1, &a, fn_idx, fine_nodes, one_vec, INSERT_VALUES));
1458:         }
1459:       }
1460:     }
1461:   }
1462:   PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogmf, &idx_f));
1463:   PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogmc, &idx_c));
1464:   PetscCall(PetscFree2(one_vec, fine_nodes));
1465:   PetscCall(MatAssemblyBegin(*rest, MAT_FINAL_ASSEMBLY));
1466:   PetscCall(MatAssemblyEnd(*rest, MAT_FINAL_ASSEMBLY));
1467:   PetscFunctionReturn(PETSC_SUCCESS);
1468: }
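
/*
   Illustrative usage sketch (not part of this file; error checking omitted): applying the aggregation
   operator built above sums the fine grid values within each aggregate, since every stored entry is 1.0:

     Mat R;
     Vec vc, vf;

     DMDACreateAggregates(dac, daf, &R);   // dac, daf set up as in the interpolation sketch above
     DMCreateGlobalVector(daf, &vf);
     DMCreateGlobalVector(dac, &vc);
     MatMult(R, vf, vc);                   // each coarse entry receives the sum over its aggregate

     VecDestroy(&vc);
     VecDestroy(&vf);
     MatDestroy(&R);
*/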