Actual source code: ex9.c
1: static char help[] = "This example shows 1) how to transfer vectors from a parent communicator to vectors on a child communicator and vice versa;\n\
2: 2) how to transfer vectors from a subcommunicator to vectors on another subcommunicator. The two subcommunicators are not\n\
3: required to cover all processes in PETSC_COMM_WORLD; 3) how to copy a vector from a parent communicator to vectors on its child communicators.\n\
4: To run any example with VECCUDA vectors, add -vectype cuda to the argument list\n\n";
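/* Example runs (cf. the TEST section at the end of this file), e.g.:
     mpiexec -n 7 ./ex9 -world2sub
     mpiexec -n 7 ./ex9 -sub2sub
     mpiexec -n 7 ./ex9 -world2subs
   Append -vectype cuda to any of them to use VECCUDA vectors. */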
6: #include <petscvec.h>
7: int main(int argc, char **argv)
8: {
9: PetscMPIInt nproc, grank, mycolor;
10: PetscInt i, n, N = 20, low, high;
11: MPI_Comm subcomm;
12: Vec x = NULL; /* a global vector on PETSC_COMM_WORLD */
13: Vec yg = NULL; /* another global vector on PETSC_COMM_WORLD */
14: VecScatter vscat;
15: IS ix, iy;
16: PetscBool iscuda = PETSC_FALSE; /* Option to use VECCUDA vectors */
17: PetscBool optionflag, compareflag;
18: char vectypename[PETSC_MAX_PATH_LEN];
19: PetscBool world2sub = PETSC_FALSE; /* Copy a vector from WORLD to a subcomm? */
20: PetscBool sub2sub = PETSC_FALSE; /* Copy a vector from a subcomm to another subcomm? */
21: PetscBool world2subs = PETSC_FALSE; /* Copy a vector from WORLD to multiple subcomms? */
23: PetscFunctionBeginUser;
24: PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
25: PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &nproc));
26: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &grank));
28: PetscCheck(nproc >= 2, PETSC_COMM_WORLD, PETSC_ERR_ARG_SIZ, "This test must have at least two processes to run");
30: PetscCall(PetscOptionsGetBool(NULL, NULL, "-world2sub", &world2sub, NULL));
31: PetscCall(PetscOptionsGetBool(NULL, NULL, "-sub2sub", &sub2sub, NULL));
32: PetscCall(PetscOptionsGetBool(NULL, NULL, "-world2subs", &world2subs, NULL));
33: PetscCall(PetscOptionsGetString(NULL, NULL, "-vectype", vectypename, sizeof(vectypename), &optionflag));
34: if (optionflag) {
35: PetscCall(PetscStrncmp(vectypename, "cuda", (size_t)4, &compareflag));
36: if (compareflag) iscuda = PETSC_TRUE;
37: }
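/* Unless -vectype cuda was given, the vectors below are created as VECSTANDARD, i.e., VECSEQ on one process and VECMPI otherwise */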
39: /* Split PETSC_COMM_WORLD into three subcomms. Each process can only see the subcomm it belongs to */
40: mycolor = grank % 3;
41: PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, mycolor, grank, &subcomm));
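/* For example, with the 7 ranks used in the tests below, subcomm0 = {0,3,6}, subcomm1 = {1,4} and subcomm2 = {2,5} in terms of
   PETSC_COMM_WORLD ranks. Using grank as the key keeps ranks in their PETSC_COMM_WORLD order within each subcomm. */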
43: /*===========================================================================
44: * Transfer a vector x defined on PETSC_COMM_WORLD to a vector y defined on
45: * a subcommunicator of PETSC_COMM_WORLD and vice versa.
46: *===========================================================================*/
47: if (world2sub) {
48: PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
49: PetscCall(VecSetSizes(x, PETSC_DECIDE, N));
50: if (iscuda) {
51: PetscCall(VecSetType(x, VECCUDA));
52: } else {
53: PetscCall(VecSetType(x, VECSTANDARD));
54: }
55: PetscCall(VecSetUp(x));
56: PetscCall(PetscObjectSetName((PetscObject)x, "x_commworld")); /* Give a name to view x clearly */
58: /* Initialize x to [-0.0, -1.0, -2.0, ..., -19.0] */
59: PetscCall(VecGetOwnershipRange(x, &low, &high));
60: for (i = low; i < high; i++) {
61: PetscScalar val = -i;
62: PetscCall(VecSetValue(x, i, val, INSERT_VALUES));
63: }
64: PetscCall(VecAssemblyBegin(x));
65: PetscCall(VecAssemblyEnd(x));
67: /* Transfer x to a vector y only defined on subcomm0 and vice versa */
68: if (mycolor == 0) { /* subcomm0 contains ranks 0, 3, 6, ... in PETSC_COMM_WORLD */
69: Vec y;
70: PetscScalar *yvalue;
71: PetscCall(VecCreate(subcomm, &y));
72: PetscCall(VecSetSizes(y, PETSC_DECIDE, N));
73: if (iscuda) {
74: PetscCall(VecSetType(y, VECCUDA));
75: } else {
76: PetscCall(VecSetType(y, VECSTANDARD));
77: }
78: PetscCall(VecSetUp(y));
79: PetscCall(PetscObjectSetName((PetscObject)y, "y_subcomm_0")); /* Give a name to view y clearly */
80: PetscCall(VecGetLocalSize(y, &n));
81: if (iscuda) {
82: #if defined(PETSC_HAVE_CUDA)
83: PetscCall(VecCUDAGetArray(y, &yvalue));
84: #endif
85: } else {
86: PetscCall(VecGetArray(y, &yvalue));
87: }
88: /* Create yg on PETSC_COMM_WORLD and alias yg with y. They share the memory pointed to by yvalue.
89: Note this is a collective call. All processes have to call it and supply consistent N.
90: */
91: if (iscuda) {
92: #if defined(PETSC_HAVE_CUDA)
93: PetscCall(VecCreateMPICUDAWithArray(PETSC_COMM_WORLD, 1, n, N, yvalue, &yg));
94: #endif
95: } else {
96: PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, n, N, yvalue, &yg));
97: }
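/* yg does not allocate a value array of its own; it is merely a PETSC_COMM_WORLD-wide view of y's local array,
   so scattering into yg below fills y directly */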
99: /* Create an identity map that makes yg[i] = x[i], i=0..N-1 */
100: PetscCall(VecGetOwnershipRange(yg, &low, &high)); /* low, high are global indices */
101: PetscCall(ISCreateStride(PETSC_COMM_SELF, high - low, low, 1, &ix));
102: PetscCall(ISDuplicate(ix, &iy));
104: /* Union of ix's on subcomm0 covers the full range of [0,N) */
105: PetscCall(VecScatterCreate(x, ix, yg, iy, &vscat));
106: PetscCall(VecScatterBegin(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));
107: PetscCall(VecScatterEnd(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));
109: /* Once yg has received the data from x, we return yvalue to y so that we can use y in other operations.
110: VecGetArray() must be paired with VecRestoreArray() (and likewise for the CUDA variants).
111: */
112: if (iscuda) {
113: #if defined(PETSC_HAVE_CUDA)
114: PetscCall(VecCUDARestoreArray(y, &yvalue));
115: #endif
116: } else {
117: PetscCall(VecRestoreArray(y, &yvalue));
118: }
120: /* Libraries on subcomm0 can safely use y now, for example, view and scale it */
121: PetscCall(VecView(y, PETSC_VIEWER_STDOUT_(subcomm)));
122: PetscCall(VecScale(y, 2.0));
124: /* Send the new y back to x */
125: PetscCall(VecGetArray(y, &yvalue)); /* If VecScale is done on GPU, PETSc will prepare a valid yvalue for access */
126: /* Supply new yvalue to yg without memory copying */
127: PetscCall(VecPlaceArray(yg, yvalue));
128: PetscCall(VecScatterBegin(vscat, yg, x, INSERT_VALUES, SCATTER_REVERSE));
129: PetscCall(VecScatterEnd(vscat, yg, x, INSERT_VALUES, SCATTER_REVERSE));
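/* VecResetArray() undoes the VecPlaceArray() above: yg reverts to the array it was created with, so that yvalue can be
   safely handed back to y by VecRestoreArray() */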
130: PetscCall(VecResetArray(yg));
131: PetscCall(VecRestoreArray(y, &yvalue));
132: PetscCall(VecDestroy(&y));
133: } else {
134: /* Ranks outside of subcomm0 do not supply values to yg */
135: if (iscuda) {
136: #if defined(PETSC_HAVE_CUDA)
137: PetscCall(VecCreateMPICUDAWithArray(PETSC_COMM_WORLD, 1, 0 /*n*/, N, NULL, &yg));
138: #endif
139: } else {
140: PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, 0 /*n*/, N, NULL, &yg));
141: }
143: /* Ranks in subcomm0 already specified the full range of the identity map. The remaining
144: ranks just need to create empty ISes to cheat VecScatterCreate.
145: */
146: PetscCall(ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, &ix));
147: PetscCall(ISDuplicate(ix, &iy));
149: PetscCall(VecScatterCreate(x, ix, yg, iy, &vscat));
150: PetscCall(VecScatterBegin(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));
151: PetscCall(VecScatterEnd(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));
153: /* Send the new y back to x. Ranks outside of subcomm0 actually have nothing to send.
154: But they have to call VecScatterBegin/End since these routines are collective.
155: */
156: PetscCall(VecScatterBegin(vscat, yg, x, INSERT_VALUES, SCATTER_REVERSE));
157: PetscCall(VecScatterEnd(vscat, yg, x, INSERT_VALUES, SCATTER_REVERSE));
158: }
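/* The reverse scatter overwrote every entry of x, so x now holds twice its original values */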
160: PetscCall(VecView(x, PETSC_VIEWER_STDOUT_WORLD));
161: PetscCall(ISDestroy(&ix));
162: PetscCall(ISDestroy(&iy));
163: PetscCall(VecDestroy(&x));
164: PetscCall(VecDestroy(&yg));
165: PetscCall(VecScatterDestroy(&vscat));
166: } /* world2sub */
168: /*===========================================================================
169: * Transfer a vector x defined on subcomm0 to a vector y defined on
170: * subcomm1. The two subcomms are not overlapping and their union is
171: * not necessarily equal to PETSC_COMM_WORLD.
172: *===========================================================================*/
173: if (sub2sub) {
174: if (mycolor == 0) {
175: /* Intentionally declare N as a local variable so that processes in subcomm1 do not know its value */
176: PetscInt n, N = 22;
177: Vec x, xg, yg;
178: IS ix, iy;
179: VecScatter vscat;
180: const PetscScalar *xvalue;
181: MPI_Comm intercomm, parentcomm;
182: PetscMPIInt lrank;
184: PetscCallMPI(MPI_Comm_rank(subcomm, &lrank));
185: /* x is on subcomm */
186: PetscCall(VecCreate(subcomm, &x));
187: PetscCall(VecSetSizes(x, PETSC_DECIDE, N));
188: if (iscuda) {
189: PetscCall(VecSetType(x, VECCUDA));
190: } else {
191: PetscCall(VecSetType(x, VECSTANDARD));
192: }
193: PetscCall(VecSetUp(x));
194: PetscCall(VecGetOwnershipRange(x, &low, &high));
196: /* initialize x = [0.0, 1.0, 2.0, ..., 21.0] */
197: for (i = low; i < high; i++) {
198: PetscScalar val = i;
199: PetscCall(VecSetValue(x, i, val, INSERT_VALUES));
200: }
201: PetscCall(VecAssemblyBegin(x));
202: PetscCall(VecAssemblyEnd(x));
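/* Build an intercommunicator between subcomm0 and subcomm1. The local leader is rank 0 of subcomm0; remote_leader = 1 is
   the rank in peer_comm (PETSC_COMM_WORLD) of subcomm1's leader, i.e., world rank 1, the smallest rank with color 1.
   Both sides must pass the same tag (100). */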
204: PetscCallMPI(MPI_Intercomm_create(subcomm, 0, PETSC_COMM_WORLD /*peer_comm*/, 1 /*remote_leader*/, 100 /*tag*/, &intercomm));
206: /* Tell rank 0 of subcomm1 the global size of x */
207: if (!lrank) PetscCallMPI(MPI_Send(&N, 1, MPIU_INT, 0 /*receiver's rank in remote comm, i.e., subcomm1*/, 200 /*tag*/, intercomm));
209: /* Create an intracomm PETSc can work on. Ranks in subcomm0 are ordered before ranks in subcomm1 in parentcomm.
210: But this order actually does not matter, since what we care about is the vector y, which is defined on subcomm1.
211: */
212: PetscCallMPI(MPI_Intercomm_merge(intercomm, 0 /*low*/, &parentcomm));
214: /* Create a vector xg on parentcomm, which shares memory with x */
215: PetscCall(VecGetLocalSize(x, &n));
216: if (iscuda) {
217: #if defined(PETSC_HAVE_CUDA)
218: PetscCall(VecCUDAGetArrayRead(x, &xvalue));
219: PetscCall(VecCreateMPICUDAWithArray(parentcomm, 1, n, N, xvalue, &xg));
220: #endif
221: } else {
222: PetscCall(VecGetArrayRead(x, &xvalue));
223: PetscCall(VecCreateMPIWithArray(parentcomm, 1, n, N, xvalue, &xg));
224: }
226: /* Ranks in subcomm0 have nothing on yg, so they simply have n=0, array=NULL */
227: if (iscuda) {
228: #if defined(PETSC_HAVE_CUDA)
229: PetscCall(VecCreateMPICUDAWithArray(parentcomm, 1, 0 /*n*/, N, NULL /*array*/, &yg));
230: #endif
231: } else {
232: PetscCall(VecCreateMPIWithArray(parentcomm, 1, 0 /*n*/, N, NULL /*array*/, &yg));
233: }
235: /* Create the VecScatter, which does the identity map yg[i] = xg[i], i=0..N-1. */
236: PetscCall(VecGetOwnershipRange(xg, &low, &high)); /* low, high are global indices of xg */
237: PetscCall(ISCreateStride(PETSC_COMM_SELF, high - low, low, 1, &ix));
238: PetscCall(ISDuplicate(ix, &iy));
239: PetscCall(VecScatterCreate(xg, ix, yg, iy, &vscat));
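/* Note that although x lives on subcomm0 and y on subcomm1, xg and yg both live on parentcomm, so this is an ordinary
   intra-communicator scatter */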
241: /* Scatter values from xg to yg */
242: PetscCall(VecScatterBegin(vscat, xg, yg, INSERT_VALUES, SCATTER_FORWARD));
243: PetscCall(VecScatterEnd(vscat, xg, yg, INSERT_VALUES, SCATTER_FORWARD));
245: /* After the VecScatter is done, xg is idle so we can safely return xvalue to x */
246: if (iscuda) {
247: #if defined(PETSC_HAVE_CUDA)
248: PetscCall(VecCUDARestoreArrayRead(x, &xvalue));
249: #endif
250: } else {
251: PetscCall(VecRestoreArrayRead(x, &xvalue));
252: }
253: PetscCall(VecDestroy(&x));
254: PetscCall(ISDestroy(&ix));
255: PetscCall(ISDestroy(&iy));
256: PetscCall(VecDestroy(&xg));
257: PetscCall(VecDestroy(&yg));
258: PetscCall(VecScatterDestroy(&vscat));
259: PetscCallMPI(MPI_Comm_free(&intercomm));
260: PetscCallMPI(MPI_Comm_free(&parentcomm));
261: } else if (mycolor == 1) { /* subcomm 1, containing ranks 1, 4, 7, ... in PETSC_COMM_WORLD */
262: PetscInt n, N;
263: Vec y, xg, yg;
264: IS ix, iy;
265: VecScatter vscat;
266: PetscScalar *yvalue;
267: MPI_Comm intercomm, parentcomm;
268: PetscMPIInt lrank;
270: PetscCallMPI(MPI_Comm_rank(subcomm, &lrank));
271: PetscCallMPI(MPI_Intercomm_create(subcomm, 0, PETSC_COMM_WORLD /*peer_comm*/, 0 /*remote_leader*/, 100 /*tag*/, &intercomm));
273: /* The two rank-0 processes talk to each other */
274: if (!lrank) PetscCallMPI(MPI_Recv(&N, 1, MPIU_INT, 0 /*sender's rank in remote comm, i.e. subcomm0*/, 200 /*tag*/, intercomm, MPI_STATUS_IGNORE));
275: /* Rank 0 of subcomm1 broadcasts N to the other ranks of subcomm1 */
276: PetscCallMPI(MPI_Bcast(&N, 1, MPIU_INT, 0 /*local root*/, subcomm));
278: /* Create an intracomm PETSc can work on */
279: PetscCallMPI(MPI_Intercomm_merge(intercomm, 1 /*high*/, &parentcomm));
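/* high = 1 here versus low = 0 on the subcomm0 side, hence ranks of subcomm0 precede ranks of subcomm1 in parentcomm */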
281: /* Ranks in subcomm1 have nothing on xg, so they simply have n=0, array=NULL.*/
282: if (iscuda) {
283: #if defined(PETSC_HAVE_CUDA)
284: PetscCall(VecCreateMPICUDAWithArray(parentcomm, 1 /*bs*/, 0 /*n*/, N, NULL /*array*/, &xg));
285: #endif
286: } else {
287: PetscCall(VecCreateMPIWithArray(parentcomm, 1 /*bs*/, 0 /*n*/, N, NULL /*array*/, &xg));
288: }
290: PetscCall(VecCreate(subcomm, &y));
291: PetscCall(VecSetSizes(y, PETSC_DECIDE, N));
292: if (iscuda) {
293: PetscCall(VecSetType(y, VECCUDA));
294: } else {
295: PetscCall(VecSetType(y, VECSTANDARD));
296: }
297: PetscCall(VecSetUp(y));
299: PetscCall(PetscObjectSetName((PetscObject)y, "y_subcomm_1")); /* Give a name to view y clearly */
300: PetscCall(VecGetLocalSize(y, &n));
301: if (iscuda) {
302: #if defined(PETSC_HAVE_CUDA)
303: PetscCall(VecCUDAGetArray(y, &yvalue));
304: #endif
305: } else {
306: PetscCall(VecGetArray(y, &yvalue));
307: }
308: /* Create a vector yg on parentcomm, which shares memory with y. xg and yg must be
309: created in the same order in subcomm0/1. For example, we cannot reverse the order of
310: creating xg and yg in subcomm1.
311: */
312: if (iscuda) {
313: #if defined(PETSC_HAVE_CUDA)
314: PetscCall(VecCreateMPICUDAWithArray(parentcomm, 1 /*bs*/, n, N, yvalue, &yg));
315: #endif
316: } else {
317: PetscCall(VecCreateMPIWithArray(parentcomm, 1 /*bs*/, n, N, yvalue, &yg));
318: }
320: /* Ranks in subcomm0 already specified the full range of the identity map.
321: Ranks in subcomm1 just need to create empty ISes to cheat VecScatterCreate.
322: */
323: PetscCall(ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, &ix));
324: PetscCall(ISDuplicate(ix, &iy));
325: PetscCall(VecScatterCreate(xg, ix, yg, iy, &vscat));
327: /* Scatter values from xg to yg */
328: PetscCall(VecScatterBegin(vscat, xg, yg, INSERT_VALUES, SCATTER_FORWARD));
329: PetscCall(VecScatterEnd(vscat, xg, yg, INSERT_VALUES, SCATTER_FORWARD));
331: /* After the VecScatter is done, values in yg are available. Since y is what we are interested in, we return yvalue to y */
332: if (iscuda) {
333: #if defined(PETSC_HAVE_CUDA)
334: PetscCall(VecCUDARestoreArray(y, &yvalue));
335: #endif
336: } else {
337: PetscCall(VecRestoreArray(y, &yvalue));
338: }
340: /* Libraries on subcomm1 can safely use y now, for example, view it */
341: PetscCall(VecView(y, PETSC_VIEWER_STDOUT_(subcomm)));
343: PetscCall(VecDestroy(&y));
344: PetscCall(ISDestroy(&ix));
345: PetscCall(ISDestroy(&iy));
346: PetscCall(VecDestroy(&xg));
347: PetscCall(VecDestroy(&yg));
348: PetscCall(VecScatterDestroy(&vscat));
349: PetscCallMPI(MPI_Comm_free(&intercomm));
350: PetscCallMPI(MPI_Comm_free(&parentcomm));
351: } else if (mycolor == 2) { /* subcomm2 */
352: /* Processes in subcomm2 do not participate in the VecScatter. They can freely do unrelated things on subcomm2 */
353: }
354: } /* sub2sub */
356: /*===========================================================================
357: * Copy a vector x defined on PETSC_COMM_WORLD to vectors y defined on
358: * every subcommunicator of PETSC_COMM_WORLD. We could use multiple transfers
359: * as we did in case 1, but that is not efficient. Instead, we use one VecScatter
360: * to achieve that.
361: *===========================================================================*/
362: if (world2subs) {
363: Vec y;
364: PetscInt n, N = 15, xstart, ystart, low, high;
365: PetscScalar *yvalue;
367: /* Initialize x to [0, 1, 2, 3, ..., N-1] */
368: PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
369: PetscCall(VecSetSizes(x, PETSC_DECIDE, N));
370: if (iscuda) {
371: PetscCall(VecSetType(x, VECCUDA));
372: } else {
373: PetscCall(VecSetType(x, VECSTANDARD));
374: }
375: PetscCall(VecSetUp(x));
376: PetscCall(VecGetOwnershipRange(x, &low, &high));
377: for (i = low; i < high; i++) PetscCall(VecSetValue(x, i, (PetscScalar)i, INSERT_VALUES));
378: PetscCall(VecAssemblyBegin(x));
379: PetscCall(VecAssemblyEnd(x));
381: /* Every subcomm has a y with the same global length as x */
382: PetscCall(VecCreate(subcomm, &y));
383: PetscCall(VecSetSizes(y, PETSC_DECIDE, N));
384: if (iscuda) {
385: PetscCall(VecSetType(y, VECCUDA));
386: } else {
387: PetscCall(VecSetType(y, VECSTANDARD));
388: }
389: PetscCall(VecSetUp(y));
390: PetscCall(VecGetLocalSize(y, &n));
392: /* Create a global vector yg on PETSC_COMM_WORLD using y's memory. yg's global size = N*(number of subcommunicators).
393: Each rank in the subcomms contributes a piece to construct the global yg. Keep in mind that pieces from a subcomm are not
394: necessarily consecutive in yg. That depends on how PETSC_COMM_WORLD is split. In our case, subcomm0 is made of ranks
395: 0, 3, 6, etc. from PETSC_COMM_WORLD. So subcomm0's pieces are interleaved with pieces from other subcomms in yg.
396: */
397: if (iscuda) {
398: #if defined(PETSC_HAVE_CUDA)
399: PetscCall(VecCUDAGetArray(y, &yvalue));
400: PetscCall(VecCreateMPICUDAWithArray(PETSC_COMM_WORLD, 1, n, PETSC_DECIDE, yvalue, &yg));
401: #endif
402: } else {
403: PetscCall(VecGetArray(y, &yvalue));
404: PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, n, PETSC_DECIDE, yvalue, &yg));
405: }
406: PetscCall(PetscObjectSetName((PetscObject)yg, "yg_on_subcomms")); /* Give a name to view yg clearly */
408: /* The following two lines are key. From xstart, we know where to pull entries from x. Note that we get xstart from y,
409: since the first entry of y on this rank is from x[xstart]. From ystart, we know where to put entries into yg.
410: */
411: PetscCall(VecGetOwnershipRange(y, &xstart, NULL));
412: PetscCall(VecGetOwnershipRange(yg, &ystart, NULL));
414: PetscCall(ISCreateStride(PETSC_COMM_SELF, n, xstart, 1, &ix));
415: PetscCall(ISCreateStride(PETSC_COMM_SELF, n, ystart, 1, &iy));
416: PetscCall(VecScatterCreate(x, ix, yg, iy, &vscat));
417: PetscCall(VecScatterBegin(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));
418: PetscCall(VecScatterEnd(vscat, x, yg, INSERT_VALUES, SCATTER_FORWARD));
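/* This single scatter copies all of x into every subcomm's y at once: on each rank, ix selects the n entries
   x[xstart..xstart+n) that make up that rank's slice of y, and within one subcomm these slices cover the whole of [0,N) */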
420: /* View yg on PETSC_COMM_WORLD before destroying it. We shall see the interleaving effect in the output. */
421: PetscCall(VecView(yg, PETSC_VIEWER_STDOUT_WORLD));
422: PetscCall(VecDestroy(&yg));
424: /* Restore yvalue so that processes in the subcomm can use y from now on. */
425: if (iscuda) {
426: #if defined(PETSC_HAVE_CUDA)
427: PetscCall(VecCUDARestoreArray(y, &yvalue));
428: #endif
429: } else {
430: PetscCall(VecRestoreArray(y, &yvalue));
431: }
432: PetscCall(VecScale(y, 3.0));
434: PetscCall(ISDestroy(&ix)); /* One can also destroy ix, iy immediately after VecScatterCreate() */
435: PetscCall(ISDestroy(&iy));
436: PetscCall(VecDestroy(&x));
437: PetscCall(VecDestroy(&y));
438: PetscCall(VecScatterDestroy(&vscat));
439: } /* world2subs */
441: PetscCallMPI(MPI_Comm_free(&subcomm));
442: PetscCall(PetscFinalize());
443: return 0;
444: }
446: /*TEST
448: build:
449: requires: !defined(PETSC_HAVE_MPIUNI)
451: testset:
452: nsize: 7
454: test:
455: suffix: 1
456: args: -world2sub
458: test:
459: suffix: 2
460: args: -sub2sub
461: # deadlocks with NECMPI and INTELMPI (20210400300)
462: requires: !defined(PETSC_HAVE_NECMPI) !defined(PETSC_HAVE_I_MPI_NUMVERSION)
464: test:
465: suffix: 3
466: args: -world2subs
468: test:
469: suffix: 4
470: args: -world2sub -vectype cuda
471: requires: cuda
473: test:
474: suffix: 5
475: args: -sub2sub -vectype cuda
476: requires: cuda
478: test:
479: suffix: 6
480: args: -world2subs -vectype cuda
481: requires: cuda
483: test:
484: suffix: 7
485: args: -world2sub -sf_type neighbor
486: output_file: output/ex9_1.out
487: # OpenMPI has a bug wrt MPI_Neighbor_alltoallv etc (https://github.com/open-mpi/ompi/pull/6782). Once the patch is in, we can remove !defined(PETSC_HAVE_OMPI_MAJOR_VERSION)
488: # segfaults with NECMPI
489: requires: defined(PETSC_HAVE_MPI_NEIGHBORHOOD_COLLECTIVES) !defined(PETSC_HAVE_OMPI_MAJOR_VERSION) !defined(PETSC_HAVE_NECMPI)
490: TEST*/