Actual source code: ex1.c

static const char help[] = "Test star forest communication (PetscSF)\n\n";

/*
    Description: A star is a simple tree with one root and zero or more leaves.
    A star forest is a union of disjoint stars.
    Many common communication patterns can be expressed as updates of rootdata using leafdata and vice versa.
    This example creates a star forest, communicates values using the graph (see options for types of communication), views the graph, then destroys it.
*/
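
/*
   A typical way to build and run this example (assuming a standard PETSc makefile setup;
   adjust the commands to your environment):
       make ex1
       mpiexec -n 4 ./ex1 -test_bcast -sf_type basic
*/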

/*
  Include petscsf.h so we can use PetscSF objects. Note that this automatically
  includes petscsys.h.
*/
#include <petscsf.h>
#include <petscviewer.h>

/* like PetscSFView() but with alternative array of local indices */
static PetscErrorCode PetscSFViewCustomLocals_Private(PetscSF sf, const PetscInt locals[], PetscViewer viewer)
{
  const PetscSFNode *iremote;
  PetscInt           i, nroots, nleaves, nranks;
  PetscMPIInt        rank;

  MPI_Comm_rank(PetscObjectComm((PetscObject)sf), &rank);
  PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);
  PetscSFGetRootRanks(sf, &nranks, NULL, NULL, NULL, NULL);
  PetscViewerASCIIPushTab(viewer);
  PetscViewerASCIIPushSynchronized(viewer);
  PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Number of roots=%" PetscInt_FMT ", leaves=%" PetscInt_FMT ", remote ranks=%" PetscInt_FMT "\n", rank, nroots, nleaves, nranks);
  for (i = 0; i < nleaves; i++) PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " <- (%" PetscInt_FMT ",%" PetscInt_FMT ")\n", rank, locals[i], iremote[i].rank, iremote[i].index);
  PetscViewerFlush(viewer);
  PetscViewerASCIIPopTab(viewer);
  PetscViewerASCIIPopSynchronized(viewer);
  return 0;
}

int main(int argc, char **argv)
{
  PetscInt     i, nroots, nrootsalloc, nleaves, nleavesalloc, *mine, stride;
  PetscSFNode *remote;
  PetscMPIInt  rank, size;
  PetscSF      sf;
  PetscBool    test_all, test_bcast, test_bcastop, test_reduce, test_degree, test_fetchandop, test_gather, test_scatter, test_embed, test_invert, test_sf_distribute, test_char;
  MPI_Op       mop = MPI_OP_NULL; /* initialize to prevent compiler warnings with cxx_quad build */
  char         opstring[256];
  PetscBool    strflg;
  PetscInitialize(&argc, &argv, (char *)0, help);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
  MPI_Comm_size(PETSC_COMM_WORLD, &size);

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "PetscSF Test Options", "none");
  test_all = PETSC_FALSE;
  PetscOptionsBool("-test_all", "Test all SF communications", "", test_all, &test_all, NULL);
  test_bcast = test_all;
  PetscOptionsBool("-test_bcast", "Test broadcast", "", test_bcast, &test_bcast, NULL);
  test_bcastop = test_all;
  PetscOptionsBool("-test_bcastop", "Test broadcast and reduce", "", test_bcastop, &test_bcastop, NULL);
  test_reduce = test_all;
  PetscOptionsBool("-test_reduce", "Test reduction", "", test_reduce, &test_reduce, NULL);
  test_char = test_all;
  PetscOptionsBool("-test_char", "Test signed char, unsigned char, and char", "", test_char, &test_char, NULL);
  mop = MPI_SUM;
  PetscStrcpy(opstring, "sum");
  PetscOptionsString("-test_op", "Designate which MPI_Op to use", "", opstring, opstring, sizeof(opstring), NULL);
  PetscStrcmp("sum", opstring, &strflg);
  if (strflg) mop = MPIU_SUM;
  PetscStrcmp("prod", opstring, &strflg);
  if (strflg) mop = MPI_PROD;
  PetscStrcmp("max", opstring, &strflg);
  if (strflg) mop = MPI_MAX;
  PetscStrcmp("min", opstring, &strflg);
  if (strflg) mop = MPI_MIN;
  PetscStrcmp("land", opstring, &strflg);
  if (strflg) mop = MPI_LAND;
  PetscStrcmp("band", opstring, &strflg);
  if (strflg) mop = MPI_BAND;
  PetscStrcmp("lor", opstring, &strflg);
  if (strflg) mop = MPI_LOR;
  PetscStrcmp("bor", opstring, &strflg);
  if (strflg) mop = MPI_BOR;
  PetscStrcmp("lxor", opstring, &strflg);
  if (strflg) mop = MPI_LXOR;
  PetscStrcmp("bxor", opstring, &strflg);
  if (strflg) mop = MPI_BXOR;
  test_degree = test_all;
  PetscOptionsBool("-test_degree", "Test computation of vertex degree", "", test_degree, &test_degree, NULL);
  test_fetchandop = test_all;
  PetscOptionsBool("-test_fetchandop", "Test atomic Fetch-And-Op", "", test_fetchandop, &test_fetchandop, NULL);
  test_gather = test_all;
  PetscOptionsBool("-test_gather", "Test point gather", "", test_gather, &test_gather, NULL);
  test_scatter = test_all;
  PetscOptionsBool("-test_scatter", "Test point scatter", "", test_scatter, &test_scatter, NULL);
  test_embed = test_all;
  PetscOptionsBool("-test_embed", "Test point embed", "", test_embed, &test_embed, NULL);
  test_invert = test_all;
  PetscOptionsBool("-test_invert", "Test point invert", "", test_invert, &test_invert, NULL);
  stride = 1;
  PetscOptionsInt("-stride", "Stride for leaf and root data", "", stride, &stride, NULL);
  test_sf_distribute = PETSC_FALSE;
  PetscOptionsBool("-test_sf_distribute", "Create an SF that 'distributes' to each process, like an alltoall", "", test_sf_distribute, &test_sf_distribute, NULL);
  PetscOptionsString("-test_op", "Designate which MPI_Op to use", "", opstring, opstring, sizeof(opstring), NULL);
  PetscOptionsEnd();

  if (test_sf_distribute) {
    nroots       = size;
    nrootsalloc  = size;
    nleaves      = size;
    nleavesalloc = size;
    mine         = NULL;
    PetscMalloc1(nleaves, &remote);
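    /* Leaf i on this rank references root index 'rank' on rank i, so every process
       receives one entry from every process, like an MPI_Alltoall. */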
    for (i = 0; i < size; i++) {
      remote[i].rank  = i;
      remote[i].index = rank;
    }
  } else {
    nroots       = 2 + (PetscInt)(rank == 0);
    nrootsalloc  = nroots * stride;
    nleaves      = 2 + (PetscInt)(rank > 0);
    nleavesalloc = nleaves * stride;
    mine         = NULL;
    if (stride > 1) {
      PetscInt i;

      PetscMalloc1(nleaves, &mine);
      for (i = 0; i < nleaves; i++) mine[i] = stride * i;
    }
    PetscMalloc1(nleaves, &remote);
    /* Left periodic neighbor */
    remote[0].rank  = (rank + size - 1) % size;
    remote[0].index = 1 * stride;
    /* Right periodic neighbor */
    remote[1].rank  = (rank + 1) % size;
    remote[1].index = 0 * stride;
    if (rank > 0) { /* All processes other than rank 0 also reference root 2 (times stride) on rank 0 */
      remote[2].rank  = 0;
      remote[2].index = 2 * stride;
    }
  }
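
  /* At this point the graph is defined: 'mine' lists the local leaf indices (NULL means the leaves
     are the consecutive indices 0..nleaves-1) and remote[i] gives the (rank, root index) that leaf i references. */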

  /* Create a star forest for communication. In this example the leaf space is dense when stride == 1, so mine is NULL in that case; with -stride > 1 mine holds the strided leaf indices. */
  PetscSFCreate(PETSC_COMM_WORLD, &sf);
  PetscSFSetFromOptions(sf);
  PetscSFSetGraph(sf, nrootsalloc, nleaves, mine, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);
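  /* PETSC_OWN_POINTER hands ownership of 'mine' and 'remote' to the SF, which frees them in PetscSFDestroy(). */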
  PetscSFSetUp(sf);

  /* View graph, mostly useful for debugging purposes. */
  PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO_DETAIL);
  PetscSFView(sf, PETSC_VIEWER_STDOUT_WORLD);
  PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);

  if (test_bcast) { /* broadcast rootdata into leafdata */
    PetscInt *rootdata, *leafdata;
    /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
     * user-defined structures, could also be used. */
    PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata);
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Initialize the leaf buffer; these values are never used. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
    PetscSFBcastBegin(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE);
    PetscSFBcastEnd(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE);
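    /* Each leaf now holds a copy of the root value it references; padding entries (when stride > 1) keep -1. */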
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Rootdata\n");
    PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Leafdata\n");
    PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(rootdata, leafdata);
  }

  if (test_bcast && test_char) { /* Bcast with char */
    PetscInt len;
    char     buf[256];
    char    *rootdata, *leafdata;
    PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata);
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = '*';
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 'A' + rank * 3 + i; /* ranks are small here, so the result still fits in a char */
    /* Initialize the leaf buffer; these values are never used. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = '?';

    PetscSFBcastBegin(sf, MPI_CHAR, rootdata, leafdata, MPI_REPLACE);
    PetscSFBcastEnd(sf, MPI_CHAR, rootdata, leafdata, MPI_REPLACE);

    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Rootdata in type of char\n");
    len = 0;
    PetscSNPrintf(buf, 256, "%4d:", rank);
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscSNPrintf(buf + len, 256 - len, "%5c", rootdata[i]);
      len += 5;
    }
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);

    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Bcast Leafdata in type of char\n");
    len = 0;
    PetscSNPrintf(buf, 256, "%4d:", rank);
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscSNPrintf(buf + len, 256 - len, "%5c", leafdata[i]);
      len += 5;
    }
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);

    PetscFree2(rootdata, leafdata);
  }

  if (test_bcastop) { /* Broadcast rootdata and combine it into leafdata with the selected op */
    PetscInt *rootdata, *leafdata;
    /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
     * user-defined structures, could also be used. */
    PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata);
    /* Set rootdata buffer to be broadcast */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Set leaf values to reduce with */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -10 * (rank + 1) - i;
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-BcastAndOp Leafdata\n");
    PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD);
    /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
    PetscSFBcastBegin(sf, MPIU_INT, rootdata, leafdata, mop);
    PetscSFBcastEnd(sf, MPIU_INT, rootdata, leafdata, mop);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## BcastAndOp Rootdata\n");
    PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## BcastAndOp Leafdata\n");
    PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(rootdata, leafdata);
  }

  if (test_reduce) { /* Reduce leafdata into rootdata */
    PetscInt *rootdata, *leafdata;
    PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata);
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 100 * (rank + 1) + i;
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 1000 * (rank + 1) + 10 * i;
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata\n");
    PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD);
    /* Perform reduction. Computation or other communication can be performed between the begin and end calls.
     * By default the values are summed, but other MPI_Ops can be selected with -test_op (e.g., MPI_MAX, MPI_PROD). */
    PetscSFReduceBegin(sf, MPIU_INT, leafdata, rootdata, mop);
    PetscSFReduceEnd(sf, MPIU_INT, leafdata, rootdata, mop);
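    /* Each root now holds its initial value combined, via mop, with one contribution from every leaf that references it. */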
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata\n");
    PetscIntView(nleavesalloc, leafdata, PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata\n");
    PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(rootdata, leafdata);
  }

  if (test_reduce && test_char) { /* Reduce with signed char */
    PetscInt     len;
    char         buf[256];
    signed char *rootdata, *leafdata;
    PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata);
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 10 * (rank + 1) + i;
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 50 * (rank + 1) + 10 * i;
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata in type of signed char\n");

    len = 0;
    PetscSNPrintf(buf, 256, "%4d:", rank);
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscSNPrintf(buf + len, 256 - len, "%5d", rootdata[i]);
      len += 5;
    }
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);

    /* We use MPI_SIGNED_CHAR here; using MPI_CHAR would trigger an error, since the MPI standard does not
       support reductions on MPI_CHAR. Testing with -test_op max, one can see that the sign is honored by MPI_MAX.
     */
    PetscSFReduceBegin(sf, MPI_SIGNED_CHAR, leafdata, rootdata, mop);
    PetscSFReduceEnd(sf, MPI_SIGNED_CHAR, leafdata, rootdata, mop);

    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata in type of signed char\n");
    len = 0;
    PetscSNPrintf(buf, 256, "%4d:", rank);
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscSNPrintf(buf + len, 256 - len, "%5d", leafdata[i]);
      len += 5;
    }
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);

    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata in type of signed char\n");
    len = 0;
    PetscSNPrintf(buf, 256, "%4d:", rank);
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscSNPrintf(buf + len, 256 - len, "%5d", rootdata[i]);
      len += 5;
    }
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);

    PetscFree2(rootdata, leafdata);
  }

  if (test_reduce && test_char) { /* Reduce with unsigned char */
    PetscInt       len;
    char           buf[256];
    unsigned char *rootdata, *leafdata;
    PetscMalloc2(nrootsalloc, &rootdata, nleavesalloc, &leafdata);
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = 0;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 10 * (rank + 1) + i;
    /* Set leaf values to reduce. */
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = 0;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 50 * (rank + 1) + 10 * i;
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Pre-Reduce Rootdata in type of unsigned char\n");

    len = 0;
    PetscSNPrintf(buf, 256, "%4d:", rank);
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscSNPrintf(buf + len, 256 - len, "%5u", rootdata[i]);
      len += 5;
    }
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);

    /* We use MPI_UNSIGNED_CHAR here; using MPI_CHAR would trigger an error, since the MPI standard does not
       support reductions on MPI_CHAR. Compare with the signed char case above when testing with -test_op max.
     */
    PetscSFReduceBegin(sf, MPI_UNSIGNED_CHAR, leafdata, rootdata, mop);
    PetscSFReduceEnd(sf, MPI_UNSIGNED_CHAR, leafdata, rootdata, mop);

    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Leafdata in type of unsigned char\n");
    len = 0;
    PetscSNPrintf(buf, 256, "%4d:", rank);
    len += 5;
    for (i = 0; i < nleavesalloc; i++) {
      PetscSNPrintf(buf + len, 256 - len, "%5u", leafdata[i]);
      len += 5;
    }
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);

    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Reduce Rootdata in type of unsigned char\n");
    len = 0;
    PetscSNPrintf(buf, 256, "%4d:", rank);
    len += 5;
    for (i = 0; i < nrootsalloc; i++) {
      PetscSNPrintf(buf + len, 256 - len, "%5u", rootdata[i]);
      len += 5;
    }
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "%s\n", buf);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);

    PetscFree2(rootdata, leafdata);
  }

  if (test_degree) {
    const PetscInt *degree;
    PetscSFComputeDegreeBegin(sf, &degree);
    PetscSFComputeDegreeEnd(sf, &degree);
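    /* degree[i] is the number of leaves (over all processes) that reference root i; unused padding roots have degree 0.
       The degree array is owned by the SF and must not be freed here. */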
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Root degrees\n");
    PetscIntView(nrootsalloc, degree, PETSC_VIEWER_STDOUT_WORLD);
  }

  if (test_fetchandop) {
    /* Cannot use text compare here because token ordering is not deterministic */
    PetscInt *leafdata, *leafupdate, *rootdata;
    PetscMalloc3(nleavesalloc, &leafdata, nleavesalloc, &leafupdate, nrootsalloc, &rootdata);
    for (i = 0; i < nleavesalloc; i++) leafdata[i] = -1;
    for (i = 0; i < nleaves; i++) leafdata[i * stride] = 1;
    for (i = 0; i < nrootsalloc; i++) rootdata[i] = -1;
    for (i = 0; i < nroots; i++) rootdata[i * stride] = 0;
    PetscSFFetchAndOpBegin(sf, MPIU_INT, rootdata, leafdata, leafupdate, mop);
    PetscSFFetchAndOpEnd(sf, MPIU_INT, rootdata, leafdata, leafupdate, mop);
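    /* For each leaf, leafupdate receives the root value as it was just before that leaf's contribution was applied,
       so the values depend on the (nondeterministic) order in which the atomic updates arrive. */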
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Rootdata (sum of 1 from each leaf)\n");
    PetscIntView(nrootsalloc, rootdata, PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Leafupdate (value at roots prior to my atomic update)\n");
    PetscIntView(nleavesalloc, leafupdate, PETSC_VIEWER_STDOUT_WORLD);
    PetscFree3(leafdata, leafupdate, rootdata);
  }

  if (test_gather) {
    const PetscInt *degree;
    PetscInt        inedges, *indata, *outdata;
    PetscSFComputeDegreeBegin(sf, &degree);
    PetscSFComputeDegreeEnd(sf, &degree);
    for (i = 0, inedges = 0; i < nrootsalloc; i++) inedges += degree[i];
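    /* inedges is the size of this rank's multi-root space: root i owns degree[i] consecutive slots, one per incoming leaf. */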
    PetscMalloc2(inedges, &indata, nleavesalloc, &outdata);
    for (i = 0; i < nleavesalloc; i++) outdata[i] = -1;
    for (i = 0; i < nleaves; i++) outdata[i * stride] = 1000 * (rank + 1) + i;
    PetscSFGatherBegin(sf, MPIU_INT, outdata, indata);
    PetscSFGatherEnd(sf, MPIU_INT, outdata, indata);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Gathered data at multi-roots from leaves\n");
    PetscIntView(inedges, indata, PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(indata, outdata);
  }

  if (test_scatter) {
    const PetscInt *degree;
    PetscInt        j, count, inedges, *indata, *outdata;
    PetscSFComputeDegreeBegin(sf, &degree);
    PetscSFComputeDegreeEnd(sf, &degree);
    for (i = 0, inedges = 0; i < nrootsalloc; i++) inedges += degree[i];
    PetscMalloc2(inedges, &indata, nleavesalloc, &outdata);
    for (i = 0; i < nleavesalloc; i++) outdata[i] = -1;
    for (i = 0, count = 0; i < nrootsalloc; i++) {
      for (j = 0; j < degree[i]; j++) indata[count++] = 1000 * (rank + 1) + 100 * i + j;
    }
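    /* Slot j of root i is given a distinct value encoding (rank, root, edge) so the per-edge routing is visible in the scattered leafdata. */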
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Data at multi-roots, to scatter to leaves\n");
    PetscIntView(inedges, indata, PETSC_VIEWER_STDOUT_WORLD);

    PetscSFScatterBegin(sf, MPIU_INT, indata, outdata);
    PetscSFScatterEnd(sf, MPIU_INT, indata, outdata);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Scattered data at leaves\n");
    PetscIntView(nleavesalloc, outdata, PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(indata, outdata);
  }

  if (test_embed) {
    const PetscInt nroots = 1 + (PetscInt)(rank == 0);
    PetscInt       selected[2];
    PetscSF        esf;

    selected[0] = stride;
    selected[1] = 2 * stride;
    PetscSFCreateEmbeddedRootSF(sf, nroots, selected, &esf);
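    /* The embedded SF keeps only the edges whose roots are listed in selected[] (rank 0 selects roots stride and 2*stride,
       the other ranks select only root stride); all other edges are dropped and root indices are not renumbered. */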
    PetscSFSetUp(esf);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Embedded PetscSF\n");
    PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO_DETAIL);
    PetscSFView(esf, PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
    PetscSFDestroy(&esf);
  }

  if (test_invert) {
    const PetscInt *degree;
    PetscInt       *mRootsOrigNumbering;
    PetscInt        inedges;
    PetscSF         msf, imsf;

    PetscSFGetMultiSF(sf, &msf);
    PetscSFCreateInverseSF(msf, &imsf);
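    /* The multi-SF expands each root into degree[i] slots, one per incoming leaf; the inverse SF swaps the root and leaf
       roles of that graph. msf is owned by sf and is not destroyed here, while imsf is a new object we must destroy. */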
    PetscSFSetUp(msf);
    PetscSFSetUp(imsf);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Multi-SF\n");
    PetscSFView(msf, PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Multi-SF roots indices in original SF roots numbering\n");
    PetscSFComputeDegreeBegin(sf, &degree);
    PetscSFComputeDegreeEnd(sf, &degree);
    PetscSFComputeMultiRootOriginalNumbering(sf, degree, &inedges, &mRootsOrigNumbering);
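    /* mRootsOrigNumbering[k] is the index, in the original SF root numbering, of the root that owns multi-root slot k;
       the array is allocated by the call and freed with PetscFree() below. */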
    PetscIntView(inedges, mRootsOrigNumbering, PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Inverse of Multi-SF\n");
    PetscSFView(imsf, PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "## Inverse of Multi-SF, original numbering\n");
    PetscSFViewCustomLocals_Private(imsf, mRootsOrigNumbering, PETSC_VIEWER_STDOUT_WORLD);
    PetscSFDestroy(&imsf);
    PetscFree(mRootsOrigNumbering);
  }

  /* Clean storage for star forest. */
  PetscSFDestroy(&sf);
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_bcast -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 2
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_reduce -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 2_basic
      nsize: 4
      args: -test_reduce -sf_type basic

   test:
      suffix: 3
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_degree -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 3_basic
      nsize: 4
      args: -test_degree -sf_type basic

   test:
      suffix: 4
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_gather -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 4_basic
      nsize: 4
      args: -test_gather -sf_type basic

   test:
      suffix: 4_stride
      nsize: 4
      args: -test_gather -sf_type basic -stride 2

   test:
      suffix: 5
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_scatter -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 5_basic
      nsize: 4
      args: -test_scatter -sf_type basic

   test:
      suffix: 5_stride
      nsize: 4
      args: -test_scatter -sf_type basic -stride 2

   test:
      suffix: 6
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      # No -sf_window_flavor dynamic due to bug https://gitlab.com/petsc/petsc/issues/555
      args: -test_embed -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 6_basic
      nsize: 4
      args: -test_embed -sf_type basic

   test:
      suffix: 7
      nsize: 4
      filter: grep -v "type" | grep -v "sort"
      args: -test_invert -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 7_basic
      nsize: 4
      args: -test_invert -sf_type basic

   test:
      suffix: basic
      nsize: 4
      args: -test_bcast -sf_type basic
      output_file: output/ex1_1_basic.out

   test:
      suffix: bcastop_basic
      nsize: 4
      args: -test_bcastop -sf_type basic
      output_file: output/ex1_bcastop_basic.out

   test:
      suffix: 8
      nsize: 3
      filter: grep -v "type" | grep -v "sort"
      args: -test_bcast -test_sf_distribute -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create dynamic allocate}}
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 8_basic
      nsize: 3
      args: -test_bcast -test_sf_distribute -sf_type basic

   test:
      suffix: 9_char
      nsize: 4
      args: -sf_type basic -test_bcast -test_reduce -test_op max -test_char

   # Here we do not test -sf_window_flavor dynamic since it is designed for repeated SFs with few different rootdata pointers
   test:
      suffix: 10
      filter: grep -v "type" | grep -v "sort"
      nsize: 4
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor {{create allocate}} -test_all -test_bcastop 0 -test_fetchandop 0
      requires: defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   # The nightly test suite with MPICH uses ch3:sock, which is broken when winsize == 0 in some of the processes
   test:
      suffix: 10_shared
      output_file: output/ex1_10.out
      filter: grep -v "type" | grep -v "sort"
      nsize: 4
      args: -sf_type window -sf_window_sync {{fence active lock}} -sf_window_flavor shared -test_all -test_bcastop 0 -test_fetchandop 0
      requires: defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY) !defined(PETSC_HAVE_MPICH_NUMVERSION) defined(PETSC_HAVE_MPI_ONE_SIDED) defined(PETSC_HAVE_MPI_FEATURE_DYNAMIC_WINDOW)

   test:
      suffix: 10_basic
      nsize: 4
      args: -sf_type basic -test_all -test_bcastop 0 -test_fetchandop 0

TEST*/