Actual source code: sfgatherv.c
#include <../src/vec/is/sf/impls/basic/gatherv/sfgatherv.h>

/* Reuse the type. The difference is that some fields (displs, recvcounts) are significant
   only on rank 0 in Gatherv. On the other ranks they are NULL and harmless.
*/
typedef PetscSF_Allgatherv PetscSF_Gatherv;

static PetscErrorCode PetscSFLinkStartCommunication_Gatherv(PetscSF sf, PetscSFLink link, PetscSFDirection direction)
{
  MPI_Comm         comm    = MPI_COMM_NULL;
  PetscMPIInt      count;
  PetscSF_Gatherv *dat     = (PetscSF_Gatherv *)sf->data;
  void            *rootbuf = NULL, *leafbuf = NULL; /* buffers seen by MPI */
  MPI_Request     *req     = NULL;
  MPI_Datatype     unit    = link->unit;

  PetscFunctionBegin;
  if (direction == PETSCSF_ROOT2LEAF) {
    PetscCall(PetscSFLinkCopyRootBufferInCaseNotUseGpuAwareMPI(sf, link, PETSC_TRUE /* device2host before sending */));
  } else {
    PetscCall(PetscSFLinkCopyLeafBufferInCaseNotUseGpuAwareMPI(sf, link, PETSC_TRUE /* device2host */));
  }

  PetscCall(PetscObjectGetComm((PetscObject)sf, &comm));
  PetscCall(PetscMPIIntCast(sf->nroots, &count));
  PetscCall(PetscSFLinkGetMPIBuffersAndRequests(sf, link, direction, &rootbuf, &leafbuf, &req, NULL));
  PetscCall(PetscSFLinkSyncStreamBeforeCallMPI(sf, link));

  if (direction == PETSCSF_ROOT2LEAF) { /* see the plain-MPI sketch after this function */
    PetscCallMPI(MPIU_Igatherv(rootbuf, count, unit, leafbuf, dat->recvcounts, dat->displs, unit, 0 /*rank 0*/, comm, req));
  } else {
    PetscCallMPI(MPIU_Iscatterv(leafbuf, dat->recvcounts, dat->displs, unit, rootbuf, count, unit, 0, comm, req));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
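/* ------------------------------------------------------------------------------------------------
   A standalone sketch, not part of sfgatherv.c: the plain-MPI pattern the two branches above map
   onto, using blocking MPI_Gatherv/MPI_Scatterv instead of the MPIU_ nonblocking wrappers. The
   program and every name in it are illustrative assumptions; each rank contributes rank+1 doubles,
   and only rank 0 needs recvcounts/displs, mirroring dat->recvcounts and dat->displs above.
*/
#if 0
#include <mpi.h>
#include <stdlib.h>

int main(int argc, char **argv)
{
  int     rank, size, count, total = 0;
  int    *recvcounts = NULL, *displs = NULL;
  double *sendbuf, *recvbuf = NULL;

  MPI_Init(&argc, &argv);
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  MPI_Comm_size(MPI_COMM_WORLD, &size);

  count   = rank + 1; /* variable contribution per rank, hence the "v" in Gatherv */
  sendbuf = (double *)malloc(count * sizeof(double));
  for (int i = 0; i < count; i++) sendbuf[i] = (double)rank;

  if (rank == 0) { /* only the root rank needs the counts and displacements */
    recvcounts = (int *)malloc(size * sizeof(int));
    displs     = (int *)malloc(size * sizeof(int));
    for (int r = 0; r < size; r++) {
      recvcounts[r] = r + 1;
      displs[r]     = total;
      total += recvcounts[r];
    }
    recvbuf = (double *)malloc(total * sizeof(double));
  }

  /* root-to-leaf direction: every rank's block lands on rank 0, as MPIU_Igatherv() does above */
  MPI_Gatherv(sendbuf, count, MPI_DOUBLE, recvbuf, recvcounts, displs, MPI_DOUBLE, 0, MPI_COMM_WORLD);
  /* leaf-to-root direction: rank 0 hands each block back, as MPIU_Iscatterv() does above */
  MPI_Scatterv(recvbuf, recvcounts, displs, MPI_DOUBLE, sendbuf, count, MPI_DOUBLE, 0, MPI_COMM_WORLD);

  free(sendbuf);
  free(recvcounts);
  free(displs);
  free(recvbuf);
  MPI_Finalize();
  return 0;
}
#endif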
static PetscErrorCode PetscSFSetCommunicationOps_Gatherv(PetscSF sf, PetscSFLink link)
{
  PetscFunctionBegin;
  link->StartCommunication = PetscSFLinkStartCommunication_Gatherv;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode PetscSFFetchAndOpBegin_Gatherv(PetscSF sf, MPI_Datatype unit, PetscMemType rootmtype, void *rootdata, PetscMemType leafmtype, const void *leafdata, void *leafupdate, MPI_Op op)
{
  PetscFunctionBegin;
  /* In Gatherv, each root has only one leaf, so we just need to bcast rootdata to leafupdate and then reduce leafdata onto rootdata (see the usage sketch after this function) */
  PetscCall(PetscSFBcastBegin(sf, unit, rootdata, leafupdate, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, unit, rootdata, leafupdate, MPI_REPLACE));
  PetscCall(PetscSFReduceBegin(sf, unit, leafdata, rootdata, op));
  PetscFunctionReturn(PETSC_SUCCESS);
}
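/* ------------------------------------------------------------------------------------------------
   A hedged usage sketch, not part of sfgatherv.c: how the fetch-and-op above is reached through
   the public PetscSF interface. The helper name and the MPI_SUM op are illustrative assumptions;
   sf is presumed to already carry a gatherv-shaped graph and buffers of matching size.
*/
#if 0
static PetscErrorCode GathervFetchAndOpExample(PetscSF sf, PetscScalar *rootdata, const PetscScalar *leafdata, PetscScalar *leafupdate)
{
  PetscFunctionBegin;
  /* leafupdate receives the pre-op root values; rootdata is then reduced with leafdata using MPI_SUM */
  PetscCall(PetscSFFetchAndOpBegin(sf, MPIU_SCALAR, rootdata, leafdata, leafupdate, MPI_SUM));
  PetscCall(PetscSFFetchAndOpEnd(sf, MPIU_SCALAR, rootdata, leafdata, leafupdate, MPI_SUM));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif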
PETSC_INTERN PetscErrorCode PetscSFCreate_Gatherv(PetscSF sf)
{
  PetscSF_Gatherv *dat = (PetscSF_Gatherv *)sf->data;

  PetscFunctionBegin;
  sf->ops->BcastBegin  = PetscSFBcastBegin_Basic;
  sf->ops->BcastEnd    = PetscSFBcastEnd_Basic;
  sf->ops->ReduceBegin = PetscSFReduceBegin_Basic;
  sf->ops->ReduceEnd   = PetscSFReduceEnd_Basic;

  /* Inherit from Allgatherv */
  sf->ops->SetUp         = PetscSFSetUp_Allgatherv;
  sf->ops->Reset         = PetscSFReset_Allgatherv;
  sf->ops->Destroy       = PetscSFDestroy_Allgatherv;
  sf->ops->GetGraph      = PetscSFGetGraph_Allgatherv;
  sf->ops->GetLeafRanks  = PetscSFGetLeafRanks_Allgatherv;
  sf->ops->GetRootRanks  = PetscSFGetRootRanks_Allgatherv;
  sf->ops->FetchAndOpEnd = PetscSFFetchAndOpEnd_Allgatherv;
  sf->ops->CreateLocalSF = PetscSFCreateLocalSF_Allgatherv;

  /* Gatherv stuff */
  sf->ops->FetchAndOpBegin = PetscSFFetchAndOpBegin_Gatherv;

  sf->ops->SetCommunicationOps = PetscSFSetCommunicationOps_Gatherv;

  sf->collective = PETSC_TRUE;

  PetscCall(PetscNew(&dat));
  sf->data = (void *)dat;
  PetscFunctionReturn(PETSC_SUCCESS);
}
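/* ------------------------------------------------------------------------------------------------
   A hedged end-to-end sketch, not part of sfgatherv.c: one way to exercise this implementation
   from user code. The graph (one root per rank, all leaves owned by rank 0) and the explicit
   PetscSFSetType(sf, PETSCSFGATHERV) call are illustrative assumptions; applications usually let
   PETSc choose the SF implementation rather than forcing it by hand.
*/
#if 0
#include <petscsf.h>

int main(int argc, char **argv)
{
  PetscSF      sf;
  PetscMPIInt  rank, size;
  PetscInt     nleaves;
  PetscSFNode *iremote  = NULL;
  PetscScalar  rootdata, *leafdata = NULL;

  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  nleaves  = (rank == 0) ? size : 0;  /* rank 0 owns every leaf */
  rootdata = (PetscScalar)(rank + 1); /* one root value per rank */
  if (rank == 0) {
    PetscCall(PetscMalloc1(size, &iremote));
    PetscCall(PetscMalloc1(size, &leafdata));
    for (PetscInt r = 0; r < size; r++) {
      iremote[r].rank  = r; /* leaf r on rank 0 connects to root 0 of rank r */
      iremote[r].index = 0;
    }
  }

  PetscCall(PetscSFCreate(PETSC_COMM_WORLD, &sf));
  PetscCall(PetscSFSetType(sf, PETSCSFGATHERV)); /* force this file's implementation */
  PetscCall(PetscSFSetGraph(sf, 1, nleaves, NULL, PETSC_COPY_VALUES, iremote, PETSC_COPY_VALUES));

  /* A root-to-leaf bcast on a gatherv SF goes through MPIU_Igatherv() in PetscSFLinkStartCommunication_Gatherv() */
  PetscCall(PetscSFBcastBegin(sf, MPIU_SCALAR, &rootdata, leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_SCALAR, &rootdata, leafdata, MPI_REPLACE));

  if (rank == 0) {
    for (PetscInt r = 0; r < size; r++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "leaf %" PetscInt_FMT " = %g\n", r, (double)PetscRealPart(leafdata[r])));
  }

  PetscCall(PetscFree(iremote));
  PetscCall(PetscFree(leafdata));
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(PetscFinalize());
  return 0;
}
#endif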