Actual source code: petscsftypes.h

#pragma once

/* MANSEC = Vec */
/* SUBMANSEC = PetscSF */

/*S
   PetscSF - PETSc object for managing the communication of certain entries of arrays and `Vec`s between MPI processes.

   Level: intermediate

  `PetscSF` uses the concept of star forests to describe communication patterns concisely and efficiently.
  A star <https://en.wikipedia.org/wiki/Star_(graph_theory)> forest is simply a collection of trees of height 1. The leaf nodes represent
  "ghost locations" for the root nodes.

  The standard usage paradigm for `PetscSF` is to provide the communication pattern with `PetscSFSetGraph()` or `PetscSFSetGraphWithPattern()` and
  then perform the communication with `PetscSFBcastBegin()` and `PetscSFBcastEnd()`, or with `PetscSFReduceBegin()` and `PetscSFReduceEnd()`, as sketched below.
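
  Example Usage:
  A minimal sketch of this paradigm for `PetscInt` data on a communicator `comm`, assuming `nroots`, `nleaves`, an array `iremote` of `PetscSFNode`
  describing the leaves, and arrays `rootdata` and `leafdata` already exist (argument lists are abbreviated and may differ between PETSc versions;
  see the individual manual pages):
.vb
    PetscSF sf;
    PetscCall(PetscSFCreate(comm, &sf));
    PetscCall(PetscSFSetFromOptions(sf));
    PetscCall(PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER)); /* sf takes ownership of iremote */
    PetscCall(PetscSFBcastBegin(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE)); /* roots -> leaves */
    PetscCall(PetscSFBcastEnd(sf, MPIU_INT, rootdata, leafdata, MPI_REPLACE));
    PetscCall(PetscSFReduceBegin(sf, MPIU_INT, leafdata, rootdata, MPI_SUM));    /* leaves -> roots */
    PetscCall(PetscSFReduceEnd(sf, MPIU_INT, leafdata, rootdata, MPI_SUM));
    PetscCall(PetscSFDestroy(&sf));
.ve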

.seealso: [](sec_petscsf), `PetscSFCreate()`, `PetscSFSetGraph()`, `PetscSFSetGraphWithPattern()`, `PetscSFBcastBegin()`, `PetscSFBcastEnd()`,
          `PetscSFReduceBegin()`, `PetscSFReduceEnd()`, `VecScatter`, `VecScatterCreate()`
S*/
typedef struct _p_PetscSF *PetscSF;

/*J
  PetscSFType - String with the name of a `PetscSF` type. Each `PetscSFType` uses different mechanisms to perform the communication.

  Level: beginner

  Available Types:
+ `PETSCSFBASIC`      - use MPI point-to-point sends and receives
. `PETSCSFNEIGHBOR`   - use MPI neighborhood collective operations
. `PETSCSFALLGATHERV` - use MPI_Allgatherv operations
. `PETSCSFALLGATHER`  - use MPI_Allgather operations
. `PETSCSFGATHERV`    - use MPI_Igatherv and MPI_Iscatterv operations
. `PETSCSFGATHER`     - use MPI_Igather and MPI_Iscatter operations
. `PETSCSFALLTOALL`   - use MPI_Ialltoall operations
- `PETSCSFWINDOW`     - use MPI one-sided (MPI_Win) operations

  Note:
  Some `PetscSFType`s provide specialized code for only a subset of the `PetscSF` operations and fall back to `PETSCSFBASIC` for the others.
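
  Example Usage:
  A type can be set programmatically or selected from the options database (a minimal sketch; the option name assumes the default options prefix):
.vb
    PetscCall(PetscSFSetType(sf, PETSCSFWINDOW)); /* set the type explicitly */
    PetscCall(PetscSFSetFromOptions(sf));         /* or let -sf_type <basic,neighbor,window,...> choose it */
.ve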

.seealso: [](sec_petscsf), `PetscSFSetType()`, `PetscSF`
J*/
typedef const char *PetscSFType;
#define PETSCSFBASIC      "basic"
#define PETSCSFNEIGHBOR   "neighbor"
#define PETSCSFALLGATHERV "allgatherv"
#define PETSCSFALLGATHER  "allgather"
#define PETSCSFGATHERV    "gatherv"
#define PETSCSFGATHER     "gather"
#define PETSCSFALLTOALL   "alltoall"
#define PETSCSFWINDOW     "window"

/*S
   PetscSFNode - specifies the owning MPI rank, and the local index on that rank, of an array or `Vec` entry to be communicated with a `PetscSF`

   Level: beginner

  Sample Usage:
.vb
    PetscSFNode    *remote;
    PetscCall(PetscMalloc1(nleaves,&remote));
    for (i=0; i<nleaves; i++) { /* here one leaf per process, so nleaves == size */
      remote[i].rank = i;
      remote[i].index = rank;
    }
.ve

  Sample Fortran Usage:
.vb
    type(PetscSFNode) remote(6)
    remote(1)%rank  = modulo(rank+size-1,size)
    remote(1)%index = 1 * stride
.ve

  Notes:
  Use `MPIU_SF_NODE` when performing MPI operations on arrays of `PetscSFNode`.

  Generally the values of `rank` should be in $[0, size)$ and the values of `index` should be non-negative, but some situations violate this.
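
  For example, to send an array of `PetscSFNode` with MPI (a sketch; `dest`, `tag`, and `comm` are assumed to be defined, and the receive side is analogous):
.vb
    PetscCallMPI(MPI_Send(remote, nleaves, MPIU_SF_NODE, dest, tag, comm));
.ve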

.seealso: [](sec_petscsf), `PetscSF`, `PetscSFSetGraph()`
S*/
typedef struct {
  PetscInt rank;  /* MPI rank of owner */
  PetscInt index; /* Index of node on rank */
} PetscSFNode;

#define MPIU_SF_NODE MPIU_2INT

typedef enum {
  PETSCSF_ROOT2LEAF = 0, /* communicate from roots to leaves, as in PetscSFBcastBegin() */
  PETSCSF_LEAF2ROOT = 1  /* communicate from leaves to roots, as in PetscSFReduceBegin() */
} PetscSFDirection;
typedef enum {
  PETSCSF_BCAST  = 0, /* broadcast root data to leaves */
  PETSCSF_REDUCE = 1, /* reduce leaf data onto roots */
  PETSCSF_FETCH  = 2  /* fetch root data and update it with leaf data */
} PetscSFOperation;
/* When doing device-aware MPI, a backend refers to the SF/device interface */
typedef enum {
  PETSCSF_BACKEND_INVALID = 0,
  PETSCSF_BACKEND_CUDA    = 1,
  PETSCSF_BACKEND_HIP     = 2,
  PETSCSF_BACKEND_KOKKOS  = 3
} PetscSFBackend;
typedef struct _n_PetscSFLink *PetscSFLink;

/*S
  VecScatter - Object used to manage the communication of data
  between vectors in parallel, or between parallel and sequential vectors. Manages both scatters and gathers.

  Level: beginner

  Note:
  This is an alias for `PetscSF`.
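
  Example Usage:
  A minimal sketch of a forward scatter from a vector `x` to a vector `y`, with index sets `ix` and `iy` assumed to exist (see `VecScatterCreate()`
  for the meaning of the arguments):
.vb
    VecScatter ctx;
    PetscCall(VecScatterCreate(x, ix, y, iy, &ctx));
    PetscCall(VecScatterBegin(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterDestroy(&ctx));
.ve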

.seealso: [](sec_petscsf), `Vec`, `PetscSF`, `VecScatterCreate()`, `VecScatterBegin()`, `VecScatterEnd()`
S*/
typedef PetscSF VecScatter;

/*J
  VecScatterType - String with the name of a PETSc vector scatter type

  Level: beginner

  Note:
  This is an alias for `PetscSFType`.

.seealso: [](sec_petscsf), `PetscSFType`, `VecScatterSetType()`, `VecScatter`, `VecScatterCreate()`, `VecScatterDestroy()`
J*/
typedef PetscSFType VecScatterType;