Actual source code: petscsftypes.h
#pragma once

/* MANSEC = Vec */
/* SUBMANSEC = PetscSF */

/*S
   PetscSF - PETSc object for setting up and managing the communication of certain entries of arrays and `Vec` between MPI ranks.

   Level: intermediate

   `PetscSF` uses the concept of star forests to describe communication patterns concisely and efficiently.
   A star <https://en.wikipedia.org/wiki/Star_(graph_theory)> forest is simply a collection of trees of height 1. The leaf nodes represent
   "ghost locations" for the root nodes.

.seealso: `PetscSFCreate()`, `VecScatter`, `VecScatterCreate()`
S*/
typedef struct _p_PetscSF *PetscSF;

/*J
   PetscSFType - String with the name of a `PetscSF` type

   Level: beginner
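
   Example Usage:
   A minimal sketch (not part of the original header; assumes `sf` is an existing `PetscSF`) that selects one of the implementations listed below:
.vb
   PetscCall(PetscSFSetType(sf, PETSCSFBASIC));
.ve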

.seealso: `PetscSFSetType()`, `PetscSF`
J*/
typedef const char *PetscSFType;
#define PETSCSFBASIC      "basic"
#define PETSCSFNEIGHBOR   "neighbor"
#define PETSCSFALLGATHERV "allgatherv"
#define PETSCSFALLGATHER  "allgather"
#define PETSCSFGATHERV    "gatherv"
#define PETSCSFGATHER     "gather"
#define PETSCSFALLTOALL   "alltoall"
#define PETSCSFWINDOW     "window"

/*S
   PetscSFNode - specifier of owner and index

   Level: beginner

   Sample Usage:
.vb
   PetscSFNode *remote;
   PetscCall(PetscMalloc1(nleaves, &remote));
   for (i = 0; i < nleaves; i++) {
     remote[i].rank  = i;
     remote[i].index = rank;
   }
.ve

   Sample Fortran Usage:
.vb
   type(PetscSFNode) remote(6)
   remote(1)%rank  = modulo(rank + size - 1, size)
   remote(1)%index = 1 * stride
.ve

   Notes:
   Use `MPIU_SF_NODE` when performing MPI operations on arrays of `PetscSFNode`.

   Generally the value of `rank` should be in $[0, size)$ and the value of `index` should be greater than or equal to 0, but there are some situations that violate this.
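
   For example, a minimal sketch (not part of the original header; assumes `remote` and `nleaves` from above and a communicator `comm`) that broadcasts an array of `PetscSFNode` from rank 0 using the matching MPI datatype:
.vb
   PetscCallMPI(MPI_Bcast(remote, (PetscMPIInt)nleaves, MPIU_SF_NODE, 0, comm));
.ve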

.seealso: `PetscSF`, `PetscSFSetGraph()`
S*/
typedef struct {
  PetscInt rank;  /* Rank of owner */
  PetscInt index; /* Index of node on rank */
} PetscSFNode;

#define MPIU_SF_NODE MPIU_2INT

/* Direction of data movement in a star-forest operation */
typedef enum {
  PETSCSF_ROOT2LEAF = 0, /* from roots to leaves, as in a broadcast */
  PETSCSF_LEAF2ROOT = 1  /* from leaves to roots, as in a reduction */
} PetscSFDirection;

/* Kind of communication operation being performed */
typedef enum {
  PETSCSF_BCAST  = 0, /* broadcast root values to leaves */
  PETSCSF_REDUCE = 1, /* reduce leaf values onto roots */
  PETSCSF_FETCH  = 2  /* fetch root values and atomically update them with leaf values */
} PetscSFOperation;

/* When doing device-aware MPI, a backend refers to the SF/device interface */
typedef enum {
  PETSCSF_BACKEND_INVALID = 0,
  PETSCSF_BACKEND_CUDA    = 1,
  PETSCSF_BACKEND_HIP     = 2,
  PETSCSF_BACKEND_KOKKOS  = 3
} PetscSFBackend;
typedef struct _n_PetscSFLink *PetscSFLink;

/*S
   VecScatter - Object used to manage communication of data
   between vectors in parallel or between parallel and sequential vectors. Manages both scatters and gathers.

   Level: beginner

   Note:
   This is an alias for `PetscSF`
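
   Example Usage:
   A minimal sketch (not part of the original header; assumes existing vectors `x` and `y` and an index set `ix` selecting the entries of `x` to send):
.vb
   VecScatter sct;
   PetscCall(VecScatterCreate(x, ix, y, NULL, &sct));
   PetscCall(VecScatterBegin(sct, x, y, INSERT_VALUES, SCATTER_FORWARD));
   PetscCall(VecScatterEnd(sct, x, y, INSERT_VALUES, SCATTER_FORWARD));
   PetscCall(VecScatterDestroy(&sct));
.ve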

.seealso: `Vec`, `PetscSF`, `VecScatterCreate()`, `VecScatterBegin()`, `VecScatterEnd()`
S*/
typedef PetscSF VecScatter;

/*J
   VecScatterType - String with the name of a PETSc vector scatter type

   Level: beginner

   Note:
   This is an alias for `PetscSFType`

.seealso: `PetscSFType`, `VecScatterSetType()`, `VecScatter`, `VecScatterCreate()`, `VecScatterDestroy()`
J*/
typedef PetscSFType VecScatterType;