Actual source code: pvecimpl.h
#pragma once

#include <../src/vec/vec/impls/dvecimpl.h>
typedef struct {
  PetscInt insertmode;
  PetscInt count;
  PetscInt bcount;
} VecAssemblyHeader;

typedef struct {
  PetscInt *ints;
  PetscInt *intb;
  PetscScalar *scalars;
  PetscScalar *scalarb;
  char pendings;
  char pendingb;
} VecAssemblyFrame;
typedef struct {
  VECHEADER
  PetscInt nghost;        /* number of ghost points on this process */
  IS ghost;               /* global indices of ghost values */
  Vec localrep;           /* local representation of vector */
  VecScatter localupdate; /* scatter to update ghost values */

  PetscBool assembly_subset; /* Subsequent assemblies will set a subset (perhaps equal) of off-process entries set on first assembly */
  PetscBool first_assembly_done; /* Has the first assembly been completed? */
  PetscBool use_status; /* Use MPI_Status to determine number of items in each message */
  PetscMPIInt nsendranks;
  PetscMPIInt nrecvranks;
  PetscMPIInt *sendranks;
  PetscMPIInt *recvranks;
  VecAssemblyHeader *sendhdr, *recvhdr;
  VecAssemblyFrame *sendptrs; /* pointers to the main messages */
  MPI_Request *sendreqs;
  MPI_Request *recvreqs;
  PetscSegBuffer segrecvint;
  PetscSegBuffer segrecvscalar;
  PetscSegBuffer segrecvframe;
#if defined(PETSC_HAVE_NVSHMEM)
  PetscBool use_nvshmem; /* Try to use NVSHMEM in communication of, for example, VecNorm */
#endif

  /* COO fields, assuming m is the vector's local size */
  PetscCount coo_n;
  PetscCount tot1;   /* Total number of local entries in the COO arrays */
  PetscCount *jmap1; /* [m+1]: the i-th entry of the vector has jmap1[i+1]-jmap1[i] repeats in the COO arrays */
  PetscCount *perm1; /* [tot1]: permutation array for local entries */

  PetscCount nnz2;   /* Number of unique entries in recvbuf */
  PetscCount *imap2; /* [nnz2]: the i-th unique entry in recvbuf is the imap2[i]-th entry of the vector */
  PetscCount *jmap2; /* [nnz2+1] */
  PetscCount *perm2; /* [recvlen] */

  PetscSF coo_sf;
  PetscCount sendlen, recvlen;    /* Lengths (in units of PetscScalar) of sendbuf/recvbuf */
  PetscCount *Cperm;              /* [sendlen]: permutation array used to fill sendbuf[]; 'C' for communication */
  PetscScalar *sendbuf, *recvbuf; /* Buffers for remote values in VecSetValuesCOO() */
} Vec_MPI;
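
/*
  Usage sketch for the COO fields above (illustrative, not part of this header): they back
  the public VecSetPreallocationCOO()/VecSetValuesCOO() API. A caller does something like

    PetscInt    i[] = {0, 3, 3};       // global indices; repeats are allowed
    PetscScalar v[] = {1.0, 2.0, 3.0}; // values matched to i[] by position
    PetscCall(VecSetPreallocationCOO(x, 3, i));   // one-time setup: builds jmap1/perm1/imap2/...
    PetscCall(VecSetValuesCOO(x, v, ADD_VALUES)); // fast repeated path reusing that setup
*/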

PETSC_INTERN PetscErrorCode VecMTDot_MPI(Vec, PetscInt, const Vec[], PetscScalar *);
PETSC_INTERN PetscErrorCode VecView_MPI_Binary(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_Draw_LG(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_Socket(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_HDF5(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_ADIOS(Vec, PetscViewer);
PETSC_EXTERN PetscErrorCode VecView_MPI(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecGetSize_MPI(Vec, PetscInt *);
PETSC_INTERN PetscErrorCode VecGetValues_MPI(Vec, PetscInt, const PetscInt[], PetscScalar[]);
PETSC_INTERN PetscErrorCode VecSetValues_MPI(Vec, PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
PETSC_INTERN PetscErrorCode VecSetValuesBlocked_MPI(Vec, PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
PETSC_INTERN PetscErrorCode VecAssemblyBegin_MPI(Vec);
PETSC_INTERN PetscErrorCode VecAssemblyEnd_MPI(Vec);
PETSC_INTERN PetscErrorCode VecAssemblyReset_MPI(Vec);
PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec);
PETSC_INTERN PetscErrorCode VecMDot_MPI_GEMV(Vec, PetscInt, const Vec[], PetscScalar *);
PETSC_INTERN PetscErrorCode VecMTDot_MPI_GEMV(Vec, PetscInt, const Vec[], PetscScalar *);

PETSC_INTERN PetscErrorCode VecDuplicate_MPI(Vec, Vec *);
PETSC_INTERN PetscErrorCode VecDuplicateWithArray_MPI(Vec, const PetscScalar *, Vec *);
PETSC_INTERN PetscErrorCode VecSetPreallocationCOO_MPI(Vec, PetscCount, const PetscInt[]);
PETSC_INTERN PetscErrorCode VecSetValuesCOO_MPI(Vec, const PetscScalar[], InsertMode);

PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecDot_MPI(Vec, Vec, PetscScalar *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecMDot_MPI(Vec, PetscInt, const Vec[], PetscScalar *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecTDot_MPI(Vec, Vec, PetscScalar *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecNorm_MPI(Vec, NormType, PetscReal *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecMax_MPI(Vec, PetscInt *, PetscReal *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecMin_MPI(Vec, PetscInt *, PetscReal *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecMaxPointwiseDivide_MPI(Vec, Vec, PetscReal *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecPlaceArray_MPI(Vec, const PetscScalar *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecResetArray_MPI(Vec);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecDestroy_MPI(Vec);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecCreate_MPI_Private(Vec, PetscBool, PetscInt, const PetscScalar[]);

static inline PetscErrorCode VecMXDot_MPI_Default(Vec xin, PetscInt nv, const Vec y[], PetscScalar *z, PetscErrorCode (*VecMXDot_SeqFn)(Vec, PetscInt, const Vec[], PetscScalar *))
{
  PetscFunctionBegin;
  PetscCall(VecMXDot_SeqFn(xin, nv, y, z));
  PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, z, nv, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static inline PetscErrorCode VecXDot_MPI_Default(Vec xin, Vec yin, PetscScalar *z, PetscErrorCode (*VecXDot_SeqFn)(Vec, Vec, PetscScalar *))
{
  PetscFunctionBegin;
  PetscCall(VecXDot_SeqFn(xin, yin, z));
  PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, z, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
  PetscFunctionReturn(PETSC_SUCCESS);
}
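
/*
  Wiring sketch (illustrative): an MPI dot product is the sequential kernel plus a single
  all-reduce, so implementations pass their local kernel in. Assuming VecDot_Seq from
  dvecimpl.h is the local kernel, a standard MPI version would look like

    PetscErrorCode VecDot_MPI(Vec xin, Vec yin, PetscScalar *z)
    {
      PetscFunctionBegin;
      PetscCall(VecXDot_MPI_Default(xin, yin, z, VecDot_Seq)); // local kernel + MPIU_SUM all-reduce
      PetscFunctionReturn(PETSC_SUCCESS);
    }
*/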

static inline PetscErrorCode VecMinMax_MPI_Default(Vec xin, PetscInt *idx, PetscReal *z, PetscErrorCode (*VecMinMax_SeqFn)(Vec, PetscInt *, PetscReal *), const MPI_Op ops[2])
{
  PetscFunctionBegin;
  /* Find the local extremum (max or min, depending on the ops passed in) */
  PetscCall(VecMinMax_SeqFn(xin, idx, z));
  if (PetscDefined(HAVE_MPIUNI)) PetscFunctionReturn(PETSC_SUCCESS);
  /* Combine into the global extremum */
  if (idx) {
    struct {
      PetscReal v;
      PetscInt i;
    } in = {*z, *idx + xin->map->rstart};

    PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &in, 1, MPIU_REAL_INT, ops[0], PetscObjectComm((PetscObject)xin)));
    *z = in.v;
    *idx = in.i;
  } else {
    /* User does not need idx */
    PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, z, 1, MPIU_REAL, ops[1], PetscObjectComm((PetscObject)xin)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
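
/*
  The ops[2] pair selects the reductions: ops[0] combines (value, index) pairs when the
  caller wants the location, ops[1] combines bare values. A max implementation would pass
  something like the following (a sketch, assuming the MPIU_MAXLOC/MPIU_MAX ops and the
  VecMax_Seq kernel):

    const MPI_Op ops[] = {MPIU_MAXLOC, MPIU_MAX};
    PetscCall(VecMinMax_MPI_Default(xin, idx, z, VecMax_Seq, ops));
*/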

static inline PetscErrorCode VecDotNorm2_MPI_Default(Vec s, Vec t, PetscScalar *dp, PetscScalar *nm, PetscErrorCode (*VecDotNorm2_SeqFn)(Vec, Vec, PetscScalar *, PetscScalar *))
{
  PetscFunctionBegin;
  PetscCall(VecDotNorm2_SeqFn(s, t, dp, nm));
  {
    PetscScalar sum[] = {*dp, *nm};

    PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, sum, 2, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)s)));
    *dp = sum[0];
    *nm = sum[1];
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
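
/*
  Usage sketch (illustrative): the sequential kernel returns the local dot product of s and t
  together with the local squared 2-norm of t; both scalars are then finished with one fused
  all-reduce, e.g. (assuming VecDotNorm2_Seq is the matching local kernel)

    PetscCall(VecDotNorm2_MPI_Default(s, t, dp, nm, VecDotNorm2_Seq));
*/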

static inline PetscErrorCode VecNorm_MPI_Default(Vec xin, NormType type, PetscReal *z, PetscErrorCode (*VecNorm_SeqFn)(Vec, NormType, PetscReal *))
{
  PetscMPIInt zn = 1;
  MPI_Op op = MPIU_SUM;

  PetscFunctionBegin;
  PetscCall(VecNorm_SeqFn(xin, type, z));
  switch (type) {
  case NORM_1_AND_2:
    // The 2-norm needs to be squared below before being summed. NORM_2 stores the norm in
    // the first slot, while NORM_1_AND_2 stores it in the second.
    z[1] *= z[1];
    zn = 2;
    break;
  case NORM_2:
  case NORM_FROBENIUS:
    z[0] *= z[0]; // square the local 2-norm so the all-reduce below sums the squares
    // fall-through
  case NORM_1:
    break;
  case NORM_INFINITY:
    op = MPIU_MAX;
    break;
  }
  PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, z, zn, MPIU_REAL, op, PetscObjectComm((PetscObject)xin)));
  if (type == NORM_2 || type == NORM_FROBENIUS || type == NORM_1_AND_2) z[type == NORM_1_AND_2] = PetscSqrtReal(z[type == NORM_1_AND_2]);
  PetscFunctionReturn(PETSC_SUCCESS);
}
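
/*
  Wiring sketch (illustrative): a backend supplies its local norm kernel, e.g.

    PetscCall(VecNorm_MPI_Default(xin, type, z, VecNorm_Seq));

  Note the indexing on the final line above: the summed square lives in z[0] for
  NORM_2/NORM_FROBENIUS but in z[1] for NORM_1_AND_2, hence z[type == NORM_1_AND_2].
*/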

static inline PetscErrorCode VecErrorWeightedNorms_MPI_Default(Vec U, Vec Y, Vec E, NormType wnormtype, PetscReal atol, Vec vatol, PetscReal rtol, Vec vrtol, PetscReal ignore_max, PetscReal *norm, PetscInt *norm_loc, PetscReal *norma, PetscInt *norma_loc, PetscReal *normr, PetscInt *normr_loc, PetscErrorCode (*SeqFn)(Vec, Vec, Vec, NormType, PetscReal, Vec, PetscReal, Vec, PetscReal, PetscReal *, PetscInt *, PetscReal *, PetscInt *, PetscReal *, PetscInt *))
{
  PetscReal loc[6]; /* loc[0..2]: the three norms; loc[3..5]: the matching integer counts, carried as reals */

  PetscFunctionBegin;
  PetscCall(SeqFn(U, Y, E, wnormtype, atol, vatol, rtol, vrtol, ignore_max, norm, norm_loc, norma, norma_loc, normr, normr_loc));
  if (wnormtype == NORM_2) {
    loc[0] = PetscSqr(*norm);
    loc[1] = PetscSqr(*norma);
    loc[2] = PetscSqr(*normr);
  } else {
    loc[0] = *norm;
    loc[1] = *norma;
    loc[2] = *normr;
  }
  loc[3] = (PetscReal)*norm_loc;
  loc[4] = (PetscReal)*norma_loc;
  loc[5] = (PetscReal)*normr_loc;
  if (wnormtype == NORM_2) {
    PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, loc, 6, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)U)));
  } else {
    PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, loc, 3, MPIU_REAL, MPIU_MAX, PetscObjectComm((PetscObject)U)));
    PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, loc + 3, 3, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)U)));
  }
  if (wnormtype == NORM_2) {
    *norm = PetscSqrtReal(loc[0]);
    *norma = PetscSqrtReal(loc[1]);
    *normr = PetscSqrtReal(loc[2]);
  } else {
    *norm = loc[0];
    *norma = loc[1];
    *normr = loc[2];
  }
  *norm_loc = (PetscInt)loc[3];
  *norma_loc = (PetscInt)loc[4];
  *normr_loc = (PetscInt)loc[5];
  PetscFunctionReturn(PETSC_SUCCESS);
}
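
/*
  Usage sketch (illustrative; the sequential kernel name below is an assumption, not a
  declaration from this header):

    PetscCall(VecErrorWeightedNorms_MPI_Default(U, Y, E, wnormtype, atol, vatol, rtol, vrtol,
                                                ignore_max, norm, norm_loc, norma, norma_loc,
                                                normr, normr_loc, VecErrorWeightedNorms_Seq));
*/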