Actual source code: dagtol.c
1: /*
2: Code for manipulating distributed regular arrays in parallel.
3: */
5: #include <petsc/private/dmdaimpl.h>
7: PetscErrorCode DMGlobalToLocalBegin_DA(DM da, Vec g, InsertMode mode, Vec l)
8: {
9: DM_DA *dd = (DM_DA *)da->data;
14: VecScatterBegin(dd->gtol, g, l, mode, SCATTER_FORWARD);
15: return 0;
16: }
18: PetscErrorCode DMGlobalToLocalEnd_DA(DM da, Vec g, InsertMode mode, Vec l)
19: {
20: DM_DA *dd = (DM_DA *)da->data;
25: VecScatterEnd(dd->gtol, g, l, mode, SCATTER_FORWARD);
26: return 0;
27: }
29: PetscErrorCode DMLocalToGlobalBegin_DA(DM da, Vec l, InsertMode mode, Vec g)
30: {
31: DM_DA *dd = (DM_DA *)da->data;
36: if (mode == ADD_VALUES) {
37: VecScatterBegin(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE);
38: } else if (mode == INSERT_VALUES) {
42: VecScatterBegin(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL);
43: } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
44: return 0;
45: }
47: PetscErrorCode DMLocalToGlobalEnd_DA(DM da, Vec l, InsertMode mode, Vec g)
48: {
49: DM_DA *dd = (DM_DA *)da->data;
54: if (mode == ADD_VALUES) {
55: VecScatterEnd(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE);
56: } else if (mode == INSERT_VALUES) {
57: VecScatterEnd(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL);
58: } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
59: return 0;
60: }
62: extern PetscErrorCode DMDAGetNatural_Private(DM, PetscInt *, IS *);
63: /*
64: DMDAGlobalToNatural_Create - Create the global to natural scatter object
66: Collective on da
68: Input Parameter:
69: . da - the distributed array context
71: Level: developer
73: Note:
74: This is an internal routine called by `DMDAGlobalToNatural()` to
75: create the scatter context.
77: .seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
78: `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
79: */
80: PetscErrorCode DMDAGlobalToNatural_Create(DM da)
81: {
82: PetscInt m, start, Nlocal;
83: IS from, to;
84: Vec global;
85: DM_DA *dd = (DM_DA *)da->data;
90: /* create the scatter context */
91: VecGetLocalSize(dd->natural, &m);
92: VecGetOwnershipRange(dd->natural, &start, NULL);
94: DMDAGetNatural_Private(da, &Nlocal, &to);
96: ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &from);
97: VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, dd->Nlocal, PETSC_DETERMINE, NULL, &global);
98: VecScatterCreate(global, from, dd->natural, to, &dd->gton);
99: VecDestroy(&global);
100: ISDestroy(&from);
101: ISDestroy(&to);
102: return 0;
103: }
105: /*@
106: DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
107: in the "natural" grid ordering. Must be followed by
108: `DMDAGlobalToNaturalEnd()` to complete the exchange.
110: Neighbor-wise Collective on da
112: Input Parameters:
113: + da - the distributed array context
114: . g - the global vector
115: - mode - one of `INSERT_VALUES` or `ADD_VALUES`
117: Output Parameter:
118: . n - the natural ordering values
120: Level: advanced
122: Notes:
123:   The global and natural vectors used here need not be the same as those
124: obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
125: must have the same parallel data layout; they could, for example, be
126: obtained with `VecDuplicate()` from the `DMDA` originating vectors.
128: You must call `DMDACreateNaturalVector()` before using this routine
130: .seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
131: `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
132: @*/
133: PetscErrorCode DMDAGlobalToNaturalBegin(DM da, Vec g, InsertMode mode, Vec n)
134: {
135: DM_DA *dd = (DM_DA *)da->data;
140: if (!dd->gton) {
141: /* create the scatter context */
142: DMDAGlobalToNatural_Create(da);
143: }
144: VecScatterBegin(dd->gton, g, n, mode, SCATTER_FORWARD);
145: return 0;
146: }
148: /*@
149: DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
150: in the natural ordering. Must be preceded by `DMDAGlobalToNaturalBegin()`.
152: Neighbor-wise Collective on da
154: Input Parameters:
155: + da - the distributed array context
156: . g - the global vector
157: - mode - one of `INSERT_VALUES` or `ADD_VALUES`
159: Output Parameter:
160: . n - the global values in the natural ordering
162: Level: advanced
164: Notes:
165: The global and local vectors used here need not be the same as those
166: obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
167: must have the same parallel data layout; they could, for example, be
168:   obtained with `VecDuplicate()` from the `DMDA` originating vectors.
170: .seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
171: `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
172: @*/
173: PetscErrorCode DMDAGlobalToNaturalEnd(DM da, Vec g, InsertMode mode, Vec n)
174: {
175: DM_DA *dd = (DM_DA *)da->data;
180: VecScatterEnd(dd->gton, g, n, mode, SCATTER_FORWARD);
181: return 0;
182: }
184: /*@
185: DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
186: to a global vector in the PETSc `DMDA` grid ordering. Must be followed by
187: `DMDANaturalToGlobalEnd()` to complete the exchange.
189: Neighbor-wise Collective on da
191: Input Parameters:
192: + da - the distributed array context
193: . n - the global vector in a natural ordering
194: - mode - one of `INSERT_VALUES` or `ADD_VALUES`
196: Output Parameter:
197: . g - the values in the `DMDA` ordering
199: Level: advanced
201: Notes:
202: The global and natural vectors used here need not be the same as those
203: obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
204: must have the same parallel data layout; they could, for example, be
205: obtained with `VecDuplicate()` from the `DMDA` originating vectors.
207: .seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
208: `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
209: @*/
210: PetscErrorCode DMDANaturalToGlobalBegin(DM da, Vec n, InsertMode mode, Vec g)
211: {
212: DM_DA *dd = (DM_DA *)da->data;
217: if (!dd->gton) {
218: /* create the scatter context */
219: DMDAGlobalToNatural_Create(da);
220: }
221: VecScatterBegin(dd->gton, n, g, mode, SCATTER_REVERSE);
222: return 0;
223: }
225: /*@
226: DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
227: to a global vector in the PETSc `DMDA` ordering. Must be preceded by `DMDANaturalToGlobalBegin()`.
229: Neighbor-wise Collective on da
231: Input Parameters:
232: + da - the distributed array context
233: . n - the global vector in a natural ordering
234: - mode - one of `INSERT_VALUES` or `ADD_VALUES`
236: Output Parameter:
237: . g - the global values in the PETSc `DMDA` ordering
239: Level: advanced
241: Notes:
242: The global and local vectors used here need not be the same as those
243: obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
244: must have the same parallel data layout; they could, for example, be
245: obtained with `VecDuplicate()` from the `DMDA` originating vectors.
247: .seealso: `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
248: `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
249: @*/
250: PetscErrorCode DMDANaturalToGlobalEnd(DM da, Vec n, InsertMode mode, Vec g)
251: {
252: DM_DA *dd = (DM_DA *)da->data;
257: VecScatterEnd(dd->gton, n, g, mode, SCATTER_REVERSE);
258: return 0;
259: }