Actual source code: pbvec.c

/*
   This file contains routines for Parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>

PETSC_INTERN PetscErrorCode VecView_MPI_Draw(Vec, PetscViewer);

PetscErrorCode VecPlaceArray_MPI(Vec vin, const PetscScalar *a)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  PetscCheck(!v->unplacedarray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array; /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) PetscCall(VecPlaceArray(v->localrep, a));
  PetscFunctionReturn(PETSC_SUCCESS);
}
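
/*
   Typical user-level pairing (a sketch; the buffer name tmp is illustrative and
   must provide at least the local length of x):

     PetscCall(VecPlaceArray(x, tmp)); // x now operates on tmp; dispatches to VecPlaceArray_MPI() above
     // ... compute with x ...
     PetscCall(VecResetArray(x));      // restores the saved array; dispatches to VecResetArray_MPI() below
*/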

PetscErrorCode VecDuplicate_MPI(Vec win, Vec *v)
{
  Vec_MPI     *vw, *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  PetscFunctionBegin;
  PetscCall(VecCreateWithLayout_Private(win->map, v));

  PetscCall(VecCreate_MPI_Private(*v, PETSC_TRUE, w->nghost, NULL));
  vw           = (Vec_MPI *)(*v)->data;
  (*v)->ops[0] = win->ops[0];

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    PetscCall(VecGetArray(*v, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, PetscAbs(win->map->bs), win->map->n + w->nghost, array, &vw->localrep));
    vw->localrep->ops[0] = w->localrep->ops[0];
    PetscCall(VecRestoreArray(*v, &array));

    vw->localupdate = w->localupdate;
    if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));

    vw->ghost = w->ghost;
    if (vw->ghost) PetscCall(PetscObjectReference((PetscObject)vw->ghost));
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)*v)->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)*v)->qlist));

  (*v)->stash.bs  = win->stash.bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecDuplicateVecs_MPI_GEMV(Vec w, PetscInt m, Vec *V[])
{
  Vec_MPI *wmpi = (Vec_MPI *)w->data;

  PetscFunctionBegin;
  // Currently only do GEMV for vectors without ghosts. Note w might be a VECMPI subclass object.
  // This routine relies on the duplicate operation being VecDuplicate_MPI. If not, bail out to the default.
  if (wmpi->localrep || w->ops->duplicate != VecDuplicate_MPI) {
    w->ops->duplicatevecs = VecDuplicateVecs_Default;
    PetscCall(VecDuplicateVecs(w, m, V));
  } else {
    PetscScalar *array;
    PetscInt64   lda; // use 64-bit as we will do "m * lda"

    PetscCall(PetscMalloc1(m, V));
    PetscCall(VecGetLocalSizeAligned(w, 64, &lda)); // get in lda the 64-byte aligned local size

    PetscCall(PetscCalloc1(m * lda, &array));
    for (PetscInt i = 0; i < m; i++) {
      Vec v;
      PetscCall(VecCreateMPIWithLayoutAndArray_Private(w->map, PetscSafePointerPlusOffset(array, i * lda), &v));
      PetscCall(VecSetType(v, ((PetscObject)w)->type_name));
      PetscCall(PetscObjectListDuplicate(((PetscObject)w)->olist, &((PetscObject)v)->olist));
      PetscCall(PetscFunctionListDuplicate(((PetscObject)w)->qlist, &((PetscObject)v)->qlist));
      v->ops[0]             = w->ops[0];
      v->stash.donotstash   = w->stash.donotstash;
      v->stash.ignorenegidx = w->stash.ignorenegidx;
      v->stash.bs           = w->stash.bs;
      v->bstash.bs          = w->bstash.bs;
      (*V)[i]               = v;
    }
    // So when the first vector is destroyed it will destroy the array
    if (m) ((Vec_MPI *)(*V)[0]->data)->array_allocated = array;
    // disable replacearray of the first vector, as freeing its memory also frees others in the group.
    // But replacearray of others is ok, as they don't own their array.
    if (m > 1) (*V)[0]->ops->replacearray = VecReplaceArray_Default_GEMV_Error;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
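
/*
   Layout produced by the GEMV path above (a sketch): all m vectors share one
   allocation, each starting at a 64-byte-aligned stride lda, so the group can
   be viewed as a dense lda-by-m matrix and handed to BLAS GEMV:

     array: [ vec 0 (lda entries) | vec 1 | ... | vec m-1 ]

   The path is selected at creation time by the runtime options read in
   VecCreate_MPI_Private() below:

     -vec_mdot_use_gemv  1   // GEMV-based VecMDot()/VecMTDot() (default on)
     -vec_maxpy_use_gemv 1   // GEMV-based VecMAXPY() (default off)
*/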

static PetscErrorCode VecSetOption_MPI(Vec V, VecOption op, PetscBool flag)
{
  Vec_MPI *v = (Vec_MPI *)V->data;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES:
    V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES:
    V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag;              /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) {              /* User indicates "do not reuse the communication pattern" */
      PetscCall(VecAssemblyReset_MPI(V));   /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark the first assembly is not done */
    }
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
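
/*
   Typical use of VEC_SUBSET_OFF_PROC_ENTRIES (a sketch): when every later
   assembly communicates with the same ranks as (or a subset of) the first
   one, the communication pattern built during the first assembly is reused:

     PetscCall(VecSetOption(x, VEC_SUBSET_OFF_PROC_ENTRIES, PETSC_TRUE));
     for (PetscInt step = 0; step < nsteps; step++) {
       // ... VecSetValues() with off-process indices drawn from the same set ...
       PetscCall(VecAssemblyBegin(x));
       PetscCall(VecAssemblyEnd(x));
     }
*/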

PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = NULL;
  if (v->localrep) PetscCall(VecResetArray(v->localrep));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rankid, PetscMPIInt rank, void *sdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)sdata;
  PetscInt           bs  = X->map->bs;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    PetscCallMPI(MPIU_Isend(x->sendptrs[rankid].ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCallMPI(MPIU_Isend(x->sendptrs[rankid].scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    PetscCallMPI(MPIU_Isend(x->sendptrs[rankid].intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCallMPI(MPIU_Isend(x->sendptrs[rankid].scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rank, void *rdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)rdata;
  PetscInt           bs  = X->map->bs;
  VecAssemblyFrame  *frame;

  PetscFunctionBegin;
  PetscCall(PetscSegBufferGet(x->segrecvframe, 1, &frame));

  if (hdr->count) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->count, &frame->ints));
    PetscCallMPI(MPIU_Irecv(frame->ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->count, &frame->scalars));
    PetscCallMPI(MPIU_Irecv(frame->scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
    frame->pendings = 2;
  } else {
    frame->ints     = NULL;
    frame->scalars  = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->bcount, &frame->intb));
    PetscCallMPI(MPIU_Irecv(frame->intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->bcount * bs, &frame->scalarb));
    PetscCallMPI(MPIU_Irecv(frame->scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
    frame->pendingb = 2;
  } else {
    frame->intb     = NULL;
    frame->scalarb  = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI    *x = (Vec_MPI *)X->data;
  MPI_Comm    comm;
  PetscMPIInt i;
  PetscInt    j, jb, bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
  PetscCall(VecGetBlockSize(X, &bs));
  if (PetscDefined(USE_DEBUG)) {
    InsertMode addv;

    PetscCallMPI(MPIU_Allreduce((PetscEnum *)&X->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
    PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  PetscCall(VecStashSortCompress_Private(&X->stash));
  PetscCall(VecStashSortCompress_Private(&X->bstash));

  if (!x->sendranks) {
    PetscMPIInt nowners, bnowners, *owners, *bowners;
    PetscInt    ntmp;

    PetscCall(VecStashGetOwnerList_Private(&X->stash, X->map, &nowners, &owners));
    PetscCall(VecStashGetOwnerList_Private(&X->bstash, X->map, &bnowners, &bowners));
    PetscCall(PetscMergeMPIIntArray(nowners, owners, bnowners, bowners, &ntmp, &x->sendranks));
    PetscCall(PetscMPIIntCast(ntmp, &x->nsendranks));
    PetscCall(PetscFree(owners));
    PetscCall(PetscFree(bowners));
    PetscCall(PetscMalloc1(x->nsendranks, &x->sendhdr));
    PetscCall(PetscCalloc1(x->nsendranks, &x->sendptrs));
  }
  for (i = 0, j = 0, jb = 0; i < x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];

    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j < X->stash.n && X->stash.idx[j] < X->map->range[rank + 1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb * bs];
      for (; jb < X->bstash.n && X->bstash.idx[jb] * bs < X->map->range[rank + 1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 1000, &x->segrecvint));
  if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar), 1000, &x->segrecvscalar));
  if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame), 50, &x->segrecvframe));
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i = 0; i < 4; i++) PetscCall(PetscCommGetNewTag(comm, &tag[i]));
    for (i = 0; i < x->nsendranks; i++) PetscCall(VecAssemblySend_MPI_Private(comm, tag, i, x->sendranks[i], x->sendhdr + i, x->sendreqs + 4 * i, X));
    for (i = 0; i < x->nrecvranks; i++) PetscCall(VecAssemblyRecv_MPI_Private(comm, tag, x->recvranks[i], x->recvhdr + i, x->recvreqs + 4 * i, X));
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    PetscCall(PetscCommBuildTwoSidedFReq(comm, 3, MPIU_INT, x->nsendranks, x->sendranks, (PetscInt *)x->sendhdr, &x->nrecvranks, &x->recvranks, &x->recvhdr, 4, &x->sendreqs, &x->recvreqs, VecAssemblySend_MPI_Private, VecAssemblyRecv_MPI_Private, X));
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set:
     in that case, record here that the first assembly has now been done.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash, reallocs;
    PetscCall(VecStashGetInfo_Private(&X->stash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
    PetscCall(VecStashGetInfo_Private(&X->bstash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI          *x  = (Vec_MPI *)X->data;
  PetscInt          bs = X->map->bs;
  PetscMPIInt       npending, *some_indices, r;
  MPI_Status       *some_statuses;
  PetscScalar      *xarray;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode  = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCheck(x->segrecvframe, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing segrecvframe! Probably you forgot to call VecAssemblyBegin() first");
  PetscCall(VecGetArray(X, &xarray));
  PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe, &frame));
  PetscCall(PetscMalloc2(4 * x->nrecvranks, &some_indices, x->use_status ? 4 * x->nrecvranks : 0, &some_statuses));
  for (r = 0, npending = 0; r < x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending > 0) {
    PetscMPIInt ndone = 0, ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    PetscCallMPI(MPI_Waitsome(4 * x->nrecvranks, x->recvreqs, &ndone, some_indices, x->use_status ? some_statuses : MPI_STATUSES_IGNORE));
    for (ii = 0; ii < ndone; ii++) {
      PetscInt     i     = some_indices[ii] / 4, j, k;
      InsertMode   imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt    *recvint;
      PetscScalar *recvscalar;
      PetscBool    intmsg   = (PetscBool)(some_indices[ii] % 2 == 0);
      PetscBool    blockmsg = (PetscBool)((some_indices[ii] % 4) / 2 == 1);

      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;

        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
        } else {
          PetscCall(PetscMPIIntCast(x->recvhdr[i].count, &count));
        }
        for (j = 0, recvint = frame[i].ints, recvscalar = frame[i].scalars; j < count; j++, recvint++) {
          PetscInt loc = *recvint - X->map->rstart;

          PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")", *recvint, X->map->rstart, X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      } else { /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else {
          PetscCall(PetscMPIIntCast(x->recvhdr[i].bcount, &count));
        }
        for (j = 0, recvint = frame[i].intb, recvscalar = frame[i].scalarb; j < count; j++, recvint++) {
          PetscInt loc = (*recvint) * bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      }
    }
  }
  PetscCall(VecRestoreArray(X, &xarray));
  PetscCallMPI(MPI_Waitall(4 * x->nsendranks, x->sendreqs, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(some_indices, some_statuses));
  if (x->assembly_subset) {
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvint, NULL));
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar, NULL));
  } else {
    PetscCall(VecAssemblyReset_MPI(X));
  }

  X->stash.insertmode  = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  PetscCall(VecStashScatterEnd_Private(&X->stash));
  PetscCall(VecStashScatterEnd_Private(&X->bstash));
  PetscFunctionReturn(PETSC_SUCCESS);
}
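
/*
   The two assembly routines above implement the user-visible sequence (a
   sketch; the index and value names are illustrative):

     PetscCall(VecSetValues(x, 1, &row, &val, ADD_VALUES)); // row may be owned by another rank
     PetscCall(VecAssemblyBegin(x)); // posts the sends/receives built above
     PetscCall(VecAssemblyEnd(x));   // completes them and applies the stashed values
*/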

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI *)X->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(x->sendreqs));
  PetscCall(PetscFree(x->recvreqs));
  PetscCall(PetscFree(x->sendranks));
  PetscCall(PetscFree(x->recvranks));
  PetscCall(PetscFree(x->sendhdr));
  PetscCall(PetscFree(x->recvhdr));
  PetscCall(PetscFree(x->sendptrs));
  PetscCall(PetscSegBufferDestroy(&x->segrecvint));
  PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
  PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecSetFromOptions_MPI(Vec X, PetscOptionItems *PetscOptionsObject)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscBool flg = PETSC_FALSE, set;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "VecMPI Options");
  PetscCall(PetscOptionsBool("-vec_assembly_legacy", "Use MPI 1 version of assembly", "", flg, &flg, &set));
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
  }
  PetscOptionsHeadEnd();
#else
  PetscFunctionBegin;
  X->ops->assemblybegin = VecAssemblyBegin_MPI;
  X->ops->assemblyend   = VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecGetLocalToGlobalMapping_MPI_VecGhost(Vec X, ISLocalToGlobalMapping *ltg)
{
  PetscInt       *indices, n, nghost, rstart, i;
  IS              ghostis;
  const PetscInt *ghostidx;

  PetscFunctionBegin;
  if (X->map->mapping) {
    *ltg = X->map->mapping;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(VecGhostGetGhostIS(X, &ghostis));
  PetscCall(ISGetLocalSize(ghostis, &nghost));
  PetscCall(VecGetLocalSize(X, &n));
  PetscCall(ISGetIndices(ghostis, &ghostidx));
  /* set local to global mapping for ghosted vector */
  PetscCall(PetscMalloc1(n + nghost, &indices));
  PetscCall(VecGetOwnershipRange(X, &rstart, NULL));
  for (i = 0; i < n; i++) indices[i] = rstart + i;
  PetscCall(PetscArraycpy(indices + n, ghostidx, nghost));
  PetscCall(ISRestoreIndices(ghostis, &ghostidx));
  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, n + nghost, indices, PETSC_OWN_POINTER, &X->map->mapping));
  *ltg = X->map->mapping;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static struct _VecOps DvOps = {
  PetscDesignatedInitializer(duplicate, VecDuplicate_MPI), /* 1 */
  PetscDesignatedInitializer(duplicatevecs, VecDuplicateVecs_Default),
  PetscDesignatedInitializer(destroyvecs, VecDestroyVecs_Default),
  PetscDesignatedInitializer(dot, VecDot_MPI),
  PetscDesignatedInitializer(mdot, VecMDot_MPI),
  PetscDesignatedInitializer(norm, VecNorm_MPI),
  PetscDesignatedInitializer(tdot, VecTDot_MPI),
  PetscDesignatedInitializer(mtdot, VecMTDot_MPI),
  PetscDesignatedInitializer(scale, VecScale_Seq),
  PetscDesignatedInitializer(copy, VecCopy_Seq), /* 10 */
  PetscDesignatedInitializer(set, VecSet_Seq),
  PetscDesignatedInitializer(swap, VecSwap_Seq),
  PetscDesignatedInitializer(axpy, VecAXPY_Seq),
  PetscDesignatedInitializer(axpby, VecAXPBY_Seq),
  PetscDesignatedInitializer(maxpy, VecMAXPY_Seq),
  PetscDesignatedInitializer(aypx, VecAYPX_Seq),
  PetscDesignatedInitializer(waxpy, VecWAXPY_Seq),
  PetscDesignatedInitializer(axpbypcz, VecAXPBYPCZ_Seq),
  PetscDesignatedInitializer(pointwisemult, VecPointwiseMult_Seq),
  PetscDesignatedInitializer(pointwisedivide, VecPointwiseDivide_Seq),
  PetscDesignatedInitializer(setvalues, VecSetValues_MPI), /* 20 */
  PetscDesignatedInitializer(assemblybegin, VecAssemblyBegin_MPI_BTS),
  PetscDesignatedInitializer(assemblyend, VecAssemblyEnd_MPI_BTS),
  PetscDesignatedInitializer(getarray, NULL),
  PetscDesignatedInitializer(getsize, VecGetSize_MPI),
  PetscDesignatedInitializer(getlocalsize, VecGetSize_Seq),
  PetscDesignatedInitializer(restorearray, NULL),
  PetscDesignatedInitializer(max, VecMax_MPI),
  PetscDesignatedInitializer(min, VecMin_MPI),
  PetscDesignatedInitializer(setrandom, VecSetRandom_Seq),
  PetscDesignatedInitializer(setoption, VecSetOption_MPI),
  PetscDesignatedInitializer(setvaluesblocked, VecSetValuesBlocked_MPI),
  PetscDesignatedInitializer(destroy, VecDestroy_MPI),
  PetscDesignatedInitializer(view, VecView_MPI),
  PetscDesignatedInitializer(placearray, VecPlaceArray_MPI),
  PetscDesignatedInitializer(replacearray, VecReplaceArray_Seq),
  PetscDesignatedInitializer(dot_local, VecDot_Seq),
  PetscDesignatedInitializer(tdot_local, VecTDot_Seq),
  PetscDesignatedInitializer(norm_local, VecNorm_Seq),
  PetscDesignatedInitializer(mdot_local, VecMDot_Seq),
  PetscDesignatedInitializer(mtdot_local, VecMTDot_Seq),
  PetscDesignatedInitializer(load, VecLoad_Default),
  PetscDesignatedInitializer(reciprocal, VecReciprocal_Default),
  PetscDesignatedInitializer(conjugate, VecConjugate_Seq),
  PetscDesignatedInitializer(setlocaltoglobalmapping, NULL),
  PetscDesignatedInitializer(getlocaltoglobalmapping, VecGetLocalToGlobalMapping_MPI_VecGhost),
  PetscDesignatedInitializer(setvalueslocal, NULL),
  PetscDesignatedInitializer(resetarray, VecResetArray_MPI),
  PetscDesignatedInitializer(setfromoptions, VecSetFromOptions_MPI), /* set from options */
  PetscDesignatedInitializer(maxpointwisedivide, VecMaxPointwiseDivide_Seq),
  PetscDesignatedInitializer(pointwisemax, VecPointwiseMax_Seq),
  PetscDesignatedInitializer(pointwisemaxabs, VecPointwiseMaxAbs_Seq),
  PetscDesignatedInitializer(pointwisemin, VecPointwiseMin_Seq),
  PetscDesignatedInitializer(getvalues, VecGetValues_MPI),
  PetscDesignatedInitializer(sqrt, NULL),
  PetscDesignatedInitializer(abs, NULL),
  PetscDesignatedInitializer(exp, NULL),
  PetscDesignatedInitializer(log, NULL),
  PetscDesignatedInitializer(shift, NULL),
  PetscDesignatedInitializer(create, NULL), /* really? */
  PetscDesignatedInitializer(stridegather, VecStrideGather_Default),
  PetscDesignatedInitializer(stridescatter, VecStrideScatter_Default),
  PetscDesignatedInitializer(dotnorm2, NULL),
  PetscDesignatedInitializer(getsubvector, NULL),
  PetscDesignatedInitializer(restoresubvector, NULL),
  PetscDesignatedInitializer(getarrayread, NULL),
  PetscDesignatedInitializer(restorearrayread, NULL),
  PetscDesignatedInitializer(stridesubsetgather, VecStrideSubSetGather_Default),
  PetscDesignatedInitializer(stridesubsetscatter, VecStrideSubSetScatter_Default),
  PetscDesignatedInitializer(viewnative, VecView_MPI),
  PetscDesignatedInitializer(loadnative, NULL),
  PetscDesignatedInitializer(createlocalvector, NULL),
  PetscDesignatedInitializer(getlocalvector, NULL),
  PetscDesignatedInitializer(restorelocalvector, NULL),
  PetscDesignatedInitializer(getlocalvectorread, NULL),
  PetscDesignatedInitializer(restorelocalvectorread, NULL),
  PetscDesignatedInitializer(bindtocpu, NULL),
  PetscDesignatedInitializer(getarraywrite, NULL),
  PetscDesignatedInitializer(restorearraywrite, NULL),
  PetscDesignatedInitializer(getarrayandmemtype, NULL),
  PetscDesignatedInitializer(restorearrayandmemtype, NULL),
  PetscDesignatedInitializer(getarrayreadandmemtype, NULL),
  PetscDesignatedInitializer(restorearrayreadandmemtype, NULL),
  PetscDesignatedInitializer(getarraywriteandmemtype, NULL),
  PetscDesignatedInitializer(restorearraywriteandmemtype, NULL),
  PetscDesignatedInitializer(concatenate, NULL),
  PetscDesignatedInitializer(sum, NULL),
  PetscDesignatedInitializer(setpreallocationcoo, VecSetPreallocationCOO_MPI),
  PetscDesignatedInitializer(setvaluescoo, VecSetValuesCOO_MPI),
  PetscDesignatedInitializer(errorwnorm, NULL),
  PetscDesignatedInitializer(maxpby, NULL),
};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, const PetscScalar array[])
{
  Vec_MPI  *s;
  PetscBool mdot_use_gemv  = PETSC_TRUE;
  PetscBool maxpy_use_gemv = PETSC_FALSE; // default is false as we saw bad performance with vendors' GEMV with tall skinny matrices.

  PetscFunctionBegin;
  PetscCall(PetscNew(&s));
  v->data   = (void *)s;
  v->ops[0] = DvOps;

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-vec_mdot_use_gemv", &mdot_use_gemv, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-vec_maxpy_use_gemv", &maxpy_use_gemv, NULL));

  // allocate multiple vectors together
  if (mdot_use_gemv || maxpy_use_gemv) v->ops[0].duplicatevecs = VecDuplicateVecs_MPI_GEMV;

  if (mdot_use_gemv) {
    v->ops[0].mdot        = VecMDot_MPI_GEMV;
    v->ops[0].mdot_local  = VecMDot_Seq_GEMV;
    v->ops[0].mtdot       = VecMTDot_MPI_GEMV;
    v->ops[0].mtdot_local = VecMTDot_Seq_GEMV;
  }
  if (maxpy_use_gemv) v->ops[0].maxpy = VecMAXPY_Seq_GEMV;

  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  PetscCall(PetscLayoutSetUp(v->map));

  s->array           = (PetscScalar *)array;
  s->array_allocated = NULL;
  if (alloc && !array) {
    PetscInt n = v->map->n + nghost;
    PetscCall(PetscCalloc1(n, &s->array));
    s->array_allocated = s->array;
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_2], 0));
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_1], 0));
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_INFINITY], 0));
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;
  s->ghost       = NULL;

  v->stash.insertmode  = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), 1, &v->stash));
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), PetscAbs(v->map->bs), &v->bstash));

#if defined(PETSC_HAVE_MATLAB)
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", VecMatlabEnginePut_Default));
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", VecMatlabEngineGet_Default));
#endif
  PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECMPI));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Create a VECMPI with the given layout and array

  Collective

  Input Parameters:
+ map   - the layout
- array - the array on host

  Output Parameter:
. V  - The vector object
*/
PetscErrorCode VecCreateMPIWithLayoutAndArray_Private(PetscLayout map, const PetscScalar array[], Vec *V)
{
  PetscFunctionBegin;
  PetscCall(VecCreateWithLayout_Private(map, V));
  PetscCall(VecCreate_MPI_Private(*V, PETSC_FALSE, 0, array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Key:
. -vec_type mpi - sets the vector type to `VECMPI` during a call to `VecSetFromOptions()`

  Level: beginner

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VecCreateMPI()`
M*/

PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, 0, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   VECSTANDARD = "standard" - A `VECSEQ` on one process and `VECMPI` on more than one process

   Options Database Key:
. -vec_type standard - sets a vector type to standard on calls to `VecSetFromOptions()`

  Level: beginner

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreateMPI()`
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v), &size));
  if (size == 1) {
    PetscCall(VecSetType(v, VECSEQ));
  } else {
    PetscCall(VecSetType(v, VECMPI));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  VecCreateMPIWithArray - Creates a parallel, array-style vector,
  where the user provides the array space to store the vector values.

  Collective

  Input Parameters:
+ comm  - the MPI communicator to use
. bs    - block size, same meaning as `VecSetBlockSize()`
. n     - local vector length, cannot be `PETSC_DECIDE`
. N     - global vector length (or `PETSC_DETERMINE` to have it calculated)
- array - the user-provided array to store the vector values

  Output Parameter:
. vv - the vector

  Level: intermediate

  Notes:
  Use `VecDuplicate()` or `VecDuplicateVecs()` to form additional vectors of the
  same type as an existing vector.

  If the user-provided array is `NULL`, then `VecPlaceArray()` can be used
  at a later stage to SET the array for storing the vector values.

  PETSc does NOT free `array` when the vector is destroyed via `VecDestroy()`.

  The user should not free `array` until the vector is destroyed.
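
  Example Usage:
  A minimal sketch with a user-managed buffer (the local length 10 is illustrative):
.vb
  PetscScalar buf[10]; // user-owned storage; must outlive the vector
  Vec         x;

  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, 10, PETSC_DETERMINE, buf, &x));
  PetscCall(VecSet(x, 1.0));
  PetscCall(VecDestroy(&x)); // buf is NOT freed here
.ve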

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
          `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`
@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, const PetscScalar array[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size of vector");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_FALSE, 0, array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
  the caller allocates the array space.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
. nghost - number of local ghost points
. ghosts - global indices of ghost points (or `NULL` if not needed), these do not need to be in increasing order (sorted)
- array  - the space to store the vector values (of length at least `n + nghost`)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  This also automatically sets the `ISLocalToGlobalMapping()` for this vector.
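
  Example Usage:
  A minimal sketch; the caller's buffer holds the `n` local entries followed by
  the `nghost` ghost entries (comm and n come from the surrounding code, and the
  ghost indices are illustrative):
.vb
  PetscInt     ghosts[2] = {0, 5}; // global indices owned by other ranks
  PetscScalar *buf;
  Vec          x;

  PetscCall(PetscMalloc1(n + 2, &buf));
  PetscCall(VecCreateGhostWithArray(comm, n, PETSC_DETERMINE, 2, ghosts, buf, &x));
  /* ... use x like any ghosted vector, see VecCreateGhost() ... */
  PetscCall(VecDestroy(&x));
  PetscCall(PetscFree(buf)); // the array remains owned by the caller
.ve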

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI     *w;
  PetscScalar *larray;
  IS           from, to;

  PetscFunctionBegin;
  *vv = NULL;
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));

  w->ghost                            = from;
  (*vv)->ops->getlocaltoglobalmapping = VecGetLocalToGlobalMapping_MPI_VecGhost;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  VecGhostGetGhostIS - Return ghosting indices of a ghost vector

  Input Parameter:
. X - ghost vector

  Output Parameter:
. ghost - ghosting indices

  Level: beginner
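
  Example Usage:
  A minimal sketch (x is assumed to be a ghosted `VECMPI`; the returned `IS`
  is borrowed, do not destroy it):
.vb
  IS              ghost;
  const PetscInt *idx;
  PetscInt        nghost;

  PetscCall(VecGhostGetGhostIS(x, &ghost));
  PetscCall(ISGetLocalSize(ghost, &nghost));
  PetscCall(ISGetIndices(ghost, &idx));
  /* idx[0..nghost) are the global indices of the ghost entries */
  PetscCall(ISRestoreIndices(ghost, &idx));
.ve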

.seealso: `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`
@*/
PetscErrorCode VecGhostGetGhostIS(Vec X, IS *ghost)
{
  Vec_MPI  *w;
  PetscBool flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(X, VEC_CLASSID, 1);
  PetscAssertPointer(ghost, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)X, VECMPI, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_WRONGSTATE, "VecGhostGetGhostIS is not supported for vec type %s", ((PetscObject)X)->type_name);
  w      = (Vec_MPI *)X->data;
  *ghost = w->ghost;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
. nghost - number of local ghost points
- ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  This also automatically sets the `ISLocalToGlobalMapping()` for this vector.
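
  Example Usage:
  A minimal sketch of the usual create/update cycle (comm and n come from the
  surrounding code, and the ghost indices are illustrative):
.vb
  PetscInt ghosts[2] = {0, 42}; // global indices owned by other ranks
  Vec      x, lx;

  PetscCall(VecCreateGhost(comm, n, PETSC_DETERMINE, 2, ghosts, &x));
  /* ... VecSetValues()/VecAssemblyBegin()/VecAssemblyEnd() on x ... */
  PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostGetLocalForm(x, &lx)); // sequential form of length n + nghost
  /* ... read owned and ghost entries from lx ... */
  PetscCall(VecGhostRestoreLocalForm(x, &lx));
  PetscCall(VecDestroy(&x));
.ve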

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostWithArray(comm, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  VecMPISetGhost - Sets the ghost points for an MPI ghost vector

  Collective

  Input Parameters:
+ vv     - the MPI vector
. nghost - number of local ghost points
- ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

  You must call this AFTER you have set the type of the vector (with `VecSetType()`) and the size (with `VecSetSizes()`).
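
  Example Usage:
  A minimal sketch (comm and n come from the surrounding code, and the ghost
  indices are illustrative):
.vb
  PetscInt ghosts[2] = {0, 7};
  Vec      x;

  PetscCall(VecCreate(comm, &x));
  PetscCall(VecSetType(x, VECMPI));
  PetscCall(VecSetSizes(x, n, PETSC_DETERMINE));
  PetscCall(VecMPISetGhost(x, 2, ghosts));
.ve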

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`
@*/
PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[])
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)vv, VECMPI, &flg));
  /* if this is already a fully built VECMPI, destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt     n, N;
    Vec_MPI     *w;
    PetscScalar *larray;
    IS           from, to;
    MPI_Comm     comm;

    PetscCall(PetscObjectGetComm((PetscObject)vv, &comm));
    n = vv->map->n;
    N = vv->map->N;
    PetscUseTypeMethod(vv, destroy);
    PetscCall(VecSetSizes(vv, n, N));
    PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, nghost, NULL));
    w = (Vec_MPI *)vv->data;
    /* Create local representation */
    PetscCall(VecGetArray(vv, &larray));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
    PetscCall(VecRestoreArray(vv, &larray));

    /*
     Create scatter context for scattering (updating) ghost values
     */
    PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
    PetscCall(VecScatterCreate(vv, from, w->localrep, to, &w->localupdate));
    PetscCall(ISDestroy(&to));

    w->ghost                         = from;
    vv->ops->getlocaltoglobalmapping = VecGetLocalToGlobalMapping_MPI_VecGhost;
  } else {
    PetscCheck(vv->ops->create != VecCreate_MPI, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set local or global size before setting ghosting");
    PetscCheck(((PetscObject)vv)->type_name, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set type to VECMPI before ghosting");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
  the caller allocates the array space. Indices in the ghost region are based on blocks.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. bs     - block size
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
. nghost - number of local ghost blocks
. ghosts - global indices of ghost blocks (or `NULL` if not needed), counts are by block, not by index, these do not need to be in increasing order (sorted)
- array  - the space to store the vector values (of length at least `n + nghost*bs`)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  `n` is the local vector size (total local size, not the number of blocks) while `nghost`
  is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
  portion is `bs*nghost`
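
  Example Usage:
  A minimal sketch with `bs = 2`; the caller's buffer has length `n + bs*nghost`
  (comm and n come from the surrounding code, and the ghost block indices are
  illustrative):
.vb
  PetscInt     bghosts[2] = {0, 3}; // ghost *blocks*, i.e. entries {0,1} and {6,7}
  PetscScalar *buf;
  Vec          x;

  PetscCall(PetscMalloc1(n + 2 * 2, &buf));
  PetscCall(VecCreateGhostBlockWithArray(comm, 2, n, PETSC_DETERMINE, 2, bghosts, buf, &x));
  /* ... use x, and destroy it before freeing buf ... */
  PetscCall(VecDestroy(&x));
  PetscCall(PetscFree(buf));
.ve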

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`
@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, nb, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCheck(n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size must be a multiple of block size");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost * bs, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, bs, n + bs * nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateBlock(comm, bs, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, bs * nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  nb = n / bs;
  PetscCall(PetscMalloc1(nb + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  rstart = rstart / bs;

  for (i = 0; i < nb; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[nb + i] = ghosts[i];

  PetscCall(ISLocalToGlobalMappingCreate(comm, bs, nb + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
  The indexing of the ghost points is done with blocks.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. bs     - the block size
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
. nghost - number of local ghost blocks
- ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  `n` is the local vector size (total local size, not the number of blocks) while `nghost`
  is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
  portion is `bs*nghost`
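
  Example Usage:
  A minimal sketch with `bs = 2`; each ghost index names a block, so the local
  form has `n + bs*nghost` entries (comm and n come from the surrounding code,
  and the indices are illustrative):
.vb
  PetscInt bghosts[2] = {0, 3}; // ghost blocks {0,1} and {6,7}
  Vec      x;

  PetscCall(VecCreateGhostBlock(comm, 2, n, PETSC_DETERMINE, 2, bghosts, &x));
  PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
.ve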

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`
@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}