Actual source code: dagtona.c

/*
  Tools to help solve the coarse grid problem redundantly.
  Provides two scatter contexts: (1) one that maps from the usual global vector
  to a copy of the entire vector, in NATURAL numbering, on every process, and
  (2) one that extracts, from the entire vector held on each process in natural
  numbering, this process's piece in GLOBAL numbering.
*/

#include <petsc/private/dmdaimpl.h>

/*@
  DMDAGlobalToNaturalAllCreate - Creates a scatter context that maps from a
  global vector to a copy of the entire vector on each processor, in natural numbering

  Collective

  Input Parameter:
. da - the distributed array context

  Output Parameter:
. scatter - the scatter context
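
  Example Usage:
  A minimal sketch (assuming an existing `DMDA` named `da` and a global vector `g`
  obtained from `DMCreateGlobalVector()`; the variable names are illustrative) that
  gathers the entire vector onto every MPI process in natural ordering
.vb
  Vec        natall;
  VecScatter tonatall;
  PetscInt   N;

  PetscCall(VecGetSize(g, &N));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, N, &natall)); /* full-length copy on this process */
  PetscCall(DMDAGlobalToNaturalAllCreate(da, &tonatall));
  PetscCall(VecScatterBegin(tonatall, g, natall, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(tonatall, g, natall, INSERT_VALUES, SCATTER_FORWARD));
  /* natall now holds the whole vector in natural ordering, identically on every process */
  PetscCall(VecScatterDestroy(&tonatall));
  PetscCall(VecDestroy(&natall));
.ve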

  Level: advanced

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDANaturalAllToGlobalCreate()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDAGlobalToNaturalAllCreate(DM da, VecScatter *scatter)
{
  PetscInt N;
  IS       from, to;
  Vec      tmplocal, global;
  AO       ao;
  DM_DA   *dd = (DM_DA *)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscAssertPointer(scatter, 2);
  PetscCall(DMDAGetAO(da, &ao));

  /* create the scatter context */
  PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, dd->Nlocal, PETSC_DETERMINE, NULL, &global)); /* template vector with the DMDA's parallel layout (no array attached) */
  PetscCall(VecGetSize(global, &N));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), N, 0, 1, &to));
  PetscCall(AOPetscToApplicationIS(ao, to)); /* destination indices: PETSc ordering -> natural (application) ordering */
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), N, 0, 1, &from));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dd->w, N, NULL, &tmplocal)); /* template sequential vector that holds the entire vector on each process */
  PetscCall(VecScatterCreate(global, from, tmplocal, to, scatter));
  PetscCall(VecDestroy(&tmplocal));
  PetscCall(VecDestroy(&global));
  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMDANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
  of the entire vector on each processor (in the natural ordering) to its local part in the global vector.

  Collective

  Input Parameter:
. da - the distributed array context

  Output Parameter:
. scatter - the scatter context
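
  Example Usage:
  A minimal sketch (assuming an existing `DMDA` named `da`; the variable names are
  illustrative) that pushes a full-length, natural-ordering copy held on every MPI
  process into the distributed global vector
.vb
  Vec        g, natall;
  VecScatter fromnatall;
  PetscInt   N;

  PetscCall(DMCreateGlobalVector(da, &g));
  PetscCall(VecGetSize(g, &N));
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, N, &natall));
  /* ... fill natall in natural ordering; each process must at least set the entries that map to its own piece ... */
  PetscCall(DMDANaturalAllToGlobalCreate(da, &fromnatall));
  PetscCall(VecScatterBegin(fromnatall, natall, g, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(fromnatall, natall, g, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&fromnatall));
  PetscCall(VecDestroy(&natall));
  PetscCall(VecDestroy(&g));
.ve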

  Level: advanced

.seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGlobalToNaturalAllCreate()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
          `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
@*/
PetscErrorCode DMDANaturalAllToGlobalCreate(DM da, VecScatter *scatter)
{
  DM_DA   *dd = (DM_DA *)da->data;
  PetscInt M, m = dd->Nlocal, start;
  IS       from, to;
  Vec      tmplocal, global;
  AO       ao;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
  PetscAssertPointer(scatter, 2);
  PetscCall(DMDAGetAO(da, &ao));

  /* create the scatter context */
  PetscCall(MPIU_Allreduce(&m, &M, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)da))); /* M = global size of the vector */
  PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, m, PETSC_DETERMINE, NULL, &global)); /* template vector with the DMDA's parallel layout (no array attached) */
  PetscCall(VecGetOwnershipRange(global, &start, NULL));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &from));
  PetscCall(AOPetscToApplicationIS(ao, from)); /* source indices: PETSc ordering -> natural (application) ordering */
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &to)); /* destination: this process's contiguous piece of the global vector */
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dd->w, M, NULL, &tmplocal)); /* template for the full-length copy held on each process */
  PetscCall(VecScatterCreate(tmplocal, from, global, to, scatter));
  PetscCall(VecDestroy(&tmplocal));
  PetscCall(VecDestroy(&global));
  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  PetscFunctionReturn(PETSC_SUCCESS);
}