Actual source code: pmap.c

  1: /*
  2:    This file contains routines for the basic map (PetscLayout) object implementation.
  3: */

  5: #include <petsc/private/isimpl.h>

  7: /*@
  8:   PetscLayoutCreate - Allocates a `PetscLayout` object

 10:   Collective

 12:   Input Parameter:
 13: . comm - the MPI communicator

 15:   Output Parameter:
 16: . map - the new `PetscLayout`

 18:   Level: advanced

 20:   Notes:
 21:   Typical calling sequence
 22: .vb
 23:        PetscLayoutCreate(MPI_Comm,PetscLayout *);
 24:        PetscLayoutSetBlockSize(PetscLayout,bs);
 25:        PetscLayoutSetSize(PetscLayout,N); // or PetscLayoutSetLocalSize(PetscLayout,n);
 26:        PetscLayoutSetUp(PetscLayout);
 27: .ve
 28:   Alternatively,
 29: .vb
 30:       PetscLayoutCreateFromSizes(comm,n,N,bs,&layout);
 31: .ve

 33:   Optionally use any of the following
 34: .vb
 35:   PetscLayoutGetSize(PetscLayout,PetscInt *);
 36:   PetscLayoutGetLocalSize(PetscLayout,PetscInt *);
 37:   PetscLayoutGetRange(PetscLayout,PetscInt *rstart,PetscInt *rend);
 38:   PetscLayoutGetRanges(PetscLayout,const PetscInt *range[]);
 39:   PetscLayoutDestroy(PetscLayout*);
 40: .ve

 42:   The `PetscLayout` object and methods are intended to be used in the PETSc `Vec` and `Mat` implementations; they are generally not needed in
 43:   user code unless their use provides a clear benefit.

 45: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutSetLocalSize()`, `PetscLayoutSetSize()`, `PetscLayoutGetSize()`, `PetscLayoutGetLocalSize()`,
 46:           `PetscLayout`, `PetscLayoutDestroy()`,
 47:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetBlockSize()`, `PetscLayoutGetBlockSize()`, `PetscLayoutSetUp()`,
 48:           `PetscLayoutCreateFromSizes()`
 49: @*/
 50: PetscErrorCode PetscLayoutCreate(MPI_Comm comm, PetscLayout *map)
 51: {
 52:   PetscFunctionBegin;
 53:   PetscCall(PetscNew(map));
 54:   PetscCallMPI(MPI_Comm_size(comm, &(*map)->size));
 55:   (*map)->comm        = comm;
 56:   (*map)->bs          = 1;
 57:   (*map)->n           = -1;
 58:   (*map)->N           = -1;
 59:   (*map)->range       = NULL;
 60:   (*map)->range_alloc = PETSC_TRUE;
 61:   (*map)->rstart      = 0;
 62:   (*map)->rend        = 0;
 63:   (*map)->setupcalled = PETSC_FALSE;
 64:   (*map)->oldn        = -1;
 65:   (*map)->oldN        = -1;
 66:   (*map)->oldbs       = -1;
 67:   PetscFunctionReturn(PETSC_SUCCESS);
 68: }
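
/*
   Example (editor's illustration, not part of the PETSc source): the typical
   create/set/setup/destroy sequence from the manual page above, written as a small
   helper. The communicator, the global size N, and the helper name are assumptions.

     static PetscErrorCode CreateSimpleLayout(MPI_Comm comm, PetscInt N, PetscLayout *layout)
     {
       PetscInt rstart, rend;

       PetscFunctionBegin;
       PetscCall(PetscLayoutCreate(comm, layout));
       PetscCall(PetscLayoutSetBlockSize(*layout, 1));
       PetscCall(PetscLayoutSetSize(*layout, N)); // or PetscLayoutSetLocalSize()
       PetscCall(PetscLayoutSetUp(*layout));
       PetscCall(PetscLayoutGetRange(*layout, &rstart, &rend)); // this process owns [rstart, rend)
       PetscFunctionReturn(PETSC_SUCCESS);
     }

   The layout is later released with PetscLayoutDestroy(&layout).
*/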

 70: /*@
 71:   PetscLayoutCreateFromSizes - Allocates a `PetscLayout` object, sets its sizes and block size, and sets it up.

 73:   Collective

 75:   Input Parameters:
 76: + comm - the MPI communicator
 77: . n    - the local size (or `PETSC_DECIDE`)
 78: . N    - the global size (or `PETSC_DECIDE`)
 79: - bs   - the block size (or `PETSC_DECIDE`)

 81:   Output Parameter:
 82: . map - the new `PetscLayout`

 84:   Level: advanced

 86:   Note:
 87: .vb
 88:   PetscLayoutCreateFromSizes(comm, n, N, bs, &layout);
 89: .ve
 90:   is a shorthand for
 91: .vb
 92:   PetscLayoutCreate(comm, &layout);
 93:   PetscLayoutSetLocalSize(layout, n);
 94:   PetscLayoutSetSize(layout, N);
 95:   PetscLayoutSetBlockSize(layout, bs);
 96:   PetscLayoutSetUp(layout);
 97: .ve

 99: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutSetLocalSize()`, `PetscLayoutSetSize()`, `PetscLayoutGetSize()`, `PetscLayoutGetLocalSize()`, `PetscLayout`, `PetscLayoutDestroy()`,
100:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetBlockSize()`, `PetscLayoutGetBlockSize()`, `PetscLayoutSetUp()`, `PetscLayoutCreateFromRanges()`
101: @*/
102: PetscErrorCode PetscLayoutCreateFromSizes(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt bs, PetscLayout *map)
103: {
104:   PetscFunctionBegin;
105:   PetscCall(PetscLayoutCreate(comm, map));
106:   PetscCall(PetscLayoutSetLocalSize(*map, n));
107:   PetscCall(PetscLayoutSetSize(*map, N));
108:   PetscCall(PetscLayoutSetBlockSize(*map, bs));
109:   PetscCall(PetscLayoutSetUp(*map));
110:   PetscFunctionReturn(PETSC_SUCCESS);
111: }
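
/*
   Example (editor's illustration, not part of the PETSc source): the one-call form,
   letting PETSc decide the local sizes. The communicator and the global size N are
   assumed values.

     PetscLayout layout;

     PetscCall(PetscLayoutCreateFromSizes(PETSC_COMM_WORLD, PETSC_DECIDE, N, 1, &layout));
     // ... use the layout ...
     PetscCall(PetscLayoutDestroy(&layout));
*/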

113: /*@
114:   PetscLayoutDestroy - Frees a `PetscLayout` object, including its ownership-range array if one was allocated.

116:   Collective

118:   Input Parameter:
119: . map - the `PetscLayout`

121:   Level: developer

123: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutSetLocalSize()`, `PetscLayoutSetSize()`, `PetscLayoutGetSize()`, `PetscLayoutGetLocalSize()`,
124:           `PetscLayout`, `PetscLayoutCreate()`,
125:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetBlockSize()`, `PetscLayoutGetBlockSize()`, `PetscLayoutSetUp()`
126: @*/
127: PetscErrorCode PetscLayoutDestroy(PetscLayout *map)
128: {
129:   PetscFunctionBegin;
130:   if (!*map) PetscFunctionReturn(PETSC_SUCCESS);
131:   if (!(*map)->refcnt--) {
132:     if ((*map)->range_alloc) PetscCall(PetscFree((*map)->range));
133:     PetscCall(ISLocalToGlobalMappingDestroy(&(*map)->mapping));
134:     PetscCall(PetscFree(*map));
135:   }
136:   *map = NULL;
137:   PetscFunctionReturn(PETSC_SUCCESS);
138: }

140: /*@
141:   PetscLayoutCreateFromRanges - Creates a new `PetscLayout` with the given ownership ranges and sets it up.

143:   Collective

145:   Input Parameters:
146: + comm  - the MPI communicator
147: . range - the array of ownership range boundaries for each rank, of length commsize+1
148: . mode  - the copy mode for range
149: - bs    - the block size (or `PETSC_DECIDE`)

151:   Output Parameter:
152: . newmap - the new `PetscLayout`

154:   Level: developer

156: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutSetLocalSize()`, `PetscLayoutSetSize()`, `PetscLayoutGetSize()`,
157:           `PetscLayoutGetLocalSize()`, `PetscLayout`, `PetscLayoutDestroy()`,
158:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetBlockSize()`, `PetscLayoutGetBlockSize()`, `PetscLayoutSetUp()`, `PetscLayoutCreateFromSizes()`
159: @*/
160: PetscErrorCode PetscLayoutCreateFromRanges(MPI_Comm comm, const PetscInt range[], PetscCopyMode mode, PetscInt bs, PetscLayout *newmap)
161: {
162:   PetscLayout map;
163:   PetscMPIInt rank;

165:   PetscFunctionBegin;
166:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
167:   PetscCall(PetscLayoutCreate(comm, &map));
168:   PetscCall(PetscLayoutSetBlockSize(map, bs));
169:   switch (mode) {
170:   case PETSC_COPY_VALUES:
171:     PetscCall(PetscMalloc1(map->size + 1, &map->range));
172:     PetscCall(PetscArraycpy(map->range, range, map->size + 1));
173:     break;
174:   case PETSC_USE_POINTER:
175:     map->range_alloc = PETSC_FALSE;
176:     /* fall through so the caller's array is used directly (also the PETSC_OWN_POINTER case) */
177:   default:
178:     map->range = (PetscInt *)range;
179:     break;
180:   }
181:   map->rstart = map->range[rank];
182:   map->rend   = map->range[rank + 1];
183:   map->n      = map->rend - map->rstart;
184:   map->N      = map->range[map->size];
185:   if (PetscDefined(USE_DEBUG)) { /* just check that n, N and bs are consistent */
186:     PetscInt tmp;
187:     PetscCallMPI(MPIU_Allreduce(&map->n, &tmp, 1, MPIU_INT, MPI_SUM, map->comm));
188:     PetscCheck(tmp == map->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local lengths %" PetscInt_FMT " does not equal global length %" PetscInt_FMT ", my local length %" PetscInt_FMT ". The provided PetscLayout is wrong.", tmp, map->N, map->n);
189:     PetscCheck(map->n % map->bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Local size %" PetscInt_FMT " must be divisible by blocksize %" PetscInt_FMT, map->n, map->bs);
190:     PetscCheck(map->N % map->bs == 0, map->comm, PETSC_ERR_PLIB, "Global size %" PetscInt_FMT " must be divisible by blocksize %" PetscInt_FMT, map->N, map->bs);
191:   }
192:   /* lock the layout */
193:   map->setupcalled = PETSC_TRUE;
194:   map->oldn        = map->n;
195:   map->oldN        = map->N;
196:   map->oldbs       = map->bs;
197:   *newmap          = map;
198:   PetscFunctionReturn(PETSC_SUCCESS);
199: }
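
/*
   Example (editor's illustration, not part of the PETSc source): building an explicit
   ownership-range array that gives every rank `nlocal` entries and passing it with
   PETSC_COPY_VALUES so the temporary array can be freed afterwards. `nlocal` is an
   assumed per-rank count (the same on every rank here).

     PetscLayout layout;
     PetscMPIInt size;
     PetscInt   *range, p;

     PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
     PetscCall(PetscMalloc1(size + 1, &range));
     range[0] = 0;
     for (p = 0; p < size; p++) range[p + 1] = range[p] + nlocal;
     PetscCall(PetscLayoutCreateFromRanges(PETSC_COMM_WORLD, range, PETSC_COPY_VALUES, 1, &layout));
     PetscCall(PetscFree(range)); // safe: the layout holds its own copy
     PetscCall(PetscLayoutDestroy(&layout));
*/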

201: /*@
202:   PetscLayoutSetUp - given a map where either the global or the local size has been set,
203:   sets up the map so that it may be used.

205:   Collective

207:   Input Parameter:
208: . map - pointer to the map

210:   Level: developer

212:   Notes:
213:   Typical calling sequence
214: .vb
215:   PetscLayoutCreate(MPI_Comm,PetscLayout *);
216:   PetscLayoutSetBlockSize(PetscLayout,1);
217:   PetscLayoutSetSize(PetscLayout,N); or PetscLayoutSetLocalSize(PetscLayout,n); or both
218:   PetscLayoutSetUp(PetscLayout);
219:   PetscLayoutGetSize(PetscLayout,PetscInt *);
220: .ve

222:   If the range array exists and the local size is not set, everything is computed from the range.

224:   If the local size, the global size, and the range are already set, this function does nothing.

226: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutSetLocalSize()`, `PetscLayoutSetSize()`, `PetscLayoutGetSize()`, `PetscLayoutGetLocalSize()`,
227:           `PetscLayout`, `PetscLayoutDestroy()`,
228:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetBlockSize()`, `PetscLayoutGetBlockSize()`, `PetscLayoutCreate()`, `PetscSplitOwnership()`
229: @*/
230: PetscErrorCode PetscLayoutSetUp(PetscLayout map)
231: {
232:   PetscMPIInt rank;
233:   PetscInt    p;

235:   PetscFunctionBegin;
236:   PetscCheck(!map->setupcalled || !(map->n != map->oldn || map->N != map->oldN), map->comm, PETSC_ERR_ARG_WRONGSTATE, "Layout is already setup with (local=%" PetscInt_FMT ",global=%" PetscInt_FMT "), cannot call setup again with (local=%" PetscInt_FMT ",global=%" PetscInt_FMT ")",
237:              map->oldn, map->oldN, map->n, map->N);
238:   if (map->setupcalled) PetscFunctionReturn(PETSC_SUCCESS);

240:   PetscCheck(map->n < 0 || map->n % map->bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Local size %" PetscInt_FMT " must be divisible by blocksize %" PetscInt_FMT, map->n, map->bs);
241:   PetscCheck(map->N < 0 || map->N % map->bs == 0, map->comm, PETSC_ERR_PLIB, "Global size %" PetscInt_FMT " must be divisible by blocksize %" PetscInt_FMT, map->N, map->bs);

243:   PetscCallMPI(MPI_Comm_rank(map->comm, &rank));
244:   if (map->n > 0) map->n = map->n / map->bs;
245:   if (map->N > 0) map->N = map->N / map->bs;
246:   PetscCall(PetscSplitOwnership(map->comm, &map->n, &map->N));
247:   map->n = map->n * map->bs;
248:   map->N = map->N * map->bs;
249:   if (!map->range) PetscCall(PetscMalloc1(map->size + 1, &map->range));
250:   PetscCallMPI(MPI_Allgather(&map->n, 1, MPIU_INT, map->range + 1, 1, MPIU_INT, map->comm));

252:   map->range[0] = 0;
253:   for (p = 2; p <= map->size; p++) map->range[p] += map->range[p - 1];

255:   map->rstart = map->range[rank];
256:   map->rend   = map->range[rank + 1];

258:   /* lock the layout */
259:   map->setupcalled = PETSC_TRUE;
260:   map->oldn        = map->n;
261:   map->oldN        = map->N;
262:   map->oldbs       = map->bs;
263:   PetscFunctionReturn(PETSC_SUCCESS);
264: }
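
/*
   Example (editor's illustration, not part of the PETSc source): setting only the local
   size and letting PetscLayoutSetUp() (via PetscSplitOwnership()) determine the global
   size as the sum of the local sizes. `nlocal` is an assumed per-rank count.

     PetscLayout layout;
     PetscInt    N;

     PetscCall(PetscLayoutCreate(PETSC_COMM_WORLD, &layout));
     PetscCall(PetscLayoutSetLocalSize(layout, nlocal)); // global size stays PETSC_DECIDE
     PetscCall(PetscLayoutSetUp(layout));
     PetscCall(PetscLayoutGetSize(layout, &N)); // N is now the sum of all local sizes
     PetscCall(PetscLayoutDestroy(&layout));
*/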

266: /*@
267:   PetscLayoutDuplicate - creates a new `PetscLayout` with the same information as a given one. If the output `PetscLayout` already exists it is destroyed first.

269:   Collective

271:   Input Parameter:
272: . in - input `PetscLayout` to be duplicated

274:   Output Parameter:
275: . out - the copy

277:   Level: developer

279:   Note:
280:   `PetscLayoutSetUp()` does not need to be called on the resulting `PetscLayout`

282: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutDestroy()`, `PetscLayoutSetUp()`, `PetscLayoutReference()`
283: @*/
284: PetscErrorCode PetscLayoutDuplicate(PetscLayout in, PetscLayout *out)
285: {
286:   MPI_Comm comm = in->comm;

288:   PetscFunctionBegin;
289:   PetscCall(PetscLayoutDestroy(out));
290:   PetscCall(PetscLayoutCreate(comm, out));
291:   PetscCall(PetscMemcpy(*out, in, sizeof(struct _n_PetscLayout)));
292:   if (in->range) {
293:     PetscCall(PetscMalloc1((*out)->size + 1, &(*out)->range));
294:     PetscCall(PetscArraycpy((*out)->range, in->range, (*out)->size + 1));
295:   }
296:   (*out)->refcnt = 0;
297:   PetscFunctionReturn(PETSC_SUCCESS);
298: }

300: /*@
301:   PetscLayoutReference - Causes a PETSc `Vec` or `Mat` to share a `PetscLayout` with one that already exists.

303:   Collective

305:   Input Parameter:
306: . in - input `PetscLayout` to be copied

308:   Output Parameter:
309: . out - the reference location

311:   Level: developer

313:   Notes:
314:   `PetscLayoutSetUp()` does not need to be called on the resulting `PetscLayout`

316:   If the out location already contains a `PetscLayout` it is destroyed

318: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutDestroy()`, `PetscLayoutSetUp()`, `PetscLayoutDuplicate()`
319: @*/
320: PetscErrorCode PetscLayoutReference(PetscLayout in, PetscLayout *out)
321: {
322:   PetscFunctionBegin;
323:   in->refcnt++;
324:   PetscCall(PetscLayoutDestroy(out));
325:   *out = in;
326:   PetscFunctionReturn(PETSC_SUCCESS);
327: }
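
/*
   Example (editor's illustration, not part of the PETSc source): contrasting
   PetscLayoutReference() with PetscLayoutDuplicate(). A reference shares the same
   layout (reference counted), while a duplicate is an independent copy with its own
   range array. `vecmap` is an assumed, already set up layout.

     PetscLayout shared = NULL, copy = NULL;

     PetscCall(PetscLayoutReference(vecmap, &shared)); // shared points to vecmap itself
     PetscCall(PetscLayoutDuplicate(vecmap, &copy));   // copy is independent, refcnt 0
     PetscCall(PetscLayoutDestroy(&shared));           // only drops the reference
     PetscCall(PetscLayoutDestroy(&copy));
*/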

329: /*@
330:   PetscLayoutSetISLocalToGlobalMapping - sets an `ISLocalToGlobalMapping` into a `PetscLayout`

332:   Collective

334:   Input Parameters:
335: + in   - input `PetscLayout`
336: - ltog - the local to global mapping

338:   Level: developer

340:   Notes:
341:   `PetscLayoutSetUp()` does not need to be called on the resulting `PetscLayout`

343:   If the `PetscLayout` already contains an `ISLocalToGlobalMapping` it is destroyed

345: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutDestroy()`, `PetscLayoutSetUp()`, `PetscLayoutDuplicate()`
346: @*/
347: PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout in, ISLocalToGlobalMapping ltog)
348: {
349:   PetscFunctionBegin;
350:   if (ltog) {
351:     PetscInt bs;

353:     PetscCall(ISLocalToGlobalMappingGetBlockSize(ltog, &bs));
354:     PetscCheck(in->bs == 1 || bs == 1 || in->bs == bs, in->comm, PETSC_ERR_PLIB, "Blocksize of layout %" PetscInt_FMT " must match that of mapping %" PetscInt_FMT " (or one of them must be 1)", in->bs, bs);
355:   }
356:   PetscCall(PetscObjectReference((PetscObject)ltog));
357:   PetscCall(ISLocalToGlobalMappingDestroy(&in->mapping));
358:   in->mapping = ltog;
359:   PetscFunctionReturn(PETSC_SUCCESS);
360: }
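
/*
   Example (editor's illustration, not part of the PETSc source): attaching a
   local-to-global mapping to a layout. The array `globals` of length `nlocal`, listing
   the global index of each local point, is an assumed application-provided input; the
   block sizes of the mapping and the layout must be compatible, as checked above.

     ISLocalToGlobalMapping ltog;

     PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD, 1, nlocal, globals, PETSC_COPY_VALUES, &ltog));
     PetscCall(PetscLayoutSetISLocalToGlobalMapping(layout, ltog));
     PetscCall(ISLocalToGlobalMappingDestroy(&ltog)); // the layout keeps its own reference
*/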

362: /*@
363:   PetscLayoutSetLocalSize - Sets the local size for a `PetscLayout` object.

365:   Collective

367:   Input Parameters:
368: + map - pointer to the map
369: - n   - the local size

371:   Level: developer

373: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutSetSize()`, `PetscLayoutGetSize()`, `PetscLayoutGetLocalSize()`, `PetscLayoutSetUp()`,
374:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetBlockSize()`, `PetscLayoutGetBlockSize()`
375: @*/
376: PetscErrorCode PetscLayoutSetLocalSize(PetscLayout map, PetscInt n)
377: {
378:   PetscFunctionBegin;
379:   PetscCheck(n % map->bs == 0, map->comm, PETSC_ERR_ARG_INCOMP, "Local size %" PetscInt_FMT " not compatible with block size %" PetscInt_FMT, n, map->bs);
380:   map->n = n;
381:   PetscFunctionReturn(PETSC_SUCCESS);
382: }

384: /*@
385:   PetscLayoutGetLocalSize - Gets the local size for a `PetscLayout` object.

387:   Not Collective

389:   Input Parameter:
390: . map - pointer to the map

392:   Output Parameter:
393: . n - the local size

395:   Level: developer

397:   Note:
398:   Call this after the call to `PetscLayoutSetUp()`

400: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutSetSize()`, `PetscLayoutGetSize()`, `PetscLayoutSetUp()`,
401:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetBlockSize()`, `PetscLayoutGetBlockSize()`
402: @*/
403: PetscErrorCode PetscLayoutGetLocalSize(PetscLayout map, PetscInt *n)
404: {
405:   PetscFunctionBegin;
406:   *n = map->n;
407:   PetscFunctionReturn(PETSC_SUCCESS);
408: }

410: /*@
411:   PetscLayoutSetSize - Sets the global size for a `PetscLayout` object.

413:   Logically Collective

415:   Input Parameters:
416: + map - pointer to the map
417: - n   - the global size

419:   Level: developer

421: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutSetLocalSize()`, `PetscLayoutGetLocalSize()`, `PetscLayoutGetSize()`, `PetscLayoutSetUp()`,
422:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetBlockSize()`, `PetscLayoutGetBlockSize()`
423: @*/
424: PetscErrorCode PetscLayoutSetSize(PetscLayout map, PetscInt n)
425: {
426:   PetscFunctionBegin;
427:   map->N = n;
428:   PetscFunctionReturn(PETSC_SUCCESS);
429: }

431: /*@
432:   PetscLayoutGetSize - Gets the global size for a `PetscLayout` object.

434:   Not Collective

436:   Input Parameter:
437: . map - pointer to the map

439:   Output Parameter:
440: . n - the global size

442:   Level: developer

444:   Note:
445:   Call this after the call to `PetscLayoutSetUp()`

447: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutSetLocalSize()`, `PetscLayoutGetLocalSize()`, `PetscLayoutSetSize()`, `PetscLayoutSetUp()`,
448:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetBlockSize()`, `PetscLayoutGetBlockSize()`
449: @*/
450: PetscErrorCode PetscLayoutGetSize(PetscLayout map, PetscInt *n)
451: {
452:   PetscFunctionBegin;
453:   *n = map->N;
454:   PetscFunctionReturn(PETSC_SUCCESS);
455: }

457: /*@
458:   PetscLayoutSetBlockSize - Sets the block size for a `PetscLayout` object.

460:   Logically Collective

462:   Input Parameters:
463: + map - pointer to the map
464: - bs  - the block size

466:   Level: developer

468: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutSetLocalSize()`, `PetscLayoutGetLocalSize()`, `PetscLayoutGetBlockSize()`,
469:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetSize()`, `PetscLayoutGetSize()`, `PetscLayoutSetUp()`
470: @*/
471: PetscErrorCode PetscLayoutSetBlockSize(PetscLayout map, PetscInt bs)
472: {
473:   PetscFunctionBegin;
474:   PetscCheck(bs > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Block size %" PetscInt_FMT " must be positive", bs);
475:   PetscCheck(map->n <= 0 || (map->n % bs) == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size %" PetscInt_FMT " not compatible with block size %" PetscInt_FMT, map->n, bs);
476:   if (map->mapping) {
477:     PetscInt obs;

479:     PetscCall(ISLocalToGlobalMappingGetBlockSize(map->mapping, &obs));
480:     if (obs > 1) PetscCall(ISLocalToGlobalMappingSetBlockSize(map->mapping, bs));
481:   }
482:   map->bs = bs;
483:   PetscFunctionReturn(PETSC_SUCCESS);
484: }
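
/*
   Example (editor's illustration, not part of the PETSc source): for a blocked layout
   the block size must divide any local size that is already set, so it is simplest to
   set the block size first. `bs` and `nlocal` (a multiple of `bs`) are assumed values.

     PetscLayout layout;

     PetscCall(PetscLayoutCreate(PETSC_COMM_WORLD, &layout));
     PetscCall(PetscLayoutSetBlockSize(layout, bs));
     PetscCall(PetscLayoutSetLocalSize(layout, nlocal)); // checked for divisibility by bs
     PetscCall(PetscLayoutSetUp(layout));
     PetscCall(PetscLayoutDestroy(&layout));
*/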

486: /*@
487:   PetscLayoutGetBlockSize - Gets the block size for a `PetscLayout` object.

489:   Not Collective

491:   Input Parameter:
492: . map - pointer to the map

494:   Output Parameter:
495: . bs - the block size

497:   Level: developer

499:   Note:
500:   Call this after the call to `PetscLayoutSetUp()`

502: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutSetLocalSize()`, `PetscLayoutGetLocalSize()`, `PetscLayoutSetSize()`, `PetscLayoutSetUp()`,
503:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetBlockSize()`, `PetscLayoutGetSize()`
504: @*/
505: PetscErrorCode PetscLayoutGetBlockSize(PetscLayout map, PetscInt *bs)
506: {
507:   PetscFunctionBegin;
508:   *bs = map->bs;
509:   PetscFunctionReturn(PETSC_SUCCESS);
510: }

512: /*@
513:   PetscLayoutGetRange - gets the range of values owned by this process

515:   Not Collective

517:   Input Parameter:
518: . map - pointer to the map

520:   Output Parameters:
521: + rstart - first index owned by this process
522: - rend   - one more than the last index owned by this process

524:   Level: developer

526:   Note:
527:   Call this after the call to `PetscLayoutSetUp()`

529: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutSetLocalSize()`, `PetscLayoutGetLocalSize()`, `PetscLayoutSetSize()`,
530:           `PetscLayoutGetSize()`, `PetscLayoutGetRanges()`, `PetscLayoutSetBlockSize()`, `PetscLayoutSetUp()`
531: @*/
532: PetscErrorCode PetscLayoutGetRange(PetscLayout map, PetscInt *rstart, PetscInt *rend)
533: {
534:   PetscFunctionBegin;
535:   if (rstart) *rstart = map->rstart;
536:   if (rend) *rend = map->rend;
537:   PetscFunctionReturn(PETSC_SUCCESS);
538: }
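
/*
   Example (editor's illustration, not part of the PETSc source): looping over the
   globally numbered entries owned by this process, as is typically done when filling a
   Vec or Mat. `map` is an assumed, already set up layout.

     PetscInt rstart, rend, i;

     PetscCall(PetscLayoutGetRange(map, &rstart, &rend));
     for (i = rstart; i < rend; i++) {
       // work on global entry i, which is owned by this process
     }
*/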

540: /*@C
541:   PetscLayoutGetRanges - gets the ranges of values owned by all processes

543:   Not Collective

545:   Input Parameter:
546: . map - pointer to the map

548:   Output Parameter:
549: . range - start of each process's range of indices (the final entry is one more than the
550:           last index on the last process). The length of the array is one more than the number of processes in the MPI
551:           communicator of `map`

553:   Level: developer

555:   Note:
556:   Call this after the call to `PetscLayoutSetUp()`

558:   Fortran Notes:
559: .vb
560:   PetscInt, pointer :: range(:)
561: .ve

563:   Call `PetscLayoutRestoreRanges()` when no longer needed.

565: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutSetLocalSize()`, `PetscLayoutGetLocalSize()`, `PetscLayoutSetSize()`,
566:           `PetscLayoutGetSize()`, `PetscLayoutGetRange()`, `PetscLayoutSetBlockSize()`, `PetscLayoutSetUp()`
567: @*/
568: PetscErrorCode PetscLayoutGetRanges(PetscLayout map, const PetscInt *range[])
569: {
570:   PetscFunctionBegin;
571:   *range = map->range;
572:   PetscFunctionReturn(PETSC_SUCCESS);
573: }

575: /*@
576:   PetscLayoutCompare - Compares two layouts

578:   Not Collective

580:   Input Parameters:
581: + mapa - pointer to the first map
582: - mapb - pointer to the second map

584:   Output Parameter:
585: . congruent - `PETSC_TRUE` if the two layouts are congruent, `PETSC_FALSE` otherwise

587:   Level: beginner

589: .seealso: [PetscLayout](sec_matlayout), `PetscLayoutCreate()`, `PetscLayoutSetLocalSize()`, `PetscLayoutGetLocalSize()`, `PetscLayoutGetBlockSize()`,
590:           `PetscLayoutGetRange()`, `PetscLayoutGetRanges()`, `PetscLayoutSetSize()`, `PetscLayoutGetSize()`, `PetscLayoutSetUp()`
591: @*/
592: PetscErrorCode PetscLayoutCompare(PetscLayout mapa, PetscLayout mapb, PetscBool *congruent)
593: {
594:   PetscFunctionBegin;
595:   *congruent = PETSC_FALSE;
596:   if (mapa->N == mapb->N && mapa->range && mapb->range && mapa->size == mapb->size) PetscCall(PetscArraycmp(mapa->range, mapb->range, mapa->size + 1, congruent));
597:   PetscFunctionReturn(PETSC_SUCCESS);
598: }

600: /*@
601:   PetscLayoutFindOwner - Find the owning MPI process for a global index

603:   Not Collective; No Fortran Support

605:   Input Parameters:
606: + map - the layout
607: - idx - global index to find the owner of

609:   Output Parameter:
610: . owner - the owning rank

612:   Level: developer

614: .seealso: `PetscLayout`, `PetscLayoutFindOwnerIndex()`
615: @*/
616: PetscErrorCode PetscLayoutFindOwner(PetscLayout map, PetscInt idx, PetscMPIInt *owner)
617: {
618:   PetscMPIInt lo = 0, hi, t;

620:   PetscFunctionBegin;
621:   *owner = -1; /* GCC erroneously issues warning about possibly uninitialized use when error condition */
622:   PetscAssert((map->n >= 0) && (map->N >= 0) && (map->range), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PetscLayoutSetUp() must be called first");
623:   PetscAssert(idx >= 0 && idx <= map->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Index %" PetscInt_FMT " is out of range", idx);
624:   hi = map->size;
625:   while (hi - lo > 1) {
626:     t = lo + (hi - lo) / 2;
627:     if (idx < map->range[t]) hi = t;
628:     else lo = t;
629:   }
630:   *owner = lo;
631:   PetscFunctionReturn(PETSC_SUCCESS);
632: }

634: /*@
635:   PetscLayoutFindOwnerIndex - Find the owning MPI process and the local index on that process for a global index

637:   Not Collective; No Fortran Support

639:   Input Parameters:
640: + map - the layout
641: - idx - global index to find the owner of

643:   Output Parameters:
644: + owner - the owning rank
645: - lidx  - local index used by the owner for `idx`

647:   Level: developer

649: .seealso: `PetscLayout`, `PetscLayoutFindOwner()`
650: @*/
651: PetscErrorCode PetscLayoutFindOwnerIndex(PetscLayout map, PetscInt idx, PetscMPIInt *owner, PetscInt *lidx)
652: {
653:   PetscMPIInt lo = 0, hi, t;

655:   PetscFunctionBegin;
656:   PetscAssert((map->n >= 0) && (map->N >= 0) && (map->range), PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "PetscLayoutSetUp() must be called first");
657:   PetscAssert(idx >= 0 && idx <= map->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Index %" PetscInt_FMT " is out of range", idx);
658:   hi = map->size;
659:   while (hi - lo > 1) {
660:     t = lo + (hi - lo) / 2;
661:     if (idx < map->range[t]) hi = t;
662:     else lo = t;
663:   }
664:   if (owner) *owner = lo;
665:   if (lidx) *lidx = idx - map->range[lo];
666:   PetscFunctionReturn(PETSC_SUCCESS);
667: }
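
/*
   Example (editor's illustration, not part of the PETSc source): locating the owning
   rank, and the local index on that rank, of a global index, e.g. when deciding where
   a value must be sent. `map` is an assumed, already set up layout and `idx` an assumed
   global index.

     PetscMPIInt owner;
     PetscInt    lidx;

     PetscCall(PetscLayoutFindOwner(map, idx, &owner));
     PetscCall(PetscLayoutFindOwnerIndex(map, idx, &owner, &lidx)); // also returns the local index
*/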