/* Source: swarm.c */
1: #include "petscdmswarm.h"
2: #include <petsc/private/dmswarmimpl.h>
3: #include <petsc/private/hashsetij.h>
4: #include <petsc/private/petscfeimpl.h>
5: #include <petscviewer.h>
6: #include <petscdraw.h>
7: #include <petscdmplex.h>
8: #include <petscblaslapack.h>
9: #include "../src/dm/impls/swarm/data_bucket.h"
10: #include <petscdmlabel.h>
11: #include <petscsection.h>
/* Event logging for the major DMSWARM operations (registration happens elsewhere — presumably in the package initializer; confirm) */
PetscLogEvent DMSWARM_Migrate, DMSWARM_SetSizes, DMSWARM_AddPoints, DMSWARM_RemovePoints, DMSWARM_Sort;
PetscLogEvent DMSWARM_DataExchangerTopologySetup, DMSWARM_DataExchangerBegin, DMSWARM_DataExchangerEnd;
PetscLogEvent DMSWARM_DataExchangerSendCount, DMSWARM_DataExchangerPack;

/* Human-readable names for the DMSwarm enumerations; each list is NULL-terminated.
   Note the remap list also carries the enum type name and option prefix, the convention
   used by PetscOptionsEnum()-style lookups. */
const char *DMSwarmTypeNames[] = {"basic", "pic", NULL};
const char *DMSwarmMigrateTypeNames[] = {"basic", "dmcellnscatter", "dmcellexact", "user", NULL};
const char *DMSwarmCollectTypeNames[] = {"basic", "boundingbox", "general", "user", NULL};
const char *DMSwarmRemapTypeNames[] = {"none", "pfak", "colella", "DMSwarmRemapType", "DMSWARM_REMAP_", NULL};
const char *DMSwarmPICLayoutTypeNames[] = {"regular", "gauss", "subdivision", NULL};

/* Reserved names of the built-in registered swarm fields */
const char DMSwarmField_pid[] = "DMSwarm_pid";
const char DMSwarmField_rank[] = "DMSwarm_rank";
const char DMSwarmPICField_coor[] = "DMSwarmPIC_coor";

/* Composed-data id used to tag Vecs created from swarm fields; -1 until registered (registration not visible here — TODO confirm where it is set) */
PetscInt SwarmDataFieldId = -1;
29: #if defined(PETSC_HAVE_HDF5)
30: #include <petscviewerhdf5.h>
32: static PetscErrorCode VecView_Swarm_HDF5_Internal(Vec v, PetscViewer viewer)
33: {
34: DM dm;
35: PetscReal seqval;
36: PetscInt seqnum, bs;
37: PetscBool isseq, ists;
39: PetscFunctionBegin;
40: PetscCall(VecGetDM(v, &dm));
41: PetscCall(VecGetBlockSize(v, &bs));
42: PetscCall(PetscViewerHDF5PushGroup(viewer, "/particle_fields"));
43: PetscCall(PetscObjectTypeCompare((PetscObject)v, VECSEQ, &isseq));
44: PetscCall(DMGetOutputSequenceNumber(dm, &seqnum, &seqval));
45: PetscCall(PetscViewerHDF5IsTimestepping(viewer, &ists));
46: if (ists) PetscCall(PetscViewerHDF5SetTimestep(viewer, seqnum));
47: /* PetscCall(DMSequenceView_HDF5(dm, "time", seqnum, (PetscScalar) seqval, viewer)); */
48: PetscCall(VecViewNative(v, viewer));
49: PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)v, "Nc", PETSC_INT, (void *)&bs));
50: PetscCall(PetscViewerHDF5PopGroup(viewer));
51: PetscFunctionReturn(PETSC_SUCCESS);
52: }
54: static PetscErrorCode DMSwarmView_HDF5(DM dm, PetscViewer viewer)
55: {
56: DMSwarmCellDM celldm;
57: Vec coordinates;
58: PetscInt Np, Nfc;
59: PetscBool isseq;
60: const char **coordFields;
62: PetscFunctionBegin;
63: PetscCall(DMSwarmGetCellDMActive(dm, &celldm));
64: PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
65: PetscCheck(Nfc == 1, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "We only support a single coordinate field right now, not %" PetscInt_FMT, Nfc);
66: PetscCall(DMSwarmGetSize(dm, &Np));
67: PetscCall(DMSwarmCreateGlobalVectorFromField(dm, coordFields[0], &coordinates));
68: PetscCall(PetscObjectSetName((PetscObject)coordinates, "coordinates"));
69: PetscCall(PetscViewerHDF5PushGroup(viewer, "/particles"));
70: PetscCall(PetscObjectTypeCompare((PetscObject)coordinates, VECSEQ, &isseq));
71: PetscCall(VecViewNative(coordinates, viewer));
72: PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)coordinates, "Np", PETSC_INT, (void *)&Np));
73: PetscCall(PetscViewerHDF5PopGroup(viewer));
74: PetscCall(DMSwarmDestroyGlobalVectorFromField(dm, coordFields[0], &coordinates));
75: PetscFunctionReturn(PETSC_SUCCESS);
76: }
77: #endif
79: static PetscErrorCode VecView_Swarm(Vec v, PetscViewer viewer)
80: {
81: DM dm;
82: #if defined(PETSC_HAVE_HDF5)
83: PetscBool ishdf5;
84: #endif
86: PetscFunctionBegin;
87: PetscCall(VecGetDM(v, &dm));
88: PetscCheck(dm, PetscObjectComm((PetscObject)v), PETSC_ERR_ARG_WRONG, "Vector not generated from a DM");
89: #if defined(PETSC_HAVE_HDF5)
90: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
91: if (ishdf5) {
92: PetscCall(VecView_Swarm_HDF5_Internal(v, viewer));
93: PetscFunctionReturn(PETSC_SUCCESS);
94: }
95: #endif
96: PetscCall(VecViewNative(v, viewer));
97: PetscFunctionReturn(PETSC_SUCCESS);
98: }
100: /*@C
101: DMSwarmVectorGetField - Gets the fields from which to define a `Vec` object
102: when `DMCreateLocalVector()`, or `DMCreateGlobalVector()` is called
104: Not collective
106: Input Parameter:
107: . sw - a `DMSWARM`
109: Output Parameters:
110: + Nf - the number of fields
111: - fieldnames - the textual name given to each registered field, or NULL if it has not been set
113: Level: beginner
115: .seealso: `DM`, `DMSWARM`, `DMSwarmVectorDefineField()`, `DMSwarmRegisterPetscDatatypeField()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`
116: @*/
PetscErrorCode DMSwarmVectorGetField(DM sw, PetscInt *Nf, const char **fieldnames[])
{
  DMSwarmCellDM celldm;

  PetscFunctionBegin;
  /* The field list lives on the active cell DM; simply forward the query there */
  PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
  PetscCall(DMSwarmCellDMGetFields(celldm, Nf, fieldnames));
  PetscFunctionReturn(PETSC_SUCCESS);
}
128: /*@
129: DMSwarmVectorDefineField - Sets the field from which to define a `Vec` object
130: when `DMCreateLocalVector()`, or `DMCreateGlobalVector()` is called
132: Collective
134: Input Parameters:
135: + dm - a `DMSWARM`
136: - fieldname - the textual name given to each registered field
138: Level: beginner
140: Notes:
141: The field with name `fieldname` must be defined as having a data type of `PetscScalar`.
143: This function must be called prior to calling `DMCreateLocalVector()`, `DMCreateGlobalVector()`.
144: Multiple calls to `DMSwarmVectorDefineField()` are permitted.
146: .seealso: `DM`, `DMSWARM`, `DMSwarmVectorDefineFields()`, `DMSwarmVectorGetField()`, `DMSwarmRegisterPetscDatatypeField()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`
147: @*/
PetscErrorCode DMSwarmVectorDefineField(DM dm, const char fieldname[])
{
  PetscFunctionBegin;
  /* Single-field convenience wrapper around DMSwarmVectorDefineFields() */
  PetscCall(DMSwarmVectorDefineFields(dm, 1, &fieldname));
  PetscFunctionReturn(PETSC_SUCCESS);
}
155: /*@C
156: DMSwarmVectorDefineFields - Sets the fields from which to define a `Vec` object
157: when `DMCreateLocalVector()`, or `DMCreateGlobalVector()` is called
159: Collective, No Fortran support
161: Input Parameters:
162: + sw - a `DMSWARM`
163: . Nf - the number of fields
164: - fieldnames - the textual name given to each registered field
166: Level: beginner
168: Notes:
169: Each field with name in `fieldnames` must be defined as having a data type of `PetscScalar`.
171: This function must be called prior to calling `DMCreateLocalVector()`, `DMCreateGlobalVector()`.
172: Multiple calls to `DMSwarmVectorDefineField()` are permitted.
174: .seealso: `DM`, `DMSWARM`, `DMSwarmVectorDefineField()`, `DMSwarmVectorGetField()`, `DMSwarmRegisterPetscDatatypeField()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`
175: @*/
176: PetscErrorCode DMSwarmVectorDefineFields(DM sw, PetscInt Nf, const char *fieldnames[])
177: {
178: DM_Swarm *swarm = (DM_Swarm *)sw->data;
179: DMSwarmCellDM celldm;
181: PetscFunctionBegin;
183: if (fieldnames) PetscAssertPointer(fieldnames, 3);
184: if (!swarm->issetup) PetscCall(DMSetUp(sw));
185: PetscCheck(Nf >= 0, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_OUTOFRANGE, "Number of fields must be non-negative, not %" PetscInt_FMT, Nf);
186: // Create a dummy cell DM if none has been specified (I think we should not support this mode)
187: if (!swarm->activeCellDM) {
188: DM dm;
189: DMSwarmCellDM celldm;
191: PetscCall(DMCreate(PetscObjectComm((PetscObject)sw), &dm));
192: PetscCall(DMSetType(dm, DMSHELL));
193: PetscCall(PetscObjectSetName((PetscObject)dm, "dummy"));
194: PetscCall(DMSwarmCellDMCreate(dm, 0, NULL, 0, NULL, &celldm));
195: PetscCall(DMDestroy(&dm));
196: PetscCall(DMSwarmAddCellDM(sw, celldm));
197: PetscCall(DMSwarmCellDMDestroy(&celldm));
198: PetscCall(DMSwarmSetCellDMActive(sw, "dummy"));
199: }
200: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
201: for (PetscInt f = 0; f < celldm->Nf; ++f) PetscCall(PetscFree(celldm->dmFields[f]));
202: PetscCall(PetscFree(celldm->dmFields));
204: celldm->Nf = Nf;
205: PetscCall(PetscMalloc1(Nf, &celldm->dmFields));
206: for (PetscInt f = 0; f < Nf; ++f) {
207: PetscDataType type;
209: // Check all fields are of type PETSC_REAL or PETSC_SCALAR
210: PetscCall(DMSwarmGetFieldInfo(sw, fieldnames[f], NULL, &type));
211: PetscCheck(type == PETSC_REAL, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Only valid for PETSC_REAL");
212: PetscCall(PetscStrallocpy(fieldnames[f], (char **)&celldm->dmFields[f]));
213: }
214: PetscFunctionReturn(PETSC_SUCCESS);
215: }
/* requires DMSwarmDefineFieldVector has been called */
static PetscErrorCode DMCreateGlobalVector_Swarm(DM sw, Vec *vec)
{
  DM_Swarm     *swarm = (DM_Swarm *)sw->data;
  DMSwarmCellDM celldm;
  Vec           x;
  char          name[PETSC_MAX_PATH_LEN];
  PetscInt      bs = 0, n;

  PetscFunctionBegin;
  if (!swarm->issetup) PetscCall(DMSetUp(sw));
  PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
  PetscCheck(celldm->Nf, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "Active cell DM does not define any fields");
  PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &n, NULL, NULL));

  /* Name the vector after the fields it aggregates, accumulating the total block size */
  PetscCall(PetscStrncpy(name, "DMSwarmField", PETSC_MAX_PATH_LEN));
  for (PetscInt f = 0; f < celldm->Nf; ++f) {
    PetscInt fbs;
    PetscCall(PetscStrlcat(name, "_", PETSC_MAX_PATH_LEN));
    PetscCall(PetscStrlcat(name, celldm->dmFields[f], PETSC_MAX_PATH_LEN));
    PetscCall(DMSwarmGetFieldInfo(sw, celldm->dmFields[f], &fbs, NULL));
    bs += fbs;
  }
  /* One block of size bs per local particle */
  PetscCall(VecCreate(PetscObjectComm((PetscObject)sw), &x));
  PetscCall(PetscObjectSetName((PetscObject)x, name));
  PetscCall(VecSetSizes(x, n * bs, PETSC_DETERMINE));
  PetscCall(VecSetBlockSize(x, bs));
  PetscCall(VecSetDM(x, sw));
  PetscCall(VecSetFromOptions(x));
  /* Route VecView through the swarm-aware viewer (HDF5 dispatch etc.) */
  PetscCall(VecSetOperation(x, VECOP_VIEW, (PetscErrorCodeFn *)VecView_Swarm));
  *vec = x;
  PetscFunctionReturn(PETSC_SUCCESS);
}
251: /* requires DMSwarmDefineFieldVector has been called */
252: static PetscErrorCode DMCreateLocalVector_Swarm(DM sw, Vec *vec)
253: {
254: DM_Swarm *swarm = (DM_Swarm *)sw->data;
255: DMSwarmCellDM celldm;
256: Vec x;
257: char name[PETSC_MAX_PATH_LEN];
258: PetscInt bs = 0, n;
260: PetscFunctionBegin;
261: if (!swarm->issetup) PetscCall(DMSetUp(sw));
262: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
263: PetscCheck(celldm->Nf, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "Active cell DM does not define any fields");
264: PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &n, NULL, NULL));
266: PetscCall(PetscStrncpy(name, "DMSwarmField", PETSC_MAX_PATH_LEN));
267: for (PetscInt f = 0; f < celldm->Nf; ++f) {
268: PetscInt fbs;
269: PetscCall(PetscStrlcat(name, "_", PETSC_MAX_PATH_LEN));
270: PetscCall(PetscStrlcat(name, celldm->dmFields[f], PETSC_MAX_PATH_LEN));
271: PetscCall(DMSwarmGetFieldInfo(sw, celldm->dmFields[f], &fbs, NULL));
272: bs += fbs;
273: }
274: PetscCall(VecCreate(PETSC_COMM_SELF, &x));
275: PetscCall(PetscObjectSetName((PetscObject)x, name));
276: PetscCall(VecSetSizes(x, n * bs, PETSC_DETERMINE));
277: PetscCall(VecSetBlockSize(x, bs));
278: PetscCall(VecSetDM(x, sw));
279: PetscCall(VecSetFromOptions(x));
280: *vec = x;
281: PetscFunctionReturn(PETSC_SUCCESS);
282: }
/* Tear down a Vec created by DMSwarmCreateVectorFromField_Private(): the Vec wraps the
   swarm field's storage in place (VecPlaceArray), so the array must be reset and the
   field access released before the Vec is destroyed. */
static PetscErrorCode DMSwarmDestroyVectorFromField_Private(DM dm, const char fieldname[], Vec *vec)
{
  DM_Swarm        *swarm = (DM_Swarm *)dm->data;
  DMSwarmDataField gfield;
  PetscInt         bs, nlocal, fid = -1, cfid = -2;
  PetscBool        flg;

  PetscFunctionBegin;
  /* check vector is an inplace array */
  PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldIdByName(swarm->db, fieldname, &fid));
  PetscCall(PetscObjectComposedDataGetInt((PetscObject)*vec, SwarmDataFieldId, cfid, flg));
  (void)flg; /* avoid compiler warning */
  /* Guard composed at creation time must match this field's id */
  PetscCheck(cfid == fid, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "Vector being destroyed was not created from DMSwarm field(%s)! %" PetscInt_FMT " != %" PetscInt_FMT, fieldname, cfid, fid);
  PetscCall(VecGetLocalSize(*vec, &nlocal));
  PetscCall(VecGetBlockSize(*vec, &bs));
  /* If particles were added/removed since creation, the placed array may dangle */
  PetscCheck(nlocal / bs == swarm->db->L, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "DMSwarm sizes have changed since vector was created - cannot ensure pointers are valid");
  PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
  /* Release the field access taken at creation, then detach the array before destroying */
  PetscCall(DMSwarmDataFieldRestoreAccess(gfield));
  PetscCall(VecResetArray(*vec));
  PetscCall(VecDestroy(vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
307: static PetscErrorCode DMSwarmCreateVectorFromField_Private(DM dm, const char fieldname[], MPI_Comm comm, Vec *vec)
308: {
309: DM_Swarm *swarm = (DM_Swarm *)dm->data;
310: PetscDataType type;
311: PetscScalar *array;
312: PetscInt bs, n, fid;
313: char name[PETSC_MAX_PATH_LEN];
314: PetscMPIInt size;
315: PetscBool iscuda, iskokkos, iship;
317: PetscFunctionBegin;
318: if (!swarm->issetup) PetscCall(DMSetUp(dm));
319: PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &n, NULL, NULL));
320: PetscCall(DMSwarmGetField(dm, fieldname, &bs, &type, (void **)&array));
321: PetscCheck(type == PETSC_REAL, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Only valid for PETSC_REAL");
323: PetscCallMPI(MPI_Comm_size(comm, &size));
324: PetscCall(PetscStrcmp(dm->vectype, VECKOKKOS, &iskokkos));
325: PetscCall(PetscStrcmp(dm->vectype, VECCUDA, &iscuda));
326: PetscCall(PetscStrcmp(dm->vectype, VECHIP, &iship));
327: PetscCall(VecCreate(comm, vec));
328: PetscCall(VecSetSizes(*vec, n * bs, PETSC_DETERMINE));
329: PetscCall(VecSetBlockSize(*vec, bs));
330: if (iskokkos) PetscCall(VecSetType(*vec, VECKOKKOS));
331: else if (iscuda) PetscCall(VecSetType(*vec, VECCUDA));
332: else if (iship) PetscCall(VecSetType(*vec, VECHIP));
333: else PetscCall(VecSetType(*vec, VECSTANDARD));
334: PetscCall(VecPlaceArray(*vec, array));
336: PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN - 1, "DMSwarmSharedField_%s", fieldname));
337: PetscCall(PetscObjectSetName((PetscObject)*vec, name));
339: /* Set guard */
340: PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldIdByName(swarm->db, fieldname, &fid));
341: PetscCall(PetscObjectComposedDataSetInt((PetscObject)*vec, SwarmDataFieldId, fid));
343: PetscCall(VecSetDM(*vec, dm));
344: PetscCall(VecSetOperation(*vec, VECOP_VIEW, (PetscErrorCodeFn *)VecView_Swarm));
345: PetscFunctionReturn(PETSC_SUCCESS);
346: }
/* Tear down a Vec created by DMSwarmCreateVectorFromFields_Private(). Unlike the
   single-field variant, the Vec owns a *copy* of the data, so the (possibly modified)
   interleaved values are first copied back into each swarm field before destruction. */
static PetscErrorCode DMSwarmDestroyVectorFromFields_Private(DM sw, PetscInt Nf, const char *fieldnames[], Vec *vec)
{
  DM_Swarm          *swarm = (DM_Swarm *)sw->data;
  const PetscScalar *array;
  PetscInt           bs, n, id = 0, cid = -2;
  PetscBool          flg;

  PetscFunctionBegin;
  /* Guard: the sum of field ids must match the id composed on the Vec at creation.
     NOTE(review): a plain sum can collide for different field sets — presumably acceptable; confirm */
  for (PetscInt f = 0; f < Nf; ++f) {
    PetscInt fid;

    PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldIdByName(swarm->db, fieldnames[f], &fid));
    id += fid;
  }
  PetscCall(PetscObjectComposedDataGetInt((PetscObject)*vec, SwarmDataFieldId, cid, flg));
  (void)flg; /* avoid compiler warning */
  PetscCheck(cid == id, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "Vector being destroyed was not created from DMSwarm field(%s)! %" PetscInt_FMT " != %" PetscInt_FMT, fieldnames[0], cid, id);
  PetscCall(VecGetLocalSize(*vec, &n));
  PetscCall(VecGetBlockSize(*vec, &bs));
  n /= bs; /* n is now the local particle count */
  PetscCheck(n == swarm->db->L, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "DMSwarm sizes have changed since vector was created - cannot ensure pointers are valid");
  /* Scatter the interleaved vector entries back into each field's contiguous storage */
  PetscCall(VecGetArrayRead(*vec, &array));
  for (PetscInt f = 0, off = 0; f < Nf; ++f) {
    PetscScalar  *farray;
    PetscDataType ftype;
    PetscInt      fbs;

    PetscCall(DMSwarmGetField(sw, fieldnames[f], &fbs, &ftype, (void **)&farray));
    PetscCheck(off + fbs <= bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid blocksize %" PetscInt_FMT " < %" PetscInt_FMT, bs, off + fbs);
    for (PetscInt i = 0; i < n; ++i) {
      for (PetscInt b = 0; b < fbs; ++b) farray[i * fbs + b] = array[i * bs + off + b];
    }
    off += fbs;
    PetscCall(DMSwarmRestoreField(sw, fieldnames[f], &fbs, &ftype, (void **)&farray));
  }
  PetscCall(VecRestoreArrayRead(*vec, &array));
  PetscCall(VecDestroy(vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Create a Vec on comm holding an interleaved *copy* of several real-valued swarm fields:
   entry (i*bs + off + b) is component b of field f for particle i, where off is the running
   component offset of field f. The summed field ids are composed on the Vec as a guard for
   the matching destroy routine, which copies the data back. */
static PetscErrorCode DMSwarmCreateVectorFromFields_Private(DM sw, PetscInt Nf, const char *fieldnames[], MPI_Comm comm, Vec *vec)
{
  DM_Swarm    *swarm = (DM_Swarm *)sw->data;
  PetscScalar *array;
  PetscInt     n, bs = 0, id = 0;
  char         name[PETSC_MAX_PATH_LEN];

  PetscFunctionBegin;
  if (!swarm->issetup) PetscCall(DMSetUp(sw));
  PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &n, NULL, NULL));
  /* Total block size: sum of per-field block sizes; only PETSC_REAL fields allowed */
  for (PetscInt f = 0; f < Nf; ++f) {
    PetscDataType ftype;
    PetscInt      fbs;

    PetscCall(DMSwarmGetFieldInfo(sw, fieldnames[f], &fbs, &ftype));
    PetscCheck(ftype == PETSC_REAL, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Only valid for PETSC_REAL");
    bs += fbs;
  }

  PetscCall(VecCreate(comm, vec));
  PetscCall(VecSetSizes(*vec, n * bs, PETSC_DETERMINE));
  PetscCall(VecSetBlockSize(*vec, bs));
  PetscCall(VecSetType(*vec, sw->vectype));

  /* Gather each field's values into the interleaved layout described above */
  PetscCall(VecGetArrayWrite(*vec, &array));
  for (PetscInt f = 0, off = 0; f < Nf; ++f) {
    PetscScalar  *farray;
    PetscDataType ftype;
    PetscInt      fbs;

    PetscCall(DMSwarmGetField(sw, fieldnames[f], &fbs, &ftype, (void **)&farray));
    for (PetscInt i = 0; i < n; ++i) {
      for (PetscInt b = 0; b < fbs; ++b) array[i * bs + off + b] = farray[i * fbs + b];
    }
    off += fbs;
    PetscCall(DMSwarmRestoreField(sw, fieldnames[f], &fbs, &ftype, (void **)&farray));
  }
  PetscCall(VecRestoreArrayWrite(*vec, &array));

  /* Name the vector after the fields it aggregates */
  PetscCall(PetscStrncpy(name, "DMSwarmField", PETSC_MAX_PATH_LEN));
  for (PetscInt f = 0; f < Nf; ++f) {
    PetscCall(PetscStrlcat(name, "_", PETSC_MAX_PATH_LEN));
    PetscCall(PetscStrlcat(name, fieldnames[f], PETSC_MAX_PATH_LEN));
  }
  PetscCall(PetscObjectSetName((PetscObject)*vec, name));

  /* Guard: sum of field ids, checked by DMSwarmDestroyVectorFromFields_Private() */
  for (PetscInt f = 0; f < Nf; ++f) {
    PetscInt fid;

    PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldIdByName(swarm->db, fieldnames[f], &fid));
    id += fid;
  }
  PetscCall(PetscObjectComposedDataSetInt((PetscObject)*vec, SwarmDataFieldId, id));

  PetscCall(VecSetDM(*vec, sw));
  PetscCall(VecSetOperation(*vec, VECOP_VIEW, (PetscErrorCodeFn *)VecView_Swarm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
447: /*@
448: DMSwarmPreallocateMassMatrix - Preallocate particle mass matrix between a `DMSWARM` and a `DMPLEX`
450: Collective
452: Input Parameters:
453: + dmc - The `DMSWARM` object
454: . dmf - The `DMPLEX` object
455: . mass - The mass matrix to preallocate
456: . rStart - The starting row index for this process
457: . maxC - The maximum number of columns per row
458: - ctx - The user context
460: Level: developer
462: .seealso: [](ch_unstructured), `DM`, `DMSWARM`, `DMPLEX`, `DMCreateMassMatrix()`, `DMSwarmFillMassMatrix()`
463: @*/
464: PetscErrorCode DMSwarmPreallocateMassMatrix(DM dmc, DM dmf, Mat mass, PetscInt *rStart, PetscInt *maxC, PetscCtx ctx)
465: {
466: MPI_Comm comm;
467: PetscHSetIJ ht;
468: PetscDS ds;
469: PetscSection fsection, globalFSection;
470: PetscLayout rLayout, colLayout;
471: PetscInt *dnz, *onz;
472: PetscInt cStart, cEnd, locRows, locCols, colStart, colEnd, Nf, totNc = 0;
474: PetscFunctionBegin;
478: PetscAssertPointer(rStart, 4);
479: PetscAssertPointer(maxC, 5);
480: PetscCall(PetscObjectGetComm((PetscObject)mass, &comm));
481: PetscCall(DMPlexGetHeightStratum(dmf, 0, &cStart, &cEnd));
482: PetscCall(DMGetLocalSection(dmf, &fsection));
483: PetscCall(DMGetGlobalSection(dmf, &globalFSection));
484: PetscCall(DMGetDS(dmf, &ds));
485: PetscCall(PetscDSGetNumFields(ds, &Nf));
486: PetscCall(MatGetLocalSize(mass, &locRows, &locCols));
487: PetscCall(PetscCalloc2(locRows, &dnz, locRows, &onz));
488: PetscCall(PetscHSetIJCreate(&ht));
490: PetscCall(PetscLayoutCreate(comm, &colLayout));
491: PetscCall(PetscLayoutSetLocalSize(colLayout, locCols));
492: PetscCall(PetscLayoutSetBlockSize(colLayout, 1));
493: PetscCall(PetscLayoutSetUp(colLayout));
494: PetscCall(PetscLayoutGetRange(colLayout, &colStart, &colEnd));
495: PetscCall(PetscLayoutDestroy(&colLayout));
497: PetscCall(PetscLayoutCreate(comm, &rLayout));
498: PetscCall(PetscLayoutSetLocalSize(rLayout, locRows));
499: PetscCall(PetscLayoutSetBlockSize(rLayout, 1));
500: PetscCall(PetscLayoutSetUp(rLayout));
501: PetscCall(PetscLayoutGetRange(rLayout, rStart, NULL));
502: PetscCall(PetscLayoutDestroy(&rLayout));
504: for (PetscInt field = 0; field < Nf; ++field) {
505: PetscObject obj;
506: PetscClassId id;
507: PetscInt Nc;
509: PetscCall(PetscDSGetDiscretization(ds, field, &obj));
510: PetscCall(PetscObjectGetClassId(obj, &id));
511: if (id == PETSCFE_CLASSID) PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
512: else PetscCall(PetscFVGetNumComponents((PetscFV)obj, &Nc));
513: totNc += Nc;
514: }
515: PetscCall(DMSwarmSortGetAccess(dmc));
516: for (PetscInt field = 0; field < Nf; ++field) {
517: PetscObject obj;
518: PetscClassId id;
519: PetscInt Nc;
521: PetscCall(PetscDSGetDiscretization(ds, field, &obj));
522: PetscCall(PetscObjectGetClassId(obj, &id));
523: if (id == PETSCFE_CLASSID) PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
524: else PetscCall(PetscFVGetNumComponents((PetscFV)obj, &Nc));
526: for (PetscInt cell = cStart; cell < cEnd; ++cell) {
527: PetscInt *findices, *cindices; /* fine is vertices, coarse is particles */
528: PetscInt numFIndices, numCIndices;
530: PetscCall(DMPlexGetClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
531: PetscCall(DMSwarmSortGetPointsPerCell(dmc, cell, &numCIndices, &cindices));
532: *maxC = PetscMax(*maxC, numCIndices);
533: {
534: PetscHashIJKey key;
535: PetscBool missing;
536: for (PetscInt i = 0; i < numFIndices; ++i) {
537: key.j = findices[i]; /* global column (from Plex) */
538: if (key.j >= 0) {
539: /* Get indices for coarse elements */
540: for (PetscInt j = 0; j < numCIndices; ++j) {
541: for (PetscInt c = 0; c < Nc; ++c) {
542: // TODO Need field offset on particle here
543: key.i = cindices[j] * totNc + c + *rStart; /* global cols (from Swarm) */
544: if (key.i < 0) continue;
545: PetscCall(PetscHSetIJQueryAdd(ht, key, &missing));
546: PetscCheck(missing, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "Set new value at %" PetscInt_FMT ",%" PetscInt_FMT, key.i, key.j);
547: if ((key.j >= colStart) && (key.j < colEnd)) ++dnz[key.i - *rStart];
548: else ++onz[key.i - *rStart];
549: }
550: }
551: }
552: }
553: PetscCall(DMSwarmSortRestorePointsPerCell(dmc, cell, &numCIndices, &cindices));
554: }
555: PetscCall(DMPlexRestoreClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
556: }
557: }
558: PetscCall(DMSwarmSortRestoreAccess(dmc));
559: PetscCall(PetscHSetIJDestroy(&ht));
560: PetscCall(MatXAIJSetPreallocation(mass, 1, dnz, onz, NULL, NULL));
561: PetscCall(MatSetOption(mass, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
562: PetscCall(PetscFree2(dnz, onz));
563: PetscFunctionReturn(PETSC_SUCCESS);
564: }
566: /*@
567: DMSwarmFillMassMatrix - Assemble the particle mass matrix between a `DMSWARM` and a `DMPLEX`
569: Collective
571: Input Parameters:
572: + dmc - The `DMSWARM` object
573: . dmf - The `DMPLEX` object
574: . mass - The mass matrix to fill up
575: . rStart - The starting row index for this process
576: . maxC - The maximum number of columns per row
577: . useDeltaFunction - Flag to use a delta function for the particle shape function
578: . Nfc - The number of swarm coordinate fields
579: . bs - The block size for each swarm coordinate field
580: . coordVals - The values for each particle for each swarm coordinate field
581: - ctx - The user context
583: Level: developer
585: .seealso: [](ch_unstructured), `DM`, `DMSWARM`, `DMPLEX`, `DMCreateMassMatrix()`, `DMSwarmPreallocateMassMatrix()`
586: @*/
PetscErrorCode DMSwarmFillMassMatrix(DM dmc, DM dmf, Mat mass, PetscInt rStart, PetscInt maxC, PetscBool useDeltaFunction, PetscInt Nfc, const PetscInt bs[], PetscReal *coordVals[], PetscCtx ctx)
{
  const char  *name = "Mass Matrix";
  PetscDS      ds;
  PetscSection fsection, globalFSection;
  PetscInt     dim, cStart, cEnd, Nf, totDim, totNc = 0, *rowIDXs;
  PetscReal   *xi, *v0, *J, *invJ, detJ = 1.0, v0ref[3] = {-1.0, -1.0, -1.0};
  PetscScalar *elemMat;

  PetscFunctionBegin;
  PetscCall(DMGetCoordinateDim(dmf, &dim));
  PetscCall(DMPlexGetHeightStratum(dmf, 0, &cStart, &cEnd));
  PetscCall(DMGetLocalSection(dmf, &fsection));
  PetscCall(DMGetGlobalSection(dmf, &globalFSection));
  PetscCall(DMGetDS(dmf, &ds));
  PetscCall(PetscDSGetNumFields(ds, &Nf));
  PetscCall(PetscDSGetTotalDimension(ds, &totDim));
  /* Total component count over all fields: the per-particle row stride */
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscObject  obj;
    PetscClassId id;
    PetscInt     Nc;

    PetscCall(PetscDSGetDiscretization(ds, field, &obj));
    PetscCall(PetscObjectGetClassId(obj, &id));
    if (id == PETSCFE_CLASSID) PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    else PetscCall(PetscFVGetNumComponents((PetscFV)obj, &Nc));
    totNc += Nc;
  }

  /* Workspace sized by the widest cell (maxC particles), plus affine geometry buffers */
  PetscCall(PetscMalloc3(maxC * totNc * totDim, &elemMat, maxC * totNc, &rowIDXs, maxC * dim, &xi));
  PetscCall(PetscMalloc3(dim, &v0, dim * dim, &J, dim * dim, &invJ));
  PetscCall(DMSwarmSortGetAccess(dmc));
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscTabulation Tcoarse;
    PetscObject     obj;
    PetscClassId    id;
    PetscInt        Nc;

    PetscCall(PetscDSGetDiscretization(ds, field, &obj));
    PetscCall(PetscObjectGetClassId(obj, &id));
    if (id == PETSCFE_CLASSID) PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    else PetscCall(PetscFVGetNumComponents((PetscFV)obj, &Nc));

    for (PetscInt cell = cStart; cell < cEnd; ++cell) {
      PetscInt *findices, *cindices;
      PetscInt  numFIndices, numCIndices;

      /* TODO: Use DMField instead of assuming affine */
      PetscCall(DMPlexComputeCellGeometryFEM(dmf, cell, NULL, v0, J, invJ, &detJ));
      PetscCall(DMPlexGetClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
      PetscCall(DMSwarmSortGetPointsPerCell(dmc, cell, &numCIndices, &cindices));
      /* Map each particle's physical coordinates into the reference cell */
      for (PetscInt j = 0; j < numCIndices; ++j) {
        /* NOTE(review): assumes summed coordinate block size fits in 8 entries; the off == dim
           check below runs only after the writes — confirm no layout can exceed 8 */
        PetscReal xr[8];
        PetscInt  off = 0;

        for (PetscInt i = 0; i < Nfc; ++i) {
          for (PetscInt b = 0; b < bs[i]; ++b, ++off) xr[off] = coordVals[i][cindices[j] * bs[i] + b];
        }
        PetscCheck(off == dim, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "The total block size of coordinates is %" PetscInt_FMT " != %" PetscInt_FMT " the DM coordinate dimension", off, dim);
        CoordinatesRealToRef(dim, dim, v0ref, v0, invJ, xr, &xi[j * dim]);
      }
      /* Tabulate this field's basis at the particle reference locations */
      if (id == PETSCFE_CLASSID) PetscCall(PetscFECreateTabulation((PetscFE)obj, 1, numCIndices, xi, 0, &Tcoarse));
      else PetscCall(PetscFVCreateTabulation((PetscFV)obj, 1, numCIndices, xi, 0, &Tcoarse));
      /* Get elemMat entries by multiplying by weight */
      /* NOTE(review): zeroed length uses Nc while the write index below strides by totNc —
         verify for multi-field discretizations (see the field-offset TODOs) */
      PetscCall(PetscArrayzero(elemMat, numCIndices * Nc * totDim));
      for (PetscInt i = 0; i < numFIndices / Nc; ++i) {
        for (PetscInt j = 0; j < numCIndices; ++j) {
          for (PetscInt c = 0; c < Nc; ++c) {
            // TODO Need field offset on particle and field here
            /* B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c */
            elemMat[(j * totNc + c) * numFIndices + i * Nc + c] += Tcoarse->T[0][(j * numFIndices + i * Nc + c) * Nc + c] * (useDeltaFunction ? 1.0 : detJ);
          }
        }
      }
      /* Global row indices: particle row block plus component offset */
      for (PetscInt j = 0; j < numCIndices; ++j)
        // TODO Need field offset on particle here
        for (PetscInt c = 0; c < Nc; ++c) rowIDXs[j * Nc + c] = cindices[j] * totNc + c + rStart;
      if (0) PetscCall(DMPrintCellMatrix(cell, name, numCIndices * Nc, numFIndices, elemMat)); /* debugging aid, disabled */
      PetscCall(MatSetValues(mass, numCIndices * Nc, rowIDXs, numFIndices, findices, elemMat, ADD_VALUES));
      PetscCall(DMSwarmSortRestorePointsPerCell(dmc, cell, &numCIndices, &cindices));
      PetscCall(DMPlexRestoreClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
      PetscCall(PetscTabulationDestroy(&Tcoarse));
    }
  }
  PetscCall(DMSwarmSortRestoreAccess(dmc));
  PetscCall(PetscFree3(elemMat, rowIDXs, xi));
  PetscCall(PetscFree3(v0, J, invJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}
680: /* This creates a "mass matrix" between a finite element and particle space. If a finite element interpolant is given by
682: \hat f = \sum_i f_i \phi_i
684: and a particle function is given by
686: f = \sum_i w_i \delta(x - x_i)
688: then we want to require that
690: M \hat f = M_p f
692: where the particle mass matrix is given by
694: (M_p)_{ij} = \int \phi_i \delta(x - x_j)
The way Dave May does particles, they amount to quadrature weights rather than delta functions, so he has |J| in
his integral. We allow this with the boolean flag.
698: */
/* Assemble the particle mass matrix: preallocate, gather raw access to the particle
   coordinate fields, fill the entries, and finalize assembly. */
static PetscErrorCode DMSwarmComputeMassMatrix_Private(DM dmc, DM dmf, Mat mass, PetscBool useDeltaFunction, PetscCtx ctx)
{
  DMSwarmCellDM celldm;
  PetscInt      rStart, maxC = 0;
  PetscInt      Nfc;
  const char  **coordFields;
  PetscReal   **coordVals;
  PetscInt     *bs;

  PetscFunctionBegin;
  /* Counts nonzeros and returns the first owned row and widest particles-per-cell count */
  PetscCall(DMSwarmPreallocateMassMatrix(dmc, dmf, mass, &rStart, &maxC, ctx));

  /* Take access to every particle coordinate field for the fill pass */
  PetscCall(DMSwarmGetCellDMActive(dmc, &celldm));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCall(PetscMalloc2(Nfc, &coordVals, Nfc, &bs));
  for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmGetField(dmc, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
  PetscCall(DMSwarmFillMassMatrix(dmc, dmf, mass, rStart, maxC, useDeltaFunction, Nfc, bs, coordVals, ctx));
  for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmRestoreField(dmc, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
  PetscCall(PetscFree2(coordVals, bs));

  PetscCall(MatAssemblyBegin(mass, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mass, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
724: /* Returns empty matrix for use with SNES FD */
725: static PetscErrorCode DMCreateMatrix_Swarm(DM sw, Mat *m)
726: {
727: Vec field;
728: PetscInt size;
730: PetscFunctionBegin;
731: PetscCall(DMGetGlobalVector(sw, &field));
732: PetscCall(VecGetLocalSize(field, &size));
733: PetscCall(DMRestoreGlobalVector(sw, &field));
734: PetscCall(MatCreate(PETSC_COMM_WORLD, m));
735: PetscCall(MatSetFromOptions(*m));
736: PetscCall(MatSetSizes(*m, PETSC_DECIDE, PETSC_DECIDE, size, size));
737: PetscCall(MatSeqAIJSetPreallocation(*m, 1, NULL));
738: PetscCall(MatZeroEntries(*m));
739: PetscCall(MatAssemblyBegin(*m, MAT_FINAL_ASSEMBLY));
740: PetscCall(MatAssemblyEnd(*m, MAT_FINAL_ASSEMBLY));
741: PetscCall(MatShift(*m, 1.0));
742: PetscCall(MatSetDM(*m, sw));
743: PetscFunctionReturn(PETSC_SUCCESS);
744: }
/* FEM cols, Particle rows */
static PetscErrorCode DMCreateMassMatrix_Swarm(DM dmCoarse, DM dmFine, Mat *mass)
{
  DMSwarmCellDM celldm;
  PetscSection  gsf;
  PetscInt      m, n, Np, bs;
  void         *ctx;

  PetscFunctionBegin;
  PetscCall(DMSwarmGetCellDMActive(dmCoarse, &celldm));
  PetscCheck(celldm->Nf, PetscObjectComm((PetscObject)dmCoarse), PETSC_ERR_USER, "Active cell DM does not define any fields");
  /* Columns: constrained global FEM dofs of the fine DM */
  PetscCall(DMGetGlobalSection(dmFine, &gsf));
  PetscCall(PetscSectionGetConstrainedStorageSize(gsf, &m));
  /* Rows: local particle count times the cell DM block size */
  PetscCall(DMSwarmGetLocalSize(dmCoarse, &Np));
  PetscCall(DMSwarmCellDMGetBlockSize(celldm, dmCoarse, &bs));
  n = Np * bs;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dmCoarse), mass));
  PetscCall(MatSetSizes(*mass, n, m, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(*mass, dmCoarse->mattype));
  PetscCall(DMGetApplicationContext(dmFine, &ctx));

  /* PETSC_TRUE: particles act as delta functions (no |J| weighting) */
  PetscCall(DMSwarmComputeMassMatrix_Private(dmCoarse, dmFine, *mass, PETSC_TRUE, ctx));
  PetscCall(MatViewFromOptions(*mass, NULL, "-mass_mat_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*
  Assembles the block diagonal of M^T M, where M is the (particle x FEM) mass matrix:
  each entry is a sum of products of FEM basis functions evaluated at pairs of particle
  positions within the same cell. Off-diagonal (cell-adjacency) coupling is present but
  disabled below via #if 0.

  Input:
  + dmc              - the DMSWARM holding the particles
  . dmf              - the DMPLEX holding the FEM discretization
  . mass             - the output matrix; its local sizes must already be set
  . useDeltaFunction - if PETSC_TRUE treat particles as delta functions, otherwise scale by cell |J|
  - ctx              - user context; currently unused in this routine
*/
static PetscErrorCode DMSwarmComputeMassMatrixSquare_Private(DM dmc, DM dmf, Mat mass, PetscBool useDeltaFunction, PetscCtx ctx)
{
  const char   *name = "Mass Matrix Square"; /* label used only by the disabled DMPrintCellMatrix debug call */
  MPI_Comm      comm;
  DMSwarmCellDM celldm;
  PetscDS       prob;
  PetscSection  fsection, globalFSection;
  PetscHSetIJ   ht;
  PetscLayout   rLayout, colLayout;
  PetscInt     *dnz, *onz, *adj, depth, maxConeSize, maxSupportSize, maxAdjSize;
  PetscInt      locRows, locCols, rStart, colStart, colEnd, *rowIDXs;
  PetscReal    *xi, *v0, *J, *invJ, detJ = 1.0, v0ref[3] = {-1.0, -1.0, -1.0};
  PetscScalar  *elemMat, *elemMatSq;
  PetscInt      cdim, Nf, Nfc, cStart, cEnd, totDim, maxC = 0;
  const char  **coordFields;
  PetscReal   **coordVals;
  PetscInt     *bs;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mass, &comm));
  PetscCall(DMGetCoordinateDim(dmf, &cdim));
  PetscCall(DMGetDS(dmf, &prob));
  PetscCall(PetscDSGetNumFields(prob, &Nf));
  PetscCall(PetscDSGetTotalDimension(prob, &totDim));
  /* Scratch for affine cell geometry: origin v0, Jacobian J, and its inverse */
  PetscCall(PetscMalloc3(cdim, &v0, cdim * cdim, &J, cdim * cdim, &invJ));
  PetscCall(DMGetLocalSection(dmf, &fsection));
  PetscCall(DMGetGlobalSection(dmf, &globalFSection));
  PetscCall(DMPlexGetHeightStratum(dmf, 0, &cStart, &cEnd));
  PetscCall(MatGetLocalSize(mass, &locRows, &locCols));

  PetscCall(DMSwarmGetCellDMActive(dmc, &celldm));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCall(PetscMalloc2(Nfc, &coordVals, Nfc, &bs));

  /* Derive global column ownership [colStart, colEnd) from the matrix's local column count */
  PetscCall(PetscLayoutCreate(comm, &colLayout));
  PetscCall(PetscLayoutSetLocalSize(colLayout, locCols));
  PetscCall(PetscLayoutSetBlockSize(colLayout, 1));
  PetscCall(PetscLayoutSetUp(colLayout));
  PetscCall(PetscLayoutGetRange(colLayout, &colStart, &colEnd));
  PetscCall(PetscLayoutDestroy(&colLayout));

  /* Derive the first globally-owned row rStart from the matrix's local row count */
  PetscCall(PetscLayoutCreate(comm, &rLayout));
  PetscCall(PetscLayoutSetLocalSize(rLayout, locRows));
  PetscCall(PetscLayoutSetBlockSize(rLayout, 1));
  PetscCall(PetscLayoutSetUp(rLayout));
  PetscCall(PetscLayoutGetRange(rLayout, &rStart, NULL));
  PetscCall(PetscLayoutDestroy(&rLayout));

  /* Adjacency workspace; only used by the disabled off-diagonal code below */
  PetscCall(DMPlexGetDepth(dmf, &depth));
  PetscCall(DMPlexGetMaxSizes(dmf, &maxConeSize, &maxSupportSize));
  maxAdjSize = PetscPowInt(maxConeSize * maxSupportSize, depth);
  PetscCall(PetscMalloc1(maxAdjSize, &adj));

  PetscCall(PetscCalloc2(locRows, &dnz, locRows, &onz));
  PetscCall(PetscHSetIJCreate(&ht)); /* dedups (row, col) pairs in the disabled off-diagonal count */
  /* Count nonzeros
     This is just FVM++, but we cannot use the Plex P0 allocation since unknowns in a cell will not be contiguous
  */
  PetscCall(DMSwarmSortGetAccess(dmc));
  for (PetscInt cell = cStart; cell < cEnd; ++cell) {
    PetscInt *cindices;
    PetscInt  numCIndices;
#if 0
    PetscInt  adjSize = maxAdjSize, a, j;
#endif

    PetscCall(DMSwarmSortGetPointsPerCell(dmc, cell, &numCIndices, &cindices));
    maxC = PetscMax(maxC, numCIndices); /* track the largest per-cell particle count for scratch sizing */
    /* Diagonal block: every particle in the cell couples to every particle in the cell */
    for (PetscInt i = 0; i < numCIndices; ++i) dnz[cindices[i]] += numCIndices;
#if 0
    /* Off-diagonal blocks */
    PetscCall(DMPlexGetAdjacency(dmf, cell, &adjSize, &adj));
    for (a = 0; a < adjSize; ++a) {
      if (adj[a] >= cStart && adj[a] < cEnd && adj[a] != cell) {
        const PetscInt ncell = adj[a];
        PetscInt      *ncindices;
        PetscInt       numNCIndices;

        PetscCall(DMSwarmSortGetPointsPerCell(dmc, ncell, &numNCIndices, &ncindices));
        {
          PetscHashIJKey key;
          PetscBool      missing;

          for (i = 0; i < numCIndices; ++i) {
            key.i = cindices[i] + rStart; /* global rows (from Swarm) */
            if (key.i < 0) continue;
            for (j = 0; j < numNCIndices; ++j) {
              key.j = ncindices[j] + rStart; /* global column (from Swarm) */
              if (key.j < 0) continue;
              PetscCall(PetscHSetIJQueryAdd(ht, key, &missing));
              if (missing) {
                if ((key.j >= colStart) && (key.j < colEnd)) ++dnz[key.i - rStart];
                else ++onz[key.i - rStart];
              }
            }
          }
        }
        PetscCall(DMSwarmSortRestorePointsPerCell(dmc, ncell, &numNCIndices, &ncindices));
      }
    }
#endif
    PetscCall(DMSwarmSortRestorePointsPerCell(dmc, cell, &numCIndices, &cindices));
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  PetscCall(MatXAIJSetPreallocation(mass, 1, dnz, onz, NULL, NULL));
  PetscCall(MatSetOption(mass, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  PetscCall(PetscFree2(dnz, onz));
  /* elemMat is the per-cell (particle x FEM-dof) block; elemMatSq = elemMat elemMat^T is the assembled block */
  PetscCall(PetscMalloc4(maxC * totDim, &elemMat, maxC * maxC, &elemMatSq, maxC, &rowIDXs, maxC * cdim, &xi));
  /* Fill in values
     Each entry is a sum of terms \phi_i(x_p) \phi_i(x_q)
     Start just by producing block diagonal
     Could loop over adjacent cells
       Produce neighboring element matrix
       TODO Determine which columns and rows correspond to shared dual vector
         Do MatMatMult with rectangular matrices
         Insert block
  */
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscTabulation Tcoarse;
    PetscObject     obj;
    PetscInt        Nc;

    PetscCall(PetscDSGetDiscretization(prob, field, &obj));
    PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    PetscCheck(Nc == 1, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "Can only interpolate a scalar field from particles, Nc = %" PetscInt_FMT, Nc);
    for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmGetField(dmc, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
    for (PetscInt cell = cStart; cell < cEnd; ++cell) {
      PetscInt *findices, *cindices;
      PetscInt  numFIndices, numCIndices;

      /* TODO: Use DMField instead of assuming affine */
      PetscCall(DMPlexComputeCellGeometryFEM(dmf, cell, NULL, v0, J, invJ, &detJ));
      PetscCall(DMPlexGetClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
      PetscCall(DMSwarmSortGetPointsPerCell(dmc, cell, &numCIndices, &cindices));
      /* Map each particle's real coordinates (gathered across the Nfc coordinate fields) to reference coordinates */
      for (PetscInt p = 0; p < numCIndices; ++p) {
        PetscReal xr[8]; /* assumes cdim <= 8 — enforced implicitly by the check below; TODO confirm upstream bound */
        PetscInt  off = 0;

        for (PetscInt i = 0; i < Nfc; ++i) {
          for (PetscInt b = 0; b < bs[i]; ++b, ++off) xr[off] = coordVals[i][cindices[p] * bs[i] + b];
        }
        PetscCheck(off == cdim, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "The total block size of coordinates is %" PetscInt_FMT " != %" PetscInt_FMT " the DM coordinate dimension", off, cdim);
        CoordinatesRealToRef(cdim, cdim, v0ref, v0, invJ, xr, &xi[p * cdim]);
      }
      PetscCall(PetscFECreateTabulation((PetscFE)obj, 1, numCIndices, xi, 0, &Tcoarse));
      /* Get elemMat entries by multiplying by weight */
      PetscCall(PetscArrayzero(elemMat, numCIndices * totDim));
      for (PetscInt i = 0; i < numFIndices; ++i) {
        for (PetscInt p = 0; p < numCIndices; ++p) {
          for (PetscInt c = 0; c < Nc; ++c) {
            /* B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c */
            elemMat[p * numFIndices + i] += Tcoarse->T[0][(p * numFIndices + i) * Nc + c] * (useDeltaFunction ? 1.0 : detJ);
          }
        }
      }
      PetscCall(PetscTabulationDestroy(&Tcoarse));
      for (PetscInt p = 0; p < numCIndices; ++p) rowIDXs[p] = cindices[p] + rStart;
      if (0) PetscCall(DMPrintCellMatrix(cell, name, 1, numCIndices, elemMat));
      /* Block diagonal: elemMatSq = elemMat^T elemMat contracted over FEM dofs, via GEMM */
      if (numCIndices) {
        PetscBLASInt blasn, blask;
        PetscScalar  one = 1.0, zero = 0.0;

        PetscCall(PetscBLASIntCast(numCIndices, &blasn));
        PetscCall(PetscBLASIntCast(numFIndices, &blask));
        PetscCallBLAS("BLASgemm", BLASgemm_("T", "N", &blasn, &blasn, &blask, &one, elemMat, &blask, elemMat, &blask, &zero, elemMatSq, &blasn));
      }
      PetscCall(MatSetValues(mass, numCIndices, rowIDXs, numCIndices, rowIDXs, elemMatSq, ADD_VALUES));
      /* TODO off-diagonal */
      PetscCall(DMSwarmSortRestorePointsPerCell(dmc, cell, &numCIndices, &cindices));
      PetscCall(DMPlexRestoreClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
    }
    for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmRestoreField(dmc, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
  }
  PetscCall(PetscFree4(elemMat, elemMatSq, rowIDXs, xi));
  PetscCall(PetscFree(adj));
  PetscCall(DMSwarmSortRestoreAccess(dmc));
  PetscCall(PetscFree3(v0, J, invJ));
  PetscCall(PetscFree2(coordVals, bs));
  PetscCall(MatAssemblyBegin(mass, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mass, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
957: /*@
958: DMSwarmCreateMassMatrixSquare - Creates the block-diagonal of the square, M^T_p M_p, of the particle mass matrix M_p
960: Collective
962: Input Parameters:
963: + dmCoarse - a `DMSWARM`
964: - dmFine - a `DMPLEX`
966: Output Parameter:
967: . mass - the square of the particle mass matrix
969: Level: advanced
971: Note:
972: We only compute the block diagonal since this provides a good preconditioner and is completely local. It would be possible in the
973: future to compute the full normal equations.
975: .seealso: `DM`, `DMSWARM`, `DMCreateMassMatrix()`
976: @*/
977: PetscErrorCode DMSwarmCreateMassMatrixSquare(DM dmCoarse, DM dmFine, Mat *mass)
978: {
979: PetscInt n;
980: void *ctx;
982: PetscFunctionBegin;
983: PetscCall(DMSwarmGetLocalSize(dmCoarse, &n));
984: PetscCall(MatCreate(PetscObjectComm((PetscObject)dmCoarse), mass));
985: PetscCall(MatSetSizes(*mass, n, n, PETSC_DETERMINE, PETSC_DETERMINE));
986: PetscCall(MatSetType(*mass, dmCoarse->mattype));
987: PetscCall(DMGetApplicationContext(dmFine, &ctx));
989: PetscCall(DMSwarmComputeMassMatrixSquare_Private(dmCoarse, dmFine, *mass, PETSC_TRUE, ctx));
990: PetscCall(MatViewFromOptions(*mass, NULL, "-mass_sq_mat_view"));
991: PetscFunctionReturn(PETSC_SUCCESS);
992: }
994: /* This creates a "gradient matrix" between a finite element and particle space, which is meant to enforce a weak divergence condition on the particle space. We are looking for a finite element field that has the same divergence as our particle field, so that
996: \int_X \psi_i \nabla \cdot \hat f = \int_X \psi_i \nabla \cdot f
998: and then integrate by parts
1000: \int_X \nabla \psi_i \cdot \hat f = \int_X \nabla \psi_i \cdot f
1002: where \psi is from a scalar FE space. If a finite element interpolant is given by
1004: \hat f^c = \sum_i f_i \phi^c_i
1006: and a particle function is given by
1008: f^c = \sum_p f^c_p \delta(x - x_p)
1010: then we want to require that
1012: D_f \hat f = D_p f
1014: where the gradient matrices are given by
1016: (D_f)_{i(jc)} = \int \partial_c \psi_i \phi_j
1017: (D_p)_{i(jc)} = \int \partial_c \psi_i \delta(x - x_j)
1019: Thus we need two finite element spaces, a scalar and a vector. The vector space holds the representer for the
1020: vector particle field. The scalar space holds the output of D_p or D_f, which is the weak divergence of the field.
  The way Dave May does particles, they amount to quadrature weights rather than delta functions, so he has |J| in
  his integral. We allow this with the boolean flag.
1024: */
/*
  Assembles the particle-side gradient matrix D_p (see the discussion above): rows are
  particle components (Np * cdim of them), columns are FEM dofs of the scalar space.
  Entry ((p,d), i) holds \partial_d \psi_i evaluated at particle p's position.

  Input:
  + sw               - the DMSWARM holding the particles
  . dm               - the DMPLEX holding the scalar FEM space
  . derv             - the output matrix; local row count must already be set
  . useDeltaFunction - if PETSC_TRUE treat particles as delta functions, otherwise scale by cell |J|
  - ctx              - user context; currently unused in this routine

  NOTE(review): no preallocation is performed here, so assembly relies on the Mat type's
  default insertion behavior — confirm acceptable for the intended matrix types.
*/
static PetscErrorCode DMSwarmComputeGradientMatrix_Private(DM sw, DM dm, Mat derv, PetscBool useDeltaFunction, PetscCtx ctx)
{
  const char   *name = "Derivative Matrix"; /* label used only by the disabled DMPrintCellMatrix debug call */
  MPI_Comm      comm;
  DMSwarmCellDM celldm;
  PetscDS       ds;
  PetscSection  fsection, globalFSection;
  PetscLayout   rLayout;
  PetscInt      locRows, rStart, *rowIDXs;
  PetscReal    *xi, *v0, *J, *invJ, detJ = 1.0, v0ref[3] = {-1.0, -1.0, -1.0};
  PetscScalar  *elemMat;
  PetscInt      cdim, Nf, Nfc, cStart, cEnd, totDim, maxNpc = 0, totNc = 0;
  const char  **coordFields;
  PetscReal   **coordVals;
  PetscInt     *bs;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)derv, &comm));
  PetscCall(DMGetCoordinateDim(dm, &cdim));
  PetscCall(DMGetDS(dm, &ds));
  PetscCall(PetscDSGetNumFields(ds, &Nf));
  PetscCheck(Nf == 1, comm, PETSC_ERR_SUP, "Currently, we only support a single field");
  PetscCall(PetscDSGetTotalDimension(ds, &totDim));
  /* Scratch for affine cell geometry: origin v0, Jacobian J, and its inverse */
  PetscCall(PetscMalloc3(cdim, &v0, cdim * cdim, &J, cdim * cdim, &invJ));
  PetscCall(DMGetLocalSection(dm, &fsection));
  PetscCall(DMGetGlobalSection(dm, &globalFSection));
  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
  PetscCall(MatGetLocalSize(derv, &locRows, NULL));

  PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCheck(Nfc == 1, comm, PETSC_ERR_SUP, "Currently, we only support a single field");
  PetscCall(PetscMalloc2(Nfc, &coordVals, Nfc, &bs));

  /* Derive the first globally-owned row rStart from the matrix's local row count */
  PetscCall(PetscLayoutCreate(comm, &rLayout));
  PetscCall(PetscLayoutSetLocalSize(rLayout, locRows));
  PetscCall(PetscLayoutSetBlockSize(rLayout, cdim));
  PetscCall(PetscLayoutSetUp(rLayout));
  PetscCall(PetscLayoutGetRange(rLayout, &rStart, NULL));
  PetscCall(PetscLayoutDestroy(&rLayout));

  /* The scalar FEM space must have exactly one component in total */
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscObject obj;
    PetscInt    Nc;

    PetscCall(PetscDSGetDiscretization(ds, field, &obj));
    PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    totNc += Nc;
  }
  PetscCheck(totNc == 1, comm, PETSC_ERR_ARG_WRONG, "The number of field components %" PetscInt_FMT " != 1", totNc);
  /* count non-zeros */
  /* NOTE(review): this pass only records the maximum particles-per-cell (maxNpc) for scratch
     sizing; no dnz/onz preallocation arrays are built here */
  PetscCall(DMSwarmSortGetAccess(sw));
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscObject obj;
    PetscInt    Nc;

    PetscCall(PetscDSGetDiscretization(ds, field, &obj));
    PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    for (PetscInt cell = cStart; cell < cEnd; ++cell) {
      PetscInt *pind;
      PetscInt  Npc;

      PetscCall(DMSwarmSortGetPointsPerCell(sw, cell, &Npc, &pind));
      maxNpc = PetscMax(maxNpc, Npc);
      PetscCall(DMSwarmSortRestorePointsPerCell(sw, cell, &Npc, &pind));
    }
  }
  PetscCall(PetscMalloc3(maxNpc * cdim * totDim, &elemMat, maxNpc * cdim, &rowIDXs, maxNpc * cdim, &xi));
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscTabulation Tcoarse;
    PetscFE         fe;

    PetscCall(PetscDSGetDiscretization(ds, field, (PetscObject *)&fe));
    for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmGetField(sw, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
    for (PetscInt cell = cStart; cell < cEnd; ++cell) {
      PetscInt *findices, *pind;
      PetscInt  numFIndices, Npc;

      /* TODO: Use DMField instead of assuming affine */
      PetscCall(DMPlexComputeCellGeometryFEM(dm, cell, NULL, v0, J, invJ, &detJ));
      PetscCall(DMPlexGetClosureIndices(dm, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
      PetscCall(DMSwarmSortGetPointsPerCell(sw, cell, &Npc, &pind));
      /* Map each particle's real coordinates to reference coordinates in xi */
      for (PetscInt j = 0; j < Npc; ++j) {
        PetscReal xr[8]; /* assumes cdim <= 8 — enforced implicitly by the check below; TODO confirm upstream bound */
        PetscInt  off = 0;

        for (PetscInt i = 0; i < Nfc; ++i) {
          for (PetscInt b = 0; b < bs[i]; ++b, ++off) xr[off] = coordVals[i][pind[j] * bs[i] + b];
        }
        PetscCheck(off == cdim, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "The total block size of coordinates is %" PetscInt_FMT " != %" PetscInt_FMT " the DM coordinate dimension", off, cdim);
        CoordinatesRealToRef(cdim, cdim, v0ref, v0, invJ, xr, &xi[j * cdim]);
      }
      PetscCall(PetscFECreateTabulation(fe, 1, Npc, xi, 1, &Tcoarse)); /* K=1: first derivatives are tabulated */
      /* Get elemMat entries by multiplying by weight */
      PetscCall(PetscArrayzero(elemMat, Npc * cdim * totDim));
      for (PetscInt i = 0; i < numFIndices; ++i) {
        for (PetscInt j = 0; j < Npc; ++j) {
          /* D[((p*pdim + i)*Nc + c)*cdim + d] is the value at point p for basis function i, component c, derivative d */
          /* Pull reference-space derivatives back to real space with invJ; note xi[0..cdim-1] is
             reused as scratch here — safe only because the tabulation above is already built */
          for (PetscInt d = 0; d < cdim; ++d) {
            xi[d] = 0.;
            for (PetscInt e = 0; e < cdim; ++e) xi[d] += invJ[e * cdim + d] * Tcoarse->T[1][(j * numFIndices + i) * cdim + e];
            elemMat[(j * cdim + d) * numFIndices + i] += xi[d] * (useDeltaFunction ? 1.0 : detJ);
          }
        }
      }
      /* Global row index for each particle component: interleaved (particle, derivative) */
      for (PetscInt j = 0; j < Npc; ++j)
        for (PetscInt d = 0; d < cdim; ++d) rowIDXs[j * cdim + d] = pind[j] * cdim + d + rStart;
      if (0) PetscCall(DMPrintCellMatrix(cell, name, Npc * cdim, numFIndices, elemMat));
      PetscCall(MatSetValues(derv, Npc * cdim, rowIDXs, numFIndices, findices, elemMat, ADD_VALUES));
      PetscCall(DMSwarmSortRestorePointsPerCell(sw, cell, &Npc, &pind));
      PetscCall(DMPlexRestoreClosureIndices(dm, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
      PetscCall(PetscTabulationDestroy(&Tcoarse));
    }
    for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmRestoreField(sw, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
  }
  PetscCall(PetscFree3(elemMat, rowIDXs, xi));
  PetscCall(DMSwarmSortRestoreAccess(sw));
  PetscCall(PetscFree3(v0, J, invJ));
  PetscCall(PetscFree2(coordVals, bs));
  PetscCall(MatAssemblyBegin(derv, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(derv, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1149: /* FEM cols: this is a scalar space
1150: Particle rows: this is a vector space that contracts with the derivative
1151: */
1152: static PetscErrorCode DMCreateGradientMatrix_Swarm(DM sw, DM dm, Mat *derv)
1153: {
1154: DMSwarmCellDM celldm;
1155: PetscSection gs;
1156: PetscInt cdim, m, n, Np, bs;
1157: void *ctx;
1158: MPI_Comm comm;
1160: PetscFunctionBegin;
1161: PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));
1162: PetscCall(DMGetCoordinateDim(dm, &cdim));
1163: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
1164: PetscCheck(celldm->Nf, comm, PETSC_ERR_USER, "Active cell DM does not define any fields");
1165: PetscCall(DMGetGlobalSection(dm, &gs));
1166: PetscCall(PetscSectionGetConstrainedStorageSize(gs, &n));
1167: PetscCall(DMSwarmGetLocalSize(sw, &Np));
1168: PetscCall(DMSwarmCellDMGetBlockSize(celldm, sw, &bs));
1169: PetscCheck(cdim == bs, comm, PETSC_ERR_ARG_WRONG, "Coordinate dimension %" PetscInt_FMT " != %" PetscInt_FMT " swarm field block size", cdim, bs);
1170: m = Np * bs;
1171: PetscCall(MatCreate(PetscObjectComm((PetscObject)sw), derv));
1172: PetscCall(PetscObjectSetName((PetscObject)*derv, "Swarm Derivative Matrix"));
1173: PetscCall(MatSetSizes(*derv, m, n, PETSC_DETERMINE, PETSC_DETERMINE));
1174: PetscCall(MatSetType(*derv, sw->mattype));
1175: PetscCall(DMGetApplicationContext(dm, &ctx));
1177: PetscCall(DMSwarmComputeGradientMatrix_Private(sw, dm, *derv, PETSC_TRUE, ctx));
1178: PetscCall(MatViewFromOptions(*derv, NULL, "-gradient_mat_view"));
1179: PetscFunctionReturn(PETSC_SUCCESS);
1180: }
1182: /*@
1183: DMSwarmCreateGlobalVectorFromField - Creates a `Vec` object sharing the array associated with a given field
1185: Collective
1187: Input Parameters:
1188: + dm - a `DMSWARM`
1189: - fieldname - the textual name given to a registered field
1191: Output Parameter:
1192: . vec - the vector
1194: Level: beginner
1196: Note:
1197: The vector must be returned using a matching call to `DMSwarmDestroyGlobalVectorFromField()`.
1199: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmDestroyGlobalVectorFromField()`
1200: @*/
1201: PetscErrorCode DMSwarmCreateGlobalVectorFromField(DM dm, const char fieldname[], Vec *vec)
1202: {
1203: MPI_Comm comm = PetscObjectComm((PetscObject)dm);
1205: PetscFunctionBegin;
1207: PetscCall(DMSwarmCreateVectorFromField_Private(dm, fieldname, comm, vec));
1208: PetscFunctionReturn(PETSC_SUCCESS);
1209: }
1211: /*@
1212: DMSwarmDestroyGlobalVectorFromField - Destroys the `Vec` object which share the array associated with a given field
1214: Collective
1216: Input Parameters:
1217: + dm - a `DMSWARM`
1218: - fieldname - the textual name given to a registered field
1220: Output Parameter:
1221: . vec - the vector
1223: Level: beginner
1225: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmCreateGlobalVectorFromField()`
1226: @*/
PetscErrorCode DMSwarmDestroyGlobalVectorFromField(DM dm, const char fieldname[], Vec *vec)
{
  PetscFunctionBegin;
  /* Release the Vec that shares the field's storage; delegates to the private helper */
  PetscCall(DMSwarmDestroyVectorFromField_Private(dm, fieldname, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1235: /*@
1236: DMSwarmCreateLocalVectorFromField - Creates a `Vec` object sharing the array associated with a given field
1238: Collective
1240: Input Parameters:
1241: + dm - a `DMSWARM`
1242: - fieldname - the textual name given to a registered field
1244: Output Parameter:
1245: . vec - the vector
1247: Level: beginner
1249: Note:
  The vector must be returned using a matching call to `DMSwarmDestroyLocalVectorFromField()`.
1252: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmDestroyLocalVectorFromField()`
1253: @*/
1254: PetscErrorCode DMSwarmCreateLocalVectorFromField(DM dm, const char fieldname[], Vec *vec)
1255: {
1256: MPI_Comm comm = PETSC_COMM_SELF;
1258: PetscFunctionBegin;
1259: PetscCall(DMSwarmCreateVectorFromField_Private(dm, fieldname, comm, vec));
1260: PetscFunctionReturn(PETSC_SUCCESS);
1261: }
1263: /*@
1264: DMSwarmDestroyLocalVectorFromField - Destroys the `Vec` object which share the array associated with a given field
1266: Collective
1268: Input Parameters:
1269: + dm - a `DMSWARM`
1270: - fieldname - the textual name given to a registered field
1272: Output Parameter:
1273: . vec - the vector
1275: Level: beginner
1277: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmCreateLocalVectorFromField()`
1278: @*/
PetscErrorCode DMSwarmDestroyLocalVectorFromField(DM dm, const char fieldname[], Vec *vec)
{
  PetscFunctionBegin;
  /* Release the local Vec that shares the field's storage; delegates to the private helper */
  PetscCall(DMSwarmDestroyVectorFromField_Private(dm, fieldname, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1287: /*@
1288: DMSwarmCreateGlobalVectorFromFields - Creates a `Vec` object sharing the array associated with a given field set
1290: Collective
1292: Input Parameters:
1293: + dm - a `DMSWARM`
1294: . Nf - the number of fields
1295: - fieldnames - the textual names given to the registered fields
1297: Output Parameter:
1298: . vec - the vector
1300: Level: beginner
1302: Notes:
1303: The vector must be returned using a matching call to `DMSwarmDestroyGlobalVectorFromFields()`.
1305: This vector is copyin-copyout, rather than a direct pointer like `DMSwarmCreateGlobalVectorFromField()`
1307: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmDestroyGlobalVectorFromFields()`
1308: @*/
1309: PetscErrorCode DMSwarmCreateGlobalVectorFromFields(DM dm, PetscInt Nf, const char *fieldnames[], Vec *vec)
1310: {
1311: MPI_Comm comm = PetscObjectComm((PetscObject)dm);
1313: PetscFunctionBegin;
1315: PetscCall(DMSwarmCreateVectorFromFields_Private(dm, Nf, fieldnames, comm, vec));
1316: PetscFunctionReturn(PETSC_SUCCESS);
1317: }
1319: /*@
1320: DMSwarmDestroyGlobalVectorFromFields - Destroys the `Vec` object which share the array associated with a given field set
1322: Collective
1324: Input Parameters:
1325: + dm - a `DMSWARM`
1326: . Nf - the number of fields
1327: - fieldnames - the textual names given to the registered fields
1329: Output Parameter:
1330: . vec - the vector
1332: Level: beginner
1334: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmCreateGlobalVectorFromField()`
1335: @*/
PetscErrorCode DMSwarmDestroyGlobalVectorFromFields(DM dm, PetscInt Nf, const char *fieldnames[], Vec *vec)
{
  PetscFunctionBegin;
  /* Return the multi-field vector; delegates to the private helper */
  PetscCall(DMSwarmDestroyVectorFromFields_Private(dm, Nf, fieldnames, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1344: /*@
1345: DMSwarmCreateLocalVectorFromFields - Creates a `Vec` object sharing the array associated with a given field set
1347: Collective
1349: Input Parameters:
1350: + dm - a `DMSWARM`
1351: . Nf - the number of fields
1352: - fieldnames - the textual names given to the registered fields
1354: Output Parameter:
1355: . vec - the vector
1357: Level: beginner
1359: Notes:
  The vector must be returned using a matching call to `DMSwarmDestroyLocalVectorFromFields()`.
1362: This vector is copyin-copyout, rather than a direct pointer like `DMSwarmCreateLocalVectorFromField()`
1364: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmDestroyLocalVectorFromField()`
1365: @*/
1366: PetscErrorCode DMSwarmCreateLocalVectorFromFields(DM dm, PetscInt Nf, const char *fieldnames[], Vec *vec)
1367: {
1368: MPI_Comm comm = PETSC_COMM_SELF;
1370: PetscFunctionBegin;
1371: PetscCall(DMSwarmCreateVectorFromFields_Private(dm, Nf, fieldnames, comm, vec));
1372: PetscFunctionReturn(PETSC_SUCCESS);
1373: }
1375: /*@
1376: DMSwarmDestroyLocalVectorFromFields - Destroys the `Vec` object which share the array associated with a given field set
1378: Collective
1380: Input Parameters:
1381: + dm - a `DMSWARM`
1382: . Nf - the number of fields
1383: - fieldnames - the textual names given to the registered fields
1385: Output Parameter:
1386: . vec - the vector
1388: Level: beginner
1390: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmCreateLocalVectorFromFields()`
1391: @*/
PetscErrorCode DMSwarmDestroyLocalVectorFromFields(DM dm, PetscInt Nf, const char *fieldnames[], Vec *vec)
{
  PetscFunctionBegin;
  /* Return the local multi-field vector; delegates to the private helper */
  PetscCall(DMSwarmDestroyVectorFromFields_Private(dm, Nf, fieldnames, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1400: /*@
1401: DMSwarmInitializeFieldRegister - Initiates the registration of fields to a `DMSWARM`
1403: Collective
1405: Input Parameter:
1406: . dm - a `DMSWARM`
1408: Level: beginner
1410: Note:
1411: After all fields have been registered, you must call `DMSwarmFinalizeFieldRegister()`.
1413: .seealso: `DM`, `DMSWARM`, `DMSwarmFinalizeFieldRegister()`, `DMSwarmRegisterPetscDatatypeField()`,
1414: `DMSwarmRegisterUserStructField()`, `DMSwarmRegisterUserDatatypeField()`
1415: @*/
1416: PetscErrorCode DMSwarmInitializeFieldRegister(DM dm)
1417: {
1418: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1420: PetscFunctionBegin;
1421: if (!swarm->field_registration_initialized) {
1422: swarm->field_registration_initialized = PETSC_TRUE;
1423: PetscCall(DMSwarmRegisterPetscDatatypeField(dm, DMSwarmField_pid, 1, PETSC_INT64)); /* unique identifier */
1424: PetscCall(DMSwarmRegisterPetscDatatypeField(dm, DMSwarmField_rank, 1, PETSC_INT)); /* used for communication */
1425: }
1426: PetscFunctionReturn(PETSC_SUCCESS);
1427: }
1429: /*@
1430: DMSwarmFinalizeFieldRegister - Finalizes the registration of fields to a `DMSWARM`
1432: Collective
1434: Input Parameter:
1435: . dm - a `DMSWARM`
1437: Level: beginner
1439: Note:
1440: After `DMSwarmFinalizeFieldRegister()` has been called, no new fields can be defined on the `DMSWARM`.
1442: .seealso: `DM`, `DMSWARM`, `DMSwarmInitializeFieldRegister()`, `DMSwarmRegisterPetscDatatypeField()`,
1443: `DMSwarmRegisterUserStructField()`, `DMSwarmRegisterUserDatatypeField()`
1444: @*/
1445: PetscErrorCode DMSwarmFinalizeFieldRegister(DM dm)
1446: {
1447: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1449: PetscFunctionBegin;
1450: if (!swarm->field_registration_finalized) PetscCall(DMSwarmDataBucketFinalize(swarm->db));
1451: swarm->field_registration_finalized = PETSC_TRUE;
1452: PetscFunctionReturn(PETSC_SUCCESS);
1453: }
1455: /*@
1456: DMSwarmSetLocalSizes - Sets the length of all registered fields on the `DMSWARM`
1458: Not Collective
1460: Input Parameters:
1461: + sw - a `DMSWARM`
1462: . nlocal - the length of each registered field
1463: - buffer - the length of the buffer used to efficient dynamic re-sizing
1465: Level: beginner
1467: .seealso: `DM`, `DMSWARM`, `DMSwarmGetLocalSize()`
1468: @*/
1469: PetscErrorCode DMSwarmSetLocalSizes(DM sw, PetscInt nlocal, PetscInt buffer)
1470: {
1471: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1472: PetscMPIInt rank;
1473: PetscInt *rankval;
1475: PetscFunctionBegin;
1476: PetscCall(PetscLogEventBegin(DMSWARM_SetSizes, 0, 0, 0, 0));
1477: PetscCall(DMSwarmDataBucketSetSizes(swarm->db, nlocal, buffer));
1478: PetscCall(PetscLogEventEnd(DMSWARM_SetSizes, 0, 0, 0, 0));
1480: // Initialize values in pid and rank placeholders
1481: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)sw), &rank));
1482: PetscCall(DMSwarmGetField(sw, DMSwarmField_rank, NULL, NULL, (void **)&rankval));
1483: for (PetscInt p = 0; p < nlocal; p++) rankval[p] = rank;
1484: PetscCall(DMSwarmRestoreField(sw, DMSwarmField_rank, NULL, NULL, (void **)&rankval));
1485: /* TODO: [pid - use MPI_Scan] */
1486: PetscFunctionReturn(PETSC_SUCCESS);
1487: }
1489: /*@
1490: DMSwarmSetCellDM - Attaches a `DM` to a `DMSWARM`
1492: Collective
1494: Input Parameters:
1495: + sw - a `DMSWARM`
1496: - dm - the `DM` to attach to the `DMSWARM`
1498: Level: beginner
1500: Note:
1501: The attached `DM` (dm) will be queried for point location and
1502: neighbor MPI-rank information if `DMSwarmMigrate()` is called.
1504: .seealso: `DM`, `DMSWARM`, `DMSwarmSetType()`, `DMSwarmGetCellDM()`, `DMSwarmMigrate()`
1505: @*/
1506: PetscErrorCode DMSwarmSetCellDM(DM sw, DM dm)
1507: {
1508: DMSwarmCellDM celldm;
1509: const char *name;
1510: char *coordName;
1512: PetscFunctionBegin;
1515: PetscCall(PetscStrallocpy(DMSwarmPICField_coor, &coordName));
1516: PetscCall(DMSwarmCellDMCreate(dm, 0, NULL, 1, (const char **)&coordName, &celldm));
1517: PetscCall(PetscFree(coordName));
1518: PetscCall(PetscObjectGetName((PetscObject)celldm, &name));
1519: PetscCall(DMSwarmAddCellDM(sw, celldm));
1520: PetscCall(DMSwarmCellDMDestroy(&celldm));
1521: PetscCall(DMSwarmSetCellDMActive(sw, name));
1522: PetscFunctionReturn(PETSC_SUCCESS);
1523: }
1525: /*@
1526: DMSwarmGetCellDM - Fetches the active cell `DM`
1528: Collective
1530: Input Parameter:
1531: . sw - a `DMSWARM`
1533: Output Parameter:
1534: . dm - the active `DM` for the `DMSWARM`
1536: Level: beginner
1538: .seealso: `DM`, `DMSWARM`, `DMSwarmSetCellDM()`
1539: @*/
1540: PetscErrorCode DMSwarmGetCellDM(DM sw, DM *dm)
1541: {
1542: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1543: DMSwarmCellDM celldm;
1545: PetscFunctionBegin;
1547: PetscCall(PetscObjectListFind(swarm->cellDMs, swarm->activeCellDM, (PetscObject *)&celldm));
1548: PetscCheck(celldm, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_WRONG, "There is no cell DM named %s in this Swarm", swarm->activeCellDM);
1549: PetscCall(DMSwarmCellDMGetDM(celldm, dm));
1550: PetscFunctionReturn(PETSC_SUCCESS);
1551: }
1553: /*@C
1554: DMSwarmGetCellDMNames - Get the list of cell `DM` names
1556: Not collective
1558: Input Parameter:
1559: . sw - a `DMSWARM`
1561: Output Parameters:
1562: + Ndm - the number of `DMSwarmCellDM` in the `DMSWARM`
1563: - celldms - the name of each `DMSwarmCellDM`
1565: Level: beginner
1567: .seealso: `DM`, `DMSWARM`, `DMSwarmSetCellDM()`, `DMSwarmGetCellDMByName()`
1568: @*/
1569: PetscErrorCode DMSwarmGetCellDMNames(DM sw, PetscInt *Ndm, const char **celldms[])
1570: {
1571: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1572: PetscObjectList next = swarm->cellDMs;
1573: PetscInt n = 0;
1575: PetscFunctionBegin;
1577: PetscAssertPointer(Ndm, 2);
1578: PetscAssertPointer(celldms, 3);
1579: while (next) {
1580: next = next->next;
1581: ++n;
1582: }
1583: PetscCall(PetscMalloc1(n, celldms));
1584: next = swarm->cellDMs;
1585: n = 0;
1586: while (next) {
1587: (*celldms)[n] = (const char *)next->obj->name;
1588: next = next->next;
1589: ++n;
1590: }
1591: *Ndm = n;
1592: PetscFunctionReturn(PETSC_SUCCESS);
1593: }
1595: /*@
1596: DMSwarmSetCellDMActive - Activates a cell `DM` for a `DMSWARM`
1598: Collective
1600: Input Parameters:
1601: + sw - a `DMSWARM`
1602: - name - name of the cell `DM` to active for the `DMSWARM`
1604: Level: beginner
1606: Note:
1607: The attached `DM` (dmcell) will be queried for point location and
1608: neighbor MPI-rank information if `DMSwarmMigrate()` is called.
1610: .seealso: `DM`, `DMSWARM`, `DMSwarmCellDM`, `DMSwarmSetType()`, `DMSwarmAddCellDM()`, `DMSwarmSetCellDM()`, `DMSwarmMigrate()`
1611: @*/
1612: PetscErrorCode DMSwarmSetCellDMActive(DM sw, const char name[])
1613: {
1614: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1615: DMSwarmCellDM celldm;
1617: PetscFunctionBegin;
1619: PetscCall(PetscInfo(sw, "Setting cell DM to %s\n", name));
1620: PetscCall(PetscFree(swarm->activeCellDM));
1621: PetscCall(PetscStrallocpy(name, (char **)&swarm->activeCellDM));
1622: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
1623: PetscFunctionReturn(PETSC_SUCCESS);
1624: }
/*@
  DMSwarmGetCellDMActive - Returns the active cell `DM` for a `DMSWARM`

  Collective

  Input Parameter:
. sw - a `DMSWARM`

  Output Parameter:
. celldm - the active `DMSwarmCellDM`

  Level: beginner

.seealso: `DM`, `DMSWARM`, `DMSwarmCellDM`, `DMSwarmSetType()`, `DMSwarmAddCellDM()`, `DMSwarmSetCellDM()`, `DMSwarmMigrate()`
@*/
PetscErrorCode DMSwarmGetCellDMActive(DM sw, DMSwarmCellDM *celldm)
{
  DM_Swarm *swarm = (DM_Swarm *)sw->data;

  PetscFunctionBegin;
  PetscAssertPointer(celldm, 2);
  /* The active cell DM is tracked by name; it is set by DMSwarmSetCellDMActive() (also invoked from DMSwarmAddCellDM()) */
  PetscCheck(swarm->activeCellDM, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_WRONGSTATE, "Swarm has no active cell DM");
  /* Look the name up in the registered cell-DM list; a stale name yields NULL */
  PetscCall(PetscObjectListFind(swarm->cellDMs, swarm->activeCellDM, (PetscObject *)celldm));
  PetscCheck(*celldm, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_WRONGSTATE, "Swarm has no valid cell DM for %s", swarm->activeCellDM);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1654: /*@C
1655: DMSwarmGetCellDMByName - Get a `DMSwarmCellDM` from its name
1657: Not collective
1659: Input Parameters:
1660: + sw - a `DMSWARM`
1661: - name - the name
1663: Output Parameter:
1664: . celldm - the `DMSwarmCellDM`, or `NULL` if the name is unknown
1666: Level: beginner
1668: .seealso: `DM`, `DMSWARM`, `DMSwarmSetCellDM()`, `DMSwarmGetCellDMNames()`
1669: @*/
1670: PetscErrorCode DMSwarmGetCellDMByName(DM sw, const char name[], DMSwarmCellDM *celldm)
1671: {
1672: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1674: PetscFunctionBegin;
1676: PetscAssertPointer(name, 2);
1677: PetscAssertPointer(celldm, 3);
1678: PetscCall(PetscObjectListFind(swarm->cellDMs, name, (PetscObject *)celldm));
1679: PetscFunctionReturn(PETSC_SUCCESS);
1680: }
1682: /*@
1683: DMSwarmAddCellDM - Adds a cell `DM` to the `DMSWARM`
1685: Collective
1687: Input Parameters:
1688: + sw - a `DMSWARM`
1689: - celldm - the `DMSwarmCellDM`
1691: Level: beginner
1693: Note:
1694: Cell DMs with the same name will share the cellid field
1696: .seealso: `DM`, `DMSWARM`, `DMSwarmSetType()`, `DMSwarmPushCellDM()`, `DMSwarmSetCellDM()`, `DMSwarmMigrate()`
1697: @*/
1698: PetscErrorCode DMSwarmAddCellDM(DM sw, DMSwarmCellDM celldm)
1699: {
1700: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1701: const char *name;
1702: PetscInt dim;
1703: PetscBool flg;
1704: MPI_Comm comm;
1706: PetscFunctionBegin;
1708: PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));
1710: PetscCall(PetscObjectGetName((PetscObject)celldm, &name));
1711: PetscCall(PetscObjectListAdd(&swarm->cellDMs, name, (PetscObject)celldm));
1712: PetscCall(DMGetDimension(sw, &dim));
1713: for (PetscInt f = 0; f < celldm->Nfc; ++f) {
1714: PetscCall(DMSwarmDataFieldStringInList(celldm->coordFields[f], swarm->db->nfields, (const DMSwarmDataField *)swarm->db->field, &flg));
1715: if (!flg) {
1716: PetscCall(DMSwarmRegisterPetscDatatypeField(sw, celldm->coordFields[f], dim, PETSC_DOUBLE));
1717: } else {
1718: PetscDataType dt;
1719: PetscInt bs;
1721: PetscCall(DMSwarmGetFieldInfo(sw, celldm->coordFields[f], &bs, &dt));
1722: PetscCheck(bs == dim, comm, PETSC_ERR_ARG_WRONG, "Coordinate field %s has blocksize %" PetscInt_FMT " != %" PetscInt_FMT " spatial dimension", celldm->coordFields[f], bs, dim);
1723: PetscCheck(dt == PETSC_DOUBLE, comm, PETSC_ERR_ARG_WRONG, "Coordinate field %s has datatype %s != PETSC_DOUBLE", celldm->coordFields[f], PetscDataTypes[dt]);
1724: }
1725: }
1726: // Assume that DMs with the same name share the cellid field
1727: PetscCall(DMSwarmDataFieldStringInList(celldm->cellid, swarm->db->nfields, (const DMSwarmDataField *)swarm->db->field, &flg));
1728: if (!flg) {
1729: PetscBool isShell, isDummy;
1730: const char *name;
1732: // Allow dummy DMSHELL (I don't think we should support this mode)
1733: PetscCall(PetscObjectTypeCompare((PetscObject)celldm->dm, DMSHELL, &isShell));
1734: PetscCall(PetscObjectGetName((PetscObject)celldm->dm, &name));
1735: PetscCall(PetscStrcmp(name, "dummy", &isDummy));
1736: if (!isShell || !isDummy) PetscCall(DMSwarmRegisterPetscDatatypeField(sw, celldm->cellid, 1, PETSC_INT));
1737: }
1738: PetscCall(DMSwarmSetCellDMActive(sw, name));
1739: PetscFunctionReturn(PETSC_SUCCESS);
1740: }
1742: /*@
1743: DMSwarmGetLocalSize - Retrieves the local length of fields registered
1745: Not Collective
1747: Input Parameter:
1748: . dm - a `DMSWARM`
1750: Output Parameter:
1751: . nlocal - the length of each registered field
1753: Level: beginner
1755: .seealso: `DM`, `DMSWARM`, `DMSwarmGetSize()`, `DMSwarmSetLocalSizes()`
1756: @*/
1757: PetscErrorCode DMSwarmGetLocalSize(DM dm, PetscInt *nlocal)
1758: {
1759: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1761: PetscFunctionBegin;
1762: PetscCall(DMSwarmDataBucketGetSizes(swarm->db, nlocal, NULL, NULL));
1763: PetscFunctionReturn(PETSC_SUCCESS);
1764: }
1766: /*@
1767: DMSwarmGetSize - Retrieves the total length of fields registered
1769: Collective
1771: Input Parameter:
1772: . dm - a `DMSWARM`
1774: Output Parameter:
1775: . n - the total length of each registered field
1777: Level: beginner
1779: Note:
1780: This calls `MPI_Allreduce()` upon each call (inefficient but safe)
1782: .seealso: `DM`, `DMSWARM`, `DMSwarmGetLocalSize()`, `DMSwarmSetLocalSizes()`
1783: @*/
1784: PetscErrorCode DMSwarmGetSize(DM dm, PetscInt *n)
1785: {
1786: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1787: PetscInt nlocal;
1789: PetscFunctionBegin;
1790: PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &nlocal, NULL, NULL));
1791: PetscCallMPI(MPIU_Allreduce(&nlocal, n, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)dm)));
1792: PetscFunctionReturn(PETSC_SUCCESS);
1793: }
1795: /*@C
1796: DMSwarmRegisterPetscDatatypeField - Register a field to a `DMSWARM` with a native PETSc data type
1798: Collective
1800: Input Parameters:
1801: + dm - a `DMSWARM`
1802: . fieldname - the textual name to identify this field
1803: . blocksize - the number of each data type
1804: - type - a valid PETSc data type (`PETSC_CHAR`, `PETSC_SHORT`, `PETSC_INT`, `PETSC_FLOAT`, `PETSC_REAL`, `PETSC_LONG`)
1806: Level: beginner
1808: Notes:
1809: The textual name for each registered field must be unique.
1811: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterUserStructField()`, `DMSwarmRegisterUserDatatypeField()`
1812: @*/
1813: PetscErrorCode DMSwarmRegisterPetscDatatypeField(DM dm, const char fieldname[], PetscInt blocksize, PetscDataType type)
1814: {
1815: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1816: size_t size;
1818: PetscFunctionBegin;
1819: PetscCheck(swarm->field_registration_initialized, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "Must call DMSwarmInitializeFieldRegister() first");
1820: PetscCheck(!swarm->field_registration_finalized, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "Cannot register additional fields after calling DMSwarmFinalizeFieldRegister() first");
1822: PetscCheck(type != PETSC_OBJECT, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Valid for {char,short,int,long,float,double}");
1823: PetscCheck(type != PETSC_FUNCTION, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Valid for {char,short,int,long,float,double}");
1824: PetscCheck(type != PETSC_STRING, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Valid for {char,short,int,long,float,double}");
1825: PetscCheck(type != PETSC_STRUCT, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Valid for {char,short,int,long,float,double}");
1826: PetscCheck(type != PETSC_DATATYPE_UNKNOWN, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Valid for {char,short,int,long,float,double}");
1828: PetscCall(PetscDataTypeGetSize(type, &size));
1829: /* Load a specific data type into data bucket, specifying textual name and its size in bytes */
1830: PetscCall(DMSwarmDataBucketRegisterField(swarm->db, "DMSwarmRegisterPetscDatatypeField", fieldname, blocksize * size, NULL));
1831: {
1832: DMSwarmDataField gfield;
1834: PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
1835: PetscCall(DMSwarmDataFieldSetBlockSize(gfield, blocksize));
1836: }
1837: swarm->db->field[swarm->db->nfields - 1]->petsc_type = type;
1838: PetscFunctionReturn(PETSC_SUCCESS);
1839: }
1841: /*@C
1842: DMSwarmRegisterUserStructField - Register a user defined struct to a `DMSWARM`
1844: Collective
1846: Input Parameters:
1847: + dm - a `DMSWARM`
1848: . fieldname - the textual name to identify this field
1849: - size - the size in bytes of the user struct of each data type
1851: Level: beginner
1853: Note:
1854: The textual name for each registered field must be unique.
1856: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmRegisterUserDatatypeField()`
1857: @*/
1858: PetscErrorCode DMSwarmRegisterUserStructField(DM dm, const char fieldname[], size_t size)
1859: {
1860: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1862: PetscFunctionBegin;
1863: PetscCall(DMSwarmDataBucketRegisterField(swarm->db, "DMSwarmRegisterUserStructField", fieldname, size, NULL));
1864: swarm->db->field[swarm->db->nfields - 1]->petsc_type = PETSC_STRUCT;
1865: PetscFunctionReturn(PETSC_SUCCESS);
1866: }
1868: /*@C
1869: DMSwarmRegisterUserDatatypeField - Register a user defined data type to a `DMSWARM`
1871: Collective
1873: Input Parameters:
1874: + dm - a `DMSWARM`
1875: . fieldname - the textual name to identify this field
1876: . size - the size in bytes of the user data type
1877: - blocksize - the number of each data type
1879: Level: beginner
1881: Note:
1882: The textual name for each registered field must be unique.
1884: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmRegisterUserStructField()`
1885: @*/
1886: PetscErrorCode DMSwarmRegisterUserDatatypeField(DM dm, const char fieldname[], size_t size, PetscInt blocksize)
1887: {
1888: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1890: PetscFunctionBegin;
1891: PetscCall(DMSwarmDataBucketRegisterField(swarm->db, "DMSwarmRegisterUserDatatypeField", fieldname, blocksize * size, NULL));
1892: {
1893: DMSwarmDataField gfield;
1895: PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
1896: PetscCall(DMSwarmDataFieldSetBlockSize(gfield, blocksize));
1897: }
1898: swarm->db->field[swarm->db->nfields - 1]->petsc_type = PETSC_DATATYPE_UNKNOWN;
1899: PetscFunctionReturn(PETSC_SUCCESS);
1900: }
/*@C
  DMSwarmGetField - Get access to the underlying array storing all entries associated with a registered field

  Not Collective, No Fortran Support

  Input Parameters:
+ dm        - a `DMSWARM`
- fieldname - the textual name to identify this field

  Output Parameters:
+ blocksize - the number of each data type
. type      - the data type
- data      - pointer to raw array

  Level: beginner

  Notes:
  The array must be returned using a matching call to `DMSwarmRestoreField()`.

  Fortran Note:
  Only works for `type` of `PETSC_SCALAR`

.seealso: `DM`, `DMSWARM`, `DMSwarmRestoreField()`
@*/
PetscErrorCode DMSwarmGetField(DM dm, const char fieldname[], PetscInt *blocksize, PetscDataType *type, void **data) PeNS
{
  DM_Swarm        *swarm = (DM_Swarm *)dm->data;
  DMSwarmDataField gfield;

  PetscFunctionBegin;
  /* Lazily set up the swarm so fields are finalized before first access */
  if (!swarm->issetup) PetscCall(DMSetUp(dm));
  PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
  /* Marks the field as checked out; must be balanced by DMSwarmRestoreField() */
  PetscCall(DMSwarmDataFieldGetAccess(gfield));
  PetscCall(DMSwarmDataFieldGetEntries(gfield, data));
  /* blocksize and type are optional outputs */
  if (blocksize) *blocksize = gfield->bs;
  if (type) *type = gfield->petsc_type;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1942: /*@C
1943: DMSwarmRestoreField - Restore access to the underlying array storing all entries associated with a registered field
1945: Not Collective
1947: Input Parameters:
1948: + dm - a `DMSWARM`
1949: - fieldname - the textual name to identify this field
1951: Output Parameters:
1952: + blocksize - the number of each data type
1953: . type - the data type
1954: - data - pointer to raw array
1956: Level: beginner
1958: Notes:
1959: The user must call `DMSwarmGetField()` prior to calling `DMSwarmRestoreField()`.
1961: Fortran Note:
1962: Only works for `type` of `PETSC_SCALAR`
1964: .seealso: `DM`, `DMSWARM`, `DMSwarmGetField()`
1965: @*/
1966: PetscErrorCode DMSwarmRestoreField(DM dm, const char fieldname[], PetscInt *blocksize, PetscDataType *type, void **data) PeNS
1967: {
1968: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1969: DMSwarmDataField gfield;
1971: PetscFunctionBegin;
1973: PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
1974: PetscCall(DMSwarmDataFieldRestoreAccess(gfield));
1975: if (data) *data = NULL;
1976: PetscFunctionReturn(PETSC_SUCCESS);
1977: }
1979: PetscErrorCode DMSwarmGetFieldInfo(DM dm, const char fieldname[], PetscInt *blocksize, PetscDataType *type)
1980: {
1981: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1982: DMSwarmDataField gfield;
1984: PetscFunctionBegin;
1986: PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
1987: if (blocksize) *blocksize = gfield->bs;
1988: if (type) *type = gfield->petsc_type;
1989: PetscFunctionReturn(PETSC_SUCCESS);
1990: }
1992: /*@
1993: DMSwarmAddPoint - Add space for one new point in the `DMSWARM`
1995: Not Collective
1997: Input Parameter:
1998: . dm - a `DMSWARM`
2000: Level: beginner
2002: Notes:
2003: The new point will have all fields initialized to zero.
2005: .seealso: `DM`, `DMSWARM`, `DMSwarmAddNPoints()`
2006: @*/
2007: PetscErrorCode DMSwarmAddPoint(DM dm)
2008: {
2009: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2011: PetscFunctionBegin;
2012: if (!swarm->issetup) PetscCall(DMSetUp(dm));
2013: PetscCall(PetscLogEventBegin(DMSWARM_AddPoints, 0, 0, 0, 0));
2014: PetscCall(DMSwarmDataBucketAddPoint(swarm->db));
2015: PetscCall(PetscLogEventEnd(DMSWARM_AddPoints, 0, 0, 0, 0));
2016: PetscFunctionReturn(PETSC_SUCCESS);
2017: }
2019: /*@
2020: DMSwarmAddNPoints - Add space for a number of new points in the `DMSWARM`
2022: Not Collective
2024: Input Parameters:
2025: + dm - a `DMSWARM`
2026: - npoints - the number of new points to add
2028: Level: beginner
2030: Notes:
2031: The new point will have all fields initialized to zero.
2033: .seealso: `DM`, `DMSWARM`, `DMSwarmAddPoint()`
2034: @*/
2035: PetscErrorCode DMSwarmAddNPoints(DM dm, PetscInt npoints)
2036: {
2037: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2038: PetscInt nlocal;
2040: PetscFunctionBegin;
2041: PetscCall(PetscLogEventBegin(DMSWARM_AddPoints, 0, 0, 0, 0));
2042: PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &nlocal, NULL, NULL));
2043: nlocal = PetscMax(nlocal, 0) + npoints;
2044: PetscCall(DMSwarmDataBucketSetSizes(swarm->db, nlocal, DMSWARM_DATA_BUCKET_BUFFER_DEFAULT));
2045: PetscCall(PetscLogEventEnd(DMSWARM_AddPoints, 0, 0, 0, 0));
2046: PetscFunctionReturn(PETSC_SUCCESS);
2047: }
2049: /*@
2050: DMSwarmRemovePoint - Remove the last point from the `DMSWARM`
2052: Not Collective
2054: Input Parameter:
2055: . dm - a `DMSWARM`
2057: Level: beginner
2059: .seealso: `DM`, `DMSWARM`, `DMSwarmRemovePointAtIndex()`
2060: @*/
2061: PetscErrorCode DMSwarmRemovePoint(DM dm)
2062: {
2063: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2065: PetscFunctionBegin;
2066: PetscCall(PetscLogEventBegin(DMSWARM_RemovePoints, 0, 0, 0, 0));
2067: PetscCall(DMSwarmDataBucketRemovePoint(swarm->db));
2068: PetscCall(PetscLogEventEnd(DMSWARM_RemovePoints, 0, 0, 0, 0));
2069: PetscFunctionReturn(PETSC_SUCCESS);
2070: }
2072: /*@
2073: DMSwarmRemovePointAtIndex - Removes a specific point from the `DMSWARM`
2075: Not Collective
2077: Input Parameters:
2078: + dm - a `DMSWARM`
2079: - idx - index of point to remove
2081: Level: beginner
2083: .seealso: `DM`, `DMSWARM`, `DMSwarmRemovePoint()`
2084: @*/
2085: PetscErrorCode DMSwarmRemovePointAtIndex(DM dm, PetscInt idx)
2086: {
2087: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2089: PetscFunctionBegin;
2090: PetscCall(PetscLogEventBegin(DMSWARM_RemovePoints, 0, 0, 0, 0));
2091: PetscCall(DMSwarmDataBucketRemovePointAtIndex(swarm->db, idx));
2092: PetscCall(PetscLogEventEnd(DMSWARM_RemovePoints, 0, 0, 0, 0));
2093: PetscFunctionReturn(PETSC_SUCCESS);
2094: }
2096: /*@
2097: DMSwarmCopyPoint - Copy point pj to point pi in the `DMSWARM`
2099: Not Collective
2101: Input Parameters:
2102: + dm - a `DMSWARM`
2103: . pi - the index of the point to copy
2104: - pj - the point index where the copy should be located
2106: Level: beginner
2108: .seealso: `DM`, `DMSWARM`, `DMSwarmRemovePoint()`
2109: @*/
2110: PetscErrorCode DMSwarmCopyPoint(DM dm, PetscInt pi, PetscInt pj)
2111: {
2112: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2114: PetscFunctionBegin;
2115: if (!swarm->issetup) PetscCall(DMSetUp(dm));
2116: PetscCall(DMSwarmDataBucketCopyPoint(swarm->db, pi, swarm->db, pj));
2117: PetscFunctionReturn(PETSC_SUCCESS);
2118: }
/* Basic migration strategy: forwards directly to the push-based exchange implemented in DMSwarmMigrate_Push_Basic() */
static PetscErrorCode DMSwarmMigrate_Basic(DM dm, PetscBool remove_sent_points)
{
  PetscFunctionBegin;
  PetscCall(DMSwarmMigrate_Push_Basic(dm, remove_sent_points));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2127: /*@
2128: DMSwarmMigrate - Relocates points defined in the `DMSWARM` to other MPI-ranks
2130: Collective
2132: Input Parameters:
2133: + dm - the `DMSWARM`
2134: - remove_sent_points - flag indicating if sent points should be removed from the current MPI-rank
2136: Level: advanced
2138: Notes:
2139: The `DM` will be modified to accommodate received points.
2140: If `remove_sent_points` is `PETSC_TRUE`, any points that were sent will be removed from the `DM`.
2141: Different styles of migration are supported. See `DMSwarmSetMigrateType()`.
2143: .seealso: `DM`, `DMSWARM`, `DMSwarmSetMigrateType()`
2144: @*/
2145: PetscErrorCode DMSwarmMigrate(DM dm, PetscBool remove_sent_points)
2146: {
2147: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2149: PetscFunctionBegin;
2150: PetscCall(PetscLogEventBegin(DMSWARM_Migrate, 0, 0, 0, 0));
2151: switch (swarm->migrate_type) {
2152: case DMSWARM_MIGRATE_BASIC:
2153: PetscCall(DMSwarmMigrate_Basic(dm, remove_sent_points));
2154: break;
2155: case DMSWARM_MIGRATE_DMCELLNSCATTER:
2156: PetscCall(DMSwarmMigrate_CellDMScatter(dm, remove_sent_points));
2157: break;
2158: case DMSWARM_MIGRATE_DMCELLEXACT:
2159: SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_MIGRATE_DMCELLEXACT not implemented");
2160: case DMSWARM_MIGRATE_USER:
2161: SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_MIGRATE_USER not implemented");
2162: default:
2163: SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_MIGRATE type unknown");
2164: }
2165: PetscCall(PetscLogEventEnd(DMSWARM_Migrate, 0, 0, 0, 0));
2166: PetscCall(DMClearGlobalVectors(dm));
2167: PetscFunctionReturn(PETSC_SUCCESS);
2168: }
2170: PetscErrorCode DMSwarmMigrate_GlobalToLocal_Basic(DM dm, PetscInt *globalsize);
/*
 DMSwarmCollectViewCreate

 * Applies a collection method and gathers points from neighbouring ranks into dm

 Notes:
 Users should call DMSwarmCollectViewDestroy() after
 they have finished computations associated with the collected points
*/
2182: /*@
2183: DMSwarmCollectViewCreate - Applies a collection method and gathers points
2184: in neighbour ranks into the `DMSWARM`
2186: Collective
2188: Input Parameter:
2189: . dm - the `DMSWARM`
2191: Level: advanced
2193: Notes:
2194: Users should call `DMSwarmCollectViewDestroy()` after
2195: they have finished computations associated with the collected points
2197: Different collect methods are supported. See `DMSwarmSetCollectType()`.
2199: Developer Note:
2200: Create and Destroy routines create new objects that can get destroyed, they do not change the state
2201: of the current object.
2203: .seealso: `DM`, `DMSWARM`, `DMSwarmCollectViewDestroy()`, `DMSwarmSetCollectType()`
2204: @*/
2205: PetscErrorCode DMSwarmCollectViewCreate(DM dm)
2206: {
2207: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2208: PetscInt ng;
2210: PetscFunctionBegin;
2211: PetscCheck(!swarm->collect_view_active, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "CollectView currently active");
2212: PetscCall(DMSwarmGetLocalSize(dm, &ng));
2213: switch (swarm->collect_type) {
2214: case DMSWARM_COLLECT_BASIC:
2215: PetscCall(DMSwarmMigrate_GlobalToLocal_Basic(dm, &ng));
2216: break;
2217: case DMSWARM_COLLECT_DMDABOUNDINGBOX:
2218: SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_COLLECT_DMDABOUNDINGBOX not implemented");
2219: case DMSWARM_COLLECT_GENERAL:
2220: SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_COLLECT_GENERAL not implemented");
2221: default:
2222: SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_COLLECT type unknown");
2223: }
2224: swarm->collect_view_active = PETSC_TRUE;
2225: swarm->collect_view_reset_nlocal = ng;
2226: PetscFunctionReturn(PETSC_SUCCESS);
2227: }
2229: /*@
2230: DMSwarmCollectViewDestroy - Resets the `DMSWARM` to the size prior to calling `DMSwarmCollectViewCreate()`
2232: Collective
2234: Input Parameters:
2235: . dm - the `DMSWARM`
2237: Notes:
2238: Users should call `DMSwarmCollectViewCreate()` before this function is called.
2240: Level: advanced
2242: Developer Note:
2243: Create and Destroy routines create new objects that can get destroyed, they do not change the state
2244: of the current object.
2246: .seealso: `DM`, `DMSWARM`, `DMSwarmCollectViewCreate()`, `DMSwarmSetCollectType()`
2247: @*/
2248: PetscErrorCode DMSwarmCollectViewDestroy(DM dm)
2249: {
2250: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2252: PetscFunctionBegin;
2253: PetscCheck(swarm->collect_view_active, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "CollectView is currently not active");
2254: PetscCall(DMSwarmSetLocalSizes(dm, swarm->collect_view_reset_nlocal, -1));
2255: swarm->collect_view_active = PETSC_FALSE;
2256: PetscFunctionReturn(PETSC_SUCCESS);
2257: }
2259: static PetscErrorCode DMSwarmSetUpPIC(DM dm)
2260: {
2261: PetscInt dim;
2263: PetscFunctionBegin;
2264: PetscCall(DMSwarmSetNumSpecies(dm, 1));
2265: PetscCall(DMGetDimension(dm, &dim));
2266: PetscCheck(dim >= 1, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "Dimension must be 1,2,3 - found %" PetscInt_FMT, dim);
2267: PetscCheck(dim <= 3, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "Dimension must be 1,2,3 - found %" PetscInt_FMT, dim);
2268: PetscFunctionReturn(PETSC_SUCCESS);
2269: }
/*@
  DMSwarmSetPointCoordinatesRandom - Sets initial coordinates for particles in each cell

  Collective

  Input Parameters:
+ dm  - the `DMSWARM`
- Npc - The number of particles per cell in the cell `DM`

  Level: intermediate

  Notes:
  The user must use `DMSwarmSetCellDM()` to set the cell `DM` first. The particles are placed randomly inside each cell. If only
  one particle is in each cell, it is placed at the centroid.

.seealso: `DM`, `DMSWARM`, `DMSwarmSetCellDM()`
@*/
PetscErrorCode DMSwarmSetPointCoordinatesRandom(DM dm, PetscInt Npc)
{
  DM             cdm;
  DMSwarmCellDM  celldm;
  PetscRandom    rnd;
  DMPolytopeType ct;
  PetscBool      simplex;
  PetscReal     *centroid, *coords, *xi0, *v0, *J, *invJ, detJ;
  PetscInt       dim, d, cStart, cEnd, c, p, Nfc;
  const char   **coordFields;

  PetscFunctionBeginUser;
  /* Uniform random values in [-1, 1], i.e. the reference-cell coordinate range */
  PetscCall(PetscRandomCreate(PetscObjectComm((PetscObject)dm), &rnd));
  PetscCall(PetscRandomSetInterval(rnd, -1.0, 1.0));
  PetscCall(PetscRandomSetType(rnd, PETSCRAND48));
  PetscCall(DMSwarmGetCellDMActive(dm, &celldm));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCheck(Nfc == 1, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "We only support a single coordinate field right now, not %" PetscInt_FMT, Nfc);
  PetscCall(DMSwarmGetCellDM(dm, &cdm));
  PetscCall(DMGetDimension(cdm, &dim));
  PetscCall(DMPlexGetHeightStratum(cdm, 0, &cStart, &cEnd));
  /* A cell is a simplex when it has dim+1 vertices; assumes a uniform cell type across the mesh (taken from cStart) */
  PetscCall(DMPlexGetCellType(cdm, cStart, &ct));
  simplex = DMPolytopeTypeGetNumVertices(ct) == DMPolytopeTypeGetDim(ct) + 1 ? PETSC_TRUE : PETSC_FALSE;
  PetscCall(PetscMalloc5(dim, &centroid, dim, &xi0, dim, &v0, dim * dim, &J, dim * dim, &invJ));
  /* Reference-cell origin for the reference-to-real map */
  for (d = 0; d < dim; ++d) xi0[d] = -1.0;
  PetscCall(DMSwarmGetField(dm, coordFields[0], NULL, NULL, (void **)&coords));
  for (c = cStart; c < cEnd; ++c) {
    if (Npc == 1) {
      /* A single particle per cell sits at the cell centroid */
      PetscCall(DMPlexComputeCellGeometryFVM(cdm, c, NULL, centroid, NULL));
      for (d = 0; d < dim; ++d) coords[c * dim + d] = centroid[d];
    } else {
      PetscCall(DMPlexComputeCellGeometryFEM(cdm, c, NULL, v0, J, invJ, &detJ)); /* affine */
      for (p = 0; p < Npc; ++p) {
        const PetscInt n   = c * Npc + p; /* global particle index: particles are numbered contiguously per cell */
        PetscReal      sum = 0.0, refcoords[3];

        for (d = 0; d < dim; ++d) {
          PetscCall(PetscRandomGetValueReal(rnd, &refcoords[d]));
          sum += refcoords[d];
        }
        /* NOTE(review): folds points back toward the simplex interior when the sample lands outside
           the reference simplex (sum > 0); the sqrt(dim) scaling looks like a heuristic — confirm intent */
        if (simplex && sum > 0.0)
          for (d = 0; d < dim; ++d) refcoords[d] -= PetscSqrtReal(dim) * sum;
        /* Map the reference coordinates into the physical cell using the affine geometry */
        CoordinatesRefToReal(dim, dim, xi0, v0, J, refcoords, &coords[n * dim]);
      }
    }
  }
  PetscCall(DMSwarmRestoreField(dm, coordFields[0], NULL, NULL, (void **)&coords));
  PetscCall(PetscFree5(centroid, xi0, v0, J, invJ));
  PetscCall(PetscRandomDestroy(&rnd));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2342: /*@
2343: DMSwarmGetType - Get particular flavor of `DMSWARM`
2345: Collective
2347: Input Parameter:
2348: . sw - the `DMSWARM`
2350: Output Parameter:
2351: . stype - the `DMSWARM` type (e.g. `DMSWARM_PIC`)
2353: Level: advanced
2355: .seealso: `DM`, `DMSWARM`, `DMSwarmSetMigrateType()`, `DMSwarmSetCollectType()`, `DMSwarmType`, `DMSWARM_PIC`, `DMSWARM_BASIC`
2356: @*/
2357: PetscErrorCode DMSwarmGetType(DM sw, DMSwarmType *stype)
2358: {
2359: DM_Swarm *swarm = (DM_Swarm *)sw->data;
2361: PetscFunctionBegin;
2363: PetscAssertPointer(stype, 2);
2364: *stype = swarm->swarm_type;
2365: PetscFunctionReturn(PETSC_SUCCESS);
2366: }
2368: /*@
2369: DMSwarmSetType - Set particular flavor of `DMSWARM`
2371: Collective
2373: Input Parameters:
2374: + sw - the `DMSWARM`
2375: - stype - the `DMSWARM` type (e.g. `DMSWARM_PIC`)
2377: Level: advanced
2379: .seealso: `DM`, `DMSWARM`, `DMSwarmSetMigrateType()`, `DMSwarmSetCollectType()`, `DMSwarmType`, `DMSWARM_PIC`, `DMSWARM_BASIC`
2380: @*/
2381: PetscErrorCode DMSwarmSetType(DM sw, DMSwarmType stype)
2382: {
2383: DM_Swarm *swarm = (DM_Swarm *)sw->data;
2385: PetscFunctionBegin;
2387: swarm->swarm_type = stype;
2388: if (swarm->swarm_type == DMSWARM_PIC) PetscCall(DMSwarmSetUpPIC(sw));
2389: PetscFunctionReturn(PETSC_SUCCESS);
2390: }
/* Build the auxiliary DMPLEX used for particle remapping: a mesh (configured via the "remap_"
   options prefix) carrying a single FE field named "distribution" */
static PetscErrorCode DMSwarmCreateRemapDM_Private(DM sw, DM *rdm)
{
  PetscFE        fe;
  DMPolytopeType ct;
  PetscInt       dim, cStart;
  const char    *prefix = "remap_";

  PetscFunctionBegin;
  /* Create the remap mesh on the swarm's communicator; shape/size come from -remap_dm_* options */
  PetscCall(DMCreate(PetscObjectComm((PetscObject)sw), rdm));
  PetscCall(DMSetType(*rdm, DMPLEX));
  PetscCall(DMPlexSetOptionsPrefix(*rdm, prefix));
  PetscCall(DMSetFromOptions(*rdm));
  PetscCall(PetscObjectSetName((PetscObject)*rdm, "remap"));
  PetscCall(DMViewFromOptions(*rdm, NULL, "-dm_view"));
  PetscCall(DMGetDimension(*rdm, &dim));
  /* Use the first cell's polytope type to build a matching finite element */
  PetscCall(DMPlexGetHeightStratum(*rdm, 0, &cStart, NULL));
  PetscCall(DMPlexGetCellType(*rdm, cStart, &ct));
  PetscCall(PetscFECreateByCell(PETSC_COMM_SELF, dim, 1, ct, prefix, PETSC_DETERMINE, &fe));
  PetscCall(PetscObjectSetName((PetscObject)fe, "distribution"));
  PetscCall(DMSetField(*rdm, 0, NULL, (PetscObject)fe));
  PetscCall(DMCreateDS(*rdm));
  PetscCall(PetscFEDestroy(&fe));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* DMSetUp implementation for DMSWARM: creates the remap cell DM if requested, selects the
   migration strategy for PIC mode based on the cell DM's capabilities, and finalizes field
   registration. Idempotent: returns immediately once setup has run. */
static PetscErrorCode DMSetup_Swarm(DM sw)
{
  DM_Swarm *swarm = (DM_Swarm *)sw->data;

  PetscFunctionBegin;
  if (swarm->issetup) PetscFunctionReturn(PETSC_SUCCESS);
  /* Set the flag first so recursive DMSetUp() calls below do not re-enter */
  swarm->issetup = PETSC_TRUE;

  if (swarm->remap_type != DMSWARM_REMAP_NONE) {
    DMSwarmCellDM celldm;

    /* Lazily create the "remap" cell DM (coordinates + velocity, weight field "w_q") if the user did not provide one */
    PetscCall(DMSwarmGetCellDMByName(sw, "remap", &celldm));
    if (!celldm) {
      DM          rdm;
      const char *fieldnames[2]  = {DMSwarmPICField_coor, "velocity"};
      const char *vfieldnames[1] = {"w_q"};

      PetscCall(DMSwarmCreateRemapDM_Private(sw, &rdm));
      PetscCall(DMSwarmCellDMCreate(rdm, 1, vfieldnames, 2, fieldnames, &celldm));
      PetscCall(DMSwarmAddCellDM(sw, celldm));
      PetscCall(DMSwarmCellDMDestroy(&celldm));
      PetscCall(DMDestroy(&rdm));
    }
  }

  if (swarm->swarm_type == DMSWARM_PIC) {
    DMSwarmCellDM celldm;

    PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
    PetscCheck(celldm, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "No active cell DM. DMSWARM_PIC requires you call DMSwarmSetCellDM() or DMSwarmAddCellDM()");
    if (celldm->dm->ops->locatepointssubdomain) {
      /* check methods exists for exact ownership identification */
      PetscCall(PetscInfo(sw, "DMSWARM_PIC: Using method CellDM->ops->LocatePointsSubdomain\n"));
      swarm->migrate_type = DMSWARM_MIGRATE_DMCELLEXACT;
    } else {
      /* check methods exist for point location AND rank neighbor identification */
      PetscCheck(celldm->dm->ops->locatepoints, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "DMSWARM_PIC requires the method CellDM->ops->locatepoints be defined");
      PetscCall(PetscInfo(sw, "DMSWARM_PIC: Using method CellDM->LocatePoints\n"));

      PetscCheck(celldm->dm->ops->getneighbors, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "DMSWARM_PIC requires the method CellDM->ops->getneighbors be defined");
      PetscCall(PetscInfo(sw, "DMSWARM_PIC: Using method CellDM->GetNeigbors\n"));

      swarm->migrate_type = DMSWARM_MIGRATE_DMCELLNSCATTER;
    }
  }

  /* No further fields may be registered after this point */
  PetscCall(DMSwarmFinalizeFieldRegister(sw));

  /* check some fields were registered */
  PetscCheck(swarm->db->nfields > 2, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "At least one field user must be registered via DMSwarmRegisterXXX()");
  PetscFunctionReturn(PETSC_SUCCESS);
}
2471: static PetscErrorCode DMDestroy_Swarm(DM dm)
2472: {
2473: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2475: PetscFunctionBegin;
2476: if (--swarm->refct > 0) PetscFunctionReturn(PETSC_SUCCESS);
2477: PetscCall(PetscObjectListDestroy(&swarm->cellDMs));
2478: PetscCall(PetscFree(swarm->activeCellDM));
2479: PetscCall(DMSwarmDataBucketDestroy(&swarm->db));
2480: PetscCall(PetscFree(swarm));
2481: PetscFunctionReturn(PETSC_SUCCESS);
2482: }
/* Draw-viewer output for DMSWARM: renders the cell DM, then overlays each local particle as a
   small circle at its coordinate-field position. Circle radius is controlled by
   -dm_view_swarm_radius (default 0.01). */
static PetscErrorCode DMSwarmView_Draw(DM dm, PetscViewer viewer)
{
  DM            cdm;
  DMSwarmCellDM celldm;
  PetscDraw     draw;
  PetscReal    *coords, oldPause, radius = 0.01;
  PetscInt      Np, p, bs, Nfc;
  const char  **coordFields;

  PetscFunctionBegin;
  PetscCall(PetscOptionsGetReal(NULL, ((PetscObject)dm)->prefix, "-dm_view_swarm_radius", &radius, NULL));
  PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
  PetscCall(DMSwarmGetCellDM(dm, &cdm));
  /* Suppress the pause while drawing the background mesh, then restore it */
  PetscCall(PetscDrawGetPause(draw, &oldPause));
  PetscCall(PetscDrawSetPause(draw, 0.0));
  PetscCall(DMView(cdm, viewer));
  PetscCall(PetscDrawSetPause(draw, oldPause));

  PetscCall(DMSwarmGetCellDMActive(dm, &celldm));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCheck(Nfc == 1, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "We only support a single coordinate field right now, not %" PetscInt_FMT, Nfc);
  PetscCall(DMSwarmGetLocalSize(dm, &Np));
  PetscCall(DMSwarmGetField(dm, coordFields[0], &bs, NULL, (void **)&coords));
  for (p = 0; p < Np; ++p) {
    const PetscInt i = p * bs; /* first coordinate component of particle p */

    /* Uses the first two components only, so effectively a 2D plot */
    PetscCall(PetscDrawEllipse(draw, coords[i], coords[i + 1], radius, radius, PETSC_DRAW_BLUE));
  }
  PetscCall(DMSwarmRestoreField(dm, coordFields[0], &bs, NULL, (void **)&coords));
  PetscCall(PetscDrawFlush(draw));
  PetscCall(PetscDrawPause(draw));
  PetscCall(PetscDrawSave(draw));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* ASCII view: print the particle counts per rank, and with PETSC_VIEWER_ASCII_INFO
   also the containing cell of every local particle (plus weight and coordinates
   when a "w_q" field has been registered) */
static PetscErrorCode DMView_Swarm_Ascii(DM dm, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscInt *sizes;
  PetscInt dim, Np, maxSize = 17;
  MPI_Comm comm;
  PetscMPIInt rank, size;
  const char *name, *cellid;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(viewer, &format));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMSwarmGetLocalSize(dm, &Np));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscObjectGetName((PetscObject)dm, &name));
  if (name) PetscCall(PetscViewerASCIIPrintf(viewer, "%s in %" PetscInt_FMT " dimension%s:\n", name, dim, dim == 1 ? "" : "s"));
  else PetscCall(PetscViewerASCIIPrintf(viewer, "Swarm in %" PetscInt_FMT " dimension%s:\n", dim, dim == 1 ? "" : "s"));
  /* On fewer than maxSize ranks, gather and print every rank's count; otherwise only the global min/max */
  if (size < maxSize) PetscCall(PetscCalloc1(size, &sizes));
  else PetscCall(PetscCalloc1(3, &sizes));
  if (size < maxSize) {
    PetscCallMPI(MPI_Gather(&Np, 1, MPIU_INT, sizes, 1, MPIU_INT, 0, comm));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Number of particles per rank:"));
    for (PetscInt p = 0; p < size; ++p) {
      /* Only rank 0 holds the gathered counts */
      if (rank == 0) PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, sizes[p]));
    }
  } else {
    PetscInt locMinMax[2] = {Np, Np};

    PetscCall(PetscGlobalMinMaxInt(comm, locMinMax, sizes));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Min/Max of particles per rank: %" PetscInt_FMT "/%" PetscInt_FMT, sizes[0], sizes[1]));
  }
  PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
  PetscCall(PetscFree(sizes));
  if (format == PETSC_VIEWER_ASCII_INFO) {
    DM_Swarm *sw = (DM_Swarm *)dm->data;
    DMSwarmCellDM celldm;
    PetscInt *cell;
    PetscBool hasWeight;
    const char *fname = "w_q";

    /* Weights and coordinates are printed only when a "w_q" field exists in the data bucket */
    PetscCall(DMSwarmDataFieldStringInList(fname, sw->db->nfields, (const DMSwarmDataField *)sw->db->field, &hasWeight));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Cells containing each particle:\n"));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(DMSwarmGetCellDMActive(dm, &celldm));
    PetscCall(DMSwarmCellDMGetCellID(celldm, &cellid));
    PetscCall(DMSwarmGetField(dm, cellid, NULL, NULL, (void **)&cell));
    if (hasWeight) {
      PetscReal *weight, **coords;
      PetscInt Ncf, *bsC, bs;
      const char **coordNames;

      /* Check out every coordinate field, recording each block size */
      PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Ncf, &coordNames));
      PetscCall(PetscMalloc2(Ncf, &coords, Ncf, &bsC));
      for (PetscInt n = 0; n < Ncf; ++n) PetscCall(DMSwarmGetField(dm, coordNames[n], &bsC[n], NULL, (void **)&coords[n]));
      PetscCall(DMSwarmGetField(dm, fname, &bs, NULL, (void **)&weight));
      PetscCheck(bs == 1, comm, PETSC_ERR_ARG_WRONG, "The weight field must be a scalar");
      for (PetscInt p = 0; p < Np; ++p) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " p%" PetscInt_FMT ": %" PetscInt_FMT " wt: %g x: (", p, cell[p], (double)weight[p]));
        for (PetscInt n = 0; n < Ncf; ++n) {
          if (n > 0) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, ", "));
          for (PetscInt d = 0; d < bsC[n]; ++d) {
            if (d > 0) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, ", "));
            PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%g", (double)coords[n][p * bsC[n] + d]));
          }
        }
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, ")\n"));
      }
      PetscCall(DMSwarmRestoreField(dm, fname, NULL, NULL, (void **)&weight));
      for (PetscInt n = 0; n < Ncf; ++n) PetscCall(DMSwarmRestoreField(dm, coordNames[n], &bsC[n], NULL, (void **)&coords[n]));
      PetscCall(PetscFree2(coords, bsC));
    } else {
      for (PetscInt p = 0; p < Np; ++p) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " p%" PetscInt_FMT ": %" PetscInt_FMT "\n", p, cell[p]));
    }
    PetscCall(DMSwarmRestoreField(dm, cellid, NULL, NULL, (void **)&cell));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Dispatch DMView to the implementation matching the viewer type */
static PetscErrorCode DMView_Swarm(DM dm, PetscViewer viewer)
{
  DM_Swarm *swarm = (DM_Swarm *)dm->data;
  PetscBool isascii, ibinary, isvtk, isdraw, ispython;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  /* NOTE(review): ibinary and isvtk are queried but never dispatched on below,
     so binary/VTK viewers are silently ignored — confirm this is intended */
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &ibinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk));
#if defined(PETSC_HAVE_HDF5)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  PetscCall(PetscObjectHasFunction((PetscObject)viewer, "PetscViewerPythonViewObject_C", &ispython));
  if (isascii) {
    PetscViewerFormat format;

    PetscCall(PetscViewerGetFormat(viewer, &format));
    switch (format) {
    case PETSC_VIEWER_ASCII_INFO_DETAIL:
      /* Detailed view dumps the raw data bucket contents */
      PetscCall(DMSwarmDataBucketView(PetscObjectComm((PetscObject)dm), swarm->db, NULL, DATABUCKET_VIEW_STDOUT));
      break;
    default:
      PetscCall(DMView_Swarm_Ascii(dm, viewer));
    }
  } else {
#if defined(PETSC_HAVE_HDF5)
    if (ishdf5) PetscCall(DMSwarmView_HDF5(dm, viewer));
#endif
    if (isdraw) PetscCall(DMSwarmView_Draw(dm, viewer));
    if (ispython) PetscCall(PetscViewerPythonViewObject(viewer, (PetscObject)dm));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2641: /*@
2642: DMSwarmGetCellSwarm - Extracts a single cell from the `DMSWARM` object, returns it as a single cell `DMSWARM`.
2643: The cell `DM` is filtered for fields of that cell, and the filtered `DM` is used as the cell `DM` of the new swarm object.
2645: Noncollective
2647: Input Parameters:
2648: + sw - the `DMSWARM`
2649: . cellID - the integer id of the cell to be extracted and filtered
2650: - cellswarm - The `DMSWARM` to receive the cell
2652: Level: beginner
2654: Notes:
2655: This presently only supports `DMSWARM_PIC` type
2657: Should be restored with `DMSwarmRestoreCellSwarm()`
2659: Changes to this cell of the swarm will be lost if they are made prior to restoring this cell.
2661: .seealso: `DM`, `DMSWARM`, `DMSwarmRestoreCellSwarm()`
2662: @*/
PetscErrorCode DMSwarmGetCellSwarm(DM sw, PetscInt cellID, DM cellswarm)
{
  DM_Swarm *original = (DM_Swarm *)sw->data;
  DMLabel label;
  DM dmc, subdmc;
  PetscInt *pids, particles, dim;
  const char *name;

  PetscFunctionBegin;
  /* Configure new swarm */
  PetscCall(DMSetType(cellswarm, DMSWARM));
  PetscCall(DMGetDimension(sw, &dim));
  PetscCall(DMSetDimension(cellswarm, dim));
  PetscCall(DMSwarmSetType(cellswarm, DMSWARM_PIC));
  /* Destroy the unused, unconfigured data bucket to prevent stragglers in memory */
  PetscCall(DMSwarmDataBucketDestroy(&((DM_Swarm *)cellswarm->data)->db));
  /* Copy the particles living in cell cellID into the new swarm's data bucket;
     access must be held while the pids array is used to build the subset */
  PetscCall(DMSwarmSortGetAccess(sw));
  PetscCall(DMSwarmSortGetNumberOfPointsPerCell(sw, cellID, &particles));
  PetscCall(DMSwarmSortGetPointsPerCell(sw, cellID, &particles, &pids));
  PetscCall(DMSwarmDataBucketCreateFromSubset(original->db, particles, pids, &((DM_Swarm *)cellswarm->data)->db));
  PetscCall(DMSwarmSortRestoreAccess(sw));
  PetscCall(DMSwarmSortRestorePointsPerCell(sw, cellID, &particles, &pids));
  /* Filter the parent cell DM down to the single cell cellID and install the
     filtered DM (carrying the parent's name) as the new swarm's cell DM */
  PetscCall(DMSwarmGetCellDM(sw, &dmc));
  PetscCall(DMLabelCreate(PetscObjectComm((PetscObject)sw), "singlecell", &label));
  PetscCall(DMAddLabel(dmc, label));
  PetscCall(DMLabelSetValue(label, cellID, 1));
  PetscCall(DMPlexFilter(dmc, label, 1, PETSC_FALSE, PETSC_FALSE, PetscObjectComm((PetscObject)dmc), NULL, &subdmc));
  PetscCall(PetscObjectGetName((PetscObject)dmc, &name));
  PetscCall(PetscObjectSetName((PetscObject)subdmc, name));
  PetscCall(DMSwarmSetCellDM(cellswarm, subdmc));
  PetscCall(DMLabelDestroy(&label));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2697: /*@
2698: DMSwarmRestoreCellSwarm - Restores a `DMSWARM` object obtained with `DMSwarmGetCellSwarm()`. All fields are copied back into the parent swarm.
2700: Noncollective
2702: Input Parameters:
2703: + sw - the parent `DMSWARM`
2704: . cellID - the integer id of the cell to be copied back into the parent swarm
2705: - cellswarm - the cell swarm object
2707: Level: beginner
2709: Note:
2710: This only supports `DMSWARM_PIC` types of `DMSWARM`s
2712: .seealso: `DM`, `DMSWARM`, `DMSwarmGetCellSwarm()`
2713: @*/
PetscErrorCode DMSwarmRestoreCellSwarm(DM sw, PetscInt cellID, DM cellswarm)
{
  DM dmc;
  PetscInt *pids, particles, p;

  PetscFunctionBegin;
  /* Recover the pids of the particles belonging to cell cellID in the parent swarm */
  PetscCall(DMSwarmSortGetAccess(sw));
  PetscCall(DMSwarmSortGetPointsPerCell(sw, cellID, &particles, &pids));
  PetscCall(DMSwarmSortRestoreAccess(sw));
  /* Pointwise copy of each particle based on pid. The parent swarm may not be altered during this process. */
  for (p = 0; p < particles; ++p) PetscCall(DMSwarmDataBucketCopyPoint(((DM_Swarm *)cellswarm->data)->db, pids[p], ((DM_Swarm *)sw->data)->db, pids[p]));
  /* Free memory, destroy cell dm */
  PetscCall(DMSwarmGetCellDM(cellswarm, &dmc));
  PetscCall(DMDestroy(&dmc));
  PetscCall(DMSwarmSortRestorePointsPerCell(sw, cellID, &particles, &pids));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2732: /*@
2733: DMSwarmComputeMoments - Compute the first three particle moments for a given field
2735: Noncollective
2737: Input Parameters:
2738: + sw - the `DMSWARM`
2739: . coordinate - the coordinate field name
2740: - weight - the weight field name
2742: Output Parameter:
2743: . moments - the field moments
2745: Level: intermediate
2747: Notes:
2748: The `moments` array should be of length bs + 2, where bs is the block size of the coordinate field.
2750: The weight field must be a scalar, having blocksize 1.
2752: .seealso: `DM`, `DMSWARM`, `DMPlexComputeMoments()`
2753: @*/
2754: PetscErrorCode DMSwarmComputeMoments(DM sw, const char coordinate[], const char weight[], PetscReal moments[])
2755: {
2756: const PetscReal *coords;
2757: const PetscReal *w;
2758: PetscReal *mom;
2759: PetscDataType dtc, dtw;
2760: PetscInt bsc, bsw, Np;
2761: MPI_Comm comm;
2763: PetscFunctionBegin;
2765: PetscAssertPointer(coordinate, 2);
2766: PetscAssertPointer(weight, 3);
2767: PetscAssertPointer(moments, 4);
2768: PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));
2769: PetscCall(DMSwarmGetField(sw, coordinate, &bsc, &dtc, (void **)&coords));
2770: PetscCall(DMSwarmGetField(sw, weight, &bsw, &dtw, (void **)&w));
2771: PetscCheck(dtc == PETSC_REAL, comm, PETSC_ERR_ARG_WRONG, "Coordinate field %s must be real, not %s", coordinate, PetscDataTypes[dtc]);
2772: PetscCheck(dtw == PETSC_REAL, comm, PETSC_ERR_ARG_WRONG, "Weight field %s must be real, not %s", weight, PetscDataTypes[dtw]);
2773: PetscCheck(bsw == 1, comm, PETSC_ERR_ARG_WRONG, "Weight field %s must be a scalar, not blocksize %" PetscInt_FMT, weight, bsw);
2774: PetscCall(DMSwarmGetLocalSize(sw, &Np));
2775: PetscCall(DMGetWorkArray(sw, bsc + 2, MPIU_REAL, &mom));
2776: PetscCall(PetscArrayzero(mom, bsc + 2));
2777: for (PetscInt p = 0; p < Np; ++p) {
2778: const PetscReal *c = &coords[p * bsc];
2779: const PetscReal wp = w[p];
2781: mom[0] += wp;
2782: for (PetscInt d = 0; d < bsc; ++d) {
2783: mom[d + 1] += wp * c[d];
2784: mom[d + bsc + 1] += wp * PetscSqr(c[d]);
2785: }
2786: }
2787: PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&coords));
2788: PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&w));
2789: PetscCallMPI(MPIU_Allreduce(mom, moments, bsc + 2, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)sw)));
2790: PetscCall(DMRestoreWorkArray(sw, bsc + 2, MPIU_REAL, &mom));
2791: PetscFunctionReturn(PETSC_SUCCESS);
2792: }
2794: static PetscErrorCode DMSetFromOptions_Swarm(DM dm, PetscOptionItems PetscOptionsObject)
2795: {
2796: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2798: PetscFunctionBegin;
2799: PetscOptionsHeadBegin(PetscOptionsObject, "DMSwarm Options");
2800: PetscCall(PetscOptionsEnum("-dm_swarm_remap_type", "Remap algorithm", "DMSwarmSetRemapType", DMSwarmRemapTypeNames, (PetscEnum)swarm->remap_type, (PetscEnum *)&swarm->remap_type, NULL));
2801: PetscOptionsHeadEnd();
2802: PetscFunctionReturn(PETSC_SUCCESS);
2803: }
2805: PETSC_INTERN PetscErrorCode DMClone_Swarm(DM, DM *);
/* Install the DM method table for the swarm implementation; NULL entries mark
   operations the swarm does not support */
static PetscErrorCode DMInitialize_Swarm(DM sw)
{
  PetscFunctionBegin;
  sw->ops->view = DMView_Swarm;
  sw->ops->load = NULL;
  sw->ops->setfromoptions = DMSetFromOptions_Swarm;
  sw->ops->clone = DMClone_Swarm;
  sw->ops->setup = DMSetup_Swarm;
  sw->ops->createlocalsection = NULL;
  sw->ops->createsectionpermutation = NULL;
  sw->ops->createdefaultconstraints = NULL;
  sw->ops->createglobalvector = DMCreateGlobalVector_Swarm;
  sw->ops->createlocalvector = DMCreateLocalVector_Swarm;
  sw->ops->getlocaltoglobalmapping = NULL;
  sw->ops->createfieldis = NULL;
  sw->ops->createcoordinatedm = NULL;
  sw->ops->createcellcoordinatedm = NULL;
  sw->ops->getcoloring = NULL;
  sw->ops->creatematrix = DMCreateMatrix_Swarm;
  sw->ops->createinterpolation = NULL;
  sw->ops->createinjection = NULL;
  sw->ops->createmassmatrix = DMCreateMassMatrix_Swarm;
  sw->ops->creategradientmatrix = DMCreateGradientMatrix_Swarm;
  sw->ops->refine = NULL;
  sw->ops->coarsen = NULL;
  sw->ops->refinehierarchy = NULL;
  sw->ops->coarsenhierarchy = NULL;
  sw->ops->globaltolocalbegin = DMGlobalToLocalBegin_Swarm;
  sw->ops->globaltolocalend = DMGlobalToLocalEnd_Swarm;
  sw->ops->localtoglobalbegin = DMLocalToGlobalBegin_Swarm;
  sw->ops->localtoglobalend = DMLocalToGlobalEnd_Swarm;
  sw->ops->destroy = DMDestroy_Swarm;
  sw->ops->createsubdm = NULL;
  sw->ops->getdimpoints = NULL;
  sw->ops->locatepoints = NULL;
  sw->ops->projectfieldlocal = DMProjectFieldLocal_Swarm;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2846: PETSC_INTERN PetscErrorCode DMClone_Swarm(DM dm, DM *newdm)
2847: {
2848: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2850: PetscFunctionBegin;
2851: swarm->refct++;
2852: (*newdm)->data = swarm;
2853: PetscCall(PetscObjectChangeTypeName((PetscObject)*newdm, DMSWARM));
2854: PetscCall(DMInitialize_Swarm(*newdm));
2855: (*newdm)->dim = dm->dim;
2856: PetscFunctionReturn(PETSC_SUCCESS);
2857: }
2859: /*MC
2860: DMSWARM = "swarm" - A `DM` object for particle methods, such as particle-in-cell (PIC), in which the underlying
2861: data is both (i) dynamic in length, (ii) and of arbitrary data type.
2863: Level: intermediate
2865: Notes:
  User data can be represented by `DMSWARM` by registering "fields" which are to be stored on particles.
  To register a field, the user must provide:
2868: (a) a unique name;
2869: (b) the data type (or size in bytes);
2870: (c) the block size of the data.
2872: For example, suppose the application requires a unique id, energy, momentum and density to be stored
2873: on a set of particles. Then the following code could be used
2874: .vb
2875: DMSwarmInitializeFieldRegister(dm)
2876: DMSwarmRegisterPetscDatatypeField(dm,"uid",1,PETSC_LONG);
2877: DMSwarmRegisterPetscDatatypeField(dm,"energy",1,PETSC_REAL);
2878: DMSwarmRegisterPetscDatatypeField(dm,"momentum",3,PETSC_REAL);
2879: DMSwarmRegisterPetscDatatypeField(dm,"density",1,PETSC_FLOAT);
2880: DMSwarmFinalizeFieldRegister(dm)
2881: .ve
2883: The fields represented by `DMSWARM` are dynamic and can be re-sized at any time.
2884: The only restriction imposed by `DMSWARM` is that all fields contain the same number of particles.
2886: To support particle methods, "migration" techniques are provided. These methods migrate data
2887: between ranks.
2889: `DMSWARM` supports the methods `DMCreateGlobalVector()` and `DMCreateLocalVector()`.
2890: As a `DMSWARM` may internally define and store values of different data types,
2891: before calling `DMCreateGlobalVector()` or `DMCreateLocalVector()`, the user must inform `DMSWARM` which
2892: fields should be used to define a `Vec` object via `DMSwarmVectorDefineField()`
2893: The specified field can be changed at any time - thereby permitting vectors
2894: compatible with different fields to be created.
2896: A dual representation of fields in the `DMSWARM` and a Vec object is permitted via `DMSwarmCreateGlobalVectorFromField()`
2897: Here the data defining the field in the `DMSWARM` is shared with a `Vec`.
2898: This is inherently unsafe if you alter the size of the field at any time between
2899: calls to `DMSwarmCreateGlobalVectorFromField()` and `DMSwarmDestroyGlobalVectorFromField()`.
2900: If the local size of the `DMSWARM` does not match the local size of the global vector
2901: when `DMSwarmDestroyGlobalVectorFromField()` is called, an error is thrown.
2903: Additional high-level support is provided for Particle-In-Cell methods. Refer to `DMSwarmSetType()`.
2905: .seealso: `DM`, `DMSWARM`, `DMType`, `DMCreate()`, `DMSetType()`, `DMSwarmSetType()`, `DMSwarmType`, `DMSwarmCreateGlobalVectorFromField()`,
2906: `DMCreateGlobalVector()`, `DMCreateLocalVector()`
2907: M*/
PETSC_EXTERN PetscErrorCode DMCreate_Swarm(DM dm)
{
  DM_Swarm *swarm;

  PetscFunctionBegin;
  PetscCall(PetscNew(&swarm));
  dm->data = swarm;
  /* Create the data bucket that backs all registered particle fields */
  PetscCall(DMSwarmDataBucketCreate(&swarm->db));
  PetscCall(DMSwarmInitializeFieldRegister(dm));
  dm->dim = 0;
  /* Defaults: plain (non-PIC) swarm with basic migration and collection */
  swarm->refct = 1;
  swarm->issetup = PETSC_FALSE;
  swarm->swarm_type = DMSWARM_BASIC;
  swarm->migrate_type = DMSWARM_MIGRATE_BASIC;
  swarm->collect_type = DMSWARM_COLLECT_BASIC;
  swarm->migrate_error_on_missing_point = PETSC_FALSE;
  swarm->collect_view_active = PETSC_FALSE;
  swarm->collect_view_reset_nlocal = -1;
  PetscCall(DMInitialize_Swarm(dm));
  /* Register the composed-data id once per process */
  if (SwarmDataFieldId == -1) PetscCall(PetscObjectComposedDataRegister(&SwarmDataFieldId));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2933: /* Replace dm with the contents of ndm, and then destroy ndm
2934: - Share the DM_Swarm structure
2935: */
PetscErrorCode DMSwarmReplace(DM dm, DM *ndm)
{
  DM dmNew = *ndm;
  const PetscReal *maxCell, *Lstart, *L;
  PetscInt dim;

  PetscFunctionBegin;
  /* Replacing a DM with itself only drops the extra reference */
  if (dm == dmNew) {
    PetscCall(DMDestroy(ndm));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  dm->setupcalled = dmNew->setupcalled;
  /* Keep an existing name on dm; otherwise adopt the replacement's name */
  if (!dm->hdr.name) {
    const char *name;

    PetscCall(PetscObjectGetName((PetscObject)*ndm, &name));
    PetscCall(PetscObjectSetName((PetscObject)dm, name));
  }
  PetscCall(DMGetDimension(dmNew, &dim));
  PetscCall(DMSetDimension(dm, dim));
  PetscCall(DMGetPeriodicity(dmNew, &maxCell, &Lstart, &L));
  PetscCall(DMSetPeriodicity(dm, maxCell, Lstart, L));
  /* Release dm's own DM_Swarm context and share dmNew's, taking a reference
     before destroying ndm so the data stays alive */
  PetscCall(DMDestroy_Swarm(dm));
  PetscCall(DMInitialize_Swarm(dm));
  dm->data = dmNew->data;
  ((DM_Swarm *)dmNew->data)->refct++;
  PetscCall(DMDestroy(ndm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2966: /*@
2967: DMSwarmDuplicate - Creates a new `DMSWARM` with the same fields and cell `DM`s but no particles
2969: Collective
2971: Input Parameter:
2972: . sw - the `DMSWARM`
2974: Output Parameter:
2975: . nsw - the new `DMSWARM`
2977: Level: beginner
2979: .seealso: `DM`, `DMSWARM`, `DMSwarmCreate()`, `DMClone()`
2980: @*/
2981: PetscErrorCode DMSwarmDuplicate(DM sw, DM *nsw)
2982: {
2983: DM_Swarm *swarm = (DM_Swarm *)sw->data;
2984: DMSwarmDataField *fields;
2985: DMSwarmCellDM celldm, ncelldm;
2986: DMSwarmType stype;
2987: const char *name, **celldmnames;
2988: void *ctx;
2989: PetscInt dim, Nf, Ndm;
2990: PetscBool flg;
2992: PetscFunctionBegin;
2993: PetscCall(DMCreate(PetscObjectComm((PetscObject)sw), nsw));
2994: PetscCall(DMSetType(*nsw, DMSWARM));
2995: PetscCall(PetscObjectGetName((PetscObject)sw, &name));
2996: PetscCall(PetscObjectSetName((PetscObject)*nsw, name));
2997: PetscCall(DMGetDimension(sw, &dim));
2998: PetscCall(DMSetDimension(*nsw, dim));
2999: PetscCall(DMSwarmGetType(sw, &stype));
3000: PetscCall(DMSwarmSetType(*nsw, stype));
3001: PetscCall(DMGetApplicationContext(sw, &ctx));
3002: PetscCall(DMSetApplicationContext(*nsw, ctx));
3004: PetscCall(DMSwarmDataBucketGetDMSwarmDataFields(swarm->db, &Nf, &fields));
3005: for (PetscInt f = 0; f < Nf; ++f) {
3006: PetscCall(DMSwarmDataFieldStringInList(fields[f]->name, ((DM_Swarm *)(*nsw)->data)->db->nfields, (const DMSwarmDataField *)((DM_Swarm *)(*nsw)->data)->db->field, &flg));
3007: if (!flg) PetscCall(DMSwarmRegisterPetscDatatypeField(*nsw, fields[f]->name, fields[f]->bs, fields[f]->petsc_type));
3008: }
3010: PetscCall(DMSwarmGetCellDMNames(sw, &Ndm, &celldmnames));
3011: for (PetscInt c = 0; c < Ndm; ++c) {
3012: DM dm;
3013: PetscInt Ncf;
3014: const char **coordfields, **fields;
3016: PetscCall(DMSwarmGetCellDMByName(sw, celldmnames[c], &celldm));
3017: PetscCall(DMSwarmCellDMGetDM(celldm, &dm));
3018: PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Ncf, &coordfields));
3019: PetscCall(DMSwarmCellDMGetFields(celldm, &Nf, &fields));
3020: PetscCall(DMSwarmCellDMCreate(dm, Nf, fields, Ncf, coordfields, &ncelldm));
3021: PetscCall(DMSwarmAddCellDM(*nsw, ncelldm));
3022: PetscCall(DMSwarmCellDMDestroy(&ncelldm));
3023: }
3024: PetscCall(PetscFree(celldmnames));
3026: PetscCall(DMSetFromOptions(*nsw));
3027: PetscCall(DMSetUp(*nsw));
3028: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
3029: PetscCall(PetscObjectGetName((PetscObject)celldm, &name));
3030: PetscCall(DMSwarmSetCellDMActive(*nsw, name));
3031: PetscFunctionReturn(PETSC_SUCCESS);
3032: }
/* Intentionally a no-op: swarm local and global vectors coincide, so the
   entire transfer is performed in DMLocalToGlobalEnd_Swarm */
PetscErrorCode DMLocalToGlobalBegin_Swarm(DM dm, Vec l, InsertMode mode, Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3040: PetscErrorCode DMLocalToGlobalEnd_Swarm(DM dm, Vec l, InsertMode mode, Vec g)
3041: {
3042: PetscFunctionBegin;
3043: switch (mode) {
3044: case INSERT_VALUES:
3045: PetscCall(VecCopy(l, g));
3046: break;
3047: case ADD_VALUES:
3048: PetscCall(VecAXPY(g, 1., l));
3049: break;
3050: default:
3051: SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Mode not supported: %d", mode);
3052: }
3053: PetscFunctionReturn(PETSC_SUCCESS);
3054: }
/* Intentionally a no-op: swarm local and global vectors coincide, so the
   entire transfer is performed in DMGlobalToLocalEnd_Swarm */
PetscErrorCode DMGlobalToLocalBegin_Swarm(DM dm, Vec g, InsertMode mode, Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Global-to-local is symmetric to local-to-global for swarms (both are a copy
   or an accumulate), so delegate to that implementation with roles swapped */
PetscErrorCode DMGlobalToLocalEnd_Swarm(DM dm, Vec g, InsertMode mode, Vec l)
{
  PetscFunctionBegin;
  PetscCall(DMLocalToGlobalEnd_Swarm(dm, g, mode, l));
  PetscFunctionReturn(PETSC_SUCCESS);
}