/* swarm.c */
1: #include "petscdmswarm.h"
2: #include <petsc/private/dmswarmimpl.h>
3: #include <petsc/private/hashsetij.h>
4: #include <petsc/private/petscfeimpl.h>
5: #include <petscviewer.h>
6: #include <petscdraw.h>
7: #include <petscdmplex.h>
8: #include <petscblaslapack.h>
9: #include "../src/dm/impls/swarm/data_bucket.h"
10: #include <petscdmlabel.h>
11: #include <petscsection.h>
/* Logging events for the major DMSwarm operations */
PetscLogEvent DMSWARM_Migrate, DMSWARM_SetSizes, DMSWARM_AddPoints, DMSWARM_RemovePoints, DMSWARM_Sort;
PetscLogEvent DMSWARM_DataExchangerTopologySetup, DMSWARM_DataExchangerBegin, DMSWARM_DataExchangerEnd;
PetscLogEvent DMSWARM_DataExchangerSendCount, DMSWARM_DataExchangerPack;

/* String forms of the DMSwarm enumerations (NULL-terminated, indexed by enum value) */
const char *DMSwarmTypeNames[] = {"basic", "pic", NULL};
const char *DMSwarmMigrateTypeNames[] = {"basic", "dmcellnscatter", "dmcellexact", "user", NULL};
const char *DMSwarmCollectTypeNames[] = {"basic", "boundingbox", "general", "user", NULL};
const char *DMSwarmRemapTypeNames[] = {"none", "pfak", "colella", "DMSwarmRemapType", "DMSWARM_REMAP_", NULL};
const char *DMSwarmPICLayoutTypeNames[] = {"regular", "gauss", "subdivision", NULL};

/* Names of the built-in swarm fields */
const char DMSwarmField_pid[] = "DMSwarm_pid";
const char DMSwarmField_rank[] = "DMSwarm_rank";
const char DMSwarmPICField_coor[] = "DMSwarmPIC_coor";

/* Id used to compose the originating field id on vectors created from swarm fields */
PetscInt SwarmDataFieldId = -1;
29: #if defined(PETSC_HAVE_HDF5)
30: #include <petscviewerhdf5.h>
32: static PetscErrorCode VecView_Swarm_HDF5_Internal(Vec v, PetscViewer viewer)
33: {
34: DM dm;
35: PetscReal seqval;
36: PetscInt seqnum, bs;
37: PetscBool isseq, ists;
39: PetscFunctionBegin;
40: PetscCall(VecGetDM(v, &dm));
41: PetscCall(VecGetBlockSize(v, &bs));
42: PetscCall(PetscViewerHDF5PushGroup(viewer, "/particle_fields"));
43: PetscCall(PetscObjectTypeCompare((PetscObject)v, VECSEQ, &isseq));
44: PetscCall(DMGetOutputSequenceNumber(dm, &seqnum, &seqval));
45: PetscCall(PetscViewerHDF5IsTimestepping(viewer, &ists));
46: if (ists) PetscCall(PetscViewerHDF5SetTimestep(viewer, seqnum));
47: /* PetscCall(DMSequenceView_HDF5(dm, "time", seqnum, (PetscScalar) seqval, viewer)); */
48: PetscCall(VecViewNative(v, viewer));
49: PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)v, "Nc", PETSC_INT, (void *)&bs));
50: PetscCall(PetscViewerHDF5PopGroup(viewer));
51: PetscFunctionReturn(PETSC_SUCCESS);
52: }
54: static PetscErrorCode DMSwarmView_HDF5(DM dm, PetscViewer viewer)
55: {
56: DMSwarmCellDM celldm;
57: Vec coordinates;
58: PetscInt Np, Nfc;
59: PetscBool isseq;
60: const char **coordFields;
62: PetscFunctionBegin;
63: PetscCall(DMSwarmGetCellDMActive(dm, &celldm));
64: PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
65: PetscCheck(Nfc == 1, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "We only support a single coordinate field right now, not %" PetscInt_FMT, Nfc);
66: PetscCall(DMSwarmGetSize(dm, &Np));
67: PetscCall(DMSwarmCreateGlobalVectorFromField(dm, coordFields[0], &coordinates));
68: PetscCall(PetscObjectSetName((PetscObject)coordinates, "coordinates"));
69: PetscCall(PetscViewerHDF5PushGroup(viewer, "/particles"));
70: PetscCall(PetscObjectTypeCompare((PetscObject)coordinates, VECSEQ, &isseq));
71: PetscCall(VecViewNative(coordinates, viewer));
72: PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)coordinates, "Np", PETSC_INT, (void *)&Np));
73: PetscCall(PetscViewerHDF5PopGroup(viewer));
74: PetscCall(DMSwarmDestroyGlobalVectorFromField(dm, coordFields[0], &coordinates));
75: PetscFunctionReturn(PETSC_SUCCESS);
76: }
77: #endif
79: static PetscErrorCode VecView_Swarm(Vec v, PetscViewer viewer)
80: {
81: DM dm;
82: #if defined(PETSC_HAVE_HDF5)
83: PetscBool ishdf5;
84: #endif
86: PetscFunctionBegin;
87: PetscCall(VecGetDM(v, &dm));
88: PetscCheck(dm, PetscObjectComm((PetscObject)v), PETSC_ERR_ARG_WRONG, "Vector not generated from a DM");
89: #if defined(PETSC_HAVE_HDF5)
90: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
91: if (ishdf5) {
92: PetscCall(VecView_Swarm_HDF5_Internal(v, viewer));
93: PetscFunctionReturn(PETSC_SUCCESS);
94: }
95: #endif
96: PetscCall(VecViewNative(v, viewer));
97: PetscFunctionReturn(PETSC_SUCCESS);
98: }
100: /*@C
101: DMSwarmVectorGetField - Gets the fields from which to define a `Vec` object
102: when `DMCreateLocalVector()`, or `DMCreateGlobalVector()` is called
104: Not collective
106: Input Parameter:
107: . sw - a `DMSWARM`
109: Output Parameters:
110: + Nf - the number of fields
111: - fieldnames - the textual name given to each registered field, or NULL if it has not been set
113: Level: beginner
115: .seealso: `DM`, `DMSWARM`, `DMSwarmVectorDefineField()`, `DMSwarmRegisterPetscDatatypeField()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`
116: @*/
PetscErrorCode DMSwarmVectorGetField(DM sw, PetscInt *Nf, const char **fieldnames[])
{
  DMSwarmCellDM celldm;

  PetscFunctionBegin;
  /* The vector field list is owned by the active cell DM; forward the query */
  PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
  PetscCall(DMSwarmCellDMGetFields(celldm, Nf, fieldnames));
  PetscFunctionReturn(PETSC_SUCCESS);
}
128: /*@
129: DMSwarmVectorDefineField - Sets the field from which to define a `Vec` object
130: when `DMCreateLocalVector()`, or `DMCreateGlobalVector()` is called
132: Collective
134: Input Parameters:
135: + dm - a `DMSWARM`
136: - fieldname - the textual name given to each registered field
138: Level: beginner
140: Notes:
141: The field with name `fieldname` must be defined as having a data type of `PetscScalar`.
143: This function must be called prior to calling `DMCreateLocalVector()`, `DMCreateGlobalVector()`.
144: Multiple calls to `DMSwarmVectorDefineField()` are permitted.
146: .seealso: `DM`, `DMSWARM`, `DMSwarmVectorDefineFields()`, `DMSwarmVectorGetField()`, `DMSwarmRegisterPetscDatatypeField()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`
147: @*/
PetscErrorCode DMSwarmVectorDefineField(DM dm, const char fieldname[])
{
  PetscFunctionBegin;
  /* Convenience wrapper: define the vector layout from a single field */
  PetscCall(DMSwarmVectorDefineFields(dm, 1, &fieldname));
  PetscFunctionReturn(PETSC_SUCCESS);
}
155: /*@C
156: DMSwarmVectorDefineFields - Sets the fields from which to define a `Vec` object
157: when `DMCreateLocalVector()`, or `DMCreateGlobalVector()` is called
159: Collective, No Fortran support
161: Input Parameters:
162: + sw - a `DMSWARM`
163: . Nf - the number of fields
164: - fieldnames - the textual name given to each registered field
166: Level: beginner
168: Notes:
169: Each field with name in `fieldnames` must be defined as having a data type of `PetscScalar`.
171: This function must be called prior to calling `DMCreateLocalVector()`, `DMCreateGlobalVector()`.
172: Multiple calls to `DMSwarmVectorDefineField()` are permitted.
174: .seealso: `DM`, `DMSWARM`, `DMSwarmVectorDefineField()`, `DMSwarmVectorGetField()`, `DMSwarmRegisterPetscDatatypeField()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`
175: @*/
176: PetscErrorCode DMSwarmVectorDefineFields(DM sw, PetscInt Nf, const char *fieldnames[])
177: {
178: DM_Swarm *swarm = (DM_Swarm *)sw->data;
179: DMSwarmCellDM celldm;
181: PetscFunctionBegin;
183: if (fieldnames) PetscAssertPointer(fieldnames, 3);
184: if (!swarm->issetup) PetscCall(DMSetUp(sw));
185: PetscCheck(Nf >= 0, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_OUTOFRANGE, "Number of fields must be non-negative, not %" PetscInt_FMT, Nf);
186: // Create a dummy cell DM if none has been specified (I think we should not support this mode)
187: if (!swarm->activeCellDM) {
188: DM dm;
189: DMSwarmCellDM celldm;
191: PetscCall(DMCreate(PetscObjectComm((PetscObject)sw), &dm));
192: PetscCall(DMSetType(dm, DMSHELL));
193: PetscCall(PetscObjectSetName((PetscObject)dm, "dummy"));
194: PetscCall(DMSwarmCellDMCreate(dm, 0, NULL, 0, NULL, &celldm));
195: PetscCall(DMDestroy(&dm));
196: PetscCall(DMSwarmAddCellDM(sw, celldm));
197: PetscCall(DMSwarmCellDMDestroy(&celldm));
198: PetscCall(DMSwarmSetCellDMActive(sw, "dummy"));
199: }
200: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
201: for (PetscInt f = 0; f < celldm->Nf; ++f) PetscCall(PetscFree(celldm->dmFields[f]));
202: PetscCall(PetscFree(celldm->dmFields));
204: celldm->Nf = Nf;
205: PetscCall(PetscMalloc1(Nf, &celldm->dmFields));
206: for (PetscInt f = 0; f < Nf; ++f) {
207: PetscDataType type;
209: // Check all fields are of type PETSC_REAL or PETSC_SCALAR
210: PetscCall(DMSwarmGetFieldInfo(sw, fieldnames[f], NULL, &type));
211: PetscCheck(type == PETSC_REAL, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Only valid for PETSC_REAL");
212: PetscCall(PetscStrallocpy(fieldnames[f], (char **)&celldm->dmFields[f]));
213: }
214: PetscFunctionReturn(PETSC_SUCCESS);
215: }
217: /* requires DMSwarmDefineFieldVector has been called */
218: static PetscErrorCode DMCreateGlobalVector_Swarm(DM sw, Vec *vec)
219: {
220: DM_Swarm *swarm = (DM_Swarm *)sw->data;
221: DMSwarmCellDM celldm;
222: Vec x;
223: char name[PETSC_MAX_PATH_LEN];
224: PetscInt bs = 0, n;
226: PetscFunctionBegin;
227: if (!swarm->issetup) PetscCall(DMSetUp(sw));
228: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
229: PetscCheck(celldm->Nf, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "Active cell DM does not define any fields");
230: PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &n, NULL, NULL));
232: PetscCall(PetscStrncpy(name, "DMSwarmField", PETSC_MAX_PATH_LEN));
233: for (PetscInt f = 0; f < celldm->Nf; ++f) {
234: PetscInt fbs;
235: PetscCall(PetscStrlcat(name, "_", PETSC_MAX_PATH_LEN));
236: PetscCall(PetscStrlcat(name, celldm->dmFields[f], PETSC_MAX_PATH_LEN));
237: PetscCall(DMSwarmGetFieldInfo(sw, celldm->dmFields[f], &fbs, NULL));
238: bs += fbs;
239: }
240: PetscCall(VecCreate(PetscObjectComm((PetscObject)sw), &x));
241: PetscCall(PetscObjectSetName((PetscObject)x, name));
242: PetscCall(VecSetSizes(x, n * bs, PETSC_DETERMINE));
243: PetscCall(VecSetBlockSize(x, bs));
244: PetscCall(VecSetDM(x, sw));
245: PetscCall(VecSetFromOptions(x));
246: PetscCall(VecSetOperation(x, VECOP_VIEW, (PetscErrorCodeFn *)VecView_Swarm));
247: *vec = x;
248: PetscFunctionReturn(PETSC_SUCCESS);
249: }
251: /* requires DMSwarmDefineFieldVector has been called */
252: static PetscErrorCode DMCreateLocalVector_Swarm(DM sw, Vec *vec)
253: {
254: DM_Swarm *swarm = (DM_Swarm *)sw->data;
255: DMSwarmCellDM celldm;
256: Vec x;
257: char name[PETSC_MAX_PATH_LEN];
258: PetscInt bs = 0, n;
260: PetscFunctionBegin;
261: if (!swarm->issetup) PetscCall(DMSetUp(sw));
262: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
263: PetscCheck(celldm->Nf, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "Active cell DM does not define any fields");
264: PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &n, NULL, NULL));
266: PetscCall(PetscStrncpy(name, "DMSwarmField", PETSC_MAX_PATH_LEN));
267: for (PetscInt f = 0; f < celldm->Nf; ++f) {
268: PetscInt fbs;
269: PetscCall(PetscStrlcat(name, "_", PETSC_MAX_PATH_LEN));
270: PetscCall(PetscStrlcat(name, celldm->dmFields[f], PETSC_MAX_PATH_LEN));
271: PetscCall(DMSwarmGetFieldInfo(sw, celldm->dmFields[f], &fbs, NULL));
272: bs += fbs;
273: }
274: PetscCall(VecCreate(PETSC_COMM_SELF, &x));
275: PetscCall(PetscObjectSetName((PetscObject)x, name));
276: PetscCall(VecSetSizes(x, n * bs, PETSC_DETERMINE));
277: PetscCall(VecSetBlockSize(x, bs));
278: PetscCall(VecSetDM(x, sw));
279: PetscCall(VecSetFromOptions(x));
280: *vec = x;
281: PetscFunctionReturn(PETSC_SUCCESS);
282: }
/* Undo DMSwarmCreateVectorFromField_Private(): detach the in-place field array
   from the Vec and destroy it. The composed SwarmDataFieldId guard must match
   the named field, and the swarm size must be unchanged since creation,
   otherwise the placed array pointer may be stale. */
static PetscErrorCode DMSwarmDestroyVectorFromField_Private(DM dm, const char fieldname[], Vec *vec)
{
  DM_Swarm        *swarm = (DM_Swarm *)dm->data;
  DMSwarmDataField gfield;
  PetscInt         bs, nlocal, fid = -1, cfid = -2;
  PetscBool        flg;

  PetscFunctionBegin;
  /* check vector is an inplace array */
  PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldIdByName(swarm->db, fieldname, &fid));
  PetscCall(PetscObjectComposedDataGetInt((PetscObject)*vec, SwarmDataFieldId, cfid, flg));
  (void)flg; /* avoid compiler warning */
  PetscCheck(cfid == fid, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "Vector being destroyed was not created from DMSwarm field(%s)! %" PetscInt_FMT " != %" PetscInt_FMT, fieldname, cfid, fid);
  PetscCall(VecGetLocalSize(*vec, &nlocal));
  PetscCall(VecGetBlockSize(*vec, &bs));
  PetscCheck(nlocal / bs == swarm->db->L, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "DMSwarm sizes have changed since vector was created - cannot ensure pointers are valid");
  /* Release the field access taken in the create routine, then detach the array */
  PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
  PetscCall(DMSwarmDataFieldRestoreAccess(gfield));
  PetscCall(VecResetArray(*vec));
  PetscCall(VecDestroy(vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
307: static PetscErrorCode DMSwarmCreateVectorFromField_Private(DM dm, const char fieldname[], MPI_Comm comm, Vec *vec)
308: {
309: DM_Swarm *swarm = (DM_Swarm *)dm->data;
310: PetscDataType type;
311: PetscScalar *array;
312: PetscInt bs, n, fid;
313: char name[PETSC_MAX_PATH_LEN];
314: PetscMPIInt size;
315: PetscBool iscuda, iskokkos, iship;
317: PetscFunctionBegin;
318: if (!swarm->issetup) PetscCall(DMSetUp(dm));
319: PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &n, NULL, NULL));
320: PetscCall(DMSwarmGetField(dm, fieldname, &bs, &type, (void **)&array));
321: PetscCheck(type == PETSC_REAL, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Only valid for PETSC_REAL");
323: PetscCallMPI(MPI_Comm_size(comm, &size));
324: PetscCall(PetscStrcmp(dm->vectype, VECKOKKOS, &iskokkos));
325: PetscCall(PetscStrcmp(dm->vectype, VECCUDA, &iscuda));
326: PetscCall(PetscStrcmp(dm->vectype, VECHIP, &iship));
327: PetscCall(VecCreate(comm, vec));
328: PetscCall(VecSetSizes(*vec, n * bs, PETSC_DETERMINE));
329: PetscCall(VecSetBlockSize(*vec, bs));
330: if (iskokkos) PetscCall(VecSetType(*vec, VECKOKKOS));
331: else if (iscuda) PetscCall(VecSetType(*vec, VECCUDA));
332: else if (iship) PetscCall(VecSetType(*vec, VECHIP));
333: else PetscCall(VecSetType(*vec, VECSTANDARD));
334: PetscCall(VecPlaceArray(*vec, array));
336: PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN - 1, "DMSwarmSharedField_%s", fieldname));
337: PetscCall(PetscObjectSetName((PetscObject)*vec, name));
339: /* Set guard */
340: PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldIdByName(swarm->db, fieldname, &fid));
341: PetscCall(PetscObjectComposedDataSetInt((PetscObject)*vec, SwarmDataFieldId, fid));
343: PetscCall(VecSetDM(*vec, dm));
344: PetscCall(VecSetOperation(*vec, VECOP_VIEW, (PetscErrorCodeFn *)VecView_Swarm));
345: PetscFunctionReturn(PETSC_SUCCESS);
346: }
/* Copy vector data back into the swarm fields it was built from, then destroy the Vec.

   Inverse of DMSwarmCreateVectorFromFields_Private(): each particle's interleaved
   block is scattered back into the individual field arrays. The composed id (sum
   of field ids) guards against destroying a vector built from different fields,
   and the swarm size must be unchanged since creation. */
static PetscErrorCode DMSwarmDestroyVectorFromFields_Private(DM sw, PetscInt Nf, const char *fieldnames[], Vec *vec)
{
  DM_Swarm          *swarm = (DM_Swarm *)sw->data;
  const PetscScalar *array;
  PetscInt           bs, n, id = 0, cid = -2;
  PetscBool          flg;

  PetscFunctionBegin;
  /* Recompute the guard id as the sum of the field ids */
  for (PetscInt f = 0; f < Nf; ++f) {
    PetscInt fid;

    PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldIdByName(swarm->db, fieldnames[f], &fid));
    id += fid;
  }
  PetscCall(PetscObjectComposedDataGetInt((PetscObject)*vec, SwarmDataFieldId, cid, flg));
  (void)flg; /* avoid compiler warning */
  PetscCheck(cid == id, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "Vector being destroyed was not created from DMSwarm field(%s)! %" PetscInt_FMT " != %" PetscInt_FMT, fieldnames[0], cid, id);
  PetscCall(VecGetLocalSize(*vec, &n));
  PetscCall(VecGetBlockSize(*vec, &bs));
  n /= bs; /* number of local particles */
  PetscCheck(n == swarm->db->L, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "DMSwarm sizes have changed since vector was created - cannot ensure pointers are valid");
  PetscCall(VecGetArrayRead(*vec, &array));
  /* Scatter block segment [off, off+fbs) of every particle back into field f */
  for (PetscInt f = 0, off = 0; f < Nf; ++f) {
    PetscScalar  *farray;
    PetscDataType ftype;
    PetscInt      fbs;

    PetscCall(DMSwarmGetField(sw, fieldnames[f], &fbs, &ftype, (void **)&farray));
    PetscCheck(off + fbs <= bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid blocksize %" PetscInt_FMT " < %" PetscInt_FMT, bs, off + fbs);
    for (PetscInt i = 0; i < n; ++i) {
      for (PetscInt b = 0; b < fbs; ++b) farray[i * fbs + b] = array[i * bs + off + b];
    }
    off += fbs;
    PetscCall(DMSwarmRestoreField(sw, fieldnames[f], &fbs, &ftype, (void **)&farray));
  }
  PetscCall(VecRestoreArrayRead(*vec, &array));
  PetscCall(VecDestroy(vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Create a Vec by gathering (copying) the values of several swarm fields,
   interleaved per particle: block = [f0 components..., f1 components, ...].

   All fields must have data type PETSC_REAL. Unlike the single-field variant,
   the data is copied rather than aliased; DMSwarmDestroyVectorFromFields_Private()
   copies it back. The sum of the field ids is composed on the Vec as a guard. */
static PetscErrorCode DMSwarmCreateVectorFromFields_Private(DM sw, PetscInt Nf, const char *fieldnames[], MPI_Comm comm, Vec *vec)
{
  DM_Swarm    *swarm = (DM_Swarm *)sw->data;
  PetscScalar *array;
  PetscInt     n, bs = 0, id = 0;
  char         name[PETSC_MAX_PATH_LEN];

  PetscFunctionBegin;
  if (!swarm->issetup) PetscCall(DMSetUp(sw));
  PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &n, NULL, NULL));
  /* Total block size is the sum of the per-field block sizes */
  for (PetscInt f = 0; f < Nf; ++f) {
    PetscDataType ftype;
    PetscInt      fbs;

    PetscCall(DMSwarmGetFieldInfo(sw, fieldnames[f], &fbs, &ftype));
    PetscCheck(ftype == PETSC_REAL, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Only valid for PETSC_REAL");
    bs += fbs;
  }

  PetscCall(VecCreate(comm, vec));
  PetscCall(VecSetSizes(*vec, n * bs, PETSC_DETERMINE));
  PetscCall(VecSetBlockSize(*vec, bs));
  PetscCall(VecSetType(*vec, sw->vectype));

  /* Gather each field into its segment [off, off+fbs) of every particle block */
  PetscCall(VecGetArrayWrite(*vec, &array));
  for (PetscInt f = 0, off = 0; f < Nf; ++f) {
    PetscScalar  *farray;
    PetscDataType ftype;
    PetscInt      fbs;

    PetscCall(DMSwarmGetField(sw, fieldnames[f], &fbs, &ftype, (void **)&farray));
    for (PetscInt i = 0; i < n; ++i) {
      for (PetscInt b = 0; b < fbs; ++b) array[i * bs + off + b] = farray[i * fbs + b];
    }
    off += fbs;
    PetscCall(DMSwarmRestoreField(sw, fieldnames[f], &fbs, &ftype, (void **)&farray));
  }
  PetscCall(VecRestoreArrayWrite(*vec, &array));

  /* Name the vector "DMSwarmField_<f0>_<f1>..." */
  PetscCall(PetscStrncpy(name, "DMSwarmField", PETSC_MAX_PATH_LEN));
  for (PetscInt f = 0; f < Nf; ++f) {
    PetscCall(PetscStrlcat(name, "_", PETSC_MAX_PATH_LEN));
    PetscCall(PetscStrlcat(name, fieldnames[f], PETSC_MAX_PATH_LEN));
  }
  PetscCall(PetscObjectSetName((PetscObject)*vec, name));

  /* Guard: compose the sum of the field ids, checked at destroy time */
  for (PetscInt f = 0; f < Nf; ++f) {
    PetscInt fid;

    PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldIdByName(swarm->db, fieldnames[f], &fid));
    id += fid;
  }
  PetscCall(PetscObjectComposedDataSetInt((PetscObject)*vec, SwarmDataFieldId, id));

  PetscCall(VecSetDM(*vec, sw));
  PetscCall(VecSetOperation(*vec, VECOP_VIEW, (PetscErrorCodeFn *)VecView_Swarm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
447: /*@
448: DMSwarmPreallocateMassMatrix - Preallocate particle mass matrix between a `DMSWARM` and a `DMPLEX`
450: Collective
  Input Parameters:
+ dmc  - The `DMSWARM` object
. dmf  - The `DMPLEX` object
. mass - The mass matrix to preallocate
- ctx  - The user context

  Output Parameters:
+ rStart - The starting row index for this process
- maxC   - The maximum number of particles (columns) in any cell
460: Level: developer
462: .seealso: [](ch_unstructured), `DM`, `DMSWARM`, `DMPLEX`, `DMCreateMassMatrix()`, `DMSwarmFillMassMatrix()`
463: @*/
PetscErrorCode DMSwarmPreallocateMassMatrix(DM dmc, DM dmf, Mat mass, PetscInt *rStart, PetscInt *maxC, PetscCtx ctx)
{
  MPI_Comm     comm;
  PetscHSetIJ  ht; /* set of (row, col) pairs already counted, to avoid double counting */
  PetscDS      ds;
  PetscSection fsection, globalFSection;
  PetscLayout  rLayout, colLayout;
  PetscInt    *dnz, *onz; /* per-local-row diagonal / off-diagonal nonzero counts */
  PetscInt     cStart, cEnd, locRows, locCols, colStart, colEnd, Nf, totNc = 0;

  PetscFunctionBegin;
  PetscAssertPointer(rStart, 4);
  PetscAssertPointer(maxC, 5);
  /* NOTE(review): *maxC is only updated via PetscMax() below, so the caller appears
     to be responsible for initializing it (e.g. to 0) -- confirm */
  PetscCall(PetscObjectGetComm((PetscObject)mass, &comm));
  PetscCall(DMPlexGetHeightStratum(dmf, 0, &cStart, &cEnd));
  PetscCall(DMGetLocalSection(dmf, &fsection));
  PetscCall(DMGetGlobalSection(dmf, &globalFSection));
  PetscCall(DMGetDS(dmf, &ds));
  PetscCall(PetscDSGetNumFields(ds, &Nf));
  PetscCall(MatGetLocalSize(mass, &locRows, &locCols));
  PetscCall(PetscCalloc2(locRows, &dnz, locRows, &onz));
  PetscCall(PetscHSetIJCreate(&ht));

  /* Determine the FEM (column) ownership range */
  PetscCall(PetscLayoutCreate(comm, &colLayout));
  PetscCall(PetscLayoutSetLocalSize(colLayout, locCols));
  PetscCall(PetscLayoutSetBlockSize(colLayout, 1));
  PetscCall(PetscLayoutSetUp(colLayout));
  PetscCall(PetscLayoutGetRange(colLayout, &colStart, &colEnd));
  PetscCall(PetscLayoutDestroy(&colLayout));

  /* Determine the particle (row) ownership range; first owned row returned in *rStart */
  PetscCall(PetscLayoutCreate(comm, &rLayout));
  PetscCall(PetscLayoutSetLocalSize(rLayout, locRows));
  PetscCall(PetscLayoutSetBlockSize(rLayout, 1));
  PetscCall(PetscLayoutSetUp(rLayout));
  PetscCall(PetscLayoutGetRange(rLayout, rStart, NULL));
  PetscCall(PetscLayoutDestroy(&rLayout));

  /* Total number of components summed over all fields */
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscObject  obj;
    PetscClassId id;
    PetscInt     Nc;

    PetscCall(PetscDSGetDiscretization(ds, field, &obj));
    PetscCall(PetscObjectGetClassId(obj, &id));
    if (id == PETSCFE_CLASSID) PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    else PetscCall(PetscFVGetNumComponents((PetscFV)obj, &Nc));
    totNc += Nc;
  }
  PetscCall(DMSwarmSortGetAccess(dmc));
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscObject  obj;
    PetscClassId id;
    PetscInt     Nc;

    PetscCall(PetscDSGetDiscretization(ds, field, &obj));
    PetscCall(PetscObjectGetClassId(obj, &id));
    if (id == PETSCFE_CLASSID) PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    else PetscCall(PetscFVGetNumComponents((PetscFV)obj, &Nc));

    for (PetscInt cell = cStart; cell < cEnd; ++cell) {
      PetscInt *findices, *cindices; /* fine is vertices, coarse is particles */
      PetscInt  numFIndices, numCIndices;

      PetscCall(DMPlexGetClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
      PetscCall(DMSwarmSortGetPointsPerCell(dmc, cell, &numCIndices, &cindices));
      /* Track the largest number of particles found in any cell */
      *maxC = PetscMax(*maxC, numCIndices);
      {
        PetscHashIJKey key;
        PetscBool      missing;
        for (PetscInt i = 0; i < numFIndices; ++i) {
          key.j = findices[i]; /* global column (from Plex) */
          if (key.j >= 0) {
            /* Get indices for coarse elements */
            for (PetscInt j = 0; j < numCIndices; ++j) {
              for (PetscInt c = 0; c < Nc; ++c) {
                // TODO Need field offset on particle here
                key.i = cindices[j] * totNc + c + *rStart; /* global row (from Swarm) */
                if (key.i < 0) continue;
                /* Count each (row, col) pair exactly once; the hash set de-duplicates */
                PetscCall(PetscHSetIJQueryAdd(ht, key, &missing));
                PetscCheck(missing, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "Set new value at %" PetscInt_FMT ",%" PetscInt_FMT, key.i, key.j);
                if ((key.j >= colStart) && (key.j < colEnd)) ++dnz[key.i - *rStart];
                else ++onz[key.i - *rStart];
              }
            }
          }
        }
        PetscCall(DMSwarmSortRestorePointsPerCell(dmc, cell, &numCIndices, &cindices));
      }
      PetscCall(DMPlexRestoreClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
    }
  }
  PetscCall(DMSwarmSortRestoreAccess(dmc));
  PetscCall(PetscHSetIJDestroy(&ht));
  PetscCall(MatXAIJSetPreallocation(mass, 1, dnz, onz, NULL, NULL));
  PetscCall(MatSetOption(mass, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  PetscCall(PetscFree2(dnz, onz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
566: /*@
567: DMSwarmFillMassMatrix - Assemble the particle mass matrix between a `DMSWARM` and a `DMPLEX`
569: Collective
571: Input Parameters:
572: + dmc - The `DMSWARM` object
573: . dmf - The `DMPLEX` object
574: . mass - The mass matrix to fill up
575: . rStart - The starting row index for this process
576: . maxC - The maximum number of columns per row
577: . useDeltaFunction - Flag to use a delta function for the particle shape function
578: . Nfc - The number of swarm coordinate fields
579: . bs - The block size for each swarm coordinate field
580: . coordVals - The values for each particle for each swarm coordinate field
581: - ctx - The user context
583: Level: developer
585: .seealso: [](ch_unstructured), `DM`, `DMSWARM`, `DMPLEX`, `DMCreateMassMatrix()`, `DMSwarmPreallocateMassMatrix()`
586: @*/
PetscErrorCode DMSwarmFillMassMatrix(DM dmc, DM dmf, Mat mass, PetscInt rStart, PetscInt maxC, PetscBool useDeltaFunction, PetscInt Nfc, const PetscInt bs[], PetscReal *coordVals[], PetscCtx ctx)
{
  const char  *name = "Mass Matrix";
  PetscDS      ds;
  PetscSection fsection, globalFSection;
  PetscInt     dim, cStart, cEnd, Nf, totDim, totNc = 0, *rowIDXs;
  PetscReal   *xi, *v0, *J, *invJ, detJ = 1.0, v0ref[3] = {-1.0, -1.0, -1.0};
  PetscScalar *elemMat;

  PetscFunctionBegin;
  PetscCall(DMGetCoordinateDim(dmf, &dim));
  PetscCall(DMPlexGetHeightStratum(dmf, 0, &cStart, &cEnd));
  PetscCall(DMGetLocalSection(dmf, &fsection));
  PetscCall(DMGetGlobalSection(dmf, &globalFSection));
  PetscCall(DMGetDS(dmf, &ds));
  PetscCall(PetscDSGetNumFields(ds, &Nf));
  PetscCall(PetscDSGetTotalDimension(ds, &totDim));
  /* Total number of components summed over all fields */
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscObject  obj;
    PetscClassId id;
    PetscInt     Nc;

    PetscCall(PetscDSGetDiscretization(ds, field, &obj));
    PetscCall(PetscObjectGetClassId(obj, &id));
    if (id == PETSCFE_CLASSID) PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    else PetscCall(PetscFVGetNumComponents((PetscFV)obj, &Nc));
    totNc += Nc;
  }

  /* Workspace sized by the maximum particles per cell (maxC) */
  PetscCall(PetscMalloc3(maxC * totNc * totDim, &elemMat, maxC * totNc, &rowIDXs, maxC * dim, &xi));
  PetscCall(PetscMalloc3(dim, &v0, dim * dim, &J, dim * dim, &invJ));
  PetscCall(DMSwarmSortGetAccess(dmc));
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscTabulation Tcoarse;
    PetscObject     obj;
    PetscClassId    id;
    PetscInt        Nc;

    PetscCall(PetscDSGetDiscretization(ds, field, &obj));
    PetscCall(PetscObjectGetClassId(obj, &id));
    if (id == PETSCFE_CLASSID) PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    else PetscCall(PetscFVGetNumComponents((PetscFV)obj, &Nc));

    for (PetscInt cell = cStart; cell < cEnd; ++cell) {
      PetscInt *findices, *cindices;
      PetscInt  numFIndices, numCIndices;

      /* TODO: Use DMField instead of assuming affine */
      PetscCall(DMPlexComputeCellGeometryFEM(dmf, cell, NULL, v0, J, invJ, &detJ));
      PetscCall(DMPlexGetClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
      PetscCall(DMSwarmSortGetPointsPerCell(dmc, cell, &numCIndices, &cindices));
      /* Map each particle's real coordinates into reference coordinates xi */
      for (PetscInt j = 0; j < numCIndices; ++j) {
        PetscReal xr[8]; /* NOTE(review): fixed capacity; assumes summed coordinate block size (== dim, checked below) is <= 8 -- confirm */
        PetscInt  off = 0;

        for (PetscInt i = 0; i < Nfc; ++i) {
          for (PetscInt b = 0; b < bs[i]; ++b, ++off) xr[off] = coordVals[i][cindices[j] * bs[i] + b];
        }
        PetscCheck(off == dim, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "The total block size of coordinates is %" PetscInt_FMT " != %" PetscInt_FMT " the DM coordinate dimension", off, dim);
        CoordinatesRealToRef(dim, dim, v0ref, v0, invJ, xr, &xi[j * dim]);
      }
      if (id == PETSCFE_CLASSID) PetscCall(PetscFECreateTabulation((PetscFE)obj, 1, numCIndices, xi, 0, &Tcoarse));
      else PetscCall(PetscFVCreateTabulation((PetscFV)obj, 1, numCIndices, xi, 0, &Tcoarse));
      /* Get elemMat entries by multiplying by weight */
      PetscCall(PetscArrayzero(elemMat, numCIndices * Nc * totDim));
      for (PetscInt i = 0; i < numFIndices / Nc; ++i) {
        for (PetscInt j = 0; j < numCIndices; ++j) {
          for (PetscInt c = 0; c < Nc; ++c) {
            // TODO Need field offset on particle and field here
            /* B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c */
            elemMat[(j * totNc + c) * numFIndices + i * Nc + c] += Tcoarse->T[0][(j * numFIndices + i * Nc + c) * Nc + c] * (useDeltaFunction ? 1.0 : detJ);
          }
        }
      }
      /* Global row indices for the particles in this cell */
      for (PetscInt j = 0; j < numCIndices; ++j)
        // TODO Need field offset on particle here
        for (PetscInt c = 0; c < Nc; ++c) rowIDXs[j * Nc + c] = cindices[j] * totNc + c + rStart;
      if (0) PetscCall(DMPrintCellMatrix(cell, name, numCIndices * Nc, numFIndices, elemMat));
      PetscCall(MatSetValues(mass, numCIndices * Nc, rowIDXs, numFIndices, findices, elemMat, ADD_VALUES));
      PetscCall(DMSwarmSortRestorePointsPerCell(dmc, cell, &numCIndices, &cindices));
      PetscCall(DMPlexRestoreClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
      PetscCall(PetscTabulationDestroy(&Tcoarse));
    }
  }
  PetscCall(DMSwarmSortRestoreAccess(dmc));
  PetscCall(PetscFree3(elemMat, rowIDXs, xi));
  PetscCall(PetscFree3(v0, J, invJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}
680: /* This creates a "mass matrix" between a finite element and particle space. If a finite element interpolant is given by
682: \hat f = \sum_i f_i \phi_i
684: and a particle function is given by
686: f = \sum_i w_i \delta(x - x_i)
688: then we want to require that
690: M \hat f = M_p f
692: where the particle mass matrix is given by
694: (M_p)_{ij} = \int \phi_i \delta(x - x_j)
The way Dave May does particles, they amount to quadrature weights rather than delta functions, so |J| appears in
his integral. We allow this with the boolean flag.
698: */
699: static PetscErrorCode DMSwarmComputeMassMatrix_Private(DM dmc, DM dmf, Mat mass, PetscBool useDeltaFunction, PetscCtx ctx)
700: {
701: DMSwarmCellDM celldm;
702: PetscInt rStart, maxC = 0;
703: PetscInt Nfc;
704: const char **coordFields;
705: PetscReal **coordVals;
706: PetscInt *bs;
708: PetscFunctionBegin;
709: PetscCall(DMSwarmPreallocateMassMatrix(dmc, dmf, mass, &rStart, &maxC, ctx));
711: PetscCall(DMSwarmGetCellDMActive(dmc, &celldm));
712: PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
713: PetscCall(PetscMalloc2(Nfc, &coordVals, Nfc, &bs));
714: for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmGetField(dmc, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
715: PetscCall(DMSwarmFillMassMatrix(dmc, dmf, mass, rStart, maxC, useDeltaFunction, Nfc, bs, coordVals, ctx));
716: for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmRestoreField(dmc, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
717: PetscCall(PetscFree2(coordVals, bs));
719: PetscCall(MatAssemblyBegin(mass, MAT_FINAL_ASSEMBLY));
720: PetscCall(MatAssemblyEnd(mass, MAT_FINAL_ASSEMBLY));
721: PetscFunctionReturn(PETSC_SUCCESS);
722: }
724: /* Returns empty matrix for use with SNES FD */
725: static PetscErrorCode DMCreateMatrix_Swarm(DM sw, Mat *m)
726: {
727: Vec field;
728: PetscInt size;
730: PetscFunctionBegin;
731: PetscCall(DMGetGlobalVector(sw, &field));
732: PetscCall(VecGetLocalSize(field, &size));
733: PetscCall(DMRestoreGlobalVector(sw, &field));
734: PetscCall(MatCreate(PETSC_COMM_WORLD, m));
735: PetscCall(MatSetFromOptions(*m));
736: PetscCall(MatSetSizes(*m, PETSC_DECIDE, PETSC_DECIDE, size, size));
737: PetscCall(MatSeqAIJSetPreallocation(*m, 1, NULL));
738: PetscCall(MatZeroEntries(*m));
739: PetscCall(MatAssemblyBegin(*m, MAT_FINAL_ASSEMBLY));
740: PetscCall(MatAssemblyEnd(*m, MAT_FINAL_ASSEMBLY));
741: PetscCall(MatShift(*m, 1.0));
742: PetscCall(MatSetDM(*m, sw));
743: PetscFunctionReturn(PETSC_SUCCESS);
744: }
/* FEM cols, Particle rows */
/* Builds the rectangular particle-to-FEM mass matrix: one row block per local particle,
   one column per constrained FEM dof. Assembly is delegated to DMSwarmComputeMassMatrix_Private. */
static PetscErrorCode DMCreateMassMatrix_Swarm(DM dmCoarse, DM dmFine, Mat *mass)
{
  DMSwarmCellDM celldm;
  PetscSection  gsf;
  PetscInt      m, n, Np, bs;
  void         *ctx;

  PetscFunctionBegin;
  PetscCall(DMSwarmGetCellDMActive(dmCoarse, &celldm));
  PetscCheck(celldm->Nf, PetscObjectComm((PetscObject)dmCoarse), PETSC_ERR_USER, "Active cell DM does not define any fields");
  /* m: local column count = constrained storage of the FEM global section */
  PetscCall(DMGetGlobalSection(dmFine, &gsf));
  PetscCall(PetscSectionGetConstrainedStorageSize(gsf, &m));
  /* n: local row count = (number of local particles) x (cell-DM block size) */
  PetscCall(DMSwarmGetLocalSize(dmCoarse, &Np));
  PetscCall(DMSwarmCellDMGetBlockSize(celldm, dmCoarse, &bs));
  n = Np * bs;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dmCoarse), mass));
  PetscCall(MatSetSizes(*mass, n, m, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(*mass, dmCoarse->mattype));
  /* The FEM DM's application context is forwarded to the assembly routine */
  PetscCall(DMGetApplicationContext(dmFine, &ctx));
  PetscCall(DMSwarmComputeMassMatrix_Private(dmCoarse, dmFine, *mass, PETSC_TRUE, ctx));
  PetscCall(MatViewFromOptions(*mass, NULL, "-mass_mat_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Assembles the block diagonal of M^T M, where M is the particle mass matrix.
   Rows and columns are both particle dofs; per cell, the element matrix M_e (particles x FEM basis)
   is formed from a tabulation at particle reference coordinates and squared with BLAS gemm.
   Off-diagonal (cell-adjacency) coupling is written but disabled (#if 0).
   'ctx' is accepted for interface symmetry but not read here. */
static PetscErrorCode DMSwarmComputeMassMatrixSquare_Private(DM dmc, DM dmf, Mat mass, PetscBool useDeltaFunction, PetscCtx ctx)
{
  const char   *name = "Mass Matrix Square";
  MPI_Comm      comm;
  DMSwarmCellDM celldm;
  PetscDS       prob;
  PetscSection  fsection, globalFSection;
  PetscHSetIJ   ht;
  PetscLayout   rLayout, colLayout;
  PetscInt     *dnz, *onz, *adj, depth, maxConeSize, maxSupportSize, maxAdjSize;
  PetscInt      locRows, locCols, rStart, colStart, colEnd, *rowIDXs;
  PetscReal    *xi, *v0, *J, *invJ, detJ = 1.0, v0ref[3] = {-1.0, -1.0, -1.0};
  PetscScalar  *elemMat, *elemMatSq;
  PetscInt      cdim, Nf, Nfc, cStart, cEnd, totDim, maxC = 0;
  const char  **coordFields;
  PetscReal   **coordVals;
  PetscInt     *bs;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mass, &comm));
  PetscCall(DMGetCoordinateDim(dmf, &cdim));
  PetscCall(DMGetDS(dmf, &prob));
  PetscCall(PetscDSGetNumFields(prob, &Nf));
  PetscCall(PetscDSGetTotalDimension(prob, &totDim));
  /* Scratch for affine cell geometry: origin v0, Jacobian J, and its inverse invJ */
  PetscCall(PetscMalloc3(cdim, &v0, cdim * cdim, &J, cdim * cdim, &invJ));
  PetscCall(DMGetLocalSection(dmf, &fsection));
  PetscCall(DMGetGlobalSection(dmf, &globalFSection));
  PetscCall(DMPlexGetHeightStratum(dmf, 0, &cStart, &cEnd));
  PetscCall(MatGetLocalSize(mass, &locRows, &locCols));
  PetscCall(DMSwarmGetCellDMActive(dmc, &celldm));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCall(PetscMalloc2(Nfc, &coordVals, Nfc, &bs));
  /* Temporary layouts only serve to translate local sizes into global [start, end) ranges */
  PetscCall(PetscLayoutCreate(comm, &colLayout));
  PetscCall(PetscLayoutSetLocalSize(colLayout, locCols));
  PetscCall(PetscLayoutSetBlockSize(colLayout, 1));
  PetscCall(PetscLayoutSetUp(colLayout));
  PetscCall(PetscLayoutGetRange(colLayout, &colStart, &colEnd));
  PetscCall(PetscLayoutDestroy(&colLayout));
  PetscCall(PetscLayoutCreate(comm, &rLayout));
  PetscCall(PetscLayoutSetLocalSize(rLayout, locRows));
  PetscCall(PetscLayoutSetBlockSize(rLayout, 1));
  PetscCall(PetscLayoutSetUp(rLayout));
  PetscCall(PetscLayoutGetRange(rLayout, &rStart, NULL));
  PetscCall(PetscLayoutDestroy(&rLayout));
  /* 'adj' and 'ht' are only consumed by the disabled off-diagonal code below */
  PetscCall(DMPlexGetDepth(dmf, &depth));
  PetscCall(DMPlexGetMaxSizes(dmf, &maxConeSize, &maxSupportSize));
  maxAdjSize = PetscPowInt(maxConeSize * maxSupportSize, depth);
  PetscCall(PetscMalloc1(maxAdjSize, &adj));
  PetscCall(PetscCalloc2(locRows, &dnz, locRows, &onz));
  PetscCall(PetscHSetIJCreate(&ht));
  /* Count nonzeros
     This is just FVM++, but we cannot use the Plex P0 allocation since unknowns in a cell will not be contiguous
  */
  PetscCall(DMSwarmSortGetAccess(dmc));
  for (PetscInt cell = cStart; cell < cEnd; ++cell) {
    PetscInt *cindices;
    PetscInt  numCIndices;
#if 0
    PetscInt adjSize = maxAdjSize, a, j;
#endif

    PetscCall(DMSwarmSortGetPointsPerCell(dmc, cell, &numCIndices, &cindices));
    maxC = PetscMax(maxC, numCIndices);
    /* Diagonal block: each particle in the cell couples to every particle in the same cell */
    for (PetscInt i = 0; i < numCIndices; ++i) dnz[cindices[i]] += numCIndices;
#if 0
    /* Off-diagonal blocks */
    PetscCall(DMPlexGetAdjacency(dmf, cell, &adjSize, &adj));
    for (a = 0; a < adjSize; ++a) {
      if (adj[a] >= cStart && adj[a] < cEnd && adj[a] != cell) {
        const PetscInt ncell = adj[a];
        PetscInt      *ncindices;
        PetscInt       numNCIndices;

        PetscCall(DMSwarmSortGetPointsPerCell(dmc, ncell, &numNCIndices, &ncindices));
        {
          PetscHashIJKey key;
          PetscBool      missing;

          for (i = 0; i < numCIndices; ++i) {
            key.i = cindices[i] + rStart; /* global rows (from Swarm) */
            if (key.i < 0) continue;
            for (j = 0; j < numNCIndices; ++j) {
              key.j = ncindices[j] + rStart; /* global column (from Swarm) */
              if (key.j < 0) continue;
              PetscCall(PetscHSetIJQueryAdd(ht, key, &missing));
              if (missing) {
                if ((key.j >= colStart) && (key.j < colEnd)) ++dnz[key.i - rStart];
                else ++onz[key.i - rStart];
              }
            }
          }
        }
        PetscCall(DMSwarmSortRestorePointsPerCell(dmc, ncell, &numNCIndices, &ncindices));
      }
    }
#endif
    PetscCall(DMSwarmSortRestorePointsPerCell(dmc, cell, &numCIndices, &cindices));
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  PetscCall(MatXAIJSetPreallocation(mass, 1, dnz, onz, NULL, NULL));
  PetscCall(MatSetOption(mass, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  PetscCall(PetscFree2(dnz, onz));
  /* Work buffers sized for the most populated cell */
  PetscCall(PetscMalloc4(maxC * totDim, &elemMat, maxC * maxC, &elemMatSq, maxC, &rowIDXs, maxC * cdim, &xi));
  /* Fill in values
     Each entry is a sum of terms \phi_i(x_p) \phi_i(x_q)
     Start just by producing block diagonal
     Could loop over adjacent cells
       Produce neighboring element matrix
       TODO Determine which columns and rows correspond to shared dual vector
       Do MatMatMult with rectangular matrices
       Insert block
  */
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscTabulation Tcoarse;
    PetscObject     obj;
    PetscInt        Nc;

    PetscCall(PetscDSGetDiscretization(prob, field, &obj));
    PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    PetscCheck(Nc == 1, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "Can only interpolate a scalar field from particles, Nc = %" PetscInt_FMT, Nc);
    for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmGetField(dmc, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
    for (PetscInt cell = cStart; cell < cEnd; ++cell) {
      PetscInt *findices, *cindices;
      PetscInt  numFIndices, numCIndices;

      /* TODO: Use DMField instead of assuming affine */
      PetscCall(DMPlexComputeCellGeometryFEM(dmf, cell, NULL, v0, J, invJ, &detJ));
      PetscCall(DMPlexGetClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
      PetscCall(DMSwarmSortGetPointsPerCell(dmc, cell, &numCIndices, &cindices));
      /* Gather each particle's coordinates (possibly split across fields) and map to reference space */
      for (PetscInt p = 0; p < numCIndices; ++p) {
        PetscReal xr[8];
        PetscInt  off = 0;

        for (PetscInt i = 0; i < Nfc; ++i) {
          for (PetscInt b = 0; b < bs[i]; ++b, ++off) xr[off] = coordVals[i][cindices[p] * bs[i] + b];
        }
        PetscCheck(off == cdim, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "The total block size of coordinates is %" PetscInt_FMT " != %" PetscInt_FMT " the DM coordinate dimension", off, cdim);
        CoordinatesRealToRef(cdim, cdim, v0ref, v0, invJ, xr, &xi[p * cdim]);
      }
      PetscCall(PetscFECreateTabulation((PetscFE)obj, 1, numCIndices, xi, 0, &Tcoarse));
      /* Get elemMat entries by multiplying by weight */
      PetscCall(PetscArrayzero(elemMat, numCIndices * totDim));
      for (PetscInt i = 0; i < numFIndices; ++i) {
        for (PetscInt p = 0; p < numCIndices; ++p) {
          for (PetscInt c = 0; c < Nc; ++c) {
            /* B[(p*pdim + i)*Nc + c] is the value at point p for basis function i and component c */
            elemMat[p * numFIndices + i] += Tcoarse->T[0][(p * numFIndices + i) * Nc + c] * (useDeltaFunction ? 1.0 : detJ);
          }
        }
      }
      PetscCall(PetscTabulationDestroy(&Tcoarse));
      for (PetscInt p = 0; p < numCIndices; ++p) rowIDXs[p] = cindices[p] + rStart;
      if (0) PetscCall(DMPrintCellMatrix(cell, name, 1, numCIndices, elemMat));
      /* Block diagonal: elemMatSq = elemMat * elemMat^T via gemm (M_e M_e^T over the cell's particles) */
      if (numCIndices) {
        PetscBLASInt blasn, blask;
        PetscScalar  one = 1.0, zero = 0.0;

        PetscCall(PetscBLASIntCast(numCIndices, &blasn));
        PetscCall(PetscBLASIntCast(numFIndices, &blask));
        PetscCallBLAS("BLASgemm", BLASgemm_("T", "N", &blasn, &blasn, &blask, &one, elemMat, &blask, elemMat, &blask, &zero, elemMatSq, &blasn));
      }
      PetscCall(MatSetValues(mass, numCIndices, rowIDXs, numCIndices, rowIDXs, elemMatSq, ADD_VALUES));
      /* TODO off-diagonal */
      PetscCall(DMSwarmSortRestorePointsPerCell(dmc, cell, &numCIndices, &cindices));
      PetscCall(DMPlexRestoreClosureIndices(dmf, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
    }
    for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmRestoreField(dmc, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
  }
  PetscCall(PetscFree4(elemMat, elemMatSq, rowIDXs, xi));
  PetscCall(PetscFree(adj));
  PetscCall(DMSwarmSortRestoreAccess(dmc));
  PetscCall(PetscFree3(v0, J, invJ));
  PetscCall(PetscFree2(coordVals, bs));
  PetscCall(MatAssemblyBegin(mass, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mass, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
957: /*@
958: DMSwarmCreateMassMatrixSquare - Creates the block-diagonal of the square, M^T_p M_p, of the particle mass matrix M_p
960: Collective
962: Input Parameters:
963: + dmCoarse - a `DMSWARM`
964: - dmFine - a `DMPLEX`
966: Output Parameter:
967: . mass - the square of the particle mass matrix
969: Level: advanced
971: Note:
972: We only compute the block diagonal since this provides a good preconditioner and is completely local. It would be possible in the
973: future to compute the full normal equations.
975: .seealso: `DM`, `DMSWARM`, `DMCreateMassMatrix()`
976: @*/
PetscErrorCode DMSwarmCreateMassMatrixSquare(DM dmCoarse, DM dmFine, Mat *mass)
{
  PetscInt n;
  void    *ctx;

  PetscFunctionBegin;
  /* Square matrix over particle dofs: n local rows and columns, one per local particle */
  PetscCall(DMSwarmGetLocalSize(dmCoarse, &n));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dmCoarse), mass));
  PetscCall(MatSetSizes(*mass, n, n, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(*mass, dmCoarse->mattype));
  /* Forward the FEM DM's application context (the assembly routine currently does not read it) */
  PetscCall(DMGetApplicationContext(dmFine, &ctx));
  PetscCall(DMSwarmComputeMassMatrixSquare_Private(dmCoarse, dmFine, *mass, PETSC_TRUE, ctx));
  PetscCall(MatViewFromOptions(*mass, NULL, "-mass_sq_mat_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
994: /* This creates a "gradient matrix" between a finite element and particle space, which is meant to enforce a weak divergence condition on the particle space. We are looking for a finite element field that has the same divergence as our particle field, so that
996: \int_X \psi_i \nabla \cdot \hat f = \int_X \psi_i \nabla \cdot f
998: and then integrate by parts
1000: \int_X \nabla \psi_i \cdot \hat f = \int_X \nabla \psi_i \cdot f
1002: where \psi is from a scalar FE space. If a finite element interpolant is given by
1004: \hat f^c = \sum_i f_i \phi^c_i
1006: and a particle function is given by
1008: f^c = \sum_p f^c_p \delta(x - x_p)
1010: then we want to require that
1012: D_f \hat f = D_p f
1014: where the gradient matrices are given by
1016: (D_f)_{i(jc)} = \int \partial_c \psi_i \phi_j
1017: (D_p)_{i(jc)} = \int \partial_c \psi_i \delta(x - x_j)
1019: Thus we need two finite element spaces, a scalar and a vector. The vector space holds the representer for the
1020: vector particle field. The scalar space holds the output of D_p or D_f, which is the weak divergence of the field.
1022: The way Dave May does particles, they amount to quadrature weights rather than delta functions, so he has |J| in
1023: his integral. We allow this with the boolean flag.
1024: */
/* Assembles (D_p)_{i(jc)} = \partial_c \psi_i(x_j): rows are particle dofs (cdim per particle),
   columns are FEM dofs of the scalar space on 'dm'. Derivatives are tabulated in reference space
   and pulled back with invJ. 'ctx' is accepted for interface symmetry but not read here. */
static PetscErrorCode DMSwarmComputeGradientMatrix_Private(DM sw, DM dm, Mat derv, PetscBool useDeltaFunction, PetscCtx ctx)
{
  const char   *name = "Derivative Matrix";
  MPI_Comm      comm;
  DMSwarmCellDM celldm;
  PetscDS       ds;
  PetscSection  fsection, globalFSection;
  PetscLayout   rLayout;
  PetscInt      locRows, rStart, *rowIDXs;
  PetscReal    *xi, *v0, *J, *invJ, detJ = 1.0, v0ref[3] = {-1.0, -1.0, -1.0};
  PetscScalar  *elemMat;
  PetscInt      cdim, Nf, Nfc, cStart, cEnd, totDim, maxNpc = 0, totNc = 0;
  const char  **coordFields;
  PetscReal   **coordVals;
  PetscInt     *bs;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)derv, &comm));
  PetscCall(DMGetCoordinateDim(dm, &cdim));
  PetscCall(DMGetDS(dm, &ds));
  PetscCall(PetscDSGetNumFields(ds, &Nf));
  PetscCheck(Nf == 1, comm, PETSC_ERR_SUP, "Currently, we only support a single field");
  PetscCall(PetscDSGetTotalDimension(ds, &totDim));
  /* Scratch for affine cell geometry: origin v0, Jacobian J, and its inverse invJ */
  PetscCall(PetscMalloc3(cdim, &v0, cdim * cdim, &J, cdim * cdim, &invJ));
  PetscCall(DMGetLocalSection(dm, &fsection));
  PetscCall(DMGetGlobalSection(dm, &globalFSection));
  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
  PetscCall(MatGetLocalSize(derv, &locRows, NULL));
  PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCheck(Nfc == 1, comm, PETSC_ERR_SUP, "Currently, we only support a single field");
  PetscCall(PetscMalloc2(Nfc, &coordVals, Nfc, &bs));
  /* Temporary layout translates the local row count into a global row offset rStart */
  PetscCall(PetscLayoutCreate(comm, &rLayout));
  PetscCall(PetscLayoutSetLocalSize(rLayout, locRows));
  PetscCall(PetscLayoutSetBlockSize(rLayout, cdim));
  PetscCall(PetscLayoutSetUp(rLayout));
  PetscCall(PetscLayoutGetRange(rLayout, &rStart, NULL));
  PetscCall(PetscLayoutDestroy(&rLayout));
  /* The FEM side must be scalar overall */
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscObject obj;
    PetscInt    Nc;

    PetscCall(PetscDSGetDiscretization(ds, field, &obj));
    PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    totNc += Nc;
  }
  PetscCheck(totNc == 1, comm, PETSC_ERR_ARG_WRONG, "The number of field components %" PetscInt_FMT " != 1", totNc);
  /* count non-zeros */
  PetscCall(DMSwarmSortGetAccess(sw));
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscObject obj;
    PetscInt    Nc;

    PetscCall(PetscDSGetDiscretization(ds, field, &obj));
    PetscCall(PetscFEGetNumComponents((PetscFE)obj, &Nc));
    /* Only the maximum particles-per-cell is recorded here; used to size the work buffers */
    for (PetscInt cell = cStart; cell < cEnd; ++cell) {
      PetscInt *pind;
      PetscInt  Npc;

      PetscCall(DMSwarmSortGetPointsPerCell(sw, cell, &Npc, &pind));
      maxNpc = PetscMax(maxNpc, Npc);
      PetscCall(DMSwarmSortRestorePointsPerCell(sw, cell, &Npc, &pind));
    }
  }
  PetscCall(PetscMalloc3(maxNpc * cdim * totDim, &elemMat, maxNpc * cdim, &rowIDXs, maxNpc * cdim, &xi));
  for (PetscInt field = 0; field < Nf; ++field) {
    PetscTabulation Tcoarse;
    PetscFE         fe;

    PetscCall(PetscDSGetDiscretization(ds, field, (PetscObject *)&fe));
    for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmGetField(sw, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
    for (PetscInt cell = cStart; cell < cEnd; ++cell) {
      PetscInt *findices, *pind;
      PetscInt  numFIndices, Npc;

      /* TODO: Use DMField instead of assuming affine */
      PetscCall(DMPlexComputeCellGeometryFEM(dm, cell, NULL, v0, J, invJ, &detJ));
      PetscCall(DMPlexGetClosureIndices(dm, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
      PetscCall(DMSwarmSortGetPointsPerCell(sw, cell, &Npc, &pind));
      /* Map each particle's real coordinates to reference space for tabulation */
      for (PetscInt j = 0; j < Npc; ++j) {
        PetscReal xr[8];
        PetscInt  off = 0;

        for (PetscInt i = 0; i < Nfc; ++i) {
          for (PetscInt b = 0; b < bs[i]; ++b, ++off) xr[off] = coordVals[i][pind[j] * bs[i] + b];
        }
        PetscCheck(off == cdim, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "The total block size of coordinates is %" PetscInt_FMT " != %" PetscInt_FMT " the DM coordinate dimension", off, cdim);
        CoordinatesRealToRef(cdim, cdim, v0ref, v0, invJ, xr, &xi[j * cdim]);
      }
      /* K = 1 requests first derivatives in the tabulation */
      PetscCall(PetscFECreateTabulation(fe, 1, Npc, xi, 1, &Tcoarse));
      /* Get elemMat entries by multiplying by weight */
      PetscCall(PetscArrayzero(elemMat, Npc * cdim * totDim));
      for (PetscInt i = 0; i < numFIndices; ++i) {
        for (PetscInt j = 0; j < Npc; ++j) {
          /* D[((p*pdim + i)*Nc + c)*cdim + d] is the value at point p for basis function i, component c, derivative d */
          /* NOTE: xi[0..cdim-1] is reused here as scratch for the pulled-back gradient; the
             reference coordinates it held were already consumed by the tabulation above */
          for (PetscInt d = 0; d < cdim; ++d) {
            xi[d] = 0.;
            for (PetscInt e = 0; e < cdim; ++e) xi[d] += invJ[e * cdim + d] * Tcoarse->T[1][(j * numFIndices + i) * cdim + e];
            elemMat[(j * cdim + d) * numFIndices + i] += xi[d] * (useDeltaFunction ? 1.0 : detJ);
          }
        }
      }
      /* Global row indices: cdim consecutive rows per particle */
      for (PetscInt j = 0; j < Npc; ++j)
        for (PetscInt d = 0; d < cdim; ++d) rowIDXs[j * cdim + d] = pind[j] * cdim + d + rStart;
      if (0) PetscCall(DMPrintCellMatrix(cell, name, Npc * cdim, numFIndices, elemMat));
      PetscCall(MatSetValues(derv, Npc * cdim, rowIDXs, numFIndices, findices, elemMat, ADD_VALUES));
      PetscCall(DMSwarmSortRestorePointsPerCell(sw, cell, &Npc, &pind));
      PetscCall(DMPlexRestoreClosureIndices(dm, fsection, globalFSection, cell, PETSC_FALSE, &numFIndices, &findices, NULL, NULL));
      PetscCall(PetscTabulationDestroy(&Tcoarse));
    }
    for (PetscInt i = 0; i < Nfc; ++i) PetscCall(DMSwarmRestoreField(sw, coordFields[i], &bs[i], NULL, (void **)&coordVals[i]));
  }
  PetscCall(PetscFree3(elemMat, rowIDXs, xi));
  PetscCall(DMSwarmSortRestoreAccess(sw));
  PetscCall(PetscFree3(v0, J, invJ));
  PetscCall(PetscFree2(coordVals, bs));
  PetscCall(MatAssemblyBegin(derv, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(derv, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1149: /* FEM cols: this is a scalar space
1150: Particle rows: this is a vector space that contracts with the derivative
1151: */
1152: static PetscErrorCode DMCreateGradientMatrix_Swarm(DM sw, DM dm, Mat *derv)
1153: {
1154: DMSwarmCellDM celldm;
1155: PetscSection gs;
1156: PetscInt cdim, m, n, Np, bs;
1157: void *ctx;
1158: MPI_Comm comm;
1160: PetscFunctionBegin;
1161: PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));
1162: PetscCall(DMGetCoordinateDim(dm, &cdim));
1163: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
1164: PetscCheck(celldm->Nf, comm, PETSC_ERR_USER, "Active cell DM does not define any fields");
1165: PetscCall(DMGetGlobalSection(dm, &gs));
1166: PetscCall(PetscSectionGetConstrainedStorageSize(gs, &n));
1167: PetscCall(DMSwarmGetLocalSize(sw, &Np));
1168: PetscCall(DMSwarmCellDMGetBlockSize(celldm, sw, &bs));
1169: PetscCheck(cdim == bs, comm, PETSC_ERR_ARG_WRONG, "Coordinate dimension %" PetscInt_FMT " != %" PetscInt_FMT " swarm field block size", cdim, bs);
1170: m = Np * bs;
1171: PetscCall(MatCreate(PetscObjectComm((PetscObject)sw), derv));
1172: PetscCall(PetscObjectSetName((PetscObject)*derv, "Swarm Derivative Matrix"));
1173: PetscCall(MatSetSizes(*derv, m, n, PETSC_DETERMINE, PETSC_DETERMINE));
1174: PetscCall(MatSetType(*derv, sw->mattype));
1175: PetscCall(DMGetApplicationContext(dm, &ctx));
1177: PetscCall(DMSwarmComputeGradientMatrix_Private(sw, dm, *derv, PETSC_TRUE, ctx));
1178: PetscCall(MatViewFromOptions(*derv, NULL, "-gradient_mat_view"));
1179: PetscFunctionReturn(PETSC_SUCCESS);
1180: }
1182: /*@
1183: DMSwarmCreateGlobalVectorFromField - Creates a `Vec` object sharing the array associated with a given field
1185: Collective
1187: Input Parameters:
1188: + dm - a `DMSWARM`
1189: - fieldname - the textual name given to a registered field
1191: Output Parameter:
1192: . vec - the vector
1194: Level: beginner
1196: Note:
1197: The vector must be returned using a matching call to `DMSwarmDestroyGlobalVectorFromField()`.
1199: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmDestroyGlobalVectorFromField()`
1200: @*/
1201: PetscErrorCode DMSwarmCreateGlobalVectorFromField(DM dm, const char fieldname[], Vec *vec)
1202: {
1203: MPI_Comm comm = PetscObjectComm((PetscObject)dm);
1205: PetscFunctionBegin;
1207: PetscCall(DMSwarmCreateVectorFromField_Private(dm, fieldname, comm, vec));
1208: PetscFunctionReturn(PETSC_SUCCESS);
1209: }
1211: /*@
1212: DMSwarmDestroyGlobalVectorFromField - Destroys the `Vec` object which share the array associated with a given field
1214: Collective
1216: Input Parameters:
1217: + dm - a `DMSWARM`
1218: - fieldname - the textual name given to a registered field
1220: Output Parameter:
1221: . vec - the vector
1223: Level: beginner
1225: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmCreateGlobalVectorFromField()`
1226: @*/
PetscErrorCode DMSwarmDestroyGlobalVectorFromField(DM dm, const char fieldname[], Vec *vec)
{
  PetscFunctionBegin;
  /* Thin wrapper: returns the field-backed vector obtained from DMSwarmCreateGlobalVectorFromField() */
  PetscCall(DMSwarmDestroyVectorFromField_Private(dm, fieldname, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1235: /*@
1236: DMSwarmCreateLocalVectorFromField - Creates a `Vec` object sharing the array associated with a given field
1238: Collective
1240: Input Parameters:
1241: + dm - a `DMSWARM`
1242: - fieldname - the textual name given to a registered field
1244: Output Parameter:
1245: . vec - the vector
1247: Level: beginner
1249: Note:
1250: The vector must be returned using a matching call to DMSwarmDestroyLocalVectorFromField().
1252: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmDestroyLocalVectorFromField()`
1253: @*/
PetscErrorCode DMSwarmCreateLocalVectorFromField(DM dm, const char fieldname[], Vec *vec)
{
  PetscFunctionBegin;
  /* A local vector lives on PETSC_COMM_SELF (contrast the global variant, which uses the swarm's communicator) */
  PetscCall(DMSwarmCreateVectorFromField_Private(dm, fieldname, PETSC_COMM_SELF, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1261: /*@
1262: DMSwarmDestroyLocalVectorFromField - Destroys the `Vec` object which share the array associated with a given field
1264: Collective
1266: Input Parameters:
1267: + dm - a `DMSWARM`
1268: - fieldname - the textual name given to a registered field
1270: Output Parameter:
1271: . vec - the vector
1273: Level: beginner
1275: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmCreateLocalVectorFromField()`
1276: @*/
PetscErrorCode DMSwarmDestroyLocalVectorFromField(DM dm, const char fieldname[], Vec *vec)
{
  PetscFunctionBegin;
  /* Thin wrapper: returns the field-backed vector obtained from DMSwarmCreateLocalVectorFromField() */
  PetscCall(DMSwarmDestroyVectorFromField_Private(dm, fieldname, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1285: /*@
1286: DMSwarmCreateGlobalVectorFromFields - Creates a `Vec` object sharing the array associated with a given field set
1288: Collective
1290: Input Parameters:
1291: + dm - a `DMSWARM`
1292: . Nf - the number of fields
1293: - fieldnames - the textual names given to the registered fields
1295: Output Parameter:
1296: . vec - the vector
1298: Level: beginner
1300: Notes:
1301: The vector must be returned using a matching call to `DMSwarmDestroyGlobalVectorFromFields()`.
1303: This vector is copyin-copyout, rather than a direct pointer like `DMSwarmCreateGlobalVectorFromField()`
1305: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmDestroyGlobalVectorFromFields()`
1306: @*/
1307: PetscErrorCode DMSwarmCreateGlobalVectorFromFields(DM dm, PetscInt Nf, const char *fieldnames[], Vec *vec)
1308: {
1309: MPI_Comm comm = PetscObjectComm((PetscObject)dm);
1311: PetscFunctionBegin;
1313: PetscCall(DMSwarmCreateVectorFromFields_Private(dm, Nf, fieldnames, comm, vec));
1314: PetscFunctionReturn(PETSC_SUCCESS);
1315: }
1317: /*@
1318: DMSwarmDestroyGlobalVectorFromFields - Destroys the `Vec` object which share the array associated with a given field set
1320: Collective
1322: Input Parameters:
1323: + dm - a `DMSWARM`
1324: . Nf - the number of fields
1325: - fieldnames - the textual names given to the registered fields
1327: Output Parameter:
1328: . vec - the vector
1330: Level: beginner
1332: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmCreateGlobalVectorFromField()`
1333: @*/
PetscErrorCode DMSwarmDestroyGlobalVectorFromFields(DM dm, PetscInt Nf, const char *fieldnames[], Vec *vec)
{
  PetscFunctionBegin;
  /* Thin wrapper: returns the vector obtained from DMSwarmCreateGlobalVectorFromFields() */
  PetscCall(DMSwarmDestroyVectorFromFields_Private(dm, Nf, fieldnames, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1342: /*@
1343: DMSwarmCreateLocalVectorFromFields - Creates a `Vec` object sharing the array associated with a given field set
1345: Collective
1347: Input Parameters:
1348: + dm - a `DMSWARM`
1349: . Nf - the number of fields
1350: - fieldnames - the textual names given to the registered fields
1352: Output Parameter:
1353: . vec - the vector
1355: Level: beginner
1357: Notes:
1358: The vector must be returned using a matching call to DMSwarmDestroyLocalVectorFromField().
1360: This vector is copyin-copyout, rather than a direct pointer like `DMSwarmCreateLocalVectorFromField()`
1362: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmDestroyLocalVectorFromField()`
1363: @*/
PetscErrorCode DMSwarmCreateLocalVectorFromFields(DM dm, PetscInt Nf, const char *fieldnames[], Vec *vec)
{
  PetscFunctionBegin;
  /* A local vector lives on PETSC_COMM_SELF; this multi-field variant is copyin-copyout */
  PetscCall(DMSwarmCreateVectorFromFields_Private(dm, Nf, fieldnames, PETSC_COMM_SELF, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1371: /*@
1372: DMSwarmDestroyLocalVectorFromFields - Destroys the `Vec` object which share the array associated with a given field set
1374: Collective
1376: Input Parameters:
1377: + dm - a `DMSWARM`
1378: . Nf - the number of fields
1379: - fieldnames - the textual names given to the registered fields
1381: Output Parameter:
1382: . vec - the vector
1384: Level: beginner
1386: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmCreateLocalVectorFromFields()`
1387: @*/
PetscErrorCode DMSwarmDestroyLocalVectorFromFields(DM dm, PetscInt Nf, const char *fieldnames[], Vec *vec)
{
  PetscFunctionBegin;
  /* Thin wrapper: returns the vector obtained from DMSwarmCreateLocalVectorFromFields() */
  PetscCall(DMSwarmDestroyVectorFromFields_Private(dm, Nf, fieldnames, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1396: /*@
1397: DMSwarmInitializeFieldRegister - Initiates the registration of fields to a `DMSWARM`
1399: Collective
1401: Input Parameter:
1402: . dm - a `DMSWARM`
1404: Level: beginner
1406: Note:
1407: After all fields have been registered, you must call `DMSwarmFinalizeFieldRegister()`.
1409: .seealso: `DM`, `DMSWARM`, `DMSwarmFinalizeFieldRegister()`, `DMSwarmRegisterPetscDatatypeField()`,
1410: `DMSwarmRegisterUserStructField()`, `DMSwarmRegisterUserDatatypeField()`
1411: @*/
1412: PetscErrorCode DMSwarmInitializeFieldRegister(DM dm)
1413: {
1414: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1416: PetscFunctionBegin;
1417: if (!swarm->field_registration_initialized) {
1418: swarm->field_registration_initialized = PETSC_TRUE;
1419: PetscCall(DMSwarmRegisterPetscDatatypeField(dm, DMSwarmField_pid, 1, PETSC_INT64)); /* unique identifier */
1420: PetscCall(DMSwarmRegisterPetscDatatypeField(dm, DMSwarmField_rank, 1, PETSC_INT)); /* used for communication */
1421: }
1422: PetscFunctionReturn(PETSC_SUCCESS);
1423: }
1425: /*@
1426: DMSwarmFinalizeFieldRegister - Finalizes the registration of fields to a `DMSWARM`
1428: Collective
1430: Input Parameter:
1431: . dm - a `DMSWARM`
1433: Level: beginner
1435: Note:
1436: After `DMSwarmFinalizeFieldRegister()` has been called, no new fields can be defined on the `DMSWARM`.
1438: .seealso: `DM`, `DMSWARM`, `DMSwarmInitializeFieldRegister()`, `DMSwarmRegisterPetscDatatypeField()`,
1439: `DMSwarmRegisterUserStructField()`, `DMSwarmRegisterUserDatatypeField()`
1440: @*/
1441: PetscErrorCode DMSwarmFinalizeFieldRegister(DM dm)
1442: {
1443: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1445: PetscFunctionBegin;
1446: if (!swarm->field_registration_finalized) PetscCall(DMSwarmDataBucketFinalize(swarm->db));
1447: swarm->field_registration_finalized = PETSC_TRUE;
1448: PetscFunctionReturn(PETSC_SUCCESS);
1449: }
1451: /*@
1452: DMSwarmSetLocalSizes - Sets the length of all registered fields on the `DMSWARM`
1454: Not Collective
1456: Input Parameters:
1457: + sw - a `DMSWARM`
1458: . nlocal - the length of each registered field
1459: - buffer - the length of the buffer used to efficient dynamic re-sizing
1461: Level: beginner
1463: .seealso: `DM`, `DMSWARM`, `DMSwarmGetLocalSize()`
1464: @*/
PetscErrorCode DMSwarmSetLocalSizes(DM sw, PetscInt nlocal, PetscInt buffer)
{
  DM_Swarm   *swarm = (DM_Swarm *)sw->data;
  PetscMPIInt rank;
  PetscInt   *rankval;

  PetscFunctionBegin;
  /* Resize every registered field in the data bucket to nlocal entries (plus resize buffer) */
  PetscCall(PetscLogEventBegin(DMSWARM_SetSizes, 0, 0, 0, 0));
  PetscCall(DMSwarmDataBucketSetSizes(swarm->db, nlocal, buffer));
  PetscCall(PetscLogEventEnd(DMSWARM_SetSizes, 0, 0, 0, 0));
  // Initialize values in pid and rank placeholders
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)sw), &rank));
  PetscCall(DMSwarmGetField(sw, DMSwarmField_rank, NULL, NULL, (void **)&rankval));
  for (PetscInt p = 0; p < nlocal; p++) rankval[p] = rank;
  PetscCall(DMSwarmRestoreField(sw, DMSwarmField_rank, NULL, NULL, (void **)&rankval));
  /* TODO: [pid - use MPI_Scan] */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1485: /*@
1486: DMSwarmSetCellDM - Attaches a `DM` to a `DMSWARM`
1488: Collective
1490: Input Parameters:
1491: + sw - a `DMSWARM`
1492: - dm - the `DM` to attach to the `DMSWARM`
1494: Level: beginner
1496: Note:
1497: The attached `DM` (dm) will be queried for point location and
1498: neighbor MPI-rank information if `DMSwarmMigrate()` is called.
1500: .seealso: `DM`, `DMSWARM`, `DMSwarmSetType()`, `DMSwarmGetCellDM()`, `DMSwarmMigrate()`
1501: @*/
PetscErrorCode DMSwarmSetCellDM(DM sw, DM dm)
{
  DMSwarmCellDM celldm;
  const char   *name;
  char         *coordName;

  PetscFunctionBegin;
  /* Wrap the DM in a DMSwarmCellDM with no user fields and the default PIC coordinate field */
  PetscCall(PetscStrallocpy(DMSwarmPICField_coor, &coordName));
  PetscCall(DMSwarmCellDMCreate(dm, 0, NULL, 1, (const char **)&coordName, &celldm));
  PetscCall(PetscFree(coordName));
  PetscCall(PetscObjectGetName((PetscObject)celldm, &name));
  PetscCall(DMSwarmAddCellDM(sw, celldm));
  /* Drop our reference; presumably the swarm retains its own, keeping 'name' valid below — TODO confirm */
  PetscCall(DMSwarmCellDMDestroy(&celldm));
  PetscCall(DMSwarmSetCellDMActive(sw, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1521: /*@
1522: DMSwarmGetCellDM - Fetches the active cell `DM`
1524: Collective
1526: Input Parameter:
1527: . sw - a `DMSWARM`
1529: Output Parameter:
1530: . dm - the active `DM` for the `DMSWARM`
1532: Level: beginner
1534: .seealso: `DM`, `DMSWARM`, `DMSwarmSetCellDM()`
1535: @*/
PetscErrorCode DMSwarmGetCellDM(DM sw, DM *dm)
{
  DM_Swarm     *swarm = (DM_Swarm *)sw->data;
  DMSwarmCellDM celldm;

  PetscFunctionBegin;
  /* Look up the active DMSwarmCellDM by name, then return the DM it wraps */
  PetscCall(PetscObjectListFind(swarm->cellDMs, swarm->activeCellDM, (PetscObject *)&celldm));
  PetscCheck(celldm, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_WRONG, "There is no cell DM named %s in this Swarm", swarm->activeCellDM);
  PetscCall(DMSwarmCellDMGetDM(celldm, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1549: /*@C
1550: DMSwarmGetCellDMNames - Get the list of cell `DM` names
1552: Not collective
1554: Input Parameter:
1555: . sw - a `DMSWARM`
1557: Output Parameters:
1558: + Ndm - the number of `DMSwarmCellDM` in the `DMSWARM`
1559: - celldms - the name of each `DMSwarmCellDM`
1561: Level: beginner
1563: .seealso: `DM`, `DMSWARM`, `DMSwarmSetCellDM()`, `DMSwarmGetCellDMByName()`
1564: @*/
1565: PetscErrorCode DMSwarmGetCellDMNames(DM sw, PetscInt *Ndm, const char **celldms[])
1566: {
1567: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1568: PetscObjectList next = swarm->cellDMs;
1569: PetscInt n = 0;
1571: PetscFunctionBegin;
1573: PetscAssertPointer(Ndm, 2);
1574: PetscAssertPointer(celldms, 3);
1575: while (next) {
1576: next = next->next;
1577: ++n;
1578: }
1579: PetscCall(PetscMalloc1(n, celldms));
1580: next = swarm->cellDMs;
1581: n = 0;
1582: while (next) {
1583: (*celldms)[n] = (const char *)next->obj->name;
1584: next = next->next;
1585: ++n;
1586: }
1587: *Ndm = n;
1588: PetscFunctionReturn(PETSC_SUCCESS);
1589: }
1591: /*@
1592: DMSwarmSetCellDMActive - Activates a cell `DM` for a `DMSWARM`
1594: Collective
1596: Input Parameters:
1597: + sw - a `DMSWARM`
1598: - name - name of the cell `DM` to active for the `DMSWARM`
1600: Level: beginner
1602: Note:
1603: The attached `DM` (dmcell) will be queried for point location and
1604: neighbor MPI-rank information if `DMSwarmMigrate()` is called.
1606: .seealso: `DM`, `DMSWARM`, `DMSwarmCellDM`, `DMSwarmSetType()`, `DMSwarmAddCellDM()`, `DMSwarmSetCellDM()`, `DMSwarmMigrate()`
1607: @*/
1608: PetscErrorCode DMSwarmSetCellDMActive(DM sw, const char name[])
1609: {
1610: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1611: DMSwarmCellDM celldm;
1613: PetscFunctionBegin;
1615: PetscCall(PetscInfo(sw, "Setting cell DM to %s\n", name));
1616: PetscCall(PetscFree(swarm->activeCellDM));
1617: PetscCall(PetscStrallocpy(name, (char **)&swarm->activeCellDM));
1618: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
1619: PetscFunctionReturn(PETSC_SUCCESS);
1620: }
1622: /*@
1623: DMSwarmGetCellDMActive - Returns the active cell `DM` for a `DMSWARM`
1625: Collective
1627: Input Parameter:
1628: . sw - a `DMSWARM`
1630: Output Parameter:
1631: . celldm - the active `DMSwarmCellDM`
1633: Level: beginner
1635: .seealso: `DM`, `DMSWARM`, `DMSwarmCellDM`, `DMSwarmSetType()`, `DMSwarmAddCellDM()`, `DMSwarmSetCellDM()`, `DMSwarmMigrate()`
1636: @*/
1637: PetscErrorCode DMSwarmGetCellDMActive(DM sw, DMSwarmCellDM *celldm)
1638: {
1639: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1641: PetscFunctionBegin;
1643: PetscAssertPointer(celldm, 2);
1644: PetscCheck(swarm->activeCellDM, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_WRONGSTATE, "Swarm has no active cell DM");
1645: PetscCall(PetscObjectListFind(swarm->cellDMs, swarm->activeCellDM, (PetscObject *)celldm));
1646: PetscCheck(*celldm, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_WRONGSTATE, "Swarm has no valid cell DM for %s", swarm->activeCellDM);
1647: PetscFunctionReturn(PETSC_SUCCESS);
1648: }
1650: /*@C
1651: DMSwarmGetCellDMByName - Get a `DMSwarmCellDM` from its name
1653: Not collective
1655: Input Parameters:
1656: + sw - a `DMSWARM`
1657: - name - the name
1659: Output Parameter:
1660: . celldm - the `DMSwarmCellDM`, or `NULL` if the name is unknown
1662: Level: beginner
1664: .seealso: `DM`, `DMSWARM`, `DMSwarmSetCellDM()`, `DMSwarmGetCellDMNames()`
1665: @*/
1666: PetscErrorCode DMSwarmGetCellDMByName(DM sw, const char name[], DMSwarmCellDM *celldm)
1667: {
1668: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1670: PetscFunctionBegin;
1672: PetscAssertPointer(name, 2);
1673: PetscAssertPointer(celldm, 3);
1674: PetscCall(PetscObjectListFind(swarm->cellDMs, name, (PetscObject *)celldm));
1675: PetscFunctionReturn(PETSC_SUCCESS);
1676: }
1678: /*@
1679: DMSwarmAddCellDM - Adds a cell `DM` to the `DMSWARM`
1681: Collective
1683: Input Parameters:
1684: + sw - a `DMSWARM`
1685: - celldm - the `DMSwarmCellDM`
1687: Level: beginner
1689: Note:
1690: Cell DMs with the same name will share the cellid field
1692: .seealso: `DM`, `DMSWARM`, `DMSwarmSetType()`, `DMSwarmPushCellDM()`, `DMSwarmSetCellDM()`, `DMSwarmMigrate()`
1693: @*/
1694: PetscErrorCode DMSwarmAddCellDM(DM sw, DMSwarmCellDM celldm)
1695: {
1696: DM_Swarm *swarm = (DM_Swarm *)sw->data;
1697: const char *name;
1698: PetscInt dim;
1699: PetscBool flg;
1700: MPI_Comm comm;
1702: PetscFunctionBegin;
1704: PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));
1706: PetscCall(PetscObjectGetName((PetscObject)celldm, &name));
1707: PetscCall(PetscObjectListAdd(&swarm->cellDMs, name, (PetscObject)celldm));
1708: PetscCall(DMGetDimension(sw, &dim));
1709: for (PetscInt f = 0; f < celldm->Nfc; ++f) {
1710: PetscCall(DMSwarmDataFieldStringInList(celldm->coordFields[f], swarm->db->nfields, (const DMSwarmDataField *)swarm->db->field, &flg));
1711: if (!flg) {
1712: PetscCall(DMSwarmRegisterPetscDatatypeField(sw, celldm->coordFields[f], dim, PETSC_DOUBLE));
1713: } else {
1714: PetscDataType dt;
1715: PetscInt bs;
1717: PetscCall(DMSwarmGetFieldInfo(sw, celldm->coordFields[f], &bs, &dt));
1718: PetscCheck(bs == dim, comm, PETSC_ERR_ARG_WRONG, "Coordinate field %s has blocksize %" PetscInt_FMT " != %" PetscInt_FMT " spatial dimension", celldm->coordFields[f], bs, dim);
1719: PetscCheck(dt == PETSC_DOUBLE, comm, PETSC_ERR_ARG_WRONG, "Coordinate field %s has datatype %s != PETSC_DOUBLE", celldm->coordFields[f], PetscDataTypes[dt]);
1720: }
1721: }
1722: // Assume that DMs with the same name share the cellid field
1723: PetscCall(DMSwarmDataFieldStringInList(celldm->cellid, swarm->db->nfields, (const DMSwarmDataField *)swarm->db->field, &flg));
1724: if (!flg) {
1725: PetscBool isShell, isDummy;
1726: const char *name;
1728: // Allow dummy DMSHELL (I don't think we should support this mode)
1729: PetscCall(PetscObjectTypeCompare((PetscObject)celldm->dm, DMSHELL, &isShell));
1730: PetscCall(PetscObjectGetName((PetscObject)celldm->dm, &name));
1731: PetscCall(PetscStrcmp(name, "dummy", &isDummy));
1732: if (!isShell || !isDummy) PetscCall(DMSwarmRegisterPetscDatatypeField(sw, celldm->cellid, 1, PETSC_INT));
1733: }
1734: PetscCall(DMSwarmSetCellDMActive(sw, name));
1735: PetscFunctionReturn(PETSC_SUCCESS);
1736: }
1738: /*@
1739: DMSwarmGetLocalSize - Retrieves the local length of fields registered
1741: Not Collective
1743: Input Parameter:
1744: . dm - a `DMSWARM`
1746: Output Parameter:
1747: . nlocal - the length of each registered field
1749: Level: beginner
1751: .seealso: `DM`, `DMSWARM`, `DMSwarmGetSize()`, `DMSwarmSetLocalSizes()`
1752: @*/
1753: PetscErrorCode DMSwarmGetLocalSize(DM dm, PetscInt *nlocal)
1754: {
1755: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1757: PetscFunctionBegin;
1758: PetscCall(DMSwarmDataBucketGetSizes(swarm->db, nlocal, NULL, NULL));
1759: PetscFunctionReturn(PETSC_SUCCESS);
1760: }
1762: /*@
1763: DMSwarmGetSize - Retrieves the total length of fields registered
1765: Collective
1767: Input Parameter:
1768: . dm - a `DMSWARM`
1770: Output Parameter:
1771: . n - the total length of each registered field
1773: Level: beginner
1775: Note:
1776: This calls `MPI_Allreduce()` upon each call (inefficient but safe)
1778: .seealso: `DM`, `DMSWARM`, `DMSwarmGetLocalSize()`, `DMSwarmSetLocalSizes()`
1779: @*/
1780: PetscErrorCode DMSwarmGetSize(DM dm, PetscInt *n)
1781: {
1782: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1783: PetscInt nlocal;
1785: PetscFunctionBegin;
1786: PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &nlocal, NULL, NULL));
1787: PetscCallMPI(MPIU_Allreduce(&nlocal, n, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)dm)));
1788: PetscFunctionReturn(PETSC_SUCCESS);
1789: }
1791: /*@C
1792: DMSwarmRegisterPetscDatatypeField - Register a field to a `DMSWARM` with a native PETSc data type
1794: Collective
1796: Input Parameters:
1797: + dm - a `DMSWARM`
1798: . fieldname - the textual name to identify this field
1799: . blocksize - the number of each data type
1800: - type - a valid PETSc data type (`PETSC_CHAR`, `PETSC_SHORT`, `PETSC_INT`, `PETSC_FLOAT`, `PETSC_REAL`, `PETSC_LONG`)
1802: Level: beginner
1804: Notes:
1805: The textual name for each registered field must be unique.
1807: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterUserStructField()`, `DMSwarmRegisterUserDatatypeField()`
1808: @*/
1809: PetscErrorCode DMSwarmRegisterPetscDatatypeField(DM dm, const char fieldname[], PetscInt blocksize, PetscDataType type)
1810: {
1811: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1812: size_t size;
1814: PetscFunctionBegin;
1815: PetscCheck(swarm->field_registration_initialized, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "Must call DMSwarmInitializeFieldRegister() first");
1816: PetscCheck(!swarm->field_registration_finalized, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "Cannot register additional fields after calling DMSwarmFinalizeFieldRegister() first");
1818: PetscCheck(type != PETSC_OBJECT, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Valid for {char,short,int,long,float,double}");
1819: PetscCheck(type != PETSC_FUNCTION, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Valid for {char,short,int,long,float,double}");
1820: PetscCheck(type != PETSC_STRING, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Valid for {char,short,int,long,float,double}");
1821: PetscCheck(type != PETSC_STRUCT, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Valid for {char,short,int,long,float,double}");
1822: PetscCheck(type != PETSC_DATATYPE_UNKNOWN, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Valid for {char,short,int,long,float,double}");
1824: PetscCall(PetscDataTypeGetSize(type, &size));
1825: /* Load a specific data type into data bucket, specifying textual name and its size in bytes */
1826: PetscCall(DMSwarmDataBucketRegisterField(swarm->db, "DMSwarmRegisterPetscDatatypeField", fieldname, blocksize * size, NULL));
1827: {
1828: DMSwarmDataField gfield;
1830: PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
1831: PetscCall(DMSwarmDataFieldSetBlockSize(gfield, blocksize));
1832: }
1833: swarm->db->field[swarm->db->nfields - 1]->petsc_type = type;
1834: PetscFunctionReturn(PETSC_SUCCESS);
1835: }
1837: /*@C
1838: DMSwarmRegisterUserStructField - Register a user defined struct to a `DMSWARM`
1840: Collective
1842: Input Parameters:
1843: + dm - a `DMSWARM`
1844: . fieldname - the textual name to identify this field
1845: - size - the size in bytes of the user struct of each data type
1847: Level: beginner
1849: Note:
1850: The textual name for each registered field must be unique.
1852: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmRegisterUserDatatypeField()`
1853: @*/
1854: PetscErrorCode DMSwarmRegisterUserStructField(DM dm, const char fieldname[], size_t size)
1855: {
1856: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1858: PetscFunctionBegin;
1859: PetscCall(DMSwarmDataBucketRegisterField(swarm->db, "DMSwarmRegisterUserStructField", fieldname, size, NULL));
1860: swarm->db->field[swarm->db->nfields - 1]->petsc_type = PETSC_STRUCT;
1861: PetscFunctionReturn(PETSC_SUCCESS);
1862: }
1864: /*@C
1865: DMSwarmRegisterUserDatatypeField - Register a user defined data type to a `DMSWARM`
1867: Collective
1869: Input Parameters:
1870: + dm - a `DMSWARM`
1871: . fieldname - the textual name to identify this field
1872: . size - the size in bytes of the user data type
1873: - blocksize - the number of each data type
1875: Level: beginner
1877: Note:
1878: The textual name for each registered field must be unique.
1880: .seealso: `DM`, `DMSWARM`, `DMSwarmRegisterPetscDatatypeField()`, `DMSwarmRegisterUserStructField()`
1881: @*/
1882: PetscErrorCode DMSwarmRegisterUserDatatypeField(DM dm, const char fieldname[], size_t size, PetscInt blocksize)
1883: {
1884: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1886: PetscFunctionBegin;
1887: PetscCall(DMSwarmDataBucketRegisterField(swarm->db, "DMSwarmRegisterUserDatatypeField", fieldname, blocksize * size, NULL));
1888: {
1889: DMSwarmDataField gfield;
1891: PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
1892: PetscCall(DMSwarmDataFieldSetBlockSize(gfield, blocksize));
1893: }
1894: swarm->db->field[swarm->db->nfields - 1]->petsc_type = PETSC_DATATYPE_UNKNOWN;
1895: PetscFunctionReturn(PETSC_SUCCESS);
1896: }
1898: /*@C
1899: DMSwarmGetField - Get access to the underlying array storing all entries associated with a registered field
1901: Not Collective, No Fortran Support
1903: Input Parameters:
1904: + dm - a `DMSWARM`
1905: - fieldname - the textual name to identify this field
1907: Output Parameters:
1908: + blocksize - the number of each data type
1909: . type - the data type
1910: - data - pointer to raw array
1912: Level: beginner
1914: Notes:
1915: The array must be returned using a matching call to `DMSwarmRestoreField()`.
1917: Fortran Note:
1918: Only works for `type` of `PETSC_SCALAR`
1920: .seealso: `DM`, `DMSWARM`, `DMSwarmRestoreField()`
1921: @*/
1922: PetscErrorCode DMSwarmGetField(DM dm, const char fieldname[], PetscInt *blocksize, PetscDataType *type, void **data) PeNS
1923: {
1924: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1925: DMSwarmDataField gfield;
1927: PetscFunctionBegin;
1929: if (!swarm->issetup) PetscCall(DMSetUp(dm));
1930: PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
1931: PetscCall(DMSwarmDataFieldGetAccess(gfield));
1932: PetscCall(DMSwarmDataFieldGetEntries(gfield, data));
1933: if (blocksize) *blocksize = gfield->bs;
1934: if (type) *type = gfield->petsc_type;
1935: PetscFunctionReturn(PETSC_SUCCESS);
1936: }
1938: /*@C
1939: DMSwarmRestoreField - Restore access to the underlying array storing all entries associated with a registered field
1941: Not Collective
1943: Input Parameters:
1944: + dm - a `DMSWARM`
1945: - fieldname - the textual name to identify this field
1947: Output Parameters:
1948: + blocksize - the number of each data type
1949: . type - the data type
1950: - data - pointer to raw array
1952: Level: beginner
1954: Notes:
1955: The user must call `DMSwarmGetField()` prior to calling `DMSwarmRestoreField()`.
1957: Fortran Note:
1958: Only works for `type` of `PETSC_SCALAR`
1960: .seealso: `DM`, `DMSWARM`, `DMSwarmGetField()`
1961: @*/
1962: PetscErrorCode DMSwarmRestoreField(DM dm, const char fieldname[], PetscInt *blocksize, PetscDataType *type, void **data) PeNS
1963: {
1964: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1965: DMSwarmDataField gfield;
1967: PetscFunctionBegin;
1969: PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
1970: PetscCall(DMSwarmDataFieldRestoreAccess(gfield));
1971: if (data) *data = NULL;
1972: PetscFunctionReturn(PETSC_SUCCESS);
1973: }
1975: PetscErrorCode DMSwarmGetFieldInfo(DM dm, const char fieldname[], PetscInt *blocksize, PetscDataType *type)
1976: {
1977: DM_Swarm *swarm = (DM_Swarm *)dm->data;
1978: DMSwarmDataField gfield;
1980: PetscFunctionBegin;
1982: PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldname, &gfield));
1983: if (blocksize) *blocksize = gfield->bs;
1984: if (type) *type = gfield->petsc_type;
1985: PetscFunctionReturn(PETSC_SUCCESS);
1986: }
1988: /*@
1989: DMSwarmAddPoint - Add space for one new point in the `DMSWARM`
1991: Not Collective
1993: Input Parameter:
1994: . dm - a `DMSWARM`
1996: Level: beginner
1998: Notes:
1999: The new point will have all fields initialized to zero.
2001: .seealso: `DM`, `DMSWARM`, `DMSwarmAddNPoints()`
2002: @*/
2003: PetscErrorCode DMSwarmAddPoint(DM dm)
2004: {
2005: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2007: PetscFunctionBegin;
2008: if (!swarm->issetup) PetscCall(DMSetUp(dm));
2009: PetscCall(PetscLogEventBegin(DMSWARM_AddPoints, 0, 0, 0, 0));
2010: PetscCall(DMSwarmDataBucketAddPoint(swarm->db));
2011: PetscCall(PetscLogEventEnd(DMSWARM_AddPoints, 0, 0, 0, 0));
2012: PetscFunctionReturn(PETSC_SUCCESS);
2013: }
2015: /*@
2016: DMSwarmAddNPoints - Add space for a number of new points in the `DMSWARM`
2018: Not Collective
2020: Input Parameters:
2021: + dm - a `DMSWARM`
2022: - npoints - the number of new points to add
2024: Level: beginner
2026: Notes:
2027: The new point will have all fields initialized to zero.
2029: .seealso: `DM`, `DMSWARM`, `DMSwarmAddPoint()`
2030: @*/
2031: PetscErrorCode DMSwarmAddNPoints(DM dm, PetscInt npoints)
2032: {
2033: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2034: PetscInt nlocal;
2036: PetscFunctionBegin;
2037: PetscCall(PetscLogEventBegin(DMSWARM_AddPoints, 0, 0, 0, 0));
2038: PetscCall(DMSwarmDataBucketGetSizes(swarm->db, &nlocal, NULL, NULL));
2039: nlocal = PetscMax(nlocal, 0) + npoints;
2040: PetscCall(DMSwarmDataBucketSetSizes(swarm->db, nlocal, DMSWARM_DATA_BUCKET_BUFFER_DEFAULT));
2041: PetscCall(PetscLogEventEnd(DMSWARM_AddPoints, 0, 0, 0, 0));
2042: PetscFunctionReturn(PETSC_SUCCESS);
2043: }
2045: /*@
2046: DMSwarmRemovePoint - Remove the last point from the `DMSWARM`
2048: Not Collective
2050: Input Parameter:
2051: . dm - a `DMSWARM`
2053: Level: beginner
2055: .seealso: `DM`, `DMSWARM`, `DMSwarmRemovePointAtIndex()`
2056: @*/
2057: PetscErrorCode DMSwarmRemovePoint(DM dm)
2058: {
2059: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2061: PetscFunctionBegin;
2062: PetscCall(PetscLogEventBegin(DMSWARM_RemovePoints, 0, 0, 0, 0));
2063: PetscCall(DMSwarmDataBucketRemovePoint(swarm->db));
2064: PetscCall(PetscLogEventEnd(DMSWARM_RemovePoints, 0, 0, 0, 0));
2065: PetscFunctionReturn(PETSC_SUCCESS);
2066: }
2068: /*@
2069: DMSwarmRemovePointAtIndex - Removes a specific point from the `DMSWARM`
2071: Not Collective
2073: Input Parameters:
2074: + dm - a `DMSWARM`
2075: - idx - index of point to remove
2077: Level: beginner
2079: .seealso: `DM`, `DMSWARM`, `DMSwarmRemovePoint()`
2080: @*/
2081: PetscErrorCode DMSwarmRemovePointAtIndex(DM dm, PetscInt idx)
2082: {
2083: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2085: PetscFunctionBegin;
2086: PetscCall(PetscLogEventBegin(DMSWARM_RemovePoints, 0, 0, 0, 0));
2087: PetscCall(DMSwarmDataBucketRemovePointAtIndex(swarm->db, idx));
2088: PetscCall(PetscLogEventEnd(DMSWARM_RemovePoints, 0, 0, 0, 0));
2089: PetscFunctionReturn(PETSC_SUCCESS);
2090: }
/*@
  DMSwarmCopyPoint - Copy point pi to point pj in the `DMSWARM`

  Not Collective

  Input Parameters:
+ dm - a `DMSWARM`
. pi - the index of the point to copy
- pj - the point index where the copy should be located

  Level: beginner

.seealso: `DM`, `DMSWARM`, `DMSwarmRemovePoint()`
@*/
PetscErrorCode DMSwarmCopyPoint(DM dm, PetscInt pi, PetscInt pj)
{
  DM_Swarm *swarm = (DM_Swarm *)dm->data;

  PetscFunctionBegin;
  if (!swarm->issetup) PetscCall(DMSetUp(dm));
  /* Copies all field data of point pi onto point pj within the same bucket
     (argument order is (from-bucket, from-index, to-bucket, to-index));
     the previous doc title had pi/pj reversed relative to the parameter docs */
  PetscCall(DMSwarmDataBucketCopyPoint(swarm->db, pi, swarm->db, pj));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Basic migration strategy: simply delegates to DMSwarmMigrate_Push_Basic() */
static PetscErrorCode DMSwarmMigrate_Basic(DM dm, PetscBool remove_sent_points)
{
  PetscFunctionBegin;
  PetscCall(DMSwarmMigrate_Push_Basic(dm, remove_sent_points));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  DMSwarmMigrate - Relocates points defined in the `DMSWARM` to other MPI-ranks

  Collective

  Input Parameters:
+ dm                 - the `DMSWARM`
- remove_sent_points - flag indicating if sent points should be removed from the current MPI-rank

  Level: advanced

  Notes:
  The `DM` will be modified to accommodate received points.
  If `remove_sent_points` is `PETSC_TRUE`, any points that were sent will be removed from the `DM`.
  Different styles of migration are supported. See `DMSwarmSetMigrateType()`.

.seealso: `DM`, `DMSWARM`, `DMSwarmSetMigrateType()`
@*/
PetscErrorCode DMSwarmMigrate(DM dm, PetscBool remove_sent_points)
{
  DM_Swarm *swarm = (DM_Swarm *)dm->data;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(DMSWARM_Migrate, 0, 0, 0, 0));
  /* Dispatch on the migration strategy selected via DMSwarmSetMigrateType() */
  switch (swarm->migrate_type) {
  case DMSWARM_MIGRATE_BASIC:
    PetscCall(DMSwarmMigrate_Basic(dm, remove_sent_points));
    break;
  case DMSWARM_MIGRATE_DMCELLNSCATTER:
    PetscCall(DMSwarmMigrate_CellDMScatter(dm, remove_sent_points));
    break;
  case DMSWARM_MIGRATE_DMCELLEXACT:
    /* NOTE(review): these SETERRQs use PETSC_COMM_WORLD rather than PetscObjectComm((PetscObject)dm) — confirm intentional */
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_MIGRATE_DMCELLEXACT not implemented");
  case DMSWARM_MIGRATE_USER:
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_MIGRATE_USER not implemented");
  default:
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_MIGRATE type unknown");
  }
  PetscCall(PetscLogEventEnd(DMSWARM_Migrate, 0, 0, 0, 0));
  /* Local point counts changed, so any cached global vectors are now the wrong size */
  PetscCall(DMClearGlobalVectors(dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2166: PetscErrorCode DMSwarmMigrate_GlobalToLocal_Basic(DM dm, PetscInt *globalsize);
/*
 DMSwarmCollectViewCreate

 * Applies a collection method and gathers points from neighbouring ranks into dm

 Notes:
 Users should call DMSwarmCollectViewDestroy() after
 they have finished computations associated with the collected points
*/
/*@
  DMSwarmCollectViewCreate - Applies a collection method and gathers points
  in neighbour ranks into the `DMSWARM`

  Collective

  Input Parameter:
. dm - the `DMSWARM`

  Level: advanced

  Notes:
  Users should call `DMSwarmCollectViewDestroy()` after
  they have finished computations associated with the collected points

  Different collect methods are supported. See `DMSwarmSetCollectType()`.

  Developer Note:
  Create and Destroy routines create new objects that can get destroyed, they do not change the state
  of the current object.

.seealso: `DM`, `DMSWARM`, `DMSwarmCollectViewDestroy()`, `DMSwarmSetCollectType()`
@*/
PetscErrorCode DMSwarmCollectViewCreate(DM dm)
{
  DM_Swarm *swarm = (DM_Swarm *)dm->data;
  PetscInt  ng;

  PetscFunctionBegin;
  /* Only one collect view may be active at a time */
  PetscCheck(!swarm->collect_view_active, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "CollectView currently active");
  /* ng starts as the current local size; collect_view_reset_nlocal below is used by
     DMSwarmCollectViewDestroy() to shrink back to the pre-collection size
     (assumes the collect call leaves ng as that count — TODO confirm) */
  PetscCall(DMSwarmGetLocalSize(dm, &ng));
  switch (swarm->collect_type) {
  case DMSWARM_COLLECT_BASIC:
    PetscCall(DMSwarmMigrate_GlobalToLocal_Basic(dm, &ng));
    break;
  case DMSWARM_COLLECT_DMDABOUNDINGBOX:
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_COLLECT_DMDABOUNDINGBOX not implemented");
  case DMSWARM_COLLECT_GENERAL:
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_COLLECT_GENERAL not implemented");
  default:
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "DMSWARM_COLLECT type unknown");
  }
  swarm->collect_view_active = PETSC_TRUE;
  swarm->collect_view_reset_nlocal = ng;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  DMSwarmCollectViewDestroy - Resets the `DMSWARM` to the size prior to calling `DMSwarmCollectViewCreate()`

  Collective

  Input Parameter:
. dm - the `DMSWARM`

  Notes:
  Users should call `DMSwarmCollectViewCreate()` before this function is called.

  Level: advanced

  Developer Note:
  Create and Destroy routines create new objects that can get destroyed, they do not change the state
  of the current object.

.seealso: `DM`, `DMSWARM`, `DMSwarmCollectViewCreate()`, `DMSwarmSetCollectType()`
@*/
PetscErrorCode DMSwarmCollectViewDestroy(DM dm)
{
  DM_Swarm *swarm = (DM_Swarm *)dm->data;

  PetscFunctionBegin;
  PetscCheck(swarm->collect_view_active, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "CollectView is currently not active");
  /* Shrink back to the local size recorded by DMSwarmCollectViewCreate() */
  PetscCall(DMSwarmSetLocalSizes(dm, swarm->collect_view_reset_nlocal, -1));
  swarm->collect_view_active = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2255: static PetscErrorCode DMSwarmSetUpPIC(DM dm)
2256: {
2257: PetscInt dim;
2259: PetscFunctionBegin;
2260: PetscCall(DMSwarmSetNumSpecies(dm, 1));
2261: PetscCall(DMGetDimension(dm, &dim));
2262: PetscCheck(dim >= 1, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "Dimension must be 1,2,3 - found %" PetscInt_FMT, dim);
2263: PetscCheck(dim <= 3, PetscObjectComm((PetscObject)dm), PETSC_ERR_USER, "Dimension must be 1,2,3 - found %" PetscInt_FMT, dim);
2264: PetscFunctionReturn(PETSC_SUCCESS);
2265: }
/*@
  DMSwarmSetPointCoordinatesRandom - Sets initial coordinates for particles in each cell

  Collective

  Input Parameters:
+ dm  - the `DMSWARM`
- Npc - The number of particles per cell in the cell `DM`

  Level: intermediate

  Notes:
  The user must use `DMSwarmSetCellDM()` to set the cell `DM` first. The particles are placed randomly inside each cell. If only
  one particle is in each cell, it is placed at the centroid.

.seealso: `DM`, `DMSWARM`, `DMSwarmSetCellDM()`
@*/
PetscErrorCode DMSwarmSetPointCoordinatesRandom(DM dm, PetscInt Npc)
{
  DM              cdm;
  DMSwarmCellDM   celldm;
  PetscRandom     rnd;
  DMPolytopeType  ct;
  PetscBool       simplex;
  PetscReal      *centroid, *coords, *xi0, *v0, *J, *invJ, detJ;
  PetscInt        dim, d, cStart, cEnd, c, p, Nfc;
  const char    **coordFields;

  PetscFunctionBeginUser;
  /* Reference coordinates are drawn uniformly from [-1, 1] per dimension */
  PetscCall(PetscRandomCreate(PetscObjectComm((PetscObject)dm), &rnd));
  PetscCall(PetscRandomSetInterval(rnd, -1.0, 1.0));
  PetscCall(PetscRandomSetType(rnd, PETSCRAND48));

  PetscCall(DMSwarmGetCellDMActive(dm, &celldm));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCheck(Nfc == 1, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "We only support a single coordinate field right now, not %" PetscInt_FMT, Nfc);
  PetscCall(DMSwarmGetCellDM(dm, &cdm));
  PetscCall(DMGetDimension(cdm, &dim));
  PetscCall(DMPlexGetHeightStratum(cdm, 0, &cStart, &cEnd));
  /* Simplex detection from the first cell: a d-simplex has d+1 vertices
     (assumes a homogeneous mesh — cell type of cStart is used for all cells) */
  PetscCall(DMPlexGetCellType(cdm, cStart, &ct));
  simplex = DMPolytopeTypeGetNumVertices(ct) == DMPolytopeTypeGetDim(ct) + 1 ? PETSC_TRUE : PETSC_FALSE;

  PetscCall(PetscMalloc5(dim, &centroid, dim, &xi0, dim, &v0, dim * dim, &J, dim * dim, &invJ));
  /* xi0 is the first vertex of the reference cell, (-1, ..., -1) */
  for (d = 0; d < dim; ++d) xi0[d] = -1.0;
  PetscCall(DMSwarmGetField(dm, coordFields[0], NULL, NULL, (void **)&coords));
  for (c = cStart; c < cEnd; ++c) {
    if (Npc == 1) {
      /* A single particle goes at the cell centroid */
      PetscCall(DMPlexComputeCellGeometryFVM(cdm, c, NULL, centroid, NULL));
      for (d = 0; d < dim; ++d) coords[c * dim + d] = centroid[d];
    } else {
      PetscCall(DMPlexComputeCellGeometryFEM(cdm, c, NULL, v0, J, invJ, &detJ)); /* affine */
      for (p = 0; p < Npc; ++p) {
        const PetscInt n = c * Npc + p;
        PetscReal      sum = 0.0, refcoords[3];

        for (d = 0; d < dim; ++d) {
          PetscCall(PetscRandomGetValueReal(rnd, &refcoords[d]));
          sum += refcoords[d];
        }
        /* NOTE(review): for simplices, points with coordinate sum > 0 lie outside the
           reference simplex and are shifted back — confirm this mapping keeps them inside */
        if (simplex && sum > 0.0)
          for (d = 0; d < dim; ++d) refcoords[d] -= PetscSqrtReal(dim) * sum;
        /* Map reference coordinates to real space via the affine cell geometry */
        CoordinatesRefToReal(dim, dim, xi0, v0, J, refcoords, &coords[n * dim]);
      }
    }
  }
  PetscCall(DMSwarmRestoreField(dm, coordFields[0], NULL, NULL, (void **)&coords));
  PetscCall(PetscFree5(centroid, xi0, v0, J, invJ));
  PetscCall(PetscRandomDestroy(&rnd));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2338: /*@
2339: DMSwarmGetType - Get particular flavor of `DMSWARM`
2341: Collective
2343: Input Parameter:
2344: . sw - the `DMSWARM`
2346: Output Parameter:
2347: . stype - the `DMSWARM` type (e.g. `DMSWARM_PIC`)
2349: Level: advanced
2351: .seealso: `DM`, `DMSWARM`, `DMSwarmSetMigrateType()`, `DMSwarmSetCollectType()`, `DMSwarmType`, `DMSWARM_PIC`, `DMSWARM_BASIC`
2352: @*/
2353: PetscErrorCode DMSwarmGetType(DM sw, DMSwarmType *stype)
2354: {
2355: DM_Swarm *swarm = (DM_Swarm *)sw->data;
2357: PetscFunctionBegin;
2359: PetscAssertPointer(stype, 2);
2360: *stype = swarm->swarm_type;
2361: PetscFunctionReturn(PETSC_SUCCESS);
2362: }
2364: /*@
2365: DMSwarmSetType - Set particular flavor of `DMSWARM`
2367: Collective
2369: Input Parameters:
2370: + sw - the `DMSWARM`
2371: - stype - the `DMSWARM` type (e.g. `DMSWARM_PIC`)
2373: Level: advanced
2375: .seealso: `DM`, `DMSWARM`, `DMSwarmSetMigrateType()`, `DMSwarmSetCollectType()`, `DMSwarmType`, `DMSWARM_PIC`, `DMSWARM_BASIC`
2376: @*/
2377: PetscErrorCode DMSwarmSetType(DM sw, DMSwarmType stype)
2378: {
2379: DM_Swarm *swarm = (DM_Swarm *)sw->data;
2381: PetscFunctionBegin;
2383: swarm->swarm_type = stype;
2384: if (swarm->swarm_type == DMSWARM_PIC) PetscCall(DMSwarmSetUpPIC(sw));
2385: PetscFunctionReturn(PETSC_SUCCESS);
2386: }
/* Build a DMPLEX (options prefix "remap_") carrying a single FE field "distribution",
   used as the target mesh when remapping the particle distribution. */
static PetscErrorCode DMSwarmCreateRemapDM_Private(DM sw, DM *rdm)
{
  PetscFE        fe;
  DMPolytopeType ct;
  PetscInt       dim, cStart;
  const char    *prefix = "remap_";

  PetscFunctionBegin;
  PetscCall(DMCreate(PetscObjectComm((PetscObject)sw), rdm));
  PetscCall(DMSetType(*rdm, DMPLEX));
  PetscCall(DMPlexSetOptionsPrefix(*rdm, prefix));
  PetscCall(DMSetFromOptions(*rdm));
  PetscCall(PetscObjectSetName((PetscObject)*rdm, "remap"));
  PetscCall(DMViewFromOptions(*rdm, NULL, "-dm_view"));

  /* Use the polytope type of the first cell to build a matching finite element */
  PetscCall(DMGetDimension(*rdm, &dim));
  PetscCall(DMPlexGetHeightStratum(*rdm, 0, &cStart, NULL));
  PetscCall(DMPlexGetCellType(*rdm, cStart, &ct));
  PetscCall(PetscFECreateByCell(PETSC_COMM_SELF, dim, 1, ct, prefix, PETSC_DETERMINE, &fe));
  PetscCall(PetscObjectSetName((PetscObject)fe, "distribution"));
  PetscCall(DMSetField(*rdm, 0, NULL, (PetscObject)fe));
  PetscCall(DMCreateDS(*rdm));
  PetscCall(PetscFEDestroy(&fe));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* DMSetUp implementation for DMSWARM: creates the remap cell DM if requested,
   selects a migration strategy for PIC mode, and finalizes field registration. */
static PetscErrorCode DMSetup_Swarm(DM sw)
{
  DM_Swarm *swarm = (DM_Swarm *)sw->data;

  PetscFunctionBegin;
  /* Idempotent: repeated DMSetUp() calls are no-ops */
  if (swarm->issetup) PetscFunctionReturn(PETSC_SUCCESS);
  swarm->issetup = PETSC_TRUE;

  /* Lazily create the "remap" cell DM with coordinate+velocity fields and weights */
  if (swarm->remap_type != DMSWARM_REMAP_NONE) {
    DMSwarmCellDM celldm;

    PetscCall(DMSwarmGetCellDMByName(sw, "remap", &celldm));
    if (!celldm) {
      DM          rdm;
      const char *fieldnames[2]  = {DMSwarmPICField_coor, "velocity"};
      const char *vfieldnames[1] = {"w_q"};

      PetscCall(DMSwarmCreateRemapDM_Private(sw, &rdm));
      PetscCall(DMSwarmCellDMCreate(rdm, 1, vfieldnames, 2, fieldnames, &celldm));
      PetscCall(DMSwarmAddCellDM(sw, celldm));
      PetscCall(DMSwarmCellDMDestroy(&celldm));
      PetscCall(DMDestroy(&rdm));
    }
  }

  /* Pick the migration strategy based on which point-location hooks the cell DM provides */
  if (swarm->swarm_type == DMSWARM_PIC) {
    DMSwarmCellDM celldm;

    PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
    PetscCheck(celldm, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "No active cell DM. DMSWARM_PIC requires you call DMSwarmSetCellDM() or DMSwarmAddCellDM()");
    if (celldm->dm->ops->locatepointssubdomain) {
      /* check methods exists for exact ownership identificiation */
      PetscCall(PetscInfo(sw, "DMSWARM_PIC: Using method CellDM->ops->LocatePointsSubdomain\n"));
      swarm->migrate_type = DMSWARM_MIGRATE_DMCELLEXACT;
    } else {
      /* check methods exist for point location AND rank neighbor identification */
      PetscCheck(celldm->dm->ops->locatepoints, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "DMSWARM_PIC requires the method CellDM->ops->locatepoints be defined");
      PetscCall(PetscInfo(sw, "DMSWARM_PIC: Using method CellDM->LocatePoints\n"));

      PetscCheck(celldm->dm->ops->getneighbors, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "DMSWARM_PIC requires the method CellDM->ops->getneighbors be defined");
      PetscCall(PetscInfo(sw, "DMSWARM_PIC: Using method CellDM->GetNeigbors\n"));

      swarm->migrate_type = DMSWARM_MIGRATE_DMCELLNSCATTER;
    }
  }

  /* No further fields may be registered after this point */
  PetscCall(DMSwarmFinalizeFieldRegister(sw));

  /* check some fields were registered */
  PetscCheck(swarm->db->nfields > 2, PetscObjectComm((PetscObject)sw), PETSC_ERR_USER, "At least one field user must be registered via DMSwarmRegisterXXX()");
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* DMDestroy implementation for DMSWARM: reference-counted teardown of swarm-private data */
static PetscErrorCode DMDestroy_Swarm(DM dm)
{
  DM_Swarm *swarm = (DM_Swarm *)dm->data;

  PetscFunctionBegin;
  /* Only tear down once the last reference is released */
  if (--swarm->refct > 0) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscObjectListDestroy(&swarm->cellDMs));
  PetscCall(PetscFree(swarm->activeCellDM));
  PetscCall(DMSwarmDataBucketDestroy(&swarm->db));
  PetscCall(PetscFree(swarm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Draw viewer for DMSWARM: renders the cell DM, then overlays each particle as a small circle.
   Radius is controlled by -dm_view_swarm_radius. */
static PetscErrorCode DMSwarmView_Draw(DM dm, PetscViewer viewer)
{
  DM            cdm;
  DMSwarmCellDM celldm;
  PetscDraw     draw;
  PetscReal    *coords, oldPause, radius = 0.01;
  PetscInt      Np, p, bs, Nfc;
  const char  **coordFields;

  PetscFunctionBegin;
  PetscCall(PetscOptionsGetReal(NULL, ((PetscObject)dm)->prefix, "-dm_view_swarm_radius", &radius, NULL));
  PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
  PetscCall(DMSwarmGetCellDM(dm, &cdm));
  /* Suppress the pause while drawing the background mesh, then restore it */
  PetscCall(PetscDrawGetPause(draw, &oldPause));
  PetscCall(PetscDrawSetPause(draw, 0.0));
  PetscCall(DMView(cdm, viewer));
  PetscCall(PetscDrawSetPause(draw, oldPause));

  PetscCall(DMSwarmGetCellDMActive(dm, &celldm));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCheck(Nfc == 1, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "We only support a single coordinate field right now, not %" PetscInt_FMT, Nfc);
  PetscCall(DMSwarmGetLocalSize(dm, &Np));
  PetscCall(DMSwarmGetField(dm, coordFields[0], &bs, NULL, (void **)&coords));
  for (p = 0; p < Np; ++p) {
    const PetscInt i = p * bs;

    /* NOTE(review): reads coords[i] and coords[i+1], so this assumes bs >= 2 (2D drawing) — confirm for 1D swarms */
    PetscCall(PetscDrawEllipse(draw, coords[i], coords[i + 1], radius, radius, PETSC_DRAW_BLUE));
  }
  PetscCall(DMSwarmRestoreField(dm, coordFields[0], &bs, NULL, (void **)&coords));
  PetscCall(PetscDrawFlush(draw));
  PetscCall(PetscDrawPause(draw));
  PetscCall(PetscDrawSave(draw));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* ASCII view of a DMSwarm: a header line with the name and dimension, per-rank
   particle counts, and, for the PETSC_VIEWER_ASCII_INFO format, the containing
   cell (plus weight and coordinates when the conventional weight field "w_q"
   exists) of every local particle. */
static PetscErrorCode DMView_Swarm_Ascii(DM dm, PetscViewer viewer)
{
  PetscViewerFormat format;
  PetscInt         *sizes;
  PetscInt          dim, Np, maxSize = 17;
  MPI_Comm          comm;
  PetscMPIInt       rank, size;
  const char       *name, *cellid;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(viewer, &format));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMSwarmGetLocalSize(dm, &Np));
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscObjectGetName((PetscObject)dm, &name));
  if (name) PetscCall(PetscViewerASCIIPrintf(viewer, "%s in %" PetscInt_FMT " dimension%s:\n", name, dim, dim == 1 ? "" : "s"));
  else PetscCall(PetscViewerASCIIPrintf(viewer, "Swarm in %" PetscInt_FMT " dimension%s:\n", dim, dim == 1 ? "" : "s"));
  /* For fewer than maxSize ranks print every rank's count; otherwise only the
     global min/max (then only the first two slots of sizes are used) */
  if (size < maxSize) PetscCall(PetscCalloc1(size, &sizes));
  else PetscCall(PetscCalloc1(3, &sizes));
  if (size < maxSize) {
    PetscCallMPI(MPI_Gather(&Np, 1, MPIU_INT, sizes, 1, MPIU_INT, 0, comm));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Number of particles per rank:"));
    for (PetscInt p = 0; p < size; ++p) {
      /* Only rank 0 has the gathered counts */
      if (rank == 0) PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT, sizes[p]));
    }
  } else {
    PetscInt locMinMax[2] = {Np, Np};

    PetscCall(PetscGlobalMinMaxInt(comm, locMinMax, sizes));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Min/Max of particles per rank: %" PetscInt_FMT "/%" PetscInt_FMT, sizes[0], sizes[1]));
  }
  PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
  PetscCall(PetscFree(sizes));
  if (format == PETSC_VIEWER_ASCII_INFO) {
    DM_Swarm     *sw = (DM_Swarm *)dm->data;
    DMSwarmCellDM celldm;
    PetscInt     *cell;
    PetscBool     hasWeight;
    const char   *fname = "w_q";

    /* Detailed per-particle output only includes weights/coordinates when the
       conventional weight field "w_q" has been registered */
    PetscCall(DMSwarmDataFieldStringInList(fname, sw->db->nfields, (const DMSwarmDataField *)sw->db->field, &hasWeight));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Cells containing each particle:\n"));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(DMSwarmGetCellDMActive(dm, &celldm));
    PetscCall(DMSwarmCellDMGetCellID(celldm, &cellid));
    PetscCall(DMSwarmGetField(dm, cellid, NULL, NULL, (void **)&cell));
    if (hasWeight) {
      PetscReal   *weight, **coords;
      PetscInt     Ncf, *bsC, bs;
      const char **coordNames;

      /* Check out every coordinate field (there may be several, with differing block sizes) */
      PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Ncf, &coordNames));
      PetscCall(PetscMalloc2(Ncf, &coords, Ncf, &bsC));
      for (PetscInt n = 0; n < Ncf; ++n) PetscCall(DMSwarmGetField(dm, coordNames[n], &bsC[n], NULL, (void **)&coords[n]));
      PetscCall(DMSwarmGetField(dm, fname, &bs, NULL, (void **)&weight));
      PetscCheck(bs == 1, comm, PETSC_ERR_ARG_WRONG, "The weight field must be a scalar");
      for (PetscInt p = 0; p < Np; ++p) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " p%" PetscInt_FMT ": %" PetscInt_FMT " wt: %g x: (", p, cell[p], (double)weight[p]));
        for (PetscInt n = 0; n < Ncf; ++n) {
          if (n > 0) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, ", "));
          for (PetscInt d = 0; d < bsC[n]; ++d) {
            if (d > 0) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, ", "));
            PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "%g", (double)coords[n][p * bsC[n] + d]));
          }
        }
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, ")\n"));
      }
      PetscCall(DMSwarmRestoreField(dm, fname, NULL, NULL, (void **)&weight));
      for (PetscInt n = 0; n < Ncf; ++n) PetscCall(DMSwarmRestoreField(dm, coordNames[n], &bsC[n], NULL, (void **)&coords[n]));
      PetscCall(PetscFree2(coords, bsC));
    } else {
      for (PetscInt p = 0; p < Np; ++p) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, " p%" PetscInt_FMT ": %" PetscInt_FMT "\n", p, cell[p]));
    }
    PetscCall(DMSwarmRestoreField(dm, cellid, NULL, NULL, (void **)&cell));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2597: static PetscErrorCode DMView_Swarm(DM dm, PetscViewer viewer)
2598: {
2599: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2600: PetscBool isascii, ibinary, isvtk, isdraw, ispython;
2601: #if defined(PETSC_HAVE_HDF5)
2602: PetscBool ishdf5;
2603: #endif
2605: PetscFunctionBegin;
2608: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
2609: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &ibinary));
2610: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk));
2611: #if defined(PETSC_HAVE_HDF5)
2612: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
2613: #endif
2614: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
2615: PetscCall(PetscObjectHasFunction((PetscObject)viewer, "PetscViewerPythonViewObject_C", &ispython));
2616: if (isascii) {
2617: PetscViewerFormat format;
2619: PetscCall(PetscViewerGetFormat(viewer, &format));
2620: switch (format) {
2621: case PETSC_VIEWER_ASCII_INFO_DETAIL:
2622: PetscCall(DMSwarmDataBucketView(PetscObjectComm((PetscObject)dm), swarm->db, NULL, DATABUCKET_VIEW_STDOUT));
2623: break;
2624: default:
2625: PetscCall(DMView_Swarm_Ascii(dm, viewer));
2626: }
2627: } else {
2628: #if defined(PETSC_HAVE_HDF5)
2629: if (ishdf5) PetscCall(DMSwarmView_HDF5(dm, viewer));
2630: #endif
2631: if (isdraw) PetscCall(DMSwarmView_Draw(dm, viewer));
2632: if (ispython) PetscCall(PetscViewerPythonViewObject(viewer, (PetscObject)dm));
2633: }
2634: PetscFunctionReturn(PETSC_SUCCESS);
2635: }
/*@
  DMSwarmGetCellSwarm - Extracts a single cell from the `DMSWARM` object, returns it as a single cell `DMSWARM`.
  The cell `DM` is filtered for fields of that cell, and the filtered `DM` is used as the cell `DM` of the new swarm object.

  Noncollective

  Input Parameters:
+ sw        - the `DMSWARM`
. cellID    - the integer id of the cell to be extracted and filtered
- cellswarm - The `DMSWARM` to receive the cell

  Level: beginner

  Notes:
  This presently only supports `DMSWARM_PIC` type

  Should be restored with `DMSwarmRestoreCellSwarm()`

  Changes to this cell of the swarm will be lost if they are made prior to restoring this cell.

.seealso: `DM`, `DMSWARM`, `DMSwarmRestoreCellSwarm()`
@*/
PetscErrorCode DMSwarmGetCellSwarm(DM sw, PetscInt cellID, DM cellswarm)
{
  DM_Swarm   *original = (DM_Swarm *)sw->data;
  DMLabel     label;
  DM          dmc, subdmc;
  PetscInt   *pids, particles, dim;
  const char *name;

  PetscFunctionBegin;
  /* Configure new swarm */
  PetscCall(DMSetType(cellswarm, DMSWARM));
  PetscCall(DMGetDimension(sw, &dim));
  PetscCall(DMSetDimension(cellswarm, dim));
  PetscCall(DMSwarmSetType(cellswarm, DMSWARM_PIC));
  /* Destroy the unused, unconfigured data bucket to prevent stragglers in memory */
  PetscCall(DMSwarmDataBucketDestroy(&((DM_Swarm *)cellswarm->data)->db));
  /* Copy the particles belonging to cellID into a fresh data bucket for the cell swarm */
  PetscCall(DMSwarmSortGetAccess(sw));
  PetscCall(DMSwarmSortGetNumberOfPointsPerCell(sw, cellID, &particles));
  PetscCall(DMSwarmSortGetPointsPerCell(sw, cellID, &particles, &pids));
  PetscCall(DMSwarmDataBucketCreateFromSubset(original->db, particles, pids, &((DM_Swarm *)cellswarm->data)->db));
  PetscCall(DMSwarmSortRestoreAccess(sw));
  PetscCall(DMSwarmSortRestorePointsPerCell(sw, cellID, &particles, &pids));
  /* Build a sub-DM containing only cellID (label value 1 marks the kept cell)
     and install it as the cell DM of the new swarm */
  PetscCall(DMSwarmGetCellDM(sw, &dmc));
  PetscCall(DMLabelCreate(PetscObjectComm((PetscObject)sw), "singlecell", &label));
  PetscCall(DMAddLabel(dmc, label));
  PetscCall(DMLabelSetValue(label, cellID, 1));
  PetscCall(DMPlexFilter(dmc, label, 1, PETSC_FALSE, PETSC_FALSE, PetscObjectComm((PetscObject)dmc), NULL, &subdmc));
  PetscCall(PetscObjectGetName((PetscObject)dmc, &name));
  PetscCall(PetscObjectSetName((PetscObject)subdmc, name));
  PetscCall(DMSwarmSetCellDM(cellswarm, subdmc));
  PetscCall(DMLabelDestroy(&label));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
  DMSwarmRestoreCellSwarm - Restores a `DMSWARM` object obtained with `DMSwarmGetCellSwarm()`. All fields are copied back into the parent swarm.

  Noncollective

  Input Parameters:
+ sw        - the parent `DMSWARM`
. cellID    - the integer id of the cell to be copied back into the parent swarm
- cellswarm - the cell swarm object

  Level: beginner

  Note:
  This only supports `DMSWARM_PIC` types of `DMSWARM`s

.seealso: `DM`, `DMSWARM`, `DMSwarmGetCellSwarm()`
@*/
PetscErrorCode DMSwarmRestoreCellSwarm(DM sw, PetscInt cellID, DM cellswarm)
{
  DM        dmc;
  PetscInt *pids, particles, p;

  PetscFunctionBegin;
  /* Recover the parent-swarm particle ids for this cell so each particle can
     be copied back to its original slot */
  PetscCall(DMSwarmSortGetAccess(sw));
  PetscCall(DMSwarmSortGetPointsPerCell(sw, cellID, &particles, &pids));
  PetscCall(DMSwarmSortRestoreAccess(sw));
  /* Pointwise copy of each particle based on pid. The parent swarm may not be altered during this process. */
  for (p = 0; p < particles; ++p) PetscCall(DMSwarmDataBucketCopyPoint(((DM_Swarm *)cellswarm->data)->db, pids[p], ((DM_Swarm *)sw->data)->db, pids[p]));
  /* Free memory, destroy cell dm */
  PetscCall(DMSwarmGetCellDM(cellswarm, &dmc));
  PetscCall(DMDestroy(&dmc));
  PetscCall(DMSwarmSortRestorePointsPerCell(sw, cellID, &particles, &pids));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2728: /*@
2729: DMSwarmComputeMoments - Compute the first three particle moments for a given field
2731: Noncollective
2733: Input Parameters:
2734: + sw - the `DMSWARM`
2735: . coordinate - the coordinate field name
2736: - weight - the weight field name
2738: Output Parameter:
2739: . moments - the field moments
2741: Level: intermediate
2743: Notes:
2744: The `moments` array should be of length bs + 2, where bs is the block size of the coordinate field.
2746: The weight field must be a scalar, having blocksize 1.
2748: .seealso: `DM`, `DMSWARM`, `DMPlexComputeMoments()`
2749: @*/
2750: PetscErrorCode DMSwarmComputeMoments(DM sw, const char coordinate[], const char weight[], PetscReal moments[])
2751: {
2752: const PetscReal *coords;
2753: const PetscReal *w;
2754: PetscReal *mom;
2755: PetscDataType dtc, dtw;
2756: PetscInt bsc, bsw, Np;
2757: MPI_Comm comm;
2759: PetscFunctionBegin;
2761: PetscAssertPointer(coordinate, 2);
2762: PetscAssertPointer(weight, 3);
2763: PetscAssertPointer(moments, 4);
2764: PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));
2765: PetscCall(DMSwarmGetField(sw, coordinate, &bsc, &dtc, (void **)&coords));
2766: PetscCall(DMSwarmGetField(sw, weight, &bsw, &dtw, (void **)&w));
2767: PetscCheck(dtc == PETSC_REAL, comm, PETSC_ERR_ARG_WRONG, "Coordinate field %s must be real, not %s", coordinate, PetscDataTypes[dtc]);
2768: PetscCheck(dtw == PETSC_REAL, comm, PETSC_ERR_ARG_WRONG, "Weight field %s must be real, not %s", weight, PetscDataTypes[dtw]);
2769: PetscCheck(bsw == 1, comm, PETSC_ERR_ARG_WRONG, "Weight field %s must be a scalar, not blocksize %" PetscInt_FMT, weight, bsw);
2770: PetscCall(DMSwarmGetLocalSize(sw, &Np));
2771: PetscCall(DMGetWorkArray(sw, bsc + 2, MPIU_REAL, &mom));
2772: PetscCall(PetscArrayzero(mom, bsc + 2));
2773: for (PetscInt p = 0; p < Np; ++p) {
2774: const PetscReal *c = &coords[p * bsc];
2775: const PetscReal wp = w[p];
2777: mom[0] += wp;
2778: for (PetscInt d = 0; d < bsc; ++d) {
2779: mom[d + 1] += wp * c[d];
2780: mom[d + bsc + 1] += wp * PetscSqr(c[d]);
2781: }
2782: }
2783: PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&coords));
2784: PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&w));
2785: PetscCallMPI(MPIU_Allreduce(mom, moments, bsc + 2, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)sw)));
2786: PetscCall(DMRestoreWorkArray(sw, bsc + 2, MPIU_REAL, &mom));
2787: PetscFunctionReturn(PETSC_SUCCESS);
2788: }
2790: static PetscErrorCode DMSetFromOptions_Swarm(DM dm, PetscOptionItems PetscOptionsObject)
2791: {
2792: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2794: PetscFunctionBegin;
2795: PetscOptionsHeadBegin(PetscOptionsObject, "DMSwarm Options");
2796: PetscCall(PetscOptionsEnum("-dm_swarm_remap_type", "Remap algorithm", "DMSwarmSetRemapType", DMSwarmRemapTypeNames, (PetscEnum)swarm->remap_type, (PetscEnum *)&swarm->remap_type, NULL));
2797: PetscOptionsHeadEnd();
2798: PetscFunctionReturn(PETSC_SUCCESS);
2799: }
2801: PETSC_INTERN PetscErrorCode DMClone_Swarm(DM, DM *);
/* Install the DMSWARM implementations into the DM ops table; entries left NULL
   are operations DMSWARM does not support. Called from DMCreate_Swarm,
   DMClone_Swarm, and DMSwarmReplace (which resets the table after destroying
   the old contents). */
static PetscErrorCode DMInitialize_Swarm(DM sw)
{
  PetscFunctionBegin;
  sw->ops->view                      = DMView_Swarm;
  sw->ops->load                      = NULL;
  sw->ops->setfromoptions            = DMSetFromOptions_Swarm;
  sw->ops->clone                     = DMClone_Swarm;
  sw->ops->setup                     = DMSetup_Swarm;
  sw->ops->createlocalsection        = NULL;
  sw->ops->createsectionpermutation  = NULL;
  sw->ops->createdefaultconstraints  = NULL;
  sw->ops->createglobalvector        = DMCreateGlobalVector_Swarm;
  sw->ops->createlocalvector         = DMCreateLocalVector_Swarm;
  sw->ops->getlocaltoglobalmapping   = NULL;
  sw->ops->createfieldis             = NULL;
  sw->ops->createcoordinatedm        = NULL;
  sw->ops->createcellcoordinatedm    = NULL;
  sw->ops->getcoloring               = NULL;
  sw->ops->creatematrix              = DMCreateMatrix_Swarm;
  sw->ops->createinterpolation       = NULL;
  sw->ops->createinjection           = NULL;
  sw->ops->createmassmatrix          = DMCreateMassMatrix_Swarm;
  sw->ops->creategradientmatrix      = DMCreateGradientMatrix_Swarm;
  sw->ops->refine                    = NULL;
  sw->ops->coarsen                   = NULL;
  sw->ops->refinehierarchy           = NULL;
  sw->ops->coarsenhierarchy          = NULL;
  sw->ops->globaltolocalbegin        = DMGlobalToLocalBegin_Swarm;
  sw->ops->globaltolocalend          = DMGlobalToLocalEnd_Swarm;
  sw->ops->localtoglobalbegin        = DMLocalToGlobalBegin_Swarm;
  sw->ops->localtoglobalend          = DMLocalToGlobalEnd_Swarm;
  sw->ops->destroy                   = DMDestroy_Swarm;
  sw->ops->createsubdm               = NULL;
  sw->ops->getdimpoints              = NULL;
  sw->ops->locatepoints              = NULL;
  sw->ops->projectfieldlocal         = DMProjectFieldLocal_Swarm;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2842: PETSC_INTERN PetscErrorCode DMClone_Swarm(DM dm, DM *newdm)
2843: {
2844: DM_Swarm *swarm = (DM_Swarm *)dm->data;
2846: PetscFunctionBegin;
2847: swarm->refct++;
2848: (*newdm)->data = swarm;
2849: PetscCall(PetscObjectChangeTypeName((PetscObject)*newdm, DMSWARM));
2850: PetscCall(DMInitialize_Swarm(*newdm));
2851: (*newdm)->dim = dm->dim;
2852: PetscFunctionReturn(PETSC_SUCCESS);
2853: }
2855: /*MC
2856: DMSWARM = "swarm" - A `DM` object for particle methods, such as particle-in-cell (PIC), in which the underlying
2857: data is both (i) dynamic in length, (ii) and of arbitrary data type.
2859: Level: intermediate
2861: Notes:
  User data can be represented by `DMSWARM` by registering "fields" that are to be stored on particles.
2863: To register a field, the user must provide;
2864: (a) a unique name;
2865: (b) the data type (or size in bytes);
2866: (c) the block size of the data.
2868: For example, suppose the application requires a unique id, energy, momentum and density to be stored
2869: on a set of particles. Then the following code could be used
2870: .vb
2871: DMSwarmInitializeFieldRegister(dm)
2872: DMSwarmRegisterPetscDatatypeField(dm,"uid",1,PETSC_LONG);
2873: DMSwarmRegisterPetscDatatypeField(dm,"energy",1,PETSC_REAL);
2874: DMSwarmRegisterPetscDatatypeField(dm,"momentum",3,PETSC_REAL);
2875: DMSwarmRegisterPetscDatatypeField(dm,"density",1,PETSC_FLOAT);
2876: DMSwarmFinalizeFieldRegister(dm)
2877: .ve
2879: The fields represented by `DMSWARM` are dynamic and can be re-sized at any time.
2880: The only restriction imposed by `DMSWARM` is that all fields contain the same number of particles.
2882: To support particle methods, "migration" techniques are provided. These methods migrate data
2883: between ranks.
2885: `DMSWARM` supports the methods `DMCreateGlobalVector()` and `DMCreateLocalVector()`.
2886: As a `DMSWARM` may internally define and store values of different data types,
2887: before calling `DMCreateGlobalVector()` or `DMCreateLocalVector()`, the user must inform `DMSWARM` which
2888: fields should be used to define a `Vec` object via `DMSwarmVectorDefineField()`
2889: The specified field can be changed at any time - thereby permitting vectors
2890: compatible with different fields to be created.
2892: A dual representation of fields in the `DMSWARM` and a Vec object is permitted via `DMSwarmCreateGlobalVectorFromField()`
2893: Here the data defining the field in the `DMSWARM` is shared with a `Vec`.
2894: This is inherently unsafe if you alter the size of the field at any time between
2895: calls to `DMSwarmCreateGlobalVectorFromField()` and `DMSwarmDestroyGlobalVectorFromField()`.
2896: If the local size of the `DMSWARM` does not match the local size of the global vector
2897: when `DMSwarmDestroyGlobalVectorFromField()` is called, an error is thrown.
2899: Additional high-level support is provided for Particle-In-Cell methods. Refer to `DMSwarmSetType()`.
2901: .seealso: `DM`, `DMSWARM`, `DMType`, `DMCreate()`, `DMSetType()`, `DMSwarmSetType()`, `DMSwarmType`, `DMSwarmCreateGlobalVectorFromField()`,
2902: `DMCreateGlobalVector()`, `DMCreateLocalVector()`
2903: M*/
/* Type constructor for DMSWARM: allocates the DM_Swarm implementation data,
   creates its data bucket, and installs defaults and the ops table. */
PETSC_EXTERN PetscErrorCode DMCreate_Swarm(DM dm)
{
  DM_Swarm *swarm;

  PetscFunctionBegin;
  PetscCall(PetscNew(&swarm));
  dm->data = swarm;
  PetscCall(DMSwarmDataBucketCreate(&swarm->db));
  PetscCall(DMSwarmInitializeFieldRegister(dm));
  dm->dim                               = 0;
  /* The DM_Swarm structure is reference counted since clones share it */
  swarm->refct                          = 1;
  swarm->issetup                        = PETSC_FALSE;
  swarm->swarm_type                     = DMSWARM_BASIC;
  swarm->migrate_type                   = DMSWARM_MIGRATE_BASIC;
  swarm->collect_type                   = DMSWARM_COLLECT_BASIC;
  swarm->migrate_error_on_missing_point = PETSC_FALSE;
  swarm->collect_view_active            = PETSC_FALSE;
  swarm->collect_view_reset_nlocal      = -1;
  PetscCall(DMInitialize_Swarm(dm));
  /* Register the composed-data id once per process (guarded by the -1 sentinel) */
  if (SwarmDataFieldId == -1) PetscCall(PetscObjectComposedDataRegister(&SwarmDataFieldId));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Replace dm with the contents of ndm, and then destroy ndm
   - Share the DM_Swarm structure

   The swarm-specific state of dm (its DM_Swarm data) is destroyed and replaced
   by a new reference to ndm's DM_Swarm; name, dimension, and periodicity are
   copied over, then *ndm is destroyed. */
PetscErrorCode DMSwarmReplace(DM dm, DM *ndm)
{
  DM               dmNew = *ndm;
  const PetscReal *maxCell, *Lstart, *L;
  PetscInt         dim;

  PetscFunctionBegin;
  /* Replacing a DM with itself reduces to dropping the extra reference */
  if (dm == dmNew) {
    PetscCall(DMDestroy(ndm));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  dm->setupcalled = dmNew->setupcalled;
  /* Keep dm's existing name if it has one; otherwise take the new DM's name */
  if (!dm->hdr.name) {
    const char *name;

    PetscCall(PetscObjectGetName((PetscObject)*ndm, &name));
    PetscCall(PetscObjectSetName((PetscObject)dm, name));
  }
  PetscCall(DMGetDimension(dmNew, &dim));
  PetscCall(DMSetDimension(dm, dim));
  PetscCall(DMGetPeriodicity(dmNew, &maxCell, &Lstart, &L));
  PetscCall(DMSetPeriodicity(dm, maxCell, Lstart, L));
  /* Release dm's own DM_Swarm data and adopt dmNew's, taking an extra
     reference before destroying *ndm so the shared data survives */
  PetscCall(DMDestroy_Swarm(dm));
  PetscCall(DMInitialize_Swarm(dm));
  dm->data = dmNew->data;
  ((DM_Swarm *)dmNew->data)->refct++;
  PetscCall(DMDestroy(ndm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2962: /*@
2963: DMSwarmDuplicate - Creates a new `DMSWARM` with the same fields and cell `DM`s but no particles
2965: Collective
2967: Input Parameter:
2968: . sw - the `DMSWARM`
2970: Output Parameter:
2971: . nsw - the new `DMSWARM`
2973: Level: beginner
2975: .seealso: `DM`, `DMSWARM`, `DMSwarmCreate()`, `DMClone()`
2976: @*/
2977: PetscErrorCode DMSwarmDuplicate(DM sw, DM *nsw)
2978: {
2979: DM_Swarm *swarm = (DM_Swarm *)sw->data;
2980: DMSwarmDataField *fields;
2981: DMSwarmCellDM celldm, ncelldm;
2982: DMSwarmType stype;
2983: const char *name, **celldmnames;
2984: void *ctx;
2985: PetscInt dim, Nf, Ndm;
2986: PetscBool flg;
2988: PetscFunctionBegin;
2989: PetscCall(DMCreate(PetscObjectComm((PetscObject)sw), nsw));
2990: PetscCall(DMSetType(*nsw, DMSWARM));
2991: PetscCall(PetscObjectGetName((PetscObject)sw, &name));
2992: PetscCall(PetscObjectSetName((PetscObject)*nsw, name));
2993: PetscCall(DMGetDimension(sw, &dim));
2994: PetscCall(DMSetDimension(*nsw, dim));
2995: PetscCall(DMSwarmGetType(sw, &stype));
2996: PetscCall(DMSwarmSetType(*nsw, stype));
2997: PetscCall(DMGetApplicationContext(sw, &ctx));
2998: PetscCall(DMSetApplicationContext(*nsw, ctx));
3000: PetscCall(DMSwarmDataBucketGetDMSwarmDataFields(swarm->db, &Nf, &fields));
3001: for (PetscInt f = 0; f < Nf; ++f) {
3002: PetscCall(DMSwarmDataFieldStringInList(fields[f]->name, ((DM_Swarm *)(*nsw)->data)->db->nfields, (const DMSwarmDataField *)((DM_Swarm *)(*nsw)->data)->db->field, &flg));
3003: if (!flg) PetscCall(DMSwarmRegisterPetscDatatypeField(*nsw, fields[f]->name, fields[f]->bs, fields[f]->petsc_type));
3004: }
3006: PetscCall(DMSwarmGetCellDMNames(sw, &Ndm, &celldmnames));
3007: for (PetscInt c = 0; c < Ndm; ++c) {
3008: DM dm;
3009: PetscInt Ncf;
3010: const char **coordfields, **fields;
3012: PetscCall(DMSwarmGetCellDMByName(sw, celldmnames[c], &celldm));
3013: PetscCall(DMSwarmCellDMGetDM(celldm, &dm));
3014: PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Ncf, &coordfields));
3015: PetscCall(DMSwarmCellDMGetFields(celldm, &Nf, &fields));
3016: PetscCall(DMSwarmCellDMCreate(dm, Nf, fields, Ncf, coordfields, &ncelldm));
3017: PetscCall(DMSwarmAddCellDM(*nsw, ncelldm));
3018: PetscCall(DMSwarmCellDMDestroy(&ncelldm));
3019: }
3020: PetscCall(PetscFree(celldmnames));
3022: PetscCall(DMSetFromOptions(*nsw));
3023: PetscCall(DMSetUp(*nsw));
3024: PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
3025: PetscCall(PetscObjectGetName((PetscObject)celldm, &name));
3026: PetscCall(DMSwarmSetCellDMActive(*nsw, name));
3027: PetscFunctionReturn(PETSC_SUCCESS);
3028: }
/* No-op: all work happens in DMLocalToGlobalEnd_Swarm, which performs a direct
   copy/accumulation between the (identically laid out) local and global vectors,
   so there is no communication to initiate here. */
PetscErrorCode DMLocalToGlobalBegin_Swarm(DM dm, Vec l, InsertMode mode, Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3036: PetscErrorCode DMLocalToGlobalEnd_Swarm(DM dm, Vec l, InsertMode mode, Vec g)
3037: {
3038: PetscFunctionBegin;
3039: switch (mode) {
3040: case INSERT_VALUES:
3041: PetscCall(VecCopy(l, g));
3042: break;
3043: case ADD_VALUES:
3044: PetscCall(VecAXPY(g, 1., l));
3045: break;
3046: default:
3047: SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Mode not supported: %d", mode);
3048: }
3049: PetscFunctionReturn(PETSC_SUCCESS);
3050: }
/* No-op: all work happens in DMGlobalToLocalEnd_Swarm; there is no
   communication to initiate for swarm vectors. */
PetscErrorCode DMGlobalToLocalBegin_Swarm(DM dm, Vec g, InsertMode mode, Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Global-to-local transfer is symmetric with local-to-global for swarms
   (identical vector layouts), so delegate with the vector roles swapped. */
PetscErrorCode DMGlobalToLocalEnd_Swarm(DM dm, Vec g, InsertMode mode, Vec l)
{
  PetscFunctionBegin;
  PetscCall(DMLocalToGlobalEnd_Swarm(dm, g, mode, l));
  PetscFunctionReturn(PETSC_SUCCESS);
}