/* telescope_coarsedm.c */
#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petsc/private/dmimpl.h>
#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>

#include "../src/ksp/pc/impls/telescope/telescope.h"

static PetscBool  cited      = PETSC_FALSE;
static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
                               " title = {Extreme-Scale Multigrid Components within PETSc},\n"
                               " author = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
                               " booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
                               " series = {PASC '16},\n"
                               " isbn = {978-1-4503-4126-4},\n"
                               " location = {Lausanne, Switzerland},\n"
                               " pages = {5:1--5:12},\n"
                               " articleno = {5},\n"
                               " numpages = {12},\n"
                               " url = {https://doi.acm.org/10.1145/2929908.2929913},\n"
                               " doi = {10.1145/2929908.2929913},\n"
                               " acmid = {2929913},\n"
                               " publisher = {ACM},\n"
                               " address = {New York, NY, USA},\n"
                               " keywords = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
                               " year = {2016}\n"
                               "}\n";

typedef struct {
  DM  dm_fine, dm_coarse; /* these DMs should be topologically identical but use different communicators */
  Mat permutation;
  Vec xp;
  PetscErrorCode (*fp_dm_field_scatter)(DM, Vec, ScatterMode, DM, Vec);
  PetscErrorCode (*fp_dm_state_scatter)(DM, ScatterMode, DM);
  void *dmksp_context_determined;
  void *dmksp_context_user;
} PC_Telescope_CoarseDMCtx;
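
/*
  Summary of the contract implemented in this file: PCTelescope with a coarse DM relies on
  methods composed on the DMs via PetscObjectComposeFunction(). The parent (fine) DM must
  provide "PCTelescopeFieldScatter" and may optionally provide "PCTelescopeStateScatter",
  with signatures matching fp_dm_field_scatter / fp_dm_state_scatter above.

  A minimal sketch of the user side, assuming a hypothetical routine MyFieldScatter (only
  the composed name and the signature are fixed by this file):

    static PetscErrorCode MyFieldScatter(DM dmf, Vec x, ScatterMode mode, DM dmc, Vec xc)
    {
      PetscFunctionBegin;
      if (mode == SCATTER_FORWARD) {
        ... gather x (fine communicator) into xc; xc is NULL on ranks outside the sub-communicator ...
      } else {
        ... spread xc back into x ...
      }
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    PetscCall(PetscObjectComposeFunction((PetscObject)dm_fine, "PCTelescopeFieldScatter", MyFieldScatter));
*/

/* Build the index set, work vectors and VecScatter used to move data between the fine and coarse spaces. */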
static PetscErrorCode PCTelescopeSetUp_scatters_CoarseDM(PC pc, PC_Telescope sred, PC_Telescope_CoarseDMCtx *ctx)
{
  Vec        xred, yred, xtmp, x, xp;
  VecScatter scatter;
  IS         isin;
  Mat        B;
  PetscInt   m, bs, st, ed;
  MPI_Comm   comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCall(PCGetOperators(pc, NULL, &B));
  PetscCall(MatCreateVecs(B, &x, NULL));
  PetscCall(MatGetBlockSize(B, &bs));
  PetscCall(VecDuplicate(x, &xp));
  m    = 0;
  xred = NULL;
  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(DMCreateGlobalVector(ctx->dm_coarse, &xred));
    PetscCall(VecDuplicate(xred, &yred));
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(ISCreateStride(comm, ed - st, st, 1, &isin));
    PetscCall(VecGetLocalSize(xred, &m));
  } else {
    PetscCall(VecGetOwnershipRange(x, &st, &ed));
    PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
  }
  PetscCall(ISSetBlockSize(isin, bs));
  PetscCall(VecCreate(comm, &xtmp));
  PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
  PetscCall(VecSetBlockSize(xtmp, bs));
  PetscCall(VecSetType(xtmp, ((PetscObject)x)->type_name));
  PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));
  sred->xred    = xred;
  sred->yred    = yred;
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xtmp    = xtmp;
  ctx->xp       = xp;
  PetscCall(VecDestroy(&x));
  PetscFunctionReturn(PETSC_SUCCESS);
}
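
/*
  Setup for the coarse-DM variant of PCTelescope:
    1. fetch the coarse DM composed on the parent DM (DMGetCoarseDM()) and attach it to the sub KSP;
    2. if the parent DM carries a KSPSetComputeOperators() callback which is not being ignored,
       determine the matching context for the coarse DM, either by pointer-matching against the fine
       DM's application/shell context or through a composed "PCTelescopeGetCoarseDMKSPContext" getter;
    3. query the composed scatter methods on the parent DM and build the internal scatters.
*/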
PetscErrorCode PCTelescopeSetUp_CoarseDM(PC pc, PC_Telescope sred)
{
  PC_Telescope_CoarseDMCtx *ctx;
  DM                        dm, dm_coarse = NULL;
  MPI_Comm                  comm;
  PetscBool                 has_perm, has_kspcomputeoperators, using_kspcomputeoperators;

  PetscFunctionBegin;
  PetscCall(PetscInfo(pc, "PCTelescope: setup (CoarseDM)\n"));
  PetscCall(PetscNew(&ctx));
  sred->dm_ctx = (void *)ctx;

  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCall(PCGetDM(pc, &dm));
  PetscCall(DMGetCoarseDM(dm, &dm_coarse));
  ctx->dm_fine   = dm;
  ctx->dm_coarse = dm_coarse;

  /* attach coarse dm to ksp on sub communicator */
  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPSetDM(sred->ksp, ctx->dm_coarse));
    if (sred->ignore_kspcomputeoperators) PetscCall(KSPSetDMActive(sred->ksp, PETSC_FALSE));
  }
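
  /* If the compute-operators callback is to be ignored, the DM on the sub KSP was deactivated
     above so KSPSetUp() will not attempt to build operators from it; the DM is reactivated
     further below once a callback and context have been inherited from the parent DM. */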

  /* check if there is a method to provide a permutation */
  has_perm                  = PETSC_FALSE;
  has_kspcomputeoperators   = PETSC_FALSE;
  using_kspcomputeoperators = PETSC_FALSE;

  /* if no permutation is provided, we must rely on KSPSetComputeOperators */
  {
    PetscErrorCode (*dmfine_kspfunc)(KSP, Mat, Mat, void *) = NULL;
    void *dmfine_kspctx   = NULL, *dmcoarse_kspctx = NULL;
    void *dmfine_appctx   = NULL, *dmcoarse_appctx = NULL;
    void *dmfine_shellctx = NULL, *dmcoarse_shellctx = NULL;

    PetscCall(DMKSPGetComputeOperators(dm, &dmfine_kspfunc, &dmfine_kspctx));
    if (dmfine_kspfunc) has_kspcomputeoperators = PETSC_TRUE;

    PetscCall(DMGetApplicationContext(ctx->dm_fine, &dmfine_appctx));
    PetscCall(DMShellGetContext(ctx->dm_fine, &dmfine_shellctx));

    /* need to define dmcoarse_kspctx */
    if (dmfine_kspfunc && !sred->ignore_kspcomputeoperators) {
      PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators fetched from parent DM\n"));
      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(DMGetApplicationContext(ctx->dm_coarse, &dmcoarse_appctx));
        PetscCall(DMShellGetContext(ctx->dm_coarse, &dmcoarse_shellctx));
      }

      /* Assume that if the fine operator didn't require any context, neither will the coarse */
      if (!dmfine_kspctx) {
        dmcoarse_kspctx = NULL;
        PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators using NULL context\n"));
      } else {
        PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators detected non-NULL context from parent DM\n"));
        if (PCTelescope_isActiveRank(sred)) {
          if (dmfine_kspctx == dmfine_appctx) {
            dmcoarse_kspctx = dmcoarse_appctx;
            PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators using context from DM->ApplicationContext\n"));
            PetscCheck(dmcoarse_kspctx, PETSC_COMM_SELF, PETSC_ERR_USER, "Non NULL dmfine->kspctx == dmfine->appctx. NULL dmcoarse->appctx found. Likely this is an error");
          } else if (dmfine_kspctx == dmfine_shellctx) {
            dmcoarse_kspctx = dmcoarse_shellctx;
            PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators using context from DMShell->Context\n"));
            PetscCheck(dmcoarse_kspctx, PETSC_COMM_SELF, PETSC_ERR_USER, "Non NULL dmfine->kspctx == dmfine.shell->ctx. NULL dmcoarse.shell->ctx found. Likely this is an error");
          }
          ctx->dmksp_context_determined = dmcoarse_kspctx;

          /* look for user provided method to fetch the context */
          {
            PetscErrorCode (*fp_get_coarsedm_context)(DM, void **) = NULL;
            void *dmcoarse_context_user = NULL;
            char  dmcoarse_method[PETSC_MAX_PATH_LEN];

            PetscCall(PetscSNPrintf(dmcoarse_method, sizeof(dmcoarse_method), "PCTelescopeGetCoarseDMKSPContext"));
            PetscCall(PetscObjectQueryFunction((PetscObject)ctx->dm_coarse, dmcoarse_method, &fp_get_coarsedm_context));
            if (fp_get_coarsedm_context) {
              PetscCall(PetscInfo(pc, "PCTelescope: Found composed method PCTelescopeGetCoarseDMKSPContext from coarse DM\n"));
              PetscCall(fp_get_coarsedm_context(ctx->dm_coarse, &dmcoarse_context_user));
              ctx->dmksp_context_user = dmcoarse_context_user;
              dmcoarse_kspctx         = dmcoarse_context_user;
            } else {
              PetscCall(PetscInfo(pc, "PCTelescope: Failed to find composed method PCTelescopeGetCoarseDMKSPContext from coarse DM\n"));
            }
          }

          if (!dmcoarse_kspctx) {
            PetscCall(PetscInfo(pc, "PCTelescope: KSPSetComputeOperators failed to determine the context to use on sub-communicator\n"));
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot determine which context to use for KSPSetComputeOperators() on the sub-communicator");
          }
        }
      }
    }

    if (dmfine_kspfunc && !sred->ignore_kspcomputeoperators) {
      using_kspcomputeoperators = PETSC_TRUE;

      if (PCTelescope_isActiveRank(sred)) {
        /* sub ksp inherits dmksp_func and context provided by user */
        PetscCall(KSPSetComputeOperators(sred->ksp, dmfine_kspfunc, dmcoarse_kspctx));
        /* PetscCall(PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)ctx->dmrepart)); */
        PetscCall(KSPSetDMActive(sred->ksp, PETSC_TRUE));
      }
    }
  }

  PetscCheck(has_perm || !has_kspcomputeoperators || using_kspcomputeoperators, comm, PETSC_ERR_SUP, "No method to permute an operator was found on the parent DM. A method for KSPSetComputeOperators() was provided but it was requested to be ignored. Telescope setup cannot proceed");
  PetscCheck(has_perm || has_kspcomputeoperators, comm, PETSC_ERR_SUP, "No method to permute an operator was found on the parent DM. No method for KSPSetComputeOperators() was provided. Telescope setup cannot proceed");

  {
    char dmfine_method[PETSC_MAX_PATH_LEN];

    PetscCall(PetscSNPrintf(dmfine_method, sizeof(dmfine_method), "PCTelescopeFieldScatter"));
    PetscCall(PetscObjectQueryFunction((PetscObject)ctx->dm_fine, dmfine_method, &ctx->fp_dm_field_scatter));

    PetscCall(PetscSNPrintf(dmfine_method, sizeof(dmfine_method), "PCTelescopeStateScatter"));
    PetscCall(PetscObjectQueryFunction((PetscObject)ctx->dm_fine, dmfine_method, &ctx->fp_dm_state_scatter));
  }

  if (ctx->fp_dm_state_scatter) {
    PetscCall(PetscInfo(pc, "PCTelescope: Found composed method PCTelescopeStateScatter from parent DM\n"));
  } else {
    PetscCall(PetscInfo(pc, "PCTelescope: Failed to find composed method PCTelescopeStateScatter from parent DM\n"));
  }

  if (ctx->fp_dm_field_scatter) {
    PetscCall(PetscInfo(pc, "PCTelescope: Found composed method PCTelescopeFieldScatter from parent DM\n"));
  } else {
    PetscCall(PetscInfo(pc, "PCTelescope: Failed to find composed method PCTelescopeFieldScatter from parent DM\n"));
    SETERRQ(comm, PETSC_ERR_SUP, "No method to scatter fields between the parent DM and coarse DM was found. Must call PetscObjectComposeFunction() with the parent DM. Telescope setup cannot proceed");
  }

  /* PetscCall(PCTelescopeSetUp_permutation_CoarseDM(pc,sred,ctx)); */
  PetscCall(PCTelescopeSetUp_scatters_CoarseDM(pc, sred, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}
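
/*
  Apply: scatter any auxiliary state and the input vector to the sub-communicator, solve there,
  then scatter the solution back. Note that the user's field scatter is invoked on every rank of
  the fine communicator; xred/yred are NULL on ranks that are inactive on the sub-communicator.
*/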
PetscErrorCode PCApply_Telescope_CoarseDM(PC pc, Vec x, Vec y)
{
  PC_Telescope              sred = (PC_Telescope)pc->data;
  Vec                       xred, yred;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx  = (PC_Telescope_CoarseDMCtx *)sred->dm_ctx;
  xred = sred->xred;
  yred = sred->yred;

  PetscCall(PetscCitationsRegister(citation, &cited));

  if (ctx->fp_dm_state_scatter) PetscCall(ctx->fp_dm_state_scatter(ctx->dm_fine, SCATTER_FORWARD, ctx->dm_coarse));

  PetscCall(ctx->fp_dm_field_scatter(ctx->dm_fine, x, SCATTER_FORWARD, ctx->dm_coarse, xred));

  /* solve */
  if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSolve(sred->ksp, xred, yred));

  PetscCall(ctx->fp_dm_field_scatter(ctx->dm_fine, y, SCATTER_REVERSE, ctx->dm_coarse, yred));
  PetscFunctionReturn(PETSC_SUCCESS);
}
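
/* Scatter each vector of the given (near) null space into the coarse space and build the
   corresponding MatNullSpace on the sub-communicator. */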
static PetscErrorCode PCTelescopeSubNullSpaceCreate_CoarseDM(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace)
{
  PetscBool                 has_const;
  PetscInt                  k, n = 0;
  const Vec                *vecs;
  Vec                      *sub_vecs = NULL;
  MPI_Comm                  subcomm;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx     = (PC_Telescope_CoarseDMCtx *)sred->dm_ctx;
  subcomm = sred->subcomm;
  PetscCall(MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs));

  if (PCTelescope_isActiveRank(sred)) {
    /* create new vectors */
    if (n) PetscCall(VecDuplicateVecs(sred->xred, n, &sub_vecs));
  }

  /* copy entries; pass NULL for the coarse vector on inactive ranks, where sub_vecs was never allocated */
  for (k = 0; k < n; k++) PetscCall(ctx->fp_dm_field_scatter(ctx->dm_fine, vecs[k], SCATTER_FORWARD, ctx->dm_coarse, sub_vecs ? sub_vecs[k] : NULL));

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    PetscCall(MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace));
    PetscCall(VecDestroyVecs(n, &sub_vecs));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
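
/*
  Propagate the (near) null space attached to the fine operator onto the operator living on the
  sub-communicator. If the null space carries a removal function with a non-NULL context, the
  coarse DM must supply the matching context through a composed getter. A minimal sketch,
  assuming a hypothetical routine MyGetNullSpaceCtx (only the composed name and the signature
  are fixed by this file):

    static PetscErrorCode MyGetNullSpaceCtx(DM dmc, void **nsctx)
    {
      PetscFunctionBegin;
      *nsctx = ...; (the context the removal function expects on the sub-communicator)
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    PetscCall(PetscObjectComposeFunction((PetscObject)dm_coarse, "PCTelescopeGetCoarseDMNullSpaceUserContext", MyGetNullSpaceCtx));
*/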
PetscErrorCode PCTelescopeMatNullSpaceCreate_CoarseDM(PC pc, PC_Telescope sred, Mat sub_mat)
{
  Mat                       B;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx = (PC_Telescope_CoarseDMCtx *)sred->dm_ctx;
  PetscCall(PCGetOperators(pc, NULL, &B));
  {
    MatNullSpace nullspace, sub_nullspace;
    PetscCall(MatGetNullSpace(B, &nullspace));
    if (nullspace) {
      PetscCall(PetscInfo(pc, "PCTelescope: generating nullspace (CoarseDM)\n"));
      PetscCall(PCTelescopeSubNullSpaceCreate_CoarseDM(pc, sred, nullspace, &sub_nullspace));

      /* attach any user nullspace removal methods and contexts */
      if (PCTelescope_isActiveRank(sred)) {
        void *context = NULL;
        if (nullspace->remove && !nullspace->rmctx) {
          PetscCall(MatNullSpaceSetFunction(sub_nullspace, nullspace->remove, context));
        } else if (nullspace->remove && nullspace->rmctx) {
          char dmcoarse_method[PETSC_MAX_PATH_LEN];
          PetscErrorCode (*fp_get_coarsedm_context)(DM, void **) = NULL;

          PetscCall(PetscSNPrintf(dmcoarse_method, sizeof(dmcoarse_method), "PCTelescopeGetCoarseDMNullSpaceUserContext"));
          PetscCall(PetscObjectQueryFunction((PetscObject)ctx->dm_coarse, dmcoarse_method, &fp_get_coarsedm_context));
          /* fetch the user context from the composed getter; without this call, context would remain NULL and the check below would always fail */
          if (fp_get_coarsedm_context) PetscCall(fp_get_coarsedm_context(ctx->dm_coarse, &context));
          PetscCheck(context, PETSC_COMM_SELF, PETSC_ERR_SUP, "Propagation of user null-space removal method with non-NULL context requires the coarse DM be composed with a function named \"%s\"", dmcoarse_method);
          PetscCall(MatNullSpaceSetFunction(sub_nullspace, nullspace->remove, context));
        }
      }

      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(MatSetNullSpace(sub_mat, sub_nullspace));
        PetscCall(MatNullSpaceDestroy(&sub_nullspace));
      }
    }
  }
  {
    MatNullSpace nearnullspace, sub_nearnullspace;
    PetscCall(MatGetNearNullSpace(B, &nearnullspace));
    if (nearnullspace) {
      PetscCall(PetscInfo(pc, "PCTelescope: generating near nullspace (CoarseDM)\n"));
      PetscCall(PCTelescopeSubNullSpaceCreate_CoarseDM(pc, sred, nearnullspace, &sub_nearnullspace));

      /* attach any user nullspace removal methods and contexts */
      if (PCTelescope_isActiveRank(sred)) {
        void *context = NULL;
        if (nearnullspace->remove && !nearnullspace->rmctx) {
          PetscCall(MatNullSpaceSetFunction(sub_nearnullspace, nearnullspace->remove, context));
        } else if (nearnullspace->remove && nearnullspace->rmctx) {
          char dmcoarse_method[PETSC_MAX_PATH_LEN];
          PetscErrorCode (*fp_get_coarsedm_context)(DM, void **) = NULL;

          PetscCall(PetscSNPrintf(dmcoarse_method, sizeof(dmcoarse_method), "PCTelescopeGetCoarseDMNearNullSpaceUserContext"));
          PetscCall(PetscObjectQueryFunction((PetscObject)ctx->dm_coarse, dmcoarse_method, &fp_get_coarsedm_context));
          /* fetch the user context from the composed getter; without this call, context would remain NULL and the check below would always fail */
          if (fp_get_coarsedm_context) PetscCall(fp_get_coarsedm_context(ctx->dm_coarse, &context));
          PetscCheck(context, PETSC_COMM_SELF, PETSC_ERR_SUP, "Propagation of user near null-space removal method with non-NULL context requires the coarse DM be composed with a function named \"%s\"", dmcoarse_method);
          PetscCall(MatNullSpaceSetFunction(sub_nearnullspace, nearnullspace->remove, context));
        }
      }

      if (PCTelescope_isActiveRank(sred)) {
        PetscCall(MatSetNearNullSpace(sub_mat, sub_nearnullspace));
        PetscCall(MatNullSpaceDestroy(&sub_nearnullspace));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
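
/* The fine/coarse DMs and the permutation are borrowed references, so they are simply NULLed;
   only the work vector xp is owned by this context and must be destroyed. */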
PetscErrorCode PCReset_Telescope_CoarseDM(PC pc)
{
  PC_Telescope              sred = (PC_Telescope)pc->data;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx = (PC_Telescope_CoarseDMCtx *)sred->dm_ctx;
  ctx->dm_fine     = NULL; /* the reference counts of the DMs were never incremented, so simply set these to NULL */
  ctx->dm_coarse   = NULL;
  ctx->permutation = NULL; /* this will be fetched from the dm so no need to call destroy */
  PetscCall(VecDestroy(&ctx->xp));
  ctx->fp_dm_field_scatter      = NULL;
  ctx->fp_dm_state_scatter      = NULL;
  ctx->dmksp_context_determined = NULL;
  ctx->dmksp_context_user       = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
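
/*
  Richardson with a single iteration: when the initial guess is non-zero, scatter the current
  iterate forward and temporarily mark the sub KSP as having a non-zero initial guess, apply the
  telescoped solve, then restore the sub KSP's initial-guess flag.
*/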
PetscErrorCode PCApplyRichardson_Telescope_CoarseDM(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PC_Telescope              sred = (PC_Telescope)pc->data;
  Vec                       yred = NULL;
  PetscBool                 default_init_guess_value = PETSC_FALSE;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx  = (PC_Telescope_CoarseDMCtx *)sred->dm_ctx;
  yred = sred->yred;

  PetscCheck(its <= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope_CoarseDM only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscCall(PetscInfo(pc, "PCTelescopeCoarseDM: Scattering y for non-zero initial guess\n"));

    PetscCall(ctx->fp_dm_field_scatter(ctx->dm_fine, y, SCATTER_FORWARD, ctx->dm_coarse, yred));
  }

  if (PCTelescope_isActiveRank(sred)) {
    PetscCall(KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value));
    if (!zeroguess) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE));
  }

  PetscCall(PCApply_Telescope_CoarseDM(pc, x, y));

  if (PCTelescope_isActiveRank(sred)) PetscCall(KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value));

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(PETSC_SUCCESS);
}