Actual source code: ex73.c
/*
This example was derived from src/ksp/ksp/tutorials ex29.c

Inhomogeneous Laplacian in 2D. Modeled by the partial differential equation

   -div \rho grad u = f,  0 < x,y < 1,

with forcing function

   f = e^{-x^2/\nu} e^{-y^2/\nu}

with Dirichlet boundary conditions

   u = f(x,y) for x = 0, x = 1, y = 0, y = 1

or pure Neumann boundary conditions.
*/

static char help[] = "Solves 2D inhomogeneous Laplacian. Demonstrates using PCTelescopeSetUseCoarseDM functionality of PCTelescope via a DMShell\n\n";

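/*
  Structure of this example (summary of the code below):

    - A hierarchy of 2D DMDAs is built, spanning progressively smaller
      communicators (via PetscSubcomm), and each DMDA is wrapped in a DMShell.
    - PCTelescope repartitions the coarse problem onto the smaller
      communicator; the DMShell supplies the field/state scatters and the
      permutation matrix needed to move vectors between the partitionings.
    - Three drivers are provided, selected with -tid:
        0: two-level fine/coarse setup (test_basic)
        1: construct and destroy the full hierarchy (test_hierarchy)
        2: multigrid across the hierarchy (test_mg)
*/
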
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>
#include <petscksp.h>

PetscErrorCode ComputeMatrix_ShellDA(KSP, Mat, Mat, void *);
PetscErrorCode ComputeMatrix_DMDA(DM, Mat, Mat, void *);
PetscErrorCode ComputeRHS_DMDA(DM, Vec, void *);
PetscErrorCode DMShellCreate_ShellDA(DM, DM *);
PetscErrorCode DMFieldScatter_ShellDA(DM, Vec, ScatterMode, DM, Vec);
PetscErrorCode DMStateScatter_ShellDA(DM, ScatterMode, DM);

typedef enum {
  DIRICHLET,
  NEUMANN
} BCType;

typedef struct {
  PetscReal rho;
  PetscReal nu;
  BCType    bcType;
  MPI_Comm  comm;
} UserContext;

PetscErrorCode UserContextCreate(MPI_Comm comm, UserContext **ctx)
{
  UserContext *user;
  const char  *bcTypes[2] = {"dirichlet", "neumann"};
  PetscInt     bc;

  PetscFunctionBeginUser;
  PetscCall(PetscCalloc1(1, &user));
  user->comm = comm;
  PetscOptionsBegin(comm, "", "Options for the inhomogeneous Poisson equation", "DM");
  user->rho = 1.0;
  PetscCall(PetscOptionsReal("-rho", "The conductivity", "ex29.c", user->rho, &user->rho, NULL));
  user->nu = 0.1;
  PetscCall(PetscOptionsReal("-nu", "The width of the Gaussian source", "ex29.c", user->nu, &user->nu, NULL));
  bc = (PetscInt)DIRICHLET;
  PetscCall(PetscOptionsEList("-bc_type", "Type of boundary condition", "ex29.c", bcTypes, 2, bcTypes[0], &bc, NULL));
  user->bcType = (BCType)bc;
  PetscOptionsEnd();
  *ctx = user;
  PetscFunctionReturn(PETSC_SUCCESS);
}

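/*
  CommCoarsen splits comm into `number` subcommunicators using
  PETSC_SUBCOMM_INTERLACED, which distributes ranks round-robin among the
  children (e.g. 4 ranks split into 2 children gives {0,2} and {1,3}).
  The color-0 child is the one that keeps coarsening in
  CommHierarchyCreate below.
*/
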
PetscErrorCode CommCoarsen(MPI_Comm comm, PetscInt number, PetscSubcomm *p)
{
  PetscSubcomm psubcomm;

  PetscFunctionBeginUser;
  PetscCall(PetscSubcommCreate(comm, &psubcomm));
  PetscCall(PetscSubcommSetNumber(psubcomm, number));
  PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_INTERLACED));
  *p = psubcomm;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode CommHierarchyCreate(MPI_Comm comm, PetscInt n, PetscInt number[], PetscSubcomm pscommlist[])
{
  PetscInt  k;
  PetscBool view_hierarchy = PETSC_FALSE;

  PetscFunctionBeginUser;
  for (k = 0; k < n; k++) pscommlist[k] = NULL;

  if (n < 1) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(CommCoarsen(comm, number[n - 1], &pscommlist[n - 1]));
  for (k = n - 2; k >= 0; k--) {
    MPI_Comm comm_k = PetscSubcommChild(pscommlist[k + 1]);
    if (pscommlist[k + 1]->color == 0) PetscCall(CommCoarsen(comm_k, number[k], &pscommlist[k]));
  }

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-view_hierarchy", &view_hierarchy, NULL));
  if (view_hierarchy) {
    PetscMPIInt size;

    PetscCallMPI(MPI_Comm_size(comm, &size));
    PetscCall(PetscPrintf(comm, "level[%" PetscInt_FMT "] size %d\n", n, (int)size));
    for (k = n - 1; k >= 0; k--) {
      if (pscommlist[k]) {
        MPI_Comm comm_k = PetscSubcommChild(pscommlist[k]);

        if (pscommlist[k]->color == 0) {
          PetscCallMPI(MPI_Comm_size(comm_k, &size));
          PetscCall(PetscPrintf(comm_k, "level[%" PetscInt_FMT "] size %d\n", k, (int)size));
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

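/*
  The following helpers mirror PCTelescope's internal DMDA support. They
  build a permutation matrix P mapping the fine DMDA's global (PETSc)
  ordering to the ordering induced by the repartitioned DMDA defined on the
  smaller communicator; vectors are then moved between the partitionings
  with MatMultTranspose(P,...) / MatMult(P,...) plus a VecScatter.
*/
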
/* taken from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode _DMDADetermineRankFromGlobalIJ_2d(PetscInt i, PetscInt j, PetscInt Mp, PetscInt Np, PetscInt start_i[], PetscInt start_j[], PetscInt span_i[], PetscInt span_j[], PetscMPIInt *_pi, PetscMPIInt *_pj, PetscMPIInt *rank_re)
{
  PetscInt pi, pj, n;

  PetscFunctionBeginUser;
  *rank_re = -1;
  pi = pj = -1;
  if (_pi) {
    for (n = 0; n < Mp; n++) {
      if ((i >= start_i[n]) && (i < start_i[n] + span_i[n])) {
        pi = n;
        break;
      }
    }
    PetscCheck(pi != -1, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmda-ij] pi cannot be determined : range %" PetscInt_FMT ", val %" PetscInt_FMT, Mp, i);
    *_pi = (PetscMPIInt)pi;
  }

  if (_pj) {
    for (n = 0; n < Np; n++) {
      if ((j >= start_j[n]) && (j < start_j[n] + span_j[n])) {
        pj = n;
        break;
      }
    }
    PetscCheck(pj != -1, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmda-ij] pj cannot be determined : range %" PetscInt_FMT ", val %" PetscInt_FMT, Np, j);
    *_pj = (PetscMPIInt)pj;
  }

  *rank_re = (PetscMPIInt)(pi + pj * Mp);
  PetscFunctionReturn(PETSC_SUCCESS);
}

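/*
  Returns in s0 the global index of the first unknown owned by rank rank_re
  of the repartitioned DMDA, i.e. the sum of the patch sizes
  range_i_re[i]*range_j_re[j] over all ranks ordered before it.
*/
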
/* taken from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode _DMDADetermineGlobalS0_2d(PetscMPIInt rank_re, PetscInt Mp_re, PetscInt Np_re, PetscInt range_i_re[], PetscInt range_j_re[], PetscInt *s0)
{
  PetscInt    i, j, start_IJ = 0;
  PetscMPIInt rank_ij;

  PetscFunctionBeginUser;
  *s0 = -1;
  for (j = 0; j < Np_re; j++) {
    for (i = 0; i < Mp_re; i++) {
      rank_ij = (PetscMPIInt)(i + j * Mp_re);
      if (rank_ij < rank_re) start_IJ += range_i_re[i] * range_j_re[j];
    }
  }
  *s0 = start_IJ;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* adapted from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode DMDACreatePermutation_2d(DM dmrepart, DM dmf, Mat *mat)
{
  PetscInt        k, sum, Mp_re = 0, Np_re = 0;
  PetscInt        nx, ny, sr, er, Mr, ndof;
  PetscInt        i, j, location, startI[2], endI[2], lenI[2];
  const PetscInt *_range_i_re = NULL, *_range_j_re = NULL;
  PetscInt       *range_i_re, *range_j_re;
  PetscInt       *start_i_re, *start_j_re;
  MPI_Comm        comm;
  Vec             V;
  Mat             Pscalar;

  PetscFunctionBeginUser;
  PetscCall(PetscInfo(dmf, "setting up the permutation matrix (DMDA-2D)\n"));
  PetscCall(PetscObjectGetComm((PetscObject)dmf, &comm));

  _range_i_re = _range_j_re = NULL;
  /* query the repartitioned DMDA, which exists only on the ranks of the child communicator */
  if (dmrepart) {
    PetscCall(DMDAGetInfo(dmrepart, NULL, NULL, NULL, NULL, &Mp_re, &Np_re, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
    PetscCall(DMDAGetOwnershipRanges(dmrepart, &_range_i_re, &_range_j_re, NULL));
  }

  /* note - assume rank 0 always participates */
  PetscCallMPI(MPI_Bcast(&Mp_re, 1, MPIU_INT, 0, comm));
  PetscCallMPI(MPI_Bcast(&Np_re, 1, MPIU_INT, 0, comm));

  PetscCall(PetscCalloc1(Mp_re, &range_i_re));
  PetscCall(PetscCalloc1(Np_re, &range_j_re));

  if (_range_i_re) PetscCall(PetscArraycpy(range_i_re, _range_i_re, Mp_re));
  if (_range_j_re) PetscCall(PetscArraycpy(range_j_re, _range_j_re, Np_re));

  PetscCallMPI(MPI_Bcast(range_i_re, (PetscMPIInt)Mp_re, MPIU_INT, 0, comm));
  PetscCallMPI(MPI_Bcast(range_j_re, (PetscMPIInt)Np_re, MPIU_INT, 0, comm));

  PetscCall(PetscMalloc1(Mp_re, &start_i_re));
  PetscCall(PetscMalloc1(Np_re, &start_j_re));

  sum = 0;
  for (k = 0; k < Mp_re; k++) {
    start_i_re[k] = sum;
    sum += range_i_re[k];
  }

  sum = 0;
  for (k = 0; k < Np_re; k++) {
    start_j_re[k] = sum;
    sum += range_j_re[k];
  }

  /* Create permutation */
  PetscCall(DMDAGetInfo(dmf, NULL, &nx, &ny, NULL, NULL, NULL, NULL, &ndof, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMGetGlobalVector(dmf, &V));
  PetscCall(VecGetSize(V, &Mr));
  PetscCall(VecGetOwnershipRange(V, &sr, &er));
  PetscCall(DMRestoreGlobalVector(dmf, &V));
  sr = sr / ndof;
  er = er / ndof;
  Mr = Mr / ndof;

  PetscCall(MatCreate(comm, &Pscalar));
  PetscCall(MatSetSizes(Pscalar, er - sr, er - sr, Mr, Mr));
  PetscCall(MatSetType(Pscalar, MATAIJ));
  PetscCall(MatSeqAIJSetPreallocation(Pscalar, 1, NULL));
  PetscCall(MatMPIAIJSetPreallocation(Pscalar, 1, NULL, 1, NULL));

  PetscCall(DMDAGetCorners(dmf, NULL, NULL, NULL, &lenI[0], &lenI[1], NULL));
  PetscCall(DMDAGetCorners(dmf, &startI[0], &startI[1], NULL, &endI[0], &endI[1], NULL));
  endI[0] += startI[0];
  endI[1] += startI[1];

  for (j = startI[1]; j < endI[1]; j++) {
    for (i = startI[0]; i < endI[0]; i++) {
      PetscMPIInt rank_ijk_re, rank_reI[] = {0, 0};
      PetscInt    s0_re;
      PetscInt    ii, jj, local_ijk_re, mapped_ijk;
      PetscInt    lenI_re[] = {0, 0};

      location = (i - startI[0]) + (j - startI[1]) * lenI[0];
      PetscCall(_DMDADetermineRankFromGlobalIJ_2d(i, j, Mp_re, Np_re, start_i_re, start_j_re, range_i_re, range_j_re, &rank_reI[0], &rank_reI[1], &rank_ijk_re));

      PetscCall(_DMDADetermineGlobalS0_2d(rank_ijk_re, Mp_re, Np_re, range_i_re, range_j_re, &s0_re));

      ii = i - start_i_re[rank_reI[0]];
      PetscCheck(ii >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm2d] index error ii");
      jj = j - start_j_re[rank_reI[1]];
      PetscCheck(jj >= 0, PETSC_COMM_SELF, PETSC_ERR_USER, "[dmdarepart-perm2d] index error jj");

      lenI_re[0]   = range_i_re[rank_reI[0]];
      lenI_re[1]   = range_j_re[rank_reI[1]];
      local_ijk_re = ii + jj * lenI_re[0];
      mapped_ijk   = s0_re + local_ijk_re;
      PetscCall(MatSetValue(Pscalar, sr + location, mapped_ijk, 1.0, INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(Pscalar, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Pscalar, MAT_FINAL_ASSEMBLY));

  *mat = Pscalar;

  PetscCall(PetscFree(range_i_re));
  PetscCall(PetscFree(range_j_re));
  PetscCall(PetscFree(start_i_re));
  PetscCall(PetscFree(start_j_re));
  PetscFunctionReturn(PETSC_SUCCESS);
}

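/*
  Creates the work vectors and the VecScatter used by the field scatters:
  xp holds the permuted fine vector, and xtmp receives, on each rank, the
  slice of xp corresponding to that rank's portion of the repartitioned
  vector (an empty slice on ranks outside the child communicator). All four
  objects are composed on the fine DMDA so they can later be queried by name.
*/
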
/* adapted from src/ksp/pc/impls/telescope/telescope_dmda.c */
static PetscErrorCode PCTelescopeSetUp_dmda_scatters(DM dmf, DM dmc)
{
  Vec        xred, yred, xtmp, x, xp;
  VecScatter scatter;
  IS         isin;
  PetscInt   m, bs, st, ed;
  MPI_Comm   comm;
  VecType    vectype;

  PetscFunctionBeginUser;
  PetscCall(PetscObjectGetComm((PetscObject)dmf, &comm));
  PetscCall(DMCreateGlobalVector(dmf, &x));
  PetscCall(VecGetBlockSize(x, &bs));
  PetscCall(VecGetType(x, &vectype));

  /* cannot use VecDuplicate as xp is already composed with dmf */
  /*PetscCall(VecDuplicate(x,&xp));*/
  {
    PetscInt m, M;

    PetscCall(VecGetSize(x, &M));
    PetscCall(VecGetLocalSize(x, &m));
    PetscCall(VecCreate(comm, &xp));
    PetscCall(VecSetSizes(xp, m, M));
    PetscCall(VecSetBlockSize(xp, bs));
    PetscCall(VecSetType(xp, vectype));
  }

  m    = 0;
  xred = NULL;
  yred = NULL;
  if (dmc) {
    PetscCall(DMCreateGlobalVector(dmc, &xred));
    PetscCall(VecDuplicate(xred, &yred));
    PetscCall(VecGetOwnershipRange(xred, &st, &ed));
    PetscCall(ISCreateStride(comm, ed - st, st, 1, &isin));
    PetscCall(VecGetLocalSize(xred, &m));
  } else {
    PetscCall(VecGetOwnershipRange(x, &st, &ed));
    PetscCall(ISCreateStride(comm, 0, st, 1, &isin));
  }
  PetscCall(ISSetBlockSize(isin, bs));
  PetscCall(VecCreate(comm, &xtmp));
  PetscCall(VecSetSizes(xtmp, m, PETSC_DECIDE));
  PetscCall(VecSetBlockSize(xtmp, bs));
  PetscCall(VecSetType(xtmp, vectype));
  PetscCall(VecScatterCreate(x, isin, xtmp, NULL, &scatter));

  PetscCall(PetscObjectCompose((PetscObject)dmf, "isin", (PetscObject)isin));
  PetscCall(PetscObjectCompose((PetscObject)dmf, "scatter", (PetscObject)scatter));
  PetscCall(PetscObjectCompose((PetscObject)dmf, "xtmp", (PetscObject)xtmp));
  PetscCall(PetscObjectCompose((PetscObject)dmf, "xp", (PetscObject)xp));

  PetscCall(VecDestroy(&xred));
  PetscCall(VecDestroy(&yred));
  PetscCall(VecDestroy(&x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

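/*
  DMShell hooks: each shell stores the true DMDA as its context. The hooks
  below unwrap the DMDA, forward the operation to it, and (for vectors)
  re-tag the result with the shell via VecSetDM so the KSP always sees the
  shell DM.
*/
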
PetscErrorCode DMCreateMatrix_ShellDA(DM dm, Mat *A)
{
  DM           da;
  MPI_Comm     comm;
  PetscMPIInt  size;
  UserContext *ctx = NULL;
  PetscInt     M, N;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(dm, &da));
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMCreateMatrix(da, A));
  PetscCall(MatGetSize(*A, &M, &N));
  PetscCall(PetscPrintf(comm, "[size %" PetscInt_FMT "] DMCreateMatrix_ShellDA (%" PetscInt_FMT " x %" PetscInt_FMT ")\n", (PetscInt)size, M, N));

  PetscCall(DMGetApplicationContext(dm, &ctx));
  if (ctx->bcType == NEUMANN) {
    MatNullSpace nullspace = NULL;
    PetscCall(PetscPrintf(comm, "[size %" PetscInt_FMT "] DMCreateMatrix_ShellDA: using neumann bcs\n", (PetscInt)size));

    PetscCall(MatGetNullSpace(*A, &nullspace));
    if (!nullspace) {
      PetscCall(PetscPrintf(comm, "[size %" PetscInt_FMT "] DMCreateMatrix_ShellDA: operator does not have nullspace - attaching\n", (PetscInt)size));
      PetscCall(MatNullSpaceCreate(comm, PETSC_TRUE, 0, 0, &nullspace));
      PetscCall(MatSetNullSpace(*A, nullspace));
      PetscCall(MatNullSpaceDestroy(&nullspace));
    } else {
      PetscCall(PetscPrintf(comm, "[size %" PetscInt_FMT "] DMCreateMatrix_ShellDA: operator already has a nullspace\n", (PetscInt)size));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateGlobalVector_ShellDA(DM dm, Vec *x)
{
  DM da;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(dm, &da));
  PetscCall(DMCreateGlobalVector(da, x));
  PetscCall(VecSetDM(*x, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateLocalVector_ShellDA(DM dm, Vec *x)
{
  DM da;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(dm, &da));
  PetscCall(DMCreateLocalVector(da, x));
  PetscCall(VecSetDM(*x, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCoarsen_ShellDA(DM dm, MPI_Comm comm, DM *dmc)
{
  PetscFunctionBeginUser;
  *dmc = NULL;
  PetscCall(DMGetCoarseDM(dm, dmc));
  if (!*dmc) {
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "The coarse DM should never be NULL. The DM hierarchy should have already been defined");
  } else {
    PetscCall(PetscObjectReference((PetscObject)*dmc));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateInterpolation_ShellDA(DM dm1, DM dm2, Mat *mat, Vec *vec)
{
  DM da1, da2;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(dm1, &da1));
  PetscCall(DMShellGetContext(dm2, &da2));
  PetscCall(DMCreateInterpolation(da1, da2, mat, vec));
  PetscFunctionReturn(PETSC_SUCCESS);
}

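/*
  Registers the permutation matrix and the scatters on the fine DMDA, then
  composes the two callbacks PCTelescope looks up by name
  ("PCTelescopeFieldScatter" and "PCTelescopeStateScatter") on the fine
  DMShell.
*/
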
PetscErrorCode DMShellDASetUp_TelescopeDMScatter(DM dmf_shell, DM dmc_shell)
{
  Mat P   = NULL;
  DM  dmf = NULL, dmc = NULL;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(dmf_shell, &dmf));
  if (dmc_shell) PetscCall(DMShellGetContext(dmc_shell, &dmc));
  PetscCall(DMDACreatePermutation_2d(dmc, dmf, &P));
  PetscCall(PetscObjectCompose((PetscObject)dmf, "P", (PetscObject)P));
  PetscCall(PCTelescopeSetUp_dmda_scatters(dmf, dmc));
  PetscCall(PetscObjectComposeFunction((PetscObject)dmf_shell, "PCTelescopeFieldScatter", DMFieldScatter_ShellDA));
  PetscCall(PetscObjectComposeFunction((PetscObject)dmf_shell, "PCTelescopeStateScatter", DMStateScatter_ShellDA));
  PetscFunctionReturn(PETSC_SUCCESS);
}

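/*
  Forward field scatter (fine -> repartitioned):
    1. xp = P^T x reorders the fine vector into the repartitioned ordering;
    2. the VecScatter pulls each rank's slice of xp into xtmp;
    3. the slice is copied into the coarse-communicator vector xc (only on
       ranks that hold one).
  The reverse scatter further below undoes these steps in the opposite order.
*/
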
PetscErrorCode DMShellDAFieldScatter_Forward(DM dmf, Vec x, DM dmc, Vec xc)
{
  Mat                P  = NULL;
  Vec                xp = NULL, xtmp = NULL;
  VecScatter         scatter = NULL;
  const PetscScalar *x_array;
  PetscInt           i, st, ed;

  PetscFunctionBeginUser;
  PetscCall(PetscObjectQuery((PetscObject)dmf, "P", (PetscObject *)&P));
  PetscCall(PetscObjectQuery((PetscObject)dmf, "xp", (PetscObject *)&xp));
  PetscCall(PetscObjectQuery((PetscObject)dmf, "scatter", (PetscObject *)&scatter));
  PetscCall(PetscObjectQuery((PetscObject)dmf, "xtmp", (PetscObject *)&xtmp));
  PetscCheck(P, PETSC_COMM_SELF, PETSC_ERR_USER, "Require a permutation matrix (\"P\") to be composed with the parent (fine) DM");
  PetscCheck(xp, PETSC_COMM_SELF, PETSC_ERR_USER, "Require \"xp\" to be composed with the parent (fine) DM");
  PetscCheck(scatter, PETSC_COMM_SELF, PETSC_ERR_USER, "Require \"scatter\" to be composed with the parent (fine) DM");
  PetscCheck(xtmp, PETSC_COMM_SELF, PETSC_ERR_USER, "Require \"xtmp\" to be composed with the parent (fine) DM");

  PetscCall(MatMultTranspose(P, x, xp));

  /* pull in vector x->xtmp */
  PetscCall(VecScatterBegin(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter, xp, xtmp, INSERT_VALUES, SCATTER_FORWARD));

  /* copy vector entries into xred */
  PetscCall(VecGetArrayRead(xtmp, &x_array));
  if (xc) {
    PetscScalar *LA_xred;
    PetscCall(VecGetOwnershipRange(xc, &st, &ed));

    PetscCall(VecGetArray(xc, &LA_xred));
    for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
    PetscCall(VecRestoreArray(xc, &LA_xred));
  }
  PetscCall(VecRestoreArrayRead(xtmp, &x_array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMShellDAFieldScatter_Reverse(DM dmf, Vec y, DM dmc, Vec yc)
{
  Mat          P  = NULL;
  Vec          xp = NULL, xtmp = NULL;
  VecScatter   scatter = NULL;
  PetscScalar *array;
  PetscInt     i, st, ed;

  PetscFunctionBeginUser;
  PetscCall(PetscObjectQuery((PetscObject)dmf, "P", (PetscObject *)&P));
  PetscCall(PetscObjectQuery((PetscObject)dmf, "xp", (PetscObject *)&xp));
  PetscCall(PetscObjectQuery((PetscObject)dmf, "scatter", (PetscObject *)&scatter));
  PetscCall(PetscObjectQuery((PetscObject)dmf, "xtmp", (PetscObject *)&xtmp));

  PetscCheck(P, PETSC_COMM_SELF, PETSC_ERR_USER, "Require a permutation matrix (\"P\") to be composed with the parent (fine) DM");
  PetscCheck(xp, PETSC_COMM_SELF, PETSC_ERR_USER, "Require \"xp\" to be composed with the parent (fine) DM");
  PetscCheck(scatter, PETSC_COMM_SELF, PETSC_ERR_USER, "Require \"scatter\" to be composed with the parent (fine) DM");
  PetscCheck(xtmp, PETSC_COMM_SELF, PETSC_ERR_USER, "Require \"xtmp\" to be composed with the parent (fine) DM");

  /* return vector */
  PetscCall(VecGetArray(xtmp, &array));
  if (yc) {
    const PetscScalar *LA_yred;
    PetscCall(VecGetOwnershipRange(yc, &st, &ed));
    PetscCall(VecGetArrayRead(yc, &LA_yred));
    for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
    PetscCall(VecRestoreArrayRead(yc, &LA_yred));
  }
  PetscCall(VecRestoreArray(xtmp, &array));
  PetscCall(VecScatterBegin(scatter, xtmp, xp, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(scatter, xtmp, xp, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(MatMult(P, xp, y));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMFieldScatter_ShellDA(DM dmf_shell, Vec x, ScatterMode mode, DM dmc_shell, Vec xc)
{
  DM dmf = NULL, dmc = NULL;

  PetscFunctionBeginUser;
  PetscCall(DMShellGetContext(dmf_shell, &dmf));
  if (dmc_shell) PetscCall(DMShellGetContext(dmc_shell, &dmc));
  if (mode == SCATTER_FORWARD) {
    PetscCall(DMShellDAFieldScatter_Forward(dmf, x, dmc, xc));
  } else if (mode == SCATTER_REVERSE) {
    PetscCall(DMShellDAFieldScatter_Reverse(dmf, x, dmc, xc));
  } else SETERRQ(PetscObjectComm((PetscObject)dmf_shell), PETSC_ERR_SUP, "Only mode = SCATTER_FORWARD, SCATTER_REVERSE supported");
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMStateScatter_ShellDA(DM dmf_shell, ScatterMode mode, DM dmc_shell)
{
  PetscMPIInt size_f = 0, size_c = 0;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dmf_shell), &size_f));
  if (dmc_shell) PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dmc_shell), &size_c));
  if (mode == SCATTER_FORWARD) {
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)dmf_shell), "User supplied state scatter (fine [size %d]-> coarse [size %d])\n", (int)size_f, (int)size_c));
  } else if (mode == SCATTER_REVERSE) {
  } else SETERRQ(PetscObjectComm((PetscObject)dmf_shell), PETSC_ERR_SUP, "Only mode = SCATTER_FORWARD, SCATTER_REVERSE supported");
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMShellCreate_ShellDA(DM da, DM *dms)
{
  PetscFunctionBeginUser;
  if (da) {
    PetscCall(DMShellCreate(PetscObjectComm((PetscObject)da), dms));
    PetscCall(DMShellSetContext(*dms, da));
    PetscCall(DMShellSetCreateGlobalVector(*dms, DMCreateGlobalVector_ShellDA));
    PetscCall(DMShellSetCreateLocalVector(*dms, DMCreateLocalVector_ShellDA));
    PetscCall(DMShellSetCreateMatrix(*dms, DMCreateMatrix_ShellDA));
    PetscCall(DMShellSetCoarsen(*dms, DMCoarsen_ShellDA));
    PetscCall(DMShellSetCreateInterpolation(*dms, DMCreateInterpolation_ShellDA));
  } else {
    *dms = NULL;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

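/*
  Tears down a DMShell built by DMShellCreate_ShellDA: destroys the objects
  composed on the inner DMDA ("P", "xp", "scatter", "xtmp", "isin"), removes
  the composed Telescope callbacks, then destroys the DMDA and the shell.
*/
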
PetscErrorCode DMDestroyShellDMDA(DM *_dm)
{
  DM dm, da = NULL;

  PetscFunctionBeginUser;
  if (!_dm) PetscFunctionReturn(PETSC_SUCCESS);
  dm = *_dm;
  if (!dm) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(DMShellGetContext(dm, &da));
  if (da) {
    Vec        vec;
    VecScatter scatter = NULL;
    IS         is      = NULL;
    Mat        P       = NULL;

    PetscCall(PetscObjectQuery((PetscObject)da, "P", (PetscObject *)&P));
    PetscCall(MatDestroy(&P));

    vec = NULL;
    PetscCall(PetscObjectQuery((PetscObject)da, "xp", (PetscObject *)&vec));
    PetscCall(VecDestroy(&vec));

    PetscCall(PetscObjectQuery((PetscObject)da, "scatter", (PetscObject *)&scatter));
    PetscCall(VecScatterDestroy(&scatter));

    vec = NULL;
    PetscCall(PetscObjectQuery((PetscObject)da, "xtmp", (PetscObject *)&vec));
    PetscCall(VecDestroy(&vec));

    PetscCall(PetscObjectQuery((PetscObject)da, "isin", (PetscObject *)&is));
    PetscCall(ISDestroy(&is));

    PetscCall(DMDestroy(&da));
  }
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "PCTelescopeFieldScatter", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "PCTelescopeStateScatter", NULL));
  PetscCall(DMDestroy(&dm));
  *_dm = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode HierarchyCreate_Basic(DM *dm_f, DM *dm_c, UserContext *ctx)
{
  DM          dm, dmc, dm_shell, dmc_shell;
  PetscMPIInt rank;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 17, 17, PETSC_DECIDE, PETSC_DECIDE, 1, 1, 0, 0, &dm));
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMSetUp(dm));
  PetscCall(DMDASetUniformCoordinates(dm, 0, 1, 0, 1, 0, 0));
  PetscCall(DMDASetFieldName(dm, 0, "Pressure"));
  PetscCall(DMShellCreate_ShellDA(dm, &dm_shell));
  PetscCall(DMSetApplicationContext(dm_shell, ctx));

  dmc       = NULL;
  dmc_shell = NULL;
  if (rank == 0) {
    PetscCall(DMDACreate2d(PETSC_COMM_SELF, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 17, 17, PETSC_DECIDE, PETSC_DECIDE, 1, 1, 0, 0, &dmc));
    PetscCall(DMSetFromOptions(dmc));
    PetscCall(DMSetUp(dmc));
    PetscCall(DMDASetUniformCoordinates(dmc, 0, 1, 0, 1, 0, 0));
    PetscCall(DMDASetFieldName(dmc, 0, "Pressure"));
    PetscCall(DMShellCreate_ShellDA(dmc, &dmc_shell));
    PetscCall(DMSetApplicationContext(dmc_shell, ctx));
  }

  PetscCall(DMSetCoarseDM(dm_shell, dmc_shell));
  PetscCall(DMShellDASetUp_TelescopeDMScatter(dm_shell, dmc_shell));

  *dm_f = dm_shell;
  *dm_c = dmc_shell;
  PetscFunctionReturn(PETSC_SUCCESS);
}

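/*
  HierarchyCreate generalizes HierarchyCreate_Basic: it builds ndecomps
  communicator decompositions, each carrying levelrefs refined DMDAs.
  dmlist is indexed as d*levelrefs + k; consecutive entries are linked via
  DMSetCoarseDM, and the first level of each decomposition is additionally
  wired to the last level of the next-coarser decomposition with the
  Telescope scatter setup.
*/
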
PetscErrorCode HierarchyCreate(PetscInt *_nd, PetscInt *_nref, MPI_Comm **_cl, DM **_dl)
{
  PetscInt      d, k, ndecomps, ncoarsen, found, nx;
  PetscInt      levelrefs, *number;
  PetscSubcomm *pscommlist;
  MPI_Comm     *commlist;
  DM           *dalist, *dmlist;
  PetscBool     set;

  PetscFunctionBeginUser;
  ndecomps = 1;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ndecomps", &ndecomps, NULL));
  ncoarsen = ndecomps - 1;
  PetscCheck(ncoarsen >= 0, PETSC_COMM_WORLD, PETSC_ERR_USER, "-ndecomps must be >= 1");

  levelrefs = 2;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-level_nrefs", &levelrefs, NULL));
  PetscCall(PetscMalloc1(ncoarsen + 1, &number));
  for (k = 0; k < ncoarsen + 1; k++) number[k] = 2;
  found = ncoarsen;
  set   = PETSC_FALSE;
  PetscCall(PetscOptionsGetIntArray(NULL, NULL, "-level_comm_red_factor", number, &found, &set));
  if (set) PetscCheck(found == ncoarsen, PETSC_COMM_WORLD, PETSC_ERR_USER, "Expected %" PetscInt_FMT " values for -level_comm_red_factor. Found %" PetscInt_FMT, ncoarsen, found);

  PetscCall(PetscMalloc1(ncoarsen + 1, &pscommlist));
  for (k = 0; k < ncoarsen + 1; k++) pscommlist[k] = NULL;

  PetscCall(PetscMalloc1(ndecomps, &commlist));
  for (k = 0; k < ndecomps; k++) commlist[k] = MPI_COMM_NULL;
  PetscCall(PetscMalloc1(ndecomps * levelrefs, &dalist));
  PetscCall(PetscMalloc1(ndecomps * levelrefs, &dmlist));
  for (k = 0; k < ndecomps * levelrefs; k++) {
    dalist[k] = NULL;
    dmlist[k] = NULL;
  }

  PetscCall(CommHierarchyCreate(PETSC_COMM_WORLD, ncoarsen, number, pscommlist));
  for (k = 0; k < ncoarsen; k++) {
    if (pscommlist[k]) {
      MPI_Comm comm_k = PetscSubcommChild(pscommlist[k]);
      if (pscommlist[k]->color == 0) PetscCall(PetscCommDuplicate(comm_k, &commlist[k], NULL));
    }
  }
  PetscCall(PetscCommDuplicate(PETSC_COMM_WORLD, &commlist[ndecomps - 1], NULL));

  for (k = 0; k < ncoarsen; k++) {
    if (pscommlist[k]) PetscCall(PetscSubcommDestroy(&pscommlist[k]));
  }

  nx = 17;
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-m", &nx, NULL));
  for (d = 0; d < ndecomps; d++) {
    DM   dmroot = NULL;
    char name[PETSC_MAX_PATH_LEN];

    if (commlist[d] != MPI_COMM_NULL) {
      PetscCall(DMDACreate2d(commlist[d], DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, nx, nx, PETSC_DECIDE, PETSC_DECIDE, 1, 1, 0, 0, &dmroot));
      PetscCall(DMSetUp(dmroot));
      PetscCall(DMDASetUniformCoordinates(dmroot, 0, 1, 0, 1, 0, 0));
      PetscCall(DMDASetFieldName(dmroot, 0, "Pressure"));
      PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN - 1, "root-decomp-%" PetscInt_FMT, d));
      PetscCall(PetscObjectSetName((PetscObject)dmroot, name));
      /*PetscCall(DMView(dmroot,PETSC_VIEWER_STDOUT_(commlist[d])));*/
    }

    dalist[d * levelrefs + 0] = dmroot;
    for (k = 1; k < levelrefs; k++) {
      DM dmref = NULL;

      if (commlist[d] != MPI_COMM_NULL) {
        PetscCall(DMRefine(dalist[d * levelrefs + (k - 1)], MPI_COMM_NULL, &dmref));
        PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN - 1, "ref%" PetscInt_FMT "-decomp-%" PetscInt_FMT, k, d));
        PetscCall(PetscObjectSetName((PetscObject)dmref, name));
        PetscCall(DMDAGetInfo(dmref, NULL, &nx, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
        /*PetscCall(DMView(dmref,PETSC_VIEWER_STDOUT_(commlist[d])));*/
      }
      dalist[d * levelrefs + k] = dmref;
    }
    PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &nx, 1, MPIU_INT, MPI_MAX, PETSC_COMM_WORLD));
  }

  /* create the hierarchy of DMShells */
  for (d = 0; d < ndecomps; d++) {
    char name[PETSC_MAX_PATH_LEN];

    UserContext *ctx = NULL;
    if (commlist[d] != MPI_COMM_NULL) {
      PetscCall(UserContextCreate(commlist[d], &ctx));
      for (k = 0; k < levelrefs; k++) {
        PetscCall(DMShellCreate_ShellDA(dalist[d * levelrefs + k], &dmlist[d * levelrefs + k]));
        PetscCall(DMSetApplicationContext(dmlist[d * levelrefs + k], ctx));
        PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN - 1, "level%" PetscInt_FMT "-decomp-%" PetscInt_FMT, k, d));
        PetscCall(PetscObjectSetName((PetscObject)dmlist[d * levelrefs + k], name));
      }
    }
  }

  /* set all the coarse DMs */
  for (k = 1; k < ndecomps * levelrefs; k++) { /* skip first DM as it doesn't have a coarse representation */
    DM dmfine = NULL, dmcoarse = NULL;

    dmfine   = dmlist[k];
    dmcoarse = dmlist[k - 1];
    if (dmfine) PetscCall(DMSetCoarseDM(dmfine, dmcoarse));
  }

  /* do special setup on the fine DM coupling different decompositions */
  for (d = 1; d < ndecomps; d++) { /* skip first decomposition as it doesn't have a coarse representation */
    DM dmfine = NULL, dmcoarse = NULL;

    dmfine   = dmlist[d * levelrefs + 0];
    dmcoarse = dmlist[(d - 1) * levelrefs + (levelrefs - 1)];
    if (dmfine) PetscCall(DMShellDASetUp_TelescopeDMScatter(dmfine, dmcoarse));
  }

  PetscCall(PetscFree(number));
  for (k = 0; k < ncoarsen; k++) PetscCall(PetscSubcommDestroy(&pscommlist[k]));
  PetscCall(PetscFree(pscommlist));

  if (_nd) *_nd = ndecomps;
  if (_nref) *_nref = levelrefs;
  if (_cl) {
    *_cl = commlist;
  } else {
    for (k = 0; k < ndecomps; k++) {
      if (commlist[k] != MPI_COMM_NULL) PetscCall(PetscCommDestroy(&commlist[k]));
    }
    PetscCall(PetscFree(commlist));
  }
  if (_dl) {
    *_dl = dmlist;
    PetscCall(PetscFree(dalist));
  } else {
    for (k = 0; k < ndecomps * levelrefs; k++) {
      PetscCall(DMDestroy(&dmlist[k]));
      PetscCall(DMDestroy(&dalist[k]));
    }
    PetscCall(PetscFree(dmlist));
    PetscCall(PetscFree(dalist));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

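/*
  test_hierarchy: builds the full hierarchy, then tears it down again. Note
  the UserContext allocated in HierarchyCreate is shared by every DM of a
  decomposition, so it is freed through the first DM of that decomposition
  only and merely detached from the refined DMs.
*/
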
PetscErrorCode test_hierarchy(void)
{
  PetscInt  d, k, nd, nref;
  MPI_Comm *comms;
  DM       *dms;

  PetscFunctionBeginUser;
  PetscCall(HierarchyCreate(&nd, &nref, &comms, &dms));

  /* destroy user context */
  for (d = 0; d < nd; d++) {
    DM first = dms[d * nref + 0];

    if (first) {
      UserContext *ctx = NULL;

      PetscCall(DMGetApplicationContext(first, &ctx));
      if (ctx) PetscCall(PetscFree(ctx));
      PetscCall(DMSetApplicationContext(first, NULL));
    }
    for (k = 1; k < nref; k++) {
      DM dm = dms[d * nref + k];
      if (dm) PetscCall(DMSetApplicationContext(dm, NULL));
    }
  }

  /* destroy DMs */
  for (k = 0; k < nd * nref; k++) {
    if (dms[k]) PetscCall(DMDestroyShellDMDA(&dms[k]));
  }
  PetscCall(PetscFree(dms));

  /* destroy communicators */
  for (k = 0; k < nd; k++) {
    if (comms[k] != MPI_COMM_NULL) PetscCall(PetscCommDestroy(&comms[k]));
  }
  PetscCall(PetscFree(comms));
  PetscFunctionReturn(PETSC_SUCCESS);
}

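/*
  test_basic: a two-level setup in which the coarse DM lives on rank 0 only.
  The KSP drives everything through the fine DMShell, and PCTelescope is
  told to use the attached coarse DM (PCTelescopeSetUseCoarseDM) rather than
  creating its own repartitioning.
*/
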
PetscErrorCode test_basic(void)
{
  DM           dmF, dmdaF = NULL, dmC = NULL;
  Mat          A;
  Vec          x, b;
  KSP          ksp;
  PC           pc;
  UserContext *user = NULL;

  PetscFunctionBeginUser;
  PetscCall(UserContextCreate(PETSC_COMM_WORLD, &user));
  PetscCall(HierarchyCreate_Basic(&dmF, &dmC, user));
  PetscCall(DMShellGetContext(dmF, &dmdaF));

  PetscCall(DMCreateMatrix(dmF, &A));
  PetscCall(DMCreateGlobalVector(dmF, &x));
  PetscCall(DMCreateGlobalVector(dmF, &b));
  PetscCall(ComputeRHS_DMDA(dmdaF, b, user));

  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix_ShellDA, user));
  /*PetscCall(KSPSetOperators(ksp,A,A));*/
  PetscCall(KSPSetDM(ksp, dmF));
  PetscCall(KSPSetDMActive(ksp, PETSC_TRUE));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCTelescopeSetUseCoarseDM(pc, PETSC_TRUE));

  PetscCall(KSPSolve(ksp, b, x));

  if (dmC) PetscCall(DMDestroyShellDMDA(&dmC));
  PetscCall(DMDestroyShellDMDA(&dmF));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&b));
  PetscCall(VecDestroy(&x));
  PetscCall(PetscFree(user));
  PetscFunctionReturn(PETSC_SUCCESS);
}

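/*
  test_mg: solves on the finest DMShell of the full hierarchy. The
  multigrid/telescope composition is configured entirely from the command
  line; see the -tid 2 cases in the TEST block at the end of this file.
*/
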
PetscErrorCode test_mg(void)
{
  DM           dmF, dmdaF = NULL, *dms = NULL;
  Mat          A;
  Vec          x, b;
  KSP          ksp;
  PetscInt     k, d, nd, nref;
  MPI_Comm    *comms = NULL;
  UserContext *user  = NULL;

  PetscFunctionBeginUser;
  PetscCall(HierarchyCreate(&nd, &nref, &comms, &dms));
  dmF = dms[nd * nref - 1];

  PetscCall(DMShellGetContext(dmF, &dmdaF));
  PetscCall(DMGetApplicationContext(dmF, &user));

  PetscCall(DMCreateMatrix(dmF, &A));
  PetscCall(DMCreateGlobalVector(dmF, &x));
  PetscCall(DMCreateGlobalVector(dmF, &b));
  PetscCall(ComputeRHS_DMDA(dmdaF, b, user));

  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix_ShellDA, user));
  /*PetscCall(KSPSetOperators(ksp,A,A));*/
  PetscCall(KSPSetDM(ksp, dmF));
  PetscCall(KSPSetDMActive(ksp, PETSC_TRUE));
  PetscCall(KSPSetFromOptions(ksp));

  PetscCall(KSPSolve(ksp, b, x));

  for (d = 0; d < nd; d++) {
    DM first = dms[d * nref + 0];

    if (first) {
      UserContext *ctx = NULL;

      PetscCall(DMGetApplicationContext(first, &ctx));
      if (ctx) PetscCall(PetscFree(ctx));
      PetscCall(DMSetApplicationContext(first, NULL));
    }
    for (k = 1; k < nref; k++) {
      DM dm = dms[d * nref + k];
      if (dm) PetscCall(DMSetApplicationContext(dm, NULL));
    }
  }

  for (k = 0; k < nd * nref; k++) {
    if (dms[k]) PetscCall(DMDestroyShellDMDA(&dms[k]));
  }
  PetscCall(PetscFree(dms));

  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&b));
  PetscCall(VecDestroy(&x));

  for (k = 0; k < nd; k++) {
    if (comms[k] != MPI_COMM_NULL) PetscCall(PetscCommDestroy(&comms[k]));
  }
  PetscCall(PetscFree(comms));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  PetscInt test_id = 0;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-tid", &test_id, NULL));
  switch (test_id) {
  case 0:
    PetscCall(test_basic());
    break;
  case 1:
    PetscCall(test_hierarchy());
    break;
  case 2:
    PetscCall(test_mg());
    break;
  default:
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "-tid must be 0,1,2");
  }
  PetscCall(PetscFinalize());
  return 0;
}

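/*
  Forms b_ij = exp(-(i*Hx)^2/nu) * exp(-(j*Hy)^2/nu) * Hx * Hy at the grid
  nodes, i.e. the Gaussian forcing f scaled by the cell area. For Neumann
  conditions the constant null space is projected out of b so the singular
  system is consistent.
*/
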
PetscErrorCode ComputeRHS_DMDA(DM da, Vec b, void *ctx)
{
  UserContext  *user = (UserContext *)ctx;
  PetscInt      i, j, mx, my, xm, ym, xs, ys;
  PetscScalar   Hx, Hy;
  PetscScalar **array;
  PetscBool     isda = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscObjectTypeCompare((PetscObject)da, DMDA, &isda));
  PetscCheck(isda, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "DM provided must be a DMDA");
  PetscCall(DMDAGetInfo(da, NULL, &mx, &my, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  Hx = 1.0 / (PetscReal)(mx - 1);
  Hy = 1.0 / (PetscReal)(my - 1);
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
  PetscCall(DMDAVecGetArray(da, b, &array));
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) array[j][i] = PetscExpScalar(-((PetscReal)i * Hx) * ((PetscReal)i * Hx) / user->nu) * PetscExpScalar(-((PetscReal)j * Hy) * ((PetscReal)j * Hy) / user->nu) * Hx * Hy;
  }
  PetscCall(DMDAVecRestoreArray(da, b, &array));
  PetscCall(VecAssemblyBegin(b));
  PetscCall(VecAssemblyEnd(b));

  /* force right-hand side to be consistent for singular matrix */
  /* note this is really a hack, normally the model would provide you with a consistent right-hand side */
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
    PetscCall(MatNullSpaceRemove(nullspace, b));
    PetscCall(MatNullSpaceDestroy(&nullspace));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode ComputeRho(PetscInt i, PetscInt j, PetscInt mx, PetscInt my, PetscReal centerRho, PetscReal *rho)
{
  PetscFunctionBeginUser;
  if ((i > mx / 3.0) && (i < 2.0 * mx / 3.0) && (j > my / 3.0) && (j < 2.0 * my / 3.0)) {
    *rho = centerRho;
  } else {
    *rho = 1.0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

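/*
  Assembles the standard five-point finite-difference stencil for
  -div(rho grad u). A sketch of an interior row (Hx, Hy the grid spacings,
  HxdHy = Hx/Hy, HydHx = Hy/Hx):

      [                    -rho*HxdHy                     ]
      [ -rho*HydHx   2*rho*(HxdHy + HydHx)   -rho*HydHx   ]
      [                    -rho*HxdHy                     ]

  Boundary rows are either a Dirichlet diagonal entry or, for Neumann, the
  stencil with missing neighbours dropped and the diagonal reduced so each
  row sums to zero.
*/
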
PetscErrorCode ComputeMatrix_DMDA(DM da, Mat J, Mat jac, void *ctx)
{
  UserContext *user = (UserContext *)ctx;
  PetscReal    centerRho;
  PetscInt     i, j, mx, my, xm, ym, xs, ys;
  PetscScalar  v[5];
  PetscReal    Hx, Hy, HydHx, HxdHy, rho;
  MatStencil   row, col[5];
  PetscBool    isda = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscObjectTypeCompare((PetscObject)da, DMDA, &isda));
  PetscCheck(isda, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "DM provided must be a DMDA");
  PetscCall(MatZeroEntries(jac));
  centerRho = user->rho;
  PetscCall(DMDAGetInfo(da, NULL, &mx, &my, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  Hx    = 1.0 / (PetscReal)(mx - 1);
  Hy    = 1.0 / (PetscReal)(my - 1);
  HxdHy = Hx / Hy;
  HydHx = Hy / Hx;
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      row.i = i;
      row.j = j;
      PetscCall(ComputeRho(i, j, mx, my, centerRho, &rho));
      if (i == 0 || j == 0 || i == mx - 1 || j == my - 1) {
        if (user->bcType == DIRICHLET) {
          v[0] = 2.0 * rho * (HxdHy + HydHx);
          PetscCall(MatSetValuesStencil(jac, 1, &row, 1, &row, v, INSERT_VALUES));
        } else if (user->bcType == NEUMANN) {
          PetscInt numx = 0, numy = 0, num = 0;
          if (j != 0) {
            v[num]     = -rho * HxdHy;
            col[num].i = i;
            col[num].j = j - 1;
            numy++;
            num++;
          }
          if (i != 0) {
            v[num]     = -rho * HydHx;
            col[num].i = i - 1;
            col[num].j = j;
            numx++;
            num++;
          }
          if (i != mx - 1) {
            v[num]     = -rho * HydHx;
            col[num].i = i + 1;
            col[num].j = j;
            numx++;
            num++;
          }
          if (j != my - 1) {
            v[num]     = -rho * HxdHy;
            col[num].i = i;
            col[num].j = j + 1;
            numy++;
            num++;
          }
          v[num]     = numx * rho * HydHx + numy * rho * HxdHy;
          col[num].i = i;
          col[num].j = j;
          num++;
          PetscCall(MatSetValuesStencil(jac, 1, &row, num, col, v, INSERT_VALUES));
        }
      } else {
        v[0]     = -rho * HxdHy;
        col[0].i = i;
        col[0].j = j - 1;
        v[1]     = -rho * HydHx;
        col[1].i = i - 1;
        col[1].j = j;
        v[2]     = 2.0 * rho * (HxdHy + HydHx);
        col[2].i = i;
        col[2].j = j;
        v[3]     = -rho * HydHx;
        col[3].i = i + 1;
        col[3].j = j;
        v[4]     = -rho * HxdHy;
        col[4].i = i;
        col[4].j = j + 1;
        PetscCall(MatSetValuesStencil(jac, 1, &row, 5, col, v, INSERT_VALUES));
      }
    }
  }
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode ComputeMatrix_ShellDA(KSP ksp, Mat J, Mat jac, void *ctx)
{
  DM dm, da;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &dm));
  PetscCall(DMShellGetContext(dm, &da));
  PetscCall(ComputeMatrix_DMDA(da, J, jac, ctx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   test:
      suffix: basic_dirichlet
      nsize: 4
      args: -tid 0 -ksp_monitor_short -pc_type telescope -telescope_ksp_max_it 100000 -telescope_pc_type lu -telescope_ksp_type fgmres -telescope_ksp_monitor_short -ksp_type gcr

   test:
      suffix: basic_neumann
      nsize: 4
      requires: !single
      args: -tid 0 -ksp_monitor_short -pc_type telescope -telescope_ksp_max_it 100000 -telescope_pc_type jacobi -telescope_ksp_type fgmres -telescope_ksp_monitor_short -ksp_type gcr -bc_type neumann

   test:
      suffix: mg_2lv_2mg
      nsize: 6
      args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 2 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -level_comm_red_factor 6 -mg_coarse_telescope_mg_coarse_pc_type lu

   test:
      suffix: mg_3lv_2mg
      nsize: 4
      args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 3 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_coarse_pc_type telescope -mg_coarse_telescope_mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_mg_coarse_telescope_pc_type lu -m 5

   test:
      suffix: mg_3lv_2mg_customcommsize
      nsize: 12
      args: -tid 2 -ksp_monitor_short -ksp_type gcr -pc_type mg -pc_mg_levels 3 -level_nrefs 3 -ndecomps 3 -mg_coarse_pc_type telescope -mg_coarse_pc_telescope_use_coarse_dm -m 5 -level_comm_red_factor 2,6 -mg_coarse_telescope_pc_type mg -mg_coarse_telescope_pc_mg_levels 3 -mg_coarse_telescope_mg_coarse_pc_type telescope -mg_coarse_telescope_mg_coarse_pc_telescope_use_coarse_dm -mg_coarse_telescope_mg_coarse_telescope_pc_type lu

TEST*/