Actual source code: ex35.cxx
/*
Inhomogeneous Laplacian in 2D. Modeled by the partial differential equation

   -div \rho grad u = f,  0 < x,y < 1,

Problem 1: (Default)

  Use the following exact solution with Dirichlet boundary condition

     u = sin(pi*x)*sin(pi*y)

  and generate an appropriate forcing function to measure convergence.
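
  For this exact solution on the unit square (domain sizes LX = LY = 1, adjustable with -x/-y),
  the forcing function assembled in ComputeForcingFunction() below reduces to

     f = \rho*pi^2*(1/LX^2 + 1/LY^2)*sin(pi*x/LX)*sin(pi*y/LY)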
Usage:

  Measure the convergence rate under uniform refinement with the options "-problem 1 -error":

    mpiexec -n $NP ./ex35 -problem 1 -error -n 16 -levels 5 -pc_type gamg
    mpiexec -n $NP ./ex35 -problem 1 -error -n 32 -levels 4 -pc_type gamg
    mpiexec -n $NP ./ex35 -problem 1 -error -n 64 -levels 3 -pc_type mg
    mpiexec -n $NP ./ex35 -problem 1 -error -n 128 -levels 2 -pc_type mg
    mpiexec -n $NP ./ex35 -problem 1 -error -n 256 -levels 1 -mg
    mpiexec -n $NP ./ex35 -problem 1 -error -n 512 -levels 0 -mg

  Or with an external mesh file representing [0, 1]^2:

    mpiexec -n $NP ./ex35 -problem 1 -file ./external_mesh.h5m -levels 2 -error -pc_type mg

Problem 2:

  Use the forcing function

     f = e^{-((x-xr)^2+(y-yr)^2)/\nu}

  with Dirichlet boundary conditions

     u = f(x,y) for x = 0, x = 1, y = 0, y = 1

  or pure Neumann boundary conditions.

Usage:

  Run with different values of \rho and \nu to control the diffusion coefficient and the spread of
  the Gaussian source. This uses the internal mesh generator implemented in DMMoab.

    mpiexec -n $NP ./ex35 -problem 2 -n 20 -nu 0.02 -rho 0.01
    mpiexec -n $NP ./ex35 -problem 2 -n 40 -nu 0.01 -rho 0.005 -io -ksp_monitor
    mpiexec -n $NP ./ex35 -problem 2 -n 80 -nu 0.01 -rho 0.005 -io -ksp_monitor -pc_type hypre
    mpiexec -n $NP ./ex35 -problem 2 -n 160 -bc neumann -nu 0.005 -rho 0.01 -io
    mpiexec -n $NP ./ex35 -problem 2 -n 320 -bc neumann -nu 0.001 -rho 1 -io

  Or with an external mesh file representing [0, 1]^2:

    mpiexec -n $NP ./ex35 -problem 2 -file ./external_mesh.h5m -levels 1 -pc_type gamg

Problem 3:

  Use the forcing function

     f = \nu*sin(\pi*x/LX)*sin(\pi*y/LY)

  with Dirichlet boundary conditions

     u = 0.0, for x = 0, x = 1, y = 0, y = 1 (outer boundary), and
     u = 1.0, for (x-0.5)^2 + (y-0.5)^2 = 0.01

Usage:

  You can alternatively load an external MOAB mesh that discretizes the unit square and use it to run the solver.

    mpiexec -n $NP ./ex35 -problem 3 -file input/square_with_hole.h5m -mg
    mpiexec -n $NP ./ex35 -problem 3 -file input/square_with_hole.h5m -mg -levels 2 -io -ksp_monitor
    mpiexec -n $NP ./ex35 -problem 3 -file input/square_with_hole.h5m -io -ksp_monitor -pc_type hypre
*/

static char help[] = "\
Solves the 2D inhomogeneous Laplacian equation.\n \
Usage: ./ex35 -problem 1 -error -n 4 -levels 3 -mg\n \
Usage: ./ex35 -problem 2 -n 80 -nu 0.01 -rho 0.005 -io -ksp_monitor -pc_type gamg\n \
Usage: ./ex35 -problem 3 -file input/square_with_hole.h5m -mg\n";

/* PETSc includes */
#include <petscksp.h>
#include <petscdmmoab.h>
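
/* LOCAL_ASSEMBLY: assemble element contributions with local (ghosted) indices through
   VecSetValuesLocal()/MatSetValuesLocal(); if undefined, the #else branches below assemble
   with global indices through VecSetValues()/MatSetValues(). */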
#define LOCAL_ASSEMBLY

typedef enum {
  DIRICHLET,
  NEUMANN
} BCType;
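
/* Application context: n is the number of elements per direction for the generated grid,
   nlevels the depth of the refinement hierarchy, rho the diffusion coefficient, (xref, yref)
   and nu the center and spread of the Gaussian source, and VPERE the number of vertices
   per element (3 for TRI3, 4 for QUAD4). */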
typedef struct {
  PetscInt dim, n, problem, nlevels;
  PetscReal rho;
  PetscReal bounds[6];
  PetscReal xref, yref;
  PetscReal nu;
  PetscInt VPERE;
  BCType bcType;
  PetscBool use_extfile, io, error, usetri, usemg;
  char filename[PETSC_MAX_PATH_LEN];
} UserContext;

static PetscErrorCode ComputeMatrix(KSP, Mat, Mat, void *);
static PetscErrorCode ComputeRHS(KSP, Vec, void *);
static PetscErrorCode ComputeDiscreteL2Error(KSP, Vec, UserContext *);
static PetscErrorCode InitializeOptions(UserContext *);

int main(int argc, char **argv)
{
  KSP ksp;
  PC pc;
  Mat R;
  DM dm, dmref, *dmhierarchy;

  UserContext user;
  const char *fields[1] = {"T-Variable"};
  PetscInt k;
  Vec b, x, errv;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  PetscCall(InitializeOptions(&user));

  /* Create the DM object from either a mesh file or an in-memory structured grid */
  if (user.use_extfile) {
    PetscCall(DMMoabLoadFromFile(PETSC_COMM_WORLD, user.dim, 1, user.filename, "", &dm));
  } else {
    PetscCall(DMMoabCreateBoxMesh(PETSC_COMM_WORLD, user.dim, user.usetri, user.bounds, user.n, 1, &dm));
  }
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMMoabSetFieldNames(dm, 1, fields));

  /* SetUp the data structures for DMMOAB */
  PetscCall(DMSetUp(dm));

  PetscCall(DMSetApplicationContext(dm, &user));

  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetComputeRHS(ksp, ComputeRHS, &user));
  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix, &user));

  if (user.nlevels) {
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PetscMalloc(sizeof(DM) * (user.nlevels + 1), &dmhierarchy));
    for (k = 0; k <= user.nlevels; k++) dmhierarchy[k] = NULL;

    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of mesh hierarchy levels: %" PetscInt_FMT "\n", user.nlevels));
    PetscCall(DMMoabGenerateHierarchy(dm, user.nlevels, NULL));

    /* coarsest grid = 0, finest grid = nlevels */
    dmhierarchy[0] = dm;
    PetscBool usehierarchy = PETSC_FALSE;
    if (usehierarchy) {
      PetscCall(DMRefineHierarchy(dm, user.nlevels, &dmhierarchy[1]));
    } else {
      for (k = 1; k <= user.nlevels; k++) PetscCall(DMRefine(dmhierarchy[k - 1], MPI_COMM_NULL, &dmhierarchy[k]));
    }
    dmref = dmhierarchy[user.nlevels];
    PetscCall(PetscObjectReference((PetscObject)dmref));

    if (user.usemg) {
      PetscCall(PCSetType(pc, PCMG));
      PetscCall(PCMGSetLevels(pc, user.nlevels + 1, NULL));
      PetscCall(PCMGSetType(pc, PC_MG_MULTIPLICATIVE));
      PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH));
      PetscCall(PCMGSetCycleType(pc, PC_MG_CYCLE_V));
      PetscCall(PCMGSetNumberSmooth(pc, 2));

      for (k = 1; k <= user.nlevels; k++) {
        PetscCall(DMCreateInterpolation(dmhierarchy[k - 1], dmhierarchy[k], &R, NULL));
        PetscCall(PCMGSetInterpolation(pc, k, R));
        PetscCall(MatDestroy(&R));
      }
    }

    for (k = 1; k <= user.nlevels; k++) PetscCall(DMDestroy(&dmhierarchy[k]));
    PetscCall(PetscFree(dmhierarchy));
  } else {
    dmref = dm;
    PetscCall(PetscObjectReference((PetscObject)dm));
  }

  PetscCall(KSPSetDM(ksp, dmref));
  PetscCall(KSPSetFromOptions(ksp));

  /* Perform the actual solve */
  PetscCall(KSPSolve(ksp, NULL, NULL));
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &b));

  if (user.error) {
    PetscCall(VecDuplicate(b, &errv));
    PetscCall(ComputeDiscreteL2Error(ksp, errv, &user));
    PetscCall(VecDestroy(&errv));
  }

  if (user.io) {
    /* Write out the solution along with the mesh */
    PetscCall(DMMoabSetGlobalFieldVector(dmref, x));
#ifdef MOAB_HAVE_HDF5
    PetscCall(DMMoabOutput(dmref, "ex35.h5m", NULL));
#else
    /* MOAB does not support truly parallel writers that are not HDF5 based,
       so if VTK is used as the output format in parallel, the data may be
       jumbled due to the order in which the processes write out their parts
       of the mesh and solution tags. */
    PetscCall(DMMoabOutput(dmref, "ex35.vtk", NULL));
#endif
  }

  /* Cleanup objects */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(DMDestroy(&dmref));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

PetscScalar ComputeDiffusionCoefficient(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 2:
    if ((coords[0] > user->bounds[1] / 3.0) && (coords[0] < 2.0 * user->bounds[1] / 3.0) && (coords[1] > user->bounds[3] / 3.0) && (coords[1] < 2.0 * user->bounds[3] / 3.0)) {
      return user->rho;
    } else {
      return 1.0;
    }
  case 1:
  case 3:
  default:
    return user->rho;
  }
}

PetscScalar ExactSolution(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 1:
    return sin(PETSC_PI * coords[0] / user->bounds[1]) * sin(PETSC_PI * coords[1] / user->bounds[3]);
  case 3:
  case 2:
  default:
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Exact solution for -problem = [%" PetscInt_FMT "] is not available.", user->problem);
  }
}

PetscScalar ComputeForcingFunction(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 3:
    return user->nu * sin(PETSC_PI * coords[0] / user->bounds[1]) * sin(PETSC_PI * coords[1] / user->bounds[3]);
  case 2:
    return PetscExpScalar(-((coords[0] - user->xref) * (coords[0] - user->xref) + (coords[1] - user->yref) * (coords[1] - user->yref)) / user->nu);
  case 1:
  default:
    return PETSC_PI * PETSC_PI * ComputeDiffusionCoefficient(coords, user) * (1.0 / user->bounds[1] / user->bounds[1] + 1.0 / user->bounds[3] / user->bounds[3]) * sin(PETSC_PI * coords[0] / user->bounds[1]) * sin(PETSC_PI * coords[1] / user->bounds[3]);
  }
}

#define BCHECKEPS 1e-10
#define BCHECK(coordxyz, truetrace) ((coordxyz < truetrace + BCHECKEPS && coordxyz > truetrace - BCHECKEPS))

PetscScalar EvaluateStrongDirichletCondition(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 3:
    if (BCHECK(coords[0], user->bounds[0]) || BCHECK(coords[0], user->bounds[1]) || BCHECK(coords[1], user->bounds[2]) || BCHECK(coords[1], user->bounds[3])) return 0.0;
    else // ( coords[0]*coords[0] + coords[1]*coords[1] < 0.04 + BCHECKEPS)
      return 1.0;
  case 2:
    return ComputeForcingFunction(coords, user);
  case 1:
  default:
    return ExactSolution(coords, user);
  }
}

PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ptr)
{
  UserContext *user = (UserContext *)ptr;
  DM dm;
  PetscInt dof_indices[4];
  PetscBool dbdry[4];
  PetscReal vpos[4 * 3];
  PetscScalar ff;
  PetscInt i, q, nconn, nc, npoints;
  const moab::EntityHandle *connect;
  const moab::Range *elocal;
  moab::Interface *mbImpl;
  PetscScalar localv[4];
  PetscReal *phi, *phypts, *jxw;
  PetscBool elem_on_boundary;
  PetscQuadrature quadratureObj;

  PetscFunctionBegin;
  PetscCall(KSPGetDM(ksp, &dm));

  /* reset the RHS */
  PetscCall(VecSet(b, 0.0));

  PetscCall(DMMoabFEMCreateQuadratureDefault(2, user->VPERE, &quadratureObj));
  PetscCall(PetscQuadratureGetData(quadratureObj, NULL, &nc, &npoints, NULL, NULL));
  PetscCall(PetscMalloc3(user->VPERE * npoints, &phi, npoints * 3, &phypts, npoints, &jxw));

  /* get the essential MOAB mesh related quantities needed for FEM assembly */
  PetscCall(DMMoabGetInterface(dm, &mbImpl));
  PetscCall(DMMoabGetLocalElements(dm, &elocal));

  /* loop over local elements */
  for (moab::Range::iterator iter = elocal->begin(); iter != elocal->end(); iter++) {
    const moab::EntityHandle ehandle = *iter;

    /* Get connectivity information: */
    PetscCall(DMMoabGetElementConnectivity(dm, ehandle, &nconn, &connect));
    PetscCheck(nconn == 3 || nconn == 4, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Only TRI3/QUAD4 element bases are supported in the current example. n(Connectivity)=%" PetscInt_FMT ".", nconn);

    PetscCall(PetscArrayzero(localv, nconn));

    /* get the coordinates of the element vertices */
    PetscCall(DMMoabGetVertexCoordinates(dm, nconn, connect, vpos));

    /* get the local DoF numbers to appropriately set the element contribution in the RHS vector */
#ifdef LOCAL_ASSEMBLY
    PetscCall(DMMoabGetFieldDofsLocal(dm, nconn, connect, 0, dof_indices));
#else
    PetscCall(DMMoabGetFieldDofs(dm, nconn, connect, 0, dof_indices));
#endif

    /* 1) compute the basis functions and the derivatives wrt x and y directions
       2) compute the quadrature points transformed to the physical space */
    PetscCall(DMMoabFEMComputeBasis(2, nconn, vpos, quadratureObj, phypts, jxw, phi, NULL));

    /* Compute function over the locally owned part of the grid */
    for (q = 0; q < npoints; ++q) {
      ff = ComputeForcingFunction(&phypts[3 * q], user);
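      /* accumulate the element load vector: localv[i] += w_q * f(x_q) * phi_i(x_q) */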
      for (i = 0; i < nconn; ++i) localv[i] += jxw[q] * phi[q * nconn + i] * ff;
    }

    /* check if element is on the boundary */
    PetscCall(DMMoabIsEntityOnBoundary(dm, ehandle, &elem_on_boundary));

    /* apply dirichlet boundary conditions */
    if (elem_on_boundary && user->bcType == DIRICHLET) {
      /* get the list of nodes on boundary so that we can enforce dirichlet conditions strongly */
      PetscCall(DMMoabCheckBoundaryVertices(dm, nconn, connect, dbdry));

      for (i = 0; i < nconn; ++i) {
        if (dbdry[i]) { /* dirichlet node */
          /* strongly impose the Dirichlet boundary condition at this node */
          localv[i] = EvaluateStrongDirichletCondition(&vpos[3 * i], user);
        }
      }
    }

#ifdef LOCAL_ASSEMBLY
    /* set the values directly into appropriate locations. Can alternatively use VecSetValues */
    PetscCall(VecSetValuesLocal(b, nconn, dof_indices, localv, ADD_VALUES));
#else
    PetscCall(VecSetValues(b, nconn, dof_indices, localv, ADD_VALUES));
#endif
  }

  /* force the right-hand side to be consistent for the singular (Neumann) operator;
     note this is really a hack; normally the model would provide a consistent right-hand side */
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;
    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
    PetscCall(MatNullSpaceRemove(nullspace, b));
    PetscCall(MatNullSpaceDestroy(&nullspace));
  }

  /* assemble the RHS vector */
  PetscCall(VecAssemblyBegin(b));
  PetscCall(VecAssemblyEnd(b));
  PetscCall(PetscFree3(phi, phypts, jxw));
  PetscCall(PetscQuadratureDestroy(&quadratureObj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode ComputeMatrix(KSP ksp, Mat J, Mat jac, void *ctx)
{
  UserContext *user = (UserContext *)ctx;
  DM dm;
  PetscInt i, j, q, nconn, nglobale, nglobalv, nc, npoints, hlevel;
  PetscInt dof_indices[4];
  PetscReal vpos[4 * 3], rho;
  PetscBool dbdry[4];
  const moab::EntityHandle *connect;
  const moab::Range *elocal;
  moab::Interface *mbImpl;
  PetscBool elem_on_boundary;
  PetscScalar array[4 * 4];
  PetscReal *phi, *dphi[2], *phypts, *jxw;
  PetscQuadrature quadratureObj;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &dm));

  /* get the essential MOAB mesh related quantities needed for FEM assembly */
  PetscCall(DMMoabGetInterface(dm, &mbImpl));
  PetscCall(DMMoabGetLocalElements(dm, &elocal));
  PetscCall(DMMoabGetSize(dm, &nglobale, &nglobalv));
  PetscCall(DMMoabGetHierarchyLevel(dm, &hlevel));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "ComputeMatrix: Level = %" PetscInt_FMT ", N(elements) = %" PetscInt_FMT ", N(vertices) = %" PetscInt_FMT " \n", hlevel, nglobale, nglobalv));
  PetscCall(DMMoabFEMCreateQuadratureDefault(2, user->VPERE, &quadratureObj));
  PetscCall(PetscQuadratureGetData(quadratureObj, NULL, &nc, &npoints, NULL, NULL));
  PetscCall(PetscMalloc5(user->VPERE * npoints, &phi, user->VPERE * npoints, &dphi[0], user->VPERE * npoints, &dphi[1], npoints * 3, &phypts, npoints, &jxw));

  /* loop over local elements */
  for (moab::Range::iterator iter = elocal->begin(); iter != elocal->end(); iter++) {
    const moab::EntityHandle ehandle = *iter;

    // Get connectivity information:
    PetscCall(DMMoabGetElementConnectivity(dm, ehandle, &nconn, &connect));
    PetscCheck(nconn == 3 || nconn == 4, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Only QUAD4 or TRI3 element bases are supported in the current example. Connectivity=%" PetscInt_FMT ".", nconn);

    /* get the coordinates of the element vertices */
    PetscCall(DMMoabGetVertexCoordinates(dm, nconn, connect, vpos));

    /* get the local DoF numbers to appropriately set the element contribution in the operator */
#ifdef LOCAL_ASSEMBLY
    PetscCall(DMMoabGetFieldDofsLocal(dm, nconn, connect, 0, dof_indices));
#else
    PetscCall(DMMoabGetFieldDofs(dm, nconn, connect, 0, dof_indices));
#endif

    /* 1) compute the basis functions and the derivatives wrt x and y directions
       2) compute the quadrature points transformed to the physical space */
    PetscCall(DMMoabFEMComputeBasis(2, nconn, vpos, quadratureObj, phypts, jxw, phi, dphi));

    PetscCall(PetscArrayzero(array, nconn * nconn));

    /* Compute function over the locally owned part of the grid */
    for (q = 0; q < npoints; ++q) {
      /* compute the inhomogeneous (piece-wise constant) diffusion coefficient at the quadrature point
         -- for large spatial variations (within an element), embed this property evaluation inside the quadrature loop */
      rho = ComputeDiffusionCoefficient(&phypts[q * 3], user);
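
      /* accumulate the element stiffness: array[i][j] += w_q * rho * grad(phi_i) . grad(phi_j) at x_q */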
      for (i = 0; i < nconn; ++i) {
        for (j = 0; j < nconn; ++j) array[i * nconn + j] += jxw[q] * rho * (dphi[0][q * nconn + i] * dphi[0][q * nconn + j] + dphi[1][q * nconn + i] * dphi[1][q * nconn + j]);
      }
    }

    /* check if element is on the boundary */
    PetscCall(DMMoabIsEntityOnBoundary(dm, ehandle, &elem_on_boundary));

    /* apply dirichlet boundary conditions */
    if (elem_on_boundary && user->bcType == DIRICHLET) {
      /* get the list of nodes on boundary so that we can enforce dirichlet conditions strongly */
      PetscCall(DMMoabCheckBoundaryVertices(dm, nconn, connect, dbdry));

      for (i = 0; i < nconn; ++i) {
        if (dbdry[i]) { /* dirichlet node */
          /* strongly impose the Dirichlet boundary condition: zero the row and put 1 on the diagonal */
          for (j = 0; j < nconn; ++j) {
            /* TODO: symmetrize the system - need the RHS */
            array[i * nconn + j] = 0.0;
          }
          array[i * nconn + i] = 1.0;
        }
      }
    }

    /* set the values directly into appropriate locations. */
#ifdef LOCAL_ASSEMBLY
    PetscCall(MatSetValuesLocal(jac, nconn, dof_indices, nconn, dof_indices, array, ADD_VALUES));
#else
    PetscCall(MatSetValues(jac, nconn, dof_indices, nconn, dof_indices, array, ADD_VALUES));
#endif
  }

  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
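
  /* the Laplacian with pure Neumann boundary conditions is singular with a constant nullspace;
     attach it to the operator so that the Krylov solver can handle the singular system */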
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
    PetscCall(MatSetNullSpace(J, nullspace));
    PetscCall(MatNullSpaceDestroy(&nullspace));
  }
  PetscCall(PetscFree5(phi, dphi[0], dphi[1], phypts, jxw));
  PetscCall(PetscQuadratureDestroy(&quadratureObj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode ComputeDiscreteL2Error(KSP ksp, Vec err, UserContext *user)
{
  DM dm;
  Vec sol;
  PetscReal vpos[3];
  const PetscScalar *x;
  PetscScalar *e;
  PetscReal l2err = 0.0, linferr = 0.0, global_l2, global_linf;
  PetscInt dof_index, N;
  const moab::Range *ownedvtx;

  PetscFunctionBegin;
  PetscCall(KSPGetDM(ksp, &dm));

  /* get the solution vector */
  PetscCall(KSPGetSolution(ksp, &sol));

  /* Get the internal reference to the vector arrays */
  PetscCall(VecGetArrayRead(sol, &x));
  PetscCall(VecGetSize(sol, &N));
  if (err) {
    /* reset the error vector */
    PetscCall(VecSet(err, 0.0));
    /* get array reference */
    PetscCall(VecGetArray(err, &e));
  }

  PetscCall(DMMoabGetLocalVertices(dm, &ownedvtx, NULL));

  /* Compute function over the locally owned part of the grid */
  for (moab::Range::iterator iter = ownedvtx->begin(); iter != ownedvtx->end(); iter++) {
    const moab::EntityHandle vhandle = *iter;

    /* get the local DoF number for this vertex, used to index into the solution array */
#ifdef LOCAL_ASSEMBLY
    PetscCall(DMMoabGetFieldDofsLocal(dm, 1, &vhandle, 0, &dof_index));
#else
    PetscCall(DMMoabGetFieldDofs(dm, 1, &vhandle, 0, &dof_index));
#endif

    /* get the coordinates of the vertex */
    PetscCall(DMMoabGetVertexCoordinates(dm, 1, &vhandle, vpos));

    /* compute the discrete L2 error against the exact solution */
    const PetscScalar lerr = (ExactSolution(vpos, user) - x[dof_index]);
    l2err += lerr * lerr;
    if (linferr < fabs(lerr)) linferr = fabs(lerr);

    if (err) { /* set the discrete L2 error against the exact solution */
      e[dof_index] = lerr;
    }
  }

  PetscCallMPI(MPI_Allreduce(&l2err, &global_l2, 1, MPIU_REAL, MPI_SUM, PETSC_COMM_WORLD));
  PetscCallMPI(MPI_Allreduce(&linferr, &global_linf, 1, MPIU_REAL, MPI_MAX, PETSC_COMM_WORLD));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Computed Errors: L_2 = %f, L_inf = %f\n", sqrt(global_l2 / N), global_linf));

  /* Restore vectors */
  PetscCall(VecRestoreArrayRead(sol, &x));
  if (err) PetscCall(VecRestoreArray(err, &e));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode InitializeOptions(UserContext *user)
{
  const char *bcTypes[2] = {"dirichlet", "neumann"};
  PetscInt bc;

  PetscFunctionBegin;
  /* set default parameters */
  user->dim = 2;
  user->problem = 1;
  user->n = 2;
  user->nlevels = 2;
  user->rho = 0.1;
  user->bounds[0] = user->bounds[2] = user->bounds[4] = 0.0;
  user->bounds[1] = user->bounds[3] = user->bounds[5] = 1.0;
  user->xref = user->bounds[1] / 2;
  user->yref = user->bounds[3] / 2;
  user->nu = 0.05;
  user->usemg = PETSC_FALSE;
  user->io = PETSC_FALSE;
  user->usetri = PETSC_FALSE;
  user->error = PETSC_FALSE;
  bc = (PetscInt)DIRICHLET;

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "Options for the inhomogeneous Poisson equation", "ex35.cxx");
  PetscCall(PetscOptionsInt("-problem", "The type of problem being solved (controls forcing function)", "ex35.cxx", user->problem, &user->problem, NULL));
  PetscCall(PetscOptionsInt("-n", "The elements in each direction", "ex35.cxx", user->n, &user->n, NULL));
  PetscCall(PetscOptionsInt("-levels", "Number of levels in the multigrid hierarchy", "ex35.cxx", user->nlevels, &user->nlevels, NULL));
  PetscCall(PetscOptionsReal("-rho", "The conductivity", "ex35.cxx", user->rho, &user->rho, NULL));
  PetscCall(PetscOptionsReal("-x", "The domain size in x-direction", "ex35.cxx", user->bounds[1], &user->bounds[1], NULL));
  PetscCall(PetscOptionsReal("-y", "The domain size in y-direction", "ex35.cxx", user->bounds[3], &user->bounds[3], NULL));
587: PetscCall(PetscOptionsReal("-xref", "The x-coordinate of Gaussian center (for -problem 1)", "ex35.cxx", user->xref, &user->xref, NULL));
588: PetscCall(PetscOptionsReal("-yref", "The y-coordinate of Gaussian center (for -problem 1)", "ex35.cxx", user->yref, &user->yref, NULL));
589: PetscCall(PetscOptionsReal("-nu", "The width of the Gaussian source (for -problem 1)", "ex35.cxx", user->nu, &user->nu, NULL));
590: PetscCall(PetscOptionsBool("-mg", "Use multigrid preconditioner", "ex35.cxx", user->usemg, &user->usemg, NULL));
591: PetscCall(PetscOptionsBool("-io", "Write out the solution and mesh data", "ex35.cxx", user->io, &user->io, NULL));
592: PetscCall(PetscOptionsBool("-tri", "Use triangles to discretize the domain", "ex35.cxx", user->usetri, &user->usetri, NULL));
593: PetscCall(PetscOptionsBool("-error", "Compute the discrete L_2 and L_inf errors of the solution", "ex35.cxx", user->error, &user->error, NULL));
594: PetscCall(PetscOptionsEList("-bc", "Type of boundary condition", "ex35.cxx", bcTypes, 2, bcTypes[0], &bc, NULL));
595: PetscCall(PetscOptionsString("-file", "The mesh file for the problem", "ex35.cxx", "", user->filename, sizeof(user->filename), &user->use_extfile));
596: PetscOptionsEnd();
598: if (user->problem < 1 || user->problem > 3) user->problem = 1;
599: user->bcType = (BCType)bc;
600: user->VPERE = (user->usetri ? 3 : 4);
601: if (user->problem == 3) {
602: user->bounds[0] = user->bounds[2] = -0.5;
603: user->bounds[1] = user->bounds[3] = 0.5;
604: user->bounds[4] = user->bounds[5] = 0.5;
605: }
606: PetscFunctionReturn(PETSC_SUCCESS);
607: }
/*TEST

   build:
      requires: moab

   test:
      args: -levels 0 -nu .01 -n 10 -ksp_type cg -pc_type sor -ksp_converged_reason

   test:
      suffix: 2
      nsize: 2
      requires: hdf5
      args: -levels 3 -nu .01 -n 2 -mg -ksp_converged_reason

   test:
      suffix: 3
      nsize: 2
      requires: hdf5
      args: -problem 3 -file data/ex35_mesh.h5m -mg -levels 1 -ksp_converged_reason
      localrunfiles: data

TEST*/