Actual source code: ex36.cxx
/*
Inhomogeneous Laplacian in 3-D. Modeled by the partial differential equation

   -div \rho grad u + \alpha u = f,  0 < x,y,z < 1,

Problem 1: (Default)

  Use the exact solution

     u = sin(pi*x)*sin(pi*y)*sin(pi*z)

  with Dirichlet boundary conditions

     u = f(x,y,z) for x = 0, x = 1, y = 0, y = 1, z = 0, z = 1

  and \rho = 1.0, \alpha = 5.0 uniformly in the domain (matching
  ComputeReactionCoefficient() below), and use the corresponding
  manufactured forcing function to measure convergence.
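
  For reference, substituting this solution into the PDE (with \rho = 1)
  yields the manufactured forcing evaluated in ForcingFunction() below:

     f = (3*pi^2 + \alpha) * sin(pi*x)*sin(pi*y)*sin(pi*z)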

Usage:

  Measure convergence rate with uniform refinement with the options: "-problem 1 -error".

    mpiexec -n $NP ./ex36 -problem 1 -error -n 4 -levels 5 -pc_type gamg
    mpiexec -n $NP ./ex36 -problem 1 -error -n 8 -levels 4 -pc_type gamg
    mpiexec -n $NP ./ex36 -problem 1 -error -n 16 -levels 3 -pc_type mg
    mpiexec -n $NP ./ex36 -problem 1 -error -n 32 -levels 2 -pc_type mg
    mpiexec -n $NP ./ex36 -problem 1 -error -n 64 -levels 1 -mg
    mpiexec -n $NP ./ex36 -problem 1 -error -n 128 -levels 0 -mg

  Or with an external mesh file representing [0, 1]^3,

    mpiexec -n $NP ./ex36 -problem 1 -file ./external_mesh.h5m -levels 2 -error -pc_type mg

Problem 2:

  Use the forcing function

     f = e^{-((x-xr)^2+(y-yr)^2+(z-zr)^2)/\nu}

  with Dirichlet boundary conditions

     u = f(x,y,z) for x = 0, x = 1, y = 0, y = 1, z = 0, z = 1

  or pure Neumann boundary conditions.
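
  In this problem the coefficients are discontinuous across the embedded cube
  (1/3, 2/3)^3 (see ComputeDiffusionCoefficient() and ComputeReactionCoefficient()):

     \rho(x)   = (value of -rho) inside the cube, 1.0 otherwise
     \alpha(x) = 10.0            inside the cube, 0.0 otherwise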

Usage:

  Run with different values of \rho and \nu (problem 2) to control the diffusion and the Gaussian source spread. This uses the internal mesh generator implemented in DMMoab.

    mpiexec -n $NP ./ex36 -problem 2 -n 20 -nu 0.02 -rho 0.01
    mpiexec -n $NP ./ex36 -problem 2 -n 40 -nu 0.01 -rho 0.005 -io -ksp_monitor
    mpiexec -n $NP ./ex36 -problem 2 -n 80 -nu 0.01 -rho 0.005 -io -ksp_monitor -pc_type gamg
    mpiexec -n $NP ./ex36 -problem 2 -n 160 -bc neumann -nu 0.005 -rho 0.01 -io
    mpiexec -n $NP ./ex36 -problem 2 -n 320 -bc neumann -nu 0.001 -rho 1 -io

  Or with an external mesh file representing [0, 1]^3,

    mpiexec -n $NP ./ex36 -problem 2 -file ./external_mesh.h5m -levels 1 -pc_type gamg

*/

static char help[] = "\
Solves a three dimensional inhomogeneous Laplacian equation with a Gaussian source.\n \
Usage: ./ex36 -bc dirichlet -nu .01 -n 10\n";

/* PETSc includes */
#include <petscksp.h>
#include <petscdmmoab.h>

typedef enum {
  DIRICHLET,
  NEUMANN
} BCType;

typedef struct {
  PetscInt  problem, dim, n, nlevels;
  PetscReal rho;
  PetscReal bounds[6];
  PetscReal xyzref[3];
  PetscReal nu;
  BCType    bcType;
  char      filename[PETSC_MAX_PATH_LEN];
  PetscBool use_extfile, io, error, usetet, usemg;

  /* Discretization parameters */
  int VPERE;
} UserContext;

static PetscErrorCode ComputeMatrix_MOAB(KSP, Mat, Mat, void *);
static PetscErrorCode ComputeRHS_MOAB(KSP, Vec, void *);
static PetscErrorCode ComputeDiscreteL2Error(KSP ksp, Vec err, UserContext *user);
static PetscErrorCode InitializeOptions(UserContext *user);

int main(int argc, char **argv)
{
  const char *fields[1] = {"T-Variable"};
  DM          dm, dmref, *dmhierarchy;
  UserContext user;
  PetscInt    k;
  KSP         ksp;
  PC          pc;
  Vec         errv;
  Mat         R;
  Vec         b, x;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, nullptr, help));

  PetscCall(InitializeOptions(&user));

  /* Create the DM object from either a mesh file or from an in-memory structured grid */
  if (user.use_extfile) {
    PetscCall(DMMoabLoadFromFile(PETSC_COMM_WORLD, user.dim, 1, user.filename, "", &dm));
  } else {
    PetscCall(DMMoabCreateBoxMesh(PETSC_COMM_WORLD, user.dim, user.usetet, NULL, user.n, 1, &dm));
  }
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMMoabSetFieldNames(dm, 1, fields));

  /* SetUp the data structures for DMMOAB */
  PetscCall(DMSetUp(dm));

  PetscCall(DMSetApplicationContext(dm, &user));

  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
  PetscCall(KSPSetComputeRHS(ksp, ComputeRHS_MOAB, &user));
  PetscCall(KSPSetComputeOperators(ksp, ComputeMatrix_MOAB, &user));

  if (user.nlevels) {
    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PetscMalloc(sizeof(DM) * (user.nlevels + 1), &dmhierarchy));
    for (k = 0; k <= user.nlevels; k++) dmhierarchy[k] = NULL;

    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of mesh hierarchy levels: %" PetscInt_FMT "\n", user.nlevels));
    PetscCall(DMMoabGenerateHierarchy(dm, user.nlevels, NULL));

    // coarsest grid = 0
    // finest grid = nlevels
    dmhierarchy[0] = dm;
    PetscBool usehierarchy = PETSC_FALSE;
    if (usehierarchy) {
      PetscCall(DMRefineHierarchy(dm, user.nlevels, &dmhierarchy[1]));
    } else {
      for (k = 1; k <= user.nlevels; k++) PetscCall(DMRefine(dmhierarchy[k - 1], MPI_COMM_NULL, &dmhierarchy[k]));
    }
    dmref = dmhierarchy[user.nlevels];
    PetscCall(PetscObjectReference((PetscObject)dmref));

    if (user.usemg) {
      PetscCall(PCSetType(pc, PCMG));
      PetscCall(PCMGSetLevels(pc, user.nlevels + 1, NULL));
      PetscCall(PCMGSetType(pc, PC_MG_MULTIPLICATIVE));
      PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH));
      PetscCall(PCMGSetCycleType(pc, PC_MG_CYCLE_V));
      PetscCall(PCMGSetNumberSmooth(pc, 2));
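
      /* Build the interlevel transfer operators: for each consecutive pair of DMs
         (coarse k-1, fine k), DMCreateInterpolation() returns the prolongation
         matrix R, which PCMG uses for interpolation; since no separate restriction
         operator is set, PCMG applies its transpose for restriction. */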
      for (k = 1; k <= user.nlevels; k++) {
        PetscCall(DMCreateInterpolation(dmhierarchy[k - 1], dmhierarchy[k], &R, NULL));
        PetscCall(PCMGSetInterpolation(pc, k, R));
        PetscCall(MatDestroy(&R));
      }
    }

    for (k = 1; k <= user.nlevels; k++) PetscCall(DMDestroy(&dmhierarchy[k]));
    PetscCall(PetscFree(dmhierarchy));
  } else {
    dmref = dm;
    PetscCall(PetscObjectReference((PetscObject)dm));
  }

  PetscCall(KSPSetDM(ksp, dmref));
  PetscCall(KSPSetFromOptions(ksp));

  /* Perform the actual solve */
  PetscCall(KSPSolve(ksp, NULL, NULL));
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &b));

  if (user.error) {
    PetscCall(VecDuplicate(b, &errv));
    PetscCall(ComputeDiscreteL2Error(ksp, errv, &user));
    PetscCall(VecDestroy(&errv));
  }

  if (user.io) {
    /* Write out the solution along with the mesh */
    PetscCall(DMMoabSetGlobalFieldVector(dmref, x));
#ifdef MOAB_HAVE_HDF5
    PetscCall(DMMoabOutput(dmref, "ex36.h5m", ""));
#else
    /* MOAB does not support true parallel writers that aren't HDF5 based,
       so if you are using VTK as the output format in parallel,
       the data could be jumbled due to the order in which the processors
       write out their parts of the mesh and solution tags
    */
    PetscCall(DMMoabOutput(dmref, "ex36.vtk", ""));
#endif
  }

  /* Cleanup objects */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(DMDestroy(&dmref));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

PetscReal ComputeDiffusionCoefficient(PetscReal coords[3], UserContext *user)
{
  if (user->problem == 2) {
    if ((coords[0] > 1.0 / 3.0) && (coords[0] < 2.0 / 3.0) && (coords[1] > 1.0 / 3.0) && (coords[1] < 2.0 / 3.0) && (coords[2] > 1.0 / 3.0) && (coords[2] < 2.0 / 3.0)) return user->rho;
    else return 1.0;
  } else return 1.0; /* problem = 1 */
}

PetscReal ComputeReactionCoefficient(PetscReal coords[3], UserContext *user)
{
  if (user->problem == 2) {
    if ((coords[0] > 1.0 / 3.0) && (coords[0] < 2.0 / 3.0) && (coords[1] > 1.0 / 3.0) && (coords[1] < 2.0 / 3.0) && (coords[2] > 1.0 / 3.0) && (coords[2] < 2.0 / 3.0)) return 10.0;
    else return 0.0;
  } else return 5.0; /* problem = 1 */
}

double ExactSolution(PetscReal coords[3], UserContext *user)
{
  if (user && user->problem == 2) {
    const PetscScalar xx = (coords[0] - user->xyzref[0]) * (coords[0] - user->xyzref[0]);
    const PetscScalar yy = (coords[1] - user->xyzref[1]) * (coords[1] - user->xyzref[1]);
    const PetscScalar zz = (coords[2] - user->xyzref[2]) * (coords[2] - user->xyzref[2]);
    return PetscExpScalar(-(xx + yy + zz) / user->nu);
  } else return sin(PETSC_PI * coords[0]) * sin(PETSC_PI * coords[1]) * sin(PETSC_PI * coords[2]);
}

/* Convenience wrapper: a NULL user context selects the problem-1 solution */
PetscReal exact_solution(PetscReal x, PetscReal y, PetscReal z)
{
  PetscReal coords[3] = {x, y, z};
  return ExactSolution(coords, NULL);
}

double ForcingFunction(PetscReal coords[3], UserContext *user)
{
  const PetscReal exact = ExactSolution(coords, user);
  if (user->problem == 2) {
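    /* A Gaussian-profile source term; because \rho and \alpha are discontinuous
       in problem 2, this does not manufacture ExactSolution() as the true solution */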
    const PetscReal duxyz = ((coords[0] - user->xyzref[0]) + (coords[1] - user->xyzref[1]) + (coords[2] - user->xyzref[2]));
    return (4.0 / user->nu * duxyz * duxyz - 6.0) * exact / user->nu;
  } else {
    /* Manufactured forcing for problem 1: f = (3*pi^2 + alpha) * u */
    const PetscReal reac = ComputeReactionCoefficient(coords, user);
    return (3.0 * PETSC_PI * PETSC_PI + reac) * exact;
  }
}

PetscErrorCode ComputeRHS_MOAB(KSP ksp, Vec b, void *ptr)
{
  UserContext              *user = (UserContext *)ptr;
  DM                        dm;
  PetscInt                  dof_indices[8], nc, npoints;
  PetscBool                 dbdry[8];
  PetscReal                 vpos[8 * 3];
  PetscInt                  i, q, nconn;
  const moab::EntityHandle *connect;
  const moab::Range        *elocal;
  moab::Interface          *mbImpl;
  PetscScalar               localv[8];
  PetscReal                *phi, *phypts, *jxw;
  PetscBool                 elem_on_boundary;
  PetscQuadrature           quadratureObj;

  PetscFunctionBegin;
  PetscCall(KSPGetDM(ksp, &dm));

  /* reset the RHS */
  PetscCall(VecSet(b, 0.0));

  PetscCall(DMMoabFEMCreateQuadratureDefault(user->dim, user->VPERE, &quadratureObj));
  PetscCall(PetscQuadratureGetData(quadratureObj, NULL, &nc, &npoints, NULL, NULL));
  PetscCall(PetscMalloc3(user->VPERE * npoints, &phi, npoints * 3, &phypts, npoints, &jxw));

  /* get the essential MOAB mesh related quantities needed for FEM assembly */
  PetscCall(DMMoabGetInterface(dm, &mbImpl));
  PetscCall(DMMoabGetLocalElements(dm, &elocal));

  /* loop over local elements */
  for (moab::Range::iterator iter = elocal->begin(); iter != elocal->end(); iter++) {
    const moab::EntityHandle ehandle = *iter;

    /* Get connectivity information: */
    PetscCall(DMMoabGetElementConnectivity(dm, ehandle, &nconn, &connect));
    PetscCheck(nconn == 4 || nconn == 8, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Only HEX8/TET4 element bases are supported in the current example. n(Connectivity)=%" PetscInt_FMT ".", nconn);

    /* get the coordinates of the element vertices */
    PetscCall(DMMoabGetVertexCoordinates(dm, nconn, connect, vpos));

    /* get the local DoF numbers to appropriately set the element contribution in the operator */
    PetscCall(DMMoabGetFieldDofsLocal(dm, nconn, connect, 0, dof_indices));

    /* compute the quadrature points transformed to the physical space and then
       compute the basis functions to compute local operators */
    PetscCall(DMMoabFEMComputeBasis(user->dim, nconn, vpos, quadratureObj, phypts, jxw, phi, NULL));

    PetscCall(PetscArrayzero(localv, nconn));
    /* Compute function over the locally owned part of the grid */
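    /* Element load vector from the weak form: b^e_i = sum_q w_q * phi_i(x_q) * f(x_q) */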
    for (q = 0; q < npoints; ++q) {
      const double   ff     = ForcingFunction(&phypts[3 * q], user);
      const PetscInt offset = q * nconn;

      for (i = 0; i < nconn; ++i) localv[i] += jxw[q] * phi[offset + i] * ff;
    }

    /* check if element is on the boundary */
    PetscCall(DMMoabIsEntityOnBoundary(dm, ehandle, &elem_on_boundary));

    /* apply dirichlet boundary conditions */
    if (elem_on_boundary && user->bcType == DIRICHLET) {
      /* get the list of nodes on boundary so that we can enforce dirichlet conditions strongly */
      PetscCall(DMMoabCheckBoundaryVertices(dm, nconn, connect, dbdry));

      for (i = 0; i < nconn; ++i) {
        if (dbdry[i]) { /* dirichlet node */
          /* think about strongly imposing dirichlet */
          localv[i] = ForcingFunction(&vpos[3 * i], user);
        }
      }
    }

    /* set the values directly into appropriate locations. Can alternately use VecSetValues */
    PetscCall(VecSetValuesLocal(b, nconn, dof_indices, localv, ADD_VALUES));
  }

  /* force the right-hand side to be consistent for a singular matrix
     (note this is really a hack; normally the model would provide a consistent right-hand side).
     This block is currently disabled by the "&& false" guard. */
  if (user->bcType == NEUMANN && false) {
    MatNullSpace nullspace;

    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
    PetscCall(MatNullSpaceRemove(nullspace, b));
    PetscCall(MatNullSpaceDestroy(&nullspace));
  }

  /* Restore vectors */
  PetscCall(VecAssemblyBegin(b));
  PetscCall(VecAssemblyEnd(b));
  PetscCall(PetscFree3(phi, phypts, jxw));
  PetscCall(PetscQuadratureDestroy(&quadratureObj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode ComputeMatrix_MOAB(KSP ksp, Mat J, Mat jac, void *ctx)
{
  UserContext              *user = (UserContext *)ctx;
  DM                        dm;
  PetscInt                  i, j, q, nconn, nglobale, nglobalv, nc, npoints, hlevel;
  PetscInt                  dof_indices[8];
  PetscReal                 vpos[8 * 3], rho, alpha;
  PetscBool                 dbdry[8];
  const moab::EntityHandle *connect;
  const moab::Range        *elocal;
  moab::Interface          *mbImpl;
  PetscBool                 elem_on_boundary;
  PetscScalar               array[8 * 8];
  PetscReal                *phi, *dphi[3], *phypts, *jxw;
  PetscQuadrature           quadratureObj;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp, &dm));

  /* get the essential MOAB mesh related quantities needed for FEM assembly */
  PetscCall(DMMoabGetInterface(dm, &mbImpl));
  PetscCall(DMMoabGetLocalElements(dm, &elocal));
  PetscCall(DMMoabGetSize(dm, &nglobale, &nglobalv));
  PetscCall(DMMoabGetHierarchyLevel(dm, &hlevel));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "ComputeMatrix: Level = %" PetscInt_FMT ", N(elements) = %" PetscInt_FMT ", N(vertices) = %" PetscInt_FMT "\n", hlevel, nglobale, nglobalv));

  PetscCall(DMMoabFEMCreateQuadratureDefault(user->dim, user->VPERE, &quadratureObj));
  PetscCall(PetscQuadratureGetData(quadratureObj, NULL, &nc, &npoints, NULL, NULL));
  PetscCall(PetscMalloc6(user->VPERE * npoints, &phi, user->VPERE * npoints, &dphi[0], user->VPERE * npoints, &dphi[1], user->VPERE * npoints, &dphi[2], npoints * 3, &phypts, npoints, &jxw));

  /* loop over local elements */
  for (moab::Range::iterator iter = elocal->begin(); iter != elocal->end(); iter++) {
    const moab::EntityHandle ehandle = *iter;

    /* Get connectivity information: */
    PetscCall(DMMoabGetElementConnectivity(dm, ehandle, &nconn, &connect));
    PetscCheck(nconn == 4 || nconn == 8, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Only HEX8/TET4 element bases are supported in the current example. n(Connectivity)=%" PetscInt_FMT ".", nconn);

    /* get the coordinates of the element vertices */
    PetscCall(DMMoabGetVertexCoordinates(dm, nconn, connect, vpos));

    /* get the local DoF numbers to appropriately set the element contribution in the operator */
    PetscCall(DMMoabGetFieldDofsLocal(dm, nconn, connect, 0, dof_indices));

    /* compute the quadrature points transformed to the physical space and
       compute the basis functions and the derivatives wrt x, y and z directions */
    PetscCall(DMMoabFEMComputeBasis(user->dim, nconn, vpos, quadratureObj, phypts, jxw, phi, dphi));

    PetscCall(PetscArrayzero(array, nconn * nconn));

    /* Compute function over the locally owned part of the grid */
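    /* Element matrix from the weak form:
         A^e_{ij} = sum_q w_q * [ rho(x_q) * grad(phi_i) . grad(phi_j) + alpha(x_q) * phi_i * phi_j ] */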
    for (q = 0; q < npoints; ++q) {
      /* compute the inhomogeneous diffusion coefficient at the quadrature point
         -- for large spatial variations, embed this property evaluation inside the quadrature loop */
      rho   = ComputeDiffusionCoefficient(&phypts[q * 3], user);
      alpha = ComputeReactionCoefficient(&phypts[q * 3], user);

      const PetscInt offset = q * nconn;

      for (i = 0; i < nconn; ++i) {
        for (j = 0; j < nconn; ++j) {
          array[i * nconn + j] += jxw[q] * (rho * (dphi[0][offset + i] * dphi[0][offset + j] + dphi[1][offset + i] * dphi[1][offset + j] + dphi[2][offset + i] * dphi[2][offset + j]) + alpha * (phi[offset + i] * phi[offset + j]));
        }
      }
    }

    /* check if element is on the boundary */
    PetscCall(DMMoabIsEntityOnBoundary(dm, ehandle, &elem_on_boundary));

    /* apply dirichlet boundary conditions */
    if (elem_on_boundary && user->bcType == DIRICHLET) {
      /* get the list of nodes on boundary so that we can enforce dirichlet conditions strongly */
      PetscCall(DMMoabCheckBoundaryVertices(dm, nconn, connect, dbdry));

      for (i = 0; i < nconn; ++i) {
        if (dbdry[i]) { /* dirichlet node */
          /* think about strongly imposing dirichlet */
          for (j = 0; j < nconn; ++j) {
            /* TODO: symmetrize the system - need the RHS */
            array[i * nconn + j] = 0.0;
          }
          array[i * nconn + i] = 1.0;
        }
      }
    }

    /* set the values directly into appropriate locations. Can alternately use MatSetValues */
    PetscCall(MatSetValuesLocal(jac, nconn, dof_indices, nconn, dof_indices, array, ADD_VALUES));
  }

  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));

  /* attach the constant null space for the singular Neumann operator;
     currently disabled by the "&& false" guard */
  if (user->bcType == NEUMANN && false) {
    MatNullSpace nullspace;

    PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace));
    PetscCall(MatSetNullSpace(jac, nullspace));
    PetscCall(MatNullSpaceDestroy(&nullspace));
  }
  PetscCall(PetscFree6(phi, dphi[0], dphi[1], dphi[2], phypts, jxw));
  PetscCall(PetscQuadratureDestroy(&quadratureObj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode ComputeDiscreteL2Error(KSP ksp, Vec err, UserContext *user)
{
  DM                 dm;
  Vec                sol;
  PetscReal          vpos[3];
  const PetscScalar *x;
  PetscScalar       *e;
  PetscReal          l2err = 0.0, linferr = 0.0, global_l2, global_linf;
  PetscInt           dof_index, N;
  const moab::Range *ownedvtx;

  PetscFunctionBegin;
  PetscCall(KSPGetDM(ksp, &dm));

  /* get the solution vector */
  PetscCall(KSPGetSolution(ksp, &sol));

  /* Get the internal reference to the vector arrays */
  PetscCall(VecGetArrayRead(sol, &x));
  PetscCall(VecGetSize(sol, &N));
  if (err) {
    /* reset the error vector */
    PetscCall(VecSet(err, 0.0));
    /* get array reference */
    PetscCall(VecGetArray(err, &e));
  }

  PetscCall(DMMoabGetLocalVertices(dm, &ownedvtx, NULL));
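
  /* Nodal error norms accumulated below:
       ||e||_2 ~ sqrt( sum_v (u(x_v) - u_h(x_v))^2 / N ),  ||e||_inf = max_v |u(x_v) - u_h(x_v)| */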
  /* Compute function over the locally owned part of the grid */
  for (moab::Range::iterator iter = ownedvtx->begin(); iter != ownedvtx->end(); iter++) {
    const moab::EntityHandle vhandle = *iter;
    PetscCall(DMMoabGetDofsBlockedLocal(dm, 1, &vhandle, &dof_index));

    /* compute the mid-point of the element and use a 1-point lumped quadrature */
    PetscCall(DMMoabGetVertexCoordinates(dm, 1, &vhandle, vpos));

    /* compute the discrete error against the exact solution */
    const PetscScalar lerr = (ExactSolution(vpos, user) - x[dof_index]);
    l2err += PetscRealPart(lerr * lerr);
    if (linferr < PetscAbsScalar(lerr)) linferr = PetscAbsScalar(lerr);

    if (err) {
      /* set the discrete error against the exact solution */
      e[dof_index] = lerr;
    }
  }

  PetscCallMPI(MPIU_Allreduce(&l2err, &global_l2, 1, MPIU_REAL, MPIU_SUM, PETSC_COMM_WORLD));
  PetscCallMPI(MPIU_Allreduce(&linferr, &global_linf, 1, MPIU_REAL, MPIU_MAX, PETSC_COMM_WORLD));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Computed Errors: L_2 = %f, L_inf = %f\n", (double)PetscSqrtReal(global_l2 / N), (double)global_linf));

  /* Restore vectors */
  PetscCall(VecRestoreArrayRead(sol, &x));
  if (err) PetscCall(VecRestoreArray(err, &e));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode InitializeOptions(UserContext *user)
{
  const char *bcTypes[2] = {"dirichlet", "neumann"};
  PetscInt    bc;

  PetscFunctionBegin;
  /* set default parameters */
  user->dim       = 3; /* 3-Dimensional problem */
  user->problem   = 1;
  user->n         = 2;
  user->nlevels   = 2;
  user->rho       = 0.1;
  user->bounds[0] = user->bounds[2] = user->bounds[4] = 0.0;
  user->bounds[1] = user->bounds[3] = user->bounds[5] = 1.0;
  user->xyzref[0] = user->bounds[1] / 2;
  user->xyzref[1] = user->bounds[3] / 2;
  user->xyzref[2] = user->bounds[5] / 2;
  user->nu        = 0.05;
  user->usemg     = PETSC_FALSE;
  user->io        = PETSC_FALSE;
  user->usetet    = PETSC_FALSE;
  user->error     = PETSC_FALSE;
  bc              = (PetscInt)DIRICHLET;

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "Options for the inhomogeneous Poisson equation", "ex36.cxx");
  PetscCall(PetscOptionsInt("-problem", "The type of problem being solved (controls forcing function)", "ex36.cxx", user->problem, &user->problem, NULL));
  PetscCall(PetscOptionsInt("-n", "The number of elements in each direction", "ex36.cxx", user->n, &user->n, NULL));
  PetscCall(PetscOptionsInt("-levels", "Number of levels in the multigrid hierarchy", "ex36.cxx", user->nlevels, &user->nlevels, NULL));
  PetscCall(PetscOptionsReal("-rho", "The conductivity", "ex36.cxx", user->rho, &user->rho, NULL));
  PetscCall(PetscOptionsReal("-x", "The domain size in x-direction", "ex36.cxx", user->bounds[1], &user->bounds[1], NULL));
  PetscCall(PetscOptionsReal("-y", "The domain size in y-direction", "ex36.cxx", user->bounds[3], &user->bounds[3], NULL));
  PetscCall(PetscOptionsReal("-z", "The domain size in z-direction", "ex36.cxx", user->bounds[5], &user->bounds[5], NULL));
  PetscCall(PetscOptionsReal("-xref", "The x-coordinate of the Gaussian center (for -problem 2)", "ex36.cxx", user->xyzref[0], &user->xyzref[0], NULL));
  PetscCall(PetscOptionsReal("-yref", "The y-coordinate of the Gaussian center (for -problem 2)", "ex36.cxx", user->xyzref[1], &user->xyzref[1], NULL));
  PetscCall(PetscOptionsReal("-zref", "The z-coordinate of the Gaussian center (for -problem 2)", "ex36.cxx", user->xyzref[2], &user->xyzref[2], NULL));
  PetscCall(PetscOptionsReal("-nu", "The width of the Gaussian source (for -problem 2)", "ex36.cxx", user->nu, &user->nu, NULL));
  PetscCall(PetscOptionsBool("-mg", "Use multigrid preconditioner", "ex36.cxx", user->usemg, &user->usemg, NULL));
  PetscCall(PetscOptionsBool("-io", "Write out the solution and mesh data", "ex36.cxx", user->io, &user->io, NULL));
  PetscCall(PetscOptionsBool("-tet", "Use tetrahedra to discretize the domain", "ex36.cxx", user->usetet, &user->usetet, NULL));
  PetscCall(PetscOptionsBool("-error", "Compute the discrete L_2 and L_inf errors of the solution", "ex36.cxx", user->error, &user->error, NULL));
  PetscCall(PetscOptionsEList("-bc", "Type of boundary condition", "ex36.cxx", bcTypes, 2, bcTypes[0], &bc, NULL));
  PetscCall(PetscOptionsString("-file", "The mesh file for the problem", "ex36.cxx", "", user->filename, sizeof(user->filename), &user->use_extfile));
  PetscOptionsEnd();

  if (user->problem < 1 || user->problem > 2) user->problem = 1;
  user->bcType = (BCType)bc;
  user->VPERE  = (user->usetet ? 4 : 8);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   build:
      requires: moab !complex

   test:
      args: -levels 1 -nu .01 -n 4 -mg -ksp_converged_reason

   test:
      suffix: 2
      nsize: 2
      requires: hdf5
      args: -levels 2 -nu .01 -n 2 -mg -ksp_converged_reason

TEST*/