Actual source code: pcisimpl.h
#pragma once

#include <petsc/private/pcimpl.h>
#include <petsc/private/matisimpl.h>
#include <petscksp.h>

/*
   Context (data structure) common to all Iterative Substructuring preconditioners.
*/

typedef struct {
  /*
    In naming the variables, we adopted the following convention:
      * B - stands for interface nodes;
      * I - stands for interior nodes;
      * D - stands for Dirichlet (by extension, refers to interior
            nodes) and
      * N - stands for Neumann (by extension, refers to all local
            nodes, interior plus interface).
    In some cases, I or D would apply equally well (e.g. vec1_D).
  */

  PetscInt n;   /* number of nodes (interior+interface) in this subdomain */
  PetscInt n_B; /* number of interface nodes in this subdomain */
  IS       is_B_local, /* local (sequential) index sets for interface (B) and interior (I) nodes */
           is_I_local, is_B_global, is_I_global;

  Mat A_II, A_IB, /* local (sequential) submatrices */
      A_BI, A_BB;
  Mat pA_II;      /* A_II block of the preconditioning matrix */
  Vec D;          /* diagonal scaling "matrix" (stored as a vector, since it's diagonal) */
  KSP ksp_N,      /* linear solver contexts */
      ksp_D;
  Vec vec1_N,     /* local (sequential) work vectors */
      vec2_N, vec1_D, vec2_D, vec3_D, vec4_D, vec1_B, vec2_B, vec3_B, vec1_global;

  PetscScalar *work_N;      /* local (sequential) work array */
  VecScatter   N_to_D;      /* scattering context from all local nodes to local interior nodes */
  VecScatter   global_to_D; /* scattering context from global to local interior nodes */
  VecScatter   N_to_B;      /* scattering context from all local nodes to local interface nodes */
  VecScatter   global_to_B; /* scattering context from global to local interface nodes */
  PetscBool    pure_neumann;
  PetscScalar  scaling_factor;
  PetscBool    use_stiffness_scaling;

  ISLocalToGlobalMapping mapping; /* local-to-global numbering of the local (N) nodes */
  PetscInt   n_neigh;  /* number of neighbouring subdomains/MPI processes, INCLUDING the subdomain itself (should really be a PetscMPIInt) */
  PetscInt  *neigh;    /* list of neighbouring subdomains (MPI processes) */
  PetscInt  *n_shared; /* n_shared[j] is the number of nodes shared with subdomain neigh[j] */
  PetscInt **shared;   /* shared[j][i] is the local index of the i-th node shared with subdomain neigh[j] */
  /*
    The numbering of the nodes shared by two subdomains must be
    consistent from both sides. For instance:

       +-------+-------+
       |   k   |   l   |   subdomains k and l are neighbours
       +-------+-------+

    Let i and j be such that proc[k].neigh[i] == l and
    proc[l].neigh[j] == k.

    We need:
      proc[k].loc_to_glob(proc[k].shared[i][m]) == proc[l].loc_to_glob(proc[l].shared[j][m])
    for all 0 <= m < proc[k].n_shared[i], or equivalently, for all 0 <= m < proc[l].n_shared[j]
    (see the diagnostic sketch after this struct).
  */
  ISLocalToGlobalMapping BtoNmap;          /* maps interface (B) local numbering into subdomain-local (N) numbering */
  PetscBool              reusesubmatrices; /* reuse the local submatrices on subsequent calls to PCSetUp() */
} PC_IS;
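
The submatrices A_II, A_IB, A_BI, A_BB together with ksp_D are the pieces from which iterative substructuring methods build the interface Schur complement S = A_BB - A_BI * inv(A_II) * A_IB. The following is a minimal sketch, not a PETSc routine, of how such an action can be composed from the fields above. It assumes a recent PETSc (PetscCall, PETSC_SUCCESS), that this header is included, and that ksp_D has been set up to solve with A_II; the function name is made up for illustration.

#include <petsc/private/pcisimpl.h> /* the header shown above */

/* Hypothetical sketch of the interface Schur complement action
   Sv = (A_BB - A_BI * inv(A_II) * A_IB) v, built from the PC_IS blocks */
static PetscErrorCode PCISSchurApplySketch(PC pc, Vec v_B, Vec Sv_B)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  PetscCall(MatMult(pcis->A_BB, v_B, Sv_B));                    /* A_BB v                */
  PetscCall(MatMult(pcis->A_IB, v_B, pcis->vec1_D));            /* A_IB v                */
  PetscCall(KSPSolve(pcis->ksp_D, pcis->vec1_D, pcis->vec2_D)); /* inv(A_II) A_IB v      */
  PetscCall(MatMult(pcis->A_BI, pcis->vec2_D, pcis->vec1_B));   /* A_BI inv(A_II) A_IB v */
  PetscCall(VecAXPY(Sv_B, -1.0, pcis->vec1_B));                 /* subtract from A_BB v  */
  PetscFunctionReturn(PETSC_SUCCESS);
}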
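
The VecScatter contexts encode the moves between the global vector and the local index spaces: all local nodes (N), interface nodes (B), and interior nodes (D). Below is another hypothetical sketch, under the same assumptions as above, of the typical pattern: restrict a global vector to the interior nodes with global_to_D, solve the local Dirichlet problem, and scatter the result back. INSERT_VALUES on the reverse scatter relies on each interior node belonging to exactly one subdomain.

/* Hypothetical helper: interior (Dirichlet) solve driven by global_to_D and ksp_D */
static PetscErrorCode PCISInteriorSolveSketch(PC pc, Vec x, Vec y)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  /* global -> local interior nodes */
  PetscCall(VecScatterBegin(pcis->global_to_D, x, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_D, x, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
  /* interior solve with A_II */
  PetscCall(KSPSolve(pcis->ksp_D, pcis->vec1_D, pcis->vec2_D));
  /* local interior nodes -> global; INSERT suffices since interior nodes are not shared */
  PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, y, INSERT_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}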
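
The ordering condition on shared[][] stated in the comment inside the struct can be checked at runtime by mapping each shared list to global numbering with the stored mapping and exchanging it with the corresponding neighbour. A hypothetical diagnostic sketch, under the same assumptions as above, also assuming that both sides of a neighbour pair list the same number of shared nodes (which the condition itself presupposes):

/* Hypothetical diagnostic: verify that
   proc[k].loc_to_glob(shared[i][m]) == proc[l].loc_to_glob(shared[j][m]) */
static PetscErrorCode PCISCheckSharedOrderingSketch(PC pc)
{
  PC_IS      *pcis = (PC_IS *)pc->data;
  MPI_Comm    comm;
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  for (PetscInt i = 0; i < pcis->n_neigh; i++) {
    PetscInt   *mine, *theirs;
    PetscMPIInt other, count;

    if (pcis->neigh[i] == rank) continue; /* skip the list for the subdomain itself */
    PetscCall(PetscMPIIntCast(pcis->neigh[i], &other));
    PetscCall(PetscMPIIntCast(pcis->n_shared[i], &count));
    PetscCall(PetscMalloc2(count, &mine, count, &theirs));
    /* my local shared indices with this neighbour, in global numbering */
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, count, pcis->shared[i], mine));
    /* both sides share the same number of nodes, so the exchange is symmetric */
    PetscCallMPI(MPI_Sendrecv(mine, count, MPIU_INT, other, 0, theirs, count, MPIU_INT, other, 0, comm, MPI_STATUS_IGNORE));
    for (PetscMPIInt m = 0; m < count; m++) PetscCheck(mine[m] == theirs[m], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Shared node %d ordered inconsistently with neighbour %d", (int)m, (int)other);
    PetscCall(PetscFree2(mine, theirs));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}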