Actual source code: packm.c
#include <../src/dm/impls/composite/packimpl.h>

static PetscErrorCode DMCreateMatrix_Composite_Nest(DM dm, Mat *J)
{
  const DM_Composite           *com = (DM_Composite *)dm->data;
  const struct DMCompositeLink *rlink, *clink;
  IS                           *isg;
  Mat                          *submats;
  PetscInt                      i, j, n;

  PetscFunctionBegin;
  n = com->nDM; /* Total number of entries */

  /* Explicit index sets are not required for MatCreateNest, but getting them here allows MatNest to do consistency
   * checking and allows ISEqual to compare by identity instead of by contents. */
  PetscCall(DMCompositeGetGlobalISs(dm, &isg));

  /* Get submatrices */
  PetscCall(PetscMalloc1(n * n, &submats));
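  /* Only the diagonal blocks are created below; off-diagonal entries are left NULL, which MatNest treats as zero blocks */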
  for (i = 0, rlink = com->next; rlink; i++, rlink = rlink->next) {
    for (j = 0, clink = com->next; clink; j++, clink = clink->next) {
      Mat sub = NULL;
      if (i == j) PetscCall(DMCreateMatrix(rlink->dm, &sub));
      else PetscCheck(!com->FormCoupleLocations, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Cannot manage off-diagonal parts yet");
      submats[i * n + j] = sub;
    }
  }

  PetscCall(MatCreateNest(PetscObjectComm((PetscObject)dm), n, isg, n, isg, submats, J));

  /* Disown references */
  for (i = 0; i < n; i++) PetscCall(ISDestroy(&isg[i]));
  PetscCall(PetscFree(isg));

  for (i = 0; i < n * n; i++) {
    if (submats[i]) PetscCall(MatDestroy(&submats[i]));
  }
  PetscCall(PetscFree(submats));
  PetscFunctionReturn(PETSC_SUCCESS);
}
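
/* Monolithic (AIJ) path: preallocate the packed matrix from each sub-DM's matrix, then copy the sub-matrix entries into it */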
static PetscErrorCode DMCreateMatrix_Composite_AIJ(DM dm, Mat *J)
{
  DM_Composite           *com = (DM_Composite *)dm->data;
  struct DMCompositeLink *next;
  PetscInt                m, *dnz, *onz, i, j, mA;
  Mat                     Atmp;
  PetscMPIInt             rank;
  PetscBool               dense = PETSC_FALSE;

  PetscFunctionBegin;
  /* use global vector to determine layout needed for matrix */
  m = com->n;

  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, m, m, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(*J, dm->mattype));

  /*
     Extremely inefficient but will compute entire Jacobian for testing
  */
  PetscCall(PetscOptionsGetBool(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dmcomposite_dense_jacobian", &dense, NULL));
  if (dense) {
    PetscInt     rstart, rend, *indices;
    PetscScalar *values;

    mA = com->N;
    PetscCall(MatMPIAIJSetPreallocation(*J, mA, NULL, mA - m, NULL));
    PetscCall(MatSeqAIJSetPreallocation(*J, mA, NULL));

    PetscCall(MatGetOwnershipRange(*J, &rstart, &rend));
    PetscCall(PetscMalloc2(mA, &values, mA, &indices));
    PetscCall(PetscArrayzero(values, mA));
    for (i = 0; i < mA; i++) indices[i] = i;
    for (i = rstart; i < rend; i++) PetscCall(MatSetValues(*J, 1, &i, mA, indices, values, INSERT_VALUES));
    PetscCall(PetscFree2(values, indices));
    PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  MatPreallocateBegin(PetscObjectComm((PetscObject)dm), m, m, dnz, onz);
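  /* MatPreallocateBegin() is a macro: it allocates and zeroes dnz/onz and defines the __rstart, __nrows, __start, and __end
     variables that the FormCoupleLocations() call below relies on */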
  /* loop over packed objects, handling one at a time */
  next = com->next;
  while (next) {
    PetscInt        nc, rstart, *ccols, maxnc;
    const PetscInt *cols, *rstarts;
    PetscMPIInt     proc;

    PetscCall(DMCreateMatrix(next->dm, &Atmp));
    PetscCall(MatGetOwnershipRange(Atmp, &rstart, NULL));
    PetscCall(MatGetOwnershipRanges(Atmp, &rstarts));
    PetscCall(MatGetLocalSize(Atmp, &mA, NULL));
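
    /* first sweep over the rows just to find the widest one, so a single column buffer can be reused below */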
    maxnc = 0;
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, NULL, NULL));
      maxnc = PetscMax(nc, maxnc);
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, NULL, NULL));
    }
    PetscCall(PetscMalloc1(maxnc, &ccols));
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, &cols, NULL));
      /* remap the columns, taking into account how much they are shifted on each process */
      for (j = 0; j < nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc + 1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      PetscCall(MatPreallocateSet(com->rstart + next->rstart + i, nc, ccols, dnz, onz));
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, &cols, NULL));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    next = next->next;
  }
  if (com->FormCoupleLocations) PetscCall((*com->FormCoupleLocations)(dm, NULL, dnz, onz, __rstart, __nrows, __start, __end));
  PetscCall(MatMPIAIJSetPreallocation(*J, 0, dnz, 0, onz));
  PetscCall(MatSeqAIJSetPreallocation(*J, 0, dnz));
  MatPreallocateEnd(dnz, onz);

  if (dm->prealloc_only) PetscFunctionReturn(PETSC_SUCCESS);
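
  /* second pass: copy the values of each sub-DM's matrix into the preallocated packed matrix */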
  next = com->next;
  while (next) {
    PetscInt           nc, rstart, row, maxnc, *ccols;
    const PetscInt    *cols, *rstarts;
    const PetscScalar *values;
    PetscMPIInt        proc;

    PetscCall(DMCreateMatrix(next->dm, &Atmp));
    PetscCall(MatGetOwnershipRange(Atmp, &rstart, NULL));
    PetscCall(MatGetOwnershipRanges(Atmp, &rstarts));
    PetscCall(MatGetLocalSize(Atmp, &mA, NULL));
    maxnc = 0;
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, NULL, NULL));
      maxnc = PetscMax(nc, maxnc);
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, NULL, NULL));
    }
    PetscCall(PetscMalloc1(maxnc, &ccols));
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, &cols, &values));
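      /* remap the columns into the packed matrix's global numbering, exactly as in the preallocation pass above */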
      for (j = 0; j < nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc + 1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      row = com->rstart + next->rstart + i;
      PetscCall(MatSetValues(*J, 1, &row, nc, ccols, values, INSERT_VALUES));
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, &cols, &values));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    next = next->next;
  }
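  /* let the user-provided coupling callback (see DMCompositeSetCoupling()) insert the off-diagonal coupling values */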
  if (com->FormCoupleLocations) {
    PetscInt __rstart;
    PetscCall(MatGetOwnershipRange(*J, &__rstart, NULL));
    PetscCall((*com->FormCoupleLocations)(dm, *J, NULL, NULL, __rstart, 0, 0, 0));
  }
  PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMCreateMatrix_Composite(DM dm, Mat *J)
{
  PetscBool              usenest;
  ISLocalToGlobalMapping ltogmap;

  PetscFunctionBegin;
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMSetUp(dm));
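  /* dispatch on the DM's matrix type: MATNEST gets one block per sub-DM, anything else gets a monolithic AIJ-style matrix */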
  PetscCall(PetscStrcmp(dm->mattype, MATNEST, &usenest));
  if (usenest) PetscCall(DMCreateMatrix_Composite_Nest(dm, J));
  else PetscCall(DMCreateMatrix_Composite_AIJ(dm, J));

  PetscCall(DMGetLocalToGlobalMapping(dm, &ltogmap));
  PetscCall(MatSetLocalToGlobalMapping(*J, ltogmap, ltogmap));
  PetscCall(MatSetDM(*J, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
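
/*
   Minimal usage sketch (illustrative only, not part of this file): DMCreateMatrix_Composite() is reached through
   DMCreateMatrix() on a DMCOMPOSITE.  The sub-DMs dmA and dmB below are hypothetical and assumed to exist already.

     DM  pack;
     Mat J;

     PetscCall(DMCompositeCreate(PETSC_COMM_WORLD, &pack));
     PetscCall(DMCompositeAddDM(pack, dmA));
     PetscCall(DMCompositeAddDM(pack, dmB));
     PetscCall(DMSetMatType(pack, MATNEST));   // optional: request the MatNest path instead of the AIJ path
     PetscCall(DMCreateMatrix(pack, &J));      // lands in DMCreateMatrix_Composite(), which calls DMSetFromOptions()/DMSetUp() itself

   Running with -dmcomposite_dense_jacobian makes the AIJ path preallocate and fill every entry of J (testing only).
*/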