/* packm.c */
1: #include <../src/dm/impls/composite/packimpl.h>
3: static PetscErrorCode DMCreateMatrix_Composite_Nest(DM dm, Mat *J)
4: {
5: const DM_Composite *com = (DM_Composite *)dm->data;
6: const struct DMCompositeLink *rlink, *clink;
7: IS *isg;
8: Mat *submats;
9: PetscInt i, j, n;
11: PetscFunctionBegin;
12: n = com->nDM; /* Total number of entries */
14: /* Explicit index sets are not required for MatCreateNest, but getting them here allows MatNest to do consistency
15: * checking and allows ISEqual to compare by identity instead of by contents. */
16: PetscCall(DMCompositeGetGlobalISs(dm, &isg));
18: /* Get submatrices */
19: PetscCall(PetscMalloc1(n * n, &submats));
20: for (i = 0, rlink = com->next; rlink; i++, rlink = rlink->next) {
21: for (j = 0, clink = com->next; clink; j++, clink = clink->next) {
22: Mat sub = NULL;
23: if (i == j) PetscCall(DMCreateMatrix(rlink->dm, &sub));
24: else PetscCheck(!com->FormCoupleLocations, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Cannot manage off-diagonal parts yet");
25: submats[i * n + j] = sub;
26: }
27: }
29: PetscCall(MatCreateNest(PetscObjectComm((PetscObject)dm), n, isg, n, isg, submats, J));
31: /* Disown references */
32: for (i = 0; i < n; i++) PetscCall(ISDestroy(&isg[i]));
33: PetscCall(PetscFree(isg));
35: for (i = 0; i < n * n; i++) PetscCall(MatDestroy(&submats[i]));
36: PetscCall(PetscFree(submats));
37: PetscFunctionReturn(PETSC_SUCCESS);
38: }
/* Assemble the DMComposite Jacobian as a single monolithic (Seq/MPI)AIJ matrix.

   Strategy: two identical traversals of the packed sub-DMs. The first pass
   creates a temporary AIJ matrix per sub-DM, queries each row's nonzeros, and
   remaps column indices into the composite global numbering to compute the
   diagonal/off-diagonal preallocation counts. The second pass repeats the
   traversal and inserts the (zero) values so the final matrix has its full
   nonzero pattern assembled. */
static PetscErrorCode DMCreateMatrix_Composite_AIJ(DM dm, Mat *J)
{
  DM_Composite           *com = (DM_Composite *)dm->data;
  struct DMCompositeLink *next; /* iterator over the linked list of packed DMs */
  PetscInt                m, *dnz, *onz, i, j, mA;
  Mat                     Atmp; /* per-sub-DM matrix used only to query its structure */
  PetscMPIInt             rank; /* NOTE(review): set below but never read afterwards — confirm it can be removed */
  PetscBool               dense = PETSC_FALSE;

  PetscFunctionBegin;
  /* use global vector to determine layout needed for matrix */
  m = com->n; /* number of locally owned rows/columns of the composite */

  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, m, m, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(*J, dm->mattype));

  /*
     Extremely inefficient but will compute entire Jacobian for testing
  */
  PetscCall(PetscOptionsGetBool(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dmcomposite_dense_jacobian", &dense, NULL));
  if (dense) {
    /* Debug path: preallocate every owned row for all global columns and fill
       it with explicit zeros, producing a fully dense nonzero pattern. */
    PetscInt     rstart, rend, *indices;
    PetscScalar *values;

    mA = com->N; /* global size: each row gets all N column entries */
    PetscCall(MatMPIAIJSetPreallocation(*J, mA, NULL, mA - m, NULL));
    PetscCall(MatSeqAIJSetPreallocation(*J, mA, NULL));

    PetscCall(MatGetOwnershipRange(*J, &rstart, &rend));
    PetscCall(PetscMalloc2(mA, &values, mA, &indices));
    PetscCall(PetscArrayzero(values, mA));
    for (i = 0; i < mA; i++) indices[i] = i;
    /* Insert a full row of zeros for every locally owned row */
    for (i = rstart; i < rend; i++) PetscCall(MatSetValues(*J, 1, &i, mA, indices, values, INSERT_VALUES));
    PetscCall(PetscFree2(values, indices));
    PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  if (dm->prealloc_skip) PetscFunctionReturn(PETSC_SUCCESS); /* caller asked for no preallocation work */
  /* MatPreallocateBegin allocates dnz/onz and (as a macro) also declares the
     helper variables __rstart, __nrows, __start, __end referenced below. */
  MatPreallocateBegin(PetscObjectComm((PetscObject)dm), m, m, dnz, onz);
  /* loop over packed objects, handling one at a time */
  next = com->next;
  while (next) {
    PetscInt        nc, rstart, *ccols, maxnc;
    const PetscInt *cols, *rstarts;
    PetscMPIInt     proc;
    MatType         tmp, mat_type_old;

    /* force AIJ matrix to allow queries for preallocation */
    PetscCall(DMGetMatType(next->dm, &tmp));
    /* copy the type name: DMSetMatType below may invalidate the string tmp points at */
    PetscCall(PetscStrallocpy(tmp, (char **)&mat_type_old));
    PetscCall(DMSetMatType(next->dm, MATAIJ));
    PetscCall(DMCreateMatrix(next->dm, &Atmp));
    PetscCall(MatGetOwnershipRange(Atmp, &rstart, NULL));
    PetscCall(MatGetOwnershipRanges(Atmp, &rstarts));
    PetscCall(MatGetLocalSize(Atmp, &mA, NULL));

    /* First sweep: find the widest row so one scratch buffer suffices */
    maxnc = 0;
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, NULL, NULL));
      maxnc = PetscMax(nc, maxnc);
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, NULL, NULL));
    }
    PetscCall(PetscMalloc1(maxnc, &ccols));
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, &cols, NULL));
      /* remap the columns taking into how much they are shifted on each process */
      for (j = 0; j < nc; j++) {
        /* find the rank that owns column cols[j] in the sub-matrix layout */
        proc = 0;
        while (cols[j] >= rstarts[proc + 1]) proc++;
        /* shift from the sub-DM's numbering to the composite global numbering;
           grstarts[proc] is presumably the sub-DM's global offset on that rank — confirm in packimpl.h */
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      PetscCall(MatPreallocateSet(com->rstart + next->rstart + i, nc, ccols, dnz, onz));
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, &cols, NULL));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    /* restore the sub-DM's original matrix type */
    PetscCall(DMSetMatType(next->dm, mat_type_old));
    PetscCall(PetscFree(mat_type_old));
    next = next->next;
  }
  /* __rstart, __nrows, __start, __end come from the MatPreallocateBegin macro above */
  if (com->FormCoupleLocations) PetscCall((*com->FormCoupleLocations)(dm, NULL, dnz, onz, __rstart, __nrows, __start, __end));
  PetscCall(MatMPIAIJSetPreallocation(*J, 0, dnz, 0, onz));
  PetscCall(MatSeqAIJSetPreallocation(*J, 0, dnz));
  MatPreallocateEnd(dnz, onz);

  if (dm->prealloc_only) PetscFunctionReturn(PETSC_SUCCESS); /* pattern counted; no values inserted */

  /* Second pass: same traversal as above, but now insert the (zero) values so
     the assembled matrix carries the complete nonzero structure. */
  next = com->next;
  while (next) {
    PetscInt           nc, rstart, row, maxnc, *ccols;
    const PetscInt    *cols, *rstarts;
    const PetscScalar *values;
    PetscMPIInt        proc;
    MatType            tmp, mat_type_old;

    /* force AIJ matrix to allow queries for zeroing initial matrix */
    PetscCall(DMGetMatType(next->dm, &tmp));
    PetscCall(PetscStrallocpy(tmp, (char **)&mat_type_old));
    PetscCall(DMSetMatType(next->dm, MATAIJ));
    PetscCall(DMCreateMatrix(next->dm, &Atmp));
    PetscCall(MatGetOwnershipRange(Atmp, &rstart, NULL));
    PetscCall(MatGetOwnershipRanges(Atmp, &rstarts));
    PetscCall(MatGetLocalSize(Atmp, &mA, NULL));
    maxnc = 0;
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, NULL, NULL));
      maxnc = PetscMax(nc, maxnc);
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, NULL, NULL));
    }
    PetscCall(PetscMalloc1(maxnc, &ccols));
    for (i = 0; i < mA; i++) {
      PetscCall(MatGetRow(Atmp, rstart + i, &nc, &cols, &values));
      /* same column remap as in the preallocation pass */
      for (j = 0; j < nc; j++) {
        proc = 0;
        while (cols[j] >= rstarts[proc + 1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      row = com->rstart + next->rstart + i;
      PetscCall(MatSetValues(*J, 1, &row, nc, ccols, values, INSERT_VALUES));
      PetscCall(MatRestoreRow(Atmp, rstart + i, &nc, &cols, &values));
    }
    PetscCall(PetscFree(ccols));
    PetscCall(MatDestroy(&Atmp));
    PetscCall(DMSetMatType(next->dm, mat_type_old));
    PetscCall(PetscFree(mat_type_old));
    next = next->next;
  }
  if (com->FormCoupleLocations) {
    PetscInt __rstart; /* local shadow: pass the Jacobian's row start this time */
    PetscCall(MatGetOwnershipRange(*J, &__rstart, NULL));
    PetscCall((*com->FormCoupleLocations)(dm, *J, NULL, NULL, __rstart, 0, 0, 0));
  }
  PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
181: PetscErrorCode DMCreateMatrix_Composite(DM dm, Mat *J)
182: {
183: PetscBool usenest;
184: ISLocalToGlobalMapping ltogmap;
186: PetscFunctionBegin;
187: PetscCall(DMSetFromOptions(dm));
188: PetscCall(DMSetUp(dm));
189: PetscCall(PetscStrcmp(dm->mattype, MATNEST, &usenest));
190: if (usenest) PetscCall(DMCreateMatrix_Composite_Nest(dm, J));
191: else PetscCall(DMCreateMatrix_Composite_AIJ(dm, J));
193: PetscCall(DMGetLocalToGlobalMapping(dm, <ogmap));
194: PetscCall(MatSetLocalToGlobalMapping(*J, ltogmap, ltogmap));
195: PetscCall(MatSetDM(*J, dm));
196: PetscFunctionReturn(PETSC_SUCCESS);
197: }