Actual source code: htool.cxx
1: #include <../src/mat/impls/htool/htool.hpp>
2: #include <set>
4: const char *const MatHtoolCompressorTypes[] = {"sympartialACA", "fullACA", "SVD"};
5: const char *const MatHtoolClusteringTypes[] = {"PCARegular", "PCAGeometric", "BoundingBox1Regular", "BoundingBox1Geometric"};
6: const char HtoolCitation[] = "@article{marchand2020two,\n"
7: " Author = {Marchand, Pierre and Claeys, Xavier and Jolivet, Pierre and Nataf, Fr\\'ed\\'eric and Tournier, Pierre-Henri},\n"
8: " Title = {Two-level preconditioning for $h$-version boundary element approximation of hypersingular operator with {GenEO}},\n"
9: " Year = {2020},\n"
10: " Publisher = {Elsevier},\n"
11: " Journal = {Numerische Mathematik},\n"
12: " Volume = {146},\n"
13: " Pages = {597--628},\n"
14: " Url = {https://github.com/htool-ddm/htool}\n"
15: "}\n";
16: static PetscBool HtoolCite = PETSC_FALSE;
18: static PetscErrorCode MatGetDiagonal_Htool(Mat A, Vec v)
19: {
20: Mat_Htool *a;
21: PetscScalar *x;
22: PetscBool flg;
24: PetscFunctionBegin;
25: PetscCall(MatHasCongruentLayouts(A, &flg));
26: PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only congruent layouts supported");
27: PetscCall(MatShellGetContext(A, &a));
28: PetscCall(VecGetArrayWrite(v, &x));
29: PetscStackCallExternalVoid("copy_diagonal_in_user_numbering", htool::copy_diagonal_in_user_numbering(a->distributed_operator_holder->hmatrix, x));
30: PetscCall(VecRestoreArrayWrite(v, &x));
31: PetscFunctionReturn(PETSC_SUCCESS);
32: }
34: static PetscErrorCode MatGetDiagonalBlock_Htool(Mat A, Mat *b)
35: {
36: Mat_Htool *a;
37: Mat B;
38: PetscScalar *ptr, shift, scale;
39: PetscBool flg;
40: PetscMPIInt rank;
41: htool::Cluster<PetscReal> *source_cluster = nullptr;
43: PetscFunctionBegin;
44: PetscCall(MatHasCongruentLayouts(A, &flg));
45: PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only congruent layouts supported");
46: PetscCall(MatShellGetContext(A, &a));
47: PetscCall(PetscObjectQuery((PetscObject)A, "DiagonalBlock", (PetscObject *)&B)); /* same logic as in MatGetDiagonalBlock_MPIDense() */
48: if (!B) {
49: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
50: PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, A->rmap->n, A->rmap->n, A->rmap->n, nullptr, &B));
51: PetscCall(MatDenseGetArrayWrite(B, &ptr));
52: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
53: source_cluster = a->source_cluster ? a->source_cluster.get() : a->target_cluster.get();
54: PetscStackCallExternalVoid("copy_to_dense_in_user_numbering", htool::copy_to_dense_in_user_numbering(*a->distributed_operator_holder->hmatrix.get_sub_hmatrix(a->target_cluster->get_cluster_on_partition(rank), source_cluster->get_cluster_on_partition(rank)), ptr));
55: PetscCall(MatDenseRestoreArrayWrite(B, &ptr));
56: PetscCall(MatPropagateSymmetryOptions(A, B));
57: PetscCall(PetscObjectCompose((PetscObject)A, "DiagonalBlock", (PetscObject)B));
58: *b = B;
59: PetscCall(MatDestroy(&B));
60: PetscCall(MatShift(*b, shift));
61: PetscCall(MatScale(*b, scale));
62: } else {
63: PetscCall(MatShellGetScalingShifts(A, (PetscScalar *)MAT_SHELL_NOT_ALLOWED, (PetscScalar *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
64: *b = B;
65: }
66: PetscFunctionReturn(PETSC_SUCCESS);
67: }
69: static PetscErrorCode MatMult_Htool(Mat A, Vec x, Vec y)
70: {
71: Mat_Htool *a;
72: const PetscScalar *in;
73: PetscScalar *out;
75: PetscFunctionBegin;
76: PetscCall(MatShellGetContext(A, &a));
77: PetscCall(VecGetArrayRead(x, &in));
78: PetscCall(VecGetArrayWrite(y, &out));
79: a->distributed_operator_holder->distributed_operator.vector_product_local_to_local(in, out, nullptr);
80: PetscCall(VecRestoreArrayRead(x, &in));
81: PetscCall(VecRestoreArrayWrite(y, &out));
82: PetscFunctionReturn(PETSC_SUCCESS);
83: }
85: static PetscErrorCode MatMultTranspose_Htool(Mat A, Vec x, Vec y)
86: {
87: Mat_Htool *a;
88: const PetscScalar *in;
89: PetscScalar *out;
91: PetscFunctionBegin;
92: PetscCall(MatShellGetContext(A, &a));
93: PetscCall(VecGetArrayRead(x, &in));
94: PetscCall(VecGetArrayWrite(y, &out));
95: a->distributed_operator_holder->distributed_operator.vector_product_transp_local_to_local(in, out, nullptr);
96: PetscCall(VecRestoreArrayRead(x, &in));
97: PetscCall(VecRestoreArrayWrite(y, &out));
98: PetscFunctionReturn(PETSC_SUCCESS);
99: }
101: static PetscErrorCode MatIncreaseOverlap_Htool(Mat A, PetscInt is_max, IS is[], PetscInt ov)
102: {
103: std::set<PetscInt> set;
104: const PetscInt *idx;
105: PetscInt *oidx, size, bs[2];
106: PetscMPIInt csize;
108: PetscFunctionBegin;
109: PetscCall(MatGetBlockSizes(A, bs, bs + 1));
110: if (bs[0] != bs[1]) bs[0] = 1;
111: for (PetscInt i = 0; i < is_max; ++i) {
112: /* basic implementation that adds indices by shifting an IS by -ov, -ov+1..., -1, 1..., ov-1, ov */
113: /* needed so that subdomain matrices do not replicate A, which is dense */
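/* illustrative example (editor's addition, not in the original source): with ov = 1, bs = 1, and a serial IS holding {3, 7},
   the loop below produces the overlapping index set {2, 3, 4, 6, 7, 8}; indices are clipped to [0, min(A->rmap->N, A->cmap->N)) */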
114: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)is[i]), &csize));
115: PetscCheck(csize == 1, PETSC_COMM_SELF, PETSC_ERR_WRONG_MPI_SIZE, "Unsupported parallel IS");
116: PetscCall(ISGetSize(is[i], &size));
117: PetscCall(ISGetIndices(is[i], &idx));
118: for (PetscInt j = 0; j < size; ++j) {
119: set.insert(idx[j]);
120: for (PetscInt k = 1; k <= ov; ++k) { /* for each layer of overlap */
121: if (idx[j] - k >= 0) set.insert(idx[j] - k); /* do not insert negative indices */
122: if (idx[j] + k < A->rmap->N && idx[j] + k < A->cmap->N) set.insert(idx[j] + k); /* do not insert indices greater than the dimension of A */
123: }
124: }
125: PetscCall(ISRestoreIndices(is[i], &idx));
126: PetscCall(ISDestroy(is + i));
127: if (bs[0] > 1) {
128: for (std::set<PetscInt>::const_iterator it = set.cbegin(); it != set.cend(); it++) {
129: std::vector<PetscInt> block(bs[0]);
130: std::iota(block.begin(), block.end(), (*it / bs[0]) * bs[0]);
131: set.insert(block.cbegin(), block.cend());
132: }
133: }
134: size = set.size(); /* size with overlap */
135: PetscCall(PetscMalloc1(size, &oidx));
136: for (const PetscInt j : set) *oidx++ = j;
137: oidx -= size;
138: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, size, oidx, PETSC_OWN_POINTER, is + i));
139: }
140: PetscFunctionReturn(PETSC_SUCCESS);
141: }
143: static PetscErrorCode MatCreateSubMatrices_Htool(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
144: {
145: Mat_Htool *a;
146: Mat D, B, BT;
147: const PetscScalar *copy;
148: PetscScalar *ptr, shift, scale;
149: const PetscInt *idxr, *idxc, *it;
150: PetscInt nrow, m, i;
151: PetscBool flg;
153: PetscFunctionBegin;
154: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
155: PetscCall(MatShellGetContext(A, &a));
156: if (scall != MAT_REUSE_MATRIX) PetscCall(PetscCalloc1(n, submat));
157: for (i = 0; i < n; ++i) {
158: PetscCall(ISGetLocalSize(irow[i], &nrow));
159: PetscCall(ISGetLocalSize(icol[i], &m));
160: PetscCall(ISGetIndices(irow[i], &idxr));
161: PetscCall(ISGetIndices(icol[i], &idxc));
162: if (scall != MAT_REUSE_MATRIX) PetscCall(MatCreateDense(PETSC_COMM_SELF, nrow, m, nrow, m, nullptr, (*submat) + i));
163: PetscCall(MatDenseGetArrayWrite((*submat)[i], &ptr));
164: if (irow[i] == icol[i]) { /* same row and column IS? */
165: PetscCall(MatHasCongruentLayouts(A, &flg));
166: if (flg) {
167: PetscCall(ISSorted(irow[i], &flg));
168: if (flg) { /* sorted IS? */
169: it = std::lower_bound(idxr, idxr + nrow, A->rmap->rstart);
170: if (it != idxr + nrow && *it == A->rmap->rstart) { /* rmap->rstart in IS? */
171: if (std::distance(idxr, it) + A->rmap->n <= nrow) { /* long enough IS to store the local diagonal block? */
172: for (PetscInt j = 0; j < A->rmap->n && flg; ++j)
173: if (PetscUnlikely(it[j] != A->rmap->rstart + j)) flg = PETSC_FALSE;
174: if (flg) { /* complete local diagonal block in IS? */
175: /* fast extraction when the local diagonal block is part of the submatrix, e.g., for PCASM or PCHPDDM
176: *       [ B C E ]
177: *   A = [ B D E ]
178: *       [ B F E ]
179: */
180: m = std::distance(idxr, it); /* shift of the coefficient (0,0) of block D from above */
181: PetscCall(MatGetDiagonalBlock(A, &D));
182: PetscCall(MatDenseGetArrayRead(D, &copy));
183: for (PetscInt k = 0; k < A->rmap->n; ++k) { PetscCall(PetscArraycpy(ptr + (m + k) * nrow + m, copy + k * A->rmap->n, A->rmap->n)); /* block D from above */ }
184: PetscCall(MatDenseRestoreArrayRead(D, &copy));
185: if (m) {
186: a->wrapper->copy_submatrix(nrow, m, idxr, idxc, ptr); /* vertical block B from above */
187: /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
188: if (A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE) {
189: PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, m, A->rmap->n, m, ptr + m, &B));
190: PetscCall(MatDenseSetLDA(B, nrow));
191: PetscCall(MatCreateDense(PETSC_COMM_SELF, m, A->rmap->n, m, A->rmap->n, ptr + m * nrow, &BT));
192: PetscCall(MatDenseSetLDA(BT, nrow));
193: if (A->hermitian == PETSC_BOOL3_TRUE && PetscDefined(USE_COMPLEX)) {
194: PetscCall(MatHermitianTranspose(B, MAT_REUSE_MATRIX, &BT));
195: } else {
196: PetscCall(MatTransposeSetPrecursor(B, BT));
197: PetscCall(MatTranspose(B, MAT_REUSE_MATRIX, &BT));
198: }
199: PetscCall(MatDestroy(&B));
200: PetscCall(MatDestroy(&BT));
201: } else {
202: for (PetscInt k = 0; k < A->rmap->n; ++k) { /* block C from above */
203: a->wrapper->copy_submatrix(m, 1, idxr, idxc + m + k, ptr + (m + k) * nrow);
204: }
205: }
206: }
207: if (m + A->rmap->n != nrow) {
208: a->wrapper->copy_submatrix(nrow, std::distance(it + A->rmap->n, idxr + nrow), idxr, idxc + m + A->rmap->n, ptr + (m + A->rmap->n) * nrow); /* vertical block E from above */
209: /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
210: if (A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE) {
211: PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, nrow - (m + A->rmap->n), A->rmap->n, nrow - (m + A->rmap->n), ptr + (m + A->rmap->n) * nrow + m, &B));
212: PetscCall(MatDenseSetLDA(B, nrow));
213: PetscCall(MatCreateDense(PETSC_COMM_SELF, nrow - (m + A->rmap->n), A->rmap->n, nrow - (m + A->rmap->n), A->rmap->n, ptr + m * nrow + m + A->rmap->n, &BT));
214: PetscCall(MatDenseSetLDA(BT, nrow));
215: if (A->hermitian == PETSC_BOOL3_TRUE && PetscDefined(USE_COMPLEX)) {
216: PetscCall(MatHermitianTranspose(B, MAT_REUSE_MATRIX, &BT));
217: } else {
218: PetscCall(MatTransposeSetPrecursor(B, BT));
219: PetscCall(MatTranspose(B, MAT_REUSE_MATRIX, &BT));
220: }
221: PetscCall(MatDestroy(&B));
222: PetscCall(MatDestroy(&BT));
223: } else {
224: for (PetscInt k = 0; k < A->rmap->n; ++k) { /* block F from above */
225: a->wrapper->copy_submatrix(std::distance(it + A->rmap->n, idxr + nrow), 1, it + A->rmap->n, idxc + m + k, ptr + (m + k) * nrow + m + A->rmap->n);
226: }
227: }
228: }
229: } /* complete local diagonal block not in IS */
230: } else flg = PETSC_FALSE; /* IS not long enough to store the local diagonal block */
231: } else flg = PETSC_FALSE; /* rmap->rstart not in IS */
232: } /* unsorted IS */
233: }
234: } else flg = PETSC_FALSE; /* different row and column IS */
235: if (!flg) a->wrapper->copy_submatrix(nrow, m, idxr, idxc, ptr); /* reassemble everything */
236: PetscCall(ISRestoreIndices(irow[i], &idxr));
237: PetscCall(ISRestoreIndices(icol[i], &idxc));
238: PetscCall(MatDenseRestoreArrayWrite((*submat)[i], &ptr));
239: PetscCall(MatShift((*submat)[i], shift));
240: PetscCall(MatScale((*submat)[i], scale));
241: }
242: PetscFunctionReturn(PETSC_SUCCESS);
243: }
245: static PetscErrorCode MatDestroy_Htool(Mat A)
246: {
247: Mat_Htool *a;
248: PetscContainer container;
249: MatHtoolKernelTranspose *kernelt;
251: PetscFunctionBegin;
252: PetscCall(MatShellGetContext(A, &a));
253: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_seqdense_C", nullptr));
254: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_mpidense_C", nullptr));
255: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_seqdense_C", nullptr));
256: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_mpidense_C", nullptr));
257: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetHierarchicalMat_C", nullptr));
258: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolSetKernel_C", nullptr));
259: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationSource_C", nullptr));
260: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationTarget_C", nullptr));
261: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUsePermutation_C", nullptr));
262: PetscCall(PetscObjectQuery((PetscObject)A, "KernelTranspose", (PetscObject *)&container));
263: if (container) { /* created in MatTranspose_Htool() */
264: PetscCall(PetscContainerGetPointer(container, (void **)&kernelt));
265: PetscCall(MatDestroy(&kernelt->A));
266: PetscCall(PetscObjectCompose((PetscObject)A, "KernelTranspose", nullptr));
267: }
268: if (a->gcoords_source != a->gcoords_target) PetscCall(PetscFree(a->gcoords_source));
269: PetscCall(PetscFree(a->gcoords_target));
270: delete a->wrapper;
271: a->target_cluster.reset();
272: a->source_cluster.reset();
273: a->distributed_operator_holder.reset();
274: PetscCall(PetscFree(a));
275: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContext_C", nullptr)); // needed to avoid a call to MatShellSetContext_Immutable()
276: PetscFunctionReturn(PETSC_SUCCESS);
277: }
279: static PetscErrorCode MatView_Htool(Mat A, PetscViewer pv)
280: {
281: Mat_Htool *a;
282: PetscScalar shift, scale;
283: PetscBool flg;
284: std::map<std::string, std::string> hmatrix_information;
286: PetscFunctionBegin;
287: PetscCall(MatShellGetContext(A, &a));
288: hmatrix_information = htool::get_distributed_hmatrix_information(a->distributed_operator_holder->hmatrix, PetscObjectComm((PetscObject)A));
289: PetscCall(PetscObjectTypeCompare((PetscObject)pv, PETSCVIEWERASCII, &flg));
290: if (flg) {
291: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
292: PetscCall(PetscViewerASCIIPrintf(pv, "symmetry: %c\n", a->distributed_operator_holder->distributed_operator.get_symmetry_type()));
293: if (PetscAbsScalar(scale - 1.0) > PETSC_MACHINE_EPSILON) {
294: #if defined(PETSC_USE_COMPLEX)
295: PetscCall(PetscViewerASCIIPrintf(pv, "scaling: %g+%gi\n", (double)PetscRealPart(scale), (double)PetscImaginaryPart(scale)));
296: #else
297: PetscCall(PetscViewerASCIIPrintf(pv, "scaling: %g\n", (double)scale));
298: #endif
299: }
300: if (PetscAbsScalar(shift) > PETSC_MACHINE_EPSILON) {
301: #if defined(PETSC_USE_COMPLEX)
302: PetscCall(PetscViewerASCIIPrintf(pv, "shift: %g+%gi\n", (double)PetscRealPart(shift), (double)PetscImaginaryPart(shift)));
303: #else
304: PetscCall(PetscViewerASCIIPrintf(pv, "shift: %g\n", (double)shift));
305: #endif
306: }
307: PetscCall(PetscViewerASCIIPrintf(pv, "minimum cluster size: %" PetscInt_FMT "\n", a->min_cluster_size));
308: PetscCall(PetscViewerASCIIPrintf(pv, "epsilon: %g\n", (double)a->epsilon));
309: PetscCall(PetscViewerASCIIPrintf(pv, "eta: %g\n", (double)a->eta));
310: PetscCall(PetscViewerASCIIPrintf(pv, "minimum target depth: %" PetscInt_FMT "\n", a->depth[0]));
311: PetscCall(PetscViewerASCIIPrintf(pv, "minimum source depth: %" PetscInt_FMT "\n", a->depth[1]));
312: PetscCall(PetscViewerASCIIPrintf(pv, "compressor: %s\n", MatHtoolCompressorTypes[a->compressor]));
313: PetscCall(PetscViewerASCIIPrintf(pv, "clustering: %s\n", MatHtoolClusteringTypes[a->clustering]));
314: PetscCall(PetscViewerASCIIPrintf(pv, "compression ratio: %s\n", hmatrix_information["Compression_ratio"].c_str()));
315: PetscCall(PetscViewerASCIIPrintf(pv, "space saving: %s\n", hmatrix_information["Space_saving"].c_str()));
316: PetscCall(PetscViewerASCIIPrintf(pv, "block tree consistency: %s\n", PetscBools[a->distributed_operator_holder->hmatrix.is_block_tree_consistent()]));
317: PetscCall(PetscViewerASCIIPrintf(pv, "number of dense (resp. low rank) matrices: %s (resp. %s)\n", hmatrix_information["Number_of_dense_blocks"].c_str(), hmatrix_information["Number_of_low_rank_blocks"].c_str()));
318: PetscCall(
319: PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) dense block sizes: (%s, %s, %s)\n", hmatrix_information["Dense_block_size_min"].c_str(), hmatrix_information["Dense_block_size_mean"].c_str(), hmatrix_information["Dense_block_size_max"].c_str()));
320: PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) low rank block sizes: (%s, %s, %s)\n", hmatrix_information["Low_rank_block_size_min"].c_str(), hmatrix_information["Low_rank_block_size_mean"].c_str(),
321: hmatrix_information["Low_rank_block_size_max"].c_str()));
322: PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) ranks: (%s, %s, %s)\n", hmatrix_information["Rank_min"].c_str(), hmatrix_information["Rank_mean"].c_str(), hmatrix_information["Rank_max"].c_str()));
323: }
324: PetscFunctionReturn(PETSC_SUCCESS);
325: }
327: /* naive implementation of MatGetRow() needed for MatConvert_Nest_AIJ() */
328: static PetscErrorCode MatGetRow_Htool(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
329: {
330: Mat_Htool *a;
331: PetscScalar shift, scale;
332: PetscInt *idxc;
333: PetscBLASInt one = 1, bn;
335: PetscFunctionBegin;
336: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
337: PetscCall(MatShellGetContext(A, &a));
338: if (nz) *nz = A->cmap->N;
339: if (idx || v) { /* even if !idx, need to set idxc for htool::copy_submatrix() */
340: PetscCall(PetscMalloc1(A->cmap->N, &idxc));
341: for (PetscInt i = 0; i < A->cmap->N; ++i) idxc[i] = i;
342: }
343: if (idx) *idx = idxc;
344: if (v) {
345: PetscCall(PetscMalloc1(A->cmap->N, v));
346: if (a->wrapper) a->wrapper->copy_submatrix(1, A->cmap->N, &row, idxc, *v);
347: else reinterpret_cast<htool::VirtualGenerator<PetscScalar> *>(a->kernelctx)->copy_submatrix(1, A->cmap->N, &row, idxc, *v);
348: PetscCall(PetscBLASIntCast(A->cmap->N, &bn));
349: PetscCallCXX(htool::Blas<PetscScalar>::scal(&bn, &scale, *v, &one));
350: if (row < A->cmap->N) (*v)[row] += shift;
351: }
352: if (!idx) PetscCall(PetscFree(idxc));
353: PetscFunctionReturn(PETSC_SUCCESS);
354: }
356: static PetscErrorCode MatRestoreRow_Htool(Mat, PetscInt, PetscInt *, PetscInt **idx, PetscScalar **v)
357: {
358: PetscFunctionBegin;
359: if (idx) PetscCall(PetscFree(*idx));
360: if (v) PetscCall(PetscFree(*v));
361: PetscFunctionReturn(PETSC_SUCCESS);
362: }
364: static PetscErrorCode MatSetFromOptions_Htool(Mat A, PetscOptionItems PetscOptionsObject)
365: {
366: Mat_Htool *a;
367: PetscInt n;
368: PetscBool flg;
370: PetscFunctionBegin;
371: PetscCall(MatShellGetContext(A, &a));
372: PetscOptionsHeadBegin(PetscOptionsObject, "Htool options");
373: PetscCall(PetscOptionsBoundedInt("-mat_htool_min_cluster_size", "Minimal leaf size in cluster tree", nullptr, a->min_cluster_size, &a->min_cluster_size, nullptr, 0));
374: PetscCall(PetscOptionsBoundedReal("-mat_htool_epsilon", "Relative error in Frobenius norm when approximating a block", nullptr, a->epsilon, &a->epsilon, nullptr, 0.0));
375: PetscCall(PetscOptionsReal("-mat_htool_eta", "Admissibility condition tolerance", nullptr, a->eta, &a->eta, nullptr));
376: PetscCall(PetscOptionsBoundedInt("-mat_htool_min_target_depth", "Minimal cluster tree depth associated with the rows", nullptr, a->depth[0], a->depth, nullptr, 0));
377: PetscCall(PetscOptionsBoundedInt("-mat_htool_min_source_depth", "Minimal cluster tree depth associated with the columns", nullptr, a->depth[1], a->depth + 1, nullptr, 0));
378: PetscCall(PetscOptionsBool("-mat_htool_block_tree_consistency", "Block tree consistency", nullptr, a->block_tree_consistency, &a->block_tree_consistency, nullptr));
380: n = 0;
381: PetscCall(PetscOptionsEList("-mat_htool_compressor", "Type of compression", "MatHtoolCompressorType", MatHtoolCompressorTypes, PETSC_STATIC_ARRAY_LENGTH(MatHtoolCompressorTypes), MatHtoolCompressorTypes[MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA], &n, &flg));
382: if (flg) a->compressor = MatHtoolCompressorType(n);
383: n = 0;
384: PetscCall(PetscOptionsEList("-mat_htool_clustering", "Type of clustering", "MatHtoolClusteringType", MatHtoolClusteringTypes, PETSC_STATIC_ARRAY_LENGTH(MatHtoolClusteringTypes), MatHtoolClusteringTypes[MAT_HTOOL_CLUSTERING_PCA_REGULAR], &n, &flg));
385: if (flg) a->clustering = MatHtoolClusteringType(n);
386: PetscOptionsHeadEnd();
387: PetscFunctionReturn(PETSC_SUCCESS);
388: }
390: static PetscErrorCode MatAssemblyEnd_Htool(Mat A, MatAssemblyType)
391: {
392: Mat_Htool *a;
393: const PetscInt *ranges;
394: PetscInt *offset;
395: PetscMPIInt size, rank;
396: char S = PetscDefined(USE_COMPLEX) && A->hermitian == PETSC_BOOL3_TRUE ? 'H' : (A->symmetric == PETSC_BOOL3_TRUE ? 'S' : 'N'), uplo = S == 'N' ? 'N' : 'U';
397: htool::VirtualGenerator<PetscScalar> *generator = nullptr;
398: htool::ClusterTreeBuilder<PetscReal> recursive_build_strategy;
399: htool::Cluster<PetscReal> *source_cluster;
400: std::shared_ptr<htool::VirtualLowRankGenerator<PetscScalar>> compressor;
402: PetscFunctionBegin;
403: PetscCall(PetscCitationsRegister(HtoolCitation, &HtoolCite));
404: PetscCall(MatShellGetContext(A, &a));
405: delete a->wrapper;
406: a->target_cluster.reset();
407: a->source_cluster.reset();
408: a->distributed_operator_holder.reset();
409: // clustering
410: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
411: PetscCall(PetscMalloc1(2 * size, &offset));
412: PetscCall(MatGetOwnershipRanges(A, &ranges));
413: for (PetscInt i = 0; i < size; ++i) {
414: offset[2 * i] = ranges[i];
415: offset[2 * i + 1] = ranges[i + 1] - ranges[i];
416: }
417: switch (a->clustering) {
418: case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC:
419: recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeLargestExtent<PetscReal>>());
420: recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::GeometricSplitting<PetscReal>>());
421: break;
422: case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC:
423: recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeBoundingBox<PetscReal>>());
424: recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::GeometricSplitting<PetscReal>>());
425: break;
426: case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR:
427: recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeBoundingBox<PetscReal>>());
428: recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::RegularSplitting<PetscReal>>());
429: break;
430: default:
431: recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeLargestExtent<PetscReal>>());
432: recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::RegularSplitting<PetscReal>>());
433: }
434: recursive_build_strategy.set_minclustersize(a->min_cluster_size);
435: a->target_cluster = std::make_unique<htool::Cluster<PetscReal>>(recursive_build_strategy.create_cluster_tree(A->rmap->N, a->dim, a->gcoords_target, 2, size, offset));
436: if (a->gcoords_target != a->gcoords_source) {
437: PetscCall(MatGetOwnershipRangesColumn(A, &ranges));
438: for (PetscInt i = 0; i < size; ++i) {
439: offset[2 * i] = ranges[i];
440: offset[2 * i + 1] = ranges[i + 1] - ranges[i];
441: }
442: switch (a->clustering) {
443: case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC:
444: recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeLargestExtent<PetscReal>>());
445: recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::GeometricSplitting<PetscReal>>());
446: break;
447: case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC:
448: recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeBoundingBox<PetscReal>>());
449: recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::GeometricSplitting<PetscReal>>());
450: break;
451: case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR:
452: recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeBoundingBox<PetscReal>>());
453: recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::RegularSplitting<PetscReal>>());
454: break;
455: default:
456: recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeLargestExtent<PetscReal>>());
457: recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::RegularSplitting<PetscReal>>());
458: }
459: recursive_build_strategy.set_minclustersize(a->min_cluster_size);
460: a->source_cluster = std::make_unique<htool::Cluster<PetscReal>>(recursive_build_strategy.create_cluster_tree(A->cmap->N, a->dim, a->gcoords_source, 2, size, offset));
461: S = uplo = 'N';
462: source_cluster = a->source_cluster.get();
463: } else source_cluster = a->target_cluster.get();
464: PetscCall(PetscFree(offset));
465: // generator
466: if (a->kernel) a->wrapper = new WrapperHtool(a->dim, a->kernel, a->kernelctx);
467: else {
468: a->wrapper = nullptr;
469: generator = reinterpret_cast<htool::VirtualGenerator<PetscScalar> *>(a->kernelctx);
470: }
471: // compressor
472: switch (a->compressor) {
473: case MAT_HTOOL_COMPRESSOR_FULL_ACA:
474: compressor = std::make_shared<htool::fullACA<PetscScalar>>();
475: break;
476: case MAT_HTOOL_COMPRESSOR_SVD:
477: compressor = std::make_shared<htool::SVD<PetscScalar>>();
478: break;
479: default:
480: compressor = std::make_shared<htool::sympartialACA<PetscScalar>>();
481: }
482: // local hierarchical matrix
483: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
484: auto hmatrix_builder = htool::HMatrixTreeBuilder<PetscScalar>(*a->target_cluster, *source_cluster, a->epsilon, a->eta, S, uplo, -1, rank, rank);
485: hmatrix_builder.set_low_rank_generator(compressor);
486: hmatrix_builder.set_minimal_target_depth(a->depth[0]);
487: hmatrix_builder.set_minimal_source_depth(a->depth[1]);
488: PetscCheck(a->block_tree_consistency || (!a->block_tree_consistency && !(A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE)), PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot have a MatHtool with inconsistent block tree which is either symmetric or Hermitian");
489: hmatrix_builder.set_block_tree_consistency(a->block_tree_consistency);
490: a->distributed_operator_holder = std::make_unique<htool::DistributedOperatorFromHMatrix<PetscScalar>>(a->wrapper ? *a->wrapper : *generator, *a->target_cluster, *source_cluster, hmatrix_builder, PetscObjectComm((PetscObject)A));
491: PetscFunctionReturn(PETSC_SUCCESS);
492: }
494: static PetscErrorCode MatProductNumeric_Htool(Mat C)
495: {
496: Mat_Product *product = C->product;
497: Mat_Htool *a;
498: const PetscScalar *in;
499: PetscScalar *out;
500: PetscInt N, lda;
502: PetscFunctionBegin;
503: MatCheckProduct(C, 1);
504: PetscCall(MatGetSize(C, nullptr, &N));
505: PetscCall(MatDenseGetLDA(C, &lda));
506: PetscCheck(lda == C->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")", lda, C->rmap->n);
507: PetscCall(MatDenseGetArrayRead(product->B, &in));
508: PetscCall(MatDenseGetArrayWrite(C, &out));
509: PetscCall(MatShellGetContext(product->A, &a));
510: switch (product->type) {
511: case MATPRODUCT_AB:
512: a->distributed_operator_holder->distributed_operator.matrix_product_local_to_local(in, out, N, nullptr);
513: break;
514: case MATPRODUCT_AtB:
515: a->distributed_operator_holder->distributed_operator.matrix_product_transp_local_to_local(in, out, N, nullptr);
516: break;
517: default:
518: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatProductType %s is not supported", MatProductTypes[product->type]);
519: }
520: PetscCall(MatDenseRestoreArrayWrite(C, &out));
521: PetscCall(MatDenseRestoreArrayRead(product->B, &in));
522: PetscFunctionReturn(PETSC_SUCCESS);
523: }
525: static PetscErrorCode MatProductSymbolic_Htool(Mat C)
526: {
527: Mat_Product *product = C->product;
528: Mat A, B;
529: PetscBool flg;
531: PetscFunctionBegin;
532: MatCheckProduct(C, 1);
533: A = product->A;
534: B = product->B;
535: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQDENSE, MATMPIDENSE, ""));
536: PetscCheck(flg && (product->type == MATPRODUCT_AB || product->type == MATPRODUCT_AtB), PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "ProductType %s not supported for %s", MatProductTypes[product->type], ((PetscObject)product->B)->type_name);
537: if (C->rmap->n == PETSC_DECIDE || C->cmap->n == PETSC_DECIDE || C->rmap->N == PETSC_DECIDE || C->cmap->N == PETSC_DECIDE) {
538: if (product->type == MATPRODUCT_AB) PetscCall(MatSetSizes(C, A->rmap->n, B->cmap->n, A->rmap->N, B->cmap->N));
539: else PetscCall(MatSetSizes(C, A->cmap->n, B->cmap->n, A->cmap->N, B->cmap->N));
540: }
541: PetscCall(MatSetType(C, MATDENSE));
542: PetscCall(MatSetUp(C));
543: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
544: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
545: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
546: C->ops->productsymbolic = nullptr;
547: C->ops->productnumeric = MatProductNumeric_Htool;
548: PetscFunctionReturn(PETSC_SUCCESS);
549: }
551: static PetscErrorCode MatProductSetFromOptions_Htool(Mat C)
552: {
553: PetscFunctionBegin;
554: MatCheckProduct(C, 1);
555: if (C->product->type == MATPRODUCT_AB || C->product->type == MATPRODUCT_AtB) C->ops->productsymbolic = MatProductSymbolic_Htool;
556: PetscFunctionReturn(PETSC_SUCCESS);
557: }
559: static PetscErrorCode MatHtoolGetHierarchicalMat_Htool(Mat A, const htool::DistributedOperator<PetscScalar> **distributed_operator)
560: {
561: Mat_Htool *a;
563: PetscFunctionBegin;
564: PetscCall(MatShellGetContext(A, &a));
565: *distributed_operator = &a->distributed_operator_holder->distributed_operator;
566: PetscFunctionReturn(PETSC_SUCCESS);
567: }
569: /*@C
570: MatHtoolGetHierarchicalMat - Retrieves the opaque pointer to a Htool virtual matrix stored in a `MATHTOOL`.
572: No Fortran Support, No C Support
574: Input Parameter:
575: . A - hierarchical matrix
577: Output Parameter:
578: . distributed_operator - opaque pointer to a Htool virtual matrix
580: Level: advanced
582: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`
583: @*/
584: PETSC_EXTERN PetscErrorCode MatHtoolGetHierarchicalMat(Mat A, const htool::DistributedOperator<PetscScalar> **distributed_operator)
585: {
586: PetscFunctionBegin;
588: PetscAssertPointer(distributed_operator, 2);
589: PetscTryMethod(A, "MatHtoolGetHierarchicalMat_C", (Mat, const htool::DistributedOperator<PetscScalar> **), (A, distributed_operator));
590: PetscFunctionReturn(PETSC_SUCCESS);
591: }
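/* A minimal usage sketch (editor's addition, not part of the original source), assuming `A` is an assembled MATHTOOL:
 *
 *   const htool::DistributedOperator<PetscScalar> *op;
 *   PetscCall(MatHtoolGetHierarchicalMat(A, &op));
 *   // op may then be handed to Htool routines directly, e.g., to query the symmetry type
 *   // the same way MatView_Htool() above does
 */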
593: static PetscErrorCode MatHtoolSetKernel_Htool(Mat A, MatHtoolKernelFn *kernel, void *kernelctx)
594: {
595: Mat_Htool *a;
597: PetscFunctionBegin;
598: PetscCall(MatShellGetContext(A, &a));
599: a->kernel = kernel;
600: a->kernelctx = kernelctx;
601: delete a->wrapper;
602: if (a->kernel) a->wrapper = new WrapperHtool(a->dim, a->kernel, a->kernelctx);
603: PetscFunctionReturn(PETSC_SUCCESS);
604: }
606: /*@C
607: MatHtoolSetKernel - Sets the kernel and context used for the assembly of a `MATHTOOL`.
609: Collective, No Fortran Support
611: Input Parameters:
612: + A - hierarchical matrix
613: . kernel - computational kernel (or `NULL`)
614: - kernelctx - kernel context (if kernel is `NULL`, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)
616: Level: advanced
618: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatCreateHtoolFromKernel()`
619: @*/
620: PetscErrorCode MatHtoolSetKernel(Mat A, MatHtoolKernelFn *kernel, void *kernelctx)
621: {
622: PetscFunctionBegin;
625: if (!kernel) PetscAssertPointer(kernelctx, 3);
626: PetscTryMethod(A, "MatHtoolSetKernel_C", (Mat, MatHtoolKernelFn *, void *), (A, kernel, kernelctx));
627: PetscFunctionReturn(PETSC_SUCCESS);
628: }
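/* A minimal kernel sketch (editor's addition, not part of the original source); the callback name, the context,
 * and the entry formula are hypothetical. A kernel fills the requested M x N block in column-major order with
 * leading dimension M, i.e., entry (rows[i], cols[j]) goes to ptr[i + j * M] (cf. GenEntriesTranspose() below).
 *
 *   static PetscErrorCode GenEntries(PetscInt sdim, PetscInt M, PetscInt N, const PetscInt *rows, const PetscInt *cols, PetscScalar *ptr, void *ctx)
 *   {
 *     PetscReal *coords = (PetscReal *)ctx; // hypothetical context: sdim coordinates per global point
 *     PetscFunctionBegin;
 *     for (PetscInt j = 0; j < N; ++j)
 *       for (PetscInt i = 0; i < M; ++i) {
 *         PetscReal d = 0.0;
 *         for (PetscInt k = 0; k < sdim; ++k) d += PetscSqr(coords[sdim * rows[i] + k] - coords[sdim * cols[j] + k]);
 *         ptr[i + j * M] = 1.0 / (1.0 + PetscSqrtReal(d)); // hypothetical kernel value
 *       }
 *     PetscFunctionReturn(PETSC_SUCCESS);
 *   }
 *
 *   PetscCall(MatHtoolSetKernel(A, GenEntries, coords));
 *   // the hierarchical matrix itself is only rebuilt with the new kernel at the next
 *   // MatAssemblyBegin()/MatAssemblyEnd() (cf. MatAssemblyEnd_Htool() above)
 */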
630: static PetscErrorCode MatHtoolGetPermutationSource_Htool(Mat A, IS *is)
631: {
632: Mat_Htool *a;
633: PetscMPIInt rank;
634: const std::vector<PetscInt> *source;
635: const htool::Cluster<PetscReal> *local_source_cluster;
637: PetscFunctionBegin;
638: PetscCall(MatShellGetContext(A, &a));
639: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
640: local_source_cluster = a->source_cluster ? &a->source_cluster->get_cluster_on_partition(rank) : &a->target_cluster->get_cluster_on_partition(rank);
641: source = &local_source_cluster->get_permutation();
642: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), local_source_cluster->get_size(), source->data() + local_source_cluster->get_offset(), PETSC_COPY_VALUES, is));
643: PetscCall(ISSetPermutation(*is));
644: PetscFunctionReturn(PETSC_SUCCESS);
645: }
647: /*@
648: MatHtoolGetPermutationSource - Gets the permutation associated with the source cluster for a `MATHTOOL` matrix.
650: Input Parameter:
651: . A - hierarchical matrix
653: Output Parameter:
654: . is - permutation
656: Level: advanced
658: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationTarget()`, `MatHtoolUsePermutation()`
659: @*/
660: PetscErrorCode MatHtoolGetPermutationSource(Mat A, IS *is)
661: {
662: PetscFunctionBegin;
664: if (!is) PetscAssertPointer(is, 2);
665: PetscTryMethod(A, "MatHtoolGetPermutationSource_C", (Mat, IS *), (A, is));
666: PetscFunctionReturn(PETSC_SUCCESS);
667: }
669: static PetscErrorCode MatHtoolGetPermutationTarget_Htool(Mat A, IS *is)
670: {
671: Mat_Htool *a;
672: const std::vector<PetscInt> *target;
673: PetscMPIInt rank;
675: PetscFunctionBegin;
676: PetscCall(MatShellGetContext(A, &a));
677: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
678: target = &a->target_cluster->get_permutation();
679: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), a->target_cluster->get_cluster_on_partition(rank).get_size(), target->data() + a->target_cluster->get_cluster_on_partition(rank).get_offset(), PETSC_COPY_VALUES, is));
680: PetscCall(ISSetPermutation(*is));
681: PetscFunctionReturn(PETSC_SUCCESS);
682: }
684: /*@
685: MatHtoolGetPermutationTarget - Gets the permutation associated with the target cluster for a `MATHTOOL` matrix.
687: Input Parameter:
688: . A - hierarchical matrix
690: Output Parameter:
691: . is - permutation
693: Level: advanced
695: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolUsePermutation()`
696: @*/
697: PetscErrorCode MatHtoolGetPermutationTarget(Mat A, IS *is)
698: {
699: PetscFunctionBegin;
701: if (!is) PetscAssertPointer(is, 2);
702: PetscTryMethod(A, "MatHtoolGetPermutationTarget_C", (Mat, IS *), (A, is));
703: PetscFunctionReturn(PETSC_SUCCESS);
704: }
706: static PetscErrorCode MatHtoolUsePermutation_Htool(Mat A, PetscBool use)
707: {
708: Mat_Htool *a;
710: PetscFunctionBegin;
711: PetscCall(MatShellGetContext(A, &a));
712: a->distributed_operator_holder->distributed_operator.use_permutation() = use;
713: PetscFunctionReturn(PETSC_SUCCESS);
714: }
716: /*@
717: MatHtoolUsePermutation - Sets whether a `MATHTOOL` matrix should permute input (resp. output) vectors following its internal source (resp. target) permutation.
719: Input Parameters:
720: + A - hierarchical matrix
721: - use - Boolean value
723: Level: advanced
725: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolGetPermutationTarget()`
726: @*/
727: PetscErrorCode MatHtoolUsePermutation(Mat A, PetscBool use)
728: {
729: PetscFunctionBegin;
732: PetscTryMethod(A, "MatHtoolUsePermutation_C", (Mat, PetscBool), (A, use));
733: PetscFunctionReturn(PETSC_SUCCESS);
734: }
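/* A minimal usage sketch (editor's addition, not part of the original source), assuming `A` is an assembled MATHTOOL:
 *
 *   IS source, target;
 *   PetscCall(MatHtoolGetPermutationSource(A, &source));
 *   PetscCall(MatHtoolGetPermutationTarget(A, &target));
 *   PetscCall(MatHtoolUsePermutation(A, PETSC_FALSE)); // MatMult() input (resp. output) vectors are now expected in the numbering given by source (resp. target)
 *   // ...
 *   PetscCall(MatHtoolUsePermutation(A, PETSC_TRUE)); // re-enable the internal permutations
 *   PetscCall(ISDestroy(&source));
 *   PetscCall(ISDestroy(&target));
 */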
736: static PetscErrorCode MatConvert_Htool_Dense(Mat A, MatType, MatReuse reuse, Mat *B)
737: {
738: Mat C;
739: Mat_Htool *a;
740: PetscScalar *array, shift, scale;
741: PetscInt lda;
743: PetscFunctionBegin;
744: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
745: PetscCall(MatShellGetContext(A, &a));
746: if (reuse == MAT_REUSE_MATRIX) {
747: C = *B;
748: PetscCheck(C->rmap->n == A->rmap->n && C->cmap->N == A->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible dimensions");
749: PetscCall(MatDenseGetLDA(C, &lda));
750: PetscCheck(lda == C->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")", lda, C->rmap->n);
751: } else {
752: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
753: PetscCall(MatSetSizes(C, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
754: PetscCall(MatSetType(C, MATDENSE));
755: PetscCall(MatSetUp(C));
756: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
757: }
758: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
759: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
760: PetscCall(MatDenseGetArrayWrite(C, &array));
761: htool::copy_to_dense_in_user_numbering(a->distributed_operator_holder->hmatrix, array);
762: PetscCall(MatDenseRestoreArrayWrite(C, &array));
763: PetscCall(MatShift(C, shift));
764: PetscCall(MatScale(C, scale));
765: if (reuse == MAT_INPLACE_MATRIX) {
766: PetscCall(MatHeaderReplace(A, &C));
767: } else *B = C;
768: PetscFunctionReturn(PETSC_SUCCESS);
769: }
771: static PetscErrorCode GenEntriesTranspose(PetscInt sdim, PetscInt M, PetscInt N, const PetscInt *rows, const PetscInt *cols, PetscScalar *ptr, void *ctx)
772: {
773: MatHtoolKernelTranspose *generator = (MatHtoolKernelTranspose *)ctx;
774: PetscScalar *tmp;
776: PetscFunctionBegin;
777: PetscCall(generator->kernel(sdim, N, M, cols, rows, ptr, generator->kernelctx));
778: PetscCall(PetscMalloc1(M * N, &tmp));
779: PetscCall(PetscArraycpy(tmp, ptr, M * N));
780: for (PetscInt i = 0; i < M; ++i) {
781: for (PetscInt j = 0; j < N; ++j) ptr[i + j * M] = tmp[j + i * N];
782: }
783: PetscCall(PetscFree(tmp));
784: PetscFunctionReturn(PETSC_SUCCESS);
785: }
787: /* naive implementation which keeps a reference to the original Mat */
788: static PetscErrorCode MatTranspose_Htool(Mat A, MatReuse reuse, Mat *B)
789: {
790: Mat C;
791: Mat_Htool *a, *c;
792: PetscScalar shift, scale;
793: PetscInt M = A->rmap->N, N = A->cmap->N, m = A->rmap->n, n = A->cmap->n;
794: PetscContainer container;
795: MatHtoolKernelTranspose *kernelt;
797: PetscFunctionBegin;
798: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
799: PetscCall(MatShellGetContext(A, &a));
800: if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
801: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MatTranspose() with MAT_INPLACE_MATRIX not supported");
802: if (reuse == MAT_INITIAL_MATRIX) {
803: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
804: PetscCall(MatSetSizes(C, n, m, N, M));
805: PetscCall(MatSetType(C, ((PetscObject)A)->type_name));
806: PetscCall(MatSetUp(C));
807: PetscCall(PetscNew(&kernelt));
808: PetscCall(PetscObjectContainerCompose((PetscObject)C, "KernelTranspose", kernelt, PetscCtxDestroyDefault));
809: } else {
810: C = *B;
811: PetscCall(PetscObjectQuery((PetscObject)C, "KernelTranspose", (PetscObject *)&container));
812: PetscCheck(container, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Must call MatTranspose() with MAT_INITIAL_MATRIX first");
813: PetscCall(PetscContainerGetPointer(container, (void **)&kernelt));
814: }
815: PetscCall(MatShellGetContext(C, &c));
816: c->dim = a->dim;
817: PetscCall(MatShift(C, shift));
818: PetscCall(MatScale(C, scale));
819: c->kernel = GenEntriesTranspose;
820: if (kernelt->A != A) {
821: PetscCall(MatDestroy(&kernelt->A));
822: kernelt->A = A;
823: PetscCall(PetscObjectReference((PetscObject)A));
824: }
825: kernelt->kernel = a->kernel;
826: kernelt->kernelctx = a->kernelctx;
827: c->kernelctx = kernelt;
828: c->min_cluster_size = a->min_cluster_size;
829: c->epsilon = a->epsilon;
830: c->eta = a->eta;
831: c->block_tree_consistency = a->block_tree_consistency;
832: c->compressor = a->compressor;
833: c->clustering = a->clustering;
834: if (reuse == MAT_INITIAL_MATRIX) {
835: PetscCall(PetscMalloc1(N * c->dim, &c->gcoords_target));
836: PetscCall(PetscArraycpy(c->gcoords_target, a->gcoords_source, N * c->dim));
837: if (a->gcoords_target != a->gcoords_source) {
838: PetscCall(PetscMalloc1(M * c->dim, &c->gcoords_source));
839: PetscCall(PetscArraycpy(c->gcoords_source, a->gcoords_target, M * c->dim));
840: } else c->gcoords_source = c->gcoords_target;
841: }
842: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
843: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
844: if (reuse == MAT_INITIAL_MATRIX) *B = C;
845: PetscFunctionReturn(PETSC_SUCCESS);
846: }
848: static PetscErrorCode MatDestroy_Factor(Mat F)
849: {
850: PetscContainer container;
851: htool::HMatrix<PetscScalar> *A;
853: PetscFunctionBegin;
854: PetscCall(PetscObjectQuery((PetscObject)F, "HMatrix", (PetscObject *)&container));
855: if (container) {
856: PetscCall(PetscContainerGetPointer(container, (void **)&A));
857: delete A;
858: PetscCall(PetscObjectCompose((PetscObject)F, "HMatrix", nullptr));
859: }
860: PetscCall(PetscObjectComposeFunction((PetscObject)F, "MatFactorGetSolverType_C", nullptr));
861: PetscFunctionReturn(PETSC_SUCCESS);
862: }
864: static PetscErrorCode MatFactorGetSolverType_Htool(Mat, MatSolverType *type)
865: {
866: PetscFunctionBegin;
867: *type = MATSOLVERHTOOL;
868: PetscFunctionReturn(PETSC_SUCCESS);
869: }
871: template <char trans>
872: static inline PetscErrorCode MatSolve_Private(Mat A, htool::Matrix<PetscScalar> &X)
873: {
874: PetscContainer container;
875: htool::HMatrix<PetscScalar> *B;
877: PetscFunctionBegin;
878: PetscCheck(A->factortype == MAT_FACTOR_LU || A->factortype == MAT_FACTOR_CHOLESKY, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_UNKNOWN_TYPE, "Only MAT_LU_FACTOR and MAT_CHOLESKY_FACTOR are supported");
879: PetscCall(PetscObjectQuery((PetscObject)A, "HMatrix", (PetscObject *)&container));
880: PetscCheck(container, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Must call Mat%sFactorNumeric() before Mat%sSolve%s()", A->factortype == MAT_FACTOR_LU ? "LU" : "Cholesky", X.nb_cols() == 1 ? "" : "Mat", trans == 'N' ? "" : "Transpose");
881: PetscCall(PetscContainerGetPointer(container, (void **)&B));
882: if (A->factortype == MAT_FACTOR_LU) htool::lu_solve(trans, *B, X);
883: else htool::cholesky_solve('L', *B, X);
884: PetscFunctionReturn(PETSC_SUCCESS);
885: }
887: template <char trans, class Type, typename std::enable_if<std::is_same<Type, Vec>::value>::type * = nullptr>
888: static PetscErrorCode MatSolve_Htool(Mat A, Type b, Type x)
889: {
890: PetscInt n;
891: htool::Matrix<PetscScalar> v;
892: PetscScalar *array;
894: PetscFunctionBegin;
895: PetscCall(VecGetLocalSize(b, &n));
896: PetscCall(VecCopy(b, x));
897: PetscCall(VecGetArrayWrite(x, &array));
898: v.assign(n, 1, array, false);
899: PetscCall(VecRestoreArrayWrite(x, &array));
900: PetscCall(MatSolve_Private<trans>(A, v));
901: PetscFunctionReturn(PETSC_SUCCESS);
902: }
904: template <char trans, class Type, typename std::enable_if<std::is_same<Type, Mat>::value>::type * = nullptr>
905: static PetscErrorCode MatSolve_Htool(Mat A, Type B, Type X)
906: {
907: PetscInt m, N;
908: htool::Matrix<PetscScalar> v;
909: PetscScalar *array;
911: PetscFunctionBegin;
912: PetscCall(MatGetLocalSize(B, &m, nullptr));
913: PetscCall(MatGetLocalSize(B, nullptr, &N));
914: PetscCall(MatCopy(B, X, SAME_NONZERO_PATTERN));
915: PetscCall(MatDenseGetArrayWrite(X, &array));
916: v.assign(m, N, array, false);
917: PetscCall(MatDenseRestoreArrayWrite(X, &array));
918: PetscCall(MatSolve_Private<trans>(A, v));
919: PetscFunctionReturn(PETSC_SUCCESS);
920: }
922: template <MatFactorType ftype>
923: static PetscErrorCode MatFactorNumeric_Htool(Mat F, Mat A, const MatFactorInfo *)
924: {
925: Mat_Htool *a;
926: htool::HMatrix<PetscScalar> *B;
928: PetscFunctionBegin;
929: PetscCall(MatShellGetContext(A, &a));
930: B = new htool::HMatrix<PetscScalar>(a->distributed_operator_holder->hmatrix);
931: if (ftype == MAT_FACTOR_LU) htool::lu_factorization(*B);
932: else htool::cholesky_factorization('L', *B);
933: PetscCall(PetscObjectContainerCompose((PetscObject)F, "HMatrix", B, nullptr));
934: PetscFunctionReturn(PETSC_SUCCESS);
935: }
937: template <MatFactorType ftype>
938: PetscErrorCode MatFactorSymbolic_Htool(Mat F, Mat)
939: {
940: PetscFunctionBegin;
941: F->preallocated = PETSC_TRUE;
942: F->assembled = PETSC_TRUE;
943: F->ops->solve = MatSolve_Htool<'N', Vec>;
944: F->ops->matsolve = MatSolve_Htool<'N', Mat>;
945: if (!PetscDefined(USE_COMPLEX) || ftype == MAT_FACTOR_LU) {
946: F->ops->solvetranspose = MatSolve_Htool<'T', Vec>;
947: F->ops->matsolvetranspose = MatSolve_Htool<'T', Mat>;
948: }
949: F->ops->destroy = MatDestroy_Factor;
950: if (ftype == MAT_FACTOR_LU) F->ops->lufactornumeric = MatFactorNumeric_Htool<MAT_FACTOR_LU>;
951: else F->ops->choleskyfactornumeric = MatFactorNumeric_Htool<MAT_FACTOR_CHOLESKY>;
952: PetscFunctionReturn(PETSC_SUCCESS);
953: }
955: static PetscErrorCode MatLUFactorSymbolic_Htool(Mat F, Mat A, IS, IS, const MatFactorInfo *)
956: {
957: PetscFunctionBegin;
958: PetscCall(MatFactorSymbolic_Htool<MAT_FACTOR_LU>(F, A));
959: PetscFunctionReturn(PETSC_SUCCESS);
960: }
962: static PetscErrorCode MatCholeskyFactorSymbolic_Htool(Mat F, Mat A, IS, const MatFactorInfo *)
963: {
964: PetscFunctionBegin;
965: PetscCall(MatFactorSymbolic_Htool<MAT_FACTOR_CHOLESKY>(F, A));
966: PetscFunctionReturn(PETSC_SUCCESS);
967: }
969: static PetscErrorCode MatGetFactor_htool_htool(Mat A, MatFactorType ftype, Mat *F)
970: {
971: Mat B;
972: Mat_Htool *a;
973: PetscMPIInt size;
975: PetscFunctionBegin;
976: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
977: PetscCall(MatShellGetContext(A, &a));
978: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_WRONG_MPI_SIZE, "Unsupported parallel MatGetFactor()");
979: PetscCheck(a->block_tree_consistency, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot factor a MatHtool with inconsistent block tree");
980: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
981: PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
982: PetscCall(PetscStrallocpy(MATSOLVERHTOOL, &((PetscObject)B)->type_name));
983: PetscCall(MatSetUp(B));
985: B->ops->getinfo = MatGetInfo_External;
986: B->factortype = ftype;
987: B->trivialsymbolic = PETSC_TRUE;
989: if (ftype == MAT_FACTOR_LU) B->ops->lufactorsymbolic = MatLUFactorSymbolic_Htool;
990: else if (ftype == MAT_FACTOR_CHOLESKY) B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_Htool;
992: PetscCall(PetscFree(B->solvertype));
993: PetscCall(PetscStrallocpy(MATSOLVERHTOOL, &B->solvertype));
995: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_Htool));
996: *F = B;
997: PetscFunctionReturn(PETSC_SUCCESS);
998: }
1000: PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Htool(void)
1001: {
1002: PetscFunctionBegin;
1003: PetscCall(MatSolverTypeRegister(MATSOLVERHTOOL, MATHTOOL, MAT_FACTOR_LU, MatGetFactor_htool_htool));
1004: PetscCall(MatSolverTypeRegister(MATSOLVERHTOOL, MATHTOOL, MAT_FACTOR_CHOLESKY, MatGetFactor_htool_htool));
1005: PetscFunctionReturn(PETSC_SUCCESS);
1006: }
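/* A minimal usage sketch (editor's addition, not part of the original source): use the MATSOLVERHTOOL entries
 * registered above through a KSP/PC, assuming `A` is a sequential assembled MATHTOOL with a consistent block
 * tree (cf. the checks in MatGetFactor_htool_htool() above) and `b`, `x` are compatible vectors.
 *
 *   KSP ksp;
 *   PC  pc;
 *   PetscCall(KSPCreate(PetscObjectComm((PetscObject)A), &ksp));
 *   PetscCall(KSPSetOperators(ksp, A, A));
 *   PetscCall(KSPSetType(ksp, KSPPREONLY));
 *   PetscCall(KSPGetPC(ksp, &pc));
 *   PetscCall(PCSetType(pc, PCLU)); // or PCCHOLESKY, both factorizations are registered above
 *   PetscCall(PCFactorSetMatSolverType(pc, MATSOLVERHTOOL));
 *   PetscCall(KSPSolve(ksp, b, x));
 *   PetscCall(KSPDestroy(&ksp));
 */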
1008: /*@C
1009: MatCreateHtoolFromKernel - Creates a `MATHTOOL` from a user-supplied kernel.
1011: Collective, No Fortran Support
1013: Input Parameters:
1014: + comm - MPI communicator
1015: . m - number of local rows (or `PETSC_DECIDE` to have it calculated if `M` is given)
1016: . n - number of local columns (or `PETSC_DECIDE` to have it calculated if `N` is given)
1017: . M - number of global rows (or `PETSC_DETERMINE` to have it calculated if `m` is given)
1018: . N - number of global columns (or `PETSC_DETERMINE` to have it calculated if `n` is given)
1019: . spacedim - dimension of the space coordinates
1020: . coords_target - coordinates of the target
1021: . coords_source - coordinates of the source
1022: . kernel - computational kernel (or `NULL`)
1023: - kernelctx - kernel context (if kernel is `NULL`, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)
1025: Output Parameter:
1026: . B - matrix
1028: Options Database Keys:
1029: + -mat_htool_min_cluster_size <`PetscInt`> - minimal leaf size in cluster tree
1030: . -mat_htool_epsilon <`PetscReal`> - relative error in Frobenius norm when approximating a block
1031: . -mat_htool_eta <`PetscReal`> - admissibility condition tolerance
1032: . -mat_htool_min_target_depth <`PetscInt`> - minimal cluster tree depth associated with the rows
1033: . -mat_htool_min_source_depth <`PetscInt`> - minimal cluster tree depth associated with the columns
1034: . -mat_htool_block_tree_consistency <`PetscBool`> - block tree consistency
1035: . -mat_htool_compressor <sympartialACA, fullACA, SVD> - type of compression
1036: - -mat_htool_clustering <PCARegular, PCAGeometric, BoundingBox1Regular, BoundingBox1Geometric> - type of clustering
1038: Level: intermediate
1040: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MATHTOOL`, `PCSetCoordinates()`, `MatHtoolSetKernel()`, `MatHtoolCompressorType`, `MATH2OPUS`, `MatCreateH2OpusFromKernel()`
1041: @*/
1042: PetscErrorCode MatCreateHtoolFromKernel(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt spacedim, const PetscReal coords_target[], const PetscReal coords_source[], MatHtoolKernelFn *kernel, void *kernelctx, Mat *B)
1043: {
1044: Mat A;
1045: Mat_Htool *a;
1047: PetscFunctionBegin;
1048: PetscCall(MatCreate(comm, &A));
1050: PetscAssertPointer(coords_target, 7);
1051: PetscAssertPointer(coords_source, 8);
1053: if (!kernel) PetscAssertPointer(kernelctx, 10);
1054: PetscCall(MatSetSizes(A, m, n, M, N));
1055: PetscCall(MatSetType(A, MATHTOOL));
1056: PetscCall(MatSetUp(A));
1057: PetscCall(MatShellGetContext(A, &a));
1058: a->dim = spacedim;
1059: a->kernel = kernel;
1060: a->kernelctx = kernelctx;
1061: PetscCall(PetscCalloc1(A->rmap->N * spacedim, &a->gcoords_target));
1062: PetscCall(PetscArraycpy(a->gcoords_target + A->rmap->rstart * spacedim, coords_target, A->rmap->n * spacedim));
1063: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, a->gcoords_target, A->rmap->N * spacedim, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)A))); /* global target coordinates */
1064: if (coords_target != coords_source) {
1065: PetscCall(PetscCalloc1(A->cmap->N * spacedim, &a->gcoords_source));
1066: PetscCall(PetscArraycpy(a->gcoords_source + A->cmap->rstart * spacedim, coords_source, A->cmap->n * spacedim));
1067: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, a->gcoords_source, A->cmap->N * spacedim, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)A))); /* global source coordinates */
1068: } else a->gcoords_source = a->gcoords_target;
1069: *B = A;
1070: PetscFunctionReturn(PETSC_SUCCESS);
1071: }
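/* A minimal creation sketch (editor's addition, not part of the original source): a square hierarchical matrix
 * whose target and source point clouds coincide; `m`, `coords` (m * 3 local coordinates), and the kernel
 * `GenEntries` with its context `ctx` are hypothetical. The assembly step is what triggers the clustering and
 * compression in MatAssemblyEnd_Htool() above.
 *
 *   Mat A;
 *   PetscCall(MatCreateHtoolFromKernel(PETSC_COMM_WORLD, m, m, PETSC_DETERMINE, PETSC_DETERMINE, 3, coords, coords, GenEntries, ctx, &A));
 *   PetscCall(MatSetFromOptions(A)); // e.g., -mat_htool_epsilon 1e-4 -mat_htool_compressor fullACA
 *   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
 *   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
 *   PetscCall(MatDestroy(&A));
 */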
1073: /*MC
1074: MATHTOOL = "htool" - A matrix type for hierarchical matrices using the Htool package.
1076: Use `./configure --download-htool` to install PETSc to use Htool.
1078: Options Database Key:
1079: . -mat_type htool - matrix type to `MATHTOOL` during a call to `MatSetFromOptions()`
1081: Level: beginner
1083: .seealso: [](ch_matrices), `Mat`, `MATH2OPUS`, `MATDENSE`, `MatCreateHtoolFromKernel()`, `MatHtoolSetKernel()`
1084: M*/
1085: PETSC_EXTERN PetscErrorCode MatCreate_Htool(Mat A)
1086: {
1087: Mat_Htool *a;
1089: PetscFunctionBegin;
1090: PetscCall(MatSetType(A, MATSHELL));
1091: PetscCall(PetscNew(&a));
1092: PetscCall(MatShellSetContext(A, a));
1093: PetscCall(MatShellSetOperation(A, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiagonal_Htool));
1094: PetscCall(MatShellSetOperation(A, MATOP_GET_DIAGONAL_BLOCK, (void (*)(void))MatGetDiagonalBlock_Htool));
1095: PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Htool));
1096: PetscCall(MatShellSetOperation(A, MATOP_MULT_TRANSPOSE, (void (*)(void))MatMultTranspose_Htool));
1097: if (!PetscDefined(USE_COMPLEX)) PetscCall(MatShellSetOperation(A, MATOP_MULT_HERMITIAN_TRANSPOSE, (void (*)(void))MatMultTranspose_Htool));
1098: A->ops->increaseoverlap = MatIncreaseOverlap_Htool;
1099: A->ops->createsubmatrices = MatCreateSubMatrices_Htool;
1100: PetscCall(MatShellSetOperation(A, MATOP_VIEW, (void (*)(void))MatView_Htool));
1101: PetscCall(MatShellSetOperation(A, MATOP_SET_FROM_OPTIONS, (void (*)(void))MatSetFromOptions_Htool));
1102: PetscCall(MatShellSetOperation(A, MATOP_GET_ROW, (void (*)(void))MatGetRow_Htool));
1103: PetscCall(MatShellSetOperation(A, MATOP_RESTORE_ROW, (void (*)(void))MatRestoreRow_Htool));
1104: PetscCall(MatShellSetOperation(A, MATOP_ASSEMBLY_END, (void (*)(void))MatAssemblyEnd_Htool));
1105: PetscCall(MatShellSetOperation(A, MATOP_TRANSPOSE, (void (*)(void))MatTranspose_Htool));
1106: PetscCall(MatShellSetOperation(A, MATOP_DESTROY, (void (*)(void))MatDestroy_Htool));
1107: a->dim = 0;
1108: a->gcoords_target = nullptr;
1109: a->gcoords_source = nullptr;
1110: a->min_cluster_size = 10;
1111: a->epsilon = PetscSqrtReal(PETSC_SMALL);
1112: a->eta = 10.0;
1113: a->depth[0] = 0;
1114: a->depth[1] = 0;
1115: a->block_tree_consistency = PETSC_TRUE;
1116: a->compressor = MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA;
1117: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_seqdense_C", MatProductSetFromOptions_Htool));
1118: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_mpidense_C", MatProductSetFromOptions_Htool));
1119: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_seqdense_C", MatConvert_Htool_Dense));
1120: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_mpidense_C", MatConvert_Htool_Dense));
1121: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetHierarchicalMat_C", MatHtoolGetHierarchicalMat_Htool));
1122: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolSetKernel_C", MatHtoolSetKernel_Htool));
1123: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationSource_C", MatHtoolGetPermutationSource_Htool));
1124: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationTarget_C", MatHtoolGetPermutationTarget_Htool));
1125: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUsePermutation_C", MatHtoolUsePermutation_Htool));
1126: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContext_C", MatShellSetContext_Immutable));
1127: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContextDestroy_C", MatShellSetContextDestroy_Immutable));
1128: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetManageScalingShifts_C", MatShellSetManageScalingShifts_Immutable));
1129: PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATHTOOL));
1130: PetscFunctionReturn(PETSC_SUCCESS);
1131: }