Actual source code: htool.cxx
1: #include <../src/mat/impls/htool/htool.hpp>
2: #include <set>
4: const char *const MatHtoolCompressorTypes[] = {"sympartialACA", "fullACA", "SVD"};
5: const char *const MatHtoolClusteringTypes[] = {"PCARegular", "PCAGeometric", "BoundingBox1Regular", "BoundingBox1Geometric"};
6: const char HtoolCitation[] = "@article{marchand2020two,\n"
7: " Author = {Marchand, Pierre and Claeys, Xavier and Jolivet, Pierre and Nataf, Fr\\'ed\\'eric and Tournier, Pierre-Henri},\n"
8: " Title = {Two-level preconditioning for $h$-version boundary element approximation of hypersingular operator with {GenEO}},\n"
9: " Year = {2020},\n"
10: " Publisher = {Elsevier},\n"
11: " Journal = {Numerische Mathematik},\n"
12: " Volume = {146},\n"
13: " Pages = {597--628},\n"
14: " Url = {https://github.com/htool-ddm/htool}\n"
15: "}\n";
16: static PetscBool HtoolCite = PETSC_FALSE;
18: static PetscErrorCode MatGetDiagonal_Htool(Mat A, Vec v)
19: {
20: Mat_Htool *a;
21: PetscScalar *x;
22: PetscBool flg;
24: PetscFunctionBegin;
25: PetscCall(MatHasCongruentLayouts(A, &flg));
26: PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only congruent layouts supported");
27: PetscCall(MatShellGetContext(A, &a));
28: PetscCall(VecGetArrayWrite(v, &x));
29: PetscStackCallExternalVoid("copy_diagonal_in_user_numbering", htool::copy_diagonal_in_user_numbering(a->distributed_operator_holder->hmatrix, x));
30: PetscCall(VecRestoreArrayWrite(v, &x));
31: PetscFunctionReturn(PETSC_SUCCESS);
32: }
34: static PetscErrorCode MatGetDiagonalBlock_Htool(Mat A, Mat *b)
35: {
36: Mat_Htool *a;
37: Mat B;
38: PetscScalar *ptr, shift, scale;
39: PetscBool flg;
40: PetscMPIInt rank;
41: htool::Cluster<PetscReal> *source_cluster = nullptr;
43: PetscFunctionBegin;
44: PetscCall(MatHasCongruentLayouts(A, &flg));
45: PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only congruent layouts supported");
46: PetscCall(MatShellGetContext(A, &a));
47: PetscCall(PetscObjectQuery((PetscObject)A, "DiagonalBlock", (PetscObject *)&B)); /* same logic as in MatGetDiagonalBlock_MPIDense() */
48: if (!B) {
49: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
50: PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, A->rmap->n, A->rmap->n, A->rmap->n, nullptr, &B));
51: PetscCall(MatDenseGetArrayWrite(B, &ptr));
52: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
53: source_cluster = a->source_cluster ? a->source_cluster.get() : a->target_cluster.get();
54: PetscStackCallExternalVoid("copy_to_dense_in_user_numbering", htool::copy_to_dense_in_user_numbering(*a->distributed_operator_holder->hmatrix.get_sub_hmatrix(a->target_cluster->get_cluster_on_partition(rank), source_cluster->get_cluster_on_partition(rank)), ptr));
55: PetscCall(MatDenseRestoreArrayWrite(B, &ptr));
56: PetscCall(MatPropagateSymmetryOptions(A, B));
57: PetscCall(PetscObjectCompose((PetscObject)A, "DiagonalBlock", (PetscObject)B));
58: *b = B;
59: PetscCall(MatDestroy(&B));
60: PetscCall(MatScale(*b, scale)); /* the shell represents scale * A + shift * I, so scale before shifting */
61: PetscCall(MatShift(*b, shift));
62: } else {
63: PetscCall(MatShellGetScalingShifts(A, (PetscScalar *)MAT_SHELL_NOT_ALLOWED, (PetscScalar *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
64: *b = B;
65: }
66: PetscFunctionReturn(PETSC_SUCCESS);
67: }
69: static PetscErrorCode MatMult_Htool(Mat A, Vec x, Vec y)
70: {
71: Mat_Htool *a;
72: const PetscScalar *in;
73: PetscScalar *out;
75: PetscFunctionBegin;
76: PetscCall(MatShellGetContext(A, &a));
77: PetscCall(VecGetArrayRead(x, &in));
78: PetscCall(VecGetArrayWrite(y, &out));
79: if (a->permutation == PETSC_TRUE) htool::add_distributed_operator_vector_product_local_to_local<PetscScalar>('N', 1.0, a->distributed_operator_holder->distributed_operator, in, 0.0, out, nullptr);
80: else htool::internal_add_distributed_operator_vector_product_local_to_local<PetscScalar>('N', 1.0, a->distributed_operator_holder->distributed_operator, in, 0.0, out, nullptr);
81: PetscCall(VecRestoreArrayRead(x, &in));
82: PetscCall(VecRestoreArrayWrite(y, &out));
83: PetscFunctionReturn(PETSC_SUCCESS);
84: }
86: static PetscErrorCode MatMultTranspose_Htool(Mat A, Vec x, Vec y)
87: {
88: Mat_Htool *a;
89: const PetscScalar *in;
90: PetscScalar *out;
92: PetscFunctionBegin;
93: PetscCall(MatShellGetContext(A, &a));
94: PetscCall(VecGetArrayRead(x, &in));
95: PetscCall(VecGetArrayWrite(y, &out));
96: if (a->permutation == PETSC_TRUE) htool::add_distributed_operator_vector_product_local_to_local<PetscScalar>('T', 1.0, a->distributed_operator_holder->distributed_operator, in, 0.0, out, nullptr);
97: else htool::internal_add_distributed_operator_vector_product_local_to_local<PetscScalar>('T', 1.0, a->distributed_operator_holder->distributed_operator, in, 0.0, out, nullptr);
98: PetscCall(VecRestoreArrayRead(x, &in));
99: PetscCall(VecRestoreArrayWrite(y, &out));
100: PetscFunctionReturn(PETSC_SUCCESS);
101: }
103: static PetscErrorCode MatIncreaseOverlap_Htool(Mat A, PetscInt is_max, IS is[], PetscInt ov)
104: {
105: std::set<PetscInt> set;
106: const PetscInt *idx;
107: PetscInt *oidx, size, bs[2];
108: PetscMPIInt csize;
110: PetscFunctionBegin;
111: PetscCall(MatGetBlockSizes(A, bs, bs + 1));
112: if (bs[0] != bs[1]) bs[0] = 1;
113: for (PetscInt i = 0; i < is_max; ++i) {
114: /* basic implementation that adds indices by shifting an IS by -ov, -ov+1..., -1, 1..., ov-1, ov */
115: /* needed to keep the subdomain matrices from replicating all of A, since it is dense */
116: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)is[i]), &csize));
117: PetscCheck(csize == 1, PETSC_COMM_SELF, PETSC_ERR_WRONG_MPI_SIZE, "Unsupported parallel IS");
118: PetscCall(ISGetSize(is[i], &size));
119: PetscCall(ISGetIndices(is[i], &idx));
120: for (PetscInt j = 0; j < size; ++j) {
121: set.insert(idx[j]);
122: for (PetscInt k = 1; k <= ov; ++k) { /* for each layer of overlap */
123: if (idx[j] - k >= 0) set.insert(idx[j] - k); /* do not insert negative indices */
124: if (idx[j] + k < A->rmap->N && idx[j] + k < A->cmap->N) set.insert(idx[j] + k); /* do not insert indices greater than the dimension of A */
125: }
126: }
127: PetscCall(ISRestoreIndices(is[i], &idx));
128: PetscCall(ISDestroy(is + i));
129: if (bs[0] > 1) {
130: for (std::set<PetscInt>::const_iterator it = set.cbegin(); it != set.cend(); ++it) { /* cbegin() yields a const_iterator; the insertion below does not invalidate it */
131: std::vector<PetscInt> block(bs[0]);
132: std::iota(block.begin(), block.end(), (*it / bs[0]) * bs[0]);
133: set.insert(block.cbegin(), block.cend());
134: }
135: }
136: size = set.size(); /* size with overlap */
137: PetscCall(PetscMalloc1(size, &oidx));
138: for (const PetscInt j : set) *oidx++ = j;
139: oidx -= size;
140: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, size, oidx, PETSC_OWN_POINTER, is + i));
141: set.clear(); /* reset so indices gathered for this IS do not leak into the next one */
142: }
143: PetscFunctionReturn(PETSC_SUCCESS);
144: }
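/* A worked example of the overlap growth above (a sketch, not executed code): with ov = 1,
 * bs = 1, and an initial IS {4, 9} on a matrix with N = 10, each index j contributes
 * j - 1, j, and j + 1, clipped to [0, N), so the returned IS holds {3, 4, 5, 8, 9}.
 * With bs = 2, the block pass then rounds each index down to the start of its block and
 * inserts the whole block, giving {2, 3, 4, 5, 8, 9}. */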
145: static PetscErrorCode MatCreateSubMatrices_Htool(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
146: {
147: Mat_Htool *a;
148: Mat D, B, BT;
149: const PetscScalar *copy;
150: PetscScalar *ptr, shift, scale;
151: const PetscInt *idxr, *idxc, *it;
152: PetscInt nrow, m, i;
153: PetscBool flg;
155: PetscFunctionBegin;
156: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
157: PetscCall(MatShellGetContext(A, &a));
158: if (scall != MAT_REUSE_MATRIX) PetscCall(PetscCalloc1(n, submat));
159: for (i = 0; i < n; ++i) {
160: PetscCall(ISGetLocalSize(irow[i], &nrow));
161: PetscCall(ISGetLocalSize(icol[i], &m));
162: PetscCall(ISGetIndices(irow[i], &idxr));
163: PetscCall(ISGetIndices(icol[i], &idxc));
164: if (scall != MAT_REUSE_MATRIX) PetscCall(MatCreateDense(PETSC_COMM_SELF, nrow, m, nrow, m, nullptr, (*submat) + i));
165: PetscCall(MatDenseGetArrayWrite((*submat)[i], &ptr));
166: if (irow[i] == icol[i]) { /* same row and column IS? */
167: PetscCall(MatHasCongruentLayouts(A, &flg));
168: if (flg) {
169: PetscCall(ISSorted(irow[i], &flg));
170: if (flg) { /* sorted IS? */
171: it = std::lower_bound(idxr, idxr + nrow, A->rmap->rstart);
172: if (it != idxr + nrow && *it == A->rmap->rstart) { /* rmap->rstart in IS? */
173: if (std::distance(idxr, it) + A->rmap->n <= nrow) { /* long enough IS to store the local diagonal block? */
174: for (PetscInt j = 0; j < A->rmap->n && flg; ++j)
175: if (PetscUnlikely(it[j] != A->rmap->rstart + j)) flg = PETSC_FALSE;
176: if (flg) { /* complete local diagonal block in IS? */
177: /* fast extraction when the local diagonal block is part of the submatrix, e.g., for PCASM or PCHPDDM
178: * [ B C E ]
179: * A = [ B D E ]
180: * [ B F E ]
181: */
182: m = std::distance(idxr, it); /* shift of the coefficient (0,0) of block D from above */
183: PetscCall(MatGetDiagonalBlock(A, &D));
184: PetscCall(MatDenseGetArrayRead(D, &copy));
185: for (PetscInt k = 0; k < A->rmap->n; ++k) PetscCall(PetscArraycpy(ptr + (m + k) * nrow + m, copy + k * A->rmap->n, A->rmap->n)); /* block D from above */
186: PetscCall(MatDenseRestoreArrayRead(D, &copy));
187: if (m) {
188: a->wrapper->copy_submatrix(nrow, m, idxr, idxc, ptr); /* vertical block B from above */
189: /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
190: if (A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE) {
191: PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, m, A->rmap->n, m, ptr + m, &B));
192: PetscCall(MatDenseSetLDA(B, nrow));
193: PetscCall(MatCreateDense(PETSC_COMM_SELF, m, A->rmap->n, m, A->rmap->n, ptr + m * nrow, &BT));
194: PetscCall(MatDenseSetLDA(BT, nrow));
195: if (A->hermitian == PETSC_BOOL3_TRUE && PetscDefined(USE_COMPLEX)) {
196: PetscCall(MatHermitianTranspose(B, MAT_REUSE_MATRIX, &BT));
197: } else {
198: PetscCall(MatTransposeSetPrecursor(B, BT));
199: PetscCall(MatTranspose(B, MAT_REUSE_MATRIX, &BT));
200: }
201: PetscCall(MatDestroy(&B));
202: PetscCall(MatDestroy(&BT));
203: } else {
204: for (PetscInt k = 0; k < A->rmap->n; ++k) { /* block C from above */
205: a->wrapper->copy_submatrix(m, 1, idxr, idxc + m + k, ptr + (m + k) * nrow);
206: }
207: }
208: }
209: if (m + A->rmap->n != nrow) {
210: a->wrapper->copy_submatrix(nrow, std::distance(it + A->rmap->n, idxr + nrow), idxr, idxc + m + A->rmap->n, ptr + (m + A->rmap->n) * nrow); /* vertical block E from above */
211: /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
212: if (A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE) {
213: PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, nrow - (m + A->rmap->n), A->rmap->n, nrow - (m + A->rmap->n), ptr + (m + A->rmap->n) * nrow + m, &B));
214: PetscCall(MatDenseSetLDA(B, nrow));
215: PetscCall(MatCreateDense(PETSC_COMM_SELF, nrow - (m + A->rmap->n), A->rmap->n, nrow - (m + A->rmap->n), A->rmap->n, ptr + m * nrow + m + A->rmap->n, &BT));
216: PetscCall(MatDenseSetLDA(BT, nrow));
217: if (A->hermitian == PETSC_BOOL3_TRUE && PetscDefined(USE_COMPLEX)) {
218: PetscCall(MatHermitianTranspose(B, MAT_REUSE_MATRIX, &BT));
219: } else {
220: PetscCall(MatTransposeSetPrecursor(B, BT));
221: PetscCall(MatTranspose(B, MAT_REUSE_MATRIX, &BT));
222: }
223: PetscCall(MatDestroy(&B));
224: PetscCall(MatDestroy(&BT));
225: } else {
226: for (PetscInt k = 0; k < A->rmap->n; ++k) { /* block F from above */
227: a->wrapper->copy_submatrix(std::distance(it + A->rmap->n, idxr + nrow), 1, it + A->rmap->n, idxc + m + k, ptr + (m + k) * nrow + m + A->rmap->n);
228: }
229: }
230: }
231: } /* complete local diagonal block not in IS */
232: } else flg = PETSC_FALSE; /* IS not long enough to store the local diagonal block */
233: } else flg = PETSC_FALSE; /* rmap->rstart not in IS */
234: } /* unsorted IS */
235: }
236: } else flg = PETSC_FALSE; /* different row and column IS */
237: if (!flg) a->wrapper->copy_submatrix(nrow, m, idxr, idxc, ptr); /* reassemble everything */
238: PetscCall(ISRestoreIndices(irow[i], &idxr));
239: PetscCall(ISRestoreIndices(icol[i], &idxc));
240: PetscCall(MatDenseRestoreArrayWrite((*submat)[i], &ptr));
241: PetscCall(MatScale((*submat)[i], scale)); /* scale before shifting, consistently with the MATSHELL definition scale * A + shift * I */
242: PetscCall(MatShift((*submat)[i], shift));
243: }
244: PetscFunctionReturn(PETSC_SUCCESS);
245: }
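/* The fast path above is typically reached through overlapping domain decomposition
 * preconditioners, e.g., with a command line such as (a sketch, any program assembling a
 * MATHTOOL will do)
 *
 *   ./a.out -mat_type htool -pc_type asm -pc_asm_overlap 2
 *
 * PCASM calls MatIncreaseOverlap() and then MatCreateSubMatrices() with irow == icol, sorted,
 * and containing the complete local diagonal block, so only the blocks B, C (or its
 * transpose), E, and F from the sketch above are assembled entry-wise. */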
247: static PetscErrorCode MatDestroy_Htool(Mat A)
248: {
249: Mat_Htool *a;
250: PetscContainer container;
251: MatHtoolKernelTranspose *kernelt;
253: PetscFunctionBegin;
254: PetscCall(MatShellGetContext(A, &a));
255: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_seqdense_C", nullptr));
256: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_mpidense_C", nullptr));
257: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_seqdense_C", nullptr));
258: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_mpidense_C", nullptr));
259: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetHierarchicalMat_C", nullptr));
260: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolSetKernel_C", nullptr));
261: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationSource_C", nullptr));
262: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationTarget_C", nullptr));
263: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUsePermutation_C", nullptr));
264: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUseRecompression_C", nullptr));
265: PetscCall(PetscObjectQuery((PetscObject)A, "KernelTranspose", (PetscObject *)&container));
266: if (container) { /* created in MatTranspose_Htool() */
267: PetscCall(PetscContainerGetPointer(container, (void **)&kernelt));
268: PetscCall(MatDestroy(&kernelt->A));
269: PetscCall(PetscObjectCompose((PetscObject)A, "KernelTranspose", nullptr));
270: }
271: if (a->gcoords_source != a->gcoords_target) PetscCall(PetscFree(a->gcoords_source));
272: PetscCall(PetscFree(a->gcoords_target));
273: delete a->wrapper;
274: a->target_cluster.reset();
275: a->source_cluster.reset();
276: a->distributed_operator_holder.reset();
277: PetscCall(PetscFree(a));
278: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContext_C", nullptr)); // needed to avoid a call to MatShellSetContext_Immutable()
279: PetscFunctionReturn(PETSC_SUCCESS);
280: }
282: static PetscErrorCode MatView_Htool(Mat A, PetscViewer pv)
283: {
284: Mat_Htool *a;
285: PetscScalar shift, scale;
286: PetscBool flg;
287: std::map<std::string, std::string> hmatrix_information;
289: PetscFunctionBegin;
290: PetscCall(MatShellGetContext(A, &a));
291: hmatrix_information = htool::get_distributed_hmatrix_information(a->distributed_operator_holder->hmatrix, PetscObjectComm((PetscObject)A));
292: PetscCall(PetscObjectTypeCompare((PetscObject)pv, PETSCVIEWERASCII, &flg));
293: if (flg) {
294: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
295: PetscCall(PetscViewerASCIIPrintf(pv, "symmetry: %c\n", a->distributed_operator_holder->block_diagonal_hmatrix->get_symmetry()));
296: if (PetscAbsScalar(scale - 1.0) > PETSC_MACHINE_EPSILON) {
297: #if defined(PETSC_USE_COMPLEX)
298: PetscCall(PetscViewerASCIIPrintf(pv, "scaling: %g+%gi\n", (double)PetscRealPart(scale), (double)PetscImaginaryPart(scale)));
299: #else
300: PetscCall(PetscViewerASCIIPrintf(pv, "scaling: %g\n", (double)scale));
301: #endif
302: }
303: if (PetscAbsScalar(shift) > PETSC_MACHINE_EPSILON) {
304: #if defined(PETSC_USE_COMPLEX)
305: PetscCall(PetscViewerASCIIPrintf(pv, "shift: %g+%gi\n", (double)PetscRealPart(shift), (double)PetscImaginaryPart(shift)));
306: #else
307: PetscCall(PetscViewerASCIIPrintf(pv, "shift: %g\n", (double)shift));
308: #endif
309: }
310: PetscCall(PetscViewerASCIIPrintf(pv, "maximal cluster leaf size: %" PetscInt_FMT "\n", a->max_cluster_leaf_size));
311: PetscCall(PetscViewerASCIIPrintf(pv, "epsilon: %g\n", (double)a->epsilon));
312: PetscCall(PetscViewerASCIIPrintf(pv, "eta: %g\n", (double)a->eta));
313: PetscCall(PetscViewerASCIIPrintf(pv, "minimum target depth: %" PetscInt_FMT "\n", a->depth[0]));
314: PetscCall(PetscViewerASCIIPrintf(pv, "minimum source depth: %" PetscInt_FMT "\n", a->depth[1]));
315: PetscCall(PetscViewerASCIIPrintf(pv, "compressor: %s\n", MatHtoolCompressorTypes[a->compressor]));
316: PetscCall(PetscViewerASCIIPrintf(pv, "clustering: %s\n", MatHtoolClusteringTypes[a->clustering]));
317: PetscCall(PetscViewerASCIIPrintf(pv, "compression ratio: %s\n", hmatrix_information["Compression_ratio"].c_str()));
318: PetscCall(PetscViewerASCIIPrintf(pv, "space saving: %s\n", hmatrix_information["Space_saving"].c_str()));
319: PetscCall(PetscViewerASCIIPrintf(pv, "block tree consistency: %s\n", PetscBools[a->distributed_operator_holder->hmatrix.is_block_tree_consistent()]));
320: PetscCall(PetscViewerASCIIPrintf(pv, "recompression: %s\n", PetscBools[a->recompression]));
321: PetscCall(PetscViewerASCIIPrintf(pv, "number of dense (resp. low rank) matrices: %s (resp. %s)\n", hmatrix_information["Number_of_dense_blocks"].c_str(), hmatrix_information["Number_of_low_rank_blocks"].c_str()));
322: PetscCall(
323: PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) dense block sizes: (%s, %s, %s)\n", hmatrix_information["Dense_block_size_min"].c_str(), hmatrix_information["Dense_block_size_mean"].c_str(), hmatrix_information["Dense_block_size_max"].c_str()));
324: PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) low rank block sizes: (%s, %s, %s)\n", hmatrix_information["Low_rank_block_size_min"].c_str(), hmatrix_information["Low_rank_block_size_mean"].c_str(),
325: hmatrix_information["Low_rank_block_size_max"].c_str()));
326: PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) ranks: (%s, %s, %s)\n", hmatrix_information["Rank_min"].c_str(), hmatrix_information["Rank_mean"].c_str(), hmatrix_information["Rank_max"].c_str()));
327: }
328: PetscFunctionReturn(PETSC_SUCCESS);
329: }
331: /* naive implementation of MatGetRow() needed for MatConvert_Nest_AIJ() */
332: static PetscErrorCode MatGetRow_Htool(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
333: {
334: Mat_Htool *a;
335: PetscScalar shift, scale;
336: PetscInt *idxc = nullptr; /* stays nullptr when neither idx nor v is requested, so the PetscFree() below is safe */
337: PetscBLASInt one = 1, bn;
339: PetscFunctionBegin;
340: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
341: PetscCall(MatShellGetContext(A, &a));
342: if (nz) *nz = A->cmap->N;
343: if (idx || v) { /* even if !idx, need to set idxc for htool::copy_submatrix() */
344: PetscCall(PetscMalloc1(A->cmap->N, &idxc));
345: for (PetscInt i = 0; i < A->cmap->N; ++i) idxc[i] = i;
346: }
347: if (idx) *idx = idxc;
348: if (v) {
349: PetscCall(PetscMalloc1(A->cmap->N, v));
350: if (a->wrapper) a->wrapper->copy_submatrix(1, A->cmap->N, &row, idxc, *v);
351: else reinterpret_cast<htool::VirtualGenerator<PetscScalar> *>(a->kernelctx)->copy_submatrix(1, A->cmap->N, &row, idxc, *v);
352: PetscCall(PetscBLASIntCast(A->cmap->N, &bn));
353: PetscCallCXX(htool::Blas<PetscScalar>::scal(&bn, &scale, *v, &one));
354: if (row < A->cmap->N) (*v)[row] += shift;
355: }
356: if (!idx) PetscCall(PetscFree(idxc));
357: PetscFunctionReturn(PETSC_SUCCESS);
358: }
360: static PetscErrorCode MatRestoreRow_Htool(Mat, PetscInt, PetscInt *, PetscInt **idx, PetscScalar **v)
361: {
362: PetscFunctionBegin;
363: if (idx) PetscCall(PetscFree(*idx));
364: if (v) PetscCall(PetscFree(*v));
365: PetscFunctionReturn(PETSC_SUCCESS);
366: }
368: static PetscErrorCode MatSetFromOptions_Htool(Mat A, PetscOptionItems PetscOptionsObject)
369: {
370: Mat_Htool *a;
371: PetscInt n;
372: PetscBool flg;
374: PetscFunctionBegin;
375: PetscCall(MatShellGetContext(A, &a));
376: PetscOptionsHeadBegin(PetscOptionsObject, "Htool options");
377: PetscCall(PetscOptionsBoundedInt("-mat_htool_max_cluster_leaf_size", "Maximal leaf size in cluster tree", nullptr, a->max_cluster_leaf_size, &a->max_cluster_leaf_size, nullptr, 0));
378: PetscCall(PetscOptionsBoundedReal("-mat_htool_epsilon", "Relative error in Frobenius norm when approximating a block", nullptr, a->epsilon, &a->epsilon, nullptr, 0.0));
379: PetscCall(PetscOptionsReal("-mat_htool_eta", "Admissibility condition tolerance", nullptr, a->eta, &a->eta, nullptr));
380: PetscCall(PetscOptionsBoundedInt("-mat_htool_min_target_depth", "Minimal cluster tree depth associated with the rows", nullptr, a->depth[0], a->depth, nullptr, 0));
381: PetscCall(PetscOptionsBoundedInt("-mat_htool_min_source_depth", "Minimal cluster tree depth associated with the columns", nullptr, a->depth[1], a->depth + 1, nullptr, 0));
382: PetscCall(PetscOptionsBool("-mat_htool_block_tree_consistency", "Block tree consistency", nullptr, a->block_tree_consistency, &a->block_tree_consistency, nullptr));
383: PetscCall(PetscOptionsBool("-mat_htool_recompression", "Use recompression", nullptr, a->recompression, &a->recompression, nullptr));
385: n = 0;
386: PetscCall(PetscOptionsEList("-mat_htool_compressor", "Type of compression", "MatHtoolCompressorType", MatHtoolCompressorTypes, PETSC_STATIC_ARRAY_LENGTH(MatHtoolCompressorTypes), MatHtoolCompressorTypes[MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA], &n, &flg));
387: if (flg) a->compressor = MatHtoolCompressorType(n);
388: n = 0;
389: PetscCall(PetscOptionsEList("-mat_htool_clustering", "Type of clustering", "MatHtoolClusteringType", MatHtoolClusteringTypes, PETSC_STATIC_ARRAY_LENGTH(MatHtoolClusteringTypes), MatHtoolClusteringTypes[MAT_HTOOL_CLUSTERING_PCA_REGULAR], &n, &flg));
390: if (flg) a->clustering = MatHtoolClusteringType(n);
391: PetscOptionsHeadEnd();
392: PetscFunctionReturn(PETSC_SUCCESS);
393: }
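/* For instance (a sketch, the exact executable does not matter as long as it assembles a MATHTOOL):
 *
 *   ./a.out -mat_htool_epsilon 1e-6 -mat_htool_eta 5 -mat_htool_compressor fullACA -mat_htool_clustering BoundingBox1Regular
 *
 * sets the blockwise relative accuracy, the admissibility condition tolerance, and the
 * compression and clustering strategies parsed above. */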
395: static PetscErrorCode MatAssemblyEnd_Htool(Mat A, MatAssemblyType)
396: {
397: Mat_Htool *a;
398: const PetscInt *ranges;
399: PetscInt *offset;
400: PetscMPIInt size, rank;
401: char S = PetscDefined(USE_COMPLEX) && A->hermitian == PETSC_BOOL3_TRUE ? 'H' : (A->symmetric == PETSC_BOOL3_TRUE ? 'S' : 'N'), uplo = S == 'N' ? 'N' : 'U';
402: htool::VirtualGenerator<PetscScalar> *generator = nullptr;
403: htool::ClusterTreeBuilder<PetscReal> recursive_build_strategy;
404: htool::Cluster<PetscReal> *source_cluster;
405: std::shared_ptr<htool::VirtualInternalLowRankGenerator<PetscScalar>> compressor;
407: PetscFunctionBegin;
408: PetscCall(PetscCitationsRegister(HtoolCitation, &HtoolCite));
409: PetscCall(MatShellGetContext(A, &a));
410: delete a->wrapper;
411: a->target_cluster.reset();
412: a->source_cluster.reset();
413: a->distributed_operator_holder.reset();
414: // clustering
415: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
416: PetscCall(PetscMalloc1(2 * size, &offset));
417: PetscCall(MatGetOwnershipRanges(A, &ranges));
418: for (PetscInt i = 0; i < size; ++i) {
419: offset[2 * i] = ranges[i];
420: offset[2 * i + 1] = ranges[i + 1] - ranges[i];
421: }
422: switch (a->clustering) {
423: case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC:
424: recursive_build_strategy.set_partitioning_strategy(std::make_shared<htool::Partitioning<PetscReal, htool::ComputeLargestExtent<PetscReal>, htool::GeometricSplitting<PetscReal>>>());
425: break;
426: case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC:
427: recursive_build_strategy.set_partitioning_strategy(std::make_shared<htool::Partitioning<PetscReal, htool::ComputeBoundingBox<PetscReal>, htool::GeometricSplitting<PetscReal>>>());
428: break;
429: case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR:
430: recursive_build_strategy.set_partitioning_strategy(std::make_shared<htool::Partitioning<PetscReal, htool::ComputeBoundingBox<PetscReal>, htool::RegularSplitting<PetscReal>>>());
431: break;
432: default:
433: recursive_build_strategy.set_partitioning_strategy(std::make_shared<htool::Partitioning<PetscReal, htool::ComputeLargestExtent<PetscReal>, htool::RegularSplitting<PetscReal>>>());
434: }
435: recursive_build_strategy.set_maximal_leaf_size(a->max_cluster_leaf_size);
436: a->target_cluster = std::make_unique<htool::Cluster<PetscReal>>(recursive_build_strategy.create_cluster_tree_from_local_partition(A->rmap->N, a->dim, a->gcoords_target, 2, size, offset));
437: if (a->gcoords_target != a->gcoords_source) {
438: PetscCall(MatGetOwnershipRangesColumn(A, &ranges));
439: for (PetscInt i = 0; i < size; ++i) {
440: offset[2 * i] = ranges[i];
441: offset[2 * i + 1] = ranges[i + 1] - ranges[i];
442: }
443: switch (a->clustering) {
444: case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC:
445: recursive_build_strategy.set_partitioning_strategy(std::make_shared<htool::Partitioning<PetscReal, htool::ComputeLargestExtent<PetscReal>, htool::GeometricSplitting<PetscReal>>>());
446: break;
447: case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC:
448: recursive_build_strategy.set_partitioning_strategy(std::make_shared<htool::Partitioning<PetscReal, htool::ComputeBoundingBox<PetscReal>, htool::GeometricSplitting<PetscReal>>>());
449: break;
450: case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR:
451: recursive_build_strategy.set_partitioning_strategy(std::make_shared<htool::Partitioning<PetscReal, htool::ComputeBoundingBox<PetscReal>, htool::RegularSplitting<PetscReal>>>());
452: break;
453: default:
454: recursive_build_strategy.set_partitioning_strategy(std::make_shared<htool::Partitioning<PetscReal, htool::ComputeLargestExtent<PetscReal>, htool::RegularSplitting<PetscReal>>>());
455: }
456: recursive_build_strategy.set_maximal_leaf_size(a->max_cluster_leaf_size);
457: a->source_cluster = std::make_unique<htool::Cluster<PetscReal>>(recursive_build_strategy.create_cluster_tree_from_local_partition(A->cmap->N, a->dim, a->gcoords_source, 2, size, offset));
458: S = uplo = 'N';
459: source_cluster = a->source_cluster.get();
460: } else source_cluster = a->target_cluster.get();
461: PetscCall(PetscFree(offset));
462: // generator
463: if (a->kernel) a->wrapper = new WrapperHtool(a->dim, a->kernel, a->kernelctx);
464: else {
465: a->wrapper = nullptr;
466: generator = reinterpret_cast<htool::VirtualGenerator<PetscScalar> *>(a->kernelctx);
467: }
468: // compressor
469: switch (a->compressor) {
470: case MAT_HTOOL_COMPRESSOR_FULL_ACA:
471: compressor = std::make_shared<htool::fullACA<PetscScalar>>(a->wrapper ? *a->wrapper : *generator, a->target_cluster->get_permutation().data(), source_cluster->get_permutation().data());
472: break;
473: case MAT_HTOOL_COMPRESSOR_SVD:
474: compressor = std::make_shared<htool::SVD<PetscScalar>>(a->wrapper ? *a->wrapper : *generator, a->target_cluster->get_permutation().data(), source_cluster->get_permutation().data());
475: break;
476: default:
477: compressor = std::make_shared<htool::sympartialACA<PetscScalar>>(a->wrapper ? *a->wrapper : *generator, a->target_cluster->get_permutation().data(), source_cluster->get_permutation().data());
478: }
479: // local hierarchical matrix
480: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
481: auto hmatrix_builder = htool::HMatrixTreeBuilder<PetscScalar>(a->epsilon, a->eta, S, uplo);
482: if (a->recompression) {
483: std::shared_ptr<htool::VirtualInternalLowRankGenerator<PetscScalar>> RecompressedLowRankGenerator = std::make_shared<htool::RecompressedLowRankGenerator<PetscScalar>>(*compressor, std::function<void(htool::LowRankMatrix<PetscScalar> &)>(htool::SVD_recompression<PetscScalar>));
484: hmatrix_builder.set_low_rank_generator(RecompressedLowRankGenerator);
485: } else {
486: hmatrix_builder.set_low_rank_generator(compressor);
487: }
488: hmatrix_builder.set_minimal_target_depth(a->depth[0]);
489: hmatrix_builder.set_minimal_source_depth(a->depth[1]);
490: PetscCheck(a->block_tree_consistency || !(A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE), PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot have a MatHtool with inconsistent block tree which is either symmetric or Hermitian");
491: hmatrix_builder.set_block_tree_consistency(a->block_tree_consistency);
492: a->distributed_operator_holder = std::make_unique<htool::DefaultApproximationBuilder<PetscScalar>>(a->wrapper ? *a->wrapper : *generator, *a->target_cluster, *source_cluster, hmatrix_builder, PetscObjectComm((PetscObject)A));
493: PetscFunctionReturn(PETSC_SUCCESS);
494: }
496: static PetscErrorCode MatProductNumeric_Htool(Mat C)
497: {
498: Mat_Product *product = C->product;
499: Mat_Htool *a;
500: const PetscScalar *in;
501: PetscScalar *out;
502: PetscInt N, lda;
504: PetscFunctionBegin;
505: MatCheckProduct(C, 1);
506: PetscCall(MatGetSize(C, nullptr, &N));
507: PetscCall(MatDenseGetLDA(C, &lda));
508: PetscCheck(lda == C->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")", lda, C->rmap->n);
509: PetscCall(MatDenseGetArrayRead(product->B, &in));
510: PetscCall(MatDenseGetArrayWrite(C, &out));
511: PetscCall(MatShellGetContext(product->A, &a));
512: switch (product->type) {
513: case MATPRODUCT_AB:
514: if (a->permutation == PETSC_TRUE) htool::add_distributed_operator_matrix_product_local_to_local<PetscScalar>('N', 1.0, a->distributed_operator_holder->distributed_operator, in, 0.0, out, N, nullptr);
515: else htool::internal_add_distributed_operator_matrix_product_local_to_local<PetscScalar>('N', 1.0, a->distributed_operator_holder->distributed_operator, in, 0.0, out, N, nullptr);
516: break;
517: case MATPRODUCT_AtB:
518: if (a->permutation == PETSC_TRUE) htool::add_distributed_operator_matrix_product_local_to_local<PetscScalar>('T', 1.0, a->distributed_operator_holder->distributed_operator, in, 0.0, out, N, nullptr);
519: else htool::internal_add_distributed_operator_matrix_product_local_to_local<PetscScalar>('T', 1.0, a->distributed_operator_holder->distributed_operator, in, 0.0, out, N, nullptr);
520: break;
521: default:
522: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatProductType %s is not supported", MatProductTypes[product->type]);
523: }
524: PetscCall(MatDenseRestoreArrayWrite(C, &out));
525: PetscCall(MatDenseRestoreArrayRead(product->B, &in));
526: PetscFunctionReturn(PETSC_SUCCESS);
527: }
529: static PetscErrorCode MatProductSymbolic_Htool(Mat C)
530: {
531: Mat_Product *product = C->product;
532: Mat A, B;
533: PetscBool flg;
535: PetscFunctionBegin;
536: MatCheckProduct(C, 1);
537: A = product->A;
538: B = product->B;
539: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQDENSE, MATMPIDENSE, ""));
540: PetscCheck(flg && (product->type == MATPRODUCT_AB || product->type == MATPRODUCT_AtB), PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "ProductType %s not supported for %s", MatProductTypes[product->type], ((PetscObject)product->B)->type_name);
541: if (C->rmap->n == PETSC_DECIDE || C->cmap->n == PETSC_DECIDE || C->rmap->N == PETSC_DECIDE || C->cmap->N == PETSC_DECIDE) {
542: if (product->type == MATPRODUCT_AB) PetscCall(MatSetSizes(C, A->rmap->n, B->cmap->n, A->rmap->N, B->cmap->N));
543: else PetscCall(MatSetSizes(C, A->cmap->n, B->cmap->n, A->cmap->N, B->cmap->N));
544: }
545: PetscCall(MatSetType(C, MATDENSE));
546: PetscCall(MatSetUp(C));
547: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
548: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
549: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
550: C->ops->productsymbolic = nullptr;
551: C->ops->productnumeric = MatProductNumeric_Htool;
552: PetscFunctionReturn(PETSC_SUCCESS);
553: }
555: static PetscErrorCode MatProductSetFromOptions_Htool(Mat C)
556: {
557: PetscFunctionBegin;
558: MatCheckProduct(C, 1);
559: if (C->product->type == MATPRODUCT_AB || C->product->type == MATPRODUCT_AtB) C->ops->productsymbolic = MatProductSymbolic_Htool;
560: PetscFunctionReturn(PETSC_SUCCESS);
561: }
563: static PetscErrorCode MatHtoolGetHierarchicalMat_Htool(Mat A, const htool::DistributedOperator<PetscScalar> **distributed_operator)
564: {
565: Mat_Htool *a;
567: PetscFunctionBegin;
568: PetscCall(MatShellGetContext(A, &a));
569: *distributed_operator = &a->distributed_operator_holder->distributed_operator;
570: PetscFunctionReturn(PETSC_SUCCESS);
571: }
573: /*@C
574: MatHtoolGetHierarchicalMat - Retrieves the opaque pointer to a Htool virtual matrix stored in a `MATHTOOL`.
576: No Fortran Support, No C Support
578: Input Parameter:
579: . A - hierarchical matrix
581: Output Parameter:
582: . distributed_operator - opaque pointer to a Htool virtual matrix
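Example Usage (a minimal C++ sketch, assuming `A` is an assembled `MATHTOOL`):
.vb
  const htool::DistributedOperator<PetscScalar> *distributed_operator;

  PetscCall(MatHtoolGetHierarchicalMat(A, &distributed_operator));
  // the pointer can now be passed to Htool's C++ API; it remains owned by A
.ve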
584: Level: advanced
586: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`
587: @*/
588: PETSC_EXTERN PetscErrorCode MatHtoolGetHierarchicalMat(Mat A, const htool::DistributedOperator<PetscScalar> **distributed_operator)
589: {
590: PetscFunctionBegin;
591: PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
592: PetscAssertPointer(distributed_operator, 2);
593: PetscTryMethod(A, "MatHtoolGetHierarchicalMat_C", (Mat, const htool::DistributedOperator<PetscScalar> **), (A, distributed_operator));
594: PetscFunctionReturn(PETSC_SUCCESS);
595: }
597: static PetscErrorCode MatHtoolSetKernel_Htool(Mat A, MatHtoolKernelFn *kernel, void *kernelctx)
598: {
599: Mat_Htool *a;
601: PetscFunctionBegin;
602: PetscCall(MatShellGetContext(A, &a));
603: a->kernel = kernel;
604: a->kernelctx = kernelctx;
605: delete a->wrapper;
606: if (a->kernel) a->wrapper = new WrapperHtool(a->dim, a->kernel, a->kernelctx);
607: PetscFunctionReturn(PETSC_SUCCESS);
608: }
610: /*@C
611: MatHtoolSetKernel - Sets the kernel and context used for the assembly of a `MATHTOOL`.
613: Collective, No Fortran Support
615: Input Parameters:
616: + A - hierarchical matrix
617: . kernel - computational kernel (or `NULL`)
618: - kernelctx - kernel context (if kernel is `NULL`, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)
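Example Usage (a sketch; `GenEntries` and the `RBF` context are illustrative, not part of the API):
.vb
  static PetscErrorCode GenEntries(PetscInt sdim, PetscInt M, PetscInt N, const PetscInt *rows, const PetscInt *cols, PetscScalar *ptr, void *ctx)
  {
    RBF *rbf = (RBF *)ctx; // user-defined radial basis function attached to the coordinates

    PetscFunctionBeginUser;
    for (PetscInt j = 0; j < N; ++j)
      for (PetscInt i = 0; i < M; ++i) ptr[i + j * M] = rbf->eval(sdim, rows[i], cols[j]); // column-major M-by-N block
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(MatHtoolSetKernel(A, GenEntries, rbf));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); // rebuilds the hierarchical approximation with the new kernel
.ve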
620: Level: advanced
622: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatCreateHtoolFromKernel()`
623: @*/
624: PetscErrorCode MatHtoolSetKernel(Mat A, MatHtoolKernelFn *kernel, void *kernelctx)
625: {
626: PetscFunctionBegin;
627: PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
629: if (!kernel) PetscAssertPointer(kernelctx, 3);
630: PetscTryMethod(A, "MatHtoolSetKernel_C", (Mat, MatHtoolKernelFn *, void *), (A, kernel, kernelctx));
631: PetscFunctionReturn(PETSC_SUCCESS);
632: }
634: static PetscErrorCode MatHtoolGetPermutationSource_Htool(Mat A, IS *is)
635: {
636: Mat_Htool *a;
637: PetscMPIInt rank;
638: const std::vector<PetscInt> *source;
639: const htool::Cluster<PetscReal> *local_source_cluster;
641: PetscFunctionBegin;
642: PetscCall(MatShellGetContext(A, &a));
643: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
644: local_source_cluster = a->source_cluster ? &a->source_cluster->get_cluster_on_partition(rank) : &a->target_cluster->get_cluster_on_partition(rank);
645: source = &local_source_cluster->get_permutation();
646: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), local_source_cluster->get_size(), source->data() + local_source_cluster->get_offset(), PETSC_COPY_VALUES, is));
647: PetscCall(ISSetPermutation(*is));
648: PetscFunctionReturn(PETSC_SUCCESS);
649: }
651: /*@
652: MatHtoolGetPermutationSource - Gets the permutation associated with the source cluster for a `MATHTOOL` matrix.
654: Input Parameter:
655: . A - hierarchical matrix
657: Output Parameter:
658: . is - permutation
660: Level: advanced
662: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationTarget()`, `MatHtoolUsePermutation()`
663: @*/
664: PetscErrorCode MatHtoolGetPermutationSource(Mat A, IS *is)
665: {
666: PetscFunctionBegin;
667: PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
668: PetscAssertPointer(is, 2);
669: PetscTryMethod(A, "MatHtoolGetPermutationSource_C", (Mat, IS *), (A, is));
670: PetscFunctionReturn(PETSC_SUCCESS);
671: }
673: static PetscErrorCode MatHtoolGetPermutationTarget_Htool(Mat A, IS *is)
674: {
675: Mat_Htool *a;
676: const std::vector<PetscInt> *target;
677: PetscMPIInt rank;
679: PetscFunctionBegin;
680: PetscCall(MatShellGetContext(A, &a));
681: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
682: target = &a->target_cluster->get_permutation();
683: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), a->target_cluster->get_cluster_on_partition(rank).get_size(), target->data() + a->target_cluster->get_cluster_on_partition(rank).get_offset(), PETSC_COPY_VALUES, is));
684: PetscCall(ISSetPermutation(*is));
685: PetscFunctionReturn(PETSC_SUCCESS);
686: }
688: /*@
689: MatHtoolGetPermutationTarget - Gets the permutation associated with the target cluster for a `MATHTOOL` matrix.
691: Input Parameter:
692: . A - hierarchical matrix
694: Output Parameter:
695: . is - permutation
697: Level: advanced
699: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolUsePermutation()`
700: @*/
701: PetscErrorCode MatHtoolGetPermutationTarget(Mat A, IS *is)
702: {
703: PetscFunctionBegin;
704: PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
705: PetscAssertPointer(is, 2);
706: PetscTryMethod(A, "MatHtoolGetPermutationTarget_C", (Mat, IS *), (A, is));
707: PetscFunctionReturn(PETSC_SUCCESS);
708: }
710: static PetscErrorCode MatHtoolUsePermutation_Htool(Mat A, PetscBool use)
711: {
712: Mat_Htool *a;
714: PetscFunctionBegin;
715: PetscCall(MatShellGetContext(A, &a));
716: a->permutation = use;
717: PetscFunctionReturn(PETSC_SUCCESS);
718: }
720: /*@
721: MatHtoolUsePermutation - Sets whether a `MATHTOOL` matrix should permute input (resp. output) vectors following its internal source (resp. target) permutation.
723: Input Parameters:
724: + A - hierarchical matrix
725: - use - Boolean value
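Example Usage (a sketch of bypassing the built-in permutations; `in` and `out` are assumed to be already numbered according to the source and target clusters):
.vb
  IS source, target;

  PetscCall(MatHtoolGetPermutationSource(A, &source));
  PetscCall(MatHtoolGetPermutationTarget(A, &target));
  // the caller is now responsible for ordering in (resp. out) following source (resp. target)
  PetscCall(MatHtoolUsePermutation(A, PETSC_FALSE));
  PetscCall(MatMult(A, in, out));
  PetscCall(MatHtoolUsePermutation(A, PETSC_TRUE)); // restore the default behavior
  PetscCall(ISDestroy(&source));
  PetscCall(ISDestroy(&target));
.ve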
727: Level: advanced
729: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolGetPermutationTarget()`
730: @*/
731: PetscErrorCode MatHtoolUsePermutation(Mat A, PetscBool use)
732: {
733: PetscFunctionBegin;
734: PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
735: PetscValidLogicalCollectiveBool(A, use, 2);
736: PetscTryMethod(A, "MatHtoolUsePermutation_C", (Mat, PetscBool), (A, use));
737: PetscFunctionReturn(PETSC_SUCCESS);
738: }
740: static PetscErrorCode MatHtoolUseRecompression_Htool(Mat A, PetscBool use)
741: {
742: Mat_Htool *a;
744: PetscFunctionBegin;
745: PetscCall(MatShellGetContext(A, &a));
746: a->recompression = use;
747: PetscFunctionReturn(PETSC_SUCCESS);
748: }
750: /*@
751: MatHtoolUseRecompression - Sets whether a `MATHTOOL` matrix should use recompression.
753: Input Parameters:
754: + A - hierarchical matrix
755: - use - Boolean value
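Example Usage (a sketch; the flag is read during assembly, so it must be set beforehand):
.vb
  PetscCall(MatHtoolUseRecompression(A, PETSC_TRUE));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); // low-rank blocks are now recompressed through SVD
.ve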
757: Level: advanced
759: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`
760: @*/
761: PetscErrorCode MatHtoolUseRecompression(Mat A, PetscBool use)
762: {
763: PetscFunctionBegin;
764: PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
765: PetscValidLogicalCollectiveBool(A, use, 2);
766: PetscTryMethod(A, "MatHtoolUseRecompression_C", (Mat, PetscBool), (A, use));
767: PetscFunctionReturn(PETSC_SUCCESS);
768: }
770: static PetscErrorCode MatConvert_Htool_Dense(Mat A, MatType, MatReuse reuse, Mat *B)
771: {
772: Mat C;
773: Mat_Htool *a;
774: PetscScalar *array, shift, scale;
775: PetscInt lda;
777: PetscFunctionBegin;
778: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
779: PetscCall(MatShellGetContext(A, &a));
780: if (reuse == MAT_REUSE_MATRIX) {
781: C = *B;
782: PetscCheck(C->rmap->n == A->rmap->n && C->cmap->N == A->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible dimensions");
783: PetscCall(MatDenseGetLDA(C, &lda));
784: PetscCheck(lda == C->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")", lda, C->rmap->n);
785: } else {
786: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
787: PetscCall(MatSetSizes(C, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
788: PetscCall(MatSetType(C, MATDENSE));
789: PetscCall(MatSetUp(C));
790: PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
791: }
792: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
793: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
794: PetscCall(MatDenseGetArrayWrite(C, &array));
795: PetscStackCallExternalVoid("copy_to_dense_in_user_numbering", htool::copy_to_dense_in_user_numbering(a->distributed_operator_holder->hmatrix, array));
796: PetscCall(MatDenseRestoreArrayWrite(C, &array));
797: PetscCall(MatScale(C, scale)); /* scale before shifting to reproduce scale * A + shift * I */
798: PetscCall(MatShift(C, shift));
799: if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &C));
800: else *B = C;
801: PetscFunctionReturn(PETSC_SUCCESS);
802: }
804: static PetscErrorCode GenEntriesTranspose(PetscInt sdim, PetscInt M, PetscInt N, const PetscInt *rows, const PetscInt *cols, PetscScalar *ptr, void *ctx)
805: {
806: MatHtoolKernelTranspose *generator = (MatHtoolKernelTranspose *)ctx;
807: PetscScalar *tmp;
809: PetscFunctionBegin;
810: PetscCall(generator->kernel(sdim, N, M, cols, rows, ptr, generator->kernelctx));
811: PetscCall(PetscMalloc1(M * N, &tmp));
812: PetscCall(PetscArraycpy(tmp, ptr, M * N));
813: for (PetscInt i = 0; i < M; ++i) {
814: for (PetscInt j = 0; j < N; ++j) ptr[i + j * M] = tmp[j + i * N];
815: }
816: PetscCall(PetscFree(tmp));
817: PetscFunctionReturn(PETSC_SUCCESS);
818: }
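/* For instance (a sketch): with M = 2 and N = 3, the call above first fills ptr with the
 * 3-by-2 block of the original kernel (column-major), and the loop then rewrites it in place
 * as the 2-by-3 transposed block, so that ptr[i + j * M] holds the entry (cols[j], rows[i])
 * of the original kernel, as expected for the transposed operator. */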
820: /* naive implementation which keeps a reference to the original Mat */
821: static PetscErrorCode MatTranspose_Htool(Mat A, MatReuse reuse, Mat *B)
822: {
823: Mat C;
824: Mat_Htool *a, *c;
825: PetscScalar shift, scale;
826: PetscInt M = A->rmap->N, N = A->cmap->N, m = A->rmap->n, n = A->cmap->n;
827: PetscContainer container;
828: MatHtoolKernelTranspose *kernelt;
830: PetscFunctionBegin;
831: PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
832: PetscCall(MatShellGetContext(A, &a));
833: if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
834: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MatTranspose() with MAT_INPLACE_MATRIX not supported");
835: if (reuse == MAT_INITIAL_MATRIX) {
836: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
837: PetscCall(MatSetSizes(C, n, m, N, M));
838: PetscCall(MatSetType(C, ((PetscObject)A)->type_name));
839: PetscCall(MatSetUp(C));
840: PetscCall(PetscNew(&kernelt));
841: PetscCall(PetscObjectContainerCompose((PetscObject)C, "KernelTranspose", kernelt, PetscCtxDestroyDefault));
842: } else {
843: C = *B;
844: PetscCall(PetscObjectQuery((PetscObject)C, "KernelTranspose", (PetscObject *)&container));
845: PetscCheck(container, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Must call MatTranspose() with MAT_INITIAL_MATRIX first");
846: PetscCall(PetscContainerGetPointer(container, (void **)&kernelt));
847: }
848: PetscCall(MatShellGetContext(C, &c));
849: c->dim = a->dim;
850: PetscCall(MatScale(C, scale)); /* scale before shifting: (scale * A + shift * I)^T = scale * A^T + shift * I */
851: PetscCall(MatShift(C, shift));
852: c->kernel = GenEntriesTranspose;
853: if (kernelt->A != A) {
854: PetscCall(MatDestroy(&kernelt->A));
855: kernelt->A = A;
856: PetscCall(PetscObjectReference((PetscObject)A));
857: }
858: kernelt->kernel = a->kernel;
859: kernelt->kernelctx = a->kernelctx;
860: c->kernelctx = kernelt;
861: c->max_cluster_leaf_size = a->max_cluster_leaf_size;
862: c->epsilon = a->epsilon;
863: c->eta = a->eta;
864: c->block_tree_consistency = a->block_tree_consistency;
865: c->permutation = a->permutation;
866: c->recompression = a->recompression;
867: c->compressor = a->compressor;
868: c->clustering = a->clustering;
869: if (reuse == MAT_INITIAL_MATRIX) {
870: PetscCall(PetscMalloc1(N * c->dim, &c->gcoords_target));
871: PetscCall(PetscArraycpy(c->gcoords_target, a->gcoords_source, N * c->dim));
872: if (a->gcoords_target != a->gcoords_source) {
873: PetscCall(PetscMalloc1(M * c->dim, &c->gcoords_source));
874: PetscCall(PetscArraycpy(c->gcoords_source, a->gcoords_target, M * c->dim));
875: } else c->gcoords_source = c->gcoords_target;
876: }
877: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
878: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
879: if (reuse == MAT_INITIAL_MATRIX) *B = C;
880: PetscFunctionReturn(PETSC_SUCCESS);
881: }
883: static PetscErrorCode MatDestroy_Factor(Mat F)
884: {
885: PetscContainer container;
886: htool::HMatrix<PetscScalar> *A;
888: PetscFunctionBegin;
889: PetscCall(PetscObjectQuery((PetscObject)F, "HMatrix", (PetscObject *)&container));
890: if (container) {
891: PetscCall(PetscContainerGetPointer(container, (void **)&A));
892: delete A;
893: PetscCall(PetscObjectCompose((PetscObject)F, "HMatrix", nullptr));
894: }
895: PetscCall(PetscObjectComposeFunction((PetscObject)F, "MatFactorGetSolverType_C", nullptr));
896: PetscFunctionReturn(PETSC_SUCCESS);
897: }
899: static PetscErrorCode MatFactorGetSolverType_Htool(Mat, MatSolverType *type)
900: {
901: PetscFunctionBegin;
902: *type = MATSOLVERHTOOL;
903: PetscFunctionReturn(PETSC_SUCCESS);
904: }
906: template <char trans>
907: static inline PetscErrorCode MatSolve_Private(Mat A, htool::Matrix<PetscScalar> &X)
908: {
909: PetscContainer container;
910: htool::HMatrix<PetscScalar> *B;
912: PetscFunctionBegin;
913: PetscCheck(A->factortype == MAT_FACTOR_LU || A->factortype == MAT_FACTOR_CHOLESKY, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_UNKNOWN_TYPE, "Only MAT_LU_FACTOR and MAT_CHOLESKY_FACTOR are supported");
914: PetscCall(PetscObjectQuery((PetscObject)A, "HMatrix", (PetscObject *)&container));
915: PetscCheck(container, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Must call Mat%sFactorNumeric() before Mat%sSolve%s()", A->factortype == MAT_FACTOR_LU ? "LU" : "Cholesky", X.nb_cols() == 1 ? "" : "Mat", trans == 'N' ? "" : "Transpose");
916: PetscCall(PetscContainerGetPointer(container, (void **)&B));
917: if (A->factortype == MAT_FACTOR_LU) htool::lu_solve(trans, *B, X);
918: else htool::cholesky_solve('L', *B, X);
919: PetscFunctionReturn(PETSC_SUCCESS);
920: }
922: template <char trans, class Type, typename std::enable_if<std::is_same<Type, Vec>::value>::type * = nullptr>
923: static PetscErrorCode MatSolve_Htool(Mat A, Type b, Type x)
924: {
925: PetscInt n;
926: htool::Matrix<PetscScalar> v;
927: PetscScalar *array;
929: PetscFunctionBegin;
930: PetscCall(VecGetLocalSize(b, &n));
931: PetscCall(VecCopy(b, x));
932: PetscCall(VecGetArrayWrite(x, &array));
933: v.assign(n, 1, array, false);
934: PetscCall(VecRestoreArrayWrite(x, &array));
935: PetscCall(MatSolve_Private<trans>(A, v));
936: PetscFunctionReturn(PETSC_SUCCESS);
937: }
939: template <char trans, class Type, typename std::enable_if<std::is_same<Type, Mat>::value>::type * = nullptr>
940: static PetscErrorCode MatSolve_Htool(Mat A, Type B, Type X)
941: {
942: PetscInt m, N;
943: htool::Matrix<PetscScalar> v;
944: PetscScalar *array;
946: PetscFunctionBegin;
947: PetscCall(MatGetLocalSize(B, &m, nullptr));
948: PetscCall(MatGetSize(B, nullptr, &N)); /* global number of right-hand sides */
949: PetscCall(MatCopy(B, X, SAME_NONZERO_PATTERN));
950: PetscCall(MatDenseGetArrayWrite(X, &array));
951: v.assign(m, N, array, false);
952: PetscCall(MatDenseRestoreArrayWrite(X, &array));
953: PetscCall(MatSolve_Private<trans>(A, v));
954: PetscFunctionReturn(PETSC_SUCCESS);
955: }
957: template <MatFactorType ftype>
958: static PetscErrorCode MatFactorNumeric_Htool(Mat F, Mat A, const MatFactorInfo *)
959: {
960: Mat_Htool *a;
961: htool::HMatrix<PetscScalar> *B;
963: PetscFunctionBegin;
964: PetscCall(MatShellGetContext(A, &a));
965: B = new htool::HMatrix<PetscScalar>(a->distributed_operator_holder->hmatrix);
966: if (ftype == MAT_FACTOR_LU) htool::sequential_lu_factorization(*B);
967: else htool::sequential_cholesky_factorization('L', *B);
968: PetscCall(PetscObjectContainerCompose((PetscObject)F, "HMatrix", B, nullptr));
969: PetscFunctionReturn(PETSC_SUCCESS);
970: }
972: template <MatFactorType ftype>
973: PetscErrorCode MatFactorSymbolic_Htool(Mat F, Mat)
974: {
975: PetscFunctionBegin;
976: F->preallocated = PETSC_TRUE;
977: F->assembled = PETSC_TRUE;
978: F->ops->solve = MatSolve_Htool<'N', Vec>;
979: F->ops->matsolve = MatSolve_Htool<'N', Mat>;
980: if (!PetscDefined(USE_COMPLEX) || ftype == MAT_FACTOR_LU) {
981: F->ops->solvetranspose = MatSolve_Htool<'T', Vec>;
982: F->ops->matsolvetranspose = MatSolve_Htool<'T', Mat>;
983: }
984: F->ops->destroy = MatDestroy_Factor;
985: if (ftype == MAT_FACTOR_LU) F->ops->lufactornumeric = MatFactorNumeric_Htool<MAT_FACTOR_LU>;
986: else F->ops->choleskyfactornumeric = MatFactorNumeric_Htool<MAT_FACTOR_CHOLESKY>;
987: PetscFunctionReturn(PETSC_SUCCESS);
988: }
990: static PetscErrorCode MatLUFactorSymbolic_Htool(Mat F, Mat A, IS, IS, const MatFactorInfo *)
991: {
992: PetscFunctionBegin;
993: PetscCall(MatFactorSymbolic_Htool<MAT_FACTOR_LU>(F, A));
994: PetscFunctionReturn(PETSC_SUCCESS);
995: }
997: static PetscErrorCode MatCholeskyFactorSymbolic_Htool(Mat F, Mat A, IS, const MatFactorInfo *)
998: {
999: PetscFunctionBegin;
1000: PetscCall(MatFactorSymbolic_Htool<MAT_FACTOR_CHOLESKY>(F, A));
1001: PetscFunctionReturn(PETSC_SUCCESS);
1002: }
1004: static PetscErrorCode MatGetFactor_htool_htool(Mat A, MatFactorType ftype, Mat *F)
1005: {
1006: Mat B;
1007: Mat_Htool *a;
1008: PetscMPIInt size;
1010: PetscFunctionBegin;
1011: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
1012: PetscCall(MatShellGetContext(A, &a));
1013: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_WRONG_MPI_SIZE, "Unsupported parallel MatGetFactor()");
1014: PetscCheck(a->block_tree_consistency, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot factor a MatHtool with inconsistent block tree");
1015: PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
1016: PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
1017: PetscCall(PetscStrallocpy(MATSOLVERHTOOL, &((PetscObject)B)->type_name));
1018: PetscCall(MatSetUp(B));
1020: B->ops->getinfo = MatGetInfo_External;
1021: B->factortype = ftype;
1022: B->trivialsymbolic = PETSC_TRUE;
1024: if (ftype == MAT_FACTOR_LU) B->ops->lufactorsymbolic = MatLUFactorSymbolic_Htool;
1025: else if (ftype == MAT_FACTOR_CHOLESKY) B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_Htool;
1027: PetscCall(PetscFree(B->solvertype));
1028: PetscCall(PetscStrallocpy(MATSOLVERHTOOL, &B->solvertype));
1030: PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_Htool));
1031: *F = B;
1032: PetscFunctionReturn(PETSC_SUCCESS);
1033: }
1035: PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Htool(void)
1036: {
1037: PetscFunctionBegin;
1038: PetscCall(MatSolverTypeRegister(MATSOLVERHTOOL, MATHTOOL, MAT_FACTOR_LU, MatGetFactor_htool_htool));
1039: PetscCall(MatSolverTypeRegister(MATSOLVERHTOOL, MATHTOOL, MAT_FACTOR_CHOLESKY, MatGetFactor_htool_htool));
1040: PetscFunctionReturn(PETSC_SUCCESS);
1041: }
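/* With the registrations above, a sequential MATHTOOL can be factored like any other matrix,
 * e.g. (a sketch of the command line, assuming a uniprocessor run):
 *
 *   ./a.out -mat_type htool -pc_type lu -pc_factor_mat_solver_type htool
 *
 * which goes through MatGetFactor_htool_htool(), the symbolic stubs above, and a sequential
 * Htool LU (or Cholesky with -pc_type cholesky) factorization of a copy of the hierarchical
 * matrix. */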
1043: /*@C
1044: MatCreateHtoolFromKernel - Creates a `MATHTOOL` from a user-supplied kernel.
1046: Collective, No Fortran Support
1048: Input Parameters:
1049: + comm - MPI communicator
1050: . m - number of local rows (or `PETSC_DECIDE` to have calculated if `M` is given)
1051: . n - number of local columns (or `PETSC_DECIDE` to have calculated if `N` is given)
1052: . M - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given)
1053: . N - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given)
1054: . spacedim - dimension of the space coordinates
1055: . coords_target - coordinates of the target
1056: . coords_source - coordinates of the source
1057: . kernel - computational kernel (or `NULL`)
1058: - kernelctx - kernel context (if kernel is `NULL`, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)
1060: Output Parameter:
1061: . B - matrix
1063: Options Database Keys:
1064: + -mat_htool_max_cluster_leaf_size <`PetscInt`> - maximal leaf size in cluster tree
1065: . -mat_htool_epsilon <`PetscReal`> - relative error in Frobenius norm when approximating a block
1066: . -mat_htool_eta <`PetscReal`> - admissibility condition tolerance
1067: . -mat_htool_min_target_depth <`PetscInt`> - minimal cluster tree depth associated with the rows
1068: . -mat_htool_min_source_depth <`PetscInt`> - minimal cluster tree depth associated with the columns
1069: . -mat_htool_block_tree_consistency <`PetscBool`> - block tree consistency
1070: . -mat_htool_recompression <`PetscBool`> - use recompression
1071: . -mat_htool_compressor <sympartialACA, fullACA, SVD> - type of compression
1072: - -mat_htool_clustering <PCARegular, PCAGeometric, BoundingBox1Regular, BoundingBox1Geometric> - type of clustering
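Example Usage (a minimal uniprocessor sketch; `GenEntries` is the illustrative kernel from `MatHtoolSetKernel()` and `coords` holds n points in 3D):
.vb
  Mat A;

  PetscCall(MatCreateHtoolFromKernel(PETSC_COMM_SELF, n, n, n, n, 3, coords, coords, GenEntries, ctx, &A));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); // builds the hierarchical approximation
.ve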
1074: Level: intermediate
1076: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MATHTOOL`, `PCSetCoordinates()`, `MatHtoolSetKernel()`, `MatHtoolCompressorType`, `MATH2OPUS`, `MatCreateH2OpusFromKernel()`
1077: @*/
1078: PetscErrorCode MatCreateHtoolFromKernel(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt spacedim, const PetscReal coords_target[], const PetscReal coords_source[], MatHtoolKernelFn *kernel, void *kernelctx, Mat *B)
1079: {
1080: Mat A;
1081: Mat_Htool *a;
1083: PetscFunctionBegin;
1084: PetscCall(MatCreate(comm, &A));
1086: PetscAssertPointer(coords_target, 7);
1087: PetscAssertPointer(coords_source, 8);
1089: if (!kernel) PetscAssertPointer(kernelctx, 10);
1090: PetscCall(MatSetSizes(A, m, n, M, N));
1091: PetscCall(MatSetType(A, MATHTOOL));
1092: PetscCall(MatSetUp(A));
1093: PetscCall(MatShellGetContext(A, &a));
1094: a->dim = spacedim;
1095: a->kernel = kernel;
1096: a->kernelctx = kernelctx;
1097: PetscCall(PetscCalloc1(A->rmap->N * spacedim, &a->gcoords_target));
1098: PetscCall(PetscArraycpy(a->gcoords_target + A->rmap->rstart * spacedim, coords_target, A->rmap->n * spacedim));
1099: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, a->gcoords_target, A->rmap->N * spacedim, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)A))); /* global target coordinates */
1100: if (coords_target != coords_source) {
1101: PetscCall(PetscCalloc1(A->cmap->N * spacedim, &a->gcoords_source));
1102: PetscCall(PetscArraycpy(a->gcoords_source + A->cmap->rstart * spacedim, coords_source, A->cmap->n * spacedim));
1103: PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, a->gcoords_source, A->cmap->N * spacedim, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)A))); /* global source coordinates */
1104: } else a->gcoords_source = a->gcoords_target;
1105: *B = A;
1106: PetscFunctionReturn(PETSC_SUCCESS);
1107: }
1109: /*MC
1110: MATHTOOL = "htool" - A matrix type for hierarchical matrices using the Htool package.
1112: Use `./configure --download-htool` to build PETSc with Htool support.
1114: Options Database Key:
1115: . -mat_type htool - matrix type to `MATHTOOL`
1117: Level: beginner
1119: .seealso: [](ch_matrices), `Mat`, `MATH2OPUS`, `MATDENSE`, `MatCreateHtoolFromKernel()`, `MatHtoolSetKernel()`
1120: M*/
1121: PETSC_EXTERN PetscErrorCode MatCreate_Htool(Mat A)
1122: {
1123: Mat_Htool *a;
1125: PetscFunctionBegin;
1126: PetscCall(MatSetType(A, MATSHELL));
1127: PetscCall(PetscNew(&a));
1128: PetscCall(MatShellSetContext(A, a));
1129: PetscCall(MatShellSetOperation(A, MATOP_GET_DIAGONAL, (PetscErrorCodeFn *)MatGetDiagonal_Htool));
1130: PetscCall(MatShellSetOperation(A, MATOP_GET_DIAGONAL_BLOCK, (PetscErrorCodeFn *)MatGetDiagonalBlock_Htool));
1131: PetscCall(MatShellSetOperation(A, MATOP_MULT, (PetscErrorCodeFn *)MatMult_Htool));
1132: PetscCall(MatShellSetOperation(A, MATOP_MULT_TRANSPOSE, (PetscErrorCodeFn *)MatMultTranspose_Htool));
1133: if (!PetscDefined(USE_COMPLEX)) PetscCall(MatShellSetOperation(A, MATOP_MULT_HERMITIAN_TRANSPOSE, (PetscErrorCodeFn *)MatMultTranspose_Htool));
1134: A->ops->increaseoverlap = MatIncreaseOverlap_Htool;
1135: A->ops->createsubmatrices = MatCreateSubMatrices_Htool;
1136: PetscCall(MatShellSetOperation(A, MATOP_VIEW, (PetscErrorCodeFn *)MatView_Htool));
1137: PetscCall(MatShellSetOperation(A, MATOP_SET_FROM_OPTIONS, (PetscErrorCodeFn *)MatSetFromOptions_Htool));
1138: PetscCall(MatShellSetOperation(A, MATOP_GET_ROW, (PetscErrorCodeFn *)MatGetRow_Htool));
1139: PetscCall(MatShellSetOperation(A, MATOP_RESTORE_ROW, (PetscErrorCodeFn *)MatRestoreRow_Htool));
1140: PetscCall(MatShellSetOperation(A, MATOP_ASSEMBLY_END, (PetscErrorCodeFn *)MatAssemblyEnd_Htool));
1141: PetscCall(MatShellSetOperation(A, MATOP_TRANSPOSE, (PetscErrorCodeFn *)MatTranspose_Htool));
1142: PetscCall(MatShellSetOperation(A, MATOP_DESTROY, (PetscErrorCodeFn *)MatDestroy_Htool));
1143: a->dim = 0;
1144: a->gcoords_target = nullptr;
1145: a->gcoords_source = nullptr;
1146: a->max_cluster_leaf_size = 10;
1147: a->epsilon = PetscSqrtReal(PETSC_SMALL);
1148: a->eta = 10.0;
1149: a->depth[0] = 0;
1150: a->depth[1] = 0;
1151: a->block_tree_consistency = PETSC_TRUE;
1152: a->permutation = PETSC_TRUE;
1153: a->recompression = PETSC_FALSE;
1154: a->compressor = MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA;
1155: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_seqdense_C", MatProductSetFromOptions_Htool));
1156: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_mpidense_C", MatProductSetFromOptions_Htool));
1157: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_seqdense_C", MatConvert_Htool_Dense));
1158: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_mpidense_C", MatConvert_Htool_Dense));
1159: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetHierarchicalMat_C", MatHtoolGetHierarchicalMat_Htool));
1160: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolSetKernel_C", MatHtoolSetKernel_Htool));
1161: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationSource_C", MatHtoolGetPermutationSource_Htool));
1162: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationTarget_C", MatHtoolGetPermutationTarget_Htool));
1163: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUsePermutation_C", MatHtoolUsePermutation_Htool));
1164: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUseRecompression_C", MatHtoolUseRecompression_Htool));
1165: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContext_C", MatShellSetContext_Immutable));
1166: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContextDestroy_C", MatShellSetContextDestroy_Immutable));
1167: PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetManageScalingShifts_C", MatShellSetManageScalingShifts_Immutable));
1168: PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATHTOOL));
1169: PetscFunctionReturn(PETSC_SUCCESS);
1170: }