Actual source code: htool.cxx

  1: #include <../src/mat/impls/htool/htool.hpp>
  2: #include <../src/mat/impls/shell/shell.h>
  3: #include <petscblaslapack.h>
  4: #include <set>

  6: const char *const MatHtoolCompressorTypes[] = {"sympartialACA", "fullACA", "SVD"};
  7: const char *const MatHtoolClusteringTypes[] = {"PCARegular", "PCAGeometric", "BoundingBox1Regular", "BoundingBox1Geometric"};
  8: const char        HtoolCitation[]           = "@article{marchand2020two,\n"
  9:                                               "  Author = {Marchand, Pierre and Claeys, Xavier and Jolivet, Pierre and Nataf, Fr\\'ed\\'eric and Tournier, Pierre-Henri},\n"
 10:                                               "  Title = {Two-level preconditioning for $h$-version boundary element approximation of hypersingular operator with {GenEO}},\n"
 11:                                               "  Year = {2020},\n"
 12:                                               "  Publisher = {Elsevier},\n"
 13:                                               "  Journal = {Numerische Mathematik},\n"
 14:                                               "  Volume = {146},\n"
 15:                                               "  Pages = {597--628},\n"
 16:                                               "  Url = {https://github.com/htool-ddm/htool}\n"
 17:                                               "}\n";
 18: static PetscBool  HtoolCite                 = PETSC_FALSE;

 20: static PetscErrorCode MatGetDiagonal_Htool(Mat A, Vec v)
 21: {
 22:   Mat_Htool   *a;
 23:   PetscScalar *x;
 24:   PetscBool    flg;

 26:   PetscFunctionBegin;
 27:   PetscCall(MatHasCongruentLayouts(A, &flg));
 28:   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only congruent layouts supported");
 29:   PetscCall(MatShellGetContext(A, &a));
 30:   PetscCall(VecGetArrayWrite(v, &x));
 31:   a->hmatrix->copy_local_diagonal(x);
 32:   PetscCall(VecRestoreArrayWrite(v, &x));
 33:   PetscFunctionReturn(PETSC_SUCCESS);
 34: }

 36: static PetscErrorCode MatGetDiagonalBlock_Htool(Mat A, Mat *b)
 37: {
 38:   Mat_Htool   *a;
 39:   Mat          B;
 40:   PetscScalar *ptr;
 41:   PetscBool    flg;

 43:   PetscFunctionBegin;
 44:   PetscCall(MatHasCongruentLayouts(A, &flg));
 45:   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only congruent layouts supported");
 46:   PetscCall(MatShellGetContext(A, &a));
 47:   PetscCall(PetscObjectQuery((PetscObject)A, "DiagonalBlock", (PetscObject *)&B)); /* same logic as in MatGetDiagonalBlock_MPIDense() */
 48:   if (!B) {
 49:     PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, A->rmap->n, A->rmap->n, A->rmap->n, nullptr, &B));
 50:     PetscCall(MatDenseGetArrayWrite(B, &ptr));
 51:     a->hmatrix->copy_local_diagonal_block(ptr);
 52:     PetscCall(MatDenseRestoreArrayWrite(B, &ptr));
 53:     PetscCall(MatPropagateSymmetryOptions(A, B));
 54:     PetscCall(PetscObjectCompose((PetscObject)A, "DiagonalBlock", (PetscObject)B));
 55:     *b = B;
 56:     PetscCall(MatDestroy(&B));
 57:   } else {
 58:     PetscCheck(PetscAbsScalar(((Mat_Shell *)A->data)->vscale - 1.0) <= PETSC_MACHINE_EPSILON && PetscAbsScalar(((Mat_Shell *)A->data)->vshift) <= PETSC_MACHINE_EPSILON, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse DiagonalBlock with non-trivial shift and scaling"); // TODO FIXME
 59:     *b = B;
 60:   }
 61:   PetscFunctionReturn(PETSC_SUCCESS);
 62: }

 64: static PetscErrorCode MatMult_Htool(Mat A, Vec x, Vec y)
 65: {
 66:   Mat_Htool         *a;
 67:   const PetscScalar *in;
 68:   PetscScalar       *out;

 70:   PetscFunctionBegin;
 71:   PetscCall(MatShellGetContext(A, &a));
 72:   PetscCall(VecGetArrayRead(x, &in));
 73:   PetscCall(VecGetArrayWrite(y, &out));
 74:   a->hmatrix->mvprod_local_to_local(in, out);
 75:   PetscCall(VecRestoreArrayRead(x, &in));
 76:   PetscCall(VecRestoreArrayWrite(y, &out));
 77:   PetscFunctionReturn(PETSC_SUCCESS);
 78: }

 80: static PetscErrorCode MatMultTranspose_Htool(Mat A, Vec x, Vec y)
 81: {
 82:   Mat_Htool         *a;
 83:   const PetscScalar *in;
 84:   PetscScalar       *out;

 86:   PetscFunctionBegin;
 87:   PetscCall(MatShellGetContext(A, &a));
 88:   PetscCall(VecGetArrayRead(x, &in));
 89:   PetscCall(VecGetArrayWrite(y, &out));
 90:   a->hmatrix->mvprod_transp_local_to_local(in, out);
 91:   PetscCall(VecRestoreArrayRead(x, &in));
 92:   PetscCall(VecRestoreArrayWrite(y, &out));
 93:   PetscFunctionReturn(PETSC_SUCCESS);
 94: }

 96: static PetscErrorCode MatIncreaseOverlap_Htool(Mat A, PetscInt is_max, IS is[], PetscInt ov)
 97: {
 98:   std::set<PetscInt> set;
 99:   const PetscInt    *idx;
100:   PetscInt          *oidx, size, bs[2];
101:   PetscMPIInt        csize;

103:   PetscFunctionBegin;
104:   PetscCall(MatGetBlockSizes(A, bs, bs + 1));
105:   if (bs[0] != bs[1]) bs[0] = 1;
106:   for (PetscInt i = 0; i < is_max; ++i) {
107:     /* basic implementation that adds indices by shifting an IS by -ov, -ov+1..., -1, 1..., ov-1, ov */
108:     /* needed to avoid having the subdomain matrices replicate A, since A is dense                    */
109:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)is[i]), &csize));
110:     PetscCheck(csize == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported parallel IS");
111:     PetscCall(ISGetSize(is[i], &size));
112:     PetscCall(ISGetIndices(is[i], &idx));
113:     for (PetscInt j = 0; j < size; ++j) {
114:       set.insert(idx[j]);
115:       for (PetscInt k = 1; k <= ov; ++k) {                                              /* for each layer of overlap      */
116:         if (idx[j] - k >= 0) set.insert(idx[j] - k);                                    /* do not insert negative indices */
117:         if (idx[j] + k < A->rmap->N && idx[j] + k < A->cmap->N) set.insert(idx[j] + k); /* do not insert indices greater than the dimension of A */
118:       }
119:     }
120:     PetscCall(ISRestoreIndices(is[i], &idx));
121:     PetscCall(ISDestroy(is + i));
122:     if (bs[0] > 1) {
123:       for (std::set<PetscInt>::iterator it = set.cbegin(); it != set.cend(); it++) {
124:         std::vector<PetscInt> block(bs[0]);
125:         std::iota(block.begin(), block.end(), (*it / bs[0]) * bs[0]);
126:         set.insert(block.cbegin(), block.cend());
127:       }
128:     }
129:     size = set.size(); /* size with overlap */
130:     PetscCall(PetscMalloc1(size, &oidx));
131:     for (const PetscInt j : set) *oidx++ = j;
132:     oidx -= size;
133:     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, size, oidx, PETSC_OWN_POINTER, is + i));
134:   }
135:   PetscFunctionReturn(PETSC_SUCCESS);
136: }

138: static PetscErrorCode MatCreateSubMatrices_Htool(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
139: {
140:   Mat_Htool         *a;
141:   Mat                D, B, BT;
142:   const PetscScalar *copy;
143:   PetscScalar       *ptr;
144:   const PetscInt    *idxr, *idxc, *it;
145:   PetscInt           nrow, m, i;
146:   PetscBool          flg;

148:   PetscFunctionBegin;
149:   PetscCheck(!((Mat_Shell *)A->data)->zrows && !((Mat_Shell *)A->data)->zcols, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot call MatCreateSubMatrices() if MatZeroRows() or MatZeroRowsColumns() has been called on the input Mat"); // TODO FIXME
150:   PetscCheck(!((Mat_Shell *)A->data)->axpy, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot call MatCreateSubMatrices() if MatAXPY() has been called on the input Mat");          // TODO FIXME
151:   PetscCheck(!((Mat_Shell *)A->data)->dshift, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot call MatCreateSubMatrices() if MatDiagonalSet() has been called on the input Mat"); // TODO FIXME
152:   PetscCall(MatShellGetContext(A, &a));
153:   if (scall != MAT_REUSE_MATRIX) PetscCall(PetscCalloc1(n, submat));
154:   for (i = 0; i < n; ++i) {
155:     PetscCall(ISGetLocalSize(irow[i], &nrow));
156:     PetscCall(ISGetLocalSize(icol[i], &m));
157:     PetscCall(ISGetIndices(irow[i], &idxr));
158:     PetscCall(ISGetIndices(icol[i], &idxc));
159:     if (scall != MAT_REUSE_MATRIX) PetscCall(MatCreateDense(PETSC_COMM_SELF, nrow, m, nrow, m, nullptr, (*submat) + i));
160:     PetscCall(MatDenseGetArrayWrite((*submat)[i], &ptr));
161:     if (irow[i] == icol[i]) { /* same row and column IS? */
162:       PetscCall(MatHasCongruentLayouts(A, &flg));
163:       if (flg) {
164:         PetscCall(ISSorted(irow[i], &flg));
165:         if (flg) { /* sorted IS? */
166:           it = std::lower_bound(idxr, idxr + nrow, A->rmap->rstart);
167:           if (it != idxr + nrow && *it == A->rmap->rstart) {    /* rmap->rstart in IS? */
168:             if (std::distance(idxr, it) + A->rmap->n <= nrow) { /* long enough IS to store the local diagonal block? */
169:               for (PetscInt j = 0; j < A->rmap->n && flg; ++j)
170:                 if (PetscUnlikely(it[j] != A->rmap->rstart + j)) flg = PETSC_FALSE;
171:               if (flg) { /* complete local diagonal block in IS? */
172:                 /* fast extraction when the local diagonal block is part of the submatrix, e.g., for PCASM or PCHPDDM
173:                  *      [   B   C   E   ]
174:                  *  A = [   B   D   E   ]
175:                  *      [   B   F   E   ]
176:                  */
177:                 m = std::distance(idxr, it); /* shift of the coefficient (0,0) of block D from above */
178:                 PetscCall(MatGetDiagonalBlock(A, &D));
179:                 PetscCall(MatDenseGetArrayRead(D, &copy));
180:                 for (PetscInt k = 0; k < A->rmap->n; ++k) { PetscCall(PetscArraycpy(ptr + (m + k) * nrow + m, copy + k * A->rmap->n, A->rmap->n)); /* block D from above */ }
181:                 PetscCall(MatDenseRestoreArrayRead(D, &copy));
182:                 if (m) {
183:                   a->wrapper->copy_submatrix(nrow, m, idxr, idxc, ptr); /* vertical block B from above */
184:                   /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
185:                   if (A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE) {
186:                     PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, m, A->rmap->n, m, ptr + m, &B));
187:                     PetscCall(MatDenseSetLDA(B, nrow));
188:                     PetscCall(MatCreateDense(PETSC_COMM_SELF, m, A->rmap->n, m, A->rmap->n, ptr + m * nrow, &BT));
189:                     PetscCall(MatDenseSetLDA(BT, nrow));
190:                     if (A->hermitian == PETSC_BOOL3_TRUE && PetscDefined(USE_COMPLEX)) {
191:                       PetscCall(MatHermitianTranspose(B, MAT_REUSE_MATRIX, &BT));
192:                     } else {
193:                       PetscCall(MatTransposeSetPrecursor(B, BT));
194:                       PetscCall(MatTranspose(B, MAT_REUSE_MATRIX, &BT));
195:                     }
196:                     PetscCall(MatDestroy(&B));
197:                     PetscCall(MatDestroy(&BT));
198:                   } else {
199:                     for (PetscInt k = 0; k < A->rmap->n; ++k) { /* block C from above */
200:                       a->wrapper->copy_submatrix(m, 1, idxr, idxc + m + k, ptr + (m + k) * nrow);
201:                     }
202:                   }
203:                 }
204:                 if (m + A->rmap->n != nrow) {
205:                   a->wrapper->copy_submatrix(nrow, std::distance(it + A->rmap->n, idxr + nrow), idxr, idxc + m + A->rmap->n, ptr + (m + A->rmap->n) * nrow); /* vertical block E from above */
206:                   /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
207:                   if (A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE) {
208:                     PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, nrow - (m + A->rmap->n), A->rmap->n, nrow - (m + A->rmap->n), ptr + (m + A->rmap->n) * nrow + m, &B));
209:                     PetscCall(MatDenseSetLDA(B, nrow));
210:                     PetscCall(MatCreateDense(PETSC_COMM_SELF, nrow - (m + A->rmap->n), A->rmap->n, nrow - (m + A->rmap->n), A->rmap->n, ptr + m * nrow + m + A->rmap->n, &BT));
211:                     PetscCall(MatDenseSetLDA(BT, nrow));
212:                     if (A->hermitian == PETSC_BOOL3_TRUE && PetscDefined(USE_COMPLEX)) {
213:                       PetscCall(MatHermitianTranspose(B, MAT_REUSE_MATRIX, &BT));
214:                     } else {
215:                       PetscCall(MatTransposeSetPrecursor(B, BT));
216:                       PetscCall(MatTranspose(B, MAT_REUSE_MATRIX, &BT));
217:                     }
218:                     PetscCall(MatDestroy(&B));
219:                     PetscCall(MatDestroy(&BT));
220:                   } else {
221:                     for (PetscInt k = 0; k < A->rmap->n; ++k) { /* block F from above */
222:                       a->wrapper->copy_submatrix(std::distance(it + A->rmap->n, idxr + nrow), 1, it + A->rmap->n, idxc + m + k, ptr + (m + k) * nrow + m + A->rmap->n);
223:                     }
224:                   }
225:                 }
226:               } /* complete local diagonal block not in IS */
227:             } else flg = PETSC_FALSE; /* IS not long enough to store the local diagonal block */
228:           } else flg = PETSC_FALSE;   /* rmap->rstart not in IS */
229:         } /* unsorted IS */
230:       }
231:     } else flg = PETSC_FALSE;                                       /* different row and column IS */
232:     if (!flg) a->wrapper->copy_submatrix(nrow, m, idxr, idxc, ptr); /* reassemble everything */
233:     PetscCall(ISRestoreIndices(irow[i], &idxr));
234:     PetscCall(ISRestoreIndices(icol[i], &idxc));
235:     PetscCall(MatDenseRestoreArrayWrite((*submat)[i], &ptr));
236:     PetscCall(MatScale((*submat)[i], ((Mat_Shell *)A->data)->vscale));
237:     PetscCall(MatShift((*submat)[i], ((Mat_Shell *)A->data)->vshift));
238:   }
239:   PetscFunctionReturn(PETSC_SUCCESS);
240: }

242: static PetscErrorCode MatDestroy_Htool(Mat A)
243: {
244:   Mat_Htool               *a;
245:   PetscContainer           container;
246:   MatHtoolKernelTranspose *kernelt;

248:   PetscFunctionBegin;
249:   PetscCall(MatShellGetContext(A, &a));
250:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_seqdense_C", nullptr));
251:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_mpidense_C", nullptr));
252:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_seqdense_C", nullptr));
253:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_mpidense_C", nullptr));
254:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetHierarchicalMat_C", nullptr));
255:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolSetKernel_C", nullptr));
256:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationSource_C", nullptr));
257:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationTarget_C", nullptr));
258:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUsePermutation_C", nullptr));
259:   PetscCall(PetscObjectQuery((PetscObject)A, "KernelTranspose", (PetscObject *)&container));
260:   if (container) { /* created in MatTranspose_Htool() */
261:     PetscCall(PetscContainerGetPointer(container, (void **)&kernelt));
262:     PetscCall(MatDestroy(&kernelt->A));
263:     PetscCall(PetscFree(kernelt));
264:     PetscCall(PetscContainerDestroy(&container));
265:     PetscCall(PetscObjectCompose((PetscObject)A, "KernelTranspose", nullptr));
266:   }
267:   if (a->gcoords_source != a->gcoords_target) PetscCall(PetscFree(a->gcoords_source));
268:   PetscCall(PetscFree(a->gcoords_target));
269:   PetscCall(PetscFree2(a->work_source, a->work_target));
270:   delete a->wrapper;
271:   delete a->hmatrix;
272:   PetscCall(PetscFree(a));
273:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContext_C", nullptr)); // needed to avoid a call to MatShellSetContext_Immutable()
274:   PetscFunctionReturn(PETSC_SUCCESS);
275: }

277: static PetscErrorCode MatView_Htool(Mat A, PetscViewer pv)
278: {
279:   Mat_Htool *a;
280:   PetscBool  flg;

282:   PetscFunctionBegin;
283:   PetscCall(MatShellGetContext(A, &a));
284:   PetscCall(PetscObjectTypeCompare((PetscObject)pv, PETSCVIEWERASCII, &flg));
285:   if (flg) {
286:     PetscCall(PetscViewerASCIIPrintf(pv, "symmetry: %c\n", a->hmatrix->get_symmetry_type()));
287:     if (PetscAbsScalar(((Mat_Shell *)A->data)->vscale - 1.0) > PETSC_MACHINE_EPSILON) {
288: #if defined(PETSC_USE_COMPLEX)
289:       PetscCall(PetscViewerASCIIPrintf(pv, "scaling: %g+%gi\n", (double)PetscRealPart(((Mat_Shell *)A->data)->vscale), (double)PetscImaginaryPart(((Mat_Shell *)A->data)->vscale)));
290: #else
291:       PetscCall(PetscViewerASCIIPrintf(pv, "scaling: %g\n", (double)((Mat_Shell *)A->data)->vscale));
292: #endif
293:     }
294:     if (PetscAbsScalar(((Mat_Shell *)A->data)->vshift) > PETSC_MACHINE_EPSILON) {
295: #if defined(PETSC_USE_COMPLEX)
296:       PetscCall(PetscViewerASCIIPrintf(pv, "shift: %g+%gi\n", (double)PetscRealPart(((Mat_Shell *)A->data)->vshift), (double)PetscImaginaryPart(((Mat_Shell *)A->data)->vshift)));
297: #else
298:       PetscCall(PetscViewerASCIIPrintf(pv, "shift: %g\n", (double)((Mat_Shell *)A->data)->vshift));
299: #endif
300:     }
301:     PetscCall(PetscViewerASCIIPrintf(pv, "minimum cluster size: %" PetscInt_FMT "\n", a->bs[0]));
302:     PetscCall(PetscViewerASCIIPrintf(pv, "maximum block size: %" PetscInt_FMT "\n", a->bs[1]));
303:     PetscCall(PetscViewerASCIIPrintf(pv, "epsilon: %g\n", (double)a->epsilon));
304:     PetscCall(PetscViewerASCIIPrintf(pv, "eta: %g\n", (double)a->eta));
305:     PetscCall(PetscViewerASCIIPrintf(pv, "minimum target depth: %" PetscInt_FMT "\n", a->depth[0]));
306:     PetscCall(PetscViewerASCIIPrintf(pv, "minimum source depth: %" PetscInt_FMT "\n", a->depth[1]));
307:     PetscCall(PetscViewerASCIIPrintf(pv, "compressor: %s\n", MatHtoolCompressorTypes[a->compressor]));
308:     PetscCall(PetscViewerASCIIPrintf(pv, "clustering: %s\n", MatHtoolClusteringTypes[a->clustering]));
309:     PetscCall(PetscViewerASCIIPrintf(pv, "compression ratio: %s\n", a->hmatrix->get_infos("Compression_ratio").c_str()));
310:     PetscCall(PetscViewerASCIIPrintf(pv, "space saving: %s\n", a->hmatrix->get_infos("Space_saving").c_str()));
311:     PetscCall(PetscViewerASCIIPrintf(pv, "number of dense (resp. low rank) matrices: %s (resp. %s)\n", a->hmatrix->get_infos("Number_of_dmat").c_str(), a->hmatrix->get_infos("Number_of_lrmat").c_str()));
312:     PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) dense block sizes: (%s, %s, %s)\n", a->hmatrix->get_infos("Dense_block_size_min").c_str(), a->hmatrix->get_infos("Dense_block_size_mean").c_str(),
313:                                      a->hmatrix->get_infos("Dense_block_size_max").c_str()));
314:     PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) low rank block sizes: (%s, %s, %s)\n", a->hmatrix->get_infos("Low_rank_block_size_min").c_str(), a->hmatrix->get_infos("Low_rank_block_size_mean").c_str(),
315:                                      a->hmatrix->get_infos("Low_rank_block_size_max").c_str()));
316:     PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) ranks: (%s, %s, %s)\n", a->hmatrix->get_infos("Rank_min").c_str(), a->hmatrix->get_infos("Rank_mean").c_str(), a->hmatrix->get_infos("Rank_max").c_str()));
317:   }
318:   PetscFunctionReturn(PETSC_SUCCESS);
319: }

321: /* naive implementation of MatGetRow() needed for MatConvert_Nest_AIJ() */
322: static PetscErrorCode MatGetRow_Htool(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
323: {
324:   Mat_Htool   *a;
325:   PetscInt    *idxc;
326:   PetscBLASInt one = 1, bn;

328:   PetscFunctionBegin;
329:   PetscCheck(!((Mat_Shell *)A->data)->zrows && !((Mat_Shell *)A->data)->zcols, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot call MatGetRow() if MatZeroRows() or MatZeroRowsColumns() has been called on the input Mat"); // TODO FIXME
330:   PetscCheck(!((Mat_Shell *)A->data)->axpy, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot call MatGetRow() if MatAXPY() has been called on the input Mat");                                                                // TODO FIXME
331:   PetscCall(MatShellGetContext(A, &a));
332:   if (nz) *nz = A->cmap->N;
333:   if (idx || v) { /* even if !idx, need to set idxc for htool::copy_submatrix() */
334:     PetscCall(PetscMalloc1(A->cmap->N, &idxc));
335:     for (PetscInt i = 0; i < A->cmap->N; ++i) idxc[i] = i;
336:   }
337:   if (idx) *idx = idxc;
338:   if (v) {
339:     PetscCall(PetscMalloc1(A->cmap->N, v));
340:     if (a->wrapper) a->wrapper->copy_submatrix(1, A->cmap->N, &row, idxc, *v);
341:     else reinterpret_cast<htool::VirtualGenerator<PetscScalar> *>(a->kernelctx)->copy_submatrix(1, A->cmap->N, &row, idxc, *v);
342:     PetscCall(PetscBLASIntCast(A->cmap->N, &bn));
343:     PetscCallBLAS("BLASscal", BLASscal_(&bn, &(((Mat_Shell *)A->data)->vscale), *v, &one));
344:     if (row < A->cmap->N) (*v)[row] += ((Mat_Shell *)A->data)->vshift;
345:   }
346:   if (!idx) PetscCall(PetscFree(idxc));
347:   PetscFunctionReturn(PETSC_SUCCESS);
348: }

350: static PetscErrorCode MatRestoreRow_Htool(Mat, PetscInt, PetscInt *, PetscInt **idx, PetscScalar **v)
351: {
352:   PetscFunctionBegin;
353:   if (idx) PetscCall(PetscFree(*idx));
354:   if (v) PetscCall(PetscFree(*v));
355:   PetscFunctionReturn(PETSC_SUCCESS);
356: }

358: static PetscErrorCode MatSetFromOptions_Htool(Mat A, PetscOptionItems *PetscOptionsObject)
359: {
360:   Mat_Htool *a;
361:   PetscInt   n;
362:   PetscBool  flg;

364:   PetscFunctionBegin;
365:   PetscCall(MatShellGetContext(A, &a));
366:   PetscOptionsHeadBegin(PetscOptionsObject, "Htool options");
367:   PetscCall(PetscOptionsInt("-mat_htool_min_cluster_size", "Minimal leaf size in cluster tree", nullptr, a->bs[0], a->bs, nullptr));
368:   PetscCall(PetscOptionsInt("-mat_htool_max_block_size", "Maximal number of coefficients in a dense block", nullptr, a->bs[1], a->bs + 1, nullptr));
369:   PetscCall(PetscOptionsReal("-mat_htool_epsilon", "Relative error in Frobenius norm when approximating a block", nullptr, a->epsilon, &a->epsilon, nullptr));
370:   PetscCall(PetscOptionsReal("-mat_htool_eta", "Admissibility condition tolerance", nullptr, a->eta, &a->eta, nullptr));
371:   PetscCall(PetscOptionsInt("-mat_htool_min_target_depth", "Minimal cluster tree depth associated with the rows", nullptr, a->depth[0], a->depth, nullptr));
372:   PetscCall(PetscOptionsInt("-mat_htool_min_source_depth", "Minimal cluster tree depth associated with the columns", nullptr, a->depth[1], a->depth + 1, nullptr));
373:   n = 0;
374:   PetscCall(PetscOptionsEList("-mat_htool_compressor", "Type of compression", "MatHtoolCompressorType", MatHtoolCompressorTypes, PETSC_STATIC_ARRAY_LENGTH(MatHtoolCompressorTypes), MatHtoolCompressorTypes[MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA], &n, &flg));
375:   if (flg) a->compressor = MatHtoolCompressorType(n);
376:   n = 0;
377:   PetscCall(PetscOptionsEList("-mat_htool_clustering", "Type of clustering", "MatHtoolClusteringType", MatHtoolClusteringTypes, PETSC_STATIC_ARRAY_LENGTH(MatHtoolClusteringTypes), MatHtoolClusteringTypes[MAT_HTOOL_CLUSTERING_PCA_REGULAR], &n, &flg));
378:   if (flg) a->clustering = MatHtoolClusteringType(n);
379:   PetscOptionsHeadEnd();
380:   PetscFunctionReturn(PETSC_SUCCESS);
381: }

383: static PetscErrorCode MatAssemblyEnd_Htool(Mat A, MatAssemblyType)
384: {
385:   Mat_Htool                                                   *a;
386:   const PetscInt                                              *ranges;
387:   PetscInt                                                    *offset;
388:   PetscMPIInt                                                  size;
389:   char                                                         S = PetscDefined(USE_COMPLEX) && A->hermitian == PETSC_BOOL3_TRUE ? 'H' : (A->symmetric == PETSC_BOOL3_TRUE ? 'S' : 'N'), uplo = S == 'N' ? 'N' : 'U';
390:   htool::VirtualGenerator<PetscScalar>                        *generator = nullptr;
391:   std::shared_ptr<htool::VirtualCluster>                       t, s = nullptr;
392:   std::shared_ptr<htool::VirtualLowRankGenerator<PetscScalar>> compressor = nullptr;

394:   PetscFunctionBegin;
395:   PetscCall(PetscCitationsRegister(HtoolCitation, &HtoolCite));
396:   PetscCall(MatShellGetContext(A, &a));
397:   delete a->wrapper;
398:   delete a->hmatrix;
399:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
400:   PetscCall(PetscMalloc1(2 * size, &offset));
401:   PetscCall(MatGetOwnershipRanges(A, &ranges));
402:   for (PetscInt i = 0; i < size; ++i) {
403:     offset[2 * i]     = ranges[i];
404:     offset[2 * i + 1] = ranges[i + 1] - ranges[i];
405:   }
406:   switch (a->clustering) {
407:   case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC:
408:     t = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::GeometricSplitting>>>(a->dim);
409:     break;
410:   case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC:
411:     t = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::GeometricSplitting>>>(a->dim);
412:     break;
413:   case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR:
414:     t = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::RegularSplitting>>>(a->dim);
415:     break;
416:   default:
417:     t = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::RegularSplitting>>>(a->dim);
418:   }
419:   t->set_minclustersize(a->bs[0]);
420:   t->build(A->rmap->N, a->gcoords_target, offset, -1, PetscObjectComm((PetscObject)A));
421:   if (a->kernel) a->wrapper = new WrapperHtool(A->rmap->N, A->cmap->N, a->dim, a->kernel, a->kernelctx);
422:   else {
423:     a->wrapper = nullptr;
424:     generator  = reinterpret_cast<htool::VirtualGenerator<PetscScalar> *>(a->kernelctx);
425:   }
426:   if (a->gcoords_target != a->gcoords_source) {
427:     PetscCall(MatGetOwnershipRangesColumn(A, &ranges));
428:     for (PetscInt i = 0; i < size; ++i) {
429:       offset[2 * i]     = ranges[i];
430:       offset[2 * i + 1] = ranges[i + 1] - ranges[i];
431:     }
432:     switch (a->clustering) {
433:     case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC:
434:       s = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::GeometricSplitting>>>(a->dim);
435:       break;
436:     case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC:
437:       s = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::GeometricSplitting>>>(a->dim);
438:       break;
439:     case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR:
440:       s = std::make_shared<htool::Cluster<htool::BoundingBox1<htool::SplittingTypes::RegularSplitting>>>(a->dim);
441:       break;
442:     default:
443:       s = std::make_shared<htool::Cluster<htool::PCA<htool::SplittingTypes::RegularSplitting>>>(a->dim);
444:     }
445:     s->set_minclustersize(a->bs[0]);
446:     s->build(A->cmap->N, a->gcoords_source, offset, -1, PetscObjectComm((PetscObject)A));
447:     S = uplo = 'N';
448:   }
449:   PetscCall(PetscFree(offset));
450:   switch (a->compressor) {
451:   case MAT_HTOOL_COMPRESSOR_FULL_ACA:
452:     compressor = std::make_shared<htool::fullACA<PetscScalar>>();
453:     break;
454:   case MAT_HTOOL_COMPRESSOR_SVD:
455:     compressor = std::make_shared<htool::SVD<PetscScalar>>();
456:     break;
457:   default:
458:     compressor = std::make_shared<htool::sympartialACA<PetscScalar>>();
459:   }
460:   a->hmatrix = dynamic_cast<htool::VirtualHMatrix<PetscScalar> *>(new htool::HMatrix<PetscScalar>(t, s ? s : t, a->epsilon, a->eta, S, uplo, -1, PetscObjectComm((PetscObject)A)));
461:   a->hmatrix->set_compression(compressor);
462:   a->hmatrix->set_maxblocksize(a->bs[1]);
463:   a->hmatrix->set_mintargetdepth(a->depth[0]);
464:   a->hmatrix->set_minsourcedepth(a->depth[1]);
465:   if (s) a->hmatrix->build(a->wrapper ? *a->wrapper : *generator, a->gcoords_target, a->gcoords_source);
466:   else a->hmatrix->build(a->wrapper ? *a->wrapper : *generator, a->gcoords_target);
467:   PetscFunctionReturn(PETSC_SUCCESS);
468: }

470: static PetscErrorCode MatProductNumeric_Htool(Mat C)
471: {
472:   Mat_Product       *product = C->product;
473:   Mat_Htool         *a;
474:   const PetscScalar *in;
475:   PetscScalar       *out;
476:   PetscInt           N, lda;

478:   PetscFunctionBegin;
479:   MatCheckProduct(C, 1);
480:   PetscCall(MatGetSize(C, nullptr, &N));
481:   PetscCall(MatDenseGetLDA(C, &lda));
482:   PetscCheck(lda == C->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")", lda, C->rmap->n);
483:   PetscCall(MatDenseGetArrayRead(product->B, &in));
484:   PetscCall(MatDenseGetArrayWrite(C, &out));
485:   PetscCall(MatShellGetContext(product->A, &a));
486:   switch (product->type) {
487:   case MATPRODUCT_AB:
488:     a->hmatrix->mvprod_local_to_local(in, out, N);
489:     break;
490:   case MATPRODUCT_AtB:
491:     a->hmatrix->mvprod_transp_local_to_local(in, out, N);
492:     break;
493:   default:
494:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatProductType %s is not supported", MatProductTypes[product->type]);
495:   }
496:   PetscCall(MatDenseRestoreArrayWrite(C, &out));
497:   PetscCall(MatDenseRestoreArrayRead(product->B, &in));
498:   PetscFunctionReturn(PETSC_SUCCESS);
499: }

501: static PetscErrorCode MatProductSymbolic_Htool(Mat C)
502: {
503:   Mat_Product *product = C->product;
504:   Mat          A, B;
505:   PetscBool    flg;

507:   PetscFunctionBegin;
508:   MatCheckProduct(C, 1);
509:   A = product->A;
510:   B = product->B;
511:   PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQDENSE, MATMPIDENSE, ""));
512:   PetscCheck(flg && (product->type == MATPRODUCT_AB || product->type == MATPRODUCT_AtB), PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "ProductType %s not supported for %s", MatProductTypes[product->type], ((PetscObject)product->B)->type_name);
513:   if (C->rmap->n == PETSC_DECIDE || C->cmap->n == PETSC_DECIDE || C->rmap->N == PETSC_DECIDE || C->cmap->N == PETSC_DECIDE) {
514:     if (product->type == MATPRODUCT_AB) PetscCall(MatSetSizes(C, A->rmap->n, B->cmap->n, A->rmap->N, B->cmap->N));
515:     else PetscCall(MatSetSizes(C, A->cmap->n, B->cmap->n, A->cmap->N, B->cmap->N));
516:   }
517:   PetscCall(MatSetType(C, MATDENSE));
518:   PetscCall(MatSetUp(C));
519:   PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
520:   PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
521:   PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
522:   C->ops->productsymbolic = nullptr;
523:   C->ops->productnumeric  = MatProductNumeric_Htool;
524:   PetscFunctionReturn(PETSC_SUCCESS);
525: }

527: static PetscErrorCode MatProductSetFromOptions_Htool(Mat C)
528: {
529:   PetscFunctionBegin;
530:   MatCheckProduct(C, 1);
531:   if (C->product->type == MATPRODUCT_AB || C->product->type == MATPRODUCT_AtB) C->ops->productsymbolic = MatProductSymbolic_Htool;
532:   PetscFunctionReturn(PETSC_SUCCESS);
533: }

535: static PetscErrorCode MatHtoolGetHierarchicalMat_Htool(Mat A, const htool::VirtualHMatrix<PetscScalar> **hmatrix)
536: {
537:   Mat_Htool *a;

539:   PetscFunctionBegin;
540:   PetscCall(MatShellGetContext(A, &a));
541:   *hmatrix = a->hmatrix;
542:   PetscFunctionReturn(PETSC_SUCCESS);
543: }

545: /*@C
546:   MatHtoolGetHierarchicalMat - Retrieves the opaque pointer to a Htool virtual matrix stored in a `MATHTOOL`.

548:   No Fortran Support, No C Support

550:   Input Parameter:
551: . A - hierarchical matrix

553:   Output Parameter:
554: . hmatrix - opaque pointer to a Htool virtual matrix

556:   Level: advanced

558: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`
559: @*/
560: PETSC_EXTERN PetscErrorCode MatHtoolGetHierarchicalMat(Mat A, const htool::VirtualHMatrix<PetscScalar> **hmatrix)
561: {
562:   PetscFunctionBegin;
564:   PetscAssertPointer(hmatrix, 2);
565:   PetscTryMethod(A, "MatHtoolGetHierarchicalMat_C", (Mat, const htool::VirtualHMatrix<PetscScalar> **), (A, hmatrix));
566:   PetscFunctionReturn(PETSC_SUCCESS);
567: }

569: static PetscErrorCode MatHtoolSetKernel_Htool(Mat A, MatHtoolKernelFn *kernel, void *kernelctx)
570: {
571:   Mat_Htool *a;

573:   PetscFunctionBegin;
574:   PetscCall(MatShellGetContext(A, &a));
575:   a->kernel    = kernel;
576:   a->kernelctx = kernelctx;
577:   delete a->wrapper;
578:   if (a->kernel) a->wrapper = new WrapperHtool(A->rmap->N, A->cmap->N, a->dim, a->kernel, a->kernelctx);
579:   PetscFunctionReturn(PETSC_SUCCESS);
580: }

582: /*@C
583:   MatHtoolSetKernel - Sets the kernel and context used for the assembly of a `MATHTOOL`.

585:   Collective, No Fortran Support

587:   Input Parameters:
588: + A         - hierarchical matrix
589: . kernel    - computational kernel (or `NULL`)
590: - kernelctx - kernel context (if kernel is `NULL`, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)

592:   Level: advanced

594: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatCreateHtoolFromKernel()`
595: @*/
596: PetscErrorCode MatHtoolSetKernel(Mat A, MatHtoolKernelFn *kernel, void *kernelctx)
597: {
598:   PetscFunctionBegin;
601:   if (!kernel) PetscAssertPointer(kernelctx, 3);
602:   PetscTryMethod(A, "MatHtoolSetKernel_C", (Mat, MatHtoolKernelFn *, void *), (A, kernel, kernelctx));
603:   PetscFunctionReturn(PETSC_SUCCESS);
604: }

606: static PetscErrorCode MatHtoolGetPermutationSource_Htool(Mat A, IS *is)
607: {
608:   Mat_Htool            *a;
609:   std::vector<PetscInt> source;

611:   PetscFunctionBegin;
612:   PetscCall(MatShellGetContext(A, &a));
613:   source = a->hmatrix->get_source_cluster()->get_local_perm();
614:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), source.size(), source.data(), PETSC_COPY_VALUES, is));
615:   PetscCall(ISSetPermutation(*is));
616:   PetscFunctionReturn(PETSC_SUCCESS);
617: }

619: /*@
620:   MatHtoolGetPermutationSource - Gets the permutation associated with the source cluster for a `MATHTOOL` matrix.

622:   Input Parameter:
623: . A - hierarchical matrix

625:   Output Parameter:
626: . is - permutation

628:   Level: advanced

630: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationTarget()`, `MatHtoolUsePermutation()`
631: @*/
632: PetscErrorCode MatHtoolGetPermutationSource(Mat A, IS *is)
633: {
634:   PetscFunctionBegin;
636:   if (!is) PetscAssertPointer(is, 2);
637:   PetscTryMethod(A, "MatHtoolGetPermutationSource_C", (Mat, IS *), (A, is));
638:   PetscFunctionReturn(PETSC_SUCCESS);
639: }

641: static PetscErrorCode MatHtoolGetPermutationTarget_Htool(Mat A, IS *is)
642: {
643:   Mat_Htool            *a;
644:   std::vector<PetscInt> target;

646:   PetscFunctionBegin;
647:   PetscCall(MatShellGetContext(A, &a));
648:   target = a->hmatrix->get_target_cluster()->get_local_perm();
649:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), target.size(), target.data(), PETSC_COPY_VALUES, is));
650:   PetscCall(ISSetPermutation(*is));
651:   PetscFunctionReturn(PETSC_SUCCESS);
652: }

654: /*@
655:   MatHtoolGetPermutationTarget - Gets the permutation associated with the target cluster for a `MATHTOOL` matrix.

657:   Input Parameter:
658: . A - hierarchical matrix

660:   Output Parameter:
661: . is - permutation

663:   Level: advanced

665: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolUsePermutation()`
666: @*/
667: PetscErrorCode MatHtoolGetPermutationTarget(Mat A, IS *is)
668: {
669:   PetscFunctionBegin;
671:   if (!is) PetscAssertPointer(is, 2);
672:   PetscTryMethod(A, "MatHtoolGetPermutationTarget_C", (Mat, IS *), (A, is));
673:   PetscFunctionReturn(PETSC_SUCCESS);
674: }

676: static PetscErrorCode MatHtoolUsePermutation_Htool(Mat A, PetscBool use)
677: {
678:   Mat_Htool *a;

680:   PetscFunctionBegin;
681:   PetscCall(MatShellGetContext(A, &a));
682:   a->hmatrix->set_use_permutation(use);
683:   PetscFunctionReturn(PETSC_SUCCESS);
684: }

686: /*@
687:   MatHtoolUsePermutation - Sets whether a `MATHTOOL` matrix should permute input (resp. output) vectors following its internal source (resp. target) permutation.

689:   Input Parameters:
690: + A   - hierarchical matrix
691: - use - Boolean value

693:   Level: advanced

695: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolGetPermutationTarget()`
696: @*/
697: PetscErrorCode MatHtoolUsePermutation(Mat A, PetscBool use)
698: {
699:   PetscFunctionBegin;
702:   PetscTryMethod(A, "MatHtoolUsePermutation_C", (Mat, PetscBool), (A, use));
703:   PetscFunctionReturn(PETSC_SUCCESS);
704: }

706: static PetscErrorCode MatConvert_Htool_Dense(Mat A, MatType, MatReuse reuse, Mat *B)
707: {
708:   Mat          C;
709:   Mat_Htool   *a;
710:   PetscInt     lda;
711:   PetscScalar *array;

713:   PetscFunctionBegin;
714:   PetscCall(MatShellGetContext(A, &a));
715:   if (reuse == MAT_REUSE_MATRIX) {
716:     C = *B;
717:     PetscCheck(C->rmap->n == A->rmap->n && C->cmap->N == A->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible dimensions");
718:     PetscCall(MatDenseGetLDA(C, &lda));
719:     PetscCheck(lda == C->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")", lda, C->rmap->n);
720:   } else {
721:     PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
722:     PetscCall(MatSetSizes(C, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
723:     PetscCall(MatSetType(C, MATDENSE));
724:     PetscCall(MatSetUp(C));
725:     PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
726:   }
727:   PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
728:   PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
729:   PetscCall(MatDenseGetArrayWrite(C, &array));
730:   a->hmatrix->copy_local_dense_perm(array);
731:   PetscCall(MatDenseRestoreArrayWrite(C, &array));
732:   PetscCheck(!((Mat_Shell *)A->data)->zrows && !((Mat_Shell *)A->data)->zcols, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot call MatConvert() if MatZeroRows() or MatZeroRowsColumns() has been called on the input Mat"); // TODO FIXME
733:   PetscCheck(!((Mat_Shell *)A->data)->axpy, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot call MatConvert() if MatAXPY() has been called on the input Mat");                                                                // TODO FIXME
734:   PetscCall(MatScale(C, ((Mat_Shell *)A->data)->vscale));
735:   PetscCall(MatShift(C, ((Mat_Shell *)A->data)->vshift));
736:   if (reuse == MAT_INPLACE_MATRIX) {
737:     PetscCall(MatHeaderReplace(A, &C));
738:   } else *B = C;
739:   PetscFunctionReturn(PETSC_SUCCESS);
740: }

742: static PetscErrorCode GenEntriesTranspose(PetscInt sdim, PetscInt M, PetscInt N, const PetscInt *rows, const PetscInt *cols, PetscScalar *ptr, void *ctx)
743: {
744:   MatHtoolKernelTranspose *generator = (MatHtoolKernelTranspose *)ctx;
745:   PetscScalar             *tmp;

747:   PetscFunctionBegin;
748:   PetscCall(generator->kernel(sdim, N, M, cols, rows, ptr, generator->kernelctx));
749:   PetscCall(PetscMalloc1(M * N, &tmp));
750:   PetscCall(PetscArraycpy(tmp, ptr, M * N));
751:   for (PetscInt i = 0; i < M; ++i) {
752:     for (PetscInt j = 0; j < N; ++j) ptr[i + j * M] = tmp[j + i * N];
753:   }
754:   PetscCall(PetscFree(tmp));
755:   PetscFunctionReturn(PETSC_SUCCESS);
756: }

758: /* naive implementation which keeps a reference to the original Mat */
759: static PetscErrorCode MatTranspose_Htool(Mat A, MatReuse reuse, Mat *B)
760: {
761:   Mat                      C;
762:   Mat_Htool               *a, *c;
763:   PetscInt                 M = A->rmap->N, N = A->cmap->N, m = A->rmap->n, n = A->cmap->n;
764:   PetscContainer           container;
765:   MatHtoolKernelTranspose *kernelt;

767:   PetscFunctionBegin;
768:   PetscCall(MatShellGetContext(A, &a));
769:   if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
770:   PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MatTranspose() with MAT_INPLACE_MATRIX not supported");
771:   if (reuse == MAT_INITIAL_MATRIX) {
772:     PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
773:     PetscCall(MatSetSizes(C, n, m, N, M));
774:     PetscCall(MatSetType(C, ((PetscObject)A)->type_name));
775:     PetscCall(MatSetUp(C));
776:     PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)C), &container));
777:     PetscCall(PetscNew(&kernelt));
778:     PetscCall(PetscContainerSetPointer(container, kernelt));
779:     PetscCall(PetscObjectCompose((PetscObject)C, "KernelTranspose", (PetscObject)container));
780:   } else {
781:     C = *B;
782:     PetscCall(PetscObjectQuery((PetscObject)C, "KernelTranspose", (PetscObject *)&container));
783:     PetscCheck(container, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Must call MatTranspose() with MAT_INITIAL_MATRIX first");
784:     PetscCall(PetscContainerGetPointer(container, (void **)&kernelt));
785:   }
786:   PetscCall(MatShellGetContext(C, &c));
787:   c->dim = a->dim;
788:   PetscCheck(!((Mat_Shell *)A->data)->zrows && !((Mat_Shell *)A->data)->zcols, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot call MatTranspose() if MatZeroRows() or MatZeroRowsColumns() has been called on the input Mat"); // TODO FIXME
789:   PetscCheck(!((Mat_Shell *)A->data)->axpy, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot call MatTranspose() if MatAXPY() has been called on the input Mat");                                                                // TODO FIXME
790:   ((Mat_Shell *)C->data)->vscale = ((Mat_Shell *)A->data)->vscale;
791:   ((Mat_Shell *)C->data)->vshift = ((Mat_Shell *)A->data)->vshift;
792:   c->kernel                      = GenEntriesTranspose;
793:   if (kernelt->A != A) {
794:     PetscCall(MatDestroy(&kernelt->A));
795:     kernelt->A = A;
796:     PetscCall(PetscObjectReference((PetscObject)A));
797:   }
798:   kernelt->kernel    = a->kernel;
799:   kernelt->kernelctx = a->kernelctx;
800:   c->kernelctx       = kernelt;
801:   if (reuse == MAT_INITIAL_MATRIX) {
802:     PetscCall(PetscMalloc1(N * c->dim, &c->gcoords_target));
803:     PetscCall(PetscArraycpy(c->gcoords_target, a->gcoords_source, N * c->dim));
804:     if (a->gcoords_target != a->gcoords_source) {
805:       PetscCall(PetscMalloc1(M * c->dim, &c->gcoords_source));
806:       PetscCall(PetscArraycpy(c->gcoords_source, a->gcoords_target, M * c->dim));
807:     } else c->gcoords_source = c->gcoords_target;
808:     PetscCall(PetscCalloc2(M, &c->work_source, N, &c->work_target));
809:   }
810:   PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
811:   PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
812:   if (reuse == MAT_INITIAL_MATRIX) *B = C;
813:   PetscFunctionReturn(PETSC_SUCCESS);
814: }

816: /*@C
817:   MatCreateHtoolFromKernel - Creates a `MATHTOOL` from a user-supplied kernel.

819:   Collective, No Fortran Support

821:   Input Parameters:
822: + comm          - MPI communicator
823: . m             - number of local rows (or `PETSC_DECIDE` to have calculated if `M` is given)
824: . n             - number of local columns (or `PETSC_DECIDE` to have calculated if `N` is given)
825: . M             - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given)
826: . N             - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given)
827: . spacedim      - dimension of the space coordinates
828: . coords_target - coordinates of the target
829: . coords_source - coordinates of the source
830: . kernel        - computational kernel (or `NULL`)
831: - kernelctx     - kernel context (if kernel is `NULL`, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)

833:   Output Parameter:
834: . B - matrix

836:   Options Database Keys:
837: + -mat_htool_min_cluster_size <`PetscInt`>                                                     - minimal leaf size in cluster tree
838: . -mat_htool_max_block_size <`PetscInt`>                                                       - maximal number of coefficients in a dense block
839: . -mat_htool_epsilon <`PetscReal`>                                                             - relative error in Frobenius norm when approximating a block
840: . -mat_htool_eta <`PetscReal`>                                                                 - admissibility condition tolerance
841: . -mat_htool_min_target_depth <`PetscInt`>                                                     - minimal cluster tree depth associated with the rows
842: . -mat_htool_min_source_depth <`PetscInt`>                                                     - minimal cluster tree depth associated with the columns
843: . -mat_htool_compressor <sympartialACA, fullACA, SVD>                                          - type of compression
844: - -mat_htool_clustering <PCARegular, PCAGeometric, BoundingBox1Regular, BoundingBox1Geometric> - type of clustering

846:   Level: intermediate

848: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MATHTOOL`, `PCSetCoordinates()`, `MatHtoolSetKernel()`, `MatHtoolCompressorType`, `MATH2OPUS`, `MatCreateH2OpusFromKernel()`
849: @*/
850: PetscErrorCode MatCreateHtoolFromKernel(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt spacedim, const PetscReal coords_target[], const PetscReal coords_source[], MatHtoolKernelFn *kernel, void *kernelctx, Mat *B)
851: {
852:   Mat        A;
853:   Mat_Htool *a;

855:   PetscFunctionBegin;
856:   PetscCall(MatCreate(comm, &A));
858:   PetscAssertPointer(coords_target, 7);
859:   PetscAssertPointer(coords_source, 8);
861:   if (!kernel) PetscAssertPointer(kernelctx, 10);
862:   PetscCall(MatSetSizes(A, m, n, M, N));
863:   PetscCall(MatSetType(A, MATHTOOL));
864:   PetscCall(MatSetUp(A));
865:   PetscCall(MatShellGetContext(A, &a));
866:   a->dim       = spacedim;
867:   a->kernel    = kernel;
868:   a->kernelctx = kernelctx;
869:   PetscCall(PetscCalloc1(A->rmap->N * spacedim, &a->gcoords_target));
870:   PetscCall(PetscArraycpy(a->gcoords_target + A->rmap->rstart * spacedim, coords_target, A->rmap->n * spacedim));
871:   PetscCall(MPIU_Allreduce(MPI_IN_PLACE, a->gcoords_target, A->rmap->N * spacedim, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)A))); /* global target coordinates */
872:   if (coords_target != coords_source) {
873:     PetscCall(PetscCalloc1(A->cmap->N * spacedim, &a->gcoords_source));
874:     PetscCall(PetscArraycpy(a->gcoords_source + A->cmap->rstart * spacedim, coords_source, A->cmap->n * spacedim));
875:     PetscCall(MPIU_Allreduce(MPI_IN_PLACE, a->gcoords_source, A->cmap->N * spacedim, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)A))); /* global source coordinates */
876:   } else a->gcoords_source = a->gcoords_target;
877:   PetscCall(PetscCalloc2(A->cmap->N, &a->work_source, A->rmap->N, &a->work_target));
878:   *B = A;
879:   PetscFunctionReturn(PETSC_SUCCESS);
880: }
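
/* Editorial sketch (not part of htool.cxx): assembling a MATHTOOL from a kernel. GenEntries, AppCtx, dim, n,
   coords, and gcoords are hypothetical; the kernel signature is the MatHtoolKernelFn layout used by
   GenEntriesTranspose() above, filling a column-major M x N block with the interaction of rows[i] and cols[j].
   The kernel receives global indices, so the context carries global coordinates gathered by the application.

     typedef struct {
       PetscInt   dim;      // spatial dimension
       PetscReal *gcoords;  // global coordinates, dim reals per unknown (hypothetical layout)
     } AppCtx;

     static PetscErrorCode GenEntries(PetscInt sdim, PetscInt M, PetscInt N, const PetscInt *rows, const PetscInt *cols, PetscScalar *ptr, void *ctx)
     {
       AppCtx *user = (AppCtx *)ctx;

       PetscFunctionBeginUser;
       for (PetscInt j = 0; j < N; ++j) {
         for (PetscInt i = 0; i < M; ++i) {
           PetscReal r2 = 0.0;
           for (PetscInt d = 0; d < sdim; ++d) {
             const PetscReal delta = user->gcoords[sdim * rows[i] + d] - user->gcoords[sdim * cols[j] + d];
             r2 += delta * delta;
           }
           ptr[i + j * M] = 1.0 / (1.0e-2 + PetscSqrtReal(r2)); // column-major storage expected by the wrapper
         }
       }
       PetscFunctionReturn(PETSC_SUCCESS);
     }

     // ... later, with n local points whose local coordinates are in coords (n * dim reals) and the global
     // coordinates gathered by the application into gcoords ...
     AppCtx user = {dim, gcoords};
     Mat    A;
     PetscCall(MatCreateHtoolFromKernel(PETSC_COMM_WORLD, n, n, PETSC_DETERMINE, PETSC_DETERMINE, dim, coords, coords, GenEntries, &user, &A));
     PetscCall(MatSetFromOptions(A));                              // picks up -mat_htool_epsilon, -mat_htool_compressor, ...
     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));           // the hierarchical matrix is built in MatAssemblyEnd_Htool()
     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
     // use A (MatMult(), MatProduct, ...), then MatDestroy(&A)
*/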

882: /*MC
883:      MATHTOOL = "htool" - A matrix type for hierarchical matrices using the Htool package.

885:   Use `./configure --download-htool` to build PETSc with Htool support.

887:    Options Database Key:
888: .     -mat_type htool - sets the matrix type to `MATHTOOL`

890:    Level: beginner

892: .seealso: [](ch_matrices), `Mat`, `MATH2OPUS`, `MATDENSE`, `MatCreateHtoolFromKernel()`, `MatHtoolSetKernel()`
893: M*/
894: PETSC_EXTERN PetscErrorCode MatCreate_Htool(Mat A)
895: {
896:   Mat_Htool *a;

898:   PetscFunctionBegin;
899:   PetscCall(MatSetType(A, MATSHELL));
900:   PetscCall(PetscNew(&a));
901:   PetscCall(MatShellSetContext(A, a));
902:   PetscCall(MatShellSetOperation(A, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiagonal_Htool));
903:   PetscCall(MatShellSetOperation(A, MATOP_GET_DIAGONAL_BLOCK, (void (*)(void))MatGetDiagonalBlock_Htool));
904:   PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Htool));
905:   PetscCall(MatShellSetOperation(A, MATOP_MULT_TRANSPOSE, (void (*)(void))MatMultTranspose_Htool));
906:   if (!PetscDefined(USE_COMPLEX)) PetscCall(MatShellSetOperation(A, MATOP_MULT_HERMITIAN_TRANSPOSE, (void (*)(void))MatMultTranspose_Htool));
907:   A->ops->increaseoverlap   = MatIncreaseOverlap_Htool;
908:   A->ops->createsubmatrices = MatCreateSubMatrices_Htool;
909:   PetscCall(MatShellSetOperation(A, MATOP_VIEW, (void (*)(void))MatView_Htool));
910:   PetscCall(MatShellSetOperation(A, MATOP_SET_FROM_OPTIONS, (void (*)(void))MatSetFromOptions_Htool));
911:   PetscCall(MatShellSetOperation(A, MATOP_GET_ROW, (void (*)(void))MatGetRow_Htool));
912:   PetscCall(MatShellSetOperation(A, MATOP_RESTORE_ROW, (void (*)(void))MatRestoreRow_Htool));
913:   PetscCall(MatShellSetOperation(A, MATOP_ASSEMBLY_END, (void (*)(void))MatAssemblyEnd_Htool));
914:   PetscCall(MatShellSetOperation(A, MATOP_TRANSPOSE, (void (*)(void))MatTranspose_Htool));
915:   PetscCall(MatShellSetOperation(A, MATOP_DESTROY, (void (*)(void))MatDestroy_Htool));
916:   a->dim            = 0;
917:   a->gcoords_target = nullptr;
918:   a->gcoords_source = nullptr;
919:   a->bs[0]          = 10;
920:   a->bs[1]          = 1000000;
921:   a->epsilon        = PetscSqrtReal(PETSC_SMALL);
922:   a->eta            = 10.0;
923:   a->depth[0]       = 0;
924:   a->depth[1]       = 0;
925:   a->compressor     = MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA;
926:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_seqdense_C", MatProductSetFromOptions_Htool));
927:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_mpidense_C", MatProductSetFromOptions_Htool));
928:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_seqdense_C", MatConvert_Htool_Dense));
929:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_mpidense_C", MatConvert_Htool_Dense));
930:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetHierarchicalMat_C", MatHtoolGetHierarchicalMat_Htool));
931:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolSetKernel_C", MatHtoolSetKernel_Htool));
932:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationSource_C", MatHtoolGetPermutationSource_Htool));
933:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationTarget_C", MatHtoolGetPermutationTarget_Htool));
934:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUsePermutation_C", MatHtoolUsePermutation_Htool));
935:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContext_C", MatShellSetContext_Immutable));
936:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContextDestroy_C", MatShellSetContextDestroy_Immutable));
937:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetManageScalingShifts_C", MatShellSetManageScalingShifts_Immutable));
938:   PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATHTOOL));
939:   PetscFunctionReturn(PETSC_SUCCESS);
940: }