Actual source code: gcreate.c

  1: #include <petsc/private/matimpl.h>

  3: #include <../src/mat/impls/aij/seq/aij.h>
  4: #include <../src/mat/impls/aij/mpi/mpiaij.h>

  6: PetscErrorCode MatSetBlockSizes_Default(Mat mat, PetscInt rbs, PetscInt cbs)
  7: {
  8:   PetscFunctionBegin;
  9:   if (!mat->preallocated) PetscFunctionReturn(PETSC_SUCCESS);
 10:   PetscCheck(mat->rmap->bs <= 0 || mat->rmap->bs == rbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot change row block size %" PetscInt_FMT " to %" PetscInt_FMT, mat->rmap->bs, rbs);
 11:   PetscCheck(mat->cmap->bs <= 0 || mat->cmap->bs == cbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot change column block size %" PetscInt_FMT " to %" PetscInt_FMT, mat->cmap->bs, cbs);
 12:   PetscFunctionReturn(PETSC_SUCCESS);
 13: }

 15: PetscErrorCode MatShift_Basic(Mat Y, PetscScalar a)
 16: {
 17:   PetscInt    i, start, end, oldValA = 0, oldValB = 0;
 18:   PetscScalar alpha = a;
 19:   PetscBool   prevoption;
 20:   PetscBool   isSeqAIJDerived, isMPIAIJDerived; // all classes sharing SEQAIJHEADER or MPIAIJHEADER
 21:   Mat         A = NULL, B = NULL;

 23:   PetscFunctionBegin;
 24:   PetscCall(MatGetOption(Y, MAT_NO_OFF_PROC_ENTRIES, &prevoption));
 25:   PetscCall(MatSetOption(Y, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
 26:   PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &isSeqAIJDerived, MATSEQAIJ, MATSEQBAIJ, MATSEQSBAIJ, ""));
 27:   PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &isMPIAIJDerived, MATMPIAIJ, MATMPIBAIJ, MATMPISBAIJ, ""));

 29:   if (isSeqAIJDerived) A = Y;
 30:   else if (isMPIAIJDerived) {
 31:     Mat_MPIAIJ *mpiaij = (Mat_MPIAIJ *)Y->data;
 32:     A                  = mpiaij->A;
 33:     B                  = mpiaij->B;
 34:   }

 36:   if (A) {
 37:     oldValA                        = ((Mat_SeqAIJ *)A->data)->nonew;
 38:     ((Mat_SeqAIJ *)A->data)->nonew = 0; // so that new nonzero locations are allowed
 39:   }
 40:   if (B) {
 41:     oldValB                        = ((Mat_SeqAIJ *)B->data)->nonew;
 42:     ((Mat_SeqAIJ *)B->data)->nonew = 0;
 43:   }

 45:   PetscCall(MatGetOwnershipRange(Y, &start, &end));
 46:   for (i = start; i < end; i++) {
 47:     if (i < Y->cmap->N) PetscCall(MatSetValues(Y, 1, &i, 1, &i, &alpha, ADD_VALUES));
 48:   }
 49:   PetscCall(MatAssemblyBegin(Y, MAT_FINAL_ASSEMBLY));
 50:   PetscCall(MatAssemblyEnd(Y, MAT_FINAL_ASSEMBLY));
 51:   PetscCall(MatSetOption(Y, MAT_NO_OFF_PROC_ENTRIES, prevoption));
 52:   if (A) ((Mat_SeqAIJ *)A->data)->nonew = oldValA;
 53:   if (B) ((Mat_SeqAIJ *)B->data)->nonew = oldValB;
 54:   PetscFunctionReturn(PETSC_SUCCESS);
 55: }

 57: /*@
 58:   MatCreate - Creates a matrix where the type is determined
 59:   either by a call to `MatSetType()` or from the options database
 60:   with a call to `MatSetFromOptions()`.

 62:   Collective

 64:   Input Parameter:
 65: . comm - MPI communicator

 67:   Output Parameter:
 68: . A - the matrix

 70:   Options Database Keys:
 71: + -mat_type seqaij   - `MATSEQAIJ` type, uses `MatCreateSeqAIJ()`
 72: . -mat_type mpiaij   - `MATMPIAIJ` type, uses `MatCreateAIJ()`
 73: . -mat_type seqdense - `MATSEQDENSE` type, uses `MatCreateSeqDense()`
 74: . -mat_type mpidense - `MATMPIDENSE` type, uses `MatCreateDense()`
 75: . -mat_type seqbaij  - `MATSEQBAIJ` type, uses `MatCreateSeqBAIJ()`
 76: - -mat_type mpibaij  - `MATMPIBAIJ` type, uses `MatCreateBAIJ()`

 78:    See the manpages for particular formats (e.g., `MATSEQAIJ`)
 79:    for additional format-specific options.

 81:   Level: beginner

 83:   Notes:
 84:   The default matrix type is `MATAIJ`, using the routines `MatCreateSeqAIJ()` or
 85:   `MatCreateAIJ()` if you do not set a type in the options database. If you never call
 86:   `MatSetType()` or `MatSetFromOptions()` it will generate an error when you try to use the
 87:   matrix.
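
  Example Usage\:
  The sketch below shows the typical creation sequence; the communicator, the global sizes, and the
  choice of `MatSetFromOptions()` over an explicit `MatSetType()` call are illustrative.
.vb
  Mat A;

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
  PetscCall(MatSetFromOptions(A)); // or set a type explicitly with MatSetType()
  PetscCall(MatSetUp(A));
  // ... MatSetValues(), MatAssemblyBegin()/MatAssemblyEnd(), use the matrix ...
  PetscCall(MatDestroy(&A));
.ve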

 89: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqAIJ()`, `MatCreateAIJ()`,
 90:           `MatCreateSeqDense()`, `MatCreateDense()`,
 91:           `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`,
 92:           `MatCreateSeqSBAIJ()`, `MatCreateSBAIJ()`,
 93:           `MatConvert()`
 94: @*/
 95: PetscErrorCode MatCreate(MPI_Comm comm, Mat *A)
 96: {
 97:   Mat B;

 99:   PetscFunctionBegin;
100:   PetscAssertPointer(A, 2);

102:   *A = NULL;
103:   PetscCall(MatInitializePackage());

105:   PetscCall(PetscHeaderCreate(B, MAT_CLASSID, "Mat", "Matrix", "Mat", comm, MatDestroy, MatView));
106:   PetscCall(PetscLayoutCreate(comm, &B->rmap));
107:   PetscCall(PetscLayoutCreate(comm, &B->cmap));
108:   PetscCall(PetscStrallocpy(VECSTANDARD, &B->defaultvectype));
109:   PetscCall(PetscStrallocpy(PETSCRANDER48, &B->defaultrandtype));

111:   B->symmetric                   = PETSC_BOOL3_UNKNOWN;
112:   B->hermitian                   = PETSC_BOOL3_UNKNOWN;
113:   B->structurally_symmetric      = PETSC_BOOL3_UNKNOWN;
114:   B->spd                         = PETSC_BOOL3_UNKNOWN;
115:   B->symmetry_eternal            = PETSC_FALSE;
116:   B->structural_symmetry_eternal = PETSC_FALSE;

118:   B->congruentlayouts = PETSC_DECIDE;
119:   B->preallocated     = PETSC_FALSE;
120: #if defined(PETSC_HAVE_DEVICE)
121:   B->boundtocpu = PETSC_TRUE;
122: #endif
123:   *A = B;
124:   PetscFunctionReturn(PETSC_SUCCESS);
125: }

127: /*@C
128:   MatCreateFromOptions - Creates a matrix whose type is set from the options database

130:   Collective

132:   Input Parameters:
133: + comm   - MPI communicator
134: . prefix - [optional] prefix for the options database
135: . bs     - the blocksize (commonly 1)
136: . m      - the local number of rows (or `PETSC_DECIDE`)
137: . n      - the local number of columns (or `PETSC_DECIDE` or `PETSC_DETERMINE`)
138: . M      - the global number of rows (or `PETSC_DETERMINE`)
139: - N      - the global number of columns (or `PETSC_DETERMINE`)

141:   Output Parameter:
142: . A - the matrix

144:   Options Database Key:
145: . -mat_type - see `MatType`, for example `aij`, `aijcusparse`, `baij`, `sbaij`, or `dense`; defaults to `aij`

147:   Level: beginner
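
  Example Usage\:
  A minimal sketch; the communicator, block size, and sizes are illustrative.
.vb
  Mat A;

  // roughly equivalent to MatCreate(), MatSetOptionsPrefix(), MatSetBlockSize(), MatSetSizes(), MatSetFromOptions()
  PetscCall(MatCreateFromOptions(PETSC_COMM_WORLD, NULL, 1, PETSC_DECIDE, PETSC_DECIDE, 100, 100, &A));
.ve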

149: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqAIJ()`, `MatCreateAIJ()`,
150:           `MatCreateSeqDense()`, `MatCreateDense()`,
151:           `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`,
152:           `MatCreateSeqSBAIJ()`, `MatCreateSBAIJ()`,
153:           `MatConvert()`, `MatCreate()`
154: @*/
155: PetscErrorCode MatCreateFromOptions(MPI_Comm comm, const char *prefix, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, Mat *A)
156: {
157:   PetscFunctionBegin;
158:   PetscAssertPointer(A, 8);
159:   PetscCall(MatCreate(comm, A));
160:   if (prefix) PetscCall(MatSetOptionsPrefix(*A, prefix));
161:   PetscCall(MatSetBlockSize(*A, bs));
162:   PetscCall(MatSetSizes(*A, m, n, M, N));
163:   PetscCall(MatSetFromOptions(*A));
164:   PetscFunctionReturn(PETSC_SUCCESS);
165: }

167: /*@
168:   MatSetErrorIfFailure - Causes `Mat` to generate an immediate error when a problem, for example a zero pivot, is detected.

170:   Logically Collective

172:   Input Parameters:
173: + mat - matrix obtained from `MatCreate()`
174: - flg - `PETSC_TRUE` indicates you want the error generated

176:   Level: advanced

178:   Note:
179:   If this flag is not set then the matrix operation will note the error and continue. The error may cause a later `PC` or `KSP` error
180:   or result in a `KSPConvergedReason` indicating the method did not converge.

182: .seealso: [](ch_matrices), `Mat`, `PCSetErrorIfFailure()`, `KSPConvergedReason`, `SNESConvergedReason`
183: @*/
184: PetscErrorCode MatSetErrorIfFailure(Mat mat, PetscBool flg)
185: {
186:   PetscFunctionBegin;
189:   mat->erroriffailure = flg;
190:   PetscFunctionReturn(PETSC_SUCCESS);
191: }

193: /*@
194:   MatSetSizes - Sets the local and global sizes, and checks to determine compatibility

196:   Collective

198:   Input Parameters:
199: + A - the matrix
200: . m - number of local rows (or `PETSC_DECIDE`)
201: . n - number of local columns (or `PETSC_DECIDE`)
202: . M - number of global rows (or `PETSC_DETERMINE`)
203: - N - number of global columns (or `PETSC_DETERMINE`)

205:   Level: beginner

207:   Notes:
208:   `m` (`n`) and `M` (`N`) cannot both be `PETSC_DECIDE`.
209:   If one processor calls this with `M` (`N`) of `PETSC_DECIDE` then all processors must do so as well; otherwise, the program will hang.

211:   If `PETSC_DECIDE` is not used for the arguments `m` and `n`, then the
212:   user must ensure that they are chosen to be compatible with the
213:   vectors. To do this, one first considers the matrix-vector product
214:   `y = A x`. The `m` that is used in the above routine must match the
215:   local size used in the vector creation routine `VecCreateMPI()` for `y`.
216:   Likewise, the `n` used must match that used as the local size in
217:   `VecCreateMPI()` for `x`.

219:   You cannot change the sizes once they have been set.

221:   The sizes must be set before `MatSetUp()` or `MatXXXSetPreallocation()` is called.
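
  Example Usage\:
  A sketch; the global sizes are illustrative.
.vb
  Mat A;

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  // let PETSc choose the local sizes of a 100 x 100 global matrix
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
  // alternatively, give the local sizes and let the global sizes be computed
  // PetscCall(MatSetSizes(A, mlocal, nlocal, PETSC_DETERMINE, PETSC_DETERMINE));
.ve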

223: .seealso: [](ch_matrices), `Mat`, `MatGetSize()`, `PetscSplitOwnership()`
224: @*/
225: PetscErrorCode MatSetSizes(Mat A, PetscInt m, PetscInt n, PetscInt M, PetscInt N)
226: {
227:   PetscFunctionBegin;
231:   PetscCheck(M <= 0 || m <= M, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local row size %" PetscInt_FMT " cannot be larger than global row size %" PetscInt_FMT, m, M);
232:   PetscCheck(N <= 0 || n <= N, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local column size %" PetscInt_FMT " cannot be larger than global column size %" PetscInt_FMT, n, N);
233:   PetscCheck((A->rmap->n < 0 || A->rmap->N < 0) || (A->rmap->n == m && (M <= 0 || A->rmap->N == M)), PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot change/reset row sizes to %" PetscInt_FMT " local %" PetscInt_FMT " global after previously setting them to %" PetscInt_FMT " local %" PetscInt_FMT " global", m, M,
234:              A->rmap->n, A->rmap->N);
235:   PetscCheck((A->cmap->n < 0 || A->cmap->N < 0) || (A->cmap->n == n && (N <= 0 || A->cmap->N == N)), PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot change/reset column sizes to %" PetscInt_FMT " local %" PetscInt_FMT " global after previously setting them to %" PetscInt_FMT " local %" PetscInt_FMT " global", n, N,
236:              A->cmap->n, A->cmap->N);
237:   A->rmap->n = m;
238:   A->cmap->n = n;
239:   A->rmap->N = M > -1 ? M : A->rmap->N;
240:   A->cmap->N = N > -1 ? N : A->cmap->N;
241:   PetscFunctionReturn(PETSC_SUCCESS);
242: }

244: /*@
245:   MatSetFromOptions - Sets the matrix type and other matrix options from
246:   the options database.

248:   Collective

250:   Input Parameter:
251: . B - the matrix

253:   Options Database Keys:
254: + -mat_type seqaij   - `MATSEQAIJ` type, uses `MatCreateSeqAIJ()`
255: . -mat_type mpiaij   - `MATMPIAIJ` type, uses `MatCreateAIJ()`
256: . -mat_type seqdense - `MATSEQDENSE` type, uses `MatCreateSeqDense()`
257: . -mat_type mpidense - `MATMPIDENSE` type, uses `MatCreateDense()`
258: . -mat_type seqbaij  - `MATSEQBAIJ` type, uses `MatCreateSeqBAIJ()`
259: - -mat_type mpibaij  - `MATMPIBAIJ` type, uses `MatCreateBAIJ()`

261:    See the manpages for particular formats (e.g., `MATSEQAIJ`)
262:    for additional format-specific options.

264:   Level: beginner

266:   Notes:
267:   Generates a parallel MPI matrix if the communicator has more than one processor.  The default
268:   matrix type is `MATAIJ`, using the routines `MatCreateSeqAIJ()` and `MatCreateAIJ()` if you
269:   do not select a type in the options database.
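
  Example Usage\:
  A sketch of selecting the type at run time; the sizes are illustrative.
.vb
  Mat A;

  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
  PetscCall(MatSetFromOptions(A)); // honors, e.g., -mat_type seqdense given on the command line
.ve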

271: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqAIJ()`, `MatCreateAIJ()`,
272:           `MatCreateSeqDense()`, `MatCreateDense()`,
273:           `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`,
274:           `MatCreateSeqSBAIJ()`, `MatCreateSBAIJ()`,
275:           `MatConvert()`
276: @*/
277: PetscErrorCode MatSetFromOptions(Mat B)
278: {
279:   const char *deft = MATAIJ;
280:   char        type[256];
281:   PetscBool   flg, set;
282:   PetscInt    bind_below = 0;

284:   PetscFunctionBegin;

287:   PetscObjectOptionsBegin((PetscObject)B);

289:   if (B->rmap->bs < 0) {
290:     PetscInt newbs = -1;
291:     PetscCall(PetscOptionsInt("-mat_block_size", "Set the blocksize used to store the matrix", "MatSetBlockSize", newbs, &newbs, &flg));
292:     if (flg) {
293:       PetscCall(PetscLayoutSetBlockSize(B->rmap, newbs));
294:       PetscCall(PetscLayoutSetBlockSize(B->cmap, newbs));
295:     }
296:   }

298:   PetscCall(PetscOptionsFList("-mat_type", "Matrix type", "MatSetType", MatList, deft, type, 256, &flg));
299:   if (flg) {
300:     PetscCall(MatSetType(B, type));
301:   } else if (!((PetscObject)B)->type_name) {
302:     PetscCall(MatSetType(B, deft));
303:   }

305:   PetscCall(PetscOptionsName("-mat_is_symmetric", "Checks if mat is symmetric on MatAssemblyEnd()", "MatIsSymmetric", &B->checksymmetryonassembly));
306:   PetscCall(PetscOptionsReal("-mat_is_symmetric", "Checks if mat is symmetric on MatAssemblyEnd()", "MatIsSymmetric", B->checksymmetrytol, &B->checksymmetrytol, NULL));
307:   PetscCall(PetscOptionsBool("-mat_null_space_test", "Checks if provided null space is correct in MatAssemblyEnd()", "MatSetNullSpaceTest", B->checknullspaceonassembly, &B->checknullspaceonassembly, NULL));
308:   PetscCall(PetscOptionsBool("-mat_error_if_failure", "Generate an error if an error occurs when factoring the matrix", "MatSetErrorIfFailure", B->erroriffailure, &B->erroriffailure, NULL));

310:   PetscTryTypeMethod(B, setfromoptions, PetscOptionsObject);

312:   flg = PETSC_FALSE;
313:   PetscCall(PetscOptionsBool("-mat_new_nonzero_location_err", "Generate an error if new nonzeros are created in the matrix structure (useful to test preallocation)", "MatSetOption", flg, &flg, &set));
314:   if (set) PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, flg));
315:   flg = PETSC_FALSE;
316:   PetscCall(PetscOptionsBool("-mat_new_nonzero_allocation_err", "Generate an error if new nonzeros are allocated in the matrix structure (useful to test preallocation)", "MatSetOption", flg, &flg, &set));
317:   if (set) PetscCall(MatSetOption(B, MAT_NEW_NONZERO_ALLOCATION_ERR, flg));
318:   flg = PETSC_FALSE;
319:   PetscCall(PetscOptionsBool("-mat_ignore_zero_entries", "For AIJ/IS matrices this will stop zero values from creating a zero location in the matrix", "MatSetOption", flg, &flg, &set));
320:   if (set) PetscCall(MatSetOption(B, MAT_IGNORE_ZERO_ENTRIES, flg));

322:   flg = PETSC_FALSE;
323:   PetscCall(PetscOptionsBool("-mat_form_explicit_transpose", "Hint to form an explicit transpose for operations like MatMultTranspose", "MatSetOption", flg, &flg, &set));
324:   if (set) PetscCall(MatSetOption(B, MAT_FORM_EXPLICIT_TRANSPOSE, flg));

326:   /* Bind to CPU if below a user-specified size threshold.
327:    * This perhaps belongs in the options for the GPU Mat types, but MatBindToCPU() does nothing when called on non-GPU types,
328:    * and putting it here makes it more maintainable than duplicating this for all of them. */
329:   PetscCall(PetscOptionsInt("-mat_bind_below", "Set the size threshold (in local rows) below which the Mat is bound to the CPU", "MatBindToCPU", bind_below, &bind_below, &flg));
330:   if (flg && B->rmap->n < bind_below) PetscCall(MatBindToCPU(B, PETSC_TRUE));

332:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
333:   PetscCall(PetscObjectProcessOptionsHandlers((PetscObject)B, PetscOptionsObject));
334:   PetscOptionsEnd();
335:   PetscFunctionReturn(PETSC_SUCCESS);
336: }

338: /*@C
339:   MatXAIJSetPreallocation - set preallocation for serial and parallel `MATAIJ`, `MATBAIJ`, and `MATSBAIJ` matrices and their unassembled versions.

341:   Collective

343:   Input Parameters:
344: + A     - matrix being preallocated
345: . bs    - block size
346: . dnnz  - number of nonzero column blocks per block row of diagonal part of parallel matrix
347: . onnz  - number of nonzero column blocks per block row of off-diagonal part of parallel matrix
348: . dnnzu - number of nonzero column blocks per block row of upper-triangular part of diagonal part of parallel matrix
349: - onnzu - number of nonzero column blocks per block row of upper-triangular part of off-diagonal part of parallel matrix

351:   Level: beginner
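
  Example Usage\:
  A sketch for an AIJ-type matrix with block size 1; the per-row estimates are illustrative, and the
  symmetric (SBAIJ) counts are passed as `NULL` since they are not used for AIJ.
.vb
  PetscInt m, *dnnz, *onnz;

  PetscCall(MatGetLocalSize(A, &m, NULL));
  PetscCall(PetscMalloc2(m, &dnnz, m, &onnz));
  for (PetscInt i = 0; i < m; i++) {
    dnnz[i] = 5; // estimated nonzeros per row in the diagonal block
    onnz[i] = 2; // estimated nonzeros per row in the off-diagonal block
  }
  PetscCall(MatXAIJSetPreallocation(A, 1, dnnz, onnz, NULL, NULL));
  PetscCall(PetscFree2(dnnz, onnz));
.ve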

353: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatSeqBAIJSetPreallocation()`, `MatMPIBAIJSetPreallocation()`,
354:           `MatSeqSBAIJSetPreallocation()`, `MatMPISBAIJSetPreallocation()`,
355:           `PetscSplitOwnership()`
356: @*/
357: PetscErrorCode MatXAIJSetPreallocation(Mat A, PetscInt bs, const PetscInt dnnz[], const PetscInt onnz[], const PetscInt dnnzu[], const PetscInt onnzu[])
358: {
359:   PetscInt cbs;
360:   void (*aij)(void);
361:   void (*is)(void);
362:   void (*hyp)(void) = NULL;

364:   PetscFunctionBegin;
365:   if (bs != PETSC_DECIDE) { /* don't mess with an already set block size */
366:     PetscCall(MatSetBlockSize(A, bs));
367:   }
368:   PetscCall(PetscLayoutSetUp(A->rmap));
369:   PetscCall(PetscLayoutSetUp(A->cmap));
370:   PetscCall(MatGetBlockSizes(A, &bs, &cbs));
371:   /* these routines assume bs == cbs; this should be checked somehow */
372:   PetscCall(MatSeqBAIJSetPreallocation(A, bs, 0, dnnz));
373:   PetscCall(MatMPIBAIJSetPreallocation(A, bs, 0, dnnz, 0, onnz));
374:   PetscCall(MatSeqSBAIJSetPreallocation(A, bs, 0, dnnzu));
375:   PetscCall(MatMPISBAIJSetPreallocation(A, bs, 0, dnnzu, 0, onnzu));
376:   /*
377:     In general, we have to do extra work to preallocate for scalar (AIJ) or unassembled (IS) matrices, so we check whether it will do any
378:     good before going on with it.
379:   */
380:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatMPIAIJSetPreallocation_C", &aij));
381:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatISSetPreallocation_C", &is));
382: #if defined(PETSC_HAVE_HYPRE)
383:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatHYPRESetPreallocation_C", &hyp));
384: #endif
385:   if (!aij && !is && !hyp) PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSeqAIJSetPreallocation_C", &aij));
386:   if (aij || is || hyp) {
387:     if (bs == cbs && bs == 1) {
388:       PetscCall(MatSeqAIJSetPreallocation(A, 0, dnnz));
389:       PetscCall(MatMPIAIJSetPreallocation(A, 0, dnnz, 0, onnz));
390:       PetscCall(MatISSetPreallocation(A, 0, dnnz, 0, onnz));
391: #if defined(PETSC_HAVE_HYPRE)
392:       PetscCall(MatHYPRESetPreallocation(A, 0, dnnz, 0, onnz));
393: #endif
394:     } else { /* Convert block-row preallocation to scalar-row */
395:       PetscInt i, m, *sdnnz, *sonnz;
396:       PetscCall(MatGetLocalSize(A, &m, NULL));
397:       PetscCall(PetscMalloc2((!!dnnz) * m, &sdnnz, (!!onnz) * m, &sonnz));
398:       for (i = 0; i < m; i++) {
399:         if (dnnz) sdnnz[i] = dnnz[i / bs] * cbs;
400:         if (onnz) sonnz[i] = onnz[i / bs] * cbs;
401:       }
402:       PetscCall(MatSeqAIJSetPreallocation(A, 0, dnnz ? sdnnz : NULL));
403:       PetscCall(MatMPIAIJSetPreallocation(A, 0, dnnz ? sdnnz : NULL, 0, onnz ? sonnz : NULL));
404:       PetscCall(MatISSetPreallocation(A, 0, dnnz ? sdnnz : NULL, 0, onnz ? sonnz : NULL));
405: #if defined(PETSC_HAVE_HYPRE)
406:       PetscCall(MatHYPRESetPreallocation(A, 0, dnnz ? sdnnz : NULL, 0, onnz ? sonnz : NULL));
407: #endif
408:       PetscCall(PetscFree2(sdnnz, sonnz));
409:     }
410:   }
411:   PetscFunctionReturn(PETSC_SUCCESS);
412: }

414: /*@C
415:   MatHeaderMerge - Merges some information from the header of `C` to `A`; the `C` object is then destroyed

417:   Collective, No Fortran Support

419:   Input Parameters:
420: + A - a `Mat` being merged into
421: - C - the `Mat` providing the merge information

423:   Level: developer

425:   Notes:
426:   `A` and `C` must be of the same type.
427:   The object list and query function list in `A` are retained, as well as the object name and prefix.
428:   The object state of `A` is increased by 1.

430:   Developer Note:
431:   This is somewhat different from `MatHeaderReplace()`; it would be nice to merge the two code paths

433: .seealso: `Mat`, `MatHeaderReplace()`
434:  @*/
435: PetscErrorCode MatHeaderMerge(Mat A, Mat *C)
436: {
437:   PetscInt          refct;
438:   PetscOps          Abops;
439:   struct _MatOps    Aops;
440:   char             *mtype, *mname, *mprefix;
441:   Mat_Product      *product;
442:   Mat_Redundant    *redundant;
443:   PetscObjectState  state;
444:   PetscObjectList   olist;
445:   PetscFunctionList qlist;

447:   PetscFunctionBegin;
450:   if (A == *C) PetscFunctionReturn(PETSC_SUCCESS);
451:   PetscCheckSameTypeAndComm(A, 1, *C, 2);
452:   /* save the parts of A we need */
453:   Abops     = ((PetscObject)A)->bops[0];
454:   Aops      = A->ops[0];
455:   refct     = ((PetscObject)A)->refct;
456:   mtype     = ((PetscObject)A)->type_name;
457:   mname     = ((PetscObject)A)->name;
458:   state     = ((PetscObject)A)->state;
459:   mprefix   = ((PetscObject)A)->prefix;
460:   product   = A->product;
461:   redundant = A->redundant;
462:   qlist     = ((PetscObject)A)->qlist;
463:   olist     = ((PetscObject)A)->olist;

465:   /* zero these so the destroy below does not free them */
466:   ((PetscObject)A)->type_name = NULL;
467:   ((PetscObject)A)->name      = NULL;
468:   ((PetscObject)A)->qlist     = NULL;
469:   ((PetscObject)A)->olist     = NULL;

471:   /*
472:      free all the interior data structures from mat
473:      cannot use PetscUseTypeMethod(A,destroy); because compiler
474:      thinks it may print NULL type_name and name
475:   */
476:   PetscTryTypeMethod(A, destroy);

478:   PetscCall(PetscFree(A->defaultvectype));
479:   PetscCall(PetscFree(A->defaultrandtype));
480:   PetscCall(PetscLayoutDestroy(&A->rmap));
481:   PetscCall(PetscLayoutDestroy(&A->cmap));
482:   PetscCall(PetscComposedQuantitiesDestroy((PetscObject)A));

484:   /* copy C over to A */
485:   PetscCall(PetscFree(A->factorprefix));
486:   PetscCall(PetscMemcpy(A, *C, sizeof(struct _p_Mat)));

488:   /* return the parts of A we saved */
489:   ((PetscObject)A)->bops[0]   = Abops;
490:   A->ops[0]                   = Aops;
491:   ((PetscObject)A)->refct     = refct;
492:   ((PetscObject)A)->type_name = mtype;
493:   ((PetscObject)A)->name      = mname;
494:   ((PetscObject)A)->prefix    = mprefix;
495:   ((PetscObject)A)->state     = state + 1;
496:   A->product                  = product;
497:   A->redundant                = redundant;

499:   /* Append the saved lists */
500:   PetscCall(PetscFunctionListDuplicate(qlist, &((PetscObject)A)->qlist));
501:   PetscCall(PetscObjectListDuplicate(olist, &((PetscObject)A)->olist));
502:   PetscCall(PetscFunctionListDestroy(&qlist));
503:   PetscCall(PetscObjectListDestroy(&olist));

505:   /* since these two are copied into A we do not want them destroyed in C */
506:   ((PetscObject)*C)->qlist = NULL;
507:   ((PetscObject)*C)->olist = NULL;
508:   PetscCall(PetscHeaderDestroy(C));
509:   PetscFunctionReturn(PETSC_SUCCESS);
510: }

512: /*@
513:   MatHeaderReplace - Replaces the internal data of matrix `A` by the internal data of matrix `C` while deleting the outer wrapper of `C`

515:   Input Parameters:
516: + A - a `Mat` whose internal data is to be replaced
517: - C - the `Mat` providing new internal data for `A`

519:   Level: advanced

521:   Example Usage\:
522: .vb
523:   Mat C;
524:   MatCreateSeqAIJWithArrays(..., &C);
525:   MatHeaderReplace(A, &C);
526:   // C has been destroyed and A contains the matrix entries of C
527: .ve

529:   Note:
530:   This can be used inside a function provided to `SNESSetJacobian()`, `TSSetRHSJacobian()`, or `TSSetIJacobian()` in cases where the user code computes an entirely new sparse matrix
531:   (generally with a different nonzero pattern) for each Newton update. It is usually better to reuse the matrix structure of `A` instead of constructing an entirely new one.

533:   Developer Note:
534:   This is somewhat different from `MatHeaderMerge()`; it would be nice to merge the two code paths

536: .seealso: `Mat`, `MatHeaderMerge()`
537:  @*/
538: PetscErrorCode MatHeaderReplace(Mat A, Mat *C)
539: {
540:   PetscInt         refct;
541:   PetscObjectState state;
542:   struct _p_Mat    buffer;
543:   MatStencilInfo   stencil;

545:   PetscFunctionBegin;
548:   if (A == *C) PetscFunctionReturn(PETSC_SUCCESS);
549:   PetscCheckSameComm(A, 1, *C, 2);
550:   PetscCheck(((PetscObject)*C)->refct == 1, PetscObjectComm((PetscObject)*C), PETSC_ERR_ARG_WRONGSTATE, "Object C has refct %" PetscInt_FMT " > 1, would leave hanging reference", ((PetscObject)*C)->refct);

552:   /* swap C and A */
553:   refct   = ((PetscObject)A)->refct;
554:   state   = ((PetscObject)A)->state;
555:   stencil = A->stencil;
556:   PetscCall(PetscMemcpy(&buffer, A, sizeof(struct _p_Mat)));
557:   PetscCall(PetscMemcpy(A, *C, sizeof(struct _p_Mat)));
558:   PetscCall(PetscMemcpy(*C, &buffer, sizeof(struct _p_Mat)));
559:   ((PetscObject)A)->refct = refct;
560:   ((PetscObject)A)->state = state + 1;
561:   A->stencil              = stencil;

563:   ((PetscObject)*C)->refct = 1;
564:   PetscCall(MatDestroy(C));
565:   PetscFunctionReturn(PETSC_SUCCESS);
566: }

568: /*@
569:   MatBindToCPU - marks a matrix to temporarily stay on the CPU and have its computations performed on the CPU

571:   Logically Collective

573:   Input Parameters:
574: + A   - the matrix
575: - flg - bind to the CPU if the value is `PETSC_TRUE`

577:   Level: intermediate
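
  Example Usage\:
  A sketch; mainly useful for GPU matrix types, and harmless otherwise since the call does nothing for non-GPU types.
.vb
  PetscCall(MatBindToCPU(A, PETSC_TRUE));  // keep A and its computations on the CPU for now
  // ... assemble or modify A on the CPU ...
  PetscCall(MatBindToCPU(A, PETSC_FALSE)); // allow A to move back to the GPU
.ve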

579: .seealso: [](ch_matrices), `Mat`, `MatBoundToCPU()`
580: @*/
581: PetscErrorCode MatBindToCPU(Mat A, PetscBool flg)
582: {
583:   PetscFunctionBegin;
586: #if defined(PETSC_HAVE_DEVICE)
587:   if (A->boundtocpu == flg) PetscFunctionReturn(PETSC_SUCCESS);
588:   A->boundtocpu = flg;
589:   PetscTryTypeMethod(A, bindtocpu, flg);
590: #endif
591:   PetscFunctionReturn(PETSC_SUCCESS);
592: }

594: /*@
595:   MatBoundToCPU - query if a matrix is bound to the CPU

597:   Input Parameter:
598: . A - the matrix

600:   Output Parameter:
601: . flg - the logical flag

603:   Level: intermediate

605: .seealso: [](ch_matrices), `Mat`, `MatBindToCPU()`
606: @*/
607: PetscErrorCode MatBoundToCPU(Mat A, PetscBool *flg)
608: {
609:   PetscFunctionBegin;
611:   PetscAssertPointer(flg, 2);
612: #if defined(PETSC_HAVE_DEVICE)
613:   *flg = A->boundtocpu;
614: #else
615:   *flg = PETSC_TRUE;
616: #endif
617:   PetscFunctionReturn(PETSC_SUCCESS);
618: }

620: PetscErrorCode MatSetValuesCOO_Basic(Mat A, const PetscScalar coo_v[], InsertMode imode)
621: {
622:   IS              is_coo_i, is_coo_j;
623:   const PetscInt *coo_i, *coo_j;
624:   PetscInt        n, n_i, n_j;
625:   PetscScalar     zero = 0.;

627:   PetscFunctionBegin;
628:   PetscCall(PetscObjectQuery((PetscObject)A, "__PETSc_coo_i", (PetscObject *)&is_coo_i));
629:   PetscCall(PetscObjectQuery((PetscObject)A, "__PETSc_coo_j", (PetscObject *)&is_coo_j));
630:   PetscCheck(is_coo_i, PetscObjectComm((PetscObject)A), PETSC_ERR_COR, "Missing coo_i IS");
631:   PetscCheck(is_coo_j, PetscObjectComm((PetscObject)A), PETSC_ERR_COR, "Missing coo_j IS");
632:   PetscCall(ISGetLocalSize(is_coo_i, &n_i));
633:   PetscCall(ISGetLocalSize(is_coo_j, &n_j));
634:   PetscCheck(n_i == n_j, PETSC_COMM_SELF, PETSC_ERR_COR, "Wrong local size %" PetscInt_FMT " != %" PetscInt_FMT, n_i, n_j);
635:   PetscCall(ISGetIndices(is_coo_i, &coo_i));
636:   PetscCall(ISGetIndices(is_coo_j, &coo_j));
637:   if (imode != ADD_VALUES) PetscCall(MatZeroEntries(A));
638:   for (n = 0; n < n_i; n++) PetscCall(MatSetValue(A, coo_i[n], coo_j[n], coo_v ? coo_v[n] : zero, ADD_VALUES));
639:   PetscCall(ISRestoreIndices(is_coo_i, &coo_i));
640:   PetscCall(ISRestoreIndices(is_coo_j, &coo_j));
641:   PetscFunctionReturn(PETSC_SUCCESS);
642: }

644: PetscErrorCode MatSetPreallocationCOO_Basic(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
645: {
646:   Mat         preallocator;
647:   IS          is_coo_i, is_coo_j;
648:   PetscScalar zero = 0.0;

650:   PetscFunctionBegin;
651:   PetscCall(PetscLayoutSetUp(A->rmap));
652:   PetscCall(PetscLayoutSetUp(A->cmap));
653:   PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &preallocator));
654:   PetscCall(MatSetType(preallocator, MATPREALLOCATOR));
655:   PetscCall(MatSetSizes(preallocator, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
656:   PetscCall(MatSetLayouts(preallocator, A->rmap, A->cmap));
657:   PetscCall(MatSetUp(preallocator));
658:   for (PetscCount n = 0; n < ncoo; n++) PetscCall(MatSetValue(preallocator, coo_i[n], coo_j[n], zero, INSERT_VALUES));
659:   PetscCall(MatAssemblyBegin(preallocator, MAT_FINAL_ASSEMBLY));
660:   PetscCall(MatAssemblyEnd(preallocator, MAT_FINAL_ASSEMBLY));
661:   PetscCall(MatPreallocatorPreallocate(preallocator, PETSC_TRUE, A));
662:   PetscCall(MatDestroy(&preallocator));
663:   PetscCheck(ncoo <= PETSC_MAX_INT, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ncoo %" PetscCount_FMT " overflowed PetscInt; configure --with-64-bit-indices or request support", ncoo);
664:   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ncoo, coo_i, PETSC_COPY_VALUES, &is_coo_i));
665:   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ncoo, coo_j, PETSC_COPY_VALUES, &is_coo_j));
666:   PetscCall(PetscObjectCompose((PetscObject)A, "__PETSc_coo_i", (PetscObject)is_coo_i));
667:   PetscCall(PetscObjectCompose((PetscObject)A, "__PETSc_coo_j", (PetscObject)is_coo_j));
668:   PetscCall(ISDestroy(&is_coo_i));
669:   PetscCall(ISDestroy(&is_coo_j));
670:   PetscFunctionReturn(PETSC_SUCCESS);
671: }

673: /*@C
674:   MatSetPreallocationCOO - set preallocation for matrices using a coordinate format of the entries with global indices

676:   Collective

678:   Input Parameters:
679: + A     - matrix being preallocated
680: . ncoo  - number of entries
681: . coo_i - row indices
682: - coo_j - column indices

684:   Level: beginner

686:   Notes:
687:   The indices `coo_i` and `coo_j` may be modified within this function. The caller should not rely on them
688:   having any specific value after this function returns. The arrays can be freed or reused immediately
689:   after this function returns.

691:   Entries can be repeated; see `MatSetValuesCOO()`. Entries with negative row or column indices are allowed
692:   but will be ignored. The corresponding entries in `MatSetValuesCOO()` will be ignored too. Remote entries
693:   are allowed and will be properly added to or inserted into the matrix, unless the matrix option `MAT_IGNORE_OFF_PROC_ENTRIES`
694:   is set, in which case remote entries are ignored, or `MAT_NO_OFF_PROC_ENTRIES` is set, in which case an error will be generated.

696:   If you just want to create a sequential AIJ matrix (`MATSEQAIJ`) and your matrix entries in COO format are unique, you can also use
697:   `MatCreateSeqAIJFromTriple()`, but that is not recommended for iterative applications.
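
  Example Usage\:
  A sketch providing two illustrative entries; the indices and values are arbitrary.
.vb
  PetscInt    coo_i[2] = {0, 1};
  PetscInt    coo_j[2] = {0, 1};
  PetscScalar coo_v[2] = {2.0, 3.0};

  PetscCall(MatSetPreallocationCOO(A, 2, coo_i, coo_j));
  PetscCall(MatSetValuesCOO(A, coo_v, INSERT_VALUES)); // no MatAssemblyBegin()/MatAssemblyEnd() needed
.ve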

699: .seealso: [](ch_matrices), `Mat`, `MatSetValuesCOO()`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatSeqBAIJSetPreallocation()`,
700:           `MatMPIBAIJSetPreallocation()`, `MatSeqSBAIJSetPreallocation()`, `MatMPISBAIJSetPreallocation()`, `MatSetPreallocationCOOLocal()`,
701:           `DMSetMatrixPreallocateSkip()`, `MatCreateSeqAIJFromTriple()`
702: @*/
703: PetscErrorCode MatSetPreallocationCOO(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
704: {
705:   PetscErrorCode (*f)(Mat, PetscCount, PetscInt[], PetscInt[]) = NULL;

707:   PetscFunctionBegin;
710:   if (ncoo) PetscAssertPointer(coo_i, 3);
711:   if (ncoo) PetscAssertPointer(coo_j, 4);
712:   PetscCall(PetscLayoutSetUp(A->rmap));
713:   PetscCall(PetscLayoutSetUp(A->cmap));
714:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSetPreallocationCOO_C", &f));

716:   PetscCall(PetscLogEventBegin(MAT_PreallCOO, A, 0, 0, 0));
717:   if (f) {
718:     PetscCall((*f)(A, ncoo, coo_i, coo_j));
719:   } else { /* allow fallback, very slow */
720:     PetscCall(MatSetPreallocationCOO_Basic(A, ncoo, coo_i, coo_j));
721:   }
722:   PetscCall(PetscLogEventEnd(MAT_PreallCOO, A, 0, 0, 0));
723:   A->preallocated = PETSC_TRUE;
724:   A->nonzerostate++;
725:   PetscFunctionReturn(PETSC_SUCCESS);
726: }

728: /*@C
729:   MatSetPreallocationCOOLocal - set preallocation for matrices using a coordinate format of the entries with local indices

731:   Collective

733:   Input Parameters:
734: + A     - matrix being preallocated
735: . ncoo  - number of entries
736: . coo_i - row indices (local numbering; may be modified)
737: - coo_j - column indices (local numbering; may be modified)

739:   Level: beginner

741:   Notes:
742:   The local indices are translated using the local to global mapping, thus `MatSetLocalToGlobalMapping()` must have been
743:   called prior to this function. For matrices created with `DMCreateMatrix()` the local to global mapping is often already provided.

745:   The indices `coo_i` and `coo_j` may be modified within this function. They might be translated to corresponding global
746:   indices, but the caller should not rely on them having any specific value after this function returns. The arrays
747:   can be freed or reused immediately after this function returns.

749:   Entries can be repeated; see `MatSetValuesCOO()`. Entries with negative row or column indices are allowed
750:   but will be ignored. The corresponding entries in `MatSetValuesCOO()` will be ignored too. Remote entries
751:   are allowed and will be properly added to or inserted into the matrix.

753: .seealso: [](ch_matrices), `Mat`, `MatSetValuesCOO()`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatSeqBAIJSetPreallocation()`,
754:           `MatMPIBAIJSetPreallocation()`, `MatSeqSBAIJSetPreallocation()`, `MatMPISBAIJSetPreallocation()`, `MatSetPreallocationCOO()`,
755:           `DMSetMatrixPreallocateSkip()`
756: @*/
757: PetscErrorCode MatSetPreallocationCOOLocal(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
758: {
759:   PetscErrorCode (*f)(Mat, PetscCount, PetscInt[], PetscInt[]) = NULL;

761:   PetscFunctionBegin;
764:   if (ncoo) PetscAssertPointer(coo_i, 3);
765:   if (ncoo) PetscAssertPointer(coo_j, 4);
766:   PetscCheck(ncoo <= PETSC_MAX_INT, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ncoo %" PetscCount_FMT " overflowed PetscInt; configure --with-64-bit-indices or request support", ncoo);
767:   PetscCall(PetscLayoutSetUp(A->rmap));
768:   PetscCall(PetscLayoutSetUp(A->cmap));

770:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSetPreallocationCOOLocal_C", &f));
771:   if (f) {
772:     PetscCall((*f)(A, ncoo, coo_i, coo_j));
773:     A->nonzerostate++;
774:   } else {
775:     ISLocalToGlobalMapping ltog_row, ltog_col;
776:     PetscCall(MatGetLocalToGlobalMapping(A, &ltog_row, &ltog_col));
777:     if (ltog_row) PetscCall(ISLocalToGlobalMappingApply(ltog_row, ncoo, coo_i, coo_i));
778:     if (ltog_col) PetscCall(ISLocalToGlobalMappingApply(ltog_col, ncoo, coo_j, coo_j));
779:     PetscCall(MatSetPreallocationCOO(A, ncoo, coo_i, coo_j));
780:   }
781:   A->preallocated = PETSC_TRUE;
782:   PetscFunctionReturn(PETSC_SUCCESS);
783: }

785: /*@
786:   MatSetValuesCOO - set values at once in a matrix preallocated using `MatSetPreallocationCOO()`

788:   Collective

790:   Input Parameters:
791: + A     - matrix being preallocated
792: . coo_v - the matrix values (can be `NULL`)
793: - imode - the insert mode

795:   Level: beginner

797:   Notes:
798:   The values must follow the order of the indices prescribed with `MatSetPreallocationCOO()` or `MatSetPreallocationCOOLocal()`.

800:   When repeated entries are specified in the COO indices, the corresponding `coo_v` values are first summed, regardless of the value of `imode`.
801:   The `imode` flag indicates whether `coo_v` is added to the current values of the matrix (`ADD_VALUES`) or overwrites them (`INSERT_VALUES`).

803:   `MatAssemblyBegin()` and `MatAssemblyEnd()` do not need to be called after this routine. It automatically handles the assembly process.
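
  Example Usage\:
  A sketch of repeatedly refilling a matrix whose COO pattern was set once with `MatSetPreallocationCOO()`;
  the loop bounds and the recomputation of `coo_v` are illustrative.
.vb
  for (PetscInt step = 0; step < nsteps; step++) {
    // ... recompute the entries of coo_v, in the order given to MatSetPreallocationCOO() ...
    PetscCall(MatSetValuesCOO(A, coo_v, INSERT_VALUES));
    // A is now assembled and ready for use, e.g., in a linear solve
  }
.ve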

805: .seealso: [](ch_matrices), `Mat`, `MatSetPreallocationCOO()`, `MatSetPreallocationCOOLocal()`, `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
806: @*/
807: PetscErrorCode MatSetValuesCOO(Mat A, const PetscScalar coo_v[], InsertMode imode)
808: {
809:   PetscErrorCode (*f)(Mat, const PetscScalar[], InsertMode) = NULL;
810:   PetscBool oldFlg;

812:   PetscFunctionBegin;
815:   MatCheckPreallocated(A, 1);
817:   PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSetValuesCOO_C", &f));
818:   PetscCall(PetscLogEventBegin(MAT_SetVCOO, A, 0, 0, 0));
819:   if (f) {
820:     PetscCall((*f)(A, coo_v, imode)); // all known COO implementations do not use MatStash. They do their own off-proc communication
821:     PetscCall(MatGetOption(A, MAT_NO_OFF_PROC_ENTRIES, &oldFlg));
822:     PetscCall(MatSetOption(A, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE)); // set A->nooffprocentries to avoid costly MatStash scatter in MatAssembly
823:   } else {
824:     PetscCall(MatSetValuesCOO_Basic(A, coo_v, imode)); // fall back to MatSetValues, which might use MatStash
825:   }
826:   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
827:   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
828:   if (f) PetscCall(MatSetOption(A, MAT_NO_OFF_PROC_ENTRIES, oldFlg));
829:   PetscCall(PetscLogEventEnd(MAT_SetVCOO, A, 0, 0, 0));
830:   PetscFunctionReturn(PETSC_SUCCESS);
831: }

833: /*@
834:   MatSetBindingPropagates - Sets whether the state of being bound to the CPU for a GPU matrix type propagates to child and some other associated objects

836:   Input Parameters:
837: + A   - the matrix
838: - flg - flag indicating whether the boundtocpu flag should be propagated

840:   Level: developer

842:   Notes:
843:   If the value of `flg` is set to `PETSC_TRUE`, the following will occur
844: +   `MatCreateSubMatrices()` and `MatCreateRedundantMatrix()` - bind created matrices to CPU if the input matrix is bound to the CPU.
845: -   `MatCreateVecs()` - bind created vectors to CPU if the input matrix is bound to the CPU.

847:   The `bindingpropagates` flag itself is also propagated by the above routines.

849:   Developer Notes:
850:   If the fine-scale `DMDA` has the `-dm_bind_below` option set to true, then `DMCreateInterpolationScale()` calls `MatSetBindingPropagates()`
851:   on the restriction/interpolation operator to set the bindingpropagates flag to true.

853: .seealso: [](ch_matrices), `Mat`, `VecSetBindingPropagates()`, `MatGetBindingPropagates()`
854: @*/
855: PetscErrorCode MatSetBindingPropagates(Mat A, PetscBool flg)
856: {
857:   PetscFunctionBegin;
859: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
860:   A->bindingpropagates = flg;
861: #endif
862:   PetscFunctionReturn(PETSC_SUCCESS);
863: }

865: /*@
866:   MatGetBindingPropagates - Gets whether the state of being bound to the CPU for a GPU matrix type propagates to child and some other associated objects

868:   Input Parameter:
869: . A - the matrix

871:   Output Parameter:
872: . flg - flag indicating whether the boundtocpu flag will be propagated

874:   Level: developer

876: .seealso: [](ch_matrices), `Mat`, `MatSetBindingPropagates()`
877: @*/
878: PetscErrorCode MatGetBindingPropagates(Mat A, PetscBool *flg)
879: {
880:   PetscFunctionBegin;
882:   PetscAssertPointer(flg, 2);
883: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
884:   *flg = A->bindingpropagates;
885: #else
886:   *flg = PETSC_FALSE;
887: #endif
888:   PetscFunctionReturn(PETSC_SUCCESS);
889: }