Actual source code: ex3.c

static char help[] = "Illustration of MatIS using a 1D Laplacian assembly\n\n";

/*
  MatIS means that the matrix is not assembled. The easiest way to think of this is that processes do not have to
  hold full matrix rows: one process can hold part of row i, and another process can hold another part, while the
  number of global rows stays the same. The local size here is not the size of the local IS block, which we call
  the overlap size (that is a property of MatIS alone); it is the size of the local piece of the vector you
  multiply in MatMult(). This lets PETSc understand the parallel layout of the Vec and how it matches the Mat. If
  you only know the overlap size when assembling, it is best to use PETSC_DECIDE for the local size in the
  creation routine, so that PETSc automatically partitions the unknowns.
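
  For example, on 3 processes the global problem has 4 unknowns, and every process has overlap size 2 since it
  holds both vertices of its cell, while PETSC_DECIDE lets PETSc partition the 4 unknowns, e.g. into local Vec
  sizes of 2, 1, and 1. The overlap size and the local size are genuinely different numbers.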

  Each P_1 element matrix for a cell will be

    /  1 -1 \
    \ -1  1 /

  so that the assembled matrix has a tridiagonal [-1, 2, -1] pattern. We will use 1 cell per process for illustration,
  and allow PETSc to decide the ownership.
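
  For instance, with 3 cells (4 vertices) the assembled operator is

    /  1 -1  0  0 \
    | -1  2 -1  0 |
    |  0 -1  2 -1 |
    \  0  0 -1  1 /

  where each interior diagonal entry is 2 because the element matrices of the two adjacent cells overlap there.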
*/

#include <petscmat.h>

int main(int argc, char **argv)
{
  MPI_Comm               comm;
  Mat                    A;
  Vec                    x, y;
  ISLocalToGlobalMapping map;
  PetscScalar            elemMat[4] = {1.0, -1.0, -1.0, 1.0};
  PetscReal              error;
  PetscInt               overlapSize = 2, globalIdx[2];
  PetscMPIInt            rank, size;
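  /* elemMat is the 2x2 P_1 element (stiffness) matrix stored by rows; overlapSize is the number of vertices each process touches */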

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Create local-to-global map */
  globalIdx[0] = rank;
  globalIdx[1] = rank + 1;
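  /* Local vertex 0 maps to global vertex rank and local vertex 1 to rank + 1, so neighboring processes share the vertex between their cells */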
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, overlapSize, globalIdx, PETSC_COPY_VALUES, &map));
  /* Create matrix */
  PetscCall(MatCreateIS(comm, 1, PETSC_DECIDE, PETSC_DECIDE, size + 1, size + 1, map, map, &A));
  PetscCall(PetscObjectSetName((PetscObject)A, "A"));
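  /* The matrix keeps its own reference to the mapping, so we can release ours */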
  PetscCall(ISLocalToGlobalMappingDestroy(&map));
  PetscCall(MatISSetPreallocation(A, overlapSize, NULL, overlapSize, NULL));
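  /* Add the element matrix using global vertex numbers; MatIS stores these entries in per-process local matrices, and contributions to the shared vertex are combined only when the operator is applied */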
  PetscCall(MatSetValues(A, 2, globalIdx, 2, globalIdx, elemMat, ADD_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  /* Check that the constant vector is in the nullspace */
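  /* Every row of A sums to zero, so applying A to the constant vector must give zero */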
  PetscCall(MatCreateVecs(A, &x, &y));
  PetscCall(VecSet(x, 1.0));
  PetscCall(PetscObjectSetName((PetscObject)x, "x"));
  PetscCall(VecViewFromOptions(x, NULL, "-x_view"));
  PetscCall(MatMult(A, x, y));
  PetscCall(PetscObjectSetName((PetscObject)y, "y"));
  PetscCall(VecViewFromOptions(y, NULL, "-y_view"));
  PetscCall(VecNorm(y, NORM_2, &error));
  PetscCheck(error <= PETSC_SMALL, comm, PETSC_ERR_ARG_WRONG, "Invalid output, x should be in the nullspace of A");
  /* Check that an interior unit vector gets mapped to something of 1-norm 4 */
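  /* Column 1 of A contains -1, 2, -1, so ||A e_1||_1 = 4; global vertex 1 is interior only when there is more than one cell */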
  if (size > 1) {
    PetscCall(VecSet(x, 0.0));
    PetscCall(VecSetValue(x, 1, 1.0, INSERT_VALUES));
    PetscCall(VecAssemblyBegin(x));
    PetscCall(VecAssemblyEnd(x));
    PetscCall(MatMult(A, x, y));
    PetscCall(VecNorm(y, NORM_1, &error));
    PetscCheck(PetscAbsReal(error - 4) <= PETSC_SMALL, comm, PETSC_ERR_ARG_WRONG, "Invalid output for matrix multiply");
  }
  /* Cleanup */
  PetscCall(MatDestroy(&A));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  test:
    suffix: 0
    requires:
    args:

  test:
    suffix: 1
    nsize: 3
    args:

  test:
    suffix: 2
    nsize: 7
    args:

TEST*/