Actual source code: ex11f.F90

! Test MatCreateMPIAdj() with NULL argument 'values'

program main
#include <petsc/finclude/petscmat.h>
use petscmat
implicit none

  Mat                   :: mesh,dual
  MatPartitioning       :: part
  IS                    :: is
  PetscInt,parameter    :: Nvertices=6,ncells=2,two=2
  PetscInt              :: ii(3),jj(6)
  PetscMPIInt           :: sz,rnk
  PetscErrorCode        :: ierr

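  ! PetscCallA()/PetscCallMPIA() check the returned error code and abort on failure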
  PetscCallA(PetscInitialize(ierr))

  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,sz,ierr))
  PetscCheckA(sz.eq.2,PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,'This example is for exactly two processes')
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rnk,ierr))
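  ! Each rank contributes ncells = 2 triangles of a 4-cell, 6-vertex mesh in CSR
  ! form: ii(k+1) is the offset into jj of cell k's vertex list (3 vertices per
  ! cell, hence ii = 0,3,6), and jj holds the 0-based vertex numbers of each cell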
  ii(1) = 0
  ii(2) = 3
  ii(3) = 6
  if (rnk.eq.0) then
    jj(1) = 0
    jj(2) = 1
    jj(3) = 2
    jj(4) = 1
    jj(5) = 2
    jj(6) = 3
  else
    jj(1) = 1
    jj(2) = 4
    jj(3) = 5
    jj(4) = 1
    jj(5) = 3
    jj(6) = 5
  endif

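  ! Create the mesh adjacency matrix; PETSC_NULL_INTEGER_ARRAY passes NULL for the
  ! optional 'values' (weights) argument, which is what this test exercises.
  ! MatMeshToCellGraph() uses ParMETIS to build the dual graph: cells become
  ! vertices, with an edge wherever two cells share at least 'two' common vertices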
  PetscCallA(MatCreateMPIAdj(PETSC_COMM_WORLD,ncells,Nvertices,ii,jj,PETSC_NULL_INTEGER_ARRAY,mesh,ierr))
  PetscCallA(MatMeshToCellGraph(mesh,two,dual,ierr))
  PetscCallA(MatView(dual,PETSC_VIEWER_STDOUT_WORLD,ierr))

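  ! Partition the dual graph; the resulting index set lists, for each local cell,
  ! the rank to which the partitioner assigns it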
  PetscCallA(MatPartitioningCreate(PETSC_COMM_WORLD,part,ierr))
  PetscCallA(MatPartitioningSetAdjacency(part,dual,ierr))
  PetscCallA(MatPartitioningSetFromOptions(part,ierr))
  PetscCallA(MatPartitioningApply(part,is,ierr))
  PetscCallA(ISView(is,PETSC_VIEWER_STDOUT_WORLD,ierr))
  PetscCallA(ISDestroy(is,ierr))
  PetscCallA(MatPartitioningDestroy(part,ierr))

  PetscCallA(MatDestroy(mesh,ierr))
  PetscCallA(MatDestroy(dual,ierr))
  PetscCallA(PetscFinalize(ierr))

end program

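! Typical invocation (assumes a PETSc build configured with ParMETIS and an
! executable named ex11f):
!   mpiexec -n 2 ./ex11f
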
!/*TEST
!
!   build:
!     requires: parmetis
!
!   test:
!      nsize: 2
!
!TEST*/