Actual source code: ex16f90.F90
!
!
! Tests MatDenseGetArray()
!
#include <petsc/finclude/petscmat.h>
program main
use petscmat
implicit none

Mat A
PetscErrorCode ierr
PetscInt i, j, iar(1), jar(1)
PetscInt, parameter :: m = 3, n = 2
PetscScalar v(1)
PetscScalar, pointer :: array(:, :)
PetscMPIInt rank
integer :: ashape(2)
character(len=80) :: string

PetscCallA(PetscInitialize(ierr))
!
! Create a parallel dense matrix shared by all processors
!
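! PETSC_DECIDE lets PETSc choose the local row and column distribution, and
! PETSC_NULL_SCALAR_ARRAY asks PETSc to allocate the matrix storage itself.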
PetscCallA(MatCreateDense(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, m, n, PETSC_NULL_SCALAR_ARRAY, A, ierr))

!
! Set values into the matrix. All processors set all values.
!
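! Values are addressed by their global row/column indices; the matrix cannot
! be used until it has been assembled below.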
do i = 0, m - 1
   iar(1) = i
   do j = 0, n - 1
      jar(1) = j
      v(1) = 9.0/real(i + j + 1)
      PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, iar, 1_PETSC_INT_KIND, jar, v, INSERT_VALUES, ierr))
   end do
end do
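
! Assembly communicates any values that were set on a rank that does not own
! them; it must complete before the matrix is viewed or its array accessed.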
PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

!
! Print the matrix to the screen
!
PetscCallA(MatView(A, PETSC_VIEWER_STDOUT_WORLD, ierr))

!
! Print the local matrix shape to the screen for each rank
!
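! MatDenseGetArray() gives a Fortran pointer to the locally owned part of the
! dense matrix; it must be returned with MatDenseRestoreArray() before the
! matrix is used further.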
PetscCallA(MatDenseGetArray(A, array, ierr))
PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
ashape = shape(array)
write (string, '("[", i0, "]", " shape (", i0, ",", i0, ")", a1)') rank, ashape(1), ashape(2), new_line('a')
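! PetscSynchronizedPrintf() queues the string from each rank in rank order;
! nothing is written until PetscSynchronizedFlush() is called.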
PetscCallA(PetscSynchronizedPrintf(PETSC_COMM_WORLD, string, ierr))
PetscCallA(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT, ierr))
PetscCallA(MatDenseRestoreArray(A, array, ierr))
!
! Free the space used by the matrix
!
PetscCallA(MatDestroy(A, ierr))
PetscCallA(PetscFinalize(ierr))
end
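
! The block below is read by the PETSc test harness: nsize: 2 runs the example
! on two MPI ranks, and the sort filters make the comparison with the stored
! reference output independent of how the ranks' lines interleave.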
!/*TEST
!
!   test:
!     nsize: 2
!     filter: sort -b
!     filter_output: sort -b
!
!TEST*/