Actual source code: ex89f.F90
!
!  Creates a tridiagonal sparse matrix explicitly in Fortran and solves a linear system with it
!
!  The matrix is provided in CSR (compressed sparse row) format by the user
!
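!  For the default n = 3 the CSR arrays built below are
!     rowptr = [0, 1, 4, 5]
!     colind = [0, 0, 1, 2, 2]
!     a      = [1, -1, 2, -1, 1]
!  i.e. unit boundary rows and the -1 2 -1 stencil in the interior rows
!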
#include <petsc/finclude/petscksp.h>
program main
  use petscksp
  implicit none

  PetscInt i, n, nz, cnt
  PetscBool flg
  PetscErrorCode ierr
  PetscScalar, pointer :: b(:)

  PetscInt, pointer :: rowptr(:)
  PetscInt, pointer :: colind(:)
  PetscScalar, pointer :: a(:)

  Mat J
  Vec rhs, solution
  KSP ksp

  PetscCallA(PetscInitialize(ierr))

  n = 3
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
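  ! The first and last rows hold one entry each and every interior row holds three,
  ! so the total number of nonzeros is 2 + 3*(n - 2) = 3*n - 4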
  nz = 3*n - 4
  PetscCallA(VecCreateSeq(PETSC_COMM_SELF, n, rhs, ierr))
  PetscCallA(VecCreateSeq(PETSC_COMM_SELF, n, solution, ierr))
  PetscCallA(PetscShmgetAllocateArrayInt(0_PETSC_INT_KIND, n + 1, rowptr, ierr))
  PetscCallA(PetscShmgetAllocateArrayInt(0_PETSC_INT_KIND, nz, colind, ierr))
  PetscCallA(PetscShmgetAllocateArrayScalar(0_PETSC_INT_KIND, nz, a, ierr))
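
  ! Fill the right-hand side with ones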
  PetscCallA(VecGetArray(rhs, b, ierr))
  b(1:n) = 1.0
  PetscCallA(VecRestoreArray(rhs, b, ierr))
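
  ! First row: a single unit entry on the diagonal (boundary row)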
  rowptr(0) = 0
  colind(0) = 0
  a(0) = 1.0
  rowptr(1) = 1
  cnt = 1
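  ! Interior rows: the -1 2 -1 tridiagonal stencil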
  do i = 1, n - 2
    colind(cnt) = i - 1
    a(cnt) = -1
    cnt = cnt + 1
    colind(cnt) = i
    a(cnt) = 2
    cnt = cnt + 1
    colind(cnt) = i + 1
    a(cnt) = -1
    cnt = cnt + 1
    rowptr(i + 1) = 3 + rowptr(i)
  end do
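  ! Last row: a single unit entry on the diagonal (boundary row)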
  colind(cnt) = n - 1
  a(cnt) = 1.0
  rowptr(n) = cnt + 1
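
  ! Note: MatCreateSeqAIJWithArrays() uses the provided arrays directly rather than copying them,
  ! so they must remain valid (and unfreed) until the matrix is destroyed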
  PetscCallA(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, n, n, rowptr, colind, a, J, ierr))

  PetscCallA(KSPCreate(PETSC_COMM_SELF, ksp, ierr))
  PetscCallA(KSPSetErrorIfNotConverged(ksp, PETSC_TRUE, ierr))
  PetscCallA(KSPSetFromOptions(ksp, ierr))
  PetscCallA(KSPSetOperators(ksp, J, J, ierr))

  PetscCallA(KSPSolve(ksp, rhs, solution, ierr))
  ! Keep the same size and nonzero structure of the matrix but change its numerical entries:
  ! overwrite the diagonal value of each interior row with 4.0
  do i = 2, n - 1
    a(2 + 3*(i - 2)) = 4.0
  end do
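  ! The values were changed directly in the user-provided array, so increase the object's state
  ! to tell PETSc the matrix has changed; the next KSPSolve() then rebuilds the preconditioner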
  PetscCallA(PetscObjectStateIncrease(J, ierr))

  PetscCallA(KSPSolve(ksp, rhs, solution, ierr))
  PetscCallA(KSPDestroy(ksp, ierr))
  PetscCallA(VecDestroy(rhs, ierr))
  PetscCallA(VecDestroy(solution, ierr))
  PetscCallA(MatDestroy(J, ierr))

  PetscCallA(PetscShmgetDeallocateArrayInt(rowptr, ierr))
  PetscCallA(PetscShmgetDeallocateArrayInt(colind, ierr))
  PetscCallA(PetscShmgetDeallocateArrayScalar(a, ierr))
  PetscCallA(PetscFinalize(ierr))
end
!/*TEST
!
!   test:
!     requires: defined(PETSC_USE_SINGLE_LIBRARY)
!     nsize: 3
!     filter: sed 's?ATOL?RTOL?g' | grep -v HERMITIAN | grep -v "shared memory" | grep -v "Mat_0"
!     # use the MPI Linear Solver Server
!     args: -n 20 -mpi_linear_solver_server -mpi_linear_solver_server_view -mpi_linear_solver_server_use_shared_memory false
!     # controls for the use of PCMPI on a particular system
!     args: -mpi_linear_solver_server_minimum_count_per_rank 5 -mpi_linear_solver_server_ksp_view
!     # the usual options for the linear solver (in this case using the server)
!     args: -ksp_monitor -ksp_converged_reason -ksp_view
!
!   test:
!     suffix: 2
!     requires: defined(PETSC_USE_SINGLE_LIBRARY)
!     nsize: 3
!     filter: sed 's?ATOL?RTOL?g' | grep -v HERMITIAN | grep -v "shared memory" | grep -v "Mat_0"
!     # use the MPI Linear Solver Server
!     args: -n 20 -mpi_linear_solver_server -mpi_linear_solver_server_view -mpi_linear_solver_server_use_shared_memory false
!     # controls for the use of PCMPI on a particular system
!     args: -mpi_linear_solver_server_ksp_view
!     # the usual options for the linear solver (in this case using the server)
!     args: -ksp_monitor -ksp_converged_reason -ksp_view
!
!TEST*/