Actual source code: ex88f.F90
!
!   Creates a tridiagonal sparse matrix explicitly in Fortran and solves a linear system with it
!
!   The matrix entries are provided as (row, column, value) triples, so new numerical values can
!   be set later while reusing the same nonzero structure
!
program main
#include <petsc/finclude/petscksp.h>
  use petscksp
  implicit none

  PetscInt i,n
  PetscCount nz
  PetscBool flg
  PetscErrorCode ierr
  PetscScalar,ALLOCATABLE :: a(:)
  PetscScalar,pointer :: b(:)

  PetscInt,ALLOCATABLE :: rows(:)
  PetscInt,ALLOCATABLE :: cols(:)

  Mat J
  Vec rhs,solution
  KSP ksp

  PetscCallA(PetscInitialize(ierr))

  n = 3
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr))
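  ! one entry for each of the two boundary rows plus three entries for each of the n-2 interior rows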
  nz = 3*n - 4

  PetscCallA(VecCreateSeq(PETSC_COMM_SELF,n,rhs,ierr))
  PetscCallA(VecCreateSeq(PETSC_COMM_SELF,n,solution,ierr))
  ALLOCATE (rows(nz),cols(nz),a(nz))

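  ! fill the right-hand side with ones by accessing its array directly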
  PetscCallA(VecGetArray(rhs,b,ierr))
  do i=1,n
    b(i) = 1.0
  enddo
  PetscCallA(VecRestoreArray(rhs,b,ierr))

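  ! build the (row, column, value) triples for the tridiagonal matrix; the row and column indices are 0-based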
  rows(1) = 0; cols(1) = 0
  a(1) = 1.0
  do i=2,n-1
    rows(2+3*(i-2)) = i-1;   cols(2+3*(i-2)) = i-2
    a(2+3*(i-2)) = -1.0
    rows(2+3*(i-2)+1) = i-1; cols(2+3*(i-2)+1) = i-1
    a(2+3*(i-2)+1) = 2.0
    rows(2+3*(i-2)+2) = i-1; cols(2+3*(i-2)+2) = i
    a(2+3*(i-2)+2) = -1.0
  enddo
  rows(nz) = n-1; cols(nz) = n-1
  a(nz) = 1.0

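  ! create the matrix, set its nonzero structure once from the triples, then provide the numerical values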
  PetscCallA(MatCreate(PETSC_COMM_SELF,J,ierr))
  PetscCallA(MatSetSizes(J,n,n,n,n,ierr))
  PetscCallA(MatSetType(J,MATSEQAIJ,ierr))
  PetscCallA(MatSetPreallocationCOO(J,nz,rows,cols,ierr))
  PetscCallA(MatSetValuesCOO(J,a,INSERT_VALUES,ierr))

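  ! create the linear solver; KSPSetFromOptions() allows selecting the solver and preconditioner at run time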
  PetscCallA(KSPCreate(PETSC_COMM_SELF,ksp,ierr))
  PetscCallA(KSPSetErrorIfNotConverged(ksp,PETSC_TRUE,ierr))
  PetscCallA(KSPSetFromOptions(ksp,ierr))
  PetscCallA(KSPSetOperators(ksp,J,J,ierr))

  PetscCallA(KSPSolve(ksp,rhs,solution,ierr))

  ! Keep the same size and nonzero structure of the matrix but change its numerical entries
  do i=2,n-1
    a(2+3*(i-2)+1) = 4.0
  enddo
  PetscCallA(MatSetValuesCOO(J,a,INSERT_VALUES,ierr))

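  ! solve again with the updated values; the operator given to KSPSetOperators() has changed, so the preconditioner is rebuilt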
  PetscCallA(KSPSolve(ksp,rhs,solution,ierr))

  PetscCallA(KSPDestroy(ksp,ierr))
  PetscCallA(VecDestroy(rhs,ierr))
  PetscCallA(VecDestroy(solution,ierr))
  PetscCallA(MatDestroy(J,ierr))

  DEALLOCATE (rows, cols, a)

  PetscCallA(PetscFinalize(ierr))
end

!/*TEST
!
!   test:
!     requires: defined(PETSC_USE_SINGLE_LIBRARY)
!     nsize: 3
!     filter: sed 's?ATOL?RTOL?g' | grep -v HERMITIAN | grep -v "shared memory" | grep -v "Mat_0"
!     # use the MPI Linear Solver Server
!     args: -n 20 -mpi_linear_solver_server -mpi_linear_solver_server_view -mpi_linear_solver_server_use_shared_memory false
!     # controls for the use of PCMPI on a particular system
!     args: -mpi_linear_solver_server_minimum_count_per_rank 5 -mpi_linear_solver_server_ksp_view
!     # the usual options for the linear solver (in this case using the server)
!     args: -ksp_monitor -ksp_converged_reason -ksp_view
!
!   test:
!     suffix: 2
!     requires: defined(PETSC_USE_SINGLE_LIBRARY)
!     nsize: 3
!     filter: sed 's?ATOL?RTOL?g' | grep -v HERMITIAN | grep -v "shared memory" | grep -v "Mat_0"
!     # use the MPI Linear Solver Server
!     args: -n 20 -mpi_linear_solver_server -mpi_linear_solver_server_view -mpi_linear_solver_server_use_shared_memory false
!     # controls for the use of PCMPI on a particular system
!     args: -mpi_linear_solver_server_ksp_view
!     # the usual options for the linear solver (in this case using the server)
!     args: -ksp_monitor -ksp_converged_reason -ksp_view
!
!TEST*/