! ex76f.F90
!
! Description: Solves a linear system using PCHPDDM.
!
#include <petsc/finclude/petscksp.h>
program main
  use petscksp
  implicit none

  ! Solution and right-hand side vectors
  Vec                           x, b
  ! A: global operator; aux: local auxiliary (Neumann) matrix for PCHPDDM;
  ! Y, C: scratch matrices (Neumann matrix on load, then multi-RHS blocks)
  Mat                           A, aux, Y, C
  KSP                           ksp
  PC                            pc
  ! is: local-to-global numbering of the subdomain; sizes: metadata read from disk
  IS                            is, sizes
  PetscScalar, parameter ::     one = 1.0
  PetscInt, pointer ::          idx(:)
  PetscMPIInt                   rank, size
  ! m: local size; N: number of right-hand sides (from -rhs)
  PetscInt                      m, N
  PetscViewer                   viewer
  character(PETSC_MAX_PATH_LEN) dir, name
  PetscLayout                   map
  PetscBool                     flg
  PetscErrorCode                ierr

  PetscCallA(PetscInitialize(ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD, size, ierr))
  ! the data files on disk are partitioned for exactly 4 subdomains
  PetscCheckA(size == 4, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, 'This example requires 4 processes')
  N = 1
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-rhs', N, flg, ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
  PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
  dir = '.'
  PetscCallA(PetscOptionsGetString(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-load_dir', dir, flg, ierr))
  ! loading matrices: first the per-process size metadata
  ! idx(1:2) = local row/column sizes of A, idx(3:4) = global sizes, idx(5) = local size of the Neumann matrix
  write (name, '(a,a,i0,a)') trim(dir), '/sizes_', size, '.dat'
  PetscCallA(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, viewer, ierr))
  PetscCallA(ISCreate(PETSC_COMM_WORLD, sizes, ierr))
  PetscCallA(ISLoad(sizes, viewer, ierr))
  PetscCallA(ISGetIndices(sizes, idx, ierr))
  PetscCallA(MatSetSizes(A, idx(1), idx(2), idx(3), idx(4), ierr))
  PetscCallA(MatCreate(PETSC_COMM_WORLD, Y, ierr))
  PetscCallA(MatSetSizes(Y, idx(5), idx(5), PETSC_DETERMINE, PETSC_DETERMINE, ierr))
  PetscCallA(MatSetUp(Y, ierr))
  PetscCallA(ISRestoreIndices(sizes, idx, ierr))
  PetscCallA(ISDestroy(sizes, ierr))
  PetscCallA(PetscViewerDestroy(viewer, ierr))
  ! global operator A
  write (name, '(a,a)') trim(dir), '/A.dat'
  PetscCallA(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, viewer, ierr))
  PetscCallA(MatLoad(A, viewer, ierr))
  PetscCallA(PetscViewerDestroy(viewer, ierr))
  ! index set mapping the local (overlapping) subdomain numbering into the global numbering;
  ! its layout must match the row layout of the Neumann matrix Y
  write (name, '(a,a,i0,a)') trim(dir), '/is_', size, '.dat'
  PetscCallA(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, viewer, ierr))
  PetscCallA(ISCreate(PETSC_COMM_WORLD, sizes, ierr))
  PetscCallA(MatGetLayouts(Y, map, PETSC_NULL_LAYOUT, ierr))
  PetscCallA(ISSetLayout(sizes, map, ierr))
  PetscCallA(ISLoad(sizes, viewer, ierr))
  PetscCallA(ISGetLocalSize(sizes, m, ierr))
  PetscCallA(ISGetIndices(sizes, idx, ierr))
  ! sequential copy of the local part: PCHPDDM expects a per-process IS
  PetscCallA(ISCreateGeneral(PETSC_COMM_SELF, m, idx, PETSC_COPY_VALUES, is, ierr))
  PetscCallA(ISRestoreIndices(sizes, idx, ierr))
  PetscCallA(ISDestroy(sizes, ierr))
  PetscCallA(PetscViewerDestroy(viewer, ierr))
  ! Neumann (unassembled subdomain) matrix, then extract its sequential diagonal block
  write (name, '(a,a,i0,a)') trim(dir), '/Neumann_', size, '.dat'
  PetscCallA(PetscViewerBinaryOpen(PETSC_COMM_WORLD, name, FILE_MODE_READ, viewer, ierr))
  PetscCallA(MatLoad(Y, viewer, ierr))
  PetscCallA(PetscViewerDestroy(viewer, ierr))
  PetscCallA(MatGetDiagonalBlock(Y, C, ierr))
  PetscCallA(MatDuplicate(C, MAT_COPY_VALUES, aux, ierr))
  PetscCallA(MatDestroy(Y, ierr))
  PetscCallA(MatSetBlockSizesFromMats(aux, A, A, ierr))
  PetscCallA(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE, ierr))
  PetscCallA(MatSetOption(aux, MAT_SYMMETRIC, PETSC_TRUE, ierr))
  ! ready for testing
  PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))
  PetscCallA(KSPSetOperators(ksp, A, A, ierr))
  PetscCallA(KSPGetPC(ksp, pc, ierr))
  PetscCallA(PCSetType(pc, PCHPDDM, ierr))
#if defined(PETSC_HAVE_HPDDM) && defined(PETSC_HAVE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
  PetscCallA(PCHPDDMSetAuxiliaryMat(pc, is, aux, PETSC_NULL_FUNCTION, PETSC_NULL_INTEGER, ierr))
  PetscCallA(PCHPDDMHasNeumannMat(pc, PETSC_FALSE, ierr)) ! PETSC_TRUE is fine as well, just testing
#endif
  ! the PC keeps its own references; local copies can be released now
  PetscCallA(ISDestroy(is, ierr))
  PetscCallA(MatDestroy(aux, ierr))
  PetscCallA(KSPSetFromOptions(ksp, ierr))
  PetscCallA(MatCreateVecs(A, x, b, ierr))
  PetscCallA(VecSet(b, one, ierr))
  PetscCallA(KSPSolve(ksp, b, x, ierr))
  PetscCallA(VecGetLocalSize(x, m, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(b, ierr))
  if (N > 1) then
    ! multiple right-hand sides: solve block systems with KSPMatSolve
    ! (suppress per-solve convergence output to keep test output stable)
    PetscCallA(PetscOptionsClearValue(PETSC_NULL_OPTIONS, '-ksp_converged_reason', ierr))
    PetscCallA(KSPSetFromOptions(ksp, ierr))
    PetscCallA(MatCreateDense(PETSC_COMM_WORLD, m, PETSC_DECIDE, PETSC_DECIDE, N, PETSC_NULL_SCALAR_ARRAY, C, ierr))
    PetscCallA(MatCreateDense(PETSC_COMM_WORLD, m, PETSC_DECIDE, PETSC_DECIDE, N, PETSC_NULL_SCALAR_ARRAY, Y, ierr))
    PetscCallA(MatSetRandom(C, PETSC_NULL_RANDOM, ierr))
    PetscCallA(KSPMatSolve(ksp, C, Y, ierr))
    PetscCallA(MatDestroy(Y, ierr))
    PetscCallA(MatDestroy(C, ierr))
  end if
  PetscCallA(KSPDestroy(ksp, ierr))
  PetscCallA(MatDestroy(A, ierr))
  PetscCallA(PetscFinalize(ierr))
end
!/*TEST
!
! test:
! requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
! output_file: output/ex76_1.out
! nsize: 4
! args: -ksp_rtol 1e-3 -ksp_converged_reason -pc_type {{bjacobi hpddm}shared output} -pc_hpddm_coarse_sub_pc_type lu -sub_pc_type lu -options_left no -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
!
! test:
! requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
! suffix: geneo
! output_file: output/ex76_geneo_pc_hpddm_levels_1_eps_nev-5.out
! nsize: 4
! args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_coarse_p {{1 2}shared output} -pc_hpddm_coarse_pc_type redundant -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
!
! test:
! requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
! suffix: fgmres_geneo_20_p_2
! output_file: output/ex76_fgmres_geneo_20_p_2.out
! nsize: 4
! args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_coarse_p 2 -pc_hpddm_coarse_pc_type redundant -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
!
! test:
! requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
! suffix: fgmres_geneo_20_p_2_geneo
! output_file: output/ex76_fgmres_geneo_20_p_2.out
! nsize: 4
! args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type {{baij sbaij}shared output} -pc_hpddm_levels_2_eps_nev {{5 20}shared output} -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_type gmres -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO
! # PCHPDDM + KSPHPDDM test to exercise multilevel + multiple RHS in one go
! test:
! requires: hpddm slepc datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_DYNAMIC_LIBRARIES) defined(PETSC_USE_SHARED_LIBRARIES)
! suffix: fgmres_geneo_20_p_2_geneo_rhs
! output_file: output/ex76_fgmres_geneo_20_p_2.out
! nsize: 4
! args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type baij -pc_hpddm_levels_2_eps_nev 5 -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_max_it 10 -pc_hpddm_levels_2_ksp_type hpddm -pc_hpddm_levels_2_ksp_hpddm_type gmres -ksp_type hpddm -ksp_hpddm_variant flexible -pc_hpddm_coarse_mat_type baij -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -rhs 4
!
!TEST*/