/* hplcreate.c */
#include <petsc/private/matimpl.h>
#include "hpl.h"

/*@
  MatSetHPL - fills a `MATSEQDENSE` matrix using the HPL 2.3 random matrix generation routine

  Collective

  Input Parameters:
+ A     - the matrix
- iseed - the random number seed

  Level: intermediate
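
  Example Usage:
  A minimal sketch (assumes a running PETSc program; the size 100 is an arbitrary illustration):
.vb
  Mat A;

  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, 100, 100, NULL, &A));
  PetscCall(MatSetHPL(A, 0));
  PetscCall(MatDestroy(&A));
.ve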

.seealso: [](ch_matrices), `Mat`, `MatCreate()`
@*/
PetscErrorCode MatSetHPL(Mat A, int iseed)
{
  PetscBool    isDense;
  PetscInt     M, N, LDA;
  PetscBLASInt bM, bN, bLDA;
  PetscScalar *values;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQDENSE, &isDense));
  PetscCheck(isDense, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only supports sequential dense matrices");
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(PetscBLASIntCast(M, &bM));
  PetscCall(PetscBLASIntCast(N, &bN));
  PetscCall(MatDenseGetLDA(A, &LDA));
  PetscCall(PetscBLASIntCast(LDA, &bLDA));
  PetscCall(MatDenseGetArrayWrite(A, &values));
  PetscStackCallExternalVoid("HPL_dmatgen", HPL_dmatgen(bM, bN, values, bLDA, iseed));
  PetscCall(MatDenseRestoreArrayWrite(A, &values));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <petsc/private/bmimpl.h>

typedef struct {
  Mat A, F; /* the benchmark matrix and its LU factorization */
} PetscBench_HPL;

static PetscErrorCode PetscBenchSetUp_HPL(PetscBench bm)
{
  PetscBench_HPL *hp = (PetscBench_HPL *)bm->data;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)bm), &rank));
  if (rank > 0) PetscFunctionReturn(PETSC_SUCCESS); /* the benchmark runs only on the first MPI process */
  if (bm->size == PETSC_DECIDE) bm->size = 2500;
  PetscCall(MatCreateSeqDense(PETSC_COMM_SELF, bm->size, bm->size, NULL, &hp->A));
  PetscCall(MatSetHPL(hp->A, 0));
  PetscCall(MatGetFactor(hp->A, MATSOLVERPETSC, MAT_FACTOR_LU, &hp->F));
  PetscCall(MatLUFactorSymbolic(hp->F, hp->A, NULL, NULL, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscBenchRun_HPL(PetscBench bm)
{
  PetscBench_HPL *hp = (PetscBench_HPL *)bm->data;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)bm), &rank));
  if (rank > 0) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(MatLUFactorNumeric(hp->F, hp->A, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscBenchView_HPL(PetscBench bm, PetscViewer viewer)
{
  PetscEventPerfInfo *info;
  PetscInt            numThreads;

  PetscFunctionBegin;
  // because the symbolic factorization is trivial, the numeric factorization is logged under MAT_LUFactor, not MAT_LUFactorNumeric
  PetscCall(PetscLogHandlerGetEventPerfInfo(bm->lhdlr, 0, MAT_LUFactor, &info));
  PetscCall(PetscBLASGetNumThreads(&numThreads));
  PetscCall(PetscViewerASCIIPrintf(viewer, "HPL Benchmark, number of threads %d, matrix size %d, flop rate %g (megaflops)\n", (int)numThreads, (int)bm->size, (double)(info->flops / (1.e6 * info->time))));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscBenchReset_HPL(PetscBench bm)
{
  PetscBench_HPL *hp = (PetscBench_HPL *)bm->data;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)bm), &rank));
  if (rank > 0) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(MatDestroy(&hp->F));
  PetscCall(MatDestroy(&hp->A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscBenchDestroy_HPL(PetscBench bm)
{
  PetscFunctionBegin;
  PetscCall(PetscFree(bm->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode PetscBenchCreate_HPL(PetscBench bm)
{
  PetscBench_HPL *hp;

  PetscFunctionBegin;
  PetscCall(PetscNew(&hp));
  bm->data         = hp;
  bm->ops->setup   = PetscBenchSetUp_HPL;
  bm->ops->run     = PetscBenchRun_HPL;
  bm->ops->view    = PetscBenchView_HPL;
  bm->ops->reset   = PetscBenchReset_HPL;
  bm->ops->destroy = PetscBenchDestroy_HPL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
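
/*
   For reference, a minimal driver sketch showing how this implementation might be
   used through the generic PetscBench interface. This is an illustration, not part
   of this file; it assumes the interface routines PetscBenchCreate(),
   PetscBenchSetType(), PetscBenchRun(), PetscBenchView(), PetscBenchDestroy(), and
   the type name PETSCBENCHHPL, which are expected to dispatch to the ops set above.

     PetscBench bm;

     PetscCall(PetscBenchCreate(PETSC_COMM_WORLD, &bm));
     PetscCall(PetscBenchSetType(bm, PETSCBENCHHPL));
     PetscCall(PetscBenchRun(bm));                             // expected to trigger the setup and run ops above
     PetscCall(PetscBenchView(bm, PETSC_VIEWER_STDOUT_WORLD)); // prints the megaflop rate via PetscBenchView_HPL
     PetscCall(PetscBenchDestroy(&bm));
*/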