@@ -1,4 +1,5 @@
 {-# LANGUAGE DuplicateRecordFields #-}
+{-# LANGUAGE OverloadedRecordDot #-}

 module Main where

@@ -10,69 +11,71 @@ import Bench.Options (Options (..), benchOptionsParser)
 import Bench.Summary (Summary (..), markdownReport, textReport)
 import Cardano.Binary (decodeFull, serialize)
 import Data.Aeson (eitherDecodeFileStrict')
-import Data.ByteString (hPut)
 import Data.ByteString.Base16 qualified as Base16
 import Data.ByteString.Lazy qualified as LBS
 import Hydra.Cardano.Api (
   ShelleyBasedEra (..),
   ShelleyGenesis (..),
   fromLedgerPParams,
 )
-import Hydra.Generator (Dataset, generateConstantUTxODataset)
+import Hydra.Generator (Dataset (..), generateConstantUTxODataset)
 import Options.Applicative (
   execParser,
 )
-import System.Directory (createDirectory, createDirectoryIfMissing, doesDirectoryExist)
+import System.Directory (createDirectoryIfMissing, doesDirectoryExist)
 import System.Environment (withArgs)
-import System.FilePath ((</>))
+import System.FilePath (takeDirectory, takeFileName, (</>))
 import Test.HUnit.Lang (formatFailureReason)
 import Test.QuickCheck (generate, getSize, scale)

 main :: IO ()
 main =
   execParser benchOptionsParser >>= \case
-    StandaloneOptions{workDirectory = Just benchDir, outputDirectory, timeoutSeconds, startingNodeId, scalingFactor, clusterSize} -> do
-      existsDir <- doesDirectoryExist benchDir
+    StandaloneOptions{workDirectory = Just workDir, outputDirectory, timeoutSeconds, startingNodeId, scalingFactor, clusterSize} -> do
+      -- XXX: This option is a bit weird as it allows re-running a test by
+      -- providing --work-directory, which is now redundant with the dataset
+      -- sub-command.
+      existsDir <- doesDirectoryExist workDir
       if existsDir
-        then replay outputDirectory timeoutSeconds startingNodeId benchDir
-        else createDirectory benchDir >> play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId benchDir
+        then replay outputDirectory timeoutSeconds startingNodeId workDir
+        else play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId workDir
     StandaloneOptions{workDirectory = Nothing, outputDirectory, timeoutSeconds, scalingFactor, clusterSize, startingNodeId} -> do
-      tmpDir <- createSystemTempDirectory "bench"
-      play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId tmpDir
+      workDir <- createSystemTempDirectory "bench"
+      play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId workDir
     DatasetOptions{datasetFiles, outputDirectory, timeoutSeconds, startingNodeId} -> do
-      benchDir <- createSystemTempDirectory "bench"
-      datasets <- mapM loadDataset datasetFiles
-      let targets = zip datasets $ (benchDir </>) . show <$> [1 .. length datasets]
-      forM_ (snd <$> targets) (createDirectoryIfMissing True)
-      run outputDirectory timeoutSeconds startingNodeId targets
+      run outputDirectory timeoutSeconds startingNodeId datasetFiles
  where
-  play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId benchDir = do
+  play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId workDir = do
+    putStrLn $ "Generating single dataset in work directory: " <> workDir
     numberOfTxs <- generate $ scale (* scalingFactor) getSize
     pparams <-
       eitherDecodeFileStrict' ("config" </> "devnet" </> "genesis-shelley.json") >>= \case
         Left err -> fail $ show err
         Right shelleyGenesis ->
           pure $ fromLedgerPParams ShelleyBasedEraShelley (sgProtocolParams shelleyGenesis)
     dataset <- generateConstantUTxODataset pparams (fromIntegral clusterSize) numberOfTxs
-    saveDataset (benchDir </> "dataset.cbor") dataset
-    run outputDirectory timeoutSeconds startingNodeId [(dataset, benchDir)]
+    let datasetPath = workDir </> "dataset.cbor"
+    saveDataset datasetPath dataset
+    run outputDirectory timeoutSeconds startingNodeId [datasetPath]

   replay outputDirectory timeoutSeconds startingNodeId benchDir = do
-    dataset <- loadDataset $ benchDir </> "dataset.cbor"
-    putStrLn $ "Using UTxO and Transactions from: " <> benchDir
-    run outputDirectory timeoutSeconds startingNodeId [(dataset, benchDir)]
+    let datasetPath = benchDir </> "dataset.cbor"
+    putStrLn $ "Replaying single dataset from work directory: " <> datasetPath
+    run outputDirectory timeoutSeconds startingNodeId [datasetPath]

-  run outputDirectory timeoutSeconds startingNodeId targets = do
-    results <- forM targets $ \(dataset, dir) -> do
-      putStrLn $ "Test logs available in: " <> (dir </> "test.log")
-      withArgs [] $ do
-        -- XXX: Wait between each bench run to give the OS time to cleanup resources??
-        threadDelay 10
-        try @_ @HUnitFailure (bench startingNodeId timeoutSeconds dir dataset) >>= \case
-          Left exc -> pure $ Left (dataset, dir, TestFailed exc)
-          Right summary@Summary{numberOfInvalidTxs}
-            | numberOfInvalidTxs == 0 -> pure $ Right summary
-            | otherwise -> pure $ Left (dataset, dir, InvalidTransactions numberOfInvalidTxs)
+  run outputDirectory timeoutSeconds startingNodeId datasetFiles = do
+    results <- forM datasetFiles $ \datasetPath -> do
+      putTextLn $ "Running benchmark with dataset " <> show datasetPath
+      dataset <- loadDataset datasetPath
+      withTempDir ("bench-" <> takeFileName datasetPath) $ \dir ->
+        withArgs [] $ do
+          -- XXX: Wait between each bench run to give the OS time to cleanup resources??
+          threadDelay 10
+          try @_ @HUnitFailure (bench startingNodeId timeoutSeconds dir dataset) >>= \case
+            Left exc -> pure $ Left (dataset, dir, TestFailed exc)
+            Right summary@Summary{numberOfInvalidTxs}
+              | numberOfInvalidTxs == 0 -> pure $ Right summary
+              | otherwise -> pure $ Left (dataset, dir, InvalidTransactions numberOfInvalidTxs)
     let (failures, summaries) = partitionEithers results
     case failures of
       [] -> benchmarkSucceeded outputDirectory summaries
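
The numbered per-dataset directories that the old `DatasetOptions` branch created by hand are gone; `run` now allocates a scratch directory per dataset via `withTempDir`, which is imported from elsewhere and not part of this diff. A minimal sketch of the shape it presumably has, assuming `System.IO.Temp` semantics (fresh directory, removed when the action finishes):

```haskell
import System.IO.Temp (withSystemTempDirectory)

-- Hypothetical stand-in for the project's withTempDir: create a fresh
-- directory under the system temp dir, run the action, and delete the
-- directory afterwards, even if the action throws.
withTempDir :: String -> (FilePath -> IO a) -> IO a
withTempDir = withSystemTempDirectory
```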
@@ -86,6 +89,7 @@
 saveDataset :: FilePath -> Dataset -> IO ()
 saveDataset f dataset = do
   putStrLn $ "Writing dataset to: " <> f
+  createDirectoryIfMissing True $ takeDirectory f
   writeFileBS f $ Base16.encode $ LBS.toStrict $ serialize dataset

 data BenchmarkFailed
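
`loadDataset`, used by `run` above, is also defined outside this diff. Since `saveDataset` writes hex-encoded CBOR, the loader presumably inverts those two steps; a sketch under that assumption, reusing the module's existing `Base16`, `LBS`, and `decodeFull` imports (and `Base16.decode` returning `Either`, as in base16-bytestring >= 1.0):

```haskell
import Data.Bifunctor (first)

-- Hypothetical counterpart to saveDataset: hex-decode the file, then
-- CBOR-decode the result back into a Dataset. The real definition may differ.
loadDataset :: FilePath -> IO Dataset
loadDataset f = do
  putStrLn $ "Reading dataset from: " <> f
  bytes <- readFileBS f
  either fail pure $ do
    raw <- Base16.decode bytes -- undo Base16.encode
    first show $ decodeFull $ LBS.fromStrict raw -- undo serialize
```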
@@ -116,8 +120,9 @@ benchmarkSucceeded outputDirectory summaries = do
   dumpToStdout = mapM_ putTextLn (concatMap textReport summaries)

   writeReport outputDir = do
+    let reportPath = outputDir </> "end-to-end-benchmarks.md"
+    putStrLn $ "Writing report to: " <> reportPath
     now <- getCurrentTime
     let report = markdownReport now summaries
     createDirectoryIfMissing True outputDir
-    withFile (outputDir </> "end-to-end-benchmarks.md") WriteMode $ \hdl -> do
-      hPut hdl $ encodeUtf8 $ unlines report
+    writeFileBS reportPath . encodeUtf8 $ unlines report
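
With this change the benchmark driver has two modes: a standalone run that generates a single dataset in a work directory (or replays it, if `--work-directory` points at an existing one), and a dataset mode that runs one benchmark per pre-generated file. Hypothetical invocations, with the executable name and flag spellings assumed; the authoritative parser lives in `Bench.Options`, which is not part of this diff:

```
# assumed spellings; see Bench.Options for the actual flags and sub-command
bench-e2e --work-directory /tmp/my-bench     # generate, or replay if it exists
bench-e2e datasets dataset-1.cbor dataset-2.cbor --output-directory results
```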