Skip to content

Commit a1b4e90

Browse files
authored Jan 8, 2024
Merge pull request #1226 from input-output-hk/check-protocol-parameters-roundtrip
Align protocol parameters JSON
2 parents bd8a113 + 191cb0c commit a1b4e90

File tree

18 files changed

+669
-228
lines changed

18 files changed

+669
-228
lines changed
 

.github/workflows/ci-nix.yaml

+8
Original file line numberDiff line numberDiff line change
@@ -174,6 +174,14 @@ jobs:
174174
name: benchmarks-${{matrix.package}}-${{matrix.bench}}
175175
path: benchmarks
176176

177+
# NOTE: This depends on the path used in hydra-cluster bench
178+
- name: 💾 Upload logs
179+
if: always()
180+
uses: actions/upload-artifact@v4
181+
with:
182+
name: hydra-cluster-bench-logs
183+
path: /tmp/nix-shell.*/bench-*/**/*.log
184+
177185
publish-benchmark-results:
178186
name: Publish benchmark results
179187
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository

hydra-cluster/bench/Bench/EndToEnd.hs

+8-3
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,8 @@ data Event = Event
7777
deriving anyclass (ToJSON)
7878

7979
bench :: Int -> DiffTime -> FilePath -> Dataset -> IO Summary
80-
bench startingNodeId timeoutSeconds workDir dataset@Dataset{clientDatasets, title, description} =
80+
bench startingNodeId timeoutSeconds workDir dataset@Dataset{clientDatasets, title, description} = do
81+
putStrLn $ "Test logs available in: " <> (workDir </> "test.log")
8182
withFile (workDir </> "test.log") ReadWriteMode $ \hdl ->
8283
withTracerOutputTo hdl "Test" $ \tracer ->
8384
failAfter timeoutSeconds $ do
@@ -87,12 +88,13 @@ bench startingNodeId timeoutSeconds workDir dataset@Dataset{clientDatasets, titl
8788
let parties = Set.fromList (deriveParty <$> hydraKeys)
8889
let clusterSize = fromIntegral $ length clientDatasets
8990
withOSStats workDir $
90-
withCardanoNodeDevnet (contramap FromCardanoNode tracer) workDir $ \node@RunningNode{nodeSocket, pparams} -> do
91+
withCardanoNodeDevnet (contramap FromCardanoNode tracer) workDir $ \node@RunningNode{nodeSocket} -> do
9192
putTextLn "Seeding network"
9293
let hydraTracer = contramap FromHydraNode tracer
9394
hydraScriptsTxId <- seedNetwork node dataset (contramap FromFaucet tracer)
9495
let contestationPeriod = UnsafeContestationPeriod 10
95-
withHydraCluster hydraTracer workDir nodeSocket startingNodeId cardanoKeys hydraKeys hydraScriptsTxId pparams contestationPeriod $ \(leader :| followers) -> do
96+
putStrLn $ "Starting hydra cluster in " <> workDir
97+
withHydraCluster hydraTracer workDir nodeSocket startingNodeId cardanoKeys hydraKeys hydraScriptsTxId contestationPeriod $ \(leader :| followers) -> do
9698
let clients = leader : followers
9799
waitForNodesConnected hydraTracer 20 clients
98100

@@ -238,14 +240,17 @@ seedNetwork :: RunningNode -> Dataset -> Tracer IO FaucetLog -> IO TxId
238240
seedNetwork node@RunningNode{nodeSocket, networkId} Dataset{fundingTransaction, clientDatasets} tracer = do
239241
fundClients
240242
forM_ clientDatasets fuelWith100Ada
243+
putTextLn "Publishing hydra scripts"
241244
publishHydraScriptsAs node Faucet
242245
where
243246
fundClients = do
247+
putTextLn "Fund scenario from faucet"
244248
submitTransaction networkId nodeSocket fundingTransaction
245249
void $ awaitTransaction networkId nodeSocket fundingTransaction
246250

247251
fuelWith100Ada ClientDataset{clientKeys = ClientKeys{signingKey}} = do
248252
let vk = getVerificationKey signingKey
253+
putTextLn $ "Seed client " <> show vk
249254
seedFromFaucet node vk 100_000_000 tracer
250255

251256
-- | Commit all (expected to exit) 'initialUTxO' from the dataset using the

hydra-cluster/bench/Main.hs

+39-34
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
{-# LANGUAGE DuplicateRecordFields #-}
2+
{-# LANGUAGE OverloadedRecordDot #-}
23

34
module Main where
45

@@ -10,69 +11,71 @@ import Bench.Options (Options (..), benchOptionsParser)
1011
import Bench.Summary (Summary (..), markdownReport, textReport)
1112
import Cardano.Binary (decodeFull, serialize)
1213
import Data.Aeson (eitherDecodeFileStrict')
13-
import Data.ByteString (hPut)
1414
import Data.ByteString.Base16 qualified as Base16
1515
import Data.ByteString.Lazy qualified as LBS
1616
import Hydra.Cardano.Api (
1717
ShelleyBasedEra (..),
1818
ShelleyGenesis (..),
1919
fromLedgerPParams,
2020
)
21-
import Hydra.Generator (Dataset, generateConstantUTxODataset)
21+
import Hydra.Generator (Dataset (..), generateConstantUTxODataset)
2222
import Options.Applicative (
2323
execParser,
2424
)
25-
import System.Directory (createDirectory, createDirectoryIfMissing, doesDirectoryExist)
25+
import System.Directory (createDirectoryIfMissing, doesDirectoryExist)
2626
import System.Environment (withArgs)
27-
import System.FilePath ((</>))
27+
import System.FilePath (takeDirectory, takeFileName, (</>))
2828
import Test.HUnit.Lang (formatFailureReason)
2929
import Test.QuickCheck (generate, getSize, scale)
3030

3131
main :: IO ()
3232
main =
3333
execParser benchOptionsParser >>= \case
34-
StandaloneOptions{workDirectory = Just benchDir, outputDirectory, timeoutSeconds, startingNodeId, scalingFactor, clusterSize} -> do
35-
existsDir <- doesDirectoryExist benchDir
34+
StandaloneOptions{workDirectory = Just workDir, outputDirectory, timeoutSeconds, startingNodeId, scalingFactor, clusterSize} -> do
35+
-- XXX: This option is a bit weird as it allows to re-run a test by
36+
-- providing --work-directory, which is now redundant of the dataset
37+
-- sub-command.
38+
existsDir <- doesDirectoryExist workDir
3639
if existsDir
37-
then replay outputDirectory timeoutSeconds startingNodeId benchDir
38-
else createDirectory benchDir >> play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId benchDir
40+
then replay outputDirectory timeoutSeconds startingNodeId workDir
41+
else play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId workDir
3942
StandaloneOptions{workDirectory = Nothing, outputDirectory, timeoutSeconds, scalingFactor, clusterSize, startingNodeId} -> do
40-
tmpDir <- createSystemTempDirectory "bench"
41-
play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId tmpDir
43+
workDir <- createSystemTempDirectory "bench"
44+
play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId workDir
4245
DatasetOptions{datasetFiles, outputDirectory, timeoutSeconds, startingNodeId} -> do
43-
benchDir <- createSystemTempDirectory "bench"
44-
datasets <- mapM loadDataset datasetFiles
45-
let targets = zip datasets $ (benchDir </>) . show <$> [1 .. length datasets]
46-
forM_ (snd <$> targets) (createDirectoryIfMissing True)
47-
run outputDirectory timeoutSeconds startingNodeId targets
46+
run outputDirectory timeoutSeconds startingNodeId datasetFiles
4847
where
49-
play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId benchDir = do
48+
play outputDirectory timeoutSeconds scalingFactor clusterSize startingNodeId workDir = do
49+
putStrLn $ "Generating single dataset in work directory: " <> workDir
5050
numberOfTxs <- generate $ scale (* scalingFactor) getSize
5151
pparams <-
5252
eitherDecodeFileStrict' ("config" </> "devnet" </> "genesis-shelley.json") >>= \case
5353
Left err -> fail $ show err
5454
Right shelleyGenesis ->
5555
pure $ fromLedgerPParams ShelleyBasedEraShelley (sgProtocolParams shelleyGenesis)
5656
dataset <- generateConstantUTxODataset pparams (fromIntegral clusterSize) numberOfTxs
57-
saveDataset (benchDir </> "dataset.cbor") dataset
58-
run outputDirectory timeoutSeconds startingNodeId [(dataset, benchDir)]
57+
let datasetPath = workDir </> "dataset.cbor"
58+
saveDataset datasetPath dataset
59+
run outputDirectory timeoutSeconds startingNodeId [datasetPath]
5960

6061
replay outputDirectory timeoutSeconds startingNodeId benchDir = do
61-
dataset <- loadDataset $ benchDir </> "dataset.cbor"
62-
putStrLn $ "Using UTxO and Transactions from: " <> benchDir
63-
run outputDirectory timeoutSeconds startingNodeId [(dataset, benchDir)]
62+
let datasetPath = benchDir </> "dataset.cbor"
63+
putStrLn $ "Replaying single dataset from work directory: " <> datasetPath
64+
run outputDirectory timeoutSeconds startingNodeId [datasetPath]
6465

65-
run outputDirectory timeoutSeconds startingNodeId targets = do
66-
results <- forM targets $ \(dataset, dir) -> do
67-
putStrLn $ "Test logs available in: " <> (dir </> "test.log")
68-
withArgs [] $ do
69-
-- XXX: Wait between each bench run to give the OS time to cleanup resources??
70-
threadDelay 10
71-
try @_ @HUnitFailure (bench startingNodeId timeoutSeconds dir dataset) >>= \case
72-
Left exc -> pure $ Left (dataset, dir, TestFailed exc)
73-
Right summary@Summary{numberOfInvalidTxs}
74-
| numberOfInvalidTxs == 0 -> pure $ Right summary
75-
| otherwise -> pure $ Left (dataset, dir, InvalidTransactions numberOfInvalidTxs)
66+
run outputDirectory timeoutSeconds startingNodeId datasetFiles = do
67+
results <- forM datasetFiles $ \datasetPath -> do
68+
putTextLn $ "Running benchmark with dataset " <> show datasetPath
69+
dataset <- loadDataset datasetPath
70+
withTempDir ("bench-" <> takeFileName datasetPath) $ \dir ->
71+
withArgs [] $ do
72+
-- XXX: Wait between each bench run to give the OS time to cleanup resources??
73+
threadDelay 10
74+
try @_ @HUnitFailure (bench startingNodeId timeoutSeconds dir dataset) >>= \case
75+
Left exc -> pure $ Left (dataset, dir, TestFailed exc)
76+
Right summary@Summary{numberOfInvalidTxs}
77+
| numberOfInvalidTxs == 0 -> pure $ Right summary
78+
| otherwise -> pure $ Left (dataset, dir, InvalidTransactions numberOfInvalidTxs)
7679
let (failures, summaries) = partitionEithers results
7780
case failures of
7881
[] -> benchmarkSucceeded outputDirectory summaries
@@ -86,6 +89,7 @@ main =
8689
saveDataset :: FilePath -> Dataset -> IO ()
8790
saveDataset f dataset = do
8891
putStrLn $ "Writing dataset to: " <> f
92+
createDirectoryIfMissing True $ takeDirectory f
8993
writeFileBS f $ Base16.encode $ LBS.toStrict $ serialize dataset
9094

9195
data BenchmarkFailed
@@ -116,8 +120,9 @@ benchmarkSucceeded outputDirectory summaries = do
116120
dumpToStdout = mapM_ putTextLn (concatMap textReport summaries)
117121

118122
writeReport outputDir = do
123+
let reportPath = outputDir </> "end-to-end-benchmarks.md"
124+
putStrLn $ "Writing report to: " <> reportPath
119125
now <- getCurrentTime
120126
let report = markdownReport now summaries
121127
createDirectoryIfMissing True outputDir
122-
withFile (outputDir </> "end-to-end-benchmarks.md") WriteMode $ \hdl -> do
123-
hPut hdl $ encodeUtf8 $ unlines report
128+
writeFileBS reportPath . encodeUtf8 $ unlines report

0 commit comments

Comments (0)
Please sign in to comment.