# A nightly job which downloads script evaluation dumps from S3 and runs a regression test.
name: Script Evaluation Test
on:
  schedule:
    - cron: 30 3 * * * # 3:30am every day
  workflow_dispatch:
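  # The job can also be triggered manually from the Actions tab or, as a sketch
  # assuming the GitHub CLI is available and authenticated for this repository:
  #   gh workflow run "Script Evaluation Test"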
concurrency:
  group: script-evaluation-test
  # We only want at most one evaluation test running at a time
  cancel-in-progress: true
jobs:
  script-evaluation-test:
    runs-on: [self-hosted, default]
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Quick Install Nix
        uses: cachix/install-nix-action@V27
        with:
          extra_nix_config: |
            experimental-features = nix-command flakes
            accept-flake-config = true
      - name: Download and Unzip Dump Files
        if: always()
        # NOTE: the S3 location s3://plutus/mainnet-script-dump/ must match that in
        # plutus-apps/.github/script-evaluation-dump.yml
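        # LOCAL_DIR and the AWS_* variables below are read by the helper script.
        # As a sketch of its assumed behaviour: it syncs the given S3 prefix into
        # LOCAL_DIR (e.g. via `aws s3 sync`) and decompresses files matching the
        # *.event.bz2 pattern. See scripts/s3-sync-unzip.sh for the actual logic.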
        run: |
          export LOCAL_DIR="$HOME/mainnet-script-dump-downloaded"
          nix develop --no-warn-dirty --accept-flake-config --command \
            bash ./scripts/s3-sync-unzip.sh s3://plutus/mainnet-script-dump-1-35-4/ \*.event.bz2
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: us-east-1
          AWS_ENDPOINT_URL: https://s3.devx.iog.io
      - name: Run
        # Run the test cases sequentially (--num-threads=1). This ensures we don't need
        # to simultaneously keep multiple `ScriptEvaluationEvents`, which are large, in
        # memory. Each test case contains many script evaluation events, and those are
        # run in parallel based on the number of available processors.
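        # To reproduce locally, a sketch assuming the dump files have already been
        # downloaded to the directory below:
        #   EVENT_DUMP_DIR=$HOME/mainnet-script-dump-downloaded \
        #     nix run .#evaluation-test -- --num-threads=1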
        run: |
          export EVENT_DUMP_DIR="$HOME/mainnet-script-dump-downloaded"
          nix run --no-warn-dirty --accept-flake-config .#evaluation-test -- --num-threads=1