Merge branch 'main' into akaladarshi/add-delegated-address
akaladarshi authored Feb 6, 2025
2 parents 8852041 + bdd8bae commit 0f24cdc
Showing 25 changed files with 366 additions and 225 deletions.
37 changes: 37 additions & 0 deletions .github/workflows/butterflynet.yml
@@ -0,0 +1,37 @@
name: Butterflynet checks
on:
  workflow_dispatch:
env:
  CI: 1
  CARGO_INCREMENTAL: 0
  CACHE_TIMEOUT_MINUTES: 5
  SCRIPT_TIMEOUT_MINUTES: 30
  AWS_ACCESS_KEY_ID: "${{ secrets.AWS_ACCESS_KEY_ID }}"
  AWS_SECRET_ACCESS_KEY: "${{ secrets.AWS_SECRET_ACCESS_KEY }}"
  RUSTC_WRAPPER: sccache
  CC: sccache clang
  CXX: sccache clang++
  FIL_PROOFS_PARAMETER_CACHE: /var/tmp/filecoin-proof-parameters
  SHELL_IMAGE: busybox
jobs:
  butterflynet-checks:
    name: Butterflynet checks
    runs-on: ubuntu-24.04-arm
    steps:
      - name: Checkout Sources
        uses: actions/checkout@v4
      - name: Setup sccache
        uses: mozilla-actions/[email protected]
        timeout-minutes: "${{ fromJSON(env.CACHE_TIMEOUT_MINUTES) }}"
        continue-on-error: true
      - uses: actions/setup-go@v5
        with:
          go-version-file: "go.work"
      - name: Build and install Forest binaries
        env:
          # To minimize compile times: https://nnethercote.github.io/perf-book/build-configuration.html#minimizing-compile-times
          RUSTFLAGS: "-C linker=clang -C link-arg=-fuse-ld=lld"
        run: make install-slim-quick
      - name: Run butterflynet checks
        run: ./scripts/tests/butterflynet_check.sh
        timeout-minutes: "${{ fromJSON(env.SCRIPT_TIMEOUT_MINUTES) }}"
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -31,6 +31,8 @@

### Added

+- [#5244](https://github.com/ChainSafe/forest/issues/5244) Add `live` and `healthy` subcommands to `forest-cli healthcheck`.
+
- [#4708](https://github.com/ChainSafe/forest/issues/4708) Add support for the
`Filecoin.EthTraceBlock` RPC method.

3 changes: 1 addition & 2 deletions build/bootstrap/butterflynet
@@ -1,2 +1 @@
-/dns4/bootstrap-0.butterfly.fildev.network/tcp/1347/p2p/12D3KooWFfuFm4eWfMR2Xk9jQ8VsxVqt9nyZXZRdi6WqNrPcg8qk
-/dns4/bootstrap-1.butterfly.fildev.network/tcp/1347/p2p/12D3KooWEgUQub6ZS5M7hfiZPNf4kFFmgsiyYKNcdAk5oKMUcyft
+/dnsaddr/bootstrap.butterfly.fildev.network
36 changes: 18 additions & 18 deletions build/manifest.json
@@ -1062,69 +1062,69 @@
      "network": {
        "type": "butterflynet"
      },
-      "version": "v16.0.0-dev",
-      "bundle_cid": "bafy2bzaced3uzaynkdi7wiqiwbje7l3lllpwbokuf7slak627gx7bk5pryjpa",
+      "version": "v16.0.0-dev1",
+      "bundle_cid": "bafy2bzaced3ucmgzcrkvaw6qgqdzl56t3rctlb7i66vrj23dacgeath7jcxne",
      "manifest": {
        "actors": [
          [
            "system",
            1,
-            "bafk2bzacebzc4fklfws2ri4jv6mcu4xgs52ve3gqzgm476hkohfahj7s5lig2"
+            "bafk2bzacea5dls7jx2bhbhdkwl2c3qgyv22ldbeoc2me5z7qe5kbutf7tjeow"
          ],
          [
            "init",
            2,
-            "bafk2bzacebccioskxaudlyoydyy4yoswgfcffpadhbu6midk3edlhuzt3osu4"
+            "bafk2bzaced6b6odw6vt3ak7z7idhatex6gjsxck57wkum2yud6wubhpbwtm5e"
          ],
          [
            "cron",
            3,
-            "bafk2bzaceadysuxpaeqxwbzg7ksyl2olzizj4kgeq2b2oldlca4et55huauwo"
+            "bafk2bzacec7zpiconapx4veuplh5hk3iumnigsbx7yxhxfz7hirqbf2vpexqa"
          ],
          [
            "account",
            4,
-            "bafk2bzacebyxmngiugdhhio7zn7i67jzsythgy3jqqbqyan735g2rkalufhr6"
+            "bafk2bzaceaw5z2sungnovruolbxdc43xbsksx7ajlr2bw7insd55nho3gfbne"
          ],
          [
            "storagepower",
            5,
-            "bafk2bzaceauwj75apfux75zz4owkcjeggu4cp2ldn7weoxakzb5zsz55kthbe"
+            "bafk2bzacebnq5klygudstkq5tx6y7xusn2frygtotahiagmda3zy6rvzmyqme"
          ],
          [
            "storageminer",
            6,
-            "bafk2bzacedz6zjhfuyexwdco7zmpkypjzllk5v2sv4kdoz4gfqdzxuyq5uz5u"
+            "bafk2bzaceca27d33cwxwwbhbmijt453wkc3dvrnfkrdsuznol4cq33z3oqxbk"
          ],
          [
            "storagemarket",
            7,
-            "bafk2bzacedb6p35ax2s2muxvgir73vmdkbxn7uc5aw6n64dewb32drqils5zq"
+            "bafk2bzaceblznz3yqthmh2jbrknljiqu3kdfr7x7j2wn62abnszs4ziettqmm"
          ],
          [
            "paymentchannel",
            8,
-            "bafk2bzaceaigyawy2ywyfdqjccfyi62xtn456gavqrwdilpltiqxbeo7zsjf4"
+            "bafk2bzacebmpquxfvdh2lmgi7huqcln3ey56run7hkrsuhi6lgcwekbozhxac"
          ],
          [
            "multisig",
            9,
-            "bafk2bzacedcpbmiblrhh43kthgrkctrklh27jkvjcorzfreusd7fsjdw2llzq"
+            "bafk2bzacebius3sex65rxav4oo2qbbm6vuv5pcer3shgutqyyxy3vvcgezayg"
          ],
          [
            "reward",
            10,
-            "bafk2bzaceagt6mvup6z3atlaftepdex6f45ncml57zsuxy5puwtzcge5vy4wm"
+            "bafk2bzaceagmmgu3wt7fozbp3uhd6aepdl6c2ykt7xbpbldh2lvmmvvmt56gw"
          ],
          [
            "verifiedregistry",
            11,
-            "bafk2bzaceasoa42xnnbu2uftlfvlzhbok3q3nqetv6bxcw7vydbxbmwn53ad6"
+            "bafk2bzacecqbljsk5utms7pe4g3uy7zvrpwmwgop4spx6pjrpi4tjx663gkq2"
          ],
          [
            "datacap",
            12,
-            "bafk2bzaceals5hcpbvzm24dmmoddqpr2tcpolwwey3qvjf3okzk7ihf75gngu"
+            "bafk2bzaceb4owttyigypvl6pguxhqwe45rgfjubgpoitqhiyzumhlwwu6buge"
          ],
          [
            "placeholder",
@@ -1134,20 +1134,20 @@
          [
            "evm",
            14,
-            "bafk2bzacebxlvhz665s2kbace6nzeqy5maasqixgirzn4xhbjx42xi2hkc5gk"
+            "bafk2bzacebdhgopsxunxykgehkbwtj5iyyvbqygi5uuvhtm7m4vsz3vcsp5iw"
          ],
          [
            "eam",
            15,
-            "bafk2bzacec2gt4teegjdhbpfwl6qbxjojffxgkmzhua2m2a4lks52abnjyypw"
+            "bafk2bzaceapofadtnyiulmdc5k3nujthqwyht67xu2pohatqjcexojm34j7ng"
          ],
          [
            "ethaccount",
            16,
-            "bafk2bzacec627lshgjxvfzjledk2wph4u7n47got2ultaijbh4v5wdyhjpxse"
+            "bafk2bzacebtz62oxftksx4f6efbuh6i5wb5nvuo447uefkbz5lis4rcw7djw2"
          ]
        ],
-        "actor_list_cid": "bafy2bzacebls3q4yivgxner4v3sltf4mk4sxmyr7lu65nic5manmsafqu3qkm"
+        "actor_list_cid": "bafy2bzacednuely5c7x43ykvspowkcbzrqym7wlfvgn4ceoqakkkxhu3g5i6m"
      }
    },
{
8 changes: 8 additions & 0 deletions documentation/src/offline-forest.md
@@ -9,7 +9,9 @@ chain's archive state without syncing, and various testing scenarios.
```bash
forest-tool api serve --help
```

Sample output (may vary depending on the version):

```console
Usage: forest-tool api serve [OPTIONS] [SNAPSHOT_FILES]...

@@ -45,7 +47,9 @@ height: 1859736.
```bash
forest-tool api serve --chain calibnet ~/Downloads/forest_snapshot_calibnet_2024-08-08_height_1859736.forest.car.zst
```

Sample output:

```console
2024-08-12T12:29:16.624698Z INFO forest::tool::offline_server::server: Configuring Offline RPC Server
2024-08-12T12:29:16.640402Z INFO forest::tool::offline_server::server: Using chain config for calibnet
@@ -63,7 +67,9 @@ curl --silent -X POST -H "Content-Type: application/json" \
--data '{"jsonrpc":"2.0","id":2,"method":"Filecoin.ChainHead","param":"null"}' \
"http://127.0.0.1:2345/rpc/v0" | jq
```

Sample output:

```json
{
"jsonrpc": "2.0",
@@ -101,7 +107,9 @@ curl --silent -X POST -H "Content-Type: application/json" \
--data '{"jsonrpc":"2.0","id":2,"method":"Filecoin.StateGetNetworkParams","param":"null"}' \
"http://127.0.0.1:2345/rpc/v0" | jq
```

Sample output:

```json
{
"jsonrpc": "2.0",
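The curl examples in these docs translate directly to any JSON-RPC 2.0 client. As an illustrative sketch only (not part of this diff), the same `Filecoin.ChainHead` call from Rust, assuming the `jsonrpsee` and `tokio` crates and an offline server on the default port:

```rust
use jsonrpsee::core::client::ClientT;
use jsonrpsee::http_client::HttpClientBuilder;
use jsonrpsee::rpc_params;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Point the client at the server started by `forest-tool api serve`.
    let client = HttpClientBuilder::default().build("http://127.0.0.1:2345/rpc/v0")?;
    // Filecoin.ChainHead takes no parameters and returns the current head tipset.
    let head: serde_json::Value = client.request("Filecoin.ChainHead", rpc_params![]).await?;
    println!("{}", serde_json::to_string_pretty(&head)?);
    Ok(())
}
```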
28 changes: 28 additions & 0 deletions scripts/tests/butterflynet_check.sh
@@ -0,0 +1,28 @@
#!/bin/bash
set -euxo pipefail

# This script tests that Forest is able to catch up with the butterflynet.

source "$(dirname "$0")/harness.sh"

function shutdown {
  kill -KILL $FOREST_NODE_PID
}

trap shutdown EXIT

function call_forest_chain_head {
  curl --silent -X POST -H "Content-Type: application/json" \
    --data '{"jsonrpc":"2.0","id":2,"method":"Filecoin.ChainHead","param":"null"}' \
    "http://127.0.0.1:2345/rpc/v1"
}

$FOREST_PATH --chain butterflynet --encrypt-keystore false &
FOREST_NODE_PID=$!

until call_forest_chain_head; do
  echo "Forest RPC endpoint is unavailable - sleeping for 1s"
  sleep 1
done

forest_wait_for_sync
20 changes: 0 additions & 20 deletions src/blocks/tipset.rs
@@ -5,7 +5,6 @@ use std::sync::Arc;
use std::{fmt, sync::OnceLock};

use crate::cid_collections::SmallCidNonEmptyVec;
-use crate::db::{SettingsStore, SettingsStoreExt};
use crate::networks::{calibnet, mainnet};
use crate::shim::clock::ChainEpoch;
use crate::utils::cid::CidCborExt;
@@ -223,25 +222,6 @@ impl Tipset {
            .transpose()?)
    }

-    /// Load the heaviest tipset from the blockstore
-    pub fn load_heaviest(
-        store: &impl Blockstore,
-        settings: &impl SettingsStore,
-    ) -> anyhow::Result<Option<Tipset>> {
-        Ok(
-            match settings.read_obj::<TipsetKey>(crate::db::setting_keys::HEAD_KEY)? {
-                Some(tsk) => tsk
-                    .into_cids()
-                    .into_iter()
-                    .map(|key| CachingBlockHeader::load(store, key))
-                    .collect::<anyhow::Result<Option<Vec<_>>>>()?
-                    .map(Tipset::new)
-                    .transpose()?,
-                None => None,
-            },
-        )
-    }
-
    /// Fetch a tipset from the blockstore. This call fails if the tipset is
    /// missing or invalid.
    pub fn load_required(store: &impl Blockstore, tsk: &TipsetKey) -> anyhow::Result<Tipset> {
47 changes: 17 additions & 30 deletions src/chain/store/chain_store.rs
@@ -6,9 +6,7 @@ use super::{
    tipset_tracker::TipsetTracker,
    Error,
};
-use crate::blocks::{CachingBlockHeader, Tipset, TipsetKey, TxMeta};
-use crate::db::setting_keys::HEAD_KEY;
-use crate::db::{EthMappingsStore, EthMappingsStoreExt, SettingsStore, SettingsStoreExt};
+use crate::db::{EthMappingsStore, EthMappingsStoreExt};
use crate::fil_cns;
use crate::interpreter::{BlockMessages, VMEvent, VMTrace};
use crate::libp2p_bitswap::{BitswapStoreRead, BitswapStoreReadWrite};
@@ -22,14 +20,17 @@ use crate::shim::{
};
use crate::state_manager::StateOutput;
use crate::utils::db::{BlockstoreExt, CborStoreExt};
+use crate::{
+    blocks::{CachingBlockHeader, Tipset, TipsetKey, TxMeta},
+    db::HeaviestTipsetKeyProvider,
+};
use ahash::{HashMap, HashMapExt, HashSet};
use anyhow::Context as _;
use cid::Cid;
use fil_actors_shared::fvm_ipld_amt::Amtv0 as Amt;
use fvm_ipld_blockstore::Blockstore;
use fvm_ipld_encoding::CborStore;
use itertools::Itertools;
-use nunny::vec as nonempty;
use parking_lot::Mutex;
use serde::{de::DeserializeOwned, Serialize};
use std::sync::Arc;
@@ -60,8 +61,8 @@ pub struct ChainStore<DB> {
    /// key-value `datastore`.
    pub db: Arc<DB>,

-    /// Settings store
-    settings: Arc<dyn SettingsStore + Sync + Send>,
+    /// Heaviest tipset key provider
+    heaviest_tipset_key_provider: Arc<dyn HeaviestTipsetKeyProvider + Sync + Send>,

    /// Used as a cache for tipset `lookbacks`.
    pub chain_index: Arc<ChainIndex<Arc<DB>>>,
@@ -111,30 +112,21 @@
{
    pub fn new(
        db: Arc<DB>,
-        settings: Arc<dyn SettingsStore + Sync + Send>,
+        heaviest_tipset_key_provider: Arc<dyn HeaviestTipsetKeyProvider + Sync + Send>,
        eth_mappings: Arc<dyn EthMappingsStore + Sync + Send>,
        chain_config: Arc<ChainConfig>,
        genesis_block_header: CachingBlockHeader,
    ) -> anyhow::Result<Self> {
        let (publisher, _) = broadcast::channel(SINK_CAP);
        let chain_index = Arc::new(ChainIndex::new(Arc::clone(&db)));

-        if settings
-            .read_obj::<TipsetKey>(HEAD_KEY)?
-            .is_none_or(|tipset_keys| chain_index.load_tipset(&tipset_keys).is_err())
-        {
-            let tipset_keys = TipsetKey::from(nonempty![*genesis_block_header.cid()]);
-            settings.write_obj(HEAD_KEY, &tipset_keys)?;
-        }
-
        let validated_blocks = Mutex::new(HashSet::default());

        let cs = Self {
            publisher,
            chain_index,
            tipset_tracker: TipsetTracker::new(Arc::clone(&db), chain_config.clone()),
            db,
-            settings,
+            heaviest_tipset_key_provider,
            genesis_block_header,
            validated_blocks,
            eth_mappings,
@@ -144,10 +136,10 @@
        Ok(cs)
    }

-    /// Sets heaviest tipset within `ChainStore` and store its tipset keys in
-    /// the settings store under the [`crate::db::setting_keys::HEAD_KEY`] key.
+    /// Sets heaviest tipset
    pub fn set_heaviest_tipset(&self, ts: Arc<Tipset>) -> Result<(), Error> {
-        self.settings.write_obj(HEAD_KEY, ts.key())?;
+        self.heaviest_tipset_key_provider
+            .set_heaviest_tipset_key(ts.key())?;
        if self.publisher.send(HeadChange::Apply(ts)).is_err() {
            debug!("did not publish head change, no active receivers");
        }
@@ -229,13 +221,12 @@

    /// Returns the currently tracked heaviest tipset.
    pub fn heaviest_tipset(&self) -> Arc<Tipset> {
+        let tsk = self
+            .heaviest_tipset_key_provider
+            .heaviest_tipset_key()
+            .unwrap_or_else(|_| TipsetKey::from(nunny::vec![*self.genesis_block_header.cid()]));
        self.chain_index
-            .load_required_tipset(
-                &self
-                    .settings
-                    .require_obj::<TipsetKey>(HEAD_KEY)
-                    .expect("failed to load heaviest tipset key"),
-            )
+            .load_required_tipset(&tsk)
            .expect("failed to load heaviest tipset")
    }

@@ -384,10 +375,6 @@
        Ok((lbts, *next_ts.parent_state()))
    }

-    pub fn settings(&self) -> Arc<dyn SettingsStore + Sync + Send> {
-        self.settings.clone()
-    }
-
    /// Filter [`SignedMessage`]'s to keep only the most recent ones, then write corresponding entries to the Ethereum mapping.
    pub fn process_signed_messages(&self, messages: &[(SignedMessage, u64)]) -> anyhow::Result<()>
    where
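The constructor and `heaviest_tipset` changes above exercise the new `heaviest_tipset_key_provider` through exactly two calls: `heaviest_tipset_key()` and `set_heaviest_tipset_key(ts.key())`. The trait itself lives in `src/db`, which this diff does not show; a minimal sketch of what those call sites imply, with `anyhow::Result` assumed as the return type:

```rust
use crate::blocks::TipsetKey;

/// Hypothetical reconstruction from the call sites in `chain_store.rs`;
/// the real definition in `crate::db` may differ in naming and error types.
pub trait HeaviestTipsetKeyProvider {
    /// Returns the currently persisted heaviest tipset key.
    fn heaviest_tipset_key(&self) -> anyhow::Result<TipsetKey>;

    /// Persists `tsk` as the new heaviest tipset key.
    fn set_heaviest_tipset_key(&self, tsk: &TipsetKey) -> anyhow::Result<()>;
}
```

Note the design shift: `heaviest_tipset` now falls back to the genesis tipset key when the provider returns an error, instead of panicking on a missing `HEAD_KEY` as the old `require_obj` path did, and head-key persistence sits behind one interface rather than ad hoc settings-store reads (the removed `Tipset::load_heaviest` being one such caller).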