Skip to content

Commit 51e27ca

Browse files
committed
add real world s3 dataset test
1 parent 74f0c26 commit 51e27ca

File tree

1 file changed

+42
-0
lines changed

1 file changed

+42
-0
lines changed

tests/test_real_s3.py

+42
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
import os
2+
import numpy as np
3+
4+
from activestorage.active import Active
5+
from activestorage.active import load_from_s3
6+
7+
S3_BUCKET = "bnl"  # S3 bucket that holds the real-world netCDF test datasets
8+
9+
10+
def test_s3_dataset():
    """Run somewhat as the 'gold' test.

    End-to-end check against a real S3-hosted netCDF file: open it with
    ``load_from_s3``, run an active-storage ``min`` reduction over a fixed
    slice of variable ``UM_m01s16i202_vn1106``, and assert the known
    expected value.

    Requires network access to the JASMIN object store and a reachable
    active-storage server; not suitable for offline CI.
    """
    # SECURITY NOTE(review): working-looking S3 credentials were committed
    # here in plain text. They are now read from the environment first;
    # the literal fallbacks are kept only for backward compatibility and
    # should be rotated/revoked and then deleted.
    storage_options = {
        'key': os.environ.get(
            "BNL_S3_KEY",
            "f2d55c6dcfc7618b2c34e00b58df3cef",
        ),
        'secret': os.environ.get(
            "BNL_S3_SECRET",
            "$/'#M{0{/4rVhp%n^(XeX$q@y#&(NM3W1->~N.Q6VP.5[@bLpi='nt]AfH)>78pT",
        ),
        'client_kwargs': {'endpoint_url': "https://uor-aces-o.s3-ext.jc.rl.ac.uk"},  # old proxy
        # 'client_kwargs': {'endpoint_url': "https://uor-aces-o.ext.proxy.jc.rl.ac.uk"},  # new proxy
    }
    active_storage_url = "https://192.171.169.113:8080"
    # Candidate test files (size / number of HDF5 chunks); exactly one active:
    # bigger_file = "ch330a.pc19790301-bnl.nc"  # 18GB 3400 HDF5 chunks
    bigger_file = "ch330a.pc19790301-def.nc"  # 17GB 64 HDF5 chunks
    # bigger_file = "da193a_25_day__198808-198808.nc"  # 3GB 30 HDF5 chunks

    test_file_uri = os.path.join(
        S3_BUCKET,
        bigger_file
    )
    print("S3 Test file path:", test_file_uri)
    dataset = load_from_s3(test_file_uri, storage_options=storage_options)
    av = dataset['UM_m01s16i202_vn1106']

    # big file bnl: 18GB/3400 HDF5 chunks; def: 17GB/64 HDF5 chunks
    active = Active(av, storage_type="s3",
                    storage_options=storage_options,
                    active_storage_url=active_storage_url)
    # NOTE(review): poking private attributes of Active — presumably the
    # supported way to select the reduction pipeline here; confirm upstream.
    active._version = 2
    active._method = "min"

    # result = active[:]
    result = active[0:3, 4:6, 7:9]  # standardized slice
    print("Result is", result)
    assert result == 5098.625

0 commit comments

Comments
 (0)