Skip to content

Commit 050bc8f

Browse files
committed
test mock s3 dataset
1 parent ebd54e8 commit 050bc8f

File tree

1 file changed

+17
-3
lines changed

1 file changed

+17
-3
lines changed

tests/unit/test_mock_s3.py

+17-3
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,13 @@
11
import os
22
import s3fs
33
import pathlib
4+
import pyfive
45
import pytest
56
import h5netcdf
7+
import numpy as np
68

79
from tempfile import NamedTemporaryFile
8-
from activestorage.active import load_from_s3
10+
from activestorage.active import load_from_s3, Active
911

1012

1113
# needed by the spoofed s3 filesystem
@@ -133,17 +135,29 @@ def test_s3file_with_s3fs(s3fs_s3):
133135
anon=False, version_aware=True, client_kwargs={"endpoint_url": endpoint_uri}
134136
)
135137

136-
# test load by h5netcdf
138+
# test load by standard h5netcdf
137139
with s3.open(os.path.join("MY_BUCKET", file_name), "rb") as f:
138140
print("File path", f.path)
139141
ncfile = h5netcdf.File(f, 'r', invalid_netcdf=True)
140142
print("File loaded from spoof S3 with h5netcdf:", ncfile)
141143
print(ncfile["ta"])
142144
assert "ta" in ncfile
143145

144-
# test Active
146+
# test active.load_from_s3
145147
storage_options = dict(anon=False, version_aware=True,
146148
client_kwargs={"endpoint_url": endpoint_uri})
147149
with load_from_s3(os.path.join("MY_BUCKET", file_name), storage_options) as ac_file:
148150
print(ac_file)
149151
assert "ta" in ac_file
152+
153+
# test loading with Pyfive and passing the Dataset to Active
154+
with s3.open(os.path.join("MY_BUCKET", file_name), "rb") as f:
155+
print("File path", f.path)
156+
pie_ds = pyfive.File(f, 'r')
157+
print("File loaded from spoof S3 with Pyfive:", pie_ds)
158+
print("Pyfive dataset:", pie_ds["ta"])
159+
av = Active(pie_ds["ta"])
160+
av._method = "min"
161+
assert av.method([3,444]) == 3
162+
av_slice_min = av[3:5]
163+
assert av_slice_min == np.array(249.6583, dtype="float32")

0 commit comments

Comments (0)