
Commit 676d22f

Added scikit_learn_bring_your_own_model_local_serving sample code
1 parent: f3e24a3

4 files changed, 81 insertions(+), 0 deletions(-)

.gitignore

@@ -69,3 +69,5 @@ pytorch_script_mode_local_model_inference/data/cifar-10-python.tar.gz
 scikit_learn_script_mode_local_serving_no_model_artifact/dummy.model
 scikit_learn_script_mode_local_serving_no_model_artifact/dummy.txt
 scikit_learn_script_mode_local_serving_no_model_artifact/model.tar.gz
+scikit_learn_bring_your_own_model_local_serving/model.joblib
+scikit_learn_bring_your_own_model_local_serving/model.tar.gz
code/inference.py

@@ -0,0 +1,9 @@
+
+import os
+import joblib
+
+
+def model_fn(model_dir):
+    print("loading model.joblib from: {}".format(model_dir))
+    loaded_model = joblib.load(os.path.join(model_dir, "model.joblib"))
+    return loaded_model
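
Note: model_fn is the only handler this sample overrides; request deserialization, prediction, and response serialization fall back to the SageMaker scikit-learn container defaults. The same inference.py can also override input_fn, predict_fn, and output_fn when custom handling is needed. A minimal sketch, assuming CSV payloads (the CSV parsing below is illustrative and not part of this commit):

import numpy as np


def input_fn(request_body, request_content_type):
    # Deserialize the request payload into a 2-D array of feature rows.
    if request_content_type == 'text/csv':
        return np.array([[float(v) for v in row.split(',')]
                         for row in request_body.strip().split('\n')])
    raise ValueError('Unsupported content type: {}'.format(request_content_type))


def predict_fn(input_data, model):
    # 'model' is the object returned by model_fn above.
    return model.predict(input_data)


def output_fn(prediction, content_type):
    # Serialize predictions back to the client as a CSV row.
    return ','.join(str(p) for p in prediction)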
requirements.txt

@@ -0,0 +1,4 @@
+numpy
+pandas
+sagemaker>=2.0.0,<3.0.0
+sagemaker[local]
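
Note: the sagemaker[local] extra installs the additional dependencies SageMaker local mode needs to run containers on your own machine through Docker; without it, deploying with instance_type='local' in the sample below cannot start the local container.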
scikit_learn_bring_your_own_model_local_serving.py

@@ -0,0 +1,66 @@
+# This is a sample Python program that serves a scikit-learn model pre-trained on the Boston Housing dataset.
+# This implementation will work on your *local computer* or in the *AWS Cloud*.
+#
+# Prerequisites:
+#   1. Install required Python packages:
+#      `pip install -r requirements.txt`
+#   2. Docker Desktop installed and running on your computer:
+#      `docker ps`
+#   3. You should have AWS credentials configured on your local machine
+#      in order to be able to pull the Docker image from ECR.
+###############################################################################################
+
+import boto3
+import pandas as pd
+import tarfile
+
+from sagemaker.sklearn import SKLearnModel
+from sklearn.datasets import load_boston
+from sklearn.model_selection import train_test_split
+
+DUMMY_IAM_ROLE = 'arn:aws:iam::111111111111:role/service-role/AmazonSageMaker-ExecutionRole-20200101T000001'
+s3 = boto3.client('s3')
+
+
+def main():
+
+    # Prepare data for model inference - we use the Boston Housing dataset
+    print('Preparing data for model inference')
+    data = load_boston()
+    X_train, X_test, y_train, y_test = train_test_split(
+        data.data, data.target, test_size=0.25, random_state=42
+    )
+
+    # We don't train a model, so we only need the testing data
+    testX = pd.DataFrame(X_test, columns=data.feature_names)
+
+    # Download a pre-trained model file
+    print('Downloading a pre-trained model file')
+    s3.download_file('aws-ml-blog', 'artifacts/scikit_learn_bring_your_own_model/model.joblib', 'model.joblib')
+
+    # Create a model.tar.gz file containing the model artifact
+    tar = tarfile.open('model.tar.gz', 'w:gz')
+    tar.add('model.joblib')
+    tar.close()
+
+    model = SKLearnModel(
+        role=DUMMY_IAM_ROLE,
+        model_data='file://./model.tar.gz',
+        framework_version='0.23-1',
+        py_version='py3',
+        source_dir='code',
+        entry_point='inference.py'
+    )
+
+    print('Deploying endpoint in local mode')
+    predictor = model.deploy(initial_instance_count=1, instance_type='local')
+
+    predictions = predictor.predict(testX[data.feature_names])
+    print("Predictions: {}".format(predictions))
+
+    print('About to delete the endpoint to stop paying (if in cloud mode).')
+    predictor.delete_endpoint(predictor.endpoint_name)
+
+
+if __name__ == "__main__":
+    main()
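
The sample downloads a ready-made model.joblib from the public aws-ml-blog bucket rather than training one. To bring your own artifact instead, a sketch like the following could produce a compatible model.joblib (the RandomForestRegressor choice is an assumption for illustration; the actual estimator behind the downloaded artifact is not shown in this commit):

# Hypothetical training script: produces a model.joblib that model_fn in
# code/inference.py can load. The estimator choice here is an assumption.
import joblib
from sklearn.datasets import load_boston
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import train_test_split

data = load_boston()
X_train, X_test, y_train, y_test = train_test_split(
    data.data, data.target, test_size=0.25, random_state=42
)

model = RandomForestRegressor(n_estimators=100, random_state=42)
model.fit(X_train, y_train)

# The filename must match what model_fn loads from the model directory.
joblib.dump(model, 'model.joblib')

Once Docker is running and the requirements are installed, run the sample with `python scikit_learn_bring_your_own_model_local_serving.py`.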
