%%capture
import requests
from azureml.core.model import Model
from azureml.core.workspace import Workspace
from azureml.core.conda_dependencies import CondaDependencies
from azureml.core.environment import Environment
from azureml.core.model import InferenceConfig
from azureml.core.webservice import LocalWebservice
from azureml.core.webservice import AciWebservice, Webservice
from azureml.exceptions import WebserviceException
from azureml.core.authentication import InteractiveLoginAuthentication
%%writefile score.py
import re
import json
import joblib
import numpy as np
import tensorflow as tf
from tensorflow.python import keras
from tensorflow.python.keras.models import model_from_json ,load_model
from sklearn.preprocessing import LabelEncoder
from azureml.core.model import Model
from azureml.contrib.services.aml_response import AMLResponse
# Sequence length used as `maxlen` when padding tokenized tweets (see predict()).
MAX_TWEET_LENGTH = 100
# Default confidence threshold: predictions scoring below this are
# re-labeled 'unrelated' (overridable per-request via payload['minScore']).
MIN_PREDICTION_SCORE = 0.8
def init():
    """Load the registered Keras model, tokenizer and label encoder and
    build the module-level classifier used by run().

    Called once by the Azure ML scoring runtime when the container starts.
    """
    global model, tokenizer, label_encoder, classifier
    model_path = Model.get_model_path('disaster-watch-model-keras', version=1)
    # The model was saved with a custom-named optimizer; map it back to Adam.
    model = load_model(model_path, custom_objects={"adam": tf.keras.optimizers.Adam})
    tokenizer = joblib.load(Model.get_model_path('disaster-watch-tokenizer', version=1))
    label_encoder = joblib.load(Model.get_model_path('disaster-watch-label-encoder', version=1))
    # NOTE: the redundant model.compile() that used to live here was removed —
    # TweeterClassifier.__init__ compiles the model with identical arguments.
    classifier = TweeterClassifier(tokenizer, label_encoder, model)
def run(request):
    """Score one tweet.

    Expects a JSON string with key 'tweet' and an optional 'minScore'
    confidence threshold. Returns an AMLResponse carrying the prediction
    dict {'category', 'score', 'tweet'} with a permissive CORS header.
    """
    payload = json.loads(request)
    min_score = payload.get('minScore', MIN_PREDICTION_SCORE)
    prediction = classifier.predict(payload['tweet'], min_score)
    response = AMLResponse(prediction, 200)
    response.headers['Access-Control-Allow-Origin'] = '*'
    # BUG FIX: previously returned the raw `prediction`, discarding the
    # AMLResponse and its CORS header; browsers would reject the response.
    return response
class TweeterClassifier:
    """Classification class that loads the saved Tensorflow 2.0 model and weights
    and classifies the disaster related tweets.
    """

    def __init__(self, tokenizer, label_encoder, model):
        """Store the fitted tokenizer/label-encoder and compile the model.

        Args:
            tokenizer: fitted Keras tokenizer (provides texts_to_sequences).
            label_encoder: fitted sklearn LabelEncoder mapping class ids to names.
            model: loaded Keras classification model.
        """
        self.MAX_TWEET_LENGTH = 100  # must match the sequence length used at training time
        self.tokenizer = tokenizer
        self.label_encoder = label_encoder
        self.model = model
        self.model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

    def predict(self, tweet, min_score):
        """Classify a tweet; fall back to 'unrelated' below min_score.

        Returns a dict: {'category': str, 'score': str, 'tweet': sanitized text}.
        """
        tweet = self._sanitize(tweet)
        x_seq = self.tokenizer.texts_to_sequences([tweet])[0]
        x_pad = keras.preprocessing.sequence.pad_sequences(
            [x_seq], maxlen=self.MAX_TWEET_LENGTH, padding='post')
        x_pad = np.asarray(x_pad).reshape(1, self.MAX_TWEET_LENGTH)
        # FIX: run a single forward pass instead of predict_classes() + predict().
        # predict_classes was removed from Keras models in TF >= 2.6 and the old
        # code doubled the inference cost; argmax/max of the same score vector
        # reproduces both results exactly.
        scores = self.model.predict(x_pad)[0]
        prediction_class = np.argmax(scores)
        prediction_score = scores[prediction_class]
        prediction_category = self.label_encoder.inverse_transform([prediction_class])[0]
        if prediction_score < min_score:
            prediction_category = 'unrelated'
        return {'category': prediction_category, 'score': str(prediction_score), 'tweet': tweet}

    def _sanitize(self, tweet):
        """Lowercase and strip mentions/hashtag marks, punctuation, URLs and
        location names the model was found to overfit on."""
        tweet = tweet.lower()
        for char in ('@', '#', '.', ','):
            tweet = tweet.replace(char, '')
        tweet = re.sub(r'http\S+', '', tweet)
        # Remove place names so the model generalizes beyond training locations.
        for word in ['pakistan', 'nepal', 'chile', 'texas', 'boston', 'california',
                     'alberta', 'calgary', 'queensland', 'india', 'oklahoma']:
            tweet = tweet.replace(word, '')
        return tweet
Overwriting score.py
def initialze():
    """Authenticate interactively and return the Azure ML workspace handle.

    NOTE(review): the misspelled name is kept because call sites invoke
    `initialze()`; renaming would break them.
    """
    subscription_id = "979884b7-8494-4a3d-abd7-e9e63d1f5d90"
    resource_group = "azure-ai-hackathon-ml"
    workspace_name = "azure-ai-hackathon-ws"
    # Removed unused local `workspace_region` ("West US 2"): Workspace.get
    # resolves the workspace by name/subscription/resource group only.
    interactive_auth = InteractiveLoginAuthentication()
    return Workspace.get(
        name=workspace_name,
        subscription_id=subscription_id,
        resource_group=resource_group,
        auth=interactive_auth,
    )
def get_environment():
    """Build the 'LocalDeploy' Azure ML environment with every pip package
    the scoring script (score.py) needs at runtime."""
    pip_packages = (
        "h5py",
        "joblib",
        "numpy",
        "pandas",
        "python-dateutil",
        "pytz",
        "scikit-learn",
        "tensorflow",
        "azureml-core",
        "azureml-contrib-services",
    )
    dependencies = CondaDependencies()
    for package in pip_packages:
        dependencies.add_pip_package(package)
    env = Environment("LocalDeploy")
    env.python.conda_dependencies = dependencies
    return env
# Connect to the workspace, build the runtime environment, and assemble the
# inference configuration shared by both the local and the ACI deployments.
ws = initialze()
environment = get_environment()
inference_config = InferenceConfig(entry_script="score.py",
environment=environment)
def deploy_local():
    """Deploy the three registered model artifacts as a local Docker
    webservice named 'local-deploy' and block until it is up."""
    config = LocalWebservice.deploy_configuration()
    artifacts = [
        Model(name='disaster-watch-model-keras', workspace=ws, version=1),
        Model(name='disaster-watch-tokenizer', workspace=ws, version=1),
        Model(name='disaster-watch-label-encoder', workspace=ws, version=1),
    ]
    service = Model.deploy(ws, "local-deploy", artifacts, inference_config, config)
    service.wait_for_deployment()
def deploy_to_cloud():
    """Deploy the three registered model artifacts to Azure Container
    Instances as 'disaster-watch-service', replacing any existing service.
    """
    deployment_config = AciWebservice.deploy_configuration(cpu_cores=1, memory_gb=1)
    artifacts = [
        Model(name='disaster-watch-model-keras', workspace=ws, version=1),
        Model(name='disaster-watch-tokenizer', workspace=ws, version=1),
        Model(name='disaster-watch-label-encoder', workspace=ws, version=1),
    ]
    aci_service_name = 'disaster-watch-service'
    try:
        # Delete a previously deployed service with the same name, if any.
        # The Webservice constructor raises WebserviceException when the
        # service does not exist, so the old `if service:` guard was dead code.
        existing = Webservice(ws, name=aci_service_name)
        existing.delete()
    except WebserviceException:
        # FIX: the old handler printed a blank line and ignored the exception;
        # say explicitly that this is the expected first-deploy path.
        print(f"No existing service named '{aci_service_name}'; deploying fresh.")
    service = Model.deploy(ws, aci_service_name, artifacts, inference_config, deployment_config)
    service.wait_for_deployment(True)
    print(service.state)
# Build and start the local Docker webservice (build log in the cell output below).
deploy_local()
Downloading model disaster-watch-model-keras:1 to /tmp/azureml_iyv8erkk/disaster-watch-model-keras/1 Downloading model disaster-watch-tokenizer:1 to /tmp/azureml_iyv8erkk/disaster-watch-tokenizer/1 Downloading model disaster-watch-label-encoder:1 to /tmp/azureml_iyv8erkk/disaster-watch-label-encoder/1 Generating Docker build context. Package creation Succeeded Logging into Docker registry azureaihacka6d64e0ee.azurecr.io Logging into Docker registry azureaihacka6d64e0ee.azurecr.io Building Docker image from Dockerfile... Step 1/5 : FROM azureaihacka6d64e0ee.azurecr.io/azureml/azureml_1f1c28ac8a28fb742de605694473acfe ---> 237daad85902 Step 2/5 : COPY azureml-app /var/azureml-app ---> 15791ac781ab Step 3/5 : COPY model_config_map.json /var/azureml-app/model_config_map.json ---> 0a539817b72c Step 4/5 : RUN mv '/var/azureml-app/tmp5_8gfis0.py' /var/azureml-app/main.py ---> Running in f8fd7e33182b ---> b1e815a50ff1 Step 5/5 : CMD ["runsvdir","/var/runit"] ---> Running in d4bc318e91a7 ---> 1a3e7a27b081 Successfully built 1a3e7a27b081 Successfully tagged local-deploy:latest Starting Docker container... Docker container running. Checking container health... Local webservice is running at http://localhost:32768
# Deploy to Azure Container Instances (provisioning log in the cell output below).
deploy_to_cloud()
Running........................... SucceededACI service creation operation finished, operation "Succeeded" Healthy
# Smoke-test the locally deployed endpoint with a sample disaster tweet;
# the port (32768) comes from the local Docker deployment output above.
response=requests.post('http://localhost:32768/score',json={
'tweet':'large explosion at a texas fertilizer plant...multiple injuries reported...local hospital told to expect up to 100 patients: via ap'
})
response.json()
{'category': 'explosion', 'score': '0.96989554', 'tweet': 'large explosion at a fertilizer plantmultiple injuries reportedlocal hospital told to expect up to 100 patients: via ap'}