
import json
import os
import io

# Imports for the REST API
from flask import Flask, request, jsonify

# Imports for image processing
from PIL import Image

# Imports for prediction
from predict import predict_url

app = Flask(__name__)

# 4MB max image size limit
app.config['MAX_CONTENT_LENGTH'] = 4 * 1024 * 1024


@app.route('/')
def index():
    return 'GET Methods are Not Allowed'


@app.route('/image', methods=['POST'])
def predict_url_handler():
    try:
        image_url = json.loads(request.get_data().decode('utf-8'))['url']
        results = predict_url(image_url)
        return jsonify(results)
    except Exception as e:
        print('EXCEPTION:', str(e))
        return 'Error processing image'


if __name__ == '__main__':
    # # Load and initialize the model
    # initialize()
    # Run the server
    app.run(host='0.0.0.0', port=80)
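With the server running, the /image endpoint expects a JSON body with a url key and returns the predictions produced by predict_url. A minimal client sketch using the requests library (the image URL and local host/port below are placeholders, assuming the app is running locally on port 80):

import requests

# Hypothetical image URL; replace with a publicly reachable image
payload = {"url": "https://example.com/stapler.jpg"}

# Assumes the Flask app from above is listening on 127.0.0.1:80
resp = requests.post("http://127.0.0.1/image", json=payload)

for pred in resp.json()["predictions"]:
    print(f"{pred['tagName']}: {pred['probability']:.2f}%")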
import tensorflow.keras
from urllib.request import Request, urlopen
from PIL import Image, ImageOps
import numpy as np
import ssl


def predict_url(imageUrl):
    """
    Predicts the image at the given URL.
    """
    ssl._create_default_https_context = ssl._create_unverified_context
    # log_msg("Predicting from url: " + imageUrl)
    imgrequest = Request(imageUrl, headers={"User-Agent": "Mozilla/5.0"})
    with urlopen(imgrequest) as testImage:
        image = Image.open(testImage)
        return predict_image(image)


def predict_image(image):
    # code snippet from Teachable Machine start ----------------------------
    # Disable scientific notation for clarity
    np.set_printoptions(suppress=True)

    # Load the model
    model = tensorflow.keras.models.load_model('model.h5')

    # Create the array of the right shape to feed into the Keras model.
    # The 'length' or number of images you can put into the array is
    # determined by the first position in the shape tuple, in this case 1.
    data = np.ndarray(shape=(1, 224, 224, 3), dtype=np.float32)

    # Resize the image to 224x224 with the same strategy as in TM2:
    # resize the image to be at least 224x224 and then crop from the center
    size = (224, 224)
    image = ImageOps.fit(image, size, Image.ANTIALIAS)

    # Turn the image into a numpy array
    image_array = np.asarray(image)

    # Normalize the image
    normalized_image_array = (image_array.astype(np.float32) / 127.0) - 1

    # Load the image into the array
    data[0] = normalized_image_array

    # Run the inference
    predictions = model.predict(data)
    # code snippet from Teachable Machine end ------------------------------

    # Read the class labels that correspond to the model's outputs
    labels_filename = 'labels.txt'
    with open(labels_filename, 'rt') as lf:
        labels = [l.strip() for l in lf.readlines()]

    # Build the response, converting numpy floats to native Python floats
    # so the result can be serialized by jsonify
    result = []
    for p, label in zip(predictions[0], labels):
        result.append({
            'tagName': label,
            'probability': float(p) * 100
        })

    response = {
        'predictions': result
    }
    # log_msg("Results: " + str(response))
    return response
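For a quick local check of predict_image without going through the HTTP layer, a sketch like the following can be run from the app directory; test.jpg is a hypothetical local file, and model.h5 and labels.txt are assumed to be in the working directory:

from PIL import Image
from predict import predict_image

# Hypothetical local test image; convert to RGB so it matches the
# (1, 224, 224, 3) input array the model expects
image = Image.open('test.jpg').convert('RGB')

response = predict_image(image)

# Print the tag with the highest probability
best = max(response['predictions'], key=lambda p: p['probability'])
print(best['tagName'], round(best['probability'], 2))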
FROM python:3.7-slim
RUN pip install -U pip
RUN pip install --no-cache-dir numpy~=1.17.5 tensorflow~=2.4.0 flask~=1.1.2 pillow~=7.2.0
COPY app /app
# Expose the port
EXPOSE 80
# Set the working directory
WORKDIR /app
# Run the flask server for the endpoints
CMD python -u app.py
docker build -t codersudip/tmachineofficesupply:aarini .
docker run -p 127.0.0.1:80:80 -d codersudip/tmachineofficesupply:aarini
docker push codersudip/tmachineofficesupply:aarini
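After docker run, a quick smoke test against the mapped port confirms the container is serving before the image is used elsewhere; the root route simply returns the 'GET Methods are Not Allowed' string from app.py. A small sketch, assuming the port mapping from the docker run command above:

import requests

# The docker run command above maps 127.0.0.1:80 to port 80 in the container
resp = requests.get("http://127.0.0.1/")
print(resp.status_code, resp.text)  # expected: 200 GET Methods are Not Allowed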
export KUBECONFIG=kubeconfig.yml
kubectl create deployment --image=codersudip/tmachineofficesupply:aarini officesupplytm
kubectl set env deployment/officesupplytm DOMAIN=cluster
kubectl expose deployment officesupplytm --port=80 --name=officesupplytm