from flask import Flask, render_template
from flask_socketio import SocketIO, emit
import base64

from keras.models import load_model
from PIL import Image
import numpy as np
import cv2 


app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socket = SocketIO(app, async_mode="eventlet")
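# Note: async_mode="eventlet" assumes the eventlet package is installed;
# with this mode set explicitly, Flask-SocketIO will not fall back to another worker.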


# The following imports are leftovers from the interactive notebook version of this
# code; they are not used by the Flask routes or SocketIO handlers below.
from matplotlib import pyplot as plt  # inline plotting in notebooks
import pylab  # allows controlling figure size
pylab.rcParams['figure.figsize'] = (10.0, 8.0)  # figure size in the notebook

### Loading pretrained models ###
age_model = load_model('Copy of age_model_pretrained.h5')
gender_model = load_model('Copy of gender_model_pretrained.h5')
emotion_model = load_model('emotion_model_pretrained.h5')
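# These .h5 files are assumed to sit next to this script (or in the working
# directory); load_model() raises an error if a file is missing.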

# Labels for the age, gender and emotion predictions
age_ranges = ['1-2', '3-9', '10-20', '21-27', '28-45', '46-65', '66-116']
gender_ranges = ['male', 'female']
emotion_ranges = ['positive', 'negative', 'neutral']

def base64_to_image(base64_string):
    # Extract the base64 encoded binary data from the input string
    base64_data = base64_string.split(",")[1]
    # Decode the base64 data to bytes
    image_bytes = base64.b64decode(base64_data)
    # Convert the bytes to numpy array
    image_array = np.frombuffer(image_bytes, dtype=np.uint8)
    # Decode the numpy array as an image using OpenCV
    image = cv2.imdecode(image_array, cv2.IMREAD_COLOR)
    return image
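# Expected input is a data URL such as the one produced by canvas.toDataURL() in
# the browser (shown only as an illustration of the format):
#   "data:image/jpeg;base64,/9j/4AAQSkZJRg..."
# Note that cv2.imdecode returns None for invalid image bytes, so callers may want
# to check the result before using it.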

@socket.on("connect")
def test_connect():
    print("Connected")
    emit("my response", {"data": "Connected"})

@socket.on("image")
def receive_image(image):
    # Decode the base64-encoded image data
    image = base64_to_image(image)
    image = cv2.resize(image, (224, 224), interpolation=cv2.INTER_AREA)
    # emit("processed_image", image)
    # Convert the image to a numpy array and reshape it to the model's input shape.
    image = np.asarray(image, dtype=np.float32).reshape(1, 224, 224, 3)
    # Scale pixel values to the [-1, 1] range expected by the model.
    image = (image / 127.5) - 1
    # Run the gender model (the age and emotion models are loaded but not used here).
    # prediction1 = age_model.predict(image)
    prediction2 = gender_model.predict(image)
    # prediction3 = emotion_model.predict(image)

    index = np.argmax(prediction2)
    gender = gender_ranges[index]
    emit("result", {"gender": str(gender)})
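# Illustrative only: a minimal client sketch using the python-socketio package
# (an assumption, not a dependency of this app) that sends a JPEG as a data URL
# and prints the emitted result:
#
#   import socketio
#   sio = socketio.Client()
#
#   @sio.on("result")
#   def on_result(data):
#       print(data)  # e.g. {"gender": "male"}
#
#   sio.connect("http://localhost:7860")
#   with open("face.jpg", "rb") as f:  # "face.jpg" is a hypothetical test image
#       data_url = "data:image/jpeg;base64," + base64.b64encode(f.read()).decode()
#   sio.emit("image", data_url)
#   sio.wait()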

@app.route("/")
def home():
    return render_template("index.html")

if __name__ == '__main__':
    # app.run(debug=True)
    socket.run(app, host="0.0.0.0", port=7860)