import cv2
import dlib
import numpy as np
import joblib


def main_func():
    img_path = 'snap.jpg'  # path of the image to be analyzed
    font = cv2.FONT_HERSHEY_DUPLEX
    emotions = ["anger", "happy", "sadness"]  # emotion labels, indexed by the classifier output
    clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))  # CLAHE histogram-equalization object
    face_det = dlib.get_frontal_face_detector()
    land_pred = dlib.shape_predictor("data/DlibPredictor/shape_predictor_68_face_landmarks.dat")
    # Load the SVM model trained earlier from the path below.
    SUPPORT_VECTOR_MACHINE_clf2 = joblib.load('data/Trained_ML_Models/SVM_emo_model_7.pkl')

    pred_data = []
    cropped_path = crop_face(img_path)  # crop the face region and get the path of the cropped image
    img = cv2.imread(cropped_path)
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    clahe_gray = clahe.apply(gray)
    landmarks_vec = get_landmarks(clahe_gray, face_det, land_pred)
    # print(len(landmarks_vec))
    # print(landmarks_vec)

    if landmarks_vec == "error":
        return None  # no face was detected, so there is nothing to classify

    pred_data.append(landmarks_vec)
    np_test_data = np.array(pred_data)
    pred = SUPPORT_VECTOR_MACHINE_clf2.predict(np_test_data)

    # Optional visualization of the result:
    # cv2.putText(img, 'DETECTED FACIAL EXPRESSION : ', (8, 30), font, 0.7, (0, 0, 255), 2, cv2.LINE_AA)
    # cv2.putText(img, emotions[pred[0]].upper(), (150, 60), font, 1, (255, 0, 0), 2, cv2.LINE_AA)
    # cv2.imshow('test_image', img)
    # cv2.waitKey(0)
    # cv2.destroyAllWindows()
    # print(emotions[pred[0]])

    return emotions[pred[0]]
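
The function above relies on two helpers, crop_face and get_landmarks, that are defined elsewhere in the project. For reference, here is a minimal sketch of what they might look like, assuming crop_face uses OpenCV's bundled Haar cascade to save a cropped copy of the input image, and get_landmarks builds a per-landmark feature vector (offsets, distances, and angles relative to the face centre) from dlib's 68-point predictor. The actual helpers must produce exactly the feature layout the SVM was trained on, so treat this only as an illustration of the interface used by main_func.

import math
import cv2
import numpy as np


def crop_face(img_path):
    # Hypothetical helper: detect the largest face with OpenCV's Haar cascade,
    # save the cropped region next to the original, and return the new path.
    cascade = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_frontalface_default.xml")
    img = cv2.imread(img_path)
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    faces = cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
    if len(faces) == 0:
        return img_path  # fall back to the original image if no face is found
    x, y, w, h = max(faces, key=lambda f: f[2] * f[3])  # keep the largest detection
    out_path = "cropped_" + img_path
    cv2.imwrite(out_path, img[y:y + h, x:x + w])
    return out_path


def get_landmarks(image, detector, predictor):
    # Hypothetical helper: run dlib's detector and 68-point shape predictor on the
    # CLAHE-equalized grayscale image and flatten the landmarks into a feature vector.
    detections = detector(image, 1)
    for d in detections:
        shape = predictor(image, d)  # 68 facial landmark points
        xlist = [float(shape.part(i).x) for i in range(68)]
        ylist = [float(shape.part(i).y) for i in range(68)]
        xmean, ymean = np.mean(xlist), np.mean(ylist)
        landmarks_vectorised = []
        for x, y in zip(xlist, ylist):
            # Position relative to the face centre, plus distance and angle to it.
            landmarks_vectorised.append(x - xmean)
            landmarks_vectorised.append(y - ymean)
            landmarks_vectorised.append(np.linalg.norm([x - xmean, y - ymean]))
            landmarks_vectorised.append(math.degrees(math.atan2(y - ymean, x - xmean)))
        return landmarks_vectorised
    return "error"  # no face detected

With these helpers in place, the module can be exercised directly, for example with print(main_func()), which prints one of the three emotion labels for snap.jpg or None when no face is found.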