From a88088aeaf746ad3e4e6889e975dc6a50ad419bd Mon Sep 17 00:00:00 2001
From: Markus Willman <mpewil@utu.fi>
Date: Sat, 3 Mar 2018 16:48:16 +0200
Subject: [PATCH] Update detectMultiScale() arguments

---
 client/sbs_client.py                        | 10 +++++++++-
 contrib/src/web/emotion_gender_processor.py | 10 +++++++++-
 2 files changed, 18 insertions(+), 2 deletions(-)
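
Notes (not part of the commit): the patch replaces the positional call
model.detectMultiScale(gray, 1.3, 5) with a small detectFaces() helper that
spells out the arguments as keywords, so the client and the server-side
processor run with identical settings. Below is a minimal standalone sketch
of the same call for reference; the cascade file name and image path are
placeholders chosen for illustration, and the flags argument is left out
here (see the note after the patch).

    import cv2

    # Haar cascades ship with OpenCV; the exact path depends on the install.
    cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')

    frame = cv2.imread('example.jpg')
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    # Same keyword arguments as the new detectFaces() helper:
    #   scaleFactor=1.1  -> finer image pyramid than the old 1.3; slower,
    #                       but less likely to miss faces between scales
    #   minNeighbors=5   -> unchanged; overlapping detections required
    #                       before a candidate rectangle is kept
    #   minSize=(30, 30) -> ignore detections smaller than 30x30 pixels
    faces = cascade.detectMultiScale(gray,
        scaleFactor=1.1,
        minNeighbors=5,
        minSize=(30, 30))

    print('found {0} face(s)'.format(len(faces)))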

diff --git a/client/sbs_client.py b/client/sbs_client.py
index 9ba2bd4..6b477ab 100755
--- a/client/sbs_client.py
+++ b/client/sbs_client.py
@@ -63,7 +63,7 @@ def takePicture(cameraIndex, model):
             raise Exception('Unable to capture image')
 
         # matching serverside parameters, for comparable results
-        faces = model.detectMultiScale(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY), 1.3, 5)
+        faces = detectFaces(model, cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY))
 
         if len(faces) != 0:
             cv2.imwrite(TMP_IMAGE, frame)
@@ -81,6 +81,14 @@ def takePicture(cameraIndex, model):
 def logError(msg):
     logging.error('{0}:{1}'.format(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg))
 
+def detectFaces(model, image):
+    return model.detectMultiScale(image,
+        scaleFactor=1.1,
+        minNeighbors=5,
+        minSize=(30, 30),
+        flags=cv2.cv.CV_HAAR_SCALE_IMAGE
+    )
+
 def main(argv):
     if argv != None and (len(argv) <= 2 or not re.match("^[\w\d_-]+$", argv[1]) or not re.match("^[\d]+$", argv[2])):
         return False
diff --git a/contrib/src/web/emotion_gender_processor.py b/contrib/src/web/emotion_gender_processor.py
index e4c880f..20f291a 100644
--- a/contrib/src/web/emotion_gender_processor.py
+++ b/contrib/src/web/emotion_gender_processor.py
@@ -46,6 +46,14 @@ class EGProcessor:
     def isValid(self):
         return self.valid
 
+    def detectFaces(self, model, image):
+        return model.detectMultiScale(image,
+            scaleFactor=1.1,
+            minNeighbors=5,
+            minSize=(30, 30),
+            flags=cv2.cv.CV_HAAR_SCALE_IMAGE
+        )
+
     def processImage(self, image, result_fname = None, type = 'png', detect_emotion = True):
         result = []
 
@@ -68,7 +76,7 @@ class EGProcessor:
             gray_image = cv2.cvtColor(unchanged_image, cv2.COLOR_BGR2GRAY)
             gray_face = None
 
-            faces = detect_faces(self.face_detection, gray_image)
+            faces = self.detectFaces(self.face_detection, gray_image)
             for face_coordinates in faces:
                 x1, x2, y1, y2 = apply_offsets(face_coordinates, self.gender_offsets)
                 rgb_face = rgb_image[y1:y2, x1:x2]
-- 
GitLab
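
Note on the flags argument used in detectFaces(): cv2.cv.CV_HAAR_SCALE_IMAGE
is the OpenCV 2.x spelling; the cv2.cv submodule was removed in OpenCV 3,
where the corresponding constant is cv2.CASCADE_SCALE_IMAGE (and the flag is
only honored by legacy-format cascades anyway). If the deployment may run on
either major version, a fallback along these lines keeps the helper working
on both; detect_faces_compat and SCALE_IMAGE_FLAG are illustrative names,
not code from this repository:

    import cv2

    # Pick whichever scale-image constant the installed OpenCV provides:
    # OpenCV 3+: cv2.CASCADE_SCALE_IMAGE, OpenCV 2.x: cv2.cv.CV_HAAR_SCALE_IMAGE
    try:
        SCALE_IMAGE_FLAG = cv2.CASCADE_SCALE_IMAGE
    except AttributeError:
        SCALE_IMAGE_FLAG = cv2.cv.CV_HAAR_SCALE_IMAGE

    def detect_faces_compat(model, image):
        # Same parameters as the detectFaces() helpers in the patch,
        # with a version-agnostic flag.
        return model.detectMultiScale(image,
            scaleFactor=1.1,
            minNeighbors=5,
            minSize=(30, 30),
            flags=SCALE_IMAGE_FLAG)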