Commit a88088ae authored by Markus Willman

update detectMultiScale() arguments

parent 86a5bdea
@@ -63,7 +63,7 @@ def takePicture(cameraIndex, model):
         raise Exception('Unable to capture image')
     # matching serverside parameters, for comparable results
-    faces = model.detectMultiScale(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY), 1.3, 5)
+    faces = detectFaces(model, cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY))
     if len(faces) != 0:
         cv2.imwrite(TMP_IMAGE, frame)
@@ -81,6 +81,14 @@ def takePicture(cameraIndex, model):
 def logError(msg):
     logging.error('{0}:{1}'.format(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), msg))
+def detectFaces(model, image):
+    return model.detectMultiScale(image,
+        scaleFactor=1.1,
+        minNeighbors=5,
+        minSize=(30, 30),
+        flags=cv2.cv.CV_HAAR_SCALE_IMAGE
+    )
 def main(argv):
     if argv != None and (len(argv) <= 2 or not re.match("^[\w\d_-]+$", argv[1]) or not re.match("^[\d]+$", argv[2])):
         return False
...
@@ -46,6 +46,14 @@ class EGProcessor:
     def isValid(self):
         return self.valid
+    def detectFaces(self, model, image):
+        return model.detectMultiScale(image,
+            scaleFactor=1.1,
+            minNeighbors=5,
+            minSize=(30, 30),
+            flags=cv2.cv.CV_HAAR_SCALE_IMAGE
+        )
     def processImage(self, image, result_fname = None, type = 'png', detect_emotion = True):
         result = []
@@ -68,7 +76,7 @@ class EGProcessor:
         gray_image = cv2.cvtColor(unchanged_image, cv2.COLOR_BGR2GRAY)
         gray_face = None
-        faces = detect_faces(self.face_detection, gray_image)
+        faces = self.detectFaces(self.face_detection, gray_image)
         for face_coordinates in faces:
            x1, x2, y1, y2 = apply_offsets(face_coordinates, self.gender_offsets)
            rgb_face = rgb_image[y1:y2, x1:x2]
...
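For context, here is a self-contained sketch of the shared face-detection helper this commit factors out on both the client (takePicture) and server (EGProcessor) side, so both ends call detectMultiScale() with identical parameters. It assumes OpenCV 3+, where the flag constant is cv2.CASCADE_SCALE_IMAGE rather than the OpenCV 2.x cv2.cv.CV_HAAR_SCALE_IMAGE used in the diff; the cascade file and the sample.jpg input are illustrative stand-ins, not taken from this repository.

import cv2

def detectFaces(model, image):
    # Same parameters as the committed helper: scale the image pyramid in 10% steps,
    # accept a detection only if at least 5 neighbouring windows agree, and ignore
    # candidate faces smaller than 30x30 pixels.
    return model.detectMultiScale(image,
        scaleFactor=1.1,
        minNeighbors=5,
        minSize=(30, 30),
        flags=cv2.CASCADE_SCALE_IMAGE)  # OpenCV 3+ name for CV_HAAR_SCALE_IMAGE

if __name__ == '__main__':
    # Hypothetical usage: load a frontal-face cascade bundled with opencv-python
    # and run the helper on a grayscale frame, as takePicture()/processImage() do.
    cascade = cv2.CascadeClassifier(
        cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
    frame = cv2.imread('sample.jpg')  # placeholder image path
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    print(detectFaces(cascade, gray))  # array of (x, y, w, h) bounding boxes

Centralising these values in one helper is what the "matching serverside parameters, for comparable results" comment in takePicture() asks for: the client-side capture check and the server-side processor accept or reject the same faces. Note that the old positional call model.detectMultiScale(gray, 1.3, 5) used scaleFactor=1.3, so besides switching to keyword arguments and adding minSize and the flag, the commit also tightens the scale step from 1.3 to 1.1.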