status etc

This commit is contained in:
Ruben 2017-11-20 16:39:28 +01:00
parent baa306763e
commit 7445907e56
1 changed file with 145 additions and 24 deletions


@@ -2,10 +2,17 @@ import os
from PIL import Image, ImageDraw
import argparse
import json
import time
import glob
import numpy as np
parser = argparse.ArgumentParser(description='Parses opencv-webcam-demo json output files and collects statistics')
parser.add_argument('--frameOutput', '-o', required=True, help='directory to look for frames & json')
parser.add_argument('--status', '-s', action='store_true', help='Keep status of last frame')
parser.add_argument('--cutAllFaces', action='store_true', help='Cut out all faces from all frames')
parser.add_argument('--sum', action='store_true', help='Get total scores over all time')
parser.add_argument('--disonant', action='store_true', help='Get most dissonant faces over time')
args = parser.parse_args()
@@ -15,11 +22,41 @@ class Face:
self.id = data['id']
self.frame = frame # Frame class
self.data = data # json data
self.disonanceScore = None
self.disonanceScore = None # a first attempt, can be deprecated?
self.anomalyScore = None
def getFaceImg(self):
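# Crop this face's bounding box ('rect' in the JSON) out of the frame image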
r = self.data['rect']
return self.frame.getImg().crop((int(r['x']), int(r['y']), int(r['x']+r['w']), int(r['y']+r['h'])))
def getCharacteristicVector(self):
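# Collect the per-face expression metrics from the JSON into a numpy vector (used for anomaly scoring)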
self.vector = np.array([
self.data["smile"],
self.data["innerBrowRaise"],
self.data["browRaise"],
self.data["browFurrow"],
self.data["noseWrinkle"],
self.data["upperLipRaise"],
self.data["lipCornerDepressor"],
self.data["chinRaise"],
self.data["lipPucker"],
self.data["lipPress"],
self.data["lipSuck"],
self.data["mouthOpen"],
self.data["smirk"],
self.data["eyeClosure"],
# self.data["attention"],
self.data["eyeWiden"],
self.data["cheekRaise"],
self.data["lidTighten"],
self.data["dimpler"],
self.data["lipStretch"],
self.data["jawDrop"],
])
return self.vector
def setAnomalyScore(self, score):
self.anomalyScore = score
class Frame:
"""
@@ -70,7 +107,16 @@ class Frame:
for face in self.getFaces():
face.disonanceScore = abs(face.data['valence'] - avgValence)
def getAverageV(self):
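# Average the characteristic vectors of all faces in this frame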
vectors = [face.getCharacteristicVector() for face in self.getFaces()]
vAvg = np.mean(vectors, axis=0)
return vAvg
def updateAnomalyScores(self):
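# Score each face by the Euclidean distance between its vector and the frame average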
vAvg = self.getAverageV()
for face in self.getFaces():
face.setAnomalyScore(np.linalg.norm(face.getCharacteristicVector() - vAvg))
def exists(self):
@@ -89,6 +135,15 @@ def loadFrames(frameDir):
nextFrame = Frame(frameDir, nr)
return frames
def getLastFrame(frameDir):
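# Return the most recent frame, i.e. the numerically last *.json file in frameDir, or None if there is none yet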
jsons = sorted(glob.glob(os.path.join(frameDir, "*.json")))
if len(jsons):
lastJson = jsons[-1]
lastNr = int(lastJson[-11:-5])
frame = Frame(frameDir, lastNr)
return frame
return None
def cutOutFaces(frame, targetDir):
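# Save a cropped image of every face in the frame to targetDir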
for faceNr, face in enumerate(frame.getFaces()):
print(faceNr, face)
@@ -98,26 +153,20 @@ def cutOutFaces(frame, targetDir):
img.save(faceImgPath)
pass
frames = loadFrames(args.frameOutput)
lastTime = None
for frameNr, frame in frames.items():
thisTime = frame.getJson()['t']
#print(frameNr, thisTime)
if lastTime is not None and lastTime > thisTime:
print("ERROR!! Time error at %s. Restarted scanner there?" % frameNr)
lastTime = thisTime
faceDir = os.path.join(args.frameOutput, 'faces')
if not os.path.exists(faceDir):
os.mkdir(faceDir)
def validateJsonTimes():
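# Sanity check: the 't' timestamps in the frame JSON should increase monotonically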
lastTime = None
for frameNr, frame in loadFrames(args.frameOutput).items():
thisTime = frame.getJson()['t']
#print(frameNr, thisTime)
if lastTime is not None and lastTime > thisTime:
print("ERROR!! Time error at %s. Restarted scanner there?" % frameNr)
lastTime = thisTime
def sumEmotions():
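# Sum the absolute and signed valence of every face in every frame and report the totals and average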
total = 0.
summed = 0.
items = 0
for frameNr, frame in frames.items():
for frameNr, frame in loadFrames(args.frameOutput).items():
for face in frame.getFaces():
total += abs(face.data['valence'])
summed += face.data['valence']
@@ -127,17 +176,89 @@ def sumEmotions():
print ("Total emotion %d, positivity score %d (average: %s)" % (total, summed, average))
def getMostDisonant(nr = 5):
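# Rank all faces by how far their valence deviates from their frame's average and show the top nr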
for frameNr, frame in frames.items():
faces = []
for frameNr, frame in loadFrames(args.frameOutput).items():
frame.updateDisonanceScores()
faces.extend(frame.getFaces())  # assumed: collect the faces so the sort below has something to rank
faces.sort(key=lambda x: x.disonanceScore, reverse=True)
mostDisonantFaces = faces[:5]
mostDisonantFaces = faces[:nr]
for face in mostDisonantFaces:
print("Frame %d, face %d, score %d, valence %d" % (face.frame.nr, face.id, face.disonanceScore, face.data['valence']))
face.getFaceImg().show()
def getAnomalies(nr = 5):
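# Rank all faces by the distance of their expression vector from their frame's average and show the top nr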
faces = []
for frameNr, frame in loadFrames(args.frameOutput).items():
frame.updateAnomalyScores()
faces.extend(frame.getFaces())  # assumed: collect the faces so the sort below has something to rank
faces.sort(key=lambda x: x.anomalyScore, reverse=True)
sumEmotions()
getMostDisonant()
#~ for frameNr, frame in frames.items():
#~ cutOutFaces(frame, faceDir)
anomalies = faces[:nr]
for face in anomalies:
print("Frame %d, face %d, score %d" % (face.frame.nr, face.id, face.anomalyScore))
#~ getCharacteristicVector
face.getFaceImg().show()
def printFrameStats(frame):
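# Clear the terminal and print a summary of one frame: face count plus a percentile table for the selected metrics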
os.system('clear')
print(time.time())
print( ("Nr: %d" % frame.nr).ljust(40) + ("t: %f" % frame.getJson()['t']) )
#~ print
faces = frame.getFaces()
print("Faces: %d" % len(faces))
if len(faces) < 1:
return
params = ['smile', 'browFurrow']
q0s = [np.percentile(np.array([f.data[param] for f in faces]),0) for param in params]
q1s = [np.percentile(np.array([f.data[param] for f in faces]),25) for param in params]
q2s = [np.percentile(np.array([f.data[param] for f in faces]),50) for param in params]
q3s = [np.percentile(np.array([f.data[param] for f in faces]),75) for param in params]
q4s = [np.percentile(np.array([f.data[param] for f in faces]),100) for param in params]
print " ".ljust(8),
for p in params:
print p.center(20),
print ""
print(" 0% " + "".join([("%f%%" % q).rjust(20) for q in q0s]))
print(" q1 " + "".join([("%f%%" % q).rjust(20) for q in q1s]))
print(" median " + "".join([("%f%%" % q).rjust(20) for q in q2s]))
print(" q3 " + "".join([("%f%%" % q).rjust(20) for q in q3s]))
print(" 100% " + "".join([("%f%%" % q).rjust(20) for q in q4s]))
#~ TODO: speaker stats
frame.updateDisonanceScores()
dissonantFace = max(faces,key=lambda f: f.disonanceScore)
#~ dissonantFace.getFaceImg()
def monitorStatus(frameDir):
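# Poll frameDir for the newest frame and keep printing its stats (used by --status)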
while True:
frame = getLastFrame(frameDir)
if frame is not None:
printFrameStats(frame)
# don't check too often
time.sleep(.5)
validateJsonTimes()
if args.sum:
sumEmotions()
if args.disonant:
getMostDisonant()
if args.cutAllFaces:
faceDir = os.path.join(args.frameOutput, 'faces')
if not os.path.exists(faceDir):
os.mkdir(faceDir)
for frameNr, frame in loadFrames(args.frameOutput).items():
cutOutFaces(frame, faceDir)
if args.status:
monitorStatus(args.frameOutput)