From 6007282ab324e340c53b90a03aa23aeece58416f Mon Sep 17 00:00:00 2001
From: Ruben van de Ven
Date: Wed, 6 Feb 2019 01:31:42 +0100
Subject: [PATCH] No blur but use image

---
 head_pose.py | 34 ++++++++++++++++++++++++++--------
 spot.png     | Bin 0 -> 1936 bytes
 2 files changed, 26 insertions(+), 8 deletions(-)
 create mode 100644 spot.png

diff --git a/head_pose.py b/head_pose.py
index c59d01a..a5f827d 100644
--- a/head_pose.py
+++ b/head_pose.py
@@ -27,6 +27,9 @@ import coloredlogs
 import argparse
 import multiprocessing
 
+
+cur_dir = os.path.dirname(__file__)
+
 argParser = argparse.ArgumentParser(description='Draw a heatmap')
 argParser.add_argument(
     '--camera',
@@ -85,8 +88,13 @@ logger = logging.getLogger(__name__)
 
 # im = cv2.imread("headPose.jpg");
 
+spotSize = (100,100)
+spot = Image.open(os.path.join(cur_dir,"spot.png")).convert('L')
+spot = spot.resize(spotSize)
+spot = np.array(spot)
 
-predictor_path = "shape_predictor_68_face_landmarks.dat"
+
+predictor_path = os.path.join(cur_dir,"shape_predictor_68_face_landmarks.dat")
 
 if args.output_dir:
     lastMetricsFilename = os.path.join(args.output_dir, 'last_metrics.p')
@@ -425,7 +433,8 @@ def captureFacesPoints(i):
 
         # TODO only draw nose line now, so we can change color depending whether on screen or not
 
-        results = {'currentPoint': currentPoint, 'currentPoints': currentPoints, 'im': im}
+        results = {'currentPoint': currentPoint, 'currentPoints': currentPoints}
+        results['im'] = im if not args.hide_preview else None
 
         try:
             pointsQueue.put_nowait(results)
@@ -505,15 +514,24 @@ while True:
                 logger.info("Looking at {} {}".format(point, targetPoint) )
                 # cv2.circle(im, (int(targetPoint[0]), int(targetPoint[1])), 2, (0,255,0), -1)
                 # from 1920x1080 to 80x50
-                miniTargetPoint = (int(targetPoint[0] / 1920 * 80 + 10), int(targetPoint[1] / 1080 * 50 + 60))
-                cv2.circle(im, miniTargetPoint, 2, (0,255,0), -1)
+                if not args.hide_preview:
+                    miniTargetPoint = (int(targetPoint[0] / 1920 * 80 + 10), int(targetPoint[1] / 1080 * 50 + 60))
+                    cv2.circle(im, miniTargetPoint, 2, (0,255,0), -1)
                 targetInt = (int(targetPoint[0]), int(targetPoint[1]))
                 # check if point fits on screen:
                 # if so, measure it
-                if targetInt[0] >= 0 and targetInt[1] >= 0 and targetInt[0] < metricsSize[0] and targetInt[1] < metricsSize[1]:
+                if targetInt[0]+spotSize[0] >= 0 and targetInt[1]+spotSize[1] >= 0 and targetInt[0]-spotSize[0] < metricsSize[0] and targetInt[1]-spotSize[0] < metricsSize[1]:
                     dataframe = dataframe.append({'x':targetInt[0],'y':targetInt[1]}, ignore_index=True)
-                    logger.debug("Put metric {},{} in metrix of {},{}".format(targetInt[1],targetInt[0], metricsSize[1], metricsSize[0]))
-                    newMetrics[targetInt[1],targetInt[0]] += 1
+                    logger.info("Put metric {},{} in metrix of {},{}".format(targetInt[1],targetInt[0], metricsSize[1], metricsSize[0]))
+                    for sx in range(spotSize[0]):
+                        for sy in range(spotSize[1]):
+                            mx = targetInt[0] + sx - (spotSize[0]-1)/2
+                            my = targetInt[1] + sy - (spotSize[1]-1)/2
+
+                            if mx >= 0 and my >= 0 and mx < metricsSize[0] and my < metricsSize[1]:
+                                newMetrics[my,mx] += spot[sx,sy] #/ 20
+                    print("MAX",np.max(newMetrics))
+    # TODO: put in an image of a blurred spot & remove blur action
 
 
     # after we collected all new metrics, blur them foor smoothness
@@ -538,7 +556,7 @@ while True:
     # update the heatmap output
     tm21 = time.time()
     # smooth impact of first hits by having at least 0.05
-    normalisedMetrics = metrics / (max(.02, np.max(metrics)))
+    normalisedMetrics = metrics / (max(255*4 ,np.max(metrics)))
     # convert to colormap, thanks to: https://stackoverflow.com/a/10967471
     normalisedMetricsColored = np.uint8(cm.nipy_spectral(normalisedMetrics)*255)
     normalisedMetricsColoredBGR = cv2.cvtColor(normalisedMetricsColored, cv2.COLOR_RGB2BGR)
diff --git a/spot.png b/spot.png
new file mode 100644
index 0000000000000000000000000000000000000000..9360ed0a3b7228c0cbfd7d8067541a778c559baf
GIT binary patch
literal 1936
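
Note on the accumulation step this patch introduces, as a minimal standalone sketch rather than code taken from head_pose.py: instead of incrementing a single heatmap cell and blurring afterwards, a pre-rendered grayscale spot image is added around the gaze point and clipped at the borders of the metrics array. The names (stamp_spot, metrics) and the 1920x1080 metrics size are illustrative assumptions; the nested sx/sy loops from the patch are replaced here by a vectorised slice addition with integer offsets.

import numpy as np
from PIL import Image

def stamp_spot(metrics, spot, cx, cy):
    """Add a grayscale spot onto metrics, centred on (cx, cy), clipped at the borders."""
    h, w = metrics.shape
    sh, sw = spot.shape
    # top-left corner of the spot in metrics coordinates
    x0, y0 = cx - sw // 2, cy - sh // 2
    # destination window, clipped to the metrics array
    mx0, my0 = max(x0, 0), max(y0, 0)
    mx1, my1 = min(x0 + sw, w), min(y0 + sh, h)
    if mx0 >= mx1 or my0 >= my1:
        return  # the spot falls completely outside the heatmap
    # matching window inside the spot image
    sx0, sy0 = mx0 - x0, my0 - y0
    metrics[my0:my1, mx0:mx1] += spot[sy0:sy0 + (my1 - my0), sx0:sx0 + (mx1 - mx0)]

# usage with values mirroring the patch (sizes are assumptions)
metricsSize = (1920, 1080)            # (width, height) of the heatmap
metrics = np.zeros((metricsSize[1], metricsSize[0]), dtype=np.float64)
spot = np.array(Image.open("spot.png").convert('L').resize((100, 100)), dtype=np.float64)
stamp_spot(metrics, spot, 40, 1050)   # gaze point near the lower-left corner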
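The other change caps the normalisation denominator at 255*4 instead of 0.02, so the first few stamped spots no longer saturate the colour scale: with an 8-bit spot whose centre is white, roughly four overlapping hits are needed before the hottest cell reaches the top of the colormap. A sketch of that normalise-and-colour step under the same assumptions (matplotlib colormaps and OpenCV as in the surrounding code; the function name is illustrative):

import numpy as np
import cv2
from matplotlib import cm

def render_heatmap(metrics):
    """Scale accumulated metrics to [0, 1] and map them to an 8-bit BGR image."""
    # the 255*4 floor keeps a freshly started heatmap from lighting up the full range
    normalised = metrics / max(255 * 4, np.max(metrics))
    # nipy_spectral returns RGBA floats in [0, 1]; keep RGB and scale to 8-bit
    rgb = np.uint8(cm.nipy_spectral(normalised)[:, :, :3] * 255)
    return cv2.cvtColor(rgb, cv2.COLOR_RGB2BGR)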