Adding gaze angle to features and to visualization.

Tadas Baltrusaitis 2016-12-02 14:21:24 -05:00
parent 2854b86733
commit 984cfb58e7
8 changed files with 76 additions and 36 deletions

View file

@@ -231,7 +231,7 @@ void prepareOutputFile(std::ofstream* output_file, bool output_2D_landmarks, boo
void outputAllFeatures(std::ofstream* output_file, bool output_2D_landmarks, bool output_3D_landmarks,
bool output_model_params, bool output_pose, bool output_AUs, bool output_gaze,
const LandmarkDetector::CLNF& face_model, int frame_count, double time_stamp, bool detection_success,
cv::Point3f gazeDirection0, cv::Point3f gazeDirection1, const cv::Vec6d& pose_estimate, double fx, double fy, double cx, double cy,
cv::Point3f gazeDirection0, cv::Point3f gazeDirection1, cv::Vec2d gaze_angle, const cv::Vec6d& pose_estimate, double fx, double fy, double cx, double cy,
const FaceAnalysis::FaceAnalyser& face_analyser);
void post_process_output_file(FaceAnalysis::FaceAnalyser& face_analyser, string output_file, bool dynamic);
@@ -556,15 +556,20 @@ int main (int argc, char **argv)
{
detection_success = LandmarkDetector::DetectLandmarksInImage(grayscale_image, face_model, det_parameters);
}
// Work out the pose of the head from the tracked model
cv::Vec6d pose_estimate = LandmarkDetector::GetPose(face_model, fx, fy, cx, cy);
// Gaze tracking, absolute gaze direction
cv::Point3f gazeDirection0(0, 0, -1);
cv::Point3f gazeDirection1(0, 0, -1);
cv::Vec2d gazeAngle(0, 0);
if (det_parameters.track_gaze && detection_success && face_model.eye_model)
{
FaceAnalysis::EstimateGaze(face_model, gazeDirection0, fx, fy, cx, cy, true);
FaceAnalysis::EstimateGaze(face_model, gazeDirection1, fx, fy, cx, cy, false);
gazeAngle = FaceAnalysis::GetGazeAngle(gazeDirection0, gazeDirection1, pose_estimate);
}
// Do face alignment
@@ -594,8 +599,6 @@ int main (int argc, char **argv)
}
}
// Work out the pose of the head from the tracked model
cv::Vec6d pose_estimate = LandmarkDetector::GetPose(face_model, fx, fy, cx, cy);
if(hog_output_file.is_open())
{
@@ -636,7 +639,7 @@ int main (int argc, char **argv)
// Output the landmarks, pose, gaze, parameters and AUs
outputAllFeatures(&output_file, output_2D_landmarks, output_3D_landmarks, output_model_params, output_pose, output_AUs, output_gaze,
face_model, frame_count, time_stamp, detection_success, gazeDirection0, gazeDirection1,
face_model, frame_count, time_stamp, detection_success, gazeDirection0, gazeDirection1, gazeAngle,
pose_estimate, fx, fy, cx, cy, face_analyser);
// output the tracked video
@@ -846,7 +849,7 @@ void prepareOutputFile(std::ofstream* output_file, bool output_2D_landmarks, boo
if (output_gaze)
{
*output_file << ", gaze_0_x, gaze_0_y, gaze_0_z, gaze_1_x, gaze_1_y, gaze_1_z";
*output_file << ", gaze_0_x, gaze_0_y, gaze_0_z, gaze_1_x, gaze_1_y, gaze_1_z, gaze_angle_x, gaze_angle_y";
}
if (output_pose)
@@ -915,7 +918,7 @@ void prepareOutputFile(std::ofstream* output_file, bool output_2D_landmarks, boo
void outputAllFeatures(std::ofstream* output_file, bool output_2D_landmarks, bool output_3D_landmarks,
bool output_model_params, bool output_pose, bool output_AUs, bool output_gaze,
const LandmarkDetector::CLNF& face_model, int frame_count, double time_stamp, bool detection_success,
cv::Point3f gazeDirection0, cv::Point3f gazeDirection1, const cv::Vec6d& pose_estimate, double fx, double fy, double cx, double cy,
cv::Point3f gazeDirection0, cv::Point3f gazeDirection1, cv::Vec2d gaze_angle, const cv::Vec6d& pose_estimate, double fx, double fy, double cx, double cy,
const FaceAnalysis::FaceAnalyser& face_analyser)
{
@@ -933,7 +936,7 @@ void outputAllFeatures(std::ofstream* output_file, bool output_2D_landmarks, boo
if (output_gaze)
{
*output_file << ", " << gazeDirection0.x << ", " << gazeDirection0.y << ", " << gazeDirection0.z
<< ", " << gazeDirection1.x << ", " << gazeDirection1.y << ", " << gazeDirection1.z;
<< ", " << gazeDirection1.x << ", " << gazeDirection1.y << ", " << gazeDirection1.z
<< ", " << gaze_angle[0] << ", " << gaze_angle[1];
}
*output_file << std::setprecision(4);

View file

@@ -260,7 +260,7 @@ namespace OpenFaceDemo
List<double> pose = new List<double>();
clnf_model.GetCorrectedPoseCamera(pose, fx, fy, cx, cy);
clnf_model.GetPose(pose, fx, fy, cx, cy);
List<double> non_rigid_params = clnf_model.GetNonRigidParams();
double scale = clnf_model.GetRigidParams()[0];

View file

@@ -492,7 +492,7 @@ namespace OpenFaceOffline
confidence = 1;
List<double> pose = new List<double>();
clnf_model.GetCorrectedPoseWorld(pose, fx, fy, cx, cy);
clnf_model.GetPose(pose, fx, fy, cx, cy);
List<double> non_rigid_params = clnf_model.GetNonRigidParams();
// The face analysis step (only done if recording AUs, HOGs or video)
@@ -505,6 +505,7 @@ namespace OpenFaceOffline
List<Tuple<double, double>> landmarks = null;
List<Tuple<double, double>> eye_landmarks = null;
List<Tuple<Point, Point>> gaze_lines = null;
Tuple<double, double> gaze_angle = new Tuple<double, double>(0,0);
if (detectionSucceeding)
{
@@ -512,6 +513,7 @@ namespace OpenFaceOffline
eye_landmarks = clnf_model.CalculateEyeLandmarks();
lines = clnf_model.CalculateBox((float)fx, (float)fy, (float)cx, (float)cy);
gaze_lines = face_analyser.CalculateGazeLines(scale, (float)fx, (float)fy, (float)cx, (float)cy);
gaze_angle = face_analyser.GetGazeAngle();
}
// Visualisation
@@ -554,26 +556,8 @@ namespace OpenFaceOffline
nonRigidGraph.Update(non_rigid_params);
// Update eye gaze
var gaze_both = face_analyser.GetGazeCamera();
double x = (gaze_both.Item1.Item1 + gaze_both.Item2.Item1) / 2.0;
double y = (gaze_both.Item1.Item2 + gaze_both.Item2.Item2) / 2.0;
// Tweak it to a more presentable value
x = (int)(x * 35);
y = (int)(y * 70);
if (x < -10)
x = -10;
if (x > 10)
x = 10;
if (y < -10)
y = -10;
if (y > 10)
y = 10;
GazeXLabel.Content = x / 10.0;
GazeYLabel.Content = y / 10.0;
GazeXLabel.Content = gaze_angle.Item1 * (180.0 / Math.PI);
GazeYLabel.Content = gaze_angle.Item2 * (180.0 / Math.PI);
}
@@ -849,16 +833,17 @@ namespace OpenFaceOffline
double confidence = (-clnf_model.GetConfidence()) / 2.0 + 0.5;
List<double> pose = new List<double>();
clnf_model.GetCorrectedPoseWorld(pose, fx, fy, cx, cy);
clnf_model.GetPose(pose, fx, fy, cx, cy);
output_features_file.Write(String.Format("{0}, {1}, {2:F3}, {3}", frame_ind, time_stamp, confidence, success ? 1 : 0));
if (output_gaze)
{
var gaze = face_analyser.GetGazeCamera();
var gaze_angle = face_analyser.GetGazeAngle();
output_features_file.Write(String.Format(", {0:F5}, {1:F5}, {2:F5}, {3:F5}, {4:F5}, {5:F5}", gaze.Item1.Item1, gaze.Item1.Item2, gaze.Item1.Item3,
gaze.Item2.Item1, gaze.Item2.Item2, gaze.Item2.Item3));
output_features_file.Write(String.Format(", {0:F5}, {1:F5}, {2:F5}, {3:F5}, {4:F5}, {5:F5}, {6:F5}, {7:F5}", gaze.Item1.Item1, gaze.Item1.Item2, gaze.Item1.Item3,
gaze.Item2.Item1, gaze.Item2.Item2, gaze.Item2.Item3, gaze_angle.Item1, gaze_angle.Item2));
}
if (output_pose)

View file

@@ -126,7 +126,8 @@ private:
// Absolute gaze direction
cv::Point3f* gazeDirection0;
cv::Point3f* gazeDirection1;
cv::Vec2d* gazeAngle;
cv::Point3f* pupil_left;
cv::Point3f* pupil_right;
@@ -174,6 +175,7 @@ public:
gazeDirection0 = new cv::Point3f();
gazeDirection1 = new cv::Point3f();
gazeAngle = new cv::Vec2d();
pupil_left = new cv::Point3f();
pupil_right = new cv::Point3f();
@@ -293,6 +295,13 @@ public:
FaceAnalysis::EstimateGaze(*clnf->getCLNF(), *gazeDirection0, fx, fy, cx, cy, true);
FaceAnalysis::EstimateGaze(*clnf->getCLNF(), *gazeDirection1, fx, fy, cx, cy, false);
// Estimate the gaze angle WRT head pose here
System::Collections::Generic::List<double>^ pose_list = gcnew System::Collections::Generic::List<double>();
clnf->GetPose(pose_list, fx, fy, cx, cy);
cv::Vec6d pose(pose_list[0], pose_list[1], pose_list[2], pose_list[3], pose_list[4], pose_list[5]);
*gazeAngle = FaceAnalysis::GetGazeAngle(*gazeDirection0, *gazeDirection1, pose);
// Grab pupil locations
int part_left = -1;
int part_right = -1;
@@ -327,6 +336,12 @@ public:
}
System::Tuple<double, double>^ GetGazeAngle()
{
auto gaze_angle = gcnew System::Tuple<double, double>((*gazeAngle)[0], (*gazeAngle)[1]);
return gaze_angle;
}
System::Collections::Generic::List<System::Tuple<System::Windows::Point, System::Windows::Point>^>^ CalculateGazeLines(double scale, float fx, float fy, float cx, float cy)
{
@@ -453,6 +468,7 @@ public:
delete gazeDirection0;
delete gazeDirection1;
delete gazeAngle;
delete pupil_left;
delete pupil_right;

View file

@@ -262,7 +262,7 @@ namespace CppInterop {
return all_landmarks;
}
void GetCorrectedPoseCamera(System::Collections::Generic::List<double>^ pose, double fx, double fy, double cx, double cy) {
void GetPoseWRTCamera(System::Collections::Generic::List<double>^ pose, double fx, double fy, double cx, double cy) {
auto pose_vec = ::LandmarkDetector::GetPoseWRTCamera(*clnf, fx, fy, cx, cy);
pose->Clear();
for(int i = 0; i < 6; ++i)
@@ -271,7 +271,7 @@ namespace CppInterop {
}
}
void GetCorrectedPoseWorld(System::Collections::Generic::List<double>^ pose, double fx, double fy, double cx, double cy) {
void GetPose(System::Collections::Generic::List<double>^ pose, double fx, double fy, double cx, double cy) {
auto pose_vec = ::LandmarkDetector::GetPose(*clnf, fx, fy, cx, cy);
pose->Clear();
for(int i = 0; i < 6; ++i)

View file

@@ -68,6 +68,9 @@ namespace FaceAnalysis
void EstimateGaze(const LandmarkDetector::CLNF& clnf_model, cv::Point3f& gaze_absolute, float fx, float fy, float cx, float cy, bool left_eye);
void DrawGaze(cv::Mat img, const LandmarkDetector::CLNF& clnf_model, cv::Point3f gazeVecAxisLeft, cv::Point3f gazeVecAxisRight, float fx, float fy, float cx, float cy);
// Getting the gaze angle in radians with respect to head pose (need to call EstimateGaze first)
cv::Vec2d GetGazeAngle(cv::Point3f& gaze_vector_1, cv::Point3f& gaze_vector_2, cv::Vec6d head_pose);
// Some utilities
cv::Point3f GetPupilPosition(cv::Mat_<double> eyeLdmks3d);

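For reference, a minimal sketch of the intended call order for the new function, assuming a tracked CLNF model (face_model) and camera intrinsics fx, fy, cx, cy are in scope, as in FeatureExtraction.cpp; GetGazeAngle only returns a meaningful value once EstimateGaze has been run for both eyes:

// Neutral gaze directions, also used as the fallback when detection fails
cv::Point3f gazeDirection0(0, 0, -1);
cv::Point3f gazeDirection1(0, 0, -1);

// Estimate the absolute gaze direction of each eye first
FaceAnalysis::EstimateGaze(face_model, gazeDirection0, fx, fy, cx, cy, true);  // left eye
FaceAnalysis::EstimateGaze(face_model, gazeDirection1, fx, fy, cx, cy, false); // right eye

// Head pose is needed so the angle can be expressed relative to the head
cv::Vec6d pose_estimate = LandmarkDetector::GetPose(face_model, fx, fy, cx, cy);
cv::Vec2d gaze_angle = FaceAnalysis::GetGazeAngle(gazeDirection0, gazeDirection1, pose_estimate);
// gaze_angle[0] is the left-right angle, gaze_angle[1] the up-down angle, both in radians
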
View file

@@ -157,7 +157,23 @@ void FaceAnalysis::EstimateGaze(const LandmarkDetector::CLNF& clnf_model, cv::Po
gaze_absolute = gazeVecAxis / norm(gazeVecAxis);
}
cv::Vec2d FaceAnalysis::GetGazeAngle(cv::Point3f& gaze_vector_1, cv::Point3f& gaze_vector_2, cv::Vec6d head_pose)
{
cv::Vec3d eulerAngles(head_pose(3), head_pose(4), head_pose(5));
cv::Matx33d rotMat = LandmarkDetector::Euler2RotationMatrix(eulerAngles);
cv::Point3f gaze_point = (gaze_vector_1 + gaze_vector_2) / 2;
double gaze_diff = acos(gaze_vector_1.dot(gaze_vector_2));
cv::Vec3d gaze(gaze_point.x, gaze_point.y, gaze_point.z);
gaze = rotMat * gaze;
double x_angle = atan2(gaze[0], -gaze[2]);
double y_angle = atan2(gaze[1], -gaze[2]);
return cv::Vec2d(x_angle, y_angle);
}
void FaceAnalysis::DrawGaze(cv::Mat img, const LandmarkDetector::CLNF& clnf_model, cv::Point3f gazeVecAxisLeft, cv::Point3f gazeVecAxisRight, float fx, float fy, float cx, float cy)
{

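The implementation above averages the two eye vectors, rotates the mean by the head rotation (the last three elements of the pose vector, via Euler2RotationMatrix), and reads the two angles off with atan2. A self-contained sketch of just that angle convention, using only OpenCV core and assuming zero head rotation (identity rotation matrix), so none of the OpenFace headers are needed:

#include <opencv2/core.hpp>
#include <cmath>
#include <cstdio>

int main()
{
    // Averaged gaze vector of the two eyes; (0, 0, -1) is the neutral
    // direction that FeatureExtraction.cpp uses as its default.
    cv::Vec3d gaze(0.0, 0.0, -1.0);

    // With zero head rotation the rotation matrix is the identity, so the
    // vector is left unchanged; otherwise it would be rotated here first.
    double x_angle = std::atan2(gaze[0], -gaze[2]); // left-right angle, radians
    double y_angle = std::atan2(gaze[1], -gaze[2]); // up-down angle, radians

    // Prints 0, 0 for a straight-ahead gaze; a gaze drifting towards +x
    // would give a positive x_angle.
    std::printf("gaze_angle_x = %f, gaze_angle_y = %f\n", x_angle, y_angle);
    return 0;
}
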
View file

@@ -43,12 +43,28 @@ gaze = dlmread([filename, '_gaze.txt'], ',', 1, 0);
valid_frames = gaze(:,4);
% only picking left, right and up down views for visualisation
% These are gaze angles with respect to head pose
gaze_angle = gaze(:,[11,12]);
figure;
plot(smooth(gaze_angle(:,1)), 'DisplayName', 'Left - right');
hold on;
plot(smooth(gaze_angle(:,2)), 'DisplayName', 'Up - down');
xlabel('Frame') % x-axis label
ylabel('Radians') % y-axis label
legend('show');
hold off;
% These are gaze direction vectors
gaze = gaze(:,[5,6,7,8,9,10]);
gaze = (gaze(:,[1,2,3]) + gaze(:,[4,5,6]))/2;
gaze(:,1) = smooth(gaze(:,1));
gaze(:,2) = smooth(gaze(:,2));
gaze(:,3) = smooth(gaze(:,3));
figure;
plot(gaze(:,1), 'DisplayName', 'Left - right');
hold on;
plot(gaze(:,2), 'DisplayName', 'Up - down');