Adding ability to output eye landmark locations during image processing.

Tadas Baltrusaitis 2017-10-22 20:02:54 +01:00
parent af4f68075b
commit 729d9a9a85
3 changed files with 30 additions and 10 deletions
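For context, write_out_landmarks now emits a version-2 features file whose gaze and eye sections look roughly like the sketch below; the numbers are illustrative, and the count of 56 eye landmarks assumes the usual 28-point model per eye. Eye landmark coordinates are written 1-based to match MATLAB indexing, mirroring the + 1 in the output loop.

version: 2
...
gaze: dir_x_1, dir_y_1, dir_z_1, dir_x_2, dir_y_2, dir_z_2:
{
0.05 -0.11 -0.99 0.07 -0.10 -0.99
}
gaze: angle_x, angle_y:
{
0.06 -0.10
}
eye_lmks: 56
{
271.3 164.8
268.9 170.2
...
}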

View File

@@ -160,15 +160,16 @@ void write_out_pose_landmarks(const string& outfeatures, const cv::Mat_<double>&
featuresFile << "}" << endl;
// Do the pose and eye gaze if present as well
featuresFile << "gaze: dir_x_1, dir_y_1, dir_z_1, dir_x_2, dir_y_2, dir_z_2: " << endl;
featuresFile << "gaze_vec: dir_x_1, dir_y_1, dir_z_1, dir_x_2, dir_y_2, dir_z_2: " << endl;
featuresFile << "{" << endl;
featuresFile << gaze0.x << " " << gaze0.y << " " << gaze0.z << " " << gaze1.x << " " << gaze1.y << " " << gaze1.z << endl;
featuresFile << "}" << endl;
featuresFile.close();
}
}
void write_out_landmarks(const string& outfeatures, const LandmarkDetector::CLNF& clnf_model, const cv::Vec6d& pose, const cv::Point3f& gaze0, const cv::Point3f& gaze1, std::vector<std::pair<std::string, double>> au_intensities, std::vector<std::pair<std::string, double>> au_occurences)
void write_out_landmarks(const string& outfeatures, const LandmarkDetector::CLNF& clnf_model, const cv::Vec6d& pose, const cv::Point3f& gaze0, const cv::Point3f& gaze1, const cv::Vec2d gaze_angle, std::vector<std::pair<std::string, double>> au_intensities, std::vector<std::pair<std::string, double>> au_occurences)
{
create_directory_from_file(outfeatures);
std::ofstream featuresFile;
@@ -177,7 +178,7 @@ void write_out_landmarks(const string& outfeatures, const LandmarkDetector::CLNF
if (featuresFile.is_open())
{
int n = clnf_model.patch_experts.visibilities[0][0].rows;
featuresFile << "version: 1" << endl;
featuresFile << "version: 2" << endl;
featuresFile << "npoints: " << n << endl;
featuresFile << "{" << endl;
@@ -194,12 +195,28 @@ void write_out_landmarks(const string& outfeatures, const LandmarkDetector::CLNF
featuresFile << pose[3] << " " << pose[4] << " " << pose[5] << endl;
featuresFile << "}" << endl;
// Do the pose and eye gaze if present as well
featuresFile << "gaze: dir_x_1, dir_y_1, dir_z_1, dir_x_2, dir_y_2, dir_z_2: " << endl;
featuresFile << "{" << endl;
featuresFile << gaze0.x << " " << gaze0.y << " " << gaze0.z << " " << gaze1.x << " " << gaze1.y << " " << gaze1.z << endl;
featuresFile << "}" << endl;
featuresFile << "gaze: angle_x, angle_y: " << endl;
featuresFile << "{" << endl;
featuresFile << gaze_angle[0] << " " << gaze_angle[1] << endl;
featuresFile << "}" << endl;
std::vector<cv::Point2d> eye_landmark_points = LandmarkDetector::CalculateAllEyeLandmarks(clnf_model);
featuresFile << "eye_lmks: " << eye_landmark_points.size() << endl;
featuresFile << "{" << endl;
for (int i = 0; i < eye_landmark_points.size(); ++i)
{
// Use matlab format, so + 1
featuresFile << eye_landmark_points[i].x + 1 << " " << eye_landmark_points[i].y + 1 << endl;
}
featuresFile << "}" << endl;
// Do the au intensities
featuresFile << "au intensities: " << au_intensities.size() << endl;
featuresFile << "{" << endl;
@@ -451,12 +468,13 @@ int main (int argc, char **argv)
// Gaze tracking, absolute gaze direction
cv::Point3f gazeDirection0(0, 0, -1);
cv::Point3f gazeDirection1(0, 0, -1);
cv::Vec2d gazeAngle(0, 0);
if (success && det_parameters.track_gaze)
{
FaceAnalysis::EstimateGaze(clnf_model, gazeDirection0, fx, fy, cx, cy, true);
FaceAnalysis::EstimateGaze(clnf_model, gazeDirection1, fx, fy, cx, cy, false);
gazeAngle = FaceAnalysis::GetGazeAngle(gazeDirection0, gazeDirection1);
}
auto ActionUnits = face_analyser.PredictStaticAUs(read_image, clnf_model, false);
@@ -477,7 +495,7 @@ int main (int argc, char **argv)
boost::filesystem::path fname = out_feat_path.filename().replace_extension("");
boost::filesystem::path ext = out_feat_path.extension();
string outfeatures = dir.string() + preferredSlash + fname.string() + string(name) + ext.string();
write_out_landmarks(outfeatures, clnf_model, headPose, gazeDirection0, gazeDirection1, ActionUnits.first, ActionUnits.second);
write_out_landmarks(outfeatures, clnf_model, headPose, gazeDirection0, gazeDirection1, gazeAngle, ActionUnits.first, ActionUnits.second);
}
if (!output_pose_locations.empty())
@@ -567,11 +585,13 @@ int main (int argc, char **argv)
// Gaze tracking, absolute gaze direction
cv::Point3f gazeDirection0(0, 0, -1);
cv::Point3f gazeDirection1(0, 0, -1);
cv::Vec2d gazeAngle(0, 0);
if (det_parameters.track_gaze)
{
FaceAnalysis::EstimateGaze(clnf_model, gazeDirection0, fx, fy, cx, cy, true);
FaceAnalysis::EstimateGaze(clnf_model, gazeDirection1, fx, fy, cx, cy, false);
gazeAngle = FaceAnalysis::GetGazeAngle(gazeDirection0, gazeDirection1);
}
auto ActionUnits = face_analyser.PredictStaticAUs(read_image, clnf_model, false);
@@ -580,7 +600,7 @@ int main (int argc, char **argv)
if(!output_landmark_locations.empty())
{
string outfeatures = output_landmark_locations.at(i);
write_out_landmarks(outfeatures, clnf_model, headPose, gazeDirection0, gazeDirection1, ActionUnits.first, ActionUnits.second);
write_out_landmarks(outfeatures, clnf_model, headPose, gazeDirection0, gazeDirection1, gazeAngle, ActionUnits.first, ActionUnits.second);
}
// Writing out the detected landmarks
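Because eye_lmks is new in the version-2 format, consumers of these feature files need a small amount of parsing for it. A minimal, self-contained C++ sketch of such a reader is below; it assumes exactly the layout written above, while the file name and the conversion back to 0-based coordinates are this sketch's own choices rather than anything the commit defines.

#include <cstddef>
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <utility>
#include <vector>

// Reads the "eye_lmks" block of a version-2 features file:
//   eye_lmks: <count>
//   {
//   <x> <y>        one point per line, 1-based (Matlab-style)
//   ...
//   }
std::vector<std::pair<double, double>> read_eye_landmarks(const std::string& path)
{
    std::vector<std::pair<double, double>> points;
    std::ifstream in(path);
    std::string line;
    while (std::getline(in, line))
    {
        // Look for the section header, then read <count> points inside the braces.
        if (line.rfind("eye_lmks:", 0) != 0)
            continue;
        std::size_t count = std::stoul(line.substr(std::string("eye_lmks:").size()));
        std::getline(in, line); // opening "{"
        for (std::size_t i = 0; i < count && std::getline(in, line); ++i)
        {
            std::istringstream iss(line);
            double x, y;
            if (iss >> x >> y)
                points.emplace_back(x - 1.0, y - 1.0); // undo the Matlab + 1 offset
        }
        break;
    }
    return points;
}

int main()
{
    // "face_det.pts" is an illustrative output path, not one the commit prescribes.
    for (const auto& p : read_eye_landmarks("face_det.pts"))
        std::cout << p.first << " " << p.second << "\n";
    return 0;
}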

View File

@@ -5,7 +5,7 @@ tic
%% Head pose
cd('Head Pose Experiments');
run_head_pose_tests_OpenFace;
assert(median(all_errors_biwi_OF(:)) < 2.7);
assert(median(all_errors_biwi_OF(:)) < 2.8);
assert(median(all_errors_bu_OF(:)) < 2.2);
assert(median(all_errors_ict_OF(:)) < 2.1);
cd('../');

View File

@@ -44,7 +44,7 @@ for i=3 + offset:numTogether:numel(dbSeqDir)
command = cat(2, command, [' -ov "' outputVideo '"']);
end
end
command = cat(2, command, [' -fx 505 -fy 505 -cx 320 -cy 240 -no2Dfp -no3Dfp -noMparams -noAUs -noGaze -vis-track -camera_coord ']);
command = cat(2, command, [' -fx 505 -fy 505 -cx 320 -cy 240 -no2Dfp -no3Dfp -noMparams -noAUs -noGaze -vis-track ']);
if(any(strcmp('model', varargin)))
command = cat(2, command, [' -mloc "', varargin{find(strcmp('model', varargin))+1}, '"']);