Working towards coping without AU and eye models. Fixes to the visualizer.
commit 6e15e81777 (parent 51fbf805bc)

8 changed files with 139 additions and 37 deletions

.gitignore (vendored): 2 additions
.gitignore:

@@ -95,3 +95,5 @@ lib/3rdParty/CameraEnumerator/Release/
 lib/local/Utilities/Release/
 exe/FaceLandmarkVidMulti/processed/
 matlab_runners/Demos/processed/multi_face_aligned/
+exe/releases/OpenFace_0.4.1_win_x64_landmarks/
+exe/releases/OpenFace_*
@@ -127,18 +127,30 @@ int main (int argc, char **argv)
 	captured_image = image_reader.GetNextImage();
 
+	if (!face_model.eye_model)
+	{
+		cout << "WARNING: no eye model found" << endl;
+	}
+
+	if (face_analyser.GetAUClassNames().size() == 0 && face_analyser.GetAUClassNames().size() == 0)
+	{
+		cout << "WARNING: no Action Unit models found" << endl;
+	}
+
 	cout << "Starting tracking" << endl;
 	while (!captured_image.empty())
 	{
 
 		Utilities::RecorderOpenFaceParameters recording_params(arguments, false, false,
 			image_reader.fx, image_reader.fy, image_reader.cx, image_reader.cy);
+		if (!face_model.eye_model)
+		{
+			recording_params.setOutputGaze(false);
+		}
 		Utilities::RecorderOpenFace open_face_rec(image_reader.name, recording_params, arguments);
 
 		visualizer.SetImage(captured_image, image_reader.fx, image_reader.fy, image_reader.cx, image_reader.cy);
 
-		if (recording_params.outputGaze() && !face_model.eye_model)
-			cout << "WARNING: no eye model defined, but outputting gaze" << endl;
-
 		// Making sure the image is in uchar grayscale
 		cv::Mat_<uchar> grayscale_image = image_reader.GetGrayFrame();
 
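The added checks above warn once, up front, when the loaded models cannot support eye or AU output, and the per-frame gaze warning is dropped in favour of simply disabling gaze recording. Note that the committed AU check compares face_analyser.GetAUClassNames() against itself twice; presumably the second operand was meant to be the regression (intensity) model list. A minimal standalone sketch of the intended guards, assuming the FaceAnalyser also exposes a GetAURegNames() accessor:

#include <iostream>
#include <string>
#include <vector>

// Hypothetical helper: prints the same warnings the commit adds, but checks
// both the classification (occurrence) and regression (intensity) AU lists.
void warn_missing_models(bool has_eye_model,
                         const std::vector<std::string>& au_class_names,
                         const std::vector<std::string>& au_reg_names)
{
	if (!has_eye_model)
	{
		std::cout << "WARNING: no eye model found" << std::endl;
	}
	if (au_class_names.empty() && au_reg_names.empty())
	{
		std::cout << "WARNING: no Action Unit models found" << std::endl;
	}
}

The warnings are informational only; the actual gaze output is switched off separately through recording_params.setOutputGaze(false) when no eye model is present.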
@@ -104,6 +104,11 @@ int main (int argc, char **argv)
 	// The modules that are being used for tracking
 	LandmarkDetector::CLNF face_model(det_parameters.model_location);
 
+	if (!face_model.eye_model)
+	{
+		cout << "WARNING: no eye model found" << endl;
+	}
+
 	// Open a sequence
 	Utilities::SequenceCapture sequence_reader;
 
@@ -158,6 +158,16 @@ int main (int argc, char **argv)
 	face_analysis_params.OptimizeForImages();
 	FaceAnalysis::FaceAnalyser face_analyser(face_analysis_params);
 
+	if (!face_model.eye_model)
+	{
+		cout << "WARNING: no eye model found" << endl;
+	}
+
+	if (face_analyser.GetAUClassNames().size() == 0 && face_analyser.GetAUClassNames().size() == 0)
+	{
+		cout << "WARNING: no Action Unit models found" << endl;
+	}
+
 	// Open a sequence
 	Utilities::SequenceCapture sequence_reader;
 
@@ -185,12 +195,13 @@ int main (int argc, char **argv)
 
 		Utilities::RecorderOpenFaceParameters recording_params(arguments, true, sequence_reader.IsWebcam(),
 			sequence_reader.fx, sequence_reader.fy, sequence_reader.cx, sequence_reader.cy, sequence_reader.fps);
-		// Do not do AU detection on multi-face case as it is not supported
-		recording_params.setOutputAUs(false);
+		if (!face_model.eye_model)
+		{
+			recording_params.setOutputGaze(false);
+		}
+
 		Utilities::RecorderOpenFace open_face_rec(sequence_reader.name, recording_params, arguments);
 
-		if (recording_params.outputGaze() && !face_model.eye_model)
-			cout << "WARNING: no eye model defined, but outputting gaze" << endl;
 
 		if (sequence_reader.IsWebcam())
 		{
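The removed lines disabled AU output unconditionally for the multi-face tool; the new guard instead switches gaze output off when no eye model is loaded. A short sketch of the same gating expressed as a helper, assuming the usual OpenFace Utilities, FaceAnalysis and LandmarkDetector headers are on the include path (the header names and the helper itself are illustrative, not part of the commit):

#include <RecorderOpenFaceParameters.h>
#include <FaceAnalyser.h>
#include <LandmarkDetectorModel.h>

// Disable recorder outputs whose models are unavailable, instead of writing
// empty columns or warning on every frame.
void gate_outputs_on_models(Utilities::RecorderOpenFaceParameters& params,
                            const LandmarkDetector::CLNF& face_model,
                            FaceAnalysis::FaceAnalyser& face_analyser)
{
	if (!face_model.eye_model)
	{
		params.setOutputGaze(false);
	}
	if (face_analyser.GetAUClassNames().empty())
	{
		params.setOutputAUs(false);
	}
}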
@@ -121,6 +121,16 @@ int main (int argc, char **argv)
 	FaceAnalysis::FaceAnalyserParameters face_analysis_params(arguments);
 	FaceAnalysis::FaceAnalyser face_analyser(face_analysis_params);
 
+	if (!face_model.eye_model)
+	{
+		cout << "WARNING: no eye model found" << endl;
+	}
+
+	if (face_analyser.GetAUClassNames().size() == 0 && face_analyser.GetAUClassNames().size() == 0)
+	{
+		cout << "WARNING: no Action Unit models found" << endl;
+	}
+
 	Utilities::SequenceCapture sequence_reader;
 
 	// A utility for visualizing the results

@@ -150,6 +160,10 @@ int main (int argc, char **argv)
 
 		Utilities::RecorderOpenFaceParameters recording_params(arguments, true, sequence_reader.IsWebcam(),
 			sequence_reader.fx, sequence_reader.fy, sequence_reader.cx, sequence_reader.cy, sequence_reader.fps);
+		if (!face_model.eye_model)
+		{
+			recording_params.setOutputGaze(false);
+		}
 		Utilities::RecorderOpenFace open_face_rec(sequence_reader.name, recording_params, arguments);
 
 		if (recording_params.outputGaze() && !face_model.eye_model)
@@ -259,37 +259,47 @@ namespace OpenFaceDemo
             {
 
                 var au_regs = face_analyser.GetCurrentAUsReg();
-
-                double smile = (au_regs["AU12"] + au_regs["AU06"] + au_regs["AU25"]) / 13.0;
-                double frown = (au_regs["AU15"] + au_regs["AU17"]) / 12.0;
-
-                double brow_up = (au_regs["AU01"] + au_regs["AU02"]) / 10.0;
-                double brow_down = au_regs["AU04"] / 5.0;
-
-                double eye_widen = au_regs["AU05"] / 3.0;
-                double nose_wrinkle = au_regs["AU09"] / 4.0;
-
-                Dictionary<int, double> smileDict = new Dictionary<int, double>();
-                smileDict[0] = 0.7 * smile_cumm + 0.3 * smile;
-                smileDict[1] = 0.7 * frown_cumm + 0.3 * frown;
-                smilePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = smileDict, Confidence = confidence });
-
-                Dictionary<int, double> browDict = new Dictionary<int, double>();
-                browDict[0] = 0.7 * brow_up_cumm + 0.3 * brow_up;
-                browDict[1] = 0.7 * brow_down_cumm + 0.3 * brow_down;
-                browPlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = browDict, Confidence = confidence });
-
-                Dictionary<int, double> eyeDict = new Dictionary<int, double>();
-                eyeDict[0] = 0.7 * widen_cumm + 0.3 * eye_widen;
-                eyeDict[1] = 0.7 * wrinkle_cumm + 0.3 * nose_wrinkle;
-                eyePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = eyeDict, Confidence = confidence });
-
-                smile_cumm = smileDict[0];
-                frown_cumm = smileDict[1];
-                brow_up_cumm = browDict[0];
-                brow_down_cumm = browDict[1];
-                widen_cumm = eyeDict[0];
-                wrinkle_cumm = eyeDict[1];
+                if(au_regs.Count > 0)
+                {
+                    double smile = (au_regs["AU12"] + au_regs["AU06"] + au_regs["AU25"]) / 13.0;
+                    double frown = (au_regs["AU15"] + au_regs["AU17"]) / 12.0;
+
+                    double brow_up = (au_regs["AU01"] + au_regs["AU02"]) / 10.0;
+                    double brow_down = au_regs["AU04"] / 5.0;
+
+                    double eye_widen = au_regs["AU05"] / 3.0;
+                    double nose_wrinkle = au_regs["AU09"] / 4.0;
+
+                    Dictionary<int, double> smileDict = new Dictionary<int, double>();
+                    smileDict[0] = 0.7 * smile_cumm + 0.3 * smile;
+                    smileDict[1] = 0.7 * frown_cumm + 0.3 * frown;
+                    smilePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = smileDict, Confidence = confidence });
+
+                    Dictionary<int, double> browDict = new Dictionary<int, double>();
+                    browDict[0] = 0.7 * brow_up_cumm + 0.3 * brow_up;
+                    browDict[1] = 0.7 * brow_down_cumm + 0.3 * brow_down;
+                    browPlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = browDict, Confidence = confidence });
+
+                    Dictionary<int, double> eyeDict = new Dictionary<int, double>();
+                    eyeDict[0] = 0.7 * widen_cumm + 0.3 * eye_widen;
+                    eyeDict[1] = 0.7 * wrinkle_cumm + 0.3 * nose_wrinkle;
+                    eyePlot.AddDataPoint(new DataPointGraph() { Time = CurrentTime, values = eyeDict, Confidence = confidence });
+
+                    smile_cumm = smileDict[0];
+                    frown_cumm = smileDict[1];
+                    brow_up_cumm = browDict[0];
+                    brow_down_cumm = browDict[1];
+                    widen_cumm = eyeDict[0];
+                    wrinkle_cumm = eyeDict[1];
+                }
+                else
+                {
+                    // If no AUs present disable the AU visualization
+                    MainGrid.ColumnDefinitions[2].Width = new GridLength(0);
+                    eyePlot.Visibility = Visibility.Collapsed;
+                    browPlot.Visibility = Visibility.Collapsed;
+                    smilePlot.Visibility = Visibility.Collapsed;
+                }
 
                 Dictionary<int, double> poseDict = new Dictionary<int, double>();
                 poseDict[0] = -pose[3];
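The demo now wraps the plotting code in if(au_regs.Count > 0) and collapses the AU plots when no AU predictions are available, so the window no longer shows empty graphs when the AU models are missing. The plotted values themselves are an exponential moving average: each frame mixes 70% of the accumulated value with 30% of the new AU estimate. A minimal C++ sketch of that smoothing rule (the 0.7/0.3 weights come from the code above; the struct is illustrative):

#include <initializer_list>
#include <iostream>

// Exponential moving average with the same weights the demo uses:
// smoothed = 0.7 * previous_smoothed + 0.3 * new_sample
struct SmoothedSignal
{
	double value = 0.0;
	double update(double sample)
	{
		value = 0.7 * value + 0.3 * sample;
		return value;
	}
};

int main()
{
	SmoothedSignal smile;
	for (double raw : {0.0, 1.0, 1.0, 1.0, 0.0})
	{
		std::cout << smile.update(raw) << std::endl; // rises towards 1, then decays
	}
	return 0;
}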
@@ -78,6 +78,7 @@ namespace Utilities
 		float getCy() const { return cy; }
 
 		void setOutputAUs(bool output_AUs) { this->output_AUs = output_AUs; }
+		void setOutputGaze(bool output_gaze) { this->output_gaze = output_gaze; }
 
 	private:
 
@@ -34,6 +34,7 @@
 #include <sstream>
 #include <iomanip>
 #include <map>
+#include <set>
 
 #include "Visualizer.h"
 #include "VisualizationUtils.h"
@@ -236,7 +237,22 @@ void Visualizer::SetObservationActionUnits(const std::vector<std::pair<std::stri
 {
 	if(au_intensities.size() > 0 || au_occurences.size() > 0)
 	{
-		const int NB_AUS = 17;
+		std::set<std::string> au_names;
+		std::map<std::string, bool> occurences_map;
+		std::map<std::string, double> intensities_map;
+
+		for (size_t idx = 0; idx < au_intensities.size(); idx++)
+		{
+			au_names.insert(au_intensities[idx].first);
+			intensities_map[au_intensities[idx].first] = au_intensities[idx].second;
+		}
+
+		for (size_t idx = 0; idx < au_occurences.size(); idx++)
+		{
+			au_names.insert(au_occurences[idx].first);
+			occurences_map[au_occurences[idx].first] = au_occurences[idx].second;
+		}
+
 		const int AU_TRACKBAR_LENGTH = 400;
 		const int AU_TRACKBAR_HEIGHT = 10;
@@ -244,13 +260,46 @@ void Visualizer::SetObservationActionUnits(const std::vector<std::pair<std::stri
 		const int MARGIN_X = 185;
 		const int MARGIN_Y = 10;
 
-		action_units_image = cv::Mat(NB_AUS * (AU_TRACKBAR_HEIGHT + 10) + MARGIN_Y * 2, AU_TRACKBAR_LENGTH + MARGIN_X, CV_8UC3, cv::Scalar(255,255,255));
+		const int nb_aus = au_names.size();
+
+		// Do not reinitialize
+		if(action_units_image.empty())
+		{
+			action_units_image = cv::Mat(nb_aus * (AU_TRACKBAR_HEIGHT + 10) + MARGIN_Y * 2, AU_TRACKBAR_LENGTH + MARGIN_X, CV_8UC3, cv::Scalar(255,255,255));
+		}
+		else
+		{
+			action_units_image.setTo(255);
+		}
+
 		std::map<std::string, std::pair<bool, double>> aus;
 
 		// first, prepare a mapping "AU name" -> { present, intensity }
-		for (size_t idx = 0; idx < au_intensities.size(); idx++) {
-			aus[au_intensities[idx].first] = std::make_pair(au_occurences[idx].second != 0, au_intensities[idx].second);
+		for (auto au_name : au_names)
+		{
+			// Insert the intensity and AU presense (as these do not always overlap check if they exist first)
+			bool occurence = false;
+			if (occurences_map.find(au_name) != occurences_map.end())
+			{
+				occurence = occurences_map[au_name] != 0;
+			}
+			else
+			{
+				// If we do not have an occurence label, trust the intensity one
+				occurence = intensities_map[au_name] > 1;
+			}
+			double intensity = 0.0;
+			if (intensities_map.find(au_name) != intensities_map.end())
+			{
+				intensity = intensities_map[au_name];
+			}
+			else
+			{
+				// If we do not have an intensity label, trust the occurence one
+				intensity = occurences_map[au_name] == 0 ? 0 : 5;
+			}
+
+			aus[au_name] = std::make_pair(occurence, intensity);
 		}
 
 		// then, build the graph
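The rewritten loop no longer assumes that the intensity and occurrence predictions cover the same AUs in the same order: it takes the union of the AU names and fills in a fallback when one of the two labels is missing (presence inferred from intensity > 1, intensity inferred as 0 or 5 from presence). A self-contained sketch of that merge using only the standard library, with made-up example predictions standing in for the face analyser output:

#include <iostream>
#include <map>
#include <set>
#include <string>
#include <utility>
#include <vector>

int main()
{
	// Example predictions; in the visualizer these come from the face analyser.
	std::vector<std::pair<std::string, double>> au_intensities = { {"AU01", 2.5}, {"AU12", 0.3} };
	std::vector<std::pair<std::string, double>> au_occurences  = { {"AU12", 0.0}, {"AU45", 1.0} };

	// Union of AU names plus lookup maps for each label type.
	std::set<std::string> au_names;
	std::map<std::string, double> intensities_map, occurences_map;
	for (const auto& au : au_intensities) { au_names.insert(au.first); intensities_map[au.first] = au.second; }
	for (const auto& au : au_occurences)  { au_names.insert(au.first); occurences_map[au.first]  = au.second; }

	// Merge into "AU name" -> { present, intensity }, falling back when a label is missing.
	std::map<std::string, std::pair<bool, double>> aus;
	for (const auto& name : au_names)
	{
		bool present = occurences_map.count(name) ? occurences_map[name] != 0
		                                          : intensities_map[name] > 1;
		double intensity = intensities_map.count(name) ? intensities_map[name]
		                                               : (occurences_map[name] == 0 ? 0.0 : 5.0);
		aus[name] = std::make_pair(present, intensity);
	}

	for (const auto& au : aus)
	{
		std::cout << au.first << ": present=" << au.second.first << " intensity=" << au.second.second << std::endl;
	}
	return 0;
}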
@@ -382,12 +431,10 @@ char Visualizer::ShowObservation()
 	}
 	if (vis_aus && !action_units_image.empty())
 	{
-		cv::namedWindow("action units", cv::WindowFlags::WINDOW_KEEPRATIO);
 		cv::imshow("action units", action_units_image);
 	}
 	if (vis_track)
 	{
-		cv::namedWindow("tracking result", cv::WindowFlags::WINDOW_KEEPRATIO);
 		cv::imshow("tracking result", captured_image);
 	}
 	return cv::waitKey(1);
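Since ShowObservation() runs once per frame, dropping the cv::namedWindow calls stops the windows being (re)created with WINDOW_KEEPRATIO every frame; cv::imshow now creates each window on first use with OpenCV's default autosize behaviour. If a resizable window is still wanted, it only needs to be created once before the display loop. A minimal sketch, assuming only OpenCV:

#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>

int main()
{
	// Create the window once; imshow will reuse it on every subsequent call.
	cv::namedWindow("tracking result", cv::WINDOW_KEEPRATIO);

	cv::Mat frame(480, 640, CV_8UC3, cv::Scalar(40, 40, 40));
	cv::imshow("tracking result", frame);
	cv::waitKey(0);
	return 0;
}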