Working on the visualizer interoperability.

Tadas Baltrusaitis 2018-01-19 16:17:22 +00:00
parent a84eea57c8
commit 57c8bb75ea
8 changed files with 23 additions and 20 deletions


@@ -192,7 +192,7 @@ int main (int argc, char **argv)
// Displaying the tracking visualizations
visualizer.SetObservationFaceAlign(sim_warped_img);
visualizer.SetObservationHOG(hog_descriptor, num_hog_rows, num_hog_cols);
-visualizer.SetObservationLandmarks(face_model.detected_landmarks, 1.0, face_model.detection_success); // Set confidence to high to make sure we always visualize
+visualizer.SetObservationLandmarks(face_model.detected_landmarks, 1.0); // Set confidence to high to make sure we always visualize
visualizer.SetObservationPose(pose_estimate, 1.0);
visualizer.SetObservationGaze(gaze_direction0, gaze_direction1, LandmarkDetector::CalculateAllEyeLandmarks(face_model), LandmarkDetector::Calculate3DEyeLandmarks(face_model, image_reader.fx, image_reader.fy, image_reader.cx, image_reader.cy), face_model.detection_certainty);


@@ -164,7 +164,7 @@ int main (int argc, char **argv)
// Displaying the tracking visualizations
visualizer.SetImage(captured_image, sequence_reader.fx, sequence_reader.fy, sequence_reader.cx, sequence_reader.cy);
-visualizer.SetObservationLandmarks(face_model.detected_landmarks, face_model.detection_certainty, detection_success);
+visualizer.SetObservationLandmarks(face_model.detected_landmarks, face_model.detection_certainty);
visualizer.SetObservationPose(pose_estimate, face_model.detection_certainty);
visualizer.SetObservationGaze(gazeDirection0, gazeDirection1, LandmarkDetector::CalculateAllEyeLandmarks(face_model), LandmarkDetector::Calculate3DEyeLandmarks(face_model, sequence_reader.fx, sequence_reader.fy, sequence_reader.cx, sequence_reader.cy), face_model.detection_certainty);
visualizer.SetFps(fps_tracker.GetFPS());


@@ -290,7 +290,7 @@ int main (int argc, char **argv)
// Visualising the results
if(active_models[model])
{
-visualizer.SetObservationLandmarks(face_models[model].detected_landmarks, face_models[model].detection_certainty, face_models[model].detection_success);
+visualizer.SetObservationLandmarks(face_models[model].detected_landmarks, face_models[model].detection_certainty);
visualizer.SetObservationPose(LandmarkDetector::GetPose(face_models[model], sequence_reader.fx, sequence_reader.fy, sequence_reader.cx, sequence_reader.cy), face_models[model].detection_certainty);
}
}


@@ -194,7 +194,7 @@ int main (int argc, char **argv)
visualizer.SetImage(captured_image, sequence_reader.fx, sequence_reader.fy, sequence_reader.cx, sequence_reader.cy);
visualizer.SetObservationFaceAlign(sim_warped_img);
visualizer.SetObservationHOG(hog_descriptor, num_hog_rows, num_hog_cols);
-visualizer.SetObservationLandmarks(face_model.detected_landmarks, face_model.detection_certainty, detection_success);
+visualizer.SetObservationLandmarks(face_model.detected_landmarks, face_model.detection_certainty);
visualizer.SetObservationPose(pose_estimate, face_model.detection_certainty);
visualizer.SetObservationGaze(gazeDirection0, gazeDirection1, LandmarkDetector::CalculateAllEyeLandmarks(face_model), LandmarkDetector::Calculate3DEyeLandmarks(face_model, sequence_reader.fx, sequence_reader.fy, sequence_reader.cx, sequence_reader.cy), face_model.detection_certainty);
visualizer.SetFps(fps_tracker.GetFPS());
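
Across the four command-line tool hunks above, the change is the same: the separate success flag is dropped from SetObservationLandmarks, so detection quality is conveyed through the confidence argument alone. A minimal sketch of the two resulting call patterns, using the same variable names as the diff (the surrounding setup is assumed, not shown here):

// Sketch only: assumes a constructed Utilities::Visualizer, a fitted face model,
// and a computed pose_estimate, as in the executables above.

// Video tracking: pass the model's own certainty so weak detections are not drawn.
visualizer.SetObservationLandmarks(face_model.detected_landmarks, face_model.detection_certainty);
visualizer.SetObservationPose(pose_estimate, face_model.detection_certainty);

// Single images: force the overlay by passing a confidence of 1.0,
// as the image-mode tool does in the first hunk.
visualizer.SetObservationLandmarks(face_model.detected_landmarks, 1.0);
visualizer.SetObservationPose(pose_estimate, 1.0);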


@@ -108,9 +108,6 @@ namespace OpenFaceOffline
FaceAnalyserManaged face_analyser;
GazeAnalyserManaged gaze_analyser;
-// For visualization of results
-Visualizer visualizer_of;
// For output recording
Recorder recorder;
@@ -254,8 +251,11 @@ namespace OpenFaceOffline
// Setup the parameters optimized for working on individual images rather than sequences
face_model_params.optimiseForImages();
+// Setup the visualization
+Visualizer visualizer_of = new Visualizer(ShowTrackedVideo || RecordTracked, ShowAppearance, ShowAppearance);
// Initialize the face detector if it has not been initialized yet
-if(face_detector == null)
+if (face_detector == null)
{
face_detector = new FaceDetector();
}
@@ -304,7 +304,7 @@ namespace OpenFaceOffline
gaze_analyser.AddNextFrame(clnf_model, detectionSucceeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
// Only the final face will contain the details
-VisualizeFeatures(frame, landmarks, i == 0, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), progress);
+VisualizeFeatures(frame, visualizer_of, landmarks, i == 0, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), progress);
// Record an observation
RecordObservation(recorder, detectionSucceeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
@@ -357,6 +357,9 @@ namespace OpenFaceOffline
recorder = new Recorder(record_root, output_file_name, capture.width, capture.height, Record2DLandmarks, Record3DLandmarks, RecordModelParameters, RecordPose,
RecordAUs, RecordGaze, RecordAligned, RecordHOG, clnf_model, face_analyser, fx, fy, cx, cy, DynamicAUModels);
+// Setup the c++ visualizer
+Visualizer visualizer_of = new Visualizer(ShowTrackedVideo || RecordTracked, ShowAppearance, ShowAppearance);
int frame_id = 0;
double fps = capture.GetFPS();
@@ -400,7 +403,7 @@ namespace OpenFaceOffline
List<Tuple<double, double>> landmarks = clnf_model.CalculateVisibleLandmarks();
-VisualizeFeatures(frame, landmarks, true, fx, fy, cx, cy, progress);
+VisualizeFeatures(frame, visualizer_of, landmarks, true, fx, fy, cx, cy, progress);
while (thread_running & thread_paused && skip_frames == 0)
{
@@ -469,7 +472,7 @@ namespace OpenFaceOffline
}
-private void VisualizeFeatures(RawImage frame, List<Tuple<double, double>> landmarks, bool new_image, double fx, double fy, double cx, double cy, double progress)
+private void VisualizeFeatures(RawImage frame, Visualizer visualizer, List<Tuple<double, double>> landmarks, bool new_image, double fx, double fy, double cx, double cy, double progress)
{
List<Tuple<Point, Point>> lines = null;
@@ -491,10 +494,10 @@ namespace OpenFaceOffline
double scale = 0;
// Helps with recording and showing the visualizations
-///visualizer.SetObservationFaceAlign(sim_warped_img);
-//visualizer.SetObservationHOG(hog_descriptor, num_hog_rows, num_hog_cols);
-//visualizer.SetObservationLandmarks(face_model.detected_landmarks, 1.0, face_model.detection_success); // Set confidence to high to make sure we always visualize
-//visualizer.SetObservationPose(pose_estimate, 1.0);
+visualizer.SetObservationFaceAlign(face_analyser.GetLatestAlignedFace());
+visualizer.SetObservationHOG(face_analyser.GetLatestHOGFeature(), face_analyser.GetHOGRows(), face_analyser.GetHOGCols());
+visualizer.SetObservationLandmarks(landmarks, confidence); // Set confidence to high to make sure we always visualize
+visualizer.SetObservationPose(pose, confidence);
//visualizer.SetObservationGaze(gaze_direction0, gaze_direction1, LandmarkDetector::CalculateAllEyeLandmarks(face_model), LandmarkDetector::Calculate3DEyeLandmarks(face_model, image_reader.fx, image_reader.fy, image_reader.cx, image_reader.cy), face_model.detection_certainty);


@@ -95,12 +95,12 @@ namespace UtilitiesOF {
m_visualizer->SetObservationFaceAlign(aligned_face_image->Mat);
}
-void SetObservationHOG(bool success, OpenCVWrappers::RawImage^ observation_HOG, int num_cols, int num_rows)
+void SetObservationHOG(OpenCVWrappers::RawImage^ observation_HOG, int num_cols, int num_rows)
{
m_visualizer->SetObservationHOG(observation_HOG->Mat, num_cols, num_rows);
}
-void SetObservationLandmarks(List<System::Tuple<double, double>^>^ landmarks_2D, double confidence, bool success)
+void SetObservationLandmarks(List<System::Tuple<double, double>^>^ landmarks_2D, double confidence)
{
// Construct an OpenCV matrix from the landmarks
cv::Mat_<double> landmarks_2D_mat(landmarks_2D->Count * 2, 1, 0.0);
@@ -110,7 +110,7 @@ namespace UtilitiesOF {
landmarks_2D_mat.at<double>(i + landmarks_2D->Count, 0) = landmarks_2D[i]->Item2;
}
// TODO add visibilities
-m_visualizer->SetObservationLandmarks(landmarks_2D_mat, confidence, success);
+m_visualizer->SetObservationLandmarks(landmarks_2D_mat, confidence);
}
// Finalizer. Definitely called before Garbage Collection,
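
The wrapper above flattens the managed List<System::Tuple<double, double>> of landmarks into the 2n x 1 column layout the native visualizer expects: all x coordinates first, then all y coordinates. A self-contained sketch of that packing on the native side, assuming plain std::pair input (the helper name PackLandmarks is illustrative, not part of the commit):

#include <opencv2/core.hpp>
#include <utility>
#include <vector>

// Pack 2D landmarks into the 2n x 1 column vector consumed by
// Utilities::Visualizer::SetObservationLandmarks: x0..x(n-1) followed by y0..y(n-1).
cv::Mat_<double> PackLandmarks(const std::vector<std::pair<double, double>>& points)
{
    const int n = static_cast<int>(points.size());
    cv::Mat_<double> landmarks_2D(2 * n, 1, 0.0);
    for (int i = 0; i < n; ++i)
    {
        landmarks_2D.at<double>(i, 0) = points[i].first;       // x block
        landmarks_2D.at<double>(i + n, 0) = points[i].second;  // y block
    }
    return landmarks_2D;
}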


@@ -62,7 +62,7 @@ namespace Utilities
void SetImage(const cv::Mat& canvas, float fx, float fy, float cx, float cy);
// All observations relevant to facial landmarks (optional visibilities parameter to not display all landmarks)
-void SetObservationLandmarks(const cv::Mat_<double>& landmarks_2D, double confidence, bool success, const cv::Mat_<int>& visibilities = cv::Mat_<int>());
+void SetObservationLandmarks(const cv::Mat_<double>& landmarks_2D, double confidence, const cv::Mat_<int>& visibilities = cv::Mat_<int>());
// Pose related observations
void SetObservationPose(const cv::Vec6d& pose, double confidence);
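
The new declaration still accepts the optional visibilities matrix, so callers can continue to hide individual landmarks; only the separate success flag is removed. A hedged sketch of calling the new overload (per the comment above, an empty visibilities matrix, i.e. the default argument, displays all landmarks; variable names are illustrative):

// Sketch only: assumes an existing Utilities::Visualizer and a 2n x 1 landmark matrix.
cv::Mat_<int> visibilities;  // left empty/default: no landmarks are suppressed

visualizer.SetObservationLandmarks(face_model.detected_landmarks,
                                   face_model.detection_certainty,
                                   visibilities);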


@@ -134,7 +134,7 @@ void Visualizer::SetObservationHOG(const cv::Mat_<double>& hog_descriptor, int n
}
-void Visualizer::SetObservationLandmarks(const cv::Mat_<double>& landmarks_2D, double confidence, bool success, const cv::Mat_<int>& visibilities)
+void Visualizer::SetObservationLandmarks(const cv::Mat_<double>& landmarks_2D, double confidence, const cv::Mat_<int>& visibilities)
{
if(confidence > visualisation_boundary)