diff --git a/gui/OpenFaceDemo/MainWindow.xaml b/gui/OpenFaceDemo/MainWindow.xaml
index ab63e85..a673049 100644
--- a/gui/OpenFaceDemo/MainWindow.xaml
+++ b/gui/OpenFaceDemo/MainWindow.xaml
@@ -38,5 +38,16 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/gui/OpenFaceDemo/MainWindow.xaml.cs b/gui/OpenFaceDemo/MainWindow.xaml.cs
index 9c4eeda..ca06cea 100644
--- a/gui/OpenFaceDemo/MainWindow.xaml.cs
+++ b/gui/OpenFaceDemo/MainWindow.xaml.cs
@@ -22,6 +22,7 @@ using CppInterop.LandmarkDetector;
using CameraInterop;
using FaceAnalyser_Interop;
using System.Windows.Threading;
+using System.Diagnostics;
namespace OpenFaceDemo
{
@@ -30,24 +31,108 @@ namespace OpenFaceDemo
///
public partial class MainWindow : Window
{
-
+
// -----------------------------------------------------------------
// Members
// -----------------------------------------------------------------
+ // Timing for measuring FPS
+ #region High-Resolution Timing
+ static DateTime startTime;
+ static Stopwatch sw = new Stopwatch();
+
+ static MainWindow()
+ {
+ startTime = DateTime.Now;
+ sw.Start();
+ }
+
+ public static DateTime CurrentTime
+ {
+ get { return startTime + sw.Elapsed; }
+ }
+ #endregion
Thread processing_thread;
// Some members for displaying the results
private Capture capture;
+ private WriteableBitmap latest_img;
private volatile bool thread_running;
+
+ FpsTracker processing_fps = new FpsTracker();
+
+ volatile bool detectionSucceeding = false;
+ volatile bool reset = false;
// For selecting webcams
CameraSelection cam_sec;
+ // For tracking
+ FaceModelParameters clnf_params;
+ CLNF clnf_model;
+ FaceAnalyserManaged face_analyser;
+
public MainWindow()
{
InitializeComponent();
+
+ // Set the icon
+ Uri iconUri = new Uri("logo1.ico", UriKind.RelativeOrAbsolute);
+ this.Icon = BitmapFrame.Create(iconUri);
+
+ String root = AppDomain.CurrentDomain.BaseDirectory;
+
+ clnf_params = new FaceModelParameters(root);
+ clnf_model = new CLNF(clnf_params);
+ face_analyser = new FaceAnalyserManaged(root, true);
+
+ Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
+ {
+
+ headPosePlot.AssocColor(0, Colors.Blue);
+ headPosePlot.AssocColor(1, Colors.Red);
+ headPosePlot.AssocColor(2, Colors.Green);
+
+ headPosePlot.AssocName(1, "Turn");
+ headPosePlot.AssocName(2, "Tilt");
+ headPosePlot.AssocName(0, "Up/Down");
+
+ headPosePlot.AssocThickness(0, 2);
+ headPosePlot.AssocThickness(1, 2);
+ headPosePlot.AssocThickness(2, 2);
+
+ gazePlot.AssocColor(0, Colors.Red);
+ gazePlot.AssocColor(1, Colors.Blue);
+
+ gazePlot.AssocName(0, "Left-right");
+ gazePlot.AssocName(1, "Up-down");
+ gazePlot.AssocThickness(0, 2);
+ gazePlot.AssocThickness(1, 2);
+
+ smilePlot.AssocColor(0, Colors.Green);
+ smilePlot.AssocColor(1, Colors.Red);
+ smilePlot.AssocName(0, "Smile");
+ smilePlot.AssocName(1, "Frown");
+ smilePlot.AssocThickness(0, 2);
+ smilePlot.AssocThickness(1, 2);
+
+ browPlot.AssocColor(0, Colors.Green);
+ browPlot.AssocColor(1, Colors.Red);
+ browPlot.AssocName(0, "Raise");
+ browPlot.AssocName(1, "Furrow");
+ browPlot.AssocThickness(0, 2);
+ browPlot.AssocThickness(1, 2);
+
+ eyePlot.AssocColor(0, Colors.Green);
+ eyePlot.AssocColor(1, Colors.Red);
+ eyePlot.AssocName(0, "Eye widen");
+ eyePlot.AssocName(1, "Nose wrinkle");
+ eyePlot.AssocThickness(0, 2);
+ eyePlot.AssocThickness(1, 2);
+
+ }));
+
}
@@ -62,6 +147,259 @@ namespace OpenFaceDemo
}
}
+ // The main function call for processing images, video files or webcam feed
+ private void ProcessingLoop(int cam_id = -1, int width = -1, int height = -1, bool multi_face = false)
+ {
+
+ thread_running = true;
+
+ // Create the video capture from a webcam and call the VideoLoop
+ capture = new Capture(cam_id, width, height);
+
+ if (capture.isOpened())
+ {
+ // Start the actual processing
+ VideoLoop();
+ }
+ else
+ {
+
+ string messageBoxText = "Failed to open a webcam";
+ string caption = "Webcam failure";
+ MessageBoxButton button = MessageBoxButton.OK;
+ MessageBoxImage icon = MessageBoxImage.Warning;
+
+ // Display message box
+ MessageBox.Show(messageBoxText, caption, button, icon);
+ }
+
+ }
+
+ // Capturing and processing the video frame by frame
+ private void VideoLoop()
+ {
+
+ Thread.CurrentThread.IsBackground = true;
+
+ DateTime? startTime = CurrentTime;
+
+ var lastFrameTime = CurrentTime;
+
+ clnf_model.Reset();
+ face_analyser.Reset();
+
+ double fx, fy, cx, cy;
+ fx = 500.0;
+ fy = 500.0;
+ cx = cy = -1;
+
+ int frame_id = 0;
+
+ double old_gaze_x = 0;
+ double old_gaze_y = 0;
+
+ double smile_cumm = 0;
+ double frown_cumm = 0;
+ double brow_up_cumm = 0;
+ double brow_down_cumm = 0;
+ double widen_cumm = 0;
+ double wrinkle_cumm = 0;
+
+ while (thread_running)
+ {
+ //////////////////////////////////////////////
+ // CAPTURE FRAME AND DETECT LANDMARKS FOLLOWED BY THE REQUIRED IMAGE PROCESSING
+ //////////////////////////////////////////////
+ RawImage frame = null;
+ double progress = -1;
+
+ frame = new RawImage(capture.GetNextFrame(true));
+ progress = capture.GetProgress();
+
+ if (frame.Width == 0)
+ {
+ // This indicates that we reached the end of the video file
+ break;
+ }
+
+ lastFrameTime = CurrentTime;
+ processing_fps.AddFrame();
+
+ var grayFrame = new RawImage(capture.GetCurrentFrameGray());
+
+ if (grayFrame == null)
+ {
+ Console.WriteLine("Gray is empty");
+ continue;
+ }
+
+ // This is more or less guesswork, but seems to work well enough
+ if (cx == -1)
+ {
+ fx = fx * (grayFrame.Width / 640.0);
+ fy = fy * (grayFrame.Height / 480.0);
+
+ fx = (fx + fy) / 2.0;
+ fy = fx;
+
+ cx = grayFrame.Width / 2f;
+ cy = grayFrame.Height / 2f;
+ }
+
+ bool detectionSucceeding = ProcessFrame(clnf_model, clnf_params, frame, grayFrame, fx, fy, cx, cy);
+
+ double confidence = (-clnf_model.GetConfidence()) / 2.0 + 0.5;
+
+ if (confidence < 0)
+ confidence = 0;
+ else if (confidence > 1)
+ confidence = 1;
+
+ List<double> pose = new List<double>();
+ clnf_model.GetCorrectedPoseCamera(pose, fx, fy, cx, cy);
+ List<double> non_rigid_params = clnf_model.GetNonRigidParams();
+
+ double time_stamp = (DateTime.Now - (DateTime)startTime).TotalMilliseconds;
+ // The face analysis step (only done if recording AUs, HOGs or video)
+ face_analyser.AddNextFrame(frame, clnf_model, fx, fy, cx, cy, true, false, false);
+
+ List<Tuple<Point, Point>> lines = null;
+ List<Tuple<double, double>> landmarks = null;
+ List<Tuple<Point, Point>> gaze_lines = null;
+ var gaze = face_analyser.GetGazeCamera();
+ double x_gaze = (gaze.Item1.Item1 + gaze.Item2.Item1) / 2.0;
+ double y_gaze = (gaze.Item1.Item2 + gaze.Item2.Item2) / 2.0;
+
+ if (detectionSucceeding)
+ {
+ landmarks = clnf_model.CalculateLandmarks();
+ lines = clnf_model.CalculateBox((float)fx, (float)fy, (float)cx, (float)cy);
+ gaze_lines = face_analyser.CalculateGazeLines((float)fx, (float)fy, (float)cx, (float)cy);
+ }
+
+ // Visualisation
+ Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
+ {
+
+ var au_regs = face_analyser.GetCurrentAUsReg();
+
+ double smile = (au_regs["AU12"] + au_regs["AU06"]) / 7.5 + 0.05;
+ double frown = (au_regs["AU15"] + au_regs["AU17"] + au_regs["AU04"]) / 10.0 + 0.05;
+
+ double brow_up = (au_regs["AU01"] + au_regs["AU02"]) / 7.5 + 0.05;
+ double brow_down = au_regs["AU04"] / 5.0 + 0.05;
+
+ double eye_widen = au_regs["AU05"] / 2.5 + 0.05;
+ double nose_wrinkle = au_regs["AU09"] / 4.0 + 0.05;
+
+ Dictionary<int, double> smileDict = new Dictionary<int, double>();
+ smileDict[0] = 0.5 * smile_cumm + 0.5 * smile;
+ smileDict[1] = 0.5 * frown_cumm + 0.5 * frown;
+ smilePlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = smileDict, Confidence = confidence });
+
+ Dictionary<int, double> browDict = new Dictionary<int, double>();
+ browDict[0] = 0.5 * brow_up_cumm + 0.5 * brow_up;
+ browDict[1] = 0.5 * brow_down_cumm + 0.5 * brow_down;
+ browPlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = browDict, Confidence = confidence });
+
+ Dictionary<int, double> eyeDict = new Dictionary<int, double>();
+ eyeDict[0] = 0.5 * widen_cumm + 0.5 * eye_widen;
+ eyeDict[1] = 0.5 * wrinkle_cumm + 0.5 * nose_wrinkle;
+ eyePlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = eyeDict, Confidence = confidence });
+
+ smile_cumm = smileDict[0];
+ frown_cumm = smileDict[1];
+ brow_up_cumm = browDict[0];
+ brow_down_cumm = browDict[1];
+ widen_cumm = eyeDict[0];
+ wrinkle_cumm = eyeDict[1];
+
+ Dictionary<int, double> poseDict = new Dictionary<int, double>();
+ poseDict[0] = -pose[3] / 2.0 + 0.5;// (face_analyser.GetRapport() - 1.0) / 6.5;
+ poseDict[1] = pose[4] / 2.0 + 0.5;// (rapport_fixed - 1.0) / 6.0;
+ poseDict[2] = pose[5] / 2.0 + 0.5;
+ headPosePlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = poseDict, Confidence = confidence });
+
+ Dictionary<int, double> gazeDict = new Dictionary<int, double>();
+ gazeDict[0] = x_gaze * 2.5;
+ gazeDict[0] = 0.5 * old_gaze_x + 0.5 * gazeDict[0] + 0.5;
+ gazeDict[1] = -y_gaze * 2.0;
+ gazeDict[1] = 0.5 * old_gaze_y + 0.5 * gazeDict[1] + 0.5;
+ //gazeDict[2] = face_analyser.GetEyeAttention();
+ //Console.WriteLine("{0}, {1}", x_gaze, y_gaze);
+ gazePlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = gazeDict, Confidence = confidence });
+
+ old_gaze_x = gazeDict[0] - 0.5;
+ old_gaze_y = gazeDict[1] - 0.5;
+
+ //Dictionary<int, double> valenceDict = new Dictionary<int, double>();
+ //valenceDict[0] = (face_analyser.GetValence() - 1.0) / 6.5;
+ //valenceDict[1] = face_analyser.GetArousal();
+ //valencePlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = valenceDict, Confidence = confidence });
+
+ //Dictionary<int, double> avDict = new Dictionary<int, double>();
+ //avDict[0] = (face_analyser.GetArousal() - 0.5) * 2.0;
+ //avDict[1] = ((face_analyser.GetValence() - 1.0) / 6.5 - 0.5)*2;
+ //avPlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = avDict, Confidence = confidence });
+
+ if (latest_img == null)
+ {
+ latest_img = frame.CreateWriteableBitmap();
+ }
+
+ frame.UpdateWriteableBitmap(latest_img);
+
+ video.Source = latest_img;
+ video.Confidence = confidence;
+ video.FPS = processing_fps.GetFPS();
+ video.Progress = progress;
+
+ if (!detectionSucceeding)
+ {
+ video.OverlayLines.Clear();
+ video.OverlayPoints.Clear();
+ video.GazeLines.Clear();
+ }
+ else
+ {
+ video.OverlayLines = lines;
+
+ List<Point> landmark_points = new List<Point>();
+ foreach (var p in landmarks)
+ {
+ landmark_points.Add(new Point(p.Item1, p.Item2));
+ }
+
+ video.OverlayPoints = landmark_points;
+
+ video.GazeLines = gaze_lines;
+ }
+
+ }));
+
+ if (reset)
+ {
+ clnf_model.Reset();
+ face_analyser.Reset();
+ reset = false;
+ }
+
+ frame_id++;
+
+
+ }
+
+ latest_img = null;
+ }
+
+ private bool ProcessFrame(CLNF clnf_model, FaceModelParameters clnf_params, RawImage frame, RawImage grayscale_frame, double fx, double fy, double cx, double cy)
+ {
+ detectionSucceeding = clnf_model.DetectLandmarksInVideo(grayscale_frame, clnf_params);
+ return detectionSucceeding;
+
+ }
+
+
// --------------------------------------------------------
// Button handling
// --------------------------------------------------------
@@ -105,10 +443,9 @@ namespace OpenFaceDemo
int cam_id = cam_sec.selected_camera.Item1;
int width = cam_sec.selected_camera.Item2;
int height = cam_sec.selected_camera.Item3;
-
- // TODO add
- //processing_thread = new Thread(() => ProcessingLoop(null, cam_id, width, height));
- //processing_thread.Start();
+
+ processing_thread = new Thread(() => ProcessingLoop(cam_id, width, height));
+ processing_thread.Start();
}
}));
@@ -127,8 +464,8 @@ namespace OpenFaceDemo
capture.Dispose();
}
- // TODO add
- //face_analyser.Dispose();
+
+ face_analyser.Dispose();
}
}