ROI reinitialization, preference for large faces.

This commit is contained in:
Tadas Baltrusaitis 2016-10-06 14:59:41 -04:00
parent f153e5c255
commit 99cc06631f
4 changed files with 61 additions and 15 deletions

View File

@ -6,7 +6,7 @@
xmlns:local="clr-namespace:OpenFaceDemo"
xmlns:of="clr-namespace:OpenFaceOffline;assembly=OpenFaceOffline"
mc:Ignorable="d"
Title="OpenFace Analyser" Height="800" Width="1300" MinWidth="700" MinHeight="450" Closing="Window_Closing" WindowStartupLocation="CenterScreen">
Title="OpenFace Analyser" Height="800" Width="1300" MinWidth="700" MinHeight="450" Closing="Window_Closing" WindowStartupLocation="CenterScreen" KeyDown="Window_KeyDown">
<Grid Name="MainGrid" Margin="-1,1,1.333,-1.333">
<Grid.ColumnDefinitions>
<ColumnDefinition Width="1.8*"/>
@ -27,8 +27,8 @@
</MenuItem>
</MenuItem>
</Menu>
<Border Name="VideoBorder" Grid.Row="1" Grid.Column="0" Grid.ColumnSpan="2" Grid.RowSpan="3" BorderBrush="Black" BorderThickness="1" Background="LightGray" Margin="5,5,0,0">
<of:OverlayImage x:Name="video" />
<Border Name="VideoBorder" Grid.Row="1" Grid.Column="0" Grid.ColumnSpan="2" Grid.RowSpan="3" BorderBrush="Black" BorderThickness="1" Background="LightGray" Margin="5,5,0,0" >
<of:OverlayImage x:Name="video" MouseDown="video_MouseDown" />
</Border>
<local:AxesTimeSeriesPlot NumVertGrid="5" x:Name="headPosePlot" ShowLegend="True" MinVal="-1" MaxVal="1" MinHeight="180" Grid.Row="4" Grid.Column="0" Padding="60 20 30 40" RangeLabel="Head pose" Orientation="Horizontal">

View File

@ -62,8 +62,10 @@ namespace OpenFaceDemo
FpsTracker processing_fps = new FpsTracker();
// Controlling the model reset
volatile bool detectionSucceeding = false;
volatile bool reset = false;
Point? resetPoint = null;
// For selecting webcams
CameraSelection cam_sec;
@ -73,6 +75,7 @@ namespace OpenFaceDemo
CLNF clnf_model;
FaceAnalyserManaged face_analyser;
public MainWindow()
{
InitializeComponent();
@ -245,7 +248,7 @@ namespace OpenFaceDemo
cx = grayFrame.Width / 2f;
cy = grayFrame.Height / 2f;
}
bool detectionSucceeding = ProcessFrame(clnf_model, clnf_params, frame, grayFrame, fx, fy, cx, cy);
double confidence = (-clnf_model.GetConfidence()) / 2.0 + 0.5;
@ -384,7 +387,16 @@ namespace OpenFaceDemo
if (reset)
{
clnf_model.Reset();
if (resetPoint.HasValue)
{
clnf_model.Reset(resetPoint.Value.X, resetPoint.Value.Y);
resetPoint = null;
}
else
{
clnf_model.Reset();
}
face_analyser.Reset();
reset = false;
@ -477,13 +489,32 @@ namespace OpenFaceDemo
thread_running = false;
processing_thread.Join();
capture.Dispose();
if (capture != null)
capture.Dispose();
}
face_analyser.Dispose();
clnf_model.Dispose();
this.Close();
if (face_analyser != null)
face_analyser.Dispose();
if(clnf_model != null)
clnf_model.Dispose();
}
private void Window_KeyDown(object sender, KeyEventArgs e)
{
if (e.Key == Key.R)
{
reset = true;
}
}
private void video_MouseDown(object sender, MouseButtonEventArgs e)
{
var clickPos = e.GetPosition(video);
resetPoint = new Point(clickPos.X / video.ActualWidth, clickPos.Y / video.ActualHeight);
reset = true;
}
}
}

View File

@ -548,6 +548,9 @@ void CLNF::Read(string main_location)
failures_in_a_row = -1;
preference_det.x = -1;
preference_det.y = -1;
}
// Resetting the model (for a new video, or complete reinitialisation)

View File

@ -1452,9 +1452,11 @@ bool DetectSingleFaceHOG(cv::Rect_<double>& o_region, const cv::Mat_<uchar>& int
// The tracker can return multiple faces
vector<cv::Rect_<double> > face_detections;
vector<double> confidences;
bool detect_success = LandmarkDetector::DetectFacesHOG(face_detections, intensity_img, detector, confidences);
// In case of multiple faces pick the biggest one
bool use_size = true;
if(detect_success)
{
@ -1463,10 +1465,14 @@ bool DetectSingleFaceHOG(cv::Rect_<double>& o_region, const cv::Mat_<uchar>& int
// keep the most confident one or the one closest to preference point if set
double best_so_far;
if(use_preferred)
{
{
best_so_far = sqrt((preference.x - (face_detections[0].width/2 + face_detections[0].x)) * (preference.x - (face_detections[0].width/2 + face_detections[0].x)) +
(preference.y - (face_detections[0].height/2 + face_detections[0].y)) * (preference.y - (face_detections[0].height/2 + face_detections[0].y)));
}
else if (use_size)
{
best_so_far = (face_detections[0].width + face_detections[0].height) / 2.0;
}
else
{
best_so_far = confidences[0];
@ -1481,10 +1487,16 @@ bool DetectSingleFaceHOG(cv::Rect_<double>& o_region, const cv::Mat_<uchar>& int
if(use_preferred)
{
dist = sqrt((preference.x - (face_detections[0].width/2 + face_detections[0].x)) * (preference.x - (face_detections[0].width/2 + face_detections[0].x)) +
(preference.y - (face_detections[0].height/2 + face_detections[0].y)) * (preference.y - (face_detections[0].height/2 + face_detections[0].y)));
dist = sqrt((preference.x - (face_detections[i].width/2 + face_detections[i].x)) * (preference.x - (face_detections[i].width/2 + face_detections[i].x)) +
(preference.y - (face_detections[i].height/2 + face_detections[i].y)) * (preference.y - (face_detections[i].height/2 + face_detections[i].y)));
better = dist < best_so_far;
}
else if (use_size)
{
dist = (face_detections[i].width + face_detections[i].height) / 2.0;
better = dist > best_so_far;
}
else
{
dist = confidences[i];