Moving the demo mode to the new interface.

This commit is contained in:
Tadas Baltrusaitis 2018-01-27 09:00:18 +00:00
parent 08e6622dc7
commit 10034b1b14
5 changed files with 57 additions and 440 deletions

View file

@ -34,9 +34,6 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows; using System.Windows;
using System.Windows.Controls; using System.Windows.Controls;
using System.Windows.Data; using System.Windows.Data;
@ -44,20 +41,18 @@ using System.Windows.Documents;
using System.Windows.Input; using System.Windows.Input;
using System.Windows.Media; using System.Windows.Media;
using System.Windows.Media.Imaging; using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using System.Threading; using System.Threading;
using System.Windows.Threading;
using System.Diagnostics;
// Internal libraries // Internal libraries
using OpenFaceOffline; using OpenFaceOffline;
using OpenCVWrappers; using OpenCVWrappers;
using CppInterop;
using CppInterop.LandmarkDetector; using CppInterop.LandmarkDetector;
using CameraInterop;
using FaceAnalyser_Interop; using FaceAnalyser_Interop;
using GazeAnalyser_Interop; using GazeAnalyser_Interop;
using System.Windows.Threading; using UtilitiesOF;
using System.Diagnostics;
namespace OpenFaceDemo namespace OpenFaceDemo
{ {
@ -90,7 +85,6 @@ namespace OpenFaceDemo
Thread processing_thread; Thread processing_thread;
// Some members for displaying the results // Some members for displaying the results
private Capture capture;
private WriteableBitmap latest_img; private WriteableBitmap latest_img;
private volatile bool thread_running; private volatile bool thread_running;
@ -98,7 +92,6 @@ namespace OpenFaceDemo
FpsTracker processing_fps = new FpsTracker(); FpsTracker processing_fps = new FpsTracker();
// Controlling the model reset // Controlling the model reset
volatile bool detectionSucceeding = false;
volatile bool reset = false; volatile bool reset = false;
Point? resetPoint = null; Point? resetPoint = null;
@ -106,7 +99,7 @@ namespace OpenFaceDemo
CameraSelection cam_sec; CameraSelection cam_sec;
// For tracking // For tracking
FaceModelParameters clnf_params; FaceModelParameters face_model_params;
CLNF landmark_detector; CLNF landmark_detector;
FaceAnalyserManaged face_analyser; FaceAnalyserManaged face_analyser;
GazeAnalyserManaged gaze_analyser; GazeAnalyserManaged gaze_analyser;
@ -121,8 +114,9 @@ namespace OpenFaceDemo
String root = AppDomain.CurrentDomain.BaseDirectory; String root = AppDomain.CurrentDomain.BaseDirectory;
clnf_params = new FaceModelParameters(root, false); // TODO check this // TODO, create a demo version of parameters
landmark_detector = new CLNF(clnf_params); face_model_params = new FaceModelParameters(root, false);
landmark_detector = new CLNF(face_model_params);
face_analyser = new FaceAnalyserManaged(root, true, 112); face_analyser = new FaceAnalyserManaged(root, true, 112);
gaze_analyser = new GazeAnalyserManaged(); gaze_analyser = new GazeAnalyserManaged();
@ -186,37 +180,11 @@ namespace OpenFaceDemo
} }
} }
// The main function call for processing images, video files or webcam feed // The main function call for processing the webcam feed
private void ProcessingLoop(int cam_id = -1, int width = -1, int height = -1, bool multi_face = false) private void ProcessingLoop(SequenceReader reader)
{ {
thread_running = true; thread_running = true;
// Create the video capture from a webcam and call the VideoLoop
capture = new Capture(cam_id, width, height);
if (capture.isOpened())
{
// Start the actual processing
VideoLoop();
}
else
{
string messageBoxText = "Failed to open a webcam";
string caption = "Webcam failure";
MessageBoxButton button = MessageBoxButton.OK;
MessageBoxImage icon = MessageBoxImage.Warning;
// Display message box
MessageBox.Show(messageBoxText, caption, button, icon);
}
}
// Capturing and processing the video frame by frame
private void VideoLoop()
{
Thread.CurrentThread.IsBackground = true; Thread.CurrentThread.IsBackground = true;
@ -226,12 +194,7 @@ namespace OpenFaceDemo
landmark_detector.Reset(); landmark_detector.Reset();
face_analyser.Reset(); face_analyser.Reset();
double fx, fy, cx, cy;
fx = 500.0;
fy = 500.0;
cx = cy = -1;
int frame_id = 0; int frame_id = 0;
double old_gaze_x = 0; double old_gaze_x = 0;
@ -246,46 +209,19 @@ namespace OpenFaceDemo
while (thread_running) while (thread_running)
{ {
//////////////////////////////////////////////
// CAPTURE FRAME AND DETECT LANDMARKS FOLLOWED BY THE REQUIRED IMAGE PROCESSING
//////////////////////////////////////////////
RawImage frame = null;
double progress = -1;
frame = new RawImage(capture.GetNextFrame(true)); // Loading an image file
progress = capture.GetProgress(); RawImage frame = new RawImage(reader.GetNextImage());
RawImage gray_frame = new RawImage(reader.GetCurrentFrameGray());
if (frame.Width == 0)
{
// This indicates that we reached the end of the video file
break;
}
lastFrameTime = CurrentTime; lastFrameTime = CurrentTime;
processing_fps.AddFrame(); processing_fps.AddFrame();
var grayFrame = new RawImage(capture.GetCurrentFrameGray()); bool detection_succeeding = landmark_detector.DetectLandmarksInVideo(gray_frame, face_model_params);
if (grayFrame == null) // The face analysis step (only done if recording AUs, HOGs or video)
{ face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, true);
Console.WriteLine("Gray is empty"); gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
continue;
}
// This is more or less guess work, but seems to work well enough
if (cx == -1)
{
fx = fx * (grayFrame.Width / 640.0);
fy = fy * (grayFrame.Height / 480.0);
fx = (fx + fy) / 2.0;
fy = fx;
cx = grayFrame.Width / 2f;
cy = grayFrame.Height / 2f;
}
bool detectionSucceeding = ProcessFrame(landmark_detector, clnf_params, frame, grayFrame, fx, fy, cx, cy);
double confidence = landmark_detector.GetConfidence(); double confidence = landmark_detector.GetConfidence();
@ -296,29 +232,26 @@ namespace OpenFaceDemo
List<double> pose = new List<double>(); List<double> pose = new List<double>();
landmark_detector.GetPose(pose, fx, fy, cx, cy); landmark_detector.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
List<double> non_rigid_params = landmark_detector.GetNonRigidParams(); List<double> non_rigid_params = landmark_detector.GetNonRigidParams();
double scale = landmark_detector.GetRigidParams()[0]; double scale = landmark_detector.GetRigidParams()[0];
double time_stamp = (DateTime.Now - (DateTime)startTime).TotalMilliseconds; double time_stamp = (DateTime.Now - (DateTime)startTime).TotalMilliseconds;
// The face analysis step (only done if recording AUs, HOGs or video)
face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detectionSucceeding, true);
gaze_analyser.AddNextFrame(landmark_detector, detectionSucceeding, fx, fy, cx, cy);
List<Tuple<Point, Point>> lines = null; List<Tuple<Point, Point>> lines = null;
List<Tuple<double, double>> landmarks = null; List<Tuple<double, double>> landmarks = null;
List<Tuple<double, double>> eye_landmarks = null; List<Tuple<double, double>> eye_landmarks = null;
List<Tuple<Point, Point>> gaze_lines = null; List<Tuple<Point, Point>> gaze_lines = null;
Tuple<double, double> gaze_angle = gaze_analyser.GetGazeAngle(); Tuple<double, double> gaze_angle = gaze_analyser.GetGazeAngle();
if (detectionSucceeding) if (detection_succeeding)
{ {
landmarks = landmark_detector.CalculateVisibleLandmarks(); landmarks = landmark_detector.CalculateVisibleLandmarks();
eye_landmarks = landmark_detector.CalculateVisibleEyeLandmarks(); eye_landmarks = landmark_detector.CalculateVisibleEyeLandmarks();
lines = landmark_detector.CalculateBox((float)fx, (float)fy, (float)cx, (float)cy); lines = landmark_detector.CalculateBox(reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
gaze_lines = gaze_analyser.CalculateGazeLines(scale, (float)fx, (float)fy, (float)cx, (float)cy); gaze_lines = gaze_analyser.CalculateGazeLines(scale, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
} }
// Visualisation // Visualisation
@ -373,7 +306,7 @@ namespace OpenFaceDemo
old_gaze_x = gazeDict[0]; old_gaze_x = gazeDict[0];
old_gaze_y = gazeDict[1]; old_gaze_y = gazeDict[1];
if (latest_img == null) if (latest_img == null)
{ {
latest_img = frame.CreateWriteableBitmap(); latest_img = frame.CreateWriteableBitmap();
@ -384,9 +317,8 @@ namespace OpenFaceDemo
video.Source = latest_img; video.Source = latest_img;
video.Confidence = confidence; video.Confidence = confidence;
video.FPS = processing_fps.GetFPS(); video.FPS = processing_fps.GetFPS();
video.Progress = progress;
if (!detectionSucceeding) if (!detection_succeeding)
{ {
video.OverlayLines.Clear(); video.OverlayLines.Clear();
video.OverlayPoints.Clear(); video.OverlayPoints.Clear();
@ -447,71 +379,53 @@ namespace OpenFaceDemo
} }
reader.Close();
latest_img = null; latest_img = null;
}
private bool ProcessFrame(CLNF clnf_model, FaceModelParameters clnf_params, RawImage frame, RawImage grayscale_frame, double fx, double fy, double cx, double cy)
{
detectionSucceeding = clnf_model.DetectLandmarksInVideo(grayscale_frame, clnf_params);
return detectionSucceeding;
} }
// -------------------------------------------------------- // --------------------------------------------------------
// Button handling // Button handling
// -------------------------------------------------------- // --------------------------------------------------------
private void openWebcamClick(object sender, RoutedEventArgs e) private void openWebcamClick(object sender, RoutedEventArgs e)
{
new Thread(() => openWebcam()).Start();
}
private void openWebcam()
{ {
StopTracking(); StopTracking();
Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 2, 0), (Action)(() => if (cam_sec == null)
{ {
// First close the cameras that might be open to avoid clashing with webcam opening cam_sec = new CameraSelection();
if (capture != null) }
{ else
capture.Dispose(); {
} cam_sec = new CameraSelection(cam_sec.cams);
cam_sec.Visibility = System.Windows.Visibility.Visible;
}
if (cam_sec == null) // Set the icon
{ Uri iconUri = new Uri("logo1.ico", UriKind.RelativeOrAbsolute);
cam_sec = new CameraSelection(); cam_sec.Icon = BitmapFrame.Create(iconUri);
}
else
{
cam_sec = new CameraSelection(cam_sec.cams);
cam_sec.Visibility = System.Windows.Visibility.Visible;
}
// Set the icon if (!cam_sec.no_cameras_found)
Uri iconUri = new Uri("logo1.ico", UriKind.RelativeOrAbsolute); cam_sec.ShowDialog();
cam_sec.Icon = BitmapFrame.Create(iconUri);
if (!cam_sec.no_cameras_found) if (cam_sec.camera_selected)
cam_sec.ShowDialog(); {
if (cam_sec.camera_selected) int cam_id = cam_sec.selected_camera.Item1;
{ int width = cam_sec.selected_camera.Item2;
int cam_id = cam_sec.selected_camera.Item1; int height = cam_sec.selected_camera.Item3;
int width = cam_sec.selected_camera.Item2;
int height = cam_sec.selected_camera.Item3; SequenceReader reader = new SequenceReader(cam_id, width, height);
processing_thread = new Thread(() => ProcessingLoop(cam_id, width, height)); processing_thread = new Thread(() => ProcessingLoop(reader));
processing_thread.Start(); processing_thread.Name = "Webcam processing";
processing_thread.Start();
}
}
}));
} }
// Cleanup stuff when closing the window // Cleanup stuff when closing the window
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e) private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{ {
@ -520,10 +434,7 @@ namespace OpenFaceDemo
// Stop capture and tracking // Stop capture and tracking
thread_running = false; thread_running = false;
processing_thread.Join(); processing_thread.Join();
if (capture != null)
capture.Dispose();
} }
if (face_analyser != null) if (face_analyser != null)
face_analyser.Dispose(); face_analyser.Dispose();

View file

@ -84,9 +84,6 @@
<Compile Include="UI_items\AxesTimeSeriesPlot.xaml.cs"> <Compile Include="UI_items\AxesTimeSeriesPlot.xaml.cs">
<DependentUpon>AxesTimeSeriesPlot.xaml</DependentUpon> <DependentUpon>AxesTimeSeriesPlot.xaml</DependentUpon>
</Compile> </Compile>
<Compile Include="UI_items\CameraSelection.xaml.cs">
<DependentUpon>CameraSelection.xaml</DependentUpon>
</Compile>
<Page Include="MainWindow.xaml"> <Page Include="MainWindow.xaml">
<Generator>MSBuild:Compile</Generator> <Generator>MSBuild:Compile</Generator>
<SubType>Designer</SubType> <SubType>Designer</SubType>
@ -103,10 +100,6 @@
<SubType>Designer</SubType> <SubType>Designer</SubType>
<Generator>MSBuild:Compile</Generator> <Generator>MSBuild:Compile</Generator>
</Page> </Page>
<Page Include="UI_items\CameraSelection.xaml">
<SubType>Designer</SubType>
<Generator>MSBuild:Compile</Generator>
</Page>
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>
<Compile Include="Properties\AssemblyInfo.cs"> <Compile Include="Properties\AssemblyInfo.cs">

View file

@ -1,43 +0,0 @@
<Window x:Class="OpenFaceDemo.CameraSelection"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
xmlns:local="clr-namespace:OpenFaceDemo"
mc:Ignorable="d"
Title="CameraSelection" Height="460" Width="600" ResizeMode="NoResize" WindowStartupLocation="CenterScreen" Closing="Window_Closing">
<Grid>
<Grid Name="camerasPanel" Visibility="Hidden">
<Grid.RowDefinitions>
<RowDefinition Height="auto" />
<RowDefinition Height="320" />
<RowDefinition Height="*" />
<RowDefinition Height="40" />
</Grid.RowDefinitions>
<Grid.ColumnDefinitions>
<ColumnDefinition Width="*"/>
</Grid.ColumnDefinitions>
<Label Grid.Row="0" Grid.ColumnSpan="10" HorizontalContentAlignment="Center" FontSize="20">Choose Video Source</Label>
<Grid Grid.Row="1" Grid.Column="0" Name="ThumbnailPanel" HorizontalAlignment="Center">
<Grid.RowDefinitions>
<RowDefinition Height="*" />
<RowDefinition Height="25" />
</Grid.RowDefinitions>
</Grid>
<!-- Click="OpenFile_Click" -->
<Button Width="150" Grid.Row="3" Height="35" Grid.ColumnSpan="10" FontSize="20" Click="Button_Click">Select camera</Button>
</Grid>
<Grid Name="LoadingGrid" Visibility="Visible">
<StackPanel Grid.Row="1" Name="ProgressBar" Margin="20">
<Label HorizontalAlignment="Center" FontSize="18">Loading Webcams</Label>
<ProgressBar Height="20" Minimum="0" Maximum="100" Name="pbStatus" IsIndeterminate="True" />
<Label HorizontalAlignment="Center" FontSize="18">Might take some time the first time</Label>
</StackPanel>
</Grid>
</Grid>
</Window>

View file

@ -1,243 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;
using CppInterop;
using System.Windows.Threading;
using System.Threading;
namespace OpenFaceDemo
{
/// <summary>
/// Interaction logic for CameraSelection.xaml
/// </summary>
public partial class CameraSelection : Window
{
List<Border> sample_images;
List<ComboBox> combo_boxes;
// id, width, height
public Tuple<int, int, int> selected_camera;
List<List<Tuple<int, int>>> resolutions_all;
int selected_camera_idx = -1;
// indicate if user clicked on camera
public bool camera_selected = false;
public bool no_cameras_found = false;
public List<Tuple<String, List<Tuple<int, int>>, OpenCVWrappers.RawImage>> cams;
public void PopulateCameraSelections()
{
this.KeyDown += new KeyEventHandler(CameraSelection_KeyDown);
// Finding the cameras here
if (cams == null)
{
String root = AppDomain.CurrentDomain.BaseDirectory;
cams = CameraInterop.Capture.GetCameras(root);
}
int i = 0;
sample_images = new List<Border>();
// Each cameras corresponding resolutions
resolutions_all = new List<List<Tuple<int, int>>>();
combo_boxes = new List<ComboBox>();
foreach (var s in cams)
{
var b = s.Item3.CreateWriteableBitmap();
s.Item3.UpdateWriteableBitmap(b);
b.Freeze();
Dispatcher.Invoke(() =>
{
int idx = i;
Image img = new Image();
img.Source = b;
img.Margin = new Thickness(5);
ColumnDefinition col_def = new ColumnDefinition();
ThumbnailPanel.ColumnDefinitions.Add(col_def);
Border img_border = new Border();
img_border.SetValue(Grid.ColumnProperty, i);
img_border.SetValue(Grid.RowProperty, 0);
img_border.CornerRadius = new CornerRadius(5);
StackPanel img_panel = new StackPanel();
Label camera_name_label = new Label();
camera_name_label.Content = s.Item1;
camera_name_label.HorizontalAlignment = System.Windows.HorizontalAlignment.Center;
img_panel.Children.Add(camera_name_label);
img.Height = 200;
img_panel.Children.Add(img);
img_border.Child = img_panel;
sample_images.Add(img_border);
ThumbnailPanel.Children.Add(img_border);
ComboBox resolutions = new ComboBox();
resolutions.Width = 80;
combo_boxes.Add(resolutions);
resolutions_all.Add(new List<Tuple<int, int>>());
foreach (var r in s.Item2)
{
resolutions.Items.Add(r.Item1 + "x" + r.Item2);
resolutions_all[resolutions_all.Count - 1].Add(new Tuple<int, int>(r.Item1, r.Item2));
}
resolutions.SelectedIndex = 0;
for (int res = 0; res < s.Item2.Count; ++res)
{
if (s.Item2[res].Item1 >= 640 && s.Item2[res].Item2 >= 480)
{
resolutions.SelectedIndex = res;
break;
}
}
resolutions.SetValue(Grid.ColumnProperty, i);
resolutions.SetValue(Grid.RowProperty, 2);
ThumbnailPanel.Children.Add(resolutions);
img_panel.MouseDown += (sender, e) =>
{
ChooseCamera(idx);
};
resolutions.DropDownOpened += (sender, e) =>
{
ChooseCamera(idx);
};
});
i++;
}
if (cams.Count > 0)
{
no_cameras_found = false;
Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
{
ChooseCamera(0);
}));
}
else
{
string messageBoxText = "No cameras detected, please connect a webcam";
string caption = "Camera error!";
MessageBoxButton button = MessageBoxButton.OK;
MessageBoxImage icon = MessageBoxImage.Warning;
MessageBox.Show(messageBoxText, caption, button, icon);
selected_camera_idx = -1;
no_cameras_found = true;
Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
{
this.Close();
}));
}
}
public CameraSelection()
{
InitializeComponent();
// We want to display the loading screen first
Thread load_cameras = new Thread(LoadCameras);
load_cameras.Start();
}
public void LoadCameras()
{
Thread.CurrentThread.IsBackground = true;
PopulateCameraSelections();
Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
{
LoadingGrid.Visibility = System.Windows.Visibility.Hidden;
camerasPanel.Visibility = System.Windows.Visibility.Visible;
}));
}
public CameraSelection(List<Tuple<String, List<Tuple<int, int>>, OpenCVWrappers.RawImage>> cams)
{
InitializeComponent();
this.cams = cams;
PopulateCameraSelections();
Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
{
LoadingGrid.Visibility = System.Windows.Visibility.Hidden;
camerasPanel.Visibility = System.Windows.Visibility.Visible;
}));
}
private void ChooseCamera(int idx)
{
selected_camera_idx = idx;
foreach (var img in sample_images)
{
img.BorderThickness = new Thickness(1);
img.BorderBrush = Brushes.Gray;
}
sample_images[idx].BorderThickness = new Thickness(4);
sample_images[idx].BorderBrush = Brushes.Green;
}
private void Button_Click(object sender, RoutedEventArgs e)
{
Select();
}
private void CameraSelection_KeyDown(object sender, KeyEventArgs e)
{
if (e.Key == Key.Enter)
{
Select();
}
}
private void Select()
{
camera_selected = true;
int selected_res = combo_boxes[selected_camera_idx].SelectedIndex;
Tuple<int, int> resolution_selected = resolutions_all[selected_camera_idx][selected_res];
selected_camera = new Tuple<int, int, int>(selected_camera_idx, resolution_selected.Item1, resolution_selected.Item2);
this.Close();
}
// Do not close it as user might want to open it again
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
}
}
}

View file

@ -43,7 +43,6 @@ using System.Windows.Media.Imaging;
// Internal libraries // Internal libraries
using OpenCVWrappers; using OpenCVWrappers;
using CppInterop.LandmarkDetector; using CppInterop.LandmarkDetector;
using CameraInterop;
using FaceAnalyser_Interop; using FaceAnalyser_Interop;
using GazeAnalyser_Interop; using GazeAnalyser_Interop;
using FaceDetectorInterop; using FaceDetectorInterop;
@ -223,7 +222,7 @@ namespace OpenFaceOffline
// The face analysis step (for AUs and eye gaze) // The face analysis step (for AUs and eye gaze)
face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, false); face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, false);
gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, fx, fy, cx, cy); gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
// Only the final face will contain the details // Only the final face will contain the details
VisualizeFeatures(frame, visualizer_of, landmark_detector.CalculateAllLandmarks(), detection_succeeding, true, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), progress); VisualizeFeatures(frame, visualizer_of, landmark_detector.CalculateAllLandmarks(), detection_succeeding, true, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), progress);