Merge pull request #7 from Affectiva/linux-3.0

Linux 3.0
Commit 0df3f24706 by Abdelrahman Mahmoud, 2016-04-11 23:00:00 +03:00
9 changed files with 542 additions and 113 deletions

.travis.yml Normal file

@ -0,0 +1,14 @@
dist: trusty
language: cpp
compiler:
- gcc-4.8
before_script:
- sudo apt-get install -y gcc-4.8 g++-4.8 libopencv-dev libboost1.55-all-dev cmake
- wget http://affdex-sdk-dist.s3-website-us-east-1.amazonaws.com/linux/download_sdk.html -O /tmp/affdex-sdk.tar.gz
- mkdir /tmp/affdex-sdk
- tar -xzvf /tmp/affdex-sdk.tar.gz -C /tmp/affdex-sdk
- mkdir build
- cd build
- cmake -DBOOST_ROOT=/usr/ -DOpenCV_DIR=/usr/ -DAFFDEX_DIR=/tmp/affdex-sdk ..
script:
- make

CMakeLists.txt Executable file

@ -0,0 +1,162 @@
cmake_minimum_required(VERSION 2.6)
set(rootProject cpp-sdk-samples)
project(${rootProject})
# CMake includes
include(cmake_modules/Macros.cmake) # Some custom macros we have written
# -------------------
# CMAKE - ENVIRONMENT
# --------------------
set(CXX_COMPILER_WARNINGS "-Wreturn-type" CACHE STRING "Compiler warnings to use")
set(CMAKE_VERBOSE ON CACHE BOOL "Verbose mode")
# Setup "Profile" build type
set(CMAKE_CXX_FLAGS_PROFILE "-O3 -pg")
set(CMAKE_C_FLAGS_PROFILE "-O3 -pg")
set(CMAKE_EXE_LINKER_FLAGS_PROFILE "-pg")
set(CMAKE_MODULE_LINKER_FLAGS_PROFILE "-pg")
# Setup additional compiler warnings
status("Setting up compiler warnings")
if( MSVC )
# Force to always compile with W4
if( CMAKE_CXX_FLAGS MATCHES "/W[0-4]" )
string( REGEX REPLACE "/W[0-4]" "/W4" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}" )
else()
set( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4" )
endif()
elseif( CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX )
# Update if necessary
set( CMAKE_CXX_FLAGS "-std=c++11 ${CMAKE_CXX_FLAGS} ${CXX_COMPILER_WARNINGS}" )
endif()
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
status("Updating compiler to make use of C++14")
set(CMAKE_XCODE_ATTRIBUTE_CLANG_CXX_LANGUAGE_STANDARD "c++14")
set(CMAKE_XCODE_ATTRIBUTE_CLANG_CXX_LIBRARY "libc++")
endif()
if(CMAKE_SIZEOF_VOID_P EQUAL 8)
set(bitness 64) # We have a 64-bit machine
else()
set(bitness 32) # We have a 32-bit machine
endif()
status("Bitness detected: ${bitness}")
# Setup install locations
if( NOT RUNTIME_INSTALL_DIRECTORY )
set( RUNTIME_INSTALL_DIRECTORY "bin" CACHE STRING "Install sub-directory of CMAKE_INSTALL_PREFIX for RUNTIME targets (binaries, and *.dll on windows)." )
endif( NOT RUNTIME_INSTALL_DIRECTORY )
# --------------------
# LOCATE DEPENDENCIES
# --------------------
# OpenCV
# ----------------------------------------------------------------------------
# find_package OpenCV to get OpenCV_FOUND, OpenCV_INCLUDE_DIRS, OpenCV_LIBS, OpenCV_LINK_LIBRARIES
# ----------------------------------------------------------------------------
set( OPENCV_COMPONENTS ml highgui core imgproc objdetect )
if( DEFINED OpenCV_DIR ) # Force the user to tell us which OpenCV they want (otherwise find_package can find the wrong one, cache it and changes to OpenCV_DIR are ignored)
find_package(OpenCV REQUIRED PATHS ${OpenCV_DIR})
if( NOT OpenCV_FOUND)
message(SEND_ERROR "Failed to find OpenCV. Double check that \"OpenCV_DIR\" points to the root build directory of OpenCV.")
endif(NOT OpenCV_FOUND)
else( DEFINED OpenCV_DIR )
set( OpenCV_DIR "" CACHE PATH "Root directory for opencv BUILD directory." )
message(FATAL_ERROR "\"OpenCV_DIR\" not set. Please explicitly provide the path to the root build directory of OpenCV.")
endif( DEFINED OpenCV_DIR )
# Boost package
# ----------------------------------------------------------------------------
# BOOST_ROOT is needed by BoostConfig.cmake configuration file to
# look for the Boost includes / libraries:
# Boost_FOUND, Boost_INCLUDE_DIRS, Boost_LIBRARY_DIRS, Boost_LIBRARIES,Boost_VERSION
set(Boost_USE_MULTITHREADED ON)
set( BOOST_COMPONENTS system filesystem date_time regex thread timer chrono serialization log log_setup program_options)
set( BOOST_MIN_VERSION "1.54.0" CACHE STRING "Minimum version of boost you would like to link against (e.g. C:/BOOST_1_55_0 is 1.55.0)" )
status("")
if( ANDROID )
find_host_package( Boost ${BOOST_MIN_VERSION} REQUIRED COMPONENTS ${BOOST_COMPONENTS} )
else( ANDROID )
find_package( Boost ${BOOST_MIN_VERSION} REQUIRED COMPONENTS ${BOOST_COMPONENTS} )
endif()
if( NOT Boost_FOUND )
if( NOT DEFINED BOOST_ROOT )
set( BOOST_ROOT "" CACHE PATH "Root directory for Boost." )
endif( NOT DEFINED BOOST_ROOT )
message( FATAL_ERROR "Failed to find Boost (or missing components). Double check that \"BOOST_ROOT\" is properly set")
endif( NOT Boost_FOUND )
# Affdex package
# ----------------------------------------------------------------------------
set (AFFDEX_FOUND FALSE)
if( DEFINED AFFDEX_DIR )
find_path(AFFDEX_INCLUDE_DIR FrameDetector.h
HINTS "${AFFDEX_DIR}/include" )
find_library(AFFDEX_LIBRARY NAMES affdex-native
HINTS "${AFFDEX_DIR}/lib" )
set(AFFDEX_INCLUDE_DIRS "${AFFDEX_INCLUDE_DIR}")
set(AFFDEX_LIBRARIES "${AFFDEX_LIBRARY}")
if (AFFDEX_INCLUDE_DIR AND AFFDEX_LIBRARY)
set(AFFDEX_FOUND TRUE)
endif (AFFDEX_INCLUDE_DIR AND AFFDEX_LIBRARY)
set(AFFDEX_DATA_DIR "${AFFDEX_DIR}/data")
if (NOT AFFDEX_FOUND)
message(FATAL_ERROR "Unable to find the Affdex SDK. Double check that \"AFFDEX_DIR\" points to the SDK root directory.")
endif (NOT AFFDEX_FOUND)
else (DEFINED AFFDEX_DIR)
message(FATAL_ERROR "Please define AFFDEX_DIR")
endif (DEFINED AFFDEX_DIR)
add_subdirectory(opencv-webcam-demo)
add_subdirectory(video-demo)
# --------------------
# SUMMARY
# --------------------
status("${CMAKE_INCLUDE_DIRECTORIES}")
status( "------- SUMMARY ------- " )
status( "Boost version found = ${Boost_MAJOR_VERSION}.${Boost_MINOR_VERSION}.${Boost_SUBMINOR_VERSION} (${Boost_VERSION})" )
foreach( comp ${BOOST_COMPONENTS} )
string( TOUPPER ${comp} COMP )
status( " - ${comp}" 1 THEN "${Boost_${COMP}_LIBRARY}" )
endforeach( comp )
status("")
status("Affdex")
foreach( lib ${AFFDEX_LIBRARIES} )
status( "${lib}")
endforeach( lib )
status("")
status( "OpenCV version found = ${OpenCV_VERSION_MAJOR}.${OpenCV_VERSION_MINOR}.${OpenCV_VERSION_PATCH} (${OpenCV_VERSION})" )
status( "OpenCV_LIB_DIR = ${OpenCV_DIR}/lib" )
foreach( lib ${OpenCV_LIBRARIES} )
foreach( comp ${OPENCV_COMPONENTS} )
if( ${lib} MATCHES ${comp} )
status( " - ${comp}" 1 THEN "${lib}" )
endif( ${lib} MATCHES ${comp} )
endforeach( comp )
endforeach( lib )
status("")
status( "Apps identified for building:" )
foreach( app ${${rootProject}_APPS} )
status( " - ${app}" )
endforeach( app ${${rootProject}_APPS} )

README.md

@ -1,6 +1,6 @@
# Sample Apps for Affdex SDK for Windows
# Sample Apps for Affdex SDK for Windows and Linux
Welcome to our repository on GitHub! Here you will find example code to get you started with our Affdex SDK 3.0 for Windows and begin emotion-enabling your own app! Documentation for the Windows SDK is at <a href=http://developer.affectiva.com/windows/>Affectiva's Developer Portal</a>.
Welcome to our repository on GitHub! Here you will find example code to get you started with our Affdex SDK 3.0 and begin emotion-enabling your own app! Documentation for the SDKs is available at <a href=http://developer.affectiva.com/>Affectiva's Developer Portal</a>.
[![Build status](https://ci.appveyor.com/api/projects/status/pn2y9h8a3nnkiw41?svg=true)]
(https://ci.appveyor.com/project/ahamino/win-sdk-samples)
@ -8,22 +8,47 @@ Welcome to our repository on GitHub! Here you will find example code to get you
Dependencies
------------
*Windows*
- Affdex SDK 3.0 (32 bit)
- Visual Studio 2013 or higher
*Linux*
- Ubuntu 14.04 or higher or CentOS 7 or higher
- Affdex SDK 3.0
- CMake 2.8 or higher
- GCC 4.8 or higher
*Additional dependencies for the C++ projects*
*Additional dependencies*
- OpenCV 3.1
- Boost 1.59
Installation
------------
- Download Affdex SDK for windows [from here](http://developer.affectiva.com/downloads)
- Download Affdex SDK [from here](http://developer.affectiva.com/downloads)
- Sign up for an evaluation license [by submitting this form](http://www.affectiva.com/45-day-free-trial/)
*Windows*
- Install the SDK using the MSI installer.
- The additional dependencies get installed automatically by NuGet.
*Ubuntu*
```bash
sudo apt-get install build-essential libopencv-dev libboost1.55-all-dev cmake
wget http://developer.affectiva.com/downloads/linux
mkdir $HOME/affdex-sdk
tar -xzvf affdex-cpp-sdk-3.0-linux-64bit.tar.gz -C $HOME/affdex-sdk
export AFFDEX_DATA_DIR=$HOME/affdex-sdk/data
git clone https://github.com/Affectiva/cpp-sdk-samples.git $HOME/sdk-samples
mkdir $HOME/build
cd $HOME/build
cmake -DOpenCV_DIR=/usr/ -DBOOST_ROOT=/usr/ -DAFFDEX_DIR=$HOME/affdex-sdk $HOME/sdk-samples
make
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HOME/affdex-sdk/lib
```
OpenCV-webcam-demo (c++)
------------------
@ -49,7 +74,7 @@ The following command line arguments can be used to run it:
Video-demo (c++)
----------
Project for demoing the Windows SDK [VideoDetector class](http://developer.affectiva.com/v3/windows/analyze-video/). It processes video files, displays the emotion metrics and exports the results in a csv file.
Project for demoing the C++ SDK [VideoDetector class](http://developer.affectiva.com/v3/windows/analyze-video/). It processes video files, displays the emotion metrics and exports the results in a csv file.
The following command line arguments can be used to run it:

cmake_modules/Macros.cmake Normal file

@ -0,0 +1,106 @@
# Extracting the subdirectories from a given folder
#
# Usage:
# SUBDIRLIST( SUBDIRS "path/to/base/dir" )
#
# Source: http://stackoverflow.com/questions/7787823/cmake-how-to-get-the-name-of-all-subdirectories-of-a-directory
MACRO(SUBDIRLIST result curdir)
FILE(GLOB children RELATIVE ${curdir} ${curdir}/*)
SET(dirlist "")
FOREACH(child ${children})
IF(IS_DIRECTORY ${curdir}/${child})
SET(dirlist ${dirlist} ${child})
ENDIF()
ENDFOREACH()
SET(${result} ${dirlist})
ENDMACRO()
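For illustration only (not part of this commit), a minimal sketch of how SUBDIRLIST might be consumed; the `samples` folder name is hypothetical:

```cmake
# Hypothetical layout: every immediate child of "samples/" is its own sample project.
SUBDIRLIST(SAMPLE_DIRS "${CMAKE_CURRENT_SOURCE_DIR}/samples")
FOREACH(dir ${SAMPLE_DIRS})
    add_subdirectory(samples/${dir})
ENDFOREACH()
```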
# Search packages for the host system instead of packages for the target system;
# in case of cross compilation these macros should be defined by the toolchain file
# adapted from OpenCV
if(NOT COMMAND find_host_package)
macro(find_host_package)
find_package(${ARGN})
endmacro()
endif()
if(NOT COMMAND find_host_program)
macro(find_host_program)
find_program(${ARGN})
endmacro()
endif()
macro(check_environment_variables)
foreach(_var ${ARGN})
if(NOT DEFINED ${_var} AND DEFINED ENV{${_var}})
set(__value "$ENV{${_var}}")
file(TO_CMAKE_PATH "${__value}" __value) # Assume that we receive paths
set(${_var} "${__value}")
message(STATUS "Update variable ${_var} from environment: ${${_var}}")
endif()
endforeach()
endmacro()
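A minimal usage sketch (the variable names are illustrative; AFFDEX_DATA_DIR is the environment variable the README asks users to export):

```cmake
# If these variables are set in the shell environment but not passed to cmake,
# copy the environment values into CMake variables of the same name.
check_environment_variables(AFFDEX_DATA_DIR BOOST_ROOT)
message(STATUS "AFFDEX_DATA_DIR = ${AFFDEX_DATA_DIR}")
```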
# Status convenience function.
# adapted from OpenCV
function(output_status msg)
message(STATUS "${msg}")
string(REPLACE "\\" "\\\\" msg "${msg}")
string(REPLACE "\"" "\\\"" msg "${msg}")
endfunction()
# Status report function.
# Automatically aligns the right column and selects text based on a condition.
# Usage:
# status(<text>)
# status(<heading> <value1> [<value2> ...])
# status(<heading> <condition> THEN <text for TRUE> ELSE <text for FALSE> )
# adapted from OpenCV
function(status text)
set(status_cond)
set(status_then)
set(status_else)
set(status_current_name "cond")
foreach(arg ${ARGN})
if(arg STREQUAL "THEN")
set(status_current_name "then")
elseif(arg STREQUAL "ELSE")
set(status_current_name "else")
else()
list(APPEND status_${status_current_name} ${arg})
endif()
endforeach()
if(DEFINED status_cond)
set(status_placeholder_length 18)
string(RANDOM LENGTH ${status_placeholder_length} ALPHABET " " status_placeholder)
string(LENGTH "${text}" status_text_length)
if(status_text_length LESS status_placeholder_length)
string(SUBSTRING "${text}${status_placeholder}" 0 ${status_placeholder_length} status_text)
elseif(DEFINED status_then OR DEFINED status_else)
output_status("${text}")
set(status_text "${status_placeholder}")
else()
set(status_text "${text}")
endif()
if(DEFINED status_then OR DEFINED status_else)
if(${status_cond})
string(REPLACE ";" " " status_then "${status_then}")
string(REGEX REPLACE "^[ \t]+" "" status_then "${status_then}")
output_status("${status_text} ${status_then}")
else()
string(REPLACE ";" " " status_else "${status_else}")
string(REGEX REPLACE "^[ \t]+" "" status_else "${status_else}")
output_status("${status_text} ${status_else}")
endif()
else()
string(REPLACE ";" " " status_cond "${status_cond}")
string(REGEX REPLACE "^[ \t]+" "" status_cond "${status_cond}")
output_status("${status_text} ${status_cond}")
endif()
else()
output_status("${text}")
endif()
endfunction()
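The three documented call forms, as a brief sketch (values shown are illustrative, not output of this project):

```cmake
status("------- Example -------")                                          # plain text
status("  Bitness" "${bitness}")                                           # heading + value
status("  OpenCV" OpenCV_FOUND THEN "${OpenCV_VERSION}" ELSE "not found")  # conditional
```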


@ -24,10 +24,10 @@ using namespace affdex;
class PlottingImageListener : public ImageListener
{
std::mutex mMutex;
std::deque<std::pair<Frame, std::map<FaceId, Face> > > mDataArray;
double mCaptureLastTS;
double mCaptureFPS;
double mProcessLastTS;
@ -38,17 +38,18 @@ class PlottingImageListener : public ImageListener
const int spacing = 10;
const float font_size = 0.5f;
const int font = cv::FONT_HERSHEY_COMPLEX_SMALL;
std::vector<std::string> expressions;
std::vector<std::string> emotions;
std::vector<std::string> emojis;
std::vector<std::string> headAngles;
std::map<affdex::Glasses, std::string> glassesMap;
std::map<affdex::Gender, std::string> genderMap;
public:
PlottingImageListener(std::ofstream &csv, const bool draw_display)
: fStream(csv), mDrawDisplay(draw_display), mStartT(std::chrono::system_clock::now()),
mCaptureLastTS(-1.0f), mCaptureFPS(-1.0f),
@ -59,35 +60,45 @@ public:
"upperLipRaise", "lipCornerDepressor", "chinRaise", "lipPucker", "lipPress",
"lipSuck", "mouthOpen", "smirk", "eyeClosure", "attention"
};
emotions = {
"joy", "fear", "disgust", "sadness", "anger",
"surprise", "contempt", "valence", "engagement"
};
headAngles = { "pitch", "yaw", "roll" };
emojis = std::vector<std::string> {
"relaxed", "smiley", "laughing",
"kissing", "disappointed",
"rage", "smirk", "wink",
"stuckOutTongueWinkingEye", "stuckOutTongue",
"flushed", "scream"
};
genderMap = std::map<affdex::Gender, std::string> {
{ affdex::Gender::Male, "male" },
{ affdex::Gender::Female, "female" },
{ affdex::Gender::Unknown, "unknown" },
};
glassesMap = std::map<affdex::Glasses, std::string> {
{ affdex::Glasses::Yes, "glasses" },
{ affdex::Glasses::No, "no glasses" }
};
fStream << "TimeStamp,faceId,interocularDistance,glasses,gender,";
fStream << "TimeStamp,faceId,interocularDistance,glasses,gender,dominantEmoji,";
for (std::string angle : headAngles) fStream << angle << ",";
for (std::string emotion : emotions) fStream << emotion << ",";
for (std::string expression : expressions) fStream << expression << ",";
for (std::string emoji : emojis) fStream << emoji << ",";
fStream << std::endl;
fStream.precision(4);
fStream << std::fixed;
}
FeaturePoint minPoint(VecFeaturePoint points)
{
VecFeaturePoint::iterator it = points.begin();
@ -99,7 +110,7 @@ public:
}
return ret;
};
FeaturePoint maxPoint( VecFeaturePoint points)
{
VecFeaturePoint::iterator it = points.begin();
@ -111,27 +122,27 @@ public:
}
return ret;
};
double getProcessingFrameRate()
{
std::lock_guard<std::mutex> lg(mMutex);
return mProcessFPS;
}
double getCaptureFrameRate()
{
std::lock_guard<std::mutex> lg(mMutex);
return mCaptureFPS;
}
int getDataSize()
{
std::lock_guard<std::mutex> lg(mMutex);
return mDataArray.size();
}
std::pair<Frame, std::map<FaceId, Face>> getData()
{
std::lock_guard<std::mutex> lg(mMutex);
@ -139,7 +150,7 @@ public:
mDataArray.pop_front();
return dpoint;
}
void onImageResults(std::map<FaceId, Face> faces, Frame image) override
{
std::lock_guard<std::mutex> lg(mMutex);
@ -150,58 +161,68 @@ public:
mProcessFPS = 1.0f / (seconds - mProcessLastTS);
mProcessLastTS = seconds;
};
void onImageCapture(Frame image) override
{
std::lock_guard<std::mutex> lg(mMutex);
mCaptureFPS = 1.0f / (image.getTimestamp() - mCaptureLastTS);
mCaptureLastTS = image.getTimestamp();
};
void outputToFile(const std::map<FaceId, Face> faces, const double timeStamp)
{
if (faces.empty())
{
fStream << timeStamp << ",nan,nan,no glasses,unknown,";
fStream << timeStamp << ",nan,nan,no glasses,unknown,unknown,";
for (std::string angle : headAngles) fStream << "nan,";
for (std::string emotion : emotions) fStream << "nan,";
for (std::string expression : expressions) fStream << "nan,";
for (std::string emoji : emojis) fStream << "nan,";
fStream << std::endl;
}
for (auto & face_id_pair : faces)
{
Face f = face_id_pair.second;
fStream << timeStamp << ","
<< f.id << ","
<< f.measurements.interocularDistance << ","
<< glassesMap[f.appearance.glasses] << ","
<< genderMap[f.appearance.gender] << ",";
<< genderMap[f.appearance.gender] << ","
<< affdex::EmojiToString(f.emojis.dominantEmoji) << ",";
float *values = (float *)&f.measurements.orientation;
for (std::string angle : headAngles)
{
fStream << (*values) << ",";
values++;
}
values = (float *)&f.emotions;
for (std::string emotion : emotions)
{
fStream << (*values) << ",";
values++;
}
values = (float *)&f.expressions;
for (std::string expression : expressions)
{
fStream << (*values) << ",";
values++;
}
values = (float *)&f.emojis;
for (std::string emoji : emojis)
{
fStream << (*values) << ",";
values++;
}
fStream << std::endl;
}
}
void drawValues(const float * first, const std::vector<std::string> names,
const int x, int &padding, const cv::Scalar clr,
cv::Mat img)
@ -217,18 +238,18 @@ public:
first++;
}
}
void draw(const std::map<FaceId, Face> faces, Frame image)
{
std::shared_ptr<byte> imgdata = image.getBGRByteArray();
cv::Mat img = cv::Mat(image.getHeight(), image.getWidth(), CV_8UC3, imgdata.get());
const int left_margin = 30;
cv::Scalar clr = cv::Scalar(255, 255, 255);
cv::Scalar clr = cv::Scalar(0, 0, 255);
cv::Scalar header_clr = cv::Scalar(255, 0, 0);
for (auto & face_id_pair : faces)
{
Face f = face_id_pair.second;
@ -239,49 +260,56 @@ public:
}
FeaturePoint tl = minPoint(points);
FeaturePoint br = maxPoint(points);
//Output the results of the different classifiers.
int padding = tl.y + 10;
cv::putText(img, "APPEARANCE", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
cv::putText(img, genderMap[f.appearance.gender], cv::Point(br.x, padding += spacing), font, font_size, clr);
cv::putText(img, glassesMap[f.appearance.glasses], cv::Point(br.x, padding += spacing), font, font_size, clr);
Orientation headAngles = f.measurements.orientation;
char strAngles[100];
sprintf(strAngles, "Pitch: %3.2f Yaw: %3.2f Roll: %3.2f Interocular: %3.2f",
headAngles.pitch, headAngles.yaw, headAngles.roll, f.measurements.interocularDistance);
char fId[10];
sprintf(fId, "ID: %i", f.id);
cv::putText(img, fId, cv::Point(br.x, padding += spacing), font, font_size, clr);
cv::putText(img, "MEASUREMENTS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
cv::putText(img, strAngles, cv::Point(br.x, padding += spacing), font, font_size, clr);
cv::putText(img, "EMOJIS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
cv::putText(img, "dominantEmoji: " + affdex::EmojiToString(f.emojis.dominantEmoji),
cv::Point(br.x, padding += spacing), font, font_size, clr);
drawValues((float *)&f.emojis, emojis, br.x, padding, clr, img);
cv::putText(img, "EXPRESSIONS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
drawValues((float *)&f.expressions, expressions, br.x, padding, clr, img);
cv::putText(img, "EMOTIONS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
drawValues((float *)&f.emotions, emotions, br.x, padding, clr, img);
}
char fps_str[50];
sprintf(fps_str, "capture fps: %2.0f", mCaptureFPS);
cv::putText(img, fps_str, cv::Point(img.cols - 110, img.rows - left_margin - spacing), font, font_size, clr);
sprintf(fps_str, "process fps: %2.0f", mProcessFPS);
cv::putText(img, fps_str, cv::Point(img.cols - 110, img.rows - left_margin), font, font_size, clr);
cv::imshow("analyze video", img);
cv::waitKey(5);
}
};

opencv-webcam-demo/CMakeLists.txt

@ -0,0 +1,34 @@
# --------------
# CMake file opencv-webcam-demo
# --------------
CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
set(subProject opencv-webcam-demo)
PROJECT(${subProject})
file(GLOB SRCS *.c*)
file(GLOB HDRS *.h*)
if( ${CMAKE_VERSION} VERSION_GREATER 2.8.11 )
get_filename_component(PARENT_DIR ${PROJECT_SOURCE_DIR} DIRECTORY) # PATH was updated to DIRECTORY in 2.8.12
else()
get_filename_component(PARENT_DIR ${PROJECT_SOURCE_DIR} PATH)
endif()
set(COMMON_HDRS "${PARENT_DIR}/common/")
file(GLOB COMMON_HDRS_FILES ${COMMON_HDRS}/*.h*)
add_executable(${subProject} ${SRCS} ${HDRS} ${COMMON_HDRS_FILES})
target_include_directories(${subProject} PRIVATE ${Boost_INCLUDE_DIRS} ${AFFDEX_INCLUDE_DIR} ${COMMON_HDRS})
target_link_libraries( ${subProject} ${AFFDEX_LIBRARIES} ${OpenCV_LIBS} ${Boost_LIBRARIES} )
#Add to the apps list
list( APPEND ${rootProject}_APPS ${subProject} )
set( ${rootProject}_APPS ${${rootProject}_APPS} PARENT_SCOPE )
# Installation steps
install( TARGETS ${subProject}
RUNTIME DESTINATION ${RUNTIME_INSTALL_DIRECTORY} )


@ -52,7 +52,7 @@ int main(int argsc, char ** argsv)
std::cerr.precision(precision);
std::cout.precision(precision);
po::options_description description("Project for demoing the Windows SDK CameraDetector class (grabbing and processing frames from the camera).");
po::options_description description("Project for demoing the Affdex SDK CameraDetector class (grabbing and processing frames from the camera).");
description.add_options()
("help,h", po::bool_switch()->default_value(false), "Display this help message.")
#ifdef _WIN32
@ -126,6 +126,7 @@ int main(int argsc, char ** argsv)
//Initialize detectors
frameDetector->setDetectAllEmotions(true);
frameDetector->setDetectAllExpressions(true);
frameDetector->setDetectAllEmojis(true);
frameDetector->setDetectGender(true);
frameDetector->setDetectGlasses(true);
frameDetector->setClassifierPath(DATA_FOLDER);

video-demo/CMakeLists.txt Executable file

@ -0,0 +1,34 @@
# --------------
# CMake file video-demo
# --------------
CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
set(subProject video-demo)
PROJECT(${subProject})
file(GLOB SRCS *.c*)
file(GLOB HDRS *.h*)
if( ${CMAKE_VERSION} VERSION_GREATER 2.8.11 )
get_filename_component(PARENT_DIR ${PROJECT_SOURCE_DIR} DIRECTORY) # PATH was updated to DIRECTORY in 2.8.12
else()
get_filename_component(PARENT_DIR ${PROJECT_SOURCE_DIR} PATH)
endif()
set(COMMON_HDRS "${PARENT_DIR}/common/")
file(GLOB COMMON_HDRS_FILES ${COMMON_HDRS}/*.h*)
add_executable(${subProject} ${SRCS} ${HDRS} ${COMMON_HDRS_FILES})
target_include_directories(${subProject} PRIVATE ${Boost_INCLUDE_DIRS} ${AFFDEX_INCLUDE_DIR} ${COMMON_HDRS})
target_link_libraries( ${subProject} ${AFFDEX_LIBRARIES} ${OpenCV_LIBS} ${Boost_LIBRARIES} )
#Add to the apps list
list( APPEND ${rootProject}_APPS ${subProject} )
set( ${rootProject}_APPS ${${rootProject}_APPS} PARENT_SCOPE )
# Installation steps
install( TARGETS ${subProject}
RUNTIME DESTINATION ${RUNTIME_INSTALL_DIRECTORY} )


@ -3,13 +3,14 @@
#include <chrono>
#include <fstream>
#include <opencv2/highgui/highgui.hpp>
#include <highgui.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <boost/filesystem.hpp>
#include <boost/timer/timer.hpp>
#include <boost/program_options.hpp>
#include "VideoDetector.h"
#include "PhotoDetector.h"
#include "AffdexException.h"
#include "AFaceListener.hpp"
@ -20,13 +21,17 @@
using namespace std;
using namespace affdex;
/// <summary>
/// Project demos how to use the Affdex Windows SDK VideoDetector
/// </summary>
int main(int argsc, char ** argsv)
{
//Defaults, overridden by the command line parameters
std::map<boost::filesystem::path, bool> VIDEO_EXTS = { {boost::filesystem::path(".avi"), 1},
{boost::filesystem::path(".mov"), 1},
{boost::filesystem::path(".flv"), 1},
{boost::filesystem::path(".webm"), 1},
{boost::filesystem::path(".wmv"), 1},
{boost::filesystem::path(".mp4"), 1} };
affdex::path DATA_FOLDER;
affdex::path LICENSE_PATH;
affdex::path videoPath;
@ -34,14 +39,14 @@ int main(int argsc, char ** argsv)
bool draw_display = true;
bool loop = false;
unsigned int nFaces = 1;
int faceDetectorMode = (int)FaceDetectorMode::SMALL_FACES;
int faceDetectorMode = (int)FaceDetectorMode::LARGE_FACES;
const int precision = 2;
std::cerr.precision(precision);
std::cout.precision(precision);
namespace po = boost::program_options; // abbreviate namespace
po::options_description description("Project for demoing the Windows SDK VideoDetector class (processing video files).");
po::options_description description("Project for demoing the Affdex SDK VideoDetector class (processing video files).");
description.add_options()
("help,h", po::bool_switch()->default_value(false), "Display this help message.")
#ifdef _WIN32
@ -76,7 +81,7 @@ int main(int argsc, char ** argsv)
std::cerr << "For help, use the -h option." << std::endl << std::endl;
return 1;
}
// Parse and check the data folder (with assets)
if (!boost::filesystem::exists(DATA_FOLDER))
{
@ -87,25 +92,37 @@ int main(int argsc, char ** argsv)
}
try
{
//Initialize the video file detector
VideoDetector videoDetector(process_framerate, nFaces, (affdex::FaceDetectorMode) faceDetectorMode);
std::shared_ptr<Detector> detector;
//Initialize out file
boost::filesystem::path csvPath(videoPath);
boost::filesystem::path fileExt = csvPath.extension();
csvPath.replace_extension(".csv");
std::ofstream csvFileStream(csvPath.c_str());
if (!csvFileStream.is_open())
{
std::cerr << "Unable to open csv file " << csvPath << std::endl;
return 1;
}
std::cout << "Max num of faces set to: " << videoDetector.getMaxNumberFaces() << std::endl;
if (VIDEO_EXTS[fileExt]) // IF it is a video file.
{
detector = std::make_shared<VideoDetector>(process_framerate, nFaces, (affdex::FaceDetectorMode) faceDetectorMode);
}
else //Otherwise it's a photo
{
detector = std::make_shared<PhotoDetector>(nFaces, (affdex::FaceDetectorMode) faceDetectorMode);
}
//VideoDetector videoDetector(process_framerate, nFaces, (affdex::FaceDetectorMode) faceDetectorMode);
std::cout << "Max num of faces set to: " << detector->getMaxNumberFaces() << std::endl;
std::string mode;
switch (videoDetector.getFaceDetectorMode())
switch (detector->getFaceDetectorMode())
{
case FaceDetectorMode::LARGE_FACES:
mode = "LARGE_FACES";
@ -116,66 +133,74 @@ int main(int argsc, char ** argsv)
default:
break;
}
std::cout << "Face detector mode set to: " << mode << std::endl;
shared_ptr<PlottingImageListener> listenPtr(new PlottingImageListener(csvFileStream, draw_display));
//Activate all the detectors
videoDetector.setDetectAllEmotions(true);
videoDetector.setDetectAllExpressions(true);
videoDetector.setDetectGender(true);
videoDetector.setDetectGlasses(true);
//Set the location of the data folder and license file
videoDetector.setClassifierPath(DATA_FOLDER);
videoDetector.setLicensePath(LICENSE_PATH);
//Add callback functions implementations
videoDetector.setImageListener(listenPtr.get());
videoDetector.start(); //Initialize the detectors .. call only once
detector->setDetectAllEmotions(true);
detector->setDetectAllExpressions(true);
detector->setDetectAllEmojis(true);
detector->setDetectGender(true);
detector->setDetectGlasses(true);
detector->setClassifierPath(DATA_FOLDER);
detector->setLicensePath(LICENSE_PATH);
detector->setImageListener(listenPtr.get());
detector->start(); //Initialize the detectors .. call only once
do
{
shared_ptr<StatusListener> videoListenPtr = std::make_shared<StatusListener>();
videoDetector.setProcessStatusListener(videoListenPtr.get());
videoDetector.process(videoPath); //Process a video
//For each frame processed
while (videoListenPtr->isRunning())
detector->setProcessStatusListener(videoListenPtr.get());
if (VIDEO_EXTS[fileExt])
{
((VideoDetector *)detector.get())->process(videoPath); //Process a video
}
else
{
//videoPath is of type std::wstring on windows, but std::string on other platforms.
cv::Mat img = cv::imread(std::string(videoPath.begin(), videoPath.end()));
// Create a frame
Frame frame(img.size().width, img.size().height, img.data, Frame::COLOR_FORMAT::BGR);
((PhotoDetector *)detector.get())->process(frame); //Process an image
}
do
{
if (listenPtr->getDataSize() > 0)
{
std::pair<Frame, std::map<FaceId, Face> > dataPoint = listenPtr->getData();
Frame frame = dataPoint.first;
std::map<FaceId, Face> faces = dataPoint.second;
//Draw on the GUI
if (draw_display)
{
listenPtr->draw(faces, frame);
}
std::cerr << "timestamp: " << frame.getTimestamp()
<< " cfps: " << listenPtr->getCaptureFrameRate()
<< " pfps: " << listenPtr->getProcessingFrameRate()
<< " faces: "<< faces.size() << endl;
//Output metrics to file
listenPtr->outputToFile(faces, frame.getTimestamp());
}
}
} while(VIDEO_EXTS[fileExt] && videoListenPtr->isRunning());
} while(loop);
videoDetector.stop();
detector->stop();
csvFileStream.close();
std::cout << "Output written to file: " << csvPath << std::endl;
}
catch (AffdexException ex)
{
std::cerr << ex.what();
}
return 0;
}