Update the UI to equalizer UI (#40)

* Update the UI to equalizer UI

* Fix compatibility with VS
Abdelrahman Mahmoud, 2018-03-14 15:00:50 -04:00 (committed by GitHub)
parent db582b8caa
commit 055967d7d3
11 changed files with 18468 additions and 166 deletions

common/PlottingImageListener.hpp

@@ -7,17 +7,12 @@
 #include <thread>
 #include <mutex>
 #include <fstream>
-#include <map>
-#include <opencv2/highgui/highgui.hpp>
-#include <opencv2/imgproc/imgproc.hpp>
 #include <boost/filesystem.hpp>
 #include <boost/timer/timer.hpp>
-#include <boost/program_options.hpp>
-#include <boost/algorithm/string.hpp>
+#include "Visualizer.h"
 #include "ImageListener.h"
-#include "typedefs.h"
 using namespace affdex;
@@ -35,19 +30,10 @@ class PlottingImageListener
     std::ofstream &fStream;
     std::chrono::time_point<std::chrono::system_clock> mStartT;
     const bool mDrawDisplay;
-    const int spacing = 10;
+    const int spacing = 20;
     const float font_size = 0.5f;
     const int font = cv::FONT_HERSHEY_COMPLEX_SMALL;
-    std::vector<std::string> expressions;
-    std::vector<std::string> emotions;
-    std::vector<std::string> emojis;
-    std::vector<std::string> headAngles;
-    std::map<affdex::Glasses, std::string> glassesMap;
-    std::map<affdex::Gender, std::string> genderMap;
-    std::map<affdex::Age, std::string> ageMap;
-    std::map<affdex::Ethnicity, std::string> ethnicityMap;
+    Visualizer viz;
 public:
@@ -57,72 +43,18 @@ public:
     mCaptureLastTS(-1.0f), mCaptureFPS(-1.0f),
     mProcessLastTS(-1.0f), mProcessFPS(-1.0f)
     {
-        expressions = {
-            "smile", "innerBrowRaise", "browRaise", "browFurrow", "noseWrinkle",
-            "upperLipRaise", "lipCornerDepressor", "chinRaise", "lipPucker", "lipPress",
-            "lipSuck", "mouthOpen", "smirk", "eyeClosure", "attention", "eyeWiden", "cheekRaise",
-            "lidTighten", "dimpler", "lipStretch", "jawDrop"
-        };
-        emotions = {
-            "joy", "fear", "disgust", "sadness", "anger",
-            "surprise", "contempt", "valence", "engagement"
-        };
-        headAngles = { "pitch", "yaw", "roll" };
-        emojis = std::vector<std::string> {
-            "relaxed", "smiley", "laughing",
-            "kissing", "disappointed",
-            "rage", "smirk", "wink",
-            "stuckOutTongueWinkingEye", "stuckOutTongue",
-            "flushed", "scream"
-        };
-        genderMap = std::map<affdex::Gender, std::string> {
-            { affdex::Gender::Male, "male" },
-            { affdex::Gender::Female, "female" },
-            { affdex::Gender::Unknown, "unknown" },
-        };
-        glassesMap = std::map<affdex::Glasses, std::string> {
-            { affdex::Glasses::Yes, "yes" },
-            { affdex::Glasses::No, "no" }
-        };
-        ageMap = std::map<affdex::Age, std::string> {
-            { affdex::Age::AGE_UNKNOWN, "unknown"},
-            { affdex::Age::AGE_UNDER_18, "under 18" },
-            { affdex::Age::AGE_18_24, "18-24" },
-            { affdex::Age::AGE_25_34, "25-34" },
-            { affdex::Age::AGE_35_44, "35-44" },
-            { affdex::Age::AGE_45_54, "45-54" },
-            { affdex::Age::AGE_55_64, "55-64" },
-            { affdex::Age::AGE_65_PLUS, "65 plus" }
-        };
-        ethnicityMap = std::map<affdex::Ethnicity, std::string> {
-            { affdex::Ethnicity::UNKNOWN, "unknown"},
-            { affdex::Ethnicity::CAUCASIAN, "caucasian" },
-            { affdex::Ethnicity::BLACK_AFRICAN, "black african" },
-            { affdex::Ethnicity::SOUTH_ASIAN, "south asian" },
-            { affdex::Ethnicity::EAST_ASIAN, "east asian" },
-            { affdex::Ethnicity::HISPANIC, "hispanic" }
-        };
         fStream << "TimeStamp,faceId,interocularDistance,glasses,age,ethnicity,gender,dominantEmoji,";
-        for (std::string angle : headAngles) fStream << angle << ",";
-        for (std::string emotion : emotions) fStream << emotion << ",";
-        for (std::string expression : expressions) fStream << expression << ",";
-        for (std::string emoji : emojis) fStream << emoji << ",";
+        for (std::string angle : viz.HEAD_ANGLES) fStream << angle << ",";
+        for (std::string emotion : viz.EMOTIONS) fStream << emotion << ",";
+        for (std::string expression : viz.EXPRESSIONS) fStream << expression << ",";
+        for (std::string emoji : viz.EMOJIS) fStream << emoji << ",";
         fStream << std::endl;
         fStream.precision(4);
         fStream << std::fixed;
     }
-    FeaturePoint minPoint(VecFeaturePoint points)
+    cv::Point2f minPoint(VecFeaturePoint points)
     {
         VecFeaturePoint::iterator it = points.begin();
         FeaturePoint ret = *it;
@@ -131,10 +63,10 @@ public:
             if (it->x < ret.x) ret.x = it->x;
             if (it->y < ret.y) ret.y = it->y;
         }
-        return ret;
+        return cv::Point2f(ret.x, ret.y);
     };
-    FeaturePoint maxPoint(VecFeaturePoint points)
+    cv::Point2f maxPoint(VecFeaturePoint points)
     {
         VecFeaturePoint::iterator it = points.begin();
         FeaturePoint ret = *it;
@@ -143,7 +75,7 @@ public:
             if (it->x > ret.x) ret.x = it->x;
             if (it->y > ret.y) ret.y = it->y;
         }
-        return ret;
+        return cv::Point2f(ret.x, ret.y);
     };
@@ -197,10 +129,10 @@ public:
         if (faces.empty())
         {
             fStream << timeStamp << ",nan,nan,no,unknown,unknown,unknown,unknown,";
-            for (std::string angle : headAngles) fStream << "nan,";
-            for (std::string emotion : emotions) fStream << "nan,";
-            for (std::string expression : expressions) fStream << "nan,";
-            for (std::string emoji : emojis) fStream << "nan,";
+            for (std::string angle : viz.HEAD_ANGLES) fStream << "nan,";
+            for (std::string emotion : viz.EMOTIONS) fStream << "nan,";
+            for (std::string expression : viz.EXPRESSIONS) fStream << "nan,";
+            for (std::string emoji : viz.EMOJIS) fStream << "nan,";
             fStream << std::endl;
         }
         for (auto & face_id_pair : faces)
@@ -210,35 +142,35 @@ public:
             fStream << timeStamp << ","
                 << f.id << ","
                 << f.measurements.interocularDistance << ","
-                << glassesMap[f.appearance.glasses] << ","
-                << ageMap[f.appearance.age] << ","
-                << ethnicityMap[f.appearance.ethnicity] << ","
-                << genderMap[f.appearance.gender] << ","
+                << viz.GLASSES_MAP[f.appearance.glasses] << ","
+                << viz.AGE_MAP[f.appearance.age] << ","
+                << viz.ETHNICITY_MAP[f.appearance.ethnicity] << ","
+                << viz.GENDER_MAP[f.appearance.gender] << ","
                 << affdex::EmojiToString(f.emojis.dominantEmoji) << ",";
             float *values = (float *)&f.measurements.orientation;
-            for (std::string angle : headAngles)
+            for (std::string angle : viz.HEAD_ANGLES)
             {
                 fStream << (*values) << ",";
                 values++;
             }
             values = (float *)&f.emotions;
-            for (std::string emotion : emotions)
+            for (std::string emotion : viz.EMOTIONS)
             {
                 fStream << (*values) << ",";
                 values++;
             }
             values = (float *)&f.expressions;
-            for (std::string expression : expressions)
+            for (std::string expression : viz.EXPRESSIONS)
             {
                 fStream << (*values) << ",";
                 values++;
             }
             values = (float *)&f.emojis;
-            for (std::string emoji : emojis)
+            for (std::string emoji : viz.EMOTIONS)
             {
                 fStream << (*values) << ",";
                 values++;
@@ -248,94 +180,57 @@ public:
             }
         }
-    void drawValues(const float * first, const std::vector<std::string> names,
-                    const int x, int &padding, const cv::Scalar clr,
-                    cv::Mat img)
+    std::vector<cv::Point2f> CalculateBoundingBox(VecFeaturePoint points)
     {
-        for (std::string name : names)
-        {
-            if (std::abs(*first) > 5.0f)
-            {
-                char m[50];
-                sprintf(m, "%s: %3.2f", name.c_str(), (*first));
-                cv::putText(img, m, cv::Point(x, padding += spacing), font, font_size, clr);
-            }
-            first++;
-        }
+        std::vector<cv::Point2f> ret;
+        //Top Left
+        ret.push_back(minPoint(points));
+        //Bottom Right
+        ret.push_back(maxPoint(points));
+        //Top Right
+        ret.push_back(cv::Point2f(ret[1].x,
+                                  ret[0].y));
+        //Bottom Left
+        ret.push_back(cv::Point2f(ret[0].x,
+                                  ret[1].y));
+        return ret;
     }
     void draw(const std::map<FaceId, Face> faces, Frame image)
     {
-        std::shared_ptr<byte> imgdata = image.getBGRByteArray();
-        cv::Mat img = cv::Mat(image.getHeight(), image.getWidth(), CV_8UC3, imgdata.get());
         const int left_margin = 30;
         cv::Scalar clr = cv::Scalar(0, 0, 255);
         cv::Scalar header_clr = cv::Scalar(255, 0, 0);
+        std::shared_ptr<byte> imgdata = image.getBGRByteArray();
+        cv::Mat img = cv::Mat(image.getHeight(), image.getWidth(), CV_8UC3, imgdata.get());
+        viz.updateImage(img);
         for (auto & face_id_pair : faces)
         {
             Face f = face_id_pair.second;
             VecFeaturePoint points = f.featurePoints;
-            for (auto& point : points) //Draw face feature points.
-            {
-                cv::circle(img, cv::Point(point.x, point.y), 2.0f, cv::Scalar(0, 0, 255));
-            }
-            FeaturePoint tl = minPoint(points);
-            FeaturePoint br = maxPoint(points);
-            //Output the results of the different classifiers.
-            int padding = tl.y + 10;
-            cv::putText(img, "APPEARANCE", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
-            cv::putText(img, genderMap[f.appearance.gender], cv::Point(br.x, padding += spacing), font, font_size, clr);
-            cv::putText(img, glassesMap[f.appearance.glasses], cv::Point(br.x, padding += spacing), font, font_size, clr);
-            cv::putText(img, ageMap[f.appearance.age], cv::Point(br.x, padding += spacing), font, font_size, clr);
-            cv::putText(img, ethnicityMap[f.appearance.ethnicity], cv::Point(br.x, padding += spacing), font, font_size, clr);
-            Orientation headAngles = f.measurements.orientation;
-            char strAngles[100];
-            sprintf(strAngles, "Pitch: %3.2f Yaw: %3.2f Roll: %3.2f Interocular: %3.2f",
-                    headAngles.pitch, headAngles.yaw, headAngles.roll, f.measurements.interocularDistance);
-            char fId[10];
-            sprintf(fId, "ID: %i", f.id);
-            cv::putText(img, fId, cv::Point(br.x, padding += spacing), font, font_size, clr);
-            cv::putText(img, "MEASUREMENTS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
-            cv::putText(img, strAngles, cv::Point(br.x, padding += spacing), font, font_size, clr);
-            cv::putText(img, "EMOJIS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
-            cv::putText(img, "dominantEmoji: " + affdex::EmojiToString(f.emojis.dominantEmoji),
-                        cv::Point(br.x, padding += spacing), font, font_size, clr);
-            drawValues((float *)&f.emojis, emojis, br.x, padding, clr, img);
-            cv::putText(img, "EXPRESSIONS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
-            drawValues((float *)&f.expressions, expressions, br.x, padding, clr, img);
-            cv::putText(img, "EMOTIONS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
-            drawValues((float *)&f.emotions, emotions, br.x, padding, clr, img);
-        }
-        char fps_str[50];
-        sprintf(fps_str, "capture fps: %2.0f", mCaptureFPS);
-        cv::putText(img, fps_str, cv::Point(img.cols - 110, img.rows - left_margin - spacing), font, font_size, clr);
-        sprintf(fps_str, "process fps: %2.0f", mProcessFPS);
-        cv::putText(img, fps_str, cv::Point(img.cols - 110, img.rows - left_margin), font, font_size, clr);
-        cv::imshow("analyze video", img);
+            std::vector<cv::Point2f> bounding_box = CalculateBoundingBox(points);
+            // Draw Facial Landmarks Points
+            //viz.drawPoints(points);
+            // Draw bounding box
+            viz.drawBoundingBox(bounding_box[0], bounding_box[1], f.emotions.valence);
+            // Draw a face on screen
+            viz.drawFaceMetrics(f, bounding_box);
+        }
+        viz.showImage();
         std::lock_guard<std::mutex> lg(mMutex);
-        cv::waitKey(30);
     }
 };
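Condensed, the refactored draw() is now a thin wrapper around the new Visualizer member: it hands the frame to the Visualizer, draws per-face overlays, and shows the result once per frame. A minimal sketch of that flow, using only names that appear in the diff above (error handling omitted):

    // Sketch of the new per-frame drawing path, condensed from the diff above.
    void draw(const std::map<FaceId, Face> faces, Frame image)
    {
        std::shared_ptr<byte> imgdata = image.getBGRByteArray();
        cv::Mat img(image.getHeight(), image.getWidth(), CV_8UC3, imgdata.get());
        viz.updateImage(img);                                     // store frame and stamp the logo
        for (auto &face_id_pair : faces)
        {
            Face f = face_id_pair.second;
            std::vector<cv::Point2f> box = CalculateBoundingBox(f.featurePoints);
            viz.drawBoundingBox(box[0], box[1], f.emotions.valence); // valence-colored rectangle
            viz.drawFaceMetrics(f, box);                          // equalizer bars plus appearance text
        }
        viz.showImage();                                          // one imshow/waitKey per frame
        std::lock_guard<std::mutex> lg(mMutex);
    }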

common/Visualizer.cpp (new file, 343 lines)

@@ -0,0 +1,343 @@
#include "Visualizer.h"
#include <boost/format.hpp>
#include "affdex_small_logo.h"
#include <algorithm>
Visualizer::Visualizer():
GREEN_COLOR_CLASSIFIERS({
"joy"
}),
RED_COLOR_CLASSIFIERS({
"anger", "disgust", "sadness", "fear", "contempt"
})
{
logo_resized = false;
logo = cv::imdecode(cv::InputArray(small_logo), CV_LOAD_IMAGE_UNCHANGED);
EXPRESSIONS = {
"smile", "innerBrowRaise", "browRaise", "browFurrow", "noseWrinkle",
"upperLipRaise", "lipCornerDepressor", "chinRaise", "lipPucker", "lipPress",
"lipSuck", "mouthOpen", "smirk", "eyeClosure", "attention", "eyeWiden", "cheekRaise",
"lidTighten", "dimpler", "lipStretch", "jawDrop"
};
EMOTIONS = {
"joy", "fear", "disgust", "sadness", "anger",
"surprise", "contempt", "valence", "engagement"
};
HEAD_ANGLES = { "pitch", "yaw", "roll" };
EMOJIS = std::vector<std::string> {
"relaxed", "smiley", "laughing",
"kissing", "disappointed",
"rage", "smirk", "wink",
"stuckOutTongueWinkingEye", "stuckOutTongue",
"flushed", "scream"
};
GENDER_MAP = std::map<affdex::Gender, std::string> {
{ affdex::Gender::Male, "male" },
{ affdex::Gender::Female, "female" },
{ affdex::Gender::Unknown, "unknown" },
};
GLASSES_MAP = std::map<affdex::Glasses, std::string> {
{ affdex::Glasses::Yes, "yes" },
{ affdex::Glasses::No, "no" }
};
AGE_MAP = std::map<affdex::Age, std::string> {
{ affdex::Age::AGE_UNKNOWN, "unknown"},
{ affdex::Age::AGE_UNDER_18, "under 18" },
{ affdex::Age::AGE_18_24, "18-24" },
{ affdex::Age::AGE_25_34, "25-34" },
{ affdex::Age::AGE_35_44, "35-44" },
{ affdex::Age::AGE_45_54, "45-54" },
{ affdex::Age::AGE_55_64, "55-64" },
{ affdex::Age::AGE_65_PLUS, "65 plus" }
};
ETHNICITY_MAP = std::map<affdex::Ethnicity, std::string> {
{ affdex::Ethnicity::UNKNOWN, "unknown"},
{ affdex::Ethnicity::CAUCASIAN, "caucasian" },
{ affdex::Ethnicity::BLACK_AFRICAN, "black african" },
{ affdex::Ethnicity::SOUTH_ASIAN, "south asian" },
{ affdex::Ethnicity::EAST_ASIAN, "east asian" },
{ affdex::Ethnicity::HISPANIC, "hispanic" }
};
}
void Visualizer::drawFaceMetrics(affdex::Face face, std::vector<cv::Point2f> bounding_box)
{
cv::Scalar white_color = cv::Scalar(255, 255, 255);
//Draw Right side metrics
int padding = bounding_box[0].y; //Top left Y
drawValues((float *)&face.expressions, EXPRESSIONS,
bounding_box[2].x + spacing, padding, white_color, false);
padding = bounding_box[2].y; //Top left Y
//Draw Head Angles
drawHeadOrientation(face.measurements.orientation,
bounding_box[0].x - spacing, padding);
//Draw Appearance
drawAppearance(face.appearance, bounding_box[0].x - spacing, padding);
//Draw Left side metrics
drawValues((float *)&face.emotions, EMOTIONS,
bounding_box[0].x - spacing, padding, white_color, true);
}
void Visualizer::drawValues(const float * first, const std::vector<std::string> names,
const int x, int &padding, const cv::Scalar clr, const bool align_right)
{
for (std::string name : names)
{
drawClassifierOutput(name, (*first), cv::Point(x, padding += spacing), align_right);
first++;
}
}
void Visualizer::updateImage(cv::Mat output_img)
{
img = output_img;
if (!logo_resized)
{
double logo_width = (logo.size().width > img.size().width*0.25 ? img.size().width*0.25 : logo.size().width);
double logo_height = ((double)logo_width) * ((double)logo.size().height / logo.size().width);
cv::resize(logo, logo, cv::Size(logo_width, logo_height));
logo_resized = true;
}
cv::Mat roi = img(cv::Rect(img.cols - logo.cols - 10, 10, logo.cols, logo.rows));
overlayImage(logo, roi, cv::Point(0, 0));
}
void Visualizer::drawPoints(affdex::VecFeaturePoint points)
{
for (auto& point : points) //Draw face feature points.
{
cv::circle(img, cv::Point(point.x, point.y), 2.0f, cv::Scalar(255, 255, 255));
}
}
void Visualizer::drawBoundingBox(cv::Point2f top_left, cv::Point2f bottom_right, float valence)
{
//Draw bounding box
const ColorgenRedGreen valence_color_generator( -100, 100 );
cv::rectangle( img, top_left, bottom_right,
valence_color_generator(valence), 3);
}
/** @brief DrawText prints text on screen either right or left justified at the anchor location (loc)
* @param name -- Name of the classifier
* @param value -- Value we are trying to display
* @param loc -- Exact location. When align_right is (true/false) this should be the (upper-right, upper-left)
* @param align_right -- Whether to right or left justify the text
* @param color -- Color
*/
void Visualizer::drawText(const std::string& name, const std::string& value,
const cv::Point2f loc, bool align_right, cv::Scalar color)
{
const int block_width = 8;
const int margin = 2;
const int block_size = 10;
const int max_blocks = 100/block_size;
cv::Point2f display_loc = loc;
const std::string label = name+": ";
if( align_right )
{
display_loc.x -= (margin+block_width) * max_blocks;
int baseline=0;
cv::Size txtSize = cv::getTextSize(label, cv::FONT_HERSHEY_SIMPLEX, 0.5f, 5,&baseline);
display_loc.x -= txtSize.width;
}
cv::putText(img, label+value, display_loc, cv::FONT_HERSHEY_SIMPLEX, 0.5f, color, 1);
}
/** @brief DrawClassifierOutput handles choosing between equalizer or text as well as defining the colors
* @param classifier -- Name of the classifier
* @param value -- Value we are trying to display
* @param loc -- Exact location. When align_right is (true/false) this should be the (upper-right, upper-left)
* @param align_right -- Whether to right or left justify the text
*/
void Visualizer::drawClassifierOutput(const std::string& classifier,
const float value, const cv::Point2f& loc, bool align_right)
{
static const ColorgenLinear white_yellow_generator( 0, 100, cv::Scalar(255,255,255), cv::Scalar(0, 255, 255));
static const ColorgenRedGreen valence_color_generator( -100, 100 );
// Determine the display color
cv::Scalar color = cv::Scalar(255, 255, 255);
if( classifier == "valence")
{
color = valence_color_generator( value );
}
else if( RED_COLOR_CLASSIFIERS.count(classifier) )
{
color = cv::Scalar(0, 0, 255);
}
else if( GREEN_COLOR_CLASSIFIERS.count(classifier) )
{
color = cv::Scalar(0, 255, 0);
}
float equalizer_magnitude = value;
if( classifier == "valence" )
{
equalizer_magnitude = std::fabs(value);
}
drawEqualizer(classifier, equalizer_magnitude, loc, align_right, color );
}
void Visualizer::drawEqualizer(const std::string& name, const float value, const cv::Point2f& loc,
bool align_right, cv::Scalar color)
{
const int block_width = 8;
const int block_height = 10;
const int margin = 2;
const int block_size = 10;
const int max_blocks = 100/block_size;
int blocks = round(value / block_size);
int i = loc.x, j = loc.y - 10;
cv::Point2f display_loc = loc;
const std::string label = align_right? name+": " : " :"+name;
for (int x = 0 ; x < (100/block_size) ; x++)
{
cv::Scalar scalar_clr = color;
float alpha = 0.8;
const int ii = (std::max)( float(i), 0.0f);
const int jj = (std::max)( float(j), 0.0f);
const int width = (std::min)(float(block_width), float(img.size().width-ii));
const int height = (std::min)(float(block_height), float(img.size().height-jj));
if (height < 0 || width < 0) continue;
cv::Mat roi = img(cv::Rect(ii, jj, width, height));
if (x >= blocks)
{
alpha = 0.3;
scalar_clr = cv::Scalar(186, 186, 186);
}
cv::Mat color(roi.size(), CV_8UC3, scalar_clr);
cv::addWeighted(color, alpha, roi, 1.0 - alpha , 0.0, roi);
i += align_right? -(margin+block_width):(margin+block_width);
}
display_loc.x += align_right? -(margin+block_width) * max_blocks : (margin+block_width) * max_blocks;
if( align_right )
{
int baseline=0;
cv::Size txtSize = cv::getTextSize(label, cv::FONT_HERSHEY_SIMPLEX, 0.5f, 5,&baseline);
display_loc.x -= txtSize.width;
}
cv::putText(img, label, display_loc, cv::FONT_HERSHEY_SIMPLEX, 0.5f, cv::Scalar(50,50,50), 5);
cv::putText(img, label, display_loc, cv::FONT_HERSHEY_SIMPLEX, 0.5f, cv::Scalar(255, 255, 255), 1);
}
void Visualizer::drawHeadOrientation(affdex::Orientation headAngles, const int x, int &padding,
bool align_right, cv::Scalar color)
{
std::string valueStr = boost::str(boost::format("%3.1f") % headAngles.pitch);
drawText("pitch", valueStr, cv::Point(x, padding += spacing), align_right, color );
valueStr = boost::str(boost::format("%3.1f") % headAngles.yaw);
drawText("yaw", valueStr, cv::Point(x, padding += spacing), align_right, color );
valueStr = boost::str(boost::format("%3.1f") % headAngles.roll);
drawText("roll", valueStr, cv::Point(x, padding += spacing), align_right, color );
}
void Visualizer::drawAppearance(affdex::Appearance appearance, const int x, int &padding,
bool align_right, cv::Scalar color)
{
drawText("gender", GENDER_MAP[appearance.gender], cv::Point(x, padding += spacing), align_right, color );
drawText("age", AGE_MAP[appearance.age], cv::Point(x, padding += spacing), align_right, color );
drawText("ethnicity", ETHNICITY_MAP[appearance.ethnicity], cv::Point(x, padding += spacing), align_right, color );
}
void Visualizer::showImage()
{
cv::imshow("analyze video", img);
cv::waitKey(5);
}
void Visualizer::overlayImage(const cv::Mat &foreground, cv::Mat &background, cv::Point2i location)
{
// start at the row indicated by location, or at row 0 if location.y is negative.
for(int y = (std::max)(location.y , 0); y < background.rows; ++y)
{
int fY = y - location.y; // because of the translation
// we are done if we have processed all rows of the foreground image.
if(fY >= foreground.rows)
break;
// start at the column indicated by location,
// or at column 0 if location.x is negative.
for(int x = (std::max)(location.x, 0); x < background.cols; ++x)
{
int fX = x - location.x; // because of the translation.
// we are done with this row if the column is outside of the foreground image.
if(fX >= foreground.cols)
break;
// determine the opacity of the foreground pixel, using its fourth (alpha) channel.
double opacity =
((double)foreground.data[fY * foreground.step + fX * foreground.channels() + (foreground.channels()-1)])
/ 255.;
// and now combine the background and foreground pixel, using the opacity,
// but only if opacity > 0.
for(int c = 0; opacity > 0 && c < background.channels(); ++c)
{
unsigned char foregroundPx =
foreground.data[fY * foreground.step + fX * foreground.channels() + c];
unsigned char backgroundPx =
background.data[y * background.step + x * background.channels() + c];
background.data[y*background.step + background.channels()*x + c] =
backgroundPx * (1.-opacity) + foregroundPx * opacity;
}
}
}
}
cv::Scalar ColorgenRedGreen::operator()( const float val ) const
{
float norm_val = ( val - red_val_ ) / ( green_val_ - red_val_ );
norm_val = norm_val < 0.0 ? 0.0 : norm_val;
norm_val = norm_val > 1.0 ? 1.0 : norm_val;
const int B = 0;
const int G = norm_val * 255;
const int R = ( 1.0 - norm_val ) * 255;
return cv::Scalar( B, G, R );
}
cv::Scalar ColorgenLinear::operator()( const float val ) const
{
float norm_val = ( val - val1_ ) / ( val2_ - val1_ );
const int B = color1_.val[0] * (1.0f-norm_val) + color2_.val[0]*norm_val;
const int G = color1_.val[1] * (1.0f-norm_val) + color2_.val[1]*norm_val;
const int R = color1_.val[2] * (1.0f-norm_val) + color2_.val[2]*norm_val;
return cv::Scalar( B, G, R );
}
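To make the equalizer layout concrete: drawEqualizer above splits the 0-100 score into blocks of block_size = 10, fills round(value / block_size) of them, and advances block_width + margin = 10 pixels per block, so a full row is 100 pixels wide. A small standalone sketch of that arithmetic, reusing the constants from the function above (illustration only, not part of the commit):

    #include <cmath>
    #include <cstdio>

    int main()
    {
        const int block_size = 10;              // score units per block, as in drawEqualizer
        const int block_width = 8, margin = 2;  // pixels per block plus gap
        const int max_blocks = 100 / block_size;

        float value = 73.0f;                    // e.g. a smile score of 73
        int filled = (int)std::round(value / block_size);        // -> 7 bright blocks
        int row_width = (block_width + margin) * max_blocks;     // -> 100 px wide row

        std::printf("%d of %d blocks filled, row is %d px wide\n",
                    filled, max_blocks, row_width);
        return 0;
    }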

common/Visualizer.h (new file, 189 lines)

@@ -0,0 +1,189 @@
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <Frame.h>
#include <Face.h>
#include <set>
/** @brief Plot the face metrics using opencv highgui
*/
class Visualizer
{
public:
Visualizer();
/** @brief UpdateImage refreshes the image that output will be drawn on
* @param output_img -- The image to display output on
*/
void updateImage(cv::Mat output_img);
/** @brief DrawPoints displays the landmark points on the image
* @param points -- The landmark points
*/
void drawPoints(affdex::VecFeaturePoint points);
/** @brief DrawBoundingBox displays the bounding box
* @param top_left -- The top left point
* @param bottom_right -- The bottom right point
* @param valence -- The valence value
*/
void drawBoundingBox(cv::Point2f top_left, cv::Point2f bottom_right, float valence);
/** @brief DrawHeadOrientation Displays head orientation angles and their values
* @param headAngles -- The affdex::Orientation (pitch, yaw, roll) to display
* @param x -- The x value of the location
* @param padding -- The padding value
* @param align_right -- Whether to right or left justify the text
* @param color -- Color
*/
void drawHeadOrientation(affdex::Orientation headAngles, const int x, int &padding,
bool align_right=true, cv::Scalar color=cv::Scalar(255,255,255));
/** @brief DrawAppearance Draws appearance metrics on screen
* @param appearance -- affdex::Appearance metrics
* @param x -- The x value of the location
* @param padding -- The padding value
* @param align_right -- Whether to right or left justify the text
* @param color -- Color
*/
void drawAppearance(affdex::Appearance appearance, const int x, int &padding,
bool align_right=true, cv::Scalar color=cv::Scalar(255,255,255));
/** @brief DrawFaceMetrics Displays all facial metrics and associated value
* @param face -- The affdex::Face object to display
* @param bounding_box -- The bounding box coordinates
*/
void drawFaceMetrics(affdex::Face face, std::vector<cv::Point2f> bounding_box);
/** @brief ShowImage displays image on screen
*/
void showImage();
/**
* Overlay an image with an Alpha (foreground) channel over background
* Assumes foreground.size() == background.size()
* Adapted from : http://jepsonsblog.blogspot.com/2012/10/overlay-transparent-image-in-opencv.html
* @param foreground - image to overlay
* @param background - ROI to overlay on
*/
void overlayImage(const cv::Mat &foreground, cv::Mat &background, cv::Point2i location);
std::set<std::string> GREEN_COLOR_CLASSIFIERS;
std::set<std::string> RED_COLOR_CLASSIFIERS;
std::vector<std::string> EXPRESSIONS;
std::vector<std::string> EMOTIONS;
std::vector<std::string> EMOJIS;
std::vector<std::string> HEAD_ANGLES;
std::map<affdex::Glasses, std::string> GLASSES_MAP;
std::map<affdex::Gender, std::string> GENDER_MAP;
std::map<affdex::Age, std::string> AGE_MAP;
std::map<affdex::Ethnicity, std::string> ETHNICITY_MAP;
private:
/** @brief DrawClassifierOutput Displays a classifier and associated value
* @param classifier -- Name of the classifier
* @param value -- Value we are trying to display
* @param loc -- Exact location. When align_right is (true/false) this should be the (upper-right, upper-left)
* @param align_right -- Whether to right or left justify the text
*/
*/
void drawClassifierOutput(const std::string& classifier, const float value,
const cv::Point2f& loc, bool align_right=false );
/** @brief DrawValues displays a list of classifiers and associated values
* @param names -- Names of the classifiers to show
* @param first -- Pointer to the first of the values to display
* @param x -- The x value of the location
* @param padding -- The padding value
* @param align_right -- Whether to right or left justify the text
*/
void drawValues(const float * first, const std::vector<std::string> names,
const int x, int &padding, const cv::Scalar clr, const bool align_right);
/** @brief DrawEqualizer displays an equalizer on screen either right or left justified at the anchor location (loc)
* @param name -- Name of the classifier
* @param value -- Value we are trying to display
* @param loc -- Exact location. When align_right is (true/false) this should be the (upper-right, upper-left)
* @param align_right -- Whether to right or left justify the text
* @param color -- Color
*/
void drawEqualizer(const std::string& name, const float value, const cv::Point2f& loc,
bool align_right, cv::Scalar color);
/** @brief DrawText displays text on screen either right or left justified at the anchor location (loc)
* @param name -- Name of the classifier
* @param value -- Value we are trying to display
* @param loc -- Exact location. When align_right is (true/false) this should be the (upper-right, upper-left)
* @param align_right -- Whether to right or left justify the text
* @param color -- Color
*/
void drawText(const std::string& name, const std::string& value,
const cv::Point2f loc, bool align_right=false, cv::Scalar color=cv::Scalar(255,255,255));
cv::Mat img;
cv::Mat logo;
bool logo_resized;
const int spacing = 20;
const int LOGO_PADDING = 20;
};
/** @brief Color generator (linear) for red-to-green values
*/
class ColorgenRedGreen
{
public:
/** @brief ColorgenRedGreen
* @param red_val - Value which will return red
* @param green_val - Value which will return green
*/
ColorgenRedGreen( const float red_val, const float green_val )
:
red_val_(red_val),
green_val_(green_val)
{}
/** @brief Generate accessor
* @param val -- Value for which we would like to generate a color
* @return BGR Scalar for use in open cv plotting functions (e.g. cv::circle)
*/
cv::Scalar operator()( const float val ) const;
private:
const float red_val_;
const float green_val_;
};
/**
* @brief Color generator (linear) between any two colors
*/
class ColorgenLinear
{
public:
ColorgenLinear( const float val1, const float val2, cv::Scalar color1, cv::Scalar color2 )
:
val1_(val1),
val2_(val2),
color1_(color1),
color2_(color2)
{}
/** @brief Generate accessor
* @param val -- Value for which we would like to generate a color
* @return BGR Scalar for use in open cv plotting functions (e.g. cv::circle)
*/
cv::Scalar operator()( const float val ) const;
private:
const float val1_;
const float val2_;
const cv::Scalar color1_;
const cv::Scalar color2_;
};
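For intuition about the two color generators declared at the bottom of this header, here is a minimal usage sketch. It assumes the translation unit is compiled together with Visualizer.cpp and that the OpenCV and Affdex SDK include paths required by Visualizer.h are available; it is an illustration, not code from the commit:

    #include <cstdio>
    #include "Visualizer.h"   // brings in cv::Scalar via the OpenCV headers

    int main()
    {
        // Valence runs from -100 (pure red) to +100 (pure green), as in drawBoundingBox.
        const ColorgenRedGreen valence_color(-100, 100);
        cv::Scalar c = valence_color(50.0f);                      // 75% of the way toward green
        std::printf("B=%.0f G=%.0f R=%.0f\n", c[0], c[1], c[2]);  // prints B=0 G=191 R=63

        // Linear ramp between two arbitrary BGR colors, e.g. white at 0 to yellow at 100.
        const ColorgenLinear white_yellow(0, 100, cv::Scalar(255, 255, 255), cv::Scalar(0, 255, 255));
        cv::Scalar mid = white_yellow(50.0f);                     // halfway: a pale yellow
        std::printf("B=%.0f G=%.0f R=%.0f\n", mid[0], mid[1], mid[2]);
        return 0;
    }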

common/affdex_small_logo.h (new file, 17849 lines)

File diff suppressed because it is too large.

opencv-webcam-demo/CMakeLists.txt

@@ -18,8 +18,9 @@ else()
 endif()
 set(COMMON_HDRS "${PARENT_DIR}/common/")
 file(GLOB COMMON_HDRS_FILES ${COMMON_HDRS}/*.h*)
+file(GLOB COMMON_CPP_FILES ${COMMON_HDRS}/*.c*)
-add_executable(${subProject} ${SRCS} ${HDRS} ${COMMON_HDRS_FILES})
+add_executable(${subProject} ${SRCS} ${HDRS} ${COMMON_HDRS_FILES} ${COMMON_CPP_FILES})
 target_include_directories(${subProject} PRIVATE ${Boost_INCLUDE_DIRS} ${AFFDEX_INCLUDE_DIR} ${COMMON_HDRS})

opencv-webcam-demo/opencv-webcam-demo.cpp

@@ -31,7 +31,7 @@ int main(int argsc, char ** argsv)
     try{
-        const std::vector<int> DEFAULT_RESOLUTION{ 640, 480 };
+        const std::vector<int> DEFAULT_RESOLUTION{ 1280, 720 };
         affdex::path DATA_FOLDER;
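The only change here raises the default capture resolution from 640x480 to 1280x720, presumably to give the new side panels more room around the face. As a hedged illustration of how a {width, height} vector like DEFAULT_RESOLUTION is typically applied to an OpenCV 2.x capture device (this diff does not show the sample's actual plumbing, so the helper below is hypothetical):

    #include <opencv2/highgui/highgui.hpp>
    #include <vector>

    // Hypothetical helper: apply a {width, height} pair such as DEFAULT_RESOLUTION
    // to an already-opened webcam; not the sample's exact code.
    void configureWebcam(cv::VideoCapture &webcam, const std::vector<int> &resolution)
    {
        webcam.set(CV_CAP_PROP_FRAME_WIDTH, resolution[0]);   // e.g. 1280
        webcam.set(CV_CAP_PROP_FRAME_HEIGHT, resolution[1]);  // e.g. 720
    }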

opencv-webcam-demo/opencv-webcam-demo.vcxproj

@@ -59,12 +59,15 @@ xcopy /d /s /y "C:\Program Files\Affectiva\AffdexSDK\data" "$(OutDir)data\"</Com
     </PostBuildEvent>
   </ItemDefinitionGroup>
   <ItemGroup>
+    <ClCompile Include="..\common\Visualizer.cpp" />
     <ClCompile Include="opencv-webcam-demo.cpp" />
   </ItemGroup>
   <ItemGroup>
     <None Include="packages.config" />
   </ItemGroup>
   <ItemGroup>
+    <ClInclude Include="..\common\Visualizer.h" />
+    <ClInclude Include="..\common\affdex_small_logo.h" />
     <ClInclude Include="..\common\AFaceListener.hpp" />
     <ClInclude Include="..\common\PlottingImageListener.hpp" />
     <ClInclude Include="..\common\StatusListener.hpp" />

opencv-webcam-demo/opencv-webcam-demo.vcxproj.filters

@@ -18,6 +18,9 @@
     <ClCompile Include="opencv-webcam-demo.cpp">
       <Filter>Source Files</Filter>
     </ClCompile>
+    <ClCompile Include="..\common\Visualizer.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
   </ItemGroup>
   <ItemGroup>
     <None Include="packages.config" />
@@ -32,5 +35,11 @@
     <ClInclude Include="..\common\StatusListener.hpp">
       <Filter>Header Files</Filter>
     </ClInclude>
+    <ClInclude Include="..\common\Visualizer.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="..\common\affdex_small_logo.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
   </ItemGroup>
 </Project>

video-demo/CMakeLists.txt

@@ -18,8 +18,9 @@ else()
 endif()
 set(COMMON_HDRS "${PARENT_DIR}/common/")
 file(GLOB COMMON_HDRS_FILES ${COMMON_HDRS}/*.h*)
+file(GLOB COMMON_CPP_FILES ${COMMON_HDRS}/*.c*)
-add_executable(${subProject} ${SRCS} ${HDRS} ${COMMON_HDRS_FILES})
+add_executable(${subProject} ${SRCS} ${HDRS} ${COMMON_HDRS_FILES} ${COMMON_CPP_FILES})
 target_include_directories(${subProject} PRIVATE ${Boost_INCLUDE_DIRS} ${AFFDEX_INCLUDE_DIR} ${COMMON_HDRS})

video-demo/video-demo.vcxproj

@@ -60,12 +60,15 @@ xcopy /d /s /y "C:\Program Files\Affectiva\AffdexSDK\data" "$(OutDir)data\"</Com
     </PostBuildEvent>
   </ItemDefinitionGroup>
   <ItemGroup>
+    <ClCompile Include="..\common\Visualizer.cpp" />
     <ClCompile Include="video-demo.cpp" />
   </ItemGroup>
   <ItemGroup>
     <None Include="packages.config" />
   </ItemGroup>
   <ItemGroup>
+    <ClInclude Include="..\common\Visualizer.h" />
+    <ClInclude Include="..\common\affdex_small_logo.h" />
     <ClInclude Include="..\common\AFaceListener.hpp" />
     <ClInclude Include="..\common\PlottingImageListener.hpp" />
     <ClInclude Include="..\common\StatusListener.hpp" />

video-demo/video-demo.vcxproj.filters

@@ -18,6 +18,9 @@
     <ClCompile Include="video-demo.cpp">
      <Filter>Source Files</Filter>
     </ClCompile>
+    <ClCompile Include="..\common\Visualizer.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
   </ItemGroup>
   <ItemGroup>
     <None Include="packages.config" />
@@ -32,5 +35,11 @@
     <ClInclude Include="..\common\StatusListener.hpp">
       <Filter>Header Files</Filter>
     </ClInclude>
+    <ClInclude Include="..\common\Visualizer.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="..\common\affdex_small_logo.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
   </ItemGroup>
 </Project>