Add emojis to the output file

Abdelrahman Mahmoud 2016-04-05 11:13:23 -04:00
parent 4b4e1e05ba
commit 0d8c63f959
3 changed files with 107 additions and 77 deletions


@@ -24,10 +24,10 @@ using namespace affdex;
class PlottingImageListener : public ImageListener
{
std::mutex mMutex;
std::deque<std::pair<Frame, std::map<FaceId, Face> > > mDataArray;
double mCaptureLastTS;
double mCaptureFPS;
double mProcessLastTS;
@@ -38,17 +38,18 @@ class PlottingImageListener : public ImageListener
const int spacing = 10;
const float font_size = 0.5f;
const int font = cv::FONT_HERSHEY_COMPLEX_SMALL;
std::vector<std::string> expressions;
std::vector<std::string> emotions;
+ std::vector<std::string> emojis;
std::vector<std::string> headAngles;
std::map<affdex::Glasses, std::string> glassesMap;
std::map<affdex::Gender, std::string> genderMap;
public:
PlottingImageListener(std::ofstream &csv, const bool draw_display)
: fStream(csv), mDrawDisplay(draw_display), mStartT(std::chrono::system_clock::now()),
mCaptureLastTS(-1.0f), mCaptureFPS(-1.0f),
@@ -59,35 +60,45 @@ public:
"upperLipRaise", "lipCornerDepressor", "chinRaise", "lipPucker", "lipPress",
"lipSuck", "mouthOpen", "smirk", "eyeClosure", "attention"
};
emotions = {
"joy", "fear", "disgust", "sadness", "anger",
"surprise", "contempt", "valence", "engagement"
};
headAngles = { "pitch", "yaw", "roll" };
+ emojis = std::vector<std::string> {
+ "relaxed", "smiley", "laughing",
+ "kissing", "disappointed",
+ "rage", "smirk", "wink",
+ "stuckOutTongueWinkingEye", "stuckOutTongue",
+ "flushed", "scream"
+ };
genderMap = std::map<affdex::Gender, std::string> {
{ affdex::Gender::Male, "male" },
{ affdex::Gender::Female, "female" },
{ affdex::Gender::Unknown, "unknown" },
};
glassesMap = std::map<affdex::Glasses, std::string> {
{ affdex::Glasses::Yes, "glasses" },
{ affdex::Glasses::No, "no glasses" }
};
fStream << "TimeStamp,faceId,interocularDistance,glasses,gender,";
fStream << "TimeStamp,faceId,interocularDistance,glasses,gender,dominantEmoji,";
for (std::string angle : headAngles) fStream << angle << ",";
for (std::string emotion : emotions) fStream << emotion << ",";
for (std::string expression : expressions) fStream << expression << ",";
+ for (std::string emoji : emojis) fStream << emoji << ",";
fStream << std::endl;
fStream.precision(4);
fStream << std::fixed;
}
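
For reference, a sketch of the header row the updated constructor writes, assembled from the column vectors above; the ellipsis stands in for the expression columns, since the start of the expressions list falls outside this hunk:

TimeStamp,faceId,interocularDistance,glasses,gender,dominantEmoji,pitch,yaw,roll,joy,fear,disgust,sadness,anger,surprise,contempt,valence,engagement,...,relaxed,smiley,laughing,kissing,disappointed,rage,smirk,wink,stuckOutTongueWinkingEye,stuckOutTongue,flushed,scream,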
FeaturePoint minPoint(VecFeaturePoint points)
{
VecFeaturePoint::iterator it = points.begin();
@@ -99,7 +110,7 @@ public:
}
return ret;
};
FeaturePoint maxPoint( VecFeaturePoint points)
{
VecFeaturePoint::iterator it = points.begin();
@@ -111,27 +122,27 @@ public:
}
return ret;
};
double getProcessingFrameRate()
{
std::lock_guard<std::mutex> lg(mMutex);
return mProcessFPS;
}
double getCaptureFrameRate()
{
std::lock_guard<std::mutex> lg(mMutex);
return mCaptureFPS;
}
int getDataSize()
{
std::lock_guard<std::mutex> lg(mMutex);
return mDataArray.size();
}
std::pair<Frame, std::map<FaceId, Face>> getData()
{
std::lock_guard<std::mutex> lg(mMutex);
@@ -139,7 +150,7 @@ public:
mDataArray.pop_front();
return dpoint;
}
void onImageResults(std::map<FaceId, Face> faces, Frame image) override
{
std::lock_guard<std::mutex> lg(mMutex);
@@ -150,58 +161,68 @@ public:
mProcessFPS = 1.0f / (seconds - mProcessLastTS);
mProcessLastTS = seconds;
};
void onImageCapture(Frame image) override
{
std::lock_guard<std::mutex> lg(mMutex);
mCaptureFPS = 1.0f / (image.getTimestamp() - mCaptureLastTS);
mCaptureLastTS = image.getTimestamp();
};
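
(Note that both rates are instantaneous: each callback takes the reciprocal of the gap since the previous timestamp, so consecutive capture timestamps of, say, 1.000 s and 1.033 s give roughly 1 / 0.033 ≈ 30 fps.)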
void outputToFile(const std::map<FaceId, Face> faces, const double timeStamp)
{
if (faces.empty())
{
- fStream << timeStamp << "nan,nan,no glasses,unknown,";
+ fStream << timeStamp << "nan,nan,no glasses,unknown, unknown,";
for (std::string angle : headAngles) fStream << "nan,";
for (std::string emotion : emotions) fStream << "nan,";
for (std::string expression : expressions) fStream << "nan,";
+ for (std::string emoji : emojis) fStream << "nan,";
fStream << std::endl;
}
for (auto & face_id_pair : faces)
{
Face f = face_id_pair.second;
fStream << timeStamp << ","
<< f.id << ","
<< f.measurements.interocularDistance << ","
<< glassesMap[f.appearance.glasses] << ","
- << genderMap[f.appearance.gender] << ",";
+ << genderMap[f.appearance.gender] << ","
+ << affdex::EmojiToString(f.emojis.dominantEmoji) << ",";
float *values = (float *)&f.measurements.orientation;
for (std::string angle : headAngles)
{
fStream << (*values) << ",";
values++;
}
values = (float *)&f.emotions;
for (std::string emotion : emotions)
{
fStream << (*values) << ",";
values++;
}
values = (float *)&f.expressions;
for (std::string expression : expressions)
{
fStream << (*values) << ",";
values++;
}
+ values = (float *)&f.emojis;
+ for (std::string emoji : emojis)
+ {
+ fStream << (*values) << ",";
+ values++;
+ }
fStream << std::endl;
}
}
void drawValues(const float * first, const std::vector<std::string> names,
const int x, int &padding, const cv::Scalar clr,
cv::Mat img)
@@ -217,18 +238,18 @@ public:
first++;
}
}
void draw(const std::map<FaceId, Face> faces, Frame image)
{
std::shared_ptr<byte> imgdata = image.getBGRByteArray();
cv::Mat img = cv::Mat(image.getHeight(), image.getWidth(), CV_8UC3, imgdata.get());
const int left_margin = 30;
- cv::Scalar clr = cv::Scalar(255, 255, 255);
+ cv::Scalar clr = cv::Scalar(0, 0, 255);
cv::Scalar header_clr = cv::Scalar(255, 0, 0);
for (auto & face_id_pair : faces)
{
Face f = face_id_pair.second;
@@ -239,49 +260,56 @@ public:
}
FeaturePoint tl = minPoint(points);
FeaturePoint br = maxPoint(points);
//Output the results of the different classifiers.
int padding = tl.y + 10;
cv::putText(img, "APPEARANCE", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
cv::putText(img, genderMap[f.appearance.gender], cv::Point(br.x, padding += spacing), font, font_size, clr);
cv::putText(img, glassesMap[f.appearance.glasses], cv::Point(br.x, padding += spacing), font, font_size, clr);
Orientation headAngles = f.measurements.orientation;
char strAngles[100];
sprintf(strAngles, "Pitch: %3.2f Yaw: %3.2f Roll: %3.2f Interocular: %3.2f",
headAngles.pitch, headAngles.yaw, headAngles.roll, f.measurements.interocularDistance);
char fId[10];
sprintf(fId, "ID: %i", f.id);
cv::putText(img, fId, cv::Point(br.x, padding += spacing), font, font_size, clr);
cv::putText(img, "MEASUREMENTS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
cv::putText(img, strAngles, cv::Point(br.x, padding += spacing), font, font_size, clr);
cv::putText(img, "EMOJIS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
cv::putText(img, "dominantEmoji: " + affdex::EmojiToString(f.emojis.dominantEmoji),
cv::Point(br.x, padding += spacing), font, font_size, clr);
drawValues((float *)&f.emojis, emojis, br.x, padding, clr, img);
cv::putText(img, "EXPRESSIONS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
drawValues((float *)&f.expressions, expressions, br.x, padding, clr, img);
cv::putText(img, "EMOTIONS", cv::Point(br.x, padding += (spacing * 2)), font, font_size, header_clr);
drawValues((float *)&f.emotions, emotions, br.x, padding, clr, img);
}
char fps_str[50];
sprintf(fps_str, "capture fps: %2.0f", mCaptureFPS);
cv::putText(img, fps_str, cv::Point(img.cols - 110, img.rows - left_margin - spacing), font, font_size, clr);
sprintf(fps_str, "process fps: %2.0f", mProcessFPS);
cv::putText(img, fps_str, cv::Point(img.cols - 110, img.rows - left_margin), font, font_size, clr);
cv::imshow("analyze video", img);
cv::waitKey(5);
}
};
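
A note on the pattern outputToFile relies on above: each metrics struct (the orientation, f.emotions, f.expressions, and now f.emojis) is reinterpreted as a flat float array and walked in step with the matching name vector, so the CSV columns only line up if the struct members are plain floats declared in the same order as the names. A minimal self-contained sketch of that pattern, using a hypothetical EmojiScores type in place of the SDK struct:

#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for an SDK metrics struct: a plain aggregate of
// floats declared in the same order as the matching name vector.
struct EmojiScores
{
    float relaxed;
    float smiley;
    float laughing;
};

int main()
{
    EmojiScores scores{ 0.05f, 0.92f, 0.31f };
    std::vector<std::string> names{ "relaxed", "smiley", "laughing" };

    // Walk the struct as a contiguous float array, the way the
    // outputToFile loops do; this assumes a flat, padding-free
    // sequence of floats.
    const float *values = reinterpret_cast<const float *>(&scores);
    for (const std::string &name : names)
    {
        std::cout << name << "=" << *values << ",";
        values++;
    }
    std::cout << std::endl; // prints: relaxed=0.05,smiley=0.92,laughing=0.31,
    return 0;
}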


@@ -126,6 +126,7 @@ int main(int argsc, char ** argsv)
//Initialize detectors
frameDetector->setDetectAllEmotions(true);
frameDetector->setDetectAllExpressions(true);
+ frameDetector->setDetectAllEmojis(true);
frameDetector->setDetectGender(true);
frameDetector->setDetectGlasses(true);
frameDetector->setClassifierPath(DATA_FOLDER);


@@ -35,11 +35,11 @@ int main(int argsc, char ** argsv)
bool loop = false;
unsigned int nFaces = 1;
int faceDetectorMode = (int)FaceDetectorMode::SMALL_FACES;
const int precision = 2;
std::cerr.precision(precision);
std::cout.precision(precision);
namespace po = boost::program_options; // abbreviate namespace
po::options_description description("Project for demoing the Windows SDK VideoDetector class (processing video files).");
description.add_options()
@@ -76,7 +76,7 @@ int main(int argsc, char ** argsv)
std::cerr << "For help, use the -h option." << std::endl << std::endl;
return 1;
}
// Parse and check the data folder (with assets)
if (!boost::filesystem::exists(DATA_FOLDER))
{
@@ -89,20 +89,20 @@ int main(int argsc, char ** argsv)
{
//Initialize the video file detector
VideoDetector videoDetector(process_framerate, nFaces, (affdex::FaceDetectorMode) faceDetectorMode);
//Initialize out file
boost::filesystem::path csvPath(videoPath);
csvPath.replace_extension(".csv");
std::ofstream csvFileStream(csvPath.c_str());
if (!csvFileStream.is_open())
{
std::cerr << "Unable to open csv file " << csvPath << std::endl;
return 1;
}
std::cout << "Max num of faces set to: " << videoDetector.getMaxNumberFaces() << std::endl;
std::string mode;
switch (videoDetector.getFaceDetectorMode())
@@ -116,13 +116,14 @@ int main(int argsc, char ** argsv)
default:
break;
}
std::cout << "Face detector mode set to: " << mode << std::endl;
shared_ptr<PlottingImageListener> listenPtr(new PlottingImageListener(csvFileStream, draw_display));
//Activate all the detectors
videoDetector.setDetectAllEmotions(true);
videoDetector.setDetectAllExpressions(true);
+ videoDetector.setDetectAllEmojis(true);
videoDetector.setDetectGender(true);
videoDetector.setDetectGlasses(true);
//Set the location of the data folder and license file
@@ -130,16 +131,16 @@ int main(int argsc, char ** argsv)
videoDetector.setLicensePath(LICENSE_PATH);
//Add callback functions implementations
videoDetector.setImageListener(listenPtr.get());
videoDetector.start(); //Initialize the detectors .. call only once
do
{
shared_ptr<StatusListener> videoListenPtr = std::make_shared<StatusListener>();
videoDetector.setProcessStatusListener(videoListenPtr.get());
videoDetector.process(videoPath); //Process a video
//For each frame processed
while (videoListenPtr->isRunning())
{
@@ -148,34 +149,34 @@ int main(int argsc, char ** argsv)
std::pair<Frame, std::map<FaceId, Face> > dataPoint = listenPtr->getData();
Frame frame = dataPoint.first;
std::map<FaceId, Face> faces = dataPoint.second;
//Draw on the GUI
if (draw_display)
{
listenPtr->draw(faces, frame);
}
std::cerr << "timestamp: " << frame.getTimestamp()
<< " cfps: " << listenPtr->getCaptureFrameRate()
<< " pfps: " << listenPtr->getProcessingFrameRate()
<< " faces: "<< faces.size() << endl;
//Output metrics to file
listenPtr->outputToFile(faces, frame.getTimestamp());
}
}
} while(loop);
videoDetector.stop();
csvFileStream.close();
std::cout << "Output written to file: " << csvPath << std::endl;
}
catch (AffdexException ex)
{
std::cerr << ex.what();
}
return 0;
}