Test json output

This commit is contained in:
Ruben 2017-11-12 23:15:44 +01:00
parent b1dda73d50
commit 8cc19d9a54


@@ -19,9 +19,9 @@
using namespace std;
using namespace affdex;
/// <summary>
/// Project for demoing the Windows SDK CameraDetector class (grabbing and processing frames from the camera).
/// </summary>
int main(int argsc, char ** argsv)
{
    namespace po = boost::program_options; // abbreviate namespace
@@ -115,8 +115,8 @@ int main(int argsc, char ** argsv)
    //Initialize detectors
    frameDetector->setDetectAllEmotions(true);
    frameDetector->setDetectAllExpressions(true);
-   frameDetector->setDetectAllEmojis(true);
-   frameDetector->setDetectAllAppearances(true);
+   frameDetector->setDetectAllEmojis(false);
+   frameDetector->setDetectAllAppearances(false);
    frameDetector->setClassifierPath(DATA_FOLDER);
    frameDetector->setImageListener(listenPtr.get());
    frameDetector->setFaceListener(faceListenPtr.get());
@@ -153,6 +153,7 @@ int main(int argsc, char ** argsv)
    frameDetector->start();
    do{
        cv::Mat img;
        if (!webcam.read(img))   //Capture an image from the camera
        {
@@ -184,13 +185,15 @@ int main(int argsc, char ** argsv)
            listenPtr->draw(faces, frame);
        }
-       std::cerr << "timestamp: " << frame.getTimestamp()
-           << " cfps: " << listenPtr->getCaptureFrameRate()
-           << " pfps: " << listenPtr->getProcessingFrameRate()
-           << " faces: " << faces.size() << endl;
+       // std::cerr << "timestamp: " << frame.getTimestamp()
+       //     << " cfps: " << listenPtr->getCaptureFrameRate()
+       //     << " pfps: " << listenPtr->getProcessingFrameRate()
+       //     << " faces: " << faces.size() << endl;
        //Output metrics to the file
        //listenPtr->outputToFile(faces, frame.getTimestamp());
+       std::cout << getAsJson(faces, frame.getTimestamp()) << std::endl;
    }
@@ -227,3 +230,60 @@ int main(int argsc, char ** argsv)
    return 0;
}
+std::string getAsJson(const std::map<FaceId, Face> faces, const double timeStamp)
+{
+    std::stringstream ss;
+    ss << "{" << "'t':" << timeStamp << ",";
+    ss << "'faces':[";
+    int i(0);
+    for (auto & face_id_pair : faces)
+    {
+        Face f = face_id_pair.second;
+        if (i > 0) { ss << ","; }
+        i++;
+        ss << "{";
+        // fStream << timeStamp << ","
+        //     << f.id << ","
+        //     << f.measurements.interocularDistance << ","
+        //     << glassesMap[f.appearance.glasses] << ","
+        //     << ageMap[f.appearance.age] << ","
+        //     << ethnicityMap[f.appearance.ethnicity] << ","
+        //     << genderMap[f.appearance.gender] << ","
+        //     << affdex::EmojiToString(f.emojis.dominantEmoji) << ",";
+        // Walk the float fields of each metrics struct in declaration order,
+        // pairing them with the matching name lists (assumed here to be
+        // accessible static members of PlottingImageListener).
+        float *values = (float *)&f.measurements.orientation;
+        for (std::string angle : affdex::PlottingImageListener::headAngles)
+        {
+            ss << "'" << angle << "':" << (*values) << ",";
+            values++;
+        }
+        values = (float *)&f.emotions;
+        for (std::string emotion : affdex::PlottingImageListener::emotions)
+        {
+            ss << "'" << emotion << "':" << (*values) << ",";
+            values++;
+        }
+        values = (float *)&f.expressions;
+        for (std::string expression : affdex::PlottingImageListener::expressions)
+        {
+            ss << "'" << expression << "':" << (*values) << ",";
+            values++;
+        }
+        ss << "'ioDistance':" << f.measurements.interocularDistance << ",";
+        ss << "'id':" << f.id;
+        ss << "}";
+    }
+    ss << "]"; // faces
+    ss << "}";
+    return ss.str();
+}
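For reference, a rough sketch of the line this change prints to stdout for a single detected face; the field names and values below are illustrative only (the real names come from the headAngles, emotions, and expressions lists), and the single-quoted keys mean the output is JSON-like rather than strict JSON:

    // Illustrative stdout line per processed frame (values made up):
    // {'t':0.033,'faces':[{'pitch':-2.1,'yaw':5.3,'roll':0.4,'joy':0.1, ... ,'ioDistance':62.5,'id':0}]}

Consumers expecting strict JSON may need the single quotes swapped for double quotes or a lenient parser.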