Test json output
commit 8cc19d9a54 (parent b1dda73d50)
1 changed file with 69 additions and 9 deletions
@@ -115,8 +115,8 @@ int main(int argsc, char ** argsv)
    //Initialize detectors
    frameDetector->setDetectAllEmotions(true);
    frameDetector->setDetectAllExpressions(true);
    frameDetector->setDetectAllEmojis(true);
    frameDetector->setDetectAllAppearances(true);
    frameDetector->setDetectAllEmojis(false);
    frameDetector->setDetectAllAppearances(false);
    frameDetector->setClassifierPath(DATA_FOLDER);
    frameDetector->setImageListener(listenPtr.get());
    frameDetector->setFaceListener(faceListenPtr.get());
@@ -153,6 +153,7 @@ int main(int argsc, char ** argsv)
    frameDetector->start();

    do{

        cv::Mat img;
        if (!webcam.read(img)) //Capture an image from the camera
        {
@@ -184,13 +185,15 @@ int main(int argsc, char ** argsv)
                listenPtr->draw(faces, frame);
            }

            std::cerr << "timestamp: " << frame.getTimestamp()
                << " cfps: " << listenPtr->getCaptureFrameRate()
                << " pfps: " << listenPtr->getProcessingFrameRate()
                << " faces: " << faces.size() << endl;
            // std::cerr << "timestamp: " << frame.getTimestamp()
            //     << " cfps: " << listenPtr->getCaptureFrameRate()
            //     << " pfps: " << listenPtr->getProcessingFrameRate()
            //     << " faces: " << faces.size() << endl;

            //Output metrics to the file
            //listenPtr->outputToFile(faces, frame.getTimestamp());

            std::cout << getAsJson(faces, frame.getTimestamp()) << std::endl;
        }

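With this change each frame's metrics are written to std::cout as a single line, while the cfps/pfps diagnostics stay on std::cerr, so the JSON stream can be piped to another process and consumed line by line. A minimal sketch of such a consumer (hypothetical, not part of this commit; the binary names are illustrative, and it only reports the size of each record where a real JSON parser would take over):

// Hypothetical pipeline: ./opencv-webcam-demo ... 2>/dev/null | ./json-consumer
#include <iostream>
#include <string>

int main()
{
    std::string line;
    while (std::getline(std::cin, line))    // one frame's record per line
    {
        // Hand `line` to a real JSON parser here; the sketch only reports
        // how much data arrived for the frame.
        std::cout << "frame record: " << line.size() << " bytes" << std::endl;
    }
    return 0;
}
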
@@ -227,3 +230,60 @@ int main(int argsc, char ** argsv)

    return 0;
}

std::string getAsJson(const std::map<FaceId, Face> faces, const double timeStamp)
{
    std::stringstream ss;
    ss << "{" << "'t':" << timeStamp << ",";
    ss << "'faces':[";

    int i(0);

    for (auto & face_id_pair : faces)
    {
        Face f = face_id_pair.second;

        if(i > 0) { ss << ","; }
        i++;

        ss << "{";

        // fStream << timeStamp << ","
        //     << f.id << ","
        //     << f.measurements.interocularDistance << ","
        //     << glassesMap[f.appearance.glasses] << ","
        //     << ageMap[f.appearance.age] << ","
        //     << ethnicityMap[f.appearance.ethnicity] << ","
        //     << genderMap[f.appearance.gender] << ","
        //     << affdex::EmojiToString(f.emojis.dominantEmoji) << ",";

        float *values = (float *)&f.measurements.orientation;
        // Assumes the headAngles/emotions/expressions name lists are reachable
        // from here, e.g. as static members of PlottingImageListener.
        for (std::string angle : affdex::PlottingImageListener::headAngles)
        {
            ss << "'" << angle << "':" << (*values) << ",";
            values++;
        }

        values = (float *)&f.emotions;
        for (std::string emotion : affdex::PlottingImageListener::emotions)
        {
            ss << "'" << emotion << "':" << (*values) << ",";
            values++;
        }

        values = (float *)&f.expressions;
        for (std::string expression : affdex::PlottingImageListener::expressions)
        {
            ss << "'" << expression << "':" << (*values) << ",";
            values++;
        }

ss << "'ioDistance':"<< f.measurements.interocularDistance << ",";
|
||||
ss << "'id':"<< f.id;
|
||||
ss << "}";
|
||||
}
|
||||
|
||||
ss << "]"; // faces
|
||||
ss << "}";
|
||||
return ss.str();
|
||||
}
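
The metric loops above rely on the emotions, expressions and orientation members of Face being plain structs of consecutive floats, so a float* cast can walk them in step with a parallel list of metric names. A self-contained illustration of that pattern (the struct and the name list here are made up for the sketch, not the SDK's):

#include <iostream>
#include <string>
#include <vector>

// Illustrative struct of consecutive floats, standing in for an SDK metrics struct.
struct Angles { float pitch; float yaw; float roll; };

int main()
{
    Angles a{1.5f, -3.0f, 0.25f};
    std::vector<std::string> names{"pitch", "yaw", "roll"};    // must match member order

    const float *values = (const float *)&a;
    for (const std::string & name : names)
    {
        std::cout << "'" << name << "':" << (*values) << ",";
        values++;
    }
    std::cout << std::endl;    // prints 'pitch':1.5,'yaw':-3,'roll':0.25,
    return 0;
}

The pattern only holds while the name lists match the struct layout exactly. Note also that getAsJson quotes keys with single quotes, which strict JSON parsers reject; emitting escaped double quotes instead would make each line valid JSON.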