diff --git a/console.html b/console.html
new file mode 100644
index 0000000..8e8e3a5
--- /dev/null
+++ b/console.html
@@ -0,0 +1,298 @@
+
+
+
+
websocketd console
+
+
+
+
diff --git a/opencv-webcam-demo/opencv-webcam-demo.cpp b/opencv-webcam-demo/opencv-webcam-demo.cpp
index 10b7a33..ad1904d 100644
--- a/opencv-webcam-demo/opencv-webcam-demo.cpp
+++ b/opencv-webcam-demo/opencv-webcam-demo.cpp
@@ -20,10 +20,11 @@ using namespace std;
using namespace affdex;
-std::string getAsJson(const std::map<FaceId, Face> faces, const double timeStamp)
+std::string getAsJson(int framenr, const std::map<FaceId, Face> faces, const double timeStamp)
{
std::stringstream ss;
ss << "{" << "'t':" << timeStamp << ",";
+ ss << "'nr':" << framenr << ",";
ss << "'faces':[";
int i(0);
@@ -85,7 +86,6 @@ std::string getAsJson(const std::map faces, const double timeStamp
return ss.str();
}
-
///
/// Project for demoing the Windows SDK CameraDetector class (grabbing and processing frames from the camera).
///
@@ -189,11 +189,21 @@ int main(int argsc, char ** argsv)
frameDetector->setFaceListener(faceListenPtr.get());
frameDetector->setProcessStatusListener(videoListenPtr.get());
+ /*std::string cameraPipeline;
+ cameraPipeline ="v4l2src device=/dev/video0 extra-controls=\"c,exposure_auto=1,exposure_absolute=500\" ! ";
+ cameraPipeline+="video/x-raw, format=BGR, framerate=30/1, width=(int)1280,height=(int)720 ! ";
+ cameraPipeline+="appsink";
+
+ cv::VideoCapture webcam;
+ webcam.open(cameraPipeline);*/
cv::VideoCapture webcam(camera_id); //Connect to the first webcam
- webcam.set(CV_CAP_PROP_FPS, camera_framerate); //Set webcam framerate.
- webcam.set(CV_CAP_PROP_FRAME_WIDTH, resolution[0]);
- webcam.set(CV_CAP_PROP_FRAME_HEIGHT, resolution[1]);
- std::cerr << "Setting the webcam frame rate to: " << camera_framerate << std::endl;
+ std::cerr << "Camera: " << camera_id << std::endl;
+ std::cerr << "- Setting the frame rate to: " << camera_framerate << std::endl;
+ //~ webcam.set(CV_CAP_PROP_FPS, camera_framerate); //Set webcam framerate.
+ std::cerr << "- Setting the resolution to: " << resolution[0] << "*" << resolution[1] << std::endl;
+ webcam.set(CV_CAP_PROP_FRAME_HEIGHT, 240);
+ webcam.set(CV_CAP_PROP_FRAME_WIDTH, 320);
+
auto start_time = std::chrono::system_clock::now();
if (!webcam.isOpened())
{
@@ -218,7 +228,7 @@ int main(int argsc, char ** argsv)
//Start the frame detector thread.
frameDetector->start();
-
+ int framenr = 0;
do{
cv::Mat img;
@@ -227,6 +237,8 @@ int main(int argsc, char ** argsv)
std::cerr << "Failed to read frame from webcam! " << std::endl;
break;
}
+
+ imread(img);
//Calculate the Image timestamp and the capture frame rate;
const auto milliseconds = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now() - start_time);
@@ -241,7 +253,8 @@ int main(int argsc, char ** argsv)
// For each frame processed
if (listenPtr->getDataSize() > 0)
{
-
+ framenr++;
+
std::pair<Frame, std::map<FaceId, Face> > dataPoint = listenPtr->getData();
Frame frame = dataPoint.first;
std::map<FaceId, Face> faces = dataPoint.second;
@@ -260,8 +273,20 @@ int main(int argsc, char ** argsv)
//Output metrics to the file
//listenPtr->outputToFile(faces, frame.getTimestamp());
- std:cout << getAsJson(faces, frame.getTimestamp()) << std::endl;
+ std::cout << getAsJson(framenr, faces, frame.getTimestamp()) << std::endl;
+
+ char buff[100];
+ snprintf(buff, sizeof(buff), "frame%06d.jpg", framenr);
+ std::string targetFilename = buff; // convert to std::string
+
+ vector<int> compression_params;
+ compression_params.push_back(CV_IMWRITE_JPEG_QUALITY);
+ compression_params.push_back(90);
+
+ imwrite(targetFilename, img, compression_params);
}
+
+
}
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..794eac3
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1 @@
+git+https://github.com/dpallot/simple-websocket-server.git
diff --git a/run.py b/run.py
new file mode 100644
index 0000000..e6eb6ac
--- /dev/null
+++ b/run.py
@@ -0,0 +1,45 @@
+#sudo ~/build/opencv-webcam-demo/opencv-webcam-demo --data ~/affdex-sdk/data --faceMode 1 --numFaces 40 --draw 1
+
+import subprocess
+from SimpleWebSocketServer import SimpleWebSocketServer, WebSocket
+
+proc = subprocess.Popen([
+ '/home/crowd/build/opencv-webcam-demo/opencv-webcam-demo',
+ "--data", "/home/crowd/affdex-sdk/data",
+ "--faceMode", "1",
+ "--numFaces", "40",
+ "--draw", "1",
+ "--pfps", "5",
+ "--cfps", "5",
+ ],stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
+clients = []
+class EchoOutput(WebSocket):
+
+# def handleMessage(self):
+# # echo message back to client
+# self.sendMessage(self.data)
+
+ def handleConnected(self):
+ clients.append(self)
+ print(self.address, 'connected')
+
+ def handleClose(self):
+ clients.remove(self)
+ print(self.address, 'closed')
+
+server = SimpleWebSocketServer('', 8080, EchoOutput)
+
+def send_message(msg):
+ print "send", msg, "to", len(clients), "clients"
+ for client in list(clients):
+ client.sendMessage(u''+msg)
+
+while proc.poll() is None:
+ server.serveonce()
+ line = proc.stdout.readline()
+ if line == '':
+ continue
+ send_message(line)
+ #print "test:", line.rstrip()
+