/**
 * p5.js + ml5 faceApi webcam sketch (browser, p5 global mode).
 *
 * Captures the webcam, runs ml5 face detection in a continuous loop
 * (each result immediately schedules the next detection), keeps a
 * snapshot of the last video frame in an offscreen graphics buffer,
 * and provides helpers to draw bounding boxes and facial landmarks.
 *
 * Expects `p5` and `ml5` to be loaded globally before this script.
 */
console.log('p5 version:', p5);
console.log('ml5 version:', ml5);
console.log(location.origin);

// Images posted in from the parent frame, keyed by id.
// (The postMessage handler that populated this was commented out and has
// been removed; the map is kept so a future handler can reuse it.)
let assets = {};

// p5 global mode looks up window.draw — keep an (empty) declaration so
// the sketch's render loop runs.
function draw() {}

let faceapi;         // ml5 faceApi instance, created in setup()
let video;           // p5 capture element wrapping the webcam stream
let lastFrame;       // offscreen p5.Graphics holding the latest video frame
let detections = []; // most recent detection results (updated asynchronously)

// All face-api model files are served from the parent frame's origin.
const FACEAPI_MODEL_PATH = window.parent.location.origin + '/assets/faceapi';

// ml5 faceApi options — by default all options are set to true.
const detection_options = {
  withLandmarks: true,
  withDescriptors: false,
  minConfidence: 0.5,
  Mobilenetv1Model: FACEAPI_MODEL_PATH,
  FaceLandmarkModel: FACEAPI_MODEL_PATH,
  FaceLandmark68TinyNet: FACEAPI_MODEL_PATH,
  FaceRecognitionModel: FACEAPI_MODEL_PATH,
  TinyFaceDetectorModel: FACEAPI_MODEL_PATH,
};

/**
 * p5 entry point: creates the WEBGL canvas, starts the webcam capture,
 * and kicks off ml5 faceApi model loading.
 */
function setup() {
  // createCanvas(1280, 720, WEBGL);
  createCanvas(540, 420, WEBGL);
  smooth();
  noFill();
  push();
  // WEBGL mode puts the origin at the canvas center; shift it to the
  // top-left corner so 2D-style coordinates work below.
  translate(-width / 2, -height / 2);

  const constraints = {
    video: { width: { min: 720 }, height: { min: 540 } },
    audio: false,
  };
  video = createCapture(constraints);
  // NOTE: video.width/height are placeholders until the user approves the
  // camera — gotResults() resizes this buffer once the real size is known.
  lastFrame = createGraphics(video.width, video.height);
  console.log(video);

  video.hide(); // Hide the video element, and just show the canvas

  faceapi = ml5.faceApi(video, detection_options, modelReady);
  textAlign(RIGHT);
}

// Model-loaded callback: start the first detection.
function modelReady() {
  faceapi.detect(gotResults);
}

// Default frame renderer; intentionally `var` so it stays a reassignable
// window-level binding (a removed postMessage handler swapped it out with
// user-supplied code — TODO confirm nothing external still relies on this).
var handleResults = function () {
  background((millis() / 100) % 255, 0, 0);
  image(video, -width / 2 + 10, -height / 2 + 10, width - 20, height - 20);
};

/**
 * Detection callback: stores the results, snapshots the current video
 * frame into `lastFrame`, then immediately schedules the next detection
 * (continuous detection loop).
 *
 * Declared as a hoisted function (was previously an undeclared implicit
 * global assignment) so the forward reference from modelReady() is safe.
 *
 * @param {*} err    error from ml5, if any
 * @param {*} result array of face-api detection objects
 */
function gotResults(err, result) {
  if (err) {
    console.log(err);
    return;
  }
  // Store data for the async draw function.
  detections = result;

  // The video's true size becomes known only after camera approval.
  if (lastFrame.width !== video.width || lastFrame.height !== video.height) {
    console.log('Resizing canvas');
    lastFrame.resizeCanvas(video.width, video.height);
  }
  lastFrame.image(video, 0, 0, video.width, video.height);

  faceapi.detect(gotResults);
}

/**
 * Draw a purple bounding box for every detection.
 * @param {Array} detections face-api detection objects
 */
function drawBox(detections) {
  for (let i = 0; i < detections.length; i++) {
    const box = detections[i].alignedRect._box;
    noFill();
    stroke(161, 95, 251);
    strokeWeight(2);
    rect(box._x, box._y, box._width, box._height);
  }
}

/**
 * Draw all facial-landmark polylines for a single detection.
 * Eyes, nose and mouth are drawn closed; brows and jaw stay open.
 * @param {*} detection a single face-api detection object
 */
function drawLandmarks(detection) {
  const {
    mouth,
    nose,
    leftEye,
    rightEye,
    rightEyeBrow,
    leftEyeBrow,
    jawOutline,
  } = detection.parts;
  drawPart(mouth, true);
  drawPart(nose, true);
  drawPart(leftEye, true);
  drawPart(leftEyeBrow, false);
  drawPart(rightEye, true);
  drawPart(rightEyeBrow, false);
  drawPart(jawOutline, false);
}

/**
 * Draw one landmark feature as an open or closed shape, scaling the
 * face-api coordinates (video space) into canvas space.
 * @param {Array}   feature array of points with _x/_y fields
 * @param {boolean} closed  close the shape back to its first vertex
 */
function drawPart(feature, closed) {
  beginShape();
  const factorX = width / video.width;
  const factorY = height / video.height;
  for (let i = 0; i < feature.length; i++) {
    vertex(feature[i]._x * factorX, feature[i]._y * factorY);
  }
  if (closed === true) {
    endShape(CLOSE);
  } else {
    endShape();
  }
}

/**
 * Wrapper around the p5.js color class.
 * NOTE(review): p5.Color's constructor is not documented public API, and
 * p5.instance may still be undefined when `colors` below is initialized
 * at script load — confirm against the bundled p5 version.
 * @param {*} c color, either as array, or string (css name or HEX string)
 */
function getColor(c) {
  if (!Array.isArray(c)) c = [c];
  return new p5.Color(p5.instance, c);
}

// Small named palette. Kept as `var` in case sibling scripts read it off
// `window` — TODO confirm and tighten to `const`.
var colors = {
  red: getColor('red'),
  blue: getColor('blue'),
  green: getColor('green'),
};

// TODO: distance between faces, in pixels, not meters.. for now
function faceDistance(face1, face2) {
}

// TODO: arguments contains points, or sets of points. Find bbox and
// return { top, left, width, height }.
function getBoundingBox() {
  console.log(arguments);
}