p5.js-web-editor/dist/static/assets/webcam.js

// console.log('p5 version:', p5);
// console.log('ml5 version:', ml5);
// console.log(location.origin);
let assets = {};
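// p5 draw loop: paint the last captured webcam frame, then overlay every
// current face detection (grid, landmarks, mask) in its own translated and
// rotated coordinate system.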
var draw = function () {
  // Start by drawing the video frame
  // at position x = 0, y = 0,
  // filling the full width and height of the canvas.
  image(lastFrame, 0, 0, width, height);
  for (let detection of detections) {
    push();
    let transformed = transformDetection(detection);
    translate(transformed.origin.x, transformed.origin.y);
    rotate(transformed.angle);
    try {
      if (overlayGrid) {
        drawGridOverlay(transformed);
      }
      if (overlayLandmarks) {
        drawLandmarkOverlay(transformed);
      }
      drawMask(transformed);
    } catch (error) {
      console.error(error);
    }
    pop();
  }
};
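// Draw each landmark of a (transformed) detection as a red dot, labelled
// with its index so individual points can be referenced by number.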
function drawLandmarkOverlay(detection) {
  for (let nr in detection.points) {
    const p = detection.points[nr];
    stroke('red');
    strokeWeight(5);
    point(p.x, p.y);
    noStroke();
    textSize(12);
    fill('white');
    text(nr, p.x, p.y);
  }
  noFill();
}
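// Draw a 10px coordinate grid over the detection's bounding box,
// with labelled, thicker lines every 100px.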
function drawGridOverlay(detection) {
  textSize(20);
  stroke(100, 100, 100);
  strokeWeight(1);
  for (let y = 0; y < detection.height; y += 10) {
    if (y % 100 === 0) {
      strokeWeight(3);
      text(y, detection.x - 10, y);
    } else {
      strokeWeight(1);
    }
    line(detection.x, detection.y + y, detection.x + detection.width, detection.y + y);
  }
  for (let x = 0; x < detection.width; x += 10) {
    if (x != 0 && x % 100 === 0) { // 0 already drawn for y
      strokeWeight(3);
      text(x, x, detection.y - 10);
    } else {
      strokeWeight(1);
    }
    line(detection.x + x, detection.y, detection.x + x, detection.y + detection.height);
  }
}
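// Placeholder for the actual mask drawing; like setupAssets below, it is
// presumably meant to be overridden/patched with user code.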
var drawMask = function(detection) {
};
// var gotResults = function(err, result) {
// if (err) {
// console.log(err)
// return
// }
// };
// function code_error(type, error) {
// window.parent.postMessage({
// 'type': type,
// 'error': error.message,
// 'name': error.name,
// 'line': error.lineNumber - 2, // it seems to give wrong line numbers
// 'column': error.columnNumber
// }, '*');
// }
// function no_code_error(type){
// window.parent.postMessage({
// 'type': type,
// 'error': null
// }, '*');
// }
// window.addEventListener("message", function (e) {
// if (event.origin !== window.location.origin) {
// console.error("Invalid origin of message. Ignored");
// return;
// }
// console.debug("receive", e.data);
// switch (e.data.action) {
// case 'asset':
// if(e.data.content === null){
// delete assets[e.data.id];
// } else {
// assets[e.data.id] = loadImage(e.data.content);
// }
// break;
// case 'code':
// let f = new Function("");
// try {
// f = new Function(e.data.draw);
// no_code_error('syntax');
// } catch (error) {
// code_error('syntax', error);
// // window.parent.postMessage({'syntax': error.lineNumber});
// }
// handleResults = f;
// break;
// default:
// console.error("Invalid action", e.data.action);
// break;
// }
// });
let faceapi;
var video;
var lastFrame;
var frameToDetect;
var detections = [];
var factor_x, factor_y;
var flip = false; // mirror mode; disabled because it's tricky to enable with faceApi
// function pause() {
// if (running)
// running = false;
// else {
// running = true;
// faceapi.detect(gotResults);
// }
// }
// by default all options are set to true
const detection_options = {
  withLandmarks: true,
  withDescriptors: false,
  minConfidence: 0.5,
  Mobilenetv1Model: window.parent.location.origin + '/assets/faceapi',
  FaceLandmarkModel: window.parent.location.origin + '/assets/faceapi',
  FaceLandmark68TinyNet: window.parent.location.origin + '/assets/faceapi',
  FaceRecognitionModel: window.parent.location.origin + '/assets/faceapi',
  TinyFaceDetectorModel: window.parent.location.origin + '/assets/faceapi',
};
function setupAssets() {
  // placeholder. Override in patch...
}
let images = {};
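// Fetch the list of available images (/assets/images.json) and load each
// entry into the `images` map.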
function preload() {
  const req = new Request('/assets/images.json');
  fetch(req).then(
    response => response.json()
  ).then(data => {
    for (let id in data) {
      images[id] = loadImage(data[id]);
    }
    // console.log('images', data, images);
  });
  // console.log(images);
}
var overlayLandmarks = false;
var overlayGrid = false;
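// p5 setup: create the canvas and webcam capture, initialise the ml5 faceApi
// model, and build the DOM controls (overlay checkboxes and a screenshot button).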
function setup() {
  // createCanvas(1280,720, WEBGL);
  createCanvas(540, 420);
  smooth();
  noFill();
  push();
  translate(-width / 2, -height / 2);
  let constraints = {
    video: {
      width: { min: 720 },
      height: { min: 540 }
    },
    audio: false
  };
  video = createCapture(constraints);
  lastFrame = createGraphics(video.width, video.height);
  frameToDetect = createGraphics(video.width, video.height);
  // console.log(video);
  // HeadGazeSetup(video);
  // video.size(width, height);
  video.hide(); // Hide the video element, and just show the canvas
  faceapi = ml5.faceApi(video, detection_options, modelReady);
  textAlign(RIGHT);
  setupAssets();
  let controlEl = document.createElement('div');
  controlEl.classList.add('controls');
  let label1El = document.createElement('label');
  let check1El = document.createElement('input');
  check1El.type = 'checkbox';
  check1El.addEventListener('change', (e) => overlayLandmarks = e.target.checked);
  let text1Node = document.createTextNode("Show points");
  label1El.appendChild(check1El);
  label1El.appendChild(text1Node);
  controlEl.appendChild(label1El);
  let label2El = document.createElement('label');
  let check2El = document.createElement('input');
  check2El.type = 'checkbox';
  check2El.addEventListener('change', (e) => overlayGrid = e.target.checked);
  let text2Node = document.createTextNode("Show coordinates");
  label2El.appendChild(check2El);
  label2El.appendChild(text2Node);
  controlEl.appendChild(label2El);
  let downloadBtn = document.createElement('button');
  downloadBtn.innerHTML = 'screenshot';
  downloadBtn.style.float = 'right';
  // Convert canvas to image
  downloadBtn.addEventListener("click", function(e) {
    const canvas = document.querySelector('canvas');
    const dataURL = canvas.toDataURL("image/png", 1.0);
    let a = document.createElement('a');
    a.href = dataURL;
    a.download = 'screenshot.png';
    document.body.appendChild(a);
    a.click();
  });
  controlEl.appendChild(downloadBtn);
  document.body.appendChild(controlEl);
}
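// Called once the faceApi model has loaded: grab a first frame and start
// the detection loop.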
function modelReady() {
  frameToDetect.image(video, 0, 0, video.width, video.height);
  faceapi.detect(gotResults);
}
// var handleResults = function(){
// // background(parseInt(Math.random()*255),parseInt(Math.random()*255),parseInt(Math.random()*255));
// background((millis()/100)%255,0,0);
// image(video, -width/2 + 10, -height/2 + 10, width - 20, height -20);
// };
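// Detection callback: copy the analysed frame to lastFrame (mirrored when
// flip is enabled), convert the raw results to canvas coordinates, grab the
// next frame and schedule the next detection.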
var gotResults = function(err, result) {
  if (err) {
    console.error(err);
    return;
  }
  // store data for async draw function
  // TODO results to more compatible format
  // translate(width,0); // move to far corner
  if (flip) {
    lastFrame.push();
    lastFrame.scale(-1.0, 1.0); // flip x-axis backwards
    lastFrame.image(frameToDetect, -lastFrame.width, 0, lastFrame.width, lastFrame.height);
    lastFrame.pop();
  } else {
    lastFrame.image(frameToDetect, 0, 0, lastFrame.width, lastFrame.height);
  }
  detections = parseDetectionResults(result, flip, width);
  // size of video becomes known only after camera approval
  if (lastFrame.width != video.width || lastFrame.height != video.height) {
    // console.log('Resizing canvas');
    lastFrame.resizeCanvas(video.width, video.height);
    frameToDetect.resizeCanvas(video.width, video.height);
  }
  // lastFrame.background('red');
  frameToDetect.image(video, 0, 0, video.width, video.height);
  factor_x = width / video.width;
  factor_y = height / video.height;
  faceapi.detect(gotResults);
};
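// Draw the aligned bounding box of each raw faceApi detection.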
function drawBox(detections) {
  for (let i = 0; i < detections.length; i++) {
    const alignedRect = detections[i].alignedRect;
    const x = alignedRect._box._x;
    const y = alignedRect._box._y;
    const boxWidth = alignedRect._box._width;
    const boxHeight = alignedRect._box._height;
    noFill();
    stroke(161, 95, 251);
    strokeWeight(2);
    rect(x, y, boxWidth, boxHeight);
  }
}
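// Trace the outline of each facial feature (mouth, nose, eyes, brows, jaw)
// of a single detection using strokePoints().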
function drawLandmarks(detection) {
  // for (let i = 0; i < detections.length; i++) {
  const mouth = detection.parts.mouth;
  const nose = detection.parts.nose;
  const leftEye = detection.parts.leftEye;
  const rightEye = detection.parts.rightEye;
  const rightEyeBrow = detection.parts.rightEyeBrow;
  const leftEyeBrow = detection.parts.leftEyeBrow;
  const jawOutline = detection.parts.jawOutline;
  strokePoints(mouth, CLOSE);
  strokePoints(nose, CLOSE);
  strokePoints(leftEye, CLOSE);
  strokePoints(leftEyeBrow, OPEN);
  strokePoints(rightEye, CLOSE);
  strokePoints(rightEyeBrow, OPEN);
  strokePoints(jawOutline, OPEN);
  // }
}
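// Draw a polyline through the given points; pass CLOSE (the default) to
// close the shape, or OPEN to leave it open.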
function strokePoints(points, closed) {
  beginShape();
  for (let i = 0; i < points.length; i++) {
    const x = points[i].x;
    const y = points[i].y;
    vertex(x, y);
  }
  if (typeof closed === 'undefined') {
    closed = CLOSE;
  }
  endShape(closed);
}
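// Draw each point as a small circle (size defaults to 2).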
function drawPoints(points, radius) {
  if (typeof radius === 'undefined') {
    radius = 2;
  }
  for (let i = 0; i < points.length; i++) {
    const x = points[i].x;
    const y = points[i].y;
    circle(x, y, radius);
  }
}
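// Approximate the distance between two detected faces, in canvas pixels;
// the result can be negative when the faces (treated as circles) overlap.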
function faceDistance(face1, face2) {
  // distance between faces, in pixels, not meters... for now
  // we cheat a little: take the centers, treat each face as a circle
  // with r = max(width, height) / 2, and find the distance between these circles
  const box1 = [face1.x, face1.x + face1.width]; // x-extent of face1 (currently unused)
  const box2 = [face2.x, face2.x + face2.width]; // x-extent of face2 (currently unused)
  const c1 = {
    x: face1.x + face1.width / 2,
    y: face1.y + face1.height / 2,
  };
  const c2 = {
    x: face2.x + face2.width / 2,
    y: face2.y + face2.height / 2,
  };
  const r1 = Math.max(face1.width, face1.height) / 2;
  const r2 = Math.max(face2.width, face2.height) / 2;
  const dx = c1.x - c2.x;
  const dy = c1.y - c2.y;
  return Math.sqrt(Math.pow(dx, 2) + Math.pow(dy, 2)) - r1 - r2;
}
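// Flatten any mix of single points and arrays of points into one flat array.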
function mergePoints() {
  // a point should be {x: , y: }
  // collect all points in the arguments:
  let points = [];
  for (let arg of arguments) {
    if (Array.isArray(arg)) {
      points.push(...arg);
    } else {
      points.push(arg);
    }
  }
  return points;
}
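// Compute the axis-aligned bounding box {x, y, width, height} of the given
// points (or arrays of points).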
function getBoundingBox() {
  // arguments contains points, or sets of points. Find bbox
  const points = mergePoints(...arguments);
  const xs = points.map((point) => point.x);
  const ys = points.map((point) => point.y);
  const minx = Math.min(...xs);
  const miny = Math.min(...ys);
  return {
    x: minx,
    y: miny,
    width: Math.max(...xs) - minx,
    height: Math.max(...ys) - miny,
  };
}
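// Convert raw faceApi results into simple detection objects with points,
// bounding box and center in canvas coordinates (optionally mirrored).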
function parseDetectionResults(results, flip, frameWidth) {
  let detections = [];
  for (let result of results) {
    const landmarks = result.landmarks._positions.map((pos) => parseCoordinate(pos, flip, frameWidth));
    let x = result.alignedRect._box._x * factor_x;
    if (flip) {
      x *= -1;
      x += frameWidth;
    }
    let detection = {
      'points': landmarks,
      // TODO: rotation
      'parts': {},
      x: x,
      y: result.alignedRect._box._y * factor_y,
      width: result.alignedRect._box._width * factor_x,
      height: result.alignedRect._box._height * factor_y,
    };
    // for(let idx in result.parts) {
    //   detection.parts[idx] = result.parts[idx].map((pos) => parseCoordinate(pos));
    // }
    detection['center'] = {
      x: detection.x + detection.width / 2,
      y: detection.y + detection.height / 2,
    };
    detections.push(detection);
  }
  return detections;
}
/**
 * The face-api detector returns coordinates with _x and _y attributes.
 * We convert these to the canvas's coordinates.
 * @param {Object} position {_x: , _y: }
 * @param {Boolean} flip mirror the x coordinate
 * @param {Number} frameWidth frame width used when mirroring
 */
function parseCoordinate(position, flip, frameWidth) {
  let x = position._x * factor_x;
  if (flip) {
    x *= -1;
    x += frameWidth;
  }
  return {
    x: x,
    y: position._y * factor_y,
  };
}
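// Rotate a detection into an eye-aligned coordinate system: the origin is
// placed at the detection's top-left corner, points are rotated by the angle
// between the outer eye corners, and a padded bounding box is computed.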
function transformDetection(original) {
  const b = original.points[36]; // outer point on left eye
  const a = original.points[45]; // outer point on right eye
  const angle = atan2(a.y - b.y, a.x - b.x);
  // let cx = a.x/2 + b.x/2
  // let cy = a.y/2 + b.y/2
  const cx = original.x;
  const cy = original.y;
  let detection = {
    'points': original.points.map(p => transformPoint(p, cx, cy, angle)),
    'origin': { x: cx, y: cy },
    'angle': angle,
    'original': original
  };
  const bbox = getBoundingBox(detection.points);
  const padding_x = bbox.width * .1;
  const padding_y = bbox.height * .1;
  detection['x'] = bbox.x - padding_x;
  detection['y'] = bbox.y - padding_y;
  detection['width'] = bbox.width * 1.2;
  detection['height'] = bbox.height * 1.2;
  // detection['x'] = original.x - cx
  // detection['y'] = original.y - cy
  // detection['width'] = original.width
  // detection['height'] = original.height
  return detection;
}
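// Translate a point relative to (cx, cy) and rotate it by -angle.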
function transformPoint(p, cx, cy, angle) {
  const px = p.x - cx;
  const py = p.y - cy;
  return {
    x: px * cos(-angle) - py * sin(-angle),
    y: px * sin(-angle) + py * cos(-angle)
  };
}
// error handling from consoleUtils.js::hijackConsoleErrorsScript
function getScriptOff(line) {
  // offs holds [startLine, fileName] pairs for the scripts in the sketch;
  // it is left empty here, so the original line number is returned unchanged.
  var offs = [];
  var l = 0;
  var file = '';
  for (var i = 0; i < offs.length; i++) {
    var n = offs[i][0];
    if (n < line && n > l) {
      l = n;
      file = offs[i][1];
    }
  }
  return [line - l, file];
}
// catch reference errors, via http://stackoverflow.com/a/12747364/2994108
window.onerror = function (msg, url, lineNumber, columnNo, error) {
  var string = msg.toLowerCase();
  var substring = "script error";
  var data = {};
  // if (url.match(${EXTERNAL_LINK_REGEX}) !== null && error.stack){
  //   var errorNum = error.stack.split('about:srcdoc:')[1].split(':')[0];
  //   var fileInfo = getScriptOff(errorNum);
  //   data = msg + ' (' + fileInfo[1] + ': line ' + fileInfo[0] + ')';
  // } else {
  //   var fileInfo = getScriptOff(lineNumber);
  // error.fileName / error.lineNumber are non-standard; fall back to the
  // url and lineNumber arguments when they are not available.
  var fileName = (error && error.fileName) || url;
  var errorLine = (error && error.lineNumber) || lineNumber;
  data = msg + ' (' + fileName + ': line ' + errorLine + ')';
  // }
  window.parent.postMessage([{
    log: [{
      method: 'error',
      data: [data],
      id: Date.now().toString()
    }],
    source: fileName
  }], '*');
  return false;
};
// catch rejected promises
window.onunhandledrejection = function (event) {
  if (event.reason && event.reason.message && event.reason.stack) {
    // var errorNum = event.reason.stack.split('about:srcdoc:')[1].split(':')[0];
    // var fileInfo = getScriptOff(errorNum);
    var data = event.reason.message + ' (' + event.reason.stack + ': line ' + event.reason.stack.split("\n")[0] + ')';
    window.parent.postMessage([{
      log: [{
        method: 'error',
        data: [data],
        id: Date.now().toString()
      }],
      source: event.reason.stack.split("\n")[0]
    }], '*');
  }
};