// p5.js-web-editor/dist/static/assets/webcam.js

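// Webcam face-tracking sandbox: captures the webcam with p5.js, runs ml5's
// faceApi over the video, and renders through a `handleResults` routine that
// the parent editor frame can replace at runtime via postMessage.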

console.log('p5 version:', p5);
console.log('ml5 version:', ml5);
let assets = {};
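// p5's draw() is intentionally left empty: rendering happens in the
// face-detection callback (gotResults -> handleResults) rather than in the
// regular animation loop.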
var draw = function () {
  // //test
  // background(parseInt(Math.random()*255),parseInt(Math.random()*255),parseInt(Math.random()*255));
  // image(video, -width/2, -height/2, width, height);
  // console.log(detections)
};
var gotResults = function (err, result) {
  if (err) {
    console.log(err);
    return;
  }
};
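// Report errors in user-supplied code back to the parent editor frame so it
// can surface them in its UI; the matching no_code_error call signals that
// the latest run of that type produced no error.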
function code_error(type, error) {
  window.parent.postMessage({
    'type': type,
    'error': error.message,
    'name': error.name,
    'line': error.lineNumber - 2, // seems it gives wrong line numbers
    'column': error.columnNumber
  }, '*');
}
function no_code_error(type) {
  window.parent.postMessage({
    'type': type,
    'error': null
  }, '*');
}
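// Messages from the parent editor frame drive this sketch:
//   { action: 'asset', id, content } — load an image into the `assets` map
//     for user code to reference (content === null removes it).
//   { action: 'code', draw }         — compile the user's draw code into a
//     function that replaces `handleResults`.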
window.addEventListener("message", function (e) {
  if (e.origin !== window.location.origin) {
    console.error("Invalid origin of message. Ignored");
    return;
  }
  console.debug("received", e.data);
  switch (e.data.action) {
    case 'asset':
      if (e.data.content === null) {
        delete assets[e.data.id];
      } else {
        assets[e.data.id] = loadImage(e.data.content);
      }
      break;
    case 'code': {
      let f = new Function("");
      try {
        f = new Function(e.data.draw);
        no_code_error('syntax');
      } catch (error) {
        code_error('syntax', error);
        // window.parent.postMessage({'syntax': error.lineNumber});
      }
      handleResults = f;
      break;
    }
    default:
      console.error("Invalid action", e.data.action);
      break;
  }
});
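// A minimal sketch of how the parent editor might drive this listener
// (the frame lookup and `imageDataUrl` are hypothetical; the actual editor
// code lives outside this file):
//
//   const frame = document.querySelector('iframe').contentWindow;
//   // Replace the drawing routine with user code:
//   frame.postMessage({ action: 'code', draw: 'background(0); image(video, 0, 0, width, height);' }, window.location.origin);
//   // Register an image asset under an id for user code to use:
//   frame.postMessage({ action: 'asset', id: 'overlay', content: imageDataUrl }, window.location.origin);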
let faceapi;
var video;
var detections;
var graphics;
let running = true;
// Toggle the detection loop; when resuming, kick off a new detection pass.
function pause() {
  if (running) {
    running = false;
  } else {
    running = true;
    faceapi.detect(gotResults);
  }
}
// by default all options are set to true
const detection_options = {
  withLandmarks: true,
  withDescriptors: false,
  minConfidence: 0.5,
  Mobilenetv1Model: location.origin + '/assets/faceapi',
  FaceLandmarkModel: location.origin + '/assets/faceapi',
  FaceLandmark68TinyNet: location.origin + '/assets/faceapi',
  FaceRecognitionModel: location.origin + '/assets/faceapi',
};
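// setup() creates a WEBGL canvas, starts the webcam capture, and hands the
// hidden video element to ml5's faceApi; modelReady fires once the models
// (served from /assets/faceapi above) have loaded.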
function setup() {
  createCanvas(1280, 720, WEBGL);
  smooth();
  noFill();
  let constraints = {
    video: {
      width: { min: 720 },
      height: { min: 540 }
    },
    audio: false
  };
  // graphics = createGraphics();
  video = createCapture(constraints);
  console.log(video.videoWidth);
  console.log(video);
  // HeadGazeSetup(video);
  // video.size(width, height);
  video.hide(); // Hide the video element, and just show the canvas
  faceapi = ml5.faceApi(video, detection_options, modelReady);
  textAlign(RIGHT);
}
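// Once the models have loaded, start the first detection; gotResults
// re-queues detections for as long as `running` is true.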
function modelReady() {
  faceapi.detect(gotResults);
}
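// Default drawing routine; the 'code' message handler above swaps this out
// for a function compiled from the user's draw code.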
var handleResults = function () {
  // background(parseInt(Math.random()*255),parseInt(Math.random()*255),parseInt(Math.random()*255));
  background((millis() / 100) % 255, 0, 0);
  image(video, -width / 2 + 10, -height / 2 + 10, width - 20, height - 20);
};
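// Detection callback: store the detections, run handleResults inside a
// push/translate/pop so user code can draw with (0,0) at the top-left of the
// WEBGL canvas, and report any runtime error back to the parent frame.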
gotResults = function (err, result) {
  if (err) {
    console.log(err);
    return;
  }
  // console.log(result)
  detections = result;
  try {
    push();
    translate(-width / 2, -height / 2);
    handleResults();
    pop();
    no_code_error('runtime');
  } catch (error) {
    code_error('runtime', error);
  }
  // // background(220);
  // background(255);
  // push();
  // // with WEBGL, the coordinate system is 0,0 in the center.
  // translate(-width / 2, -height / 2, 0);
  // image(video, 0, 0, width, height);
  // // image(video, 0,0, width, height)
  // if (detections) {
  //   if (detections.length > 0) {
  //     // console.log(detections)
  //     drawBox(detections)
  //     drawLandmarks(detections)
  //     for (let detection of detections) {
  //       let t = HeadGazeDetect(detection);
  //       let rot = vecToRotation(t.rotation);
  //       document.getElementById('yaw').value = rot[0];
  //       document.getElementById('roll').value = rot[1];
  //       document.getElementById('pitch').value = rot[2];
  //       // let gaze = getMappedVectors()
  //       // noFill();
  //       // stroke(161, 255, 0,100);
  //       // strokeWeight(2);
  //       // beginShape();
  //       // vertex(gaze[0].x,gaze[0].y);
  //       // vertex(gaze[1].x,gaze[1].y);
  //       // endShape();
  //       // stroke(255, 255, 0,100);
  //       // beginShape();
  //       // vertex(gaze[0].x,gaze[0].y);
  //       // vertex(gaze[2].x,gaze[2].y);
  //       // endShape();
  //       // stroke(0, 0, 255,100);
  //       // beginShape();
  //       // vertex(gaze[0].x,gaze[0].y);
  //       // vertex(gaze[3].x,gaze[3].y);
  //       // endShape();
  //       // normalMaterial();
  //       push();
  //       console.log('translate', t.translation.data64F);
  //       // texture(graphics);
  //       translate(width/2, height/2, 10);
  //       // plane(70);
  //       // translate(t.translation.data64F[0], t.translation.data64F[1], t.translation.data64F[2])
  //       // rotateX(-rot[2]);
  //       rotateY(rot[0]);
  //       // rotateZ(rot[1]);
  //       stroke(255, 0, 0);
  //       // texture(graphics);
  //       plane(70);
  //       pop();
  //     }
  //   }
  // }
  // pop();
  if (running) {
    faceapi.detect(gotResults);
  }
};
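// Draw a bounding box around each detected face using the alignedRect that
// ml5/face-api.js attaches to every detection.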
function drawBox(detections) {
  for (let i = 0; i < detections.length; i++) {
    const alignedRect = detections[i].alignedRect;
    const x = alignedRect._box._x;
    const y = alignedRect._box._y;
    const boxWidth = alignedRect._box._width;
    const boxHeight = alignedRect._box._height;
    noFill();
    stroke(161, 95, 251);
    strokeWeight(2);
    rect(x, y, boxWidth, boxHeight);
  }
}
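// Trace the detected landmarks grouped by facial feature; eyes, nose, and
// mouth are drawn as closed shapes, brows and jawline as open polylines.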
function drawLandmarks(detections) {
  // noFill();
  // stroke(161, 95, 251)
  // strokeWeight(2)
  for (let i = 0; i < detections.length; i++) {
    const mouth = detections[i].parts.mouth;
    const nose = detections[i].parts.nose;
    const leftEye = detections[i].parts.leftEye;
    const rightEye = detections[i].parts.rightEye;
    const rightEyeBrow = detections[i].parts.rightEyeBrow;
    const leftEyeBrow = detections[i].parts.leftEyeBrow;
    const jawOutline = detections[i].parts.jawOutline;
    drawPart(mouth, true);
    drawPart(nose, true);
    drawPart(leftEye, true);
    drawPart(leftEyeBrow, false);
    drawPart(rightEye, true);
    drawPart(rightEyeBrow, false);
    drawPart(jawOutline, false);
  }
}
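// Render one landmark group as a p5 shape; `closed` joins the last vertex
// back to the first (e.g. for eyes and mouth).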
function drawPart(feature, closed) {
  beginShape();
  for (let i = 0; i < feature.length; i++) {
    const x = feature[i]._x;
    const y = feature[i]._y;
    vertex(x, y);
  }
  if (closed === true) {
    endShape(CLOSE);
  } else {
    endShape();
  }
}