add assets
parent bd8391bcf4
commit 8d2c3601d5
21 changed files with 511 additions and 0 deletions

1 dist/static/assets/faceapi/README vendored Normal file
@@ -0,0 +1 @@
Model weights from https://github.com/justadudewhohacks/face-api.js
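These are the standard pre-trained weights shipped with face-api.js. As a minimal sketch (not part of this commit), they can be consumed directly with face-api.js by pointing the loaders at this directory, e.g. inside an async init function; the '/assets/faceapi' URL assumes dist/static is served from the site root, which is also what webcam.js below assumes, and 'videoEl' is a placeholder for whichever video element is being analysed:

const MODEL_URL = '/assets/faceapi';
await faceapi.nets.ssdMobilenetv1.loadFromUri(MODEL_URL);    // detector weights
await faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL); // 68-point landmarks
await faceapi.nets.faceExpressionNet.loadFromUri(MODEL_URL); // expression classifier
const results = await faceapi
    .detectAllFaces(videoEl)
    .withFaceLandmarks()
    .withFaceExpressions();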

BIN dist/static/assets/faceapi/age_gender_model-shard1 vendored Normal file
Binary file not shown.

1 dist/static/assets/faceapi/age_gender_model-weights_manifest.json vendored Normal file
File diff suppressed because one or more lines are too long

BIN dist/static/assets/faceapi/face_expression_model-shard1 vendored Normal file
Binary file not shown.

1 dist/static/assets/faceapi/face_expression_model-weights_manifest.json vendored Normal file
File diff suppressed because one or more lines are too long

BIN dist/static/assets/faceapi/face_landmark_68_model-shard1 vendored Normal file
Binary file not shown.

1 dist/static/assets/faceapi/face_landmark_68_model-weights_manifest.json vendored Normal file
File diff suppressed because one or more lines are too long

BIN dist/static/assets/faceapi/face_landmark_68_tiny_model-shard1 vendored Normal file
Binary file not shown.

1 dist/static/assets/faceapi/face_landmark_68_tiny_model-weights_manifest.json vendored Normal file
@@ -0,0 +1 @@
[{"weights":[{"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008194216092427571,"min":-0.9423348506291708}},{"name":"dense0/conv0/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006839508168837603,"min":-0.8412595047670252}},{"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009194007106855804,"min":-1.2779669878529567}},{"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0036026100317637128,"min":-0.3170296827952067}},{"name":"dense0/conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000740380117706224,"min":-0.06367269012273527}},{"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},{"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},{"name":"dense0/conv2/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0037702228508743585,"min":-0.6220867703942692}},{"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0033707996209462483,"min":-0.421349952618281}},{"name":"dense1/conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014611541991140328,"min":-1.8556658328748217}},{"name":"dense1/conv0/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002832523046755323,"min":-0.30307996600281956}},{"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006593170586754294,"min":-0.6329443763284123}},{"name":"dense1/conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.012215249211180444,"min":-1.6001976466646382}},{"name":"dense1/conv1/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002384825547536214,"min":-0.3028728445370992}},{"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005859645441466687,"min":-0.7617539073906693}},{"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013121426806730382,"min":-1.7845140457153321}},{"name":"dense1/conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032247188044529336,"min":-0.46435950784122243}},{"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002659512618008782,"min":-0.32977956463308894}},{"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015499923743453681,"min":-1.9839902391620712}},{"name":"dense2/conv0/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032450980999890497,"min":-0.522460794098237}},{"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005911862382701799,"min":-0.792189559282041}},{"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021025861478319356,"min":-2.2077154552235325}},{"name":"dense2/conv1/bias","shape":[128],"dtype":"float32","quantization":{"dtyp
e":"uint8","scale":0.00349616945958605,"min":-0.46149436866535865}},{"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008104994250278847,"min":-1.013124281284856}},{"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029337059282789044,"min":-3.5791212325002633}},{"name":"dense2/conv2/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0038808938334969913,"min":-0.4230174278511721}},{"name":"fc/weights","shape":[128,136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014016061670639936,"min":-1.8921683255363912}},{"name":"fc/bias","shape":[136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029505149698724935,"min":0.088760145008564}}],"paths":["face_landmark_68_tiny_model-shard1"]}]
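The manifest above follows the TensorFlow.js weights-manifest layout: each entry gives a tensor name, shape, stored dtype and, for the quantized models, a uint8 encoding with a scale and min offset. A minimal sketch of decoding one quantized tensor by hand from the shard listed in paths (the helper name and fetch URL are illustrative, not part of this commit):

// Sketch: decode the first tensor of the first weight group (it sits at byte offset 0).
async function decodeFirstTensor(manifest, baseUrl) {
    const group = manifest[0];
    const spec = group.weights[0];                        // e.g. dense0/conv0/filters
    const buf = await (await fetch(baseUrl + '/' + group.paths[0])).arrayBuffer();
    const count = spec.shape.reduce((a, b) => a * b, 1);  // element count from the shape
    const q = new Uint8Array(buf, 0, count);              // one byte per quantized value
    const { scale, min } = spec.quantization;
    return Float32Array.from(q, (v) => v * scale + min);  // uint8 -> float32: q * scale + min
}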

BIN dist/static/assets/faceapi/face_recognition_model-shard1 vendored Normal file
Binary file not shown.

6 dist/static/assets/faceapi/face_recognition_model-shard2 vendored Normal file
File diff suppressed because one or more lines are too long

1 dist/static/assets/faceapi/face_recognition_model-weights_manifest.json vendored Normal file
File diff suppressed because one or more lines are too long

BIN dist/static/assets/faceapi/mtcnn_model-shard1 vendored Normal file
Binary file not shown.

1 dist/static/assets/faceapi/mtcnn_model-weights_manifest.json vendored Normal file
@@ -0,0 +1 @@
[{"paths":["mtcnn_model-shard1"],"weights":[{"dtype":"float32","name":"pnet/conv1/weights","shape":[3,3,3,10]},{"dtype":"float32","name":"pnet/conv1/bias","shape":[10]},{"dtype":"float32","name":"pnet/prelu1_alpha","shape":[10]},{"dtype":"float32","name":"pnet/conv2/weights","shape":[3,3,10,16]},{"dtype":"float32","name":"pnet/conv2/bias","shape":[16]},{"dtype":"float32","name":"pnet/prelu2_alpha","shape":[16]},{"dtype":"float32","name":"pnet/conv3/weights","shape":[3,3,16,32]},{"dtype":"float32","name":"pnet/conv3/bias","shape":[32]},{"dtype":"float32","name":"pnet/prelu3_alpha","shape":[32]},{"dtype":"float32","name":"pnet/conv4_1/weights","shape":[1,1,32,2]},{"dtype":"float32","name":"pnet/conv4_1/bias","shape":[2]},{"dtype":"float32","name":"pnet/conv4_2/weights","shape":[1,1,32,4]},{"dtype":"float32","name":"pnet/conv4_2/bias","shape":[4]},{"dtype":"float32","name":"rnet/conv1/weights","shape":[3,3,3,28]},{"dtype":"float32","name":"rnet/conv1/bias","shape":[28]},{"dtype":"float32","name":"rnet/prelu1_alpha","shape":[28]},{"dtype":"float32","name":"rnet/conv2/weights","shape":[3,3,28,48]},{"dtype":"float32","name":"rnet/conv2/bias","shape":[48]},{"dtype":"float32","name":"rnet/prelu2_alpha","shape":[48]},{"dtype":"float32","name":"rnet/conv3/weights","shape":[2,2,48,64]},{"dtype":"float32","name":"rnet/conv3/bias","shape":[64]},{"dtype":"float32","name":"rnet/prelu3_alpha","shape":[64]},{"dtype":"float32","name":"rnet/fc1/weights","shape":[576,128]},{"dtype":"float32","name":"rnet/fc1/bias","shape":[128]},{"dtype":"float32","name":"rnet/prelu4_alpha","shape":[128]},{"dtype":"float32","name":"rnet/fc2_1/weights","shape":[128,2]},{"dtype":"float32","name":"rnet/fc2_1/bias","shape":[2]},{"dtype":"float32","name":"rnet/fc2_2/weights","shape":[128,4]},{"dtype":"float32","name":"rnet/fc2_2/bias","shape":[4]},{"dtype":"float32","name":"onet/conv1/weights","shape":[3,3,3,32]},{"dtype":"float32","name":"onet/conv1/bias","shape":[32]},{"dtype":"float32","name":"onet/prelu1_alpha","shape":[32]},{"dtype":"float32","name":"onet/conv2/weights","shape":[3,3,32,64]},{"dtype":"float32","name":"onet/conv2/bias","shape":[64]},{"dtype":"float32","name":"onet/prelu2_alpha","shape":[64]},{"dtype":"float32","name":"onet/conv3/weights","shape":[3,3,64,64]},{"dtype":"float32","name":"onet/conv3/bias","shape":[64]},{"dtype":"float32","name":"onet/prelu3_alpha","shape":[64]},{"dtype":"float32","name":"onet/conv4/weights","shape":[2,2,64,128]},{"dtype":"float32","name":"onet/conv4/bias","shape":[128]},{"dtype":"float32","name":"onet/prelu4_alpha","shape":[128]},{"dtype":"float32","name":"onet/fc1/weights","shape":[1152,256]},{"dtype":"float32","name":"onet/fc1/bias","shape":[256]},{"dtype":"float32","name":"onet/prelu5_alpha","shape":[256]},{"dtype":"float32","name":"onet/fc2_1/weights","shape":[256,2]},{"dtype":"float32","name":"onet/fc2_1/bias","shape":[2]},{"dtype":"float32","name":"onet/fc2_2/weights","shape":[256,4]},{"dtype":"float32","name":"onet/fc2_2/bias","shape":[4]},{"dtype":"float32","name":"onet/fc2_3/weights","shape":[256,10]},{"dtype":"float32","name":"onet/fc2_3/bias","shape":[10]}]}]

BIN dist/static/assets/faceapi/ssd_mobilenetv1_model-shard1 vendored Normal file
Binary file not shown.

137 dist/static/assets/faceapi/ssd_mobilenetv1_model-shard2 vendored Normal file
File diff suppressed because one or more lines are too long

1 dist/static/assets/faceapi/ssd_mobilenetv1_model-weights_manifest.json vendored Normal file
File diff suppressed because one or more lines are too long

BIN dist/static/assets/faceapi/tiny_face_detector_model-shard1 vendored Normal file
Binary file not shown.

1 dist/static/assets/faceapi/tiny_face_detector_model-weights_manifest.json vendored Normal file
@@ -0,0 +1 @@
[{"weights":[{"name":"conv0/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009007044399485869,"min":-1.2069439495311063}},{"name":"conv0/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005263455241334205,"min":-0.9211046672334858}},{"name":"conv1/depthwise_filter","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004001977630690033,"min":-0.5042491814669441}},{"name":"conv1/pointwise_filter","shape":[1,1,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013836609615999109,"min":-1.411334180831909}},{"name":"conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0015159862590771096,"min":-0.30926119685173037}},{"name":"conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002666276225856706,"min":-0.317286870876948}},{"name":"conv2/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015265831292844286,"min":-1.6792414422128714}},{"name":"conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0020280554598453,"min":-0.37113414915168985}},{"name":"conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006100742489683862,"min":-0.8907084034938438}},{"name":"conv3/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016276211832083907,"min":-2.0508026908425725}},{"name":"conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003394414279975143,"min":-0.7637432129944072}},{"name":"conv4/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006716050119961009,"min":-0.8059260143953211}},{"name":"conv4/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021875603993733724,"min":-2.8875797271728514}},{"name":"conv4/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0041141652009066415,"min":-0.8187188749804216}},{"name":"conv5/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008423839597141042,"min":-0.9013508368940915}},{"name":"conv5/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.030007277283014035,"min":-3.8709387695088107}},{"name":"conv5/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008402082966823203,"min":-1.4871686851277068}},{"name":"conv8/filters","shape":[1,1,512,25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.028336129469030042,"min":-4.675461362389957}},{"name":"conv8/bias","shape":[25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002268134028303857,"min":-0.41053225912299807}}],"paths":["tiny_face_detector_model-shard1"]}]

81 dist/static/assets/opencv.js vendored Normal file
File diff suppressed because one or more lines are too long

278 dist/static/assets/webcam.js vendored Normal file
@@ -0,0 +1,278 @@
console.log('p5 version:', p5);
console.log('ml5 version:', ml5);

let assets = {};

var draw = function () {
    // //test
    // background(parseInt(Math.random()*255),parseInt(Math.random()*255),parseInt(Math.random()*255));
    // image(video, -width/2, -height/2, width, height);
    // console.log(detections)
};

var gotResults = function(err, result) {
    if (err) {
        console.log(err);
        return;
    }
};

function code_error(type, error) {
    window.parent.postMessage({
        'type': type,
        'error': error.message,
        'name': error.name,
        // error.lineNumber / error.columnNumber are non-standard, Firefox-only fields
        'line': error.lineNumber - 2, // seems it gives wrong line numbers
        'column': error.columnNumber
    }, '*');
}

function no_code_error(type){
    window.parent.postMessage({
        'type': type,
        'error': null
    }, '*');
}
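
// Sketch, not part of this file: the page embedding this sketch in an iframe is
// expected to pick these reports up on its own side, roughly like
//
//   window.addEventListener('message', (e) => {
//     if (e.data && (e.data.type === 'syntax' || e.data.type === 'runtime')) {
//       showError(e.data); // hypothetical editor-UI helper; e.data.error is null when the last run was clean
//     }
//   });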

window.addEventListener("message", function (e) {
    if (e.origin !== window.location.origin) {
        console.error("Invalid origin of message. Ignored");
        return;
    }

    console.debug("receive", e.data);

    switch (e.data.action) {
        case 'asset':
            if (e.data.content === null) {
                delete assets[e.data.id];
            } else {
                assets[e.data.id] = loadImage(e.data.content);
            }

            break;
        case 'code': {
            let f = new Function("");
            try {
                f = new Function(e.data.draw);
                no_code_error('syntax');
            } catch (error) {
                code_error('syntax', error);
                // window.parent.postMessage({'syntax': error.lineNumber});
            }
            handleResults = f;
            break;
        }

        default:
            console.error("Invalid action", e.data.action);
            break;
    }

});
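
// Sketch, not part of this file: the embedding page would drive this listener
// with same-origin postMessage calls shaped like the cases above, e.g.
//
//   const frame = document.querySelector('iframe'); // hypothetical selector
//   // replace the user draw code; the string becomes handleResults() below
//   frame.contentWindow.postMessage({ action: 'code', draw: 'background(0, 50, 0);' }, window.location.origin);
//   // register an image asset under an id ('dataUrl' stands for a base64 data URL; content: null removes it)
//   frame.contentWindow.postMessage({ action: 'asset', id: 'logo', content: dataUrl }, window.location.origin);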

let faceapi;
var video;
var detections;
var graphics;

let running = true;

function pause() {
    if (running)
        running = false;
    else {
        running = true;
        faceapi.detect(gotResults);
    }
}

// by default all options are set to true
const detection_options = {
    withLandmarks: true,
    withDescriptors: false,
    minConfidence: 0.5,
    // point ml5's face-api wrapper at the model files vendored in this commit
    Mobilenetv1Model: location.origin + '/assets/faceapi',
    FaceLandmarkModel: location.origin + '/assets/faceapi',
    FaceLandmark68TinyNet: location.origin + '/assets/faceapi',
    FaceRecognitionModel: location.origin + '/assets/faceapi',
}

function setup() {
    createCanvas(1280, 720, WEBGL);
    smooth();
    noFill();

    let constraints = {
        video: {
            width: { min: 720 },
            height: { min: 540 }
        },
        audio: false
    };

    // graphics = createGraphics();
    video = createCapture(constraints);
    console.log(video.videoWidth);
    console.log(video);
    // HeadGazeSetup(video);
    // video.size(width, height);
    video.hide(); // Hide the video element, and just show the canvas
    faceapi = ml5.faceApi(video, detection_options, modelReady);
    textAlign(RIGHT);
}

function modelReady() {
    faceapi.detect(gotResults);
}

var handleResults = function(){
    // background(parseInt(Math.random()*255),parseInt(Math.random()*255),parseInt(Math.random()*255));
    background((millis()/100)%255, 0, 0);
    image(video, -width/2 + 10, -height/2 + 10, width - 20, height - 20);
};

// reassigns the stub declared at the top of the file once detections start arriving
gotResults = function(err, result) {
    if (err) {
        console.log(err);
        return;
    }
    // console.log(result)
    detections = result;

    try {
        push();
        translate(-width/2, -height/2);
        handleResults();
        pop();

        no_code_error('runtime');
    } catch (error) { code_error('runtime', error); }

    // // background(220);
    // background(255);

    // push();
    // // with WEBGL, the coordinate system is 0,0 in the center.
    // translate(-width / 2, -height / 2, 0);
    // image(video, 0, 0, width, height);

    // // image(video, 0,0, width, height)
    // if (detections) {
    // if (detections.length > 0) {
    // // console.log(detections)
    // drawBox(detections)
    // drawLandmarks(detections)
    // for (let detection of detections) {
    // let t = HeadGazeDetect(detection);

    // let rot = vecToRotation(t.rotation);

    // document.getElementById('yaw').value = rot[0];
    // document.getElementById('roll').value = rot[1];
    // document.getElementById('pitch').value = rot[2];
    // // let gaze = getMappedVectors()
    // // noFill();
    // // stroke(161, 255, 0,100);
    // // strokeWeight(2);
    // // beginShape();
    // // vertex(gaze[0].x,gaze[0].y);
    // // vertex(gaze[1].x,gaze[1].y);
    // // endShape();
    // // stroke(255, 255, 0,100);
    // // beginShape();
    // // vertex(gaze[0].x,gaze[0].y);
    // // vertex(gaze[2].x,gaze[2].y);
    // // endShape();
    // // stroke(0, 0, 255,100);
    // // beginShape();
    // // vertex(gaze[0].x,gaze[0].y);
    // // vertex(gaze[3].x,gaze[3].y);
    // // endShape();


    // // normalMaterial();
    // push();
    // console.log('translate', t.translation.data64F);

    // // texture(graphics);
    // translate(width/2, height/2, 10);
    // // plane(70);
    // // translate(t.translation.data64F[0], t.translation.data64F[1], t.translation.data64F[2])
    // // rotateX(-rot[2]);
    // rotateY(rot[0]);
    // // rotateZ(rot[1]);

    // stroke(255, 0, 0);
    // // texture(graphics);
    // plane(70);
    // pop();
    // }
    // }

    // }
    // pop();

    if (running)
        faceapi.detect(gotResults);
}

function drawBox(detections) {
    for (let i = 0; i < detections.length; i++) {
        const alignedRect = detections[i].alignedRect;
        const x = alignedRect._box._x;
        const y = alignedRect._box._y;
        const boxWidth = alignedRect._box._width;
        const boxHeight = alignedRect._box._height;

        noFill();
        stroke(161, 95, 251);
        strokeWeight(2);
        rect(x, y, boxWidth, boxHeight);
    }

}

function drawLandmarks(detections) {
    // noFill();
    // stroke(161, 95, 251)
    // strokeWeight(2)

    for (let i = 0; i < detections.length; i++) {
        const mouth = detections[i].parts.mouth;
        const nose = detections[i].parts.nose;
        const leftEye = detections[i].parts.leftEye;
        const rightEye = detections[i].parts.rightEye;
        const rightEyeBrow = detections[i].parts.rightEyeBrow;
        const leftEyeBrow = detections[i].parts.leftEyeBrow;
        const jawOutline = detections[i].parts.jawOutline;

        drawPart(mouth, true);
        drawPart(nose, true);
        drawPart(leftEye, true);
        drawPart(leftEyeBrow, false);
        drawPart(rightEye, true);
        drawPart(rightEyeBrow, false);
        drawPart(jawOutline, false);

    }
}

function drawPart(feature, closed) {
    beginShape();
    for (let i = 0; i < feature.length; i++) {
        const x = feature[i]._x;
        const y = feature[i]._y;
        vertex(x, y);
    }

    if (closed === true) {
        endShape(CLOSE);
    } else {
        endShape();
    }

}