mirror of https://github.com/vladmandic/human
reimplemented blazeface processing
parent 99fadef352
commit 250187f259
@@ -71,10 +71,10 @@ export default {
    // e.g., if model is running at 25 FPS, we can re-use existing bounding
    // box for updated face analysis as the head probably hasn't moved much
    // in short time (10 * 1/25 = 0.25 sec)
-   minConfidence: 0.1, // threshold for discarding a prediction
-   iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much in
+   minConfidence: 0.5, // threshold for discarding a prediction
+   iouThreshold: 0.2, // threshold for deciding whether boxes overlap too much in
    // non-maximum suppression (0.1 means drop if overlap 10%)
-   scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score
+   scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score
    // in non-maximum suppression,
    // this is applied on detection objects only and before minConfidence
  },
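
For orientation, thresholds like these typically feed a confidence filter plus a non-maximum suppression pass over the detector's candidate boxes. A minimal sketch of how such values are commonly applied with TFJS, not the library's internal code; the 'boxes' ([N,4]) and 'scores' ([N]) tensors and the cap of 10 faces are illustrative assumptions:

    import * as tf from '@tensorflow/tfjs';

    // boxes: [N, 4] candidate face boxes, scores: [N] confidences (illustrative detector outputs)
    async function filterDetections(boxes, scores, config) {
      const nms = await tf.image.nonMaxSuppressionAsync(
        boxes, scores,
        10,                       // illustrative cap on number of faces kept
        config.iouThreshold,      // 0.2: drop a box overlapping a better box by more than 20%
        config.scoreThreshold);   // 0.5: drop low-scoring boxes during suppression
      const keep = await nms.data();
      nms.dispose();
      return Array.from(keep);    // predictions still below config.minConfidence are discarded afterwards
    }
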
@@ -393,8 +393,7 @@ async function main() {
   // this is not required, just pre-warms all models for faster initial inference
   if (ui.modelsWarmup) {
     status('initializing');
-    const warmup = new ImageData(256, 256);
-    await human.detect(warmup, userConfig);
+    await human.warmup(userConfig);
   }
   status('human: ready');
   document.getElementById('loader').style.display = 'none';
@@ -19,8 +19,8 @@
   <!-- <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-webgpu@0.0.1-alpha.0/dist/tf-webgpu.js"></script> -->
   <!-- load compiled demo js -->
   <script src="../dist/demo-browser-index.js"></script>
-  <!-- alternatively load demo sources directly -->
-  <!-- <script src="browser.js" type="module"></script> -->
+  <!-- alternatively load demo sources directly, this is not supported on mobile platforms as they don't support type=module -->
+  <!-- <script src="./browser.js" type="module"></script> -->
   <style>
     @font-face { font-family: 'Lato'; font-display: swap; font-style: normal; font-weight: 400; src: local('Lato'), url('../assets/lato.ttf') format('truetype'); }
     html { font-family: 'Lato', 'Segoe UI'; font-size: 16px; font-variant: small-caps; }
@@ -25,17 +25,19 @@ exports.face = (res) => {
     // if (face.annotations['rightCheek'] && face.annotations['leftCheek'] && (face.annotations['rightCheek'].length > 0) && (face.annotations['leftCheek'].length > 0)) {
     //   gestures.push(`facing ${((face.annotations['rightCheek'][0][2] > 0) || (face.annotations['leftCheek'][0][2] < 0)) ? 'right' : 'left'}`);
     // }
-    const eyeFacing = face.mesh[35][2] - face.mesh[263][2];
-    if (Math.abs(eyeFacing) < 10) gestures.push('facing camera');
-    else gestures.push(`facing ${eyeFacing < 0 ? 'right' : 'left'}`);
-    const openLeft = Math.abs(face.mesh[374][1] - face.mesh[386][1]) / Math.abs(face.mesh[443][1] - face.mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord
-    if (openLeft < 0.2) gestures.push('blink left eye');
-    const openRight = Math.abs(face.mesh[145][1] - face.mesh[159][1]) / Math.abs(face.mesh[223][1] - face.mesh[230][1]); // center of eye inner lid y coord div center of wider eye border y coord
-    if (openRight < 0.2) gestures.push('blink right eye');
-    const mouthOpen = Math.min(100, 500 * Math.abs(face.mesh[13][1] - face.mesh[14][1]) / Math.abs(face.mesh[10][1] - face.mesh[152][1]));
-    if (mouthOpen > 10) gestures.push(`mouth ${Math.trunc(mouthOpen)}% open`);
-    const chinDepth = face.mesh[152][2];
-    if (Math.abs(chinDepth) > 10) gestures.push(`head ${chinDepth < 0 ? 'up' : 'down'}`);
+    if (face.mesh && face.mesh.length > 0) {
+      const eyeFacing = face.mesh[35][2] - face.mesh[263][2];
+      if (Math.abs(eyeFacing) < 10) gestures.push('facing camera');
+      else gestures.push(`facing ${eyeFacing < 0 ? 'right' : 'left'}`);
+      const openLeft = Math.abs(face.mesh[374][1] - face.mesh[386][1]) / Math.abs(face.mesh[443][1] - face.mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord
+      if (openLeft < 0.2) gestures.push('blink left eye');
+      const openRight = Math.abs(face.mesh[145][1] - face.mesh[159][1]) / Math.abs(face.mesh[223][1] - face.mesh[230][1]); // center of eye inner lid y coord div center of wider eye border y coord
+      if (openRight < 0.2) gestures.push('blink right eye');
+      const mouthOpen = Math.min(100, 500 * Math.abs(face.mesh[13][1] - face.mesh[14][1]) / Math.abs(face.mesh[10][1] - face.mesh[152][1]));
+      if (mouthOpen > 10) gestures.push(`mouth ${Math.trunc(mouthOpen)}% open`);
+      const chinDepth = face.mesh[152][2];
+      if (Math.abs(chinDepth) > 10) gestures.push(`head ${chinDepth < 0 ? 'up' : 'down'}`);
+    }
   }
   return gestures;
 };
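
As a concrete reading of the eye and mouth heuristics above: each ratio compares a small landmark distance against a larger reference distance along the same axis, so the result is roughly independent of face size in the frame. A worked example with made-up coordinates (illustrative numbers only, not real mesh output):

    const innerLidGap = Math.abs(272 - 270);         // |face.mesh[374][1] - face.mesh[386][1]| = 2 px
    const eyeBorderSpan = Math.abs(262 - 276);       // |face.mesh[443][1] - face.mesh[450][1]| = 14 px
    const openLeft = innerLidGap / eyeBorderSpan;    // ~0.14 < 0.2, so 'blink left eye' is pushed
    const lipGap = 6, faceHeight = 220;              // lip gap vs forehead-to-chin distance
    const mouthOpen = Math.min(100, 500 * lipGap / faceHeight); // ~13.6 -> 'mouth 13% open'
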
@@ -33,7 +33,7 @@ class HandPipeline {
     this.meshDetector = meshDetector;
     this.inputSize = inputSize;
     this.storedBoxes = [];
-    this.skipped = 0;
+    this.skipped = 1000;
     this.detectedHands = 0;
   }

@@ -91,7 +91,8 @@ class HandPipeline {
     let boxes;
     if ((this.skipped > config.skipFrames) || !config.landmarks) {
       boxes = await this.boxDetector.estimateHandBounds(image, config);
-      this.skipped = 0;
+      // don't reset on test image
+      if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;
     }

     // if detector result count doesn't match current working set, use it to reset current working set
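
Combined with the constructor change above (skipped initialized to 1000), the effect is that box detection always runs on the very first real frame, while a 255x255 input, the warmup image, no longer resets the counter. A condensed sketch of the pattern, not the pipeline's exact code, folding in the per-frame counter increment that happens elsewhere:

    class SkipFrames {
      constructor() { this.skipped = 1000; } // start above any realistic skipFrames so detection runs on frame one
      update(image, config) {
        const runDetector = (this.skipped > config.skipFrames) || !config.landmarks;
        // reset only for real frames, never for the 255x255 warmup/test image
        if (runDetector && (image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;
        this.skipped++;
        return runDetector;
      }
    }
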
@@ -373,6 +373,12 @@ class Human {
       resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });
     });
   }
+
+  async warmup(userConfig) {
+    const warmup = new ImageData(255, 255);
+    await this.detect(warmup, userConfig);
+    this.log('warmed up');
+  }
 }

 export { Human as default };
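
With the new method, callers no longer need to build a dummy ImageData themselves. Rough usage, where 'userConfig' and 'videoElement' are illustrative placeholders for the application's own config overrides and input:

    const human = new Human();
    await human.warmup(userConfig);       // runs detect() once on a blank 255x255 ImageData and logs 'warmed up'
    const result = await human.detect(videoElement, userConfig); // first real call no longer pays model-initialization cost

Note that warmup constructs an ImageData, so as written it assumes a browser environment (or an ImageData polyfill) rather than Node.
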
@@ -52,8 +52,10 @@ function process(input, config) {
     if (config.filter.polaroid) this.fx.addFilter('polaroid');
     if (config.filter.pixelate !== 0) this.fx.addFilter('pixelate', config.filter.pixelate);
     this.fx.apply(inCanvas);
+  } else {
+    outCanvas = inCanvas;
   }
-  if (!outCanvas) outCanvas = inCanvas;
+  // if (!outCanvas) outCanvas = inCanvas;
   let pixels;
   if ((config.backend === 'webgl') || (outCanvas instanceof ImageData)) {
     // tf kernel-optimized method to get imagedata, also if input is imagedata, just use it
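
The "kernel-optimized method" in the last context line presumably refers to tf.browser.fromPixels, which accepts a canvas or ImageData directly and is implemented as a TFJS kernel. A rough sketch of that branch under those assumptions, not the file's exact code:

    import * as tf from '@tensorflow/tfjs';

    // rough sketch of the webgl / ImageData branch described above
    function toTensor(outCanvas, config) {
      if ((config.backend === 'webgl') || (outCanvas instanceof ImageData)) {
        return tf.browser.fromPixels(outCanvas); // [height, width, 3] int32 tensor straight from canvas/ImageData
      }
      // otherwise pixel data has to be read back through a 2d context first
      const ctx = outCanvas.getContext('2d');
      const data = ctx.getImageData(0, 0, outCanvas.width, outCanvas.height);
      return tf.browser.fromPixels(data);
    }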