reimplemented blazeface processing

pull/280/head
Vladimir Mandic 2020-11-09 14:26:10 -05:00
parent 24b6fa1f23
commit 57a93768f9
7 changed files with 31 additions and 21 deletions

View File

@@ -71,10 +71,10 @@ export default {
   // e.g., if model is running at 25 FPS, we can re-use existing bounding
   // box for updated face analysis as the head probably hasn't moved much
   // in short time (10 * 1/25 = 0.4 sec)
-  minConfidence: 0.1, // threshold for discarding a prediction
-  iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much in
+  minConfidence: 0.5, // threshold for discarding a prediction
+  iouThreshold: 0.2, // threshold for deciding whether boxes overlap too much in
   // non-maximum suppression (0.1 means drop if overlap 10%)
-  scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score
+  scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score
   // in non-maximum suppression,
   // this is applied on detection objects only and before minConfidence
 },
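For context, a minimal sketch of how thresholds like these are typically consumed in a blazeface-style detector; the function name, tensor layout, flat `config` shape, and max-result count are illustrative assumptions, not code from this commit:

// assumes: import * as tf from '@tensorflow/tfjs';
// boxesTensor: [n,4] tf.Tensor2D of candidate boxes
// scoresArray: plain array of per-box confidence scores
async function filterDetections(boxesTensor, scoresArray, config) {
  const scoresTensor = tf.tensor1d(scoresArray);
  // scoreThreshold prunes weak boxes inside NMS; iouThreshold drops boxes that
  // overlap an already-kept box by more than the given fraction
  const nms = await tf.image.nonMaxSuppressionAsync(
    boxesTensor, scoresTensor, 10 /* max results, assumed */,
    config.iouThreshold, config.scoreThreshold);
  const indices = await nms.array();
  scoresTensor.dispose();
  nms.dispose();
  // minConfidence is applied last to discard any remaining weak predictions
  return indices.filter((i) => scoresArray[i] >= config.minConfidence);
}

Raising minConfidence and scoreThreshold makes the filter stricter so fewer weak detections survive, while the higher iouThreshold lets boxes overlap a bit more before non-maximum suppression drops them.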

View File

@@ -393,8 +393,7 @@ async function main() {
   // this is not required, just pre-warms all models for faster initial inference
   if (ui.modelsWarmup) {
     status('initializing');
-    const warmup = new ImageData(256, 256);
-    await human.detect(warmup, userConfig);
+    await human.warmup(userConfig);
   }
   status('human: ready');
   document.getElementById('loader').style.display = 'none';
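A rough way to see what the pre-warm buys, assuming the demo's `human` instance and `userConfig` (the 'video' element id is also an assumption): the first inference on a cold backend pays shader-compilation and weight-upload cost that later frames do not.

let t0 = performance.now();
await human.warmup(userConfig); // one throwaway detect pass on a blank image
console.log(`warmup (cold): ${(performance.now() - t0).toFixed(0)} ms`);
t0 = performance.now();
await human.detect(document.getElementById('video'), userConfig);
console.log(`first real detect (warm): ${(performance.now() - t0).toFixed(0)} ms`);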

View File

@@ -19,8 +19,8 @@
   <!-- <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-webgpu@0.0.1-alpha.0/dist/tf-webgpu.js"></script> -->
   <!-- load compiled demo js -->
   <script src="../dist/demo-browser-index.js"></script>
-  <!-- alternatively load demo sources directly -->
-  <!-- <script src="browser.js" type="module"></script> -->
+  <!-- alternatively load demo sources directly, this is not supported on mobile platforms as they don't support type=module -->
+  <!-- <script src="./browser.js" type="module"></script> -->
   <style>
   @font-face { font-family: 'Lato'; font-display: swap; font-style: normal; font-weight: 400; src: local('Lato'), url('../assets/lato.ttf') format('truetype'); }
   html { font-family: 'Lato', 'Segoe UI'; font-size: 16px; font-variant: small-caps; }

View File

@@ -25,6 +25,7 @@ exports.face = (res) => {
   // if (face.annotations['rightCheek'] && face.annotations['leftCheek'] && (face.annotations['rightCheek'].length > 0) && (face.annotations['leftCheek'].length > 0)) {
   //   gestures.push(`facing ${((face.annotations['rightCheek'][0][2] > 0) || (face.annotations['leftCheek'][0][2] < 0)) ? 'right' : 'left'}`);
   // }
+  if (face.mesh && face.mesh.length > 0) {
   const eyeFacing = face.mesh[35][2] - face.mesh[263][2];
   if (Math.abs(eyeFacing) < 10) gestures.push('facing camera');
   else gestures.push(`facing ${eyeFacing < 0 ? 'right' : 'left'}`);
@@ -37,6 +38,7 @@ exports.face = (res) => {
   const chinDepth = face.mesh[152][2];
   if (Math.abs(chinDepth) > 10) gestures.push(`head ${chinDepth < 0 ? 'up' : 'down'}`);
   }
+  }
   return gestures;
 };
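The added guard matters because `face.mesh` is empty when the mesh model is disabled or returns no keypoints, and indexing `face.mesh[35]` would then throw. A standalone sketch of the guarded check, using the same keypoint indices as above:

function faceGestures(face) {
  const gestures = [];
  if (face.mesh && face.mesh.length > 0) {
    // z-depth difference between the outer eye corners indicates head yaw
    const eyeFacing = face.mesh[35][2] - face.mesh[263][2];
    if (Math.abs(eyeFacing) < 10) gestures.push('facing camera');
    else gestures.push(`facing ${eyeFacing < 0 ? 'right' : 'left'}`);
  }
  return gestures; // empty array instead of a TypeError when mesh is missing
}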

View File

@@ -33,7 +33,7 @@ class HandPipeline {
     this.meshDetector = meshDetector;
     this.inputSize = inputSize;
     this.storedBoxes = [];
-    this.skipped = 0;
+    this.skipped = 1000;
     this.detectedHands = 0;
   }
@@ -91,7 +91,8 @@ class HandPipeline {
     let boxes;
     if ((this.skipped > config.skipFrames) || !config.landmarks) {
       boxes = await this.boxDetector.estimateHandBounds(image, config);
-      this.skipped = 0;
+      // don't reset on test image
+      if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;
     }
     // if detector result count doesn't match current working set, use it to reset current working set
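These two changes work together: seeding `skipped` with 1000 forces a full detector pass on the first real frame (the counter starts far above any plausible `skipFrames`), while the 255-pixel check stops the warmup image from resetting the cadence. A sketch of the logic in isolation; the class and method names are illustrative only:

class SkipCadence {
  constructor() { this.skipped = 1000; } // large seed, so detect on first frame
  shouldRunDetector(image, config) {
    if ((this.skipped <= config.skipFrames) && config.landmarks) {
      this.skipped++;
      return false; // re-use stored boxes for this frame
    }
    // a 255x255 input is the warmup image, so don't reset the counter for it
    if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;
    return true;
  }
}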

View File

@@ -373,6 +373,12 @@ class Human {
       resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });
     });
   }
+
+  async warmup(userConfig) {
+    const warmup = new ImageData(255, 255);
+    await this.detect(warmup, userConfig);
+    this.log('warmed up');
+  }
 }
 export { Human as default };
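Note the warmup image is 255×255 where the demo previously used 256×256: that size doubles as the sentinel the hand pipeline checks (`image.shape[1] !== 255`), so warming up compiles kernels without resetting the frame-skip counter. Typical usage, assuming an instantiated `Human` and an assumed `inputCanvas` element:

const human = new Human();
await human.warmup(userConfig); // dummy 255x255 ImageData pass, logs 'warmed up'
const result = await human.detect(inputCanvas, userConfig); // first real frame is now fast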

View File

@@ -52,8 +52,10 @@ function process(input, config) {
     if (config.filter.polaroid) this.fx.addFilter('polaroid');
     if (config.filter.pixelate !== 0) this.fx.addFilter('pixelate', config.filter.pixelate);
     this.fx.apply(inCanvas);
+  } else {
+    outCanvas = inCanvas;
   }
-  if (!outCanvas) outCanvas = inCanvas;
+  // if (!outCanvas) outCanvas = inCanvas;
   let pixels;
   if ((config.backend === 'webgl') || (outCanvas instanceof ImageData)) {
     // tf kernel-optimized method to get imagedata, also if input is imagedata, just use it
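A minimal sketch of the kernel-optimized path the comment refers to; with the new else branch, `outCanvas` always holds a canvas or ImageData by this point. `tf` is assumed to be the tfjs module:

// assumes: import * as tf from '@tensorflow/tfjs';
const pixels = tf.browser.fromPixels(outCanvas); // int32 tensor, shape [height, width, 3]
const casted = pixels.toFloat();
const batched = casted.expandDims(0); // [1, height, width, 3], ready as model input
pixels.dispose();
casted.dispose();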