remove blazeface-front and faceboxes

pull/280/head
Vladimir Mandic 2021-03-09 18:32:35 -05:00
parent 47ff4f8d40
commit f8b13a8bba
5 changed files with 30 additions and 41 deletions

View File

@ -66,13 +66,11 @@ export default {
// detector, mesh, iris, age, gender, emotion
// (note: module is not loaded until it is required)
detector: {
modelPath: '../models/blazeface-back.json', // can be 'blazeface-front', 'blazeface-back' or 'faceboxes'
// 'blazeface-front' is blazeface model optimized for large faces such as front-facing camera
// 'blazeface-back' is blazeface model optimized for smaller and/or distant faces
// 'faceboxes' is alternative model to 'blazeface'
inputSize: 256, // fixed value: 128 for front and 256 for 'back'
modelPath: '../models/blazeface-back.json',
inputSize: 256, // fixed value
rotation: true, // use best-guess rotated face image or just box with rotation as-is
// false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
// this parameter is not valid in nodejs
maxFaces: 10, // maximum number of faces detected in the input
// should be set to the minimum number for performance
skipFrames: 21, // how many frames to go without re-running the face bounding box detector
@ -136,7 +134,7 @@ export default {
body: {
enabled: true,
modelPath: '../models/posenet.json', // can be 'posenet', 'blazepose' or 'blazepose-upper'
modelPath: '../models/posenet.json', // can be 'posenet' or 'blazepose'
inputSize: 257, // fixed value, 257 for posenet and 256 for blazepose
maxDetections: 10, // maximum number of people detected in the input
// should be set to the minimum number for performance

View File

@ -4,7 +4,7 @@
const UICSS = `
#gl-bench { position: absolute; right: 1rem; bottom: 1rem; z-index:1000; -webkit-user-select: none; -moz-user-select: none; user-select: none; }
#gl-bench div { position: relative; display: block; margin: 4px; padding: 0 7px 0 10px; background: darkslategray; border-radius: 0.2rem; cursor: pointer; opacity: 0.9; }
#gl-bench div { position: relative; display: block; margin: 4px; padding: 0 2px 0 2px; background: darkslategray; border-radius: 0.1rem; cursor: pointer; opacity: 0.9; }
#gl-bench svg { height: 60px; margin: 0 0px 0px 4px; }
#gl-bench text { font-size: 16px; font-family: 'Lato', 'Segoe UI'; dominant-baseline: middle; text-anchor: middle; }
#gl-bench .gl-mem { font-size: 12px; fill: white; }
@ -17,10 +17,10 @@ const UICSS = `
const UISVG = `
<div class="gl-box">
<svg viewBox="0 0 55 60">
<svg viewBox="0 0 60 60">
<text x="27" y="56" class="gl-fps">00 FPS</text>
<text x="30" y="8" class="gl-mem"></text>
<rect x="0" y="14" rx="4" ry="4" width="65" height="32"></rect>
<rect x="0" y="14" rx="4" ry="4" width="60" height="32"></rect>
<polyline class="gl-chart"></polyline>
</svg>
<svg viewBox="0 0 14 60" class="gl-cpu-svg">
@ -163,7 +163,7 @@ class GLBench {
const len = chart.length;
for (let j = 0; j < len; j++) {
const id = (circularId + j + 1) % len;
if (chart[id] !== undefined) points = points + ' ' + (55 * j / (len - 1)).toFixed(1) + ',' + (45 - chart[id] * 22 / 60 / this.detected).toFixed(1);
if (chart[id] !== undefined) points = points + ' ' + (60 * j / (len - 1)).toFixed(1) + ',' + (45 - chart[id] * 0.5 / this.detected).toFixed(1);
}
nodes['gl-chart'][i].setAttribute('points', points);
logger(this.names[i], chart, circularId);

View File

@ -15,9 +15,8 @@ const myConfig = {
async: false,
face: {
enabled: true,
detector: { modelPath: 'file://models/faceboxes.json', enabled: true, minConfidence: 0.5 },
// detector: { modelPath: 'file://models/blazeface-back.json', enabled: false }, // cannot use blazeface in nodejs due to missing required kernel function in tfjs-node
mesh: { modelPath: 'file://models/facemesh.json', enabled: false }, // depends on blazeface detector
detector: { modelPath: 'file://models/blazeface-back.json', enabled: true },
mesh: { modelPath: 'file://models/facemesh.json', enabled: true },
iris: { modelPath: 'file://models/iris.json', enabled: true },
age: { modelPath: 'file://models/age-ssrnet-imdb.json', enabled: true },
gender: { modelPath: 'file://models/gender.json', enabled: true },
@ -58,13 +57,11 @@ async function detect(input) {
log.state('Processing:', image.shape);
// run actual detection
const result = await human.detect(image, myConfig);
// no need to print results as they are printed to the console during detection from within the library because human.config.debug is set
// dispose image tensor as we no longer need it
image.dispose();
// print data to console
log.data('Face: ', result.face);
log.data('Body:', result.body);
log.data('Hand:', result.hand);
log.data('Gesture:', result.gesture);
return result;
}
async function test() {
@ -74,14 +71,12 @@ async function test() {
log.state('Processing embedded warmup image: face');
myConfig.warmup = 'face';
result = await human.warmup(myConfig);
log.data('Face: ', result.face);
log.state('Processing embedded warmup image: full');
myConfig.warmup = 'full';
result = await human.warmup(myConfig);
log.data('Body:', result.body);
log.data('Hand:', result.hand);
log.data('Gesture:', result.gesture);
// no need to print results as they are printed to the console during detection from within the library because human.config.debug is set
return result;
}
async function main() {

View File

@ -43,25 +43,24 @@
"blazeface",
"blazepose"
],
"peerDependencies": {},
"devDependencies": {
"@tensorflow/tfjs": "^3.2.0",
"@tensorflow/tfjs-backend-cpu": "^3.2.0",
"@tensorflow/tfjs-backend-wasm": "^3.2.0",
"@tensorflow/tfjs-backend-webgl": "^3.2.0",
"@tensorflow/tfjs-converter": "^3.2.0",
"@tensorflow/tfjs-core": "^3.2.0",
"@tensorflow/tfjs-data": "^3.2.0",
"@tensorflow/tfjs-layers": "^3.2.0",
"@tensorflow/tfjs-node": "^3.2.0",
"@tensorflow/tfjs-node-gpu": "^3.2.0",
"@types/node": "^14.14.32",
"@typescript-eslint/eslint-plugin": "^4.16.1",
"@typescript-eslint/parser": "^4.16.1",
"@tensorflow/tfjs": "^3.3.0",
"@tensorflow/tfjs-backend-cpu": "^3.3.0",
"@tensorflow/tfjs-backend-wasm": "^3.3.0",
"@tensorflow/tfjs-backend-webgl": "^3.3.0",
"@tensorflow/tfjs-converter": "^3.3.0",
"@tensorflow/tfjs-core": "^3.3.0",
"@tensorflow/tfjs-data": "^3.3.0",
"@tensorflow/tfjs-layers": "^3.3.0",
"@tensorflow/tfjs-node": "^3.3.0",
"@tensorflow/tfjs-node-gpu": "^3.3.0",
"@types/node": "^14.14.33",
"@typescript-eslint/eslint-plugin": "^4.17.0",
"@typescript-eslint/parser": "^4.17.0",
"@vladmandic/pilogger": "^0.2.14",
"chokidar": "^3.5.1",
"dayjs": "^1.10.4",
"esbuild": "^0.8.57",
"esbuild": "^0.9.0",
"eslint": "^7.21.0",
"eslint-config-airbnb-base": "^14.2.1",
"eslint-plugin-import": "^2.22.1",
@ -69,7 +68,6 @@
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^4.3.1",
"rimraf": "^3.0.2",
"seedrandom": "^3.0.5",
"simple-git": "^2.36.1",
"tslib": "^2.1.0",
"typescript": "^4.2.3"

View File

@ -3,7 +3,6 @@ import * as sysinfo from './sysinfo';
import * as tf from '../dist/tfjs.esm.js';
import * as backend from './tfjs/backend';
import * as facemesh from './blazeface/facemesh';
import * as faceboxes from './faceboxes/faceboxes';
import * as age from './age/age';
import * as gender from './gender/gender';
import * as emotion from './emotion/emotion';
@ -154,7 +153,6 @@ class Human {
if (this.config.debug) log('tf flags:', this.tf.ENV.flags);
}
}
const face = this.config.face.detector.modelPath.includes('faceboxes') ? faceboxes : facemesh;
if (this.config.async) {
[
this.models.face,
@ -166,7 +164,7 @@ class Human {
this.models.posenet,
this.models.blazepose,
] = await Promise.all([
this.models.face || (this.config.face.enabled ? face.load(this.config) : null),
this.models.face || (this.config.face.enabled ? facemesh.load(this.config) : null),
this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
@ -176,7 +174,7 @@ class Human {
this.models.posenet || (this.config.body.enabled && this.config.body.modelType.startsWith('blazepose') ? blazepose.load(this.config) : null),
]);
} else {
if (this.config.face.enabled && !this.models.face) this.models.face = await face.load(this.config);
if (this.config.face.enabled && !this.models.face) this.models.face = await facemesh.load(this.config);
if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);