mirror of https://github.com/vladmandic/human
added faceboxes alternative model
parent
90f8bacc23
commit
16854c097d
|
@ -62,10 +62,10 @@ export default {
|
|||
// detector, mesh, iris, age, gender, emotion
|
||||
// (note: module is not loaded until it is required)
|
||||
detector: {
|
||||
modelPath: '../models/blazeface-back.json', // can be 'front' or 'back'.
|
||||
// 'front' is optimized for large faces
|
||||
// such as front-facing camera and
|
||||
// 'back' is optimized for distant faces.
|
||||
modelPath: '../models/blazeface-back.json', // can be 'blazeface-front', 'blazeface-back' or 'faceboxes'
|
||||
// 'blazeface-front' is blazeface model optimized for large faces such as front-facing camera
|
||||
// 'blazeface-back' is blazeface model optimized for smaller and/or distant faces
|
||||
// 'faceboxes' is an alternative model to 'blazeface'
|
||||
inputSize: 256, // fixed value: 128 for front and 256 for 'back'
|
||||
rotation: false, // use best-guess rotated face image or just box with rotation as-is
|
||||
// false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
|
||||
|
|
|
@ -8,7 +8,9 @@ const userConfig = {}; // add any user configuration overrides
|
|||
/*
|
||||
const userConfig = {
|
||||
// backend: 'humangl',
|
||||
face: { enabled: true, iris: { enabled: false }, mesh: { enabled: false }, age: { enabled: false }, gender: { enabled: false }, emotion: { enabled: false } },
|
||||
async: false,
|
||||
videoOptimized: false,
|
||||
face: { enabled: true, detector: { modelPath: '../models/faceboxes.json' }, iris: { enabled: false }, mesh: { enabled: false }, age: { enabled: false }, gender: { enabled: false }, emotion: { enabled: true } },
|
||||
body: { enabled: false },
|
||||
hand: { enabled: false },
|
||||
};
|
||||
|
|
19
demo/node.js
19
demo/node.js
|
@ -12,8 +12,9 @@ const myConfig = {
|
|||
backend: 'tensorflow',
|
||||
console: true,
|
||||
videoOptimized: false,
|
||||
async: false,
|
||||
face: {
|
||||
detector: { modelPath: 'file://models/blazeface-back.json' },
|
||||
detector: { modelPath: 'file://models/faceboxes.json' }, // cannot use blazeface in nodejs due to missing required kernel function in tfjs-node
|
||||
mesh: { modelPath: 'file://models/facemesh.json' },
|
||||
iris: { modelPath: 'file://models/iris.json' },
|
||||
age: { modelPath: 'file://models/age-ssrnet-imdb.json' },
|
||||
|
@ -45,11 +46,7 @@ async function detect(input) {
|
|||
decoded.dispose();
|
||||
casted.dispose();
|
||||
// image shape contains image dimensions and depth
|
||||
log.warn('Face model is disabled in NodeJS due to missing required TFJS functions');
|
||||
log.state('Processing:', image.shape);
|
||||
// must disable face model when runing in tfjs-node as it's missing required ops
|
||||
// see <https://github.com/tensorflow/tfjs/issues/4066>
|
||||
myConfig.face.enabled = false;
|
||||
// run actual detection
|
||||
const result = await human.detect(image, myConfig);
|
||||
// dispose image tensor as we no longer need it
|
||||
|
@ -59,12 +56,14 @@ async function detect(input) {
|
|||
}
|
||||
|
||||
async function test() {
|
||||
log.state('Processing embedded warmup image');
|
||||
log.warn('Face model is disabled in NodeJS due to missing required TFJS functions');
|
||||
myConfig.face.enabled = false;
|
||||
log.state('Processing embedded warmup image: face');
|
||||
myConfig.warmup = 'face';
|
||||
const resultFace = await human.warmup(myConfig);
|
||||
log.data(resultFace);
|
||||
log.state('Processing embedded warmup image: full');
|
||||
myConfig.warmup = 'full';
|
||||
const result = await human.warmup(myConfig);
|
||||
log.data(result);
|
||||
const resultFull = await human.warmup(myConfig);
|
||||
log.data(resultFull);
|
||||
}
|
||||
|
||||
async function main() {
|
||||
|
|
|
@ -50,7 +50,7 @@
|
|||
"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",
|
||||
"lint": "eslint src/*.js demo/*.js",
|
||||
"dev": "npm install && node server/dev.js",
|
||||
"build": "npm install && rimraf dist/* && node server/build.js && node server/changelog.js",
|
||||
"build": "rimraf dist/* && node server/build.js && node server/changelog.js",
|
||||
"update": "npm update --depth 20 --force && npm dedupe && npm prune && npm audit"
|
||||
},
|
||||
"keywords": [
|
||||
|
|
|
@ -65,7 +65,6 @@ async function predict(image, config) {
|
|||
const profileData = await tf.profile(() => models.emotion.predict(normalize));
|
||||
data = profileData.result.dataSync();
|
||||
profileData.result.dispose();
|
||||
// @ts-ignore
|
||||
profile.run('emotion', profileData);
|
||||
}
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
|
|
21
src/human.js
21
src/human.js
|
@ -2,6 +2,7 @@ import { log } from './log.js';
|
|||
import * as tf from '../dist/tfjs.esm.js';
|
||||
import * as backend from './tfjs/backend.js';
|
||||
import * as facemesh from './blazeface/facemesh.js';
|
||||
import * as faceboxes from './faceboxes/faceboxes.js';
|
||||
import * as age from './age/age.js';
|
||||
import * as gender from './gender/gender.js';
|
||||
import * as emotion from './emotion/emotion.js';
|
||||
|
@ -120,10 +121,10 @@ class Human {
|
|||
log('tf flags:', tf.ENV.flags);
|
||||
}
|
||||
}
|
||||
|
||||
const face = this.config.face.detector.modelPath.includes('faceboxes') ? faceboxes : facemesh;
|
||||
if (this.config.async) {
|
||||
[
|
||||
this.models.facemesh,
|
||||
this.models.face,
|
||||
this.models.age,
|
||||
this.models.gender,
|
||||
this.models.emotion,
|
||||
|
@ -131,7 +132,7 @@ class Human {
|
|||
this.models.posenet,
|
||||
this.models.handpose,
|
||||
] = await Promise.all([
|
||||
this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config) : null),
|
||||
this.models.face || (this.config.face.enabled ? face.load(this.config) : null),
|
||||
this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
|
||||
this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
|
||||
this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
|
||||
|
@ -140,7 +141,7 @@ class Human {
|
|||
this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
|
||||
]);
|
||||
} else {
|
||||
if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config);
|
||||
if (this.config.face.enabled && !this.models.face) this.models.face = await face.load(this.config);
|
||||
if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
|
||||
if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
|
||||
if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
|
||||
|
@ -218,7 +219,7 @@ class Human {
|
|||
const faceRes = [];
|
||||
this.state = 'run:face';
|
||||
timeStamp = now();
|
||||
const faces = await this.models.facemesh?.estimateFaces(input, this.config);
|
||||
const faces = await this.models.face?.estimateFaces(input, this.config);
|
||||
this.perf.face = Math.trunc(now() - timeStamp);
|
||||
for (const face of faces) {
|
||||
this.analyze('Get Face');
|
||||
|
@ -281,16 +282,14 @@ class Human {
|
|||
}
|
||||
|
||||
this.analyze('Finish Face:');
|
||||
// dont need face anymore
|
||||
face.image.dispose();
|
||||
|
||||
// calculate iris distance
|
||||
// iris: array[ center, left, top, right, bottom]
|
||||
if (!this.config.face.iris.enabled) {
|
||||
if (!this.config.face.iris.enabled && face?.annotations?.leftEyeIris && face?.annotations?.rightEyeIris) {
|
||||
delete face.annotations.leftEyeIris;
|
||||
delete face.annotations.rightEyeIris;
|
||||
}
|
||||
const irisSize = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)
|
||||
const irisSize = (face.annotations?.leftEyeIris && face.annotations?.rightEyeIris)
|
||||
/* average human iris size is 11.7mm */
|
||||
? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))
|
||||
: 0;
|
||||
|
@ -309,7 +308,11 @@ class Human {
|
|||
emotion: emotionRes,
|
||||
embedding: embeddingRes,
|
||||
iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
|
||||
image: face.image.toInt().squeeze(),
|
||||
});
|
||||
|
||||
// dont need face anymore
|
||||
face.image?.dispose();
|
||||
this.analyze('End Face');
|
||||
}
|
||||
this.analyze('End FaceMesh:');
|
||||
|
|
2
wiki
2
wiki
|
@ -1 +1 @@
|
|||
Subproject commit fb11ed0f097e9aa8c21643a45a8324b88d7aecee
|
||||
Subproject commit 0fa077bf63fa7a3f26826eb9c88fd837e3728be7
|
Loading…
Reference in New Issue