mirror of https://github.com/vladmandic/human
added faceboxes alternative model
parent ad229863a6
commit 4f09368406
config.js
@@ -62,10 +62,10 @@ export default {
     // detector, mesh, iris, age, gender, emotion
     // (note: module is not loaded until it is required)
     detector: {
-      modelPath: '../models/blazeface-back.json', // can be 'front' or 'back'.
-                                                  // 'front' is optimized for large faces
-                                                  // such as front-facing camera and
-                                                  // 'back' is optimized for distant faces.
+      modelPath: '../models/blazeface-back.json', // can be 'blazeface-front', 'blazeface-back' or 'faceboxes'
+                                                  // 'blazeface-front' is blazeface model optimized for large faces such as front-facing camera
+                                                  // 'blazeface-back' is blazeface model optimized for smaller and/or distant faces
+                                                  // 'faceboxes' is an alternative model to 'blazeface'
       inputSize: 256, // fixed value: 128 for 'front' and 256 for 'back'
       rotation: false, // use best-guess rotated face image or just box with rotation as-is
                        // false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
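A minimal sketch of what the new option enables: overriding the detector to the faceboxes model. Key names are taken from the config hunk above; the minConfidence value is an assumption (it is the threshold the new faceboxes.js reads, shown later in this commit):

    // hedged example: select the alternative faceboxes detector instead of blazeface
    const config = {
      face: {
        enabled: true,
        detector: {
          modelPath: '../models/faceboxes.json', // switches Human to the faceboxes module
          inputSize: 256,                        // resize target used by faceboxes estimateFaces()
          minConfidence: 0.5,                    // assumed value; boxes scoring below this are dropped
        },
      },
    };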
demo/browser.js
@@ -8,7 +8,9 @@ const userConfig = {}; // add any user configuration overrides
 /*
 const userConfig = {
   // backend: 'humangl',
-  face: { enabled: true, iris: { enabled: false }, mesh: { enabled: false }, age: { enabled: false }, gender: { enabled: false }, emotion: { enabled: false } },
+  async: false,
+  videoOptimized: false,
+  face: { enabled: true, detector: { modelPath: '../models/faceboxes.json' }, iris: { enabled: false }, mesh: { enabled: false }, age: { enabled: false }, gender: { enabled: false }, emotion: { enabled: true } },
   body: { enabled: false },
   hand: { enabled: false },
 };
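How such overrides are consumed, as a hedged sketch: detect(input, config) is the call shown in demo/node.js below; the default-export import and bare constructor are assumptions based on the demo bundles:

    import Human from '../dist/human.esm.js'; // assumed entry point for the browser bundle
    const human = new Human();
    // given an HTMLVideoElement `video`: per-call overrides merge over library defaults,
    // so userConfig can stay sparse and only name what differs
    const result = await human.detect(video, userConfig);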
demo/node.js
@@ -12,8 +12,9 @@ const myConfig = {
   backend: 'tensorflow',
   console: true,
   videoOptimized: false,
+  async: false,
   face: {
-    detector: { modelPath: 'file://models/blazeface-back.json' },
+    detector: { modelPath: 'file://models/faceboxes.json' }, // cannot use blazeface in nodejs due to missing required kernel function in tfjs-node
     mesh: { modelPath: 'file://models/facemesh.json' },
     iris: { modelPath: 'file://models/iris.json' },
     age: { modelPath: 'file://models/age-ssrnet-imdb.json' },
@@ -45,11 +46,7 @@ async function detect(input) {
   decoded.dispose();
   casted.dispose();
   // image shape contains image dimensions and depth
-  log.warn('Face model is disabled in NodeJS due to missing required TFJS functions');
   log.state('Processing:', image.shape);
-  // must disable face model when running in tfjs-node as it's missing required ops
-  // see <https://github.com/tensorflow/tfjs/issues/4066>
-  myConfig.face.enabled = false;
   // run actual detection
   const result = await human.detect(image, myConfig);
   // dispose image tensor as we no longer need it
@@ -59,12 +56,14 @@ async function detect(input) {
 }

 async function test() {
-  log.state('Processing embedded warmup image');
+  log.warn('Face model is disabled in NodeJS due to missing required TFJS functions');
+  myConfig.face.enabled = false;
+  log.state('Processing embedded warmup image: face');
+  myConfig.warmup = 'face';
+  const resultFace = await human.warmup(myConfig);
+  log.data(resultFace);
+  log.state('Processing embedded warmup image: full');
   myConfig.warmup = 'full';
-  const result = await human.warmup(myConfig);
-  log.data(result);
+  const resultFull = await human.warmup(myConfig);
+  log.data(resultFull);
 }

 async function main() {
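For context, a hedged reconstruction of the surrounding detect() helper in demo/node.js: the decode step is an assumption (tf.node.decodeImage is the standard tfjs-node API), while the dispose calls, log lines, and detect call mirror the hunks above; `human`, `log`, and `myConfig` live in the demo's module scope:

    const fs = require('fs');
    const tf = require('@tensorflow/tfjs-node');

    async function detect(input) {
      const buffer = fs.readFileSync(input);
      const decoded = tf.node.decodeImage(buffer); // uint8 tensor in HWC layout
      const casted = decoded.toFloat();
      const image = casted.expandDims(0);          // batch dimension expected by the models
      decoded.dispose();
      casted.dispose();
      // image shape contains image dimensions and depth
      log.state('Processing:', image.shape);
      // run actual detection
      const result = await human.detect(image, myConfig);
      // dispose image tensor as we no longer need it
      image.dispose();
      return result;
    }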
File diff suppressed because one or more lines are too long
dist/demo-browser-index.json
@@ -1,7 +1,7 @@
 {
   "inputs": {
     "dist/human.esm.js": {
-      "bytes": 1347321,
+      "bytes": 1349003,
       "imports": []
     },
     "demo/draw.js": {
@@ -17,7 +17,7 @@
       "imports": []
     },
     "demo/browser.js": {
-      "bytes": 25654,
+      "bytes": 25747,
       "imports": [
         {
           "path": "dist/human.esm.js",
@@ -43,14 +43,14 @@
     "imports": [],
     "exports": [],
     "inputs": {},
-    "bytes": 2023220
+    "bytes": 2028101
   },
   "dist/demo-browser-index.js": {
     "imports": [],
     "exports": [],
     "inputs": {
       "dist/human.esm.js": {
-        "bytesInOutput": 1338790
+        "bytesInOutput": 1340450
       },
       "demo/draw.js": {
         "bytesInOutput": 6204
@@ -65,7 +65,7 @@
       "bytesInOutput": 16815
     }
   },
-  "bytes": 1386649
+  "bytes": 1388309
 }
}
}
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
dist/human.esm.json
@@ -77,7 +77,7 @@
     ]
   },
   "src/blazeface/facemesh.js": {
-    "bytes": 2991,
+    "bytes": 2973,
     "imports": [
       {
         "path": "src/log.js",
@@ -110,6 +110,23 @@
       }
     ]
   },
+  "src/faceboxes/faceboxes.js": {
+    "bytes": 2612,
+    "imports": [
+      {
+        "path": "src/log.js",
+        "kind": "import-statement"
+      },
+      {
+        "path": "dist/tfjs.esm.js",
+        "kind": "import-statement"
+      },
+      {
+        "path": "src/profile.js",
+        "kind": "import-statement"
+      }
+    ]
+  },
   "src/age/age.js": {
     "bytes": 2037,
     "imports": [
@@ -145,7 +162,7 @@
     ]
   },
   "src/emotion/emotion.js": {
-    "bytes": 3078,
+    "bytes": 3056,
     "imports": [
       {
         "path": "src/log.js",
@@ -417,7 +434,7 @@
     ]
   },
   "config.js": {
-    "bytes": 9644,
+    "bytes": 9786,
     "imports": []
   },
   "src/sample.js": {
@@ -425,11 +442,11 @@
     "imports": []
   },
   "package.json": {
-    "bytes": 2336,
+    "bytes": 2321,
     "imports": []
   },
   "src/human.js": {
-    "bytes": 18719,
+    "bytes": 18962,
     "imports": [
       {
         "path": "src/log.js",
@@ -447,6 +464,10 @@
         "path": "src/blazeface/facemesh.js",
         "kind": "import-statement"
       },
+      {
+        "path": "src/faceboxes/faceboxes.js",
+        "kind": "import-statement"
+      },
       {
         "path": "src/age/age.js",
         "kind": "import-statement"
@@ -503,7 +524,7 @@
     "imports": [],
     "exports": [],
     "inputs": {},
-    "bytes": 1927417
+    "bytes": 1932071
   },
   "dist/human.esm.js": {
     "imports": [],
@@ -532,6 +553,9 @@
     "src/profile.js": {
       "bytesInOutput": 637
     },
+    "src/faceboxes/faceboxes.js": {
+      "bytesInOutput": 1387
+    },
     "src/age/age.js": {
       "bytesInOutput": 813
     },
@@ -602,13 +626,13 @@
     "bytesInOutput": 252
   },
   "dist/tfjs.esm.js": {
-    "bytesInOutput": 1062880
+    "bytesInOutput": 1062888
   },
   "src/tfjs/backend.js": {
     "bytesInOutput": 1205
   },
   "src/human.js": {
-    "bytesInOutput": 10250
+    "bytesInOutput": 10537
   },
   "src/handpose/box.js": {
     "bytesInOutput": 938
@@ -626,7 +650,7 @@
     "bytesInOutput": 16
   }
 },
-  "bytes": 1347321
+  "bytes": 1349003
 }
}
}
File diff suppressed because one or more lines are too long
dist/human.json
@@ -77,7 +77,7 @@
     ]
   },
   "src/blazeface/facemesh.js": {
-    "bytes": 2991,
+    "bytes": 2973,
     "imports": [
       {
         "path": "src/log.js",
@@ -110,6 +110,23 @@
       }
     ]
   },
+  "src/faceboxes/faceboxes.js": {
+    "bytes": 2612,
+    "imports": [
+      {
+        "path": "src/log.js",
+        "kind": "import-statement"
+      },
+      {
+        "path": "dist/tfjs.esm.js",
+        "kind": "import-statement"
+      },
+      {
+        "path": "src/profile.js",
+        "kind": "import-statement"
+      }
+    ]
+  },
   "src/age/age.js": {
     "bytes": 2037,
     "imports": [
@@ -145,7 +162,7 @@
     ]
   },
   "src/emotion/emotion.js": {
-    "bytes": 3078,
+    "bytes": 3056,
     "imports": [
       {
         "path": "src/log.js",
@@ -417,7 +434,7 @@
     ]
   },
   "config.js": {
-    "bytes": 9644,
+    "bytes": 9786,
     "imports": []
   },
   "src/sample.js": {
@@ -425,11 +442,11 @@
     "imports": []
   },
   "package.json": {
-    "bytes": 2336,
+    "bytes": 2321,
     "imports": []
   },
   "src/human.js": {
-    "bytes": 18719,
+    "bytes": 18962,
     "imports": [
       {
         "path": "src/log.js",
@@ -447,6 +464,10 @@
         "path": "src/blazeface/facemesh.js",
         "kind": "import-statement"
       },
+      {
+        "path": "src/faceboxes/faceboxes.js",
+        "kind": "import-statement"
+      },
       {
         "path": "src/age/age.js",
         "kind": "import-statement"
@@ -503,7 +524,7 @@
     "imports": [],
     "exports": [],
     "inputs": {},
-    "bytes": 1927434
+    "bytes": 1932088
   },
   "dist/human.js": {
     "imports": [],
@@ -530,6 +551,9 @@
     "src/profile.js": {
       "bytesInOutput": 637
     },
+    "src/faceboxes/faceboxes.js": {
+      "bytesInOutput": 1387
+    },
     "src/age/age.js": {
       "bytesInOutput": 813
     },
@@ -597,13 +621,13 @@
     "bytesInOutput": 2480
   },
   "src/human.js": {
-    "bytesInOutput": 10286
+    "bytesInOutput": 10573
   },
   "src/log.js": {
     "bytesInOutput": 252
   },
   "dist/tfjs.esm.js": {
-    "bytesInOutput": 1062880
+    "bytesInOutput": 1062888
   },
   "src/tfjs/backend.js": {
     "bytesInOutput": 1205
   },
@@ -624,7 +648,7 @@
     "bytesInOutput": 16
   }
 },
-  "bytes": 1347363
+  "bytes": 1349045
 }
}
}
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
dist/human.node-gpu.json
@@ -77,7 +77,7 @@
     ]
   },
   "src/blazeface/facemesh.js": {
-    "bytes": 2991,
+    "bytes": 2973,
     "imports": [
       {
         "path": "src/log.js",
@@ -110,6 +110,23 @@
       }
     ]
   },
+  "src/faceboxes/faceboxes.js": {
+    "bytes": 2612,
+    "imports": [
+      {
+        "path": "src/log.js",
+        "kind": "import-statement"
+      },
+      {
+        "path": "dist/tfjs.esm.js",
+        "kind": "import-statement"
+      },
+      {
+        "path": "src/profile.js",
+        "kind": "import-statement"
+      }
+    ]
+  },
   "src/age/age.js": {
     "bytes": 2037,
     "imports": [
@@ -145,7 +162,7 @@
     ]
   },
   "src/emotion/emotion.js": {
-    "bytes": 3078,
+    "bytes": 3056,
     "imports": [
       {
         "path": "src/log.js",
@@ -417,7 +434,7 @@
     ]
   },
   "config.js": {
-    "bytes": 9644,
+    "bytes": 9786,
     "imports": []
   },
   "src/sample.js": {
@@ -425,11 +442,11 @@
     "imports": []
   },
   "package.json": {
-    "bytes": 2336,
+    "bytes": 2321,
     "imports": []
   },
   "src/human.js": {
-    "bytes": 18719,
+    "bytes": 18962,
     "imports": [
       {
         "path": "src/log.js",
@@ -447,6 +464,10 @@
         "path": "src/blazeface/facemesh.js",
         "kind": "import-statement"
       },
+      {
+        "path": "src/faceboxes/faceboxes.js",
+        "kind": "import-statement"
+      },
       {
         "path": "src/age/age.js",
         "kind": "import-statement"
@@ -503,7 +524,7 @@
     "imports": [],
     "exports": [],
     "inputs": {},
-    "bytes": 710163
+    "bytes": 714830
   },
   "dist/human.node-gpu.js": {
     "imports": [],
@@ -533,6 +554,9 @@
     "src/profile.js": {
       "bytesInOutput": 631
     },
+    "src/faceboxes/faceboxes.js": {
+      "bytesInOutput": 1425
+    },
     "src/age/age.js": {
       "bytesInOutput": 852
     },
@@ -600,7 +624,7 @@
     "bytesInOutput": 2478
   },
   "src/human.js": {
-    "bytesInOutput": 10397
+    "bytesInOutput": 10681
   },
   "src/log.js": {
     "bytesInOutput": 251
   },
@@ -624,7 +648,7 @@
     "bytesInOutput": 16
   }
 },
-  "bytes": 278542
+  "bytes": 280251
 }
}
}
Binary file not shown.
File diff suppressed because one or more lines are too long
package.json
@@ -50,7 +50,7 @@
     "start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",
     "lint": "eslint src/*.js demo/*.js",
     "dev": "npm install && node server/dev.js",
-    "build": "npm install && rimraf dist/* && node server/build.js && node server/changelog.js",
+    "build": "rimraf dist/* && node server/build.js && node server/changelog.js",
     "update": "npm update --depth 20 --force && npm dedupe && npm prune && npm audit"
   },
   "keywords": [
src/blazeface/facemesh.js
@@ -50,7 +50,6 @@ class MediaPipeFaceMesh {
 let faceModels = [null, null, null];
 async function load(config) {
   faceModels = await Promise.all([
-    // @ts-ignore
     (!faceModels[0] && config.face.enabled) ? blazeface.load(config) : null,
     (!faceModels[1] && config.face.mesh.enabled) ? tf.loadGraphModel(config.face.mesh.modelPath, { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }) : null,
     (!faceModels[2] && config.face.iris.enabled) ? tf.loadGraphModel(config.face.iris.modelPath, { fromTFHub: config.face.iris.modelPath.includes('tfhub.dev') }) : null,
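The guard pattern above, in isolation: each model slot triggers a load only when it is empty and its feature is enabled. A sketch with hypothetical names, not the commit's exact code; returning the existing instance (rather than null) is an assumption that keeps repeated load() calls from clobbering already-loaded slots:

    let models = [null, null, null];
    async function loadOnce(loaders, enabled) {
      // keep a loaded slot as-is; load it when empty and enabled; leave disabled slots null
      models = await Promise.all(models.map((m, i) => m || (enabled[i] ? loaders[i]() : null)));
      return models;
    }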
src/emotion/emotion.js
@@ -65,7 +65,6 @@ async function predict(image, config) {
     const profileData = await tf.profile(() => models.emotion.predict(normalize));
     data = profileData.result.dataSync();
     profileData.result.dispose();
-    // @ts-ignore
     profile.run('emotion', profileData);
   }
   for (let i = 0; i < data.length; i++) {
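The profiling branch in isolation: tf.profile executes the closure, gathers kernel statistics, and hands the closure's return value back as profileData.result. Names (`models.emotion`, `normalize`) come from the surrounding predict(); profile.run's role as the project's stats sink is an assumption:

    const profileData = await tf.profile(() => models.emotion.predict(normalize));
    const data = profileData.result.dataSync(); // read predictions before disposing the tensor
    profileData.result.dispose();
    profile.run('emotion', profileData);        // hand kernel timings to src/profile.js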
src/faceboxes/faceboxes.js (new file)
@@ -0,0 +1,69 @@
+import { log } from '../log.js';
+import * as tf from '../../dist/tfjs.esm.js';
+import * as profile from '../profile.js';
+
+class FaceBoxes {
+  constructor(model, config) {
+    this.model = model;
+    this.config = config;
+  }
+
+  async estimateFaces(input, config) {
+    if (config) this.config = config;
+    const results = [];
+    const resizeT = tf.image.resizeBilinear(input, [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
+    const castT = resizeT.toInt();
+    let scores;
+    let boxes;
+    if (!this.config.profile) {
+      const [scoresT, boxesT, numT] = await this.model.executeAsync(castT);
+      scores = scoresT.dataSync();
+      const squeezeT = boxesT.squeeze();
+      boxes = squeezeT.arraySync();
+      scoresT.dispose();
+      boxesT.dispose();
+      squeezeT.dispose();
+      numT.dispose();
+    } else {
+      const profileData = await tf.profile(() => this.model.executeAsync(castT));
+      scores = profileData.result[0].dataSync();
+      const squeezeT = profileData.result[1].squeeze();
+      boxes = squeezeT.arraySync();
+      profileData.result.forEach((t) => t.dispose());
+      profile.run('faceboxes', profileData);
+    }
+    castT.dispose();
+    resizeT.dispose();
+    for (const i in boxes) {
+      if (scores[i] && scores[i] > this.config.face.detector.minConfidence) {
+        const enlarge = 1.05;
+        const crop = [boxes[i][0] / enlarge, boxes[i][1] / enlarge, boxes[i][2] * enlarge, boxes[i][3] * enlarge];
+        const boxRaw = [crop[1], crop[0], (crop[3]) - (crop[1]), (crop[2]) - (crop[0])];
+        const box = [parseInt(boxRaw[0] * input.shape[2]), parseInt(boxRaw[1] * input.shape[1]), parseInt(boxRaw[2] * input.shape[2]), parseInt(boxRaw[3] * input.shape[1])];
+        const image = tf.image.cropAndResize(input, [crop], [0], [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
+        results.push({
+          confidence: scores[i],
+          box,
+          boxRaw,
+          image,
+          // mesh,
+          // meshRaw,
+          // annotations,
+        });
+      }
+    }
+    return results;
+  }
+}
+
+async function load(config) {
+  const model = await tf.loadGraphModel(config.face.detector.modelPath);
+  log(`load model: ${config.face.detector.modelPath.match(/\/(.*)\./)[1]}`);
+  const faceboxes = new FaceBoxes(model, config);
+  if (config.face.mesh.enabled) log(`load model: ${config.face.mesh.modelPath.match(/\/(.*)\./)[1]}`);
+  if (config.face.iris.enabled) log(`load model: ${config.face.iris.modelPath.match(/\/(.*)\./)[1]}`);
+  return faceboxes;
+}
+
+exports.load = load;
+exports.FaceBoxes = FaceBoxes;
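A usage sketch for the new module, based only on the exports above; the trimmed config literal is an assumption shaped like config.js, and inputTensor stands for a 4D [1, height, width, 3] image tensor (estimateFaces reads input.shape[1] and input.shape[2] as height and width):

    import * as faceboxes from './src/faceboxes/faceboxes.js';

    const config = { face: { enabled: true, detector: { modelPath: '../models/faceboxes.json', inputSize: 256, minConfidence: 0.5 }, mesh: { enabled: false }, iris: { enabled: false } } };
    const detector = await faceboxes.load(config);                   // loads the graph model and logs its name
    const faces = await detector.estimateFaces(inputTensor, config); // [{ confidence, box, boxRaw, image }, ...]
    for (const face of faces) face.image.dispose();                  // caller owns the cropped face tensors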
src/human.js
@@ -2,6 +2,7 @@ import { log } from './log.js';
 import * as tf from '../dist/tfjs.esm.js';
 import * as backend from './tfjs/backend.js';
 import * as facemesh from './blazeface/facemesh.js';
+import * as faceboxes from './faceboxes/faceboxes.js';
 import * as age from './age/age.js';
 import * as gender from './gender/gender.js';
 import * as emotion from './emotion/emotion.js';
@@ -120,10 +121,10 @@ class Human {
       log('tf flags:', tf.ENV.flags);
     }
   }

+    const face = this.config.face.detector.modelPath.includes('faceboxes') ? faceboxes : facemesh;
     if (this.config.async) {
       [
-        this.models.facemesh,
+        this.models.face,
         this.models.age,
         this.models.gender,
         this.models.emotion,
@@ -131,7 +132,7 @@ class Human {
         this.models.posenet,
         this.models.handpose,
       ] = await Promise.all([
-        this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config) : null),
+        this.models.face || (this.config.face.enabled ? face.load(this.config) : null),
         this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
         this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
         this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
@@ -140,7 +141,7 @@ class Human {
         this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
       ]);
     } else {
-      if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config);
+      if (this.config.face.enabled && !this.models.face) this.models.face = await face.load(this.config);
       if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
       if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
       if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
@@ -218,7 +219,7 @@ class Human {
     const faceRes = [];
     this.state = 'run:face';
     timeStamp = now();
-    const faces = await this.models.facemesh?.estimateFaces(input, this.config);
+    const faces = await this.models.face?.estimateFaces(input, this.config);
     this.perf.face = Math.trunc(now() - timeStamp);
     for (const face of faces) {
       this.analyze('Get Face');
@@ -281,16 +282,14 @@ class Human {
       }

       this.analyze('Finish Face:');
-      // don't need face anymore
-      face.image.dispose();

       // calculate iris distance
       // iris: array[ center, left, top, right, bottom ]
-      if (!this.config.face.iris.enabled) {
+      if (!this.config.face.iris.enabled && face?.annotations?.leftEyeIris && face?.annotations?.rightEyeIris) {
         delete face.annotations.leftEyeIris;
         delete face.annotations.rightEyeIris;
       }
-      const irisSize = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)
+      const irisSize = (face.annotations?.leftEyeIris && face.annotations?.rightEyeIris)
         /* average human iris size is 11.7mm */
         ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))
         : 0;
@@ -309,7 +308,11 @@ class Human {
         emotion: emotionRes,
         embedding: embeddingRes,
         iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
+        image: face.image.toInt().squeeze(),
       });

+      // don't need face anymore
+      face.image?.dispose();
       this.analyze('End Face');
     }
     this.analyze('End FaceMesh:');
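The dispatch introduced here, reduced to its core: the detector modelPath string decides which module backs this.models.face, and both modules expose the same load()/estimateFaces() contract, so the rest of the pipeline is unchanged:

    // sketch of the selection rule from load() above
    const face = config.face.detector.modelPath.includes('faceboxes') ? faceboxes : facemesh;
    const model = await face.load(config);
    const faces = await model.estimateFaces(input, config); // identical call shape for either module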
wiki
@@ -1 +1 @@
-Subproject commit fb11ed0f097e9aa8c21643a45a8324b88d7aecee
+Subproject commit 0fa077bf63fa7a3f26826eb9c88fd837e3728be7