mirror of https://github.com/vladmandic/human
added faceboxes alternative model
parent ad229863a6
commit 4f09368406
config.js
@@ -62,10 +62,10 @@ export default {
     // detector, mesh, iris, age, gender, emotion
     // (note: module is not loaded until it is required)
     detector: {
-      modelPath: '../models/blazeface-back.json', // can be 'front' or 'back'.
-      // 'front' is optimized for large faces
-      // such as front-facing camera and
-      // 'back' is optimized for distanct faces.
+      modelPath: '../models/blazeface-back.json', // can be 'blazeface-front', 'blazeface-back' or 'faceboxes'
+      // 'blazeface-front' is a blazeface model optimized for large faces such as front-facing camera
+      // 'blazeface-back' is a blazeface model optimized for smaller and/or more distant faces
+      // 'faceboxes' is an alternative model to 'blazeface'
       inputSize: 256, // fixed value: 128 for front and 256 for 'back'
       rotation: false, // use best-guess rotated face image or just box with rotation as-is
       // false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
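For reference, a user configuration that opts into the new detector could look like the sketch below. This is an illustrative example, not code from this commit; the import path and model path are assumptions based on the defaults used throughout the repository, and the selection works because human.js picks the faceboxes module whenever the detector modelPath contains 'faceboxes'.

// sketch: selecting the alternative faceboxes detector instead of blazeface
import Human from '../dist/human.esm.js'; // assumed bundle path, adjust to your setup

const human = new Human({
  face: {
    enabled: true,
    detector: { modelPath: '../models/faceboxes.json' }, // switching the path is the only change needed
  },
});

async function run(input) { // input: image, video or canvas element, or an image tensor
  const result = await human.detect(input);
  return result;
}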
demo/browser.js
@@ -8,7 +8,9 @@ const userConfig = {}; // add any user configuration overrides
 /*
 const userConfig = {
   // backend: 'humangl',
-  face: { enabled: true, iris: { enabled: false }, mesh: { enabled: false }, age: { enabled: false }, gender: { enabled: false }, emotion: { enabled: false } },
+  async: false,
+  videoOptimized: false,
+  face: { enabled: true, detector: { modelPath: '../models/faceboxes.json' }, iris: { enabled: false }, mesh: { enabled: false }, age: { enabled: false }, gender: { enabled: false }, emotion: { enabled: true } },
   body: { enabled: false },
   hand: { enabled: false },
 };
demo/node.js (19 changed lines)
@@ -12,8 +12,9 @@ const myConfig = {
   backend: 'tensorflow',
   console: true,
   videoOptimized: false,
+  async: false,
   face: {
-    detector: { modelPath: 'file://models/blazeface-back.json' },
+    detector: { modelPath: 'file://models/faceboxes.json' }, // cannot use blazeface in nodejs due to missing required kernel function in tfjs-node
     mesh: { modelPath: 'file://models/facemesh.json' },
     iris: { modelPath: 'file://models/iris.json' },
     age: { modelPath: 'file://models/age-ssrnet-imdb.json' },
@@ -45,11 +46,7 @@ async function detect(input) {
   decoded.dispose();
   casted.dispose();
   // image shape contains image dimensions and depth
-  log.warn('Face model is disabled in NodeJS due to missing required TFJS functions');
   log.state('Processing:', image.shape);
-  // must disable face model when runing in tfjs-node as it's missing required ops
-  // see <https://github.com/tensorflow/tfjs/issues/4066>
-  myConfig.face.enabled = false;
   // run actual detection
   const result = await human.detect(image, myConfig);
   // dispose image tensor as we no longer need it
@@ -59,12 +56,14 @@ async function detect(input) {
 }

 async function test() {
-  log.state('Processing embedded warmup image');
-  log.warn('Face model is disabled in NodeJS due to missing required TFJS functions');
-  myConfig.face.enabled = false;
+  log.state('Processing embedded warmup image: face');
+  myConfig.warmup = 'face';
+  const resultFace = await human.warmup(myConfig);
+  log.data(resultFace);
+  log.state('Processing embedded warmup image: full');
   myConfig.warmup = 'full';
-  const result = await human.warmup(myConfig);
-  log.data(result);
+  const resultFull = await human.warmup(myConfig);
+  log.data(resultFull);
 }

 async function main() {
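To make the nodejs flow concrete, here is a minimal warmup-only sketch using the faceboxes detector. It is illustrative rather than part of the commit, the require paths are assumptions, and faceboxes is chosen because the diff above notes that blazeface cannot run under tfjs-node due to a missing kernel function.

// sketch: nodejs warmup with the faceboxes detector
require('@tensorflow/tfjs-node'); // assumption: registers the 'tensorflow' backend used below
const Human = require('@vladmandic/human').default; // assumption: adjust to the local dist bundle when running from the repo

const myConfig = {
  backend: 'tensorflow',
  async: false,
  face: { detector: { modelPath: 'file://models/faceboxes.json' } },
};

async function main() {
  const human = new Human(myConfig);
  myConfig.warmup = 'face';
  const resultFace = await human.warmup(myConfig); // detection on the embedded warmup sample, as in test() above
  console.log(resultFace);
}
main();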
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
   "inputs": {
     "dist/human.esm.js": {
-      "bytes": 1347321,
+      "bytes": 1349003,
       "imports": []
     },
     "demo/draw.js": {
@@ -17,7 +17,7 @@
       "imports": []
     },
     "demo/browser.js": {
-      "bytes": 25654,
+      "bytes": 25747,
       "imports": [
         {
           "path": "dist/human.esm.js",
@@ -43,14 +43,14 @@
       "imports": [],
       "exports": [],
       "inputs": {},
-      "bytes": 2023220
+      "bytes": 2028101
     },
     "dist/demo-browser-index.js": {
       "imports": [],
       "exports": [],
       "inputs": {
         "dist/human.esm.js": {
-          "bytesInOutput": 1338790
+          "bytesInOutput": 1340450
         },
         "demo/draw.js": {
           "bytesInOutput": 6204
@@ -65,7 +65,7 @@
           "bytesInOutput": 16815
         }
       },
-      "bytes": 1386649
+      "bytes": 1388309
     }
   }
 }
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -77,7 +77,7 @@
       ]
     },
     "src/blazeface/facemesh.js": {
-      "bytes": 2991,
+      "bytes": 2973,
       "imports": [
         {
           "path": "src/log.js",
@@ -110,6 +110,23 @@
         }
       ]
     },
+    "src/faceboxes/faceboxes.js": {
+      "bytes": 2612,
+      "imports": [
+        {
+          "path": "src/log.js",
+          "kind": "import-statement"
+        },
+        {
+          "path": "dist/tfjs.esm.js",
+          "kind": "import-statement"
+        },
+        {
+          "path": "src/profile.js",
+          "kind": "import-statement"
+        }
+      ]
+    },
     "src/age/age.js": {
       "bytes": 2037,
       "imports": [
@@ -145,7 +162,7 @@
       ]
     },
     "src/emotion/emotion.js": {
-      "bytes": 3078,
+      "bytes": 3056,
       "imports": [
         {
           "path": "src/log.js",
@@ -417,7 +434,7 @@
       ]
     },
     "config.js": {
-      "bytes": 9644,
+      "bytes": 9786,
       "imports": []
     },
     "src/sample.js": {
@@ -425,11 +442,11 @@
       "imports": []
     },
     "package.json": {
-      "bytes": 2336,
+      "bytes": 2321,
       "imports": []
     },
     "src/human.js": {
-      "bytes": 18719,
+      "bytes": 18962,
       "imports": [
         {
           "path": "src/log.js",
@@ -447,6 +464,10 @@
           "path": "src/blazeface/facemesh.js",
           "kind": "import-statement"
         },
+        {
+          "path": "src/faceboxes/faceboxes.js",
+          "kind": "import-statement"
+        },
         {
           "path": "src/age/age.js",
           "kind": "import-statement"
@@ -503,7 +524,7 @@
       "imports": [],
       "exports": [],
       "inputs": {},
-      "bytes": 1927417
+      "bytes": 1932071
     },
     "dist/human.esm.js": {
       "imports": [],
@@ -532,6 +553,9 @@
         "src/profile.js": {
          "bytesInOutput": 637
         },
+        "src/faceboxes/faceboxes.js": {
+          "bytesInOutput": 1387
+        },
         "src/age/age.js": {
           "bytesInOutput": 813
         },
@@ -602,13 +626,13 @@
           "bytesInOutput": 252
         },
         "dist/tfjs.esm.js": {
-          "bytesInOutput": 1062880
+          "bytesInOutput": 1062888
         },
         "src/tfjs/backend.js": {
           "bytesInOutput": 1205
         },
         "src/human.js": {
-          "bytesInOutput": 10250
+          "bytesInOutput": 10537
         },
         "src/handpose/box.js": {
           "bytesInOutput": 938
@@ -626,7 +650,7 @@
           "bytesInOutput": 16
         }
       },
-      "bytes": 1347321
+      "bytes": 1349003
     }
   }
 }
File diff suppressed because one or more lines are too long
@@ -77,7 +77,7 @@
       ]
     },
     "src/blazeface/facemesh.js": {
-      "bytes": 2991,
+      "bytes": 2973,
       "imports": [
         {
           "path": "src/log.js",
@@ -110,6 +110,23 @@
         }
       ]
     },
+    "src/faceboxes/faceboxes.js": {
+      "bytes": 2612,
+      "imports": [
+        {
+          "path": "src/log.js",
+          "kind": "import-statement"
+        },
+        {
+          "path": "dist/tfjs.esm.js",
+          "kind": "import-statement"
+        },
+        {
+          "path": "src/profile.js",
+          "kind": "import-statement"
+        }
+      ]
+    },
     "src/age/age.js": {
       "bytes": 2037,
       "imports": [
@@ -145,7 +162,7 @@
       ]
     },
     "src/emotion/emotion.js": {
-      "bytes": 3078,
+      "bytes": 3056,
       "imports": [
         {
           "path": "src/log.js",
@@ -417,7 +434,7 @@
       ]
     },
     "config.js": {
-      "bytes": 9644,
+      "bytes": 9786,
       "imports": []
     },
     "src/sample.js": {
@@ -425,11 +442,11 @@
       "imports": []
     },
     "package.json": {
-      "bytes": 2336,
+      "bytes": 2321,
       "imports": []
     },
     "src/human.js": {
-      "bytes": 18719,
+      "bytes": 18962,
       "imports": [
         {
           "path": "src/log.js",
@@ -447,6 +464,10 @@
           "path": "src/blazeface/facemesh.js",
           "kind": "import-statement"
         },
+        {
+          "path": "src/faceboxes/faceboxes.js",
+          "kind": "import-statement"
+        },
         {
           "path": "src/age/age.js",
           "kind": "import-statement"
@@ -503,7 +524,7 @@
       "imports": [],
       "exports": [],
       "inputs": {},
-      "bytes": 1927434
+      "bytes": 1932088
     },
     "dist/human.js": {
       "imports": [],
@@ -530,6 +551,9 @@
         "src/profile.js": {
           "bytesInOutput": 637
         },
+        "src/faceboxes/faceboxes.js": {
+          "bytesInOutput": 1387
+        },
         "src/age/age.js": {
           "bytesInOutput": 813
         },
@@ -597,13 +621,13 @@
           "bytesInOutput": 2480
         },
         "src/human.js": {
-          "bytesInOutput": 10286
+          "bytesInOutput": 10573
         },
         "src/log.js": {
           "bytesInOutput": 252
         },
         "dist/tfjs.esm.js": {
-          "bytesInOutput": 1062880
+          "bytesInOutput": 1062888
         },
         "src/tfjs/backend.js": {
           "bytesInOutput": 1205
         },
@@ -624,7 +648,7 @@
           "bytesInOutput": 16
         }
       },
-      "bytes": 1347363
+      "bytes": 1349045
     }
   }
 }
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -77,7 +77,7 @@
       ]
     },
     "src/blazeface/facemesh.js": {
-      "bytes": 2991,
+      "bytes": 2973,
       "imports": [
         {
           "path": "src/log.js",
@@ -110,6 +110,23 @@
         }
       ]
     },
+    "src/faceboxes/faceboxes.js": {
+      "bytes": 2612,
+      "imports": [
+        {
+          "path": "src/log.js",
+          "kind": "import-statement"
+        },
+        {
+          "path": "dist/tfjs.esm.js",
+          "kind": "import-statement"
+        },
+        {
+          "path": "src/profile.js",
+          "kind": "import-statement"
+        }
+      ]
+    },
     "src/age/age.js": {
       "bytes": 2037,
       "imports": [
@@ -145,7 +162,7 @@
       ]
     },
     "src/emotion/emotion.js": {
-      "bytes": 3078,
+      "bytes": 3056,
      "imports": [
         {
           "path": "src/log.js",
@@ -417,7 +434,7 @@
       ]
     },
     "config.js": {
-      "bytes": 9644,
+      "bytes": 9786,
       "imports": []
     },
     "src/sample.js": {
@@ -425,11 +442,11 @@
       "imports": []
     },
     "package.json": {
-      "bytes": 2336,
+      "bytes": 2321,
       "imports": []
     },
     "src/human.js": {
-      "bytes": 18719,
+      "bytes": 18962,
       "imports": [
         {
           "path": "src/log.js",
@@ -447,6 +464,10 @@
           "path": "src/blazeface/facemesh.js",
           "kind": "import-statement"
         },
+        {
+          "path": "src/faceboxes/faceboxes.js",
+          "kind": "import-statement"
+        },
         {
           "path": "src/age/age.js",
           "kind": "import-statement"
@@ -503,7 +524,7 @@
       "imports": [],
       "exports": [],
       "inputs": {},
-      "bytes": 710163
+      "bytes": 714830
     },
     "dist/human.node-gpu.js": {
       "imports": [],
@@ -533,6 +554,9 @@
         "src/profile.js": {
           "bytesInOutput": 631
         },
+        "src/faceboxes/faceboxes.js": {
+          "bytesInOutput": 1425
+        },
         "src/age/age.js": {
           "bytesInOutput": 852
         },
@@ -600,7 +624,7 @@
           "bytesInOutput": 2478
         },
         "src/human.js": {
-          "bytesInOutput": 10397
+          "bytesInOutput": 10681
         },
         "src/log.js": {
           "bytesInOutput": 251
         },
@@ -624,7 +648,7 @@
           "bytesInOutput": 16
         }
       },
-      "bytes": 278542
+      "bytes": 280251
     }
   }
 }
Binary file not shown.
File diff suppressed because one or more lines are too long
package.json
@@ -50,7 +50,7 @@
     "start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",
     "lint": "eslint src/*.js demo/*.js",
     "dev": "npm install && node server/dev.js",
-    "build": "npm install && rimraf dist/* && node server/build.js && node server/changelog.js",
+    "build": "rimraf dist/* && node server/build.js && node server/changelog.js",
     "update": "npm update --depth 20 --force && npm dedupe && npm prune && npm audit"
   },
   "keywords": [
src/blazeface/facemesh.js
@@ -50,7 +50,6 @@ class MediaPipeFaceMesh {
 let faceModels = [null, null, null];
 async function load(config) {
   faceModels = await Promise.all([
-    // @ts-ignore
     (!faceModels[0] && config.face.enabled) ? blazeface.load(config) : null,
     (!faceModels[1] && config.face.mesh.enabled) ? tf.loadGraphModel(config.face.mesh.modelPath, { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }) : null,
     (!faceModels[2] && config.face.iris.enabled) ? tf.loadGraphModel(config.face.iris.modelPath, { fromTFHub: config.face.iris.modelPath.includes('tfhub.dev') }) : null,
src/emotion/emotion.js
@@ -65,7 +65,6 @@ async function predict(image, config) {
     const profileData = await tf.profile(() => models.emotion.predict(normalize));
     data = profileData.result.dataSync();
     profileData.result.dispose();
-    // @ts-ignore
     profile.run('emotion', profileData);
   }
   for (let i = 0; i < data.length; i++) {
src/faceboxes/faceboxes.js (new file, 69 lines)
@@ -0,0 +1,69 @@
+import { log } from '../log.js';
+import * as tf from '../../dist/tfjs.esm.js';
+import * as profile from '../profile.js';
+
+class FaceBoxes {
+  constructor(model, config) {
+    this.model = model;
+    this.config = config;
+  }
+
+  async estimateFaces(input, config) {
+    if (config) this.config = config;
+    const results = [];
+    const resizeT = tf.image.resizeBilinear(input, [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
+    const castT = resizeT.toInt();
+    let scores;
+    let boxes;
+    if (!config.profile) {
+      const [scoresT, boxesT, numT] = await this.model.executeAsync(castT);
+      scores = scoresT.dataSync();
+      const squeezeT = boxesT.squeeze();
+      boxes = squeezeT.arraySync();
+      scoresT.dispose();
+      boxesT.dispose();
+      squeezeT.dispose();
+      numT.dispose();
+    } else {
+      const profileData = await tf.profile(() => this.model.executeAsync(castT));
+      scores = profileData.result[0].dataSync();
+      const squeezeT = profileData.result[1].squeeze();
+      boxes = squeezeT.arraySync();
+      profileData.result.forEach((t) => t.dispose());
+      profile.run('faceboxes', profileData);
+    }
+    castT.dispose();
+    resizeT.dispose();
+    for (const i in boxes) {
+      if (scores[i] && scores[i] > this.config.face.detector.minConfidence) {
+        const enlarge = 1.05;
+        const crop = [boxes[i][0] / enlarge, boxes[i][1] / enlarge, boxes[i][2] * enlarge, boxes[i][3] * enlarge];
+        const boxRaw = [crop[1], crop[0], (crop[3]) - (crop[1]), (crop[2]) - (crop[0])];
+        const box = [parseInt(boxRaw[0] * input.shape[2]), parseInt(boxRaw[1] * input.shape[1]), parseInt(boxRaw[2] * input.shape[2]), parseInt(boxRaw[3] * input.shape[1])];
+        const image = tf.image.cropAndResize(input, [crop], [0], [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
+        results.push({
+          confidence: scores[i],
+          box,
+          boxRaw,
+          image,
+          // mesh,
+          // meshRaw,
+          // annotations,
+        });
+      }
+    }
+    return results;
+  }
+}
+
+async function load(config) {
+  const model = await tf.loadGraphModel(config.face.detector.modelPath);
+  log(`load model: ${config.face.detector.modelPath.match(/\/(.*)\./)[1]}`);
+  const faceboxes = new FaceBoxes(model, config);
+  if (config.face.mesh.enabled) log(`load model: ${config.face.mesh.modelPath.match(/\/(.*)\./)[1]}`);
+  if (config.face.iris.enabled) log(`load model: ${config.face.iris.modelPath.match(/\/(.*)\./)[1]}`);
+  return faceboxes;
+}
+
+exports.load = load;
+exports.FaceBoxes = FaceBoxes;
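As a usage sketch (illustrative, not part of the commit): the module mirrors the load/estimateFaces contract of the existing facemesh pipeline, so it can be driven directly. The import paths, the zero-filled placeholder input of shape [1, height, width, 3], and the minConfidence value below are assumptions.

// sketch: driving the faceboxes detector directly
import * as tf from '../../dist/tfjs.esm.js'; // assumed path, relative to src/faceboxes/
import * as faceboxes from './faceboxes.js';

const config = {
  face: {
    mesh: { enabled: false },
    iris: { enabled: false },
    detector: { modelPath: '../models/faceboxes.json', inputSize: 256, minConfidence: 0.5 },
  },
};

const detector = await faceboxes.load(config);              // loads the graph model and logs its name
const input = tf.zeros([1, 720, 1280, 3]);                  // placeholder for a real image tensor
const faces = await detector.estimateFaces(input, config);  // [{ confidence, box, boxRaw, image }, ...]
faces.forEach((f) => f.image.dispose());                    // each result carries a cropped face tensor owned by the caller
input.dispose();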
src/human.js (21 changed lines)
@@ -2,6 +2,7 @@ import { log } from './log.js';
 import * as tf from '../dist/tfjs.esm.js';
 import * as backend from './tfjs/backend.js';
 import * as facemesh from './blazeface/facemesh.js';
+import * as faceboxes from './faceboxes/faceboxes.js';
 import * as age from './age/age.js';
 import * as gender from './gender/gender.js';
 import * as emotion from './emotion/emotion.js';
@@ -120,10 +121,10 @@ class Human {
         log('tf flags:', tf.ENV.flags);
       }
     }
+    const face = this.config.face.detector.modelPath.includes('faceboxes') ? faceboxes : facemesh;
     if (this.config.async) {
       [
-        this.models.facemesh,
+        this.models.face,
         this.models.age,
         this.models.gender,
         this.models.emotion,
@@ -131,7 +132,7 @@ class Human {
         this.models.posenet,
         this.models.handpose,
       ] = await Promise.all([
-        this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config) : null),
+        this.models.face || (this.config.face.enabled ? face.load(this.config) : null),
         this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
         this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
         this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
@@ -140,7 +141,7 @@ class Human {
         this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
       ]);
     } else {
-      if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config);
+      if (this.config.face.enabled && !this.models.face) this.models.face = await face.load(this.config);
       if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
       if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
       if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
@@ -218,7 +219,7 @@ class Human {
     const faceRes = [];
     this.state = 'run:face';
     timeStamp = now();
-    const faces = await this.models.facemesh?.estimateFaces(input, this.config);
+    const faces = await this.models.face?.estimateFaces(input, this.config);
     this.perf.face = Math.trunc(now() - timeStamp);
     for (const face of faces) {
       this.analyze('Get Face');
@@ -281,16 +282,14 @@ class Human {
       }

       this.analyze('Finish Face:');
-      // dont need face anymore
-      face.image.dispose();

       // calculate iris distance
       // iris: array[ center, left, top, right, bottom]
-      if (!this.config.face.iris.enabled) {
+      if (!this.config.face.iris.enabled && face?.annotations?.leftEyeIris && face?.annotations?.rightEyeIris) {
         delete face.annotations.leftEyeIris;
         delete face.annotations.rightEyeIris;
       }
-      const irisSize = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)
+      const irisSize = (face.annotations?.leftEyeIris && face.annotations?.rightEyeIris)
         /* average human iris size is 11.7mm */
         ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))
         : 0;
@@ -309,7 +308,11 @@ class Human {
         emotion: emotionRes,
         embedding: embeddingRes,
         iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
+        image: face.image.toInt().squeeze(),
       });

+      // dont need face anymore
+      face.image?.dispose();
       this.analyze('End Face');
     }
     this.analyze('End FaceMesh:');
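One practical consequence of the changes above: each entry in the face results now carries a live tensor (image: face.image.toInt().squeeze()); the internal crop is still disposed by human.js, but this squeezed copy is not, so downstream code presumably has to release it once it is done drawing or cropping. A hedged sketch, assuming the detection results are exposed as result.face and that nothing else retains the tensor:

// sketch: releasing the per-face crop tensor added by this commit (inside an async function)
const result = await human.detect(input);
for (const f of result.face) {
  // use f.confidence, f.box, f.iris, f.image as needed, then release the tensor
  if (f.image && f.image.dispose) f.image.dispose();
}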
wiki (2 changed lines)
@@ -1 +1 @@
-Subproject commit fb11ed0f097e9aa8c21643a45a8324b88d7aecee
+Subproject commit 0fa077bf63fa7a3f26826eb9c88fd837e3728be7