full async operations

pull/50/head
Vladimir Mandic 2020-11-06 11:39:39 -05:00
parent 0b07b548d1
commit b65c824d88
56 changed files with 7876 additions and 7636 deletions


@ -48,6 +48,7 @@
"promise/always-return": "off",
"promise/catch-or-return": "off",
"promise/no-nesting": "off",
"no-async-promise-executor": "off",
"import/no-absolute-path": "off",
"import/no-extraneous-dependencies": "off",
"node/no-unpublished-import": "off",


@ -247,7 +247,7 @@ All configuration details can be changed in real-time!
config = {
backend: 'webgl', // select tfjs backend to use
console: true, // enable debugging output to console
async: false, // execute enabled models in parallel
async: true, // execute enabled models in parallel
// this disables per-model performance data but slightly increases performance
// cannot be used if profiling is enabled
profile: false, // enable tfjs profiling
@ -434,8 +434,9 @@ result = {
hand, // <array of string>
}
performance = { // performance data of last execution for each module, measured in milliseconds
backend, // time to initialize tf backend, valid only during backend startup
load, // time to load models, valid only during model load
// note that per-model performance data is not available in async execution mode
backend, // time to initialize tf backend, keeps longest value measured
load, // time to load models, keeps longest value measured
image, // time for image processing
gesture, // gesture analysis time
body, // model time
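A hedged usage sketch for reading these timings (assuming the library's exported Human class; field names follow the docs above):

  import Human from './dist/human.esm.js'; // path assumed from the demo code in this commit
  const human = new Human({ async: true });
  async function run(input) {
    const result = await human.detect(input);
    // with async: true the models run in parallel, so per-model entries (body, hand, ...)
    // are not reported; backend and load keep the longest value measured
    console.log(result.performance);
  }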


@ -4,7 +4,7 @@
export default {
backend: 'webgl', // select tfjs backend to use
console: true, // enable debugging output to console
async: false, // execute enabled models in parallel
async: true, // execute enabled models in parallel
// this disables per-model performance data but slightly increases performance
// cannot be used if profiling is enabled
profile: false, // enable tfjs profiling


@ -35,6 +35,7 @@ const config = {
profile: false,
deallocate: false,
wasm: { path: '../assets' },
async: true,
filter: {
enabled: true,
width: 0,
@ -324,6 +325,7 @@ function setupMenu() {
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addList('Backend', ['cpu', 'webgl', 'wasm', 'webgpu'], config.backend, (val) => config.backend = val);
menu.addBool('Async Operations', config, 'async');
menu.addBool('Enable Profiler', config, 'profile');
menu.addBool('Memory Shield', config, 'deallocate');
menu.addBool('Use Web Worker', ui, 'useWorker');
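The 'Async Operations' toggle above flips config.async at runtime. A simplified sketch of what that flag selects inside the library (model method names are assumed for illustration, not copied from src/human.js):

  async function runModels(models, input, config) {
    if (config.async) {
      // independent models run in parallel; per-model timing is lost
      const [face, body, hand] = await Promise.all([
        models.facemesh.estimateFaces(input),
        models.posenet.estimatePoses(input, config.body),
        models.handpose.estimateHands(input),
      ]);
      return { face, body, hand };
    }
    // sequential execution: each model can be timed individually
    const face = await models.facemesh.estimateFaces(input);
    const body = await models.posenet.estimatePoses(input, config.body);
    const hand = await models.handpose.estimateHands(input);
    return { face, body, hand };
  }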


@ -27,7 +27,7 @@ async function drawFace(result, canvas, ui, triangulation) {
}
// silly hack since fillText does not support newlines
const labels = [];
if (face.agConfidence) labels.push(`${Math.trunc(100 * face.agConfidence)}% ${face.gender || ''}`);
if (face.genderConfidence) labels.push(`${Math.trunc(100 * face.genderConfidence)}% ${face.gender || ''}`);
if (face.age) labels.push(`age: ${face.age || ''}`);
if (face.iris) labels.push(`iris: ${face.iris}`);
if (face.emotion && face.emotion.length > 0) {


@ -6,9 +6,11 @@
- monitors specified files and folders for changes
- triggers library and application rebuild
- any build errors are immediately displayed and can be corrected without the need for a restart
- passthrough data compression
*/
const fs = require('fs');
const zlib = require('zlib');
const path = require('path');
const http2 = require('http2');
const chokidar = require('chokidar');
@ -106,12 +108,12 @@ async function watch() {
// get file content for a valid url request
function content(url) {
return new Promise((resolve) => {
let obj = {};
let obj = { ok: false };
obj.file = url;
if (!fs.existsSync(obj.file)) resolve(null);
obj.stat = fs.statSync(obj.file);
// should really use streams here instead of reading the entire content in memory, but this is micro-http2, not intended to serve huge files
if (obj.stat.isFile()) obj.data = fs.readFileSync(obj.file);
if (obj.stat.isFile()) obj.ok = true;
if (obj.stat.isDirectory()) {
obj.file = path.join(obj.file, options.default);
obj = content(obj.file);
@ -125,20 +127,32 @@ async function httpRequest(req, res) {
content(path.join(__dirname, options.root, req.url)).then((result) => {
const forwarded = (req.headers['forwarded'] || '').match(/for="\[(.*)\]:/);
const ip = (Array.isArray(forwarded) ? forwarded[1] : null) || req.headers['x-forwarded-for'] || req.ip || req.socket.remoteAddress;
if (!result || !result.data) {
if (!result || !result.ok) {
res.writeHead(404, { 'Content-Type': 'text/html' });
res.end('Error 404: Not Found\n', 'utf-8');
log.warn(`${req.method}/${req.httpVersion}`, res.statusCode, `${req.headers['host']}${req.url}`, ip);
log.warn(`${req.method}/${req.httpVersion}`, res.statusCode, req.url, ip);
} else {
const ext = String(path.extname(result.file)).toLowerCase();
const contentType = mime[ext] || 'application/octet-stream';
const accept = req.headers['accept-encoding'] ? req.headers['accept-encoding'].includes('br') : false; // does target accept brotli compressed data
res.writeHead(200, {
'Content-Language': 'en', 'Content-Type': contentType, 'Content-Encoding': '', 'Content-Length': result.stat.size, 'Last-Modified': result.stat.mtime, 'Cache-Control': 'no-cache', 'X-Powered-By': `NodeJS/${process.version}`,
'Content-Language': 'en', 'Content-Type': contentType, 'Content-Encoding': accept ? 'br' : '', 'Last-Modified': result.stat.mtime, 'Cache-Control': 'no-cache', 'X-Powered-By': `NodeJS/${process.version}`, // 'Content-Length': result.stat.size,
});
// ideally this should be passed through compress
res.end(result.data);
log.data(`${req.method}/${req.httpVersion}`, res.statusCode, contentType, result.stat.size, `${req.headers['host']}${req.url}`, ip);
res.end();
const compress = zlib.createBrotliCompress({ params: { [zlib.constants.BROTLI_PARAM_QUALITY]: 5 } }); // instance of brotli compression with level 5
const stream = fs.createReadStream(result.file);
if (!accept) stream.pipe(res); // don't compress data
else stream.pipe(compress).pipe(res); // compress data
// alternative methods of sending data
/// 2. read stream and send by chunk
// const stream = fs.createReadStream(result.file);
// stream.on('data', (chunk) => res.write(chunk));
// stream.on('end', () => res.end());
// 3. read entire file and send it as blob
// const data = fs.readFileSync(result.file);
// res.write(data);
log.data(`${req.method}/${req.httpVersion}`, res.statusCode, contentType, result.stat.size, req.url, ip);
}
});
}
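The rewritten handler streams the file and pipes it through brotli only when the client advertises support. A self-contained sketch of the same pattern over plain Node http (port and file path are hypothetical):

  const http = require('http');
  const fs = require('fs');
  const zlib = require('zlib');

  http.createServer((req, res) => {
    const file = './index.html'; // hypothetical static file
    const brotli = (req.headers['accept-encoding'] || '').includes('br');
    const headers = { 'Content-Type': 'text/html', 'Cache-Control': 'no-cache' };
    if (brotli) headers['Content-Encoding'] = 'br';
    res.writeHead(200, headers);
    const stream = fs.createReadStream(file);
    if (brotli) stream.pipe(zlib.createBrotliCompress({ params: { [zlib.constants.BROTLI_PARAM_QUALITY]: 5 } })).pipe(res);
    else stream.pipe(res);
  }).listen(8000);

Content-Length is commented out in the diff above for the same reason it is absent here: once the body is compressed on the fly, the final size is not known before streaming.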


@ -1,7 +1,7 @@
{
"inputs": {
"demo/browser.js": {
"bytes": 17998,
"bytes": 18066,
"imports": [
{
"path": "dist/human.esm.js"
@ -15,7 +15,7 @@
]
},
"demo/draw.js": {
"bytes": 7553,
"bytes": 7561,
"imports": []
},
"demo/menu.js": {
@ -23,7 +23,7 @@
"imports": []
},
"dist/human.esm.js": {
"bytes": 1275163,
"bytes": 1277557,
"imports": []
}
},
@ -31,28 +31,28 @@
"dist/demo-browser-index.js.map": {
"imports": [],
"inputs": {},
"bytes": 5522897
"bytes": 5529553
},
"dist/demo-browser-index.js": {
"imports": [],
"inputs": {
"dist/human.esm.js": {
"bytesInOutput": 1660982
"bytesInOutput": 1663845
},
"dist/human.esm.js": {
"bytesInOutput": 8716
},
"demo/draw.js": {
"bytesInOutput": 7443
"bytesInOutput": 7451
},
"demo/menu.js": {
"bytesInOutput": 12359
},
"demo/browser.js": {
"bytesInOutput": 16212
"bytesInOutput": 16281
}
},
"bytes": 1705834
"bytes": 1708774
}
}
}


@ -1,145 +1,267 @@
{
"inputs": {
"config.js": {
"bytes": 7320,
"bytes": 7319,
"imports": []
},
"package.json": {
"bytes": 3260,
"bytes": 3374,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2596,
"src/age/ssrnet.js": {
"bytes": 1746,
"imports": [
{
"path": "src/profile.js"
}
]
},
"src/facemesh/blazeface.js": {
"src/body/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/body/heapSort.js"
}
]
},
"src/body/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/body/buildParts.js"
},
{
"path": "src/body/decodePose.js"
},
{
"path": "src/body/vectors.js"
}
]
},
"src/body/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/body/keypoints.js"
},
{
"path": "src/body/vectors.js"
}
]
},
"src/body/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/body/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/body/modelBase.js": {
"bytes": 1512,
"imports": []
},
"src/body/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "src/body/modelBase.js"
}
]
},
"src/body/modelPoseNet.js": {
"bytes": 3539,
"imports": [
{
"path": "src/body/modelMobileNet.js"
},
{
"path": "src/body/decodeMultiple.js"
},
{
"path": "src/body/util.js"
}
]
},
"src/body/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/body/modelMobileNet.js"
},
{
"path": "src/body/modelPoseNet.js"
},
{
"path": "src/body/decodeMultiple.js"
},
{
"path": "src/body/keypoints.js"
},
{
"path": "src/body/util.js"
}
]
},
"src/body/util.js": {
"bytes": 2260,
"imports": [
{
"path": "src/body/keypoints.js"
}
]
},
"src/body/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/body/keypoints.js"
}
]
},
"src/emotion/emotion.js": {
"bytes": 2767,
"imports": [
{
"path": "src/profile.js"
}
]
},
"src/face/blazeface.js": {
"bytes": 6991,
"imports": []
},
"src/facemesh/box.js": {
"src/face/box.js": {
"bytes": 1924,
"imports": []
},
"src/facemesh/facemesh.js": {
"src/face/facemesh.js": {
"bytes": 2572,
"imports": [
{
"path": "src/facemesh/blazeface.js"
"path": "src/face/blazeface.js"
},
{
"path": "src/facemesh/keypoints.js"
"path": "src/face/keypoints.js"
},
{
"path": "src/facemesh/facepipeline.js"
"path": "src/face/facepipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
"path": "src/face/uvcoords.js"
},
{
"path": "src/facemesh/triangulation.js"
"path": "src/face/triangulation.js"
}
]
},
"src/facemesh/facepipeline.js": {
"bytes": 14296,
"src/face/facepipeline.js": {
"bytes": 14368,
"imports": [
{
"path": "src/facemesh/box.js"
"path": "src/face/box.js"
},
{
"path": "src/facemesh/keypoints.js"
"path": "src/face/keypoints.js"
},
{
"path": "src/facemesh/util.js"
"path": "src/face/util.js"
}
]
},
"src/facemesh/keypoints.js": {
"src/face/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/triangulation.js": {
"src/face/triangulation.js": {
"bytes": 12940,
"imports": []
},
"src/facemesh/util.js": {
"src/face/util.js": {
"bytes": 3078,
"imports": []
},
"src/facemesh/uvcoords.js": {
"src/face/uvcoords.js": {
"bytes": 19592,
"imports": []
},
"src/gender/ssrnet.js": {
"bytes": 2003,
"imports": [
{
"path": "src/profile.js"
}
]
},
"src/gesture.js": {
"bytes": 2175,
"imports": []
},
"src/handpose/anchors.js": {
"src/hand/anchors.js": {
"bytes": 224151,
"imports": []
},
"src/handpose/box.js": {
"src/hand/box.js": {
"bytes": 3192,
"imports": []
},
"src/handpose/handdetector.js": {
"src/hand/handdetector.js": {
"bytes": 4313,
"imports": [
{
"path": "src/handpose/box.js"
"path": "src/hand/box.js"
}
]
},
"src/handpose/handpipeline.js": {
"bytes": 8657,
"src/hand/handpipeline.js": {
"bytes": 8632,
"imports": [
{
"path": "src/handpose/box.js"
"path": "src/hand/box.js"
},
{
"path": "src/handpose/util.js"
"path": "src/hand/util.js"
}
]
},
"src/handpose/handpose.js": {
"src/hand/handpose.js": {
"bytes": 2839,
"imports": [
{
"path": "src/handpose/handdetector.js"
"path": "src/hand/handdetector.js"
},
{
"path": "src/handpose/handpipeline.js"
"path": "src/hand/handpipeline.js"
},
{
"path": "src/handpose/anchors.js"
"path": "src/hand/anchors.js"
}
]
},
"src/handpose/util.js": {
"src/hand/util.js": {
"bytes": 3030,
"imports": []
},
"src/human.js": {
"bytes": 11165,
"bytes": 14051,
"imports": [
{
"path": "src/facemesh/facemesh.js"
"path": "src/face/facemesh.js"
},
{
"path": "src/ssrnet/ssrnet.js"
"path": "src/age/ssrnet.js"
},
{
"path": "src/gender/ssrnet.js"
},
{
"path": "src/emotion/emotion.js"
},
{
"path": "src/posenet/posenet.js"
"path": "src/body/posenet.js"
},
{
"path": "src/handpose/handpose.js"
"path": "src/hand/handpose.js"
},
{
"path": "src/gesture.js"
@ -170,213 +292,105 @@
"bytes": 19352,
"imports": []
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/posenet/heapSort.js"
}
]
},
"src/posenet/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/posenet/buildParts.js"
},
{
"path": "src/posenet/decodePose.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/posenet/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1512,
"imports": []
},
"src/posenet/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "src/posenet/modelBase.js"
}
]
},
"src/posenet/modelPoseNet.js": {
"bytes": 3447,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/modelPoseNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/util.js": {
"bytes": 2260,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/posenet/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/profile.js": {
"bytes": 1004,
"imports": []
},
"src/ssrnet/ssrnet.js": {
"bytes": 2587,
"imports": [
{
"path": "src/profile.js"
}
]
}
},
"outputs": {
"dist/human.esm-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 613319
"bytes": 619939
},
"dist/human.esm-nobundle.js": {
"imports": [],
"inputs": {
"src/facemesh/blazeface.js": {
"src/face/blazeface.js": {
"bytesInOutput": 3082
},
"src/facemesh/keypoints.js": {
"src/face/keypoints.js": {
"bytesInOutput": 1945
},
"src/facemesh/box.js": {
"src/face/box.js": {
"bytesInOutput": 1021
},
"src/facemesh/util.js": {
"src/face/util.js": {
"bytesInOutput": 1171
},
"src/facemesh/facepipeline.js": {
"src/face/facepipeline.js": {
"bytesInOutput": 5585
},
"src/facemesh/uvcoords.js": {
"src/face/uvcoords.js": {
"bytesInOutput": 16785
},
"src/facemesh/triangulation.js": {
"src/face/triangulation.js": {
"bytesInOutput": 9990
},
"src/facemesh/facemesh.js": {
"src/face/facemesh.js": {
"bytesInOutput": 1254
},
"src/profile.js": {
"bytesInOutput": 619
"bytesInOutput": 618
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1397
"src/age/ssrnet.js": {
"bytesInOutput": 892
},
"src/gender/ssrnet.js": {
"bytesInOutput": 1028
},
"src/emotion/emotion.js": {
"bytesInOutput": 1305
"bytesInOutput": 1344
},
"src/posenet/modelBase.js": {
"src/body/modelBase.js": {
"bytesInOutput": 455
},
"src/posenet/modelMobileNet.js": {
"src/body/modelMobileNet.js": {
"bytesInOutput": 267
},
"src/posenet/heapSort.js": {
"src/body/heapSort.js": {
"bytesInOutput": 1041
},
"src/posenet/buildParts.js": {
"src/body/buildParts.js": {
"bytesInOutput": 546
},
"src/posenet/keypoints.js": {
"src/body/keypoints.js": {
"bytesInOutput": 1621
},
"src/posenet/vectors.js": {
"src/body/vectors.js": {
"bytesInOutput": 607
},
"src/posenet/decodePose.js": {
"src/body/decodePose.js": {
"bytesInOutput": 1016
},
"src/posenet/decodeMultiple.js": {
"src/body/decodeMultiple.js": {
"bytesInOutput": 603
},
"src/posenet/util.js": {
"src/body/util.js": {
"bytesInOutput": 1053
},
"src/posenet/modelPoseNet.js": {
"bytesInOutput": 841
"src/body/modelPoseNet.js": {
"bytesInOutput": 868
},
"src/posenet/posenet.js": {
"src/body/posenet.js": {
"bytesInOutput": 459
},
"src/handpose/box.js": {
"src/hand/box.js": {
"bytesInOutput": 1420
},
"src/handpose/handdetector.js": {
"src/hand/handdetector.js": {
"bytesInOutput": 1806
},
"src/handpose/util.js": {
"src/hand/util.js": {
"bytesInOutput": 997
},
"src/handpose/handpipeline.js": {
"bytesInOutput": 3069
"src/hand/handpipeline.js": {
"bytesInOutput": 3057
},
"src/handpose/anchors.js": {
"src/hand/anchors.js": {
"bytesInOutput": 127000
},
"src/handpose/handpose.js": {
"src/hand/handpose.js": {
"bytesInOutput": 1127
},
"src/gesture.js": {
@ -392,16 +406,16 @@
"bytesInOutput": 1299
},
"package.json": {
"bytesInOutput": 2898
"bytesInOutput": 3004
},
"src/human.js": {
"bytesInOutput": 5614
"bytesInOutput": 7355
},
"src/human.js": {
"bytesInOutput": 0
}
},
"bytes": 213471
"bytes": 215895
}
}
}

dist/human.esm.js (vendored, 496 lines changed): diff suppressed because one or more lines are too long

dist/human.esm.json (vendored, 411 lines changed):

@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
"bytes": 7320,
"bytes": 7319,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@ -149,11 +149,11 @@
]
},
"package.json": {
"bytes": 3260,
"bytes": 3374,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2596,
"src/age/ssrnet.js": {
"bytes": 1746,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -163,7 +163,131 @@
}
]
},
"src/facemesh/blazeface.js": {
"src/body/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/body/heapSort.js"
}
]
},
"src/body/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/body/buildParts.js"
},
{
"path": "src/body/decodePose.js"
},
{
"path": "src/body/vectors.js"
}
]
},
"src/body/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/body/keypoints.js"
},
{
"path": "src/body/vectors.js"
}
]
},
"src/body/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/body/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/body/modelBase.js": {
"bytes": 1512,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/body/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/body/modelBase.js"
}
]
},
"src/body/modelPoseNet.js": {
"bytes": 3539,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/body/modelMobileNet.js"
},
{
"path": "src/body/decodeMultiple.js"
},
{
"path": "src/body/util.js"
}
]
},
"src/body/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/body/modelMobileNet.js"
},
{
"path": "src/body/modelPoseNet.js"
},
{
"path": "src/body/decodeMultiple.js"
},
{
"path": "src/body/keypoints.js"
},
{
"path": "src/body/util.js"
}
]
},
"src/body/util.js": {
"bytes": 2260,
"imports": [
{
"path": "src/body/keypoints.js"
}
]
},
"src/body/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/body/keypoints.js"
}
]
},
"src/emotion/emotion.js": {
"bytes": 2767,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/profile.js"
}
]
},
"src/face/blazeface.js": {
"bytes": 6991,
"imports": [
{
@ -171,7 +295,7 @@
}
]
},
"src/facemesh/box.js": {
"src/face/box.js": {
"bytes": 1924,
"imports": [
{
@ -179,71 +303,82 @@
}
]
},
"src/facemesh/facemesh.js": {
"src/face/facemesh.js": {
"bytes": 2572,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/blazeface.js"
"path": "src/face/blazeface.js"
},
{
"path": "src/facemesh/keypoints.js"
"path": "src/face/keypoints.js"
},
{
"path": "src/facemesh/facepipeline.js"
"path": "src/face/facepipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
"path": "src/face/uvcoords.js"
},
{
"path": "src/facemesh/triangulation.js"
"path": "src/face/triangulation.js"
}
]
},
"src/facemesh/facepipeline.js": {
"bytes": 14296,
"src/face/facepipeline.js": {
"bytes": 14368,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/box.js"
"path": "src/face/box.js"
},
{
"path": "src/facemesh/keypoints.js"
"path": "src/face/keypoints.js"
},
{
"path": "src/facemesh/util.js"
"path": "src/face/util.js"
}
]
},
"src/facemesh/keypoints.js": {
"src/face/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/triangulation.js": {
"src/face/triangulation.js": {
"bytes": 12940,
"imports": []
},
"src/facemesh/util.js": {
"src/face/util.js": {
"bytes": 3078,
"imports": []
},
"src/facemesh/uvcoords.js": {
"src/face/uvcoords.js": {
"bytes": 19592,
"imports": []
},
"src/gender/ssrnet.js": {
"bytes": 2003,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/profile.js"
}
]
},
"src/gesture.js": {
"bytes": 2175,
"imports": []
},
"src/handpose/anchors.js": {
"src/hand/anchors.js": {
"bytes": 224151,
"imports": []
},
"src/handpose/box.js": {
"src/hand/box.js": {
"bytes": 3192,
"imports": [
{
@ -251,72 +386,75 @@
}
]
},
"src/handpose/handdetector.js": {
"src/hand/handdetector.js": {
"bytes": 4313,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/box.js"
"path": "src/hand/box.js"
}
]
},
"src/handpose/handpipeline.js": {
"bytes": 8657,
"src/hand/handpipeline.js": {
"bytes": 8632,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/box.js"
"path": "src/hand/box.js"
},
{
"path": "src/handpose/util.js"
"path": "src/hand/util.js"
}
]
},
"src/handpose/handpose.js": {
"src/hand/handpose.js": {
"bytes": 2839,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/handdetector.js"
"path": "src/hand/handdetector.js"
},
{
"path": "src/handpose/handpipeline.js"
"path": "src/hand/handpipeline.js"
},
{
"path": "src/handpose/anchors.js"
"path": "src/hand/anchors.js"
}
]
},
"src/handpose/util.js": {
"src/hand/util.js": {
"bytes": 3030,
"imports": []
},
"src/human.js": {
"bytes": 11165,
"bytes": 14051,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/facemesh.js"
"path": "src/face/facemesh.js"
},
{
"path": "src/ssrnet/ssrnet.js"
"path": "src/age/ssrnet.js"
},
{
"path": "src/gender/ssrnet.js"
},
{
"path": "src/emotion/emotion.js"
},
{
"path": "src/posenet/posenet.js"
"path": "src/body/posenet.js"
},
{
"path": "src/handpose/handpose.js"
"path": "src/hand/handpose.js"
},
{
"path": "src/gesture.js"
@ -350,134 +488,10 @@
"bytes": 19352,
"imports": []
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/posenet/heapSort.js"
}
]
},
"src/posenet/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/posenet/buildParts.js"
},
{
"path": "src/posenet/decodePose.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/posenet/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1512,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/posenet/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/posenet/modelBase.js"
}
]
},
"src/posenet/modelPoseNet.js": {
"bytes": 3447,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/modelPoseNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/util.js": {
"bytes": 2260,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/posenet/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/profile.js": {
"bytes": 1004,
"imports": []
},
"src/ssrnet/ssrnet.js": {
"bytes": 2587,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/profile.js"
}
]
},
"empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": {
"bytes": 0,
"imports": []
@ -499,7 +513,7 @@
"dist/human.esm.js.map": {
"imports": [],
"inputs": {},
"bytes": 5409300
"bytes": 5415919
},
"dist/human.esm.js": {
"imports": [],
@ -561,88 +575,91 @@
"node_modules/@tensorflow/tfjs/dist/tf.node.js": {
"bytesInOutput": 760
},
"src/facemesh/blazeface.js": {
"src/face/blazeface.js": {
"bytesInOutput": 3093
},
"src/facemesh/keypoints.js": {
"src/face/keypoints.js": {
"bytesInOutput": 1946
},
"src/facemesh/box.js": {
"src/face/box.js": {
"bytesInOutput": 1006
},
"src/facemesh/util.js": {
"src/face/util.js": {
"bytesInOutput": 1190
},
"src/facemesh/facepipeline.js": {
"src/face/facepipeline.js": {
"bytesInOutput": 5577
},
"src/facemesh/uvcoords.js": {
"src/face/uvcoords.js": {
"bytesInOutput": 16786
},
"src/facemesh/triangulation.js": {
"src/face/triangulation.js": {
"bytesInOutput": 9991
},
"src/facemesh/facemesh.js": {
"src/face/facemesh.js": {
"bytesInOutput": 1237
},
"src/profile.js": {
"bytesInOutput": 620
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1397
"src/age/ssrnet.js": {
"bytesInOutput": 877
},
"src/gender/ssrnet.js": {
"bytesInOutput": 1007
},
"src/emotion/emotion.js": {
"bytesInOutput": 1295
"bytesInOutput": 1334
},
"src/posenet/modelBase.js": {
"src/body/modelBase.js": {
"bytesInOutput": 433
},
"src/posenet/modelMobileNet.js": {
"src/body/modelMobileNet.js": {
"bytesInOutput": 245
},
"src/posenet/heapSort.js": {
"src/body/heapSort.js": {
"bytesInOutput": 1042
},
"src/posenet/buildParts.js": {
"src/body/buildParts.js": {
"bytesInOutput": 547
},
"src/posenet/keypoints.js": {
"src/body/keypoints.js": {
"bytesInOutput": 1633
},
"src/posenet/vectors.js": {
"src/body/vectors.js": {
"bytesInOutput": 616
},
"src/posenet/decodePose.js": {
"src/body/decodePose.js": {
"bytesInOutput": 1024
},
"src/posenet/decodeMultiple.js": {
"src/body/decodeMultiple.js": {
"bytesInOutput": 604
},
"src/posenet/util.js": {
"src/body/util.js": {
"bytesInOutput": 1062
},
"src/posenet/modelPoseNet.js": {
"bytesInOutput": 819
"src/body/modelPoseNet.js": {
"bytesInOutput": 846
},
"src/posenet/posenet.js": {
"src/body/posenet.js": {
"bytesInOutput": 474
},
"src/handpose/box.js": {
"src/hand/box.js": {
"bytesInOutput": 1398
},
"src/handpose/handdetector.js": {
"src/hand/handdetector.js": {
"bytesInOutput": 1812
},
"src/handpose/util.js": {
"src/hand/util.js": {
"bytesInOutput": 1005
},
"src/handpose/handpipeline.js": {
"bytesInOutput": 3067
"src/hand/handpipeline.js": {
"bytesInOutput": 3055
},
"src/handpose/anchors.js": {
"src/hand/anchors.js": {
"bytesInOutput": 127001
},
"src/handpose/handpose.js": {
"src/hand/handpose.js": {
"bytesInOutput": 1105
},
"src/gesture.js": {
@ -658,16 +675,16 @@
"bytesInOutput": 1300
},
"package.json": {
"bytesInOutput": 2899
"bytesInOutput": 3005
},
"src/human.js": {
"bytesInOutput": 5628
"bytesInOutput": 7374
},
"src/human.js": {
"bytesInOutput": 0
}
},
"bytes": 1275163
"bytes": 1277557
}
}
}

dist/human.js (vendored, 496 lines changed): diff suppressed because one or more lines are too long

dist/human.js.map (vendored, 6 lines changed): diff suppressed because one or more lines are too long

dist/human.json (vendored, 411 lines changed):

@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
"bytes": 7320,
"bytes": 7319,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@ -149,11 +149,11 @@
]
},
"package.json": {
"bytes": 3260,
"bytes": 3374,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2596,
"src/age/ssrnet.js": {
"bytes": 1746,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -163,7 +163,131 @@
}
]
},
"src/facemesh/blazeface.js": {
"src/body/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/body/heapSort.js"
}
]
},
"src/body/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/body/buildParts.js"
},
{
"path": "src/body/decodePose.js"
},
{
"path": "src/body/vectors.js"
}
]
},
"src/body/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/body/keypoints.js"
},
{
"path": "src/body/vectors.js"
}
]
},
"src/body/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/body/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/body/modelBase.js": {
"bytes": 1512,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/body/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/body/modelBase.js"
}
]
},
"src/body/modelPoseNet.js": {
"bytes": 3539,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/body/modelMobileNet.js"
},
{
"path": "src/body/decodeMultiple.js"
},
{
"path": "src/body/util.js"
}
]
},
"src/body/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/body/modelMobileNet.js"
},
{
"path": "src/body/modelPoseNet.js"
},
{
"path": "src/body/decodeMultiple.js"
},
{
"path": "src/body/keypoints.js"
},
{
"path": "src/body/util.js"
}
]
},
"src/body/util.js": {
"bytes": 2260,
"imports": [
{
"path": "src/body/keypoints.js"
}
]
},
"src/body/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/body/keypoints.js"
}
]
},
"src/emotion/emotion.js": {
"bytes": 2767,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/profile.js"
}
]
},
"src/face/blazeface.js": {
"bytes": 6991,
"imports": [
{
@ -171,7 +295,7 @@
}
]
},
"src/facemesh/box.js": {
"src/face/box.js": {
"bytes": 1924,
"imports": [
{
@ -179,71 +303,82 @@
}
]
},
"src/facemesh/facemesh.js": {
"src/face/facemesh.js": {
"bytes": 2572,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/blazeface.js"
"path": "src/face/blazeface.js"
},
{
"path": "src/facemesh/keypoints.js"
"path": "src/face/keypoints.js"
},
{
"path": "src/facemesh/facepipeline.js"
"path": "src/face/facepipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
"path": "src/face/uvcoords.js"
},
{
"path": "src/facemesh/triangulation.js"
"path": "src/face/triangulation.js"
}
]
},
"src/facemesh/facepipeline.js": {
"bytes": 14296,
"src/face/facepipeline.js": {
"bytes": 14368,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/box.js"
"path": "src/face/box.js"
},
{
"path": "src/facemesh/keypoints.js"
"path": "src/face/keypoints.js"
},
{
"path": "src/facemesh/util.js"
"path": "src/face/util.js"
}
]
},
"src/facemesh/keypoints.js": {
"src/face/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/triangulation.js": {
"src/face/triangulation.js": {
"bytes": 12940,
"imports": []
},
"src/facemesh/util.js": {
"src/face/util.js": {
"bytes": 3078,
"imports": []
},
"src/facemesh/uvcoords.js": {
"src/face/uvcoords.js": {
"bytes": 19592,
"imports": []
},
"src/gender/ssrnet.js": {
"bytes": 2003,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/profile.js"
}
]
},
"src/gesture.js": {
"bytes": 2175,
"imports": []
},
"src/handpose/anchors.js": {
"src/hand/anchors.js": {
"bytes": 224151,
"imports": []
},
"src/handpose/box.js": {
"src/hand/box.js": {
"bytes": 3192,
"imports": [
{
@ -251,72 +386,75 @@
}
]
},
"src/handpose/handdetector.js": {
"src/hand/handdetector.js": {
"bytes": 4313,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/box.js"
"path": "src/hand/box.js"
}
]
},
"src/handpose/handpipeline.js": {
"bytes": 8657,
"src/hand/handpipeline.js": {
"bytes": 8632,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/box.js"
"path": "src/hand/box.js"
},
{
"path": "src/handpose/util.js"
"path": "src/hand/util.js"
}
]
},
"src/handpose/handpose.js": {
"src/hand/handpose.js": {
"bytes": 2839,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/handpose/handdetector.js"
"path": "src/hand/handdetector.js"
},
{
"path": "src/handpose/handpipeline.js"
"path": "src/hand/handpipeline.js"
},
{
"path": "src/handpose/anchors.js"
"path": "src/hand/anchors.js"
}
]
},
"src/handpose/util.js": {
"src/hand/util.js": {
"bytes": 3030,
"imports": []
},
"src/human.js": {
"bytes": 11165,
"bytes": 14051,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/facemesh/facemesh.js"
"path": "src/face/facemesh.js"
},
{
"path": "src/ssrnet/ssrnet.js"
"path": "src/age/ssrnet.js"
},
{
"path": "src/gender/ssrnet.js"
},
{
"path": "src/emotion/emotion.js"
},
{
"path": "src/posenet/posenet.js"
"path": "src/body/posenet.js"
},
{
"path": "src/handpose/handpose.js"
"path": "src/hand/handpose.js"
},
{
"path": "src/gesture.js"
@ -350,134 +488,10 @@
"bytes": 19352,
"imports": []
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/posenet/heapSort.js"
}
]
},
"src/posenet/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/posenet/buildParts.js"
},
{
"path": "src/posenet/decodePose.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/posenet/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1512,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
}
]
},
"src/posenet/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/posenet/modelBase.js"
}
]
},
"src/posenet/modelPoseNet.js": {
"bytes": 3447,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/modelPoseNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/util.js": {
"bytes": 2260,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/posenet/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/profile.js": {
"bytes": 1004,
"imports": []
},
"src/ssrnet/ssrnet.js": {
"bytes": 2587,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/profile.js"
}
]
},
"empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": {
"bytes": 0,
"imports": []
@ -499,7 +513,7 @@
"dist/human.js.map": {
"imports": [],
"inputs": {},
"bytes": 5409296
"bytes": 5415915
},
"dist/human.js": {
"imports": [],
@ -561,88 +575,91 @@
"node_modules/@tensorflow/tfjs/dist/tf.node.js": {
"bytesInOutput": 760
},
"src/facemesh/blazeface.js": {
"src/face/blazeface.js": {
"bytesInOutput": 3093
},
"src/facemesh/keypoints.js": {
"src/face/keypoints.js": {
"bytesInOutput": 1946
},
"src/facemesh/box.js": {
"src/face/box.js": {
"bytesInOutput": 1006
},
"src/facemesh/util.js": {
"src/face/util.js": {
"bytesInOutput": 1190
},
"src/facemesh/facepipeline.js": {
"src/face/facepipeline.js": {
"bytesInOutput": 5577
},
"src/facemesh/uvcoords.js": {
"src/face/uvcoords.js": {
"bytesInOutput": 16786
},
"src/facemesh/triangulation.js": {
"src/face/triangulation.js": {
"bytesInOutput": 9991
},
"src/facemesh/facemesh.js": {
"src/face/facemesh.js": {
"bytesInOutput": 1237
},
"src/profile.js": {
"bytesInOutput": 620
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1397
"src/age/ssrnet.js": {
"bytesInOutput": 877
},
"src/gender/ssrnet.js": {
"bytesInOutput": 1007
},
"src/emotion/emotion.js": {
"bytesInOutput": 1295
"bytesInOutput": 1334
},
"src/posenet/modelBase.js": {
"src/body/modelBase.js": {
"bytesInOutput": 433
},
"src/posenet/modelMobileNet.js": {
"src/body/modelMobileNet.js": {
"bytesInOutput": 245
},
"src/posenet/heapSort.js": {
"src/body/heapSort.js": {
"bytesInOutput": 1042
},
"src/posenet/buildParts.js": {
"src/body/buildParts.js": {
"bytesInOutput": 547
},
"src/posenet/keypoints.js": {
"src/body/keypoints.js": {
"bytesInOutput": 1633
},
"src/posenet/vectors.js": {
"src/body/vectors.js": {
"bytesInOutput": 616
},
"src/posenet/decodePose.js": {
"src/body/decodePose.js": {
"bytesInOutput": 1024
},
"src/posenet/decodeMultiple.js": {
"src/body/decodeMultiple.js": {
"bytesInOutput": 604
},
"src/posenet/util.js": {
"src/body/util.js": {
"bytesInOutput": 1062
},
"src/posenet/modelPoseNet.js": {
"bytesInOutput": 819
"src/body/modelPoseNet.js": {
"bytesInOutput": 846
},
"src/posenet/posenet.js": {
"src/body/posenet.js": {
"bytesInOutput": 474
},
"src/handpose/box.js": {
"src/hand/box.js": {
"bytesInOutput": 1398
},
"src/handpose/handdetector.js": {
"src/hand/handdetector.js": {
"bytesInOutput": 1812
},
"src/handpose/util.js": {
"src/hand/util.js": {
"bytesInOutput": 1005
},
"src/handpose/handpipeline.js": {
"bytesInOutput": 3067
"src/hand/handpipeline.js": {
"bytesInOutput": 3055
},
"src/handpose/anchors.js": {
"src/hand/anchors.js": {
"bytesInOutput": 127001
},
"src/handpose/handpose.js": {
"src/hand/handpose.js": {
"bytesInOutput": 1105
},
"src/gesture.js": {
@ -658,13 +675,13 @@
"bytesInOutput": 1300
},
"package.json": {
"bytesInOutput": 2898
"bytesInOutput": 3004
},
"src/human.js": {
"bytesInOutput": 5666
"bytesInOutput": 7412
}
},
"bytes": 1275208
"bytes": 1277602
}
}
}

dist/human.node.js (vendored, 496 lines changed): diff suppressed because one or more lines are too long

dist/human.node.json (vendored, 386 lines changed):

@ -1,145 +1,267 @@
{
"inputs": {
"config.js": {
"bytes": 7320,
"bytes": 7319,
"imports": []
},
"package.json": {
"bytes": 3260,
"bytes": 3374,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2596,
"src/age/ssrnet.js": {
"bytes": 1746,
"imports": [
{
"path": "src/profile.js"
}
]
},
"src/facemesh/blazeface.js": {
"src/body/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/body/heapSort.js"
}
]
},
"src/body/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/body/buildParts.js"
},
{
"path": "src/body/decodePose.js"
},
{
"path": "src/body/vectors.js"
}
]
},
"src/body/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/body/keypoints.js"
},
{
"path": "src/body/vectors.js"
}
]
},
"src/body/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/body/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/body/modelBase.js": {
"bytes": 1512,
"imports": []
},
"src/body/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "src/body/modelBase.js"
}
]
},
"src/body/modelPoseNet.js": {
"bytes": 3539,
"imports": [
{
"path": "src/body/modelMobileNet.js"
},
{
"path": "src/body/decodeMultiple.js"
},
{
"path": "src/body/util.js"
}
]
},
"src/body/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/body/modelMobileNet.js"
},
{
"path": "src/body/modelPoseNet.js"
},
{
"path": "src/body/decodeMultiple.js"
},
{
"path": "src/body/keypoints.js"
},
{
"path": "src/body/util.js"
}
]
},
"src/body/util.js": {
"bytes": 2260,
"imports": [
{
"path": "src/body/keypoints.js"
}
]
},
"src/body/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/body/keypoints.js"
}
]
},
"src/emotion/emotion.js": {
"bytes": 2767,
"imports": [
{
"path": "src/profile.js"
}
]
},
"src/face/blazeface.js": {
"bytes": 6991,
"imports": []
},
"src/facemesh/box.js": {
"src/face/box.js": {
"bytes": 1924,
"imports": []
},
"src/facemesh/facemesh.js": {
"src/face/facemesh.js": {
"bytes": 2572,
"imports": [
{
"path": "src/facemesh/blazeface.js"
"path": "src/face/blazeface.js"
},
{
"path": "src/facemesh/keypoints.js"
"path": "src/face/keypoints.js"
},
{
"path": "src/facemesh/facepipeline.js"
"path": "src/face/facepipeline.js"
},
{
"path": "src/facemesh/uvcoords.js"
"path": "src/face/uvcoords.js"
},
{
"path": "src/facemesh/triangulation.js"
"path": "src/face/triangulation.js"
}
]
},
"src/facemesh/facepipeline.js": {
"bytes": 14296,
"src/face/facepipeline.js": {
"bytes": 14368,
"imports": [
{
"path": "src/facemesh/box.js"
"path": "src/face/box.js"
},
{
"path": "src/facemesh/keypoints.js"
"path": "src/face/keypoints.js"
},
{
"path": "src/facemesh/util.js"
"path": "src/face/util.js"
}
]
},
"src/facemesh/keypoints.js": {
"src/face/keypoints.js": {
"bytes": 2507,
"imports": []
},
"src/facemesh/triangulation.js": {
"src/face/triangulation.js": {
"bytes": 12940,
"imports": []
},
"src/facemesh/util.js": {
"src/face/util.js": {
"bytes": 3078,
"imports": []
},
"src/facemesh/uvcoords.js": {
"src/face/uvcoords.js": {
"bytes": 19592,
"imports": []
},
"src/gender/ssrnet.js": {
"bytes": 2003,
"imports": [
{
"path": "src/profile.js"
}
]
},
"src/gesture.js": {
"bytes": 2175,
"imports": []
},
"src/handpose/anchors.js": {
"src/hand/anchors.js": {
"bytes": 224151,
"imports": []
},
"src/handpose/box.js": {
"src/hand/box.js": {
"bytes": 3192,
"imports": []
},
"src/handpose/handdetector.js": {
"src/hand/handdetector.js": {
"bytes": 4313,
"imports": [
{
"path": "src/handpose/box.js"
"path": "src/hand/box.js"
}
]
},
"src/handpose/handpipeline.js": {
"bytes": 8657,
"src/hand/handpipeline.js": {
"bytes": 8632,
"imports": [
{
"path": "src/handpose/box.js"
"path": "src/hand/box.js"
},
{
"path": "src/handpose/util.js"
"path": "src/hand/util.js"
}
]
},
"src/handpose/handpose.js": {
"src/hand/handpose.js": {
"bytes": 2839,
"imports": [
{
"path": "src/handpose/handdetector.js"
"path": "src/hand/handdetector.js"
},
{
"path": "src/handpose/handpipeline.js"
"path": "src/hand/handpipeline.js"
},
{
"path": "src/handpose/anchors.js"
"path": "src/hand/anchors.js"
}
]
},
"src/handpose/util.js": {
"src/hand/util.js": {
"bytes": 3030,
"imports": []
},
"src/human.js": {
"bytes": 11165,
"bytes": 14051,
"imports": [
{
"path": "src/facemesh/facemesh.js"
"path": "src/face/facemesh.js"
},
{
"path": "src/ssrnet/ssrnet.js"
"path": "src/age/ssrnet.js"
},
{
"path": "src/gender/ssrnet.js"
},
{
"path": "src/emotion/emotion.js"
},
{
"path": "src/posenet/posenet.js"
"path": "src/body/posenet.js"
},
{
"path": "src/handpose/handpose.js"
"path": "src/hand/handpose.js"
},
{
"path": "src/gesture.js"
@ -170,213 +292,105 @@
"bytes": 19352,
"imports": []
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
{
"path": "src/posenet/heapSort.js"
}
]
},
"src/posenet/decodeMultiple.js": {
"bytes": 5605,
"imports": [
{
"path": "src/posenet/buildParts.js"
},
{
"path": "src/posenet/decodePose.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/decodePose.js": {
"bytes": 4540,
"imports": [
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/vectors.js"
}
]
},
"src/posenet/heapSort.js": {
"bytes": 1590,
"imports": []
},
"src/posenet/keypoints.js": {
"bytes": 2291,
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1512,
"imports": []
},
"src/posenet/modelMobileNet.js": {
"bytes": 593,
"imports": [
{
"path": "src/posenet/modelBase.js"
}
]
},
"src/posenet/modelPoseNet.js": {
"bytes": 3447,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/posenet.js": {
"bytes": 830,
"imports": [
{
"path": "src/posenet/modelMobileNet.js"
},
{
"path": "src/posenet/modelPoseNet.js"
},
{
"path": "src/posenet/decodeMultiple.js"
},
{
"path": "src/posenet/keypoints.js"
},
{
"path": "src/posenet/util.js"
}
]
},
"src/posenet/util.js": {
"bytes": 2260,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/posenet/vectors.js": {
"bytes": 1273,
"imports": [
{
"path": "src/posenet/keypoints.js"
}
]
},
"src/profile.js": {
"bytes": 1004,
"imports": []
},
"src/ssrnet/ssrnet.js": {
"bytes": 2587,
"imports": [
{
"path": "src/profile.js"
}
]
}
},
"outputs": {
"dist/human.node-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 624831
"bytes": 634416
},
"dist/human.node-nobundle.js": {
"imports": [],
"inputs": {
"src/facemesh/blazeface.js": {
"src/face/blazeface.js": {
"bytesInOutput": 3082
},
"src/facemesh/keypoints.js": {
"src/face/keypoints.js": {
"bytesInOutput": 1945
},
"src/facemesh/box.js": {
"src/face/box.js": {
"bytesInOutput": 1021
},
"src/facemesh/util.js": {
"src/face/util.js": {
"bytesInOutput": 1171
},
"src/facemesh/facepipeline.js": {
"src/face/facepipeline.js": {
"bytesInOutput": 5585
},
"src/facemesh/uvcoords.js": {
"src/face/uvcoords.js": {
"bytesInOutput": 16785
},
"src/facemesh/triangulation.js": {
"src/face/triangulation.js": {
"bytesInOutput": 9989
},
"src/facemesh/facemesh.js": {
"bytesInOutput": 1259
"src/face/facemesh.js": {
"bytesInOutput": 1254
},
"src/profile.js": {
"bytesInOutput": 619
"bytesInOutput": 618
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1397
"src/age/ssrnet.js": {
"bytesInOutput": 898
},
"src/gender/ssrnet.js": {
"bytesInOutput": 1028
},
"src/emotion/emotion.js": {
"bytesInOutput": 1305
"bytesInOutput": 1344
},
"src/posenet/modelBase.js": {
"src/body/modelBase.js": {
"bytesInOutput": 455
},
"src/posenet/modelMobileNet.js": {
"src/body/modelMobileNet.js": {
"bytesInOutput": 267
},
"src/posenet/heapSort.js": {
"src/body/heapSort.js": {
"bytesInOutput": 1041
},
"src/posenet/buildParts.js": {
"src/body/buildParts.js": {
"bytesInOutput": 546
},
"src/posenet/keypoints.js": {
"src/body/keypoints.js": {
"bytesInOutput": 1621
},
"src/posenet/vectors.js": {
"src/body/vectors.js": {
"bytesInOutput": 607
},
"src/posenet/decodePose.js": {
"src/body/decodePose.js": {
"bytesInOutput": 1016
},
"src/posenet/decodeMultiple.js": {
"src/body/decodeMultiple.js": {
"bytesInOutput": 603
},
"src/posenet/util.js": {
"src/body/util.js": {
"bytesInOutput": 1053
},
"src/posenet/modelPoseNet.js": {
"bytesInOutput": 841
"src/body/modelPoseNet.js": {
"bytesInOutput": 868
},
"src/posenet/posenet.js": {
"src/body/posenet.js": {
"bytesInOutput": 459
},
"src/handpose/box.js": {
"src/hand/box.js": {
"bytesInOutput": 1419
},
"src/handpose/handdetector.js": {
"src/hand/handdetector.js": {
"bytesInOutput": 1806
},
"src/handpose/util.js": {
"src/hand/util.js": {
"bytesInOutput": 996
},
"src/handpose/handpipeline.js": {
"bytesInOutput": 3069
"src/hand/handpipeline.js": {
"bytesInOutput": 3057
},
"src/handpose/anchors.js": {
"src/hand/anchors.js": {
"bytesInOutput": 127000
},
"src/handpose/handpose.js": {
"src/hand/handpose.js": {
"bytesInOutput": 1127
},
"src/gesture.js": {
@ -392,16 +406,16 @@
"bytesInOutput": 1298
},
"package.json": {
"bytesInOutput": 2898
"bytesInOutput": 3004
},
"src/human.js": {
"bytesInOutput": 28
},
"src/human.js": {
"bytesInOutput": 5614
"bytesInOutput": 7355
}
},
"bytes": 213478
"bytes": 215903
}
}
}


@ -41,6 +41,7 @@
"scripts": {
"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",
"lint": "eslint src/*.js demo/*.js",
"dev": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation dev-server.js",
"build-iife": "esbuild --bundle --minify --platform=browser --sourcemap --target=es2018 --format=iife --external:fs --global-name=Human --metafile=dist/human.json --outfile=dist/human.js src/human.js",
"build-esm-bundle": "esbuild --bundle --minify --platform=browser --sourcemap --target=es2018 --format=esm --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/human.js",
"build-esm-nobundle": "esbuild --bundle --minify --platform=browser --sourcemap --target=es2018 --format=esm --external:@tensorflow --external:fs --metafile=dist/human.esm-nobundle.json --outfile=dist/human.esm-nobundle.js src/human.js",

src/age/ssrnet.js (new file, 59 lines):

@ -0,0 +1,59 @@
const tf = require('@tensorflow/tfjs');
const profile = require('../profile.js');
const models = {};
let last = { age: 0 };
let frame = Number.MAX_SAFE_INTEGER;
// tuning values
const zoom = [0, 0]; // 0..1 meaning 0%..100%
async function load(config) {
if (!models.age) models.age = await tf.loadGraphModel(config.face.age.modelPath);
return models.age;
}
async function predict(image, config) {
return new Promise(async (resolve) => {
if (frame < config.face.age.skipFrames) {
frame += 1;
resolve(last);
return; // without this, inference below still runs on skipped frames
}
frame = 0;
const box = [[
(image.shape[1] * zoom[0]) / image.shape[1],
(image.shape[2] * zoom[1]) / image.shape[2],
(image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],
(image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],
]];
const resize = tf.image.cropAndResize(image, box, [0], [config.face.age.inputSize, config.face.age.inputSize]);
// const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);
const enhance = tf.mul(resize, [255.0]);
tf.dispose(resize);
let ageT;
const obj = {};
if (!config.profile) {
if (config.face.age.enabled) ageT = await models.age.predict(enhance);
} else {
const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
ageT = profileAge.result.clone();
profileAge.result.dispose();
profile.run('age', profileAge);
}
enhance.dispose();
if (ageT) {
const data = ageT.dataSync();
obj.age = Math.trunc(10 * data[0]) / 10;
}
ageT.dispose();
last = obj;
resolve(obj);
});
}
exports.predict = predict;
exports.load = load;
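A hedged usage sketch for the new module (model path, input size, and skip count are illustrative; the real values live in config.js):

  const tf = require('@tensorflow/tfjs');
  const age = require('./src/age/ssrnet.js');

  const config = {
    profile: false,
    face: { age: { enabled: true, modelPath: 'file://models/age/model.json', inputSize: 64, skipFrames: 10 } },
  };

  (async () => {
    await age.load(config);
    const input = tf.zeros([1, 128, 128, 3]); // stand-in for a cropped face tensor
    console.log(await age.predict(input, config)); // e.g. { age: 32.5 }
    input.dispose();
  })();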


@ -29,25 +29,27 @@ class PoseNet {
* in the same scale as the original image
*/
async estimatePoses(input, config) {
const outputStride = config.outputStride;
// const inputResolution = config.inputResolution;
const height = input.shape[1];
const width = input.shape[2];
const resized = util.resizeTo(input, [config.inputResolution, config.inputResolution]);
const { heatmapScores, offsets, displacementFwd, displacementBwd } = this.baseModel.predict(resized);
const allTensorBuffers = await util.toTensorBuffers3D([heatmapScores, offsets, displacementFwd, displacementBwd]);
const scoresBuffer = allTensorBuffers[0];
const offsetsBuffer = allTensorBuffers[1];
const displacementsFwdBuffer = allTensorBuffers[2];
const displacementsBwdBuffer = allTensorBuffers[3];
const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, outputStride, config.maxDetections, config.scoreThreshold, config.nmsRadius);
const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.inputResolution, config.inputResolution]);
heatmapScores.dispose();
offsets.dispose();
displacementFwd.dispose();
displacementBwd.dispose();
resized.dispose();
return resultPoses;
return new Promise(async (resolve) => {
const outputStride = config.outputStride;
// const inputResolution = config.inputResolution;
const height = input.shape[1];
const width = input.shape[2];
const resized = util.resizeTo(input, [config.inputResolution, config.inputResolution]);
const { heatmapScores, offsets, displacementFwd, displacementBwd } = this.baseModel.predict(resized);
const allTensorBuffers = await util.toTensorBuffers3D([heatmapScores, offsets, displacementFwd, displacementBwd]);
const scoresBuffer = allTensorBuffers[0];
const offsetsBuffer = allTensorBuffers[1];
const displacementsFwdBuffer = allTensorBuffers[2];
const displacementsBwdBuffer = allTensorBuffers[3];
const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, outputStride, config.maxDetections, config.scoreThreshold, config.nmsRadius);
const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.inputResolution, config.inputResolution]);
heatmapScores.dispose();
offsets.dispose();
displacementFwd.dispose();
displacementBwd.dispose();
resized.dispose();
resolve(resultPoses);
});
}
dispose() {


@ -17,54 +17,57 @@ async function load(config) {
}
async function predict(image, config) {
if (frame < config.face.emotion.skipFrames) {
frame += 1;
return last;
}
frame = 0;
const box = [[
(image.shape[1] * zoom[0]) / image.shape[1],
(image.shape[2] * zoom[1]) / image.shape[2],
(image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],
(image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],
]];
const resize = tf.image.cropAndResize(image, box, [0], [config.face.emotion.inputSize, config.face.emotion.inputSize]);
// const resize = tf.image.resizeBilinear(image, [config.face.emotion.inputSize, config.face.emotion.inputSize], false);
const [red, green, blue] = tf.split(resize, 3, 3);
resize.dispose();
// weighted rgb to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html
const redNorm = tf.mul(red, rgb[0]);
const greenNorm = tf.mul(green, rgb[1]);
const blueNorm = tf.mul(blue, rgb[2]);
red.dispose();
green.dispose();
blue.dispose();
const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));
redNorm.dispose();
greenNorm.dispose();
blueNorm.dispose();
const obj = [];
if (config.face.emotion.enabled) {
let data;
if (!config.profile) {
const emotionT = await models.emotion.predict(normalize);
data = emotionT.dataSync();
tf.dispose(emotionT);
} else {
const profileData = await tf.profile(() => models.emotion.predict(grayscale));
data = profileData.result.dataSync();
profileData.result.dispose();
profile.run('emotion', profileData);
return new Promise(async (resolve) => {
if (frame < config.face.emotion.skipFrames) {
frame += 1;
resolve(last);
return; // without this, inference below still runs on skipped frames
}
for (let i = 0; i < data.length; i++) {
if (scale * data[i] > config.face.emotion.minConfidence) obj.push({ score: Math.min(0.99, Math.trunc(100 * scale * data[i]) / 100), emotion: annotations[i] });
frame = 0;
const box = [[
(image.shape[1] * zoom[0]) / image.shape[1],
(image.shape[2] * zoom[1]) / image.shape[2],
(image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],
(image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],
]];
const resize = tf.image.cropAndResize(image, box, [0], [config.face.emotion.inputSize, config.face.emotion.inputSize]);
// const resize = tf.image.resizeBilinear(image, [config.face.emotion.inputSize, config.face.emotion.inputSize], false);
const [red, green, blue] = tf.split(resize, 3, 3);
resize.dispose();
// weighted rgb to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html
const redNorm = tf.mul(red, rgb[0]);
const greenNorm = tf.mul(green, rgb[1]);
const blueNorm = tf.mul(blue, rgb[2]);
red.dispose();
green.dispose();
blue.dispose();
const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
redNorm.dispose();
greenNorm.dispose();
blueNorm.dispose();
const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));
grayscale.dispose();
const obj = [];
if (config.face.emotion.enabled) {
let data;
if (!config.profile) {
const emotionT = await models.emotion.predict(normalize);
data = emotionT.dataSync();
tf.dispose(emotionT);
} else {
const profileData = await tf.profile(() => models.emotion.predict(grayscale));
data = profileData.result.dataSync();
profileData.result.dispose();
profile.run('emotion', profileData);
}
for (let i = 0; i < data.length; i++) {
if (scale * data[i] > config.face.emotion.minConfidence) obj.push({ score: Math.min(0.99, Math.trunc(100 * scale * data[i]) / 100), emotion: annotations[i] });
}
obj.sort((a, b) => b.score - a.score);
}
obj.sort((a, b) => b.score - a.score);
}
tf.dispose(grayscale);
last = obj;
return obj;
normalize.dispose();
last = obj;
resolve(obj);
});
}
exports.predict = predict;
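The channel math above is the standard weighted rgb-to-grayscale conversion (see the mathworks link in the code). A standalone version of the same step using tf.tidy in place of the manual dispose calls (coefficients assumed to match the module's rgb constant):

  const tf = require('@tensorflow/tfjs');
  const rgb = [0.2989, 0.5870, 0.1140]; // rgb2gray luma weights
  function grayscale(image) { // image: [1, height, width, 3]
    return tf.tidy(() => {
      const [r, g, b] = tf.split(image, 3, 3);
      return tf.addN([tf.mul(r, rgb[0]), tf.mul(g, rgb[1]), tf.mul(b, rgb[2])]);
    });
  }

As in the new age and gender modules, the body is wrapped in new Promise(async (resolve) => ...); an async function already returns a promise, so this is the stylistic pattern the disabled no-async-promise-executor lint rule refers to rather than a functional requirement.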


@ -173,6 +173,7 @@ class Pipeline {
let rotatedImage = input;
let rotationMatrix = util.IDENTITY_MATRIX;
if (angle !== 0) {
// bug: input becomes disposed here when running in async mode!
rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);
}
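A common mitigation when a shared tensor may be disposed by a concurrent consumer is to work on a private copy. A sketch of one option (not the fix adopted by the library):

  if (angle !== 0) {
    const safeInput = input.clone(); // private copy; a concurrent dispose of input no longer matters
    rotatedImage = tf.image.rotateWithOffset(safeInput, angle, 0, faceCenterNormalized);
    rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);
    safeInput.dispose();
  }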

src/gender/ssrnet.js (new file, 63 lines):

@ -0,0 +1,63 @@
const tf = require('@tensorflow/tfjs');
const profile = require('../profile.js');
const models = {};
let last = { gender: '' };
let frame = Number.MAX_SAFE_INTEGER;
// tuning values
const zoom = [0, 0]; // 0..1 meaning 0%..100%
async function load(config) {
if (!models.gender) models.gender = await tf.loadGraphModel(config.face.gender.modelPath);
return models.gender;
}
async function predict(image, config) {
return new Promise(async (resolve) => {
if (frame < config.face.age.skipFrames) {
frame += 1;
resolve(last);
return; // without this, inference below still runs on skipped frames
}
frame = 0;
const box = [[
(image.shape[1] * zoom[0]) / image.shape[1],
(image.shape[2] * zoom[1]) / image.shape[2],
(image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],
(image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],
]];
const resize = tf.image.cropAndResize(image, box, [0], [config.face.age.inputSize, config.face.age.inputSize]);
// const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);
const enhance = tf.mul(resize, [255.0]);
tf.dispose(resize);
let genderT;
const obj = {};
if (!config.profile) {
if (config.face.gender.enabled) genderT = await models.gender.predict(enhance);
} else {
const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
if (profileGender.result) { // guard: profileGender is an empty object when gender is disabled
genderT = profileGender.result.clone();
profileGender.result.dispose();
profile.run('gender', profileGender);
}
}
enhance.dispose();
if (genderT) {
const data = genderT.dataSync();
const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
if (confidence > config.face.gender.minConfidence) {
obj.gender = data[0] <= 0.5 ? 'female' : 'male';
obj.confidence = confidence;
}
genderT.dispose(); // dispose inside the guard so a disabled model cannot throw here
}
last = obj;
resolve(obj);
});
}
exports.predict = predict;
exports.load = load;
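The confidence expression above maps the model's single sigmoid output onto a 0..0.95 scale; a worked example (the raw value 0.9 is made up for illustration):
const raw = 0.9; // sigmoid output: 0 => female, 1 => male
const confidence = Math.trunc(Math.abs(1.9 * 100 * (raw - 0.5))) / 100; // trunc(76) / 100 = 0.76
const label = raw <= 0.5 ? 'female' : 'male'; // 'male'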

View File

@ -97,7 +97,6 @@ class HandPipeline {
if (useFreshBox) {
this.regionsOfInterest = [];
if (!boundingBoxPredictions || boundingBoxPredictions.length === 0) {
image.dispose();
this.detectedHands = 0;
return null;
}
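Dropping image.dispose() on the early-exit path matches the ownership model this commit moves toward: the caller creates the input tensor and disposes it exactly once. A sketch of the assumed contract (videoElement is a placeholder):
const tensor = tf.browser.fromPixels(videoElement).expandDims(0); // caller owns this tensor
const hands = await handpose.estimateHands(tensor, config.hand); // pipeline no longer disposes it
tensor.dispose(); // single disposal point, even when no hands were detected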

View File

@ -1,9 +1,10 @@
const tf = require('@tensorflow/tfjs');
const facemesh = require('./face/facemesh.js');
const age = require('./age/ssrnet.js');
const gender = require('./gender/ssrnet.js');
const emotion = require('./emotion/emotion.js');
const posenet = require('./body/posenet.js');
const handpose = require('./hand/handpose.js');
const gesture = require('./gesture.js');
const image = require('./image.js');
const profile = require('./profile.js');
@ -53,6 +54,7 @@ class Human {
this.analyzeMemoryLeaks = false;
this.checkSanity = false;
this.firstRun = true;
this.perf = {};
// object that contains all initialized models
this.models = {
facemesh: null,
@ -65,10 +67,11 @@ class Human {
};
// export raw access to underlying models
this.facemesh = facemesh;
this.age = age;
this.gender = gender;
this.emotion = emotion;
this.body = posenet;
this.hand = handpose;
}
// helper function: wrapper around console output
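These accessor renames are breaking for anyone reaching into the raw models; a hypothetical migration shim for legacy call sites (not part of this commit):
const human = new Human();
human.posenet = human.body; // deprecated alias for downstream code
human.handpose = human.hand; // deprecated alias for downstream code
human.ssrnet = { age: human.age, gender: human.gender }; // the combined module is now split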
@ -109,6 +112,8 @@ class Human {
// preload models, not explicitly required as it's done automatically on first use
async load(userConfig) {
this.state = 'load';
const timeStamp = now();
if (userConfig) this.config = mergeDeep(defaults, userConfig);
if (this.firstRun) {
@ -118,35 +123,55 @@ class Human {
this.log('flags:', tf.ENV.flags);
this.firstRun = false;
}
if (this.config.async) {
[
this.models.age,
this.models.gender,
this.models.emotion,
this.models.facemesh,
this.models.posenet,
this.models.handpose,
] = await Promise.all([
this.models.age || age.load(this.config),
this.models.gender || gender.load(this.config),
this.models.emotion || emotion.load(this.config),
this.models.facemesh || facemesh.load(this.config.face),
this.models.posenet || posenet.load(this.config.body),
this.models.handpose || handpose.load(this.config.hand),
]);
} else {
if (this.config.face.enabled && !this.models.facemesh) {
this.log('load model: face');
this.models.facemesh = await facemesh.load(this.config.face);
}
if (this.config.body.enabled && !this.models.posenet) {
this.log('load model: body');
this.models.posenet = await posenet.load(this.config.body);
}
if (this.config.hand.enabled && !this.models.handpose) {
this.log('load model: hand');
this.models.handpose = await handpose.load(this.config.hand);
}
if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) {
this.log('load model: age');
this.models.age = await age.load(this.config);
}
if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) {
this.log('load model: gender');
this.models.gender = await gender.load(this.config);
}
if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) {
this.log('load model: emotion');
this.models.emotion = await emotion.load(this.config);
}
}
const current = Math.trunc(now() - timeStamp);
if (current > (this.perf.load || 0)) this.perf.load = current;
}
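With the Promise.all branch, total load time approaches that of the slowest model rather than the sum of all of them. Preloading can also be triggered explicitly instead of lazily on first detect; a usage sketch:
const human = new Human();
// with async enabled, all models download and initialize concurrently
await human.load({ async: true });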
// check if backend needs initialization if it changed
async checkBackend(force) {
const timeStamp = now();
if (force || (tf.getBackend() !== this.config.backend)) {
this.state = 'backend';
/* force backend reload
@ -174,12 +199,104 @@ class Human {
tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
await tf.ready();
}
const current = Math.trunc(now() - timeStamp);
if (current > (this.perf.backend || 0)) this.perf.backend = current;
}
async detectFace(input) {
// run facemesh, includes blazeface and iris
let timeStamp;
let ageRes;
let genderRes;
let emotionRes;
const faceRes = [];
this.state = 'run:face';
timeStamp = now();
const faces = await this.models.facemesh.estimateFaces(input, this.config.face);
this.perf.face = Math.trunc(now() - timeStamp);
for (const face of faces) {
this.analyze('Get Face');
// if something went wrong, skip this face
if (!face.image || face.image.isDisposedInternal) {
this.log('Face object is disposed:', face.image);
continue;
}
// run age, inherits face from blazeface
this.analyze('Start Age:');
if (this.config.async) {
ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};
} else {
this.state = 'run:age';
timeStamp = now();
ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};
this.perf.age = Math.trunc(now() - timeStamp);
}
// run gender, inherits face from blazeface
this.analyze('Start Gender:');
if (this.config.async) {
genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};
} else {
this.state = 'run:gender';
timeStamp = now();
genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};
this.perf.gender = Math.trunc(now() - timeStamp);
}
// run emotion, inherits face from blazeface
this.analyze('Start Emotion:');
if (this.config.async) {
emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};
} else {
this.state = 'run:emotion';
timeStamp = now();
emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
this.perf.emotion = Math.trunc(now() - timeStamp);
}
this.analyze('End Emotion:');
// if async wait for results
if (this.config.async) {
[ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);
}
this.analyze('Finish Face:');
// don't need the face tensor anymore
face.image.dispose();
// calculate iris distance
// iris: array[ bottom, left, top, right, center ]
const iris = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)
? Math.max(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0], face.annotations.rightEyeIris[3][0] - face.annotations.rightEyeIris[1][0])
: 0;
// combine results
faceRes.push({
confidence: face.confidence,
box: face.box,
mesh: face.mesh,
annotations: face.annotations,
age: ageRes.age,
gender: genderRes.gender,
genderConfidence: genderRes.confidence,
emotion: emotionRes,
iris: (iris !== 0) ? Math.trunc(100 * 11.7 /* human iris size in mm */ / iris) / 100 : 0,
});
this.analyze('End Face');
}
this.analyze('End FaceMesh:');
if (this.config.async) {
if (this.perf.face) delete this.perf.face;
if (this.perf.age) delete this.perf.age;
if (this.perf.gender) delete this.perf.gender;
if (this.perf.emotion) delete this.perf.emotion;
}
return faceRes;
}
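detectFace relies on a fire-then-gather pattern: in async mode each predict() call is started without await so the per-face models run concurrently, and a single Promise.all collects the results. The same pattern in isolation (a standalone sketch reusing the names from above):
// start all three predictions without awaiting; each call returns a promise immediately
const pending = [
age.predict(face.image, config),
gender.predict(face.image, config),
emotion.predict(face.image, config),
];
// one await point gathers everything once the slowest model finishes
const [ageRes, genderRes, emotionRes] = await Promise.all(pending);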
// main detect function
async detect(input, userConfig = {}) {
this.state = 'config';
let timeStamp;
// update configuration
@ -195,109 +312,69 @@ class Human {
}
// detection happens inside a promise
// eslint-disable-next-line no-async-promise-executor
return new Promise(async (resolve) => {
let poseRes;
let handRes;
let faceRes;
const timeStart = now();
// configure backend
await this.checkBackend();
// load models if enabled
await this.load();
if (this.config.scoped) tf.engine().startScope();
this.analyze('Start Scope:');
timeStamp = now();
const process = image.process(input, this.config);
this.perf.image = Math.trunc(now() - timeStamp);
this.analyze('Get Image:');
// run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
if (this.config.async) {
faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];
if (this.perf.face) delete this.perf.face;
} else {
this.state = 'run:face';
timeStamp = now();
faceRes = this.config.face.enabled ? await this.detectFace(process.tensor) : [];
this.perf.face = Math.trunc(now() - timeStamp);
}
// run posenet
this.analyze('Start Body:');
if (this.config.async) {
poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config.body) : [];
if (this.perf.body) delete this.perf.body;
} else {
this.state = 'run:body';
timeStamp = now();
this.analyze('Start PoseNet');
poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config.body) : [];
this.analyze('End PoseNet:');
perf.body = Math.trunc(now() - timeStamp);
this.perf.body = Math.trunc(now() - timeStamp);
}
this.analyze('End Body:');
// run handpose
this.analyze('Start Hand:');
if (this.config.async) {
handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];
if (this.perf.hand) delete this.perf.hand;
} else {
this.state = 'run:hand';
timeStamp = now();
this.analyze('Start HandPose:');
handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];
this.analyze('End HandPose:');
perf.hand = Math.trunc(now() - timeStamp);
this.perf.hand = Math.trunc(now() - timeStamp);
}
// this.analyze('End Hand:');
// if async wait for results
if (this.config.async) {
[faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);
}
process.tensor.dispose();
if (this.config.scoped) tf.engine().endScope();
this.analyze('End Scope:');
@ -306,11 +383,13 @@ class Human {
if (this.config.gesture.enabled) {
timeStamp = now();
gestureRes = { body: gesture.body(poseRes), hand: gesture.hand(handRes), face: gesture.face(faceRes) };
if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
else if (this.perf.gesture) delete this.perf.gesture;
}
this.perf.total = Math.trunc(now() - timeStart);
this.state = 'idle';
resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });
});
}
}
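End-to-end, the trade-off is visible at the call site: async mode is faster but strips per-model timings from the performance object. A hypothetical call site (inputVideo is a placeholder):
// per-model timings (face, age, gender, emotion, body, hand, gesture) are kept only when async is off
const detailed = await human.detect(inputVideo, { async: false });
console.log(detailed.performance.body, detailed.performance.total);
// in async mode only aggregate values such as backend, load, image and total remain
const fast = await human.detect(inputVideo, { async: true });
console.log(fast.performance.total);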

View File

@ -1,80 +0,0 @@
const tf = require('@tensorflow/tfjs');
const profile = require('../profile.js');
const models = {};
let last = { age: 0, gender: '' };
let frame = Number.MAX_SAFE_INTEGER;
// tuning values
const zoom = [0, 0]; // 0..1 meaning 0%..100%
async function loadAge(config) {
if (!models.age) models.age = await tf.loadGraphModel(config.face.age.modelPath);
return models.age;
}
async function loadGender(config) {
if (!models.gender) models.gender = await tf.loadGraphModel(config.face.gender.modelPath);
return models.gender;
}
async function predict(image, config) {
if (frame < config.face.age.skipFrames) {
frame += 1;
return last;
}
frame = 0;
const box = [[
(image.shape[1] * zoom[0]) / image.shape[1],
(image.shape[2] * zoom[1]) / image.shape[2],
(image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],
(image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],
]];
const resize = tf.image.cropAndResize(image, box, [0], [config.face.age.inputSize, config.face.age.inputSize]);
// const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);
const enhance = tf.mul(resize, [255.0]);
tf.dispose(resize);
const promises = [];
let ageT;
let genderT;
const obj = {};
if (!config.profile || config.async) {
if (config.face.age.enabled) promises.push(ageT = models.age.predict(enhance));
if (config.face.gender.enabled) promises.push(genderT = models.gender.predict(enhance));
await Promise.all(promises);
} else {
const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
ageT = profileAge.result.clone();
profileAge.result.dispose();
profile.run('age', profileAge);
const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
genderT = profileGender.result.clone();
profileGender.result.dispose();
profile.run('gender', profileGender);
}
if (ageT) {
const data = ageT.dataSync();
obj.age = Math.trunc(10 * data[0]) / 10;
tf.dispose(ageT);
}
if (genderT) {
const data = genderT.dataSync();
const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
if (confidence > config.face.gender.minConfidence) {
obj.gender = data[0] <= 0.5 ? 'female' : 'male';
obj.confidence = confidence;
}
tf.dispose(genderT);
}
tf.dispose(enhance);
last = obj;
return obj;
}
exports.predict = predict;
exports.loadAge = loadAge;
exports.loadGender = loadGender;