added state handling

pull/50/head
Vladimir Mandic 2020-10-17 07:15:23 -04:00
parent effd8028dd
commit e1c2d99628
19 changed files with 284 additions and 264 deletions


@ -133,15 +133,6 @@ const config = {
}
```
Note that when using `Human` in NodeJS, you must load and parse the image *before* you pass it for detection
For example:
```js
const buffer = fs.readFileSync(input);
const image = tf.node.decodeImage(buffer);
const result = human.detect(image, config);
image.dispose();
```
### Weights
Pretrained model weights are included in `./models`
@ -167,34 +158,48 @@ NodeJS:
## Usage
The `Human` library does not require special initialization.
All configuration is done in a single JSON object and all model weights are loaded dynamically upon their first usage
(and only then; `Human` will not load weights that it does not need according to the configuration).
There is only *ONE* method you need:
```js
// 'image': can be of any type of an image object: HTMLImage, HTMLVideo, HTMLMedia, Canvas, Tensor4D
// 'config': optional parameter used to override any options present in default configuration
// configuration is fully dynamic and can change between different calls to 'detect()'
const result = await human.detect(image, config?)
```
or, if you want to use promises:
```js
human.detect(image, config?).then((result) => {
  // your code
})
```
Additionally, the `Human` library exposes several objects and methods:
```js
human.config // access to configuration object, normally set as parameter to detect()
human.defaults // read-only view of default configuration object
human.models // dynamically maintained list of loaded models
human.tf // instance of tfjs used by human
human.state // <string> describing current operation in progress
// progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
human.load(config) // explicitly call load method that loads configured models
// if you want to pre-load them instead of on-demand loading during 'human.detect()'
```
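For example, models can be pre-loaded at startup instead of being loaded lazily on the first call to `detect()`. A minimal sketch (the `enabled` flags shown are illustrative overrides rather than the library defaults, and `image` is any supported input type):
```js
// pre-load the models selected by the configuration so the first detect() call starts faster
await human.load({ face: { enabled: true }, body: { enabled: true }, hand: { enabled: false } });
// run detection; human.state reflects the current phase ('load', 'run:<model>', 'idle', ...)
const result = await human.detect(image);
console.log(human.state, result.performance);
```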
Note that when using the `Human` library in `NodeJS`, you must load and parse the image *before* you pass it for detection, and dispose of it afterwards.
For example:
```js
const imageFile = '../assets/sample1.jpg';
const buffer = fs.readFileSync(imageFile);
const image = tf.node.decodeImage(buffer);
const result = await human.detect(image, config);
image.dispose();
```
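If detection might throw, wrapping the call in `try/finally` still guarantees the decoded tensor is released. A minimal sketch reusing `imageFile`, `tf`, `fs`, and `config` from the snippet above:
```js
const image = tf.node.decodeImage(fs.readFileSync(imageFile));
try {
  const result = await human.detect(image, config);
  console.log('faces detected:', result.face.length);
} finally {
  image.dispose(); // release the tensor even if detect() throws
}
```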
<hr>
@ -213,7 +218,7 @@ Configuration object is large, but typically you only need to modify a few values:
```js
config = {
backend: 'webgl', // select tfjs backend to use
console: true, // enable debugging output to console
face: {
@ -221,9 +226,9 @@ export default {
// face.enabled is required for all face models: detector, mesh, iris, age, gender, emotion
// note: module is not loaded until it is required
detector: {
modelPath: '../models/blazeface/back/model.json', // can be 'front' or 'back'
// 'front' is optimized for large faces such as front-facing camera and 'back' is optimized for distant faces
inputSize: 256, // fixed value: 128 for 'front' and 256 for 'back'
maxFaces: 10, // maximum number of faces detected in the input, should be set to the minimum number for performance
skipFrames: 10, // how many frames to go without re-running the face bounding box detector
// if model is running at 25 FPS, we can re-use existing bounding box for updated face mesh analysis


@ -9,9 +9,9 @@ export default {
// face.enabled is required for all face models: detector, mesh, iris, age, gender, emotion
// (note: module is not loaded until it is required)
detector: {
modelPath: '../models/blazeface/back/model.json', // can be 'front' or 'back'
// 'front' is optimized for large faces such as front-facing camera and 'back' is optimized for distant faces
inputSize: 256, // fixed value: 128 for 'front' and 256 for 'back'
maxFaces: 10, // maximum number of faces detected in the input, should be set to the minimum number for performance
skipFrames: 10, // how many frames to go without re-running the face bounding box detector
// if model is running at 25 FPS, we can re-use existing bounding box for updated face mesh analysis
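Since `detect()` merges a user-supplied object over these defaults, any of the values above can also be overridden per call. A minimal sketch (the override values are illustrative only):
```js
// per-call override: limit detection to a single face and skip hand tracking
const result = await human.detect(image, {
  face: { detector: { maxFaces: 1, skipFrames: 0 } },
  hand: { enabled: false },
});
```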


@ -5,13 +5,14 @@ import human from '../dist/human.esm.js';
const ui = {
baseColor: 'rgba(255, 200, 255, 0.3)',
baseLabel: 'rgba(255, 200, 255, 0.9)',
baseFontProto: 'small-caps {size} "Segoe UI"',
baseLineWidth: 16,
baseLineHeightProto: 2,
columns: 3,
busy: false,
facing: 'user',
worker: 'worker.js',
samples: ['../assets/sample1.jpg', '../assets/sample2.jpg', '../assets/sample3.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample6.jpg'],
};
const config = {
@ -285,82 +286,6 @@ async function runHumanDetect(input, canvas) {
}
}
function setupUI() {
// add all variables to ui control panel
settings = QuickSettings.create(10, 10, 'Settings', document.getElementById('main'));
const style = document.createElement('style');
// style.type = 'text/css';
style.innerHTML = `
.qs_main { font: 1rem "Segoe UI"; }
.qs_label { font: 0.8rem "Segoe UI"; }
.qs_content { background: darkslategray; }
.qs_container { background: transparent; color: white; margin: 6px; padding: 6px; }
.qs_checkbox_label { top: 2px; }
.qs_button { width: -webkit-fill-available; font: 1rem "Segoe UI"; cursor: pointer; }
`;
document.getElementsByTagName('head')[0].appendChild(style);
settings.addButton('Play/Pause', () => {
const video = document.getElementById('video');
const canvas = document.getElementById('canvas');
if (!video.paused) {
document.getElementById('log').innerText += '\nPaused ...';
video.pause();
} else {
document.getElementById('log').innerText += '\nStarting Human Library ...';
video.play();
}
runHumanDetect(video, canvas);
});
settings.addDropDown('Backend', ['webgl', 'wasm', 'cpu'], async (val) => config.backend = val.value);
settings.addHTML('title', 'Enabled Models'); settings.hideTitle('title');
settings.addBoolean('Face Detect', config.face.enabled, (val) => config.face.enabled = val);
settings.addBoolean('Face Mesh', config.face.mesh.enabled, (val) => config.face.mesh.enabled = val);
settings.addBoolean('Face Iris', config.face.iris.enabled, (val) => config.face.iris.enabled = val);
settings.addBoolean('Face Age', config.face.age.enabled, (val) => config.face.age.enabled = val);
settings.addBoolean('Face Gender', config.face.gender.enabled, (val) => config.face.gender.enabled = val);
settings.addBoolean('Face Emotion', config.face.emotion.enabled, (val) => config.face.emotion.enabled = val);
settings.addBoolean('Body Pose', config.body.enabled, (val) => config.body.enabled = val);
settings.addBoolean('Hand Pose', config.hand.enabled, (val) => config.hand.enabled = val);
settings.addHTML('title', 'Model Parameters'); settings.hideTitle('title');
settings.addRange('Max Objects', 1, 20, 5, 1, (val) => {
config.face.detector.maxFaces = parseInt(val);
config.body.maxDetections = parseInt(val);
});
settings.addRange('Skip Frames', 1, 20, config.face.detector.skipFrames, 1, (val) => {
config.face.detector.skipFrames = parseInt(val);
config.face.emotion.skipFrames = parseInt(val);
config.face.age.skipFrames = parseInt(val);
config.hand.skipFrames = parseInt(val);
});
settings.addRange('Min Confidence', 0.1, 1.0, config.face.detector.minConfidence, 0.05, (val) => {
config.face.detector.minConfidence = parseFloat(val);
config.face.emotion.minConfidence = parseFloat(val);
config.hand.minConfidence = parseFloat(val);
});
settings.addRange('Score Threshold', 0.1, 1.0, config.face.detector.scoreThreshold, 0.05, (val) => {
config.face.detector.scoreThreshold = parseFloat(val);
config.hand.scoreThreshold = parseFloat(val);
config.body.scoreThreshold = parseFloat(val);
});
settings.addRange('IOU Threshold', 0.1, 1.0, config.face.detector.iouThreshold, 0.05, (val) => {
config.face.detector.iouThreshold = parseFloat(val);
config.hand.iouThreshold = parseFloat(val);
});
settings.addHTML('title', 'UI Options'); settings.hideTitle('title');
settings.addBoolean('Use Web Worker', false);
settings.addBoolean('Camera Front/Back', true, (val) => {
ui.facing = val ? 'user' : 'environment';
// eslint-disable-next-line no-use-before-define
setupCamera();
});
settings.addBoolean('Draw Boxes', true);
settings.addBoolean('Draw Points', true);
settings.addBoolean('Draw Polygons', true);
settings.addBoolean('Fill Polygons', true);
settings.addHTML('line1', '<hr>'); settings.hideTitle('line1');
settings.addRange('FPS', 0, 100, 0, 1);
}
// eslint-disable-next-line no-unused-vars
async function setupCamera() {
if (ui.busy) return null;
@ -409,7 +334,6 @@ async function processImage(input) {
ui.baseLabel = 'rgba(200, 255, 255, 0.8)';
ui.baseFont = 'small-caps 3.5rem "Segoe UI"';
ui.baseLineWidth = 16;
ui.baseLineHeight = 5;
ui.columns = 3;
const cfg = {
backend: 'webgl',
@ -450,28 +374,105 @@ async function processImage(input) {
});
}
async function detectVideo() {
document.getElementById('samples').style.display = 'none';
document.getElementById('canvas').style.display = 'block';
const video = document.getElementById('video');
const canvas = document.getElementById('canvas');
ui.baseFont = ui.baseFontProto.replace(/{size}/, '1.2rem');
ui.baseLineHeight = ui.baseLineHeightProto;
if (!video.paused) {
document.getElementById('log').innerText += '\nPaused ...';
video.pause();
} else {
await setupCamera();
document.getElementById('log').innerText += '\nStarting Human Library ...';
video.play();
}
runHumanDetect(video, canvas);
}
// eslint-disable-next-line no-unused-vars
async function detectSampleImages() {
ui.baseFont = ui.baseFontProto.replace(/{size}/, `${ui.columns}rem`);
ui.baseLineHeight = ui.baseLineHeightProto * ui.columns;
document.getElementById('canvas').style.display = 'none';
document.getElementById('samples').style.display = 'block';
log('Running detection of sample images');
for (const sample of ui.samples) await processImage(sample);
}
function setupUI() {
// add all variables to ui control panel
settings = QuickSettings.create(10, 10, 'Settings', document.getElementById('main'));
const style = document.createElement('style');
style.innerHTML = `
.qs_main { font: 1rem "Segoe UI"; }
.qs_label { font: 0.8rem "Segoe UI"; }
.qs_content { background: darkslategray; }
.qs_container { background: transparent; color: white; margin: 6px; padding: 6px; }
.qs_checkbox_label { top: 2px; }
.qs_button { width: -webkit-fill-available; font: 1rem "Segoe UI"; cursor: pointer; }
`;
document.getElementsByTagName('head')[0].appendChild(style);
settings.addButton('Play/Pause WebCam', () => detectVideo());
settings.addButton('Process Images', () => detectSampleImages());
settings.addDropDown('Backend', ['webgl', 'wasm', 'cpu'], async (val) => config.backend = val.value);
settings.addHTML('title', 'Enabled Models'); settings.hideTitle('title');
settings.addBoolean('Face Detect', config.face.enabled, (val) => config.face.enabled = val);
settings.addBoolean('Face Mesh', config.face.mesh.enabled, (val) => config.face.mesh.enabled = val);
settings.addBoolean('Face Iris', config.face.iris.enabled, (val) => config.face.iris.enabled = val);
settings.addBoolean('Face Age', config.face.age.enabled, (val) => config.face.age.enabled = val);
settings.addBoolean('Face Gender', config.face.gender.enabled, (val) => config.face.gender.enabled = val);
settings.addBoolean('Face Emotion', config.face.emotion.enabled, (val) => config.face.emotion.enabled = val);
settings.addBoolean('Body Pose', config.body.enabled, (val) => config.body.enabled = val);
settings.addBoolean('Hand Pose', config.hand.enabled, (val) => config.hand.enabled = val);
settings.addHTML('title', 'Model Parameters'); settings.hideTitle('title');
settings.addRange('Max Objects', 1, 20, 5, 1, (val) => {
config.face.detector.maxFaces = parseInt(val);
config.body.maxDetections = parseInt(val);
});
settings.addRange('Skip Frames', 1, 20, config.face.detector.skipFrames, 1, (val) => {
config.face.detector.skipFrames = parseInt(val);
config.face.emotion.skipFrames = parseInt(val);
config.face.age.skipFrames = parseInt(val);
config.hand.skipFrames = parseInt(val);
});
settings.addRange('Min Confidence', 0.1, 1.0, config.face.detector.minConfidence, 0.05, (val) => {
config.face.detector.minConfidence = parseFloat(val);
config.face.emotion.minConfidence = parseFloat(val);
config.hand.minConfidence = parseFloat(val);
});
settings.addRange('Score Threshold', 0.1, 1.0, config.face.detector.scoreThreshold, 0.05, (val) => {
config.face.detector.scoreThreshold = parseFloat(val);
config.hand.scoreThreshold = parseFloat(val);
config.body.scoreThreshold = parseFloat(val);
});
settings.addRange('IOU Threshold', 0.1, 1.0, config.face.detector.iouThreshold, 0.05, (val) => {
config.face.detector.iouThreshold = parseFloat(val);
config.hand.iouThreshold = parseFloat(val);
});
settings.addHTML('title', 'UI Options'); settings.hideTitle('title');
settings.addBoolean('Use Web Worker', false);
settings.addBoolean('Camera Front/Back', true, (val) => {
ui.facing = val ? 'user' : 'environment';
setupCamera();
});
settings.addBoolean('Draw Boxes', true);
settings.addBoolean('Draw Points', true);
settings.addBoolean('Draw Polygons', true);
settings.addBoolean('Fill Polygons', true);
settings.addHTML('line1', '<hr>'); settings.hideTitle('line1');
settings.addRange('FPS', 0, 100, 0, 1);
}
async function main() {
log('Human demo starting ...');
setupUI();
// setup ui control panel
await setupUI();
const msg = `Human ready: version: ${human.version} TensorFlow/JS version: ${human.tf.version_core}`;
document.getElementById('log').innerText += '\n' + msg;
log(msg);
// use one of the two:
await setupCamera();
// await detectSampleImages();
}
window.onload = main;

dist/human.cjs vendored

@ -3823,6 +3823,8 @@ var require_facemesh = __commonJS((exports2) => {
tf2.dispose(image);
const results = [];
for (const prediction of predictions || []) {
if (prediction.isDisposedInternal)
continue;
const confidence = prediction.confidence.arraySync();
if (confidence >= this.config.detector.minConfidence) {
const mesh = prediction.coords ? prediction.coords.arraySync() : null;
@ -5280,20 +5282,16 @@ async function detect(input, userConfig) {
models.emotion = await emotion.load(config);
const perf = {};
let timeStamp;
timeStamp = now();
tf.engine().startScope();
timeStamp = now();
const poseRes = config.body.enabled ? await models.posenet.estimatePoses(input, config.body) : [];
tf.engine().endScope();
perf.body = Math.trunc(now() - timeStamp);
timeStamp = now();
tf.engine().startScope();
const handRes = config.hand.enabled ? await models.handpose.estimateHands(input, config.hand) : [];
tf.engine().endScope();
perf.hand = Math.trunc(now() - timeStamp);
const faceRes = [];
if (config.face.enabled) {
timeStamp = now();
tf.engine().startScope();
const faces = await models.facemesh.estimateFaces(input, config.face);
perf.face = Math.trunc(now() - timeStamp);
for (const face of faces) {
@ -5321,8 +5319,8 @@ async function detect(input, userConfig) {
iris: iris !== 0 ? Math.trunc(100 * 11.7 / iris) / 100 : 0
});
}
tf.engine().endScope();
}
tf.engine().endScope();
perf.total = Object.values(perf).reduce((a, b) => a + b);
resolve({face: faceRes, body: poseRes, hand: handRes, performance: perf});
});

dist/human.cjs.json vendored

@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
"bytes": 4536,
"imports": []
},
"package.json": {
@ -21,7 +21,7 @@
"imports": []
},
"src/facemesh/facemesh.js": {
"bytes": 2649,
"imports": [
{
"path": "src/facemesh/blazeface.js"
@ -116,7 +116,7 @@
"imports": []
},
"src/index.js": {
"bytes": 6474,
"imports": [
{
"path": "src/facemesh/facemesh.js"
@ -253,7 +253,7 @@
"dist/human.cjs.map": {
"imports": [],
"inputs": {},
"bytes": 216628
},
"dist/human.cjs": {
"imports": [],
@ -280,7 +280,7 @@
"bytesInOutput": 23311
},
"src/facemesh/facemesh.js": {
"bytesInOutput": 2758
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 2068
@ -346,10 +346,10 @@
"bytesInOutput": 2748
},
"src/index.js": {
"bytesInOutput": 5148
}
},
"bytes": 132178
}
}
}

dist/human.cjs.map vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
"bytes": 4536,
"imports": []
},
"package.json": {
@ -21,7 +21,7 @@
"imports": []
},
"src/facemesh/facemesh.js": {
"bytes": 2649,
"imports": [
{
"path": "src/facemesh/blazeface.js"
@ -116,7 +116,7 @@
"imports": []
},
"src/index.js": {
"bytes": 6474,
"imports": [
{
"path": "src/facemesh/facemesh.js"
@ -253,7 +253,7 @@
"dist/human.esm-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 194920
},
"dist/human.esm-nobundle.js": {
"imports": [],
@ -280,7 +280,7 @@
"bytesInOutput": 9995
},
"src/facemesh/facemesh.js": {
"bytesInOutput": 1320
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1099
@ -346,10 +346,10 @@
"bytesInOutput": 2275
},
"src/index.js": {
"bytesInOutput": 2904
}
},
"bytes": 68538
}
}
}

dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.json vendored

@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
"bytes": 4536,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@ -177,7 +177,7 @@
]
},
"src/facemesh/facemesh.js": {
"bytes": 2649,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -291,7 +291,7 @@
"imports": []
},
"src/index.js": {
"bytes": 6474,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -464,7 +464,7 @@
"dist/human.esm.js.map": {
"imports": [],
"inputs": {},
"bytes": 4955971
},
"dist/human.esm.js": {
"imports": [],
@ -548,7 +548,7 @@
"bytesInOutput": 9996
},
"src/facemesh/facemesh.js": {
"bytesInOutput": 1306
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1100
@ -614,10 +614,10 @@
"bytesInOutput": 2276
},
"src/index.js": {
"bytesInOutput": 2963
}
},
"bytes": 1105435
}
}
}

dist/human.js vendored

File diff suppressed because one or more lines are too long

dist/human.js.map vendored

File diff suppressed because one or more lines are too long

dist/human.json vendored

@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
"bytes": 4536,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@ -177,7 +177,7 @@
]
},
"src/facemesh/facemesh.js": {
"bytes": 2649,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -291,7 +291,7 @@
"imports": []
},
"src/index.js": {
"bytes": 6474,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -464,7 +464,7 @@
"dist/human.js.map": {
"imports": [],
"inputs": {},
"bytes": 4955971
},
"dist/human.js": {
"imports": [],
@ -548,7 +548,7 @@
"bytesInOutput": 9996
},
"src/facemesh/facemesh.js": {
"bytesInOutput": 1306
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1100
@ -614,10 +614,10 @@
"bytesInOutput": 2276
},
"src/index.js": {
"bytesInOutput": 2963
}
},
"bytes": 1105444
}
}
}

File diff suppressed because one or more lines are too long


@ -21,6 +21,8 @@ class MediaPipeFaceMesh {
tf.dispose(image);
const results = [];
for (const prediction of (predictions || [])) {
// guard against disposed tensors on long-running operations, such as a pause in the middle of processing
if (prediction.isDisposedInternal) continue;
const confidence = prediction.confidence.arraySync();
if (confidence >= this.config.detector.minConfidence) {
const mesh = prediction.coords ? prediction.coords.arraySync() : null;


@ -8,6 +8,7 @@ const defaults = require('../config.js').default;
const app = require('../package.json');
let config;
let state = 'idle';
// object that contains all initialized models
const models = {
@ -61,10 +62,22 @@ function sanity(input) {
return null;
}
async function load(userConfig) {
if (userConfig) config = mergeDeep(defaults, userConfig);
if (config.face.enabled && !models.facemesh) models.facemesh = await facemesh.load(config.face);
if (config.body.enabled && !models.posenet) models.posenet = await posenet.load(config.body);
if (config.hand.enabled && !models.handpose) models.handpose = await handpose.load(config.hand);
if (config.face.enabled && config.face.age.enabled && !models.age) models.age = await ssrnet.loadAge(config);
if (config.face.enabled && config.face.gender.enabled && !models.gender) models.gender = await ssrnet.loadGender(config);
if (config.face.enabled && config.face.emotion.enabled && !models.emotion) models.emotion = await emotion.load(config);
}
async function detect(input, userConfig = {}) {
state = 'config';
config = mergeDeep(defaults, userConfig);
// sanity checks
state = 'check';
const error = sanity(input);
if (error) {
log(error, input);
@ -79,6 +92,7 @@ async function detect(input, userConfig) {
// configure backend
if (tf.getBackend() !== config.backend) {
state = 'backend';
log('Human library setting backend:', config.backend);
await tf.setBackend(config.backend);
await tf.ready();
@ -91,35 +105,31 @@ async function detect(input, userConfig) {
// }
// load models if enabled
state = 'load';
await load();
if (config.hand.enabled && !models.handpose) models.handpose = await handpose.load(config.hand);
if (config.face.enabled && config.face.age.enabled && !models.age) models.age = await ssrnet.loadAge(config);
if (config.face.enabled && config.face.gender.enabled && !models.gender) models.gender = await ssrnet.loadGender(config);
if (config.face.enabled && config.face.emotion.enabled && !models.emotion) models.emotion = await emotion.load(config);
const perf = {};
let timeStamp;
// run posenet
timeStamp = now();
tf.engine().startScope();
// run posenet
state = 'run:body';
timeStamp = now();
const poseRes = config.body.enabled ? await models.posenet.estimatePoses(input, config.body) : [];
tf.engine().endScope();
perf.body = Math.trunc(now() - timeStamp);
// run handpose
state = 'run:hand';
timeStamp = now();
tf.engine().startScope();
const handRes = config.hand.enabled ? await models.handpose.estimateHands(input, config.hand) : [];
tf.engine().endScope();
perf.hand = Math.trunc(now() - timeStamp);
// run facemesh, includes blazeface and iris
const faceRes = [];
if (config.face.enabled) {
state = 'run:face';
timeStamp = now();
tf.engine().startScope();
const faces = await models.facemesh.estimateFaces(input, config.face);
perf.face = Math.trunc(now() - timeStamp);
for (const face of faces) {
@ -129,10 +139,12 @@ async function detect(input, userConfig) {
continue;
}
// run ssr-net age & gender, inherits face from blazeface
state = 'run:agegender';
timeStamp = now();
const ssrData = (config.face.age.enabled || config.face.gender.enabled) ? await ssrnet.predict(face.image, config) : {};
perf.agegender = Math.trunc(now() - timeStamp);
// run emotion, inherits face from blazeface
state = 'run:emotion';
timeStamp = now();
const emotionData = config.face.emotion.enabled ? await emotion.predict(face.image, config) : {};
perf.emotion = Math.trunc(now() - timeStamp);
@ -154,12 +166,14 @@ async function detect(input, userConfig) {
iris: (iris !== 0) ? Math.trunc(100 * 11.7 /* human iris size in mm */ / iris) / 100 : 0,
});
}
state = 'idle';
}
// set depthwiseconv to original value
// tf.env().set('WEBGL_PACK_DEPTHWISECONV', savedWebglPackDepthwiseConvFlag);
tf.engine().endScope();
// combine and return results
perf.total = Object.values(perf).reduce((a, b) => a + b);
resolve({ face: faceRes, body: poseRes, hand: handRes, performance: perf });
@ -176,3 +190,4 @@ exports.posenet = posenet;
exports.handpose = handpose;
exports.tf = tf;
exports.version = app.version;
exports.state = state;
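With the state handling added above, a caller can observe where the detection pipeline currently is. A minimal sketch, assuming the documented behaviour that `human.state` reflects the internal `state` value while `detect()` runs (the polling interval and log format are arbitrary):
```js
// kick off detection and log the pipeline phase while it runs
// expected progression: 'config' -> 'check' -> 'backend' -> 'load' -> 'run:<model>' -> 'idle'
const pending = human.detect(image, config);
const timer = setInterval(() => console.log('human state:', human.state), 50);
const result = await pending;
clearInterval(timer);
console.log('done:', result.performance);
```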