parallelized agegender operations

pull/50/head
Vladimir Mandic 2020-10-17 11:38:24 -04:00
parent aaa170a445
commit 3776c29c12
15 changed files with 187 additions and 153 deletions

README.md

@@ -43,7 +43,7 @@ There are multiple ways to use `Human` library, pick one that suits you:
 - `dist/human.esm-nobundle.js`: ESM format non-minified bundle without TFJS for Browsers
 - `dist/human.cjs`: CommonJS format non-minified bundle without TFJS for NodeJS
-All versions include `sourcemap`
+All versions include `sourcemap` and build `manifest`
 Defaults:
 ```json
@@ -348,12 +348,15 @@ result = {
     }
   ],
   performance = {    // performance data of last execution for each module, measured in milliseconds
-    body,
-    hand,
-    face,
-    agegender,
-    emotion,
-    total,
+    config,          // time to parse configuration
+    load,            // time to load models
+    sanity,          // time for input verification
+    body,            // model time
+    hand,            // model time
+    face,            // model time
+    agegender,       // model time
+    emotion,         // model time
+    total,           // end to end time
   }
 }
 ```

dist/human.cjs (vendored)

@@ -3915,15 +3915,21 @@ var require_ssrnet = __commonJS((exports2) => {
       } else {
         enhance = await getImage(image, config2.face.age.inputSize);
       }
+      const promises = [];
+      let ageT;
+      let genderT;
+      if (config2.face.age.enabled)
+        promises.push(ageT = models2.age.predict(enhance));
+      if (config2.face.gender.enabled)
+        promises.push(genderT = models2.gender.predict(enhance));
+      await Promise.all(promises);
       const obj = {};
-      if (config2.face.age.enabled) {
-        const ageT = await models2.age.predict(enhance);
+      if (ageT) {
         const data = await ageT.data();
         obj.age = Math.trunc(10 * data[0]) / 10;
         tf2.dispose(ageT);
       }
-      if (config2.face.gender.enabled) {
-        const genderT = await models2.gender.predict(enhance);
+      if (genderT) {
         const data = await genderT.data();
         const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
         if (confidence > config2.face.gender.minConfidence) {
@@ -5132,7 +5138,7 @@ var require_config = __commonJS((exports2) => {
 var require_package = __commonJS((exports2, module2) => {
   module2.exports = {
     name: "@vladmandic/human",
-    version: "0.3.5",
+    version: "0.3.6",
     description: "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
     sideEffects: false,
     main: "dist/human.cjs",
@@ -5285,27 +5291,36 @@ async function load(userConfig) {
 }
 async function detect(input, userConfig = {}) {
   state = "config";
+  const perf = {};
+  let timeStamp;
+  timeStamp = now();
   config = mergeDeep(defaults, userConfig);
+  perf.config = Math.trunc(now() - timeStamp);
+  timeStamp = now();
   state = "check";
   const error = sanity(input);
   if (error) {
     log(error, input);
     return {error};
   }
+  perf.sanity = Math.trunc(now() - timeStamp);
   return new Promise(async (resolve) => {
+    const timeStart = now();
     const loadedModels = Object.values(models).filter((a) => a).length;
     if (loadedModels === 0)
       log("Human library starting");
+    timeStamp = now();
     if (tf.getBackend() !== config.backend) {
      state = "backend";
       log("Human library setting backend:", config.backend);
       await tf.setBackend(config.backend);
       await tf.ready();
     }
+    perf.body = Math.trunc(now() - timeStamp);
+    timeStamp = now();
     state = "load";
     await load();
-    const perf = {};
-    let timeStamp;
+    perf.load = Math.trunc(now() - timeStamp);
     if (config.scoped)
       tf.engine().startScope();
     analyze("Start Detect:");
@@ -5342,7 +5357,6 @@ async function detect(input, userConfig = {}) {
     const emotionData = config.face.emotion.enabled ? await emotion.predict(face.image, config) : {};
     perf.emotion = Math.trunc(now() - timeStamp);
     face.image.dispose();
-    delete face.image;
     const iris = face.annotations.leftEyeIris && face.annotations.rightEyeIris ? Math.max(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0], face.annotations.rightEyeIris[3][0] - face.annotations.rightEyeIris[1][0]) : 0;
     faceRes.push({
       confidence: face.confidence,
@@ -5362,7 +5376,7 @@ async function detect(input, userConfig = {}) {
     if (config.scoped)
       tf.engine().endScope();
     analyze("End Scope:");
-    perf.total = Object.values(perf).reduce((a, b) => a + b);
+    perf.total = Math.trunc(now() - timeStart);
    resolve({face: faceRes, body: poseRes, hand: handRes, performance: perf});
   });
 }

dist/human.cjs.json (vendored)

@@ -116,7 +116,7 @@
       "imports": []
     },
     "src/index.js": {
-      "bytes": 7175,
+      "bytes": 7437,
       "imports": [
         {
           "path": "src/facemesh/facemesh.js"
@@ -245,7 +245,7 @@
       ]
     },
     "src/ssrnet/ssrnet.js": {
-      "bytes": 1856,
+      "bytes": 1965,
       "imports": []
     }
   },
@@ -253,7 +253,7 @@
     "dist/human.cjs.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 219147
+      "bytes": 219752
     },
     "dist/human.cjs": {
       "imports": [],
@@ -283,7 +283,7 @@
         "bytesInOutput": 2950
       },
       "src/ssrnet/ssrnet.js": {
-        "bytesInOutput": 2068
+        "bytesInOutput": 2194
       },
       "src/emotion/emotion.js": {
         "bytesInOutput": 2134
@@ -346,10 +346,10 @@
         "bytesInOutput": 2748
       },
       "src/index.js": {
-        "bytesInOutput": 6171
+        "bytesInOutput": 6431
       }
     },
-    "bytes": 133638
+    "bytes": 134024
   }
 }

dist/human.cjs.map (vendored)

File diff suppressed because one or more lines are too long

dist/human.esm-nobundle.js (vendored)

File diff suppressed because one or more lines are too long

dist/human.esm-nobundle.js.map (vendored)

File diff suppressed because one or more lines are too long

dist/human.esm-nobundle.json (vendored)

@@ -116,7 +116,7 @@
       "imports": []
     },
     "src/index.js": {
-      "bytes": 7175,
+      "bytes": 7437,
       "imports": [
         {
           "path": "src/facemesh/facemesh.js"
@@ -245,7 +245,7 @@
       ]
     },
     "src/ssrnet/ssrnet.js": {
-      "bytes": 1856,
+      "bytes": 1965,
       "imports": []
     }
   },
@@ -253,7 +253,7 @@
     "dist/human.esm-nobundle.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 197443
+      "bytes": 198045
     },
     "dist/human.esm-nobundle.js": {
       "imports": [],
@@ -283,7 +283,7 @@
         "bytesInOutput": 1391
       },
       "src/ssrnet/ssrnet.js": {
-        "bytesInOutput": 1099
+        "bytesInOutput": 1149
       },
       "src/emotion/emotion.js": {
         "bytesInOutput": 1148
@@ -346,10 +346,10 @@
         "bytesInOutput": 2275
       },
       "src/index.js": {
-        "bytesInOutput": 3410
+        "bytesInOutput": 3513
       }
     },
-    "bytes": 69193
+    "bytes": 69346
   }
 }

dist/human.esm.js (vendored)

File diff suppressed because one or more lines are too long

dist/human.esm.js.map (vendored)

File diff suppressed because one or more lines are too long

dist/human.esm.json (vendored)

@@ -291,7 +291,7 @@
       "imports": []
     },
     "src/index.js": {
-      "bytes": 7175,
+      "bytes": 7437,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -436,7 +436,7 @@
       ]
     },
     "src/ssrnet/ssrnet.js": {
-      "bytes": 1856,
+      "bytes": 1965,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -464,7 +464,7 @@
     "dist/human.esm.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 4958494
+      "bytes": 4959096
     },
     "dist/human.esm.js": {
       "imports": [],
@@ -551,7 +551,7 @@
         "bytesInOutput": 1376
       },
       "src/ssrnet/ssrnet.js": {
-        "bytesInOutput": 1100
+        "bytesInOutput": 1150
       },
       "src/emotion/emotion.js": {
         "bytesInOutput": 1143
@@ -614,10 +614,10 @@
         "bytesInOutput": 2276
       },
       "src/index.js": {
-        "bytesInOutput": 3495
+        "bytesInOutput": 3608
       }
     },
-    "bytes": 1106100
+    "bytes": 1106263
   }
 }

dist/human.js (vendored)

File diff suppressed because one or more lines are too long

dist/human.js.map (vendored)

File diff suppressed because one or more lines are too long

dist/human.json (vendored)

@@ -291,7 +291,7 @@
       "imports": []
     },
     "src/index.js": {
-      "bytes": 7175,
+      "bytes": 7437,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -436,7 +436,7 @@
       ]
     },
     "src/ssrnet/ssrnet.js": {
-      "bytes": 1856,
+      "bytes": 1965,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -464,7 +464,7 @@
     "dist/human.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 4958494
+      "bytes": 4959096
     },
     "dist/human.js": {
       "imports": [],
@@ -551,7 +551,7 @@
         "bytesInOutput": 1376
       },
       "src/ssrnet/ssrnet.js": {
-        "bytesInOutput": 1100
+        "bytesInOutput": 1150
      },
       "src/emotion/emotion.js": {
         "bytesInOutput": 1143
@@ -614,10 +614,10 @@
         "bytesInOutput": 2276
       },
       "src/index.js": {
-        "bytesInOutput": 3495
+        "bytesInOutput": 3608
       }
     },
-    "bytes": 1106109
+    "bytes": 1106272
   }
 }

src/index.js

@@ -89,36 +89,46 @@ async function load(userConfig) {
 async function detect(input, userConfig = {}) {
   state = 'config';
+  const perf = {};
+  let timeStamp;
+  timeStamp = now();
   config = mergeDeep(defaults, userConfig);
+  perf.config = Math.trunc(now() - timeStamp);
   // sanity checks
+  timeStamp = now();
   state = 'check';
   const error = sanity(input);
   if (error) {
     log(error, input);
     return { error };
   }
+  perf.sanity = Math.trunc(now() - timeStamp);
   // eslint-disable-next-line no-async-promise-executor
   return new Promise(async (resolve) => {
+    const timeStart = now();
     // check number of loaded models
     const loadedModels = Object.values(models).filter((a) => a).length;
     if (loadedModels === 0) log('Human library starting');
     // configure backend
+    timeStamp = now();
     if (tf.getBackend() !== config.backend) {
       state = 'backend';
       log('Human library setting backend:', config.backend);
       await tf.setBackend(config.backend);
       await tf.ready();
     }
+    perf.body = Math.trunc(now() - timeStamp);
     // load models if enabled
+    timeStamp = now();
     state = 'load';
     await load();
-    const perf = {};
-    let timeStamp;
+    perf.load = Math.trunc(now() - timeStamp);
     if (config.scoped) tf.engine().startScope();
@@ -164,8 +174,9 @@ async function detect(input, userConfig = {}) {
     timeStamp = now();
     const emotionData = config.face.emotion.enabled ? await emotion.predict(face.image, config) : {};
     perf.emotion = Math.trunc(now() - timeStamp);
+    // dont need face anymore
     face.image.dispose();
-    delete face.image;
     // calculate iris distance
     // iris: array[ bottom, left, top, right, center ]
     const iris = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)
@@ -191,8 +202,7 @@ async function detect(input, userConfig = {}) {
     if (config.scoped) tf.engine().endScope();
     analyze('End Scope:');
-    // combine and return results
-    perf.total = Object.values(perf).reduce((a, b) => a + b);
+    perf.total = Math.trunc(now() - timeStart);
     resolve({ face: faceRes, body: poseRes, hand: handRes, performance: perf });
   });
 }
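
Note the change to `perf.total`: it is now measured wall-clock from `timeStart` rather than summed from the per-stage entries. Once any stages overlap, a sum overstates real latency. An illustrative sketch (hypothetical timings, not from this commit):

```js
// Two 50ms "models" run concurrently: their per-stage timings sum to ~100ms,
// but the elapsed (wall-clock) time is only ~50ms.
const now = () => performance.now();

async function timed(label, ms, perf) {
  const t = now();
  await new Promise((resolve) => setTimeout(resolve, ms)); // stand-in for a model call
  perf[label] = Math.trunc(now() - t);
}

async function demo() {
  const perf = {};
  const timeStart = now();
  await Promise.all([timed('age', 50, perf), timed('gender', 50, perf)]);
  const sum = Object.values(perf).reduce((a, b) => a + b); // ~100
  perf.total = Math.trunc(now() - timeStart);              // ~50
  console.log({ ...perf, sum });
}
demo();
```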

src/ssrnet/ssrnet.js

@@ -36,15 +36,21 @@ async function predict(image, config) {
   } else {
     enhance = await getImage(image, config.face.age.inputSize);
   }
+  const promises = [];
+  let ageT;
+  let genderT;
+  if (config.face.age.enabled) promises.push(ageT = models.age.predict(enhance));
+  if (config.face.gender.enabled) promises.push(genderT = models.gender.predict(enhance));
+  await Promise.all(promises);
   const obj = {};
-  if (config.face.age.enabled) {
-    const ageT = await models.age.predict(enhance);
+  if (ageT) {
     const data = await ageT.data();
     obj.age = Math.trunc(10 * data[0]) / 10;
     tf.dispose(ageT);
   }
-  if (config.face.gender.enabled) {
-    const genderT = await models.gender.predict(enhance);
+  if (genderT) {
     const data = await genderT.data();
     const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
     if (confidence > config.face.gender.minConfidence) {
@@ -53,6 +59,7 @@ async function predict(image, config) {
     }
     tf.dispose(genderT);
   }
   tf.dispose(enhance);
   last = obj;
   return obj;
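
The core pattern of this commit, extracted into a standalone sketch (hypothetical `models` object; tfjs-style API assumed, where `predict()` may return a tensor or a promise of one):

```js
// Start both predictions before awaiting, so the age and gender models
// can run concurrently instead of back-to-back.
async function predictAgeGender(models, enhance, config) {
  const promises = [];
  let ageT;
  let genderT;
  if (config.face.age.enabled) promises.push(ageT = models.age.predict(enhance));
  if (config.face.gender.enabled) promises.push(genderT = models.gender.predict(enhance));
  // Promise.all tolerates non-promise values, so this works whether
  // predict() is synchronous or asynchronous
  await Promise.all(promises);
  return { ageT, genderT }; // undefined for any model that was disabled
}
```

The later `if (ageT)` / `if (genderT)` guards then replace the per-model config checks, since a disabled model simply leaves its variable undefined.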