parallelized agegender operations

pull/50/head
Vladimir Mandic 2020-10-17 11:38:24 -04:00
parent aaa170a445
commit 3776c29c12
15 changed files with 187 additions and 153 deletions

README.md

@@ -43,7 +43,7 @@ There are multiple ways to use `Human` library, pick one that suits you:
 - `dist/human.esm-nobundle.js`: ESM format non-minified bundle without TFJS for Browsers
 - `dist/human.cjs`: CommonJS format non-minified bundle without TFJS for NodeJS
-All versions include `sourcemap`
+All versions include `sourcemap` and build `manifest`
 Defaults:
 ```json
@@ -348,12 +348,15 @@ result = {
     }
   ],
   performance = { // performance data of last execution for each module, measured in milliseconds
-    body,
-    hand,
-    face,
-    agegender,
-    emotion,
-    total,
+    config,    // time to parse configuration
+    load,      // time to load models
+    sanity,    // time for input verification
+    body,      // model time
+    hand,      // model time
+    face,      // model time
+    agegender, // model time
+    emotion,   // model time
+    total,     // end to end time
   }
 }
 ```
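
The `performance` object above can be read straight off the value resolved by `detect()`. A minimal usage sketch, assuming `human` is the imported library and `input` is an already-loaded image or video element (both names are placeholders, not part of this commit):

```js
// Log each stage timing reported by detect(); values are in milliseconds.
async function logPerformance(human, input) {
  const result = await human.detect(input);
  for (const [stage, ms] of Object.entries(result.performance)) {
    console.log(`${stage}: ${ms} ms`);
  }
  // `total` is the end-to-end wall-clock time, not the sum of the stages
  return result.performance;
}
```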

dist/human.cjs vendored (32 changes)

@@ -3915,15 +3915,21 @@ var require_ssrnet = __commonJS((exports2) => {
     } else {
       enhance = await getImage(image, config2.face.age.inputSize);
     }
+    const promises = [];
+    let ageT;
+    let genderT;
+    if (config2.face.age.enabled)
+      promises.push(ageT = models2.age.predict(enhance));
+    if (config2.face.gender.enabled)
+      promises.push(genderT = models2.gender.predict(enhance));
+    await Promise.all(promises);
     const obj = {};
-    if (config2.face.age.enabled) {
-      const ageT = await models2.age.predict(enhance);
+    if (ageT) {
       const data = await ageT.data();
       obj.age = Math.trunc(10 * data[0]) / 10;
       tf2.dispose(ageT);
     }
-    if (config2.face.gender.enabled) {
-      const genderT = await models2.gender.predict(enhance);
+    if (genderT) {
       const data = await genderT.data();
       const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
       if (confidence > config2.face.gender.minConfidence) {
@@ -5132,7 +5138,7 @@ var require_config = __commonJS((exports2) => {
 var require_package = __commonJS((exports2, module2) => {
   module2.exports = {
     name: "@vladmandic/human",
-    version: "0.3.5",
+    version: "0.3.6",
     description: "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
     sideEffects: false,
     main: "dist/human.cjs",
@@ -5285,27 +5291,36 @@ async function load(userConfig) {
 }
 async function detect(input, userConfig = {}) {
   state = "config";
+  const perf = {};
+  let timeStamp;
+  timeStamp = now();
   config = mergeDeep(defaults, userConfig);
+  perf.config = Math.trunc(now() - timeStamp);
+  timeStamp = now();
   state = "check";
   const error = sanity(input);
   if (error) {
     log(error, input);
     return {error};
   }
+  perf.sanity = Math.trunc(now() - timeStamp);
   return new Promise(async (resolve) => {
+    const timeStart = now();
     const loadedModels = Object.values(models).filter((a) => a).length;
     if (loadedModels === 0)
       log("Human library starting");
+    timeStamp = now();
     if (tf.getBackend() !== config.backend) {
       state = "backend";
       log("Human library setting backend:", config.backend);
       await tf.setBackend(config.backend);
       await tf.ready();
     }
+    perf.body = Math.trunc(now() - timeStamp);
+    timeStamp = now();
     state = "load";
     await load();
-    const perf = {};
-    let timeStamp;
+    perf.load = Math.trunc(now() - timeStamp);
     if (config.scoped)
       tf.engine().startScope();
     analyze("Start Detect:");
@@ -5342,7 +5357,6 @@ async function detect(input, userConfig = {}) {
       const emotionData = config.face.emotion.enabled ? await emotion.predict(face.image, config) : {};
       perf.emotion = Math.trunc(now() - timeStamp);
       face.image.dispose();
-      delete face.image;
       const iris = face.annotations.leftEyeIris && face.annotations.rightEyeIris ? Math.max(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0], face.annotations.rightEyeIris[3][0] - face.annotations.rightEyeIris[1][0]) : 0;
       faceRes.push({
         confidence: face.confidence,
@@ -5362,7 +5376,7 @@ async function detect(input, userConfig = {}) {
     if (config.scoped)
       tf.engine().endScope();
     analyze("End Scope:");
-    perf.total = Object.values(perf).reduce((a, b) => a + b);
+    perf.total = Math.trunc(now() - timeStart);
     resolve({face: faceRes, body: poseRes, hand: handRes, performance: perf});
   });
 }
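
The `perf.total` change above is why the commit introduces a separate `timeStart`: summing the individual stage timers misses any time spent between the timed sections, while a wall-clock measurement does not. A standalone sketch (illustration only, not library code) that shows the difference:

```js
// Compare the sum of per-stage timings against true end-to-end elapsed time.
const now = () => performance.now(); // browser timer; NodeJS would use perf_hooks
async function run(stages) {
  const perf = {};
  const timeStart = now();
  for (const [name, fn] of Object.entries(stages)) {
    const timeStamp = now();
    await fn();
    perf[name] = Math.trunc(now() - timeStamp);
  }
  const summed = Object.values(perf).reduce((a, b) => a + b, 0);
  perf.total = Math.trunc(now() - timeStart); // includes the untimed gaps
  console.log({ summed, total: perf.total }); // summed <= total
  return perf;
}
```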

dist/human.cjs.json vendored (12 changes)

@@ -116,7 +116,7 @@
     "imports": []
   },
   "src/index.js": {
-    "bytes": 7175,
+    "bytes": 7437,
     "imports": [
       {
         "path": "src/facemesh/facemesh.js"
@@ -245,7 +245,7 @@
     ]
   },
   "src/ssrnet/ssrnet.js": {
-    "bytes": 1856,
+    "bytes": 1965,
     "imports": []
   }
 },
@@ -253,7 +253,7 @@
 "dist/human.cjs.map": {
   "imports": [],
   "inputs": {},
-  "bytes": 219147
+  "bytes": 219752
 },
 "dist/human.cjs": {
   "imports": [],
@@ -283,7 +283,7 @@
   "bytesInOutput": 2950
 },
 "src/ssrnet/ssrnet.js": {
-  "bytesInOutput": 2068
+  "bytesInOutput": 2194
 },
 "src/emotion/emotion.js": {
   "bytesInOutput": 2134
@@ -346,10 +346,10 @@
   "bytesInOutput": 2748
 },
 "src/index.js": {
-  "bytesInOutput": 6171
+  "bytesInOutput": 6431
 }
 },
-"bytes": 133638
+"bytes": 134024
 }
 }
 }

dist/human.cjs.map vendored (4 changes)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm-nobundle.js.json vendored

@@ -116,7 +116,7 @@
     "imports": []
   },
   "src/index.js": {
-    "bytes": 7175,
+    "bytes": 7437,
     "imports": [
       {
         "path": "src/facemesh/facemesh.js"
@@ -245,7 +245,7 @@
     ]
   },
   "src/ssrnet/ssrnet.js": {
-    "bytes": 1856,
+    "bytes": 1965,
     "imports": []
   }
 },
@@ -253,7 +253,7 @@
 "dist/human.esm-nobundle.js.map": {
   "imports": [],
   "inputs": {},
-  "bytes": 197443
+  "bytes": 198045
 },
 "dist/human.esm-nobundle.js": {
   "imports": [],
@@ -283,7 +283,7 @@
   "bytesInOutput": 1391
 },
 "src/ssrnet/ssrnet.js": {
-  "bytesInOutput": 1099
+  "bytesInOutput": 1149
 },
 "src/emotion/emotion.js": {
   "bytesInOutput": 1148
@@ -346,10 +346,10 @@
   "bytesInOutput": 2275
 },
 "src/index.js": {
-  "bytesInOutput": 3410
+  "bytesInOutput": 3513
 }
 },
-"bytes": 69193
+"bytes": 69346
 }
 }
 }

dist/human.esm.js vendored (94 changes)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.json vendored (12 changes)

@@ -291,7 +291,7 @@
     "imports": []
   },
   "src/index.js": {
-    "bytes": 7175,
+    "bytes": 7437,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -436,7 +436,7 @@
     ]
   },
   "src/ssrnet/ssrnet.js": {
-    "bytes": 1856,
+    "bytes": 1965,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -464,7 +464,7 @@
 "dist/human.esm.js.map": {
   "imports": [],
   "inputs": {},
-  "bytes": 4958494
+  "bytes": 4959096
 },
 "dist/human.esm.js": {
   "imports": [],
@@ -551,7 +551,7 @@
   "bytesInOutput": 1376
 },
 "src/ssrnet/ssrnet.js": {
-  "bytesInOutput": 1100
+  "bytesInOutput": 1150
 },
 "src/emotion/emotion.js": {
   "bytesInOutput": 1143
@@ -614,10 +614,10 @@
   "bytesInOutput": 2276
 },
 "src/index.js": {
-  "bytesInOutput": 3495
+  "bytesInOutput": 3608
 }
 },
-"bytes": 1106100
+"bytes": 1106263
 }
 }
 }

dist/human.js vendored (94 changes)

File diff suppressed because one or more lines are too long

dist/human.js.map vendored (4 changes)

File diff suppressed because one or more lines are too long

dist/human.json vendored (12 changes)

@@ -291,7 +291,7 @@
     "imports": []
   },
   "src/index.js": {
-    "bytes": 7175,
+    "bytes": 7437,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -436,7 +436,7 @@
     ]
   },
   "src/ssrnet/ssrnet.js": {
-    "bytes": 1856,
+    "bytes": 1965,
     "imports": [
       {
         "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -464,7 +464,7 @@
 "dist/human.js.map": {
   "imports": [],
   "inputs": {},
-  "bytes": 4958494
+  "bytes": 4959096
 },
 "dist/human.js": {
   "imports": [],
@@ -551,7 +551,7 @@
   "bytesInOutput": 1376
 },
 "src/ssrnet/ssrnet.js": {
-  "bytesInOutput": 1100
+  "bytesInOutput": 1150
 },
 "src/emotion/emotion.js": {
   "bytesInOutput": 1143
@@ -614,10 +614,10 @@
   "bytesInOutput": 2276
 },
 "src/index.js": {
-  "bytesInOutput": 3495
+  "bytesInOutput": 3608
 }
 },
-"bytes": 1106109
+"bytes": 1106272
 }
 }
 }

src/index.js

@@ -89,36 +89,46 @@ async function load(userConfig) {
 async function detect(input, userConfig = {}) {
   state = 'config';
+  const perf = {};
+  let timeStamp;
+  timeStamp = now();
   config = mergeDeep(defaults, userConfig);
+  perf.config = Math.trunc(now() - timeStamp);
   // sanity checks
+  timeStamp = now();
   state = 'check';
   const error = sanity(input);
   if (error) {
     log(error, input);
     return { error };
   }
+  perf.sanity = Math.trunc(now() - timeStamp);
   // eslint-disable-next-line no-async-promise-executor
   return new Promise(async (resolve) => {
+    const timeStart = now();
     // check number of loaded models
     const loadedModels = Object.values(models).filter((a) => a).length;
     if (loadedModels === 0) log('Human library starting');
     // configure backend
+    timeStamp = now();
     if (tf.getBackend() !== config.backend) {
       state = 'backend';
       log('Human library setting backend:', config.backend);
       await tf.setBackend(config.backend);
       await tf.ready();
     }
+    perf.body = Math.trunc(now() - timeStamp);
     // load models if enabled
+    timeStamp = now();
     state = 'load';
     await load();
-    const perf = {};
-    let timeStamp;
+    perf.load = Math.trunc(now() - timeStamp);
     if (config.scoped) tf.engine().startScope();
@@ -164,8 +174,9 @@ async function detect(input, userConfig = {}) {
       timeStamp = now();
       const emotionData = config.face.emotion.enabled ? await emotion.predict(face.image, config) : {};
       perf.emotion = Math.trunc(now() - timeStamp);
+      // dont need face anymore
       face.image.dispose();
-      delete face.image;
       // calculate iris distance
       // iris: array[ bottom, left, top, right, center ]
       const iris = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)
@@ -191,8 +202,7 @@ async function detect(input, userConfig = {}) {
     if (config.scoped) tf.engine().endScope();
     analyze('End Scope:');
     // combine and return results
-    perf.total = Object.values(perf).reduce((a, b) => a + b);
+    perf.total = Math.trunc(now() - timeStart);
     resolve({ face: faceRes, body: poseRes, hand: handRes, performance: perf });
   });
 }
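
The same `timeStamp = now()` / `Math.trunc(now() - timeStamp)` pair now brackets every stage of `detect()`. As a purely hypothetical refactor (not something this commit does), the pattern could be factored into a small helper:

```js
// Time one async stage and record its duration, truncated to whole milliseconds.
const now = () => performance.now();
async function timed(perf, name, fn) {
  const timeStamp = now();
  const result = await fn();
  perf[name] = Math.trunc(now() - timeStamp);
  return result;
}

// Usage sketch: const perf = {}; await timed(perf, 'load', () => load());
```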

src/ssrnet/ssrnet.js

@@ -36,15 +36,21 @@ async function predict(image, config) {
   } else {
     enhance = await getImage(image, config.face.age.inputSize);
   }
+  const promises = [];
+  let ageT;
+  let genderT;
+  if (config.face.age.enabled) promises.push(ageT = models.age.predict(enhance));
+  if (config.face.gender.enabled) promises.push(genderT = models.gender.predict(enhance));
+  await Promise.all(promises);
   const obj = {};
-  if (config.face.age.enabled) {
-    const ageT = await models.age.predict(enhance);
+  if (ageT) {
     const data = await ageT.data();
     obj.age = Math.trunc(10 * data[0]) / 10;
     tf.dispose(ageT);
   }
-  if (config.face.gender.enabled) {
-    const genderT = await models.gender.predict(enhance);
+  if (genderT) {
     const data = await genderT.data();
     const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
     if (confidence > config.face.gender.minConfidence) {
@@ -53,6 +59,7 @@ async function predict(image, config) {
     }
     tf.dispose(genderT);
   }
+  tf.dispose(enhance);
   last = obj;
   return obj;
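
This hunk is the heart of the commit: both `predict()` calls are pushed onto `promises` before anything is awaited, so an enabled age model no longer has to finish before the gender model starts. A simplified sketch of the same pattern, using hypothetical async stand-ins rather than the real TFJS models:

```js
// Start every enabled prediction first, then await them together.
async function predictBoth(models, input, config) {
  const promises = [];
  let agePromise;
  let genderPromise;
  if (config.face.age.enabled) promises.push(agePromise = models.age.predict(input));
  if (config.face.gender.enabled) promises.push(genderPromise = models.gender.predict(input));
  await Promise.all(promises); // waits only for the predictions actually started
  const obj = {};
  if (agePromise) obj.age = await agePromise; // already settled at this point
  if (genderPromise) obj.gender = await genderPromise;
  return obj;
}
```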