implemented memory profiler

pull/50/head
Vladimir Mandic 2020-11-01 13:07:53 -05:00
parent 7d65c83d3f
commit 184fc59e33
23 changed files with 6516 additions and 11428 deletions

View File

@ -238,16 +238,23 @@ Below is output of `human.defaults` object
Any property can be overridden by passing a user object during `human.detect()`
Note that user object and default configuration are merged using deep-merge, so you do not need to redefine entire configuration
All configuration details can be changed in real-time!
Configuration object is large, but typically you only need to modify a few values:
- `enabled`: Choose which models to use
- `modelPath`: Update as needed to reflect your application's relative path
```js
config = {
backend: 'webgl', // select tfjs backend to use
console: true, // enable debugging output to console
profile: true, // enable tfjs profiling
// this has significant performance impact, only enable for debugging purposes
// currently only implemented for age,gender,emotion models
deallocate: true, // aggressively deallocate gpu memory after each usage
// only valid for webgl backend and only during first call, cannot be changed unless library is reloaded
// this has significant performance impact, only enable on low-memory devices
scoped: false, // enable scoped runs
// some models *may* have memory leaks, this wraps everything in a local scope at a cost of performance
// typically not needed
@ -415,6 +422,35 @@ result = {
<hr>
## Profile
If `config.profile` is enabled, a call to `human.profile()` will return detailed profiling data from the last detect invocation.
example:
```js
result = {
{age: {…}, gender: {…}, emotion: {…}}
age:
timeKernelOps: 53.78892800000002
newBytes: 4
newTensors: 1
numKernelOps: 341
peakBytes: 46033948
largestKernelOps: Array(5)
0: {name: "Reshape", bytesAdded: 107648, totalBytesSnapshot: 46033948, tensorsAdded: 1, totalTensorsSnapshot: 1149, …}
1: {name: "Reshape", bytesAdded: 0, totalBytesSnapshot: 45818652, tensorsAdded: 1, totalTensorsSnapshot: 1147, …}
2: {name: "Reshape", bytesAdded: 0, totalBytesSnapshot: 45633996, tensorsAdded: 1, totalTensorsSnapshot: 1148, …}
3: {name: "Reshape", bytesAdded: 0, totalBytesSnapshot: 45389376, tensorsAdded: 1, totalTensorsSnapshot: 1154, …}
4: {name: "Reshape", bytesAdded: 53824, totalBytesSnapshot: 45381776, tensorsAdded: 1, totalTensorsSnapshot: 1155, …}
slowestKernelOps: Array(5)
0: {name: "_FusedMatMul", bytesAdded: 12, totalBytesSnapshot: 44802280, tensorsAdded: 1, totalTensorsSnapshot: 1156, …}
1: {name: "_FusedMatMul", bytesAdded: 4, totalBytesSnapshot: 44727564, tensorsAdded: 1, totalTensorsSnapshot: 1152, …}
2: {name: "_FusedMatMul", bytesAdded: 12, totalBytesSnapshot: 44789100, tensorsAdded: 1, totalTensorsSnapshot: 1157, …}
3: {name: "Add", bytesAdded: 4, totalBytesSnapshot: 44788748, tensorsAdded: 1, totalTensorsSnapshot: 1158, …}
4: {name: "Add", bytesAdded: 4, totalBytesSnapshot: 44788748, tensorsAdded: 1, totalTensorsSnapshot: 1158, …}
}
```
## Build
If you want to modify the library and perform a full rebuild:

View File

@ -4,6 +4,12 @@
export default {
backend: 'webgl', // select tfjs backend to use
console: true, // enable debugging output to console
profile: true, // enable tfjs profiling
// this has significant performance impact, only enable for debugging purposes
// currently only implemented for age,gender,emotion models
deallocate: true, // aggresively deallocate gpu memory after each usage
// only valid for webgl backend and only during first call, cannot be changed unless library is reloaded
// this has significant performance impact, only enable on low-memory devices
scoped: false, // enable scoped runs
// some models *may* have memory leaks, this wrapps everything in a local scope at a cost of performance
// typically not needed

View File

@ -30,6 +30,8 @@ const ui = {
// configuration overrides
const config = {
backend: 'webgl',
profile: false,
deallocate: false,
wasm: { path: '../assets' },
filter: {
enabled: true,
@ -199,6 +201,7 @@ function runHumanDetect(input, canvas) {
human.detect(input, config).then((result) => {
if (result.error) log(result.error);
else drawResults(input, result, canvas);
if (config.profile) log('Profile data:', human.profile());
});
}
}
@ -269,6 +272,8 @@ function setupMenu() {
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addList('Backend', ['cpu', 'webgl', 'wasm', 'webgpu'], config.backend, (val) => config.backend = val);
menu.addBool('Enable Profiler', config, 'profile');
menu.addBool('Memory Deallocator', config, 'deallocate');
menu.addBool('Use Web Worker', ui, 'useWorker');
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addLabel('Enabled Models');

View File

@ -12,6 +12,7 @@ let theme = {
checkboxOff: 'lightcoral',
rangeBackground: 'lightblue',
rangeLabel: 'white',
chartColor: 'lightblue',
};
function createCSS() {
@ -69,8 +70,6 @@ class Menu {
instance++;
this._maxFPS = 0;
this.hidden = 0;
this.chartFGcolor = 'lightblue';
this.chartBGcolor = 'lightgray';
}
createMenu(parent, title = '', position = { top: null, left: null, bottom: null, right: null }) {
@ -256,13 +255,12 @@ class Menu {
else this.addValue(title, val);
}
addChart(title, id, width = 200, height = 40, fgColor, bgColor) {
if (fgColor) this.chartFGcolor = fgColor;
if (bgColor) this.chartBGcolor = bgColor;
// Appends a chart header with an empty <canvas> placeholder (filled later by updateChart).
// An explicit color overrides theme.chartColor globally for all subsequent charts.
addChart(title, id, width = 200, height = 40, color) {
if (color) theme.chartColor = color;
const el = document.createElement('div');
el.className = 'menu-item menu-chart-title';
el.id = this.newID;
// NOTE(review): title/id are interpolated into innerHTML unescaped — safe only for trusted, internal strings
el.innerHTML = `<font color=${theme.chartColor}>${title}</font><canvas id="menu-canvas-${id}" class="menu-chart-canvas" width="${width}px" height="${height}px"></canvas>`;
this.container.appendChild(el);
}
@ -272,18 +270,18 @@ class Menu {
const canvas = document.getElementById(`menu-canvas-${id}`);
if (!canvas) return;
const ctx = canvas.getContext('2d');
ctx.fillStyle = this.chartBGcolor;
ctx.fillStyle = theme.background;
ctx.fillRect(0, 0, canvas.width, canvas.height);
const width = canvas.width / values.length;
const max = 1 + Math.max(...values);
const height = canvas.height / max;
for (const i in values) {
const gradient = ctx.createLinearGradient(0, (max - values[i]) * height, 0, 0);
gradient.addColorStop(0.1, this.chartFGcolor);
gradient.addColorStop(0.4, this.chartBGcolor);
gradient.addColorStop(0.1, theme.chartColor);
gradient.addColorStop(0.4, theme.background);
ctx.fillStyle = gradient;
ctx.fillRect(i * width, 0, width - 4, canvas.height);
ctx.fillStyle = this.chartBGcolor;
ctx.fillStyle = theme.background;
ctx.font = `${width / 1.4}px "Segoe UI"`;
ctx.fillText(Math.round(values[i]), i * width + 1, canvas.height - 1, width - 1);
}

View File

@ -3859,9 +3859,37 @@ var require_facemesh = __commonJS((exports) => {
exports.triangulation = triangulation;
});
// src/profile.js
// Bundled copy of src/profile.js: aggregates one tf.profile() result per model name —
// total kernel time plus the five slowest and five largest-memory kernel ops.
var require_profile = __commonJS((exports) => {
const profileData = {};
function profile2(name, data) {
if (!data || !data.kernels)
return;
const maxResults = 5;
// sum of kernel times for kernels that reported a positive execution time
const time = data.kernels.filter((a) => a.kernelTimeMs > 0).reduce((a, b) => a += b.kernelTimeMs, 0);
// NOTE(review): both map() passes assign .id onto the caller-owned kernel records — mutation is visible to the caller
const slowest = data.kernels.map((a, i) => {
a.id = i;
return a;
}).filter((a) => a.kernelTimeMs > 0).sort((a, b) => b.kernelTimeMs - a.kernelTimeMs);
const largest = data.kernels.map((a, i) => {
a.id = i;
return a;
}).filter((a) => a.totalBytesSnapshot > 0).sort((a, b) => b.totalBytesSnapshot - a.totalBytesSnapshot);
// truncate in place to the top five entries
if (slowest.length > maxResults)
slowest.length = maxResults;
if (largest.length > maxResults)
largest.length = maxResults;
const res = {newBytes: data.newBytes, newTensors: data.newTensors, peakBytes: data.peakBytes, numKernelOps: data.kernels.length, timeKernelOps: time, slowestKernelOps: slowest, largestKernelOps: largest};
profileData[name] = res;
}
exports.run = profile2;
exports.data = profileData;
});
// src/ssrnet/ssrnet.js
var require_ssrnet = __commonJS((exports) => {
const tf2 = require("@tensorflow/tfjs");
const profile2 = require_profile();
const models = {};
let last = {age: 0, gender: ""};
let frame = 0;
@ -3887,12 +3915,23 @@ var require_ssrnet = __commonJS((exports) => {
const promises = [];
let ageT;
let genderT;
const obj = {};
if (!config.profile) {
if (config.face.age.enabled)
promises.push(ageT = models.age.predict(enhance));
if (config.face.gender.enabled)
promises.push(genderT = models.gender.predict(enhance));
await Promise.all(promises);
const obj = {};
} else {
const profileAge = config.face.age.enabled ? await tf2.profile(() => models.age.predict(enhance)) : {};
ageT = profileAge.result.clone();
profileAge.result.dispose();
profile2.run("age", profileAge);
const profileGender = config.face.gender.enabled ? await tf2.profile(() => models.gender.predict(enhance)) : {};
genderT = profileGender.result.clone();
profileGender.result.dispose();
profile2.run("gender", profileGender);
}
if (ageT) {
const data = await ageT.data();
obj.age = Math.trunc(10 * data[0]) / 10;
@ -3919,6 +3958,7 @@ var require_ssrnet = __commonJS((exports) => {
// src/emotion/emotion.js
var require_emotion = __commonJS((exports) => {
const tf2 = require("@tensorflow/tfjs");
const profile2 = require_profile();
const annotations = ["angry", "discust", "fear", "happy", "sad", "surpise", "neutral"];
const models = {};
let last = [];
@ -3950,14 +3990,22 @@ var require_emotion = __commonJS((exports) => {
blueNorm.dispose();
const obj = [];
if (config.face.emotion.enabled) {
let data;
if (!config.profile) {
const emotionT = await models.emotion.predict(grayscale);
const data = await emotionT.data();
data = await emotionT.data();
tf2.dispose(emotionT);
} else {
const profileData = await tf2.profile(() => models.emotion.predict(grayscale));
data = await profileData.result.data();
profileData.result.dispose();
profile2.run("emotion", profileData);
}
for (let i = 0; i < data.length; i++) {
if (multiplier * data[i] > config.face.emotion.minConfidence)
obj.push({score: Math.min(0.99, Math.trunc(100 * multiplier * data[i]) / 100), emotion: annotations[i]});
}
obj.sort((a, b) => b.score - a.score);
tf2.dispose(emotionT);
}
tf2.dispose(grayscale);
last = obj;
@ -3974,8 +4022,6 @@ var require_modelBase = __commonJS((exports) => {
constructor(model, outputStride) {
this.model = model;
this.outputStride = outputStride;
const inputShape = this.model.inputs[0].shape;
tf2.util.assert(inputShape[1] === -1 && inputShape[2] === -1, () => `Input shape [${inputShape[1]}, ${inputShape[2]}] must both be equal to or -1`);
}
predict(input) {
return tf2.tidy(() => {
@ -5682,6 +5728,8 @@ var require_config = __commonJS((exports) => {
var config_default = {
backend: "webgl",
console: true,
profile: true,
deallocate: true,
scoped: false,
videoOptimized: true,
filter: {
@ -5777,7 +5825,7 @@ var require_config = __commonJS((exports) => {
var require_package = __commonJS((exports, module) => {
module.exports = {
name: "@vladmandic/human",
version: "0.5.2",
version: "0.5.3",
description: "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
sideEffects: false,
main: "dist/human.node.js",
@ -5849,6 +5897,7 @@ const emotion = require_emotion();
const posenet = require_posenet();
const handpose = require_handpose();
const fxImage = require_imagefx();
const profile = require_profile();
const defaults = require_config().default;
const app = require_package();
let first = true;
@ -5920,6 +5969,11 @@ class Human {
if (msg && this.config.console)
console.log("Human:", ...msg);
}
// Returns per-model profiling data accumulated by src/profile.js;
// empty object when config.profile is disabled.
profile() {
if (this.config.profile)
return profile.data;
return {};
}
analyze(...msg) {
if (!this.analyzeMemoryLeaks)
return;
@ -5961,13 +6015,14 @@ class Human {
async checkBackend() {
if (tf.getBackend() !== this.config.backend) {
this.state = "backend";
if (this.config.backend in tf.engine().registry) {
this.log("Setting backend:", this.config.backend);
await tf.setBackend(this.config.backend);
await tf.ready();
} else {
this.log("Backend not registred:", this.config.backend);
tf.enableProdMode();
if (this.config.deallocate && this.config.backend === "webgl") {
this.log("Changing WebGL: WEBGL_DELETE_TEXTURE_THRESHOLD:", this.config.deallocate);
tf.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", this.config.deallocate ? 0 : -1);
}
await tf.ready();
}
}
tfImage(input) {

File diff suppressed because one or more lines are too long

View File

@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
"bytes": 6295,
"bytes": 6870,
"imports": []
},
"package.json": {
@ -9,8 +9,12 @@
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 1646,
"imports": []
"bytes": 1958,
"imports": [
{
"path": "src/profile.js"
}
]
},
"src/facemesh/blazeface.js": {
"bytes": 7161,
@ -116,7 +120,7 @@
"imports": []
},
"src/human.js": {
"bytes": 13072,
"bytes": 13664,
"imports": [
{
"path": "src/facemesh/facemesh.js"
@ -136,6 +140,9 @@
{
"path": "src/imagefx.js"
},
{
"path": "src/profile.js"
},
{
"path": "config.js"
},
@ -190,7 +197,7 @@
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1719,
"bytes": 1512,
"imports": []
},
"src/posenet/modelMobileNet.js": {
@ -251,16 +258,24 @@
}
]
},
"src/ssrnet/ssrnet.js": {
"bytes": 1574,
"src/profile.js": {
"bytes": 1004,
"imports": []
},
"src/ssrnet/ssrnet.js": {
"bytes": 2115,
"imports": [
{
"path": "src/profile.js"
}
]
}
},
"outputs": {
"dist/human.esm-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 250207
"bytes": 254266
},
"dist/human.esm-nobundle.js": {
"imports": [],
@ -289,14 +304,17 @@
"src/facemesh/facemesh.js": {
"bytesInOutput": 2661
},
"src/profile.js": {
"bytesInOutput": 1092
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1744
"bytesInOutput": 2310
},
"src/emotion/emotion.js": {
"bytesInOutput": 1718
"bytesInOutput": 2044
},
"src/posenet/modelBase.js": {
"bytesInOutput": 1118
"bytesInOutput": 910
},
"src/posenet/modelMobileNet.js": {
"bytesInOutput": 504
@ -350,19 +368,19 @@
"bytesInOutput": 20195
},
"config.js": {
"bytesInOutput": 2230
"bytesInOutput": 2271
},
"package.json": {
"bytesInOutput": 3012
},
"src/human.js": {
"bytesInOutput": 11537
"bytesInOutput": 11796
},
"src/human.js": {
"bytesInOutput": 0
}
},
"bytes": 156000
"bytes": 158095
}
}
}

5746
dist/human.esm.js vendored

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

60
dist/human.esm.json vendored
View File

@ -1,11 +1,11 @@
{
"inputs": {
"config.js": {
"bytes": 6295,
"bytes": 6870,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
"bytes": 297728,
"bytes": 272720,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
@ -74,7 +74,7 @@
]
},
"node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js": {
"bytes": 607535,
"bytes": 571410,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
@ -82,7 +82,7 @@
]
},
"node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js": {
"bytes": 312103,
"bytes": 294510,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
@ -90,7 +90,7 @@
]
},
"node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js": {
"bytes": 1313261,
"bytes": 1305668,
"imports": [
{
"path": "empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js"
@ -104,7 +104,7 @@
]
},
"node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js": {
"bytes": 217014,
"bytes": 217016,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
@ -153,10 +153,13 @@
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 1646,
"bytes": 1958,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/profile.js"
}
]
},
@ -291,7 +294,7 @@
"imports": []
},
"src/human.js": {
"bytes": 13072,
"bytes": 13664,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -314,6 +317,9 @@
{
"path": "src/imagefx.js"
},
{
"path": "src/profile.js"
},
{
"path": "config.js"
},
@ -368,7 +374,7 @@
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1719,
"bytes": 1512,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -439,11 +445,18 @@
}
]
},
"src/profile.js": {
"bytes": 1004,
"imports": []
},
"src/ssrnet/ssrnet.js": {
"bytes": 1574,
"bytes": 2115,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/profile.js"
}
]
},
@ -468,7 +481,7 @@
"dist/human.esm.js.map": {
"imports": [],
"inputs": {},
"bytes": 5123513
"bytes": 4999432
},
"dist/human.esm.js": {
"imports": [],
@ -483,19 +496,19 @@
"bytesInOutput": 44
},
"node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js": {
"bytesInOutput": 1010337
"bytesInOutput": 1004766
},
"node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js": {
"bytesInOutput": 514491
},
"node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js": {
"bytesInOutput": 258960
"bytesInOutput": 246275
},
"empty:/home/vlado/dev/human/node_modules/string_decoder/lib/string_decoder.js": {
"bytesInOutput": 52
},
"node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js": {
"bytesInOutput": 129585
"bytesInOutput": 129586
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/alea.js": {
"bytesInOutput": 2112
@ -522,10 +535,10 @@
"bytesInOutput": 458
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
"bytesInOutput": 272394
"bytesInOutput": 246000
},
"node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js": {
"bytesInOutput": 561656
"bytesInOutput": 527935
},
"node_modules/@tensorflow/tfjs/dist/tf.node.js": {
"bytesInOutput": 3025
@ -554,14 +567,17 @@
"src/facemesh/facemesh.js": {
"bytesInOutput": 2651
},
"src/profile.js": {
"bytesInOutput": 1092
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1734
"bytesInOutput": 2300
},
"src/emotion/emotion.js": {
"bytesInOutput": 1708
"bytesInOutput": 2034
},
"src/posenet/modelBase.js": {
"bytesInOutput": 1108
"bytesInOutput": 900
},
"src/posenet/modelMobileNet.js": {
"bytesInOutput": 494
@ -615,19 +631,19 @@
"bytesInOutput": 20195
},
"config.js": {
"bytesInOutput": 2230
"bytesInOutput": 2271
},
"package.json": {
"bytesInOutput": 3012
},
"src/human.js": {
"bytesInOutput": 11527
"bytesInOutput": 11786
},
"src/human.js": {
"bytesInOutput": 0
}
},
"bytes": 2924964
"bytes": 2848689
}
}
}

5742
dist/human.js vendored

File diff suppressed because it is too large Load Diff

6
dist/human.js.map vendored

File diff suppressed because one or more lines are too long

60
dist/human.json vendored
View File

@ -1,11 +1,11 @@
{
"inputs": {
"config.js": {
"bytes": 6295,
"bytes": 6870,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
"bytes": 297728,
"bytes": 272720,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
@ -74,7 +74,7 @@
]
},
"node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js": {
"bytes": 607535,
"bytes": 571410,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
@ -82,7 +82,7 @@
]
},
"node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js": {
"bytes": 312103,
"bytes": 294510,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
@ -90,7 +90,7 @@
]
},
"node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js": {
"bytes": 1313261,
"bytes": 1305668,
"imports": [
{
"path": "empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js"
@ -104,7 +104,7 @@
]
},
"node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js": {
"bytes": 217014,
"bytes": 217016,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js"
@ -153,10 +153,13 @@
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 1646,
"bytes": 1958,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/profile.js"
}
]
},
@ -291,7 +294,7 @@
"imports": []
},
"src/human.js": {
"bytes": 13072,
"bytes": 13664,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -314,6 +317,9 @@
{
"path": "src/imagefx.js"
},
{
"path": "src/profile.js"
},
{
"path": "config.js"
},
@ -368,7 +374,7 @@
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1719,
"bytes": 1512,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -439,11 +445,18 @@
}
]
},
"src/profile.js": {
"bytes": 1004,
"imports": []
},
"src/ssrnet/ssrnet.js": {
"bytes": 1574,
"bytes": 2115,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
},
{
"path": "src/profile.js"
}
]
},
@ -468,7 +481,7 @@
"dist/human.js.map": {
"imports": [],
"inputs": {},
"bytes": 5127370
"bytes": 5003247
},
"dist/human.js": {
"imports": [],
@ -483,19 +496,19 @@
"bytesInOutput": 48
},
"node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js": {
"bytesInOutput": 1061246
"bytesInOutput": 1055423
},
"node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js": {
"bytesInOutput": 541660
},
"node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js": {
"bytesInOutput": 273096
"bytesInOutput": 259681
},
"empty:/home/vlado/dev/human/node_modules/string_decoder/lib/string_decoder.js": {
"bytesInOutput": 56
},
"node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js": {
"bytesInOutput": 136972
"bytesInOutput": 136974
},
"node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/alea.js": {
"bytesInOutput": 2276
@ -522,10 +535,10 @@
"bytesInOutput": 490
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
"bytesInOutput": 284249
"bytesInOutput": 256749
},
"node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js": {
"bytesInOutput": 581862
"bytesInOutput": 546737
},
"node_modules/@tensorflow/tfjs/dist/tf.node.js": {
"bytesInOutput": 3189
@ -554,14 +567,17 @@
"src/facemesh/facemesh.js": {
"bytesInOutput": 2775
},
"src/profile.js": {
"bytesInOutput": 1140
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1835
"bytesInOutput": 2420
},
"src/emotion/emotion.js": {
"bytesInOutput": 1796
"bytesInOutput": 2137
},
"src/posenet/modelBase.js": {
"bytesInOutput": 1165
"bytesInOutput": 954
},
"src/posenet/modelMobileNet.js": {
"bytesInOutput": 519
@ -615,16 +631,16 @@
"bytesInOutput": 21613
},
"config.js": {
"bytesInOutput": 2424
"bytesInOutput": 2469
},
"package.json": {
"bytesInOutput": 3144
},
"src/human.js": {
"bytesInOutput": 12832
"bytesInOutput": 13119
}
},
"bytes": 3070917
"bytes": 2991264
}
}
}

View File

@ -3859,9 +3859,37 @@ var require_facemesh = __commonJS((exports2) => {
exports2.triangulation = triangulation;
});
// src/profile.js
// Bundled copy of src/profile.js: aggregates one tf.profile() result per model name —
// total kernel time plus the five slowest and five largest-memory kernel ops.
var require_profile = __commonJS((exports2) => {
const profileData = {};
function profile2(name, data) {
if (!data || !data.kernels)
return;
const maxResults = 5;
// sum of kernel times for kernels that reported a positive execution time
const time = data.kernels.filter((a) => a.kernelTimeMs > 0).reduce((a, b) => a += b.kernelTimeMs, 0);
// NOTE(review): both map() passes assign .id onto the caller-owned kernel records — mutation is visible to the caller
const slowest = data.kernels.map((a, i) => {
a.id = i;
return a;
}).filter((a) => a.kernelTimeMs > 0).sort((a, b) => b.kernelTimeMs - a.kernelTimeMs);
const largest = data.kernels.map((a, i) => {
a.id = i;
return a;
}).filter((a) => a.totalBytesSnapshot > 0).sort((a, b) => b.totalBytesSnapshot - a.totalBytesSnapshot);
// truncate in place to the top five entries
if (slowest.length > maxResults)
slowest.length = maxResults;
if (largest.length > maxResults)
largest.length = maxResults;
const res = {newBytes: data.newBytes, newTensors: data.newTensors, peakBytes: data.peakBytes, numKernelOps: data.kernels.length, timeKernelOps: time, slowestKernelOps: slowest, largestKernelOps: largest};
profileData[name] = res;
}
exports2.run = profile2;
exports2.data = profileData;
});
// src/ssrnet/ssrnet.js
var require_ssrnet = __commonJS((exports2) => {
const tf2 = require("@tensorflow/tfjs");
const profile2 = require_profile();
const models = {};
let last = {age: 0, gender: ""};
let frame = 0;
@ -3887,12 +3915,23 @@ var require_ssrnet = __commonJS((exports2) => {
const promises = [];
let ageT;
let genderT;
const obj = {};
if (!config.profile) {
if (config.face.age.enabled)
promises.push(ageT = models.age.predict(enhance));
if (config.face.gender.enabled)
promises.push(genderT = models.gender.predict(enhance));
await Promise.all(promises);
const obj = {};
} else {
const profileAge = config.face.age.enabled ? await tf2.profile(() => models.age.predict(enhance)) : {};
ageT = profileAge.result.clone();
profileAge.result.dispose();
profile2.run("age", profileAge);
const profileGender = config.face.gender.enabled ? await tf2.profile(() => models.gender.predict(enhance)) : {};
genderT = profileGender.result.clone();
profileGender.result.dispose();
profile2.run("gender", profileGender);
}
if (ageT) {
const data = await ageT.data();
obj.age = Math.trunc(10 * data[0]) / 10;
@ -3919,6 +3958,7 @@ var require_ssrnet = __commonJS((exports2) => {
// src/emotion/emotion.js
var require_emotion = __commonJS((exports2) => {
const tf2 = require("@tensorflow/tfjs");
const profile2 = require_profile();
const annotations = ["angry", "discust", "fear", "happy", "sad", "surpise", "neutral"];
const models = {};
let last = [];
@ -3950,14 +3990,22 @@ var require_emotion = __commonJS((exports2) => {
blueNorm.dispose();
const obj = [];
if (config.face.emotion.enabled) {
let data;
if (!config.profile) {
const emotionT = await models.emotion.predict(grayscale);
const data = await emotionT.data();
data = await emotionT.data();
tf2.dispose(emotionT);
} else {
const profileData = await tf2.profile(() => models.emotion.predict(grayscale));
data = await profileData.result.data();
profileData.result.dispose();
profile2.run("emotion", profileData);
}
for (let i = 0; i < data.length; i++) {
if (multiplier * data[i] > config.face.emotion.minConfidence)
obj.push({score: Math.min(0.99, Math.trunc(100 * multiplier * data[i]) / 100), emotion: annotations[i]});
}
obj.sort((a, b) => b.score - a.score);
tf2.dispose(emotionT);
}
tf2.dispose(grayscale);
last = obj;
@ -3974,8 +4022,6 @@ var require_modelBase = __commonJS((exports2) => {
constructor(model, outputStride) {
this.model = model;
this.outputStride = outputStride;
const inputShape = this.model.inputs[0].shape;
tf2.util.assert(inputShape[1] === -1 && inputShape[2] === -1, () => `Input shape [${inputShape[1]}, ${inputShape[2]}] must both be equal to or -1`);
}
predict(input) {
return tf2.tidy(() => {
@ -5682,6 +5728,8 @@ var require_config = __commonJS((exports2) => {
var config_default = {
backend: "webgl",
console: true,
profile: true,
deallocate: true,
scoped: false,
videoOptimized: true,
filter: {
@ -5777,7 +5825,7 @@ var require_config = __commonJS((exports2) => {
var require_package = __commonJS((exports2, module2) => {
module2.exports = {
name: "@vladmandic/human",
version: "0.5.2",
version: "0.5.3",
description: "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
sideEffects: false,
main: "dist/human.node.js",
@ -5852,6 +5900,7 @@ const emotion = require_emotion();
const posenet = require_posenet();
const handpose = require_handpose();
const fxImage = require_imagefx();
const profile = require_profile();
const defaults = require_config().default;
const app = require_package();
let first = true;
@ -5923,6 +5972,11 @@ class Human {
if (msg && this.config.console)
console.log("Human:", ...msg);
}
// Returns per-model profiling data accumulated by src/profile.js;
// empty object when config.profile is disabled.
profile() {
if (this.config.profile)
return profile.data;
return {};
}
analyze(...msg) {
if (!this.analyzeMemoryLeaks)
return;
@ -5964,13 +6018,14 @@ class Human {
async checkBackend() {
if (tf.getBackend() !== this.config.backend) {
this.state = "backend";
if (this.config.backend in tf.engine().registry) {
this.log("Setting backend:", this.config.backend);
await tf.setBackend(this.config.backend);
await tf.ready();
} else {
this.log("Backend not registred:", this.config.backend);
tf.enableProdMode();
if (this.config.deallocate && this.config.backend === "webgl") {
this.log("Changing WebGL: WEBGL_DELETE_TEXTURE_THRESHOLD:", this.config.deallocate);
tf.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", this.config.deallocate ? 0 : -1);
}
await tf.ready();
}
}
tfImage(input) {

File diff suppressed because one or more lines are too long

5746
dist/human.node.js vendored

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

46
dist/human.node.json vendored
View File

@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
"bytes": 6295,
"bytes": 6870,
"imports": []
},
"package.json": {
@ -9,8 +9,12 @@
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 1646,
"imports": []
"bytes": 1958,
"imports": [
{
"path": "src/profile.js"
}
]
},
"src/facemesh/blazeface.js": {
"bytes": 7161,
@ -116,7 +120,7 @@
"imports": []
},
"src/human.js": {
"bytes": 13072,
"bytes": 13664,
"imports": [
{
"path": "src/facemesh/facemesh.js"
@ -136,6 +140,9 @@
{
"path": "src/imagefx.js"
},
{
"path": "src/profile.js"
},
{
"path": "config.js"
},
@ -190,7 +197,7 @@
"imports": []
},
"src/posenet/modelBase.js": {
"bytes": 1719,
"bytes": 1512,
"imports": []
},
"src/posenet/modelMobileNet.js": {
@ -251,16 +258,24 @@
}
]
},
"src/ssrnet/ssrnet.js": {
"bytes": 1574,
"src/profile.js": {
"bytes": 1004,
"imports": []
},
"src/ssrnet/ssrnet.js": {
"bytes": 2115,
"imports": [
{
"path": "src/profile.js"
}
]
}
},
"outputs": {
"dist/human.node-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 263646
"bytes": 268314
},
"dist/human.node-nobundle.js": {
"imports": [],
@ -289,14 +304,17 @@
"src/facemesh/facemesh.js": {
"bytesInOutput": 2666
},
"src/profile.js": {
"bytesInOutput": 1095
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1748
"bytesInOutput": 2314
},
"src/emotion/emotion.js": {
"bytesInOutput": 1721
"bytesInOutput": 2047
},
"src/posenet/modelBase.js": {
"bytesInOutput": 1120
"bytesInOutput": 912
},
"src/posenet/modelMobileNet.js": {
"bytesInOutput": 506
@ -350,7 +368,7 @@
"bytesInOutput": 20197
},
"config.js": {
"bytesInOutput": 2232
"bytesInOutput": 2273
},
"package.json": {
"bytesInOutput": 3015
@ -359,10 +377,10 @@
"bytesInOutput": 47
},
"src/human.js": {
"bytesInOutput": 11537
"bytesInOutput": 11796
}
},
"bytes": 156154
"bytes": 158252
}
}
}

View File

@ -1,4 +1,5 @@
const tf = require('@tensorflow/tfjs');
const profile = require('../profile.js');
const annotations = ['angry', 'discust', 'fear', 'happy', 'sad', 'surpise', 'neutral'];
const models = {};
@ -33,13 +34,21 @@ async function predict(image, config) {
blueNorm.dispose();
const obj = [];
if (config.face.emotion.enabled) {
let data;
if (!config.profile) {
const emotionT = await models.emotion.predict(grayscale);
const data = await emotionT.data();
data = await emotionT.data();
tf.dispose(emotionT);
} else {
const profileData = await tf.profile(() => models.emotion.predict(grayscale));
data = await profileData.result.data();
profileData.result.dispose();
profile.run('emotion', profileData);
}
for (let i = 0; i < data.length; i++) {
if (multiplier * data[i] > config.face.emotion.minConfidence) obj.push({ score: Math.min(0.99, Math.trunc(100 * multiplier * data[i]) / 100), emotion: annotations[i] });
}
obj.sort((a, b) => b.score - a.score);
tf.dispose(emotionT);
}
tf.dispose(grayscale);
last = obj;

View File

@ -5,6 +5,7 @@ const emotion = require('./emotion/emotion.js');
const posenet = require('./posenet/posenet.js');
const handpose = require('./handpose/handpose.js');
const fxImage = require('./imagefx.js');
const profile = require('./profile.js');
const defaults = require('../config.js').default;
const app = require('../package.json');
@ -88,6 +89,11 @@ class Human {
if (msg && this.config.console) console.log('Human:', ...msg);
}
// Returns per-model profiling data accumulated by src/profile.js;
// empty object when config.profile is disabled.
profile() {
if (this.config.profile) return profile.data;
return {};
}
// helper function: measure tensor leak
analyze(...msg) {
if (!this.analyzeMemoryLeaks) return;
@ -129,16 +135,27 @@ class Human {
// Applies backend configuration: switches the tfjs backend when it differs from config,
// enables prod mode, and optionally tunes WebGL texture deallocation for low-memory devices.
async checkBackend() {
if (tf.getBackend() !== this.config.backend) {
this.state = 'backend';
/* force backend reload
if (this.config.backend in tf.engine().registry) {
this.log('Setting backend:', this.config.backend);
// const backendFactory = tf.findBackendFactory(backendName);
// tf.removeBackend(backendName);
// tf.registerBackend(backendName, backendFactory);
await tf.setBackend(this.config.backend);
await tf.ready();
const backendFactory = tf.findBackendFactory(this.config.backend);
tf.removeBackend(this.config.backend);
tf.registerBackend(this.config.backend, backendFactory);
} else {
this.log('Backend not registered:', this.config.backend);
}
*/
this.log('Setting backend:', this.config.backend);
await tf.setBackend(this.config.backend);
tf.enableProdMode();
/* debug mode is really too much
if (this.config.profile) tf.enableDebugMode();
else tf.enableProdMode();
*/
// threshold 0 makes webgl release textures immediately; -1 is the tfjs default (keep cached)
// NOTE(review): doc comments elsewhere say this is only honored on first call — confirm
if (this.config.deallocate && this.config.backend === 'webgl') {
this.log('Changing WebGL: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);
tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 0 : -1);
}
await tf.ready();
}
}

View File

@ -4,8 +4,6 @@ class BaseModel {
constructor(model, outputStride) {
this.model = model;
this.outputStride = outputStride;
const inputShape = this.model.inputs[0].shape;
tf.util.assert((inputShape[1] === -1) && (inputShape[2] === -1), () => `Input shape [${inputShape[1]}, ${inputShape[2]}] must both be equal to or -1`);
}
/**

24
src/profile.js Normal file
View File

@ -0,0 +1,24 @@
// Accumulated profiling summaries, keyed by model name ('age', 'gender', 'emotion', ...).
const profileData = {};

/**
 * Records a summary of a tf.profile() run under the given name.
 * Keeps total kernel time plus the five slowest and five largest-memory kernel ops.
 *
 * @param {string} name - model name used as the key in the exported data object
 * @param {object} data - result of tf.profile(); ignored when falsy or missing .kernels
 * @returns {undefined} results are stored in profileData[name]
 */
function profile(name, data) {
  if (!data || !data.kernels) return;
  const maxResults = 5;
  // total time across kernels that reported a positive execution time
  const time = data.kernels
    .filter((kernel) => kernel.kernelTimeMs > 0)
    .reduce((sum, kernel) => sum + kernel.kernelTimeMs, 0);
  // attach a stable id (original kernel index) on copies so caller-owned records are not mutated
  const kernels = data.kernels.map((kernel, i) => ({ ...kernel, id: i }));
  const slowest = kernels
    .filter((kernel) => kernel.kernelTimeMs > 0)
    .sort((a, b) => b.kernelTimeMs - a.kernelTimeMs)
    .slice(0, maxResults);
  const largest = kernels
    .filter((kernel) => kernel.totalBytesSnapshot > 0)
    .sort((a, b) => b.totalBytesSnapshot - a.totalBytesSnapshot)
    .slice(0, maxResults);
  profileData[name] = {
    newBytes: data.newBytes,
    newTensors: data.newTensors,
    peakBytes: data.peakBytes,
    numKernelOps: data.kernels.length,
    timeKernelOps: time,
    slowestKernelOps: slowest,
    largestKernelOps: largest,
  };
}
exports.run = profile;
exports.data = profileData;

View File

@ -1,4 +1,5 @@
const tf = require('@tensorflow/tfjs');
const profile = require('../profile.js');
const models = {};
let last = { age: 0, gender: '' };
@ -27,11 +28,23 @@ async function predict(image, config) {
const promises = [];
let ageT;
let genderT;
const obj = {};
if (!config.profile) {
if (config.face.age.enabled) promises.push(ageT = models.age.predict(enhance));
if (config.face.gender.enabled) promises.push(genderT = models.gender.predict(enhance));
await Promise.all(promises);
} else {
const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
ageT = profileAge.result.clone();
profileAge.result.dispose();
profile.run('age', profileAge);
const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
genderT = profileGender.result.clone();
profileGender.result.dispose();
profile.run('gender', profileGender);
}
const obj = {};
if (ageT) {
const data = await ageT.data();
obj.age = Math.trunc(10 * data[0]) / 10;