mirror of https://github.com/vladmandic/human
changed build for optimized node & browser
parent 5373809370
commit 853745512e
@@ -282,7 +282,7 @@ function runHumanDetect(input, canvas, timestamp) {
 else log(`camera not ready: track state: ${input.srcObject?.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
 clearTimeout(ui.drawThread);
 ui.drawThread = null;
-log('frame statistics: drawn:', ui.framesDraw, 'detected:', ui.framesDetect);
+log('frame statistics: process:', ui.framesDetect, 'refresh:', ui.framesDraw);
 return;
 }
 status('');

demo/node.js (70 lines changed)
@@ -1,29 +1,12 @@
-const tf = require('@tensorflow/tfjs-node');
+const log = require('@vladmandic/pilogger');
 const fs = require('fs');
 const process = require('process');
-const console = require('console');
-const Human = require('..').default; // this resolves to project root which is '@vladmandic/human'
+// for Node, `tfjs-node` or `tfjs-node-gpu` should be loaded before using Human
+const tf = require('@tensorflow/tfjs-node'); // or const tf = require('@tensorflow/tfjs-node-gpu');
+// load specific version of Human library that matches TensorFlow mode
+const Human = require('../dist/human.node.js').default; // or const Human = require('../dist/human.node-gpu.js').default;
 
-const logger = new console.Console({
-  stdout: process.stdout,
-  stderr: process.stderr,
-  ignoreErrors: true,
-  groupIndentation: 2,
-  inspectOptions: {
-    showHidden: true,
-    depth: 5,
-    colors: true,
-    showProxy: true,
-    maxArrayLength: 1024,
-    maxStringLength: 10240,
-    breakLength: 200,
-    compact: 64,
-    sorted: false,
-    getters: true,
-  },
-});
-
-const config = {
+const myConfig = {
 backend: 'tensorflow',
 console: true,
 videoOptimized: false,

@@ -31,9 +14,9 @@ const config = {
 detector: { modelPath: 'file://models/blazeface-back.json' },
 mesh: { modelPath: 'file://models/facemesh.json' },
 iris: { modelPath: 'file://models/iris.json' },
-age: { modelPath: 'file://models/ssrnet-age-imdb.json' },
-gender: { modelPath: 'file://models/ssrnet-gender-imdb.json' },
-emotion: { modelPath: 'file://models/emotion.json' },
+age: { modelPath: 'file://models/age-ssrnet-imdb.json' },
+gender: { modelPath: 'file://models/gender-ssrnet-imdb.json' },
+emotion: { modelPath: 'file://models/emotion-large.json' },
 },
 body: { modelPath: 'file://models/posenet.json' },
 hand: {

@@ -42,30 +25,37 @@ const config = {
 },
 };
 
-async function detect(input, output) {
-await tf.setBackend('tensorflow');
+async function detect(input) {
+// wait until tf is ready
 await tf.ready();
-logger.info('TFJS Flags:', tf.env().features);
-logger.log('Loading:', input);
+// create instance of human
+const human = new Human(myConfig);
+// pre-load models
+await human.load();
+// read input image file and create tensor to be used for processing
 const buffer = fs.readFileSync(input);
-const decoded = tf.node.decodeImage(buffer);
+const decoded = human.tf.node.decodeImage(buffer);
 const casted = decoded.toFloat();
 const image = casted.expandDims(0);
 decoded.dispose();
 casted.dispose();
-logger.log('Processing:', image.shape);
-const human = new Human();
-const result = await human.detect(image, config);
+// image shape contains image dimensions and depth
+log.state('Processing:', image.shape);
+// must disable face model when running in tfjs-node as it's missing required ops
+// see <https://github.com/tensorflow/tfjs/issues/4066>
+myConfig.face.enabled = false;
+// run actual detection
+const result = await human.detect(image, myConfig);
+// dispose image tensor as we no longer need it
 image.dispose();
-logger.log(result);
-// Draw detected data and save processed image
-logger.log('TODO Saving:', output);
+// print data to console
+log.data(result);
 }
 
 async function main() {
-if (process.argv.length !== 4) logger.error('Parameters: <input image> <output image>');
-else if (!fs.existsSync(process.argv[2])) logger.error(`File not found: ${process.argv[2]}`);
-else detect(process.argv[2], process.argv[3]);
+if (process.argv.length !== 3) log.error('Parameters: <input image>');
+else if (!fs.existsSync(process.argv[2])) log.error(`File not found: ${process.argv[2]}`);
+else detect(process.argv[2]);
 }
 
 main();
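
For orientation, a condensed, self-contained sketch of the flow the rewritten demo adopts. The require order, model paths, and the face-disable workaround are taken from the diff above; the sample file name and the simplified hand config are illustrative assumptions.

// minimal sketch of the new demo/node.js flow (CPU build)
const fs = require('fs');
const tf = require('@tensorflow/tfjs-node');             // must be loaded before Human
const Human = require('../dist/human.node.js').default;  // bundle matching tfjs-node

const myConfig = {
  backend: 'tensorflow',
  console: true,
  videoOptimized: false,
  face: { enabled: false },  // workaround for missing ops in tfjs-node, see tfjs issue 4066
  body: { modelPath: 'file://models/posenet.json' },
  hand: { enabled: false },  // assumption: hand detection disabled to keep the sketch minimal
};

async function run(inputFile) {
  await tf.ready();
  const human = new Human(myConfig);
  await human.load();                                  // pre-load models
  const buffer = fs.readFileSync(inputFile);           // read image from disk
  const decoded = human.tf.node.decodeImage(buffer);   // decode to a tensor
  const casted = decoded.toFloat();
  const image = casted.expandDims(0);                  // [1, height, width, channels]
  decoded.dispose();
  casted.dispose();
  const result = await human.detect(image, myConfig);  // run detection
  image.dispose();
  console.log(result);
}

run('sample.jpg'); // file name is illustrative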

package.json

@@ -31,6 +31,7 @@
 "@tensorflow/tfjs-data": "^2.7.0",
 "@tensorflow/tfjs-layers": "^2.7.0",
 "@tensorflow/tfjs-node": "^2.7.0",
+"@tensorflow/tfjs-node-gpu": "^2.7.0",
 "@vladmandic/pilogger": "^0.2.7",
 "chokidar": "^3.4.3",
 "dayjs": "^1.9.6",

@@ -49,7 +50,7 @@
 "start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",
 "lint": "eslint src/*.js demo/*.js",
 "dev": "npm install && node server/dev.js",
-"build": "npm install && npm run lint && rimraf dist/* && node server/build.js && node server/changelog.js",
+"build": "npm install && rimraf dist/* && node server/build.js && node server/changelog.js",
 "update": "npm update --depth 20 --force && npm dedupe && npm prune && npm audit"
 },
 "keywords": [
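
With both Node variants of TensorFlow/JS now declared as dependencies, choosing CPU or GPU execution comes down to requiring the matching pair of packages; a minimal sketch, assuming a working CUDA/cuDNN installation for the GPU path (the pairing follows the comments in the demo diff above).

// CPU execution: tfjs-node plus the matching Human bundle
const tf = require('@tensorflow/tfjs-node');
const Human = require('../dist/human.node.js').default;

// GPU execution: swap both requires together (assumes CUDA/cuDNN is installed)
// const tf = require('@tensorflow/tfjs-node-gpu');
// const Human = require('../dist/human.node-gpu.js').default;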

src/human.js (16 lines changed)
@@ -117,9 +117,11 @@ class Human {
 
 if (this.firstRun) {
 this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);
-this.checkBackend(true);
-this.log('configuration:', this.config);
-this.log('flags:', tf.ENV.flags);
+await this.checkBackend(true);
+if (tf.ENV.flags.IS_BROWSER) {
+this.log('configuration:', this.config);
+this.log('tf flags:', tf.ENV.flags);
+}
 this.firstRun = false;
 }
 if (this.config.async) {

@@ -155,8 +157,8 @@ class Human {
 
 // check if backend needs initialization if it changed
 async checkBackend(force) {
-const timeStamp = now();
 if (this.config.backend && (this.config.backend !== '') && force || (tf.getBackend() !== this.config.backend)) {
+const timeStamp = now();
 this.state = 'backend';
 /* force backend reload
 if (this.config.backend in tf.engine().registry) {

@@ -189,11 +191,12 @@ class Human {
 }
 tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
 tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
+const gl = await tf.backend().getGPGPUContext().gl;
+this.log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
 }
 await tf.ready();
+this.perf.backend = Math.trunc(now() - timeStamp);
 }
-const current = Math.trunc(now() - timeStamp);
-if (current > (this.perf.backend || 0)) this.perf.backend = current;
 }
 
 async detectFace(input) {
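
After this change the backend-initialization time is measured only inside the branch that actually switches backends, and it overwrites the previous value instead of keeping the maximum. A minimal sketch of the resulting pattern; ensureBackend, human, and performance.now() are illustrative stand-ins for the class method, its instance, and the module's now() helper.

async function ensureBackend(human, tf, force) {
  // nothing to do (and nothing to measure) when the requested backend is already active
  if (!force && tf.getBackend() === human.config.backend) return;
  const timeStamp = performance.now();
  human.state = 'backend';
  await tf.setBackend(human.config.backend);
  await tf.ready();
  // recorded only when a switch actually ran; overwrites the previous value
  human.perf.backend = Math.trunc(performance.now() - timeStamp);
}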

@@ -418,6 +421,7 @@ class Human {
 
 async warmup(userConfig, sample) {
 if (!sample) sample = new ImageData(255, 255);
+// const sample = tf.zeros([1, 255, 255, 3]);
 const warmup = await this.detect(sample, userConfig);
 this.log('warmed up');
 return warmup;
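
Since warmup() now falls back to a blank 255x255 ImageData when no sample is supplied, a browser-side caller can warm the models without providing any input; a minimal sketch, with the config override passed in being purely illustrative.

(async () => {
  // browser context: ImageData is available globally
  const human = new Human();
  // no sample argument: warmup() substitutes new ImageData(255, 255)
  const result = await human.warmup({ videoOptimized: false });
  console.log('warmup complete:', result);
})();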

(new file)
@@ -0,0 +1 @@
+export * from '@tensorflow/tfjs-node-gpu';

(new file)
@@ -0,0 +1 @@
+export * from '@tensorflow/tfjs-node';

wiki (submodule)

@@ -1 +1 @@
-Subproject commit 0815d53e582c26b6297e1a5ac42f23b9057f56fa
+Subproject commit 8b7ff45ead1ade2f530a45f06702c9fa65c8dc35