changed build for optimized node & browser

pull/293/head
Vladimir Mandic 2020-11-21 12:21:47 -05:00
parent 9eb2426114
commit d5b922de02
8 changed files with 46 additions and 49 deletions

View File

@@ -282,7 +282,7 @@ function runHumanDetect(input, canvas, timestamp) {
else log(`camera not ready: track state: ${input.srcObject?.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
clearTimeout(ui.drawThread);
ui.drawThread = null;
log('frame statistics: drawn:', ui.framesDraw, 'detected:', ui.framesDetect);
log('frame statistics: process:', ui.framesDetect, 'refresh:', ui.framesDraw);
return;
}
status('');

View File

@@ -1,29 +1,12 @@
const tf = require('@tensorflow/tfjs-node');
const log = require('@vladmandic/pilogger');
const fs = require('fs');
const process = require('process');
const console = require('console');
const Human = require('..').default; // this resolves to project root which is '@vladmandic/human'
// for Node, `tfjs-node` or `tfjs-node-gpu` should be loaded before using Human
const tf = require('@tensorflow/tfjs-node'); // or const tf = require('@tensorflow/tfjs-node-gpu');
// load the specific build of the Human library that matches the loaded TensorFlow variant
const Human = require('../dist/human.node.js').default; // or const Human = require('../dist/human.node-gpu.js').default;
const logger = new console.Console({
stdout: process.stdout,
stderr: process.stderr,
ignoreErrors: true,
groupIndentation: 2,
inspectOptions: {
showHidden: true,
depth: 5,
colors: true,
showProxy: true,
maxArrayLength: 1024,
maxStringLength: 10240,
breakLength: 200,
compact: 64,
sorted: false,
getters: true,
},
});
const config = {
const myConfig = {
backend: 'tensorflow',
console: true,
videoOptimized: false,
@@ -31,9 +14,9 @@ const config = {
detector: { modelPath: 'file://models/blazeface-back.json' },
mesh: { modelPath: 'file://models/facemesh.json' },
iris: { modelPath: 'file://models/iris.json' },
age: { modelPath: 'file://models/ssrnet-age-imdb.json' },
gender: { modelPath: 'file://models/ssrnet-gender-imdb.json' },
emotion: { modelPath: 'file://models/emotion.json' },
age: { modelPath: 'file://models/age-ssrnet-imdb.json' },
gender: { modelPath: 'file://models/gender-ssrnet-imdb.json' },
emotion: { modelPath: 'file://models/emotion-large.json' },
},
body: { modelPath: 'file://models/posenet.json' },
hand: {
@@ -42,30 +25,37 @@ const config = {
},
};
async function detect(input, output) {
await tf.setBackend('tensorflow');
async function detect(input) {
// wait until tf is ready
await tf.ready();
logger.info('TFJS Flags:', tf.env().features);
logger.log('Loading:', input);
// create instance of human
const human = new Human(myConfig);
// pre-load models
await human.load();
// read input image file and create tensor to be used for processing
const buffer = fs.readFileSync(input);
const decoded = tf.node.decodeImage(buffer);
const decoded = human.tf.node.decodeImage(buffer);
const casted = decoded.toFloat();
const image = casted.expandDims(0);
decoded.dispose();
casted.dispose();
logger.log('Processing:', image.shape);
const human = new Human();
const result = await human.detect(image, config);
// image shape contains image dimensions and depth
log.state('Processing:', image.shape);
// must disable face model when running in tfjs-node as it's missing required ops
// see <https://github.com/tensorflow/tfjs/issues/4066>
myConfig.face.enabled = false;
// run actual detection
const result = await human.detect(image, myConfig);
// dispose image tensor as we no longer need it
image.dispose();
logger.log(result);
// Draw detected data and save processed image
logger.log('TODO Saving:', output);
// print data to console
log.data(result);
}
async function main() {
if (process.argv.length !== 4) logger.error('Parameters: <input image> <output image>');
else if (!fs.existsSync(process.argv[2])) logger.error(`File not found: ${process.argv[2]}`);
else detect(process.argv[2], process.argv[3]);
if (process.argv.length !== 3) log.error('Parameters: <input image>');
else if (!fs.existsSync(process.argv[2])) log.error(`File not found: ${process.argv[2]}`);
else detect(process.argv[2]);
}
main();
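
For reference, a minimal sketch of the same flow against the GPU builds follows; it is illustrative only (not part of this commit), assumes `@tensorflow/tfjs-node-gpu` is installed, and omits the explicit file:// model paths used above for brevity:

// illustrative sketch only, not part of this commit: the same flow using the GPU builds
const fs = require('fs');
const tf = require('@tensorflow/tfjs-node-gpu'); // GPU-enabled TensorFlow backend, loaded before Human
const Human = require('../dist/human.node-gpu.js').default; // Human build matching the GPU backend

const myConfig = {
  backend: 'tensorflow',
  videoOptimized: false,
  face: { enabled: false }, // same workaround as in the demo above, see <https://github.com/tensorflow/tfjs/issues/4066>
};

async function run(input) {
  await tf.ready(); // wait until the native TensorFlow backend is initialized
  const human = new Human(myConfig);
  await human.load(); // pre-load models
  const buffer = fs.readFileSync(input);
  const tensor = human.tf.node.decodeImage(buffer).toFloat().expandDims(0); // decode into a batched float tensor
  const result = await human.detect(tensor, myConfig);
  tensor.dispose(); // release the input tensor once detection is done
  console.log(result);
}

run(process.argv[2]);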

View File

@@ -31,6 +31,7 @@
"@tensorflow/tfjs-data": "^2.7.0",
"@tensorflow/tfjs-layers": "^2.7.0",
"@tensorflow/tfjs-node": "^2.7.0",
"@tensorflow/tfjs-node-gpu": "^2.7.0",
"@vladmandic/pilogger": "^0.2.7",
"chokidar": "^3.4.3",
"dayjs": "^1.9.6",
@@ -49,7 +50,7 @@
"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",
"lint": "eslint src/*.js demo/*.js",
"dev": "npm install && node server/dev.js",
"build": "npm install && npm run lint && rimraf dist/* && node server/build.js && node server/changelog.js",
"build": "npm install && rimraf dist/* && node server/build.js && node server/changelog.js",
"update": "npm update --depth 20 --force && npm dedupe && npm prune && npm audit"
},
"keywords": [

View File

@@ -117,9 +117,11 @@ class Human {
if (this.firstRun) {
this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);
this.checkBackend(true);
this.log('configuration:', this.config);
this.log('flags:', tf.ENV.flags);
await this.checkBackend(true);
if (tf.ENV.flags.IS_BROWSER) {
this.log('configuration:', this.config);
this.log('tf flags:', tf.ENV.flags);
}
this.firstRun = false;
}
if (this.config.async) {
@@ -155,8 +157,8 @@
// check if backend needs initialization if it changed
async checkBackend(force) {
const timeStamp = now();
if (this.config.backend && (this.config.backend !== '') && force || (tf.getBackend() !== this.config.backend)) {
const timeStamp = now();
this.state = 'backend';
/* force backend reload
if (this.config.backend in tf.engine().registry) {
@@ -189,11 +191,12 @@
}
tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
const gl = await tf.backend().getGPGPUContext().gl;
this.log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
}
await tf.ready();
this.perf.backend = Math.trunc(now() - timeStamp);
}
const current = Math.trunc(now() - timeStamp);
if (current > (this.perf.backend || 0)) this.perf.backend = current;
}
async detectFace(input) {
@@ -418,6 +421,7 @@
async warmup(userConfig, sample) {
if (!sample) sample = new ImageData(255, 255);
// const sample = tf.zeros([1, 255, 255, 3]);
const warmup = await this.detect(sample, userConfig);
this.log('warmed up');
return warmup;
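
As a usage note, `warmup` runs one full detection pass on a generated blank sample so that backend and model initialization cost is not paid on the first real frame. A minimal browser-side sketch of calling it (illustrative only; `ImageData` is a browser API, and reusing `human.config` as the warmup config is an assumption):

// illustrative browser-side sketch, not part of this commit
const human = new Human();
async function init() {
  await human.load(); // pre-load all configured models
  await human.warmup(human.config); // runs one detection on a blank 255x255 ImageData sample
  console.log('human warmed up, version:', human.version);
}
init();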

src/tfjs/tf-node-gpu.js Normal file (+1)
View File

@@ -0,0 +1 @@
export * from '@tensorflow/tfjs-node-gpu';

src/tfjs/tf-node.js Normal file (+1)
View File

@@ -0,0 +1 @@
export * from '@tensorflow/tfjs-node';
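
The two one-line modules above are thin re-export shims; they presumably let the optimized build alias the TensorFlow dependency per bundle target (`human.node.js` vs `human.node-gpu.js`) instead of hard-coding a single package. A minimal sketch of how such a shim could be consumed (the import path and surrounding code are illustrative, not part of this commit):

// illustrative only: library code can import the shim rather than a specific tfjs package,
// letting the build decide which native backend each bundle ships with
import * as tf from './tfjs/tf-node.js'; // the GPU bundle would point here at './tfjs/tf-node-gpu.js'

async function report() {
  await tf.ready(); // initialize the native TensorFlow backend
  console.log('active backend:', tf.getBackend()); // expected to report 'tensorflow'
}
report();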

wiki (submodule, 2 changes)

@@ -1 +1 @@
Subproject commit 0815d53e582c26b6297e1a5ac42f23b9057f56fa
Subproject commit 8b7ff45ead1ade2f530a45f06702c9fa65c8dc35