// human/demo/node.js

const log = require('@vladmandic/pilogger');
const fs = require('fs');
const process = require('process');
// for NodeJS, `tfjs-node` or `tfjs-node-gpu` should be loaded before using Human
const tf = require('@tensorflow/tfjs-node'); // or const tf = require('@tensorflow/tfjs-node-gpu');
// load the build of the Human library that matches the TensorFlow variant loaded above (CPU vs GPU)
const Human = require('../dist/human.node.js').default; // or const Human = require('../dist/human.node-gpu.js').default;
let human = null;

const myConfig = {
  backend: 'tensorflow',
  modelBasePath: 'file://models/',
  debug: true,
  videoOptimized: false,
  async: false,
  face: {
    enabled: true,
    detector: { enabled: true, rotation: false },
    mesh: { enabled: true },
    iris: { enabled: true },
    description: { enabled: true },
    emotion: { enabled: true },
  },
  hand: {
    enabled: true,
  },
  // body: { modelPath: 'efficientpose.json', enabled: true },
  // body: { modelPath: 'blazepose.json', enabled: true },
  body: { enabled: true },
  object: { enabled: true },
};
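
// note: options omitted above fall back to the library defaults; the overrides here only select the
// node ('tensorflow') backend, point modelBasePath at local model files, and pick which detectors to enable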

async function init() {
  // wait until tf is ready
  await tf.ready();
  // create instance of human
  human = new Human(myConfig);
  log.info('Human:', human.version);
  log.info('Active Configuration', human.config);
  // pre-load all enabled models
  await human.load();
  // list models that were actually loaded
  const loaded = Object.keys(human.models).filter((a) => human.models[a]);
  log.info('Loaded:', loaded);
  log.info('Memory state:', human.tf.engine().memory());
}
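
// note: calling human.load() in init() is optional; if it is skipped, models are loaded on demand
// during the first detect() call, so pre-loading here simply keeps that first call fast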

async function detect(input) {
  // read input image file and create tensor to be used for processing
  const buffer = fs.readFileSync(input);
  const decoded = human.tf.node.decodeImage(buffer);
  const casted = decoded.toFloat();
  const image = casted.expandDims(0); // detection expects a 4d tensor: [batch, height, width, channels]
  // dispose intermediate tensors to avoid leaking memory
  decoded.dispose();
  casted.dispose();
  // image shape contains image dimensions and depth
  log.state('Processing:', image.shape);
  // run actual detection
  const result = await human.detect(image, myConfig);
  // dispose image tensor as we no longer need it
  image.dispose();
  // detailed results are already printed to console from within the library since human.config.debug is set,
  // so only a short summary of each detected item is printed here
  log.data('Results:');
  for (let i = 0; i < result.face.length; i++) {
    const face = result.face[i];
    const emotion = face.emotion.reduce((prev, curr) => (prev.score > curr.score ? prev : curr)); // pick the dominant emotion
    log.data(` Face: #${i} boxConfidence:${face.boxConfidence} faceConfidence:${face.faceConfidence} age:${face.age} genderConfidence:${face.genderConfidence} gender:${face.gender} emotionScore:${emotion.score} emotion:${emotion.emotion} iris:${face.iris}`);
  }
  for (let i = 0; i < result.body.length; i++) {
    const body = result.body[i];
    log.data(` Body: #${i} score:${body.score}`);
  }
  for (let i = 0; i < result.hand.length; i++) {
    const hand = result.hand[i];
    log.data(` Hand: #${i} confidence:${hand.confidence}`);
  }
  for (let i = 0; i < result.gesture.length; i++) {
    // each gesture result holds two entries: which part it refers to (key) and the gesture label itself (val)
    const [key, val] = Object.entries(result.gesture[i]);
    log.data(` Gesture: ${key[0]}#${key[1]} gesture:${val[1]}`);
  }
  for (let i = 0; i < result.object.length; i++) {
    const object = result.object[i];
    log.data(` Object: #${i} score:${object.score} label:${object.label}`);
  }
  return result;
}
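
// a minimal sketch, not part of the original demo flow, showing how detect() above could be reused
// to process a batch of image files; the 'files' argument is an assumption supplied by the caller
async function detectAll(files) {
  // process sequentially so only one input tensor is alive at a time
  for (const file of files) await detect(file);
}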

async function test() {
  // test with both embedded warmup images: face only and full body
  let result;

  log.state('Processing embedded warmup image: face');
  myConfig.warmup = 'face';
  result = await human.warmup(myConfig);

  log.state('Processing embedded warmup image: full');
  myConfig.warmup = 'full';
  result = await human.warmup(myConfig);
  // no need to print results here as the library already prints them to console because human.config.debug is set
  return result;
}

async function main() {
  log.header();
  log.info('Current folder:', process.env.PWD);
  await init();
  if (process.argv.length !== 3) {
    log.warn('Parameters: <input image> missing');
    await test();
  } else if (!fs.existsSync(process.argv[2])) {
    log.error(`File not found: ${process.argv[2]}`);
  } else {
    await detect(process.argv[2]);
  }
}
main();
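
// usage, assuming this file lives in demo/ and model files are available under ./models as configured above:
//   node demo/node.js <path-to-image>
// when no image path is given, the embedded warmup images are processed instead (see test() above)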