/**
 * Human demo for NodeJS
 */
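
// usage: node demo/nodejs/node.js [input image file | image url | folder with images]
// when no input is provided the demo runs detection on human's embedded warmup images instead
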
const fs = require('fs');
const path = require('path');
const process = require('process');
const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require

// in nodejs environments tfjs-node is required to be loaded before human
const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
// const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
const Human = require('../../dist/human.node.js'); // use this when using human in dev mode

let human = null;
const myConfig = {
  // backend: 'tensorflow',
  modelBasePath: 'file://models/',
  debug: true,
  async: false,
  filter: {
    enabled: true,
    flip: true,
  },
  face: {
    enabled: true,
    detector: { enabled: true, rotation: false },
    mesh: { enabled: true },
    iris: { enabled: true },
    description: { enabled: true },
    emotion: { enabled: true },
  },
  hand: {
    enabled: true,
  },
  // body: { modelPath: 'blazepose.json', enabled: true },
  body: { enabled: true },
  object: { enabled: true },
};
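
// note: this configuration is merged with human's built-in defaults,
// so only options that differ from the defaults need to be listed here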

async function init() {
  // create instance of human
  human = new Human.Human(myConfig);
  // wait until tf is ready
  await human.tf.ready();
  log.info('human:', human.version, 'tf:', tf.version_core);
  // log.info('Active Configuration', human.config);
  // pre-load models
  await human.load();
  log.info('Loaded:', human.models.loaded());
  // log.info('Memory state:', human.tf.engine().memory());
  // print version of the native tensorflow library when the node binding exposes it
  log.data(tf.backend().binding ? tf.backend().binding.TF_Version : null);
}
async function detect(input) {
  // read input image file and create tensor to be used for processing
  let buffer;
  log.info('Loading image:', input);
  if (input.startsWith('http:') || input.startsWith('https:')) {
    const res = await fetch(input);
    if (res && res.ok) buffer = Buffer.from(await res.arrayBuffer());
    else log.error('Invalid image URL:', input, res?.status, res?.statusText, res?.headers?.get('content-type'));
  } else {
    buffer = fs.readFileSync(input);
  }
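  // note: this relies on the global fetch api available in nodejs 18+;
  // older nodejs versions would need an http client or a fetch polyfill instead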
  log.data('Image bytes:', buffer?.length, 'buffer:', buffer?.slice(0, 32));

  // decode image using tfjs-node so we don't need external dependencies
  // can also be done using canvas.js or some other 3rd party image library
  if (!buffer) return {};
  const tensor = human.tf.tidy(() => {
    const decode = human.tf.node.decodeImage(buffer, 3);
    let expand;
    if (decode.shape[2] === 4) { // input is in rgba format, need to convert to rgb
      const channels = human.tf.split(decode, 4, 2); // tf.split(tensor, 4, 2); // split rgba to channels
      const rgb = human.tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb and ignore alpha
      expand = human.tf.reshape(rgb, [1, decode.shape[0], decode.shape[1], 3]); // move extra dim from the end of tensor and use it as batch number instead
    } else {
      expand = human.tf.expandDims(decode, 0);
    }
    const cast = human.tf.cast(expand, 'float32');
    return cast;
  });
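  // the resulting tensor has shape [1, height, width, 3]: a single-image batch of rgb values in float32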

  // image shape contains image dimensions and depth
  log.state('Processing:', tensor.shape);

  // run actual detection
  let result;
  try {
    result = await human.detect(tensor, myConfig);
  } catch (err) {
    log.error('caught', err);
  }

  // dispose image tensor as we no longer need it
  human.tf.dispose(tensor);
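  // tfjs tensors are not garbage collected automatically, so explicit disposal prevents a memory leak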

  // print data to console
  log.data('Results:');
  if (result && result.face && result.face.length > 0) {
    for (let i = 0; i < result.face.length; i++) {
      const face = result.face[i];
      // pick the emotion with the highest score; the initial value guards against an empty emotion array
      const emotion = (face.emotion || []).reduce((prev, curr) => (prev.score > curr.score ? prev : curr), { score: 0, emotion: 'unknown' });
      log.data(` Face: #${i} boxScore:${face.boxScore} faceScore:${face.faceScore} age:${face.age} genderScore:${face.genderScore} gender:${face.gender} emotionScore:${emotion.score} emotion:${emotion.emotion} distance:${face.distance}`);
    }
  } else {
    log.data(' Face: N/A');
  }
  if (result && result.body && result.body.length > 0) {
    for (let i = 0; i < result.body.length; i++) {
      const body = result.body[i];
      log.data(` Body: #${i} score:${body.score} keypoints:${body.keypoints?.length}`);
    }
  } else {
    log.data(' Body: N/A');
  }
  if (result && result.hand && result.hand.length > 0) {
    for (let i = 0; i < result.hand.length; i++) {
      const hand = result.hand[i];
      log.data(` Hand: #${i} score:${hand.score} keypoints:${hand.keypoints?.length}`);
    }
  } else {
    log.data(' Hand: N/A');
  }
  if (result && result.gesture && result.gesture.length > 0) {
    for (let i = 0; i < result.gesture.length; i++) {
      // each gesture record has the shape { <part>: <index>, gesture: <description> }, e.g. { face: 0, gesture: 'facing center' }
      const [part, gesture] = Object.entries(result.gesture[i]);
      log.data(` Gesture: ${part[0]}#${part[1]} gesture:${gesture[1]}`);
    }
  } else {
    log.data(' Gesture: N/A');
  }

  if (result && result.object && result.object.length > 0) {
    for (let i = 0; i < result.object.length; i++) {
      const object = result.object[i];
      log.data(` Object: #${i} score:${object.score} label:${object.label}`);
    }
  } else {
    log.data(' Object: N/A');
  }
  // print summary of each detected person
  if (result) {
    // invoke persons getter
    const persons = result.persons;
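    // note: persons is a derived view that cross-matches detected faces, bodies, hands and gestures into per-person records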
    // write result objects to file
    // fs.writeFileSync('result.json', JSON.stringify(result, null, 2));
    log.data('Persons:');
    for (let i = 0; i < persons.length; i++) {
      const face = persons[i].face;
      const faceTxt = face ? `score:${face.score} age:${face.age} gender:${face.gender} iris:${face.iris}` : null;
      const body = persons[i].body;
      const bodyTxt = body ? `score:${body.score} keypoints:${body.keypoints?.length}` : null;
      log.data(` #${i}: Face:${faceTxt} Body:${bodyTxt} LeftHand:${persons[i].hands.left ? 'yes' : 'no'} RightHand:${persons[i].hands.right ? 'yes' : 'no'} Gestures:${persons[i].gestures.length}`);
    }
  }
  return result;
}

async function test() {
  process.on('unhandledRejection', (err) => {
    // @ts-ignore // no idea if exception message is complete
    log.error(err?.message || err || 'no error message');
  });

  // test with human's embedded warmup images
  let result;

  log.state('Processing embedded warmup image: face');
  myConfig.warmup = 'face';
  result = await human.warmup(myConfig);

  log.state('Processing embedded warmup image: full');
  myConfig.warmup = 'full';
  result = await human.warmup(myConfig);

  // no need to print results here as the library already prints them during detection because human.config.debug is set
  return result;
}

async function main() {
  log.configure({ inspect: { breakLength: 265 } });
  log.header();
  log.info('Current folder:', process.env.PWD);
  await init();
  const f = process.argv[2];
  if (process.argv.length !== 3) {
    log.warn('Parameters: <input image | folder> missing');
    await test();
  } else if (!fs.existsSync(f) && !f.startsWith('http')) {
    log.error(`File not found: ${process.argv[2]}`);
  } else if (fs.existsSync(f)) {
    const stat = fs.statSync(f);
    if (stat.isDirectory()) {
      const dir = fs.readdirSync(f);
      for (const file of dir) {
        await detect(path.join(f, file));
      }
    } else {
      await detect(f);
    }
  } else {
    await detect(f);
  }
}
main();