human/demo/nodejs/node-video.js

/**
 * Human demo for NodeJS
 * Unsupported sample that uses the external utility ffmpeg to decode video input and process it with Human
 *
 * Uses ffmpeg to decode video input into a stream of motion jpeg images, which pipe2jpeg then parses for frame start/end markers
 * Each complete frame triggers an event with a jpeg buffer that can then be decoded and passed to Human for processing
 * If you want to process frames at specific intervals, set the output fps to some value
 * If you want to process a live input stream, set the real-time flag and set the input as required
 *
 * Note that [pipe2jpeg](https://www.npmjs.com/package/pipe2jpeg) is not part of Human dependencies and must be installed manually (e.g. `npm install pipe2jpeg`)
 * A working version of `ffmpeg` must be present on the system
 */
const process = require('process');
const spawn = require('child_process').spawn;
const log = require('@vladmandic/pilogger'); // eslint-disable-line node/no-unpublished-require
// in nodejs environments, tfjs-node must be loaded before human
// const tf = require('@tensorflow/tfjs-node'); // eslint-disable-line node/no-unpublished-require
const Pipe2Jpeg = require('pipe2jpeg'); // eslint-disable-line node/no-missing-require, import/no-unresolved
// const human = require('@vladmandic/human'); // use this when human is installed as module (majority of use cases)
const Human = require('../../dist/human.node.js'); // use this when using human in dev mode
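// note: the dist folder also ships human.node-gpu.js and human.node-wasm.js builds for alternate tfjs backends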
let count = 0; // processed frame counter
let busy = false; // busy flag so frames arriving while detection is in progress are dropped
let inputFile = './test.mp4';
if (process.argv.length === 3) inputFile = process.argv[2]; // optionally take input file from the command line
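// minimal sanity check (an assumption, not part of the original demo): with '-loglevel quiet'
// ffmpeg fails silently on a missing file, so warn about it up front using the core fs module
const fs = require('fs');
if (!fs.existsSync(inputFile)) log.warn('input file does not exist:', inputFile);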
const humanConfig = {
  modelBasePath: 'file://models/',
  debug: false,
  async: true,
  filter: { enabled: false },
  face: {
    enabled: true,
    detector: { enabled: true, rotation: false },
    mesh: { enabled: true },
    iris: { enabled: true },
    description: { enabled: true },
    emotion: { enabled: true },
  },
  hand: { enabled: false },
  body: { enabled: false },
  object: { enabled: false },
};
const human = new Human.Human(humanConfig);
const pipe2jpeg = new Pipe2Jpeg();
const ffmpegParams = [
  '-loglevel', 'quiet',
  // input
  // '-re', // optional: process video in real-time instead of as fast as possible
  '-i', `${inputFile}`, // input file
  // output
  '-an', // drop audio
  '-c:v', 'mjpeg', // use motion jpeg as output encoder
  '-pix_fmt', 'yuvj422p', // typical for mp4, may need different settings for some videos
  '-f', 'image2pipe', // pipe images as output
  // '-vf', 'fps=5,scale=800:600', // optional video filter, do anything here such as process at fixed 5fps or resize to specific resolution
  'pipe:1', // output to unix pipe that is then captured by pipe2jpeg
];
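// for reference, the parameter list above is equivalent to running ffmpeg directly as:
//   ffmpeg -loglevel quiet -i ./test.mp4 -an -c:v mjpeg -pix_fmt yuvj422p -f image2pipe pipe:1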
async function detect(jpegBuffer) {
  if (busy) return; // skip processing if busy
  busy = true;
  const tensor = human.tf.node.decodeJpeg(jpegBuffer, 3); // decode jpeg buffer to raw tensor
  const res = await human.detect(tensor);
  // start custom processing here
  log.data('frame', { frame: ++count, size: jpegBuffer.length, shape: tensor.shape, face: res?.face?.length, body: res?.body?.length, hand: res?.hand?.length, gesture: res?.gesture?.length });
  if (res?.face?.[0]) log.data('person', { score: [res.face[0].boxScore, res.face[0].faceScore], age: res.face[0].age || 0, gender: [res.face[0].genderScore || 0, res.face[0].gender], emotion: res.face[0].emotion?.[0] });
  human.tf.dispose(tensor); // must dispose tensor once done with it to avoid leaking memory on every frame
  // at the end of processing mark loop as not busy so it can process the next frame
  busy = false;
}
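// optional diagnostic (an assumption, not part of the original demo): tfjs tracks the number of
// live tensors, so logging it after each frame makes a missed dispose() easy to spot during development
// log.data('memory', { tensors: human.tf.engine().memory().numTensors });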
async function main() {
  log.header();
  await human.tf.ready();
  await human.load(); // pre-load models (optional; models otherwise load lazily on first detect call)
  log.info({ human: human.version, tf: human.tf.version_core });
  log.info({ input: inputFile });
  pipe2jpeg.on('data', (jpegBuffer) => detect(jpegBuffer));
  const ffmpeg = spawn('ffmpeg', ffmpegParams, { stdio: ['ignore', 'pipe', 'ignore'] });
  ffmpeg.on('error', (error) => log.error('ffmpeg error:', error));
  ffmpeg.on('exit', (code, signal) => log.info('ffmpeg exit', code, signal));
  ffmpeg.stdout.pipe(pipe2jpeg);
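  // minimal shutdown hook (an assumption, not part of the original demo):
  // make sure the spawned ffmpeg process is terminated when the demo is interrupted with ctrl-c
  process.on('SIGINT', () => {
    ffmpeg.kill('SIGTERM');
    process.exit(0);
  });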
}
main();
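// example usage: node demo/nodejs/node-video.js ./test.mp4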