human/demo/typescript/index.ts

/**
* Human demo for browsers
* @default Human Library
* @summary <https://github.com/vladmandic/human>
* @author <https://github.com/vladmandic>
* @copyright <https://github.com/vladmandic>
* @license MIT
*/
import { Human } from '../../dist/human.esm.js'; // equivalent of the @vladmandic/human npm package
const humanConfig = { // user configuration for human, used to fine-tune behavior
// backend: 'webgpu' as 'webgpu',
// async: true,
modelBasePath: '../../models',
filter: { enabled: true, equalization: false },
face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },
body: { enabled: true },
hand: { enabled: true },
object: { enabled: false },
gesture: { enabled: true },
};
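// options not specified above fall back to the library defaults; this partial config is merged with defaults when the instance is created below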
const human = new Human(humanConfig); // create instance of human with overrides from user configuration
human.env['perfadd'] = false; // should performance data show instantaneous values or cumulative totals
human.draw.options.font = 'small-caps 18px "Lato"'; // set font used to draw labels when using draw methods
human.draw.options.lineHeight = 20;
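// note: these options only affect the human.draw.* helper methods used in the draw loop below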
const dom = { // grab instances of dom objects so we don't have to look them up later
video: document.getElementById('video') as HTMLVideoElement,
canvas: document.getElementById('canvas') as HTMLCanvasElement,
log: document.getElementById('log') as HTMLPreElement,
fps: document.getElementById('status') as HTMLPreElement,
perf: document.getElementById('performance') as HTMLDivElement,
};
const timestamp = { detect: 0, draw: 0, tensors: 0 }; // holds timestamps and tensor counts used to calculate performance and detect possible memory leaks
const fps = { detect: 0, draw: 0 }; // holds calculated fps information for both detect and screen refresh
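// fps values are calculated below as 1000 / elapsed milliseconds between consecutive loop iterations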
const log = (...msg) => { // helper method to output messages
dom.log.innerText += msg.join(' ') + '\n';
// eslint-disable-next-line no-console
console.log(...msg);
};
const status = (msg) => dom.fps.innerText = msg; // print status element
const perf = (msg) => dom.perf.innerText = 'tensors:' + human.tf.memory().numTensors + ' | performance: ' + JSON.stringify(msg).replace(/"|{|}/g, '').replace(/,/g, ' | '); // print performance element
async function webCam() { // initialize webcam
status('starting webcam...');
// @ts-ignore resizeMode is not yet defined in TypeScript's DOM typings (lib.dom.d.ts)
const options: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };
const stream: MediaStream = await navigator.mediaDevices.getUserMedia(options);
const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });
dom.video.srcObject = stream;
dom.video.play();
await ready;
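// size canvas to match the actual video stream resolution so drawn overlays align with the source frames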
dom.canvas.width = dom.video.videoWidth;
dom.canvas.height = dom.video.videoHeight;
const track: MediaStreamTrack = stream.getVideoTracks()[0];
const capabilities: MediaTrackCapabilities | string = track.getCapabilities ? track.getCapabilities() : '';
const settings: MediaTrackSettings | string = track.getSettings ? track.getSettings() : '';
const constraints: MediaTrackConstraints | string = track.getConstraints ? track.getConstraints() : '';
log('video:', dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });
dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click
if (dom.video.paused) dom.video.play();
else dom.video.pause();
};
}
async function detectionLoop() { // main detection loop
if (!dom.video.paused) {
// console.log('profiling data:', await human.profile(dom.video));
await human.detect(dom.video); // actual detection; we are not capturing the output in a local variable as it can also be accessed via human.result
const tensors = human.tf.memory().numTensors; // check current tensor usage for memory leaks
if (tensors - timestamp.tensors !== 0) log('allocated tensors:', tensors - timestamp.tensors); // printed on start and each time there is a tensor leak
timestamp.tensors = tensors;
}
const now = human.now();
fps.detect = 1000 / (now - timestamp.detect);
timestamp.detect = now;
requestAnimationFrame(detectionLoop); // start new frame immediately
}
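// detection and drawing are decoupled: the detection loop above runs as fast as the backend allows,
// while the draw loop below renders interpolated results at its own capped refresh rate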
async function drawLoop() { // main screen refresh loop
if (!dom.video.paused) {
const interpolated = await human.next(human.result); // smooth results by interpolating from the last-known results
await human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen
await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.
perf(interpolated.performance); // write performance data
}
const now = human.now();
fps.draw = 1000 / (now - timestamp.draw);
timestamp.draw = now;
status(dom.video.paused ? 'paused' : `fps: ${fps.detect.toFixed(1).padStart(5, ' ')} detect | ${fps.draw.toFixed(1).padStart(5, ' ')} draw`); // write status
// requestAnimationFrame(drawLoop); // refresh at screen refresh rate
setTimeout(drawLoop, 30); // slow down refresh from the maximum screen refresh rate to a target of ~30 fps
}
async function main() { // main entry point
log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);
log('platform:', human.env.platform, '| agent:', human.env.agent);
status('loading...');
await human.load(); // preload all models
log('backend:', human.tf.getBackend(), '| available:', human.env.backends);
log('loaded models:', Object.values(human.models).filter((model) => model !== null).length);
status('initializing...');
await human.warmup(); // warmup function to initialize backend for future faster detection
await webCam(); // start webcam
await detectionLoop(); // start detection loop
await drawLoop(); // start draw loop
}
window.onload = main;