<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Human</title>
<meta name="viewport" content="width=device-width" id="viewport">
<meta name="keywords" content="Human">
< meta name = "description" content = "Human: Demo; Author: Vladimir Mandic <https://github.com/vladmandic>" >
<link rel="manifest" href="../manifest.webmanifest">
<link rel="shortcut icon" href="../../favicon.ico" type="image/x-icon">
<style>
@font-face { font-family: 'Lato'; font-display: swap; font-style: normal; font-weight: 100; src: local('Lato'), url('../../assets/lato-light.woff2') }
body { font-family: 'Lato', 'Segoe UI'; font-size: 16px; font-variant: small-caps; margin: 0; background: black; color: white; overflow: hidden; width: 100vw; height: 100vh; }
</style>
</head>
<body>
<canvas id="canvas" style="margin: 0 auto; width: 100%"></canvas>
<pre id="log" style="padding: 8px; position: fixed; bottom: 0"></pre>
<script type="module">
import * as H from '../../dist/human.esm.js'; // equivalent of importing '@vladmandic/human' from npm
const humanConfig = { // user configuration for human, used to fine-tune behavior
modelBasePath: '../../models', // models can be loaded directly from cdn as well
filter: { enabled: true, equalization: true, flip: false }, // input pre-processing: equalization normalizes image contrast, flip would mirror the input horizontally
face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },
body: { enabled: true },
hand: { enabled: true },
gesture: { enabled: true },
object: { enabled: false },
segmentation: { enabled: false },
};
const human = new H.Human(humanConfig); // create instance of human with overrides from user configuration
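// note: options omitted from humanConfig fall back to the library defaults, so only the overrides need to be listed above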
const canvas = document.getElementById('canvas'); // output canvas to draw both webcam and detection results
async function drawLoop() { // main screen refresh loop
const interpolated = human.next(); // get smoothed result interpolated from the last-known results, which are continuously updated by the detection loop
human.draw.canvas(human.webcam.element, canvas); // draw the webcam video to the screen canvas; better than using the processed image since this loop runs faster than the processing loop
await human.draw.all(canvas, interpolated); // draw labels, boxes, lines, etc.
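// drawing and detection are intentionally decoupled: this loop redraws at its own rate regardless of how long each detection takes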
setTimeout(drawLoop, 30); // throttle refresh from the display's max rate to roughly 30 fps (30 ms delay per frame)
}
async function main() { // main entry point
document.getElementById('log').innerHTML = `human version: ${human.version} | tfjs version: ${human.tf.version['tfjs-core']}<br>platform: ${human.env.platform} | agent: ${human.env.agent}`;
await human.webcam.start({ crop: true }); // find webcam and start it
human.video(human.webcam.element); // instruct human to continuously detect video frames
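// human.video() keeps detecting in its own background loop; drawLoop() reads the interpolated results back via human.next()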
canvas.width = human.webcam.width; // set canvas resolution to input webcam native resolution
canvas.height = human.webcam.height;
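// keeping the canvas at the webcam's native resolution helps keep the drawn overlays aligned with the detected coordinates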
canvas.onclick = async () => { // pause when clicked on screen and resume on next click
if (human.webcam.paused) await human.webcam.play();
else human.webcam.pause();
};
await drawLoop(); // start draw loop
}
window.onload = main;
</script>
</body>
</html>