import Human from '../dist/human.esm.js';
import draw from './draw.js';
import Menu from './menu.js';

const human = new Human();

// ui options
const ui = {
  baseColor: 'rgba(173, 216, 230, 0.3)', // 'lightblue' with light alpha channel
  baseBackground: 'rgba(50, 50, 50, 1)', // 'grey'
  baseLabel: 'rgba(173, 216, 230, 0.9)', // 'lightblue' with dark alpha channel
  baseFontProto: 'small-caps {size} "Segoe UI"', // font template; '{size}' is substituted before use
  baseLineWidth: 12,
  baseLineHeightProto: 2,
  columns: 2, // number of thumbnail columns when processing sample images
  busy: false, // re-entrancy guard used by setupCamera
  facing: true, // true: front ('user') camera, false: back ('environment') camera
  useWorker: false, // offload detection to a web worker instead of the main thread
  worker: 'demo/worker.js',
  samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'],
  drawBoxes: true,
  drawPoints: false,
  drawPolygons: true,
  fillPolygons: true,
  useDepth: true,
  console: true, // master switch for log() output
  maxFrames: 10, // sliding-window size for the fps average
  modelsPreload: true,
  modelsWarmup: true,
};

// global variables
let menu;
let menuFX;
let worker;
let timeStamp; // performance.now() snapshot taken when a frame/image starts processing
let camera = {}; // populated by setupCamera with name/width/height/facing
const fps = []; // sliding window of recent frame rates (max ui.maxFrames entries)

// helper function: translates json to human readable string
// objects are JSON-stringified with braces/brackets/quotes stripped; other values are appended as-is
function str(...msg) {
  if (!Array.isArray(msg)) return msg;
  let line = '';
  for (const entry of msg) {
    if (typeof entry === 'object') line += JSON.stringify(entry).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ');
    else line += entry;
  }
  return line;
}

// helper function: wrapper around console output
const log = (...msg) => {
  // eslint-disable-next-line no-console
  if (ui.console) console.log(...msg);
};

// helper function: writes a short message to the #status element
const status = (msg) => {
  // eslint-disable-next-line no-console
  document.getElementById('status').innerText = msg;
};

// draws processed results and starts processing of a next frame
function drawResults(input, result, canvas) {
  // update fps data
  fps.push(1000 / (performance.now() - timeStamp));
  if (fps.length > ui.maxFrames) fps.shift();
  // enable for continous performance monitoring
  // console.log(result.performance);
  // eslint-disable-next-line no-use-before-define
  requestAnimationFrame(() => runHumanDetect(input, canvas)); // immediate loop before we even draw results
  // draw fps chart
  menu.updateChart('FPS', fps);
  // draw image from video
  const ctx = canvas.getContext('2d');
  ctx.fillStyle = ui.baseBackground;
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  if (result.canvas) {
    // library produced a (possibly filtered/resized) canvas: match output size and blit it
    if (result.canvas.width !== canvas.width) canvas.width = result.canvas.width;
    if (result.canvas.height !== canvas.height) canvas.height = result.canvas.height;
    ctx.drawImage(result.canvas, 0, 0, result.canvas.width, result.canvas.height, 0, 0, result.canvas.width, result.canvas.height);
  } else {
    // no processed canvas available: draw the raw input scaled to the output canvas
    ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
  }
  // draw all results
  draw.face(result.face, canvas, ui, human.facemesh.triangulation);
  draw.body(result.body, canvas, ui);
  draw.hand(result.hand, canvas, ui);
  draw.gesture(result.gesture, canvas, ui);
  // update log
  const engine = human.tf.engine();
  const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';
  const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;
  const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : '';
  // average fps over the sliding window, truncated to one decimal
  const avg = Math.trunc(10 * fps.reduce((a, b) => a + b) / fps.length) / 10;
  document.getElementById('log').innerText = ` video: ${camera.name} | facing: ${camera.facing} | resolution: ${camera.width} x ${camera.height} ${processing} backend: ${human.tf.getBackend()} | ${memory} performance: ${str(result.performance)} FPS:${avg} `;
}

// setup webcam
// returns a promise resolving to the <video> element once data is loaded, or null on failure
async function setupCamera() {
  if (ui.busy) return null;
  ui.busy = true;
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  const output = document.getElementById('log');
  const live = video.srcObject ?
((video.srcObject.getVideoTracks()[0].readyState === 'live') && (video.readyState > 2) && (!video.paused)) : false; let msg = ''; status('setting up camera'); // setup webcam. note that navigator.mediaDevices requires that page is accessed via https if (!navigator.mediaDevices) { msg = 'camera access not supported'; output.innerText += `\n${msg}`; log(msg); status(msg); return null; } let stream; const constraints = { audio: false, video: { facingMode: (ui.facing ? 'user' : 'environment'), resizeMode: 'none' }, }; try { if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth }; else constraints.video.height = { ideal: window.innerHeight }; stream = await navigator.mediaDevices.getUserMedia(constraints); } catch (err) { if (err.name === 'PermissionDeniedError') msg = 'camera permission denied'; else if (err.name === 'SourceUnavailableError') msg = 'camera not available'; else msg = 'camera error'; output.innerText += `\n${msg}`; status(msg); log(err); } if (stream) video.srcObject = stream; else return null; const track = stream.getVideoTracks()[0]; const settings = track.getSettings(); log('camera constraints:', constraints, 'window:', { width: window.innerWidth, height: window.innerHeight }, 'settings:', settings, 'track:', track); camera = { name: track.label, width: settings.width, height: settings.height, facing: settings.facingMode === 'user' ? 'front' : 'back' }; return new Promise((resolve) => { video.onloadeddata = async () => { video.width = video.videoWidth; video.height = video.videoHeight; canvas.width = video.width; canvas.height = video.height; canvas.style.width = canvas.width > canvas.height ? '100vw' : ''; canvas.style.height = canvas.width > canvas.height ? 
'' : '100vh'; if (live) video.play(); ui.busy = false; // do once more because onresize events can be delayed or skipped // if (video.width > window.innerWidth) await setupCamera(); status(''); resolve(video); }; }); } // wrapper for worker.postmessage that creates worker if one does not exist function webWorker(input, image, canvas) { if (!worker) { // create new webworker and add event handler only once log('creating worker thread'); worker = new Worker(ui.worker, { type: 'module' }); worker.warned = false; // after receiving message from webworker, parse&draw results and send new frame for processing worker.addEventListener('message', (msg) => { if (!worker.warned) { log('warning: cannot transfer canvas from worked thread'); log('warning: image will not show filter effects'); worker.warned = true; } drawResults(input, msg.data.result, canvas); }); } // pass image data as arraybuffer to worker by reference to avoid copy worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height }, [image.data.buffer]); } // main processing function when input is webcam, can use direct invocation or web worker function runHumanDetect(input, canvas) { timeStamp = performance.now(); // if live video const live = input.srcObject && (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused); if (!live) { // if we want to continue and camera not ready, retry in 0.5sec, else just give up if ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500); else log(`camera not ready: track state: ${input.srcObject?.getVideoTracks()[0].readyState} stream state: ${input.readyState}`); return; } status(''); if (ui.useWorker) { // get image data from video as we cannot send html objects to webworker const offscreen = new OffscreenCanvas(canvas.width, canvas.height); const ctx = offscreen.getContext('2d'); ctx.drawImage(input, 0, 0, 
input.width, input.height, 0, 0, canvas.width, canvas.height); const data = ctx.getImageData(0, 0, canvas.width, canvas.height); // perform detection in worker webWorker(input, data, canvas); } else { human.detect(input).then((result) => { if (result.error) log(result.error); else drawResults(input, result, canvas); if (human.config.profile) log('profile data:', human.profile()); }); } } // main processing function when input is image, can use direct invocation or web worker async function processImage(input) { timeStamp = performance.now(); return new Promise((resolve) => { const image = new Image(); image.onload = async () => { log('Processing image:', image.src); const canvas = document.getElementById('canvas'); image.width = image.naturalWidth; image.height = image.naturalHeight; canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image.naturalWidth; canvas.height = human.config.filter.height && human.config.filter.height > 0 ? human.config.filter.height : image.naturalHeight; const result = await human.detect(image); drawResults(image, result, canvas); const thumb = document.createElement('canvas'); thumb.className = 'thumbnail'; thumb.width = window.innerWidth / (ui.columns + 0.1); thumb.height = canvas.height / (window.innerWidth / thumb.width); const ctx = thumb.getContext('2d'); ctx.drawImage(canvas, 0, 0, canvas.width, canvas.height, 0, 0, thumb.width, thumb.height); document.getElementById('samples-container').appendChild(thumb); image.src = ''; resolve(true); }; image.src = input; }); } // just initialize everything and call main function async function detectVideo() { human.config.videoOptimized = true; document.getElementById('samples-container').style.display = 'none'; document.getElementById('canvas').style.display = 'block'; const video = document.getElementById('video'); const canvas = document.getElementById('canvas'); const size = 12 + Math.trunc(window.innerWidth / 400); ui.baseFont = 
ui.baseFontProto.replace(/{size}/, `${size}px`); ui.baseLineHeight = ui.baseLineHeightProto; if ((video.srcObject !== null) && !video.paused) { document.getElementById('play').style.display = 'block'; status('paused'); video.pause(); } else { await setupCamera(); document.getElementById('play').style.display = 'none'; status(''); video.play(); } runHumanDetect(video, canvas); } // just initialize everything and call main function async function detectSampleImages() { document.getElementById('play').style.display = 'none'; human.config.videoOptimized = false; const size = Math.trunc(ui.columns * 25600 / window.innerWidth); ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`); ui.baseLineHeight = ui.baseLineHeightProto * ui.columns; document.getElementById('canvas').style.display = 'none'; document.getElementById('samples-container').style.display = 'block'; log('Running detection of sample images'); status('processing images'); document.getElementById('samples-container').innerHTML = ''; for (const sample of ui.samples) await processImage(sample); status(''); } function setupMenu() { menu = new Menu(document.body, '...', { top: '1rem', right: '1rem' }); const btn = menu.addButton('Start Video', 'Pause Video', () => detectVideo()); menu.addButton('Process Images', 'Process Images', () => detectSampleImages()); document.getElementById('play').addEventListener('click', () => btn.click()); menu.addHTML('