// human/demo/browser.js — browser demo for the Human library
import human from '../dist/human.esm.js';
import draw from './draw.js';
import Menu from './menu.js';
// ui options: rendering style, worker usage and demo behavior
const ui = {
  // drawing style
  baseColor: 'rgba(173, 216, 230, 0.3)', // this is 'lightblue', just with alpha channel
  baseLabel: 'rgba(173, 216, 230, 0.9)',
  baseFontProto: 'small-caps {size} "Segoe UI"', // {size} is substituted at runtime
  baseLineWidth: 16,
  baseLineHeightProto: 2,
  // gallery layout
  columns: 2,
  // runtime state and camera selection
  busy: false,
  facing: true, // true = front ('user'), false = back ('environment')
  // web worker configuration
  useWorker: false,
  worker: 'worker.js',
  // sample images for the image-processing demo
  samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'],
  // what to draw on top of detection results
  drawBoxes: true,
  drawPoints: false,
  drawPolygons: true,
  fillPolygons: true,
  useDepth: true,
  console: true,
  // size of the rolling fps window
  maxFrames: 10,
};
// configuration overrides passed to human.detect()
const config = {
  backend: 'webgl', // if you want to use 'wasm' backend, enable script load of tf and tf-backend-wasm in index.html
  // face pipeline: detector plus optional mesh/iris/age/gender/emotion stages
  face: {
    enabled: true,
    detector: { maxFaces: 10, skipFrames: 10, minConfidence: 0.5, iouThreshold: 0.3, scoreThreshold: 0.7 },
    mesh: { enabled: true },
    iris: { enabled: true },
    age: { enabled: true, skipFrames: 10 },
    gender: { enabled: true },
    emotion: { enabled: true, minConfidence: 0.5, useGrayscale: true },
  },
  // body and hand pipelines
  body: { enabled: true, maxDetections: 10, scoreThreshold: 0.7, nmsRadius: 20 },
  hand: { enabled: true, skipFrames: 10, minConfidence: 0.5, iouThreshold: 0.3, scoreThreshold: 0.7 },
};
// global variables shared across the demo
let menu; // Menu instance, created in setupMenu()
let worker; // web worker handle, lazily created in webWorker()
let timeStamp; // start time (ms) of the detection pass currently in flight
const fps = []; // rolling window of recent frames-per-second samples
// helper function: translates json to human readable string
// concatenates all arguments into one line; objects are JSON-stringified
// with braces/brackets/quotes stripped and commas spaced for readability
function str(...msg) {
  // note: a rest parameter is always an array, so the previous
  // `if (!Array.isArray(msg)) return msg` guard was unreachable dead code
  let line = '';
  for (const entry of msg) {
    if (typeof entry === 'object') line += JSON.stringify(entry).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ');
    else line += entry;
  }
  return line;
}
// helper function: wrapper around console output, gated by ui.console
const log = (...msg) => {
  if (!ui.console) return;
  // eslint-disable-next-line no-console
  console.log(...msg);
};
// draws processed results and starts processing of a next frame
function drawResults(input, result, canvas) {
  // update the rolling fps window based on how long this pass took
  const elapsed = performance.now() - timeStamp;
  fps.push(1000 / elapsed);
  if (fps.length > ui.maxFrames) fps.shift();
  menu.updateChart('FPS', fps);
  // schedule the next detection pass before drawing so the loop never stalls
  // eslint-disable-next-line no-use-before-define
  requestAnimationFrame(() => runHumanDetect(input, canvas)); // immediate loop
  // draw current frame from video onto the output canvas
  const ctx = canvas.getContext('2d');
  ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
  // overlay all detection results
  draw.face(result.face, canvas, ui, human.facemesh.triangulation);
  draw.body(result.body, canvas, ui);
  draw.hand(result.hand, canvas, ui);
  // update log with tfjs engine memory statistics
  const engine = human.tf.engine();
  const memory = `${engine.state.numBytes.toLocaleString()} bytes ${engine.state.numDataBuffers.toLocaleString()} buffers ${engine.state.numTensors.toLocaleString()} tensors`;
  const gpu = engine.backendInstance ? `GPU: ${engine.backendInstance.numBytesInGPU.toLocaleString()} bytes` : '';
  document.getElementById('log').innerText = `
TFJS Version: ${human.tf.version_core} | Backend: ${human.tf.getBackend()} | Memory: ${memory} ${gpu}
Performance: ${str(result.performance)} | Object size: ${(str(result)).length.toLocaleString()} bytes
`;
}
// setup webcam: acquires a media stream and resolves with the video element once data is flowing
async function setupCamera() {
  if (ui.busy) return null; // re-entrancy guard: only one setup at a time
  ui.busy = true;
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  const output = document.getElementById('log');
  // a feed counts as live only when the track is live, the element has data and playback is not paused
  const live = video.srcObject
    ? ((video.srcObject.getVideoTracks()[0].readyState === 'live') && (video.readyState > 2) && (!video.paused))
    : false;
  let msg = `Setting up camera: live: ${live} facing: ${ui.facing ? 'front' : 'back'}`;
  output.innerText += `\n${msg}`;
  log(msg);
  // setup webcam. note that navigator.mediaDevices requires that page is accessed via https
  if (!navigator.mediaDevices) {
    msg = 'Camera access not supported';
    output.innerText += `\n${msg}`;
    log(msg);
    return null;
  }
  let stream;
  try {
    stream = await navigator.mediaDevices.getUserMedia({
      audio: false,
      video: { facingMode: (ui.facing ? 'user' : 'environment'), width: window.innerWidth, height: window.innerHeight },
    });
  } catch (err) {
    output.innerText += '\nCamera permission denied';
    log(err);
  }
  if (!stream) return null;
  video.srcObject = stream;
  return new Promise((resolve) => {
    video.onloadeddata = async () => {
      // size both the video element and the output canvas to the native stream resolution
      video.width = video.videoWidth;
      video.height = video.videoHeight;
      canvas.width = video.videoWidth;
      canvas.height = video.videoHeight;
      if (live) video.play();
      ui.busy = false;
      // do once more because onresize events can be delayed or skipped
      if (video.width > window.innerWidth) await setupCamera();
      output.innerText += `\nCamera resolution: ${video.width} x ${video.height}`;
      resolve(video);
    };
  });
}
// wrapper for worker.postMessage that lazily creates the worker on first use
function webWorker(input, image, canvas) {
  if (worker === undefined) {
    // create new webworker and attach the result handler only once
    log('Creating worker thread');
    worker = new Worker(ui.worker, { type: 'module' });
    // after receiving message from webworker, parse&draw results and send new frame for processing
    worker.addEventListener('message', (e) => drawResults(input, e.data, canvas));
  }
  // pass image data as arraybuffer to worker by reference to avoid copy
  const payload = { image: image.data.buffer, width: canvas.width, height: canvas.height, config };
  worker.postMessage(payload, [image.data.buffer]);
}
// main processing function when input is webcam, can use direct invocation or web worker
function runHumanDetect(input, canvas) {
  timeStamp = performance.now();
  // only operate on an attached media stream; otherwise do nothing
  if (!input.srcObject) return;
  const track = input.srcObject.getVideoTracks()[0];
  const live = (track.readyState === 'live') && (input.readyState > 2) && (!input.paused);
  if (!live) {
    // video not ready yet: retry shortly instead of spinning
    if (!input.paused) log(`Video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
    setTimeout(() => runHumanDetect(input, canvas), 500);
    return;
  }
  if (ui.useWorker) {
    // get image data from video as we cannot send html objects to webworker
    const offscreen = new OffscreenCanvas(canvas.width, canvas.height);
    const ctx = offscreen.getContext('2d');
    ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
    const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
    // perform detection in worker
    webWorker(input, data, canvas);
  } else {
    // perform detection directly on the main thread
    human.detect(input, config).then((result) => {
      if (result.error) log(result.error);
      else drawResults(input, result, canvas);
    });
  }
}
// main processing function when input is image, can use direct invocation or web worker
async function processImage(input) {
  // frame skipping must be disabled when processing still images
  config.face.detector.skipFrames = 0;
  config.face.emotion.skipFrames = 0;
  config.face.age.skipFrames = 0;
  config.hand.skipFrames = 0;
  timeStamp = performance.now();
  return new Promise((resolve) => {
    const image = document.getElementById('image');
    image.onload = async () => {
      log('Processing image:', image.src);
      const canvas = document.getElementById('canvas');
      // size element and canvas to the image's natural resolution
      image.width = image.naturalWidth;
      image.height = image.naturalHeight;
      canvas.width = image.naturalWidth;
      canvas.height = image.naturalHeight;
      const result = await human.detect(image, config);
      drawResults(image, result, canvas);
      // render a scaled-down thumbnail of the annotated canvas into the samples gallery
      const thumb = document.createElement('canvas');
      thumb.width = (window.innerWidth - menu.width) / (ui.columns + 0.1);
      thumb.height = canvas.height / (window.innerWidth / thumb.width);
      thumb.style.margin = '8px';
      thumb.style.boxShadow = '4px 4px 4px 0 dimgrey';
      const thumbCtx = thumb.getContext('2d');
      thumbCtx.drawImage(canvas, 0, 0, canvas.width, canvas.height, 0, 0, thumb.width, thumb.height);
      document.getElementById('samples').appendChild(thumb);
      image.src = '';
      resolve(true);
    };
    // assigning src triggers the onload handler above
    image.src = input;
  });
}
// toggles webcam detection: pauses if running, otherwise starts camera and detection loop
async function detectVideo() {
  document.getElementById('samples').style.display = 'none';
  document.getElementById('canvas').style.display = 'block';
  const video = document.getElementById('video');
  const canvas = document.getElementById('canvas');
  // video overlay uses a fixed font size and line height
  ui.baseFont = ui.baseFontProto.replace(/{size}/, '1.2rem');
  ui.baseLineHeight = ui.baseLineHeightProto;
  const running = (video.srcObject !== null) && !video.paused;
  if (running) {
    document.getElementById('log').innerText += '\nPaused ...';
    video.pause();
  } else {
    await setupCamera();
    document.getElementById('log').innerText += '\nStarting Human Library ...';
    video.play();
  }
  runHumanDetect(video, canvas);
}
// runs detection over all configured sample images and fills the gallery
async function detectSampleImages() {
  // scale overlay font and line height with the number of gallery columns
  ui.baseFont = ui.baseFontProto.replace(/{size}/, `${ui.columns}rem`);
  ui.baseLineHeight = ui.baseLineHeightProto * ui.columns;
  document.getElementById('canvas').style.display = 'none';
  document.getElementById('samples').style.display = 'block';
  log('Running detection of sample images');
  // process samples sequentially so thumbnails appear in a stable order
  for (const sample of ui.samples) await processImage(sample);
}
2020-10-18 02:59:43 +02:00
function setupMenu() {
menu = new Menu(document.body);
menu.addButton('Start Video', 'Pause Video', (evt) => detectVideo(evt));
menu.addButton('Process Images', 'Process Images', () => detectSampleImages());
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addLabel('Enabled Models');
menu.addBool('Face Detect', config.face, 'enabled');
menu.addBool('Face Mesh', config.face.mesh, 'enabled');
menu.addBool('Face Iris', config.face.iris, 'enabled');
menu.addBool('Face Age', config.face.age, 'enabled');
menu.addBool('Face Gender', config.face.gender, 'enabled');
menu.addBool('Face Emotion', config.face.emotion, 'enabled');
menu.addBool('Body Pose', config.body, 'enabled');
menu.addBool('Hand Pose', config.hand, 'enabled');
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addLabel('Model Parameters');
menu.addRange('Max Objects', config.face.detector, 'maxFaces', 0, 50, 1, (val) => {
2020-10-17 13:15:23 +02:00
config.face.detector.maxFaces = parseInt(val);
config.body.maxDetections = parseInt(val);
2020-10-18 02:59:43 +02:00
config.hand.maxHands = parseInt(val);
2020-10-17 13:15:23 +02:00
});
2020-10-18 02:59:43 +02:00
menu.addRange('Skip Frames', config.face.detector, 'skipFrames', 0, 50, 1, (val) => {
2020-10-17 13:15:23 +02:00
config.face.detector.skipFrames = parseInt(val);
config.face.emotion.skipFrames = parseInt(val);
config.face.age.skipFrames = parseInt(val);
config.hand.skipFrames = parseInt(val);
});
2020-10-18 02:59:43 +02:00
menu.addRange('Min Confidence', config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
2020-10-17 13:15:23 +02:00
config.face.detector.minConfidence = parseFloat(val);
config.face.emotion.minConfidence = parseFloat(val);
config.hand.minConfidence = parseFloat(val);
});
2020-10-18 02:59:43 +02:00
menu.addRange('Score Threshold', config.face.detector, 'scoreThreshold', 0.1, 1.0, 0.05, (val) => {
2020-10-17 13:15:23 +02:00
config.face.detector.scoreThreshold = parseFloat(val);
config.hand.scoreThreshold = parseFloat(val);
config.body.scoreThreshold = parseFloat(val);
});
2020-10-18 02:59:43 +02:00
menu.addRange('IOU Threshold', config.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {
2020-10-17 13:15:23 +02:00
config.face.detector.iouThreshold = parseFloat(val);
config.hand.iouThreshold = parseFloat(val);
});
2020-10-18 02:59:43 +02:00
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addLabel('UI Options');
menu.addBool('Use Web Worker', ui, 'useWorker');
menu.addBool('Camera Front/Back', ui, 'facing', () => setupCamera());
menu.addBool('Use 3D Depth', ui, 'useDepth');
menu.addBool('Draw Boxes', ui, 'drawBoxes');
menu.addBool('Draw Points', ui, 'drawPoints');
menu.addBool('Draw Polygons', ui, 'drawPolygons');
menu.addBool('Fill Polygons', ui, 'fillPolygons');
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addValue('State', '');
menu.addChart('FPS', 'FPS');
2020-10-16 21:04:51 +02:00
}
2020-10-12 16:08:00 +02:00
async function main() {
2020-10-16 16:12:12 +02:00
log('Human demo starting ...');
2020-10-18 02:59:43 +02:00
setupMenu();
2020-10-15 21:25:58 +02:00
const msg = `Human ready: version: ${human.version} TensorFlow/JS version: ${human.tf.version_core}`;
2020-10-16 17:23:59 +02:00
document.getElementById('log').innerText += '\n' + msg;
2020-10-15 21:25:58 +02:00
log(msg);
2020-10-12 16:08:00 +02:00
}
// bootstrap on page load; re-run camera setup whenever the window is resized
window.onload = main;
window.onresize = setupCamera;