mirror of https://github.com/vladmandic/human
added multithreaded demo
parent 2432f19ea5
commit c10f31ef6c
@@ -1,6 +1,6 @@
 # @vladmandic/human

-Version: **2.0.1**
+Version: **2.0.2**
 Description: **Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition**

 Author: **Vladimir Mandic <mandic00@live.com>**

@@ -9,8 +9,12 @@ Repository: **<git+https://github.com/vladmandic/human.git>**

 ## Changelog

-### **HEAD -> main** 2021/06/11 mandic00@live.com
+### **2.0.2** 2021/06/14 mandic00@live.com
+
+### **origin/main** 2021/06/14 mandic00@live.com
+
+- fix centernet box width & height
+- add body segmentation sample

 ### **release: 2.0.1** 2021/06/08 mandic00@live.com
@@ -43,6 +43,7 @@ Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) ap

 - [**Main Application**](https://vladmandic.github.io/human/demo/index.html)
 - [**Face Extraction, Description, Identification and Matching**](https://vladmandic.github.io/human/demo/facematch/index.html)
 - [**Face Extraction and 3D Rendering**](https://vladmandic.github.io/human/demo/face3d/index.html)
+- [**Multithreaded Detection Showcasing Maximum Performance**](https://vladmandic.github.io/human/demo/multithread/index.html)
 - [**Details on Demo Applications**](https://github.com/vladmandic/human/wiki/Demos)

 ## Project pages
@@ -0,0 +1,33 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <title>Human</title>
  <meta name="viewport" content="width=device-width" id="viewport">
  <meta name="keywords" content="Human">
  <meta name="application-name" content="Human">
  <meta name="description" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
  <meta name="msapplication-tooltip" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
  <meta name="theme-color" content="#000000">
  <link rel="manifest" href="../manifest.webmanifest">
  <link rel="shortcut icon" href="../../favicon.ico" type="image/x-icon">
  <link rel="apple-touch-icon" href="../../assets/icon.png">
  <script src="multithread/index.js" type="module"></script>
  <style>
    @font-face { font-family: 'Lato'; font-display: swap; font-style: normal; font-weight: 100; src: local('Lato'), url('../assets/lato-light.woff2') }
    html { font-family: 'Lato', 'Segoe UI'; font-size: 16px; font-variant: small-caps; }
    body { margin: 0; background: black; color: white; overflow-x: hidden; width: 100vw; height: 100vh; }
    body::-webkit-scrollbar { display: none; }
    .status { position: absolute; width: 100vw; bottom: 10%; text-align: center; font-size: 3rem; font-weight: 100; text-shadow: 2px 2px #303030; }
    .log { position: absolute; bottom: 0; margin: 0.4rem 0.4rem 0 0.4rem; font-size: 0.9rem; }
    .video { display: none; }
    .canvas { margin: 0 auto; }
  </style>
</head>
<body>
  <div id="status" class="status"></div>
  <canvas id="canvas" class="canvas"></canvas>
  <video id="video" playsinline class="video"></video>
  <div id="log" class="log"></div>
</body>
</html>
@@ -0,0 +1,268 @@
/**
 * Human demo for browsers
 *
 * @description Demo app that enables all Human modules and runs them in separate worker threads
 *
 */
// @ts-nocheck // typescript checks disabled as this is pure javascript

import Human from '../../dist/human.esm.js'; // equivalent of @vladmandic/human
import GLBench from '../helpers/gl-bench.js';

const workerJS = 'multithread/worker.js';
const config = {
  main: { // processes input and runs gesture analysis
    warmup: 'none',
    backend: 'humangl',
    modelBasePath: '../../models/',
    async: false,
    filter: { enabled: true },
    face: { enabled: false },
    object: { enabled: false },
    gesture: { enabled: true },
    hand: { enabled: false },
    body: { enabled: false },
    segmentation: { enabled: false },
  },
  face: { // runs all face models
    warmup: 'none',
    backend: 'humangl',
    modelBasePath: '../../models/',
    async: false,
    filter: { enabled: false },
    face: { enabled: true },
    object: { enabled: false },
    gesture: { enabled: false },
    hand: { enabled: false },
    body: { enabled: false },
    segmentation: { enabled: false },
  },
  body: { // runs body model
    warmup: 'none',
    backend: 'humangl',
    modelBasePath: '../../models/',
    async: false,
    filter: { enabled: false },
    face: { enabled: false },
    object: { enabled: false },
    gesture: { enabled: false },
    hand: { enabled: false },
    body: { enabled: true },
    segmentation: { enabled: false },
  },
  hand: { // runs hands model
    warmup: 'none',
    backend: 'humangl',
    modelBasePath: '../../models/',
    async: false,
    filter: { enabled: false },
    face: { enabled: false },
    object: { enabled: false },
    gesture: { enabled: false },
    hand: { enabled: true },
    body: { enabled: false },
    segmentation: { enabled: false },
  },
  object: { // runs object model
    warmup: 'none',
    backend: 'humangl',
    modelBasePath: '../../models/',
    async: false,
    filter: { enabled: false },
    face: { enabled: false },
    object: { enabled: true },
    gesture: { enabled: false },
    hand: { enabled: false },
    body: { enabled: false },
    segmentation: { enabled: false },
  },
};
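// The five configs above differ only in which module is enabled: the main thread keeps
// just the image filter and gesture analysis, while each worker loads exactly one heavy
// model so the four detectors run in parallel. A minimal sketch of how they could be
// derived from a shared base (baseConfig, allDisabled, moduleConfig are hypothetical
// helpers, not part of the demo):
const baseConfig = { warmup: 'none', backend: 'humangl', modelBasePath: '../../models/', async: false }; // hypothetical
const allDisabled = { face: { enabled: false }, object: { enabled: false }, gesture: { enabled: false }, hand: { enabled: false }, body: { enabled: false }, segmentation: { enabled: false } }; // hypothetical
const moduleConfig = (name) => ({ ...baseConfig, filter: { enabled: false }, ...allDisabled, [name]: { enabled: true } }); // e.g. moduleConfig('face') yields the same shape as config.face above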
let human;
let canvas;
let video;
let bench;

const busy = {
  face: false,
  hand: false,
  body: false,
  object: false,
};

const workers = {
  face: null,
  body: null,
  hand: null,
  object: null,
};

const time = {
  main: 0,
  draw: 0,
  face: '[warmup]',
  body: '[warmup]',
  hand: '[warmup]',
  object: '[warmup]',
};

const start = {
  main: 0,
  draw: 0,
  face: 0,
  body: 0,
  hand: 0,
  object: 0,
};

const result = { // initialize empty result object which will be partially filled with results from each thread
  performance: {},
  hand: [],
  body: [],
  face: [],
  object: [],
};
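// `status()` is called below but its definition did not survive extraction here; a
// minimal reconstruction (assumed shape) that writes to the #status overlay from index.html:
function status(msg) {
  const div = document.getElementById('status');
  if (div) div.innerText = msg;
}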
function log(...msg) {
  const dt = new Date();
  const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
  // eslint-disable-next-line no-console
  console.log(ts, ...msg);
}

async function drawResults() {
  start.draw = performance.now();
  const interpolated = human.next(result);
  await human.draw.all(canvas, interpolated);
  time.draw = Math.round(1 + performance.now() - start.draw);
  const fps = Math.round(10 * 1000 / time.main) / 10;
  const draw = Math.round(10 * 1000 / time.draw) / 10;
  document.getElementById('log').innerText = `Human: version ${human.version} | Performance: Main ${time.main}ms Face: ${time.face}ms Body: ${time.body}ms Hand: ${time.hand}ms Object ${time.object}ms | FPS: ${fps} / ${draw}`;
  requestAnimationFrame(drawResults);
}
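// Note: the draw loop above runs on its own requestAnimationFrame chain, decoupled from
// detection. human.next(result) returns a time-interpolated copy of the latest known
// results, so boxes and keypoints move smoothly even though each worker reports at its
// own rate. A quick check of the FPS arithmetic (illustrative number, not a measurement):
//   time.main = 45 ms  =>  Math.round(10 * 1000 / 45) / 10 = 22.2 FPS (rounded to one decimal)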
async function receiveMessage(msg) {
  result[msg.data.type] = msg.data.result;
  busy[msg.data.type] = false;
  time[msg.data.type] = Math.round(performance.now() - start[msg.data.type]);
}
async function runDetection() {
  start.main = performance.now();
  if (!bench) {
    bench = new GLBench(null, { trackGPU: false, chartHz: 20, chartLen: 20 });
    bench.begin();
  }
  const ctx = canvas.getContext('2d');
  // const image = await human.image(video);
  // ctx.drawImage(image.canvas, 0, 0, canvas.width, canvas.height);
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
  const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
  if (!busy.face) {
    busy.face = true;
    start.face = performance.now();
    workers.face.postMessage({ image: imageData.data.buffer, width: canvas.width, height: canvas.height, config: config.face, type: 'face' }, [imageData.data.buffer.slice(0)]);
  }
  if (!busy.body) {
    busy.body = true;
    start.body = performance.now();
    workers.body.postMessage({ image: imageData.data.buffer, width: canvas.width, height: canvas.height, config: config.body, type: 'body' }, [imageData.data.buffer.slice(0)]);
  }
  if (!busy.hand) {
    busy.hand = true;
    start.hand = performance.now();
    workers.hand.postMessage({ image: imageData.data.buffer, width: canvas.width, height: canvas.height, config: config.hand, type: 'hand' }, [imageData.data.buffer.slice(0)]);
  }
  if (!busy.object) {
    busy.object = true;
    start.object = performance.now();
    workers.object.postMessage({ image: imageData.data.buffer, width: canvas.width, height: canvas.height, config: config.object, type: 'object' }, [imageData.data.buffer.slice(0)]);
  }

  time.main = Math.round(performance.now() - start.main);

  bench.nextFrame();
  requestAnimationFrame(runDetection);
}
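// The four if-blocks above share one pattern: a model only receives a new frame when its
// worker is idle, so a slow model drops frames instead of stalling the others. Note the
// transfer list holds imageData.data.buffer.slice(0) (a copy), so the original buffer is
// not detached and the same imageData can still be posted to the remaining workers.
// A condensed sketch of that pattern (dispatchToWorker is a hypothetical helper, not used above):
function dispatchToWorker(type, imageData) {
  if (busy[type]) return; // worker still processing: skip this frame for this model
  busy[type] = true;
  start[type] = performance.now();
  workers[type].postMessage({ image: imageData.data.buffer, width: canvas.width, height: canvas.height, config: config[type], type }, [imageData.data.buffer.slice(0)]);
}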
async function setupCamera() {
  video = document.getElementById('video');
  canvas = document.getElementById('canvas');
  const output = document.getElementById('log');
  let stream;
  const constraints = {
    audio: false,
    video: {
      facingMode: 'user',
      resizeMode: 'crop-and-scale',
      width: { ideal: document.body.clientWidth },
      // height: { ideal: document.body.clientHeight }, // not set as we're using aspectRatio to get height instead
      aspectRatio: document.body.clientWidth / document.body.clientHeight,
    },
  };
  // enumerate devices for diagnostic purposes
  navigator.mediaDevices.enumerateDevices().then((devices) => log('enumerated devices:', devices));
  log('camera constraints', constraints);
  try {
    stream = await navigator.mediaDevices.getUserMedia(constraints);
  } catch (err) {
    output.innerText += `\n${err.name}: ${err.message}`;
    status(err.name);
    log('camera error:', err);
  }
  const tracks = stream.getVideoTracks();
  log('enumerated viable tracks:', tracks);
  const track = stream.getVideoTracks()[0];
  const settings = track.getSettings();
  log('selected video source:', track, settings);
  const promise = !stream || new Promise((resolve) => {
    video.onloadeddata = () => {
      if (settings.width > settings.height) canvas.style.width = '100vw';
      else canvas.style.height = '100vh';
      canvas.width = video.videoWidth;
      canvas.height = video.videoHeight;
      video.play();
      resolve();
    };
  });
  // attach input to video element
  if (stream) video.srcObject = stream;
  return promise;
}
async function startWorkers() {
  if (!workers.face) workers.face = new Worker(workerJS);
  if (!workers.body) workers.body = new Worker(workerJS);
  if (!workers.hand) workers.hand = new Worker(workerJS);
  if (!workers.object) workers.object = new Worker(workerJS);
  workers.face.onmessage = receiveMessage;
  workers.body.onmessage = receiveMessage;
  workers.hand.onmessage = receiveMessage;
  workers.object.onmessage = receiveMessage;
}
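// All four workers execute the same script (multithread/worker.js); each one specializes
// on its first message, when it constructs a Human instance from the config it receives.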
async function main() {
  window.addEventListener('unhandledrejection', (evt) => {
    // eslint-disable-next-line no-console
    console.error(evt.reason || evt);
    document.getElementById('log').innerHTML = evt.reason.message || evt.reason || evt;
    status('exception error');
    evt.preventDefault();
  });

  if (typeof Worker === 'undefined' || typeof OffscreenCanvas === 'undefined') {
    status('workers are not supported');
    return;
  }

  human = new Human(config.main);
  document.getElementById('log').innerText = `Human: version ${human.version}`;

  await startWorkers();
  await setupCamera();
  runDetection();
  drawResults();
}

window.onload = main;
@@ -0,0 +1,20 @@
// load Human using IIFE script as Chrome Mobile does not support Modules as Workers

// import Human from '../dist/human.esm.js';
self.importScripts('../../dist/human.js');

let human;

onmessage = async (msg) => {
  // received from index.js using:
  // worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);

  // @ts-ignore // Human is registered as global namespace using IIFE script
  // eslint-disable-next-line no-undef, new-cap
  if (!human) human = new Human.default(msg.data.config);
  const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
  let result = {};
  result = await human.detect(image, msg.data.config);
  // @ts-ignore tslint wrong type matching for worker
  postMessage({ result: result[msg.data.type], type: msg.data.type });
};
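// Aside (a sketch, not part of this demo): whether a browser supports module workers can
// be feature-detected, since browsers that understand { type: 'module' } read the `type`
// option before fetching the worker script:
function supportsModuleWorkers() {
  let supported = false;
  const probe = { get type() { supported = true; return 'module'; } };
  try { new Worker('blob://', probe).terminate(); } catch { /* fetch failure is irrelevant; the getter has been read */ }
  return supported;
}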
@@ -51,10 +51,10 @@ export const options: DrawOptions = {
   labelColor: <string>'rgba(173, 216, 230, 1)', // 'lightblue' with dark alpha channel
   shadowColor: <string>'black',
   font: <string>'small-caps 14px "Segoe UI"',
-  lineHeight: <number>24,
-  lineWidth: <number>6,
+  lineHeight: <number>18,
+  lineWidth: <number>4,
   pointSize: <number>2,
-  roundRect: <number>28,
+  roundRect: <number>8,
   drawPoints: <boolean>false,
   drawLabels: <boolean>true,
   drawBoxes: <boolean>true,
@@ -460,7 +460,7 @@ export async function object(inCanvas: HTMLCanvasElement, result: Array<Item>, d
     ctx.fillStyle = localOptions.color;
     rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
     if (localOptions.drawLabels) {
-      const label = `${Math.round(100 * h.score)}% ${h.label}`;
+      const label = `${h.label} ${Math.round(100 * h.score)}%`;
       if (localOptions.shadowColor && localOptions.shadowColor !== '') {
         ctx.fillStyle = localOptions.shadowColor;
         ctx.fillText(label, h.box[0] + 3, 1 + h.box[1] + localOptions.lineHeight, h.box[2]);
@@ -114,19 +114,21 @@ export function calc(newResult: Result): Result {
   }

   // interpolate person results
-  const newPersons = newResult.persons; // trigger getter function
-  if (!bufferedResult.persons || (newPersons.length !== bufferedResult.persons.length)) {
-    bufferedResult.persons = JSON.parse(JSON.stringify(newPersons as Person[]));
-  } else {
-    for (let i = 0; i < newPersons.length; i++) { // update person box, we don't update the rest as it's updated as reference anyhow
-      bufferedResult.persons[i].box = (newPersons[i].box
-        .map((box, j) => ((bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box) / bufferedFactor)) as [number, number, number, number];
-    }
-  }
+  if (newResult.persons) {
+    const newPersons = newResult.persons; // trigger getter function
+    if (!bufferedResult.persons || (newPersons.length !== bufferedResult.persons.length)) {
+      bufferedResult.persons = JSON.parse(JSON.stringify(newPersons as Person[]));
+    } else {
+      for (let i = 0; i < newPersons.length; i++) { // update person box, we don't update the rest as it's updated as reference anyhow
+        bufferedResult.persons[i].box = (newPersons[i].box
+          .map((box, j) => ((bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box) / bufferedFactor)) as [number, number, number, number];
+      }
+    }
+  }

   // just copy latest gestures without interpolation
-  bufferedResult.gesture = newResult.gesture as Gesture[];
-  bufferedResult.performance = newResult.performance;
+  if (newResult.gesture) bufferedResult.gesture = newResult.gesture as Gesture[];
+  if (newResult.performance) bufferedResult.performance = newResult.performance;

   return bufferedResult;
 }
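// Note: the box smoothing in calc() above is an exponential moving average; each frame
// the buffered coordinate moves 1/bufferedFactor of the remaining distance toward the
// new value. Worked example with illustrative numbers: bufferedFactor = 3, buffered
// x = 100, new x = 130:
//   ((3 - 1) * 100 + 130) / 3 = 110   // one third of the 30px gap is covered this frame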
wiki

@@ -1 +1 @@
-Subproject commit d270e2eb172e90acefb4e3be22260e3ad6a03ab1
+Subproject commit f6641d6559b940a7bf99297bf9004f43d1e87bdc