implemented buffered processing

pull/293/head
Vladimir Mandic 2020-11-12 09:21:26 -05:00
parent 2a71f81462
commit c929c2d1d6
3 changed files with 61 additions and 24 deletions


@@ -27,20 +27,25 @@ const ui = {
   fillPolygons: false,
   useDepth: true,
   console: true,
-  maxFrames: 10,
+  maxFPSframes: 10,
   modelsPreload: true,
   modelsWarmup: true,
   menuWidth: 0,
   menuHeight: 0,
   camera: {},
   fps: [],
+  buffered: true,
+  bufferedFPSTarget: 24,
+  drawThread: null,
+  framesDraw: 0,
+  framesDetect: 0,
 };
 // global variables
 let menu;
 let menuFX;
 let worker;
-let timeStamp;
+let lastDetectedResult = {};
 // helper function: translates json to human readable string
 function str(...msg) {
@@ -65,24 +70,24 @@ const status = (msg) => {
 };
 // draws processed results and starts processing of a next frame
-function drawResults(input, result, canvas) {
+async function drawResults(input) {
+  const result = lastDetectedResult;
+  const canvas = document.getElementById('canvas');
   // update fps data
-  const elapsed = performance.now() - timeStamp;
-  ui.fps.push(1000 / elapsed);
-  if (ui.fps.length > ui.maxFrames) ui.fps.shift();
+  // const elapsed = performance.now() - timeStamp;
+  ui.fps.push(1000 / result.performance.total);
+  if (ui.fps.length > ui.maxFPSframes) ui.fps.shift();
   // enable for continuous performance monitoring
   // console.log(result.performance);
-  // immediate loop before we even draw results, but limit frame rate to 30
-  if (input.srcObject) {
-    // eslint-disable-next-line no-use-before-define
-    if (elapsed > 33) requestAnimationFrame(() => runHumanDetect(input, canvas));
-    // eslint-disable-next-line no-use-before-define
-    else setTimeout(() => runHumanDetect(input, canvas), 33 - elapsed);
-  }
   // draw fps chart
-  menu.updateChart('FPS', ui.fps);
+  await menu.updateChart('FPS', ui.fps);
+  // get updated canvas
+  result.canvas = await human.image(input, userConfig);
   // draw image from video
   const ctx = canvas.getContext('2d');
   ctx.fillStyle = ui.baseBackground;
@@ -95,10 +100,10 @@ function drawResults(input, result, canvas) {
     ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
   }
   // draw all results
-  draw.face(result.face, canvas, ui, human.facemesh.triangulation);
-  draw.body(result.body, canvas, ui);
-  draw.hand(result.hand, canvas, ui);
-  draw.gesture(result.gesture, canvas, ui);
+  await draw.face(result.face, canvas, ui, human.facemesh.triangulation);
+  await draw.body(result.body, canvas, ui);
+  await draw.hand(result.hand, canvas, ui);
+  await draw.gesture(result.gesture, canvas, ui);
   // update log
   const engine = human.tf.engine();
   const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';
@@ -112,6 +117,16 @@ function drawResults(input, result, canvas) {
     performance: ${str(result.performance)} FPS:${avg}<br>
     ${warning}
   `;
+  ui.framesDraw++;
+  ui.lastFrame = performance.now();
+  // if buffered, immediate loop but limit frame rate although it's going to run slower as JS is single-threaded
+  if (ui.buffered && !ui.drawThread) ui.drawThread = setInterval(() => drawResults(input, canvas), 1000 / ui.bufferedFPSTarget);
+  // stop buffering
+  if (!ui.buffered && ui.drawThread) {
+    clearTimeout(ui.drawThread);
+    ui.drawThread = null;
+  }
 }
 // setup webcam
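
The net effect of the drawResults changes is a producer/consumer split: detection writes its latest output into lastDetectedResult as fast as the model allows, while a setInterval timer repaints that newest result over fresh video frames at ui.bufferedFPSTarget. A minimal standalone sketch of the pattern, assuming generic detect() and draw() functions (illustrative names, not code from this commit):

const state = { buffered: true, targetFPS: 24, drawTimer: null, lastResult: null };

async function detectLoop(video) {
  state.lastResult = await detect(video); // producer: runs as fast as the model allows
  if (state.buffered && !state.drawTimer) {
    // consumer: repaint the newest result at a fixed rate, independent of detection speed
    state.drawTimer = setInterval(() => draw(video, state.lastResult), 1000 / state.targetFPS);
  }
  requestAnimationFrame(() => detectLoop(video)); // schedule the next detection
}

Because JavaScript is single-threaded, the interval callback only fires when the event loop is idle, so the real draw rate can drop below the target under load, which is exactly what the added comment warns about.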
@@ -197,7 +212,11 @@ function webWorker(input, image, canvas) {
       log('warning: image will not show filter effects');
       worker.warned = true;
     }
-    drawResults(input, msg.data.result, canvas);
+    lastDetectedResult = msg.data.result;
+    ui.framesDetect++;
+    if (!ui.drawThread) drawResults(input);
+    // eslint-disable-next-line no-use-before-define
+    requestAnimationFrame(() => runHumanDetect(input, canvas));
   });
 }
 // pass image data as arraybuffer to worker by reference to avoid copy
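
The trailing comment refers to transferable objects: posting an ImageData's backing ArrayBuffer with a transfer list moves it to the worker instead of structured-cloning it. A hedged sketch of that handoff, with illustrative variable names:

const ctx = canvas.getContext('2d');
ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
// the second argument is the transfer list: the buffer is detached on this thread
// (data.data.buffer.byteLength becomes 0) and ownership moves to the worker
worker.postMessage({ image: data.data.buffer, width: canvas.width, height: canvas.height }, [data.data.buffer]);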
@@ -206,14 +225,19 @@ function webWorker(input, image, canvas) {
 // main processing function when input is webcam, can use direct invocation or web worker
 function runHumanDetect(input, canvas) {
-  timeStamp = performance.now();
   // if live video
   const live = input.srcObject && (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);
   if (!live && input.srcObject) {
+    // stop ui refresh
+    if (ui.drawThread) clearTimeout(ui.drawThread);
+    ui.drawThread = null;
     // if we want to continue and camera not ready, retry in 0.5sec, else just give up
     if (input.paused) log('camera paused');
     else if ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);
     else log(`camera not ready: track state: ${input.srcObject?.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
+    clearTimeout(ui.drawThread);
+    ui.drawThread = null;
+    log('frame statistics: drawn:', ui.framesDraw, 'detected:', ui.framesDetect);
     return;
   }
   status('');
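
The liveness check mixes two similarly named but different APIs: MediaStreamTrack.readyState is a string ('live' or 'ended'), while HTMLMediaElement.readyState is a number from 0 (HAVE_NOTHING) to 4 (HAVE_ENOUGH_DATA), so input.readyState > 2 means the element has at least HAVE_FUTURE_DATA decoded. Unpacked for reference, with video standing in for input:

const track = video.srcObject.getVideoTracks()[0];
const live = track.readyState === 'live' // camera is still delivering frames
  && video.readyState > 2                // element has decoded frames ready (>= HAVE_FUTURE_DATA)
  && !video.paused;                      // playback not paused by the user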
@@ -228,14 +252,18 @@ function runHumanDetect(input, canvas) {
   } else {
     human.detect(input, userConfig).then((result) => {
       if (result.error) log(result.error);
-      else drawResults(input, result, canvas);
+      else {
+        lastDetectedResult = result;
+        if (!ui.drawThread) drawResults(input);
+        ui.framesDetect++;
+        requestAnimationFrame(() => runHumanDetect(input, canvas));
+      }
     });
   }
 }
 // main processing function when input is image, can use direct invocation or web worker
 async function processImage(input) {
-  timeStamp = performance.now();
   return new Promise((resolve) => {
     const image = new Image();
     image.onload = async () => {
@@ -356,6 +384,7 @@ function setupMenu() {
   menuFX = new Menu(document.body, '', { top: '1rem', right: '18rem' });
   menuFX.addLabel('ui options');
+  menuFX.addBool('buffered output', ui, 'buffered', (val) => ui.buffered = val);
   menuFX.addBool('crop & scale', ui, 'crop', () => setupCamera());
   menuFX.addBool('camera front/back', ui, 'facing', () => setupCamera());
   menuFX.addBool('use 3D depth', ui, 'useDepth');
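
The Menu helper itself is not part of this diff; judging from the call sites, addBool(label, object, property, onChange) renders a checkbox bound to a property and invokes the callback on change. A hypothetical sketch of that contract (the real class presumably keeps its own container element rather than taking one as a parameter):

function addBool(container, label, object, property, onChange) {
  const checkbox = document.createElement('input');
  checkbox.type = 'checkbox';
  checkbox.checked = object[property];        // reflect the current setting
  checkbox.addEventListener('change', () => {
    object[property] = checkbox.checked;      // write the toggle back to the bound object
    if (onChange) onChange(checkbox.checked); // e.g. restart the camera or flip ui.buffered
  });
  const item = document.createElement('label');
  item.append(checkbox, ` ${label}`);
  container.appendChild(item);
}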
@@ -387,7 +416,7 @@ async function main() {
   log('Human: demo starting ...');
   setupMenu();
   document.getElementById('log').innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;
-  human.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
+  // human.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
   // this is not required, just pre-loads all models
   if (ui.modelsPreload) {
     status('loading');
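
Commenting out WEBGL_FORCE_F16_TEXTURES reverts TensorFlow.js to full-precision WebGL textures; the flag trades texture memory (16-bit vs 32-bit floats) for numeric precision, and the commit does not state why it was disabled. For reference, it is toggled through the engine's environment:

// re-enabling is a one-liner: halves texture memory but reduces float precision
human.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);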


@@ -284,6 +284,14 @@ class Human {
     return faceRes;
   }
+  async image(input, userConfig = {}) {
+    this.state = 'image';
+    this.config = mergeDeep(this.config, userConfig);
+    const process = image.process(input, this.config);
+    process.tensor.dispose();
+    return process.canvas;
+  }
   // main detect function
   async detect(input, userConfig = {}) {
     this.state = 'config';
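
The new Human.image() runs only the input-processing stage (filters, resize) and returns the resulting canvas, disposing the intermediate tensor; drawResults calls it so every buffered repaint reflects the current filter settings. A possible call, assuming an initialized Human instance and the library's filter config shape:

const processed = await human.image(video, { filter: { enabled: true, brightness: 0.25 } });
// processed is a canvas with filters applied; draw it in place of the raw video frame
ctx.drawImage(processed, 0, 0, ctx.canvas.width, ctx.canvas.height);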

wiki

@@ -1 +1 @@
-Subproject commit 6b460e9f5252038ef7a94b044fdb789e35d610bd
+Subproject commit 5dcbe8ad56fc4dc21378046c225185e6203250eb