From 91ddc0c57dca72738aff9b90e9acf7e7eb6e17a7 Mon Sep 17 00:00:00 2001
From: Vladimir Mandic
Date: Thu, 5 Nov 2020 08:21:23 -0500
Subject: [PATCH] fixed worker and filter compatibility

---
 demo/browser.js | 20 ++++++++++++++------
 demo/worker.js  |  6 ++++--
 src/image.js    |  2 +-
 3 files changed, 19 insertions(+), 9 deletions(-)

diff --git a/demo/browser.js b/demo/browser.js
index fc81fc6e..4080ada6 100644
--- a/demo/browser.js
+++ b/demo/browser.js
@@ -201,10 +201,18 @@ async function setupCamera() {
 function webWorker(input, image, canvas) {
   if (!worker) {
     // create new webworker and add event handler only once
-    log('Creating worker thread');
+    log('creating worker thread');
     worker = new Worker(ui.worker, { type: 'module' });
+    worker.warned = false;
     // after receiving message from webworker, parse&draw results and send new frame for processing
-    worker.addEventListener('message', (msg) => drawResults(input, msg.data, canvas));
+    worker.addEventListener('message', (msg) => {
+      if (!worker.warned) {
+        log('warning: cannot transfer canvas from worker thread');
+        log('warning: image will not show filter effects');
+        worker.warned = true;
+      }
+      drawResults(input, msg.data.result, canvas);
+    });
   }
   // pass image data as arraybuffer to worker by reference to avoid copy
   worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
@@ -219,7 +227,7 @@ function runHumanDetect(input, canvas) {
   const live = (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);
   if (!live) {
     if (!input.paused) {
-      log(`Video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
+      log(`video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
       setTimeout(() => runHumanDetect(input, canvas), 500);
     }
     return;
@@ -236,7 +244,7 @@ function runHumanDetect(input, canvas) {
     human.detect(input, config).then((result) => {
       if (result.error) log(result.error);
       else drawResults(input, result, canvas);
-      if (config.profile) log('Profile data:', human.profile());
+      if (config.profile) log('profile data:', human.profile());
     });
   }
 }
@@ -394,11 +402,11 @@ async function main() {
   setupMenu();
   document.getElementById('log').innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;
   // this is not required, just pre-warms the library
-  if (ui.modelsPreload) {
+  if (!ui.modelsPreload) {
     status('loading');
     await human.load();
   }
-  if (ui.modelsWarmup) {
+  if (!ui.modelsWarmup) {
     status('initializing');
     const warmup = new ImageData(50, 50);
     await human.detect(warmup);
diff --git a/demo/worker.js b/demo/worker.js
index b698b0c2..3ba194c1 100644
--- a/demo/worker.js
+++ b/demo/worker.js
@@ -20,8 +20,10 @@ onmessage = async (msg) => {
     result = await human.detect(image, config);
   } catch (err) {
     result.error = err.message;
-    log('Worker thread error:', err.message);
+    log('worker thread error:', err.message);
   }
-  postMessage(result);
+  // must strip canvas from return value as it cannot be transferred from worker thread
+  if (result.canvas) result.canvas = null;
+  postMessage({ result });
   busy = false;
 };
diff --git a/src/image.js b/src/image.js
index 364916f8..fe64cd31 100644
--- a/src/image.js
+++ b/src/image.js
@@ -34,7 +34,7 @@ function process(input, config) {
     outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');
     if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;
     if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;
-    this.fx = (tf.ENV.flags.IS_BROWSER && (typeof document !== 'undefined')) ? new fxImage.Canvas({ canvas: outCanvas }) : null;
+    this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
   }
   this.fx.reset();
   this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled
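
The demo/worker.js change above exists because a canvas held inside the detection result cannot be reliably structured-cloned back to the main thread by postMessage(), so the worker nulls result.canvas before replying; that is also why filter effects drawn into that canvas are lost on the worker path, as the new warning in demo/browser.js notes. A minimal main-thread sketch of the matching send/receive pattern, assuming a worker module shaped like demo/worker.js and video/canvas elements whose ids are purely illustrative:

  // grab one video frame, transfer its pixel buffer to the worker by reference, log the reply
  const worker = new Worker('worker.js', { type: 'module' });
  worker.onmessage = (msg) => {
    // the worker strips result.canvas, so only serializable detection data arrives here
    console.log('detection result:', msg.data.result);
  };
  const video = document.getElementById('video'); // assumed <video> element that is already playing
  const canvas = document.getElementById('out');  // assumed <canvas> element used only to read pixels
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  const ctx = canvas.getContext('2d');
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
  const image = ctx.getImageData(0, 0, canvas.width, canvas.height);
  // the second argument marks the ArrayBuffer as transferable, so it is moved rather than copied
  // (the demo additionally sends its config object alongside the pixel data)
  worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height }, [image.data.buffer]);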