fixed worker and filter compatibility

pull/280/head
Vladimir Mandic 2020-11-05 08:21:23 -05:00
parent 14b89145c9
commit 91ddc0c57d
3 changed files with 19 additions and 9 deletions


@@ -201,10 +201,18 @@ async function setupCamera() {
 function webWorker(input, image, canvas) {
   if (!worker) {
     // create new webworker and add event handler only once
-    log('Creating worker thread');
+    log('creating worker thread');
     worker = new Worker(ui.worker, { type: 'module' });
+    worker.warned = false;
     // after receiving message from webworker, parse&draw results and send new frame for processing
-    worker.addEventListener('message', (msg) => drawResults(input, msg.data, canvas));
+    worker.addEventListener('message', (msg) => {
+      if (!worker.warned) {
+        log('warning: cannot transfer canvas from worker thread');
+        log('warning: image will not show filter effects');
+        worker.warned = true;
+      }
+      drawResults(input, msg.data.result, canvas);
+    });
   }
   // pass image data as arraybuffer to worker by reference to avoid copy
   worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
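
The postMessage call above hands the frame's pixel buffer to the worker as a transferable, so the data moves by reference instead of being copied (the buffer is detached on the main thread afterwards). A minimal sketch of the receiving side, assuming the worker rebuilds an ImageData from the transferred buffer; variable names here are illustrative, not taken from this commit:

    // worker thread: rebuild the frame from the transferred buffer and run detection
    // (`human` is assumed to be the library instance imported by the worker module)
    onmessage = async (msg) => {
      const { image, width, height, config } = msg.data;
      // ImageData accepts a Uint8ClampedArray view over the transferred ArrayBuffer
      const frame = new ImageData(new Uint8ClampedArray(image), width, height);
      const result = await human.detect(frame, config);
      postMessage({ result });
    };
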
@@ -219,7 +227,7 @@ function runHumanDetect(input, canvas) {
   const live = (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);
   if (!live) {
     if (!input.paused) {
-      log(`Video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
+      log(`video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
       setTimeout(() => runHumanDetect(input, canvas), 500);
     }
     return;
@@ -236,7 +244,7 @@ function runHumanDetect(input, canvas) {
     human.detect(input, config).then((result) => {
       if (result.error) log(result.error);
       else drawResults(input, result, canvas);
-      if (config.profile) log('Profile data:', human.profile());
+      if (config.profile) log('profile data:', human.profile());
     });
   }
 }
@@ -394,11 +402,11 @@ async function main() {
   setupMenu();
   document.getElementById('log').innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;
   // this is not required, just pre-warms the library
-  if (ui.modelsPreload) {
+  if (!ui.modelsPreload) {
     status('loading');
     await human.load();
   }
-  if (ui.modelsWarmup) {
+  if (!ui.modelsWarmup) {
     status('initializing');
     const warmup = new ImageData(50, 50);
     await human.detect(warmup);
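
For reference, a brief sketch of the preload/warmup pattern in the hunk above: model weights are fetched ahead of time and one detection is run on a tiny blank frame so the backend is initialized before live video starts. `human` is assumed to be the loaded library instance used elsewhere in this demo:

    // warm-up sketch: call once during startup, before the detection loop begins
    async function prewarm() {
      await human.load();                   // fetch and cache model weights up front
      const warmup = new ImageData(50, 50); // tiny blank frame
      await human.detect(warmup);           // one throwaway detection initializes the backend
    }
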


@@ -20,8 +20,10 @@ onmessage = async (msg) => {
     result = await human.detect(image, config);
   } catch (err) {
     result.error = err.message;
-    log('Worker thread error:', err.message);
+    log('worker thread error:', err.message);
   }
-  postMessage(result);
+  // must strip canvas from return value as it cannot be transferred from worker thread
+  if (result.canvas) result.canvas = null;
+  postMessage({ result });
   busy = false;
 };
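
The two added lines above exist because the detection result can carry a canvas, and a canvas cannot be copied by the structured clone algorithm, so including one in postMessage (without transferring it) throws a DataCloneError. The same idea as a small reusable helper; the function name is illustrative, not part of this commit:

    // strip fields the structured clone algorithm cannot serialize before posting back
    function stripNonCloneable(result) {
      const safe = { ...result };
      if (safe.canvas) safe.canvas = null; // a canvas cannot cross the worker boundary by copy
      return safe;
    }

    postMessage({ result: stripNonCloneable(result) });
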


@@ -34,7 +34,7 @@ function process(input, config) {
     outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');
     if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;
     if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;
-    this.fx = (tf.ENV.flags.IS_BROWSER && (typeof document !== 'undefined')) ? new fxImage.Canvas({ canvas: outCanvas }) : null;
+    this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
   }
   this.fx.reset();
   this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled
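
Dropping the `typeof document !== 'undefined'` check above lets the filter chain be created inside a worker thread as well, where `document` is undefined but OffscreenCanvas is available. A hedged sketch of that feature-detection idea as a standalone helper, not part of the library:

    // pick a canvas implementation that works in both window and worker contexts
    function createCanvas(width, height) {
      if (typeof OffscreenCanvas !== 'undefined') return new OffscreenCanvas(width, height);
      if (typeof document !== 'undefined') {
        const canvas = document.createElement('canvas');
        canvas.width = width;
        canvas.height = height;
        return canvas;
      }
      throw new Error('no canvas implementation available');
    }
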