fixed worker and filter compatibility

pull/50/head
Vladimir Mandic 2020-11-05 08:21:23 -05:00
parent affdb2836e
commit 8d91c9237d
20 changed files with 73 additions and 55 deletions

View File

@ -201,10 +201,18 @@ async function setupCamera() {
function webWorker(input, image, canvas) { function webWorker(input, image, canvas) {
if (!worker) { if (!worker) {
// create new webworker and add event handler only once // create new webworker and add event handler only once
log('Creating worker thread'); log('creating worker thread');
worker = new Worker(ui.worker, { type: 'module' }); worker = new Worker(ui.worker, { type: 'module' });
worker.warned = false;
// after receiving message from webworker, parse&draw results and send new frame for processing // after receiving message from webworker, parse&draw results and send new frame for processing
worker.addEventListener('message', (msg) => drawResults(input, msg.data, canvas)); worker.addEventListener('message', (msg) => {
if (!worker.warned) {
log('warning: cannot transfer canvas from worker thread');
log('warning: image will not show filter effects');
worker.warned = true;
}
drawResults(input, msg.data.result, canvas);
});
} }
// pass image data as arraybuffer to worker by reference to avoid copy // pass image data as arraybuffer to worker by reference to avoid copy
worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]); worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
@ -219,7 +227,7 @@ function runHumanDetect(input, canvas) {
const live = (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused); const live = (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);
if (!live) { if (!live) {
if (!input.paused) { if (!input.paused) {
log(`Video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`); log(`video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
setTimeout(() => runHumanDetect(input, canvas), 500); setTimeout(() => runHumanDetect(input, canvas), 500);
} }
return; return;
@ -236,7 +244,7 @@ function runHumanDetect(input, canvas) {
human.detect(input, config).then((result) => { human.detect(input, config).then((result) => {
if (result.error) log(result.error); if (result.error) log(result.error);
else drawResults(input, result, canvas); else drawResults(input, result, canvas);
if (config.profile) log('Profile data:', human.profile()); if (config.profile) log('profile data:', human.profile());
}); });
} }
} }
@ -394,11 +402,11 @@ async function main() {
setupMenu(); setupMenu();
document.getElementById('log').innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`; document.getElementById('log').innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;
// this is not required, just pre-warms the library // this is not required, just pre-warms the library
if (ui.modelsPreload) { if (!ui.modelsPreload) {
status('loading'); status('loading');
await human.load(); await human.load();
} }
if (ui.modelsWarmup) { if (!ui.modelsWarmup) {
status('initializing'); status('initializing');
const warmup = new ImageData(50, 50); const warmup = new ImageData(50, 50);
await human.detect(warmup); await human.detect(warmup);

View File

@ -20,8 +20,10 @@ onmessage = async (msg) => {
result = await human.detect(image, config); result = await human.detect(image, config);
} catch (err) { } catch (err) {
result.error = err.message; result.error = err.message;
log('Worker thread error:', err.message); log('worker thread error:', err.message);
} }
postMessage(result); // must strip canvas from return value as it cannot be transferred from worker thread
if (result.canvas) result.canvas = null;
postMessage({ result });
busy = false; busy = false;
}; };

View File

@ -33109,7 +33109,7 @@ var Yx = we((qx) => {
let a = r, s = i; let a = r, s = i;
t.filter.width > 0 ? a = t.filter.width : t.filter.height > 0 && (a = r * (t.filter.height / i)), t.filter.height > 0 ? s = t.filter.height : t.filter.width > 0 && (s = i * (t.filter.width / r)), (!yt || yt.width !== a || yt.height !== s) && (yt = typeof OffscreenCanvas != "undefined" ? new OffscreenCanvas(a, s) : document.createElement("canvas"), yt.width !== a && (yt.width = a), yt.height !== s && (yt.height = s)); t.filter.width > 0 ? a = t.filter.width : t.filter.height > 0 && (a = r * (t.filter.height / i)), t.filter.height > 0 ? s = t.filter.height : t.filter.width > 0 && (s = i * (t.filter.width / r)), (!yt || yt.width !== a || yt.height !== s) && (yt = typeof OffscreenCanvas != "undefined" ? new OffscreenCanvas(a, s) : document.createElement("canvas"), yt.width !== a && (yt.width = a), yt.height !== s && (yt.height = s));
const o = yt.getContext("2d"); const o = yt.getContext("2d");
n instanceof ImageData ? o.putImageData(n, 0, 0) : o.drawImage(n, 0, 0, r, i, 0, 0, yt.width, yt.height), t.filter.enabled && ((!this.fx || !Yt || yt.width !== Yt.width || yt.height !== Yt.height) && (Yt = typeof OffscreenCanvas != "undefined" ? new OffscreenCanvas(yt.width, yt.height) : document.createElement("canvas"), Yt.width !== yt.width && (Yt.width = yt.width), Yt.height !== yt.height && (Yt.height = yt.height), this.fx = ys.ENV.flags.IS_BROWSER && typeof document != "undefined" ? new GV.Canvas({canvas: Yt}) : null), this.fx.reset(), this.fx.addFilter("brightness", t.filter.brightness), t.filter.contrast !== 0 && this.fx.addFilter("contrast", t.filter.contrast), t.filter.sharpness !== 0 && this.fx.addFilter("sharpen", t.filter.sharpness), t.filter.blur !== 0 && this.fx.addFilter("blur", t.filter.blur), t.filter.saturation !== 0 && this.fx.addFilter("saturation", t.filter.saturation), t.filter.hue !== 0 && this.fx.addFilter("hue", t.filter.hue), t.filter.negative && this.fx.addFilter("negative"), t.filter.sepia && this.fx.addFilter("sepia"), t.filter.vintage && this.fx.addFilter("brownie"), t.filter.sepia && this.fx.addFilter("sepia"), t.filter.kodachrome && this.fx.addFilter("kodachrome"), t.filter.technicolor && this.fx.addFilter("technicolor"), t.filter.polaroid && this.fx.addFilter("polaroid"), t.filter.pixelate !== 0 && this.fx.addFilter("pixelate", t.filter.pixelate), this.fx.apply(yt)), Yt || (Yt = yt); n instanceof ImageData ? o.putImageData(n, 0, 0) : o.drawImage(n, 0, 0, r, i, 0, 0, yt.width, yt.height), t.filter.enabled && ((!this.fx || !Yt || yt.width !== Yt.width || yt.height !== Yt.height) && (Yt = typeof OffscreenCanvas != "undefined" ? new OffscreenCanvas(yt.width, yt.height) : document.createElement("canvas"), Yt.width !== yt.width && (Yt.width = yt.width), Yt.height !== yt.height && (Yt.height = yt.height), this.fx = ys.ENV.flags.IS_BROWSER ? 
new GV.Canvas({canvas: Yt}) : null), this.fx.reset(), this.fx.addFilter("brightness", t.filter.brightness), t.filter.contrast !== 0 && this.fx.addFilter("contrast", t.filter.contrast), t.filter.sharpness !== 0 && this.fx.addFilter("sharpen", t.filter.sharpness), t.filter.blur !== 0 && this.fx.addFilter("blur", t.filter.blur), t.filter.saturation !== 0 && this.fx.addFilter("saturation", t.filter.saturation), t.filter.hue !== 0 && this.fx.addFilter("hue", t.filter.hue), t.filter.negative && this.fx.addFilter("negative"), t.filter.sepia && this.fx.addFilter("sepia"), t.filter.vintage && this.fx.addFilter("brownie"), t.filter.sepia && this.fx.addFilter("sepia"), t.filter.kodachrome && this.fx.addFilter("kodachrome"), t.filter.technicolor && this.fx.addFilter("technicolor"), t.filter.polaroid && this.fx.addFilter("polaroid"), t.filter.pixelate !== 0 && this.fx.addFilter("pixelate", t.filter.pixelate), this.fx.apply(yt)), Yt || (Yt = yt);
let c; let c;
if (t.backend === "webgl" || Yt instanceof ImageData) if (t.backend === "webgl" || Yt instanceof ImageData)
c = ys.browser.fromPixels(Yt); c = ys.browser.fromPixels(Yt);
@ -34103,9 +34103,17 @@ ${msg}`;
} }
function webWorker(input, image, canvas) { function webWorker(input, image, canvas) {
if (!worker) { if (!worker) {
log("Creating worker thread"); log("creating worker thread");
worker = new Worker(ui.worker, {type: "module"}); worker = new Worker(ui.worker, {type: "module"});
worker.addEventListener("message", (msg) => drawResults(input, msg.data, canvas)); worker.warned = false;
worker.addEventListener("message", (msg) => {
if (!worker.warned) {
log("warning: cannot transfer canvas from worker thread");
log("warning: image will not show filter effects");
worker.warned = true;
}
drawResults(input, msg.data.result, canvas);
});
} }
worker.postMessage({image: image.data.buffer, width: canvas.width, height: canvas.height, config}, [image.data.buffer]); worker.postMessage({image: image.data.buffer, width: canvas.width, height: canvas.height, config}, [image.data.buffer]);
} }
@ -34115,7 +34123,7 @@ function runHumanDetect(input, canvas) {
const live = input.srcObject.getVideoTracks()[0].readyState === "live" && input.readyState > 2 && !input.paused; const live = input.srcObject.getVideoTracks()[0].readyState === "live" && input.readyState > 2 && !input.paused;
if (!live) { if (!live) {
if (!input.paused) { if (!input.paused) {
log(`Video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`); log(`video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
setTimeout(() => runHumanDetect(input, canvas), 500); setTimeout(() => runHumanDetect(input, canvas), 500);
} }
return; return;
@ -34133,7 +34141,7 @@ function runHumanDetect(input, canvas) {
else else
drawResults(input, result, canvas); drawResults(input, result, canvas);
if (config.profile) if (config.profile)
log("Profile data:", human.profile()); log("profile data:", human.profile());
}); });
} }
} }
@ -34279,11 +34287,11 @@ async function main() {
log("Human: demo starting ..."); log("Human: demo starting ...");
setupMenu(); setupMenu();
document.getElementById("log").innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`; document.getElementById("log").innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;
if (ui.modelsPreload) { if (!ui.modelsPreload) {
status("loading"); status("loading");
await human.load(); await human.load();
} }
if (ui.modelsWarmup) { if (!ui.modelsWarmup) {
status("initializing"); status("initializing");
const warmup = new ImageData(50, 50); const warmup = new ImageData(50, 50);
await human.detect(warmup); await human.detect(warmup);

File diff suppressed because one or more lines are too long

View File

@ -1,7 +1,7 @@
{ {
"inputs": { "inputs": {
"demo/browser.js": { "demo/browser.js": {
"bytes": 17175, "bytes": 17419,
"imports": [ "imports": [
{ {
"path": "dist/human.esm.js" "path": "dist/human.esm.js"
@ -23,7 +23,7 @@
"imports": [] "imports": []
}, },
"dist/human.esm.js": { "dist/human.esm.js": {
"bytes": 1274638, "bytes": 1274608,
"imports": [] "imports": []
} }
}, },
@ -31,13 +31,13 @@
"dist/demo-browser-index.js.map": { "dist/demo-browser-index.js.map": {
"imports": [], "imports": [],
"inputs": {}, "inputs": {},
"bytes": 5519692 "bytes": 5520028
}, },
"dist/demo-browser-index.js": { "dist/demo-browser-index.js": {
"imports": [], "imports": [],
"inputs": { "inputs": {
"dist/human.esm.js": { "dist/human.esm.js": {
"bytesInOutput": 1660540 "bytesInOutput": 1660506
}, },
"dist/human.esm.js": { "dist/human.esm.js": {
"bytesInOutput": 8716 "bytesInOutput": 8716
@ -49,10 +49,10 @@
"bytesInOutput": 12356 "bytesInOutput": 12356
}, },
"demo/browser.js": { "demo/browser.js": {
"bytesInOutput": 15336 "bytesInOutput": 15580
} }
}, },
"bytes": 1704459 "bytes": 1704669
} }
} }
} }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -159,7 +159,7 @@
] ]
}, },
"src/image.js": { "src/image.js": {
"bytes": 4603, "bytes": 4604,
"imports": [ "imports": [
{ {
"path": "src/imagefx.js" "path": "src/imagefx.js"
@ -290,7 +290,7 @@
"dist/human.esm-nobundle.js.map": { "dist/human.esm-nobundle.js.map": {
"imports": [], "imports": [],
"inputs": {}, "inputs": {},
"bytes": 611698 "bytes": 611683
}, },
"dist/human.esm-nobundle.js": { "dist/human.esm-nobundle.js": {
"imports": [], "imports": [],
@ -386,7 +386,7 @@
"bytesInOutput": 11013 "bytesInOutput": 11013
}, },
"src/image.js": { "src/image.js": {
"bytesInOutput": 2379 "bytesInOutput": 2349
}, },
"config.js": { "config.js": {
"bytesInOutput": 1293 "bytesInOutput": 1293
@ -401,7 +401,7 @@
"bytesInOutput": 0 "bytesInOutput": 0
} }
}, },
"bytes": 213094 "bytes": 213064
} }
} }
} }

2
dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

8
dist/human.esm.json vendored
View File

@ -336,7 +336,7 @@
] ]
}, },
"src/image.js": { "src/image.js": {
"bytes": 4603, "bytes": 4604,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js" "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -499,7 +499,7 @@
"dist/human.esm.js.map": { "dist/human.esm.js.map": {
"imports": [], "imports": [],
"inputs": {}, "inputs": {},
"bytes": 5407479 "bytes": 5407464
}, },
"dist/human.esm.js": { "dist/human.esm.js": {
"imports": [], "imports": [],
@ -652,7 +652,7 @@
"bytesInOutput": 11014 "bytesInOutput": 11014
}, },
"src/image.js": { "src/image.js": {
"bytesInOutput": 2395 "bytesInOutput": 2365
}, },
"config.js": { "config.js": {
"bytesInOutput": 1294 "bytesInOutput": 1294
@ -667,7 +667,7 @@
"bytesInOutput": 0 "bytesInOutput": 0
} }
}, },
"bytes": 1274638 "bytes": 1274608
} }
} }
} }

2
dist/human.js vendored

File diff suppressed because one or more lines are too long

4
dist/human.js.map vendored

File diff suppressed because one or more lines are too long

8
dist/human.json vendored
View File

@ -336,7 +336,7 @@
] ]
}, },
"src/image.js": { "src/image.js": {
"bytes": 4603, "bytes": 4604,
"imports": [ "imports": [
{ {
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js" "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -499,7 +499,7 @@
"dist/human.js.map": { "dist/human.js.map": {
"imports": [], "imports": [],
"inputs": {}, "inputs": {},
"bytes": 5407475 "bytes": 5407460
}, },
"dist/human.js": { "dist/human.js": {
"imports": [], "imports": [],
@ -652,7 +652,7 @@
"bytesInOutput": 11014 "bytesInOutput": 11014
}, },
"src/image.js": { "src/image.js": {
"bytesInOutput": 2395 "bytesInOutput": 2365
}, },
"config.js": { "config.js": {
"bytesInOutput": 1294 "bytesInOutput": 1294
@ -664,7 +664,7 @@
"bytesInOutput": 5663 "bytesInOutput": 5663
} }
}, },
"bytes": 1274683 "bytes": 1274653
} }
} }
} }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

2
dist/human.node.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -159,7 +159,7 @@
] ]
}, },
"src/image.js": { "src/image.js": {
"bytes": 4603, "bytes": 4604,
"imports": [ "imports": [
{ {
"path": "src/imagefx.js" "path": "src/imagefx.js"
@ -290,7 +290,7 @@
"dist/human.node-nobundle.js.map": { "dist/human.node-nobundle.js.map": {
"imports": [], "imports": [],
"inputs": {}, "inputs": {},
"bytes": 623210 "bytes": 623195
}, },
"dist/human.node-nobundle.js": { "dist/human.node-nobundle.js": {
"imports": [], "imports": [],
@ -386,7 +386,7 @@
"bytesInOutput": 11013 "bytesInOutput": 11013
}, },
"src/image.js": { "src/image.js": {
"bytesInOutput": 2379 "bytesInOutput": 2349
}, },
"config.js": { "config.js": {
"bytesInOutput": 1292 "bytesInOutput": 1292
@ -401,7 +401,7 @@
"bytesInOutput": 5611 "bytesInOutput": 5611
} }
}, },
"bytes": 213101 "bytes": 213071
} }
} }
} }

View File

@ -34,7 +34,7 @@ function process(input, config) {
outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas'); outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');
if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width; if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;
if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height; if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;
this.fx = (tf.ENV.flags.IS_BROWSER && (typeof document !== 'undefined')) ? new fxImage.Canvas({ canvas: outCanvas }) : null; this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
} }
this.fx.reset(); this.fx.reset();
this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled