fixed worker and filter compatibility

pull/50/head
Vladimir Mandic 2020-11-05 08:21:23 -05:00
parent affdb2836e
commit 8d91c9237d
20 changed files with 73 additions and 55 deletions

View File

@ -201,10 +201,18 @@ async function setupCamera() {
function webWorker(input, image, canvas) {
if (!worker) {
// create new webworker and add event handler only once
log('Creating worker thread');
log('creating worker thread');
worker = new Worker(ui.worker, { type: 'module' });
worker.warned = false;
// after receiving message from webworker, parse&draw results and send new frame for processing
worker.addEventListener('message', (msg) => drawResults(input, msg.data, canvas));
worker.addEventListener('message', (msg) => {
if (!worker.warned) {
log('warning: cannot transfer canvas from worker thread');
log('warning: image will not show filter effects');
worker.warned = true;
}
drawResults(input, msg.data.result, canvas);
});
}
// pass image data as arraybuffer to worker by reference to avoid copy
worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
@ -219,7 +227,7 @@ function runHumanDetect(input, canvas) {
const live = (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);
if (!live) {
if (!input.paused) {
log(`Video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
log(`video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
setTimeout(() => runHumanDetect(input, canvas), 500);
}
return;
@ -236,7 +244,7 @@ function runHumanDetect(input, canvas) {
human.detect(input, config).then((result) => {
if (result.error) log(result.error);
else drawResults(input, result, canvas);
if (config.profile) log('Profile data:', human.profile());
if (config.profile) log('profile data:', human.profile());
});
}
}
@ -394,11 +402,11 @@ async function main() {
setupMenu();
document.getElementById('log').innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;
// this is not required, just pre-warms the library
if (ui.modelsPreload) {
if (!ui.modelsPreload) {
status('loading');
await human.load();
}
if (ui.modelsWarmup) {
if (!ui.modelsWarmup) {
status('initializing');
const warmup = new ImageData(50, 50);
await human.detect(warmup);

View File

@ -20,8 +20,10 @@ onmessage = async (msg) => {
result = await human.detect(image, config);
} catch (err) {
result.error = err.message;
log('Worker thread error:', err.message);
log('worker thread error:', err.message);
}
postMessage(result);
// must strip canvas from return value as it cannot be transferred from worker thread
if (result.canvas) result.canvas = null;
postMessage({ result });
busy = false;
};

View File

@ -33109,7 +33109,7 @@ var Yx = we((qx) => {
let a = r, s = i;
t.filter.width > 0 ? a = t.filter.width : t.filter.height > 0 && (a = r * (t.filter.height / i)), t.filter.height > 0 ? s = t.filter.height : t.filter.width > 0 && (s = i * (t.filter.width / r)), (!yt || yt.width !== a || yt.height !== s) && (yt = typeof OffscreenCanvas != "undefined" ? new OffscreenCanvas(a, s) : document.createElement("canvas"), yt.width !== a && (yt.width = a), yt.height !== s && (yt.height = s));
const o = yt.getContext("2d");
n instanceof ImageData ? o.putImageData(n, 0, 0) : o.drawImage(n, 0, 0, r, i, 0, 0, yt.width, yt.height), t.filter.enabled && ((!this.fx || !Yt || yt.width !== Yt.width || yt.height !== Yt.height) && (Yt = typeof OffscreenCanvas != "undefined" ? new OffscreenCanvas(yt.width, yt.height) : document.createElement("canvas"), Yt.width !== yt.width && (Yt.width = yt.width), Yt.height !== yt.height && (Yt.height = yt.height), this.fx = ys.ENV.flags.IS_BROWSER && typeof document != "undefined" ? new GV.Canvas({canvas: Yt}) : null), this.fx.reset(), this.fx.addFilter("brightness", t.filter.brightness), t.filter.contrast !== 0 && this.fx.addFilter("contrast", t.filter.contrast), t.filter.sharpness !== 0 && this.fx.addFilter("sharpen", t.filter.sharpness), t.filter.blur !== 0 && this.fx.addFilter("blur", t.filter.blur), t.filter.saturation !== 0 && this.fx.addFilter("saturation", t.filter.saturation), t.filter.hue !== 0 && this.fx.addFilter("hue", t.filter.hue), t.filter.negative && this.fx.addFilter("negative"), t.filter.sepia && this.fx.addFilter("sepia"), t.filter.vintage && this.fx.addFilter("brownie"), t.filter.sepia && this.fx.addFilter("sepia"), t.filter.kodachrome && this.fx.addFilter("kodachrome"), t.filter.technicolor && this.fx.addFilter("technicolor"), t.filter.polaroid && this.fx.addFilter("polaroid"), t.filter.pixelate !== 0 && this.fx.addFilter("pixelate", t.filter.pixelate), this.fx.apply(yt)), Yt || (Yt = yt);
n instanceof ImageData ? o.putImageData(n, 0, 0) : o.drawImage(n, 0, 0, r, i, 0, 0, yt.width, yt.height), t.filter.enabled && ((!this.fx || !Yt || yt.width !== Yt.width || yt.height !== Yt.height) && (Yt = typeof OffscreenCanvas != "undefined" ? new OffscreenCanvas(yt.width, yt.height) : document.createElement("canvas"), Yt.width !== yt.width && (Yt.width = yt.width), Yt.height !== yt.height && (Yt.height = yt.height), this.fx = ys.ENV.flags.IS_BROWSER ? new GV.Canvas({canvas: Yt}) : null), this.fx.reset(), this.fx.addFilter("brightness", t.filter.brightness), t.filter.contrast !== 0 && this.fx.addFilter("contrast", t.filter.contrast), t.filter.sharpness !== 0 && this.fx.addFilter("sharpen", t.filter.sharpness), t.filter.blur !== 0 && this.fx.addFilter("blur", t.filter.blur), t.filter.saturation !== 0 && this.fx.addFilter("saturation", t.filter.saturation), t.filter.hue !== 0 && this.fx.addFilter("hue", t.filter.hue), t.filter.negative && this.fx.addFilter("negative"), t.filter.sepia && this.fx.addFilter("sepia"), t.filter.vintage && this.fx.addFilter("brownie"), t.filter.sepia && this.fx.addFilter("sepia"), t.filter.kodachrome && this.fx.addFilter("kodachrome"), t.filter.technicolor && this.fx.addFilter("technicolor"), t.filter.polaroid && this.fx.addFilter("polaroid"), t.filter.pixelate !== 0 && this.fx.addFilter("pixelate", t.filter.pixelate), this.fx.apply(yt)), Yt || (Yt = yt);
let c;
if (t.backend === "webgl" || Yt instanceof ImageData)
c = ys.browser.fromPixels(Yt);
@ -34103,9 +34103,17 @@ ${msg}`;
}
function webWorker(input, image, canvas) {
if (!worker) {
log("Creating worker thread");
log("creating worker thread");
worker = new Worker(ui.worker, {type: "module"});
worker.addEventListener("message", (msg) => drawResults(input, msg.data, canvas));
worker.warned = false;
worker.addEventListener("message", (msg) => {
if (!worker.warned) {
log("warning: cannot transfer canvas from worker thread");
log("warning: image will not show filter effects");
worker.warned = true;
}
drawResults(input, msg.data.result, canvas);
});
}
worker.postMessage({image: image.data.buffer, width: canvas.width, height: canvas.height, config}, [image.data.buffer]);
}
@ -34115,7 +34123,7 @@ function runHumanDetect(input, canvas) {
const live = input.srcObject.getVideoTracks()[0].readyState === "live" && input.readyState > 2 && !input.paused;
if (!live) {
if (!input.paused) {
log(`Video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
log(`video not ready: state: ${input.srcObject.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
setTimeout(() => runHumanDetect(input, canvas), 500);
}
return;
@ -34133,7 +34141,7 @@ function runHumanDetect(input, canvas) {
else
drawResults(input, result, canvas);
if (config.profile)
log("Profile data:", human.profile());
log("profile data:", human.profile());
});
}
}
@ -34279,11 +34287,11 @@ async function main() {
log("Human: demo starting ...");
setupMenu();
document.getElementById("log").innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;
if (ui.modelsPreload) {
if (!ui.modelsPreload) {
status("loading");
await human.load();
}
if (ui.modelsWarmup) {
if (!ui.modelsWarmup) {
status("initializing");
const warmup = new ImageData(50, 50);
await human.detect(warmup);

File diff suppressed because one or more lines are too long

View File

@ -1,7 +1,7 @@
{
"inputs": {
"demo/browser.js": {
"bytes": 17175,
"bytes": 17419,
"imports": [
{
"path": "dist/human.esm.js"
@ -23,7 +23,7 @@
"imports": []
},
"dist/human.esm.js": {
"bytes": 1274638,
"bytes": 1274608,
"imports": []
}
},
@ -31,13 +31,13 @@
"dist/demo-browser-index.js.map": {
"imports": [],
"inputs": {},
"bytes": 5519692
"bytes": 5520028
},
"dist/demo-browser-index.js": {
"imports": [],
"inputs": {
"dist/human.esm.js": {
"bytesInOutput": 1660540
"bytesInOutput": 1660506
},
"dist/human.esm.js": {
"bytesInOutput": 8716
@ -49,10 +49,10 @@
"bytesInOutput": 12356
},
"demo/browser.js": {
"bytesInOutput": 15336
"bytesInOutput": 15580
}
},
"bytes": 1704459
"bytes": 1704669
}
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -159,7 +159,7 @@
]
},
"src/image.js": {
"bytes": 4603,
"bytes": 4604,
"imports": [
{
"path": "src/imagefx.js"
@ -290,7 +290,7 @@
"dist/human.esm-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 611698
"bytes": 611683
},
"dist/human.esm-nobundle.js": {
"imports": [],
@ -386,7 +386,7 @@
"bytesInOutput": 11013
},
"src/image.js": {
"bytesInOutput": 2379
"bytesInOutput": 2349
},
"config.js": {
"bytesInOutput": 1293
@ -401,7 +401,7 @@
"bytesInOutput": 0
}
},
"bytes": 213094
"bytes": 213064
}
}
}

2
dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

8
dist/human.esm.json vendored
View File

@ -336,7 +336,7 @@
]
},
"src/image.js": {
"bytes": 4603,
"bytes": 4604,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -499,7 +499,7 @@
"dist/human.esm.js.map": {
"imports": [],
"inputs": {},
"bytes": 5407479
"bytes": 5407464
},
"dist/human.esm.js": {
"imports": [],
@ -652,7 +652,7 @@
"bytesInOutput": 11014
},
"src/image.js": {
"bytesInOutput": 2395
"bytesInOutput": 2365
},
"config.js": {
"bytesInOutput": 1294
@ -667,7 +667,7 @@
"bytesInOutput": 0
}
},
"bytes": 1274638
"bytes": 1274608
}
}
}

2
dist/human.js vendored

File diff suppressed because one or more lines are too long

4
dist/human.js.map vendored

File diff suppressed because one or more lines are too long

8
dist/human.json vendored
View File

@ -336,7 +336,7 @@
]
},
"src/image.js": {
"bytes": 4603,
"bytes": 4604,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -499,7 +499,7 @@
"dist/human.js.map": {
"imports": [],
"inputs": {},
"bytes": 5407475
"bytes": 5407460
},
"dist/human.js": {
"imports": [],
@ -652,7 +652,7 @@
"bytesInOutput": 11014
},
"src/image.js": {
"bytesInOutput": 2395
"bytesInOutput": 2365
},
"config.js": {
"bytesInOutput": 1294
@ -664,7 +664,7 @@
"bytesInOutput": 5663
}
},
"bytes": 1274683
"bytes": 1274653
}
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

2
dist/human.node.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -159,7 +159,7 @@
]
},
"src/image.js": {
"bytes": 4603,
"bytes": 4604,
"imports": [
{
"path": "src/imagefx.js"
@ -290,7 +290,7 @@
"dist/human.node-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 623210
"bytes": 623195
},
"dist/human.node-nobundle.js": {
"imports": [],
@ -386,7 +386,7 @@
"bytesInOutput": 11013
},
"src/image.js": {
"bytesInOutput": 2379
"bytesInOutput": 2349
},
"config.js": {
"bytesInOutput": 1292
@ -401,7 +401,7 @@
"bytesInOutput": 5611
}
},
"bytes": 213101
"bytes": 213071
}
}
}

View File

@ -34,7 +34,7 @@ function process(input, config) {
outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');
if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;
if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;
this.fx = (tf.ENV.flags.IS_BROWSER && (typeof document !== 'undefined')) ? new fxImage.Canvas({ canvas: outCanvas }) : null;
this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
}
this.fx.reset();
this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled