mirror of https://github.com/vladmandic/human
commit 819b4a3f88 (parent 791087bb78): update
@@ -29,11 +29,13 @@ onmessage = async (msg) => {
   }

   if (result.canvas) { // convert canvas to imageData and send it by reference
-    const ctx = result.canvas.getContext('2d');
-    const img = ctx?.getImageData(0, 0, result.canvas.width, result.canvas.height);
+    const canvas = new OffscreenCanvas(result.canvas.width, result.canvas.height);
+    const ctx = canvas.getContext('2d');
+    if (ctx) ctx.drawImage(result.canvas, 0, 0);
+    const img = ctx ? ctx.getImageData(0, 0, result.canvas.width, result.canvas.height) : null;
     result.canvas = null; // must strip original canvas from return value as it cannot be transfered from worker thread
     // @ts-ignore tslint wrong type matching for worker
-    if (img) postMessage({ result, image: img.data.buffer, width: msg.data.width, height: msg.data.height }, [img?.data.buffer]);
+    if (img) postMessage({ result, image: img.data.buffer, width: msg.data.width, height: msg.data.height }, [img.data.buffer]);
     // @ts-ignore tslint wrong type matching for worker
     else postMessage({ result });
   } else {
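The first hunk changes how the worker returns its annotated canvas: instead of reading pixels straight from result.canvas, it copies that canvas onto a new OffscreenCanvas, extracts ImageData from the copy, and posts only the pixel buffer back as a transferable. Below is a minimal, self-contained sketch of that pattern, assuming a dedicated worker context (TypeScript lib "webworker"); the DetectionResult shape and the postResult name are illustrative, not part of the repository.

// Worker-side sketch; DetectionResult and postResult are hypothetical names.
interface DetectionResult { canvas: OffscreenCanvas | null; [key: string]: unknown }

function postResult(result: DetectionResult, width: number, height: number): void {
  if (result.canvas) {
    // copy pixels into a fresh OffscreenCanvas; the original canvas itself cannot be transferred
    const copy = new OffscreenCanvas(result.canvas.width, result.canvas.height);
    const ctx = copy.getContext('2d');
    if (ctx) ctx.drawImage(result.canvas, 0, 0);
    const img = ctx ? ctx.getImageData(0, 0, copy.width, copy.height) : null;
    result.canvas = null; // strip the canvas so the rest of the result can be structured-cloned
    // transfer the underlying ArrayBuffer instead of copying it
    if (img) postMessage({ result, image: img.data.buffer, width, height }, [img.data.buffer]);
    else postMessage({ result });
  } else {
    postMessage({ result });
  }
}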
@@ -415,7 +415,10 @@ function webWorker(input, image, canvas, timestamp) {
     }

     ui.framesDetect++;
-    if (!ui.drawThread) drawResults(input);
+    if (!ui.drawThread) {
+      status();
+      drawResults(input);
+    }
     // eslint-disable-next-line no-use-before-define
     ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
   });
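The second hunk runs on the main thread, inside webWorker's message handler: when no separate draw loop (ui.drawThread) is active, the handler now refreshes the status line itself before drawing, then schedules the next detection frame. A rough sketch of that receiving side, assuming the worker posted the transferred pixel buffer from the previous hunk; ui, drawResults, status and runHumanDetect stand in for the demo's own helpers and are only declared here so the sketch type-checks on its own.

// Main-thread sketch of handling a detection message from the worker.
declare const ui: { drawThread: number; detectThread: number; framesDetect: number };
declare function drawResults(input: HTMLVideoElement): void;
declare function status(): void;
declare function runHumanDetect(input: HTMLVideoElement, canvas: HTMLCanvasElement, timestamp?: number): void;

function onWorkerMessage(msg: MessageEvent, input: HTMLVideoElement, canvas: HTMLCanvasElement): void {
  if (msg.data.image) {
    // rebuild ImageData from the transferred ArrayBuffer and paint it back
    const pixels = new Uint8ClampedArray(msg.data.image);
    const image = new ImageData(pixels, msg.data.width, msg.data.height);
    canvas.getContext('2d')?.putImageData(image, 0, 0);
  }
  ui.framesDetect++;
  if (!ui.drawThread) {
    status();           // no dedicated draw loop, so update the status line here
    drawResults(input); // and draw the results directly
  }
  ui.detectThread = requestAnimationFrame((now) => runHumanDetect(input, canvas, now));
}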
@@ -455,7 +458,6 @@ function runHumanDetect(input, canvas, timestamp) {
       const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
       // perform detection in worker
       webWorker(input, data, canvas, timestamp);
-      status();
     } else {
       human.detect(input, userConfig).then((result) => {
         status();
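The last hunk drops the status() call that followed webWorker(): posting to the worker returns immediately, so that call updated the status line before detection had actually run; with the previous hunk, status() is refreshed from the worker's message handler instead. The non-worker branch, visible in the context lines, keeps its update inside the promise callback. A small sketch of that direct path, with human, userConfig, status and drawResults assumed from the surrounding demo code:

// Sketch of the direct (non-worker) detection path: status() runs only after
// human.detect() has resolved. All declared names stand in for the demo's own.
declare const human: { detect: (input: HTMLVideoElement, config?: object) => Promise<unknown> };
declare const userConfig: object;
declare function status(): void;
declare function drawResults(input: HTMLVideoElement): void;

function detectDirect(input: HTMLVideoElement): void {
  human.detect(input, userConfig).then((result) => {
    status();           // detection has finished, so the status line is accurate here
    drawResults(input); // the demo draws from its own stored copy of the result
    return result;
  });
}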