mirror of https://github.com/vladmandic/human
fix firefox compatibility bug
parent 43df4b8bd8
commit 7cea99953f
@@ -5777,7 +5777,7 @@ var require_config = __commonJS((exports2) => {
 var require_package = __commonJS((exports2, module2) => {
 module2.exports = {
 name: "@vladmandic/human",
-version: "0.4.4",
+version: "0.4.5",
 description: "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
 sideEffects: false,
 main: "dist/human.cjs",
@@ -5974,7 +5974,9 @@ class Human {
 targetHeight = this.config.filter.height;
 else if (this.config.filter.width > 0)
 targetHeight = originalHeight * (this.config.filter.width / originalWidth);
-const offscreenCanvas = new OffscreenCanvas(targetWidth, targetHeight);
+const offscreenCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
+offscreenCanvas.width = targetWidth;
+offscreenCanvas.height = targetHeight;
 const ctx = offscreenCanvas.getContext("2d");
 if (input instanceof ImageData)
 ctx.putImageData(input, 0, 0);

@@ -116,7 +116,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 11743,
+"bytes": 11908,
 "imports": [
 {
 "path": "src/facemesh/facemesh.js"
@@ -260,7 +260,7 @@
 "dist/human.cjs.map": {
 "imports": [],
 "inputs": {},
-"bytes": 260478
+"bytes": 260880
 },
 "dist/human.cjs": {
 "imports": [],
@@ -359,10 +359,10 @@
 "bytesInOutput": 47
 },
 "src/human.js": {
-"bytesInOutput": 10611
+"bytesInOutput": 10775
 }
 },
-"bytes": 154950
+"bytes": 155114
 }
 }
 }

File diff suppressed because one or more lines are too long
@@ -5777,7 +5777,7 @@ var require_config = __commonJS((exports) => {
 var require_package = __commonJS((exports, module) => {
 module.exports = {
 name: "@vladmandic/human",
-version: "0.4.4",
+version: "0.4.5",
 description: "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
 sideEffects: false,
 main: "dist/human.cjs",
@@ -5971,7 +5971,9 @@ class Human {
 targetHeight = this.config.filter.height;
 else if (this.config.filter.width > 0)
 targetHeight = originalHeight * (this.config.filter.width / originalWidth);
-const offscreenCanvas = new OffscreenCanvas(targetWidth, targetHeight);
+const offscreenCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
+offscreenCanvas.width = targetWidth;
+offscreenCanvas.height = targetHeight;
 const ctx = offscreenCanvas.getContext("2d");
 if (input instanceof ImageData)
 ctx.putImageData(input, 0, 0);

File diff suppressed because one or more lines are too long
@@ -116,7 +116,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 11743,
+"bytes": 11908,
 "imports": [
 {
 "path": "src/facemesh/facemesh.js"
@@ -260,7 +260,7 @@
 "dist/human.esm-nobundle.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 248392
+"bytes": 248628
 },
 "dist/human.esm-nobundle.js": {
 "imports": [],
@@ -356,13 +356,13 @@
 "bytesInOutput": 2747
 },
 "src/human.js": {
-"bytesInOutput": 10611
+"bytesInOutput": 10775
 },
 "src/human.js": {
 "bytesInOutput": 0
 }
 },
-"bytes": 154809
+"bytes": 154973
 }
 }
 }

@@ -72826,7 +72826,7 @@ var require_config = __commonJS((exports) => {
 var require_package = __commonJS((exports, module) => {
 module.exports = {
 name: "@vladmandic/human",
-version: "0.4.4",
+version: "0.4.5",
 description: "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
 sideEffects: false,
 main: "dist/human.cjs",
@@ -73020,7 +73020,9 @@ class Human {
 targetHeight = this.config.filter.height;
 else if (this.config.filter.width > 0)
 targetHeight = originalHeight * (this.config.filter.width / originalWidth);
-const offscreenCanvas = new OffscreenCanvas(targetWidth, targetHeight);
+const offscreenCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
+offscreenCanvas.width = targetWidth;
+offscreenCanvas.height = targetHeight;
 const ctx = offscreenCanvas.getContext("2d");
 if (input instanceof ImageData)
 ctx.putImageData(input, 0, 0);

File diff suppressed because one or more lines are too long
@@ -291,7 +291,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 11743,
+"bytes": 11908,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -468,7 +468,7 @@
 "dist/human.esm.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 5121362
+"bytes": 5121598
 },
 "dist/human.esm.js": {
 "imports": [],
@@ -621,13 +621,13 @@
 "bytesInOutput": 2747
 },
 "src/human.js": {
-"bytesInOutput": 10601
+"bytesInOutput": 10765
 },
 "src/human.js": {
 "bytesInOutput": 0
 }
 },
-"bytes": 2923437
+"bytes": 2923601
 }
 }
 }

@@ -72827,7 +72827,7 @@ var Human = (() => {
 var require_package = __commonJS((exports, module) => {
 module.exports = {
 name: "@vladmandic/human",
-version: "0.4.4",
+version: "0.4.5",
 description: "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
 sideEffects: false,
 main: "dist/human.cjs",
@@ -73025,7 +73025,9 @@ var Human = (() => {
 targetHeight = this.config.filter.height;
 else if (this.config.filter.width > 0)
 targetHeight = originalHeight * (this.config.filter.width / originalWidth);
-const offscreenCanvas = new OffscreenCanvas(targetWidth, targetHeight);
+const offscreenCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
+offscreenCanvas.width = targetWidth;
+offscreenCanvas.height = targetHeight;
 const ctx = offscreenCanvas.getContext("2d");
 if (input instanceof ImageData)
 ctx.putImageData(input, 0, 0);

File diff suppressed because one or more lines are too long
@@ -291,7 +291,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 11743,
+"bytes": 11908,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -468,7 +468,7 @@
 "dist/human.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 5125218
+"bytes": 5125454
 },
 "dist/human.js": {
 "imports": [],
@@ -621,10 +621,10 @@
 "bytesInOutput": 2875
 },
 "src/human.js": {
-"bytesInOutput": 11822
+"bytesInOutput": 11994
 }
 },
-"bytes": 3069302
+"bytes": 3069474
 }
 }
 }

@@ -139,8 +139,9 @@ class Human {
 let targetHeight = originalHeight;
 if (this.config.filter.height > 0) targetHeight = this.config.filter.height;
 else if (this.config.filter.width > 0) targetHeight = originalHeight * (this.config.filter.width / originalWidth);
-
-const offscreenCanvas = new OffscreenCanvas(targetWidth, targetHeight);
+const offscreenCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');
+offscreenCanvas.width = targetWidth;
+offscreenCanvas.height = targetHeight;
 const ctx = offscreenCanvas.getContext('2d');
 if (input instanceof ImageData) ctx.putImageData(input, 0, 0);
 else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, offscreenCanvas.width, offscreenCanvas.height);

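For reference, a minimal standalone sketch of the fallback pattern applied above: Firefox did not ship OffscreenCanvas at the time of this commit, so the resize path feature-detects it and falls back to a regular canvas element. The helper names below (createResizeCanvas, drawInput) are illustrative only and not part of the human API.

// Minimal sketch of the fallback pattern from the hunks above.
// Helper names are illustrative, not part of the human API.
function createResizeCanvas(targetWidth, targetHeight) {
  const canvas = (typeof OffscreenCanvas !== 'undefined')
    ? new OffscreenCanvas(targetWidth, targetHeight) // browsers/workers that support OffscreenCanvas
    : document.createElement('canvas');              // fallback, e.g. for Firefox
  // Explicit sizing keeps both paths identical; a plain canvas element starts at 300x150.
  canvas.width = targetWidth;
  canvas.height = targetHeight;
  return canvas;
}

// Drawing mirrors the diff: ImageData goes through putImageData, anything
// image-like (video frame, img element, another canvas) through drawImage with scaling.
function drawInput(canvas, input, originalWidth, originalHeight) {
  const ctx = canvas.getContext('2d');
  if (input instanceof ImageData) ctx.putImageData(input, 0, 0);
  else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, canvas.width, canvas.height);
  return canvas;
}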
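The context lines of the same src/human.js hunk show how the target height is derived when only one resize filter dimension is configured; a quick sketch of that arithmetic, using hypothetical config values and input dimensions:

// Aspect-ratio-preserving target height, following the context lines above.
// The config and input dimensions here are hypothetical examples.
const config = { filter: { width: 640, height: 0 } };
const originalWidth = 1280;
const originalHeight = 720;

let targetHeight = originalHeight;
if (config.filter.height > 0) targetHeight = config.filter.height;
else if (config.filter.width > 0) targetHeight = originalHeight * (config.filter.width / originalWidth);

// targetHeight === 360, i.e. 720 * (640 / 1280): scaling the width to 640 keeps the 16:9 aspect ratio.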