added performance notes

pull/50/head
Vladimir Mandic 2020-11-11 15:02:49 -05:00
parent 59d1f27ec7
commit 8234339318
23 changed files with 252 additions and 111 deletions

demo/browser.js

@@ -105,10 +105,12 @@ function drawResults(input, result, canvas) {
const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;
const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : '';
const avg = Math.trunc(10 * ui.fps.reduce((a, b) => a + b) / ui.fps.length) / 10;
- document.getElementById('log').innerText = `
- video: ${ui.camera.name} | facing: ${ui.camera.facing} | resolution: ${ui.camera.width} x ${ui.camera.height} ${processing}
- backend: ${human.tf.getBackend()} | ${memory}
- performance: ${str(result.performance)} FPS:${avg}
+ const warning = (ui.fps.length > 5) && (avg < 5) ? '<font color="lightcoral">warning: your performance is low: try switching to a higher performance backend, lowering resolution, or disabling some models</font>' : '';
+ document.getElementById('log').innerHTML = `
+ video: ${ui.camera.name} | facing: ${ui.camera.facing} | resolution: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
+ backend: ${human.tf.getBackend()} | ${memory}<br>
+ performance: ${str(result.performance)} FPS:${avg}<br>
+ ${warning}
`;
}
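
For reference, the average above keeps one decimal place of the moving FPS value, and the new warning only fires once more than five samples have accumulated. A minimal standalone sketch of the same logic (the sample values are made up, not from the demo):

// one-decimal moving average, computed the same way the demo does it
const fps = [4.2, 4.7, 3.9, 4.4, 4.1, 4.6]; // hypothetical recent frame rates
const avg = Math.trunc(10 * fps.reduce((a, b) => a + b) / fps.length) / 10;
// warn only once enough samples exist and the average is genuinely low
const warning = (fps.length > 5) && (avg < 5) ? 'warning: your performance is low' : '';
console.log(avg, warning); // 4.3 'warning: your performance is low'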

dist/demo-browser-index.js

@@ -86320,11 +86320,30 @@ var require_image = __commonJS((exports) => {
if (config2.filter.pixelate !== 0)
this.fx.addFilter("pixelate", config2.filter.pixelate);
this.fx.apply(inCanvas);
+ const gl = false;
+ if (gl) {
+ const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);
+ const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);
+ gl.readPixels(0, 0, outCanvas.width, outCanvas.height, gl.RGBA, gl.UNSIGNED_BYTE, glBuffer);
+ let i = 0;
+ for (let y = outCanvas.height - 1; y >= 0; y--) {
+ for (let x = 0; x < outCanvas.width; x++) {
+ const index = (x + y * outCanvas.width) * 4;
+ pixBuffer[i++] = glBuffer[index + 0];
+ pixBuffer[i++] = glBuffer[index + 1];
+ pixBuffer[i++] = glBuffer[index + 2];
+ }
+ }
+ outCanvas.data = pixBuffer;
+ }
} else {
outCanvas = inCanvas;
}
let pixels;
- if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
+ if (outCanvas.data) {
+ const shape = [outCanvas.height, outCanvas.width, 3];
+ pixels = tf.tensor3d(outCanvas.data, shape, "int32");
+ } else if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
pixels = tf.browser.fromPixels(outCanvas);
} else {
const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
@@ -98001,12 +98020,12 @@ class Human {
this.models.posenet,
this.models.handpose
] = await Promise.all([
- this.config.face.age.enabled ? this.models.age || age.load(this.config) : null,
- this.config.face.gender.enabled ? this.models.gender || gender.load(this.config) : null,
- this.config.face.emotion.enabled ? this.models.emotion || emotion.load(this.config) : null,
- this.config.face.enabled ? this.models.facemesh || facemesh.load(this.config.face) : null,
- this.config.body.enabled ? this.models.posenet || posenet.load(this.config) : null,
- this.config.hand.enabled ? this.models.handpose || handpose.load(this.config.hand) : null
+ this.models.age || (this.config.face.age.enabled ? age.load(this.config) : null),
+ this.models.gender || (this.config.face.gender.enabled ? gender.load(this.config) : null),
+ this.models.emotion || (this.config.face.emotion.enabled ? emotion.load(this.config) : null),
+ this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),
+ this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
+ this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null)
]);
} else {
if (this.config.face.enabled && !this.models.facemesh)
@@ -98836,10 +98855,12 @@ function drawResults(input, result, canvas) {
const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;
const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : "";
const avg = Math.trunc(10 * ui.fps.reduce((a, b) => a + b) / ui.fps.length) / 10;
- document.getElementById("log").innerText = `
- video: ${ui.camera.name} | facing: ${ui.camera.facing} | resolution: ${ui.camera.width} x ${ui.camera.height} ${processing}
- backend: ${human.tf.getBackend()} | ${memory}
- performance: ${str(result.performance)} FPS:${avg}
+ const warning = ui.fps.length > 5 && avg < 5 ? '<font color="lightcoral">warning: your performance is low: try switching to a higher performance backend, lowering resolution, or disabling some models</font>' : "";
+ document.getElementById("log").innerHTML = `
+ video: ${ui.camera.name} | facing: ${ui.camera.facing} | resolution: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
+ backend: ${human.tf.getBackend()} | ${memory}<br>
+ performance: ${str(result.performance)} FPS:${avg}<br>
+ ${warning}
`;
}
async function setupCamera() {

File diff suppressed because one or more lines are too long

dist/demo-browser-index.json

@@ -1,7 +1,7 @@
{
"inputs": {
"demo/browser.js": {
- "bytes": 18524,
+ "bytes": 18768,
"imports": [
{
"path": "dist/human.esm.js"
@@ -23,7 +23,7 @@
"imports": []
},
"dist/human.esm.js": {
- "bytes": 3442243,
+ "bytes": 3443154,
"imports": []
}
},
@@ -31,13 +31,13 @@
"dist/demo-browser-index.js.map": {
"imports": [],
"inputs": {},
- "bytes": 5408428
+ "bytes": 5410315
},
"dist/demo-browser-index.js": {
"imports": [],
"inputs": {
"dist/human.esm.js": {
- "bytesInOutput": 3431560
+ "bytesInOutput": 3432471
},
"demo/draw.js": {
"bytesInOutput": 8898
@@ -46,10 +46,10 @@
"bytesInOutput": 13813
},
"demo/browser.js": {
- "bytesInOutput": 16241
+ "bytesInOutput": 16481
}
},
- "bytes": 3470634
+ "bytes": 3471785
}
}
}

dist/human.esm-nobundle.js

@@ -20822,11 +20822,30 @@ var require_image = __commonJS((exports) => {
if (config2.filter.pixelate !== 0)
this.fx.addFilter("pixelate", config2.filter.pixelate);
this.fx.apply(inCanvas);
+ const gl = false;
+ if (gl) {
+ const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);
+ const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);
+ gl.readPixels(0, 0, outCanvas.width, outCanvas.height, gl.RGBA, gl.UNSIGNED_BYTE, glBuffer);
+ let i = 0;
+ for (let y = outCanvas.height - 1; y >= 0; y--) {
+ for (let x = 0; x < outCanvas.width; x++) {
+ const index = (x + y * outCanvas.width) * 4;
+ pixBuffer[i++] = glBuffer[index + 0];
+ pixBuffer[i++] = glBuffer[index + 1];
+ pixBuffer[i++] = glBuffer[index + 2];
+ }
+ }
+ outCanvas.data = pixBuffer;
+ }
} else {
outCanvas = inCanvas;
}
let pixels;
- if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
+ if (outCanvas.data) {
+ const shape = [outCanvas.height, outCanvas.width, 3];
+ pixels = tf.tensor3d(outCanvas.data, shape, "int32");
+ } else if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
pixels = tf.browser.fromPixels(outCanvas);
} else {
const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
@@ -23878,12 +23897,12 @@ class Human {
this.models.posenet,
this.models.handpose
] = await Promise.all([
- this.config.face.age.enabled ? this.models.age || age.load(this.config) : null,
- this.config.face.gender.enabled ? this.models.gender || gender.load(this.config) : null,
- this.config.face.emotion.enabled ? this.models.emotion || emotion.load(this.config) : null,
- this.config.face.enabled ? this.models.facemesh || facemesh.load(this.config.face) : null,
- this.config.body.enabled ? this.models.posenet || posenet.load(this.config) : null,
- this.config.hand.enabled ? this.models.handpose || handpose.load(this.config.hand) : null
+ this.models.age || (this.config.face.age.enabled ? age.load(this.config) : null),
+ this.models.gender || (this.config.face.gender.enabled ? gender.load(this.config) : null),
+ this.models.emotion || (this.config.face.emotion.enabled ? emotion.load(this.config) : null),
+ this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),
+ this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
+ this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null)
]);
} else {
if (this.config.face.enabled && !this.models.facemesh)

File diff suppressed because one or more lines are too long

dist/human.esm-nobundle.json

@@ -5,7 +5,7 @@
"imports": []
},
"package.json": {
- "bytes": 3556,
+ "bytes": 3554,
"imports": []
},
"src/age/age.js": {
@@ -289,7 +289,7 @@
"imports": []
},
"src/human.js": {
- "bytes": 14448,
+ "bytes": 14460,
"imports": [
{
"path": "src/tf.js"
@@ -330,7 +330,7 @@
]
},
"src/image.js": {
- "bytes": 4638,
+ "bytes": 5688,
"imports": [
{
"path": "src/tf.js"
@@ -349,7 +349,7 @@
"imports": []
},
"src/tf.js": {
- "bytes": 871,
+ "bytes": 1135,
"imports": []
}
},
@@ -357,7 +357,7 @@
"dist/human.esm-nobundle.js.map": {
"imports": [],
"inputs": {},
- "bytes": 766195
+ "bytes": 768064
},
"dist/human.esm-nobundle.js": {
"imports": [],
@@ -447,7 +447,7 @@
"bytesInOutput": 20097
},
"src/image.js": {
- "bytesInOutput": 4483
+ "bytesInOutput": 5382
},
"src/tf.js": {
"bytesInOutput": 174
@@ -477,13 +477,13 @@
"bytesInOutput": 23
},
"src/human.js": {
- "bytesInOutput": 11998
+ "bytesInOutput": 12010
},
"src/human.js": {
"bytesInOutput": 0
}
},
- "bytes": 417911
+ "bytes": 418822
}
}
}

dist/human.esm.js vendored

@@ -86395,11 +86395,30 @@ var require_image = __commonJS((exports) => {
if (config2.filter.pixelate !== 0)
this.fx.addFilter("pixelate", config2.filter.pixelate);
this.fx.apply(inCanvas);
+ const gl = false;
+ if (gl) {
+ const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);
+ const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);
+ gl.readPixels(0, 0, outCanvas.width, outCanvas.height, gl.RGBA, gl.UNSIGNED_BYTE, glBuffer);
+ let i = 0;
+ for (let y = outCanvas.height - 1; y >= 0; y--) {
+ for (let x = 0; x < outCanvas.width; x++) {
+ const index = (x + y * outCanvas.width) * 4;
+ pixBuffer[i++] = glBuffer[index + 0];
+ pixBuffer[i++] = glBuffer[index + 1];
+ pixBuffer[i++] = glBuffer[index + 2];
+ }
+ }
+ outCanvas.data = pixBuffer;
+ }
} else {
outCanvas = inCanvas;
}
let pixels;
- if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
+ if (outCanvas.data) {
+ const shape = [outCanvas.height, outCanvas.width, 3];
+ pixels = tf.tensor3d(outCanvas.data, shape, "int32");
+ } else if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
pixels = tf.browser.fromPixels(outCanvas);
} else {
const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
@@ -98378,12 +98397,12 @@ class Human {
this.models.posenet,
this.models.handpose
] = await Promise.all([
- this.config.face.age.enabled ? this.models.age || age.load(this.config) : null,
- this.config.face.gender.enabled ? this.models.gender || gender.load(this.config) : null,
- this.config.face.emotion.enabled ? this.models.emotion || emotion.load(this.config) : null,
- this.config.face.enabled ? this.models.facemesh || facemesh.load(this.config.face) : null,
- this.config.body.enabled ? this.models.posenet || posenet.load(this.config) : null,
- this.config.hand.enabled ? this.models.handpose || handpose.load(this.config.hand) : null
+ this.models.age || (this.config.face.age.enabled ? age.load(this.config) : null),
+ this.models.gender || (this.config.face.gender.enabled ? gender.load(this.config) : null),
+ this.models.emotion || (this.config.face.emotion.enabled ? emotion.load(this.config) : null),
+ this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),
+ this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
+ this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null)
]);
} else {
if (this.config.face.enabled && !this.models.facemesh)

File diff suppressed because one or more lines are too long

dist/human.esm.json vendored

@@ -12316,7 +12316,7 @@
]
},
"package.json": {
- "bytes": 3557,
+ "bytes": 3554,
"imports": []
},
"src/age/age.js": {
@@ -12600,7 +12600,7 @@
"imports": []
},
"src/human.js": {
- "bytes": 14448,
+ "bytes": 14460,
"imports": [
{
"path": "src/tf.js"
@@ -12641,7 +12641,7 @@
]
},
"src/image.js": {
- "bytes": 4638,
+ "bytes": 5688,
"imports": [
{
"path": "src/tf.js"
@@ -12695,7 +12695,7 @@
"dist/human.esm.js.map": {
"imports": [],
"inputs": {},
- "bytes": 5458440
+ "bytes": 5460036
},
"dist/human.esm.js": {
"imports": [],
@@ -12812,7 +12812,7 @@
"bytesInOutput": 20097
},
"src/image.js": {
- "bytesInOutput": 4495
+ "bytesInOutput": 5394
},
"src/tf.js": {
"bytesInOutput": 44
@@ -13433,13 +13433,13 @@
"bytesInOutput": 24
},
"src/human.js": {
- "bytesInOutput": 12024
+ "bytesInOutput": 12036
},
"src/human.js": {
"bytesInOutput": 0
}
},
- "bytes": 3442243
+ "bytes": 3443154
}
}
}

dist/human.js vendored

@@ -86396,11 +86396,30 @@ return a / b;`;
if (config.filter.pixelate !== 0)
this.fx.addFilter("pixelate", config.filter.pixelate);
this.fx.apply(inCanvas);
+ const gl = false;
+ if (gl) {
+ const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);
+ const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);
+ gl.readPixels(0, 0, outCanvas.width, outCanvas.height, gl.RGBA, gl.UNSIGNED_BYTE, glBuffer);
+ let i = 0;
+ for (let y = outCanvas.height - 1; y >= 0; y--) {
+ for (let x = 0; x < outCanvas.width; x++) {
+ const index = (x + y * outCanvas.width) * 4;
+ pixBuffer[i++] = glBuffer[index + 0];
+ pixBuffer[i++] = glBuffer[index + 1];
+ pixBuffer[i++] = glBuffer[index + 2];
+ }
+ }
+ outCanvas.data = pixBuffer;
+ }
} else {
outCanvas = inCanvas;
}
let pixels;
- if (config.backend === "webgl" || outCanvas instanceof ImageData) {
+ if (outCanvas.data) {
+ const shape = [outCanvas.height, outCanvas.width, 3];
+ pixels = tf.tensor3d(outCanvas.data, shape, "int32");
+ } else if (config.backend === "webgl" || outCanvas instanceof ImageData) {
pixels = tf.browser.fromPixels(outCanvas);
} else {
const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
@@ -86544,12 +86563,12 @@ return a / b;`;
this.models.posenet,
this.models.handpose
] = await Promise.all([
- this.config.face.age.enabled ? this.models.age || age.load(this.config) : null,
- this.config.face.gender.enabled ? this.models.gender || gender.load(this.config) : null,
- this.config.face.emotion.enabled ? this.models.emotion || emotion.load(this.config) : null,
- this.config.face.enabled ? this.models.facemesh || facemesh.load(this.config.face) : null,
- this.config.body.enabled ? this.models.posenet || posenet.load(this.config) : null,
- this.config.hand.enabled ? this.models.handpose || handpose.load(this.config.hand) : null
+ this.models.age || (this.config.face.age.enabled ? age.load(this.config) : null),
+ this.models.gender || (this.config.face.gender.enabled ? gender.load(this.config) : null),
+ this.models.emotion || (this.config.face.emotion.enabled ? emotion.load(this.config) : null),
+ this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),
+ this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
+ this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null)
]);
} else {
if (this.config.face.enabled && !this.models.facemesh)

dist/human.js.map vendored

File diff suppressed because one or more lines are too long

dist/human.json vendored

@@ -12316,7 +12316,7 @@
]
},
"package.json": {
- "bytes": 3556,
+ "bytes": 3554,
"imports": []
},
"src/age/age.js": {
@@ -12600,7 +12600,7 @@
"imports": []
},
"src/human.js": {
- "bytes": 14448,
+ "bytes": 14460,
"imports": [
{
"path": "src/tf.js"
@@ -12641,7 +12641,7 @@
]
},
"src/image.js": {
- "bytes": 4638,
+ "bytes": 5688,
"imports": [
{
"path": "src/tf.js"
@@ -12660,7 +12660,7 @@
"imports": []
},
"src/tf.js": {
- "bytes": 871,
+ "bytes": 1135,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.es2017.js"
@@ -12695,7 +12695,7 @@
"dist/human.js.map": {
"imports": [],
"inputs": {},
- "bytes": 5433478
+ "bytes": 5435596
},
"dist/human.js": {
"imports": [],
@@ -12812,10 +12812,10 @@
"bytesInOutput": 21513
},
"src/image.js": {
- "bytesInOutput": 4651
+ "bytesInOutput": 5588
},
"src/human.js": {
- "bytesInOutput": 13878
+ "bytesInOutput": 13890
},
"src/tf.js": {
"bytesInOutput": 46
@@ -13430,7 +13430,7 @@
"bytesInOutput": 26
}
},
- "bytes": 3631076
+ "bytes": 3632025
}
}
}

dist/human.node-nobundle.js

@@ -20827,11 +20827,30 @@ var require_image = __commonJS((exports2) => {
if (config2.filter.pixelate !== 0)
this.fx.addFilter("pixelate", config2.filter.pixelate);
this.fx.apply(inCanvas);
+ const gl = false;
+ if (gl) {
+ const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);
+ const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);
+ gl.readPixels(0, 0, outCanvas.width, outCanvas.height, gl.RGBA, gl.UNSIGNED_BYTE, glBuffer);
+ let i = 0;
+ for (let y = outCanvas.height - 1; y >= 0; y--) {
+ for (let x = 0; x < outCanvas.width; x++) {
+ const index = (x + y * outCanvas.width) * 4;
+ pixBuffer[i++] = glBuffer[index + 0];
+ pixBuffer[i++] = glBuffer[index + 1];
+ pixBuffer[i++] = glBuffer[index + 2];
+ }
+ }
+ outCanvas.data = pixBuffer;
+ }
} else {
outCanvas = inCanvas;
}
let pixels;
- if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
+ if (outCanvas.data) {
+ const shape = [outCanvas.height, outCanvas.width, 3];
+ pixels = tf.tensor3d(outCanvas.data, shape, "int32");
+ } else if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
pixels = tf.browser.fromPixels(outCanvas);
} else {
const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
@@ -23888,12 +23907,12 @@ class Human {
this.models.posenet,
this.models.handpose
] = await Promise.all([
- this.config.face.age.enabled ? this.models.age || age.load(this.config) : null,
- this.config.face.gender.enabled ? this.models.gender || gender.load(this.config) : null,
- this.config.face.emotion.enabled ? this.models.emotion || emotion.load(this.config) : null,
- this.config.face.enabled ? this.models.facemesh || facemesh.load(this.config.face) : null,
- this.config.body.enabled ? this.models.posenet || posenet.load(this.config) : null,
- this.config.hand.enabled ? this.models.handpose || handpose.load(this.config.hand) : null
+ this.models.age || (this.config.face.age.enabled ? age.load(this.config) : null),
+ this.models.gender || (this.config.face.gender.enabled ? gender.load(this.config) : null),
+ this.models.emotion || (this.config.face.emotion.enabled ? emotion.load(this.config) : null),
+ this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),
+ this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
+ this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null)
]);
} else {
if (this.config.face.enabled && !this.models.facemesh)

File diff suppressed because one or more lines are too long

dist/human.node.js vendored

@@ -112545,11 +112545,30 @@ var require_image = __commonJS((exports2) => {
if (config2.filter.pixelate !== 0)
this.fx.addFilter("pixelate", config2.filter.pixelate);
this.fx.apply(inCanvas);
+ const gl = false;
+ if (gl) {
+ const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);
+ const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);
+ gl.readPixels(0, 0, outCanvas.width, outCanvas.height, gl.RGBA, gl.UNSIGNED_BYTE, glBuffer);
+ let i = 0;
+ for (let y = outCanvas.height - 1; y >= 0; y--) {
+ for (let x = 0; x < outCanvas.width; x++) {
+ const index = (x + y * outCanvas.width) * 4;
+ pixBuffer[i++] = glBuffer[index + 0];
+ pixBuffer[i++] = glBuffer[index + 1];
+ pixBuffer[i++] = glBuffer[index + 2];
+ }
+ }
+ outCanvas.data = pixBuffer;
+ }
} else {
outCanvas = inCanvas;
}
let pixels;
- if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
+ if (outCanvas.data) {
+ const shape = [outCanvas.height, outCanvas.width, 3];
+ pixels = tf.tensor3d(outCanvas.data, shape, "int32");
+ } else if (config2.backend === "webgl" || outCanvas instanceof ImageData) {
pixels = tf.browser.fromPixels(outCanvas);
} else {
const tempCanvas = typeof OffscreenCanvas !== "undefined" ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement("canvas");
@@ -119712,12 +119731,12 @@ class Human {
this.models.posenet,
this.models.handpose
] = await Promise.all([
- this.config.face.age.enabled ? this.models.age || age.load(this.config) : null,
- this.config.face.gender.enabled ? this.models.gender || gender.load(this.config) : null,
- this.config.face.emotion.enabled ? this.models.emotion || emotion.load(this.config) : null,
- this.config.face.enabled ? this.models.facemesh || facemesh.load(this.config.face) : null,
- this.config.body.enabled ? this.models.posenet || posenet.load(this.config) : null,
- this.config.hand.enabled ? this.models.handpose || handpose.load(this.config.hand) : null
+ this.models.age || (this.config.face.age.enabled ? age.load(this.config) : null),
+ this.models.gender || (this.config.face.gender.enabled ? gender.load(this.config) : null),
+ this.models.emotion || (this.config.face.emotion.enabled ? emotion.load(this.config) : null),
+ this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),
+ this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
+ this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null)
]);
} else {
if (this.config.face.enabled && !this.models.facemesh)

File diff suppressed because one or more lines are too long

dist/human.node.json vendored

@@ -5,7 +5,7 @@
"imports": []
},
"package.json": {
- "bytes": 3556,
+ "bytes": 3554,
"imports": []
},
"src/age/age.js": {
@@ -289,7 +289,7 @@
"imports": []
},
"src/human.js": {
- "bytes": 14448,
+ "bytes": 14460,
"imports": [
{
"path": "src/tf.js"
@@ -330,7 +330,7 @@
]
},
"src/image.js": {
- "bytes": 4638,
+ "bytes": 5688,
"imports": [
{
"path": "src/tf.js"
@@ -349,7 +349,7 @@
"imports": []
},
"src/tf.js": {
- "bytes": 871,
+ "bytes": 1135,
"imports": []
}
},
@@ -357,7 +357,7 @@
"dist/human.node-nobundle.js.map": {
"imports": [],
"inputs": {},
- "bytes": 781095
+ "bytes": 782976
},
"dist/human.node-nobundle.js": {
"imports": [],
@@ -447,7 +447,7 @@
"bytesInOutput": 20099
},
"src/image.js": {
- "bytesInOutput": 4485
+ "bytesInOutput": 5384
},
"src/human.js": {
"bytesInOutput": 47
@@ -480,10 +480,10 @@
"bytesInOutput": 23
},
"src/human.js": {
- "bytesInOutput": 12003
+ "bytesInOutput": 12015
}
},
- "bytes": 418258
+ "bytes": 419169
}
}
}

package-lock.json generated

@@ -184,6 +184,7 @@
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-wasm/-/tfjs-backend-wasm-2.7.0.tgz",
"integrity": "sha512-q1p6vR99kvxNjPMH4SMzg5R43g2ACmp3GRMoS2JVCruMyBWroD3KrmgXX2f94t0bl8BysW0JBkGXihajifjd6Q==",
+ "dev": true,
"requires": {
"@types/emscripten": "~0.0.34"
}
@@ -284,7 +285,8 @@
"@types/emscripten": {
"version": "0.0.34",
"resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-0.0.34.tgz",
- "integrity": "sha512-QSb9ojDincskc+uKMI0KXp8e1NALFINCrMlp8VGKGcTSxeEyRTTKyjWw75NYrCZHUsVEEEpr1tYHpbtaC++/sQ=="
+ "integrity": "sha512-QSb9ojDincskc+uKMI0KXp8e1NALFINCrMlp8VGKGcTSxeEyRTTKyjWw75NYrCZHUsVEEEpr1tYHpbtaC++/sQ==",
+ "dev": true
},
"@types/json5": {
"version": "0.0.29",

package.json

@@ -19,8 +19,7 @@
"type": "git",
"url": "git+https://github.com/vladmandic/human.git"
},
- "dependencies": {
- },
+ "dependencies": {},
"peerDependencies": {},
"devDependencies": {
"@tensorflow/tfjs": "^2.7.0",

src/human.js

@@ -130,12 +130,12 @@ class Human {
this.models.posenet,
this.models.handpose,
] = await Promise.all([
- this.config.face.age.enabled ? this.models.age || age.load(this.config) : null,
- this.config.face.gender.enabled ? this.models.gender || gender.load(this.config) : null,
- this.config.face.emotion.enabled ? this.models.emotion || emotion.load(this.config) : null,
- this.config.face.enabled ? this.models.facemesh || facemesh.load(this.config.face) : null,
- this.config.body.enabled ? this.models.posenet || posenet.load(this.config) : null,
- this.config.hand.enabled ? this.models.handpose || handpose.load(this.config.hand) : null,
+ this.models.age || (this.config.face.age.enabled ? age.load(this.config) : null),
+ this.models.gender || (this.config.face.gender.enabled ? gender.load(this.config) : null),
+ this.models.emotion || (this.config.face.emotion.enabled ? emotion.load(this.config) : null),
+ this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),
+ this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
+ this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),
]);
} else {
if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);
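
The reordering above changes caching semantics: previously an already-loaded model was replaced with null whenever its feature was disabled, so toggling a feature off and on forced a reload; now a cached model always survives, and the enabled flag only gates the first load. A minimal sketch of the difference (names are illustrative, not part of the library):

// hypothetical illustration of the two orderings
const cached = { name: 'age-model' }; // a model loaded earlier
const enabled = false; // feature currently disabled
const load = () => ({ name: 'age-model (fresh)' });
const before = enabled ? (cached || load()) : null; // null: cached model is discarded
const after = cached || (enabled ? load() : null); // cached model is kept
console.log(before, after); // null { name: 'age-model' }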

src/image.js

@@ -52,12 +52,34 @@ function process(input, config) {
if (config.filter.polaroid) this.fx.addFilter('polaroid');
if (config.filter.pixelate !== 0) this.fx.addFilter('pixelate', config.filter.pixelate);
this.fx.apply(inCanvas);
+ // read pixel data
+ // const gl = outCanvas.getContext('webgl');
+ const gl = false;
+ if (gl) {
+ const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);
+ const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);
+ gl.readPixels(0, 0, outCanvas.width, outCanvas.height, gl.RGBA, gl.UNSIGNED_BYTE, glBuffer);
+ // gl returns rgba while we only need rgb, so discard the alpha channel
+ // gl places the origin at the lower left, so invert vertically
+ let i = 0;
+ for (let y = outCanvas.height - 1; y >= 0; y--) {
+ for (let x = 0; x < outCanvas.width; x++) {
+ const index = (x + y * outCanvas.width) * 4;
+ pixBuffer[i++] = glBuffer[index + 0];
+ pixBuffer[i++] = glBuffer[index + 1];
+ pixBuffer[i++] = glBuffer[index + 2];
+ }
+ }
+ outCanvas.data = pixBuffer;
+ }
} else {
outCanvas = inCanvas;
}
+ // if (!outCanvas) outCanvas = inCanvas;
let pixels;
- if ((config.backend === 'webgl') || (outCanvas instanceof ImageData)) {
+ if (outCanvas.data) {
+ const shape = [outCanvas.height, outCanvas.width, 3];
+ pixels = tf.tensor3d(outCanvas.data, shape, 'int32');
+ } else if ((config.backend === 'webgl') || (outCanvas instanceof ImageData)) {
+ // tf kernel-optimized method to get imagedata; if input is already imagedata, just use it
pixels = tf.browser.fromPixels(outCanvas);
} else {
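
As the comments note, readPixels returns RGBA rows with the origin at the lower left, so the copy loop drops the alpha channel and walks the source rows top-first to flip the image. A worked sketch of the same index math on a hypothetical 1 x 2 image:

// 1x2 RGBA buffer: top row red, bottom row blue, stored bottom-up as readPixels returns it
const width = 1;
const height = 2;
const glBuffer = new Uint8Array([0, 0, 255, 255, 255, 0, 0, 255]); // bottom row, then top row
const pixBuffer = new Uint8Array(width * height * 3);
let i = 0;
for (let y = height - 1; y >= 0; y--) { // highest source row first = top of image
  for (let x = 0; x < width; x++) {
    const index = (x + y * width) * 4; // RGBA stride is 4
    pixBuffer[i++] = glBuffer[index + 0]; // R
    pixBuffer[i++] = glBuffer[index + 1]; // G
    pixBuffer[i++] = glBuffer[index + 2]; // B, alpha dropped
  }
}
console.log(pixBuffer); // Uint8Array [255, 0, 0, 0, 0, 255] -> top row first, RGB only

The packed RGB buffer can then feed tf.tensor3d(pixBuffer, [height, width, 3], 'int32') directly, which is what the new outCanvas.data fast path does instead of calling tf.browser.fromPixels.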

wiki

@@ -1 +1 @@
- Subproject commit ca3688663e1c427fdd26f55bde54336e939dcc5f
+ Subproject commit 6b460e9f5252038ef7a94b044fdb789e35d610bd