minor compatibility fixes

pull/50/head
Vladimir Mandic 2020-11-26 10:37:04 -05:00
parent bef5619d5d
commit 7d0e63981c
27 changed files with 115 additions and 117 deletions

demo/browser.js

@@ -134,11 +134,11 @@ async function drawResults(input) {
   const avgDetect = Math.trunc(10 * ui.detectFPS.reduce((a, b) => a + b, 0) / ui.detectFPS.length) / 10;
   const avgDraw = Math.trunc(10 * ui.drawFPS.reduce((a, b) => a + b, 0) / ui.drawFPS.length) / 10;
   const warning = (ui.detectFPS.length > 5) && (avgDetect < 5) ? '<font color="lightcoral">warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models</font>' : '';
-  document.getElementById('log').innerText = `
-    video: ${ui.camera.name} | facing: ${ui.camera.facing} | screen: ${window.innerWidth} x ${window.innerHeight} camera: ${ui.camera.width} x ${ui.camera.height} ${processing}
-    backend: ${human.tf.getBackend()} | ${memory}
-    performance: ${str(result.performance)}ms FPS process:${avgDetect} refresh:${avgDraw}
-    ${warning}
+  document.getElementById('log').innerHTML = `
+    video: ${ui.camera.name} | facing: ${ui.camera.facing} | screen: ${window.innerWidth} x ${window.innerHeight} camera: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
+    backend: ${human.tf.getBackend()} | ${memory}<br>
+    performance: ${str(result.performance)}ms FPS process:${avgDetect} refresh:${avgDraw}<br>
+    ${warning}<br>
   `;
   ui.framesDraw++;
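The change above is why the `<br>` tags appear: `innerText` preserves the template literal's raw newlines as line breaks, while `innerHTML` collapses them to whitespace and needs explicit markup. A minimal sketch of the difference (the element is hypothetical):

const el = document.createElement('div');
el.innerText = 'line1\nline2';   // \n is rendered as a line break
el.innerHTML = 'line1\nline2';   // \n collapses to a space
el.innerHTML = 'line1<br>line2'; // explicit <br> restores the break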

demo/draw.js

@@ -4,7 +4,7 @@ async function drawGesture(result, canvas, ui) {
   ctx.font = ui.baseFont;
   ctx.fillStyle = ui.baseLabel;
   let i = 1;
-  for (const gesture in result) {
+  for (let gesture = 0; gesture < result.length; gesture++) {
     const [where, what] = Object.entries(result[gesture]);
     if ((what.length > 1) && (what[1].length > 0)) {
       const person = where[1] > 0 ? `#${where[1]}` : '';
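Most hunks in this commit make the same substitution: `for (const i in array)` becomes an index-based loop. On arrays, `for...in` yields property keys as strings and also visits any enumerable properties patched onto the array or its prototype, so the indexed form is both more predictable and more compatible across environments. A small sketch of the difference:

const arr = ['a', 'b'];
for (const i in arr) console.log(typeof i, i);                 // 'string' '0', 'string' '1' (plus inherited enumerables)
for (let i = 0; i < arr.length; i++) console.log(typeof i, i); // 'number' 0, 'number' 1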
@@ -115,14 +115,14 @@ async function drawBody(result, canvas, ui) {
   if (!result) return;
   const ctx = canvas.getContext('2d');
   ctx.lineJoin = 'round';
-  for (const i in result) {
+  for (let i = 0; i < result.length; i++) {
     if (!lastDrawnPose[i] && ui.buffered) lastDrawnPose[i] = { ...result[i] };
     ctx.fillStyle = ui.baseColor;
     ctx.strokeStyle = ui.baseColor;
     ctx.font = ui.baseFont;
     ctx.lineWidth = ui.baseLineWidth;
     if (ui.drawPoints) {
-      for (const pt in result[i].keypoints) {
+      for (let pt = 0; pt < result[i].keypoints.length; pt++) {
         ctx.beginPath();
         if (ui.buffered) {
           lastDrawnPose[i].keypoints[pt].position.x = (lastDrawnPose[i].keypoints[pt].position.x + result[i].keypoints[pt].position.x) / 2;
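The `ui.buffered` branch above smooths keypoints by averaging the previously drawn position with the new one, i.e. an exponential moving average with a fixed weight of 0.5. A sketch of the same idea with a tunable weight (the `alpha` parameter is illustrative, not part of the demo):

// alpha = 0.5 reproduces the averaging above; smaller alpha = smoother but laggier motion
function smooth(prev, next, alpha = 0.5) {
  return prev + alpha * (next - prev);
}
// usage: last.position.x = smooth(last.position.x, current.position.x);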

demo/menu.js

@@ -300,7 +300,7 @@ class Menu {
     const width = canvas.width / values.length;
     const max = 1 + Math.max(...values);
     const height = canvas.height / max;
-    for (const i in values) {
+    for (let i = 0; i < values.length; i++) {
       const gradient = ctx.createLinearGradient(0, (max - values[i]) * height, 0, 0);
       gradient.addColorStop(0.1, theme.chartColor);
       gradient.addColorStop(0.4, theme.background);
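For context, this Menu code renders a value history (e.g. FPS samples) as a bar chart: each value gets a bar of equal width, scaled so the largest value nearly fills the canvas (the `1 +` in `max` keeps even the tallest bar just below the top edge). A standalone sketch of the scaling arithmetic (sizes are hypothetical):

const values = [28, 30, 25];               // e.g. recent FPS samples
const canvas = { width: 120, height: 40 }; // hypothetical chart size
const width = canvas.width / values.length;
const max = 1 + Math.max(...values);
const height = canvas.height / max;
for (let i = 0; i < values.length; i++) {
  console.log(`bar ${i}: x=${(i * width).toFixed(1)} h=${(values[i] * height).toFixed(1)}`);
}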

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/demo-browser-index.json

@@ -1,7 +1,7 @@
 {
   "inputs": {
     "demo/browser.js": {
-      "bytes": 25178,
+      "bytes": 25194,
       "imports": [
         {
           "path": "dist/human.esm.js"
@@ -18,7 +18,7 @@
       ]
     },
     "demo/draw.js": {
-      "bytes": 10568,
+      "bytes": 10630,
       "imports": []
     },
     "demo/gl-bench.js": {
@@ -26,11 +26,11 @@
       "imports": []
     },
     "demo/menu.js": {
-      "bytes": 13842,
+      "bytes": 13858,
       "imports": []
     },
     "dist/human.esm.js": {
-      "bytes": 1774850,
+      "bytes": 1774844,
       "imports": []
     }
   },
@@ -38,29 +38,29 @@
     "dist/demo-browser-index.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 2678894
+      "bytes": 2679258
     },
     "dist/demo-browser-index.js": {
       "imports": [],
       "exports": [],
       "inputs": {
         "dist/human.esm.js": {
-          "bytesInOutput": 1767609
+          "bytesInOutput": 1767603
         },
         "demo/draw.js": {
-          "bytesInOutput": 7724
+          "bytesInOutput": 7776
         },
         "demo/menu.js": {
-          "bytesInOutput": 11788
+          "bytesInOutput": 11800
         },
         "demo/gl-bench.js": {
           "bytesInOutput": 7382
         },
         "demo/browser.js": {
-          "bytesInOutput": 19345
+          "bytesInOutput": 19361
         }
       },
-      "bytes": 1821237
+      "bytes": 1821311
     }
   }
 }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.json vendored

@@ -159,7 +159,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 6993,
+      "bytes": 7009,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -179,7 +179,7 @@
       "imports": []
     },
     "src/face/facemesh.js": {
-      "bytes": 2720,
+      "bytes": 2740,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -196,7 +196,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 13868,
+      "bytes": 13884,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -228,7 +228,7 @@
       ]
     },
     "src/gesture/gesture.js": {
-      "bytes": 3260,
+      "bytes": 3308,
       "imports": []
     },
     "src/hand/anchors.js": {
@@ -244,7 +244,7 @@
       ]
     },
     "src/hand/handdetector.js": {
-      "bytes": 4276,
+      "bytes": 4252,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -255,7 +255,7 @@
       ]
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7608,
+      "bytes": 7615,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -357,7 +357,7 @@
     "dist/human.esm.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 2583292
+      "bytes": 2583490
     },
     "dist/human.esm.js": {
       "imports": [],
@@ -366,7 +366,7 @@
       ],
       "inputs": {
         "src/face/blazeface.js": {
-          "bytesInOutput": 5109
+          "bytesInOutput": 5121
         },
         "src/face/box.js": {
           "bytesInOutput": 1578
@@ -378,10 +378,10 @@
           "bytesInOutput": 30817
         },
         "src/face/facepipeline.js": {
-          "bytesInOutput": 9391
+          "bytesInOutput": 9403
         },
         "src/face/facemesh.js": {
-          "bytesInOutput": 2108
+          "bytesInOutput": 2124
         },
         "src/profile.js": {
           "bytesInOutput": 854
@@ -432,10 +432,10 @@
           "bytesInOutput": 832
         },
         "src/hand/handdetector.js": {
-          "bytesInOutput": 2765
+          "bytesInOutput": 2741
         },
         "src/hand/handpipeline.js": {
-          "bytesInOutput": 4691
+          "bytesInOutput": 4633
         },
         "src/hand/anchors.js": {
           "bytesInOutput": 127032
@@ -444,7 +444,7 @@
           "bytesInOutput": 1914
         },
         "src/gesture/gesture.js": {
-          "bytesInOutput": 2427
+          "bytesInOutput": 2463
         },
         "src/imagefx.js": {
           "bytesInOutput": 13576
@@ -471,7 +471,7 @@
           "bytesInOutput": 21
         }
       },
-      "bytes": 1774850
+      "bytes": 1774844
     }
   }
 }

dist/human.js vendored

File diff suppressed because one or more lines are too long

dist/human.js.map vendored

File diff suppressed because one or more lines are too long

dist/human.json vendored

@@ -159,7 +159,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 6993,
+      "bytes": 7009,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -179,7 +179,7 @@
       "imports": []
     },
     "src/face/facemesh.js": {
-      "bytes": 2720,
+      "bytes": 2740,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -196,7 +196,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 13868,
+      "bytes": 13884,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -228,7 +228,7 @@
       ]
     },
     "src/gesture/gesture.js": {
-      "bytes": 3260,
+      "bytes": 3308,
       "imports": []
     },
     "src/hand/anchors.js": {
@@ -244,7 +244,7 @@
       ]
     },
     "src/hand/handdetector.js": {
-      "bytes": 4276,
+      "bytes": 4252,
      "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -255,7 +255,7 @@
       ]
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7608,
+      "bytes": 7615,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -357,14 +357,14 @@
     "dist/human.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 2599797
+      "bytes": 2599995
     },
     "dist/human.js": {
       "imports": [],
       "exports": [],
       "inputs": {
         "src/face/blazeface.js": {
-          "bytesInOutput": 5109
+          "bytesInOutput": 5121
         },
         "src/face/box.js": {
           "bytesInOutput": 1578
@@ -376,10 +376,10 @@
           "bytesInOutput": 30817
         },
         "src/face/facepipeline.js": {
-          "bytesInOutput": 9391
+          "bytesInOutput": 9403
         },
         "src/face/facemesh.js": {
-          "bytesInOutput": 2108
+          "bytesInOutput": 2124
         },
         "src/profile.js": {
           "bytesInOutput": 854
@@ -430,10 +430,10 @@
           "bytesInOutput": 832
         },
         "src/hand/handdetector.js": {
-          "bytesInOutput": 2765
+          "bytesInOutput": 2741
         },
         "src/hand/handpipeline.js": {
-          "bytesInOutput": 4691
+          "bytesInOutput": 4633
         },
         "src/hand/anchors.js": {
           "bytesInOutput": 127032
@@ -442,7 +442,7 @@
           "bytesInOutput": 1914
         },
         "src/gesture/gesture.js": {
-          "bytesInOutput": 2427
+          "bytesInOutput": 2463
         },
         "src/imagefx.js": {
           "bytesInOutput": 13576
@@ -469,7 +469,7 @@
           "bytesInOutput": 21
         }
       },
-      "bytes": 1774928
+      "bytes": 1774922
     }
   }
 }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.json vendored

@@ -159,7 +159,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 6993,
+      "bytes": 7009,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -179,7 +179,7 @@
       "imports": []
     },
     "src/face/facemesh.js": {
-      "bytes": 2720,
+      "bytes": 2740,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -196,7 +196,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 13868,
+      "bytes": 13884,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -228,7 +228,7 @@
       ]
     },
     "src/gesture/gesture.js": {
-      "bytes": 3260,
+      "bytes": 3308,
       "imports": []
     },
     "src/hand/anchors.js": {
@@ -244,7 +244,7 @@
       ]
     },
     "src/hand/handdetector.js": {
-      "bytes": 4276,
+      "bytes": 4252,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -255,7 +255,7 @@
       ]
     },
     "src/hand/handpipeline.js": {
-      "bytes": 7608,
+      "bytes": 7615,
       "imports": [
         {
           "path": "dist/tfjs.esm.js"
@@ -357,7 +357,7 @@
     "dist/human.node-gpu.js.map": {
       "imports": [],
       "inputs": {},
-      "bytes": 686404
+      "bytes": 686602
     },
     "dist/human.node-gpu.js": {
       "imports": [],
@@ -367,7 +367,7 @@
           "bytesInOutput": 966
         },
         "src/face/blazeface.js": {
-          "bytesInOutput": 5264
+          "bytesInOutput": 5276
         },
         "src/face/box.js": {
           "bytesInOutput": 1628
@@ -379,10 +379,10 @@
           "bytesInOutput": 30828
         },
         "src/face/facepipeline.js": {
-          "bytesInOutput": 9447
+          "bytesInOutput": 9459
         },
         "src/face/facemesh.js": {
-          "bytesInOutput": 2153
+          "bytesInOutput": 2169
         },
         "src/profile.js": {
           "bytesInOutput": 854
@@ -433,10 +433,10 @@
           "bytesInOutput": 846
         },
         "src/hand/handdetector.js": {
-          "bytesInOutput": 2903
+          "bytesInOutput": 2879
         },
         "src/hand/handpipeline.js": {
-          "bytesInOutput": 4735
+          "bytesInOutput": 4677
         },
         "src/hand/anchors.js": {
           "bytesInOutput": 127034
@@ -445,7 +445,7 @@
           "bytesInOutput": 1958
         },
         "src/gesture/gesture.js": {
-          "bytesInOutput": 2431
+          "bytesInOutput": 2467
         },
         "src/imagefx.js": {
           "bytesInOutput": 13558
@@ -469,7 +469,7 @@
           "bytesInOutput": 20
         }
       },
-      "bytes": 249843
+      "bytes": 249837
     }
   }
 }


@@ -20,9 +20,7 @@ class ModelWeights {
   }
   dispose() {
-    for (const varName in this.variables) {
-      this.variables[varName].dispose();
-    }
+    for (let i = 0; i < this.variables.length; i++) this.variables[i].dispose();
   }
 }
 exports.ModelWeights = ModelWeights;
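This rewrite assumes `this.variables` is an array of tensors; if it were a plain object keyed by variable name (which is what the original `for...in` form suggests), `.length` would be undefined and the loop would never run. A hedged sketch that disposes either shape (the helper name is illustrative):

// Dispose every tensor in a collection that may be an array or a name-keyed object
function disposeAll(variables) {
  const values = Array.isArray(variables) ? variables : Object.values(variables);
  for (const v of values) v.dispose(); // tf.Tensor.dispose() releases backing memory
}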

src/face/blazeface.js

@@ -112,7 +112,7 @@ class BlazeFaceModel {
     const scoresVal = scores.dataSync();
     const annotatedBoxes = [];
-    for (const i in boundingBoxes) {
+    for (let i = 0; i < boundingBoxes.length; i++) {
       const boxIndex = boxIndices[i];
       const confidence = scoresVal[boxIndex];
       if (confidence > this.config.face.detector.minConfidence) {

src/face/facemesh.js

@@ -18,7 +18,7 @@ class MediaPipeFaceMesh {
     const mesh = prediction.coords ? prediction.coords.arraySync() : null;
     const annotations = {};
     if (mesh && mesh.length > 0) {
-      for (const key in coords.MESH_ANNOTATIONS) {
+      for (let key = 0; key < coords.MESH_ANNOTATIONS.length; key++) {
         if (config.face.iris.enabled || key.includes('Iris') === false) {
           annotations[key] = coords.MESH_ANNOTATIONS[key].map((index) => mesh[index]);
         }

src/face/facepipeline.js

@@ -156,7 +156,7 @@ class Pipeline {
       this.detectedFaces = 0;
       return null;
     }
-    for (const i in this.storedBoxes) {
+    for (let i = 0; i < this.storedBoxes.length; i++) {
      const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor);
      const enlargedBox = bounding.enlargeBox(scaledBox);
      const landmarks = this.storedBoxes[i].landmarks.arraySync();
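For readers unfamiliar with the box utilities used here: `scaleBoxCoordinates` maps a detector-space box into image coordinates, and `enlargeBox` pads it before cropping. A rough sketch of the scaling step, assuming the `{ startPoint, endPoint }` box format shown above (a sketch, not the actual implementation):

// Scale box corners by per-axis factors, e.g. from detector input size to frame size
function scaleBoxCoordinates(box, factor) {
  return {
    startPoint: [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]],
    endPoint: [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]],
  };
}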

src/gesture/gesture.js

@@ -1,7 +1,7 @@
 exports.body = (res) => {
   if (!res) return [];
   const gestures = [];
-  for (const i in res) {
+  for (let i = 0; i < res.length; i++) {
     // raising hands
     const leftWrist = res[i].keypoints.find((a) => (a.part === 'leftWrist'));
     const rightWrist = res[i].keypoints.find((a) => (a.part === 'rightWrist'));
@@ -21,7 +21,7 @@ exports.body = (res) => {
 exports.face = (res) => {
   if (!res) return [];
   const gestures = [];
-  for (const i in res) {
+  for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 0) {
       const eyeFacing = res[i].mesh[35][2] - res[i].mesh[263][2];
       if (Math.abs(eyeFacing) < 10) gestures.push({ face: i, gesture: 'facing camera' });
@@ -42,7 +42,7 @@ exports.face = (res) => {
 exports.hand = (res) => {
   if (!res) return [];
   const gestures = [];
-  for (const i in res) {
+  for (let i = 0; i < res.length; i++) {
     const fingers = [];
     for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
       if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
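The face branch above infers orientation from depth: it compares the z-coordinates of mesh points 35 and 263, which sit on opposite sides of the face, and treats a near-zero difference as frontal. The rule, extracted as a standalone sketch:

// Frontal-face heuristic from exports.face: small left/right depth difference => facing camera
function facingCamera(mesh, threshold = 10) {
  const eyeFacing = mesh[35][2] - mesh[263][2]; // z of two lateral mesh points
  return Math.abs(eyeFacing) < threshold;
}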

src/hand/handdetector.js

@@ -50,24 +50,24 @@ class HandDetector {
     const batched = this.model.predict(input);
     const predictions = batched.squeeze();
     batched.dispose();
-    const scores = tf.tidy(() => tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1])).squeeze());
-    const scoresVal = scores.dataSync();
+    const scoresT = tf.tidy(() => tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1])).squeeze());
+    const scores = scoresT.dataSync();
     const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);
     const boxes = this.normalizeBoxes(rawBoxes);
     rawBoxes.dispose();
     const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.hand.maxHands, config.hand.iouThreshold, config.hand.scoreThreshold);
     const filtered = filteredT.arraySync();
-    scores.dispose();
+    scoresT.dispose();
     filteredT.dispose();
     const hands = [];
-    for (const boxIndex of filtered) {
-      if (scoresVal[boxIndex] >= config.hand.minConfidence) {
-        const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);
-        const rawPalmLandmarks = tf.slice(predictions, [boxIndex, 5], [1, 14]);
-        const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));
+    for (const index of filtered) {
+      if (scores[index] >= config.hand.minConfidence) {
+        const matchingBox = tf.slice(boxes, [index, 0], [1, -1]);
+        const rawPalmLandmarks = tf.slice(predictions, [index, 5], [1, 14]);
+        const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, index).reshape([-1, 2]));
         rawPalmLandmarks.dispose();
-        hands.push({ box: matchingBox, palmLandmarks, confidence: scoresVal[boxIndex] });
+        hands.push({ box: matchingBox, palmLandmarks, confidence: scores[index] });
       }
     }
     predictions.dispose();
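The rename establishes a convention used throughout this commit: tensors keep a `T` suffix (`scoresT`, `filteredT`) while the plain name holds the values downloaded to the CPU, which makes a missing `dispose()` easier to spot. A minimal, self-contained sketch of the pattern using the public tfjs API:

import * as tf from '@tensorflow/tfjs';

const scoresT = tf.tidy(() => tf.sigmoid(tf.tensor1d([0.2, 2.5, -1.0])));
const scores = scoresT.dataSync(); // Float32Array copy on the CPU
scoresT.dispose();                 // safe: `scores` no longer depends on the tensor
console.log(scores[1] > 0.5);      // true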

src/hand/handpipeline.js

@@ -28,9 +28,9 @@ const PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;
 const PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;
 class HandPipeline {
-  constructor(boundingBoxDetector, meshDetector, inputSize) {
-    this.boxDetector = boundingBoxDetector;
-    this.meshDetector = meshDetector;
+  constructor(handDetector, landmarkDetector, inputSize) {
+    this.handDetector = handDetector;
+    this.landmarkDetector = landmarkDetector;
     this.inputSize = inputSize;
     this.storedBoxes = [];
     this.skipped = 1000;
@@ -90,23 +90,23 @@
     // run new detector every skipFrames unless we only want box to start with
     let boxes;
     if ((this.skipped > config.hand.skipFrames) || !config.hand.landmarks || !config.videoOptimized) {
-      boxes = await this.boxDetector.estimateHandBounds(image, config);
+      boxes = await this.handDetector.estimateHandBounds(image, config);
       // don't reset on test image
       if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;
     }
     // if detector result count doesn't match current working set, use it to reset current working set
     if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.hand.maxHands) || !config.hand.landmarks)) {
       this.storedBoxes = [];
       this.detectedHands = 0;
-      for (const possible of boxes) this.storedBoxes.push(possible);
+      this.storedBoxes = [...boxes];
+      // for (const possible of boxes) this.storedBoxes.push(possible);
       if (this.storedBoxes.length > 0) useFreshBox = true;
     }
     const hands = [];
     // console.log(`skipped: ${this.skipped} max: ${config.hand.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);
     // go through working set of boxes
-    for (const i in this.storedBoxes) {
+    for (let i = 0; i < this.storedBoxes.length; i++) {
       const currentBox = this.storedBoxes[i];
       if (!currentBox) continue;
       if (config.hand.landmarks) {
@@ -120,11 +120,11 @@
         const handImage = croppedInput.div(255);
         croppedInput.dispose();
         rotatedImage.dispose();
-        const [confidence, keypoints] = await this.meshDetector.predict(handImage);
+        const [confidenceT, keypoints] = await this.landmarkDetector.predict(handImage);
         handImage.dispose();
-        const confidenceValue = confidence.dataSync()[0];
-        confidence.dispose();
-        if (confidenceValue >= config.hand.minConfidence) {
+        const confidence = confidenceT.dataSync()[0];
+        confidenceT.dispose();
+        if (confidence >= config.hand.minConfidence) {
           const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);
           const rawCoords = keypointsReshaped.arraySync();
           keypoints.dispose();
@@ -134,7 +134,7 @@
           this.storedBoxes[i] = nextBoundingBox;
           const result = {
             landmarks: coords,
-            confidence: confidenceValue,
+            confidence,
             box: {
               topLeft: nextBoundingBox.startPoint,
               bottomRight: nextBoundingBox.endPoint,
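One non-rename change in the middle hunk: the working set is now reset with an array spread instead of a push loop. Both produce the same shallow copy; the spread is simply more direct. A short sketch of the equivalence:

const boxes = [{ startPoint: [0, 0] }, { startPoint: [1, 1] }];
const viaPush = [];
for (const possible of boxes) viaPush.push(possible); // old form
const viaSpread = [...boxes];                         // new form
console.log(viaPush[0] === viaSpread[0]);             // true: both share the same element references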

wiki

@@ -1 +1 @@
-Subproject commit bcac4981f7df29e367259caf6b3b73e5ecde6519
+Subproject commit 9595a995f7bcf2c6b0d70fed98260c8ab4a6f0d7