enhance strong typing

pull/134/head
Vladimir Mandic 2021-05-22 14:53:51 -04:00
parent 4c14bcd80e
commit 36e9c56746
33 changed files with 216 additions and 202 deletions

View File

@@ -9,14 +9,14 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 ## Changelog
+### **HEAD -> main** 2021/05/22 mandic00@live.com
 ### **1.9.2** 2021/05/22 mandic00@live.com
 - add id and boxraw on missing objects
 - restructure results strong typing
-### **origin/main** 2021/05/21 mandic00@live.com
 ### **1.9.1** 2021/05/21 mandic00@live.com
 - caching improvements

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

30  dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

30  dist/human.js vendored

File diff suppressed because one or more lines are too long

View File

@@ -4060,10 +4060,7 @@ var Pipeline = class {
         boxConfidence: box4.confidence,
         image: face5
       };
-      const storedBox = squarifyBox(box4);
-      storedBox.confidence = box4.confidence;
-      storedBox.faceConfidence = faceConfidence;
-      this.storedBoxes[i] = storedBox;
+      this.storedBoxes[i] = { ...squarifyBox(box4), confidence: box4.confidence, faceConfidence };
       return prediction;
     }));
     if (config3.face.mesh.enabled)
@@ -16817,11 +16814,11 @@ var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2];
 var palmLandmarksPalmBase = 0;
 var palmLandmarksMiddleFingerBase = 2;
 var HandPipeline = class {
-  constructor(handDetector, landmarkDetector) {
+  constructor(handDetector, handPoseModel2) {
     var _a;
     this.handDetector = handDetector;
-    this.landmarkDetector = landmarkDetector;
-    this.inputSize = (_a = this.landmarkDetector) == null ? void 0 : _a.inputs[0].shape[2];
+    this.handPoseModel = handPoseModel2;
+    this.inputSize = (_a = this.handPoseModel) == null ? void 0 : _a.inputs[0].shape[2];
     this.storedBoxes = [];
     this.skipped = 0;
     this.detectedHands = 0;
@@ -16903,7 +16900,7 @@ var HandPipeline = class {
     const handImage = croppedInput.div(255);
     croppedInput.dispose();
     rotatedImage.dispose();
-    const [confidenceT, keypoints] = await this.landmarkDetector.predict(handImage);
+    const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
     handImage.dispose();
     const confidence = confidenceT.dataSync()[0];
     confidenceT.dispose();
@@ -16914,7 +16911,7 @@ var HandPipeline = class {
     keypointsReshaped.dispose();
     const coords3 = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
     const nextBoundingBox = this.getBoxForHandLandmarks(coords3);
-    this.storedBoxes[i] = nextBoundingBox;
+    this.storedBoxes[i] = { ...nextBoundingBox, confidence };
     const result = {
       landmarks: coords3,
       confidence,
@@ -18439,8 +18436,9 @@ function lines(ctx, points = [], localOptions) {
   ctx.beginPath();
   ctx.moveTo(points[0][0], points[0][1]);
   for (const pt of points) {
-    ctx.strokeStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + 2 * pt[2]}, ${127.5 - 2 * pt[2]}, 255, 0.3)` : localOptions.color;
-    ctx.fillStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + 2 * pt[2]}, ${127.5 - 2 * pt[2]}, 255, 0.3)` : localOptions.color;
+    const z = pt[2] || 0;
+    ctx.strokeStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
+    ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
     ctx.lineTo(pt[0], Math.round(pt[1]));
   }
   ctx.stroke();
@@ -18617,7 +18615,7 @@ async function body2(inCanvas2, result, drawOptions) {
     }
     if (localOptions.drawPoints) {
       for (let pt = 0; pt < result[i].keypoints.length; pt++) {
-        ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + 2 * result[i].keypoints[pt].position.z}, ${127.5 - 2 * result[i].keypoints[pt].position.z}, 255, 0.5)` : localOptions.color;
+        ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + 2 * (result[i].keypoints[pt].position.z || 0)}, ${127.5 - 2 * (result[i].keypoints[pt].position.z || 0)}, 255, 0.5)` : localOptions.color;
         point(ctx, result[i].keypoints[pt].position.x, result[i].keypoints[pt].position.y, 0, localOptions);
       }
     }
@@ -19927,9 +19925,9 @@ var Human = class {
     this.perf.cached++;
     this.perf.changed = Math.trunc(now() - timeStamp);
     this.analyze("Check Changed:");
+    let faceRes;
     let bodyRes;
     let handRes;
-    let faceRes;
     let objectRes;
     let current;
     if (this.config.async) {

View File

@@ -4061,10 +4061,7 @@ var Pipeline = class {
         boxConfidence: box4.confidence,
         image: face5
       };
-      const storedBox = squarifyBox(box4);
-      storedBox.confidence = box4.confidence;
-      storedBox.faceConfidence = faceConfidence;
-      this.storedBoxes[i] = storedBox;
+      this.storedBoxes[i] = { ...squarifyBox(box4), confidence: box4.confidence, faceConfidence };
       return prediction;
     }));
     if (config3.face.mesh.enabled)
@@ -16818,11 +16815,11 @@ var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2];
 var palmLandmarksPalmBase = 0;
 var palmLandmarksMiddleFingerBase = 2;
 var HandPipeline = class {
-  constructor(handDetector, landmarkDetector) {
+  constructor(handDetector, handPoseModel2) {
     var _a;
     this.handDetector = handDetector;
-    this.landmarkDetector = landmarkDetector;
-    this.inputSize = (_a = this.landmarkDetector) == null ? void 0 : _a.inputs[0].shape[2];
+    this.handPoseModel = handPoseModel2;
+    this.inputSize = (_a = this.handPoseModel) == null ? void 0 : _a.inputs[0].shape[2];
     this.storedBoxes = [];
     this.skipped = 0;
     this.detectedHands = 0;
@@ -16904,7 +16901,7 @@ var HandPipeline = class {
     const handImage = croppedInput.div(255);
     croppedInput.dispose();
     rotatedImage.dispose();
-    const [confidenceT, keypoints] = await this.landmarkDetector.predict(handImage);
+    const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
     handImage.dispose();
     const confidence = confidenceT.dataSync()[0];
     confidenceT.dispose();
@@ -16915,7 +16912,7 @@ var HandPipeline = class {
     keypointsReshaped.dispose();
     const coords3 = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
     const nextBoundingBox = this.getBoxForHandLandmarks(coords3);
-    this.storedBoxes[i] = nextBoundingBox;
+    this.storedBoxes[i] = { ...nextBoundingBox, confidence };
     const result = {
       landmarks: coords3,
       confidence,
@@ -18440,8 +18437,9 @@ function lines(ctx, points = [], localOptions) {
   ctx.beginPath();
   ctx.moveTo(points[0][0], points[0][1]);
   for (const pt of points) {
-    ctx.strokeStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + 2 * pt[2]}, ${127.5 - 2 * pt[2]}, 255, 0.3)` : localOptions.color;
-    ctx.fillStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + 2 * pt[2]}, ${127.5 - 2 * pt[2]}, 255, 0.3)` : localOptions.color;
+    const z = pt[2] || 0;
+    ctx.strokeStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
+    ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
     ctx.lineTo(pt[0], Math.round(pt[1]));
   }
   ctx.stroke();
@@ -18618,7 +18616,7 @@ async function body2(inCanvas2, result, drawOptions) {
     }
     if (localOptions.drawPoints) {
       for (let pt = 0; pt < result[i].keypoints.length; pt++) {
-        ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + 2 * result[i].keypoints[pt].position.z}, ${127.5 - 2 * result[i].keypoints[pt].position.z}, 255, 0.5)` : localOptions.color;
+        ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + 2 * (result[i].keypoints[pt].position.z || 0)}, ${127.5 - 2 * (result[i].keypoints[pt].position.z || 0)}, 255, 0.5)` : localOptions.color;
         point(ctx, result[i].keypoints[pt].position.x, result[i].keypoints[pt].position.y, 0, localOptions);
       }
     }
@@ -19928,9 +19926,9 @@ var Human = class {
     this.perf.cached++;
     this.perf.changed = Math.trunc(now() - timeStamp);
     this.analyze("Check Changed:");
+    let faceRes;
     let bodyRes;
     let handRes;
-    let faceRes;
     let objectRes;
     let current;
     if (this.config.async) {

24  dist/human.node.js vendored
View File

@@ -4060,10 +4060,7 @@ var Pipeline = class {
         boxConfidence: box4.confidence,
         image: face5
       };
-      const storedBox = squarifyBox(box4);
-      storedBox.confidence = box4.confidence;
-      storedBox.faceConfidence = faceConfidence;
-      this.storedBoxes[i] = storedBox;
+      this.storedBoxes[i] = { ...squarifyBox(box4), confidence: box4.confidence, faceConfidence };
       return prediction;
     }));
     if (config3.face.mesh.enabled)
@@ -16817,11 +16814,11 @@ var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2];
 var palmLandmarksPalmBase = 0;
 var palmLandmarksMiddleFingerBase = 2;
 var HandPipeline = class {
-  constructor(handDetector, landmarkDetector) {
+  constructor(handDetector, handPoseModel2) {
     var _a;
     this.handDetector = handDetector;
-    this.landmarkDetector = landmarkDetector;
-    this.inputSize = (_a = this.landmarkDetector) == null ? void 0 : _a.inputs[0].shape[2];
+    this.handPoseModel = handPoseModel2;
+    this.inputSize = (_a = this.handPoseModel) == null ? void 0 : _a.inputs[0].shape[2];
     this.storedBoxes = [];
     this.skipped = 0;
     this.detectedHands = 0;
@@ -16903,7 +16900,7 @@ var HandPipeline = class {
     const handImage = croppedInput.div(255);
     croppedInput.dispose();
     rotatedImage.dispose();
-    const [confidenceT, keypoints] = await this.landmarkDetector.predict(handImage);
+    const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
     handImage.dispose();
     const confidence = confidenceT.dataSync()[0];
     confidenceT.dispose();
@@ -16914,7 +16911,7 @@ var HandPipeline = class {
     keypointsReshaped.dispose();
     const coords3 = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
     const nextBoundingBox = this.getBoxForHandLandmarks(coords3);
-    this.storedBoxes[i] = nextBoundingBox;
+    this.storedBoxes[i] = { ...nextBoundingBox, confidence };
     const result = {
       landmarks: coords3,
       confidence,
@@ -18439,8 +18436,9 @@ function lines(ctx, points = [], localOptions) {
   ctx.beginPath();
   ctx.moveTo(points[0][0], points[0][1]);
   for (const pt of points) {
-    ctx.strokeStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + 2 * pt[2]}, ${127.5 - 2 * pt[2]}, 255, 0.3)` : localOptions.color;
-    ctx.fillStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + 2 * pt[2]}, ${127.5 - 2 * pt[2]}, 255, 0.3)` : localOptions.color;
+    const z = pt[2] || 0;
+    ctx.strokeStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
+    ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
     ctx.lineTo(pt[0], Math.round(pt[1]));
   }
   ctx.stroke();
@@ -18617,7 +18615,7 @@ async function body2(inCanvas2, result, drawOptions) {
     }
     if (localOptions.drawPoints) {
       for (let pt = 0; pt < result[i].keypoints.length; pt++) {
-        ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + 2 * result[i].keypoints[pt].position.z}, ${127.5 - 2 * result[i].keypoints[pt].position.z}, 255, 0.5)` : localOptions.color;
+        ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + 2 * (result[i].keypoints[pt].position.z || 0)}, ${127.5 - 2 * (result[i].keypoints[pt].position.z || 0)}, 255, 0.5)` : localOptions.color;
         point(ctx, result[i].keypoints[pt].position.x, result[i].keypoints[pt].position.y, 0, localOptions);
       }
     }
@@ -19927,9 +19925,9 @@ var Human = class {
     this.perf.cached++;
     this.perf.changed = Math.trunc(now() - timeStamp);
     this.analyze("Check Changed:");
+    let faceRes;
     let bodyRes;
     let handRes;
-    let faceRes;
     let objectRes;
     let current;
     if (this.config.async) {

View File

@@ -15,3 +15,20 @@
 2021-05-22 13:15:17 INFO:  Generate types: ["src/human.ts"]
 2021-05-22 13:15:23 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
 2021-05-22 13:15:23 INFO:  Generate TypeDocs: ["src/human.ts"]
+2021-05-22 14:53:16 INFO:  @vladmandic/human version 1.9.2
+2021-05-22 14:53:16 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
+2021-05-22 14:53:16 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
+2021-05-22 14:53:16 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-22 14:53:16 STATE: Build for: node type: node: {"imports":36,"importBytes":419268,"outputBytes":377986,"outputFiles":"dist/human.node.js"}
+2021-05-22 14:53:16 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-22 14:53:16 STATE: Build for: nodeGPU type: node: {"imports":36,"importBytes":419276,"outputBytes":377990,"outputFiles":"dist/human.node-gpu.js"}
+2021-05-22 14:53:16 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-22 14:53:16 STATE: Build for: nodeWASM type: node: {"imports":36,"importBytes":419343,"outputBytes":378062,"outputFiles":"dist/human.node-wasm.js"}
+2021-05-22 14:53:16 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-22 14:53:16 STATE: Build for: browserNoBundle type: esm: {"imports":36,"importBytes":419370,"outputBytes":232264,"outputFiles":"dist/human.esm-nobundle.js"}
+2021-05-22 14:53:17 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-22 14:53:17 STATE: Build for: browserBundle type: iife: {"imports":36,"importBytes":1529390,"outputBytes":1340125,"outputFiles":"dist/human.js"}
+2021-05-22 14:53:18 STATE: Build for: browserBundle type: esm: {"imports":36,"importBytes":1529390,"outputBytes":1340117,"outputFiles":"dist/human.esm.js"}
+2021-05-22 14:53:18 INFO:  Generate types: ["src/human.ts"]
+2021-05-22 14:53:22 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
+2021-05-22 14:53:22 INFO:  Generate TypeDocs: ["src/human.ts"]

View File

@@ -2,6 +2,7 @@ import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as box from './box';
 import * as util from './util';
+import { Config } from '../config';
 const keypointsCount = 6;
@@ -21,11 +22,11 @@ function decodeBounds(boxOutputs, anchors, inputSize) {
 }
 export class BlazeFaceModel {
-  model: any;
-  anchorsData: any;
-  anchors: any;
+  model: any; // tf.GraphModel
+  anchorsData: [number, number][];
+  anchors: typeof tf.Tensor;
   inputSize: number;
-  config: any;
+  config: Config;
   constructor(model, config) {
     this.model = model;
@@ -60,7 +61,7 @@ export class BlazeFaceModel {
     const nmsTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.face.detector.maxDetected, this.config.face.detector.iouThreshold, this.config.face.detector.minConfidence);
     const nms = nmsTensor.arraySync();
     nmsTensor.dispose();
-    const annotatedBoxes: Array<{ box: any, landmarks: any, anchor: number[], confidence: number }> = [];
+    const annotatedBoxes: Array<{ box: { startPoint: typeof tf.Tensor, endPoint: typeof tf.Tensor }, landmarks: typeof tf.Tensor, anchor: number[], confidence: number }> = [];
     for (let i = 0; i < nms.length; i++) {
       const confidence = scores[nms[i]];
       if (confidence > this.config.face.detector.minConfidence) {

View File

@@ -53,10 +53,10 @@ function replaceRawCoordinates(rawCoords, newCoords, prefix, keys) {
 }
 // The Pipeline coordinates between the bounding box and skeleton models.
 export class Pipeline {
-  storedBoxes: any;
-  boundingBoxDetector: any;
-  meshDetector: any;
-  irisModel: any;
+  storedBoxes: Array<{ startPoint: number[], endPoint: number[], landmarks: any, confidence: number, faceConfidence?: number }>; // landmarks is tensor
+  boundingBoxDetector: any; // tf.GraphModel
+  meshDetector: any; // tf.GraphModel
+  irisModel: any; // tf.GraphModel
   boxSize: number;
   meshSize: number;
   irisSize: number;
@@ -120,7 +120,7 @@ export class Pipeline {
   // Given a cropped image of an eye, returns the coordinates of the contours surrounding the eye and the iris.
   getEyeCoords(eyeData, eyeBox, eyeBoxSize, flip = false) {
-    const eyeRawCoords: Array<any[]> = [];
+    const eyeRawCoords: Array<[number, number, number]> = [];
     for (let i = 0; i < irisLandmarks.numCoordinates; i++) {
       const x = eyeData[i * 3];
       const y = eyeData[i * 3 + 1];
@@ -229,7 +229,7 @@ export class Pipeline {
       }
       const [, confidence, contourCoords] = this.meshDetector.execute(face); // The first returned tensor represents facial contours which are already included in the coordinates.
-      const faceConfidence = confidence.dataSync()[0];
+      const faceConfidence = confidence.dataSync()[0] as number;
       if (faceConfidence < config.face.detector.minConfidence) {
        this.storedBoxes[i].confidence = faceConfidence; // reset confidence of cached box
        return null; // if below confidence just exit
@@ -265,6 +265,7 @@ export class Pipeline {
      // override box from detection with one calculated from mesh
      const mesh = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
      const storeConfidence = box.confidence;
+      // @ts-ignore enlargeBox does not include confidence so we append it manually
      box = bounding.enlargeBox(bounding.calculateLandmarksBoundingBox(mesh), 1.5); // redefine box with mesh calculated one
      box.confidence = storeConfidence;
@@ -288,13 +289,7 @@ export class Pipeline {
      };
      // updated stored cache values
-      const storedBox = bounding.squarifyBox(box);
-      // @ts-ignore box itself doesn't have those properties, but we stored them for future use
-      storedBox.confidence = box.confidence;
-      // @ts-ignore box itself doesn't have those properties, but we stored them for future use
-      storedBox.faceConfidence = faceConfidence;
-      // this.storedBoxes[i] = { ...squarifiedLandmarksBox, confidence: box.confidence, faceConfidence };
-      this.storedBoxes[i] = storedBox;
+      this.storedBoxes[i] = { ...bounding.squarifyBox(box), confidence: box.confidence, faceConfidence };
      return prediction;
    }));
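Note (illustrative, not part of the commit): both the source hunk above and the matching bundled hunks replace a create-then-mutate sequence with a single typed object literal, which removes the need for @ts-ignore on the extra confidence fields. A minimal standalone TypeScript sketch of that pattern, with assumed Box/StoredBox shapes and a stubbed squarifyBox in place of the real helper:

// illustrative only: shapes approximate what the diff stores, not Human's real types
type Box = { startPoint: number[], endPoint: number[], landmarks?: any };
type StoredBox = Box & { confidence: number, faceConfidence?: number };

// stand-in for the real squarifyBox helper referenced in the hunk
const squarifyBox = (box: Box): Box => box;

function cacheBox(stored: StoredBox[], i: number, box: Box, confidence: number, faceConfidence?: number) {
  // one typed literal replaces the old create-then-mutate sequence
  stored[i] = { ...squarifyBox(box), confidence, faceConfidence };
}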

View File

@@ -96,13 +96,14 @@ function rect(ctx, x, y, width, height, localOptions) {
   ctx.stroke();
 }
-function lines(ctx, points: [number, number, number][] = [], localOptions) {
+function lines(ctx, points: [number, number, number?][] = [], localOptions) {
   if (points === undefined || points.length === 0) return;
   ctx.beginPath();
   ctx.moveTo(points[0][0], points[0][1]);
   for (const pt of points) {
-    ctx.strokeStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + (2 * pt[2])}, ${127.5 - (2 * pt[2])}, 255, 0.3)` : localOptions.color;
-    ctx.fillStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + (2 * pt[2])}, ${127.5 - (2 * pt[2])}, 255, 0.3)` : localOptions.color;
+    const z = pt[2] || 0;
+    ctx.strokeStyle = localOptions.useDepth && z ? `rgba(${127.5 + (2 * z)}, ${127.5 - (2 * z)}, 255, 0.3)` : localOptions.color;
+    ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + (2 * z)}, ${127.5 - (2 * z)}, 255, 0.3)` : localOptions.color;
     ctx.lineTo(pt[0], Math.round(pt[1]));
   }
   ctx.stroke();
@@ -112,7 +113,7 @@ function lines(ctx, points: [number, number, number][] = [], localOptions) {
   }
 }
-function curves(ctx, points: [number, number, number][] = [], localOptions) {
+function curves(ctx, points: [number, number, number?][] = [], localOptions) {
   if (points === undefined || points.length === 0) return;
   if (!localOptions.useCurves || points.length <= 2) {
     lines(ctx, points, localOptions);
@@ -142,8 +143,8 @@ export async function gesture(inCanvas: HTMLCanvasElement, result: Array<Gesture
   ctx.fillStyle = localOptions.color;
   let i = 1;
   for (let j = 0; j < result.length; j++) {
-    let where:any[] = [];
-    let what:any[] = [];
+    let where: any[] = []; // what&where is a record
+    let what: any[] = []; // what&where is a record
     [where, what] = Object.entries(result[j]);
     if ((what.length > 1) && (what[1].length > 0)) {
       const person = where[1] > 0 ? `#${where[1]}` : '';
@@ -271,7 +272,7 @@ export async function body(inCanvas: HTMLCanvasElement, result: Array<Body>, dra
     }
     if (localOptions.drawPoints) {
       for (let pt = 0; pt < result[i].keypoints.length; pt++) {
-        ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + (2 * result[i].keypoints[pt].position.z)}, ${127.5 - (2 * result[i].keypoints[pt].position.z)}, 255, 0.5)` : localOptions.color;
+        ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + (2 * (result[i].keypoints[pt].position.z || 0))}, ${127.5 - (2 * (result[i].keypoints[pt].position.z || 0))}, 255, 0.5)` : localOptions.color;
         point(ctx, result[i].keypoints[pt].position.x, result[i].keypoints[pt].position.y, 0, localOptions);
       }
     }
@@ -286,7 +287,7 @@ export async function body(inCanvas: HTMLCanvasElement, result: Array<Body>, dra
     }
     if (localOptions.drawPolygons && result[i].keypoints) {
       let part;
-      const points: any[] = [];
+      const points: [number, number, number?][] = [];
       // shoulder line
       points.length = 0;
       part = result[i].keypoints.find((a) => a.part === 'leftShoulder');
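Note (illustrative): the draw changes above only make the now-optional depth component safe to use by defaulting it to 0. A hedged TypeScript sketch of the same fallback as a small helper; the helper name and signature are mine, not the library's:

// illustrative helper: mirrors the rgba() depth formula used in lines()/curves()
function depthColor(z: number | undefined, useDepth: boolean, fallback: string): string {
  const depth = z || 0; // optional third tuple element defaults to 0
  return useDepth && depth ? `rgba(${127.5 + (2 * depth)}, ${127.5 - (2 * depth)}, 255, 0.3)` : fallback;
}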

View File

@@ -3,7 +3,10 @@ import * as tf from '../../dist/tfjs.esm.js';
 import { Body } from '../result';
 let model;
-let keypoints: Array<any> = [];
+type Keypoints = { score: number, part: string, position: { x: number, y: number }, positionRaw: { x: number, y: number } };
+let keypoints: Array<Keypoints> = [];
 let skipped = Number.MAX_SAFE_INTEGER;
 const bodyParts = ['head', 'neck', 'rightShoulder', 'rightElbow', 'rightWrist', 'chest', 'leftShoulder', 'leftElbow', 'leftWrist', 'pelvis', 'rightHip', 'rightKnee', 'rightAnkle', 'leftHip', 'leftKnee', 'leftAnkle'];
@@ -41,7 +44,8 @@ function max2d(inputs, minScore) {
 export async function predict(image, config): Promise<Body[]> {
   if ((skipped < config.body.skipFrames) && config.skipFrame && Object.keys(keypoints).length > 0) {
     skipped++;
-    return keypoints;
+    const score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
+    return [{ id: 0, score, keypoints }];
   }
   skipped = 0;
   return new Promise(async (resolve) => {
@@ -57,7 +61,7 @@ export async function predict(image, config): Promise<Body[]> {
     tensor.dispose();
     if (resT) {
-      const parts: Array<{ id, score, part, position: { x, y }, positionRaw: { xRaw, yRaw} }> = [];
+      const parts: Array<Keypoints> = [];
       const squeeze = resT.squeeze();
       tf.dispose(resT);
       // body parts are basically just a stack of 2d tensors
@@ -69,12 +73,11 @@ export async function predict(image, config): Promise<Body[]> {
         const [x, y, score] = max2d(stack[id], config.body.minConfidence);
         if (score > config.body.minConfidence) {
           parts.push({
-            id,
             score: Math.round(100 * score) / 100,
             part: bodyParts[id],
             positionRaw: {
-              xRaw: x / model.inputs[0].shape[2], // x normalized to 0..1
-              yRaw: y / model.inputs[0].shape[1], // y normalized to 0..1
+              x: x / model.inputs[0].shape[2], // x normalized to 0..1
+              y: y / model.inputs[0].shape[1], // y normalized to 0..1
             },
             position: {
               x: Math.round(image.shape[2] * x / model.inputs[0].shape[2]), // x normalized to input image size
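Note (illustrative): with the Keypoints type in place, the cached-frame branch above returns a well-formed Body array instead of the bare keypoint list. A sketch of that reshaping under assumed, simplified types (BodyLite stands in for the library's Body interface):

type Keypoints = { score: number, part: string, position: { x: number, y: number }, positionRaw: { x: number, y: number } };
type BodyLite = { id: number, score: number, keypoints: Keypoints[] }; // simplified stand-in for Body

function cachedBody(keypoints: Keypoints[]): BodyLite[] {
  // overall score is the best individual keypoint score, as in the diff
  const score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
  return [{ id: 0, score, keypoints }];
}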

View File

@@ -96,7 +96,7 @@ const calculateFaceAngle = (face, image_size): { angle: { pitch: number, yaw: nu
   return { angle, matrix };
 };
-export const detectFace = async (parent, input): Promise<any> => {
+export const detectFace = async (parent, input): Promise<Face[]> => {
   // run facemesh, includes blazeface and iris
   // eslint-disable-next-line no-async-promise-executor
   let timeStamp;

View File

@@ -3,12 +3,12 @@ import * as box from './box';
 import * as anchors from './anchors';
 export class HandDetector {
-  model: any;
-  anchors: any;
-  anchorsTensor: any;
+  model: any; // tf.GraphModel
+  anchors: number[][];
+  anchorsTensor: typeof tf.Tensor;
   inputSize: number;
-  inputSizeTensor: any;
-  doubleInputSizeTensor: any;
+  inputSizeTensor: typeof tf.Tensor;
+  doubleInputSizeTensor: typeof tf.Tensor;
   constructor(model) {
     this.model = model;
@@ -52,7 +52,7 @@ export class HandDetector {
     scoresT.dispose();
     filteredT.dispose();
-    const hands: Array<{ box: any, palmLandmarks: any, confidence: number }> = [];
+    const hands: Array<{ box: any, palmLandmarks: any, confidence: number }> = []; // box and lardmarks are tensors here
     for (const index of filtered) {
       if (scores[index] >= config.hand.minConfidence) {
         const matchingBox = tf.slice(boxes, [index, 0], [1, -1]);
@@ -67,13 +67,13 @@ export class HandDetector {
     return hands;
   }
-  async estimateHandBounds(input, config) {
+  async estimateHandBounds(input, config): Promise<{ startPoint: number[]; endPoint: number[]; palmLandmarks: number[]; confidence: number }[]> {
     const inputHeight = input.shape[1];
     const inputWidth = input.shape[2];
     const image = tf.tidy(() => input.resizeBilinear([this.inputSize, this.inputSize]).div(127.5).sub(1));
     const predictions = await this.getBoxes(image, config);
     image.dispose();
-    const hands: Array<{}> = [];
+    const hands: Array<{ startPoint: number[]; endPoint: number[]; palmLandmarks: number[]; confidence: number }> = [];
     if (!predictions || predictions.length === 0) return hands;
     for (const prediction of predictions) {
       const boxes = prediction.box.dataSync();

View File

@@ -1,6 +1,7 @@
 import * as tf from '../../dist/tfjs.esm.js';
 import * as box from './box';
 import * as util from './util';
+import * as detector from './handdetector';
 const palmBoxEnlargeFactor = 5; // default 3
 const handBoxEnlargeFactor = 1.65; // default 1.65
@@ -9,17 +10,17 @@ const palmLandmarksPalmBase = 0;
 const palmLandmarksMiddleFingerBase = 2;
 export class HandPipeline {
-  handDetector: any;
-  landmarkDetector: any;
+  handDetector: detector.HandDetector;
+  handPoseModel: any; // tf.GraphModel
   inputSize: number;
-  storedBoxes: any;
+  storedBoxes: Array<{ startPoint: number[]; endPoint: number[]; palmLandmarks: number[]; confidence: number } | null>;
   skipped: number;
   detectedHands: number;
-  constructor(handDetector, landmarkDetector) {
+  constructor(handDetector, handPoseModel) {
     this.handDetector = handDetector;
-    this.landmarkDetector = landmarkDetector;
-    this.inputSize = this.landmarkDetector?.inputs[0].shape[2];
+    this.handPoseModel = handPoseModel;
+    this.inputSize = this.handPoseModel?.inputs[0].shape[2];
     this.storedBoxes = [];
     this.skipped = 0;
     this.detectedHands = 0;
@@ -112,7 +113,7 @@ export class HandPipeline {
     const handImage = croppedInput.div(255);
     croppedInput.dispose();
     rotatedImage.dispose();
-    const [confidenceT, keypoints] = await this.landmarkDetector.predict(handImage);
+    const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
     handImage.dispose();
     const confidence = confidenceT.dataSync()[0];
     confidenceT.dispose();
@@ -123,7 +124,7 @@ export class HandPipeline {
     keypointsReshaped.dispose();
     const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
     const nextBoundingBox = this.getBoxForHandLandmarks(coords);
-    this.storedBoxes[i] = nextBoundingBox;
+    this.storedBoxes[i] = { ...nextBoundingBox, confidence };
     const result = {
       landmarks: coords,
       confidence,
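Note (illustrative): the handpipeline hunks rename the second constructor argument from landmarkDetector to handPoseModel and give the cached boxes a concrete shape. A rough sketch of the resulting constructor contract, using structural stand-in types rather than the real detector.HandDetector and tf.GraphModel:

// illustrative shapes only; estimateHandBounds mirrors the typed signature in handdetector.ts
class HandPipelineSketch {
  handDetector: { estimateHandBounds: (input: unknown, config: unknown) => Promise<unknown[]> };
  handPoseModel: any; // tf.GraphModel in the real code
  inputSize: number;
  constructor(handDetector: HandPipelineSketch['handDetector'], handPoseModel: any) {
    this.handDetector = handDetector;
    this.handPoseModel = handPoseModel;
    this.inputSize = handPoseModel?.inputs[0].shape[2];
  }
}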

View File

@@ -1,6 +1,6 @@
 import { log, now, mergeDeep } from './helpers';
 import { Config, defaults } from './config';
-import { Result } from './result';
+import { Result, Gesture } from './result';
 import * as sysinfo from './sysinfo';
 import * as tf from '../dist/tfjs.esm.js';
 import * as backend from './tfjs/backend';
@@ -114,7 +114,7 @@ export class Human {
   /** Platform and agent information detected by Human */
   sysinfo: { platform: string, agent: string };
   /** Performance object that contains values for all recently performed operations */
-  perf: any;
+  perf: any; // perf members are dynamically defined as needed
   #numTensors: number;
   #analyzeMemoryLeaks: boolean;
   #checkSanity: boolean;
@@ -449,9 +449,10 @@ export class Human {
     this.analyze('Check Changed:');
     // prepare where to store model results
+    // keep them with weak typing as it can be promise or not
+    let faceRes;
     let bodyRes;
     let handRes;
-    let faceRes;
     let objectRes;
     let current;
@@ -520,7 +521,7 @@ export class Human {
     tf.dispose(process.tensor);
     // run gesture analysis last
-    let gestureRes: any[] = [];
+    let gestureRes: Gesture[] = [];
     if (this.config.gesture.enabled) {
       timeStamp = now();
       gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];

View File

@@ -81,7 +81,7 @@ async function process(res, inputSize, outputShape, config) {
   // unnecessary boxes and run nms only on good candidates (basically it just does IOU analysis as scores are already filtered)
   const nmsBoxes = results.map((a) => [a.boxRaw[1], a.boxRaw[0], a.boxRaw[3], a.boxRaw[2]]); // switches coordinates from x,y to y,x as expected by tf.nms
   const nmsScores = results.map((a) => a.score);
-  let nmsIdx: any[] = [];
+  let nmsIdx: Array<number> = [];
   if (nmsBoxes && nmsBoxes.length > 0) {
     const nms = await tf.image.nonMaxSuppressionAsync(nmsBoxes, nmsScores, config.object.maxDetected, config.object.iouThreshold, config.object.minConfidence);
     nmsIdx = nms.dataSync();

View File

@@ -120,7 +120,7 @@ function getInstanceScore(existingPoses, keypoints) {
 }
 export function decode(offsets, scores, displacementsFwd, displacementsBwd, maxDetected, minConfidence) {
-  const poses: Array<{ keypoints: any, box: any, score: number }> = [];
+  const poses: Array<{ keypoints, box: [number, number, number, number], score: number }> = [];
   const queue = buildPartWithScoreQueue(minConfidence, scores);
   // Generate at most maxDetected object instances per image in decreasing root part score order.
   while (poses.length < maxDetected && !queue.empty()) {

View File

@@ -14,7 +14,7 @@ export function getAdjacentKeyPoints(keypoints, minConfidence) {
   }, []);
 }
-export function getBoundingBox(keypoints) {
+export function getBoundingBox(keypoints): [number, number, number, number] {
   const coord = keypoints.reduce(({ maxX, maxY, minX, minY }, { position: { x, y } }) => ({
     maxX: Math.max(maxX, x),
     maxY: Math.max(maxY, y),
@@ -49,9 +49,9 @@ export function scalePoses(poses, [height, width], [inputResolutionHeight, input
 // algorithm based on Coursera Lecture from Algorithms, Part 1: https://www.coursera.org/learn/algorithms-part1/lecture/ZjoSM/heapsort
 export class MaxHeap {
-  priorityQueue: any;
+  priorityQueue: Array<any>; // don't touch
   numberOfElements: number;
-  getElementValue: any;
+  getElementValue: any; // function call
   constructor(maxSize, getElementValue) {
     this.priorityQueue = new Array(maxSize);

View File

@@ -2,7 +2,7 @@ import { log } from './helpers';
 export const data = {};
-export function run(modelName: string, profileData: any): void {
+export function run(modelName: string, profileData: any): void { // profileData is tfjs internal type
   if (!profileData || !profileData.kernels) return;
   const maxDetected = 5;
   const time = profileData.kernels

View File

@@ -30,6 +30,8 @@
 * - matrix: 3d transofrmation matrix as array of numeric values
 * - tensor: face tensor as Tensor object which contains detected face
 */
+import { Tensor } from '../dist/tfjs.esm.js';
 export interface Face {
   id: number
   confidence: number,
@@ -50,7 +52,7 @@ export interface Face {
     angle: { roll: number, yaw: number, pitch: number },
     matrix: [number, number, number, number, number, number, number, number, number],
   }
-  tensor: any,
+  tensor: typeof Tensor,
 }
 /** Body results
@@ -75,7 +77,8 @@ export interface Body {
   boxRaw?: [x: number, y: number, width: number, height: number],
   keypoints: Array<{
     part: string,
-    position: { x: number, y: number, z: number },
+    position: { x: number, y: number, z?: number },
+    positionRaw?: { x: number, y: number, z?: number },
     score: number,
     presence?: number,
   }>

View File

@@ -134,7 +134,7 @@
 <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
   <a name="keypoints" class="tsd-anchor"></a>
   <h3>keypoints</h3>
-  <div class="tsd-signature tsd-kind-icon">keypoints<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>part<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>position<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>x<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>y<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>z<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>presence<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>score<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">[]</span></div>
+  <div class="tsd-signature tsd-kind-icon">keypoints<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>part<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>position<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>x<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>y<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>z<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>positionRaw<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-symbol">{ </span>x<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>y<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>z<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>presence<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>score<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">[]</span></div>
   <aside class="tsd-sources">
   </aside>
 </section>

View File

@@ -63,38 +63,6 @@
 <div class="container container-main">
   <div class="row">
     <div class="col-8 col-content">
-      <section class="tsd-panel tsd-comment">
-        <div class="tsd-comment tsd-typography">
-          <div class="lead">
-            <p>Face results
-            Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
-            Some values may be null if specific model is not enabled</p>
-          </div>
-          <p>Array of individual results with one object per detected face
-          Each result has:</p>
-          <ul>
-            <li>id: face number</li>
-            <li>confidence: overal detection confidence value</li>
-            <li>boxConfidence: face box detection confidence value</li>
-            <li>faceConfidence: face keypoints detection confidence value</li>
-            <li>box: face bounding box as array of [x, y, width, height], normalized to image resolution</li>
-            <li>boxRaw: face bounding box as array of [x, y, width, height], normalized to range 0..1</li>
-            <li>mesh: face keypoints as array of [x, y, z] points of face mesh, normalized to image resolution</li>
-            <li>meshRaw: face keypoints as array of [x, y, z] points of face mesh, normalized to range 0..1</li>
-            <li>annotations: annotated face keypoints as array of annotated face mesh points</li>
-            <li>age: age as value</li>
-            <li>gender: gender as value</li>
-            <li>genderConfidence: gender detection confidence as value</li>
-            <li>emotion: emotions as array of possible emotions with their individual scores</li>
-            <li>embedding: facial descriptor as array of numerical elements</li>
-            <li>iris: iris distance from current viewpoint as distance value</li>
-            <li>rotation: face rotiation that contains both angles and matrix used for 3d transformations</li>
-            <li>angle: face angle as object with values for roll, yaw and pitch angles</li>
-            <li>matrix: 3d transofrmation matrix as array of numeric values</li>
-            <li>tensor: face tensor as Tensor object which contains detected face</li>
-          </ul>
-        </div>
-      </section>
       <section class="tsd-panel tsd-hierarchy">
         <h3>Hierarchy</h3>
         <ul class="tsd-hierarchy">
@@ -271,7 +239,7 @@
 <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
   <a name="tensor" class="tsd-anchor"></a>
   <h3>tensor</h3>
-  <div class="tsd-signature tsd-kind-icon">tensor<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">any</span></div>
+  <div class="tsd-signature tsd-kind-icon">tensor<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">typeof </span><span class="tsd-signature-type">__class</span></div>
   <aside class="tsd-sources">
   </aside>
 </section>

View File

@@ -1,13 +1,18 @@
+import * as tf from '../../dist/tfjs.esm.js';
+import { Config } from '../config';
 export declare class BlazeFaceModel {
     model: any;
-    anchorsData: any;
-    anchors: any;
+    anchorsData: [number, number][];
+    anchors: typeof tf.Tensor;
     inputSize: number;
-    config: any;
+    config: Config;
     constructor(model: any, config: any);
     getBoundingBoxes(inputImage: any): Promise<{
         boxes: {
-            box: any;
+            box: {
+                startPoint: typeof tf.Tensor;
+                endPoint: typeof tf.Tensor;
+            };
             landmarks: any;
             anchor: number[];
             confidence: number;

View File

@@ -1,5 +1,11 @@
 export declare class Pipeline {
-    storedBoxes: any;
+    storedBoxes: Array<{
+        startPoint: number[];
+        endPoint: number[];
+        landmarks: any;
+        confidence: number;
+        faceConfidence?: number;
+    }>;
     boundingBoxDetector: any;
     meshDetector: any;
     irisModel: any;
@@ -22,8 +28,8 @@ export declare class Pipeline {
         crop: any;
     };
     getEyeCoords(eyeData: any, eyeBox: any, eyeBoxSize: any, flip?: boolean): {
-        rawCoords: any[][];
-        iris: any[][];
+        rawCoords: [number, number, number][];
+        iris: [number, number, number][];
     };
     getAdjustedIrisCoords(rawCoords: any, irisCoords: any, direction: any): any;
     predict(input: any, config: any): Promise<any>;

3  types/face.d.ts vendored
View File

@@ -1 +1,2 @@
-export declare const detectFace: (parent: any, input: any) => Promise<any>;
+import { Face } from './result';
+export declare const detectFace: (parent: any, input: any) => Promise<Face[]>;

View File

@@ -1,10 +1,11 @@
+import * as tf from '../../dist/tfjs.esm.js';
 export declare class HandDetector {
     model: any;
-    anchors: any;
-    anchorsTensor: any;
+    anchors: number[][];
+    anchorsTensor: typeof tf.Tensor;
     inputSize: number;
-    inputSizeTensor: any;
-    doubleInputSizeTensor: any;
+    inputSizeTensor: typeof tf.Tensor;
+    doubleInputSizeTensor: typeof tf.Tensor;
     constructor(model: any);
     normalizeBoxes(boxes: any): any;
     normalizeLandmarks(rawPalmLandmarks: any, index: any): any;
@@ -13,5 +14,10 @@ export declare class HandDetector {
         palmLandmarks: any;
         confidence: number;
     }[]>;
-    estimateHandBounds(input: any, config: any): Promise<{}[]>;
+    estimateHandBounds(input: any, config: any): Promise<{
+        startPoint: number[];
+        endPoint: number[];
+        palmLandmarks: number[];
+        confidence: number;
+    }[]>;
 }

View File

@@ -1,11 +1,17 @@
+import * as detector from './handdetector';
 export declare class HandPipeline {
-    handDetector: any;
-    landmarkDetector: any;
+    handDetector: detector.HandDetector;
+    handPoseModel: any;
     inputSize: number;
-    storedBoxes: any;
+    storedBoxes: Array<{
+        startPoint: number[];
+        endPoint: number[];
+        palmLandmarks: number[];
+        confidence: number;
+    } | null>;
     skipped: number;
     detectedHands: number;
-    constructor(handDetector: any, landmarkDetector: any);
+    constructor(handDetector: any, handPoseModel: any);
     calculateLandmarksBoundingBox(landmarks: any): {
         startPoint: number[];
         endPoint: number[];

View File

@@ -3,6 +3,6 @@ export declare function decodePose(root: any, scores: any, offsets: any, displac
 export declare function buildPartWithScoreQueue(minConfidence: any, scores: any): utils.MaxHeap;
 export declare function decode(offsets: any, scores: any, displacementsFwd: any, displacementsBwd: any, maxDetected: any, minConfidence: any): {
     keypoints: any;
-    box: any;
+    box: [number, number, number, number];
     score: number;
 }[];

View File

@@ -1,9 +1,9 @@
 export declare function eitherPointDoesntMeetConfidence(a: any, b: any, minConfidence: any): boolean;
 export declare function getAdjacentKeyPoints(keypoints: any, minConfidence: any): any[];
-export declare function getBoundingBox(keypoints: any): any[];
+export declare function getBoundingBox(keypoints: any): [number, number, number, number];
 export declare function scalePoses(poses: any, [height, width]: [any, any], [inputResolutionHeight, inputResolutionWidth]: [any, any]): any;
 export declare class MaxHeap {
-    priorityQueue: any;
+    priorityQueue: Array<any>;
     numberOfElements: number;
     getElementValue: any;
     constructor(maxSize: any, getElementValue: any);
@@ -11,7 +11,7 @@ export declare class MaxHeap {
     dequeue(): any;
     empty(): boolean;
     size(): number;
-    all(): any;
+    all(): any[];
     max(): any;
     swim(k: any): void;
     sink(k: any): void;

10  types/result.d.ts vendored
View File

@@ -29,6 +29,7 @@
 * - matrix: 3d transofrmation matrix as array of numeric values
 * - tensor: face tensor as Tensor object which contains detected face
 */
+import { Tensor } from '../dist/tfjs.esm.js';
 export interface Face {
     id: number;
     confidence: number;
@@ -59,7 +60,7 @@ export interface Face {
        };
        matrix: [number, number, number, number, number, number, number, number, number];
    };
-    tensor: any;
+    tensor: typeof Tensor;
 }
 /** Body results
 *
@@ -85,7 +86,12 @@ export interface Body {
        position: {
            x: number;
            y: number;
-            z: number;
+            z?: number;
+        };
+        positionRaw?: {
+            x: number;
+            y: number;
+            z?: number;
        };
        score: number;
        presence?: number;
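Note (illustrative): because position.z is now optional on Body keypoints, downstream code needs a guard or fallback, mirroring what the draw code does. A small consumption sketch against a local copy of the keypoint shape (not an import from the package):

type BodyKeypoint = { part: string, position: { x: number, y: number, z?: number }, positionRaw?: { x: number, y: number, z?: number }, score: number };

function describeKeypoint(kp: BodyKeypoint): string {
  const z = kp.position.z ?? 0; // z may be absent, so fall back to 0
  return `${kp.part}: (${kp.position.x}, ${kp.position.y}, ${z}) score=${kp.score}`;
}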