mirror of https://github.com/vladmandic/human

enhance strong typing

parent 4c14bcd80e
commit 36e9c56746
@@ -9,14 +9,14 @@ Repository: **<git+https://github.com/vladmandic/human.git>**

## Changelog

### **HEAD -> main** 2021/05/22 mandic00@live.com

### **1.9.2** 2021/05/22 mandic00@live.com

- add id and boxraw on missing objects
- restructure results strong typing

### **origin/main** 2021/05/21 mandic00@live.com

### **1.9.1** 2021/05/21 mandic00@live.com

- caching improvements
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -4060,10 +4060,7 @@ var Pipeline = class {
 boxConfidence: box4.confidence,
 image: face5
 };
-const storedBox = squarifyBox(box4);
-storedBox.confidence = box4.confidence;
-storedBox.faceConfidence = faceConfidence;
-this.storedBoxes[i] = storedBox;
+this.storedBoxes[i] = { ...squarifyBox(box4), confidence: box4.confidence, faceConfidence };
 return prediction;
 }));
 if (config3.face.mesh.enabled)

@@ -16817,11 +16814,11 @@ var palmLandmarkIds = [0, 5, 9, 13, 17, 1, 2];
 var palmLandmarksPalmBase = 0;
 var palmLandmarksMiddleFingerBase = 2;
 var HandPipeline = class {
-constructor(handDetector, landmarkDetector) {
+constructor(handDetector, handPoseModel2) {
 var _a;
 this.handDetector = handDetector;
-this.landmarkDetector = landmarkDetector;
-this.inputSize = (_a = this.landmarkDetector) == null ? void 0 : _a.inputs[0].shape[2];
+this.handPoseModel = handPoseModel2;
+this.inputSize = (_a = this.handPoseModel) == null ? void 0 : _a.inputs[0].shape[2];
 this.storedBoxes = [];
 this.skipped = 0;
 this.detectedHands = 0;

@@ -16903,7 +16900,7 @@ var HandPipeline = class {
 const handImage = croppedInput.div(255);
 croppedInput.dispose();
 rotatedImage.dispose();
-const [confidenceT, keypoints] = await this.landmarkDetector.predict(handImage);
+const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
 handImage.dispose();
 const confidence = confidenceT.dataSync()[0];
 confidenceT.dispose();

@@ -16914,7 +16911,7 @@ var HandPipeline = class {
 keypointsReshaped.dispose();
 const coords3 = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
 const nextBoundingBox = this.getBoxForHandLandmarks(coords3);
-this.storedBoxes[i] = nextBoundingBox;
+this.storedBoxes[i] = { ...nextBoundingBox, confidence };
 const result = {
 landmarks: coords3,
 confidence,

@@ -18439,8 +18436,9 @@ function lines(ctx, points = [], localOptions) {
 ctx.beginPath();
 ctx.moveTo(points[0][0], points[0][1]);
 for (const pt of points) {
-ctx.strokeStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + 2 * pt[2]}, ${127.5 - 2 * pt[2]}, 255, 0.3)` : localOptions.color;
-ctx.fillStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + 2 * pt[2]}, ${127.5 - 2 * pt[2]}, 255, 0.3)` : localOptions.color;
+const z = pt[2] || 0;
+ctx.strokeStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
+ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
 ctx.lineTo(pt[0], Math.round(pt[1]));
 }
 ctx.stroke();

@@ -18617,7 +18615,7 @@ async function body2(inCanvas2, result, drawOptions) {
 }
 if (localOptions.drawPoints) {
 for (let pt = 0; pt < result[i].keypoints.length; pt++) {
-ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + 2 * result[i].keypoints[pt].position.z}, ${127.5 - 2 * result[i].keypoints[pt].position.z}, 255, 0.5)` : localOptions.color;
+ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + 2 * (result[i].keypoints[pt].position.z || 0)}, ${127.5 - 2 * (result[i].keypoints[pt].position.z || 0)}, 255, 0.5)` : localOptions.color;
 point(ctx, result[i].keypoints[pt].position.x, result[i].keypoints[pt].position.y, 0, localOptions);
 }
 }

@@ -19927,9 +19925,9 @@ var Human = class {
 this.perf.cached++;
 this.perf.changed = Math.trunc(now() - timeStamp);
 this.analyze("Check Changed:");
-let faceRes;
 let bodyRes;
 let handRes;
+let faceRes;
 let objectRes;
 let current;
 if (this.config.async) {
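These bundled hunks all land the same refactor: instead of creating a box and then mutating extra fields onto it, the cached entry is built in one object-spread expression. A minimal TypeScript sketch of the pattern; the Box type, storeBox name, and the squarifyBox stub are my stand-ins for the bundled helpers, not the library's definitions:

type Box = { startPoint: number[], endPoint: number[] };

// stand-in for the bundled squarifyBox helper, which reshapes a box into a square
const squarifyBox = (box: Box): Box => box;

function storeBox(stored: Array<Box & { confidence: number, faceConfidence?: number }>, i: number, box: Box & { confidence: number }, faceConfidence?: number): void {
  // single expression: the cached object carries its extra fields from the moment it is created
  stored[i] = { ...squarifyBox(box), confidence: box.confidence, faceConfidence };
}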
@@ -15,3 +15,20 @@
 2021-05-22 13:15:17 INFO:  Generate types: ["src/human.ts"]
 2021-05-22 13:15:23 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
 2021-05-22 13:15:23 INFO:  Generate TypeDocs: ["src/human.ts"]
+2021-05-22 14:53:16 INFO:  @vladmandic/human version 1.9.2
+2021-05-22 14:53:16 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
+2021-05-22 14:53:16 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
+2021-05-22 14:53:16 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-22 14:53:16 STATE: Build for: node type: node: {"imports":36,"importBytes":419268,"outputBytes":377986,"outputFiles":"dist/human.node.js"}
+2021-05-22 14:53:16 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-22 14:53:16 STATE: Build for: nodeGPU type: node: {"imports":36,"importBytes":419276,"outputBytes":377990,"outputFiles":"dist/human.node-gpu.js"}
+2021-05-22 14:53:16 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-22 14:53:16 STATE: Build for: nodeWASM type: node: {"imports":36,"importBytes":419343,"outputBytes":378062,"outputFiles":"dist/human.node-wasm.js"}
+2021-05-22 14:53:16 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-22 14:53:16 STATE: Build for: browserNoBundle type: esm: {"imports":36,"importBytes":419370,"outputBytes":232264,"outputFiles":"dist/human.esm-nobundle.js"}
+2021-05-22 14:53:17 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-22 14:53:17 STATE: Build for: browserBundle type: iife: {"imports":36,"importBytes":1529390,"outputBytes":1340125,"outputFiles":"dist/human.js"}
+2021-05-22 14:53:18 STATE: Build for: browserBundle type: esm: {"imports":36,"importBytes":1529390,"outputBytes":1340117,"outputFiles":"dist/human.esm.js"}
+2021-05-22 14:53:18 INFO:  Generate types: ["src/human.ts"]
+2021-05-22 14:53:22 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
+2021-05-22 14:53:22 INFO:  Generate TypeDocs: ["src/human.ts"]
@@ -2,6 +2,7 @@ import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as box from './box';
 import * as util from './util';
+import { Config } from '../config';

 const keypointsCount = 6;

@@ -21,11 +22,11 @@ function decodeBounds(boxOutputs, anchors, inputSize) {
 }

 export class BlazeFaceModel {
-model: any;
-anchorsData: any;
-anchors: any;
+model: any; // tf.GraphModel
+anchorsData: [number, number][];
+anchors: typeof tf.Tensor;
 inputSize: number;
-config: any;
+config: Config;

 constructor(model, config) {
 this.model = model;

@@ -60,7 +61,7 @@ export class BlazeFaceModel {
 const nmsTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.face.detector.maxDetected, this.config.face.detector.iouThreshold, this.config.face.detector.minConfidence);
 const nms = nmsTensor.arraySync();
 nmsTensor.dispose();
-const annotatedBoxes: Array<{ box: any, landmarks: any, anchor: number[], confidence: number }> = [];
+const annotatedBoxes: Array<{ box: { startPoint: typeof tf.Tensor, endPoint: typeof tf.Tensor }, landmarks: typeof tf.Tensor, anchor: number[], confidence: number }> = [];
 for (let i = 0; i < nms.length; i++) {
 const confidence = scores[nms[i]];
 if (confidence > this.config.face.detector.minConfidence) {
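Replacing any with a tuple-array type like the new [number, number][] anchorsData declaration is the core move of this commit. A tiny TypeScript illustration of what the checker now catches; the values here are made up for the example:

// anchor centers as [x, y] pairs, matching the new anchorsData declaration
const anchorsData: [number, number][] = [
  [0.5, 0.5],
  [0.25, 0.75],
];

const [cx, cy] = anchorsData[0]; // both inferred as number, not any
// anchorsData.push([1, 2, 3]); // rejected at compile time: the tuple expects exactly two elements
console.log(cx, cy);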
@@ -53,10 +53,10 @@ function replaceRawCoordinates(rawCoords, newCoords, prefix, keys) {
 }
 // The Pipeline coordinates between the bounding box and skeleton models.
 export class Pipeline {
-storedBoxes: any;
-boundingBoxDetector: any;
-meshDetector: any;
-irisModel: any;
+storedBoxes: Array<{ startPoint: number[], endPoint: number[], landmarks: any, confidence: number, faceConfidence?: number }>; // landmarks is tensor
+boundingBoxDetector: any; // tf.GraphModel
+meshDetector: any; // tf.GraphModel
+irisModel: any; // tf.GraphModel
 boxSize: number;
 meshSize: number;
 irisSize: number;

@@ -120,7 +120,7 @@ export class Pipeline {

 // Given a cropped image of an eye, returns the coordinates of the contours surrounding the eye and the iris.
 getEyeCoords(eyeData, eyeBox, eyeBoxSize, flip = false) {
-const eyeRawCoords: Array<any[]> = [];
+const eyeRawCoords: Array<[number, number, number]> = [];
 for (let i = 0; i < irisLandmarks.numCoordinates; i++) {
 const x = eyeData[i * 3];
 const y = eyeData[i * 3 + 1];

@@ -229,7 +229,7 @@ export class Pipeline {
 }

 const [, confidence, contourCoords] = this.meshDetector.execute(face); // The first returned tensor represents facial contours which are already included in the coordinates.
-const faceConfidence = confidence.dataSync()[0];
+const faceConfidence = confidence.dataSync()[0] as number;
 if (faceConfidence < config.face.detector.minConfidence) {
 this.storedBoxes[i].confidence = faceConfidence; // reset confidence of cached box
 return null; // if below confidence just exit

@@ -265,6 +265,7 @@ export class Pipeline {
 // override box from detection with one calculated from mesh
 const mesh = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
 const storeConfidence = box.confidence;
+// @ts-ignore enlargeBox does not include confidence so we append it manually
 box = bounding.enlargeBox(bounding.calculateLandmarksBoundingBox(mesh), 1.5); // redefine box with mesh calculated one
 box.confidence = storeConfidence;

@@ -288,13 +289,7 @@ export class Pipeline {
 };

-// updated stored cache values
-const storedBox = bounding.squarifyBox(box);
-// @ts-ignore box itself doesn't have those properties, but we stored them for future use
-storedBox.confidence = box.confidence;
-// @ts-ignore box itself doesn't have those properties, but we stored them for future use
-storedBox.faceConfidence = faceConfidence;
-// this.storedBoxes[i] = { ...squarifiedLandmarksBox, confidence: box.confidence, faceConfidence };
-this.storedBoxes[i] = storedBox;
+this.storedBoxes[i] = { ...bounding.squarifyBox(box), confidence: box.confidence, faceConfidence };

 return prediction;
 }));
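The last hunk is the payoff of the stronger storedBoxes type: building the cache entry as one literal removes both @ts-ignore suppressions. A TypeScript sketch of the idea under my own names (RawBox, CachedBox, and the squarify stub are hypothetical stand-ins):

type RawBox = { startPoint: number[], endPoint: number[] };
// before, confidence was bolted onto a plain box behind @ts-ignore;
// an intersection type makes the extra fields legal from the start
type CachedBox = RawBox & { confidence: number, faceConfidence?: number };

const squarify = (b: RawBox): RawBox => b; // stand-in for bounding.squarifyBox

function cache(box: RawBox, confidence: number, faceConfidence?: number): CachedBox {
  // the literal satisfies CachedBox directly, so no suppression is required
  return { ...squarify(box), confidence, faceConfidence };
}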
@@ -96,13 +96,14 @@ function rect(ctx, x, y, width, height, localOptions) {
 ctx.stroke();
 }

-function lines(ctx, points: [number, number, number][] = [], localOptions) {
+function lines(ctx, points: [number, number, number?][] = [], localOptions) {
 if (points === undefined || points.length === 0) return;
 ctx.beginPath();
 ctx.moveTo(points[0][0], points[0][1]);
 for (const pt of points) {
-ctx.strokeStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + (2 * pt[2])}, ${127.5 - (2 * pt[2])}, 255, 0.3)` : localOptions.color;
-ctx.fillStyle = localOptions.useDepth && pt[2] ? `rgba(${127.5 + (2 * pt[2])}, ${127.5 - (2 * pt[2])}, 255, 0.3)` : localOptions.color;
+const z = pt[2] || 0;
+ctx.strokeStyle = localOptions.useDepth && z ? `rgba(${127.5 + (2 * z)}, ${127.5 - (2 * z)}, 255, 0.3)` : localOptions.color;
+ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + (2 * z)}, ${127.5 - (2 * z)}, 255, 0.3)` : localOptions.color;
 ctx.lineTo(pt[0], Math.round(pt[1]));
 }
 ctx.stroke();

@@ -112,7 +113,7 @@ function lines(ctx, points: [number, number, number?][] = [], localOptions) {
 }
 }

-function curves(ctx, points: [number, number, number][] = [], localOptions) {
+function curves(ctx, points: [number, number, number?][] = [], localOptions) {
 if (points === undefined || points.length === 0) return;
 if (!localOptions.useCurves || points.length <= 2) {
 lines(ctx, points, localOptions);

@@ -142,8 +143,8 @@ export async function gesture(inCanvas: HTMLCanvasElement, result: Array<Gesture
 ctx.fillStyle = localOptions.color;
 let i = 1;
 for (let j = 0; j < result.length; j++) {
-let where:any[] = [];
-let what:any[] = [];
+let where: any[] = []; // what&where is a record
+let what: any[] = []; // what&where is a record
 [where, what] = Object.entries(result[j]);
 if ((what.length > 1) && (what[1].length > 0)) {
 const person = where[1] > 0 ? `#${where[1]}` : '';

@@ -271,7 +272,7 @@ export async function body(inCanvas: HTMLCanvasElement, result: Array<Body>, dra
 }
 if (localOptions.drawPoints) {
 for (let pt = 0; pt < result[i].keypoints.length; pt++) {
-ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + (2 * result[i].keypoints[pt].position.z)}, ${127.5 - (2 * result[i].keypoints[pt].position.z)}, 255, 0.5)` : localOptions.color;
+ctx.fillStyle = localOptions.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + (2 * (result[i].keypoints[pt].position.z || 0))}, ${127.5 - (2 * (result[i].keypoints[pt].position.z || 0))}, 255, 0.5)` : localOptions.color;
 point(ctx, result[i].keypoints[pt].position.x, result[i].keypoints[pt].position.y, 0, localOptions);
 }
 }

@@ -286,7 +287,7 @@ export async function body(inCanvas: HTMLCanvasElement, result: Array<Body>, dra
 }
 if (localOptions.drawPolygons && result[i].keypoints) {
 let part;
-const points: any[] = [];
+const points: [number, number, number?][] = [];
 // shoulder line
 points.length = 0;
 part = result[i].keypoints.find((a) => a.part === 'leftShoulder');
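Both draw fixes guard the now-optional depth coordinate before it reaches arithmetic. Pulled out as a standalone TypeScript helper (my naming, not the library's), the pattern looks like this:

type Point = [number, number, number?];

// maps signed depth into a red/green tint; zero or missing depth keeps the flat color
function depthColor(pt: Point, useDepth: boolean, fallback: string, alpha = 0.3): string {
  const z = pt[2] || 0; // undefined depth collapses to 0, disabling the gradient branch
  return useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, ${alpha})` : fallback;
}

console.log(depthColor([10, 20, 5], true, 'white')); // rgba(137.5, 117.5, 255, 0.3)
console.log(depthColor([10, 20], true, 'white'));    // white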
@@ -3,7 +3,10 @@ import * as tf from '../../dist/tfjs.esm.js';
 import { Body } from '../result';

 let model;
-let keypoints: Array<any> = [];
+
+type Keypoints = { score: number, part: string, position: { x: number, y: number }, positionRaw: { x: number, y: number } };
+
+let keypoints: Array<Keypoints> = [];
 let skipped = Number.MAX_SAFE_INTEGER;

 const bodyParts = ['head', 'neck', 'rightShoulder', 'rightElbow', 'rightWrist', 'chest', 'leftShoulder', 'leftElbow', 'leftWrist', 'pelvis', 'rightHip', 'rightKnee', 'rightAnkle', 'leftHip', 'leftKnee', 'leftAnkle'];

@@ -41,7 +44,8 @@ function max2d(inputs, minScore) {
 export async function predict(image, config): Promise<Body[]> {
 if ((skipped < config.body.skipFrames) && config.skipFrame && Object.keys(keypoints).length > 0) {
 skipped++;
-return keypoints;
+const score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
+return [{ id: 0, score, keypoints }];
 }
 skipped = 0;
 return new Promise(async (resolve) => {

@@ -57,7 +61,7 @@ export async function predict(image, config): Promise<Body[]> {
 tensor.dispose();

 if (resT) {
-const parts: Array<{ id, score, part, position: { x, y }, positionRaw: { xRaw, yRaw} }> = [];
+const parts: Array<Keypoints> = [];
 const squeeze = resT.squeeze();
 tf.dispose(resT);
 // body parts are basically just a stack of 2d tensors

@@ -69,12 +73,11 @@ export async function predict(image, config): Promise<Body[]> {
 const [x, y, score] = max2d(stack[id], config.body.minConfidence);
 if (score > config.body.minConfidence) {
 parts.push({
-id,
 score: Math.round(100 * score) / 100,
 part: bodyParts[id],
 positionRaw: {
-xRaw: x / model.inputs[0].shape[2], // x normalized to 0..1
-yRaw: y / model.inputs[0].shape[1], // y normalized to 0..1
+x: x / model.inputs[0].shape[2], // x normalized to 0..1
+y: y / model.inputs[0].shape[1], // y normalized to 0..1
 },
 position: {
 x: Math.round(image.shape[2] * x / model.inputs[0].shape[2]), // x normalized to input image size
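The skip-frame branch used to leak a bare keypoint array out of a function typed Promise<Body[]>; the fix wraps cached keypoints into a Body-shaped object whose score is the best individual keypoint score. A self-contained TypeScript sketch of just that aggregation, reusing the Keypoints shape from the diff:

type Keypoints = { score: number, part: string, position: { x: number, y: number }, positionRaw: { x: number, y: number } };

// builds the cached Body-shaped result: overall score = max keypoint score
function cachedBody(keypoints: Keypoints[]) {
  const score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
  return [{ id: 0, score, keypoints }];
}

const kp: Keypoints = { score: 0.8, part: 'head', position: { x: 10, y: 20 }, positionRaw: { x: 0.1, y: 0.2 } };
console.log(cachedBody([kp])[0].score); // 0.8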
@@ -96,7 +96,7 @@ const calculateFaceAngle = (face, image_size): { angle: { pitch: number, yaw: nu
 return { angle, matrix };
 };

-export const detectFace = async (parent, input): Promise<any> => {
+export const detectFace = async (parent, input): Promise<Face[]> => {
 // run facemesh, includes blazeface and iris
 // eslint-disable-next-line no-async-promise-executor
 let timeStamp;
@@ -3,12 +3,12 @@ import * as box from './box';
 import * as anchors from './anchors';

 export class HandDetector {
-model: any;
-anchors: any;
-anchorsTensor: any;
+model: any; // tf.GraphModel
+anchors: number[][];
+anchorsTensor: typeof tf.Tensor;
 inputSize: number;
-inputSizeTensor: any;
-doubleInputSizeTensor: any;
+inputSizeTensor: typeof tf.Tensor;
+doubleInputSizeTensor: typeof tf.Tensor;

 constructor(model) {
 this.model = model;

@@ -52,7 +52,7 @@ export class HandDetector {

 scoresT.dispose();
 filteredT.dispose();
-const hands: Array<{ box: any, palmLandmarks: any, confidence: number }> = [];
+const hands: Array<{ box: any, palmLandmarks: any, confidence: number }> = []; // box and landmarks are tensors here
 for (const index of filtered) {
 if (scores[index] >= config.hand.minConfidence) {
 const matchingBox = tf.slice(boxes, [index, 0], [1, -1]);

@@ -67,13 +67,13 @@ export class HandDetector {
 return hands;
 }

-async estimateHandBounds(input, config) {
+async estimateHandBounds(input, config): Promise<{ startPoint: number[]; endPoint: number[]; palmLandmarks: number[]; confidence: number }[]> {
 const inputHeight = input.shape[1];
 const inputWidth = input.shape[2];
 const image = tf.tidy(() => input.resizeBilinear([this.inputSize, this.inputSize]).div(127.5).sub(1));
 const predictions = await this.getBoxes(image, config);
 image.dispose();
-const hands: Array<{}> = [];
+const hands: Array<{ startPoint: number[]; endPoint: number[]; palmLandmarks: number[]; confidence: number }> = [];
 if (!predictions || predictions.length === 0) return hands;
 for (const prediction of predictions) {
 const boxes = prediction.box.dataSync();
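Going from Array<{}> to a concrete element type is more than cosmetics: {} accepts almost any value yet offers no members to read back. A small TypeScript illustration; the HandBounds alias is mine, the source spells the shape inline:

type HandBounds = { startPoint: number[]; endPoint: number[]; palmLandmarks: number[]; confidence: number };

const hands: HandBounds[] = [];
hands.push({ startPoint: [0, 0], endPoint: [100, 100], palmLandmarks: [], confidence: 0.92 });

// with Array<{}> this line would fail: Property 'confidence' does not exist on type '{}'
console.log(hands[0].confidence);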
@@ -1,6 +1,7 @@
 import * as tf from '../../dist/tfjs.esm.js';
 import * as box from './box';
 import * as util from './util';
+import * as detector from './handdetector';

 const palmBoxEnlargeFactor = 5; // default 3
 const handBoxEnlargeFactor = 1.65; // default 1.65

@@ -9,17 +10,17 @@ const palmLandmarksPalmBase = 0;
 const palmLandmarksMiddleFingerBase = 2;

 export class HandPipeline {
-handDetector: any;
-landmarkDetector: any;
+handDetector: detector.HandDetector;
+handPoseModel: any; // tf.GraphModel
 inputSize: number;
-storedBoxes: any;
+storedBoxes: Array<{ startPoint: number[]; endPoint: number[]; palmLandmarks: number[]; confidence: number } | null>;
 skipped: number;
 detectedHands: number;

-constructor(handDetector, landmarkDetector) {
+constructor(handDetector, handPoseModel) {
 this.handDetector = handDetector;
-this.landmarkDetector = landmarkDetector;
-this.inputSize = this.landmarkDetector?.inputs[0].shape[2];
+this.handPoseModel = handPoseModel;
+this.inputSize = this.handPoseModel?.inputs[0].shape[2];
 this.storedBoxes = [];
 this.skipped = 0;
 this.detectedHands = 0;

@@ -112,7 +113,7 @@ export class HandPipeline {
 const handImage = croppedInput.div(255);
 croppedInput.dispose();
 rotatedImage.dispose();
-const [confidenceT, keypoints] = await this.landmarkDetector.predict(handImage);
+const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
 handImage.dispose();
 const confidence = confidenceT.dataSync()[0];
 confidenceT.dispose();

@@ -123,7 +124,7 @@ export class HandPipeline {
 keypointsReshaped.dispose();
 const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);
 const nextBoundingBox = this.getBoxForHandLandmarks(coords);
-this.storedBoxes[i] = nextBoundingBox;
+this.storedBoxes[i] = { ...nextBoundingBox, confidence };
 const result = {
 landmarks: coords,
 confidence,
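storedBoxes is now an array whose slots can be null, so every read has to narrow before touching members. A minimal TypeScript sketch of the discipline this type enforces (StoredHandBox is my alias for the inline shape above):

type StoredHandBox = { startPoint: number[]; endPoint: number[]; palmLandmarks: number[]; confidence: number };

const storedBoxes: Array<StoredHandBox | null> = [];
storedBoxes[0] = { startPoint: [0, 0], endPoint: [50, 50], palmLandmarks: [], confidence: 0.9 };
storedBoxes[1] = null; // a cleared cache slot is now representable, not just undefined

for (const box of storedBoxes) {
  if (!box) continue; // the compiler forces this narrowing before member access
  console.log(box.confidence);
}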
@@ -1,6 +1,6 @@
 import { log, now, mergeDeep } from './helpers';
 import { Config, defaults } from './config';
-import { Result } from './result';
+import { Result, Gesture } from './result';
 import * as sysinfo from './sysinfo';
 import * as tf from '../dist/tfjs.esm.js';
 import * as backend from './tfjs/backend';

@@ -114,7 +114,7 @@ export class Human {
 /** Platform and agent information detected by Human */
 sysinfo: { platform: string, agent: string };
 /** Performance object that contains values for all recently performed operations */
-perf: any;
+perf: any; // perf members are dynamically defined as needed
 #numTensors: number;
 #analyzeMemoryLeaks: boolean;
 #checkSanity: boolean;

@@ -449,9 +449,10 @@ export class Human {
 this.analyze('Check Changed:');

 // prepare where to store model results
-let faceRes;
+// keep them with weak typing as it can be promise or not
 let bodyRes;
 let handRes;
+let faceRes;
 let objectRes;
 let current;

@@ -520,7 +521,7 @@ export class Human {
 tf.dispose(process.tensor);

 // run gesture analysis last
-let gestureRes: any[] = [];
+let gestureRes: Gesture[] = [];
 if (this.config.gesture.enabled) {
 timeStamp = now();
 gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
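The new comment makes the one remaining weak spot deliberate: depending on config.async, each result holder carries either a value or a still-pending promise. A hedged TypeScript sketch of why a union would be needed otherwise; the Body stand-in and detect wrapper are mine, not the library's:

type Body = { id: number, score: number };

// in async mode the holder keeps the pending promise; in sync mode the awaited value
let bodyRes: Body[] | Promise<Body[]>;

async function detect(runAsync: boolean): Promise<Body[]> {
  const run = async (): Promise<Body[]> => [{ id: 0, score: 1 }];
  bodyRes = runAsync ? run() : await run();
  return bodyRes; // an async return resolves either shape uniformly
}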
@@ -81,7 +81,7 @@ async function process(res, inputSize, outputShape, config) {
 // unnecessary boxes and run nms only on good candidates (basically it just does IOU analysis as scores are already filtered)
 const nmsBoxes = results.map((a) => [a.boxRaw[1], a.boxRaw[0], a.boxRaw[3], a.boxRaw[2]]); // switches coordinates from x,y to y,x as expected by tf.nms
 const nmsScores = results.map((a) => a.score);
-let nmsIdx: any[] = [];
+let nmsIdx: Array<number> = [];
 if (nmsBoxes && nmsBoxes.length > 0) {
 const nms = await tf.image.nonMaxSuppressionAsync(nmsBoxes, nmsScores, config.object.maxDetected, config.object.iouThreshold, config.object.minConfidence);
 nmsIdx = nms.dataSync();
@@ -120,7 +120,7 @@ function getInstanceScore(existingPoses, keypoints) {
 }

 export function decode(offsets, scores, displacementsFwd, displacementsBwd, maxDetected, minConfidence) {
-const poses: Array<{ keypoints: any, box: any, score: number }> = [];
+const poses: Array<{ keypoints, box: [number, number, number, number], score: number }> = [];
 const queue = buildPartWithScoreQueue(minConfidence, scores);
 // Generate at most maxDetected object instances per image in decreasing root part score order.
 while (poses.length < maxDetected && !queue.empty()) {
@@ -14,7 +14,7 @@ export function getAdjacentKeyPoints(keypoints, minConfidence) {
 }, []);
 }

-export function getBoundingBox(keypoints) {
+export function getBoundingBox(keypoints): [number, number, number, number] {
 const coord = keypoints.reduce(({ maxX, maxY, minX, minY }, { position: { x, y } }) => ({
 maxX: Math.max(maxX, x),
 maxY: Math.max(maxY, y),

@@ -49,9 +49,9 @@ export function scalePoses(poses, [height, width], [inputResolutionHeight, input

 // algorithm based on Coursera Lecture from Algorithms, Part 1: https://www.coursera.org/learn/algorithms-part1/lecture/ZjoSM/heapsort
 export class MaxHeap {
-priorityQueue: any;
+priorityQueue: Array<any>; // don't touch
 numberOfElements: number;
-getElementValue: any;
+getElementValue: any; // function call

 constructor(maxSize, getElementValue) {
 this.priorityQueue = new Array(maxSize);
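Tuple return types like the new getBoundingBox signature let callers destructure without casts. A hedged TypeScript sketch of such a function: the reduce body is abridged from the source, and the [minX, minY, maxX, maxY] ordering here is illustrative, not confirmed by the diff:

type Keypoint = { position: { x: number, y: number } };

// returns a fixed-length tuple instead of any[]
function getBoundingBox(keypoints: Keypoint[]): [number, number, number, number] {
  const xs = keypoints.map((k) => k.position.x);
  const ys = keypoints.map((k) => k.position.y);
  return [Math.min(...xs), Math.min(...ys), Math.max(...xs), Math.max(...ys)];
}

const [minX, minY, maxX, maxY] = getBoundingBox([{ position: { x: 1, y: 2 } }, { position: { x: 3, y: 4 } }]);
console.log(minX, minY, maxX, maxY); // 1 2 3 4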
@@ -2,7 +2,7 @@ import { log } from './helpers';

 export const data = {};

-export function run(modelName: string, profileData: any): void {
+export function run(modelName: string, profileData: any): void { // profileData is tfjs internal type
 if (!profileData || !profileData.kernels) return;
 const maxDetected = 5;
 const time = profileData.kernels
@@ -30,6 +30,8 @@
 * - matrix: 3d transofrmation matrix as array of numeric values
 * - tensor: face tensor as Tensor object which contains detected face
 */
+import { Tensor } from '../dist/tfjs.esm.js';
+
 export interface Face {
 id: number
 confidence: number,

@@ -50,7 +52,7 @@ export interface Face {
 angle: { roll: number, yaw: number, pitch: number },
 matrix: [number, number, number, number, number, number, number, number, number],
 }
-tensor: any,
+tensor: typeof Tensor,
 }

 /** Body results

@@ -75,7 +77,8 @@ export interface Body {
 boxRaw?: [x: number, y: number, width: number, height: number],
 keypoints: Array<{
 part: string,
-position: { x: number, y: number, z: number },
+position: { x: number, y: number, z?: number },
+positionRaw?: { x: number, y: number, z?: number },
 score: number,
 presence?: number,
 }>
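With z now optional on Body keypoints, downstream code should default it instead of trusting it to exist. A one-liner consumers might use (the depthOf helper name is mine, not part of the API):

type Position = { x: number, y: number, z?: number };

// safe depth accessor: a missing z becomes 0 rather than NaN in later arithmetic
const depthOf = (p: Position): number => p.z ?? 0;

console.log(depthOf({ x: 1, y: 2 }));       // 0
console.log(depthOf({ x: 1, y: 2, z: 5 })); // 5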
@@ -134,7 +134,7 @@
 <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
 <a name="keypoints" class="tsd-anchor"></a>
 <h3>keypoints</h3>
-<div class="tsd-signature tsd-kind-icon">keypoints<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>part<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>position<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>x<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>y<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>z<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>presence<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>score<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">[]</span></div>
+<div class="tsd-signature tsd-kind-icon">keypoints<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>part<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">; </span>position<span class="tsd-signature-symbol">: </span><span class="tsd-signature-symbol">{ </span>x<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>y<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>z<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>positionRaw<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-symbol">{ </span>x<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>y<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>z<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">; </span>presence<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol">; </span>score<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">number</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">[]</span></div>
 <aside class="tsd-sources">
 </aside>
 </section>
@@ -63,38 +63,6 @@
 <div class="container container-main">
 <div class="row">
 <div class="col-8 col-content">
-<section class="tsd-panel tsd-comment">
-<div class="tsd-comment tsd-typography">
-<div class="lead">
-<p>Face results
-Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
-Some values may be null if specific model is not enabled</p>
-</div>
-<p>Array of individual results with one object per detected face
-Each result has:</p>
-<ul>
-<li>id: face number</li>
-<li>confidence: overal detection confidence value</li>
-<li>boxConfidence: face box detection confidence value</li>
-<li>faceConfidence: face keypoints detection confidence value</li>
-<li>box: face bounding box as array of [x, y, width, height], normalized to image resolution</li>
-<li>boxRaw: face bounding box as array of [x, y, width, height], normalized to range 0..1</li>
-<li>mesh: face keypoints as array of [x, y, z] points of face mesh, normalized to image resolution</li>
-<li>meshRaw: face keypoints as array of [x, y, z] points of face mesh, normalized to range 0..1</li>
-<li>annotations: annotated face keypoints as array of annotated face mesh points</li>
-<li>age: age as value</li>
-<li>gender: gender as value</li>
-<li>genderConfidence: gender detection confidence as value</li>
-<li>emotion: emotions as array of possible emotions with their individual scores</li>
-<li>embedding: facial descriptor as array of numerical elements</li>
-<li>iris: iris distance from current viewpoint as distance value</li>
-<li>rotation: face rotiation that contains both angles and matrix used for 3d transformations</li>
-<li>angle: face angle as object with values for roll, yaw and pitch angles</li>
-<li>matrix: 3d transofrmation matrix as array of numeric values</li>
-<li>tensor: face tensor as Tensor object which contains detected face</li>
-</ul>
-</div>
-</section>
 <section class="tsd-panel tsd-hierarchy">
 <h3>Hierarchy</h3>
 <ul class="tsd-hierarchy">

@@ -271,7 +239,7 @@
 <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
 <a name="tensor" class="tsd-anchor"></a>
 <h3>tensor</h3>
-<div class="tsd-signature tsd-kind-icon">tensor<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">any</span></div>
+<div class="tsd-signature tsd-kind-icon">tensor<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">typeof </span><span class="tsd-signature-type">__class</span></div>
 <aside class="tsd-sources">
 </aside>
 </section>
@@ -1,13 +1,18 @@
+import * as tf from '../../dist/tfjs.esm.js';
+import { Config } from '../config';
 export declare class BlazeFaceModel {
 model: any;
-anchorsData: any;
-anchors: any;
+anchorsData: [number, number][];
+anchors: typeof tf.Tensor;
 inputSize: number;
-config: any;
+config: Config;
 constructor(model: any, config: any);
 getBoundingBoxes(inputImage: any): Promise<{
 boxes: {
-box: any;
+box: {
+startPoint: typeof tf.Tensor;
+endPoint: typeof tf.Tensor;
+};
 landmarks: any;
 anchor: number[];
 confidence: number;
@@ -1,5 +1,11 @@
 export declare class Pipeline {
-storedBoxes: any;
+storedBoxes: Array<{
+startPoint: number[];
+endPoint: number[];
+landmarks: any;
+confidence: number;
+faceConfidence?: number;
+}>;
 boundingBoxDetector: any;
 meshDetector: any;
 irisModel: any;

@@ -22,8 +28,8 @@ export declare class Pipeline {
 crop: any;
 };
 getEyeCoords(eyeData: any, eyeBox: any, eyeBoxSize: any, flip?: boolean): {
-rawCoords: any[][];
-iris: any[][];
+rawCoords: [number, number, number][];
+iris: [number, number, number][];
 };
 getAdjustedIrisCoords(rawCoords: any, irisCoords: any, direction: any): any;
 predict(input: any, config: any): Promise<any>;
@@ -1 +1,2 @@
-export declare const detectFace: (parent: any, input: any) => Promise<any>;
+import { Face } from './result';
+export declare const detectFace: (parent: any, input: any) => Promise<Face[]>;
@@ -1,10 +1,11 @@
+import * as tf from '../../dist/tfjs.esm.js';
 export declare class HandDetector {
 model: any;
-anchors: any;
-anchorsTensor: any;
+anchors: number[][];
+anchorsTensor: typeof tf.Tensor;
 inputSize: number;
-inputSizeTensor: any;
-doubleInputSizeTensor: any;
+inputSizeTensor: typeof tf.Tensor;
+doubleInputSizeTensor: typeof tf.Tensor;
 constructor(model: any);
 normalizeBoxes(boxes: any): any;
 normalizeLandmarks(rawPalmLandmarks: any, index: any): any;

@@ -13,5 +14,10 @@ export declare class HandDetector {
 palmLandmarks: any;
 confidence: number;
 }[]>;
-estimateHandBounds(input: any, config: any): Promise<{}[]>;
+estimateHandBounds(input: any, config: any): Promise<{
+startPoint: number[];
+endPoint: number[];
+palmLandmarks: number[];
+confidence: number;
+}[]>;
 }
@@ -1,11 +1,17 @@
+import * as detector from './handdetector';
 export declare class HandPipeline {
-handDetector: any;
-landmarkDetector: any;
+handDetector: detector.HandDetector;
+handPoseModel: any;
 inputSize: number;
-storedBoxes: any;
+storedBoxes: Array<{
+startPoint: number[];
+endPoint: number[];
+palmLandmarks: number[];
+confidence: number;
+} | null>;
 skipped: number;
 detectedHands: number;
-constructor(handDetector: any, landmarkDetector: any);
+constructor(handDetector: any, handPoseModel: any);
 calculateLandmarksBoundingBox(landmarks: any): {
 startPoint: number[];
 endPoint: number[];
@@ -3,6 +3,6 @@ export declare function decodePose(root: any, scores: any, offsets: any, displac
 export declare function buildPartWithScoreQueue(minConfidence: any, scores: any): utils.MaxHeap;
 export declare function decode(offsets: any, scores: any, displacementsFwd: any, displacementsBwd: any, maxDetected: any, minConfidence: any): {
 keypoints: any;
-box: any;
+box: [number, number, number, number];
 score: number;
 }[];
@@ -1,9 +1,9 @@
 export declare function eitherPointDoesntMeetConfidence(a: any, b: any, minConfidence: any): boolean;
 export declare function getAdjacentKeyPoints(keypoints: any, minConfidence: any): any[];
-export declare function getBoundingBox(keypoints: any): any[];
+export declare function getBoundingBox(keypoints: any): [number, number, number, number];
 export declare function scalePoses(poses: any, [height, width]: [any, any], [inputResolutionHeight, inputResolutionWidth]: [any, any]): any;
 export declare class MaxHeap {
-priorityQueue: any;
+priorityQueue: Array<any>;
 numberOfElements: number;
 getElementValue: any;
 constructor(maxSize: any, getElementValue: any);

@@ -11,7 +11,7 @@ export declare class MaxHeap {
 dequeue(): any;
 empty(): boolean;
 size(): number;
-all(): any;
+all(): any[];
 max(): any;
 swim(k: any): void;
 sink(k: any): void;
@@ -29,6 +29,7 @@
 * - matrix: 3d transofrmation matrix as array of numeric values
 * - tensor: face tensor as Tensor object which contains detected face
 */
+import { Tensor } from '../dist/tfjs.esm.js';
 export interface Face {
 id: number;
 confidence: number;

@@ -59,7 +60,7 @@ export interface Face {
 };
 matrix: [number, number, number, number, number, number, number, number, number];
 };
-tensor: any;
+tensor: typeof Tensor;
 }
 /** Body results
 *

@@ -85,7 +86,12 @@ export interface Body {
 position: {
 x: number;
 y: number;
-z: number;
+z?: number;
 };
+positionRaw?: {
+x: number;
+y: number;
+z?: number;
+};
 score: number;
 presence?: number;