add tfjs types and remove all instances of any

pull/356/head
Vladimir Mandic 2021-05-22 21:47:59 -04:00
parent b192445071
commit 9e0318ea52
16 changed files with 100 additions and 81 deletions
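The same pattern repeats across the files below: module-level model handles gain a GraphModel annotation, dynamic properties such as modelUrl are read via index access, and remaining any usages become unknown or Record<string, unknown>. A condensed sketch of that loader pattern, assembled from the body and face diffs below rather than taken verbatim from any single file:

import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';
import { GraphModel } from '../tfjs/types';

let model: GraphModel;

export async function load(config): Promise<GraphModel> {
  if (!model) {
    // @ts-ignore loadGraphModel from the bundled tfjs build does not match the re-exported GraphModel type exactly
    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
    // modelUrl is not declared on GraphModel, so it is read with index access
    if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
    else if (config.debug) log('load model:', model['modelUrl']);
  } else if (config.debug) log('cached model:', model['modelUrl']);
  return model;
}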

View File

@@ -28,8 +28,6 @@
 "rules": {
 "@typescript-eslint/ban-ts-comment": "off",
 "@typescript-eslint/explicit-module-boundary-types": "off",
-"@typescript-eslint/ban-types": "off",
-"@typescript-eslint/no-explicit-any": "off",
 "@typescript-eslint/no-var-requires": "off",
 "camelcase": "off",
 "dot-notation": "off",

View File

@@ -11,6 +11,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 ### **HEAD -> main** 2021/05/22 mandic00@live.com
+- rebuild all for release
 ### **1.9.2** 2021/05/22 mandic00@live.com

View File

@@ -69,9 +69,9 @@
 "chokidar": "^3.5.1",
 "dayjs": "^1.10.4",
 "esbuild": "^0.12.1",
-"eslint": "^7.26.0",
+"eslint": "^7.27.0",
 "eslint-config-airbnb-base": "^14.2.1",
-"eslint-plugin-import": "^2.23.2",
+"eslint-plugin-import": "^2.23.3",
 "eslint-plugin-json": "^3.0.0",
 "eslint-plugin-node": "^11.1.0",
 "eslint-plugin-promise": "^5.1.0",

View File

@@ -32,16 +32,16 @@ export interface DrawOptions {
 lineWidth: number,
 pointSize: number,
 roundRect: number,
-drawPoints: Boolean,
-drawLabels: Boolean,
-drawBoxes: Boolean,
-drawPolygons: Boolean,
-fillPolygons: Boolean,
-useDepth: Boolean,
-useCurves: Boolean,
-bufferedOutput: Boolean,
-useRawBoxes: Boolean,
-calculateHandBox: Boolean,
+drawPoints: boolean,
+drawLabels: boolean,
+drawBoxes: boolean,
+drawPolygons: boolean,
+fillPolygons: boolean,
+useDepth: boolean,
+useCurves: boolean,
+bufferedOutput: boolean,
+useRawBoxes: boolean,
+calculateHandBox: boolean,
 }
 export const options: DrawOptions = {
@@ -53,16 +53,16 @@ export const options: DrawOptions = {
 lineWidth: <number>6,
 pointSize: <number>2,
 roundRect: <number>28,
-drawPoints: <Boolean>false,
-drawLabels: <Boolean>true,
-drawBoxes: <Boolean>true,
-drawPolygons: <Boolean>true,
-fillPolygons: <Boolean>false,
-useDepth: <Boolean>true,
-useCurves: <Boolean>false,
-bufferedOutput: <Boolean>false, // not yet implemented
-useRawBoxes: <Boolean>false,
-calculateHandBox: <Boolean>true,
+drawPoints: <boolean>false,
+drawLabels: <boolean>true,
+drawBoxes: <boolean>true,
+drawPolygons: <boolean>true,
+fillPolygons: <boolean>false,
+useDepth: <boolean>true,
+useCurves: <boolean>false,
+bufferedOutput: <boolean>false, // not yet implemented
+useRawBoxes: <boolean>false,
+calculateHandBox: <boolean>true,
 };
 let bufferedResult: Result;
@@ -143,11 +143,11 @@ export async function gesture(inCanvas: HTMLCanvasElement, result: Array<Gesture
 ctx.fillStyle = localOptions.color;
 let i = 1;
 for (let j = 0; j < result.length; j++) {
-let where: any[] = []; // what&where is a record
-let what: any[] = []; // what&where is a record
+let where: unknown[] = []; // what&where is a record
+let what: unknown[] = []; // what&where is a record
 [where, what] = Object.entries(result[j]);
-if ((what.length > 1) && (what[1].length > 0)) {
-const person = where[1] > 0 ? `#${where[1]}` : '';
+if ((what.length > 1) && ((what[1] as string).length > 0)) {
+const person = where[1] as number > 0 ? `#${where[1]}` : '';
 const label = `${where[0]} ${person}: ${what[1]}`;
 if (localOptions.shadowColor && localOptions.shadowColor !== '') {
 ctx.fillStyle = localOptions.shadowColor;

View File

@@ -1,8 +1,9 @@
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import { Body } from '../result';
+import { GraphModel } from '../tfjs/types';
-let model;
+let model: GraphModel;
 type Keypoints = { score: number, part: string, position: { x: number, y: number }, positionRaw: { x: number, y: number } };
@@ -13,10 +14,11 @@ const bodyParts = ['head', 'neck', 'rightShoulder', 'rightElbow', 'rightWrist',
 export async function load(config) {
 if (!model) {
+// @ts-ignore type mismatch on GraphModel
 model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
-if (!model || !model.modelUrl) log('load model failed:', config.body.modelPath);
-else if (config.debug) log('load model:', model.modelUrl);
-} else if (config.debug) log('cached model:', model.modelUrl);
+if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
+else if (config.debug) log('load model:', model['modelUrl']);
+} else if (config.debug) log('cached model:', model['modelUrl']);
 return model;
 }
@@ -50,6 +52,7 @@ export async function predict(image, config): Promise<Body[]> {
 skipped = 0;
 return new Promise(async (resolve) => {
 const tensor = tf.tidy(() => {
+if (!model.inputs[0].shape) return null;
 const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
 const enhance = tf.mul(resize, 2);
 const norm = enhance.sub(1);
@@ -75,13 +78,13 @@ export async function predict(image, config): Promise<Body[]> {
 parts.push({
 score: Math.round(100 * score) / 100,
 part: bodyParts[id],
-positionRaw: {
-x: x / model.inputs[0].shape[2], // x normalized to 0..1
-y: y / model.inputs[0].shape[1], // y normalized to 0..1
+positionRaw: { // normalized to 0..1
+// @ts-ignore model is not undefined here
+x: x / model.inputs[0].shape[2], y: y / model.inputs[0].shape[1],
 },
-position: {
-x: Math.round(image.shape[2] * x / model.inputs[0].shape[2]), // x normalized to input image size
-y: Math.round(image.shape[1] * y / model.inputs[0].shape[1]), // y normalized to input image size
+position: { // normalized to input image size
+// @ts-ignore model is not undefined here
+x: Math.round(image.shape[2] * x / model.inputs[0].shape[2]), y: Math.round(image.shape[1] * y / model.inputs[0].shape[1]),
 },
 });
 }

View File

@@ -1,20 +1,22 @@
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
+import { Tensor, GraphModel } from '../tfjs/types';
-let model;
+let model: GraphModel;
 const last: Array<{ age: number}> = [];
 let lastCount = 0;
 let skipped = Number.MAX_SAFE_INTEGER;
-type Tensor = typeof tf.Tensor;
 type DB = Array<{ name: string, source: string, embedding: number[] }>;
 export async function load(config) {
+const modelUrl = join(config.modelBasePath, config.face.description.modelPath);
 if (!model) {
-model = await tf.loadGraphModel(join(config.modelBasePath, config.face.description.modelPath));
-if (!model || !model.modelUrl) log('load model failed:', config.face.description.modelPath);
-else if (config.debug) log('load model:', model.modelUrl);
-} else if (config.debug) log('cached model:', model.modelUrl);
+// @ts-ignore type mismatch for GraphModel
+model = await tf.loadGraphModel(modelUrl);
+if (!model) log('load model failed:', config.face.description.modelPath);
+else if (config.debug) log('load model:', modelUrl);
+} else if (config.debug) log('cached model:', modelUrl);
 return model;
 }
@@ -52,6 +54,7 @@ export function enhance(input): Tensor {
 // do a tight crop of image and resize it to fit the model
 const box = [[0.05, 0.15, 0.85, 0.85]]; // empyrical values for top, left, bottom, right
 // const box = [[0.0, 0.0, 1.0, 1.0]]; // basically no crop for test
+if (!model.inputs[0].shape) return null; // model has no shape so no point continuing
 const crop = (tensor.shape.length === 3)
 ? tf.image.cropAndResize(tf.expandDims(tensor, 0), box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]) // add batch dimension if missing
 : tf.image.cropAndResize(tensor, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);

View File

@@ -1,9 +1,10 @@
 import * as tf from '../../dist/tfjs.esm.js';
 import * as box from './box';
 import * as anchors from './anchors';
+import { Tensor, GraphModel } from '../tfjs/types';
 export class HandDetector {
-model: any; // tf.GraphModel
+model: GraphModel;
 anchors: number[][];
 anchorsTensor: typeof tf.Tensor;
 inputSize: number;
@@ -14,6 +15,7 @@ export class HandDetector {
 this.model = model;
 this.anchors = anchors.anchors.map((anchor) => [anchor.x, anchor.y]);
 this.anchorsTensor = tf.tensor2d(this.anchors);
+// @ts-ignore model is not undefined here
 this.inputSize = this.model?.inputs[0].shape[2];
 this.inputSizeTensor = tf.tensor1d([this.inputSize, this.inputSize]);
 this.doubleInputSizeTensor = tf.tensor1d([this.inputSize * 2, this.inputSize * 2]);
@@ -39,7 +41,7 @@
 }
 async getBoxes(input, config) {
-const batched = this.model.predict(input);
+const batched = this.model.predict(input) as Tensor;
 const predictions = batched.squeeze();
 batched.dispose();
 const scoresT = tf.tidy(() => tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1])).squeeze());
@@ -52,7 +54,7 @@
 scoresT.dispose();
 filteredT.dispose();
-const hands: Array<{ box: any, palmLandmarks: any, confidence: number }> = []; // box and lardmarks are tensors here
+const hands: Array<{ box: Tensor, palmLandmarks: Tensor, confidence: number }> = [];
 for (const index of filtered) {
 if (scores[index] >= config.hand.minConfidence) {
 const matchingBox = tf.slice(boxes, [index, 0], [1, -1]);

View File

@@ -2,6 +2,7 @@ import * as tf from '../../dist/tfjs.esm.js';
 import * as box from './box';
 import * as util from './util';
 import * as detector from './handdetector';
+import { Tensor, GraphModel } from '../tfjs/types';
 const palmBoxEnlargeFactor = 5; // default 3
 const handBoxEnlargeFactor = 1.65; // default 1.65
@@ -11,7 +12,7 @@ const palmLandmarksMiddleFingerBase = 2;
 export class HandPipeline {
 handDetector: detector.HandDetector;
-handPoseModel: any; // tf.GraphModel
+handPoseModel: GraphModel;
 inputSize: number;
 storedBoxes: Array<{ startPoint: number[]; endPoint: number[]; palmLandmarks: number[]; confidence: number } | null>;
 skipped: number;
@@ -20,6 +21,7 @@ export class HandPipeline {
 constructor(handDetector, handPoseModel) {
 this.handDetector = handDetector;
 this.handPoseModel = handPoseModel;
+// @ts-ignore model is not undefined here
 this.inputSize = this.handPoseModel?.inputs[0].shape[2];
 this.storedBoxes = [];
 this.skipped = 0;
@@ -96,7 +98,7 @@
 // for (const possible of boxes) this.storedBoxes.push(possible);
 if (this.storedBoxes.length > 0) useFreshBox = true;
 }
-const hands: Array<{}> = [];
+const hands: Array<{ landmarks?: number[], confidence: number, box: { topLeft: number[], bottomRight: number[] } }> = [];
 // go through working set of boxes
 for (let i = 0; i < this.storedBoxes.length; i++) {
@@ -113,7 +115,7 @@
 const handImage = croppedInput.div(255);
 croppedInput.dispose();
 rotatedImage.dispose();
-const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage);
+const [confidenceT, keypoints] = await this.handPoseModel.predict(handImage) as Array<Tensor>;
 handImage.dispose();
 const confidence = confidenceT.dataSync()[0];
 confidenceT.dispose();

View File

@@ -3,6 +3,7 @@ import * as tf from '../../dist/tfjs.esm.js';
 import * as handdetector from './handdetector';
 import * as handpipeline from './handpipeline';
 import { Hand } from '../result';
+import { GraphModel } from '../tfjs/types';
 const meshAnnotations = {
 thumb: [1, 2, 3, 4],
@@ -13,9 +14,9 @@ const meshAnnotations = {
 palmBase: [0],
 };
-let handDetectorModel;
-let handPoseModel;
-let handPipeline;
+let handDetectorModel: GraphModel | null;
+let handPoseModel: GraphModel | null;
+let handPipeline: handpipeline.HandPipeline;
 export async function predict(input, config): Promise<Hand[]> {
 const predictions = await handPipeline.estimateHands(input, config);
@@ -25,6 +26,7 @@ export async function predict(input, config): Promise<Hand[]> {
 const annotations = {};
 if (predictions[i].landmarks) {
 for (const key of Object.keys(meshAnnotations)) {
+// @ts-ignore landmarks are not undefined
 annotations[key] = meshAnnotations[key].map((index) => predictions[i].landmarks[index]);
 }
 }
@@ -40,26 +42,28 @@ export async function predict(input, config): Promise<Hand[]> {
 (predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / input.shape[2],
 (predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / input.shape[1],
 ];
-hands.push({ id: i, confidence: Math.round(100 * predictions[i].confidence) / 100, box, boxRaw, landmarks: predictions[i].landmarks, annotations });
+const landmarks = predictions[i].landmarks as number[];
+hands.push({ id: i, confidence: Math.round(100 * predictions[i].confidence) / 100, box, boxRaw, landmarks, annotations });
 }
 return hands;
 }
-export async function load(config): Promise<[Object, Object]> {
+export async function load(config): Promise<[unknown, unknown]> {
 if (!handDetectorModel || !handPoseModel) {
+// @ts-ignore type mismatch on GraphModel
 [handDetectorModel, handPoseModel] = await Promise.all([
 config.hand.enabled ? tf.loadGraphModel(join(config.modelBasePath, config.hand.detector.modelPath), { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
 config.hand.landmarks ? tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton.modelPath), { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
 ]);
 if (config.hand.enabled) {
-if (!handDetectorModel || !handDetectorModel.modelUrl) log('load model failed:', config.hand.detector.modelPath);
-else if (config.debug) log('load model:', handDetectorModel.modelUrl);
-if (!handPoseModel || !handPoseModel.modelUrl) log('load model failed:', config.hand.skeleton.modelPath);
-else if (config.debug) log('load model:', handPoseModel.modelUrl);
+if (!handDetectorModel || !handDetectorModel['modelUrl']) log('load model failed:', config.hand.detector.modelPath);
+else if (config.debug) log('load model:', handDetectorModel['modelUrl']);
+if (!handPoseModel || !handPoseModel['modelUrl']) log('load model failed:', config.hand.skeleton.modelPath);
+else if (config.debug) log('load model:', handPoseModel['modelUrl']);
 }
 } else {
-if (config.debug) log('cached model:', handDetectorModel.modelUrl);
-if (config.debug) log('cached model:', handPoseModel.modelUrl);
+if (config.debug) log('cached model:', handDetectorModel['modelUrl']);
+if (config.debug) log('cached model:', handPoseModel['modelUrl']);
 }
 const handDetector = new handdetector.HandDetector(handDetectorModel);
 handPipeline = new handpipeline.HandPipeline(handDetector, handPoseModel);

View File

@@ -18,9 +18,7 @@ import * as image from './image/image';
 import * as draw from './draw/draw';
 import * as sample from './sample';
 import * as app from '../package.json';
+import { Tensor } from './tfjs/types';
-/** Generic Tensor object type */
-export type Tensor = typeof tf.Tensor;
 export type { Config } from './config';
 export type { Result, Face, Hand, Body, Item, Gesture } from './result';
@@ -36,7 +34,7 @@ export type Error = { error: string };
 export type TensorFlow = typeof tf;
 /** Generic Model object type, holds instance of individual models */
-type Model = Object;
+type Model = unknown;
 /**
 * **Human** library main class
@@ -114,7 +112,7 @@ export class Human {
 /** Platform and agent information detected by Human */
 sysinfo: { platform: string, agent: string };
 /** Performance object that contains values for all recently performed operations */
-perf: any; // perf members are dynamically defined as needed
+perf: Record<string, unknown>; // perf members are dynamically defined as needed
 #numTensors: number;
 #analyzeMemoryLeaks: boolean;
 #checkSanity: boolean;
@@ -128,7 +126,7 @@ export class Human {
 * Creates instance of Human library that is futher used for all operations
 * - @param userConfig: {@link Config}
 */
-constructor(userConfig: Config | Object = {}) {
+constructor(userConfig: Config | Record<string, unknown> = {}) {
 this.tf = tf;
 this.draw = draw;
 this.version = app.version;
@@ -215,6 +213,7 @@
 */
 // eslint-disable-next-line class-methods-use-this
 enhance(input: Tensor): Tensor | null {
+// @ts-ignore type mismach for Tensor
 return faceres.enhance(input);
 }
@@ -233,7 +232,7 @@
 /** Load method preloads all configured models on-demand
 * - Not explicitly required as any required model is load implicitly on it's first run
 */
-async load(userConfig: Config | Object = {}) {
+async load(userConfig: Config | Record<string, unknown> = {}) {
 this.state = 'load';
 const timeStamp = now();
 if (userConfig) this.config = mergeDeep(this.config, userConfig);
@@ -287,7 +286,7 @@
 }
 const current = Math.trunc(now() - timeStamp);
-if (current > (this.perf.load || 0)) this.perf.load = current;
+if (current > (this.perf.load as number || 0)) this.perf.load = current;
 }
 // check if backend needs initialization if it changed
@@ -385,7 +384,7 @@
 * - Run inference for all configured models
 * - Process and return result: {@link Result}
 */
-async detect(input: Input, userConfig: Config | Object = {}): Promise<Result | Error> {
+async detect(input: Input, userConfig: Config | Record<string, unknown> = {}): Promise<Result | Error> {
 // detection happens inside a promise
 return new Promise(async (resolve) => {
 this.state = 'config';
@@ -442,7 +441,7 @@
 this.config.skipFrame = await this.#skipFrame(process.tensor);
 if (!this.perf.frames) this.perf.frames = 0;
 if (!this.perf.cached) this.perf.cached = 0;
-this.perf.frames++;
+(this.perf.frames as number)++;
 // @ts-ignore hidden dynamic property that is not part of definitions
 if (this.config.skipFrame) this.perf.cached++;
 this.perf.changed = Math.trunc(now() - timeStamp);
@@ -629,7 +628,7 @@
 * - can take significant time on startup
 * - only used for `webgl` and `humangl` backends
 */
-async warmup(userConfig: Config | Object = {}): Promise<Result | { error }> {
+async warmup(userConfig: Config | Record<string, unknown> = {}): Promise<Result | { error }> {
 const t0 = now();
 if (userConfig) this.config = mergeDeep(this.config, userConfig);
 if (!this.config.warmup || this.config.warmup === 'none') return { error: 'null' };
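For reference, the widened constructor, load, detect, and warmup signatures above accept either a full Config or a plain partial object. A minimal usage sketch under that assumption; the package import path and the video element are illustrative, not taken from this diff:

import Human from '@vladmandic/human'; // assumes the package default export

async function demo() {
  const human = new Human({ debug: true });  // plain partial config object, merged over defaults
  await human.load();                        // optional preload; models otherwise load on first detect()
  const result = await human.detect(document.getElementById('video') as HTMLVideoElement);
  if (!('error' in result)) console.log(result.performance); // performance is now Record<string, unknown>
}

demo();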

View File

@@ -127,7 +127,9 @@ export function decode(offsets, scores, displacementsFwd, displacementsBwd, maxD
 // The top element in the queue is the next root candidate.
 const root = queue.dequeue();
 // Part-based non-maximum suppression: We reject a root candidate if it is within a disk of `nmsRadius` pixels from the corresponding part of a previously detected instance.
+// @ts-ignore this one is tree walk
 const rootImageCoords = utils.getImageCoords(root.part, outputStride, offsets);
+// @ts-ignore this one is tree walk
 if (withinRadius(poses, rootImageCoords, root.part.id)) continue;
 // Else start a new detection instance at the position of the root.
 let keypoints = decodePose(root, scores, offsets, displacementsFwd, displacementsBwd);

View File

@@ -49,9 +49,9 @@ export function scalePoses(poses, [height, width], [inputResolutionHeight, input
 // algorithm based on Coursera Lecture from Algorithms, Part 1: https://www.coursera.org/learn/algorithms-part1/lecture/ZjoSM/heapsort
 export class MaxHeap {
-priorityQueue: Array<any>; // don't touch
+priorityQueue: Array<unknown>; // don't touch
 numberOfElements: number;
-getElementValue: any; // function call
+getElementValue: unknown; // function call
 constructor(maxSize, getElementValue) {
 this.priorityQueue = new Array(maxSize);
@@ -98,6 +98,7 @@ export class MaxHeap {
 }
 getValueAt(i) {
+// @ts-ignore getter is of unknown type
 return this.getElementValue(this.priorityQueue[i]);
 }

View File

@@ -2,16 +2,19 @@ import { log } from './helpers';
 export const data = {};
-export function run(modelName: string, profileData: any): void { // profileData is tfjs internal type
+export function run(modelName: string, profileData: Record<string, unknown>): void { // profileData is tfjs internal type
 if (!profileData || !profileData.kernels) return;
 const maxDetected = 5;
+// @ts-ignore profileData.kernels is tfjs internal type
 const time = profileData.kernels
 .filter((a) => a.kernelTimeMs > 0)
 .reduce((a, b) => a += b.kernelTimeMs, 0);
+// @ts-ignore profileData.kernels is tfjs internal type
 const slowest = profileData.kernels
 .map((a, i) => { a.id = i; return a; })
 .filter((a) => a.kernelTimeMs > 0)
 .sort((a, b) => b.kernelTimeMs - a.kernelTimeMs);
+// @ts-ignore profileData.kernels is tfjs internal type
 const largest = profileData.kernels
 .map((a, i) => { a.id = i; return a; })
 .filter((a) => a.totalBytesSnapshot > 0)
@@ -23,7 +26,7 @@ export function run(modelName: string, profileData: any): void { // profileData
 newBytes: profileData.newBytes,
 newTensors: profileData.newTensors,
 peakBytes: profileData.peakBytes,
-numKernelOps: profileData.kernels.length,
+numKernelOps: (profileData['kernels'] as Array<unknown>).length,
 timeKernelOps: time,
 slowestKernelOps: slowest,
 largestKernelOps: largest,

View File

@@ -99,9 +99,7 @@ export interface Hand {
 confidence: number,
 box: [number, number, number, number],
 boxRaw: [number, number, number, number],
-landmarks: Array<[number, number, number]>,
-// annotations: Array<{ part: string, points: Array<[number, number, number]> }>,
-// annotations: Annotations,
+landmarks: number[],
 annotations: Record<string, Array<{ part: string, points: Array<[number, number, number]> }>>,
 }
@@ -151,7 +149,7 @@ export interface Result {
 gesture: Array<Gesture>,
 /** {@link Object}: detection & analysis results */
 object: Array<Item>
-performance: { any },
+performance: Record<string, unknown>,
 canvas: OffscreenCanvas | HTMLCanvasElement,
 timestamp: number,
 }

View File

@@ -5,7 +5,7 @@ export const config = {
 name: 'humangl',
 priority: 99,
 canvas: <null | OffscreenCanvas | HTMLCanvasElement>null,
-gl: <any>null,
+gl: <unknown>null,
 width: 1024,
 height: 1024,
 webGLattr: { // https://www.khronos.org/registry/webgl/specs/latest/1.0/#5.2

src/tfjs/types.ts (new file, 3 additions)
View File

@@ -0,0 +1,3 @@
+// export common types
+export { Tensor } from '@tensorflow/tfjs-core/dist/index';
+export { GraphModel } from '@tensorflow/tfjs-converter/dist/index';
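The new module gives the rest of the source a single import point for the two tfjs types used throughout this commit. A short sketch of how the re-exports are consumed, condensed from the hand detector changes above; the helper name is illustrative:

import { Tensor, GraphModel } from '../tfjs/types';

let model: GraphModel; // assigned by the module's load(), as in the diffs above

// inference output asserted to Tensor, mirroring HandDetector.getBoxes
export function runInference(input: Tensor): Float32Array {
  const batched = model.predict(input) as Tensor;
  const data = batched.dataSync() as Float32Array;
  batched.dispose();
  return data;
}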