strict type checks

pull/356/head
Vladimir Mandic 2021-08-17 08:51:17 -04:00
parent 71f25a8f12
commit f9a4f741a9
22 changed files with 70 additions and 102 deletions

View File

@ -9,7 +9,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
## Changelog
### **HEAD -> main** 2021/08/14 mandic00@live.com
### **HEAD -> main** 2021/08/15 mandic00@live.com
- experimental webgpu support
- add experimental webgpu demo

14
TODO.md
View File

@ -1,12 +1,5 @@
# To-Do list for Human library
## Big Ticket Items
Implementation of WebGPU backend
*Target: `Human` v2.3 with `Chrome` v94 and `TFJS` v4.0*
<br>
## Work in Progress
WebGL shader optimizations for faster load and initial detection
@ -69,10 +62,15 @@ Object detection using CenterNet or NanoDet models is not working when using WAS
<https://github.com/tensorflow/tfjs/issues/4824>
*Target: `Human` v2.2 with `TFJS` v3.9*
### WebGPU
### WebGPU Backend
Implementation of WebGPU backend
Experimental support only
*Target: `Human` v2.3 with `Chrome` v94 and `TFJS` v4.0*
<br>
- Backend WebGPU missing kernel ops
<https://github.com/tensorflow/tfjs/issues/5496>
- Backend WebGPU incompatible with web workers

View File

@ -4,8 +4,6 @@
/// <reference lib="webworker" />
// // @ts-nocheck Linting of ServiceWorker is not supported for JS files
const skipCaching = false;
const cacheName = 'Human';

View File

@ -1,3 +1,5 @@
/// <reference lib="webworker" />
// load Human using IIFE script as Chrome Mobile does not support Modules as Workers
// import Human from '../dist/human.esm.js';
self.importScripts('../dist/human.js');
@ -34,12 +36,9 @@ onmessage = async (msg) => {
if (ctx) ctx.drawImage(result.canvas, 0, 0);
const img = ctx ? ctx.getImageData(0, 0, result.canvas.width, result.canvas.height) : null;
result.canvas = null; // must strip original canvas from return value as it cannot be transferred from worker thread
// @ts-ignore tslint wrong type matching for worker
if (img) postMessage({ result, image: img.data.buffer, width: msg.data.width, height: msg.data.height }, [img.data.buffer]);
// @ts-ignore tslint wrong type matching for worker
else postMessage({ result });
} else {
// @ts-ignore tslint wrong type matching for worker
postMessage({ result });
}
busy = false;

View File

@ -1,5 +1,7 @@
// load Human using IIFE script as Chrome Mobile does not support Modules as Workers
/// <reference lib="webworker" />
// import Human from '../dist/human.esm.js';
self.importScripts('../../dist/human.js');
@ -15,6 +17,5 @@ onmessage = async (msg) => {
const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
let result = {};
result = await human.detect(image, msg.data.config);
// @ts-ignore tslint wrong type matching for worker
postMessage({ result: result[msg.data.type], type: msg.data.type });
};

View File

@ -67,8 +67,8 @@
"@tensorflow/tfjs-node": "^3.8.0",
"@tensorflow/tfjs-node-gpu": "^3.8.0",
"@types/node": "^16.6.1",
"@typescript-eslint/eslint-plugin": "^4.29.1",
"@typescript-eslint/parser": "^4.29.1",
"@typescript-eslint/eslint-plugin": "^4.29.2",
"@typescript-eslint/parser": "^4.29.2",
"@vladmandic/pilogger": "^0.2.18",
"canvas": "^2.8.0",
"chokidar": "^3.5.2",

View File

@ -16,8 +16,7 @@ let skipped = Number.MAX_SAFE_INTEGER;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function load(config: Config | any) {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.face.age.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);
@ -33,7 +32,7 @@ export async function predict(image: Tensor, config: Config | any) {
}
skipped = 0;
return new Promise(async (resolve) => {
if (!model.inputs[0].shape) return;
if (!model.inputs || !model.inputs[0] || !model.inputs[0].shape) return;
const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
const enhance = tf.mul(resize, [255.0]);
tf.dispose(resize);

View File

@ -278,16 +278,13 @@ export async function body(inCanvas: HTMLCanvasElement, result: Array<Body>, dra
ctx.lineWidth = localOptions.lineWidth;
ctx.font = localOptions.font;
if (localOptions.drawBoxes && result[i].box && result[i].box?.length === 4) {
// @ts-ignore box may not exist
rect(ctx, result[i].box[0], result[i].box[1], result[i].box[2], result[i].box[3], localOptions);
if (localOptions.drawLabels) {
if (localOptions.shadowColor && localOptions.shadowColor !== '') {
ctx.fillStyle = localOptions.shadowColor;
// @ts-ignore box may not exist
ctx.fillText(`body ${100 * result[i].score}%`, result[i].box[0] + 3, 1 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
}
ctx.fillStyle = localOptions.labelColor;
// @ts-ignore box may not exist
ctx.fillText(`body ${100 * result[i].score}%`, result[i].box[0] + 2, 0 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
}
}

View File

@ -22,8 +22,7 @@ const bodyParts = ['head', 'neck', 'rightShoulder', 'rightElbow', 'rightWrist',
export async function load(config: Config): Promise<GraphModel> {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);

View File

@ -162,7 +162,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
parent.analyze('Get Face');
// if something went wrong, skip the face
// @ts-ignore possibly undefined
// @ts-ignore possibly undefined
if (!faces[i].tensor || faces[i].tensor['isDisposedInternal']) {
log('Face object is disposed:', faces[i].tensor);
continue;

View File

@ -25,8 +25,7 @@ type DB = Array<{ name: string, source: string, embedding: number[] }>;
export async function load(config: Config): Promise<GraphModel> {
const modelUrl = join(config.modelBasePath, config.face.description.modelPath);
if (!model) {
// @ts-ignore type mismatch for GraphModel
model = await tf.loadGraphModel(modelUrl);
model = await tf.loadGraphModel(modelUrl) as unknown as GraphModel;
if (!model) log('load model failed:', config.face.description.modelPath);
else if (config.debug) log('load model:', modelUrl);
} else if (config.debug) log('cached model:', modelUrl);

View File

@ -19,8 +19,7 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function load(config: Config | any) {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.gender.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.gender.modelPath)) as unknown as GraphModel;
alternative = model.inputs[0].shape ? model.inputs[0]?.shape[3] === 1 : false;
if (!model || !model['modelUrl']) log('load model failed:', config.face.gender.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);

View File

@ -15,8 +15,7 @@ export class HandDetector {
this.model = model;
this.anchors = anchors.anchors.map((anchor) => [anchor.x, anchor.y]);
this.anchorsTensor = tf.tensor2d(this.anchors);
// @ts-ignore model is not undefined here
this.inputSize = this.model?.inputs[0].shape[2];
this.inputSize = (this.model && this.model.inputs && this.model.inputs[0].shape) ? this.model.inputs[0].shape[2] : 0;
this.inputSizeTensor = tf.tensor1d([this.inputSize, this.inputSize]);
this.doubleInputSizeTensor = tf.tensor1d([this.inputSize * 2, this.inputSize * 2]);
}

View File

@ -71,10 +71,9 @@ export async function predict(input: Tensor, config: Config): Promise<Hand[]> {
export async function load(config: Config): Promise<[GraphModel | null, GraphModel | null]> {
if (!handDetectorModel || !handPoseModel) {
// @ts-ignore type mismatch on GraphModel
[handDetectorModel, handPoseModel] = await Promise.all([
config.hand.enabled ? tf.loadGraphModel(join(config.modelBasePath, config.hand.detector.modelPath), { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
config.hand.landmarks ? tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton.modelPath), { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
config.hand.enabled ? tf.loadGraphModel(join(config.modelBasePath, config.hand.detector.modelPath), { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) as unknown as GraphModel : null,
config.hand.landmarks ? tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton.modelPath), { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) as unknown as GraphModel : null,
]);
if (config.hand.enabled) {
if (!handDetectorModel || !handDetectorModel['modelUrl']) log('load model failed:', config.hand.detector.modelPath);

View File

@ -41,7 +41,7 @@ export function mergeDeep(...objects) {
}
// helper function: return min and max from input array
export const minmax = (data) => data.reduce((acc, val) => {
export const minmax = (data: Array<number>) => data.reduce((acc: Array<number>, val) => {
acc[0] = (acc[0] === undefined || val < acc[0]) ? val : acc[0];
acc[1] = (acc[1] === undefined || val > acc[1]) ? val : acc[1];
return acc;

View File

@ -4,7 +4,7 @@
import { log, now, mergeDeep } from './helpers';
import { Config, defaults } from './config';
import { Result, Gesture } from './result';
import { Result, Face, Hand, Body, Item, Gesture } from './result';
import * as sysinfo from './sysinfo';
import * as tf from '../dist/tfjs.esm.js';
import * as backend from './tfjs/backend';
@ -166,6 +166,7 @@ export class Human {
faceres: null,
segmentation: null,
};
this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
// export access to image processing
// @ts-ignore eslint-typescript cannot correctly infer type in anonymous function
this.image = (input: Input) => image.process(input, this.config);
@ -179,7 +180,7 @@ export class Human {
// helper function: measure tensor leak
/** @hidden */
analyze = (...msg) => {
analyze = (...msg: string[]) => {
if (!this.#analyzeMemoryLeaks) return;
const currentTensors = this.tf.engine().state.numTensors;
const previousTensors = this.#numTensors;
@ -190,7 +191,7 @@ export class Human {
// quick sanity check on inputs
/** @hidden */
#sanity = (input): null | string => {
#sanity = (input: Input): null | string => {
if (!this.#checkSanity) return null;
if (!input) return 'input is not defined';
if (this.tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) return 'input must be a tensor';
@ -233,7 +234,6 @@ export class Human {
*/
// eslint-disable-next-line class-methods-use-this
enhance(input: Tensor): Tensor | null {
// @ts-ignore type mismach for Tensor
return faceres.enhance(input);
}
@ -391,9 +391,10 @@ export class Human {
// check if input changed sufficiently to trigger new detections
/** @hidden */
#skipFrame = async (input) => {
#skipFrame = async (input: Tensor) => {
if (this.config.cacheSensitivity === 0) return false;
const resizeFact = 32;
if (!input.shape[1] || !input.shape[2]) return false;
const reduced: Tensor = tf.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
// use tensor sum
/*
@ -453,23 +454,6 @@ export class Human {
// load models if enabled
await this.load();
/*
// function disabled in favor of inputChanged
// disable video optimization for inputs of type image, but skip if inside worker thread
let previousVideoOptimized;
// @ts-ignore ignore missing type for WorkerGlobalScope as that is the point
if (input && this.config.videoOptimized && (typeof window !== 'undefined') && (typeof WorkerGlobalScope !== 'undefined') && (
(typeof HTMLImageElement !== 'undefined' && input instanceof HTMLImageElement)
|| (typeof Image !== 'undefined' && input instanceof Image)
|| (typeof ImageData !== 'undefined' && input instanceof ImageData)
|| (typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap))
) {
log('disabling video optimization');
previousVideoOptimized = this.config.videoOptimized;
this.config.videoOptimized = false;
}
*/
timeStamp = now();
let process = image.process(input, this.config);
this.performance.image = Math.trunc(now() - timeStamp);
@ -508,10 +492,10 @@ export class Human {
// prepare where to store model results
// keep them with weak typing as it can be promise or not
let faceRes;
let bodyRes;
let handRes;
let objectRes;
let faceRes: Face[] | Promise<Face[]> | never[] = [];
let bodyRes: Body[] | Promise<Body[]> | never[] = [];
let handRes: Hand[] | Promise<Hand[]> | never[] = [];
let objectRes: Item[] | Promise<Item[]> | never[] = [];
// run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
if (this.config.async) {
@ -590,15 +574,15 @@ export class Human {
this.performance.total = Math.trunc(now() - timeStart);
this.state = 'idle';
this.result = {
face: faceRes,
body: bodyRes,
hand: handRes,
face: faceRes as Face[],
body: bodyRes as Body[],
hand: handRes as Hand[],
gesture: gestureRes,
object: objectRes,
object: objectRes as Item[],
performance: this.performance,
canvas: process.canvas,
timestamp: Date.now(),
get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes, process?.tensor?.shape); },
get persons() { return persons.join(faceRes as Face[], bodyRes as Body[], handRes as Hand[], gestureRes, process?.tensor?.shape); },
};
// finally dispose input tensor
@ -611,7 +595,7 @@ export class Human {
/** @hidden */
#warmupBitmap = async () => {
const b64toBlob = (base64, type = 'application/octet-stream') => fetch(`data:${type};base64,${base64}`).then((res) => res.blob());
const b64toBlob = (base64: string, type = 'application/octet-stream') => fetch(`data:${type};base64,${base64}`).then((res) => res.blob());
let blob;
let res;
switch (this.config.warmup) {
@ -662,7 +646,7 @@ export class Human {
/** @hidden */
#warmupNode = async () => {
const atob = (str) => Buffer.from(str, 'base64');
const atob = (str: string) => Buffer.from(str, 'base64');
let img;
if (this.config.warmup === 'face') img = atob(sample.face);
if (this.config.warmup === 'body' || this.config.warmup === 'full') img = atob(sample.body);

View File

@ -14,7 +14,7 @@ const maxSize = 2048;
let inCanvas;
let outCanvas;
// instance of fximage
let fx;
let fx: fxImage.GLImageFilter | null;
// process input image and return tensor
// input can be tensor, imagedata, htmlimageelement, htmlvideoelement

View File

@ -18,29 +18,17 @@ import * as segmentation from './segmentation/segmentation';
export async function load(instance) {
if (instance.config.async) { // load models concurrently
[
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.face,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.emotion,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.handpose,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.posenet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.blazepose,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.efficientpose,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.movenet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.nanodet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.centernet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.faceres,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.segmentation,
// @ts-ignore models loaded via promise array cannot be correctly inferred
// instance.models.agegenderrace,
] = await Promise.all([
instance.models.face || (instance.config.face.enabled ? facemesh.load(instance.config) : null),

View File

@ -22,8 +22,7 @@ const bodyParts = ['nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar', 'leftSh
export async function load(config: Config): Promise<GraphModel> {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);

View File

@ -44,7 +44,7 @@ export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>
// create new overarching box from all boxes belonging to person
const x: number[] = [];
const y: number[] = [];
const extractXY = (box) => { // extract all [x, y] coordinates from boxes [x, y, width, height]
const extractXY = (box: [number, number, number, number] | undefined) => { // extract all [x, y] coordinates from boxes [x, y, width, height]
if (box && box.length === 4) {
x.push(box[0], box[0] + box[2]);
y.push(box[1], box[1] + box[3]);
@ -59,7 +59,7 @@ export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>
person.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY]; // create new overarching box
// shape is known so we calculate boxRaw as well
if (shape && shape.length === 4) person.boxRaw = [person.box[0] / shape[2], person.box[1] / shape[1], person.box[2] / shape[2], person.box[3] / shape[1]];
if (shape && shape[1] && shape[2]) person.boxRaw = [person.box[0] / shape[2], person.box[1] / shape[1], person.box[2] / shape[2], person.box[3] / shape[1]];
persons.push(person);
}

View File

@ -1,11 +1,11 @@
import * as kpt from './keypoints';
import { Body } from '../result';
export function eitherPointDoesntMeetConfidence(a, b, minConfidence) {
export function eitherPointDoesntMeetConfidence(a: number, b: number, minConfidence: number) {
  // True when at least one of the two keypoint scores falls below the threshold.
  const belowThreshold = (score: number) => score < minConfidence;
  return belowThreshold(a) || belowThreshold(b);
}
export function getAdjacentKeyPoints(keypoints, minConfidence) {
export function getAdjacentKeyPoints(keypoints, minConfidence: number) {
return kpt.connectedPartIndices.reduce((result, [leftJoint, rightJoint]) => {
if (eitherPointDoesntMeetConfidence(keypoints[leftJoint].score, keypoints[rightJoint].score, minConfidence)) {
return result;

View File

@ -1,36 +1,46 @@
{
"compilerOptions": {
"noEmitOnError": false,
"module": "es2020",
"target": "es2018",
"moduleResolution": "node",
"typeRoots": ["node_modules/@types"],
"outDir": "types",
"baseUrl": "./",
"paths": { "tslib": ["node_modules/tslib/tslib.d.ts"] },
"noEmitOnError": false,
"declaration": true,
"allowJs": true,
"allowSyntheticDefaultImports": true,
"allowUnreachableCode": false,
"allowUnusedLabels": false,
"alwaysStrict": true,
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"importHelpers": true,
"noFallthroughCasesInSwitch": true,
"noImplicitAny": false,
"noImplicitOverride": true,
"noImplicitReturns": true,
"noImplicitThis": true,
"noPropertyAccessFromIndexSignature": false,
"noUncheckedIndexedAccess": false,
"noUnusedLocals": false,
"noUnusedParameters": true,
"preserveConstEnums": true,
"pretty": true,
"removeComments": false,
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": true,
"allowJs": true,
"baseUrl": "./",
"paths": { "tslib": ["node_modules/tslib/tslib.d.ts"] },
"strictBindCallApply": true,
"strictFunctionTypes": true,
"strictNullChecks": true,
"noImplicitAny": false,
"noUnusedLocals": false,
"noImplicitReturns": true,
"noImplicitThis": true,
"alwaysStrict": true,
"noUnusedParameters": true,
"pretty": true,
"noFallthroughCasesInSwitch": true,
"allowUnreachableCode": false
"strictPropertyInitialization": true
},
"formatCodeOptions": {
"indentSize": 2,
"tabSize": 2
},
"formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
"exclude": ["node_modules/", "types/", "tfjs/", "dist/"],
"include": ["src"],
"typedocOptions": {