update typescript

pull/356/head
Vladimir Mandic 2021-11-17 16:50:21 -05:00
parent eb65cabf31
commit 7517ac2d8f
16 changed files with 43 additions and 47 deletions

CHANGELOG.md

@@ -9,8 +9,9 @@
 ## Changelog

-### **HEAD -> main** 2021/11/16 mandic00@live.com
+### **HEAD -> main** 2021/11/17 mandic00@live.com

+- optimize centernet
 - cache frequent tf constants
 - add extra face rotation prior to mesh
 - release 2.5.2

README.md

@@ -331,7 +331,7 @@ For more info, see [**Configuration Details**](https://github.com/vladmandic/hum

 <br><hr><br>

-`Human` library is written in `TypeScript` [4.4](https://www.typescriptlang.org/docs/handbook/intro.html)
+`Human` library is written in `TypeScript` [4.5](https://www.typescriptlang.org/docs/handbook/intro.html)
 Conforming to latest `JavaScript` [ECMAScript version 2021](https://262.ecma-international.org/) standard
 Build target is `JavaScript` [ECMAScript version 2018](https://262.ecma-international.org/11.0/)

TODO.md

@@ -43,13 +43,14 @@ MoveNet MultiPose model does not work with WASM backend due to missing F32 broad
 ## Pending Release Notes

 New:
-- new demo `demos/faceid` that utilizes multiple algorithms to validate input before triggering face recognition
-- new optional model `liveness`
+- New type definitions rollup
+- New demo `demos/faceid` that utilizes multiple algorithms to validate input before triggering face recognition
+- New optional model `liveness`
   checks if input appears to be a real-world live image or a recording
   best used together with `antispoofing` that checks if input appears to have a realistic face
-- new face masking option in `face.config.detector.mask`
+- New face masking option in `face.config.detector.mask`
   result is shading of the face image outside of the face area, which is useful for increased sensitivity of other modules that rely on detected face as input
-- new face crop option in `face.config.detector.cropFactor`
+- New face crop option in `face.config.detector.cropFactor`
   result is user-definable fine-tuning for other modules that rely on detected face as input
 Other:
@@ -60,14 +61,3 @@ Other:
 - Documentation overhaul
 - Fix for optional `gear`, `ssrnet`, `mobilefacenet` modules
 - Fix for Firefox WebGPU compatibility issue
-
-```
-cp tfjs/tfjs.esm.d.ts types/lib/dist/
-node_modules/.bin/api-extractor run --local --verbose
-cp types/human.d.ts dist/human.esm-nobundle.d.ts
-cp types/human.d.ts dist/human.esm.d.ts
-cp types/human.d.ts dist/human.d.ts
-cp types/human.d.ts dist/human.node-gpu.d.ts
-cp types/human.d.ts dist/human.node.d.ts
-cp types/human.d.ts dist/human.node-wasm.d.ts
-```
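
The deleted shell commands are the manual type-rollup steps that the "New type definitions rollup" item supersedes, presumably now automated as part of the build. The face-related items above are all configuration-driven; a minimal sketch of enabling them together, assuming the masking and crop options map onto the detector section of the face config as the quoted paths suggest, and that `liveness` follows the same enable pattern as the other face modules (the `cropFactor` value is a hypothetical example, not a documented default):

```ts
import { Human } from '@vladmandic/human';

// a minimal sketch, assuming the option paths quoted in the release notes above
const human = new Human({
  face: {
    detector: {
      mask: true,      // shade the image outside of the detected face area
      cropFactor: 1.6, // hypothetical value: fine-tune the face crop passed to downstream modules
    },
    antispoof: { enabled: true }, // checks if input appears to have a realistic face
    liveness: { enabled: true },  // checks if input appears to be a live image, not a recording
  },
});
```

Used together, `antispoof` and `liveness` can gate recognition the way the new `demos/faceid` demo does: face recognition only proceeds once the validation checks pass.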

package.json

@@ -54,6 +54,7 @@
   ],
   "devDependencies": {
     "@microsoft/api-extractor": "^7.18.19",
+    "@tensorflow/tfjs": "^3.11.0",
     "@tensorflow/tfjs-backend-cpu": "^3.11.0",
     "@tensorflow/tfjs-backend-wasm": "^3.11.0",
     "@tensorflow/tfjs-backend-webgl": "^3.11.0",
@@ -62,9 +63,8 @@
     "@tensorflow/tfjs-core": "^3.11.0",
     "@tensorflow/tfjs-data": "^3.11.0",
     "@tensorflow/tfjs-layers": "^3.11.0",
-    "@tensorflow/tfjs-node-gpu": "^3.11.0",
     "@tensorflow/tfjs-node": "^3.11.0",
-    "@tensorflow/tfjs": "^3.11.0",
+    "@tensorflow/tfjs-node-gpu": "^3.11.0",
     "@types/node": "^16.11.7",
     "@typescript-eslint/eslint-plugin": "^5.4.0",
     "@typescript-eslint/parser": "^5.4.0",
@@ -74,18 +74,18 @@
     "canvas": "^2.8.0",
     "dayjs": "^1.10.7",
     "esbuild": "^0.13.14",
+    "eslint": "8.2.0",
     "eslint-config-airbnb-base": "^15.0.0",
     "eslint-plugin-html": "^6.2.0",
     "eslint-plugin-import": "^2.25.3",
     "eslint-plugin-json": "^3.1.0",
     "eslint-plugin-node": "^11.1.0",
     "eslint-plugin-promise": "^5.1.1",
-    "eslint": "8.2.0",
     "node-fetch": "^3.1.0",
     "rimraf": "^3.0.2",
     "seedrandom": "^3.0.5",
     "tslib": "^2.3.1",
     "typedoc": "0.22.9",
-    "typescript": "4.4.4"
+    "typescript": "4.5.2"
   }
 }

@@ -24,7 +24,7 @@ export async function load(config: Config): Promise<GraphModel> {
   return model;
 }

-export async function predict(image: Tensor, config: Config, idx, count): Promise<number> {
+export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<number> {
   if (!model) return 0;
   const skipTime = (config.face.antispoof?.skipTime || 0) > (now() - lastTime);
   const skipFrame = skipped < (config.face.antispoof?.skipFrames || 0);
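
The two `skip…` guards that appear as context here (and in the next three files) are the library's standard frame cache: inference is skipped and the previous result reused while both the elapsed-time and elapsed-frame budgets are unexpired. A distilled, standalone sketch of the pattern, not the module's actual code:

```ts
// distilled sketch of the skipTime/skipFrames guard shared by these face modules
let lastTime = 0;                      // timestamp of the last real inference
let skipped = Number.MAX_SAFE_INTEGER; // frames since the last real inference
let cached = 0;                        // last computed score

async function predict(runModel: () => Promise<number>, cfg: { skipTime?: number, skipFrames?: number }): Promise<number> {
  const skipTime = (cfg.skipTime || 0) > (performance.now() - lastTime); // time budget unexpired?
  const skipFrame = skipped < (cfg.skipFrames || 0);                     // frame budget unexpired?
  if (skipTime && skipFrame) {
    skipped++;               // count the skipped frame and reuse the cached score
    return cached;
  }
  cached = await runModel(); // budgets expired: run the model and reset both counters
  lastTime = performance.now();
  skipped = 0;
  return cached;
}
```

In the real modules the guard also factors in the requested face index, which is why the `idx` and `count` parameters now carry explicit `number` types.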

@@ -24,7 +24,7 @@ export async function load(config: Config): Promise<GraphModel> {
   return model;
 }

-export async function predict(image: Tensor, config: Config, idx, count): Promise<number> {
+export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<number> {
   if (!model) return 0;
   const skipTime = (config.face.liveness?.skipTime || 0) > (now() - lastTime);
   const skipFrame = skipped < (config.face.liveness?.skipFrames || 0);

@@ -28,7 +28,7 @@ export async function load(config: Config): Promise<GraphModel> {
   return model;
 }

-export async function predict(image: Tensor, config: Config, idx, count): Promise<Array<{ score: number, emotion: string }>> {
+export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<Array<{ score: number, emotion: string }>> {
   if (!model) return [];
   const skipFrame = skipped < (config.face.emotion?.skipFrames || 0);
   const skipTime = (config.face.emotion?.skipTime || 0) > (now() - lastTime);

@@ -31,7 +31,7 @@ export async function load(config: Config) {
 }

 // eslint-disable-next-line @typescript-eslint/no-explicit-any
-export async function predict(image: Tensor, config: Config, idx, count): Promise<{ age: number }> {
+export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<{ age: number }> {
   if (!model) return { age: 0 };
   const skipFrame = skipped < (config.face['ssrnet']?.skipFrames || 0);
   const skipTime = (config.face['ssrnet']?.skipTime || 0) > (now() - lastTime);

@@ -2,7 +2,7 @@
  * Gesture detection algorithm
  */

-import type { GestureResult } from '../result';
+import type { GestureResult, BodyResult, FaceResult, HandResult, Point } from '../result';
 import * as fingerPose from '../hand/fingerpose';

 /** face gesture type */
@@ -31,7 +31,7 @@ export type HandGesture =
   | 'victory'
   | 'thumbs up';

-export const body = (res): GestureResult[] => {
+export const body = (res: BodyResult[]): GestureResult[] => {
   if (!res) return [];
   const gestures: Array<{ body: number, gesture: BodyGesture }> = [];
   for (let i = 0; i < res.length; i++) {
@@ -53,12 +53,12 @@ export const body = (res): GestureResult[] => {
   return gestures;
 };

-export const face = (res): GestureResult[] => {
+export const face = (res: FaceResult[]): GestureResult[] => {
   if (!res) return [];
   const gestures: Array<{ face: number, gesture: FaceGesture }> = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 450) {
-      const zDiff = res[i].mesh[33][2] - res[i].mesh[263][2];
+      const zDiff = (res[i].mesh[33][2] || 0) - (res[i].mesh[263][2] || 0);
       const xDiff = res[i].mesh[33][0] - res[i].mesh[263][0];
       if (Math.abs(zDiff / xDiff) <= 0.15) gestures.push({ face: i, gesture: 'facing center' });
       else gestures.push({ face: i, gesture: `facing ${zDiff < 0 ? 'left' : 'right'}` });
@@ -68,14 +68,14 @@ export const face = (res): GestureResult[] => {
       if (openRight < 0.2) gestures.push({ face: i, gesture: 'blink right eye' });
       const mouthOpen = Math.min(100, 500 * Math.abs(res[i].mesh[13][1] - res[i].mesh[14][1]) / Math.abs(res[i].mesh[10][1] - res[i].mesh[152][1]));
       if (mouthOpen > 10) gestures.push({ face: i, gesture: `mouth ${Math.trunc(mouthOpen)}% open` });
-      const chinDepth = res[i].mesh[152][2];
+      const chinDepth = res[i].mesh[152][2] || 0;
       if (Math.abs(chinDepth) > 10) gestures.push({ face: i, gesture: `head ${chinDepth < 0 ? 'up' : 'down'}` });
     }
   }
   return gestures;
 };

-export const iris = (res): GestureResult[] => {
+export const iris = (res: FaceResult[]): GestureResult[] => {
   if (!res) return [];
   const gestures: Array<{ iris: number, gesture: IrisGesture }> = [];
   for (let i = 0; i < res.length; i++) {
@@ -116,18 +116,18 @@ export const iris = (res): GestureResult[] => {
   return gestures;
 };

-export const hand = (res): GestureResult[] => {
+export const hand = (res: HandResult[]): GestureResult[] => {
   if (!res) return [];
   const gestures: Array<{ hand: number, gesture: HandGesture }> = [];
   for (let i = 0; i < res.length; i++) {
-    const fingers: Array<{ name: string, position: number }> = [];
+    const fingers: Array<{ name: string, position: Point }> = [];
     if (res[i]['annotations']) {
       for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
         if (finger !== 'palmBase' && Array.isArray(pos) && pos[0]) fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
       }
     }
     if (fingers && fingers.length > 0) {
-      const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));
+      const closest = fingers.reduce((best, a) => ((best.position[2] || 0) < (a.position[2] || 0) ? best : a));
       gestures.push({ hand: i, gesture: `${closest.name} forward` as HandGesture });
       const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));
       gestures.push({ hand: i, gesture: `${highest.name} up` as HandGesture });
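
The new `|| 0` guards in `face` and `hand` follow directly from typing the inputs: once mesh points and finger positions are `Point` tuples whose third (depth) element is optional, indexing `[2]` yields `number | undefined`, and bare arithmetic stops type-checking. A minimal sketch, assuming `Point` is declared in `result.ts` as a tuple with optional depth:

```ts
// assumption: Point is roughly [x, y, optional z], as in result.ts
type Point = [number, number, number?];

const left: Point = [0.10, 0.25, 0.05];
const right: Point = [0.40, 0.30]; // depth not recorded

// const zDiff = left[2] - right[2]; // compile error: 'left[2]' is possibly 'undefined'
const zDiff = (left[2] || 0) - (right[2] || 0); // the diff's guard: missing depth counts as 0
console.log(zDiff); // 0.05
```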

@@ -347,8 +347,8 @@ export class Human {
       if (kernels[kernel.name]) kernels[kernel.name] += kernel.kernelTimeMs;
       else kernels[kernel.name] = kernel.kernelTimeMs;
     }
-    const kernelArr: Array<{ name, ms }> = [];
-    Object.entries(kernels).forEach((key) => kernelArr.push({ name: key[0], ms: key[1] })); // convert to array
+    const kernelArr: Array<{ name: string, ms: number }> = [];
+    Object.entries(kernels).forEach((key) => kernelArr.push({ name: key[0], ms: key[1] as unknown as number })); // convert to array
     kernelArr.sort((a, b) => b.ms - a.ms); // sort
     kernelArr.length = 20; // crop
     const res: Record<string, number> = {};
@@ -497,7 +497,7 @@ export class Human {
     let gestureRes: GestureResult[] = [];
     if (this.config.gesture.enabled) {
       timeStamp = now();
-      gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
+      gestureRes = [...gesture.face(faceRes as FaceResult[]), ...gesture.body(bodyRes as BodyResult[]), ...gesture.hand(handRes as HandResult[]), ...gesture.iris(faceRes as FaceResult[])];
       if (!this.config.async) this.performance.gesture = this.env.perfadd ? (this.performance.gesture || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
       else if (this.performance.gesture) delete this.performance.gesture;
     }
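
The `as unknown as number` cast in the profiling hunk compensates for `Object.entries` on a loosely typed accumulator: the compiler no longer knows the values are numbers, so they must be reasserted before sorting. A standalone illustration of the same situation (names are illustrative, not the class's internals):

```ts
// illustrative: an accumulator declared without a concrete value type, as in dynamic aggregation
const kernels: Record<string, unknown> = { matMul: 12.5, conv2d: 8.1 };

const kernelArr: Array<{ name: string, ms: number }> = [];
Object.entries(kernels).forEach((key) => kernelArr.push({ name: key[0], ms: key[1] as unknown as number }));
kernelArr.sort((a, b) => b.ms - a.ms); // slowest kernel first
console.log(kernelArr[0]);             // { name: 'matMul', ms: 12.5 }
```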

@@ -4,16 +4,16 @@
  * TensorFlow Tensor type
  * @external
  */
-export { Tensor, TensorLike, Rank } from '@tensorflow/tfjs-core/dist/index';
+export type { Tensor, TensorLike, Rank } from '@tensorflow/tfjs-core/dist/index';

 /**
  * TensorFlow GraphModel type
  * @external
  */
-export { GraphModel } from '@tensorflow/tfjs-converter/dist/index';
+export type { GraphModel } from '@tensorflow/tfjs-converter/dist/index';

 /** Tensorflow Long type
  * @external long
  */
 // eslint-disable-next-line node/no-missing-import
-export { Long } from 'long';
+export type { Long } from 'long';
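
Switching these to `export type` makes the re-exports type-only: they are erased from the emitted JavaScript, so the rolled-up declarations don't drag the tfjs modules in as runtime dependencies. A minimal illustration of the difference, using a hypothetical sibling module:

```ts
// reexport.ts -- assumes a sibling module tensor.ts containing: export class Tensor { /* ... */ }

// value re-export: emits JavaScript that loads './tensor' at runtime
// export { Tensor } from './tensor';

// type-only re-export: fully erased from the emitted JavaScript;
// Tensor remains usable in type positions only
export type { Tensor } from './tensor';
```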

@@ -149,6 +149,7 @@ export class Env {
     const cpu = { model: '', flags: [] };
     if (this.node && this.platform.startsWith('linux')) {
       // eslint-disable-next-line global-require
+      /*
       const fs = require('fs');
       try {
         const data = fs.readFileSync('/proc/cpuinfo').toString();
@@ -160,7 +161,8 @@ export class Env {
             cpu.flags = line.match(/:(.*)/g)[0].replace(':', '').trim().split(' ').sort();
           }
         }
-      } catch { /**/ }
+      } catch { }
+      */
     }
     if (!this['cpu']) Object.defineProperty(this, 'cpu', { value: cpu });
     else this['cpu'] = cpu;

@@ -65,7 +65,7 @@ export const minmax = (data: Array<number>) => data.reduce((acc: Array<number>,
 }, []);

 // helper function: async wait
-export async function wait(time) {
+export async function wait(time: number) {
   const waiting = new Promise((resolve) => { setTimeout(() => resolve(true), time); });
   await waiting;
 }

@@ -48,10 +48,11 @@ async function warmupCanvas(instance: Human) {
     src = null;
   }
   // src = encodeURI('../assets/human-sample-upper.jpg');
-  let img;
+  let img: HTMLImageElement;
   if (typeof Image !== 'undefined') img = new Image();
   // @ts-ignore env.image is an external monkey-patch
   else if (env.Image) img = new env.Image();
+  else return;
   img.onload = async () => {
     const canvas = image.canvas(img.naturalWidth, img.naturalHeight);
     if (!canvas) {
@@ -103,11 +104,13 @@ async function warmupNode(instance: Human) {
  * - only used for `webgl` and `humangl` backends
  * @param userConfig?: Config
  */
-export async function warmup(instance: Human, userConfig?: Partial<Config>): Promise<Result | { error }> {
+export async function warmup(instance: Human, userConfig?: Partial<Config>): Promise<Result> {
   const t0 = now();
   instance.state = 'warmup';
   if (userConfig) instance.config = mergeDeep(instance.config, userConfig) as Config;
-  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === 'none') return { error: 'null' };
+  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === 'none') {
+    return { face: [], body: [], hand: [], gesture: [], object: [], performance: instance.performance, timestamp: now(), persons: [], error: null };
+  }
   let res;
   return new Promise(async (resolve) => {
     if (typeof createImageBitmap === 'function') res = await warmupBitmap(instance);
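
With the `Result | { error }` union gone, a disabled warmup now resolves to an empty but well-formed `Result` (with `error: null`, as the new return value above shows), so callers no longer need to narrow the type before touching result fields. A usage sketch under that assumption:

```ts
import { Human } from '@vladmandic/human';

const human = new Human({ warmup: 'none' }); // warmup explicitly disabled
const result = await human.warmup();
// previously this path resolved to { error: 'null' }; now every field is present
console.log(result.face.length, result.gesture.length); // 0 0
console.log(result.error);                              // null: warmup skipped, not failed
```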

tsconfig.json

@@ -6,7 +6,7 @@
     "outDir": "types",
     "baseUrl": "./",
     "paths": { "tslib": ["./node_modules/tslib/tslib.d.ts"] },
-    "lib": ["esnext", "dom", "webworker"],
+    "lib": ["esnext", "dom"],
     "allowJs": true,
     "allowSyntheticDefaultImports": false,
     "allowUnreachableCode": false,

wiki

@@ -1 +1 @@
-Subproject commit 799273cb94e64d08280479435cc11daedabd5a60
+Subproject commit 7b35db4dca9ba3a41b62ec11257a474b2d12c132