strict type checks

pull/193/head
Vladimir Mandic 2021-08-17 08:51:17 -04:00
parent c97eb67c5b
commit c205c08fb0
45 changed files with 191 additions and 221 deletions


@ -9,7 +9,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
## Changelog
### **HEAD -> main** 2021/08/14 mandic00@live.com
### **HEAD -> main** 2021/08/15 mandic00@live.com
- experimental webgpu support
- add experimental webgpu demo

TODO.md

@ -1,12 +1,5 @@
# To-Do list for Human library
## Big Ticket Items
Implementation of WebGPU backend
*Target: `Human` v2.3 with `Chrome` v94 and `TFJS` v4.0*
<br>
## Work in Progress
WebGL shader optimizations for faster load and initial detection
@ -69,10 +62,15 @@ Object detection using CenterNet or NanoDet models is not working when using WAS
<https://github.com/tensorflow/tfjs/issues/4824>
*Target: `Human` v2.2 with `TFJS` v3.9*
### WebGPU
### WebGPU Backend
Implementation of WebGPU backend
Experimental support only
*Target: `Human` v2.3 with `Chrome` v94 and `TFJS` v4.0*
<br>
- Backend WebGPU missing kernel ops
<https://github.com/tensorflow/tfjs/issues/5496>
- Backend WebGPU incompatible with web workers
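For reference, wiring up the experimental backend in a TFJS build that ships WebGPU kernels might look like the sketch below; the package name follows the upstream tfjs repository, and actual availability depends on the Chrome WebGPU flag, so treat this as an assumption rather than the library's settled API.

```ts
import * as tf from '@tensorflow/tfjs';
import '@tensorflow/tfjs-backend-webgpu'; // registers the experimental backend

async function initBackend(): Promise<void> {
  // fall back to webgl while the browser has no WebGPU support
  const backend = ('gpu' in navigator) ? 'webgpu' : 'webgl';
  await tf.setBackend(backend);
  await tf.ready();
  console.log('active backend:', tf.getBackend());
}
```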


@ -4,8 +4,6 @@
/// <reference lib="webworker" />
// // @ts-nocheck Linting of ServiceWorker is not supported for JS files
const skipCaching = false;
const cacheName = 'Human';


@ -1,3 +1,5 @@
/// <reference lib="webworker" />
// load Human using IIFE script as Chrome Mobile does not support Modules as Workers
// import Human from '../dist/human.esm.js';
self.importScripts('../dist/human.js');
@ -34,12 +36,9 @@ onmessage = async (msg) => {
if (ctx) ctx.drawImage(result.canvas, 0, 0);
const img = ctx ? ctx.getImageData(0, 0, result.canvas.width, result.canvas.height) : null;
result.canvas = null; // must strip original canvas from return value as it cannot be transferred from worker thread
// @ts-ignore tslint wrong type matching for worker
if (img) postMessage({ result, image: img.data.buffer, width: msg.data.width, height: msg.data.height }, [img.data.buffer]);
// @ts-ignore tslint wrong type matching for worker
else postMessage({ result });
} else {
// @ts-ignore tslint wrong type matching for worker
postMessage({ result });
}
busy = false;
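For context, the main-thread counterpart of this transfer protocol could look roughly like the sketch below; the worker path and the exact message shape are assumptions inferred from the handler above.

```ts
// hypothetical main-thread side of the worker above
const worker = new Worker('worker.js'); // assumed path
const canvas = document.querySelector('canvas') as HTMLCanvasElement;
const ctx = canvas.getContext('2d') as CanvasRenderingContext2D;

function sendFrame(): void {
  const pixels = ctx.getImageData(0, 0, canvas.width, canvas.height);
  // list the pixel buffer as a transferable so it moves instead of being copied
  worker.postMessage({ image: pixels.data.buffer, width: canvas.width, height: canvas.height, config: {} }, [pixels.data.buffer]);
}

worker.onmessage = (msg) => {
  if (msg.data.image) { // the worker returns processed pixels the same way
    const img = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
    ctx.putImageData(img, 0, 0);
  }
  console.log('result:', msg.data.result);
  sendFrame(); // request the next frame once the worker is free
};

sendFrame();
```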


@ -1,5 +1,7 @@
// load Human using IIFE script as Chrome Mobile does not support Modules as Workers
/// <reference lib="webworker" />
// import Human from '../dist/human.esm.js';
self.importScripts('../../dist/human.js');
@ -15,6 +17,5 @@ onmessage = async (msg) => {
const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
let result = {};
result = await human.detect(image, msg.data.config);
// @ts-ignore tslint wrong type matching for worker
postMessage({ result: result[msg.data.type], type: msg.data.type });
};


@ -1,5 +1,7 @@
// load Human using IIFE script as Chrome Mobile does not support Modules as Workers
/// <reference lib="webworker" />
// import Human from '../dist/human.esm.js';
self.importScripts('../../dist/human.js');
self.importScripts('../../node_modules/@tensorflow/tfjs-core/dist/tf-core.es2017.js');
@ -17,6 +19,5 @@ onmessage = async (msg) => {
const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
let result = {};
result = await human.detect(image, msg.data.config);
// @ts-ignore tslint wrong type matching for worker
postMessage({ result: result[msg.data.type], type: msg.data.type });
};

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.js vendored

File diff suppressed because one or more lines are too long


@ -221,13 +221,13 @@ var config = {
// src/sysinfo.ts
function info() {
let platform;
let agent;
let platform = "";
let agent = "";
if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
platform = platformMatch ? platformMatch[0].replace(/\(|\)/g, "") : "";
platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
agent = navigator.userAgent.replace(raw[0], "");
if (platform[1])
agent = agent.replace(raw[1], "");
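This bundled change mirrors src/sysinfo.ts: under strictNullChecks an unassigned `let` carries `undefined` in its control-flow type, so the declared `string` return type stops compiling. A minimal reproduction, not taken from the repo:

```ts
// broken under strictNullChecks: the unassigned `let` evolves to string | undefined,
// so returning it where `string` is declared raises TS2322
//   let platform;
//   if (typeof navigator !== 'undefined') platform = navigator.userAgent;
//   return { platform, agent: '' };
// fixed: initialize with '' so the type is a definite string on every path
function info(): { platform: string, agent: string } {
  let platform = '';
  let agent = '';
  if (typeof navigator !== 'undefined') platform = navigator.userAgent;
  return { platform, agent };
}
```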
@ -494,7 +494,7 @@ var BlazeFaceModel = class {
this.config = config3;
}
async getBoundingBoxes(inputImage, userConfig) {
if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
if (!inputImage || inputImage["isDisposedInternal"] || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
return null;
const [batch, boxes, scores] = tf3.tidy(() => {
const resizedImage = tf3.image.resizeBilinear(inputImage, [this.inputSize, this.inputSize]);
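The switch from dot to bracket access for `isDisposedInternal` is what lets the earlier @ts-ignore go away: dot access to a property the `Tensor` type does not declare is a hard error, while an element access is only an implicit `any`, which this project still permits (`noImplicitAny: false`). A minimal sketch:

```ts
import type { Tensor } from '@tensorflow/tfjs-core';

function isDisposed(tensor: Tensor): boolean {
  // tensor.isDisposedInternal      -> TS2339: property does not exist on type 'Tensor'
  // tensor['isDisposedInternal']   -> implicit-any element access, allowed here
  return Boolean(tensor['isDisposedInternal']);
}
```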
@ -4071,9 +4071,10 @@ var Pipeline = class {
if (config3.face.iris.enabled)
rawCoords = await this.augmentIris(rawCoords, face5);
const mesh = this.transformRawCoords(rawCoords, box6, angle, rotationMatrix);
const storeConfidence = box6.confidence;
box6 = enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5);
box6.confidence = storeConfidence;
box6 = {
confidence: box6.confidence,
...enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5)
};
if (config3.face.detector.rotation && config3.face.mesh.enabled && config3.face.description.enabled && tf4.ENV.flags.IS_BROWSER) {
[angle, rotationMatrix, face5] = this.correctFaceRotation(config3, box6, input);
}
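The rewrite replaces a store/restore dance with a single object literal. One subtlety worth noting: spread properties come last, so if `enlargeBox` ever returned its own `confidence` it would override the preserved one. A small stand-alone sketch:

```ts
// stand-ins for the box shapes used above
const box = { startPoint: [0, 0], endPoint: [10, 10], confidence: 0.9 };
const enlarged = { startPoint: [-2.5, -2.5], endPoint: [12.5, 12.5] }; // result of enlargeBox(...)
const next = { confidence: box.confidence, ...enlarged };
// next === { confidence: 0.9, startPoint: [-2.5, -2.5], endPoint: [12.5, 12.5] }
```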
@ -7691,11 +7692,10 @@ var anchors = [
// src/handpose/handdetector.ts
var HandDetector = class {
constructor(model10) {
var _a;
this.model = model10;
this.anchors = anchors.map((anchor) => [anchor.x, anchor.y]);
this.anchorsTensor = tf10.tensor2d(this.anchors);
this.inputSize = (_a = this.model) == null ? void 0 : _a.inputs[0].shape[2];
this.inputSize = this.model && this.model.inputs && this.model.inputs[0].shape ? this.model.inputs[0].shape[2] : 0;
this.inputSizeTensor = tf10.tensor1d([this.inputSize, this.inputSize]);
this.doubleInputSizeTensor = tf10.tensor1d([this.inputSize * 2, this.inputSize * 2]);
}
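The compiled optional-chaining helper (`_a == null ? void 0 : ...`) produced `number | undefined`, which strict checks reject where a plain number is needed for the tensor constructors below it. The explicit guard with a `0` fallback keeps the type a definite number, as in this sketch:

```ts
import type { GraphModel } from '@tensorflow/tfjs';

function getInputSize(model: GraphModel | null): number {
  // const size = model?.inputs[0].shape?.[2]; // typed number | undefined
  return (model && model.inputs && model.inputs[0].shape) ? model.inputs[0].shape[2] : 0;
}
```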
@ -10531,7 +10531,7 @@ function join2(faces, bodies, hands, gestures, shape) {
const minX = Math.min(...x);
const minY = Math.min(...y);
person2.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
if (shape && shape.length === 4)
if (shape && shape[1] && shape[2])
person2.boxRaw = [person2.box[0] / shape[2], person2.box[1] / shape[1], person2.box[2] / shape[2], person2.box[3] / shape[1]];
persons2.push(person2);
}
@ -11471,6 +11471,8 @@ var Human = class {
if (this.config.cacheSensitivity === 0)
return false;
const resizeFact = 32;
if (!input.shape[1] || !input.shape[2])
return false;
const reduced = tf21.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
const reducedData = await reduced.data();
let sum = 0;
@ -11584,6 +11586,7 @@ var Human = class {
faceres: null,
segmentation: null
};
this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
this.image = (input) => process4(input, this.config);
this.faceTriangulation = triangulation;
this.faceUVMap = uvmap;
@ -11683,10 +11686,10 @@ var Human = class {
this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes;
let bodyRes;
let handRes;
let objectRes;
let faceRes = [];
let bodyRes = [];
let handRes = [];
let objectRes = [];
if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, process6.tensor) : [];
if (this.performance.face)


@ -222,13 +222,13 @@ var config = {
// src/sysinfo.ts
function info() {
let platform;
let agent;
let platform = "";
let agent = "";
if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
platform = platformMatch ? platformMatch[0].replace(/\(|\)/g, "") : "";
platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
agent = navigator.userAgent.replace(raw[0], "");
if (platform[1])
agent = agent.replace(raw[1], "");
@ -495,7 +495,7 @@ var BlazeFaceModel = class {
this.config = config3;
}
async getBoundingBoxes(inputImage, userConfig) {
if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
if (!inputImage || inputImage["isDisposedInternal"] || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
return null;
const [batch, boxes, scores] = tf3.tidy(() => {
const resizedImage = tf3.image.resizeBilinear(inputImage, [this.inputSize, this.inputSize]);
@ -4072,9 +4072,10 @@ var Pipeline = class {
if (config3.face.iris.enabled)
rawCoords = await this.augmentIris(rawCoords, face5);
const mesh = this.transformRawCoords(rawCoords, box6, angle, rotationMatrix);
const storeConfidence = box6.confidence;
box6 = enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5);
box6.confidence = storeConfidence;
box6 = {
confidence: box6.confidence,
...enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5)
};
if (config3.face.detector.rotation && config3.face.mesh.enabled && config3.face.description.enabled && tf4.ENV.flags.IS_BROWSER) {
[angle, rotationMatrix, face5] = this.correctFaceRotation(config3, box6, input);
}
@ -7692,11 +7693,10 @@ var anchors = [
// src/handpose/handdetector.ts
var HandDetector = class {
constructor(model10) {
var _a;
this.model = model10;
this.anchors = anchors.map((anchor) => [anchor.x, anchor.y]);
this.anchorsTensor = tf10.tensor2d(this.anchors);
this.inputSize = (_a = this.model) == null ? void 0 : _a.inputs[0].shape[2];
this.inputSize = this.model && this.model.inputs && this.model.inputs[0].shape ? this.model.inputs[0].shape[2] : 0;
this.inputSizeTensor = tf10.tensor1d([this.inputSize, this.inputSize]);
this.doubleInputSizeTensor = tf10.tensor1d([this.inputSize * 2, this.inputSize * 2]);
}
@ -10532,7 +10532,7 @@ function join2(faces, bodies, hands, gestures, shape) {
const minX = Math.min(...x);
const minY = Math.min(...y);
person2.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
if (shape && shape.length === 4)
if (shape && shape[1] && shape[2])
person2.boxRaw = [person2.box[0] / shape[2], person2.box[1] / shape[1], person2.box[2] / shape[2], person2.box[3] / shape[1]];
persons2.push(person2);
}
@ -11472,6 +11472,8 @@ var Human = class {
if (this.config.cacheSensitivity === 0)
return false;
const resizeFact = 32;
if (!input.shape[1] || !input.shape[2])
return false;
const reduced = tf21.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
const reducedData = await reduced.data();
let sum = 0;
@ -11585,6 +11587,7 @@ var Human = class {
faceres: null,
segmentation: null
};
this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
this.image = (input) => process4(input, this.config);
this.faceTriangulation = triangulation;
this.faceUVMap = uvmap;
@ -11684,10 +11687,10 @@ var Human = class {
this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes;
let bodyRes;
let handRes;
let objectRes;
let faceRes = [];
let bodyRes = [];
let handRes = [];
let objectRes = [];
if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, process6.tensor) : [];
if (this.performance.face)

dist/human.node.js vendored

@ -221,13 +221,13 @@ var config = {
// src/sysinfo.ts
function info() {
let platform;
let agent;
let platform = "";
let agent = "";
if (typeof navigator !== "undefined") {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
platform = platformMatch ? platformMatch[0].replace(/\(|\)/g, "") : "";
platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
agent = navigator.userAgent.replace(raw[0], "");
if (platform[1])
agent = agent.replace(raw[1], "");
@ -494,7 +494,7 @@ var BlazeFaceModel = class {
this.config = config3;
}
async getBoundingBoxes(inputImage, userConfig) {
if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
if (!inputImage || inputImage["isDisposedInternal"] || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
return null;
const [batch, boxes, scores] = tf3.tidy(() => {
const resizedImage = tf3.image.resizeBilinear(inputImage, [this.inputSize, this.inputSize]);
@ -4071,9 +4071,10 @@ var Pipeline = class {
if (config3.face.iris.enabled)
rawCoords = await this.augmentIris(rawCoords, face5);
const mesh = this.transformRawCoords(rawCoords, box6, angle, rotationMatrix);
const storeConfidence = box6.confidence;
box6 = enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5);
box6.confidence = storeConfidence;
box6 = {
confidence: box6.confidence,
...enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5)
};
if (config3.face.detector.rotation && config3.face.mesh.enabled && config3.face.description.enabled && tf4.ENV.flags.IS_BROWSER) {
[angle, rotationMatrix, face5] = this.correctFaceRotation(config3, box6, input);
}
@ -7691,11 +7692,10 @@ var anchors = [
// src/handpose/handdetector.ts
var HandDetector = class {
constructor(model10) {
var _a;
this.model = model10;
this.anchors = anchors.map((anchor) => [anchor.x, anchor.y]);
this.anchorsTensor = tf10.tensor2d(this.anchors);
this.inputSize = (_a = this.model) == null ? void 0 : _a.inputs[0].shape[2];
this.inputSize = this.model && this.model.inputs && this.model.inputs[0].shape ? this.model.inputs[0].shape[2] : 0;
this.inputSizeTensor = tf10.tensor1d([this.inputSize, this.inputSize]);
this.doubleInputSizeTensor = tf10.tensor1d([this.inputSize * 2, this.inputSize * 2]);
}
@ -10531,7 +10531,7 @@ function join2(faces, bodies, hands, gestures, shape) {
const minX = Math.min(...x);
const minY = Math.min(...y);
person2.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
if (shape && shape.length === 4)
if (shape && shape[1] && shape[2])
person2.boxRaw = [person2.box[0] / shape[2], person2.box[1] / shape[1], person2.box[2] / shape[2], person2.box[3] / shape[1]];
persons2.push(person2);
}
@ -11471,6 +11471,8 @@ var Human = class {
if (this.config.cacheSensitivity === 0)
return false;
const resizeFact = 32;
if (!input.shape[1] || !input.shape[2])
return false;
const reduced = tf21.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
const reducedData = await reduced.data();
let sum = 0;
@ -11584,6 +11586,7 @@ var Human = class {
faceres: null,
segmentation: null
};
this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
this.image = (input) => process4(input, this.config);
this.faceTriangulation = triangulation;
this.faceUVMap = uvmap;
@ -11683,10 +11686,10 @@ var Human = class {
this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes;
let bodyRes;
let handRes;
let objectRes;
let faceRes = [];
let bodyRes = [];
let handRes = [];
let objectRes = [];
if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, process6.tensor) : [];
if (this.performance.face)


@ -67,8 +67,8 @@
"@tensorflow/tfjs-node": "^3.8.0",
"@tensorflow/tfjs-node-gpu": "^3.8.0",
"@types/node": "^16.6.1",
"@typescript-eslint/eslint-plugin": "^4.29.1",
"@typescript-eslint/parser": "^4.29.1",
"@typescript-eslint/eslint-plugin": "^4.29.2",
"@typescript-eslint/parser": "^4.29.2",
"@vladmandic/pilogger": "^0.2.18",
"canvas": "^2.8.0",
"chokidar": "^3.5.2",


@ -1,22 +1,22 @@
2021-08-15 08:08:32 INFO:  @vladmandic/human version 2.1.3
2021-08-15 08:08:32 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.5.0
2021-08-15 08:08:32 INFO:  Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.20","typescript":"4.3.5","typedoc":"0.21.5","eslint":"7.32.0"}
2021-08-15 08:08:32 INFO:  Clean: ["dist/*","types/*","typedoc/*"]
2021-08-15 08:08:32 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-08-15 08:08:32 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
2021-08-15 08:08:32 STATE: target: node type: node: {"imports":42,"importBytes":437903,"outputBytes":379522,"outputFiles":"dist/human.node.js"}
2021-08-15 08:08:32 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
2021-08-15 08:08:32 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":437911,"outputBytes":379526,"outputFiles":"dist/human.node-gpu.js"}
2021-08-15 08:08:32 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
2021-08-15 08:08:33 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":437978,"outputBytes":379598,"outputFiles":"dist/human.node-wasm.js"}
2021-08-15 08:08:33 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2168,"outputBytes":1242,"outputFiles":"dist/tfjs.esm.js"}
2021-08-15 08:08:33 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":437842,"outputBytes":249009,"outputFiles":"dist/human.esm-nobundle.js"}
2021-08-15 08:08:33 STATE: target: browserBundle type: tfjs: {"modules":1170,"moduleBytes":4145868,"imports":7,"importBytes":2168,"outputBytes":2334701,"outputFiles":"dist/tfjs.esm.js"}
2021-08-15 08:08:33 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":2771301,"outputBytes":1379375,"outputFiles":"dist/human.js"}
2021-08-15 08:08:34 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":2771301,"outputBytes":1379367,"outputFiles":"dist/human.esm.js"}
2021-08-15 08:08:34 INFO:  Running Linter: ["server/","src/","tfjs/","test/","demo/"]
2021-08-15 08:08:57 INFO:  Linter complete: files: 77 errors: 0 warnings: 0
2021-08-15 08:08:57 INFO:  Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-08-15 08:08:57 INFO:  Generate Typings: ["src/human.ts"] outDir: ["types"]
2021-08-15 08:09:11 INFO:  Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
2021-08-15 08:09:25 INFO:  Documentation generated at /home/vlado/dev/human/typedoc 1
2021-08-17 08:49:41 INFO:  @vladmandic/human version 2.1.3
2021-08-17 08:49:41 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.5.0
2021-08-17 08:49:41 INFO:  Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.20","typescript":"4.3.5","typedoc":"0.21.5","eslint":"7.32.0"}
2021-08-17 08:49:41 INFO:  Clean: ["dist/*","types/*","typedoc/*"]
2021-08-17 08:49:41 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-08-17 08:49:41 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
2021-08-17 08:49:41 STATE: target: node type: node: {"imports":42,"importBytes":436292,"outputBytes":379767,"outputFiles":"dist/human.node.js"}
2021-08-17 08:49:41 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
2021-08-17 08:49:41 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":436300,"outputBytes":379771,"outputFiles":"dist/human.node-gpu.js"}
2021-08-17 08:49:41 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
2021-08-17 08:49:41 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":436367,"outputBytes":379843,"outputFiles":"dist/human.node-wasm.js"}
2021-08-17 08:49:41 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2168,"outputBytes":1242,"outputFiles":"dist/tfjs.esm.js"}
2021-08-17 08:49:41 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":436231,"outputBytes":249203,"outputFiles":"dist/human.esm-nobundle.js"}
2021-08-17 08:49:42 STATE: target: browserBundle type: tfjs: {"modules":1170,"moduleBytes":4145868,"imports":7,"importBytes":2168,"outputBytes":2334701,"outputFiles":"dist/tfjs.esm.js"}
2021-08-17 08:49:42 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":2769690,"outputBytes":1379569,"outputFiles":"dist/human.js"}
2021-08-17 08:49:42 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":2769690,"outputBytes":1379561,"outputFiles":"dist/human.esm.js"}
2021-08-17 08:49:42 INFO:  Running Linter: ["server/","src/","tfjs/","test/","demo/"]
2021-08-17 08:50:06 INFO:  Linter complete: files: 77 errors: 0 warnings: 0
2021-08-17 08:50:06 INFO:  Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-08-17 08:50:06 INFO:  Generate Typings: ["src/human.ts"] outDir: ["types"]
2021-08-17 08:50:21 INFO:  Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
2021-08-17 08:50:34 INFO:  Documentation generated at /home/vlado/dev/human/typedoc 1


@ -21,10 +21,10 @@ async function analyzeGraph(modelPath) {
const shape = val.tensorShape.dim.map((a) => parseInt(a.size));
inputs.push({ name: key, dtype: val.dtype, shape });
}
// @ts-ignore
// @ts-ignore accessing private property
} else if (model.executor.graph['inputs']) {
log.info('model inputs based on executor');
// @ts-ignore
// @ts-ignore accessing private property
for (const t of model.executor.graph['inputs']) {
inputs.push({ name: t.name, dtype: t.attrParams.dtype.value, shape: t.attrParams.shape.value });
}
@ -40,10 +40,10 @@ async function analyzeGraph(modelPath) {
const shape = val.tensorShape?.dim.map((a) => parseInt(a.size));
outputs.push({ id: i++, name: key, dtype: val.dtype, shape });
}
// @ts-ignore
// @ts-ignore accessing private property
} else if (model.executor.graph['outputs']) {
log.info('model outputs based on executor');
// @ts-ignore
// @ts-ignore accessing private property
for (const t of model.executor.graph['outputs']) {
outputs.push({ id: i++, name: t.name, dtype: t.attrParams.dtype?.value || t.rawAttrs.T.type, shape: t.attrParams.shape?.value });
}
@ -62,13 +62,13 @@ async function analyzeSaved(modelPath) {
log.data('tags:', meta[0].tags);
log.data('signature:', Object.keys(meta[0].signatureDefs));
const inputs = Object.values(sign.inputs)[0];
// @ts-ignore
// @ts-ignore a is array
const inputShape = inputs.shape?.map((a) => a.array[0]);
log.data('inputs:', { name: inputs.name, dtype: inputs.dtype, shape: inputShape });
const outputs = [];
let i = 0;
for (const [key, val] of Object.entries(sign.outputs)) {
// @ts-ignore
// @ts-ignore a is array
const shape = val.shape?.map((a) => a.array[0]);
outputs.push({ id: i++, name: key, dtype: val.dtype, shape });
}


@ -16,8 +16,7 @@ let skipped = Number.MAX_SAFE_INTEGER;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function load(config: Config | any) {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.face.age.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);
@ -33,7 +32,7 @@ export async function predict(image: Tensor, config: Config | any) {
}
skipped = 0;
return new Promise(async (resolve) => {
if (!model.inputs[0].shape) return;
if (!model.inputs || !model.inputs[0] || !model.inputs[0].shape) return;
const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
const enhance = tf.mul(resize, [255.0]);
tf.dispose(resize);
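The `as unknown as GraphModel` double assertion is the recurring pattern of this commit: the project imports `tf` from its own bundled `dist/tfjs.esm.js`, whose loader return type does not line up with the `GraphModel` type used in signatures, and the assertion replaces a blanket @ts-ignore that would also hide unrelated errors on the line. A condensed sketch against the stock tfjs package:

```ts
import * as tf from '@tensorflow/tfjs';
import type { GraphModel } from '@tensorflow/tfjs';

async function loadModel(url: string): Promise<GraphModel> {
  // the double assertion goes through `unknown` so TS accepts the bridge
  return await tf.loadGraphModel(url) as unknown as GraphModel;
}
```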


@ -39,8 +39,7 @@ export class BlazeFaceModel {
async getBoundingBoxes(inputImage: Tensor, userConfig: Config) {
// sanity check on input
// @ts-ignore isDisposed is internal property
if ((!inputImage) || (inputImage.isDisposedInternal) || (inputImage.shape.length !== 4) || (inputImage.shape[1] < 1) || (inputImage.shape[2] < 1)) return null;
if ((!inputImage) || (inputImage['isDisposedInternal']) || (inputImage.shape.length !== 4) || (inputImage.shape[1] < 1) || (inputImage.shape[2] < 1)) return null;
const [batch, boxes, scores] = tf.tidy(() => {
const resizedImage = tf.image.resizeBilinear(inputImage, [this.inputSize, this.inputSize]);
const normalizedImage = tf.sub(tf.div(resizedImage, 127.5), 0.5);


@ -60,11 +60,10 @@ export async function predict(input: Tensor, config: Config): Promise<Face[]> {
export async function load(config): Promise<[unknown, GraphModel | null, GraphModel | null]> {
if ((!faceModels[0] && config.face.enabled) || (!faceModels[1] && config.face.mesh.enabled) || (!faceModels[2] && config.face.iris.enabled)) {
// @ts-ignore type mismatch for GraphModel
faceModels = await Promise.all([
(!faceModels[0] && config.face.enabled) ? blazeface.load(config) : null,
(!faceModels[1] && config.face.mesh.enabled) ? tf.loadGraphModel(join(config.modelBasePath, config.face.mesh.modelPath), { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }) : null,
(!faceModels[2] && config.face.iris.enabled) ? tf.loadGraphModel(join(config.modelBasePath, config.face.iris.modelPath), { fromTFHub: config.face.iris.modelPath.includes('tfhub.dev') }) : null,
(!faceModels[1] && config.face.mesh.enabled) ? tf.loadGraphModel(join(config.modelBasePath, config.face.mesh.modelPath), { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }) as unknown as GraphModel : null,
(!faceModels[2] && config.face.iris.enabled) ? tf.loadGraphModel(join(config.modelBasePath, config.face.iris.modelPath), { fromTFHub: config.face.iris.modelPath.includes('tfhub.dev') }) as unknown as GraphModel : null,
]);
if (config.face.mesh.enabled) {
if (!faceModels[1] || !faceModels[1]['modelUrl']) log('load model failed:', config.face.mesh.modelPath);


@ -295,10 +295,10 @@ export class Pipeline {
// override box from detection with one calculated from mesh
const mesh = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
const storeConfidence = box.confidence;
// @ts-ignore enlargeBox does not include confidence so we append it manually
box = bounding.enlargeBox(bounding.calculateLandmarksBoundingBox(mesh), 1.5); // redefine box with mesh calculated one
box.confidence = storeConfidence;
box = {
confidence: box.confidence, // keep confidence
...bounding.enlargeBox(bounding.calculateLandmarksBoundingBox(mesh), 1.5), // redefine box with mesh calculated one
};
// do rotation one more time with mesh keypoints if we want to return perfect image
if (config.face.detector.rotation && config.face.mesh.enabled && config.face.description.enabled && tf.ENV.flags.IS_BROWSER) {


@ -15,8 +15,7 @@ let model: GraphModel;
export async function load(config: Config): Promise<GraphModel> {
if (!model) {
// @ts-ignore type mismatch for Graphmodel
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath)) as unknown as GraphModel;
model['width'] = parseInt(model['signature'].inputs['input_1:0'].tensorShape.dim[2].size);
model['height'] = parseInt(model['signature'].inputs['input_1:0'].tensorShape.dim[1].size);
if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);


@ -278,16 +278,13 @@ export async function body(inCanvas: HTMLCanvasElement, result: Array<Body>, dra
ctx.lineWidth = localOptions.lineWidth;
ctx.font = localOptions.font;
if (localOptions.drawBoxes && result[i].box && result[i].box?.length === 4) {
// @ts-ignore box may not exist
rect(ctx, result[i].box[0], result[i].box[1], result[i].box[2], result[i].box[3], localOptions);
if (localOptions.drawLabels) {
if (localOptions.shadowColor && localOptions.shadowColor !== '') {
ctx.fillStyle = localOptions.shadowColor;
// @ts-ignore box may not exist
ctx.fillText(`body ${100 * result[i].score}%`, result[i].box[0] + 3, 1 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
}
ctx.fillStyle = localOptions.labelColor;
// @ts-ignore box may not exist
ctx.fillText(`body ${100 * result[i].score}%`, result[i].box[0] + 2, 0 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
}
}


@ -22,8 +22,7 @@ const bodyParts = ['head', 'neck', 'rightShoulder', 'rightElbow', 'rightWrist',
export async function load(config: Config): Promise<GraphModel> {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);


@ -13,8 +13,7 @@ let model: GraphModel;
export async function load(config) {
const modelUrl = join(config.modelBasePath, config.face.embedding.modelPath);
if (!model) {
// @ts-ignore type mismatch for GraphModel
model = await tf.loadGraphModel(modelUrl);
model = await tf.loadGraphModel(modelUrl) as unknown as GraphModel;
if (!model) log('load model failed:', config.face.embedding.modelPath);
else if (config.debug) log('load model:', modelUrl);
} else if (config.debug) log('cached model:', modelUrl);
@ -55,10 +54,9 @@ export function enhance(input): Tensor {
const box = [[0.05, 0.15, 0.85, 0.85]]; // empirical values for top, left, bottom, right
const tensor = input.image || input.tensor;
if (!(tensor instanceof tf.Tensor)) return null;
if (!model || !model.inputs || !model.inputs[0].shape) return null;
const crop = (tensor.shape.length === 3)
// @ts-ignore model possibly undefined
? tf.image.cropAndResize(tf.expandDims(tensor, 0), box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]) // add batch dimension if missing
// @ts-ignore model possibly undefined
: tf.image.cropAndResize(tensor, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
// convert to black&white to avoid colorization impact
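For reference, the crop step behind that ternary might be sketched as follows: `cropAndResize` requires a 4D `[batch, height, width, channels]` tensor, so a 3D image receives a batch axis first (the shapes and crop size here are stand-ins, not the model's real dimensions):

```ts
import * as tf from '@tensorflow/tfjs';

const image = tf.zeros([256, 256, 3]);   // stand-in for the input tensor
const box = [[0.05, 0.15, 0.85, 0.85]];  // normalized [top, left, bottom, right]
const batched = image.shape.length === 3 ? tf.expandDims(image, 0) : image;
const crop = tf.image.cropAndResize(batched as tf.Tensor4D, box, [0], [112, 112]);
```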


@ -162,7 +162,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
parent.analyze('Get Face');
// if something went wrong, skip the face
// @ts-ignore possibly undefined
if (!faces[i].tensor || faces[i].tensor['isDisposedInternal']) {
log('Face object is disposed:', faces[i].tensor);
continue;


@ -25,8 +25,7 @@ type DB = Array<{ name: string, source: string, embedding: number[] }>;
export async function load(config: Config): Promise<GraphModel> {
const modelUrl = join(config.modelBasePath, config.face.description.modelPath);
if (!model) {
// @ts-ignore type mismatch for GraphModel
model = await tf.loadGraphModel(modelUrl);
model = await tf.loadGraphModel(modelUrl) as unknown as GraphModel;
if (!model) log('load model failed:', config.face.description.modelPath);
else if (config.debug) log('load model:', modelUrl);
} else if (config.debug) log('cached model:', modelUrl);


@ -34,8 +34,7 @@ let skipped = Number.MAX_SAFE_INTEGER;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function load(config: Config | any) {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.agegenderrace.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.agegenderrace.modelPath)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.face.agegenderrace.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);


@ -19,8 +19,7 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function load(config: Config | any) {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.gender.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.gender.modelPath)) as unknown as GraphModel;
alternative = model.inputs[0].shape ? model.inputs[0]?.shape[3] === 1 : false;
if (!model || !model['modelUrl']) log('load model failed:', config.face.gender.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);


@ -15,8 +15,7 @@ export class HandDetector {
this.model = model;
this.anchors = anchors.anchors.map((anchor) => [anchor.x, anchor.y]);
this.anchorsTensor = tf.tensor2d(this.anchors);
// @ts-ignore model is not undefined here
this.inputSize = this.model?.inputs[0].shape[2];
this.inputSize = (this.model && this.model.inputs && this.model.inputs[0].shape) ? this.model.inputs[0].shape[2] : 0;
this.inputSizeTensor = tf.tensor1d([this.inputSize, this.inputSize]);
this.doubleInputSizeTensor = tf.tensor1d([this.inputSize * 2, this.inputSize * 2]);
}


@ -71,10 +71,9 @@ export async function predict(input: Tensor, config: Config): Promise<Hand[]> {
export async function load(config: Config): Promise<[GraphModel | null, GraphModel | null]> {
if (!handDetectorModel || !handPoseModel) {
// @ts-ignore type mismatch on GraphModel
[handDetectorModel, handPoseModel] = await Promise.all([
config.hand.enabled ? tf.loadGraphModel(join(config.modelBasePath, config.hand.detector.modelPath), { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
config.hand.landmarks ? tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton.modelPath), { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
config.hand.enabled ? tf.loadGraphModel(join(config.modelBasePath, config.hand.detector.modelPath), { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) as unknown as GraphModel : null,
config.hand.landmarks ? tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton.modelPath), { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) as unknown as GraphModel : null,
]);
if (config.hand.enabled) {
if (!handDetectorModel || !handDetectorModel['modelUrl']) log('load model failed:', config.hand.detector.modelPath);


@ -41,7 +41,7 @@ export function mergeDeep(...objects) {
}
// helper function: return min and max from input array
export const minmax = (data) => data.reduce((acc, val) => {
export const minmax = (data: Array<number>) => data.reduce((acc: Array<number>, val) => {
acc[0] = (acc[0] === undefined || val < acc[0]) ? val : acc[0];
acc[1] = (acc[1] === undefined || val > acc[1]) ? val : acc[1];
return acc;
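With the signature pinned down, the helper's behavior is unchanged; a usage sketch (reducer copied from above, with its `[]` seed):

```ts
const minmax = (data: Array<number>) => data.reduce((acc: Array<number>, val) => {
  acc[0] = (acc[0] === undefined || val < acc[0]) ? val : acc[0];
  acc[1] = (acc[1] === undefined || val > acc[1]) ? val : acc[1];
  return acc;
}, [] as Array<number>);

const [lo, hi] = minmax([3, 1, 4, 1, 5]); // lo === 1, hi === 5
minmax([]);                               // [] for empty input
```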


@ -4,7 +4,7 @@
import { log, now, mergeDeep } from './helpers';
import { Config, defaults } from './config';
import { Result, Gesture } from './result';
import { Result, Face, Hand, Body, Item, Gesture } from './result';
import * as sysinfo from './sysinfo';
import * as tf from '../dist/tfjs.esm.js';
import * as backend from './tfjs/backend';
@ -166,6 +166,7 @@ export class Human {
faceres: null,
segmentation: null,
};
this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
// export access to image processing
// @ts-ignore eslint-typescript cannot correctly infer type in anonymous function
this.image = (input: Input) => image.process(input, this.config);
@ -179,7 +180,7 @@ export class Human {
// helper function: measure tensor leak
/** @hidden */
analyze = (...msg) => {
analyze = (...msg: string[]) => {
if (!this.#analyzeMemoryLeaks) return;
const currentTensors = this.tf.engine().state.numTensors;
const previousTensors = this.#numTensors;
@ -190,7 +191,7 @@ export class Human {
// quick sanity check on inputs
/** @hidden */
#sanity = (input): null | string => {
#sanity = (input: Input): null | string => {
if (!this.#checkSanity) return null;
if (!input) return 'input is not defined';
if (this.tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) return 'input must be a tensor';
@ -233,7 +234,6 @@ export class Human {
*/
// eslint-disable-next-line class-methods-use-this
enhance(input: Tensor): Tensor | null {
// @ts-ignore type mismach for Tensor
return faceres.enhance(input);
}
@ -391,9 +391,10 @@ export class Human {
// check if input changed sufficiently to trigger new detections
/** @hidden */
#skipFrame = async (input) => {
#skipFrame = async (input: Tensor) => {
if (this.config.cacheSensitivity === 0) return false;
const resizeFact = 32;
if (!input.shape[1] || !input.shape[2]) return false;
const reduced: Tensor = tf.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
// use tensor sum
/*
@ -453,23 +454,6 @@ export class Human {
// load models if enabled
await this.load();
/*
// function disabled in favor of inputChanged
// disable video optimization for inputs of type image, but skip if inside worker thread
let previousVideoOptimized;
// @ts-ignore ignore missing type for WorkerGlobalScope as that is the point
if (input && this.config.videoOptimized && (typeof window !== 'undefined') && (typeof WorkerGlobalScope !== 'undefined') && (
(typeof HTMLImageElement !== 'undefined' && input instanceof HTMLImageElement)
|| (typeof Image !== 'undefined' && input instanceof Image)
|| (typeof ImageData !== 'undefined' && input instanceof ImageData)
|| (typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap))
) {
log('disabling video optimization');
previousVideoOptimized = this.config.videoOptimized;
this.config.videoOptimized = false;
}
*/
timeStamp = now();
let process = image.process(input, this.config);
this.performance.image = Math.trunc(now() - timeStamp);
@ -508,10 +492,10 @@ export class Human {
// prepare where to store model results
// keep them with weak typing as it can be promise or not
let faceRes;
let bodyRes;
let handRes;
let objectRes;
let faceRes: Face[] | Promise<Face[]> | never[] = [];
let bodyRes: Body[] | Promise<Body[]> | never[] = [];
let handRes: Hand[] | Promise<Hand[]> | never[] = [];
let objectRes: Item[] | Promise<Item[]> | never[] = [];
// run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
if (this.config.async) {
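A condensed sketch of why the union type is needed (type name below is a stand-in): in async mode each variable holds an unresolved promise that is awaited later in one batch, while in sync mode it holds a plain array; the typed `[]` serves as the initial value for both paths.

```ts
type FaceStub = { score: number }; // stand-in for Face from src/result
let faceRes: FaceStub[] | Promise<FaceStub[]> | never[] = [];

async function run(asyncMode: boolean): Promise<FaceStub[]> {
  if (asyncMode) {
    faceRes = Promise.resolve([{ score: 1 }]); // stored without awaiting
    [faceRes] = await Promise.all([faceRes]);  // all detectors resolve together
  } else {
    faceRes = [{ score: 1 }];                  // resolved inline
  }
  return faceRes as FaceStub[];                // narrowed for the typed Result
}
```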
@ -590,15 +574,15 @@ export class Human {
this.performance.total = Math.trunc(now() - timeStart);
this.state = 'idle';
this.result = {
face: faceRes,
body: bodyRes,
hand: handRes,
face: faceRes as Face[],
body: bodyRes as Body[],
hand: handRes as Hand[],
gesture: gestureRes,
object: objectRes,
object: objectRes as Item[],
performance: this.performance,
canvas: process.canvas,
timestamp: Date.now(),
get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes, process?.tensor?.shape); },
get persons() { return persons.join(faceRes as Face[], bodyRes as Body[], handRes as Hand[], gestureRes, process?.tensor?.shape); },
};
// finally dispose input tensor
@ -611,7 +595,7 @@ export class Human {
/** @hidden */
#warmupBitmap = async () => {
const b64toBlob = (base64, type = 'application/octet-stream') => fetch(`data:${type};base64,${base64}`).then((res) => res.blob());
const b64toBlob = (base64: string, type = 'application/octet-stream') => fetch(`data:${type};base64,${base64}`).then((res) => res.blob());
let blob;
let res;
switch (this.config.warmup) {
@ -662,7 +646,7 @@ export class Human {
/** @hidden */
#warmupNode = async () => {
const atob = (str) => Buffer.from(str, 'base64');
const atob = (str: string) => Buffer.from(str, 'base64');
let img;
if (this.config.warmup === 'face') img = atob(sample.face);
if (this.config.warmup === 'body' || this.config.warmup === 'full') img = atob(sample.body);


@ -14,7 +14,7 @@ const maxSize = 2048;
let inCanvas;
let outCanvas;
// instance of fximage
let fx;
let fx: fxImage.GLImageFilter | null;
// process input image and return tensor
// input can be tensor, imagedata, htmlimageelement, htmlvideoelement


@ -18,29 +18,17 @@ import * as segmentation from './segmentation/segmentation';
export async function load(instance) {
if (instance.config.async) { // load models concurrently
[
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.face,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.emotion,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.handpose,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.posenet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.blazepose,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.efficientpose,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.movenet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.nanodet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.centernet,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.faceres,
// @ts-ignore models loaded via promise array cannot be correctly inferred
instance.models.segmentation,
// @ts-ignore models loaded via promise array cannot be correctly inferred
// instance.models.agegenderrace,
] = await Promise.all([
instance.models.face || (instance.config.face.enabled ? facemesh.load(instance.config) : null),
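The pattern that made those @ts-ignore lines removable is array destructuring straight into object properties, so a single Promise.all fills the model registry positionally. A self-contained sketch with stand-in loaders:

```ts
async function loadAll() {
  const models: { face: unknown, emotion: unknown } = { face: null, emotion: null };
  [models.face, models.emotion] = await Promise.all([
    Promise.resolve('face-model'),    // stand-in for facemesh.load(config)
    Promise.resolve('emotion-model'), // stand-in for emotion.load(config)
  ]);
  return models;
}
```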


@ -22,8 +22,7 @@ const bodyParts = ['nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar', 'leftSh
export async function load(config: Config): Promise<GraphModel> {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);


@ -44,7 +44,7 @@ export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>
// create new overarching box from all boxes belonging to person
const x: number[] = [];
const y: number[] = [];
const extractXY = (box) => { // extract all [x, y] coordinates from boxes [x, y, width, height]
const extractXY = (box: [number, number, number, number] | undefined) => { // extract all [x, y] coordinates from boxes [x, y, width, height]
if (box && box.length === 4) {
x.push(box[0], box[0] + box[2]);
y.push(box[1], box[1] + box[3]);
@ -59,7 +59,7 @@ export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>
person.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY]; // create new overarching box
// shape is known so we calculate boxRaw as well
if (shape && shape.length === 4) person.boxRaw = [person.box[0] / shape[2], person.box[1] / shape[1], person.box[2] / shape[2], person.box[3] / shape[1]];
if (shape && shape[1] && shape[2]) person.boxRaw = [person.box[0] / shape[2], person.box[1] / shape[1], person.box[2] / shape[2], person.box[3] / shape[1]];
persons.push(person);
}
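The guard change is more than cosmetic: `length === 4` passes even when a dimension is 0 or undefined, letting the divisions above produce NaN or Infinity, while testing the dimensions directly covers presence and non-zero in one check. A small illustration:

```ts
const shape: Array<number | undefined> = [1, 0, 640, 3]; // [batch, height, width, channels]
if (shape && shape.length === 4) { /* passes, then box[1] / 0 === Infinity */ }
if (shape && shape[1] && shape[2]) { /* correctly rejected: height is 0 */ }
```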


@ -24,7 +24,7 @@ export async function predict(input: Tensor, config: Config): Promise<Body[]> {
return results3d;
});
const buffers = await Promise.all(res.map((tensor) => tensor.buffer()));
const buffers = await Promise.all(res.map((tensor: Tensor) => tensor.buffer()));
for (const t of res) tf.dispose(t);
const decoded = await poses.decode(buffers[0], buffers[1], buffers[2], buffers[3], config.body.maxDetected, config.body.minConfidence);
@ -35,8 +35,7 @@ export async function predict(input: Tensor, config: Config): Promise<Body[]> {
export async function load(config: Config): Promise<GraphModel> {
if (!model) {
// @ts-ignore type mismatch for GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);


@ -1,11 +1,11 @@
import * as kpt from './keypoints';
import { Body } from '../result';
export function eitherPointDoesntMeetConfidence(a, b, minConfidence) {
export function eitherPointDoesntMeetConfidence(a: number, b: number, minConfidence: number) {
return (a < minConfidence || b < minConfidence);
}
export function getAdjacentKeyPoints(keypoints, minConfidence) {
export function getAdjacentKeyPoints(keypoints, minConfidence: number) {
return kpt.connectedPartIndices.reduce((result, [leftJoint, rightJoint]) => {
if (eitherPointDoesntMeetConfidence(keypoints[leftJoint].score, keypoints[rightJoint].score, minConfidence)) {
return result;


@ -15,8 +15,7 @@ let busy = false;
export async function load(config: Config): Promise<GraphModel> {
if (!model) {
// @ts-ignore type mismatch on GraphModel
model = await tf.loadGraphModel(join(config.modelBasePath, config.segmentation.modelPath));
model = await tf.loadGraphModel(join(config.modelBasePath, config.segmentation.modelPath)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.segmentation.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);


@ -2,13 +2,13 @@
* Helper function that returns basic system info
*/
export function info(): { platform: string, agent: string } {
let platform;
let agent;
let platform = '';
let agent = '';
if (typeof navigator !== 'undefined') {
const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
if (raw && raw[0]) {
const platformMatch = raw[0].match(/\(([^()]+)\)/g);
platform = platformMatch ? platformMatch[0].replace(/\(|\)/g, '') : '';
platform = (platformMatch && platformMatch[0]) ? platformMatch[0].replace(/\(|\)/g, '') : '';
agent = navigator.userAgent.replace(raw[0], '');
if (platform[1]) agent = agent.replace(raw[1], '');
agent = agent.replace(/ /g, ' ');


@ -1,36 +1,46 @@
{
"compilerOptions": {
"noEmitOnError": false,
"module": "es2020",
"target": "es2018",
"moduleResolution": "node",
"typeRoots": ["node_modules/@types"],
"outDir": "types",
"baseUrl": "./",
"paths": { "tslib": ["node_modules/tslib/tslib.d.ts"] },
"noEmitOnError": false,
"declaration": true,
"allowJs": true,
"allowSyntheticDefaultImports": true,
"allowUnreachableCode": false,
"allowUnusedLabels": false,
"alwaysStrict": true,
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"importHelpers": true,
"noFallthroughCasesInSwitch": true,
"noImplicitAny": false,
"noImplicitOverride": true,
"noImplicitReturns": true,
"noImplicitThis": true,
"noPropertyAccessFromIndexSignature": false,
"noUncheckedIndexedAccess": false,
"noUnusedLocals": false,
"noUnusedParameters": true,
"preserveConstEnums": true,
"pretty": true,
"removeComments": false,
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": true,
"allowJs": true,
"baseUrl": "./",
"paths": { "tslib": ["node_modules/tslib/tslib.d.ts"] },
"strictBindCallApply": true,
"strictFunctionTypes": true,
"strictNullChecks": true,
"noImplicitAny": false,
"noUnusedLocals": false,
"noImplicitReturns": true,
"noImplicitThis": true,
"alwaysStrict": true,
"noUnusedParameters": true,
"pretty": true,
"noFallthroughCasesInSwitch": true,
"allowUnreachableCode": false
"strictPropertyInitialization": true
},
"formatCodeOptions": {
"indentSize": 2,
"tabSize": 2
},
"formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
"exclude": ["node_modules/", "types/", "tfjs/", "dist/"],
"include": ["src"],
"typedocOptions": {


@ -5,4 +5,4 @@ export declare function join(folder: string, file: string): string;
export declare function log(...msg: any[]): void;
export declare const now: () => number;
export declare function mergeDeep(...objects: any[]): any;
export declare const minmax: (data: any) => any;
export declare const minmax: (data: Array<number>) => number[];


@ -112,7 +112,7 @@ export declare class Human {
*/
constructor(userConfig?: Config | Record<string, unknown>);
/** @hidden */
analyze: (...msg: any[]) => void;
analyze: (...msg: string[]) => void;
/** Similarity method calculates similarity between two provided face descriptors (face embeddings)
* - Calculation is based on normalized Minkowski distance between
*


@ -1,6 +1,6 @@
import { Body } from '../result';
export declare function eitherPointDoesntMeetConfidence(a: any, b: any, minConfidence: any): boolean;
export declare function getAdjacentKeyPoints(keypoints: any, minConfidence: any): any[];
export declare function eitherPointDoesntMeetConfidence(a: number, b: number, minConfidence: number): boolean;
export declare function getAdjacentKeyPoints(keypoints: any, minConfidence: number): any[];
export declare function getBoundingBox(keypoints: any): [number, number, number, number];
export declare function scalePoses(poses: any, [height, width]: [any, any], [inputResolutionHeight, inputResolutionWidth]: [any, any]): Array<Body>;
export declare class MaxHeap {