mirror of https://github.com/vladmandic/human

commit c205c08fb0: strict type checks
parent: c97eb67c5b
@@ -9,7 +9,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 
 ## Changelog
 
-### **HEAD -> main** 2021/08/14 mandic00@live.com
+### **HEAD -> main** 2021/08/15 mandic00@live.com
 
 - experimental webgpu support
 - add experimental webgu demo
TODO.md (14 changed lines)

@@ -1,12 +1,5 @@
 # To-Do list for Human library
 
-## Big Ticket Items
-
-Implementation of WebGPU backend
-*Target: `Human` v2.3 with `Chrome` v94 and `TFJS` v4.0*
-
-<br>
-
 ## Work in Progress
 
 WebGL shader optimizations for faster load and initial detection

@@ -69,10 +62,15 @@ Object detection using CenterNet or NanoDet models is not working when using WAS
 <https://github.com/tensorflow/tfjs/issues/4824>
 *Target: `Human` v2.2 with `TFJS` v3.9*
 
-### WebGPU
+### WebGPU Backend
 
+Implementation of WebGPU backend
 Experimental support only
 
+*Target: `Human` v2.3 with `Chrome` v94 and `TFJS` v4.0*
+
+<br>
+
 - Backend WebGPU missing kernel ops
 <https://github.com/tensorflow/tfjs/issues/5496>
 - Backend WebGPU incompatible with web workers
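The TODO entry above describes the state of the experimental WebGPU backend; selecting it goes through the library configuration. A minimal sketch, assuming the `backend` and `modelBasePath` config fields and the ESM bundle (this is not the official demo code, and WebGPU remains experimental as noted above):

import Human from '../dist/human.esm.js';

// prefer the experimental WebGPU backend when the browser exposes it, otherwise fall back to WebGL
const human = new Human({
  backend: ('gpu' in navigator) ? 'webgpu' : 'webgl',
  modelBasePath: '../models/',
});

async function run(video: HTMLVideoElement) {
  await human.load();                        // warm up the configured models
  const result = await human.detect(video);  // single detection pass
  console.log(result.face, result.body, result.hand, result.performance);
}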
@@ -4,8 +4,6 @@
 
 /// <reference lib="webworker" />
 
-// // @ts-nocheck Linting of ServiceWorker is not supported for JS files
-
 const skipCaching = false;
 
 const cacheName = 'Human';
@@ -1,3 +1,5 @@
+/// <reference lib="webworker" />
+
 // load Human using IIFE script as Chome Mobile does not support Modules as Workers
 // import Human from '../dist/human.esm.js';
 self.importScripts('../dist/human.js');

@@ -34,12 +36,9 @@ onmessage = async (msg) => {
     if (ctx) ctx.drawImage(result.canvas, 0, 0);
     const img = ctx ? ctx.getImageData(0, 0, result.canvas.width, result.canvas.height) : null;
     result.canvas = null; // must strip original canvas from return value as it cannot be transfered from worker thread
-    // @ts-ignore tslint wrong type matching for worker
     if (img) postMessage({ result, image: img.data.buffer, width: msg.data.width, height: msg.data.height }, [img.data.buffer]);
-    // @ts-ignore tslint wrong type matching for worker
     else postMessage({ result });
   } else {
-    // @ts-ignore tslint wrong type matching for worker
     postMessage({ result });
   }
   busy = false;
@@ -1,5 +1,7 @@
 // load Human using IIFE script as Chome Mobile does not support Modules as Workers
 
+/// <reference lib="webworker" />
+
 // import Human from '../dist/human.esm.js';
 self.importScripts('../../dist/human.js');

@@ -15,6 +17,5 @@ onmessage = async (msg) => {
 const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
 let result = {};
 result = await human.detect(image, msg.data.config);
-// @ts-ignore tslint wrong type matching for worker
 postMessage({ result: result[msg.data.type], type: msg.data.type });
 };
@@ -1,5 +1,7 @@
 // load Human using IIFE script as Chome Mobile does not support Modules as Workers
 
+/// <reference lib="webworker" />
+
 // import Human from '../dist/human.esm.js';
 self.importScripts('../../dist/human.js');
 self.importScripts('../../node_modules/@tensorflow/tfjs-core/dist/tf-core.es2017.js');

@@ -17,6 +19,5 @@ onmessage = async (msg) => {
 const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
 let result = {};
 result = await human.detect(image, msg.data.config);
-// @ts-ignore tslint wrong type matching for worker
 postMessage({ result: result[msg.data.type], type: msg.data.type });
 };
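The web worker changes above all share one protocol: the page transfers raw pixels to the worker, the worker runs human.detect, and only serializable data travels back (tensors and canvases never cross the boundary, as the stripped-canvas comment notes). A minimal sketch of the main-thread side of that exchange — illustrative only; the worker file name and surrounding code are assumptions, while the message fields mirror the diffs:

const worker = new Worker('demo-worker.js'); // a worker like the ones patched above

function detectInWorker(video: HTMLVideoElement, canvas: HTMLCanvasElement, config: unknown, type: string) {
  const ctx = canvas.getContext('2d');
  if (!ctx) return;
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
  const pixels = ctx.getImageData(0, 0, canvas.width, canvas.height);
  // transfer the pixel buffer instead of copying it
  worker.postMessage({ image: pixels.data.buffer, width: canvas.width, height: canvas.height, config, type }, [pixels.data.buffer]);
}

worker.onmessage = (msg) => {
  // plain, structured-clone-friendly result; the worker strips tensors and canvases before posting
  console.log('worker result:', msg.data.result);
};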
File diff suppressed because one or more lines are too long (5 files)
@@ -221,13 +221,13 @@ var config = {
 
 // src/sysinfo.ts
 function info() {
-  let platform;
-  let agent;
+  let platform = "";
+  let agent = "";
   if (typeof navigator !== "undefined") {
     const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
     if (raw && raw[0]) {
       const platformMatch = raw[0].match(/\(([^()]+)\)/g);
-      platform = platformMatch ? platformMatch[0].replace(/\(|\)/g, "") : "";
+      platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
       agent = navigator.userAgent.replace(raw[0], "");
       if (platform[1])
         agent = agent.replace(raw[1], "");

@@ -494,7 +494,7 @@ var BlazeFaceModel = class {
     this.config = config3;
   }
   async getBoundingBoxes(inputImage, userConfig) {
-    if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
+    if (!inputImage || inputImage["isDisposedInternal"] || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
       return null;
     const [batch, boxes, scores] = tf3.tidy(() => {
       const resizedImage = tf3.image.resizeBilinear(inputImage, [this.inputSize, this.inputSize]);

@@ -4071,9 +4071,10 @@ var Pipeline = class {
     if (config3.face.iris.enabled)
       rawCoords = await this.augmentIris(rawCoords, face5);
     const mesh = this.transformRawCoords(rawCoords, box6, angle, rotationMatrix);
-    const storeConfidence = box6.confidence;
-    box6 = enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5);
-    box6.confidence = storeConfidence;
+    box6 = {
+      confidence: box6.confidence,
+      ...enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5)
+    };
     if (config3.face.detector.rotation && config3.face.mesh.enabled && config3.face.description.enabled && tf4.ENV.flags.IS_BROWSER) {
       [angle, rotationMatrix, face5] = this.correctFaceRotation(config3, box6, input);
     }

@@ -7691,11 +7692,10 @@ var anchors = [
 // src/handpose/handdetector.ts
 var HandDetector = class {
   constructor(model10) {
-    var _a;
     this.model = model10;
     this.anchors = anchors.map((anchor) => [anchor.x, anchor.y]);
     this.anchorsTensor = tf10.tensor2d(this.anchors);
-    this.inputSize = (_a = this.model) == null ? void 0 : _a.inputs[0].shape[2];
+    this.inputSize = this.model && this.model.inputs && this.model.inputs[0].shape ? this.model.inputs[0].shape[2] : 0;
     this.inputSizeTensor = tf10.tensor1d([this.inputSize, this.inputSize]);
     this.doubleInputSizeTensor = tf10.tensor1d([this.inputSize * 2, this.inputSize * 2]);
   }

@@ -10531,7 +10531,7 @@ function join2(faces, bodies, hands, gestures, shape) {
     const minX = Math.min(...x);
     const minY = Math.min(...y);
     person2.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
-    if (shape && shape.length === 4)
+    if (shape && shape[1] && shape[2])
       person2.boxRaw = [person2.box[0] / shape[2], person2.box[1] / shape[1], person2.box[2] / shape[2], person2.box[3] / shape[1]];
     persons2.push(person2);
   }

@@ -11471,6 +11471,8 @@ var Human = class {
     if (this.config.cacheSensitivity === 0)
       return false;
     const resizeFact = 32;
+    if (!input.shape[1] || !input.shape[2])
+      return false;
     const reduced = tf21.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
     const reducedData = await reduced.data();
     let sum = 0;

@@ -11584,6 +11586,7 @@ var Human = class {
       faceres: null,
       segmentation: null
     };
+    this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
     this.image = (input) => process4(input, this.config);
     this.faceTriangulation = triangulation;
     this.faceUVMap = uvmap;

@@ -11683,10 +11686,10 @@ var Human = class {
     this.performance.cached++;
     this.performance.changed = Math.trunc(now() - timeStamp);
     this.analyze("Check Changed:");
-    let faceRes;
-    let bodyRes;
-    let handRes;
-    let objectRes;
+    let faceRes = [];
+    let bodyRes = [];
+    let handRes = [];
+    let objectRes = [];
    if (this.config.async) {
      faceRes = this.config.face.enabled ? detectFace(this, process6.tensor) : [];
      if (this.performance.face)
@@ -222,13 +222,13 @@ var config = {
 
 // src/sysinfo.ts
 function info() {
-  let platform;
-  let agent;
+  let platform = "";
+  let agent = "";
   if (typeof navigator !== "undefined") {
     const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
     if (raw && raw[0]) {
       const platformMatch = raw[0].match(/\(([^()]+)\)/g);
-      platform = platformMatch ? platformMatch[0].replace(/\(|\)/g, "") : "";
+      platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
       agent = navigator.userAgent.replace(raw[0], "");
       if (platform[1])
         agent = agent.replace(raw[1], "");

@@ -495,7 +495,7 @@ var BlazeFaceModel = class {
     this.config = config3;
   }
   async getBoundingBoxes(inputImage, userConfig) {
-    if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
+    if (!inputImage || inputImage["isDisposedInternal"] || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
       return null;
     const [batch, boxes, scores] = tf3.tidy(() => {
       const resizedImage = tf3.image.resizeBilinear(inputImage, [this.inputSize, this.inputSize]);

@@ -4072,9 +4072,10 @@ var Pipeline = class {
     if (config3.face.iris.enabled)
       rawCoords = await this.augmentIris(rawCoords, face5);
     const mesh = this.transformRawCoords(rawCoords, box6, angle, rotationMatrix);
-    const storeConfidence = box6.confidence;
-    box6 = enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5);
-    box6.confidence = storeConfidence;
+    box6 = {
+      confidence: box6.confidence,
+      ...enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5)
+    };
     if (config3.face.detector.rotation && config3.face.mesh.enabled && config3.face.description.enabled && tf4.ENV.flags.IS_BROWSER) {
       [angle, rotationMatrix, face5] = this.correctFaceRotation(config3, box6, input);
     }

@@ -7692,11 +7693,10 @@ var anchors = [
 // src/handpose/handdetector.ts
 var HandDetector = class {
   constructor(model10) {
-    var _a;
     this.model = model10;
     this.anchors = anchors.map((anchor) => [anchor.x, anchor.y]);
     this.anchorsTensor = tf10.tensor2d(this.anchors);
-    this.inputSize = (_a = this.model) == null ? void 0 : _a.inputs[0].shape[2];
+    this.inputSize = this.model && this.model.inputs && this.model.inputs[0].shape ? this.model.inputs[0].shape[2] : 0;
     this.inputSizeTensor = tf10.tensor1d([this.inputSize, this.inputSize]);
     this.doubleInputSizeTensor = tf10.tensor1d([this.inputSize * 2, this.inputSize * 2]);
   }

@@ -10532,7 +10532,7 @@ function join2(faces, bodies, hands, gestures, shape) {
     const minX = Math.min(...x);
     const minY = Math.min(...y);
     person2.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
-    if (shape && shape.length === 4)
+    if (shape && shape[1] && shape[2])
       person2.boxRaw = [person2.box[0] / shape[2], person2.box[1] / shape[1], person2.box[2] / shape[2], person2.box[3] / shape[1]];
     persons2.push(person2);
   }

@@ -11472,6 +11472,8 @@ var Human = class {
     if (this.config.cacheSensitivity === 0)
       return false;
     const resizeFact = 32;
+    if (!input.shape[1] || !input.shape[2])
+      return false;
     const reduced = tf21.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
     const reducedData = await reduced.data();
     let sum = 0;

@@ -11585,6 +11587,7 @@ var Human = class {
       faceres: null,
       segmentation: null
     };
+    this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
     this.image = (input) => process4(input, this.config);
     this.faceTriangulation = triangulation;
     this.faceUVMap = uvmap;

@@ -11684,10 +11687,10 @@ var Human = class {
     this.performance.cached++;
     this.performance.changed = Math.trunc(now() - timeStamp);
     this.analyze("Check Changed:");
-    let faceRes;
-    let bodyRes;
-    let handRes;
-    let objectRes;
+    let faceRes = [];
+    let bodyRes = [];
+    let handRes = [];
+    let objectRes = [];
    if (this.config.async) {
      faceRes = this.config.face.enabled ? detectFace(this, process6.tensor) : [];
      if (this.performance.face)
@@ -221,13 +221,13 @@ var config = {
 
 // src/sysinfo.ts
 function info() {
-  let platform;
-  let agent;
+  let platform = "";
+  let agent = "";
   if (typeof navigator !== "undefined") {
     const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
     if (raw && raw[0]) {
       const platformMatch = raw[0].match(/\(([^()]+)\)/g);
-      platform = platformMatch ? platformMatch[0].replace(/\(|\)/g, "") : "";
+      platform = platformMatch && platformMatch[0] ? platformMatch[0].replace(/\(|\)/g, "") : "";
       agent = navigator.userAgent.replace(raw[0], "");
       if (platform[1])
         agent = agent.replace(raw[1], "");

@@ -494,7 +494,7 @@ var BlazeFaceModel = class {
     this.config = config3;
   }
   async getBoundingBoxes(inputImage, userConfig) {
-    if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
+    if (!inputImage || inputImage["isDisposedInternal"] || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
       return null;
     const [batch, boxes, scores] = tf3.tidy(() => {
       const resizedImage = tf3.image.resizeBilinear(inputImage, [this.inputSize, this.inputSize]);

@@ -4071,9 +4071,10 @@ var Pipeline = class {
     if (config3.face.iris.enabled)
       rawCoords = await this.augmentIris(rawCoords, face5);
     const mesh = this.transformRawCoords(rawCoords, box6, angle, rotationMatrix);
-    const storeConfidence = box6.confidence;
-    box6 = enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5);
-    box6.confidence = storeConfidence;
+    box6 = {
+      confidence: box6.confidence,
+      ...enlargeBox(calculateLandmarksBoundingBox(mesh), 1.5)
+    };
     if (config3.face.detector.rotation && config3.face.mesh.enabled && config3.face.description.enabled && tf4.ENV.flags.IS_BROWSER) {
       [angle, rotationMatrix, face5] = this.correctFaceRotation(config3, box6, input);
     }

@@ -7691,11 +7692,10 @@ var anchors = [
 // src/handpose/handdetector.ts
 var HandDetector = class {
   constructor(model10) {
-    var _a;
     this.model = model10;
     this.anchors = anchors.map((anchor) => [anchor.x, anchor.y]);
     this.anchorsTensor = tf10.tensor2d(this.anchors);
-    this.inputSize = (_a = this.model) == null ? void 0 : _a.inputs[0].shape[2];
+    this.inputSize = this.model && this.model.inputs && this.model.inputs[0].shape ? this.model.inputs[0].shape[2] : 0;
     this.inputSizeTensor = tf10.tensor1d([this.inputSize, this.inputSize]);
     this.doubleInputSizeTensor = tf10.tensor1d([this.inputSize * 2, this.inputSize * 2]);
   }

@@ -10531,7 +10531,7 @@ function join2(faces, bodies, hands, gestures, shape) {
     const minX = Math.min(...x);
     const minY = Math.min(...y);
     person2.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
-    if (shape && shape.length === 4)
+    if (shape && shape[1] && shape[2])
       person2.boxRaw = [person2.box[0] / shape[2], person2.box[1] / shape[1], person2.box[2] / shape[2], person2.box[3] / shape[1]];
     persons2.push(person2);
   }

@@ -11471,6 +11471,8 @@ var Human = class {
     if (this.config.cacheSensitivity === 0)
       return false;
     const resizeFact = 32;
+    if (!input.shape[1] || !input.shape[2])
+      return false;
     const reduced = tf21.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
     const reducedData = await reduced.data();
     let sum = 0;

@@ -11584,6 +11586,7 @@ var Human = class {
       faceres: null,
       segmentation: null
     };
+    this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
     this.image = (input) => process4(input, this.config);
     this.faceTriangulation = triangulation;
     this.faceUVMap = uvmap;

@@ -11683,10 +11686,10 @@ var Human = class {
     this.performance.cached++;
     this.performance.changed = Math.trunc(now() - timeStamp);
     this.analyze("Check Changed:");
-    let faceRes;
-    let bodyRes;
-    let handRes;
-    let objectRes;
+    let faceRes = [];
+    let bodyRes = [];
+    let handRes = [];
+    let objectRes = [];
    if (this.config.async) {
      faceRes = this.config.face.enabled ? detectFace(this, process6.tensor) : [];
      if (this.performance.face)
@@ -67,8 +67,8 @@
     "@tensorflow/tfjs-node": "^3.8.0",
     "@tensorflow/tfjs-node-gpu": "^3.8.0",
     "@types/node": "^16.6.1",
-    "@typescript-eslint/eslint-plugin": "^4.29.1",
-    "@typescript-eslint/parser": "^4.29.1",
+    "@typescript-eslint/eslint-plugin": "^4.29.2",
+    "@typescript-eslint/parser": "^4.29.2",
     "@vladmandic/pilogger": "^0.2.18",
     "canvas": "^2.8.0",
     "chokidar": "^3.5.2",
@@ -1,22 +1,22 @@
-2021-08-15 08:08:32 INFO:  @vladmandic/human version 2.1.3
-2021-08-15 08:08:32 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.5.0
-2021-08-15 08:08:32 INFO:  Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.20","typescript":"4.3.5","typedoc":"0.21.5","eslint":"7.32.0"}
-2021-08-15 08:08:32 INFO:  Clean: ["dist/*","types/*","typedoc/*"]
-2021-08-15 08:08:32 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
-2021-08-15 08:08:32 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-15 08:08:32 STATE: target: node type: node: {"imports":42,"importBytes":437903,"outputBytes":379522,"outputFiles":"dist/human.node.js"}
-2021-08-15 08:08:32 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-15 08:08:32 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":437911,"outputBytes":379526,"outputFiles":"dist/human.node-gpu.js"}
-2021-08-15 08:08:32 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-15 08:08:33 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":437978,"outputBytes":379598,"outputFiles":"dist/human.node-wasm.js"}
-2021-08-15 08:08:33 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2168,"outputBytes":1242,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-15 08:08:33 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":437842,"outputBytes":249009,"outputFiles":"dist/human.esm-nobundle.js"}
-2021-08-15 08:08:33 STATE: target: browserBundle type: tfjs: {"modules":1170,"moduleBytes":4145868,"imports":7,"importBytes":2168,"outputBytes":2334701,"outputFiles":"dist/tfjs.esm.js"}
-2021-08-15 08:08:33 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":2771301,"outputBytes":1379375,"outputFiles":"dist/human.js"}
-2021-08-15 08:08:34 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":2771301,"outputBytes":1379367,"outputFiles":"dist/human.esm.js"}
-2021-08-15 08:08:34 INFO:  Running Linter: ["server/","src/","tfjs/","test/","demo/"]
-2021-08-15 08:08:57 INFO:  Linter complete: files: 77 errors: 0 warnings: 0
-2021-08-15 08:08:57 INFO:  Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
-2021-08-15 08:08:57 INFO:  Generate Typings: ["src/human.ts"] outDir: ["types"]
-2021-08-15 08:09:11 INFO:  Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
-2021-08-15 08:09:25 INFO:  Documentation generated at /home/vlado/dev/human/typedoc 1
+2021-08-17 08:49:41 INFO:  @vladmandic/human version 2.1.3
+2021-08-17 08:49:41 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.5.0
+2021-08-17 08:49:41 INFO:  Toolchain: {"tfjs":"3.8.0","esbuild":"0.12.20","typescript":"4.3.5","typedoc":"0.21.5","eslint":"7.32.0"}
+2021-08-17 08:49:41 INFO:  Clean: ["dist/*","types/*","typedoc/*"]
+2021-08-17 08:49:41 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
+2021-08-17 08:49:41 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1303,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-17 08:49:41 STATE: target: node type: node: {"imports":42,"importBytes":436292,"outputBytes":379767,"outputFiles":"dist/human.node.js"}
+2021-08-17 08:49:41 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1311,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-17 08:49:41 STATE: target: nodeGPU type: node: {"imports":42,"importBytes":436300,"outputBytes":379771,"outputFiles":"dist/human.node-gpu.js"}
+2021-08-17 08:49:41 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1378,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-17 08:49:41 STATE: target: nodeWASM type: node: {"imports":42,"importBytes":436367,"outputBytes":379843,"outputFiles":"dist/human.node-wasm.js"}
+2021-08-17 08:49:41 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2168,"outputBytes":1242,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-17 08:49:41 STATE: target: browserNoBundle type: esm: {"imports":42,"importBytes":436231,"outputBytes":249203,"outputFiles":"dist/human.esm-nobundle.js"}
+2021-08-17 08:49:42 STATE: target: browserBundle type: tfjs: {"modules":1170,"moduleBytes":4145868,"imports":7,"importBytes":2168,"outputBytes":2334701,"outputFiles":"dist/tfjs.esm.js"}
+2021-08-17 08:49:42 STATE: target: browserBundle type: iife: {"imports":42,"importBytes":2769690,"outputBytes":1379569,"outputFiles":"dist/human.js"}
+2021-08-17 08:49:42 STATE: target: browserBundle type: esm: {"imports":42,"importBytes":2769690,"outputBytes":1379561,"outputFiles":"dist/human.esm.js"}
+2021-08-17 08:49:42 INFO:  Running Linter: ["server/","src/","tfjs/","test/","demo/"]
+2021-08-17 08:50:06 INFO:  Linter complete: files: 77 errors: 0 warnings: 0
+2021-08-17 08:50:06 INFO:  Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
+2021-08-17 08:50:06 INFO:  Generate Typings: ["src/human.ts"] outDir: ["types"]
+2021-08-17 08:50:21 INFO:  Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
+2021-08-17 08:50:34 INFO:  Documentation generated at /home/vlado/dev/human/typedoc 1
@@ -21,10 +21,10 @@ async function analyzeGraph(modelPath) {
       const shape = val.tensorShape.dim.map((a) => parseInt(a.size));
       inputs.push({ name: key, dtype: val.dtype, shape });
     }
-  // @ts-ignore
+  // @ts-ignore accessing private property
   } else if (model.executor.graph['inputs']) {
     log.info('model inputs based on executor');
-    // @ts-ignore
+    // @ts-ignore accessing private property
     for (const t of model.executor.graph['inputs']) {
       inputs.push({ name: t.name, dtype: t.attrParams.dtype.value, shape: t.attrParams.shape.value });
     }

@@ -40,10 +40,10 @@ async function analyzeGraph(modelPath) {
       const shape = val.tensorShape?.dim.map((a) => parseInt(a.size));
       outputs.push({ id: i++, name: key, dytpe: val.dtype, shape });
     }
-  // @ts-ignore
+  // @ts-ignore accessing private property
   } else if (model.executor.graph['outputs']) {
     log.info('model outputs based on executor');
-    // @ts-ignore
+    // @ts-ignore accessing private property
     for (const t of model.executor.graph['outputs']) {
       outputs.push({ id: i++, name: t.name, dtype: t.attrParams.dtype?.value || t.rawAttrs.T.type, shape: t.attrParams.shape?.value });
     }

@@ -62,13 +62,13 @@ async function analyzeSaved(modelPath) {
   log.data('tags:', meta[0].tags);
   log.data('signature:', Object.keys(meta[0].signatureDefs));
   const inputs = Object.values(sign.inputs)[0];
-  // @ts-ignore
+  // @ts-ignore a is array
   const inputShape = inputs.shape?.map((a) => a.array[0]);
   log.data('inputs:', { name: inputs.name, dtype: inputs.dtype, shape: inputShape });
   const outputs = [];
   let i = 0;
   for (const [key, val] of Object.entries(sign.outputs)) {
-    // @ts-ignore
+    // @ts-ignore a is array
     const shape = val.shape?.map((a) => a.array[0]);
     outputs.push({ id: i++, name: key, dytpe: val.dtype, shape });
   }
@@ -16,8 +16,7 @@ let skipped = Number.MAX_SAFE_INTEGER;
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function load(config: Config | any) {
   if (!model) {
-    // @ts-ignore type mismatch on GraphModel
-    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath));
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath)) as unknown as GraphModel;
     if (!model || !model['modelUrl']) log('load model failed:', config.face.age.modelPath);
     else if (config.debug) log('load model:', model['modelUrl']);
   } else if (config.debug) log('cached model:', model['modelUrl']);

@@ -33,7 +32,7 @@ export async function predict(image: Tensor, config: Config | any) {
   }
   skipped = 0;
   return new Promise(async (resolve) => {
-    if (!model.inputs[0].shape) return;
+    if (!model.inputs || !model.inputs[0] || !model.inputs[0].shape) return;
     const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
     const enhance = tf.mul(resize, [255.0]);
     tf.dispose(resize);
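The change above and the similar model-loading changes below all repeat the same two moves for strict type checking: the `@ts-ignore` in front of `tf.loadGraphModel` is replaced by a single `as unknown as GraphModel` cast, and optional model metadata is guarded explicitly before use instead of being suppressed. A condensed, hypothetical sketch of that pattern — it uses stock `@tensorflow/tfjs` typings, whereas the library needs the cast because its `tf` is re-exported from a custom bundle:

import * as tf from '@tensorflow/tfjs';

let model: tf.GraphModel | null = null;

// cast once at the load site instead of suppressing the compiler at every call site
export async function load(modelUrl: string): Promise<tf.GraphModel> {
  if (!model) model = await tf.loadGraphModel(modelUrl) as unknown as tf.GraphModel;
  return model;
}

// guard optional metadata explicitly; strict checks reject model.inputs[0].shape[2] without it
export function inputSize(): number {
  return (model && model.inputs && model.inputs[0].shape) ? model.inputs[0].shape[2] : 0;
}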
@@ -39,8 +39,8 @@ export class BlazeFaceModel {
 
   async getBoundingBoxes(inputImage: Tensor, userConfig: Config) {
     // sanity check on input
-    // @ts-ignore isDisposed is internal property
-    if ((!inputImage) || (inputImage.isDisposedInternal) || (inputImage.shape.length !== 4) || (inputImage.shape[1] < 1) || (inputImage.shape[2] < 1)) return null;
+    if ((!inputImage) || (inputImage['isDisposedInternal']) || (inputImage.shape.length !== 4) || (inputImage.shape[1] < 1) || (inputImage.shape[2] < 1)) return null;
     const [batch, boxes, scores] = tf.tidy(() => {
       const resizedImage = tf.image.resizeBilinear(inputImage, [this.inputSize, this.inputSize]);
       const normalizedImage = tf.sub(tf.div(resizedImage, 127.5), 0.5);
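The sanity check above (a defined, non-disposed, 4-D tensor with non-empty height and width) is the same shape guard the commit adds in front of other resize calls. A standalone sketch of that guard using the public isDisposed getter — a hypothetical helper, not part of the library API:

import * as tf from '@tensorflow/tfjs';

// true when a tensor looks like a usable NHWC image batch: [batch, height, width, channels]
function isUsableImageTensor(t: tf.Tensor | null | undefined): boolean {
  if (!t || t.isDisposed) return false;      // missing or already released
  if (t.shape.length !== 4) return false;    // must carry the batch dimension
  return t.shape[1] > 0 && t.shape[2] > 0;   // non-empty height and width
}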
@@ -60,11 +60,10 @@ export async function predict(input: Tensor, config: Config): Promise<Face[]> {
 
 export async function load(config): Promise<[unknown, GraphModel | null, GraphModel | null]> {
   if ((!faceModels[0] && config.face.enabled) || (!faceModels[1] && config.face.mesh.enabled) || (!faceModels[2] && config.face.iris.enabled)) {
-    // @ts-ignore type mismatch for GraphModel
     faceModels = await Promise.all([
       (!faceModels[0] && config.face.enabled) ? blazeface.load(config) : null,
-      (!faceModels[1] && config.face.mesh.enabled) ? tf.loadGraphModel(join(config.modelBasePath, config.face.mesh.modelPath), { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }) : null,
-      (!faceModels[2] && config.face.iris.enabled) ? tf.loadGraphModel(join(config.modelBasePath, config.face.iris.modelPath), { fromTFHub: config.face.iris.modelPath.includes('tfhub.dev') }) : null,
+      (!faceModels[1] && config.face.mesh.enabled) ? tf.loadGraphModel(join(config.modelBasePath, config.face.mesh.modelPath), { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }) as unknown as GraphModel : null,
+      (!faceModels[2] && config.face.iris.enabled) ? tf.loadGraphModel(join(config.modelBasePath, config.face.iris.modelPath), { fromTFHub: config.face.iris.modelPath.includes('tfhub.dev') }) as unknown as GraphModel : null,
     ]);
     if (config.face.mesh.enabled) {
       if (!faceModels[1] || !faceModels[1]['modelUrl']) log('load model failed:', config.face.mesh.modelPath);
@@ -295,10 +295,10 @@ export class Pipeline {
 
       // override box from detection with one calculated from mesh
       const mesh = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
-      const storeConfidence = box.confidence;
-      // @ts-ignore enlargeBox does not include confidence so we append it manually
-      box = bounding.enlargeBox(bounding.calculateLandmarksBoundingBox(mesh), 1.5); // redefine box with mesh calculated one
-      box.confidence = storeConfidence;
+      box = {
+        confidence: box.confidence, // keep confidence
+        ...bounding.enlargeBox(bounding.calculateLandmarksBoundingBox(mesh), 1.5), // redefine box with mesh calculated one
+      };
 
       // do rotation one more time with mesh keypoints if we want to return perfect image
       if (config.face.detector.rotation && config.face.mesh.enabled && config.face.description.enabled && tf.ENV.flags.IS_BROWSER) {
@@ -15,8 +15,7 @@ let model: GraphModel;
 
 export async function load(config: Config): Promise<GraphModel> {
   if (!model) {
-    // @ts-ignore type mismatch for Graphmodel
-    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath)) as unknown as GraphModel;
     model['width'] = parseInt(model['signature'].inputs['input_1:0'].tensorShape.dim[2].size);
     model['height'] = parseInt(model['signature'].inputs['input_1:0'].tensorShape.dim[1].size);
     if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
@@ -278,16 +278,13 @@ export async function body(inCanvas: HTMLCanvasElement, result: Array<Body>, dra
     ctx.lineWidth = localOptions.lineWidth;
     ctx.font = localOptions.font;
     if (localOptions.drawBoxes && result[i].box && result[i].box?.length === 4) {
-      // @ts-ignore box may not exist
       rect(ctx, result[i].box[0], result[i].box[1], result[i].box[2], result[i].box[3], localOptions);
       if (localOptions.drawLabels) {
         if (localOptions.shadowColor && localOptions.shadowColor !== '') {
           ctx.fillStyle = localOptions.shadowColor;
-          // @ts-ignore box may not exist
           ctx.fillText(`body ${100 * result[i].score}%`, result[i].box[0] + 3, 1 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
         }
         ctx.fillStyle = localOptions.labelColor;
-        // @ts-ignore box may not exist
         ctx.fillText(`body ${100 * result[i].score}%`, result[i].box[0] + 2, 0 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
       }
     }
@@ -22,8 +22,7 @@ const bodyParts = ['head', 'neck', 'rightShoulder', 'rightElbow', 'rightWrist',
 
 export async function load(config: Config): Promise<GraphModel> {
   if (!model) {
-    // @ts-ignore type mismatch on GraphModel
-    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath)) as unknown as GraphModel;
     if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
     else if (config.debug) log('load model:', model['modelUrl']);
   } else if (config.debug) log('cached model:', model['modelUrl']);
@@ -13,8 +13,7 @@ let model: GraphModel;
 export async function load(config) {
   const modelUrl = join(config.modelBasePath, config.face.embedding.modelPath);
   if (!model) {
-    // @ts-ignore type mismatch for GraphModel
-    model = await tf.loadGraphModel(modelUrl);
+    model = await tf.loadGraphModel(modelUrl) as unknown as GraphModel;
     if (!model) log('load model failed:', config.face.embedding.modelPath);
     else if (config.debug) log('load model:', modelUrl);
   } else if (config.debug) log('cached model:', modelUrl);

@@ -55,10 +54,9 @@ export function enhance(input): Tensor {
   const box = [[0.05, 0.15, 0.85, 0.85]]; // empyrical values for top, left, bottom, right
   const tensor = input.image || input.tensor;
   if (!(tensor instanceof tf.Tensor)) return null;
+  if (!model || !model.inputs || !model.inputs[0].shape) return null;
   const crop = (tensor.shape.length === 3)
-    // @ts-ignore model possibly undefined
     ? tf.image.cropAndResize(tf.expandDims(tensor, 0), box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]) // add batch dimension if missing
-    // @ts-ignore model possibly undefined
     : tf.image.cropAndResize(tensor, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
 
   // convert to black&white to avoid colorization impact
@@ -162,7 +162,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
   parent.analyze('Get Face');
 
   // is something went wrong, skip the face
-  // @ts-ignore possibly undefined
+  // @ts-ignore possibly undefied
   if (!faces[i].tensor || faces[i].tensor['isDisposedInternal']) {
     log('Face object is disposed:', faces[i].tensor);
     continue;
@@ -25,8 +25,7 @@ type DB = Array<{ name: string, source: string, embedding: number[] }>;
 export async function load(config: Config): Promise<GraphModel> {
   const modelUrl = join(config.modelBasePath, config.face.description.modelPath);
   if (!model) {
-    // @ts-ignore type mismatch for GraphModel
-    model = await tf.loadGraphModel(modelUrl);
+    model = await tf.loadGraphModel(modelUrl) as unknown as GraphModel;
     if (!model) log('load model failed:', config.face.description.modelPath);
     else if (config.debug) log('load model:', modelUrl);
   } else if (config.debug) log('cached model:', modelUrl);
@@ -34,8 +34,7 @@ let skipped = Number.MAX_SAFE_INTEGER;
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function load(config: Config | any) {
   if (!model) {
-    // @ts-ignore type mismatch on GraphModel
-    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.agegenderrace.modelPath));
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.agegenderrace.modelPath)) as unknown as GraphModel;
     if (!model || !model['modelUrl']) log('load model failed:', config.face.agegenderrace.modelPath);
     else if (config.debug) log('load model:', model['modelUrl']);
   } else if (config.debug) log('cached model:', model['modelUrl']);
@@ -19,8 +19,7 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 export async function load(config: Config | any) {
   if (!model) {
-    // @ts-ignore type mismatch on GraphModel
-    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.gender.modelPath));
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.gender.modelPath)) as unknown as GraphModel;
     alternative = model.inputs[0].shape ? model.inputs[0]?.shape[3] === 1 : false;
     if (!model || !model['modelUrl']) log('load model failed:', config.face.gender.modelPath);
     else if (config.debug) log('load model:', model['modelUrl']);
@@ -15,8 +15,7 @@ export class HandDetector {
     this.model = model;
     this.anchors = anchors.anchors.map((anchor) => [anchor.x, anchor.y]);
     this.anchorsTensor = tf.tensor2d(this.anchors);
-    // @ts-ignore model is not undefined here
-    this.inputSize = this.model?.inputs[0].shape[2];
+    this.inputSize = (this.model && this.model.inputs && this.model.inputs[0].shape) ? this.model.inputs[0].shape[2] : 0;
     this.inputSizeTensor = tf.tensor1d([this.inputSize, this.inputSize]);
     this.doubleInputSizeTensor = tf.tensor1d([this.inputSize * 2, this.inputSize * 2]);
   }
@@ -71,10 +71,9 @@ export async function predict(input: Tensor, config: Config): Promise<Hand[]> {
 
 export async function load(config: Config): Promise<[GraphModel | null, GraphModel | null]> {
   if (!handDetectorModel || !handPoseModel) {
-    // @ts-ignore type mismatch on GraphModel
     [handDetectorModel, handPoseModel] = await Promise.all([
-      config.hand.enabled ? tf.loadGraphModel(join(config.modelBasePath, config.hand.detector.modelPath), { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
-      config.hand.landmarks ? tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton.modelPath), { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
+      config.hand.enabled ? tf.loadGraphModel(join(config.modelBasePath, config.hand.detector.modelPath), { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) as unknown as GraphModel : null,
+      config.hand.landmarks ? tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton.modelPath), { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) as unknown as GraphModel : null,
     ]);
     if (config.hand.enabled) {
       if (!handDetectorModel || !handDetectorModel['modelUrl']) log('load model failed:', config.hand.detector.modelPath);
@@ -41,7 +41,7 @@ export function mergeDeep(...objects) {
 }

 // helper function: return min and max from input array
-export const minmax = (data) => data.reduce((acc, val) => {
+export const minmax = (data: Array<number>) => data.reduce((acc: Array<number>, val) => {
   acc[0] = (acc[0] === undefined || val < acc[0]) ? val : acc[0];
   acc[1] = (acc[1] === undefined || val > acc[1]) ? val : acc[1];
   return acc;

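The helper computes both extremes of a numeric array in a single reduce pass. A small self-contained sketch of the same idea (seeded with infinities instead of undefined checks so it type-checks standalone; the seed used by the library is outside the hunk):

```typescript
// single-pass [min, max] over a numeric array
const minmax = (data: number[]): [number, number] => data.reduce(
  (acc, val) => [Math.min(acc[0], val), Math.max(acc[1], val)] as [number, number],
  [Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY] as [number, number],
);

console.log(minmax([0.4, -1.2, 3.7, 0])); // [-1.2, 3.7]
```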
src/human.ts
@@ -4,7 +4,7 @@

 import { log, now, mergeDeep } from './helpers';
 import { Config, defaults } from './config';
-import { Result, Gesture } from './result';
+import { Result, Face, Hand, Body, Item, Gesture } from './result';
 import * as sysinfo from './sysinfo';
 import * as tf from '../dist/tfjs.esm.js';
 import * as backend from './tfjs/backend';

@@ -166,6 +166,7 @@ export class Human {
       faceres: null,
       segmentation: null,
     };
+    this.result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0, persons: [] };
     // export access to image processing
     // @ts-ignore eslint-typescript cannot correctly infer type in anonymous function
     this.image = (input: Input) => image.process(input, this.config);

@@ -179,7 +180,7 @@ export class Human {

   // helper function: measure tensor leak
   /** @hidden */
-  analyze = (...msg) => {
+  analyze = (...msg: string[]) => {
     if (!this.#analyzeMemoryLeaks) return;
     const currentTensors = this.tf.engine().state.numTensors;
     const previousTensors = this.#numTensors;

@@ -190,7 +191,7 @@ export class Human {

   // quick sanity check on inputs
   /** @hidden */
-  #sanity = (input): null | string => {
+  #sanity = (input: Input): null | string => {
     if (!this.#checkSanity) return null;
     if (!input) return 'input is not defined';
     if (this.tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) return 'input must be a tensor';

@@ -233,7 +234,6 @@ export class Human {
   */
   // eslint-disable-next-line class-methods-use-this
   enhance(input: Tensor): Tensor | null {
-    // @ts-ignore type mismach for Tensor
     return faceres.enhance(input);
   }

@@ -391,9 +391,10 @@ export class Human {

   // check if input changed sufficiently to trigger new detections
   /** @hidden */
-  #skipFrame = async (input) => {
+  #skipFrame = async (input: Tensor) => {
     if (this.config.cacheSensitivity === 0) return false;
     const resizeFact = 32;
+    if (!input.shape[1] || !input.shape[2]) return false;
     const reduced: Tensor = tf.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
     // use tensor sum
     /*

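The frame cache works by shrinking the input before comparing it with the previous frame, so the comparison stays cheap. A rough sketch of that pattern; the sum-and-relative-difference comparison below is an assumption, since the actual logic is truncated in the hunk above:

```typescript
import * as tf from '@tensorflow/tfjs';

let lastInputSum = 0;

// downscale the frame, reduce it to a single sum, compare against the previous frame
async function skipFrame(input: tf.Tensor4D, cacheSensitivity: number): Promise<boolean> {
  if (cacheSensitivity === 0) return false;
  if (!input.shape[1] || !input.shape[2]) return false;
  const resizeFact = 32;
  const reduced = tf.image.resizeBilinear(input, [Math.trunc(input.shape[1] / resizeFact), Math.trunc(input.shape[2] / resizeFact)]);
  const sumT = reduced.sum();
  const sum = (await sumT.data())[0];
  tf.dispose([reduced, sumT]);
  const diff = Math.abs(sum - lastInputSum) / Math.max(sum, 1); // assumed relative-difference test
  lastInputSum = sum;
  return diff < cacheSensitivity; // small change: treat input as unchanged and skip detection
}
```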
@@ -453,23 +454,6 @@ export class Human {
     // load models if enabled
     await this.load();

-      /*
-      // function disabled in favor of inputChanged
-      // disable video optimization for inputs of type image, but skip if inside worker thread
-      let previousVideoOptimized;
-      // @ts-ignore ignore missing type for WorkerGlobalScope as that is the point
-      if (input && this.config.videoOptimized && (typeof window !== 'undefined') && (typeof WorkerGlobalScope !== 'undefined') && (
-        (typeof HTMLImageElement !== 'undefined' && input instanceof HTMLImageElement)
-        || (typeof Image !== 'undefined' && input instanceof Image)
-        || (typeof ImageData !== 'undefined' && input instanceof ImageData)
-        || (typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap))
-      ) {
-        log('disabling video optimization');
-        previousVideoOptimized = this.config.videoOptimized;
-        this.config.videoOptimized = false;
-      }
-      */
-
     timeStamp = now();
     let process = image.process(input, this.config);
     this.performance.image = Math.trunc(now() - timeStamp);

@@ -508,10 +492,10 @@ export class Human {

     // prepare where to store model results
     // keep them with weak typing as it can be promise or not
-    let faceRes;
-    let bodyRes;
-    let handRes;
-    let objectRes;
+    let faceRes: Face[] | Promise<Face[]> | never[] = [];
+    let bodyRes: Body[] | Promise<Body[]> | never[] = [];
+    let handRes: Hand[] | Promise<Hand[]> | never[] = [];
+    let objectRes: Item[] | Promise<Item[]> | never[] = [];

     // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
     if (this.config.async) {

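The union type lets each placeholder hold either a pending promise (async path) or an already resolved array (sync path), which is why the later code can cast or await them uniformly. A small illustration of the same idiom with a made-up detector:

```typescript
interface Face { score: number }

// hypothetical detector standing in for the real model call
async function detectFaces(): Promise<Face[]> { return [{ score: 0.99 }]; }

async function run(runAsync: boolean): Promise<number> {
  // placeholder accepts a resolved array (sync path) or a pending promise (async path)
  let faceRes: Face[] | Promise<Face[]> | never[] = [];
  if (runAsync) faceRes = detectFaces();  // store the promise, await it later
  else faceRes = await detectFaces();     // resolve immediately
  const faces = await faceRes;            // awaiting an already-resolved array is a no-op
  return faces.length;
}
```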
@@ -590,15 +574,15 @@ export class Human {
     this.performance.total = Math.trunc(now() - timeStart);
     this.state = 'idle';
     this.result = {
-      face: faceRes,
-      body: bodyRes,
-      hand: handRes,
+      face: faceRes as Face[],
+      body: bodyRes as Body[],
+      hand: handRes as Hand[],
       gesture: gestureRes,
-      object: objectRes,
+      object: objectRes as Item[],
       performance: this.performance,
       canvas: process.canvas,
       timestamp: Date.now(),
-      get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes, process?.tensor?.shape); },
+      get persons() { return persons.join(faceRes as Face[], bodyRes as Body[], handRes as Hand[], gestureRes, process?.tensor?.shape); },
     };

     // finally dispose input tensor

@@ -611,7 +595,7 @@ export class Human {

   /** @hidden */
   #warmupBitmap = async () => {
-    const b64toBlob = (base64, type = 'application/octet-stream') => fetch(`data:${type};base64,${base64}`).then((res) => res.blob());
+    const b64toBlob = (base64: string, type = 'application/octet-stream') => fetch(`data:${type};base64,${base64}`).then((res) => res.blob());
     let blob;
     let res;
     switch (this.config.warmup) {

@@ -662,7 +646,7 @@ export class Human {

   /** @hidden */
   #warmupNode = async () => {
-    const atob = (str) => Buffer.from(str, 'base64');
+    const atob = (str: string) => Buffer.from(str, 'base64');
     let img;
     if (this.config.warmup === 'face') img = atob(sample.face);
     if (this.config.warmup === 'body' || this.config.warmup === 'full') img = atob(sample.body);

@@ -14,7 +14,7 @@ const maxSize = 2048;
 let inCanvas;
 let outCanvas;
 // instance of fximage
-let fx;
+let fx: fxImage.GLImageFilter | null;

 // process input image and return tensor
 // input can be tensor, imagedata, htmlimageelement, htmlvideoelement

@@ -18,29 +18,17 @@ import * as segmentation from './segmentation/segmentation';
 export async function load(instance) {
   if (instance.config.async) { // load models concurrently
     [
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       instance.models.face,
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       instance.models.emotion,
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       instance.models.handpose,
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       instance.models.posenet,
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       instance.models.blazepose,
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       instance.models.efficientpose,
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       instance.models.movenet,
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       instance.models.nanodet,
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       instance.models.centernet,
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       instance.models.faceres,
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       instance.models.segmentation,
-      // @ts-ignore models loaded via promise array cannot be correctly inferred
       // instance.models.agegenderrace,
     ] = await Promise.all([
       instance.models.face || (instance.config.face.enabled ? facemesh.load(instance.config) : null),

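The destructuring target in this hunk is an array of existing object properties rather than new bindings, which is what lets the concurrent loads land directly on `instance.models`. A reduced sketch of the idiom with made-up loaders:

```typescript
interface Models { face: unknown; hand: unknown }
const instance: { models: Models } = { models: { face: null, hand: null } };

// hypothetical loaders standing in for the per-model load() calls
const loadFace = async () => 'face-model';
const loadHand = async () => 'hand-model';

async function loadAll(): Promise<void> {
  // destructure straight into existing properties: both loads run concurrently
  // and their results land on instance.models without intermediate variables
  [instance.models.face, instance.models.hand] = await Promise.all([loadFace(), loadHand()]);
}
```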
@@ -22,8 +22,7 @@ const bodyParts = ['nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar', 'leftSh

 export async function load(config: Config): Promise<GraphModel> {
   if (!model) {
-    // @ts-ignore type mismatch on GraphModel
-    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath)) as unknown as GraphModel;
     if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
     else if (config.debug) log('load model:', model['modelUrl']);
   } else if (config.debug) log('cached model:', model['modelUrl']);

@@ -44,7 +44,7 @@ export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>
     // create new overarching box from all boxes beloning to person
     const x: number[] = [];
     const y: number[] = [];
-    const extractXY = (box) => { // extract all [x, y] coordinates from boxes [x, y, width, height]
+    const extractXY = (box: [number, number, number, number] | undefined) => { // extract all [x, y] coordinates from boxes [x, y, width, height]
       if (box && box.length === 4) {
         x.push(box[0], box[0] + box[2]);
         y.push(box[1], box[1] + box[3]);

@@ -59,7 +59,7 @@ export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>
     person.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY]; // create new overarching box

     // shape is known so we calculate boxRaw as well
-    if (shape && shape.length === 4) person.boxRaw = [person.box[0] / shape[2], person.box[1] / shape[1], person.box[2] / shape[2], person.box[3] / shape[1]];
+    if (shape && shape[1] && shape[2]) person.boxRaw = [person.box[0] / shape[2], person.box[1] / shape[1], person.box[2] / shape[2], person.box[3] / shape[1]];

     persons.push(person);
   }

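`boxRaw` normalizes the pixel box by the input tensor shape: x and width divide by `shape[2]`, y and height by `shape[1]`, matching an NHWC layout. A small worked example with made-up numbers:

```typescript
// assuming an NHWC input tensor of shape [1, 720, 1280, 3] -> height 720, width 1280
const shape = [1, 720, 1280, 3];
const box: [number, number, number, number] = [320, 180, 640, 360]; // x, y, width, height in pixels

// normalize to the 0..1 range
const boxRaw = [box[0] / shape[2], box[1] / shape[1], box[2] / shape[2], box[3] / shape[1]];
console.log(boxRaw); // [0.25, 0.25, 0.5, 0.5]
```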
@@ -24,7 +24,7 @@ export async function predict(input: Tensor, config: Config): Promise<Body[]> {
     return results3d;
   });

-  const buffers = await Promise.all(res.map((tensor) => tensor.buffer()));
+  const buffers = await Promise.all(res.map((tensor: Tensor) => tensor.buffer()));
   for (const t of res) tf.dispose(t);

   const decoded = await poses.decode(buffers[0], buffers[1], buffers[2], buffers[3], config.body.maxDetected, config.body.minConfidence);

@@ -35,8 +35,7 @@ export async function predict(input: Tensor, config: Config): Promise<Body[]> {

 export async function load(config: Config): Promise<GraphModel> {
   if (!model) {
-    // @ts-ignore type mismatch for GraphModel
-    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath)) as unknown as GraphModel;
     if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
     else if (config.debug) log('load model:', model['modelUrl']);
   } else if (config.debug) log('cached model:', model['modelUrl']);

@@ -1,11 +1,11 @@
 import * as kpt from './keypoints';
 import { Body } from '../result';

-export function eitherPointDoesntMeetConfidence(a, b, minConfidence) {
+export function eitherPointDoesntMeetConfidence(a: number, b: number, minConfidence: number) {
   return (a < minConfidence || b < minConfidence);
 }

-export function getAdjacentKeyPoints(keypoints, minConfidence) {
+export function getAdjacentKeyPoints(keypoints, minConfidence: number) {
   return kpt.connectedPartIndices.reduce((result, [leftJoint, rightJoint]) => {
     if (eitherPointDoesntMeetConfidence(keypoints[leftJoint].score, keypoints[rightJoint].score, minConfidence)) {
       return result;

@@ -15,8 +15,7 @@ let busy = false;

 export async function load(config: Config): Promise<GraphModel> {
   if (!model) {
-    // @ts-ignore type mismatch on GraphModel
-    model = await tf.loadGraphModel(join(config.modelBasePath, config.segmentation.modelPath));
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.segmentation.modelPath)) as unknown as GraphModel;
     if (!model || !model['modelUrl']) log('load model failed:', config.segmentation.modelPath);
     else if (config.debug) log('load model:', model['modelUrl']);
   } else if (config.debug) log('cached model:', model['modelUrl']);

@@ -2,13 +2,13 @@
  * Helper function that returns basic system info
  */
 export function info(): { platform: string, agent: string } {
-  let platform;
-  let agent;
+  let platform = '';
+  let agent = '';
   if (typeof navigator !== 'undefined') {
     const raw = navigator.userAgent.match(/\(([^()]+)\)/g);
     if (raw && raw[0]) {
       const platformMatch = raw[0].match(/\(([^()]+)\)/g);
-      platform = platformMatch ? platformMatch[0].replace(/\(|\)/g, '') : '';
+      platform = (platformMatch && platformMatch[0]) ? platformMatch[0].replace(/\(|\)/g, '') : '';
       agent = navigator.userAgent.replace(raw[0], '');
       if (platform[1]) agent = agent.replace(raw[1], '');
       agent = agent.replace(/ /g, ' ');

@@ -1,36 +1,46 @@
 {
   "compilerOptions": {
-    "noEmitOnError": false,
     "module": "es2020",
     "target": "es2018",
     "moduleResolution": "node",
     "typeRoots": ["node_modules/@types"],
     "outDir": "types",
+    "baseUrl": "./",
+    "paths": { "tslib": ["node_modules/tslib/tslib.d.ts"] },
+    "noEmitOnError": false,
     "declaration": true,
+    "allowJs": true,
     "allowSyntheticDefaultImports": true,
+    "allowUnreachableCode": false,
+    "allowUnusedLabels": false,
+    "alwaysStrict": true,
     "emitDecoratorMetadata": true,
     "experimentalDecorators": true,
     "importHelpers": true,
+    "noFallthroughCasesInSwitch": true,
+    "noImplicitAny": false,
+    "noImplicitOverride": true,
+    "noImplicitReturns": true,
+    "noImplicitThis": true,
+    "noPropertyAccessFromIndexSignature": false,
+    "noUncheckedIndexedAccess": false,
+    "noUnusedLocals": false,
+    "noUnusedParameters": true,
     "preserveConstEnums": true,
+    "pretty": true,
     "removeComments": false,
     "resolveJsonModule": true,
     "skipLibCheck": true,
     "sourceMap": true,
-    "allowJs": true,
-    "baseUrl": "./",
-    "paths": { "tslib": ["node_modules/tslib/tslib.d.ts"] },
+    "strictBindCallApply": true,
+    "strictFunctionTypes": true,
     "strictNullChecks": true,
-    "noImplicitAny": false,
-    "noUnusedLocals": false,
-    "noImplicitReturns": true,
-    "noImplicitThis": true,
-    "alwaysStrict": true,
-    "noUnusedParameters": true,
-    "pretty": true,
-    "noFallthroughCasesInSwitch": true,
-    "allowUnreachableCode": false
+    "strictPropertyInitialization": true
   },
-  "formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
+  "formatCodeOptions": {
+    "indentSize": 2,
+    "tabSize": 2
+  },
   "exclude": ["node_modules/", "types/", "tfjs/", "dist/"],
   "include": ["src"],
   "typedocOptions": {

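Several of the newly enabled compiler options reject patterns that previously compiled silently. As one example, `noImplicitOverride` requires the `override` modifier whenever a subclass redefines a base-class member; the class names below are made up for illustration:

```typescript
class Detector {
  load(): void { /* base implementation */ }
}

class FaceDetector extends Detector {
  // without the `override` keyword this redefinition is a compile error under noImplicitOverride
  override load(): void { /* specialized implementation */ }
}
```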
@@ -5,4 +5,4 @@ export declare function join(folder: string, file: string): string;
 export declare function log(...msg: any[]): void;
 export declare const now: () => number;
 export declare function mergeDeep(...objects: any[]): any;
-export declare const minmax: (data: any) => any;
+export declare const minmax: (data: Array<number>) => number[];

@@ -112,7 +112,7 @@ export declare class Human {
   */
   constructor(userConfig?: Config | Record<string, unknown>);
   /** @hidden */
-  analyze: (...msg: any[]) => void;
+  analyze: (...msg: string[]) => void;
   /** Simmilarity method calculates simmilarity between two provided face descriptors (face embeddings)
    * - Calculation is based on normalized Minkowski distance between
    *

@@ -1,6 +1,6 @@
 import { Body } from '../result';
-export declare function eitherPointDoesntMeetConfidence(a: any, b: any, minConfidence: any): boolean;
-export declare function getAdjacentKeyPoints(keypoints: any, minConfidence: any): any[];
+export declare function eitherPointDoesntMeetConfidence(a: number, b: number, minConfidence: number): boolean;
+export declare function getAdjacentKeyPoints(keypoints: any, minConfidence: number): any[];
 export declare function getBoundingBox(keypoints: any): [number, number, number, number];
 export declare function scalePoses(poses: any, [height, width]: [any, any], [inputResolutionHeight, inputResolutionWidth]: [any, any]): Array<Body>;
 export declare class MaxHeap {