mirror of https://github.com/vladmandic/human
update typescript
parent 4369f94cf8
commit 7d5112efc6
@@ -9,8 +9,9 @@
 ## Changelog
 
-### **HEAD -> main** 2021/11/16 mandic00@live.com
+### **HEAD -> main** 2021/11/17 mandic00@live.com
 
+- optimize centernet
 - cache frequent tf constants
 - add extra face rotation prior to mesh
 - release 2.5.2
 
@@ -331,7 +331,7 @@ For more info, see [**Configuration Details**](https://github.com/vladmandic/hum
 
 <br><hr><br>
 
-`Human` library is written in `TypeScript` [4.4](https://www.typescriptlang.org/docs/handbook/intro.html)
+`Human` library is written in `TypeScript` [4.5](https://www.typescriptlang.org/docs/handbook/intro.html)
 Conforming to latest `JavaScript` [ECMAScript version 2021](https://262.ecma-international.org/) standard
 Build target is `JavaScript` [EMCAScript version 2018](https://262.ecma-international.org/11.0/)
 
TODO.md (20 lines changed)

@@ -43,13 +43,14 @@ MoveNet MultiPose model does not work with WASM backend due to missing F32 broad
 ## Pending Release Notes
 
 New:
-- new demo `demos/faceid` that utilizes multiple algorithm to validate input before triggering face recognition
-- new optional model `liveness`
+- New type definitions rollup
+- New demo `demos/faceid` that utilizes multiple algorithm to validate input before triggering face recognition
+- New optional model `liveness`
   checks if input appears to be a real-world live image or a recording
   best used together with `antispoofing` that checks if input appears to have a realistic face
-- new face masking option in `face.config.detector.mask`
+- New face masking option in `face.config.detector.mask`
   result is shading of face image outside of face area which is useful for increased sensitivity of other modules that rely on detected face as input
-- new face crop option in `face.config.detector.cropFactor`
+- New face crop option in `face.config.detector.cropFactor`
   result is user-definable fine-tuning for other modules that rely on detected face as input
 
 Other:

@@ -60,14 +61,3 @@ Other:
 - Documentation overhaul
 - Fix for optional `gear`, `ssrnet`, `mobilefacenet` modules
 - Fix for Firefox WebGPU compatibility issue
-
-```
-cp tfjs/tfjs.esm.d.ts types/lib/dist/
-node_modules/.bin/api-extractor run --local --verbose
-cp types/human.d.ts dist/human.esm-nobundle.d.ts
-cp types/human.d.ts dist/human.esm.d.ts
-cp types/human.d.ts dist/human.d.ts
-cp types/human.d.ts dist/human.node-gpu.d.ts
-cp types/human.d.ts dist/human.node.d.ts
-cp types/human.d.ts dist/human.node-wasm.d.ts
-```
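The release notes above name the new face options but show no usage. Below is a hedged TypeScript sketch of enabling them; the config paths follow the `config.face.*` layout visible in this commit's source diffs, while the `cropFactor` value and the `real`/`live` score fields on the face result are assumptions, not confirmed here.

```ts
import Human from '@vladmandic/human';

const human = new Human({
  face: {
    enabled: true,
    detector: {
      mask: true,      // shade the face crop outside of the detected face area
      cropFactor: 1.6, // assumed value: user-definable crop fine-tuning for downstream face modules
    },
    antispoof: { enabled: true }, // does the input contain a realistic face
    liveness: { enabled: true },  // does the input look like a live capture rather than a recording
  },
});

async function validate(input: HTMLVideoElement) {
  const result = await human.detect(input);
  for (const face of result.face) {
    // field names `real` and `live` for the antispoof and liveness scores are assumptions
    console.log({ real: face['real'], live: face['live'] });
  }
}
```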
@@ -1,3 +1,5 @@
+/// <reference types="offscreencanvas" />
+
 /** meta-function that performs draw for: canvas, face, body, hand */
 declare function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial<DrawOptions>): Promise<[void, void, void, void, void] | null>;
 

@@ -1208,17 +1210,7 @@ declare class Human {
     * @param userConfig - {@link Config}
     * @returns result - {@link Result}
     */
-    warmup(userConfig?: Partial<Config>): Promise<Result | {
-        error: any; /** Container for events dispatched by Human
-        * Possible events:
-        * - `create`: triggered when Human object is instantiated
-        * - `load`: triggered when models are loaded (explicitly or on-demand)
-        * - `image`: triggered when input image is processed
-        * - `result`: triggered when detection is complete
-        * - `warmup`: triggered when warmup is complete
-        * - `error`: triggered on some errors
-        */
-    }>;
+    warmup(userConfig?: Partial<Config>): Promise<Result>;
     /** Run detect with tensorflow profiling
     * - result object will contain total exeuction time information for top-20 kernels
     * - actual detection object can be accessed via `human.result`
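The typings above list the events dispatched by Human: `create`, `load`, `image`, `result`, `warmup`, and `error`. A hedged usage sketch follows; exposing them through an `events` EventTarget on the instance is assumed here and is not shown in this diff.

```ts
import Human from '@vladmandic/human';

const human = new Human();

// subscribe to events named in the typings; the `events` property is an assumption
human.events?.addEventListener('result', () => {
  console.log('detection complete', human.result); // human.result holds the last detection, per the typings
});
human.events?.addEventListener('error', (evt) => console.warn('human error', evt));
```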
@@ -1,3 +1,5 @@
+/// <reference types="offscreencanvas" />
+
 /** meta-function that performs draw for: canvas, face, body, hand */
 declare function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial<DrawOptions>): Promise<[void, void, void, void, void] | null>;
 

@@ -1208,17 +1210,7 @@ declare class Human {
     * @param userConfig - {@link Config}
     * @returns result - {@link Result}
     */
-    warmup(userConfig?: Partial<Config>): Promise<Result | {
-        error: any; /** Container for events dispatched by Human
-        * Possible events:
-        * - `create`: triggered when Human object is instantiated
-        * - `load`: triggered when models are loaded (explicitly or on-demand)
-        * - `image`: triggered when input image is processed
-        * - `result`: triggered when detection is complete
-        * - `warmup`: triggered when warmup is complete
-        * - `error`: triggered on some errors
-        */
-    }>;
+    warmup(userConfig?: Partial<Config>): Promise<Result>;
     /** Run detect with tensorflow profiling
     * - result object will contain total exeuction time information for top-20 kernels
     * - actual detection object can be accessed via `human.result`
@@ -10,13 +10,6 @@ var __getOwnPropNames = Object.getOwnPropertyNames;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
 var __markAsModule = (target) => __defProp(target, "__esModule", { value: true });
-var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
-  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
-}) : x)(function(x) {
-  if (typeof require !== "undefined")
-    return require.apply(this, arguments);
-  throw new Error('Dynamic require of "' + x + '" is not supported');
-});
 var __export = (target, all2) => {
   __markAsModule(target);
   for (var name in all2)

@@ -1348,19 +1341,6 @@ var Env = class {
   async updateCPU() {
     const cpu = { model: "", flags: [] };
     if (this.node && this.platform.startsWith("linux")) {
-      const fs = __require("fs");
-      try {
-        const data = fs.readFileSync("/proc/cpuinfo").toString();
-        for (const line of data.split("\n")) {
-          if (line.startsWith("model name")) {
-            cpu.model = line.match(/:(.*)/g)[0].replace(":", "").trim();
-          }
-          if (line.startsWith("flags")) {
-            cpu.flags = line.match(/:(.*)/g)[0].replace(":", "").trim().split(" ").sort();
-          }
-        }
-      } catch (e) {
-      }
     }
     if (!this["cpu"])
       Object.defineProperty(this, "cpu", { value: cpu });

@@ -12131,7 +12111,7 @@ var face2 = (res) => {
   const gestures = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 450) {
-      const zDiff = res[i].mesh[33][2] - res[i].mesh[263][2];
+      const zDiff = (res[i].mesh[33][2] || 0) - (res[i].mesh[263][2] || 0);
       const xDiff = res[i].mesh[33][0] - res[i].mesh[263][0];
       if (Math.abs(zDiff / xDiff) <= 0.15)
         gestures.push({ face: i, gesture: "facing center" });

@@ -12146,7 +12126,7 @@ var face2 = (res) => {
       const mouthOpen = Math.min(100, 500 * Math.abs(res[i].mesh[13][1] - res[i].mesh[14][1]) / Math.abs(res[i].mesh[10][1] - res[i].mesh[152][1]));
       if (mouthOpen > 10)
         gestures.push({ face: i, gesture: `mouth ${Math.trunc(mouthOpen)}% open` });
-      const chinDepth = res[i].mesh[152][2];
+      const chinDepth = res[i].mesh[152][2] || 0;
       if (Math.abs(chinDepth) > 10)
         gestures.push({ face: i, gesture: `head ${chinDepth < 0 ? "up" : "down"}` });
     }

@@ -12209,7 +12189,7 @@ var hand2 = (res) => {
       }
     }
     if (fingers && fingers.length > 0) {
-      const closest = fingers.reduce((best, a) => best.position[2] < a.position[2] ? best : a);
+      const closest = fingers.reduce((best, a) => (best.position[2] || 0) < (a.position[2] || 0) ? best : a);
       gestures.push({ hand: i, gesture: `${closest.name} forward` });
       const highest = fingers.reduce((best, a) => best.position[1] < a.position[1] ? best : a);
       gestures.push({ hand: i, gesture: `${highest.name} up` });

@@ -13211,6 +13191,8 @@ async function warmupCanvas(instance) {
     img = new Image();
   else if (env.Image)
     img = new env.Image();
+  else
+    return;
   img.onload = async () => {
     const canvas3 = canvas(img.naturalWidth, img.naturalHeight);
     if (!canvas3) {

@@ -13258,8 +13240,9 @@ async function warmup(instance, userConfig) {
   instance.state = "warmup";
   if (userConfig)
     instance.config = mergeDeep(instance.config, userConfig);
-  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === "none")
-    return { error: "null" };
+  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === "none") {
+    return { face: [], body: [], hand: [], gesture: [], object: [], performance: instance.performance, timestamp: now(), persons: [], error: null };
+  }
   let res;
   return new Promise(async (resolve) => {
     if (typeof createImageBitmap === "function")
File diff suppressed because one or more lines are too long
@@ -1,3 +1,5 @@
+/// <reference types="offscreencanvas" />
+
 /** meta-function that performs draw for: canvas, face, body, hand */
 declare function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial<DrawOptions>): Promise<[void, void, void, void, void] | null>;
 

@@ -1208,17 +1210,7 @@ declare class Human {
     * @param userConfig - {@link Config}
     * @returns result - {@link Result}
     */
-    warmup(userConfig?: Partial<Config>): Promise<Result | {
-        error: any; /** Container for events dispatched by Human
-        * Possible events:
-        * - `create`: triggered when Human object is instantiated
-        * - `load`: triggered when models are loaded (explicitly or on-demand)
-        * - `image`: triggered when input image is processed
-        * - `result`: triggered when detection is complete
-        * - `warmup`: triggered when warmup is complete
-        * - `error`: triggered on some errors
-        */
-    }>;
+    warmup(userConfig?: Partial<Config>): Promise<Result>;
     /** Run detect with tensorflow profiling
     * - result object will contain total exeuction time information for top-20 kernels
     * - actual detection object can be accessed via `human.result`
@@ -71732,19 +71732,6 @@ var Env = class {
   async updateCPU() {
     const cpu = { model: "", flags: [] };
     if (this.node && this.platform.startsWith("linux")) {
-      const fs = __require("fs");
-      try {
-        const data = fs.readFileSync("/proc/cpuinfo").toString();
-        for (const line of data.split("\n")) {
-          if (line.startsWith("model name")) {
-            cpu.model = line.match(/:(.*)/g)[0].replace(":", "").trim();
-          }
-          if (line.startsWith("flags")) {
-            cpu.flags = line.match(/:(.*)/g)[0].replace(":", "").trim().split(" ").sort();
-          }
-        }
-      } catch (e) {
-      }
     }
     if (!this["cpu"])
       Object.defineProperty(this, "cpu", { value: cpu });

@@ -82515,7 +82502,7 @@ var face2 = (res) => {
   const gestures = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 450) {
-      const zDiff = res[i].mesh[33][2] - res[i].mesh[263][2];
+      const zDiff = (res[i].mesh[33][2] || 0) - (res[i].mesh[263][2] || 0);
       const xDiff = res[i].mesh[33][0] - res[i].mesh[263][0];
       if (Math.abs(zDiff / xDiff) <= 0.15)
         gestures.push({ face: i, gesture: "facing center" });

@@ -82530,7 +82517,7 @@ var face2 = (res) => {
       const mouthOpen = Math.min(100, 500 * Math.abs(res[i].mesh[13][1] - res[i].mesh[14][1]) / Math.abs(res[i].mesh[10][1] - res[i].mesh[152][1]));
       if (mouthOpen > 10)
         gestures.push({ face: i, gesture: `mouth ${Math.trunc(mouthOpen)}% open` });
-      const chinDepth = res[i].mesh[152][2];
+      const chinDepth = res[i].mesh[152][2] || 0;
       if (Math.abs(chinDepth) > 10)
         gestures.push({ face: i, gesture: `head ${chinDepth < 0 ? "up" : "down"}` });
     }

@@ -82593,7 +82580,7 @@ var hand2 = (res) => {
       }
     }
     if (fingers && fingers.length > 0) {
-      const closest = fingers.reduce((best, a) => best.position[2] < a.position[2] ? best : a);
+      const closest = fingers.reduce((best, a) => (best.position[2] || 0) < (a.position[2] || 0) ? best : a);
       gestures.push({ hand: i, gesture: `${closest.name} forward` });
       const highest = fingers.reduce((best, a) => best.position[1] < a.position[1] ? best : a);
       gestures.push({ hand: i, gesture: `${highest.name} up` });

@@ -83595,6 +83582,8 @@ async function warmupCanvas(instance) {
     img = new Image();
   else if (env2.Image)
     img = new env2.Image();
+  else
+    return;
   img.onload = async () => {
     const canvas3 = canvas(img.naturalWidth, img.naturalHeight);
     if (!canvas3) {

@@ -83642,8 +83631,9 @@ async function warmup(instance, userConfig) {
   instance.state = "warmup";
   if (userConfig)
     instance.config = mergeDeep(instance.config, userConfig);
-  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === "none")
-    return { error: "null" };
+  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === "none") {
+    return { face: [], body: [], hand: [], gesture: [], object: [], performance: instance.performance, timestamp: now(), persons: [], error: null };
+  }
   let res;
   return new Promise(async (resolve) => {
     if (typeof createImageBitmap === "function")
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,3 +1,5 @@
+/// <reference types="offscreencanvas" />
+
 /** meta-function that performs draw for: canvas, face, body, hand */
 declare function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial<DrawOptions>): Promise<[void, void, void, void, void] | null>;
 

@@ -1208,17 +1210,7 @@ declare class Human {
     * @param userConfig - {@link Config}
     * @returns result - {@link Result}
     */
-    warmup(userConfig?: Partial<Config>): Promise<Result | {
-        error: any; /** Container for events dispatched by Human
-        * Possible events:
-        * - `create`: triggered when Human object is instantiated
-        * - `load`: triggered when models are loaded (explicitly or on-demand)
-        * - `image`: triggered when input image is processed
-        * - `result`: triggered when detection is complete
-        * - `warmup`: triggered when warmup is complete
-        * - `error`: triggered on some errors
-        */
-    }>;
+    warmup(userConfig?: Partial<Config>): Promise<Result>;
     /** Run detect with tensorflow profiling
     * - result object will contain total exeuction time information for top-20 kernels
     * - actual detection object can be accessed via `human.result`
@@ -1358,19 +1358,6 @@ var Env = class {
   async updateCPU() {
     const cpu = { model: "", flags: [] };
     if (this.node && this.platform.startsWith("linux")) {
-      const fs = require("fs");
-      try {
-        const data = fs.readFileSync("/proc/cpuinfo").toString();
-        for (const line of data.split("\n")) {
-          if (line.startsWith("model name")) {
-            cpu.model = line.match(/:(.*)/g)[0].replace(":", "").trim();
-          }
-          if (line.startsWith("flags")) {
-            cpu.flags = line.match(/:(.*)/g)[0].replace(":", "").trim().split(" ").sort();
-          }
-        }
-      } catch (e) {
-      }
     }
     if (!this["cpu"])
       Object.defineProperty(this, "cpu", { value: cpu });

@@ -12200,7 +12187,7 @@ var face2 = (res) => {
   const gestures = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 450) {
-      const zDiff = res[i].mesh[33][2] - res[i].mesh[263][2];
+      const zDiff = (res[i].mesh[33][2] || 0) - (res[i].mesh[263][2] || 0);
       const xDiff = res[i].mesh[33][0] - res[i].mesh[263][0];
       if (Math.abs(zDiff / xDiff) <= 0.15)
         gestures.push({ face: i, gesture: "facing center" });

@@ -12215,7 +12202,7 @@ var face2 = (res) => {
       const mouthOpen = Math.min(100, 500 * Math.abs(res[i].mesh[13][1] - res[i].mesh[14][1]) / Math.abs(res[i].mesh[10][1] - res[i].mesh[152][1]));
       if (mouthOpen > 10)
         gestures.push({ face: i, gesture: `mouth ${Math.trunc(mouthOpen)}% open` });
-      const chinDepth = res[i].mesh[152][2];
+      const chinDepth = res[i].mesh[152][2] || 0;
       if (Math.abs(chinDepth) > 10)
         gestures.push({ face: i, gesture: `head ${chinDepth < 0 ? "up" : "down"}` });
     }

@@ -12278,7 +12265,7 @@ var hand2 = (res) => {
       }
     }
     if (fingers && fingers.length > 0) {
-      const closest = fingers.reduce((best, a) => best.position[2] < a.position[2] ? best : a);
+      const closest = fingers.reduce((best, a) => (best.position[2] || 0) < (a.position[2] || 0) ? best : a);
       gestures.push({ hand: i, gesture: `${closest.name} forward` });
       const highest = fingers.reduce((best, a) => best.position[1] < a.position[1] ? best : a);
       gestures.push({ hand: i, gesture: `${highest.name} up` });

@@ -13281,6 +13268,8 @@ async function warmupCanvas(instance) {
     img = new Image();
   else if (env.Image)
     img = new env.Image();
+  else
+    return;
   img.onload = async () => {
     const canvas3 = canvas(img.naturalWidth, img.naturalHeight);
     if (!canvas3) {

@@ -13328,8 +13317,9 @@ async function warmup(instance, userConfig) {
   instance.state = "warmup";
   if (userConfig)
     instance.config = mergeDeep(instance.config, userConfig);
-  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === "none")
-    return { error: "null" };
+  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === "none") {
+    return { face: [], body: [], hand: [], gesture: [], object: [], performance: instance.performance, timestamp: now(), persons: [], error: null };
+  }
   let res;
   return new Promise(async (resolve) => {
     if (typeof createImageBitmap === "function")
@@ -1,3 +1,5 @@
+/// <reference types="offscreencanvas" />
+
 /** meta-function that performs draw for: canvas, face, body, hand */
 declare function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial<DrawOptions>): Promise<[void, void, void, void, void] | null>;
 

@@ -1208,17 +1210,7 @@ declare class Human {
     * @param userConfig - {@link Config}
     * @returns result - {@link Result}
     */
-    warmup(userConfig?: Partial<Config>): Promise<Result | {
-        error: any; /** Container for events dispatched by Human
-        * Possible events:
-        * - `create`: triggered when Human object is instantiated
-        * - `load`: triggered when models are loaded (explicitly or on-demand)
-        * - `image`: triggered when input image is processed
-        * - `result`: triggered when detection is complete
-        * - `warmup`: triggered when warmup is complete
-        * - `error`: triggered on some errors
-        */
-    }>;
+    warmup(userConfig?: Partial<Config>): Promise<Result>;
     /** Run detect with tensorflow profiling
     * - result object will contain total exeuction time information for top-20 kernels
     * - actual detection object can be accessed via `human.result`
@@ -1359,19 +1359,6 @@ var Env = class {
   async updateCPU() {
     const cpu = { model: "", flags: [] };
     if (this.node && this.platform.startsWith("linux")) {
-      const fs = require("fs");
-      try {
-        const data = fs.readFileSync("/proc/cpuinfo").toString();
-        for (const line of data.split("\n")) {
-          if (line.startsWith("model name")) {
-            cpu.model = line.match(/:(.*)/g)[0].replace(":", "").trim();
-          }
-          if (line.startsWith("flags")) {
-            cpu.flags = line.match(/:(.*)/g)[0].replace(":", "").trim().split(" ").sort();
-          }
-        }
-      } catch (e) {
-      }
     }
     if (!this["cpu"])
       Object.defineProperty(this, "cpu", { value: cpu });

@@ -12201,7 +12188,7 @@ var face2 = (res) => {
   const gestures = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 450) {
-      const zDiff = res[i].mesh[33][2] - res[i].mesh[263][2];
+      const zDiff = (res[i].mesh[33][2] || 0) - (res[i].mesh[263][2] || 0);
       const xDiff = res[i].mesh[33][0] - res[i].mesh[263][0];
       if (Math.abs(zDiff / xDiff) <= 0.15)
         gestures.push({ face: i, gesture: "facing center" });

@@ -12216,7 +12203,7 @@ var face2 = (res) => {
       const mouthOpen = Math.min(100, 500 * Math.abs(res[i].mesh[13][1] - res[i].mesh[14][1]) / Math.abs(res[i].mesh[10][1] - res[i].mesh[152][1]));
       if (mouthOpen > 10)
         gestures.push({ face: i, gesture: `mouth ${Math.trunc(mouthOpen)}% open` });
-      const chinDepth = res[i].mesh[152][2];
+      const chinDepth = res[i].mesh[152][2] || 0;
       if (Math.abs(chinDepth) > 10)
         gestures.push({ face: i, gesture: `head ${chinDepth < 0 ? "up" : "down"}` });
     }

@@ -12279,7 +12266,7 @@ var hand2 = (res) => {
       }
     }
     if (fingers && fingers.length > 0) {
-      const closest = fingers.reduce((best, a) => best.position[2] < a.position[2] ? best : a);
+      const closest = fingers.reduce((best, a) => (best.position[2] || 0) < (a.position[2] || 0) ? best : a);
       gestures.push({ hand: i, gesture: `${closest.name} forward` });
       const highest = fingers.reduce((best, a) => best.position[1] < a.position[1] ? best : a);
       gestures.push({ hand: i, gesture: `${highest.name} up` });

@@ -13282,6 +13269,8 @@ async function warmupCanvas(instance) {
     img = new Image();
   else if (env.Image)
     img = new env.Image();
+  else
+    return;
   img.onload = async () => {
     const canvas3 = canvas(img.naturalWidth, img.naturalHeight);
     if (!canvas3) {

@@ -13329,8 +13318,9 @@ async function warmup(instance, userConfig) {
   instance.state = "warmup";
   if (userConfig)
     instance.config = mergeDeep(instance.config, userConfig);
-  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === "none")
-    return { error: "null" };
+  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === "none") {
+    return { face: [], body: [], hand: [], gesture: [], object: [], performance: instance.performance, timestamp: now(), persons: [], error: null };
+  }
   let res;
   return new Promise(async (resolve) => {
     if (typeof createImageBitmap === "function")
@@ -1,3 +1,5 @@
+/// <reference types="offscreencanvas" />
+
 /** meta-function that performs draw for: canvas, face, body, hand */
 declare function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial<DrawOptions>): Promise<[void, void, void, void, void] | null>;
 

@@ -1208,17 +1210,7 @@ declare class Human {
     * @param userConfig - {@link Config}
     * @returns result - {@link Result}
     */
-    warmup(userConfig?: Partial<Config>): Promise<Result | {
-        error: any; /** Container for events dispatched by Human
-        * Possible events:
-        * - `create`: triggered when Human object is instantiated
-        * - `load`: triggered when models are loaded (explicitly or on-demand)
-        * - `image`: triggered when input image is processed
-        * - `result`: triggered when detection is complete
-        * - `warmup`: triggered when warmup is complete
-        * - `error`: triggered on some errors
-        */
-    }>;
+    warmup(userConfig?: Partial<Config>): Promise<Result>;
     /** Run detect with tensorflow profiling
     * - result object will contain total exeuction time information for top-20 kernels
     * - actual detection object can be accessed via `human.result`
@@ -1358,19 +1358,6 @@ var Env = class {
   async updateCPU() {
     const cpu = { model: "", flags: [] };
     if (this.node && this.platform.startsWith("linux")) {
-      const fs = require("fs");
-      try {
-        const data = fs.readFileSync("/proc/cpuinfo").toString();
-        for (const line of data.split("\n")) {
-          if (line.startsWith("model name")) {
-            cpu.model = line.match(/:(.*)/g)[0].replace(":", "").trim();
-          }
-          if (line.startsWith("flags")) {
-            cpu.flags = line.match(/:(.*)/g)[0].replace(":", "").trim().split(" ").sort();
-          }
-        }
-      } catch (e) {
-      }
     }
     if (!this["cpu"])
       Object.defineProperty(this, "cpu", { value: cpu });

@@ -12200,7 +12187,7 @@ var face2 = (res) => {
   const gestures = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 450) {
-      const zDiff = res[i].mesh[33][2] - res[i].mesh[263][2];
+      const zDiff = (res[i].mesh[33][2] || 0) - (res[i].mesh[263][2] || 0);
       const xDiff = res[i].mesh[33][0] - res[i].mesh[263][0];
       if (Math.abs(zDiff / xDiff) <= 0.15)
         gestures.push({ face: i, gesture: "facing center" });

@@ -12215,7 +12202,7 @@ var face2 = (res) => {
       const mouthOpen = Math.min(100, 500 * Math.abs(res[i].mesh[13][1] - res[i].mesh[14][1]) / Math.abs(res[i].mesh[10][1] - res[i].mesh[152][1]));
       if (mouthOpen > 10)
         gestures.push({ face: i, gesture: `mouth ${Math.trunc(mouthOpen)}% open` });
-      const chinDepth = res[i].mesh[152][2];
+      const chinDepth = res[i].mesh[152][2] || 0;
       if (Math.abs(chinDepth) > 10)
         gestures.push({ face: i, gesture: `head ${chinDepth < 0 ? "up" : "down"}` });
     }

@@ -12278,7 +12265,7 @@ var hand2 = (res) => {
       }
     }
     if (fingers && fingers.length > 0) {
-      const closest = fingers.reduce((best, a) => best.position[2] < a.position[2] ? best : a);
+      const closest = fingers.reduce((best, a) => (best.position[2] || 0) < (a.position[2] || 0) ? best : a);
       gestures.push({ hand: i, gesture: `${closest.name} forward` });
       const highest = fingers.reduce((best, a) => best.position[1] < a.position[1] ? best : a);
       gestures.push({ hand: i, gesture: `${highest.name} up` });

@@ -13281,6 +13268,8 @@ async function warmupCanvas(instance) {
     img = new Image();
   else if (env.Image)
     img = new env.Image();
+  else
+    return;
   img.onload = async () => {
     const canvas3 = canvas(img.naturalWidth, img.naturalHeight);
     if (!canvas3) {

@@ -13328,8 +13317,9 @@ async function warmup(instance, userConfig) {
   instance.state = "warmup";
   if (userConfig)
     instance.config = mergeDeep(instance.config, userConfig);
-  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === "none")
-    return { error: "null" };
+  if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === "none") {
+    return { face: [], body: [], hand: [], gesture: [], object: [], performance: instance.performance, timestamp: now(), persons: [], error: null };
+  }
   let res;
   return new Promise(async (resolve) => {
     if (typeof createImageBitmap === "function")
@@ -54,6 +54,7 @@
   ],
   "devDependencies": {
     "@microsoft/api-extractor": "^7.18.19",
+    "@tensorflow/tfjs": "^3.11.0",
     "@tensorflow/tfjs-backend-cpu": "^3.11.0",
     "@tensorflow/tfjs-backend-wasm": "^3.11.0",
     "@tensorflow/tfjs-backend-webgl": "^3.11.0",

@@ -62,9 +63,8 @@
     "@tensorflow/tfjs-core": "^3.11.0",
     "@tensorflow/tfjs-data": "^3.11.0",
     "@tensorflow/tfjs-layers": "^3.11.0",
-    "@tensorflow/tfjs-node-gpu": "^3.11.0",
     "@tensorflow/tfjs-node": "^3.11.0",
-    "@tensorflow/tfjs": "^3.11.0",
+    "@tensorflow/tfjs-node-gpu": "^3.11.0",
     "@types/node": "^16.11.7",
     "@typescript-eslint/eslint-plugin": "^5.4.0",
     "@typescript-eslint/parser": "^5.4.0",

@@ -74,18 +74,18 @@
     "canvas": "^2.8.0",
     "dayjs": "^1.10.7",
     "esbuild": "^0.13.14",
+    "eslint": "8.2.0",
     "eslint-config-airbnb-base": "^15.0.0",
     "eslint-plugin-html": "^6.2.0",
     "eslint-plugin-import": "^2.25.3",
     "eslint-plugin-json": "^3.1.0",
     "eslint-plugin-node": "^11.1.0",
     "eslint-plugin-promise": "^5.1.1",
-    "eslint": "8.2.0",
     "node-fetch": "^3.1.0",
     "rimraf": "^3.0.2",
     "seedrandom": "^3.0.5",
     "tslib": "^2.3.1",
     "typedoc": "0.22.9",
-    "typescript": "4.4.4"
+    "typescript": "4.5.2"
   }
 }
@@ -24,7 +24,7 @@ export async function load(config: Config): Promise<GraphModel> {
   return model;
 }
 
-export async function predict(image: Tensor, config: Config, idx, count): Promise<number> {
+export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<number> {
   if (!model) return 0;
   const skipTime = (config.face.antispoof?.skipTime || 0) > (now() - lastTime);
   const skipFrame = skipped < (config.face.antispoof?.skipFrames || 0);
@@ -24,7 +24,7 @@ export async function load(config: Config): Promise<GraphModel> {
   return model;
 }
 
-export async function predict(image: Tensor, config: Config, idx, count): Promise<number> {
+export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<number> {
   if (!model) return 0;
   const skipTime = (config.face.liveness?.skipTime || 0) > (now() - lastTime);
   const skipFrame = skipped < (config.face.liveness?.skipFrames || 0);
@@ -28,7 +28,7 @@ export async function load(config: Config): Promise<GraphModel> {
   return model;
 }
 
-export async function predict(image: Tensor, config: Config, idx, count): Promise<Array<{ score: number, emotion: string }>> {
+export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<Array<{ score: number, emotion: string }>> {
   if (!model) return [];
   const skipFrame = skipped < (config.face.emotion?.skipFrames || 0);
   const skipTime = (config.face.emotion?.skipTime || 0) > (now() - lastTime);
@@ -31,7 +31,7 @@ export async function load(config: Config) {
 }
 
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
-export async function predict(image: Tensor, config: Config, idx, count): Promise<{ age: number }> {
+export async function predict(image: Tensor, config: Config, idx: number, count: number): Promise<{ age: number }> {
   if (!model) return { age: 0 };
   const skipFrame = skipped < (config.face['ssrnet']?.skipFrames || 0);
   const skipTime = (config.face['ssrnet']?.skipTime || 0) > (now() - lastTime);
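The four `predict()` hunks above share the same frame-skip pattern: `skipTime` checks a configured time budget against the last real inference, and `skipFrames` counts frames since then. A minimal sketch of that pattern follows; the caching behavior (returning the previous score while both budgets hold) and the generic `runInference` parameter are assumptions made for illustration, not the module's actual layout.

```ts
let lastTime = 0;                              // timestamp of the last real inference
let skipped = Number.MAX_SAFE_INTEGER;         // frames processed since the last real inference
let lastResult = 0;                            // cached score returned while skipping

const now = () => Date.now();

export async function predict(
  runInference: () => Promise<number>,
  config: { skipTime?: number, skipFrames?: number },
): Promise<number> {
  const skipTime = (config.skipTime || 0) > (now() - lastTime); // time budget not yet exhausted
  const skipFrame = skipped < (config.skipFrames || 0);         // frame budget not yet exhausted
  if (skipTime && skipFrame) {
    skipped++;
    return lastResult;                         // reuse the cached result
  }
  skipped = 0;
  lastTime = now();
  lastResult = await runInference();           // run the model for real
  return lastResult;
}
```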
@@ -2,7 +2,7 @@
  * Gesture detection algorithm
  */
 
-import type { GestureResult } from '../result';
+import type { GestureResult, BodyResult, FaceResult, HandResult, Point } from '../result';
 import * as fingerPose from '../hand/fingerpose';
 
 /** face gesture type */

@@ -31,7 +31,7 @@ export type HandGesture =
   | 'victory'
   | 'thumbs up';
 
-export const body = (res): GestureResult[] => {
+export const body = (res: BodyResult[]): GestureResult[] => {
   if (!res) return [];
   const gestures: Array<{ body: number, gesture: BodyGesture }> = [];
   for (let i = 0; i < res.length; i++) {

@@ -53,12 +53,12 @@ export const body = (res): GestureResult[] => {
   return gestures;
 };
 
-export const face = (res): GestureResult[] => {
+export const face = (res: FaceResult[]): GestureResult[] => {
   if (!res) return [];
   const gestures: Array<{ face: number, gesture: FaceGesture }> = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 450) {
-      const zDiff = res[i].mesh[33][2] - res[i].mesh[263][2];
+      const zDiff = (res[i].mesh[33][2] || 0) - (res[i].mesh[263][2] || 0);
       const xDiff = res[i].mesh[33][0] - res[i].mesh[263][0];
       if (Math.abs(zDiff / xDiff) <= 0.15) gestures.push({ face: i, gesture: 'facing center' });
       else gestures.push({ face: i, gesture: `facing ${zDiff < 0 ? 'left' : 'right'}` });

@@ -68,14 +68,14 @@ export const face = (res): GestureResult[] => {
       if (openRight < 0.2) gestures.push({ face: i, gesture: 'blink right eye' });
       const mouthOpen = Math.min(100, 500 * Math.abs(res[i].mesh[13][1] - res[i].mesh[14][1]) / Math.abs(res[i].mesh[10][1] - res[i].mesh[152][1]));
       if (mouthOpen > 10) gestures.push({ face: i, gesture: `mouth ${Math.trunc(mouthOpen)}% open` });
-      const chinDepth = res[i].mesh[152][2];
+      const chinDepth = res[i].mesh[152][2] || 0;
      if (Math.abs(chinDepth) > 10) gestures.push({ face: i, gesture: `head ${chinDepth < 0 ? 'up' : 'down'}` });
     }
   }
   return gestures;
 };
 
-export const iris = (res): GestureResult[] => {
+export const iris = (res: FaceResult[]): GestureResult[] => {
   if (!res) return [];
   const gestures: Array<{ iris: number, gesture: IrisGesture }> = [];
   for (let i = 0; i < res.length; i++) {

@@ -116,18 +116,18 @@ export const iris = (res): GestureResult[] => {
   return gestures;
 };
 
-export const hand = (res): GestureResult[] => {
+export const hand = (res: HandResult[]): GestureResult[] => {
   if (!res) return [];
   const gestures: Array<{ hand: number, gesture: HandGesture }> = [];
   for (let i = 0; i < res.length; i++) {
-    const fingers: Array<{ name: string, position: number }> = [];
+    const fingers: Array<{ name: string, position: Point }> = [];
     if (res[i]['annotations']) {
       for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
         if (finger !== 'palmBase' && Array.isArray(pos) && pos[0]) fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
       }
     }
     if (fingers && fingers.length > 0) {
-      const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));
+      const closest = fingers.reduce((best, a) => ((best.position[2] || 0) < (a.position[2] || 0) ? best : a));
       gestures.push({ hand: i, gesture: `${closest.name} forward` as HandGesture });
       const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));
       gestures.push({ hand: i, gesture: `${highest.name} up` as HandGesture });
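The gesture hunks above tighten the types: keypoints become `Point` tuples whose depth (z) component may be missing, which is why the comparisons now guard with `|| 0`. A small self-contained illustration of why the guard matters; the `[x, y, z?]` shape of `Point` is assumed here to match the library, and the coordinate values are made up.

```ts
type Point = [number, number, number?]; // assumed shape: x, y, optional z

const noseTip: Point = [120, 88];       // z missing for a 2D-only keypoint
const chin: Point = [122, 140, -14];

// without a guard, the missing z is undefined and the subtraction yields NaN
const unsafe = (chin[2] as number) - (noseTip[2] as number); // NaN
// with the guard used in the diff, a missing z falls back to 0
const safe = (chin[2] || 0) - (noseTip[2] || 0);             // -14

console.log({ unsafe, safe });
```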
@@ -347,8 +347,8 @@ export class Human {
       if (kernels[kernel.name]) kernels[kernel.name] += kernel.kernelTimeMs;
       else kernels[kernel.name] = kernel.kernelTimeMs;
     }
-    const kernelArr: Array<{ name, ms }> = [];
-    Object.entries(kernels).forEach((key) => kernelArr.push({ name: key[0], ms: key[1] })); // convert to array
+    const kernelArr: Array<{ name: string, ms: number }> = [];
+    Object.entries(kernels).forEach((key) => kernelArr.push({ name: key[0], ms: key[1] as unknown as number })); // convert to array
     kernelArr.sort((a, b) => b.ms - a.ms); // sort
     kernelArr.length = 20; // crop
     const res: Record<string, number> = {};

@@ -497,7 +497,7 @@ export class Human {
     let gestureRes: GestureResult[] = [];
     if (this.config.gesture.enabled) {
       timeStamp = now();
-      gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
+      gestureRes = [...gesture.face(faceRes as FaceResult[]), ...gesture.body(bodyRes as BodyResult[]), ...gesture.hand(handRes as HandResult[]), ...gesture.iris(faceRes as FaceResult[])];
       if (!this.config.async) this.performance.gesture = this.env.perfadd ? (this.performance.gesture || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
       else if (this.performance.gesture) delete this.performance.gesture;
     }
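The first hunk above aggregates per-kernel execution times and keeps the 20 slowest, per the typings' description of profiling. A hedged usage sketch; it assumes `profile()` accepts the same input as `detect()` and resolves to a kernel-name-to-milliseconds map, as the `Record<string, number>` in the diff suggests.

```ts
import Human from '@vladmandic/human';

const human = new Human();

async function profileRun(input: HTMLImageElement) {
  // run detection with tfjs kernel profiling enabled (input parameter assumed)
  const kernels: Record<string, number> = await human.profile(input);
  // assumed to map kernel name -> total execution time in ms, already cropped to the top 20
  for (const [name, ms] of Object.entries(kernels)) console.log(`${name}: ${ms.toFixed(2)} ms`);
  // the actual detection result of the profiled run is available separately, per the typings
  console.log(human.result);
}
```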
@@ -4,16 +4,16 @@
  * TensorFlow Tensor type
  * @external
  */
-export { Tensor, TensorLike, Rank } from '@tensorflow/tfjs-core/dist/index';
+export type { Tensor, TensorLike, Rank } from '@tensorflow/tfjs-core/dist/index';
 
 /**
  * TensorFlow GraphModel type
  * @external
  */
-export { GraphModel } from '@tensorflow/tfjs-converter/dist/index';
+export type { GraphModel } from '@tensorflow/tfjs-converter/dist/index';
 
 /** Tensorflow Long type
  * @external long
  */
 // eslint-disable-next-line node/no-missing-import
-export { Long } from 'long';
+export type { Long } from 'long';
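The hunk above converts the tfjs re-exports to type-only form; type-only exports are erased from the emitted JavaScript, so no runtime dependency on the re-exported module is created, and TypeScript 4.5 (which this commit adopts) is stricter and more ergonomic about them. A generic illustration with placeholder module and symbol names:

```ts
// erased at compile time: no runtime import of './types-module' is emitted
export type { SomeType } from './types-module';

// kept at runtime: the emitted JavaScript still re-exports the value
export { someValue } from './values-module';
```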
@ -149,6 +149,7 @@ export class Env {
|
||||||
const cpu = { model: '', flags: [] };
|
const cpu = { model: '', flags: [] };
|
||||||
if (this.node && this.platform.startsWith('linux')) {
|
if (this.node && this.platform.startsWith('linux')) {
|
||||||
// eslint-disable-next-line global-require
|
// eslint-disable-next-line global-require
|
||||||
|
/*
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
try {
|
try {
|
||||||
const data = fs.readFileSync('/proc/cpuinfo').toString();
|
const data = fs.readFileSync('/proc/cpuinfo').toString();
|
||||||
|
@@ -160,7 +161,8 @@ export class Env {
 cpu.flags = line.match(/:(.*)/g)[0].replace(':', '').trim().split(' ').sort();
 }
 }
-} catch { /**/ }
+} catch { }
+*/
 }
 if (!this['cpu']) Object.defineProperty(this, 'cpu', { value: cpu });
 else this['cpu'] = cpu;
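The two hunks above wrap the Linux CPU-detection block in a comment, effectively disabling it. For reference, a standalone sketch of the same idea: read `/proc/cpuinfo` on Linux and pull out the model name and flags, swallowing errors on platforms where the file is absent (assumes a Node.js runtime; the helper name is illustrative):

```
import * as fs from 'fs';

function readCpuInfo(): { model: string, flags: string[] } {
  const cpu = { model: '', flags: [] as string[] };
  try {
    const data = fs.readFileSync('/proc/cpuinfo').toString();
    for (const line of data.split('\n')) {
      if (line.startsWith('model name') && !cpu.model) cpu.model = line.split(':')[1].trim();
      if (line.startsWith('flags') && cpu.flags.length === 0) cpu.flags = line.split(':')[1].trim().split(' ').sort();
    }
  } catch { /* /proc/cpuinfo is not available outside linux */ }
  return cpu;
}
```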
@@ -65,7 +65,7 @@ export const minmax = (data: Array<number>) => data.reduce((acc: Array<number>,
 }, []);

 // helper function: async wait
-export async function wait(time) {
+export async function wait(time: number) {
 const waiting = new Promise((resolve) => { setTimeout(() => resolve(true), time); });
 await waiting;
 }
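The `wait` helper above gains a typed `time` parameter; its behavior is unchanged. A self-contained usage sketch (the 100 ms delay is just an example value):

```
// Same shape as the helper above, inlined so the snippet runs on its own
async function wait(time: number) {
  await new Promise((resolve) => { setTimeout(() => resolve(true), time); });
}

(async () => {
  await wait(100); // pause for 100 ms
  console.log('continued after 100 ms');
})();
```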
@@ -48,10 +48,11 @@ async function warmupCanvas(instance: Human) {
 src = null;
 }
 // src = encodeURI('../assets/human-sample-upper.jpg');
-let img;
+let img: HTMLImageElement;
 if (typeof Image !== 'undefined') img = new Image();
 // @ts-ignore env.image is an external monkey-patch
 else if (env.Image) img = new env.Image();
+else return;
 img.onload = async () => {
 const canvas = image.canvas(img.naturalWidth, img.naturalHeight);
 if (!canvas) {
@@ -103,11 +104,13 @@ async function warmupNode(instance: Human) {
 * - only used for `webgl` and `humangl` backends
 * @param userConfig?: Config
 */
-export async function warmup(instance: Human, userConfig?: Partial<Config>): Promise<Result | { error }> {
+export async function warmup(instance: Human, userConfig?: Partial<Config>): Promise<Result> {
 const t0 = now();
 instance.state = 'warmup';
 if (userConfig) instance.config = mergeDeep(instance.config, userConfig) as Config;
-if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === 'none') return { error: 'null' };
+if (!instance.config.warmup || instance.config.warmup.length === 0 || instance.config.warmup === 'none') {
+return { face: [], body: [], hand: [], gesture: [], object: [], performance: instance.performance, timestamp: now(), persons: [], error: null };
+}
 let res;
 return new Promise(async (resolve) => {
 if (typeof createImageBitmap === 'function') res = await warmupBitmap(instance);
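With the change above, `warmup()` always resolves to a regular `Result` shape: when warmup is disabled it returns empty result arrays with the `error` field carried on the result itself, rather than a bare `{ error }` object, so callers no longer need a special case. A caller-side sketch (import path and config values are illustrative):

```
import { Human } from '@vladmandic/human'; // adjust to the dist bundle for your target

const human = new Human({ warmup: 'face' });
human.warmup().then((res) => {
  if (res.error) console.warn('warmup skipped or failed:', res.error);
  else console.log('warmup performance:', res.performance);
});
```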
@@ -0,0 +1,25 @@
+2021-11-17 16:48:30 INFO:  Application: {"name":"@vladmandic/human","version":"2.5.2"}
+2021-11-17 16:48:30 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
+2021-11-17 16:48:30 INFO:  Toolchain: {"build":"0.6.4","esbuild":"0.13.14","typescript":"4.5.2","typedoc":"0.22.9","eslint":"8.2.0"}
+2021-11-17 16:48:30 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
+2021-11-17 16:48:30 STATE: Clean: {"locations":["dist/*","types/lib/*","typedoc/*"]}
+2021-11-17 16:48:30 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
+2021-11-17 16:48:30 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":62,"inputBytes":546864,"outputBytes":462510}
+2021-11-17 16:48:30 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
+2021-11-17 16:48:31 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":62,"inputBytes":546872,"outputBytes":462514}
+2021-11-17 16:48:31 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
+2021-11-17 16:48:31 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":62,"inputBytes":546939,"outputBytes":462586}
+2021-11-17 16:48:31 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
+2021-11-17 16:48:31 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2326,"outputBytes":912}
+2021-11-17 16:48:31 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":62,"inputBytes":546501,"outputBytes":464266}
+2021-11-17 16:48:31 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":307,"outputBytes":2500732}
+2021-11-17 16:48:32 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":62,"inputBytes":3046321,"outputBytes":1625475}
+2021-11-17 16:48:32 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":62,"inputBytes":3046321,"outputBytes":2970799}
+2021-11-17 16:48:54 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":108}
+2021-11-17 16:48:54 WARN:  You are running with an unsupported TypeScript version! TypeDoc supports 4.0, 4.1, 4.2, 4.3, 4.4
+2021-11-17 16:49:03 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":52,"generated":true}
+2021-11-17 16:49:03 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5811,"outputBytes":3821}
+2021-11-17 16:49:03 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":15166,"outputBytes":11786}
+2021-11-17 16:49:43 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":93,"errors":0,"warnings":0}
+2021-11-17 16:49:44 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
+2021-11-17 16:49:44 INFO:  Done...
@@ -6,7 +6,7 @@
 "outDir": "types",
 "baseUrl": "./",
 "paths": { "tslib": ["./node_modules/tslib/tslib.d.ts"] },
-"lib": ["esnext", "dom", "webworker"],
+"lib": ["esnext", "dom"],
 "allowJs": true,
 "allowSyntheticDefaultImports": false,
 "allowUnreachableCode": false,
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,3 +1,5 @@
+/// <reference types="offscreencanvas" />
+
 /** meta-function that performs draw for: canvas, face, body, hand */
 declare function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial<DrawOptions>): Promise<[void, void, void, void, void] | null>;
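The rolled-up type definitions now lead with a triple-slash reference to the `offscreencanvas` typings, which lines up with `webworker` being dropped from `lib` in the tsconfig hunk above. A minimal sketch of how such a directive is consumed, assuming `@types/offscreencanvas` is installed and that `AnyCanvas` is a union along these lines:

```
/// <reference types="offscreencanvas" />

// OffscreenCanvas becomes available in type positions even though
// the project's "lib" setting no longer includes "webworker"
export type AnyCanvas = HTMLCanvasElement | OffscreenCanvas;

// Example of a signature that accepts either canvas flavor
export declare function draw(canvas: AnyCanvas): void;
```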
@@ -1208,17 +1210,7 @@ declare class Human {
 * @param userConfig - {@link Config}
 * @returns result - {@link Result}
 */
-warmup(userConfig?: Partial<Config>): Promise<Result | {
-error: any; /** Container for events dispatched by Human
-* Possible events:
-* - `create`: triggered when Human object is instantiated
-* - `load`: triggered when models are loaded (explicitly or on-demand)
-* - `image`: triggered when input image is processed
-* - `result`: triggered when detection is complete
-* - `warmup`: triggered when warmup is complete
-* - `error`: triggered on some errors
-*/
-}>;
+warmup(userConfig?: Partial<Config>): Promise<Result>;
 /** Run detect with tensorflow profiling
 * - result object will contain total exeuction time information for top-20 kernels
 * - actual detection object can be accessed via `human.result`
wiki
@@ -1 +1 @@
-Subproject commit 799273cb94e64d08280479435cc11daedabd5a60
+Subproject commit 7b35db4dca9ba3a41b62ec11257a474b2d12c132