mirror of https://github.com/vladmandic/human

refactor face classes

parent 245ecaf710
commit 61633928cf
@@ -1,6 +1,6 @@
 # @vladmandic/human
 
-Version: **1.1.9**
+Version: **1.1.10**
 Description: **Human: AI-powered 3D Face Detection, Face Embedding & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition**
 
 Author: **Vladimir Mandic <mandic00@live.com>**
@@ -9,8 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 
 ## Changelog
 
 ### **HEAD -> main** 2021/03/18 mandic00@live.com
+### **1.1.10** 2021/03/18 mandic00@live.com
 
 - cleanup
 - redefine tensor
 - enforce types
 - regen type declarations
@@ -12,8 +12,8 @@ const userConfig = {
     mesh: { enabled: true },
     embedding: { enabled: true },
     iris: { enabled: false },
-    age: { enabled: false },
-    gender: { enabled: false },
+    age: { enabled: true },
+    gender: { enabled: true },
     emotion: { enabled: false },
   },
   hand: { enabled: false },
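For context (illustrative, not part of the commit): in the demo this `userConfig` object is passed as the second argument to `human.detect()`, so the age and gender toggles above directly enable those per-face predictions. A hedged sketch, with the import path and `imageElement` assumed:

```ts
// Hypothetical usage sketch, not part of the commit: the demo passes a user configuration
// like the one above as the second argument of detect(), overriding the library defaults.
import Human from '../dist/human.esm.js'; // path assumed

const human = new Human();
const result = await human.detect(imageElement, userConfig); // imageElement: an <img> or canvas
console.log(result.face.length, 'faces detected');
```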
@@ -87,6 +87,7 @@ function faces(index, res, fileName) {
   // mouse click on any face canvas triggers analysis
   canvas.addEventListener('click', (evt) => {
     log('Select:', 'Image:', evt.target.tag.sample, 'Face:', evt.target.tag.face, all[evt.target.tag.sample][evt.target.tag.face]);
+    log('Select:', 'Gender:', all[evt.target.tag.sample][evt.target.tag.face].gender);
     analyze(all[evt.target.tag.sample][evt.target.tag.face]);
   });
   // if we actually got face image tensor, draw canvas with that face
@@ -135,9 +136,10 @@ async function main() {
   let images = dir.filter((img) => (img.endsWith('.jpg') && img.includes('sample')));
 
   // enumerate additional private test images in /private, not included in git repository
-  res = await fetch('/private');
+  res = await fetch('/private/err');
   dir = (res && res.ok) ? await res.json() : [];
-  images = images.concat(dir.filter((img) => (img.endsWith('.jpg'))));
+  // images = images.concat(dir.filter((img) => (img.endsWith('.jpg'))));
+  images = dir.filter((img) => (img.endsWith('.jpg')));
 
   // download and analyze all images
   log('Enumerated:', images.length, 'images');
@@ -62,7 +62,7 @@
     "@vladmandic/pilogger": "^0.2.15",
     "chokidar": "^3.5.1",
     "dayjs": "^1.10.4",
-    "esbuild": "^0.9.3",
+    "esbuild": "^0.9.6",
     "eslint": "^7.22.0",
     "eslint-config-airbnb-base": "^14.2.1",
     "eslint-plugin-import": "^2.22.1",
@@ -1,4 +1,4 @@
-import { log } from '../log';
+import { log } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
 
@@ -1,4 +1,4 @@
-import { log } from '../log';
+import { log } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
 
@@ -0,0 +1,159 @@
+import { log, now } from './helpers';
+import * as tf from '../dist/tfjs.esm.js';
+import * as age from './age/age';
+import * as gender from './gender/gender';
+import * as emotion from './emotion/emotion';
+import * as embedding from './embedding/embedding';
+
+type Tensor = typeof tf.Tensor;
+
+const calculateFaceAngle = (mesh): { roll: number | null, yaw: number | null, pitch: number | null } => {
+  if (!mesh || mesh.length < 300) return { roll: null, yaw: null, pitch: null };
+  const radians = (a1, a2, b1, b2) => Math.atan2(b2 - a2, b1 - a1);
+  // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
+  const degrees = (theta) => Math.abs(((theta * 180) / Math.PI) % 360);
+  const angle = {
+    // values are in radians in range of -pi/2 to pi/2 which is -90 to +90 degrees
+    // value of 0 means center
+    // roll is face lean left/right
+    roll: radians(mesh[33][0], mesh[33][1], mesh[263][0], mesh[263][1]), // looking at x,y of outside corners of leftEye and rightEye
+    // yaw is face turn left/right
+    yaw: radians(mesh[33][0], mesh[33][2], mesh[263][0], mesh[263][2]), // looking at x,z of outside corners of leftEye and rightEye
+    // pitch is face move up/down
+    pitch: radians(mesh[10][1], mesh[10][2], mesh[152][1], mesh[152][2]), // looking at y,z of top and bottom points of the face
+  };
+  return angle;
+};
+
+export const detectFace = async (parent, input): Promise<any> => {
+  // run facemesh, includes blazeface and iris
+  // eslint-disable-next-line no-async-promise-executor
+  let timeStamp;
+  let ageRes;
+  let genderRes;
+  let emotionRes;
+  let embeddingRes;
+  const faceRes: Array<{
+    confidence: number,
+    boxConfidence: number,
+    faceConfidence: number,
+    box: [number, number, number, number],
+    mesh: Array<[number, number, number]>
+    meshRaw: Array<[number, number, number]>
+    boxRaw: [number, number, number, number],
+    annotations: Array<{ part: string, points: Array<[number, number, number]>[] }>,
+    age: number,
+    gender: string,
+    genderConfidence: number,
+    emotion: string,
+    embedding: number[],
+    iris: number,
+    angle: { roll: number | null, yaw: number | null, pitch: number | null },
+    tensor: Tensor,
+  }> = [];
+  parent.state = 'run:face';
+  timeStamp = now();
+  const faces = await parent.models.face?.estimateFaces(input, parent.config);
+  parent.perf.face = Math.trunc(now() - timeStamp);
+  if (!faces) return [];
+  for (const face of faces) {
+    parent.analyze('Get Face');
+
+    // if something went wrong, skip the face
+    if (!face.image || face.image.isDisposedInternal) {
+      log('Face object is disposed:', face.image);
+      continue;
+    }
+
+    const angle = calculateFaceAngle(face.mesh);
+
+    // run age, inherits face from blazeface
+    parent.analyze('Start Age:');
+    if (parent.config.async) {
+      ageRes = parent.config.face.age.enabled ? age.predict(face.image, parent.config) : {};
+    } else {
+      parent.state = 'run:age';
+      timeStamp = now();
+      ageRes = parent.config.face.age.enabled ? await age.predict(face.image, parent.config) : {};
+      parent.perf.age = Math.trunc(now() - timeStamp);
+    }
+
+    // run gender, inherits face from blazeface
+    parent.analyze('Start Gender:');
+    if (parent.config.async) {
+      genderRes = parent.config.face.gender.enabled ? gender.predict(face.image, parent.config) : {};
+    } else {
+      parent.state = 'run:gender';
+      timeStamp = now();
+      genderRes = parent.config.face.gender.enabled ? await gender.predict(face.image, parent.config) : {};
+      parent.perf.gender = Math.trunc(now() - timeStamp);
+    }
+
+    // run emotion, inherits face from blazeface
+    parent.analyze('Start Emotion:');
+    if (parent.config.async) {
+      emotionRes = parent.config.face.emotion.enabled ? emotion.predict(face.image, parent.config) : {};
+    } else {
+      parent.state = 'run:emotion';
+      timeStamp = now();
+      emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(face.image, parent.config) : {};
+      parent.perf.emotion = Math.trunc(now() - timeStamp);
+    }
+    parent.analyze('End Emotion:');
+
+    // run embedding, inherits face from blazeface
+    parent.analyze('Start Embedding:');
+    if (parent.config.async) {
+      embeddingRes = parent.config.face.embedding.enabled ? embedding.predict(face, parent.config) : [];
+    } else {
+      parent.state = 'run:embedding';
+      timeStamp = now();
+      embeddingRes = parent.config.face.embedding.enabled ? await embedding.predict(face, parent.config) : [];
+      parent.perf.embedding = Math.trunc(now() - timeStamp);
+    }
+    parent.analyze('End Emotion:');
+
+    // if async wait for results
+    if (parent.config.async) {
+      [ageRes, genderRes, emotionRes, embeddingRes] = await Promise.all([ageRes, genderRes, emotionRes, embeddingRes]);
+    }
+
+    parent.analyze('Finish Face:');
+
+    // calculate iris distance
+    // iris: array[ center, left, top, right, bottom]
+    if (!parent.config.face.iris.enabled && face?.annotations?.leftEyeIris && face?.annotations?.rightEyeIris) {
+      delete face.annotations.leftEyeIris;
+      delete face.annotations.rightEyeIris;
+    }
+    const irisSize = (face.annotations?.leftEyeIris && face.annotations?.rightEyeIris)
+      /* average human iris size is 11.7mm */
+      ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))
+      : 0;
+
+    // combine results
+    faceRes.push({
+      ...face,
+      age: ageRes.age,
+      gender: genderRes.gender,
+      genderConfidence: genderRes.confidence,
+      emotion: emotionRes,
+      embedding: embeddingRes,
+      iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
+      angle,
+      tensor: parent.config.face.detector.return ? face.image?.squeeze() : null,
+    });
+    // dispose original face tensor
+    face.image?.dispose();
+
+    parent.analyze('End Face');
+  }
+  parent.analyze('End FaceMesh:');
+  if (parent.config.async) {
+    if (parent.perf.face) delete parent.perf.face;
+    if (parent.perf.age) delete parent.perf.age;
+    if (parent.perf.gender) delete parent.perf.gender;
+    if (parent.perf.emotion) delete parent.perf.emotion;
+  }
+  return faceRes;
+};
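For illustration only (not part of the commit): a minimal sketch of how the roll/yaw/pitch radians returned in each face's `angle` field could be converted to degrees, reusing the formula of the unused `degrees` helper above. `result` is assumed to be the object returned by `human.detect()`.

```ts
// Illustrative sketch, not part of the commit: convert the angle radians to degrees.
// Assumes `result` comes from human.detect() and follows the faceRes shape built above.
const toDegrees = (theta: number) => Math.abs(((theta * 180) / Math.PI) % 360);

const face = result.face[0];
if (face && face.angle.roll !== null && face.angle.yaw !== null && face.angle.pitch !== null) {
  console.log('roll:', toDegrees(face.angle.roll).toFixed(1));   // lean left/right
  console.log('yaw:', toDegrees(face.angle.yaw).toFixed(1));     // turn left/right
  console.log('pitch:', toDegrees(face.angle.pitch).toFixed(1)); // nod up/down
}
```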
@@ -1,4 +1,4 @@
-import { log } from '../log';
+import { log } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
 
@@ -1,6 +1,6 @@
 // https://storage.googleapis.com/tfjs-models/demos/handpose/index.html
 
-import { log } from '../log';
+import { log } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as handdetector from './handdetector';
 import * as handpipeline from './handpipeline';
@@ -0,0 +1,28 @@
+// helper function: wrapper around console output
+export function log(...msg) {
+  const dt = new Date();
+  const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
+  // eslint-disable-next-line no-console
+  if (msg) console.log(ts, 'Human:', ...msg);
+}
+
+// helper function: gets elapsed time on both browser and nodejs
+export const now = () => {
+  if (typeof performance !== 'undefined') return performance.now();
+  return parseInt((Number(process.hrtime.bigint()) / 1000 / 1000).toString());
+};
+
+// helper function: perform deep merge of multiple objects so it allows full inheritance with overrides
+export function mergeDeep(...objects) {
+  const isObject = (obj) => obj && typeof obj === 'object';
+  return objects.reduce((prev, obj) => {
+    Object.keys(obj || {}).forEach((key) => {
+      const pVal = prev[key];
+      const oVal = obj[key];
+      if (Array.isArray(pVal) && Array.isArray(oVal)) prev[key] = pVal.concat(...oVal);
+      else if (isObject(pVal) && isObject(oVal)) prev[key] = mergeDeep(pVal, oVal);
+      else prev[key] = oVal;
+    });
+    return prev;
+  }, {});
+}
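A short usage sketch (illustrative, not part of the commit) of the two helpers that src/human.ts below starts importing: `mergeDeep` for config inheritance with overrides and `now` for cross-platform timing.

```ts
// Illustrative sketch, not part of the commit.
import { log, now, mergeDeep } from './helpers';

// nested keys are merged rather than replaced, so overrides inherit the defaults
const defaults = { face: { enabled: true, iris: { enabled: true } } };
const overrides = { face: { iris: { enabled: false } } };
const config = mergeDeep(defaults, overrides);
// -> { face: { enabled: true, iris: { enabled: false } } }

// elapsed time works in both browser (performance.now) and nodejs (process.hrtime)
const t0 = now();
// ... some work ...
log('config:', config, 'elapsed ms:', Math.trunc(now() - t0));
```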
src/human.ts
@@ -1,7 +1,8 @@
-import { log } from './log';
+import { log, now, mergeDeep } from './helpers';
 import * as sysinfo from './sysinfo';
 import * as tf from '../dist/tfjs.esm.js';
 import * as backend from './tfjs/backend';
+import * as faceall from './faceall';
 import * as facemesh from './blazeface/facemesh';
 import * as age from './age/age';
 import * as gender from './gender/gender';
@@ -33,26 +34,6 @@ export type TensorFlow = typeof tf;
 /** Generic Model object type, holds instance of individual models */
 type Model = Object;
 
-// helper function: gets elapsed time on both browser and nodejs
-const now = () => {
-  if (typeof performance !== 'undefined') return performance.now();
-  return parseInt((Number(process.hrtime.bigint()) / 1000 / 1000).toString());
-};
-
-// helper function: perform deep merge of multiple objects so it allows full inheriance with overrides
-function mergeDeep(...objects) {
-  const isObject = (obj) => obj && typeof obj === 'object';
-  return objects.reduce((prev, obj) => {
-    Object.keys(obj || {}).forEach((key) => {
-      const pVal = prev[key];
-      const oVal = obj[key];
-      if (Array.isArray(pVal) && Array.isArray(oVal)) prev[key] = pVal.concat(...oVal);
-      else if (isObject(pVal) && isObject(oVal)) prev[key] = mergeDeep(pVal, oVal);
-      else prev[key] = oVal;
-    });
-    return prev;
-  }, {});
-}
 /**
  * **Human** library main class
  *
@@ -101,7 +82,7 @@ export class Human {
     nanodet: typeof nanodet;
   };
   sysinfo: { platform: string, agent: string };
-  #perf: any;
+  perf: any;
   #numTensors: number;
   #analyzeMemoryLeaks: boolean;
   #checkSanity: boolean;
@@ -118,7 +99,7 @@ export class Human {
     this.#analyzeMemoryLeaks = false;
     this.#checkSanity = false;
     this.#firstRun = true;
-    this.#perf = {};
+    this.perf = {};
     // object that contains all initialized models
     this.models = {
       face: null,
@@ -156,7 +137,7 @@ export class Human {
 
   // helper function: measure tensor leak
   /** @hidden */
-  #analyze = (...msg) => {
+  analyze = (...msg) => {
     if (!this.#analyzeMemoryLeaks) return;
     const current = this.tf.engine().state.numTensors;
     const previous = this.#numTensors;
@@ -252,7 +233,7 @@ export class Human {
     }
 
     const current = Math.trunc(now() - timeStamp);
-    if (current > (this.#perf.load || 0)) this.#perf.load = current;
+    if (current > (this.perf.load || 0)) this.perf.load = current;
   }
 
   // check if backend needs initialization if it changed
@@ -305,164 +286,10 @@
         if (this.config.debug) log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
       }
       await this.tf.ready();
-      this.#perf.backend = Math.trunc(now() - timeStamp);
+      this.perf.backend = Math.trunc(now() - timeStamp);
     }
   }
 
-  /** @hidden */
-  #calculateFaceAngle = (mesh): { roll: number | null, yaw: number | null, pitch: number | null } => {
-    if (!mesh || mesh.length < 300) return { roll: null, yaw: null, pitch: null };
-    const radians = (a1, a2, b1, b2) => Math.atan2(b2 - a2, b1 - a1);
-    // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
-    const degrees = (theta) => Math.abs(((theta * 180) / Math.PI) % 360);
-    const angle = {
-      // values are in radians in range of -pi/2 to pi/2 which is -90 to +90 degrees
-      // value of 0 means center
-      // roll is face lean left/right
-      roll: radians(mesh[33][0], mesh[33][1], mesh[263][0], mesh[263][1]), // looking at x,y of outside corners of leftEye and rightEye
-      // yaw is face turn left/right
-      yaw: radians(mesh[33][0], mesh[33][2], mesh[263][0], mesh[263][2]), // looking at x,z of outside corners of leftEye and rightEye
-      // pitch is face move up/down
-      pitch: radians(mesh[10][1], mesh[10][2], mesh[152][1], mesh[152][2]), // looking at y,z of top and bottom points of the face
-    };
-    return angle;
-  }
-
-  /** @hidden */
-  #detectFace = async (input): Promise<any> => {
-    // run facemesh, includes blazeface and iris
-    // eslint-disable-next-line no-async-promise-executor
-    let timeStamp;
-    let ageRes;
-    let genderRes;
-    let emotionRes;
-    let embeddingRes;
-    const faceRes: Array<{
-      confidence: number,
-      boxConfidence: number,
-      faceConfidence: number,
-      box: [number, number, number, number],
-      mesh: Array<[number, number, number]>
-      meshRaw: Array<[number, number, number]>
-      boxRaw: [number, number, number, number],
-      annotations: Array<{ part: string, points: Array<[number, number, number]>[] }>,
-      age: number,
-      gender: string,
-      genderConfidence: number,
-      emotion: string,
-      embedding: number[],
-      iris: number,
-      angle: { roll: number | null, yaw: number | null, pitch: number | null },
-      tensor: Tensor,
-    }> = [];
-
-    this.state = 'run:face';
-    timeStamp = now();
-    const faces = await this.models.face?.estimateFaces(input, this.config);
-    this.#perf.face = Math.trunc(now() - timeStamp);
-    if (!faces) return [];
-    for (const face of faces) {
-      this.#analyze('Get Face');
-
-      // is something went wrong, skip the face
-      if (!face.image || face.image.isDisposedInternal) {
-        log('Face object is disposed:', face.image);
-        continue;
-      }
-
-      const angle = this.#calculateFaceAngle(face.mesh);
-
-      // run age, inherits face from blazeface
-      this.#analyze('Start Age:');
-      if (this.config.async) {
-        ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};
-      } else {
-        this.state = 'run:age';
-        timeStamp = now();
-        ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};
-        this.#perf.age = Math.trunc(now() - timeStamp);
-      }
-
-      // run gender, inherits face from blazeface
-      this.#analyze('Start Gender:');
-      if (this.config.async) {
-        genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};
-      } else {
-        this.state = 'run:gender';
-        timeStamp = now();
-        genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};
-        this.#perf.gender = Math.trunc(now() - timeStamp);
-      }
-
-      // run emotion, inherits face from blazeface
-      this.#analyze('Start Emotion:');
-      if (this.config.async) {
-        emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};
-      } else {
-        this.state = 'run:emotion';
-        timeStamp = now();
-        emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
-        this.#perf.emotion = Math.trunc(now() - timeStamp);
-      }
-      this.#analyze('End Emotion:');
-
-      // run emotion, inherits face from blazeface
-      this.#analyze('Start Embedding:');
-      if (this.config.async) {
-        embeddingRes = this.config.face.embedding.enabled ? embedding.predict(face, this.config) : [];
-      } else {
-        this.state = 'run:embedding';
-        timeStamp = now();
-        embeddingRes = this.config.face.embedding.enabled ? await embedding.predict(face, this.config) : [];
-        this.#perf.embedding = Math.trunc(now() - timeStamp);
-      }
-      this.#analyze('End Emotion:');
-
-      // if async wait for results
-      if (this.config.async) {
-        [ageRes, genderRes, emotionRes, embeddingRes] = await Promise.all([ageRes, genderRes, emotionRes, embeddingRes]);
-      }
-
-      this.#analyze('Finish Face:');
-
-      // calculate iris distance
-      // iris: array[ center, left, top, right, bottom]
-      if (!this.config.face.iris.enabled && face?.annotations?.leftEyeIris && face?.annotations?.rightEyeIris) {
-        delete face.annotations.leftEyeIris;
-        delete face.annotations.rightEyeIris;
-      }
-      const irisSize = (face.annotations?.leftEyeIris && face.annotations?.rightEyeIris)
-        /* average human iris size is 11.7mm */
-        ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))
-        : 0;
-
-      // combine results
-      faceRes.push({
-        ...face,
-        age: ageRes.age,
-        gender: genderRes.gender,
-        genderConfidence: genderRes.confidence,
-        emotion: emotionRes,
-        embedding: embeddingRes,
-        iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
-        angle,
-        tensor: this.config.face.detector.return ? face.image?.squeeze() : null,
-      });
-      // dispose original face tensor
-      face.image?.dispose();
-
-      this.#analyze('End Face');
-    }
-    this.#analyze('End FaceMesh:');
-    if (this.config.async) {
-      if (this.#perf.face) delete this.#perf.face;
-      if (this.#perf.age) delete this.#perf.age;
-      if (this.#perf.gender) delete this.#perf.gender;
-      if (this.#perf.emotion) delete this.#perf.emotion;
-    }
-    return faceRes;
-  }
-
   // main detect function
   async detect(input: Input, userConfig: Config | Object = {}): Promise<Result | Error> {
     // detection happens inside a promise
@@ -490,7 +317,7 @@ export class Human {
       await this.load();
 
       if (this.config.scoped) this.tf.engine().startScope();
-      this.#analyze('Start Scope:');
+      this.analyze('Start Scope:');
 
       timeStamp = now();
       const process = image.process(input, this.config);
@@ -499,8 +326,8 @@
         resolve({ error: 'could not convert input to tensor' });
         return;
       }
-      this.#perf.image = Math.trunc(now() - timeStamp);
-      this.#analyze('Get Image:');
+      this.perf.image = Math.trunc(now() - timeStamp);
+      this.analyze('Get Image:');
 
       // prepare where to store model results
       let bodyRes;
@@ -510,55 +337,55 @@
 
       // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
      if (this.config.async) {
-        faceRes = this.config.face.enabled ? this.#detectFace(process.tensor) : [];
-        if (this.#perf.face) delete this.#perf.face;
+        faceRes = this.config.face.enabled ? faceall.detectFace(this, process.tensor) : [];
+        if (this.perf.face) delete this.perf.face;
       } else {
         this.state = 'run:face';
         timeStamp = now();
-        faceRes = this.config.face.enabled ? await this.#detectFace(process.tensor) : [];
-        this.#perf.face = Math.trunc(now() - timeStamp);
+        faceRes = this.config.face.enabled ? await faceall.detectFace(this, process.tensor) : [];
+        this.perf.face = Math.trunc(now() - timeStamp);
       }
 
       // run body: can be posenet or blazepose
-      this.#analyze('Start Body:');
+      this.analyze('Start Body:');
       if (this.config.async) {
         if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
         else bodyRes = this.config.body.enabled ? blazepose.predict(process.tensor, this.config) : [];
-        if (this.#perf.body) delete this.#perf.body;
+        if (this.perf.body) delete this.perf.body;
       } else {
         this.state = 'run:body';
         timeStamp = now();
         if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? await this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
         else bodyRes = this.config.body.enabled ? await blazepose.predict(process.tensor, this.config) : [];
-        this.#perf.body = Math.trunc(now() - timeStamp);
+        this.perf.body = Math.trunc(now() - timeStamp);
       }
-      this.#analyze('End Body:');
+      this.analyze('End Body:');
 
       // run handpose
-      this.#analyze('Start Hand:');
+      this.analyze('Start Hand:');
       if (this.config.async) {
         handRes = this.config.hand.enabled ? this.models.handpose?.estimateHands(process.tensor, this.config) : [];
-        if (this.#perf.hand) delete this.#perf.hand;
+        if (this.perf.hand) delete this.perf.hand;
       } else {
         this.state = 'run:hand';
         timeStamp = now();
         handRes = this.config.hand.enabled ? await this.models.handpose?.estimateHands(process.tensor, this.config) : [];
-        this.#perf.hand = Math.trunc(now() - timeStamp);
+        this.perf.hand = Math.trunc(now() - timeStamp);
       }
-      this.#analyze('End Hand:');
+      this.analyze('End Hand:');
 
       // run nanodet
-      this.#analyze('Start Object:');
+      this.analyze('Start Object:');
       if (this.config.async) {
         objectRes = this.config.object.enabled ? nanodet.predict(process.tensor, this.config) : [];
-        if (this.#perf.object) delete this.#perf.object;
+        if (this.perf.object) delete this.perf.object;
       } else {
         this.state = 'run:object';
         timeStamp = now();
         objectRes = this.config.object.enabled ? await nanodet.predict(process.tensor, this.config) : [];
-        this.#perf.object = Math.trunc(now() - timeStamp);
+        this.perf.object = Math.trunc(now() - timeStamp);
       }
-      this.#analyze('End Object:');
+      this.analyze('End Object:');
 
       // if async wait for results
       if (this.config.async) {
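The same async-versus-sync dispatch repeats for face, body, hand and object above; a condensed sketch of that pattern follows, with `model`, `config`, and `perf` as hypothetical stand-ins rather than the actual Human internals.

```ts
// Condensed sketch of the dispatch pattern used above; names are stand-ins, not Human internals.
async function runModel(
  model: { predict: (tensor: unknown) => Promise<unknown> },
  tensor: unknown,
  config: { async: boolean },
  perf: Record<string, number>,
) {
  if (config.async) {
    // async mode: start the prediction without awaiting; the caller gathers all
    // pending results later with Promise.all, so no per-model timing is recorded
    delete perf.model;
    return model.predict(tensor);
  }
  // sync mode: await each model in turn and record its elapsed time
  const t0 = Date.now();
  const res = await model.predict(tensor);
  perf.model = Math.trunc(Date.now() - t0);
  return res;
}
```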
@@ -567,18 +394,18 @@
       process.tensor.dispose();
 
       if (this.config.scoped) this.tf.engine().endScope();
-      this.#analyze('End Scope:');
+      this.analyze('End Scope:');
 
       let gestureRes = [];
       if (this.config.gesture.enabled) {
         timeStamp = now();
         // @ts-ignore
         gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
-        if (!this.config.async) this.#perf.gesture = Math.trunc(now() - timeStamp);
-        else if (this.#perf.gesture) delete this.#perf.gesture;
+        if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
+        else if (this.perf.gesture) delete this.perf.gesture;
       }
 
-      this.#perf.total = Math.trunc(now() - timeStart);
+      this.perf.total = Math.trunc(now() - timeStart);
       this.state = 'idle';
       const result = {
         face: faceRes,
@@ -586,7 +413,7 @@ export class Human {
         hand: handRes,
         gesture: gestureRes,
         object: objectRes,
-        performance: this.#perf,
+        performance: this.perf,
         canvas: process.canvas,
       };
       // log('Result:', result);
@@ -1,6 +1,6 @@
 // @ts-nocheck
 
-import { log } from '../log';
+import { log } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as fxImage from './imagefx';
 
@@ -1,4 +1,4 @@
-import { log } from '../log';
+import { log } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
 
@@ -1,4 +1,4 @@
-import { log } from './log';
+import { log } from './helpers';
 
 export const data = {};
 
@@ -1,4 +1,4 @@
-import { log } from '../log';
+import { log } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 
 export const config = {