fix gear and ssrnet modules

pull/280/head
Vladimir Mandic 2021-11-13 12:23:32 -05:00
parent ee2deb88dc
commit 50a678c33a
12 changed files with 258 additions and 133 deletions
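For orientation, a minimal sketch of how the reworked modules are enabled; the gear and ssrnet config keys and their modelPath/modelPathAge/modelPathGender fields mirror the test configuration added in this commit, while the model locations themselves are assumptions:

const config = {
  face: {
    enabled: true,
    description: { enabled: true }, // default faceres age/gender/descriptor
    gear: { enabled: true, modelPath: 'gear.json' }, // GEAR age/gender/race
    ssrnet: { enabled: true, modelPathAge: 'age.json', modelPathGender: 'gender.json' }, // SSR-Net age + gender
  },
};
const human = new Human(config); // Human imported from @vladmandic/human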

View File

@@ -11,9 +11,7 @@
### **HEAD -> main** 2021/11/12 mandic00@live.com
### **origin/main** 2021/11/11 mandic00@live.com
- implement optional face masking
- add similarity score range normalization
- add faceid demo
- documentation overhaul

View File

@@ -24,8 +24,8 @@ export async function load(config: Config): Promise<GraphModel> {
return model;
}
export async function predict(image: Tensor, config: Config, idx, count) {
if (!model) return null;
export async function predict(image: Tensor, config: Config, idx, count): Promise<number> {
if (!model) return 0;
const skipTime = (config.face.antispoof?.skipTime || 0) > (now() - lastTime);
const skipFrame = skipped < (config.face.antispoof?.skipFrames || 0);
if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && cached[idx]) {
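The guard above is the frame-skip caching pattern shared by all of the per-face predict functions touched in this commit: the last result is cached per face index (idx) and reused while the configured skipTime and skipFrames budgets hold and the face count is unchanged. A minimal self-contained sketch, with runModel as a hypothetical stand-in for the actual inference:

const now = () => performance.now();
const cached: number[] = [];
let lastTime = 0;
let lastCount = 0;
let skipped = Number.MAX_SAFE_INTEGER;
const runModel = async (idx: number): Promise<number> => 0.5; // hypothetical inference stub

async function predictWithSkip(idx: number, count: number, config: { skipAllowed: boolean, skipTime: number, skipFrames: number }): Promise<number> {
  const skipTime = (config.skipTime || 0) > (now() - lastTime); // still within the time budget
  const skipFrame = skipped < (config.skipFrames || 0); // still within the frame budget
  if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && cached[idx]) {
    skipped++;
    return cached[idx]; // reuse the cached result for this face
  }
  skipped = 0;
  cached[idx] = await runModel(idx); // run actual inference and cache per face index
  lastCount = count;
  lastTime = now();
  return cached[idx];
}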

View File

@@ -12,6 +12,9 @@ import * as faceres from './faceres';
import * as mask from './mask';
import * as antispoof from './antispoof';
import * as liveness from './liveness';
import * as gear from '../gear/gear';
import * as ssrnetAge from '../gear/ssrnet-age';
import * as ssrnetGender from '../gear/ssrnet-gender';
import type { FaceResult } from '../result';
import type { Tensor } from '../tfjs/types';
import type { Human } from '../human';
@@ -95,18 +98,30 @@ export const detectFace = async (parent: Human /* instance of human */, input: T
parent.analyze('End Liveness:');
// run gear, inherits face from blazeface
/*
parent.analyze('Start GEAR:');
if (parent.config.async) {
gearRes = parent.config.face.agegenderrace.enabled ? agegenderrace.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
gearRes = parent.config.face['gear']?.enabled ? gear.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
} else {
parent.state = 'run:gear';
timeStamp = now();
gearRes = parent.config.face.agegenderrace.enabled ? await agegenderrace.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
gearRes = parent.config.face['gear']?.enabled ? await gear.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
parent.performance.gear = Math.trunc(now() - timeStamp);
}
parent.analyze('End GEAR:');
*/
// run ssrnet, inherits face from blazeface
parent.analyze('Start SSRNet:');
if (parent.config.async) {
ageRes = parent.config.face['ssrnet']?.enabled ? ssrnetAge.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
genderRes = parent.config.face['ssrnet']?.enabled ? ssrnetGender.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
} else {
parent.state = 'run:ssrnet';
timeStamp = now();
ageRes = parent.config.face['ssrnet']?.enabled ? await ssrnetAge.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
genderRes = parent.config.face['ssrnet']?.enabled ? await ssrnetGender.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
parent.performance.ssrnet = Math.trunc(now() - timeStamp);
}
parent.analyze('End SSRNet:');
// run emotion, inherits face from blazeface
parent.analyze('Start Description:');
@@ -124,9 +139,12 @@ export const detectFace = async (parent: Human /* instance of human */, input: T
if (parent.config.async) {
[ageRes, genderRes, emotionRes, embeddingRes, descRes, gearRes, antispoofRes, livenessRes] = await Promise.all([ageRes, genderRes, emotionRes, embeddingRes, descRes, gearRes, antispoofRes, livenessRes]);
}
parent.analyze('Finish Face:');
// override age/gender if alternative models are used
if (parent.config.face['ssrnet']?.enabled && ageRes && genderRes) descRes = { age: ageRes.age, gender: genderRes.gender, genderScore: genderRes.genderScore };
if (parent.config.face['gear']?.enabled && gearRes) descRes = { age: gearRes.age, gender: gearRes.gender, genderScore: gearRes.genderScore, race: gearRes.race };
// calculate iris distance
// iris: array[ center, left, top, right, bottom]
if (!parent.config.face.iris?.enabled && faces[i]?.annotations?.leftEyeIris && faces[i]?.annotations?.rightEyeIris) {
@@ -146,20 +164,22 @@ export const detectFace = async (parent: Human /* instance of human */, input: T
// delete temp face image
if (faces[i].tensor) delete faces[i].tensor;
// combine results
faceRes.push({
const res: FaceResult = {
...faces[i],
id: i,
age: descRes?.age,
gender: descRes?.gender,
genderScore: descRes?.genderScore,
embedding: descRes?.descriptor,
emotion: emotionRes,
real: antispoofRes,
live: livenessRes,
iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
rotation,
tensor,
});
};
if (descRes?.age) res.age = descRes.age;
if (descRes?.gender) res.gender = descRes.gender;
if (descRes?.genderScore) res.genderScore = descRes?.genderScore;
if (descRes?.descriptor) res.embedding = descRes?.descriptor;
if (descRes?.race) res.race = descRes?.race;
if (emotionRes) res.emotion = emotionRes;
if (antispoofRes) res.real = antispoofRes;
if (livenessRes) res.live = livenessRes;
if (irisSize && irisSize !== 0) res.iris = Math.trunc(500 / irisSize / 11.7) / 100;
if (rotation) res.rotation = rotation;
if (tensor) res.tensor = tensor;
faceRes.push(res);
parent.analyze('End Face');
}
parent.analyze('End FaceMesh:');
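One subtlety in the override block above is precedence: ssrnet replaces the default faceres values, and gear, assigned last, replaces both. Because each override builds a fresh descRes, the faceres descriptor is dropped as well, so res.embedding stays unset whenever an alternative model is active. Condensed, with comments:

// descRes starts as the faceres result: { age, gender, genderScore, descriptor }
if (parent.config.face['ssrnet']?.enabled && ageRes && genderRes) {
  descRes = { age: ageRes.age, gender: genderRes.gender, genderScore: genderRes.genderScore }; // ssrnet replaces faceres
}
if (parent.config.face['gear']?.enabled && gearRes) {
  descRes = { age: gearRes.age, gender: gearRes.gender, genderScore: gearRes.genderScore, race: gearRes.race }; // gear replaces both and adds race
}
// either override drops descRes.descriptor, so the embedding field is not populated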

View File

@@ -62,8 +62,8 @@ export function enhance(input): Tensor {
*/
}
export async function predict(image: Tensor, config: Config, idx, count) {
if (!model) return null;
export async function predict(image: Tensor, config: Config, idx, count): Promise<{ age: number, gender: string, genderScore: number, descriptor: number[] }> {
if (!model) return { age: 0, gender: 'unknown', genderScore: 0, descriptor: [] };
const skipFrame = skipped < (config.face.description?.skipFrames || 0);
const skipTime = (config.face.description?.skipTime || 0) > (now() - lastTime);
if (config.skipAllowed && skipFrame && skipTime && (lastCount === count) && last[idx]?.age && (last[idx]?.age > 0)) {

View File

@@ -24,8 +24,8 @@ export async function load(config: Config): Promise<GraphModel> {
return model;
}
export async function predict(image: Tensor, config: Config, idx, count) {
if (!model) return null;
export async function predict(image: Tensor, config: Config, idx, count): Promise<number> {
if (!model) return 0;
const skipTime = (config.face.liveness?.skipTime || 0) > (now() - lastTime);
const skipFrame = skipped < (config.face.liveness?.skipFrames || 0);
if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && cached[idx]) {

View File

@@ -12,7 +12,6 @@ import { env } from '../util/env';
const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];
let model: GraphModel | null;
// let last: Array<{ score: number, emotion: string }> = [];
const last: Array<Array<{ score: number, emotion: string }>> = [];
let lastCount = 0;
let lastTime = 0;
@@ -31,8 +30,8 @@ export async function load(config: Config): Promise<GraphModel> {
return model;
}
export async function predict(image: Tensor, config: Config, idx, count) {
if (!model) return null;
export async function predict(image: Tensor, config: Config, idx, count): Promise<Array<{ score: number, emotion: string }>> {
if (!model) return [];
const skipFrame = skipped < (config.face.emotion?.skipFrames || 0);
const skipTime = (config.face.emotion?.skipTime || 0) > (now() - lastTime);
if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && last[idx] && (last[idx].length > 0)) {

src/gear/gear.ts Normal file
View File

@@ -0,0 +1,70 @@
/**
* GEAR [gender/emotion/age/race] model implementation
*
* Based on: [**GEAR Predictor**](https://github.com/Udolf15/GEAR-Predictor)
*/
import { log, join, now } from '../util/util';
import * as tf from '../../dist/tfjs.esm.js';
import type { Config } from '../config';
import type { GraphModel, Tensor } from '../tfjs/types';
import { env } from '../util/env';
type GearType = { age: number, gender: string, genderScore: number, race: Array<{ score: number, race: string }> }
let model: GraphModel | null;
const last: Array<GearType> = [];
const raceNames = ['white', 'black', 'asian', 'indian', 'other'];
const ageWeights = [15, 23, 28, 35.5, 45.5, 55.5, 65];
let lastCount = 0;
let lastTime = 0;
let skipped = Number.MAX_SAFE_INTEGER;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function load(config: Config) {
if (env.initial) model = null;
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.face['gear'].modelPath)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.face['gear'].modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);
return model;
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function predict(image: Tensor, config: Config, idx, count): Promise<GearType> {
if (!model) return { age: 0, gender: 'unknown', genderScore: 0, race: [] };
const skipFrame = skipped < (config.face['gear']?.skipFrames || 0);
const skipTime = (config.face['gear']?.skipTime || 0) > (now() - lastTime);
if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && last[idx]) {
skipped++;
return last[idx];
}
skipped = 0;
return new Promise(async (resolve) => {
if (!model?.inputs[0].shape) return;
const t: Record<string, Tensor> = {};
// t.resize = tf.image.resizeBilinear(image, [model?.inputs[0].shape[2], model?.inputs[0].shape[1]], false);
const box = [[0.0, 0.10, 0.90, 0.90]]; // empirical values for top, left, bottom, right
t.resize = tf.image.cropAndResize(image, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
const obj: GearType = { age: 0, gender: 'unknown', genderScore: 0, race: [] };
if (config.face['gear']?.enabled) [t.age, t.gender, t.race] = model.execute(t.resize, ['age_output', 'gender_output', 'race_output']) as Tensor[];
const gender = await t.gender.data();
obj.gender = gender[0] > gender[1] ? 'male' : 'female';
obj.genderScore = Math.round(100 * (gender[0] > gender[1] ? gender[0] : gender[1])) / 100;
const race = await t.race.data();
for (let i = 0; i < race.length; i++) {
if (race[i] > (config.face['gear']?.minConfidence || 0.2)) obj.race.push({ score: Math.round(100 * race[i]) / 100, race: raceNames[i] });
}
obj.race.sort((a, b) => b.score - a.score);
// {0: 'Below20', 1: '21-25', 2: '26-30', 3: '31-40',4: '41-50', 5: '51-60', 6: 'Above60'}
const ageDistribution = Array.from(await t.age.data());
const ageSorted = ageDistribution.map((a, i) => [ageWeights[i], a]).sort((a, b) => b[1] - a[1]);
let age = ageSorted[0][0]; // pick best starting point
for (let i = 1; i < ageSorted.length; i++) age += ageSorted[i][1] * (ageSorted[i][0] - age); // adjust with each other choice by weight
obj.age = Math.round(10 * age) / 10;
last[idx] = obj;
lastCount = count;
lastTime = now();
resolve(obj);
});
}
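The age head returns a probability distribution over seven buckets; rather than taking the argmax, the code starts from the midpoint weight of the most likely bucket and nudges the estimate toward each remaining bucket in proportion to its probability. A worked example with an assumed distribution:

const ageWeights = [15, 23, 28, 35.5, 45.5, 55.5, 65]; // bucket midpoints from gear.ts
const dist = [0.05, 0.10, 0.60, 0.15, 0.05, 0.03, 0.02]; // assumed model output
const sorted = dist.map((a, i) => [ageWeights[i], a]).sort((a, b) => b[1] - a[1]);
let age = sorted[0][0]; // 28, midpoint of the most likely bucket
for (let i = 1; i < sorted.length; i++) age += sorted[i][1] * (sorted[i][0] - age); // nudge toward each other bucket
console.log(Math.round(10 * age) / 10); // ≈ 30.2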

View File

@@ -2,8 +2,6 @@
* Age model implementation
*
* Based on: [**SSR-Net**](https://github.com/shamangary/SSR-Net)
*
* Obsolete and replaced by `faceres` that performs age/gender/descriptor analysis
*/
import { log, join, now } from '../util/util';
@@ -13,16 +11,17 @@ import type { GraphModel, Tensor } from '../tfjs/types';
import { env } from '../util/env';
let model: GraphModel | null;
let last = { age: 0 };
const last: Array<{ age: number }> = [];
let lastCount = 0;
let lastTime = 0;
let skipped = Number.MAX_SAFE_INTEGER;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function load(config: Config | any) {
export async function load(config: Config) {
if (env.initial) model = null;
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.face.age.modelPath);
model = await tf.loadGraphModel(join(config.modelBasePath, config.face['ssrnet'].modelPathAge)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.face['ssrnet'].modelPathAge);
else if (config.debug) log('load model:', model['modelUrl']);
} else {
if (config.debug) log('cached model:', model['modelUrl']);
@@ -31,35 +30,30 @@ export async function load(config: Config | any) {
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function predict(image: Tensor, config: Config | any) {
if (!model) return null;
const skipTime = (config.face.age?.skipTime || 0) > (now() - lastTime);
const skipFrame = skipped < (config.face.age?.skipFrames || 0);
if (config.skipAllowed && skipTime && skipFrame && last.age && (last.age > 0)) {
export async function predict(image: Tensor, config: Config, idx, count): Promise<{ age: number }> {
if (!model) return { age: 0 };
const skipFrame = skipped < (config.face['ssrnet']?.skipFrames || 0);
const skipTime = (config.face['ssrnet']?.skipTime || 0) > (now() - lastTime);
if (config.skipAllowed && skipFrame && skipTime && (lastCount === count) && last[idx]?.age && (last[idx]?.age > 0)) {
skipped++;
return last;
return last[idx];
}
skipped = 0;
return new Promise(async (resolve) => {
if (!model?.inputs || !model.inputs[0] || !model.inputs[0].shape) return;
const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
const enhance = tf.mul(resize, [255.0]);
tf.dispose(resize);
let ageT;
const t: Record<string, Tensor> = {};
t.resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
t.enhance = tf.mul(t.resize, 255);
const obj = { age: 0 };
if (config.face.age.enabled) ageT = model.execute(enhance);
lastTime = now();
tf.dispose(enhance);
if (ageT) {
const data = await ageT.data();
if (config.face['ssrnet'].enabled) t.age = model.execute(t.enhance) as Tensor;
if (t.age) {
const data = await t.age.data();
obj.age = Math.trunc(10 * data[0]) / 10;
}
tf.dispose(ageT);
last = obj;
Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
last[idx] = obj;
lastCount = count;
lastTime = now();
resolve(obj);
});
}
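Note the bookkeeping change above: intermediates now live in a single Record<string, Tensor> and are disposed in one pass, instead of being tracked and disposed one variable at a time. The pattern in isolation (a sketch; the 64x64 input size is an assumption):

import * as tf from '@tensorflow/tfjs';

async function preprocess(image: tf.Tensor4D): Promise<number> {
  const t: Record<string, tf.Tensor> = {};
  t.resize = tf.image.resizeBilinear(image, [64, 64], false); // input size is an assumption
  t.enhance = tf.mul(t.resize, 255); // rescale 0..1 to 0..255 as the age model expects
  const data = await t.enhance.data();
  Object.keys(t).forEach((tensor) => tf.dispose(t[tensor])); // one cleanup pass over every intermediate
  return data[0];
}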

View File

@@ -2,8 +2,6 @@
* Gender model implementation
*
* Based on: [**SSR-Net**](https://github.com/shamangary/SSR-Net)
*
* Obsolete and replaced by `faceres` that performs age/gender/descriptor analysis
*/
import { log, join, now } from '../util/util';
@@ -13,10 +11,10 @@ import type { GraphModel, Tensor } from '../tfjs/types';
import { env } from '../util/env';
let model: GraphModel | null;
let last = { gender: '' };
const last: Array<{ gender: string, genderScore: number }> = [];
let lastCount = 0;
let lastTime = 0;
let skipped = Number.MAX_SAFE_INTEGER;
let alternative = false;
// tuning values
const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale
@@ -25,85 +23,45 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
export async function load(config: Config | any) {
if (env.initial) model = null;
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.gender.modelPath)) as unknown as GraphModel;
alternative = model.inputs[0].shape ? model.inputs[0]?.shape[3] === 1 : false;
if (!model || !model['modelUrl']) log('load model failed:', config.face.gender.modelPath);
model = await tf.loadGraphModel(join(config.modelBasePath, config.face['ssrnet'].modelPathGender)) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.face['ssrnet'].modelPathGender);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);
return model;
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export async function predict(image: Tensor, config: Config | any) {
if (!model) return null;
const skipTime = (config.face.gender?.skipTime || 0) > (now() - lastTime);
const skipFrame = skipped < (config.face.gender?.skipFrames || 0);
if (config.skipAllowed && skipTime && skipFrame && last.gender !== '') {
export async function predict(image: Tensor, config: Config, idx, count): Promise<{ gender: string, genderScore: number }> {
if (!model) return { gender: 'unknown', genderScore: 0 };
const skipFrame = skipped < (config.face['ssrnet']?.skipFrames || 0);
const skipTime = (config.face['ssrnet']?.skipTime || 0) > (now() - lastTime);
if (config.skipAllowed && skipFrame && skipTime && (lastCount === count) && last[idx]?.gender && (last[idx]?.genderScore > 0)) {
skipped++;
return last;
return last[idx];
}
skipped = 0;
return new Promise(async (resolve) => {
if (!model?.inputs[0].shape) return;
const resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
let enhance;
if (alternative) {
enhance = tf.tidy(() => {
const [red, green, blue] = tf.split(resize, 3, 3);
const redNorm = tf.mul(red, rgb[0]);
const greenNorm = tf.mul(green, rgb[1]);
const blueNorm = tf.mul(blue, rgb[2]);
const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
const normalize = tf.mul(tf.sub(grayscale, 0.5), 2); // range grayscale:-1..1
return normalize;
});
} else {
enhance = tf.mul(resize, [255.0]); // range RGB:0..255
}
tf.dispose(resize);
let genderT;
const obj = { gender: '', confidence: 0 };
if (config.face.gender.enabled) genderT = model.execute(enhance);
const t: Record<string, Tensor> = {};
t.resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
t.enhance = tf.tidy(() => {
const [red, green, blue] = tf.split(t.resize, 3, 3);
const redNorm = tf.mul(red, rgb[0]);
const greenNorm = tf.mul(green, rgb[1]);
const blueNorm = tf.mul(blue, rgb[2]);
const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
const normalize = tf.mul(tf.sub(grayscale, 0.5), 2); // range grayscale:-1..1
return normalize;
});
const obj = { gender: '', genderScore: 0 };
if (config.face['ssrnet'].enabled) t.gender = model.execute(t.enhance) as Tensor;
const data = await t.gender.data();
obj.gender = data[0] > data[1] ? 'female' : 'male'; // returns two values 0..1, bigger one is prediction
obj.genderScore = data[0] > data[1] ? (Math.trunc(100 * data[0]) / 100) : (Math.trunc(100 * data[1]) / 100);
Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
last[idx] = obj;
lastCount = count;
lastTime = now();
tf.dispose(enhance);
if (genderT) {
if (!Array.isArray(genderT)) {
const data = await genderT.data();
if (alternative) {
// returns two values 0..1, bigger one is prediction
if (data[0] > config.face.gender.minConfidence || data[1] > config.face.gender.minConfidence) {
obj.gender = data[0] > data[1] ? 'female' : 'male';
obj.confidence = data[0] > data[1] ? (Math.trunc(100 * data[0]) / 100) : (Math.trunc(100 * data[1]) / 100);
}
} else {
// returns one value 0..1, .5 is prediction threshold
const confidence = Math.trunc(200 * Math.abs((data[0] - 0.5))) / 100;
if (confidence > config.face.gender.minConfidence) {
obj.gender = data[0] <= 0.5 ? 'female' : 'male';
obj.confidence = Math.min(0.99, confidence);
}
}
tf.dispose(genderT);
} else {
const gender = await genderT[0].data();
const confidence = Math.trunc(200 * Math.abs((gender[0] - 0.5))) / 100;
if (confidence > config.face.gender.minConfidence) {
obj.gender = gender[0] <= 0.5 ? 'female' : 'male';
obj.confidence = Math.min(0.99, confidence);
}
/*
let age = (await genderT[1].argMax(1).data())[0];
const all = await genderT[1].data();
age = Math.round(all[age - 1] > all[age + 1] ? 10 * age - 100 * all[age - 1] : 10 * age + 100 * all[age + 1]) / 10;
const descriptor = await genderT[1].data();
*/
genderT.forEach((t) => tf.dispose(t));
}
}
last = obj;
resolve(obj);
});
}
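The configured SSR-Net gender model evidently expects a single-channel input in the range -1..1, so the rewrite keeps only the grayscale path: RGB is collapsed with the standard BT.601 luma weights and the 0..1 grayscale value is remapped to -1..1. The arithmetic for one pixel, as a sketch:

const rgb = [0.2989, 0.5870, 0.1140]; // BT.601 luma factors, as in the module
const pixel = [0.8, 0.4, 0.2]; // assumed normalized RGB values
const gray = rgb[0] * pixel[0] + rgb[1] * pixel[1] + rgb[2] * pixel[2]; // ≈ 0.497
const normalized = (gray - 0.5) * 2; // ≈ -0.007, in range -1..1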

View File

@@ -4,7 +4,9 @@
import { env } from './util/env';
import { log } from './util/util';
import * as agegenderrace from './gear/gear-agegenderrace';
import * as gear from './gear/gear';
import * as ssrnetAge from './gear/ssrnet-age';
import * as ssrnetGender from './gear/ssrnet-gender';
import * as antispoof from './face/antispoof';
import * as blazeface from './face/blazeface';
import * as blazepose from './body/blazepose';
@@ -31,8 +33,8 @@ import type { Human } from './human';
* - each model can be `null` if not loaded, instance of `GraphModel` if loaded or `Promise` if loading
*/
export class Models {
age: null | GraphModel | Promise<GraphModel> = null;
agegenderrace: null | GraphModel | Promise<GraphModel> = null;
ssrnetage: null | GraphModel | Promise<GraphModel> = null;
gear: null | GraphModel | Promise<GraphModel> = null;
blazeposedetect: null | GraphModel | Promise<GraphModel> = null;
blazepose: null | GraphModel | Promise<GraphModel> = null;
centernet: null | GraphModel | Promise<GraphModel> = null;
@@ -43,7 +45,7 @@ export class Models {
faceiris: null | GraphModel | Promise<GraphModel> = null;
facemesh: null | GraphModel | Promise<GraphModel> = null;
faceres: null | GraphModel | Promise<GraphModel> = null;
gender: null | GraphModel | Promise<GraphModel> = null;
ssrnetgender: null | GraphModel | Promise<GraphModel> = null;
handpose: null | GraphModel | Promise<GraphModel> = null;
handskeleton: null | GraphModel | Promise<GraphModel> = null;
handtrack: null | GraphModel | Promise<GraphModel> = null;
@@ -70,7 +72,6 @@ export async function load(instance: Human): Promise<void> {
if (instance.config.body.enabled && !instance.models.blazepose && instance.config.body?.modelPath?.includes('blazepose')) instance.models.blazepose = blazepose.loadPose(instance.config);
if (instance.config.body.enabled && !instance.models.blazeposedetect && instance.config.body.detector?.modelPath && instance.config.body?.modelPath?.includes('blazepose')) instance.models.blazeposedetect = blazepose.loadDetect(instance.config);
if (instance.config.body.enabled && !instance.models.efficientpose && instance.config.body?.modelPath?.includes('efficientpose')) instance.models.efficientpose = efficientpose.load(instance.config);
if (instance.config.body.enabled && !instance.models.efficientpose && instance.config.body?.modelPath?.includes('efficientpose')) instance.models.efficientpose = efficientpose.load(instance.config);
if (instance.config.body.enabled && !instance.models.movenet && instance.config.body?.modelPath?.includes('movenet')) instance.models.movenet = movenet.load(instance.config);
if (instance.config.body.enabled && !instance.models.posenet && instance.config.body?.modelPath?.includes('posenet')) instance.models.posenet = posenet.load(instance.config);
if (instance.config.face.enabled && !instance.models.facedetect) instance.models.facedetect = blazeface.load(instance.config);
@@ -80,7 +81,9 @@ export async function load(instance: Human): Promise<void> {
if (instance.config.face.enabled && instance.config.face.emotion?.enabled && !instance.models.emotion) instance.models.emotion = emotion.load(instance.config);
if (instance.config.face.enabled && instance.config.face.iris?.enabled && !instance.models.faceiris) instance.models.faceiris = iris.load(instance.config);
if (instance.config.face.enabled && instance.config.face.mesh?.enabled && !instance.models.facemesh) instance.models.facemesh = facemesh.load(instance.config);
if (instance.config.face.enabled && instance.config.face['agegenderrace']?.enabled && !instance.models.agegenderrace) instance.models.agegenderrace = agegenderrace.load(instance.config);
if (instance.config.face.enabled && instance.config.face['gear']?.enabled && !instance.models.gear) instance.models.gear = gear.load(instance.config);
if (instance.config.face.enabled && instance.config.face['ssrnet']?.enabled && !instance.models.ssrnetage) instance.models.ssrnetage = ssrnetAge.load(instance.config);
if (instance.config.face.enabled && instance.config.face['ssrnet']?.enabled && !instance.models.ssrnetgender) instance.models.ssrnetgender = ssrnetGender.load(instance.config);
if (instance.config.hand.enabled && !instance.models.handtrack && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handtrack = handtrack.loadDetect(instance.config);
if (instance.config.hand.enabled && instance.config.hand.landmarks && !instance.models.handskeleton && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handskeleton = handtrack.loadSkeleton(instance.config);
if (instance.config.object.enabled && !instance.models.centernet && instance.config.object?.modelPath?.includes('centernet')) instance.models.centernet = centernet.load(instance.config);

View File

@@ -41,6 +41,8 @@ export interface FaceResult {
genderScore?: number,
/** detected emotions */
emotion?: Array<{ score: number, emotion: string }>,
/** detected race */
race?: Array<{ score: number, race: string }>,
/** face descriptor */
embedding?: Array<number>,
/** face iris distance from camera */
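Downstream code should treat the new race field like the other optional fields; a usage sketch (human and input assumed to be in scope):

const result = await human.detect(input);
for (const face of result.face) {
  if (face.race && face.race.length > 0) console.log(face.race[0]); // entries are sorted by score, highest first
}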

test/test-gear.js Normal file
View File

@@ -0,0 +1,81 @@
require('@tensorflow/tfjs-node');
const fs = require('fs');
const path = require('path');
const log = require('@vladmandic/pilogger');
const Human = require('../dist/human.node.js').default;
process.env.TF_CPP_MIN_LOG_LEVEL = '2';
const humanConfig = {
backend: 'tensorflow',
face: {
detector: { enabled: true, modelPath: 'file://../human-models/models/blazeface-back.json', cropFactor: 1.6 },
mesh: { enabled: false },
iris: { enabled: false },
description: { enabled: true, modelPath: 'file://../human-models/models/faceres.json' },
gear: { enabled: true, modelPath: 'file://../human-models/models/gear.json' },
ssrnet: { enabled: true, modelPathAge: 'file://../human-models/models/age.json', modelPathGender: 'file://../human-models/models/gender.json' },
emotion: { enabled: false },
},
body: { enabled: false },
hand: { enabled: false },
object: { enabled: false },
gestures: { enabled: false },
};
const human = new Human(humanConfig);
function getImageTensor(imageFile) {
let tensor;
try {
const buffer = fs.readFileSync(imageFile);
tensor = human.tf.node.decodeImage(buffer, 3);
} catch (e) {
log.warn(`error loading image: ${imageFile}: ${e.message}`);
}
return tensor;
}
function printResult(obj) {
if (!obj || !obj.res || !obj.res.face || obj.res.face.length === 0) log.warn('no faces detected');
else obj.res.face.forEach((face, i) => log.data({ face: i, model: obj.model, image: obj.image, age: face.age, gender: face.gender, genderScore: face.genderScore, race: face.race }));
}
async function main() {
log.header();
if (process.argv.length !== 3) {
log.error('parameters: <input-image> or <input-folder> missing');
process.exit(1);
}
if (!fs.existsSync(process.argv[2])) {
log.error(`file not found: ${process.argv[2]}`);
process.exit(1);
}
const stat = fs.statSync(process.argv[2]);
const files = [];
if (stat.isFile()) files.push(process.argv[2]);
else if (stat.isDirectory()) fs.readdirSync(process.argv[2]).forEach((f) => files.push(path.join(process.argv[2], f)));
log.data('input:', files);
await human.load();
let res;
for (const f of files) {
const tensor = getImageTensor(f);
if (!tensor) continue;
human.config.face.description.enabled = true;
human.config.face.gear.enabled = false;
human.config.face.ssrnet.enabled = false;
res = await human.detect(tensor);
printResult({ model: 'faceres', image: f, res });
human.config.face.description.enabled = false;
human.config.face.gear.enabled = true;
human.config.face.ssrnet.enabled = false;
res = await human.detect(tensor);
printResult({ model: 'gear', image: f, res });
human.config.face.description.enabled = false;
human.config.face.gear.enabled = false;
human.config.face.ssrnet.enabled = true;
res = await human.detect(tensor);
printResult({ model: 'ssrnet', image: f, res });
human.tf.dispose(tensor);
}
}
main();