fix mobilefacenet module

pull/233/head
Vladimir Mandic 2021-11-13 17:26:19 -05:00
parent 1cb7b51924
commit 48dc679b2e
24 changed files with 3440 additions and 3105 deletions

View File

@@ -9,7 +9,10 @@
 ## Changelog
-### **HEAD -> main** 2021/11/12 mandic00@live.com
+### **HEAD -> main** 2021/11/13 mandic00@live.com
+### **origin/main** 2021/11/12 mandic00@live.com
 - implement optional face masking
 - add similarity score range normalization

View File

@@ -46,9 +46,13 @@ New:
 - new optional model `liveness`
   checks if input appears to be a real-world live image or a recording
   best used together with `antispoofing` that checks if input appears to have a realistic face
 - new face masking option in `face.config.detector.mask`
+  result is shading of face image outside of face area which is useful for increased sensitivity of other modules that rely on detected face as input
+- new face crop option in `face.config.detector.cropFactor`
+  result is user-definable fine-tuning for other modules that rely on detected face as input
 Other:
 - Improved **Safari** compatibility
 - Improved `similarity` and `match` score range normalization
 - Documentation overhaul
+- Fixed optional `gear`, `ssrnet`, `mobilefacenet` modules
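For reference, a minimal sketch of enabling the options described above, using the same configuration layout as the faceid demo later in this commit (values are illustrative, not library defaults):

```ts
import { Human } from '@vladmandic/human';

// sketch only: turn on the new detector.mask and detector.cropFactor options
// together with the liveness and antispoof checks mentioned above
const config = {
  face: {
    enabled: true,
    detector: { rotation: true, return: true, cropFactor: 1.6, mask: true }, // mask shades the image outside of the detected face area
    antispoof: { enabled: true }, // does the input contain a realistic face
    liveness: { enabled: true },  // is the input a live capture or a recording
  },
};
const human = new Human(config);
```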

View File

@@ -78,6 +78,7 @@ var humanConfig = {
     enabled: true,
     detector: { rotation: true, return: true, cropFactor: 1.6, mask: false },
     description: { enabled: true },
+    mobilefacenet: { enabled: false, modelPath: "https://vladmandic.github.io/human-models/models/mobilefacenet.json" },
     iris: { enabled: true },
     emotion: { enabled: false },
     antispoof: { enabled: true },
@@ -88,6 +89,7 @@ var humanConfig = {
   object: { enabled: false },
   gesture: { enabled: true }
 };
+var matchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 };
 var options = {
   minConfidence: 0.6,
   minSize: 224,
@@ -97,7 +99,8 @@ var options = {
   threshold: 0.5,
   mask: humanConfig.face.detector.mask,
   rotation: humanConfig.face.detector.rotation,
-  cropFactor: humanConfig.face.detector.cropFactor
+  cropFactor: humanConfig.face.detector.cropFactor,
+  ...matchOptions
 };
 var ok = {
   faceCount: false,
@@ -254,6 +257,7 @@ async function detectFace() {
   (_a = dom.canvas.getContext("2d")) == null ? void 0 : _a.clearRect(0, 0, options.minSize, options.minSize);
   if (!current.face || !current.face.tensor || !current.face.embedding)
     return false;
+  console.log("face record:", current.face);
   human.tf.browser.toPixels(current.face.tensor, dom.canvas);
   if (await count() === 0) {
     log2("face database is empty");
@@ -263,7 +267,7 @@ async function detectFace() {
   }
   const db2 = await load();
   const descriptors = db2.map((rec) => rec.descriptor);
-  const res = await human.match(current.face.embedding, descriptors);
+  const res = await human.match(current.face.embedding, descriptors, matchOptions);
   current.record = db2[res.index] || null;
   if (current.record) {
     log2(`best match: ${current.record.name} | id: ${current.record.id} | similarity: ${Math.round(1e3 * res.similarity) / 10}%`);

File diff suppressed because one or more lines are too long

View File

@@ -16,7 +16,8 @@ const humanConfig = { // user configuration for human, used to fine-tune behavior
   face: {
     enabled: true,
     detector: { rotation: true, return: true, cropFactor: 1.6, mask: false }, // return tensor is used to get detected face image
-    description: { enabled: true },
+    description: { enabled: true }, // default model for face descriptor extraction is faceres
+    mobilefacenet: { enabled: false, modelPath: 'https://vladmandic.github.io/human-models/models/mobilefacenet.json' }, // alternative model
     iris: { enabled: true }, // needed to determine gaze direction
     emotion: { enabled: false }, // not needed
     antispoof: { enabled: true }, // enable optional antispoof module
@@ -28,6 +29,9 @@ const humanConfig = { // user configuration for human, used to fine-tune behavior
   gesture: { enabled: true }, // parses face and iris gestures
 };
+// const matchOptions = { order: 2, multiplier: 1000, min: 0.0, max: 1.0 }; // for embedding model
+const matchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 }; // for faceres model
 const options = {
   minConfidence: 0.6, // overall face confidence for box, face, gender, real, live
   minSize: 224, // min input to face descriptor model before degradation
@@ -38,6 +42,7 @@ const options = {
   mask: humanConfig.face.detector.mask,
   rotation: humanConfig.face.detector.rotation,
   cropFactor: humanConfig.face.detector.cropFactor,
+  ...matchOptions,
 };
 const ok = { // must meet all rules
@@ -194,6 +199,8 @@ async function deleteRecord() {
 async function detectFace() {
   dom.canvas.getContext('2d')?.clearRect(0, 0, options.minSize, options.minSize);
   if (!current.face || !current.face.tensor || !current.face.embedding) return false;
+  // eslint-disable-next-line no-console
+  console.log('face record:', current.face);
   human.tf.browser.toPixels(current.face.tensor as unknown as TensorLike, dom.canvas);
   if (await indexDb.count() === 0) {
     log('face database is empty');
@@ -203,7 +210,7 @@ async function detectFace() {
   }
   const db = await indexDb.load();
   const descriptors = db.map((rec) => rec.descriptor);
-  const res = await human.match(current.face.embedding, descriptors);
+  const res = await human.match(current.face.embedding, descriptors, matchOptions);
   current.record = db[res.index] || null;
   if (current.record) {
     log(`best match: ${current.record.name} | id: ${current.record.id} | similarity: ${Math.round(1000 * res.similarity) / 10}%`);
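The `matchOptions` added above control how raw descriptor distance is normalized into a similarity score. A rough usage sketch, assuming the `human` instance and `options.threshold` from the demo and the faceres values shown above:

```ts
// sketch only: match one embedding against enrolled descriptors with explicit normalization options
const faceresMatchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 };

async function findBestMatch(embedding: number[], enrolled: number[][]) {
  const res = await human.match(embedding, enrolled, faceresMatchOptions); // res: { index, similarity, distance }
  if (res.index < 0 || res.similarity < options.threshold) return null;    // reject weak matches
  return { index: res.index, similarity: res.similarity };
}
```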

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

dist/human.esm.js (vendored, 522 lines changed)

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

dist/human.js (vendored, 818 lines changed)

File diff suppressed because one or more lines are too long

dist/human.node-gpu.js (vendored, 975 lines changed)

File diff suppressed because it is too large

File diff suppressed because it is too large

dist/human.node.js (vendored, 975 lines changed)

File diff suppressed because it is too large

View File

@@ -1,112 +0,0 @@
/**
* EfficientPose model implementation
*
* Based on: [**BecauseofAI MobileFace**](https://github.com/becauseofAI/MobileFace)
*
* Obsolete and replaced by `faceres` that performs age/gender/descriptor analysis
*/
import { log, join } from '../util/util';
import * as tf from '../../dist/tfjs.esm.js';
import type { Tensor, GraphModel } from '../tfjs/types';
import { env } from '../util/env';
let model: GraphModel | null;
export async function load(config) {
const modelUrl = join(config.modelBasePath, config.face.embedding.modelPath);
if (env.initial) model = null;
if (!model) {
model = await tf.loadGraphModel(modelUrl) as unknown as GraphModel;
if (!model) log('load model failed:', config.face.embedding.modelPath);
else if (config.debug) log('load model:', modelUrl);
} else if (config.debug) log('cached model:', modelUrl);
return model;
}
export function enhance(input): Tensor {
const image = tf.tidy(() => {
// input received from detector is already normalized to 0..1
// input is also assumed to be straightened
// const data = tf.image.resizeBilinear(input, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false); // just resize to fit the embedding model
// do a tight crop of image and resize it to fit the model
const box = [[0.05, 0.15, 0.85, 0.85]]; // empyrical values for top, left, bottom, right
const tensor = input.image || input.tensor;
if (!(tensor instanceof tf.Tensor)) return null;
if (!model || !model.inputs || !model.inputs[0].shape) return null;
const crop = (tensor.shape.length === 3)
? tf.image.cropAndResize(tf.expandDims(tensor, 0), box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]) // add batch dimension if missing
: tf.image.cropAndResize(tensor, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
// convert to black&white to avoid colorization impact
const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html
const [red, green, blue] = tf.split(crop, 3, 3);
const redNorm = tf.mul(red, rgb[0]);
const greenNorm = tf.mul(green, rgb[1]);
const blueNorm = tf.mul(blue, rgb[2]);
const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
const merge = tf.stack([grayscale, grayscale, grayscale], 3).squeeze(4);
/*
// optional increase image contrast
// or do it per-channel so mean is done on each channel
// or do it based on histogram
const mean = merge.mean();
const factor = 5;
const contrast = merge.sub(mean).mul(factor).add(mean);
*/
// normalize brightness from 0..1
const darken = tf.sub(merge, merge.min());
const lighten = tf.div(darken, darken.max());
return lighten;
});
return image;
}
export async function predict(input, config): Promise<number[]> {
if (!model) return [];
return new Promise(async (resolve) => {
// let data: Array<[]> = [];
let data: Array<number> = [];
if (config.face.embedding.enabled) {
const image = enhance(input);
const dataT = model?.execute(image) as Tensor;
/*
const dataT = tf.tidy(() => {
/*
// if needed convert from NHWC to NCHW
const nchw = image.transpose([3, 0, 1, 2]);
const res = model.execute(image);
// optionally do it twice with flipped image and average results
const res1 = model.execute(image);
const flipped = tf.image.flipLeftRight(image);
const res2 = model.execute(flipped);
const merge = tf.stack([res1, res2], 2).squeeze();
const res = reshape.logSumExp(1);
// optional normalize outputs with l2 normalization
const scaled = tf.tidy(() => {
const l2 = res.norm('euclidean');
const scale = res.div(l2);
return scale;
});
// optional reduce feature vector complexity
const reshape = tf.reshape(res, [128, 2]); // split 256 vectors into 128 x 2
const reduce = reshape.logSumExp(1); // reduce 2nd dimension by calculating logSumExp on it
return reduce;
});
*/
const output = await dataT.data();
data = Array.from(output); // convert typed array to simple array
tf.dispose(dataT);
tf.dispose(image);
}
resolve(data);
});
}

View File

@@ -15,6 +15,7 @@ import * as liveness from './liveness';
 import * as gear from '../gear/gear';
 import * as ssrnetAge from '../gear/ssrnet-age';
 import * as ssrnetGender from '../gear/ssrnet-gender';
+import * as mobilefacenet from './mobilefacenet';
 import type { FaceResult } from '../result';
 import type { Tensor } from '../tfjs/types';
 import type { Human } from '../human';
@@ -28,7 +29,7 @@ export const detectFace = async (parent: Human /* instance of human */, input: T
   let gearRes;
   let genderRes;
   let emotionRes;
-  let embeddingRes;
+  let mobilefacenetRes;
   let antispoofRes;
   let livenessRes;
   let descRes;
@@ -93,7 +94,7 @@ export const detectFace = async (parent: Human /* instance of human */, input: T
      parent.state = 'run:liveness';
      timeStamp = now();
      livenessRes = parent.config.face.liveness?.enabled ? await liveness.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : null;
-     parent.performance.antispoof = env.perfadd ? (parent.performance.antispoof || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
+     parent.performance.liveness = env.perfadd ? (parent.performance.antispoof || 0) + Math.trunc(now() - timeStamp) : Math.trunc(now() - timeStamp);
    }
    parent.analyze('End Liveness:');
@@ -105,7 +106,7 @@ export const detectFace = async (parent: Human /* instance of human */, input: T
      parent.state = 'run:gear';
      timeStamp = now();
      gearRes = parent.config.face['gear']?.enabled ? await gear.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
-     parent.performance.emotion = Math.trunc(now() - timeStamp);
+     parent.performance.gear = Math.trunc(now() - timeStamp);
    }
    parent.analyze('End GEAR:');
@@ -119,10 +120,22 @@ export const detectFace = async (parent: Human /* instance of human */, input: T
      timeStamp = now();
      ageRes = parent.config.face['ssrnet']?.enabled ? await ssrnetAge.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
      genderRes = parent.config.face['ssrnet']?.enabled ? await ssrnetGender.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
-     parent.performance.emotion = Math.trunc(now() - timeStamp);
+     parent.performance.ssrnet = Math.trunc(now() - timeStamp);
    }
    parent.analyze('End SSRNet:');
+   // run mobilefacenet, inherits face from blazeface
+   parent.analyze('Start MobileFaceNet:');
+   if (parent.config.async) {
+     mobilefacenetRes = parent.config.face['mobilefacenet']?.enabled ? mobilefacenet.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
+   } else {
+     parent.state = 'run:mobilefacenet';
+     timeStamp = now();
+     mobilefacenetRes = parent.config.face['mobilefacenet']?.enabled ? await mobilefacenet.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
+     parent.performance.mobilefacenet = Math.trunc(now() - timeStamp);
+   }
+   parent.analyze('End MobileFaceNet:');
    // run emotion, inherits face from blazeface
    parent.analyze('Start Description:');
    if (parent.config.async) {
@@ -137,13 +150,15 @@ export const detectFace = async (parent: Human /* instance of human */, input: T
    // if async wait for results
    if (parent.config.async) {
-     [ageRes, genderRes, emotionRes, embeddingRes, descRes, gearRes, antispoofRes, livenessRes] = await Promise.all([ageRes, genderRes, emotionRes, embeddingRes, descRes, gearRes, antispoofRes, livenessRes]);
+     [ageRes, genderRes, emotionRes, mobilefacenetRes, descRes, gearRes, antispoofRes, livenessRes] = await Promise.all([ageRes, genderRes, emotionRes, mobilefacenetRes, descRes, gearRes, antispoofRes, livenessRes]);
    }
    parent.analyze('Finish Face:');
    // override age/gender if alternative models are used
    if (parent.config.face['ssrnet']?.enabled && ageRes && genderRes) descRes = { age: ageRes.age, gender: genderRes.gender, genderScore: genderRes.genderScore };
    if (parent.config.face['gear']?.enabled && gearRes) descRes = { age: gearRes.age, gender: gearRes.gender, genderScore: gearRes.genderScore, race: gearRes.race };
+   // override descriptor if embedding model is used
+   if (parent.config.face['mobilefacenet']?.enabled && mobilefacenetRes) descRes.descriptor = mobilefacenetRes;
    // calculate iris distance
    // iris: array[ center, left, top, right, bottom]
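With the override above, enabling `face.mobilefacenet` swaps the descriptor that downstream consumers see. A sketch of what that looks like from the caller's side (assumed behavior based on the override; the input handling and modelPath value follow the faceid demo):

```ts
import { Human } from '@vladmandic/human';

// sketch: when face.mobilefacenet is enabled, result.face[n].embedding should carry
// the MobileFaceNet vector instead of the default faceres descriptor
const config = {
  face: {
    enabled: true,
    description: { enabled: true }, // still provides age/gender
    mobilefacenet: { enabled: true, modelPath: 'https://vladmandic.github.io/human-models/models/mobilefacenet.json' },
  },
};
const human = new Human(config);

async function describeFace(input: HTMLImageElement | HTMLVideoElement) {
  const result = await human.detect(input);
  return result.face[0]?.embedding; // descriptor produced by mobilefacenet.predict() when the module is enabled
}
```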

src/face/mobilefacenet.ts (new file, 87 lines)
View File

@@ -0,0 +1,87 @@
/**
 * MobileFaceNet face embedding model implementation
 *
 * Based on: [**BecauseofAI MobileFace**](https://github.com/becauseofAI/MobileFace)
 *
 * Alternative to the default `faceres` model that performs age/gender/descriptor analysis
 */
import { log, join, now } from '../util/util';
import * as tf from '../../dist/tfjs.esm.js';
import type { Tensor, GraphModel } from '../tfjs/types';
import type { Config } from '../config';
import { env } from '../util/env';
let model: GraphModel | null;
const last: Array<number[]> = [];
let lastCount = 0;
let lastTime = 0;
let skipped = Number.MAX_SAFE_INTEGER;
export async function load(config: Config): Promise<GraphModel> {
const modelUrl = join(config.modelBasePath, config.face['mobilefacenet'].modelPath);
if (env.initial) model = null;
if (!model) {
model = await tf.loadGraphModel(modelUrl) as unknown as GraphModel;
if (!model) log('load model failed:', config.face['mobilefacenet'].modelPath);
else if (config.debug) log('load model:', modelUrl);
} else if (config.debug) log('cached model:', modelUrl);
return model;
}
/*
// convert to black&white to avoid colorization impact
const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html
const [red, green, blue] = tf.split(crop, 3, 3);
const redNorm = tf.mul(red, rgb[0]);
const greenNorm = tf.mul(green, rgb[1]);
const blueNorm = tf.mul(blue, rgb[2]);
const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
const merge = tf.stack([grayscale, grayscale, grayscale], 3).squeeze(4);
// optional increase image contrast
// or do it per-channel so mean is done on each channel
// or do it based on histogram
const mean = merge.mean();
const factor = 5;
const contrast = merge.sub(mean).mul(factor).add(mean);
*/
export async function predict(input: Tensor, config: Config, idx, count): Promise<number[]> {
if (!model) return [];
const skipFrame = skipped < (config.face['embedding']?.skipFrames || 0);
const skipTime = (config.face['embedding']?.skipTime || 0) > (now() - lastTime);
if (config.skipAllowed && skipTime && skipFrame && (lastCount === count) && last[idx]) {
skipped++;
return last[idx];
}
return new Promise(async (resolve) => {
let data: Array<number> = [];
if (config.face['embedding']?.enabled && model?.inputs[0].shape) {
const t: Record<string, Tensor> = {};
t.crop = tf.image.resizeBilinear(input, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false); // just resize to fit the embedding model
// do a tight crop of image and resize it to fit the model
// const box = [[0.05, 0.15, 0.85, 0.85]]; // empirical values for top, left, bottom, right
// t.crop = tf.image.cropAndResize(input, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
t.data = model?.execute(t.crop) as Tensor;
/*
// optional normalize outputs with l2 normalization
const scaled = tf.tidy(() => {
const l2 = res.norm('euclidean');
const scale = res.div(l2);
return scale;
});
// optional reduce feature vector complexity
const reshape = tf.reshape(res, [128, 2]); // split 256 vectors into 128 x 2
const reduce = reshape.logSumExp(1); // reduce 2nd dimension by calculating logSumExp on it
*/
const output = await t.data.data();
data = Array.from(output); // convert typed array to simple array
}
last[idx] = data;
lastCount = count;
lastTime = now();
resolve(data);
});
}
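Note that `load()` above reads the model path from `config.face['mobilefacenet']` while `predict()` gates its work and caching on `config.face['embedding']`. A configuration sketch that satisfies both keys as the code is currently written (key names are taken verbatim from the code above; the values are illustrative, not defaults):

```ts
// sketch: config keys this module reads; mobilefacenet supplies the model path,
// embedding supplies the enabled flag plus the frame/time thresholds used for result caching
const embeddingConfig = {
  face: {
    enabled: true,
    mobilefacenet: { enabled: true, modelPath: 'https://vladmandic.github.io/human-models/models/mobilefacenet.json' },
    embedding: { enabled: true, skipFrames: 10, skipTime: 2000 }, // reuse last[idx] while input is considered unchanged
  },
};
```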

View File

@@ -62,6 +62,7 @@ export async function predict(image: Tensor, config: Config, idx, count): Promis
     let age = ageSorted[0][0]; // pick best starting point
     for (let i = 1; i < ageSorted.length; i++) age += ageSorted[i][1] * (ageSorted[i][0] - age); // adjust with each other choice by weight
     obj.age = Math.round(10 * age) / 10;
+    Object.keys(t).forEach((tensor) => tf.dispose(t[tensor]));
     last[idx] = obj;
     lastCount = count;
     lastTime = now();
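The single added line above disposes the intermediate tensors the `ssrnet` age model creates on every call, which previously accumulated. A quick sanity check for this kind of leak fix (a sketch using the `tf` namespace that `human` re-exports; the `human` instance and `input` element are assumed):

```ts
// sketch: tensor count should stay roughly flat across repeated detections once intermediates are disposed
async function checkForTensorLeaks(input: HTMLImageElement) {
  const before = human.tf.memory().numTensors; // tfjs memory bookkeeping exposed via human.tf
  for (let i = 0; i < 10; i++) await human.detect(input);
  const after = human.tf.memory().numTensors;
  console.log('tensor delta after 10 detections:', after - before); // should not keep growing
}
```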

View File

@@ -13,6 +13,7 @@ import * as blazepose from './body/blazepose';
 import * as centernet from './object/centernet';
 import * as efficientpose from './body/efficientpose';
 import * as emotion from './gear/emotion';
+import * as mobilefacenet from './face/mobilefacenet';
 import * as facemesh from './face/facemesh';
 import * as faceres from './face/faceres';
 import * as handpose from './hand/handpose';
@@ -39,7 +40,7 @@ export class Models {
   blazepose: null | GraphModel | Promise<GraphModel> = null;
   centernet: null | GraphModel | Promise<GraphModel> = null;
   efficientpose: null | GraphModel | Promise<GraphModel> = null;
-  embedding: null | GraphModel | Promise<GraphModel> = null;
+  mobilefacenet: null | GraphModel | Promise<GraphModel> = null;
   emotion: null | GraphModel | Promise<GraphModel> = null;
   facedetect: null | GraphModel | Promise<GraphModel> = null;
   faceiris: null | GraphModel | Promise<GraphModel> = null;
@@ -84,6 +85,7 @@ export async function load(instance: Human): Promise<void> {
   if (instance.config.face.enabled && instance.config.face['gear']?.enabled && !instance.models.gear) instance.models.gear = gear.load(instance.config);
   if (instance.config.face.enabled && instance.config.face['ssrnet']?.enabled && !instance.models.ssrnetage) instance.models.ssrnetage = ssrnetAge.load(instance.config);
   if (instance.config.face.enabled && instance.config.face['ssrnet']?.enabled && !instance.models.ssrnetgender) instance.models.ssrnetgender = ssrnetGender.load(instance.config);
+  if (instance.config.face.enabled && instance.config.face['mobilefacenet']?.enabled && !instance.models.mobilefacenet) instance.models.mobilefacenet = mobilefacenet.load(instance.config);
   if (instance.config.hand.enabled && !instance.models.handtrack && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handtrack = handtrack.loadDetect(instance.config);
   if (instance.config.hand.enabled && instance.config.hand.landmarks && !instance.models.handskeleton && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handskeleton = handtrack.loadSkeleton(instance.config);
   if (instance.config.object.enabled && !instance.models.centernet && instance.config.object?.modelPath?.includes('centernet')) instance.models.centernet = centernet.load(instance.config);
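Model loading stays lazy and config-driven, so the new `mobilefacenet` slot is only populated when the matching face option is enabled. A usage sketch (preloading via `human.load()` is optional since detection loads enabled models on demand; the modelPath value mirrors the faceid demo):

```ts
import { Human } from '@vladmandic/human';

// sketch: enable the optional model and preload it so the first detect() call is not delayed
const config = { face: { enabled: true, mobilefacenet: { enabled: true, modelPath: 'https://vladmandic.github.io/human-models/models/mobilefacenet.json' } } };
const human = new Human(config);
await human.load(); // load() above populates only the slots whose config flag is enabled
console.log('mobilefacenet model ready:', !!human.models.mobilefacenet);
```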

View File

@@ -1,26 +1,26 @@
-2021-11-13 12:09:51 INFO:  @vladmandic/human version 2.5.1
+2021-11-13 17:19:22 INFO:  @vladmandic/human version 2.5.1
-2021-11-13 12:09:51 INFO:  User: vlado Platform: linux Arch: x64 Node: v17.0.1
+2021-11-13 17:19:22 INFO:  User: vlado Platform: linux Arch: x64 Node: v17.0.1
-2021-11-13 12:09:51 INFO:  Application: {"name":"@vladmandic/human","version":"2.5.1"}
+2021-11-13 17:19:22 INFO:  Application: {"name":"@vladmandic/human","version":"2.5.1"}
-2021-11-13 12:09:51 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
+2021-11-13 17:19:22 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
-2021-11-13 12:09:51 INFO:  Toolchain: {"build":"0.6.3","esbuild":"0.13.13","typescript":"4.4.4","typedoc":"0.22.8","eslint":"8.2.0"}
+2021-11-13 17:19:22 INFO:  Toolchain: {"build":"0.6.3","esbuild":"0.13.13","typescript":"4.4.4","typedoc":"0.22.8","eslint":"8.2.0"}
-2021-11-13 12:09:51 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
+2021-11-13 17:19:22 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
-2021-11-13 12:09:51 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
+2021-11-13 17:19:22 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]}
-2021-11-13 12:09:51 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
+2021-11-13 17:19:22 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":102,"outputBytes":1275}
-2021-11-13 12:09:51 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":60,"inputBytes":538983,"outputBytes":457718}
+2021-11-13 17:19:23 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":61,"inputBytes":543749,"outputBytes":460526}
-2021-11-13 12:09:51 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
+2021-11-13 17:19:23 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":110,"outputBytes":1283}
-2021-11-13 12:09:51 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":60,"inputBytes":538991,"outputBytes":457722}
+2021-11-13 17:19:23 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":61,"inputBytes":543757,"outputBytes":460530}
-2021-11-13 12:09:51 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
+2021-11-13 17:19:23 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":1,"inputBytes":149,"outputBytes":1350}
-2021-11-13 12:09:52 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":60,"inputBytes":539058,"outputBytes":457794}
+2021-11-13 17:19:23 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":61,"inputBytes":543824,"outputBytes":460602}
-2021-11-13 12:09:52 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
+2021-11-13 17:19:23 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1063,"outputBytes":1652}
-2021-11-13 12:09:52 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2326,"outputBytes":912}
+2021-11-13 17:19:23 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2326,"outputBytes":912}
-2021-11-13 12:09:52 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":60,"inputBytes":538620,"outputBytes":459966}
+2021-11-13 17:19:23 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":61,"inputBytes":543386,"outputBytes":462792}
-2021-11-13 12:09:52 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2562703,"outputBytes":2497652}
+2021-11-13 17:19:23 STATE: Compile: {"name":"tfjs/browser/esm/custom","format":"esm","platform":"browser","input":"tfjs/tf-custom.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2562703,"outputBytes":2497652}
-2021-11-13 12:09:52 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":60,"inputBytes":3035360,"outputBytes":1621354}
+2021-11-13 17:19:24 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":61,"inputBytes":3040126,"outputBytes":1622880}
-2021-11-13 12:09:53 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":60,"inputBytes":3035360,"outputBytes":2963133}
+2021-11-13 17:19:24 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":61,"inputBytes":3040126,"outputBytes":2965877}
-2021-11-13 12:10:13 STATE: Typings: {"input":"src/human.ts","output":"types","files":52}
+2021-11-13 17:19:50 STATE: Typings: {"input":"src/human.ts","output":"types","files":53}
-2021-11-13 12:10:21 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":50,"generated":true}
+2021-11-13 17:20:02 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":50,"generated":true}
-2021-11-13 12:10:21 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5801,"outputBytes":3822}
+2021-11-13 17:20:02 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":5801,"outputBytes":3822}
-2021-11-13 12:10:21 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":14654,"outputBytes":11518}
+2021-11-13 17:20:02 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":15166,"outputBytes":11786}
-2021-11-13 12:11:00 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":92,"errors":0,"warnings":0}
+2021-11-13 17:20:44 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":92,"errors":0,"warnings":0}
-2021-11-13 12:11:01 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
+2021-11-13 17:20:44 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
-2021-11-13 12:11:01 INFO:  Done...
+2021-11-13 17:20:44 INFO:  Done...

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

types/src/face/mobilefacenet.d.ts (vendored, new file, 11 lines)
View File

@@ -0,0 +1,11 @@
/**
 * MobileFaceNet face embedding model implementation
 *
 * Based on: [**BecauseofAI MobileFace**](https://github.com/becauseofAI/MobileFace)
 *
 * Alternative to the default `faceres` model that performs age/gender/descriptor analysis
 */
import type { Tensor, GraphModel } from '../tfjs/types';
import type { Config } from '../config';
export declare function load(config: Config): Promise<GraphModel>;
export declare function predict(input: Tensor, config: Config, idx: any, count: any): Promise<number[]>;

View File

@@ -16,7 +16,7 @@ export declare class Models {
    blazepose: null | GraphModel | Promise<GraphModel>;
    centernet: null | GraphModel | Promise<GraphModel>;
    efficientpose: null | GraphModel | Promise<GraphModel>;
-   embedding: null | GraphModel | Promise<GraphModel>;
+   mobilefacenet: null | GraphModel | Promise<GraphModel>;
    emotion: null | GraphModel | Promise<GraphModel>;
    facedetect: null | GraphModel | Promise<GraphModel>;
    faceiris: null | GraphModel | Promise<GraphModel>;