add optional anti-spoofing module

pull/280/head
Vladimir Mandic 2021-10-13 10:56:56 -04:00
parent 86db81d29c
commit fcc64e845c
16 changed files with 163 additions and 20 deletions

View File

@@ -9,8 +9,10 @@
## Changelog
-### **HEAD -> main** 2021/10/12 mandic00@live.com
+### **HEAD -> main** 2021/10/13 mandic00@live.com
- add node-match advanced example using worker thread pool
- package updates
- optimize image preprocessing
### **release: 2.3.2** 2021/10/11 mandic00@live.com

View File

@@ -31,6 +31,7 @@ import jsonView from './helpers/jsonview.js';
let human;
let userConfig = {
face: { antispoof: { enabled: true } },
// face: { enabled: false },
// body: { enabled: false },
// hand: { enabled: false },
@@ -609,6 +610,7 @@ async function processImage(input, title) {
const prev = document.getElementsByClassName('thumbnail');
if (prev && prev.length > 0) document.getElementById('samples-container').insertBefore(thumb, prev[0]);
else document.getElementById('samples-container').appendChild(thumb);
document.getElementById('samples-container').style.display = 'block';
// finish up
status();
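Note: the demo enables the new module purely through userConfig; nothing else on the page changes. A minimal standalone equivalent, as a hedged sketch (the import specifier, hosted model path, and logging are illustrative assumptions, not part of this commit):

// sketch: enable the optional anti-spoofing module in a consumer app
import { Human } from '@vladmandic/human';

const human = new Human({
  modelBasePath: 'https://vladmandic.github.io/human/models/', // assumed hosted location
  face: { enabled: true, antispoof: { enabled: true } }, // module is disabled by default
});

async function check(input: HTMLImageElement | HTMLVideoElement) {
  const result = await human.detect(input);
  // each detected face now carries an optional `real` score (0..1, higher = more likely live)
  for (const face of result.face) console.log(`score=${face.score} real=${face.real}`);
}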

BIN
models/antispoof.bin Normal file

Binary file not shown.

models/antispoof.json Normal file
View File

@@ -0,0 +1,60 @@
{
"format": "graph-model",
"generatedBy": "https://www.kaggle.com/anku420/fake-face-detection",
"convertedBy": "https://github.com/vladmandic",
"signature":
{
"inputs":
{
"conv2d_input": {"name":"conv2d_input:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"128"},{"size":"128"},{"size":"3"}]}}
},
"outputs":
{
"activation_4": {"name":"Identity:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"1"}]}}
}
},
"modelTopology":
{
"node":
[
{"name":"unknown","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"3"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_0","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_1","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"64"},{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_2","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
{"name":"unknown_3","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_4","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential/flatten/Const","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}}}},
{"name":"unknown_5","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3136"},{"size":"128"}]}}}}},
{"name":"unknown_6","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}}}},
{"name":"unknown_7","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"},{"size":"1"}]}}}}},
{"name":"unknown_8","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"conv2d_input","op":"Placeholder","attr":{"dtype":{"type":"DT_FLOAT"},"shape":{"shape":{"dim":[{"size":"-1"},{"size":"128"},{"size":"128"},{"size":"3"}]}}}},
{"name":"StatefulPartitionedCall/sequential/conv2d/BiasAdd","op":"_FusedConv2D","input":["conv2d_input","unknown","unknown_0"],"device":"/device:CPU:0","attr":{"padding":{"s":"VkFMSUQ="},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/sequential/max_pooling2d/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/sequential/conv2d/BiasAdd"],"attr":{"ksize":{"list":{"i":["1","2","2","1"]}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"VkFMSUQ="},"explicit_paddings":{"list":{}}}},
{"name":"StatefulPartitionedCall/sequential/activation/Relu","op":"Relu","input":["StatefulPartitionedCall/sequential/max_pooling2d/MaxPool"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential/conv2d_1/BiasAdd","op":"_FusedConv2D","input":["StatefulPartitionedCall/sequential/activation/Relu","unknown_1","unknown_2"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"VkFMSUQ="},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}}}},
{"name":"StatefulPartitionedCall/sequential/max_pooling2d_1/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/sequential/conv2d_1/BiasAdd"],"attr":{"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"VkFMSUQ="},"data_format":{"s":"TkhXQw=="},"ksize":{"list":{"i":["1","2","2","1"]}},"explicit_paddings":{"list":{}}}},
{"name":"StatefulPartitionedCall/sequential/activation_1/Relu","op":"Relu","input":["StatefulPartitionedCall/sequential/max_pooling2d_1/MaxPool"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential/conv2d_2/BiasAdd","op":"_FusedConv2D","input":["StatefulPartitionedCall/sequential/activation_1/Relu","unknown_3","unknown_4"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"epsilon":{"f":0},"padding":{"s":"VkFMSUQ="},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/sequential/max_pooling2d_2/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/sequential/conv2d_2/BiasAdd"],"attr":{"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="},"strides":{"list":{"i":["1","2","2","1"]}},"ksize":{"list":{"i":["1","2","2","1"]}},"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential/activation_2/Relu","op":"Relu","input":["StatefulPartitionedCall/sequential/max_pooling2d_2/MaxPool"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/sequential/flatten/Reshape","op":"Reshape","input":["StatefulPartitionedCall/sequential/activation_2/Relu","StatefulPartitionedCall/sequential/flatten/Const"],"attr":{"T":{"type":"DT_FLOAT"},"Tshape":{"type":"DT_INT32"}}},
{"name":"StatefulPartitionedCall/sequential/activation_3/Relu","op":"_FusedMatMul","input":["StatefulPartitionedCall/sequential/flatten/Reshape","unknown_5","unknown_6"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"transpose_b":{"b":false},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"epsilon":{"f":0},"T":{"type":"DT_FLOAT"},"transpose_a":{"b":false}}},
{"name":"StatefulPartitionedCall/sequential/dense_1/BiasAdd","op":"_FusedMatMul","input":["StatefulPartitionedCall/sequential/activation_3/Relu","unknown_7","unknown_8"],"device":"/device:CPU:0","attr":{"transpose_b":{"b":false},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"transpose_a":{"b":false}}},
{"name":"StatefulPartitionedCall/sequential/activation_4/Sigmoid","op":"Sigmoid","input":["StatefulPartitionedCall/sequential/dense_1/BiasAdd"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Identity","op":"Identity","input":["StatefulPartitionedCall/sequential/activation_4/Sigmoid"],"attr":{"T":{"type":"DT_FLOAT"}}}
],
"library": {},
"versions":
{
"producer": 716
}
},
"weightsManifest":
[
{
"paths": ["antispoof.bin"],
"weights": [{"name":"unknown","shape":[3,3,3,64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"unknown_0","shape":[64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"unknown_1","shape":[3,3,64,32],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"unknown_2","shape":[32],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"unknown_3","shape":[3,3,32,16],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"unknown_4","shape":[16],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"StatefulPartitionedCall/sequential/flatten/Const","shape":[2],"dtype":"int32"},{"name":"unknown_5","shape":[3136,128],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"unknown_6","shape":[128],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"unknown_7","shape":[128,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"unknown_8","shape":[1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}}]
}
]
}
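The signature above is the full contract of the converted model: a single float input conv2d_input of shape [batch, 128, 128, 3] and a single sigmoid output of shape [batch, 1]. A hedged sketch of exercising it directly with tfjs-node, outside of Human (the file:// path and the zero-filled input are illustration-only assumptions):

import * as tf from '@tensorflow/tfjs-node';

async function main() {
  const model = await tf.loadGraphModel('file://models/antispoof.json'); // assumed local path
  const input = tf.zeros([1, 128, 128, 3]); // must match the declared input signature
  const output = model.predict(input) as tf.Tensor; // sigmoid activation, shape [1, 1]
  console.log('antispoof score:', (await output.data())[0]); // single 0..1 score
  tf.dispose([input, output]);
}

main();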

View File

@@ -24,7 +24,7 @@ export async function loadDetect(config: Config): Promise<GraphModel> {
const inputs = Object.values(models[0].modelSignature['inputs']);
inputSize[0][0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;
inputSize[0][1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;
-if (!models[0] || !models[0]['modelUrl']) log('load model failed:', config.object.modelPath);
+if (!models[0] || !models[0]['modelUrl']) log('load model failed:', config.body.detector?.modelPath);
else if (config.debug) log('load model:', models[0]['modelUrl']);
} else if (config.debug && models[0]) log('cached model:', models[0]['modelUrl']);
return models[0] as GraphModel;
@@ -39,7 +39,7 @@ export async function loadPose(config: Config): Promise<GraphModel> {
inputSize[1][1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;
if (config.body.modelPath?.includes('lite')) outputNodes = ['ld_3d', 'output_segmentation', 'output_heatmap', 'world_3d', 'output_poseflag'];
else outputNodes = ['Identity', 'Identity_2', 'Identity_3', 'Identity_4', 'Identity_1']; // v2 from pinto full and heavy
-if (!models[1] || !models[1]['modelUrl']) log('load model failed:', config.object.modelPath);
+if (!models[1] || !models[1]['modelUrl']) log('load model failed:', config.body.modelPath);
else if (config.debug) log('load model:', models[1]['modelUrl']);
} else if (config.debug) log('cached model:', models[1]['modelUrl']);
return models[1];

View File

@@ -42,6 +42,13 @@ export interface FaceEmotionConfig {
modelPath: string,
}
/** Anti-spoof part of face configuration */
export interface FaceAntiSpoofConfig {
enabled: boolean,
skipFrames: number,
modelPath: string,
}
/** Controls and configures all face-specific options:
* - face detection, face mesh detection, age, gender, emotion detection and face description
*
@@ -61,6 +68,7 @@ export interface FaceConfig {
iris: Partial<FaceIrisConfig>,
description: Partial<FaceDescriptionConfig>,
emotion: Partial<FaceEmotionConfig>,
antispoof: Partial<FaceAntiSpoofConfig>,
}
/** Controls and configures all body detection specific options
@@ -397,6 +405,14 @@ const config: Config = {
// only used when cacheSensitivity is not zero
minConfidence: 0.1, // threshold for discarding a prediction
},
antispoof: {
enabled: false,
skipFrames: 14, // maximum number of frames to skip before re-running the detector
// only used when cacheSensitivity is not zero
modelPath: 'antispoof.json', // face anti-spoofing model
// can be either absolute path or relative to modelBasePath
},
},
body: {
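The new block follows the same pattern as the other optional face modules: disabled by default, frame-skip caching, and a model path resolved against modelBasePath. An illustrative override (values are examples only, not recommendations):

const userConfig = {
  face: {
    antispoof: {
      enabled: true, // off by default; the model is only loaded when enabled
      skipFrames: 0, // re-run on every frame instead of caching for up to 14 frames
      modelPath: 'antispoof.json', // relative to modelBasePath unless absolute
    },
  },
};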

src/face/antispoof.ts Normal file
View File

@@ -0,0 +1,42 @@
/**
* Anti-spoofing model implementation
*/
import { log, join } from '../util/util';
import type { Config } from '../config';
import type { GraphModel, Tensor } from '../tfjs/types';
import * as tf from '../../dist/tfjs.esm.js';
import { env } from '../util/env';
let model: GraphModel | null;
const cached: Array<number> = [];
let skipped = Number.MAX_SAFE_INTEGER;
let lastCount = 0;
export async function load(config: Config): Promise<GraphModel> {
if (env.initial) model = null;
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.antispoof?.modelPath || '')) as unknown as GraphModel;
if (!model || !model['modelUrl']) log('load model failed:', config.face.antispoof?.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);
return model;
}
export async function predict(image: Tensor, config: Config, idx: number, count: number) {
if (!model) return null;
if ((skipped < (config.face.antispoof?.skipFrames || 0)) && config.skipFrame && (lastCount === count) && cached[idx]) {
skipped++;
return cached[idx];
}
skipped = 0;
return new Promise(async (resolve) => {
const resize = tf.image.resizeBilinear(image, [model?.inputs[0].shape ? model.inputs[0].shape[2] : 0, model?.inputs[0].shape ? model.inputs[0].shape[1] : 0], false);
const res = model?.predict(resize) as Tensor;
const num = (await res.data())[0];
cached[idx] = Math.round(100 * num) / 100;
lastCount = count;
tf.dispose([resize, res]);
resolve(cached[idx]);
});
}
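predict returns the raw sigmoid score rounded to two decimals and leaves any accept/reject decision to the caller. A hedged sketch of a gate built on top of the new result field (the 0.5 cutoff is an assumed starting point, not a value from this commit):

import type { FaceResult } from '@vladmandic/human'; // published result typings

function isLikelyReal(face: FaceResult, threshold = 0.5): boolean { // assumed cutoff
  // `real` is undefined when the module is disabled; treat that as unknown, not as fake
  if (typeof face.real !== 'number') return true;
  return face.real >= threshold;
}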

View File

@@ -23,7 +23,7 @@ export async function load(config: Config): Promise<GraphModel> {
if (env.initial) model = null;
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.detector?.modelPath || '')) as unknown as GraphModel;
-if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
+if (!model || !model['modelUrl']) log('load model failed:', config.face.detector?.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);
inputSize = model.inputs[0].shape ? model.inputs[0].shape[2] : 0;

View File

@@ -8,6 +8,7 @@ import * as tf from '../../dist/tfjs.esm.js';
import * as facemesh from './facemesh';
import * as emotion from '../gear/emotion';
import * as faceres from './faceres';
import * as antispoof from './antispoof';
import type { FaceResult } from '../result';
import type { Tensor } from '../tfjs/types';
import { calculateFaceAngle } from './angles';
@@ -21,6 +22,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
let genderRes;
let emotionRes;
let embeddingRes;
let antispoofRes;
let descRes;
const faceRes: Array<FaceResult> = [];
parent.state = 'run:face';
@@ -55,6 +57,18 @@
}
parent.analyze('End Emotion:');
// run antispoof, inherits face from blazeface
parent.analyze('Start AntiSpoof:');
if (parent.config.async) {
antispoofRes = parent.config.face.antispoof.enabled ? antispoof.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
} else {
parent.state = 'run:antispoof';
timeStamp = now();
antispoofRes = parent.config.face.antispoof.enabled ? await antispoof.predict(faces[i].tensor || tf.tensor([]), parent.config, i, faces.length) : {};
parent.performance.antispoof = Math.trunc(now() - timeStamp);
}
parent.analyze('End AntiSpoof:');
// run gear, inherits face from blazeface
/*
parent.analyze('Start GEAR:');
@@ -83,7 +97,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
// if async wait for results
if (parent.config.async) {
-[ageRes, genderRes, emotionRes, embeddingRes, descRes, gearRes] = await Promise.all([ageRes, genderRes, emotionRes, embeddingRes, descRes, gearRes]);
+[ageRes, genderRes, emotionRes, embeddingRes, descRes, gearRes, antispoofRes] = await Promise.all([ageRes, genderRes, emotionRes, embeddingRes, descRes, gearRes, antispoofRes]);
}
parent.analyze('Finish Face:');
@@ -115,6 +129,7 @@ export const detectFace = async (parent /* instance of human */, input: Tensor):
genderScore: descRes.genderScore,
embedding: descRes.descriptor,
emotion: emotionRes,
real: antispoofRes,
iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
rotation,
tensor,

View File

@@ -126,7 +126,7 @@ export async function load(config: Config): Promise<GraphModel> {
if (env.initial) model = null;
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.mesh?.modelPath || '')) as unknown as GraphModel;
-if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
+if (!model || !model['modelUrl']) log('load model failed:', config.face.mesh?.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);
inputSize = model.inputs[0].shape ? model.inputs[0].shape[2] : 0;

View File

@@ -31,7 +31,7 @@ export async function load(config: Config): Promise<GraphModel> {
if (env.initial) model = null;
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.iris?.modelPath || '')) as unknown as GraphModel;
-if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
+if (!model || !model['modelUrl']) log('load model failed:', config.face.iris?.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);
inputSize = model.inputs[0].shape ? model.inputs[0].shape[2] : 0;

View File

@@ -24,7 +24,7 @@ export async function load(config: Config): Promise<GraphModel> {
if (env.initial) model = null;
if (!model) {
model = await tf.loadGraphModel(join(config.modelBasePath, config.face.emotion?.modelPath || '')) as unknown as GraphModel;
-if (!model || !model['modelUrl']) log('load model failed:', config.body.modelPath);
+if (!model || !model['modelUrl']) log('load model failed:', config.face.emotion?.modelPath);
else if (config.debug) log('load model:', model['modelUrl']);
} else if (config.debug) log('cached model:', model['modelUrl']);
return model;

View File

@@ -68,7 +68,7 @@ export async function loadDetect(config: Config): Promise<GraphModel> {
const inputs = Object.values(models[0].modelSignature['inputs']);
inputSize[0][0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;
inputSize[0][1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;
-if (!models[0] || !models[0]['modelUrl']) log('load model failed:', config.object.modelPath);
+if (!models[0] || !models[0]['modelUrl']) log('load model failed:', config.hand.detector?.modelPath);
else if (config.debug) log('load model:', models[0]['modelUrl']);
} else if (config.debug) log('cached model:', models[0]['modelUrl']);
return models[0];
@@ -81,7 +81,7 @@ export async function loadSkeleton(config: Config): Promise<GraphModel> {
const inputs = Object.values(models[1].modelSignature['inputs']);
inputSize[1][0] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[1].size) : 0;
inputSize[1][1] = Array.isArray(inputs) ? parseInt(inputs[0].tensorShape.dim[2].size) : 0;
-if (!models[1] || !models[1]['modelUrl']) log('load model failed:', config.object.modelPath);
+if (!models[1] || !models[1]['modelUrl']) log('load model failed:', config.hand.skeleton?.modelPath);
else if (config.debug) log('load model:', models[1]['modelUrl']);
} else if (config.debug) log('cached model:', models[1]['modelUrl']);
return models[1];

View File

@@ -2,25 +2,26 @@
* Loader and Validator for all models used by Human
*/
+import { env } from './util/env';
import { log } from './util/util';
+import type { GraphModel } from './tfjs/types';
+import * as agegenderrace from './gear/gear-agegenderrace';
+import * as antispoof from './face/antispoof';
import * as blazeface from './face/blazeface';
-import * as facemesh from './face/facemesh';
-import * as iris from './face/iris';
-import * as faceres from './face/faceres';
+import * as blazepose from './body/blazepose';
+import * as centernet from './object/centernet';
+import * as efficientpose from './body/efficientpose';
import * as emotion from './gear/emotion';
-import * as posenet from './body/posenet';
+import * as facemesh from './face/facemesh';
+import * as faceres from './face/faceres';
import * as handpose from './handpose/handpose';
import * as handtrack from './hand/handtrack';
-import * as blazepose from './body/blazepose';
-import * as efficientpose from './body/efficientpose';
+import * as iris from './face/iris';
import * as movenet from './body/movenet';
import * as nanodet from './object/nanodet';
-import * as centernet from './object/centernet';
+import * as posenet from './body/posenet';
import * as segmentation from './segmentation/segmentation';
-import type { GraphModel } from './tfjs/types';
import type { Human } from './human';
-import { env } from './util/env';
-import * as agegenderrace from './gear/gear-agegenderrace';
/** Instances of all possible TFJS Graph Models used by Human
* - loaded as needed based on configuration
@@ -49,6 +50,7 @@ export class Models {
nanodet: null | GraphModel | Promise<GraphModel> = null;
posenet: null | GraphModel | Promise<GraphModel> = null;
segmentation: null | GraphModel | Promise<GraphModel> = null;
antispoof: null | GraphModel | Promise<GraphModel> = null;
}
export function reset(instance: Human): void {
@@ -66,6 +68,7 @@ export async function load(instance: Human): Promise<void> {
if (instance.config.face.enabled && !instance.models.facedetect) instance.models.facedetect = blazeface.load(instance.config);
if (instance.config.face.enabled && instance.config.face.mesh?.enabled && !instance.models.facemesh) instance.models.facemesh = facemesh.load(instance.config);
if (instance.config.face.enabled && instance.config.face.iris?.enabled && !instance.models.faceiris) instance.models.faceiris = iris.load(instance.config);
if (instance.config.face.enabled && instance.config.face.antispoof?.enabled && !instance.models.antispoof) instance.models.antispoof = antispoof.load(instance.config);
if (instance.config.hand.enabled && !instance.models.handtrack && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handtrack = handtrack.loadDetect(instance.config);
if (instance.config.hand.enabled && instance.config.hand.landmarks && !instance.models.handskeleton && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handskeleton = handtrack.loadSkeleton(instance.config);
if (instance.config.body.enabled && !instance.models.posenet && instance.config.body?.modelPath?.includes('posenet')) instance.models.posenet = posenet.load(instance.config);
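Loading remains config-gated, so antispoof.json is only fetched when face.antispoof.enabled is true. A short hedged sketch of eager preloading so the first detect() call does not pay the download cost (human.load() and human.warmup() are existing library methods):

import { Human } from '@vladmandic/human';

const human = new Human({ face: { antispoof: { enabled: true } } });

async function warm() {
  await human.load(); // downloads and initializes every model enabled in config
  await human.warmup(); // optional dummy inference so the first real detect() is fast
}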

View File

@@ -29,6 +29,7 @@ export type Point = [number, number, number?];
* - embedding: facial descriptor as array of numerical elements
* - iris: iris distance from current viewpoint as distance value in centimeters for a typical camera
* field of view of 88 degrees. value should be adjusted manually as needed
* - real: anti-spoofing analysis to determine if face is real or fake
* - rotation: face rotation that contains both angles and matrix used for 3d transformations
* - angle: face angle as object with values for roll, yaw and pitch angles
* - matrix: 3d transformation matrix as array of numeric values
@@ -51,6 +52,7 @@ export interface FaceResult {
emotion?: Array<{ score: number, emotion: string }>,
embedding?: Array<number>,
iris?: number,
real?: number,
rotation?: {
angle: { roll: number, yaw: number, pitch: number },
matrix: [number, number, number, number, number, number, number, number, number],
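With the interface extended, consumers read the score alongside the other per-face properties. A minimal sketch, reusing a configured human instance from the earlier examples (the logging format is illustrative):

async function report(input: HTMLImageElement) {
  const result = await human.detect(input);
  for (const f of result.face) {
    // `real` is only populated when face.antispoof.enabled is true
    const label = typeof f.real === 'number' ? `${Math.trunc(100 * f.real)}% real` : 'n/a';
    console.log(`face #${f.id}: ${label}`);
  }
}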

View File

@@ -203,6 +203,7 @@ export async function face(inCanvas: HTMLCanvasElement | OffscreenCanvas, result
if (f.genderScore) labels.push(`${f.gender || ''} ${Math.trunc(100 * f.genderScore)}%`);
if (f.age) labels.push(`age: ${f.age || ''}`);
if (f.iris) labels.push(`distance: ${f.iris}`);
if (f.real) labels.push(`real: ${Math.trunc(100 * f.real)}%`);
if (f.emotion && f.emotion.length > 0) {
const emotion = f.emotion.map((a) => `${Math.trunc(100 * a.score)}% ${a.emotion}`);
if (emotion.length > 3) emotion.length = 3;