enable cross origin isolation

pull/293/head
Vladimir Mandic 2021-04-24 18:43:59 -04:00
parent 01c9bb24b5
commit a05b9e7774
8 changed files with 13 additions and 116 deletions

View File

@ -1,6 +1,6 @@
# @vladmandic/human
Version: **1.6.1**
Version: **1.7.0**
Description: **Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition**
Author: **Vladimir Mandic <mandic00@live.com>**
@ -11,9 +11,6 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
### **HEAD -> main** 2021/04/24 mandic00@live.com
### **origin/main** 2021/04/24 mandic00@live.com
- remove efficientpose
- major version rebuild

View File

@ -505,12 +505,10 @@ function setupMenu() {
menu.process.addRange('skip frames', human.config.face.detector, 'skipFrames', 0, 50, 1, (val) => {
human.config.face.detector.skipFrames = parseInt(val);
human.config.face.emotion.skipFrames = parseInt(val);
human.config.face.age.skipFrames = parseInt(val);
human.config.hand.skipFrames = parseInt(val);
});
menu.process.addRange('min confidence', human.config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
human.config.face.detector.minConfidence = parseFloat(val);
human.config.face.gender.minConfidence = parseFloat(val);
human.config.face.emotion.minConfidence = parseFloat(val);
human.config.hand.minConfidence = parseFloat(val);
});
@ -536,9 +534,7 @@ function setupMenu() {
menu.models.addBool('face detect', human.config.face, 'enabled', (val) => human.config.face.enabled = val);
menu.models.addBool('face mesh', human.config.face.mesh, 'enabled', (val) => human.config.face.mesh.enabled = val);
menu.models.addBool('face iris', human.config.face.iris, 'enabled', (val) => human.config.face.iris.enabled = val);
menu.models.addBool('face description', human.config.face.description, 'enabled', (val) => human.config.face.age.description = val);
// menu.models.addBool('face age', human.config.face.age, 'enabled', (val) => human.config.face.age.enabled = val);
// menu.models.addBool('face gender', human.config.face.gender, 'enabled', (val) => human.config.face.gender.enabled = val);
menu.models.addBool('face description', human.config.face.description, 'enabled', (val) => human.config.face.description.enabled = val);
menu.models.addBool('face emotion', human.config.face.emotion, 'enabled', (val) => human.config.face.emotion.enabled = val);
menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
menu.models.addBool('body pose', human.config.body, 'enabled', (val) => human.config.body.enabled = val);

View File

@ -1,6 +1,6 @@
{
"name": "@vladmandic/human",
"version": "1.6.1",
"version": "1.7.0",
"description": "Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition",
"sideEffects": false,
"main": "dist/human.node.js",

View File

@ -129,17 +129,7 @@ export interface Config {
enabled: boolean,
modelPath: string,
skipFrames: number,
},
age: {
enabled: boolean,
modelPath: string,
skipFrames: number,
},
gender: {
enabled: boolean,
minConfidence: number,
modelPath: string,
skipFrames: number,
},
emotion: {
enabled: boolean,
@ -147,10 +137,6 @@ export interface Config {
skipFrames: number,
modelPath: string,
},
embedding: {
enabled: boolean,
modelPath: string,
},
},
/** Controls and configures all body detection specific options
* - enabled: true/false
@ -323,6 +309,7 @@ const config: Config = {
// can be either absolute path or relative to modelBasePath
skipFrames: 31, // how many frames to go without re-running the detector
// only used for video inputs
minConfidence: 0.1, // threshold for discarding a prediction
},
emotion: {
@ -332,29 +319,6 @@ const config: Config = {
modelPath: 'emotion.json', // face emotion model
// can be either absolute path or relative to modelBasePath
},
age: {
enabled: false, // obsolete, replaced by description module
modelPath: 'age.json', // age model
// can be either absolute path or relative to modelBasePath
skipFrames: 33, // how many frames to go without re-running the detector
// only used for video inputs
},
gender: {
enabled: false, // obsolete, replaced by description module
minConfidence: 0.1, // threshold for discarding a prediction
modelPath: 'gender.json', // gender model
// can be either absolute path or relative to modelBasePath
skipFrames: 34, // how many frames to go without re-running the detector
// only used for video inputs
},
embedding: {
enabled: false, // obsolete, replaced by description module
modelPath: 'mobileface.json', // face descriptor model
// can be either absolute path or relative to modelBasePath
},
},
body: {

View File

@ -1,9 +1,6 @@
import { log, now } from './helpers';
import * as tf from '../dist/tfjs.esm.js';
import * as age from './age/age';
import * as gender from './gender/gender';
import * as emotion from './emotion/emotion';
import * as embedding from './embedding/embedding';
import * as faceres from './faceres/faceres';
type Tensor = typeof tf.Tensor;
@ -147,28 +144,6 @@ export const detectFace = async (parent, input): Promise<any> => {
const rotation = calculateFaceAngle(face, [input.shape[2], input.shape[1]]);
// run age, inherits face from blazeface
parent.analyze('Start Age:');
if (parent.config.async) {
ageRes = parent.config.face.age.enabled ? age.predict(face.image, parent.config) : {};
} else {
parent.state = 'run:age';
timeStamp = now();
ageRes = parent.config.face.age.enabled ? await age.predict(face.image, parent.config) : {};
parent.perf.age = Math.trunc(now() - timeStamp);
}
// run gender, inherits face from blazeface
parent.analyze('Start Gender:');
if (parent.config.async) {
genderRes = parent.config.face.gender.enabled ? gender.predict(face.image, parent.config) : {};
} else {
parent.state = 'run:gender';
timeStamp = now();
genderRes = parent.config.face.gender.enabled ? await gender.predict(face.image, parent.config) : {};
parent.perf.gender = Math.trunc(now() - timeStamp);
}
// run emotion, inherits face from blazeface
parent.analyze('Start Emotion:');
if (parent.config.async) {
@ -181,18 +156,6 @@ export const detectFace = async (parent, input): Promise<any> => {
}
parent.analyze('End Emotion:');
// run embedding, inherits face from blazeface
parent.analyze('Start Embedding:');
if (parent.config.async) {
embeddingRes = parent.config.face.embedding.enabled ? embedding.predict(face, parent.config) : [];
} else {
parent.state = 'run:embedding';
timeStamp = now();
embeddingRes = parent.config.face.embedding.enabled ? await embedding.predict(face, parent.config) : [];
parent.perf.embedding = Math.trunc(now() - timeStamp);
}
parent.analyze('End Embedding:');
// run description, inherits face from blazeface
parent.analyze('Start Description:');
if (parent.config.async) {
@ -226,10 +189,10 @@ export const detectFace = async (parent, input): Promise<any> => {
// combine results
faceRes.push({
...face,
age: descRes.age || ageRes.age,
gender: descRes.gender || genderRes.gender,
genderConfidence: descRes.genderConfidence || genderRes.confidence,
embedding: descRes.descriptor || embeddingRes,
age: descRes.age,
gender: descRes.gender,
genderConfidence: descRes.genderConfidence,
embedding: descRes.descriptor,
emotion: emotionRes,
iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
rotation,

View File

@ -121,7 +121,7 @@ export async function predict(image, config) {
tf.tidy(() => {
const gender = resT.find((t) => t.shape[1] === 1).dataSync();
const confidence = Math.trunc(200 * Math.abs((gender[0] - 0.5))) / 100;
if (confidence > config.face.gender.minConfidence) {
if (confidence > config.face.description.minConfidence) {
obj.gender = gender[0] <= 0.5 ? 'female' : 'male';
obj.genderConfidence = Math.min(0.99, confidence);
}

View File

@ -4,15 +4,11 @@ import * as tf from '../dist/tfjs.esm.js';
import * as backend from './tfjs/backend';
import * as faceall from './faceall';
import * as facemesh from './blazeface/facemesh';
import * as age from './age/age';
import * as gender from './gender/gender';
import * as faceres from './faceres/faceres';
import * as emotion from './emotion/emotion';
import * as embedding from './embedding/embedding';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
import * as blazepose from './blazepose/blazepose';
import * as efficientpose from './efficientpose/efficientpose';
import * as nanodet from './nanodet/nanodet';
import * as gesture from './gesture/gesture';
import * as image from './image/image';
@ -103,8 +99,6 @@ export class Human {
/** Internal: Currently loaded classes */
classes: {
facemesh: typeof facemesh;
age: typeof age;
gender: typeof gender;
emotion: typeof emotion;
body: typeof posenet | typeof blazepose;
hand: typeof handpose;
@ -162,8 +156,6 @@ export class Human {
// export raw access to underlying models
this.classes = {
facemesh,
age,
gender,
emotion,
faceres,
body: this.config.body.modelPath.includes('posenet') ? posenet : blazepose,
@ -212,10 +204,9 @@ export class Human {
/** Similarity method calculates similarity between two provided face descriptors (face embeddings)
* - Calculation is based on normalized Minkowski distance between the two descriptors
*/
// eslint-disable-next-line class-methods-use-this
similarity(embedding1: Array<number>, embedding2: Array<number>): number {
if (this.config.face.description.enabled) return faceres.similarity(embedding1, embedding2);
if (this.config.face.embedding.enabled) return embedding.similarity(embedding1, embedding2);
return 0;
return faceres.similarity(embedding1, embedding2);
}
/** Enhance method performs additional enhancements to the face image previously detected for further processing
@ -262,39 +253,27 @@ export class Human {
if (this.config.async) {
[
this.models.face,
this.models.age,
this.models.gender,
this.models.emotion,
this.models.embedding,
this.models.handpose,
this.models.posenet,
this.models.blazepose,
this.models.efficientpose,
this.models.nanodet,
this.models.faceres,
] = await Promise.all([
this.models.face || (this.config.face.enabled ? facemesh.load(this.config) : null),
this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
this.models.embedding || ((this.config.face.enabled && this.config.face.embedding.enabled) ? embedding.load(this.config) : null),
this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
this.models.posenet || (this.config.body.enabled && this.config.body.modelPath.includes('posenet') ? <any>posenet.load(this.config) : null),
this.models.posenet || (this.config.body.enabled && this.config.body.modelPath.includes('posenet') ? posenet.load(this.config) : null),
this.models.blazepose || (this.config.body.enabled && this.config.body.modelPath.includes('blazepose') ? blazepose.load(this.config) : null),
this.models.efficientpose || (this.config.body.enabled && this.config.body.modelPath.includes('efficientpose') ? efficientpose.load(this.config) : null),
this.models.nanodet || (this.config.object.enabled ? nanodet.load(this.config) : null),
this.models.faceres || ((this.config.face.enabled && this.config.face.description.enabled) ? faceres.load(this.config) : null),
]);
} else {
if (this.config.face.enabled && !this.models.face) this.models.face = await facemesh.load(this.config);
if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding) this.models.embedding = await embedding.load(this.config);
if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
if (this.config.body.enabled && !this.models.posenet && this.config.body.modelPath.includes('posenet')) this.models.posenet = await posenet.load(this.config);
if (this.config.body.enabled && !this.models.blazepose && this.config.body.modelPath.includes('blazepose')) this.models.blazepose = await blazepose.load(this.config);
if (this.config.body.enabled && !this.models.efficientpose && this.config.body.modelPath.includes('efficientpose')) this.models.efficientpose = await efficientpose.load(this.config);
if (this.config.object.enabled && !this.models.nanodet) this.models.nanodet = await nanodet.load(this.config);
if (this.config.face.enabled && this.config.face.description.enabled && !this.models.faceres) this.models.faceres = await faceres.load(this.config);
}
@ -446,14 +425,12 @@ export class Human {
if (this.config.async) {
if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? posenet.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? blazepose.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('efficientpose')) bodyRes = this.config.body.enabled ? efficientpose.predict(process.tensor, this.config) : [];
if (this.perf.body) delete this.perf.body;
} else {
this.state = 'run:body';
timeStamp = now();
if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('efficientpose')) bodyRes = this.config.body.enabled ? await efficientpose.predict(process.tensor, this.config) : [];
current = Math.trunc(now() - timeStamp);
if (current > 0) this.perf.body = current;
}

2
wiki

@ -1 +1 @@
Subproject commit ee4cf3aa27940b10e275ef9e8119e220c4b2d70d
Subproject commit e06119f8538250b84e4ff4caed40746b749a3531