implemented 3d face angle calculations

pull/91/head
Vladimir Mandic 2021-03-06 17:22:47 -05:00
parent 6671414443
commit be8de4b901
24 changed files with 720 additions and 656 deletions

View File

@ -1,6 +1,6 @@
# Human Library # Human Library
**3D Face Detection, Face Embedding & Recognition,** **3D Face Detection & Rotation Tracking, Face Embedding & Recognition,**
**Body Pose Tracking, Hand & Finger Tracking,** **Body Pose Tracking, Hand & Finger Tracking,**
**Iris Analysis, Age & Gender & Emotion Prediction** **Iris Analysis, Age & Gender & Emotion Prediction**
**& Gesture Recognition** **& Gesture Recognition**

View File

@ -4,4 +4,3 @@
- Prune pre-packaged models - Prune pre-packaged models
- Build Face embedding database - Build Face embedding database
- Dynamic sample processing - Dynamic sample processing
- Update screenshots

View File

@ -2,18 +2,18 @@ import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human
import Menu from './menu.js'; import Menu from './menu.js';
import GLBench from './gl-bench.js'; import GLBench from './gl-bench.js';
const userConfig = { backend: 'wasm' }; // add any user configuration overrides const userConfig = { backend: 'webgl' }; // add any user configuration overrides
/* /*
const userConfig = { const userConfig = {
backend: 'wasm', backend: 'wasm',
async: false, async: false,
warmup: 'full', warmup: 'face',
videoOptimized: false, videoOptimized: false,
face: { enabled: false, iris: { enabled: true }, mesh: { enabled: true }, age: { enabled: true }, gender: { enabled: true }, emotion: { enabled: true }, embedding: { enabled: true } }, face: { enabled: true, iris: { enabled: false }, mesh: { enabled: true }, age: { enabled: false }, gender: { enabled: false }, emotion: { enabled: false }, embedding: { enabled: false } },
hand: { enabled: false }, hand: { enabled: false },
gestures: { enabled: true }, gestures: { enabled: true },
body: { enabled: true, modelType: 'blazepose', modelPath: '../models/blazepose.json' }, body: { enabled: false, modelType: 'blazepose', modelPath: '../models/blazepose.json' },
}; };
*/ */
@ -129,10 +129,11 @@ async function drawResults(input) {
} }
// draw all results // draw all results
await human.draw.face(canvas, result.face); human.draw.face(canvas, result.face);
await human.draw.body(canvas, result.body); human.draw.body(canvas, result.body);
await human.draw.hand(canvas, result.hand); human.draw.hand(canvas, result.hand);
await human.draw.gesture(canvas, result.gesture); human.draw.gesture(canvas, result.gesture);
human.draw.angles(canvas, result.face);
await calcSimmilariry(result); await calcSimmilariry(result);
// update log // update log

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,7 +1,7 @@
{ {
"inputs": { "inputs": {
"dist/human.esm.js": { "dist/human.esm.js": {
"bytes": 1352771, "bytes": 1353316,
"imports": [] "imports": []
}, },
"demo/menu.js": { "demo/menu.js": {
@ -13,7 +13,7 @@
"imports": [] "imports": []
}, },
"demo/browser.js": { "demo/browser.js": {
"bytes": 27984, "bytes": 28008,
"imports": [ "imports": [
{ {
"path": "dist/human.esm.js", "path": "dist/human.esm.js",
@ -35,7 +35,7 @@
"imports": [], "imports": [],
"exports": [], "exports": [],
"inputs": {}, "inputs": {},
"bytes": 2059743 "bytes": 2062279
}, },
"dist/demo-browser-index.js": { "dist/demo-browser-index.js": {
"imports": [], "imports": [],
@ -43,7 +43,7 @@
"entryPoint": "demo/browser.js", "entryPoint": "demo/browser.js",
"inputs": { "inputs": {
"dist/human.esm.js": { "dist/human.esm.js": {
"bytesInOutput": 1345275 "bytesInOutput": 1345820
}, },
"demo/menu.js": { "demo/menu.js": {
"bytesInOutput": 10696 "bytesInOutput": 10696
@ -52,10 +52,10 @@
"bytesInOutput": 6759 "bytesInOutput": 6759
}, },
"demo/browser.js": { "demo/browser.js": {
"bytesInOutput": 17494 "bytesInOutput": 17496
} }
}, },
"bytes": 1387609 "bytes": 1388156
} }
} }
} }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

386
dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

14
dist/human.esm.json vendored
View File

@ -450,7 +450,7 @@
"imports": [] "imports": []
}, },
"src/draw.ts": { "src/draw.ts": {
"bytes": 16222, "bytes": 16861,
"imports": [ "imports": [
{ {
"path": "config.js", "path": "config.js",
@ -463,7 +463,7 @@
] ]
}, },
"src/human.ts": { "src/human.ts": {
"bytes": 21589, "bytes": 22707,
"imports": [ "imports": [
{ {
"path": "src/log.ts", "path": "src/log.ts",
@ -553,7 +553,7 @@
"imports": [], "imports": [],
"exports": [], "exports": [],
"inputs": {}, "inputs": {},
"bytes": 1977987 "bytes": 1980477
}, },
"dist/human.esm.js": { "dist/human.esm.js": {
"imports": [], "imports": [],
@ -575,7 +575,7 @@
"bytesInOutput": 394 "bytesInOutput": 394
}, },
"dist/tfjs.esm.js": { "dist/tfjs.esm.js": {
"bytesInOutput": 1056721 "bytesInOutput": 1056723
}, },
"src/tfjs/backend.ts": { "src/tfjs/backend.ts": {
"bytesInOutput": 1053 "bytesInOutput": 1053
@ -596,7 +596,7 @@
"bytesInOutput": 5043 "bytesInOutput": 5043
}, },
"src/human.ts": { "src/human.ts": {
"bytesInOutput": 11629 "bytesInOutput": 11955
}, },
"src/faceboxes/faceboxes.ts": { "src/faceboxes/faceboxes.ts": {
"bytesInOutput": 1576 "bytesInOutput": 1576
@ -686,10 +686,10 @@
"bytesInOutput": 2583 "bytesInOutput": 2583
}, },
"src/draw.ts": { "src/draw.ts": {
"bytesInOutput": 9597 "bytesInOutput": 9814
} }
}, },
"bytes": 1352771 "bytes": 1353316
} }
} }
} }

14
dist/human.iife.json vendored
View File

@ -450,7 +450,7 @@
"imports": [] "imports": []
}, },
"src/draw.ts": { "src/draw.ts": {
"bytes": 16222, "bytes": 16861,
"imports": [ "imports": [
{ {
"path": "config.js", "path": "config.js",
@ -463,7 +463,7 @@
] ]
}, },
"src/human.ts": { "src/human.ts": {
"bytes": 21589, "bytes": 22707,
"imports": [ "imports": [
{ {
"path": "src/log.ts", "path": "src/log.ts",
@ -553,7 +553,7 @@
"imports": [], "imports": [],
"exports": [], "exports": [],
"inputs": {}, "inputs": {},
"bytes": 1977998 "bytes": 1980488
}, },
"dist/human.ts": { "dist/human.ts": {
"imports": [], "imports": [],
@ -567,7 +567,7 @@
"bytesInOutput": 1690 "bytesInOutput": 1690
}, },
"src/human.ts": { "src/human.ts": {
"bytesInOutput": 11665 "bytesInOutput": 11991
}, },
"src/log.ts": { "src/log.ts": {
"bytesInOutput": 252 "bytesInOutput": 252
@ -576,7 +576,7 @@
"bytesInOutput": 394 "bytesInOutput": 394
}, },
"dist/tfjs.esm.js": { "dist/tfjs.esm.js": {
"bytesInOutput": 1056721 "bytesInOutput": 1056723
}, },
"src/tfjs/backend.ts": { "src/tfjs/backend.ts": {
"bytesInOutput": 1053 "bytesInOutput": 1053
@ -684,10 +684,10 @@
"bytesInOutput": 2583 "bytesInOutput": 2583
}, },
"src/draw.ts": { "src/draw.ts": {
"bytesInOutput": 9597 "bytesInOutput": 9814
} }
}, },
"bytes": 1352813 "bytes": 1353358
} }
} }
} }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

8
dist/human.node.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

12
dist/human.node.json vendored
View File

@ -450,7 +450,7 @@
"imports": [] "imports": []
}, },
"src/draw.ts": { "src/draw.ts": {
"bytes": 16222, "bytes": 16861,
"imports": [ "imports": [
{ {
"path": "config.js", "path": "config.js",
@ -463,7 +463,7 @@
] ]
}, },
"src/human.ts": { "src/human.ts": {
"bytes": 21589, "bytes": 22707,
"imports": [ "imports": [
{ {
"path": "src/log.ts", "path": "src/log.ts",
@ -553,7 +553,7 @@
"imports": [], "imports": [],
"exports": [], "exports": [],
"inputs": {}, "inputs": {},
"bytes": 744738 "bytes": 747228
}, },
"dist/human.node-gpu.js": { "dist/human.node-gpu.js": {
"imports": [], "imports": [],
@ -570,7 +570,7 @@
"bytesInOutput": 1677 "bytesInOutput": 1677
}, },
"src/human.ts": { "src/human.ts": {
"bytesInOutput": 11632 "bytesInOutput": 11958
}, },
"src/log.ts": { "src/log.ts": {
"bytesInOutput": 251 "bytesInOutput": 251
@ -684,10 +684,10 @@
"bytesInOutput": 2580 "bytesInOutput": 2580
}, },
"src/draw.ts": { "src/draw.ts": {
"bytesInOutput": 9486 "bytesInOutput": 9699
} }
}, },
"bytes": 290185 "bytes": 290724
} }
} }
} }

386
dist/human.ts vendored

File diff suppressed because one or more lines are too long

4
dist/human.ts.map vendored

File diff suppressed because one or more lines are too long

View File

@ -342,6 +342,26 @@ export async function hand(inCanvas, result) {
} }
} }
/**
 * Draw face rotation-angle indicators on the given canvas.
 * Work in progress: input validation and context styling are wired up,
 * but the actual angle visualization is not implemented yet.
 */
export async function angles(inCanvas, result) {
  if (!result || !inCanvas || !(inCanvas instanceof HTMLCanvasElement)) return;
  const ctx = inCanvas.getContext('2d');
  if (!ctx) return;
  // prime the context with the shared draw options so future rendering code inherits them
  ctx.font = options.font;
  ctx.strokeStyle = options.color;
  ctx.fillStyle = options.color;
  ctx.lineWidth = options.lineWidth;
  // todo: sketch of intended roll visualization, disabled until finalized:
  // const r = 200;
  // for (const res of result) {
  //   ctx.moveTo(inCanvas.width - r, inCanvas.height - r);
  //   ctx.lineTo(inCanvas.width - r + (r * Math.cos(res.angle.roll)), inCanvas.height - r + (r * Math.sin(res.angle.roll)));
  //   ctx.stroke();
  // }
}
export async function canvas(inCanvas, outCanvas) { export async function canvas(inCanvas, outCanvas) {
if (!inCanvas || !outCanvas) return; if (!inCanvas || !outCanvas) return;
if (!(inCanvas instanceof HTMLCanvasElement) || !(outCanvas instanceof HTMLCanvasElement)) return; if (!(inCanvas instanceof HTMLCanvasElement) || !(outCanvas instanceof HTMLCanvasElement)) return;
@ -356,4 +376,5 @@ export async function all(inCanvas, result) {
body(inCanvas, result.body); body(inCanvas, result.body);
hand(inCanvas, result.hand); hand(inCanvas, result.hand);
gesture(inCanvas, result.gesture); gesture(inCanvas, result.gesture);
angles(inCanvas, result.face);
} }

View File

@ -23,7 +23,7 @@ export const face = (res) => {
const gestures: Array<{ face: number, gesture: string }> = []; const gestures: Array<{ face: number, gesture: string }> = [];
for (let i = 0; i < res.length; i++) { for (let i = 0; i < res.length; i++) {
if (res[i].mesh && res[i].mesh.length > 0) { if (res[i].mesh && res[i].mesh.length > 0) {
const eyeFacing = res[i].mesh[35][2] - res[i].mesh[263][2]; const eyeFacing = res[i].mesh[33][2] - res[i].mesh[263][2];
if (Math.abs(eyeFacing) < 10) gestures.push({ face: i, gesture: 'facing camera' }); if (Math.abs(eyeFacing) < 10) gestures.push({ face: i, gesture: 'facing camera' });
else gestures.push({ face: i, gesture: `facing ${eyeFacing < 0 ? 'right' : 'left'}` }); else gestures.push({ face: i, gesture: `facing ${eyeFacing < 0 ? 'right' : 'left'}` });
const openLeft = Math.abs(res[i].mesh[374][1] - res[i].mesh[386][1]) / Math.abs(res[i].mesh[443][1] - res[i].mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord const openLeft = Math.abs(res[i].mesh[374][1] - res[i].mesh[386][1]) / Math.abs(res[i].mesh[443][1] - res[i].mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord

View File

@ -248,6 +248,25 @@ class Human {
} }
} }
// calculates face rotation angles (roll, yaw, pitch, in radians) from facemesh keypoints
// returns an empty object when the mesh is missing or too short to read the required points
calculateFaceAngle = (mesh) => {
  // guard must cover every index read below (10, 33, 152, 263),
  // so the mesh needs at least 264 points; the previous check of 152
  // allowed meshes of length 153-263 to crash at mesh[263]
  if (!mesh || mesh.length < 264) return {};
  // angle of the line from point a to point b, measured from the positive x-axis
  const radians = (a1, a2, b1, b2) => Math.atan2(b2 - a2, b1 - a1);
  // kept (unused) for future reporting of angles in degrees
  // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
  const degrees = (theta) => Math.abs(((theta * 180) / Math.PI) % 360);
  const angle = {
    // roll is face lean left/right
    // looking at x,y of outside corners of leftEye and rightEye
    roll: radians(mesh[33][0], mesh[33][1], mesh[263][0], mesh[263][1]),
    // yaw is face turn left/right
    // looking at x,z of outside corners of leftEye and rightEye
    yaw: radians(mesh[33][0], mesh[33][2], mesh[263][0], mesh[263][2]),
    // pitch is face move up/down
    // looking at y,z of top and bottom points of the face
    pitch: radians(mesh[10][1], mesh[10][2], mesh[152][1], mesh[152][2]),
  };
  return angle;
}
async detectFace(input) { async detectFace(input) {
// run facemesh, includes blazeface and iris // run facemesh, includes blazeface and iris
// eslint-disable-next-line no-async-promise-executor // eslint-disable-next-line no-async-promise-executor
@ -256,7 +275,24 @@ class Human {
let genderRes; let genderRes;
let emotionRes; let emotionRes;
let embeddingRes; let embeddingRes;
const faceRes: Array<{ confidence: number, boxConfidence: number, faceConfidence: number, box: any, mesh: any, meshRaw: any, boxRaw: any, annotations: any, age: number, gender: string, genderConfidence: number, emotion: string, embedding: any, iris: number }> = []; const faceRes: Array<{
confidence: number,
boxConfidence: number,
faceConfidence: number,
box: any,
mesh:any,
meshRaw: any,
boxRaw: any,
annotations: any,
age: number,
gender: string,
genderConfidence: number,
emotion: string,
embedding: any,
iris: number,
angle: any
}> = [];
this.state = 'run:face'; this.state = 'run:face';
timeStamp = now(); timeStamp = now();
const faces = await this.models.face?.estimateFaces(input, this.config); const faces = await this.models.face?.estimateFaces(input, this.config);
@ -270,6 +306,8 @@ class Human {
continue; continue;
} }
const angle = this.calculateFaceAngle(face.mesh);
// run age, inherits face from blazeface // run age, inherits face from blazeface
this.analyze('Start Age:'); this.analyze('Start Age:');
if (this.config.async) { if (this.config.async) {
@ -350,6 +388,7 @@ class Human {
emotion: emotionRes, emotion: emotionRes,
embedding: embeddingRes, embedding: embeddingRes,
iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0, iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
angle,
// image: face.image.toInt().squeeze(), // image: face.image.toInt().squeeze(),
}); });
@ -385,10 +424,6 @@ class Human {
resolve({ error }); resolve({ error });
} }
let bodyRes;
let handRes;
let faceRes;
const timeStart = now(); const timeStart = now();
// configure backend // configure backend
@ -410,6 +445,11 @@ class Human {
this.perf.image = Math.trunc(now() - timeStamp); this.perf.image = Math.trunc(now() - timeStamp);
this.analyze('Get Image:'); this.analyze('Get Image:');
// prepare where to store model results
let bodyRes;
let handRes;
let faceRes;
// run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
if (this.config.async) { if (this.config.async) {
faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : []; faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];
@ -548,7 +588,7 @@ class Human {
else res = await this.warmupNode(); else res = await this.warmupNode();
this.config.videoOptimized = video; this.config.videoOptimized = video;
const t1 = now(); const t1 = now();
if (this.config.debug) log('Warmup', this.config.warmup, Math.round(t1 - t0), 'ms'); if (this.config.debug) log('Warmup', this.config.warmup, Math.round(t1 - t0), 'ms', res);
return res; return res;
} }
} }

1
types/draw.d.ts vendored
View File

@ -20,5 +20,6 @@ export declare function gesture(inCanvas: any, result: any): Promise<void>;
export declare function face(inCanvas: any, result: any): Promise<void>; export declare function face(inCanvas: any, result: any): Promise<void>;
export declare function body(inCanvas: any, result: any): Promise<void>; export declare function body(inCanvas: any, result: any): Promise<void>;
export declare function hand(inCanvas: any, result: any): Promise<void>; export declare function hand(inCanvas: any, result: any): Promise<void>;
export declare function angles(inCanvas: any, result: any): Promise<void>;
export declare function canvas(inCanvas: any, outCanvas: any): Promise<void>; export declare function canvas(inCanvas: any, outCanvas: any): Promise<void>;
export declare function all(inCanvas: any, result: any): Promise<void>; export declare function all(inCanvas: any, result: any): Promise<void>;

2
types/human.d.ts vendored
View File

@ -27,6 +27,7 @@ declare class Human {
simmilarity(embedding1: any, embedding2: any): number; simmilarity(embedding1: any, embedding2: any): number;
load(userConfig?: null): Promise<void>; load(userConfig?: null): Promise<void>;
checkBackend(force?: boolean): Promise<void>; checkBackend(force?: boolean): Promise<void>;
calculateFaceAngle: (mesh: any) => {};
detectFace(input: any): Promise<{ detectFace(input: any): Promise<{
confidence: number; confidence: number;
boxConfidence: number; boxConfidence: number;
@ -42,6 +43,7 @@ declare class Human {
emotion: string; emotion: string;
embedding: any; embedding: any;
iris: number; iris: number;
angle: any;
}[]>; }[]>;
detect(input: any, userConfig?: {}): Promise<unknown>; detect(input: any, userConfig?: {}): Promise<unknown>;
warmupBitmap(): Promise<any>; warmupBitmap(): Promise<any>;