Mirror of https://github.com/vladmandic/human

Commit cd949deec9 (parent 0d7cb8d8ea): implemented 3d face angle calculations
@@ -1,6 +1,6 @@
 # Human Library

-**3D Face Detection, Face Embedding & Recognition,**
+**3D Face Detection & Rotation Tracking, Face Embedding & Recognition,**
 **Body Pose Tracking, Hand & Finger Tracking,**
 **Iris Analysis, Age & Gender & Emotion Prediction**
 **& Gesture Recognition**
TODO.md
@@ -4,4 +4,3 @@
 - Prune pre-packaged models
 - Build Face embedding database
 - Dynamic sample processing
 - Update screenshots
@@ -2,18 +2,18 @@ import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human
 import Menu from './menu.js';
 import GLBench from './gl-bench.js';

-const userConfig = { backend: 'wasm' }; // add any user configuration overrides
+const userConfig = { backend: 'webgl' }; // add any user configuration overrides

 /*
 const userConfig = {
   backend: 'wasm',
   async: false,
-  warmup: 'full',
+  warmup: 'face',
   videoOptimized: false,
-  face: { enabled: false, iris: { enabled: true }, mesh: { enabled: true }, age: { enabled: true }, gender: { enabled: true }, emotion: { enabled: true }, embedding: { enabled: true } },
+  face: { enabled: true, iris: { enabled: false }, mesh: { enabled: true }, age: { enabled: false }, gender: { enabled: false }, emotion: { enabled: false }, embedding: { enabled: false } },
   hand: { enabled: false },
   gestures: { enabled: true },
-  body: { enabled: true, modelType: 'blazepose', modelPath: '../models/blazepose.json' },
+  body: { enabled: false, modelType: 'blazepose', modelPath: '../models/blazepose.json' },
 };
 */
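For context, these overrides are applied when the demo creates the library instance. A minimal sketch, assuming the demo's import path and constructor usage shown in the hunk header above (the load() call is optional pre-loading, not part of this change):

```js
// minimal sketch: userConfig values override the library defaults when the instance is created
import Human from '../dist/human.esm.js';

const userConfig = { backend: 'webgl' }; // same override the demo now uses by default
const human = new Human(userConfig);
await human.load(); // optionally pre-load the enabled models before the first detect()
```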
@@ -129,10 +129,11 @@ async function drawResults(input) {
   }

   // draw all results
-  await human.draw.face(canvas, result.face);
-  await human.draw.body(canvas, result.body);
-  await human.draw.hand(canvas, result.hand);
-  await human.draw.gesture(canvas, result.gesture);
+  human.draw.face(canvas, result.face);
+  human.draw.body(canvas, result.body);
+  human.draw.hand(canvas, result.hand);
+  human.draw.gesture(canvas, result.gesture);
+  human.draw.angles(canvas, result.face);
   await calcSimmilariry(result);

   // update log
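The draw helpers are still async but are no longer awaited, so overlay rendering overlaps with the similarity calculation, and the new human.draw.angles call receives the face results that now carry the angle data. The combined helper picks up the new overlay as well; a minimal sketch, assuming a canvas element and a prior detection result:

```js
// minimal sketch: draw.all() invokes the face/body/hand/gesture helpers and, after this commit, angles()
const result = await human.detect(video);
human.draw.all(canvas, result);
```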
src/draw.ts
@@ -342,6 +342,26 @@ export async function hand(inCanvas, result) {
   }
 }

+export async function angles(inCanvas, result) {
+  // todo
+  if (!result || !inCanvas) return;
+  if (!(inCanvas instanceof HTMLCanvasElement)) return;
+  const ctx = inCanvas.getContext('2d');
+  if (!ctx) return;
+  ctx.font = options.font;
+  ctx.strokeStyle = options.color;
+  ctx.fillStyle = options.color;
+  ctx.lineWidth = options.lineWidth;
+  /*
+  const r = 200;
+  for (const res of result) {
+    ctx.moveTo(inCanvas.width - r, inCanvas.height - r);
+    ctx.lineTo(inCanvas.width - r + (r * Math.cos(res.angle.roll)), inCanvas.height - r + (r * Math.sin(res.angle.roll)));
+    ctx.stroke();
+  }
+  */
+}
+
 export async function canvas(inCanvas, outCanvas) {
   if (!inCanvas || !outCanvas) return;
   if (!(inCanvas instanceof HTMLCanvasElement) || !(outCanvas instanceof HTMLCanvasElement)) return;

@@ -356,4 +376,5 @@ export async function all(inCanvas, result) {
   body(inCanvas, result.body);
   hand(inCanvas, result.hand);
   gesture(inCanvas, result.gesture);
+  angles(inCanvas, result.face);
 }
@@ -23,7 +23,7 @@ export const face = (res) => {
   const gestures: Array<{ face: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 0) {
-      const eyeFacing = res[i].mesh[35][2] - res[i].mesh[263][2];
+      const eyeFacing = res[i].mesh[33][2] - res[i].mesh[263][2];
       if (Math.abs(eyeFacing) < 10) gestures.push({ face: i, gesture: 'facing camera' });
       else gestures.push({ face: i, gesture: `facing ${eyeFacing < 0 ? 'right' : 'left'}` });
       const openLeft = Math.abs(res[i].mesh[374][1] - res[i].mesh[386][1]) / Math.abs(res[i].mesh[443][1] - res[i].mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord
src/human.ts
@@ -248,6 +248,25 @@ class Human {
     }
   }

+  calculateFaceAngle = (mesh) => {
+    if (!mesh || mesh.length < 152) return {};
+    const radians = (a1, a2, b1, b2) => Math.atan2(b2 - a2, b1 - a1);
+    // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
+    const degrees = (theta) => Math.abs(((theta * 180) / Math.PI) % 360);
+    const angle = {
+      // roll is face lean left/right
+      // looking at x,y of outside corners of leftEye and rightEye
+      roll: radians(mesh[33][0], mesh[33][1], mesh[263][0], mesh[263][1]),
+      // yaw is face turn left/right
+      // looking at x,z of outside corners of leftEye and rightEye
+      yaw: radians(mesh[33][0], mesh[33][2], mesh[263][0], mesh[263][2]),
+      // pitch is face move up/down
+      // looking at y,x of top and bottom points of the face
+      pitch: radians(mesh[10][1], mesh[10][2], mesh[152][1], mesh[152][2]),
+    };
+    return angle;
+  }
+
   async detectFace(input) {
     // run facemesh, includes blazeface and iris
     // eslint-disable-next-line no-async-promise-executor
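Each angle is a single atan2 over two coordinates of two mesh landmarks: roll and yaw come from the outer eye corners (points 33 and 263), pitch from the top of the face (point 10) and the chin (point 152); note that for pitch the code reads indices [1] and [2], i.e. the y and z coordinates. Results are in radians, and the currently unused degrees helper hints at a later conversion. A quick sanity-check sketch, not part of the commit:

```js
// sanity check of the roll formula: a level face (equal eye-corner y values) gives 0,
// a tilted face gives a signed, non-zero angle in radians
const radians = (a1, a2, b1, b2) => Math.atan2(b2 - a2, b1 - a1);
console.log(radians(100, 200, 300, 200)); // 0      -> eye corners level, no roll
console.log(radians(100, 200, 300, 250)); // ~0.245 -> roughly 14 degrees of head lean
```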
@@ -256,7 +275,24 @@
     let genderRes;
     let emotionRes;
     let embeddingRes;
-    const faceRes: Array<{ confidence: number, boxConfidence: number, faceConfidence: number, box: any, mesh: any, meshRaw: any, boxRaw: any, annotations: any, age: number, gender: string, genderConfidence: number, emotion: string, embedding: any, iris: number }> = [];
+    const faceRes: Array<{
+      confidence: number,
+      boxConfidence: number,
+      faceConfidence: number,
+      box: any,
+      mesh: any,
+      meshRaw: any,
+      boxRaw: any,
+      annotations: any,
+      age: number,
+      gender: string,
+      genderConfidence: number,
+      emotion: string,
+      embedding: any,
+      iris: number,
+      angle: any
+    }> = [];
     this.state = 'run:face';
     timeStamp = now();
     const faces = await this.models.face?.estimateFaces(input, this.config);

@@ -270,6 +306,8 @@
         continue;
       }

+      const angle = this.calculateFaceAngle(face.mesh);
+
       // run age, inherits face from blazeface
       this.analyze('Start Age:');
       if (this.config.async) {

@@ -350,6 +388,7 @@
        emotion: emotionRes,
        embedding: embeddingRes,
        iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
+       angle,
        // image: face.image.toInt().squeeze(),
      });
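With this, each entry in result.face carries an angle object with roll, yaw and pitch in radians. A minimal usage sketch (the rad2deg helper is illustrative, not part of the library):

```js
// minimal sketch: read the new per-face rotation angles from a detection result
const rad2deg = (rad) => Math.round((rad * 180) / Math.PI);
const result = await human.detect(video);
for (const face of result.face) {
  if (face.angle && face.angle.roll !== undefined) {
    console.log('roll / yaw / pitch (deg):', rad2deg(face.angle.roll), rad2deg(face.angle.yaw), rad2deg(face.angle.pitch));
  }
}
```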
@@ -385,10 +424,6 @@
       resolve({ error });
     }

-    let bodyRes;
-    let handRes;
-    let faceRes;
-
     const timeStart = now();

     // configure backend

@@ -410,6 +445,11 @@
     this.perf.image = Math.trunc(now() - timeStamp);
     this.analyze('Get Image:');

+    // prepare where to store model results
+    let bodyRes;
+    let handRes;
+    let faceRes;
+
     // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
     if (this.config.async) {
       faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];
@@ -548,7 +588,7 @@
     else res = await this.warmupNode();
     this.config.videoOptimized = video;
     const t1 = now();
-    if (this.config.debug) log('Warmup', this.config.warmup, Math.round(t1 - t0), 'ms');
+    if (this.config.debug) log('Warmup', this.config.warmup, Math.round(t1 - t0), 'ms', res);
     return res;
   }
 }