diff --git a/README.md b/README.md
index 1526092c..15a68feb 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # Human Library
 
-**3D Face Detection, Face Embedding & Recognition,**
+**3D Face Detection & Rotation Tracking, Face Embedding & Recognition,**
 **Body Pose Tracking, Hand & Finger Tracking,**
 **Iris Analysis, Age & Gender & Emotion Prediction**
 **& Gesture Recognition**
diff --git a/TODO.md b/TODO.md
index 7197fbe0..f1f3a9f3 100644
--- a/TODO.md
+++ b/TODO.md
@@ -4,4 +4,3 @@
 - Prune pre-packaged models
 - Build Face embedding database
 - Dynamic sample processing
-- Update screenshots
diff --git a/demo/browser.js b/demo/browser.js
index e717960b..30d560a9 100644
--- a/demo/browser.js
+++ b/demo/browser.js
@@ -2,18 +2,18 @@
 import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human
 import Menu from './menu.js';
 import GLBench from './gl-bench.js';
 
-const userConfig = { backend: 'wasm' }; // add any user configuration overrides
+const userConfig = { backend: 'webgl' }; // add any user configuration overrides
 
 /*
 const userConfig = {
   backend: 'wasm',
   async: false,
-  warmup: 'full',
+  warmup: 'face',
   videoOptimized: false,
-  face: { enabled: false, iris: { enabled: true }, mesh: { enabled: true }, age: { enabled: true }, gender: { enabled: true }, emotion: { enabled: true }, embedding: { enabled: true } },
+  face: { enabled: true, iris: { enabled: false }, mesh: { enabled: true }, age: { enabled: false }, gender: { enabled: false }, emotion: { enabled: false }, embedding: { enabled: false } },
   hand: { enabled: false },
   gestures: { enabled: true },
-  body: { enabled: true, modelType: 'blazepose', modelPath: '../models/blazepose.json' },
+  body: { enabled: false, modelType: 'blazepose', modelPath: '../models/blazepose.json' },
 };
 */
@@ -129,10 +129,11 @@ async function drawResults(input) {
   }
 
   // draw all results
-  await human.draw.face(canvas, result.face);
-  await human.draw.body(canvas, result.body);
-  await human.draw.hand(canvas, result.hand);
-  await human.draw.gesture(canvas, result.gesture);
+  human.draw.face(canvas, result.face);
+  human.draw.body(canvas, result.body);
+  human.draw.hand(canvas, result.hand);
+  human.draw.gesture(canvas, result.gesture);
+  human.draw.angles(canvas, result.face);
   await calcSimmilariry(result);
 
   // update log
diff --git a/src/draw.ts b/src/draw.ts
index a4c3ecc9..8d67926f 100644
--- a/src/draw.ts
+++ b/src/draw.ts
@@ -342,6 +342,26 @@ export async function hand(inCanvas, result) {
   }
 }
 
+export async function angles(inCanvas, result) {
+  // todo
+  if (!result || !inCanvas) return;
+  if (!(inCanvas instanceof HTMLCanvasElement)) return;
+  const ctx = inCanvas.getContext('2d');
+  if (!ctx) return;
+  ctx.font = options.font;
+  ctx.strokeStyle = options.color;
+  ctx.fillStyle = options.color;
+  ctx.lineWidth = options.lineWidth;
+  /*
+  const r = 200;
+  for (const res of result) {
+    ctx.moveTo(inCanvas.width - r, inCanvas.height - r);
+    ctx.lineTo(inCanvas.width - r + (r * Math.cos(res.angle.roll)), inCanvas.height - r + (r * Math.sin(res.angle.roll)));
+    ctx.stroke();
+  }
+  */
+}
+
 export async function canvas(inCanvas, outCanvas) {
   if (!inCanvas || !outCanvas) return;
   if (!(inCanvas instanceof HTMLCanvasElement) || !(outCanvas instanceof HTMLCanvasElement)) return;
@@ -356,4 +376,5 @@ export async function all(inCanvas, result) {
   body(inCanvas, result.body);
   hand(inCanvas, result.hand);
   gesture(inCanvas, result.gesture);
+  angles(inCanvas, result.face);
 }
diff --git a/src/gesture/gesture.ts b/src/gesture/gesture.ts
index 1f47b970..b174e920 100644
--- a/src/gesture/gesture.ts
+++ b/src/gesture/gesture.ts
@@ -23,7 +23,7 @@ export const face = (res) => {
   const gestures: Array<{ face: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 0) {
-      const eyeFacing = res[i].mesh[35][2] - res[i].mesh[263][2];
+      const eyeFacing = res[i].mesh[33][2] - res[i].mesh[263][2];
       if (Math.abs(eyeFacing) < 10) gestures.push({ face: i, gesture: 'facing camera' });
       else gestures.push({ face: i, gesture: `facing ${eyeFacing < 0 ? 'right' : 'left'}` });
       const openLeft = Math.abs(res[i].mesh[374][1] - res[i].mesh[386][1]) / Math.abs(res[i].mesh[443][1] - res[i].mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord
diff --git a/src/human.ts b/src/human.ts
index 2e62de50..38171c32 100644
--- a/src/human.ts
+++ b/src/human.ts
@@ -248,6 +248,25 @@ class Human {
     }
   }
 
+  calculateFaceAngle = (mesh) => {
+    if (!mesh || mesh.length < 152) return {};
+    const radians = (a1, a2, b1, b2) => Math.atan2(b2 - a2, b1 - a1);
+    // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
+    const degrees = (theta) => Math.abs(((theta * 180) / Math.PI) % 360);
+    const angle = {
+      // roll is face lean left/right
+      // looking at x,y of outside corners of leftEye and rightEye
+      roll: radians(mesh[33][0], mesh[33][1], mesh[263][0], mesh[263][1]),
+      // yaw is face turn left/right
+      // looking at x,z of outside corners of leftEye and rightEye
+      yaw: radians(mesh[33][0], mesh[33][2], mesh[263][0], mesh[263][2]),
+      // pitch is face move up/down
+      // looking at y,x of top and bottom points of the face
+      pitch: radians(mesh[10][1], mesh[10][2], mesh[152][1], mesh[152][2]),
+    };
+    return angle;
+  }
+
   async detectFace(input) {
     // run facemesh, includes blazeface and iris
     // eslint-disable-next-line no-async-promise-executor
@@ -256,7 +275,24 @@ class Human {
      let genderRes;
      let emotionRes;
      let embeddingRes;
-      const faceRes: Array<{ confidence: number, boxConfidence: number, faceConfidence: number, box: any, mesh: any, meshRaw: any, boxRaw: any, annotations: any, age: number, gender: string, genderConfidence: number, emotion: string, embedding: any, iris: number }> = [];
+      const faceRes: Array<{
+        confidence: number,
+        boxConfidence: number,
+        faceConfidence: number,
+        box: any,
+        mesh:any,
+        meshRaw: any,
+        boxRaw: any,
+        annotations: any,
+        age: number,
+        gender: string,
+        genderConfidence: number,
+        emotion: string,
+        embedding: any,
+        iris: number,
+        angle: any
+      }> = [];
+
      this.state = 'run:face';
      timeStamp = now();
      const faces = await this.models.face?.estimateFaces(input, this.config);
@@ -270,6 +306,8 @@ class Human {
        continue;
      }
 
+      const angle = this.calculateFaceAngle(face.mesh);
+
      // run age, inherits face from blazeface
      this.analyze('Start Age:');
      if (this.config.async) {
@@ -350,6 +388,7 @@ class Human {
        emotion: emotionRes,
        embedding: embeddingRes,
        iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
+        angle,
        // image: face.image.toInt().squeeze(),
      });
 
@@ -385,10 +424,6 @@ class Human {
      resolve({ error });
    }
 
-    let bodyRes;
-    let handRes;
-    let faceRes;
-
    const timeStart = now();
 
    // configure backend
@@ -410,6 +445,11 @@ class Human {
      this.perf.image = Math.trunc(now() - timeStamp);
      this.analyze('Get Image:');
 
+      // prepare where to store model results
+      let bodyRes;
+      let handRes;
+      let faceRes;
+
      // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
      if (this.config.async) {
        faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];
@@ -548,7 +588,7 @@ class Human {
      else res = await this.warmupNode();
      this.config.videoOptimized = video;
      const t1 = now();
-      if (this.config.debug) log('Warmup', this.config.warmup, Math.round(t1 - t0), 'ms');
+      if (this.config.debug) log('Warmup', this.config.warmup, Math.round(t1 - t0), 'ms', res);
      return res;
    }
  }