add test for face descriptors

pull/280/head
Vladimir Mandic 2021-03-11 18:26:04 -05:00
parent 622de45e50
commit c3ecdf5486
7 changed files with 133 additions and 4 deletions

2
.gitignore vendored
View File

@ -1,2 +1,2 @@
node_modules
alternative
private

View File

@ -2,6 +2,7 @@
- Strong typing
- Automated testing
- Guard against corrupt input
- Improve face embedding
- Build Face embedding database
- Dynamic sample processing

View File

@ -85,6 +85,7 @@ export default {
scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score
// in non-maximum suppression,
// this is applied on detection objects only and before minConfidence
return: true, // return extracted face as tensor
},
mesh: {

32
demo/embedding.html Normal file
View File

@ -0,0 +1,32 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <title>Human</title>
  <meta charset="utf-8">
  <!-- fixed: http-equiv and content were split across two malformed meta tags -->
  <meta http-equiv="content-type" content="text/html">
  <meta name="description" content="Human: 3D Face Detection, Body Pose, Hand & Finger Tracking, Iris Tracking, Age & Gender Prediction, Emotion Prediction & Gesture Recognition; Author: Vladimir Mandic <https://github.com/vladmandic>">
  <meta name="viewport" content="width=device-width, initial-scale=0.6, minimum-scale=0.3, maximum-scale=3.0, shrink-to-fit=yes, user-scalable=yes">
  <meta name="theme-color" content="#000000"/>
  <meta name="application-name" content="Human">
  <meta name="msapplication-tooltip" content="Human: AI-powered 3D Human Detection">
  <link rel="manifest" href="./manifest.webmanifest">
  <link rel="shortcut icon" href="../favicon.ico" type="image/x-icon">
  <link rel="apple-touch-icon" href="../assets/icon.png">
  <script src="./embedding.js" type="module"></script>
  <style>
    @font-face { font-family: 'Lato'; font-display: swap; font-style: normal; font-weight: 100; src: local('Lato'), url('../assets/lato-light.woff2') }
    html { font-family: 'Lato', 'Segoe UI'; font-size: 20px; font-variant: small-caps; }
    body { margin: 0; background: black; color: white; overflow-x: hidden; scrollbar-width: none; }
    body::-webkit-scrollbar { display: none; }
    img { object-fit: contain; }
    .face { width: 200px; height: 200px; }
  </style>
</head>
<body>
  <br>Sample Images:
  <div id="images"></div>
  <!-- fixed typo: "simmilarity" -> "similarity" in user-facing text -->
  <br>Extracted Faces - click on a face to sort by similarity:<br>
  <div id="faces"></div>
</body>
</html>

94
demo/embedding.js Normal file
View File

@ -0,0 +1,94 @@
// Face-embedding demo: extracts faces from sample images and lets the user
// rank them by embedding similarity.
import Human from '../dist/human.esm.js';
// Human library configuration for this demo: only face detection, mesh and
// embedding are enabled; every other analysis module is switched off.
const userConfig = {
  backend: 'wasm',
  async: false,
  warmup: 'none',
  debug: true,
  filter: false,
  videoOptimized: false,
  face: {
    enabled: true,
    detector: { rotation: true },
    mesh: { enabled: true },
    // embedding model produces the per-face descriptor vector compared below
    embedding: { enabled: true, modelPath: '../models/mobilefacenet.json' },
    iris: { enabled: false },
    age: { enabled: false },
    gender: { enabled: false },
    emotion: { enabled: false },
  },
  hand: { enabled: false },
  gesture: { enabled: false },
  body: { enabled: false },
};
// Single shared library instance used by all functions below
const human = new Human(userConfig);
// Images to process: public assets plus a private test set (see .gitignore)
const samples = ['../assets/sample-me.jpg', '../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg',
  '../private/me (1).jpg', '../private/me (2).jpg', '../private/me (3).jpg', '../private/me (4).jpg', '../private/me (5).jpg', '../private/me (6).jpg', '../private/me (7).jpg', '../private/me (8).jpg',
  '../private/me (9).jpg', '../private/me (10).jpg', '../private/me (11).jpg', '../private/me (12).jpg', '../private/me (13).jpg'];
// all[sampleIndex] holds the face-detection results for that sample image
const all = [];
// Console logger prefixed with a HH:MM:SS.mmm wall-clock timestamp.
function log(...msg) {
  const now = new Date();
  const pad = (value, width) => value.toString().padStart(width, '0');
  const ts = `${pad(now.getHours(), 2)}:${pad(now.getMinutes(), 2)}:${pad(now.getSeconds(), 2)}.${pad(now.getMilliseconds(), 3)}`;
  // eslint-disable-next-line no-console
  console.log(ts, ...msg);
}
// Compares the selected face against every extracted face, overlays the
// similarity percentage on each canvas, and re-orders the canvases from
// most to least similar.
async function analyze(face) {
  log('Face:', face);
  const faceCanvases = document.getElementsByClassName('face');
  for (const el of faceCanvases) {
    // note: 'simmilarity' is the actual method name exposed by this library version
    const score = human.simmilarity(face.embedding, all[el.tag.sample][el.tag.face].embedding);
    el.title = score;
    await human.tf.browser.toPixels(all[el.tag.sample][el.tag.face].tensor, el);
    const ctx = el.getContext('2d');
    ctx.font = 'small-caps 1rem "Lato"';
    const label = `${(100 * score).toFixed(1)}%`;
    // draw label twice, offset by one pixel, for a simple drop-shadow effect
    ctx.fillStyle = 'rgba(0, 0, 0, 1)';
    ctx.fillText(label, 3, 19);
    ctx.fillStyle = 'rgba(255, 255, 255, 1)';
    ctx.fillText(label, 4, 20);
  }
  // sort canvases in the container by their similarity score (stored in title)
  const container = document.getElementById('faces');
  [...container.children]
    .sort((a, b) => parseFloat(b.title) - parseFloat(a.title))
    .forEach((el) => container.appendChild(el));
}
// Renders each detected face from one sample image into its own clickable
// canvas and stores the results in `all` for later similarity comparison.
async function faces(index, res) {
  all[index] = res.face;
  for (const i in res.face) {
    const canvas = document.createElement('canvas');
    // tag records which sample/face this canvas shows, for the click handler
    canvas.tag = { sample: index, face: i };
    canvas.width = 200;
    canvas.height = 200;
    canvas.className = 'face';
    canvas.addEventListener('click', (evt) => {
      log('Select:', evt.target.tag.sample, evt.target.tag.face);
      analyze(all[evt.target.tag.sample][evt.target.tag.face]);
    });
    // fix: await the draw (was a floating promise) so the canvas is fully
    // painted before it is attached to the DOM
    await human.tf.browser.toPixels(res.face[i].tensor, canvas);
    document.getElementById('faces').appendChild(canvas);
  }
}
// Loads one sample image, appends it to the page, and kicks off face
// detection on it. Resolves true on load, false on load failure.
async function add(index) {
  log('Add:', samples[index]);
  return new Promise((resolve) => {
    const img = new Image(100, 100);
    img.onload = () => {
      // detection runs in the background; resolve as soon as the image loads
      human.detect(img)
        .then((res) => faces(index, res))
        .catch((err) => log('Detect error:', samples[index], err)); // fix: was an unhandled rejection
      document.getElementById('images').appendChild(img);
      resolve(true);
    };
    // fix: without an error handler a missing image (e.g. the private set)
    // left this promise pending forever and hung main()
    img.onerror = () => {
      log('Failed to load:', samples[index]);
      resolve(false);
    };
    img.title = samples[index];
    img.src = samples[index];
  });
}
// Entry point: waits for all models to load, then processes the sample
// images one at a time.
async function main() {
  await human.load();
  // fix: index loop instead of for...in over an array (string keys, enumerates
  // any inherited enumerable props); behavior is otherwise identical
  for (let i = 0; i < samples.length; i++) await add(i);
}
window.onload = main;

View File

@ -303,7 +303,8 @@ class Human {
emotion: string,
embedding: any,
iris: number,
angle: any
angle: any,
tensor: any,
}> = [];
this.state = 'run:face';
@ -402,7 +403,7 @@ class Human {
embedding: embeddingRes,
iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
angle,
// image: face.image.toInt().squeeze(),
tensor: this.config.face.detector.return ? face.image.squeeze() : null,
});
// dont need face anymore

2
wiki

@ -1 +1 @@
Subproject commit 69294f7a0a99bd996286f8f5bb655c7ea8bfc10d
Subproject commit fa7ac1f695547aa0fd25845e6cac7ed5ee0adcae