human/demo/embedding.js

import Human from '../dist/human.esm.js';
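// demo configuration: only the face detector, mesh and embedding modules are enabled;
// detector.return requests the cropped face tensor so each detected face can be drawn later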
const userConfig = {
  backend: 'wasm',
  async: false,
  warmup: 'none',
  debug: true,
  videoOptimized: false,
  face: {
    enabled: true,
    detector: { rotation: true, return: true },
    mesh: { enabled: true },
    embedding: { enabled: true },
    iris: { enabled: false },
    age: { enabled: false },
    gender: { enabled: false },
    emotion: { enabled: false },
  },
  hand: { enabled: false },
  gesture: { enabled: false },
  body: { enabled: false },
  filter: {
    enabled: false,
  },
};
const human = new Human(userConfig); // new instance of human
const all = []; // array that will hold all detected faces
function log(...msg) {
  const dt = new Date();
  const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
  // eslint-disable-next-line no-console
  console.log(ts, ...msg);
}
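// compare the selected face against every face canvas on the page,
// overlay a similarity score on each canvas and re-sort the canvases by that score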
async function analyze(face) {
  // if we have a face image tensor, enhance it and display it
  if (face.tensor) {
    const enhanced = human.enhance(face);
    if (enhanced) {
      const c = document.getElementById('orig');
      const squeeze = enhanced.squeeze();
      await human.tf.browser.toPixels(squeeze, c); // wait for the draw to finish before disposing tensors
      enhanced.dispose();
      squeeze.dispose();
    }
  }

  // loop through all canvases that contain faces
  const canvases = document.getElementsByClassName('face');
  for (const canvas of canvases) {
    // calculate similarity from selected face to current one in the loop
    const res = human.simmilarity(face.embedding, all[canvas.tag.sample][canvas.tag.face].embedding, 3);
    // draw the canvas and similarity score
    canvas.title = res;
    await human.tf.browser.toPixels(all[canvas.tag.sample][canvas.tag.face].tensor, canvas);
    const ctx = canvas.getContext('2d');
    ctx.font = 'small-caps 1rem "Lato"';
    ctx.fillStyle = 'rgba(0, 0, 0, 1)';
    ctx.fillText(`${(100 * res).toFixed(1)}%`, 3, 19);
    ctx.fillStyle = 'rgba(255, 255, 255, 1)';
    ctx.fillText(`${(100 * res).toFixed(1)}%`, 4, 20);
  }

  // sort all faces by similarity
  const sorted = document.getElementById('faces');
  [...sorted.children]
    .sort((a, b) => parseFloat(b.title) - parseFloat(a.title))
    .forEach((canvas) => sorted.appendChild(canvas));
}
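// store detection results for one sample image and create a clickable canvas for each detected face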
async function faces(index, res) {
  all[index] = res.face;
  for (const i in res.face) {
    // log(res.face[i]);
    const canvas = document.createElement('canvas');
    canvas.tag = { sample: index, face: i };
    canvas.width = 200;
    canvas.height = 200;
    canvas.className = 'face';
    // mouse click on any face canvas triggers analysis
    canvas.addEventListener('click', (evt) => {
      log('Select:', 'Image:', evt.target.tag.sample, 'Face:', evt.target.tag.face, all[evt.target.tag.sample][evt.target.tag.face]);
      analyze(all[evt.target.tag.sample][evt.target.tag.face]);
    });
    // if we actually got a face image tensor, draw canvas with that face
    if (res.face[i].tensor) {
      human.tf.browser.toPixels(res.face[i].tensor, canvas);
      document.getElementById('faces').appendChild(canvas);
    }
  }
}
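// load a single image, run detection on it, then append the image to the page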
async function process(index, image) {
  return new Promise((resolve) => {
    const img = new Image(128, 128);
    img.onload = () => { // must wait until image is loaded
      human.detect(img).then((res) => {
        faces(index, res); // then wait until image is analyzed
        log('Add image:', index + 1, image, 'faces:', res.face.length);
        document.getElementById('images').appendChild(img); // and finally we can add it
        resolve(true);
      });
    };
    img.title = image;
    img.src = encodeURI(image);
  });
}
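// main entry point: load models, enumerate sample images, then process them one at a time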
async function main() {
  await human.load();
  // enumerate all sample images in /assets
  let res = await fetch('/assets');
  let dir = (res && res.ok) ? await res.json() : [];
  let images = dir.filter((img) => (img.endsWith('.jpg') && img.includes('sample')));
  // enumerate additional private test images in /private, not included in the git repository
  res = await fetch('/private');
  dir = (res && res.ok) ? await res.json() : [];
  images = images.concat(dir.filter((img) => (img.endsWith('.jpg'))));
  // download and analyze all images
  log('Enumerated:', images.length, 'images');
  for (let i = 0; i < images.length; i++) await process(i, images[i]);
  const num = all.reduce((prev, cur) => prev + cur.length, 0);
  log('Extracted faces:', num, 'from images:', all.length);
  log(human.tf.engine().memory());
  log('Ready');
}
window.onload = main;