human/demo/embedding.js

import Human from '../dist/human.esm.js';
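// configuration: wasm backend with only the face detector, mesh, and embedding models enabled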
const userConfig = {
  backend: 'wasm',
  async: false,
  warmup: 'none',
  debug: true,
  filter: false,
  videoOptimized: false,
  face: {
    enabled: true,
    detector: { rotation: true, return: true },
    mesh: { enabled: true },
    embedding: { enabled: true },
    iris: { enabled: false },
    age: { enabled: false },
    gender: { enabled: false },
    emotion: { enabled: false },
  },
  hand: { enabled: false },
  gesture: { enabled: false },
  body: { enabled: false },
};
const human = new Human(userConfig); // new instance of human
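// list of sample images to download and analyze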
const samples = ['../assets/sample-me.jpg', '../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'];
// const samples = ['../assets/sample-me.jpg', '../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg',
// '../private/me (1).jpg', '../private/me (2).jpg', '../private/me (3).jpg', '../private/me (4).jpg', '../private/me (5).jpg', '../private/me (6).jpg', '../private/me (7).jpg', '../private/me (8).jpg',
// '../private/me (9).jpg', '../private/me (10).jpg', '../private/me (11).jpg', '../private/me (12).jpg', '../private/me (13).jpg'];
const all = []; // array that will hold all detected faces
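// console logger prefixed with a hh:mm:ss.mmm timestamp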
function log(...msg) {
  const dt = new Date();
  const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;
  // eslint-disable-next-line no-console
  console.log(ts, ...msg);
}
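// show an enhanced crop of the selected face and score its similarity against every other detected face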
async function analyze(face) {
  log('Face:', face);

  // if we have face image tensor, enhance it and display it
  if (face.tensor) {
    const enhanced = human.enhance(face);
    if (enhanced) {
      const c = document.getElementById('orig');
      const squeeze = enhanced.squeeze();
      human.tf.browser.toPixels(squeeze, c);
      enhanced.dispose();
      squeeze.dispose();
    }
  }

  // loop through all canvases that contain faces
  const canvases = document.getElementsByClassName('face');
  for (const canvas of canvases) {
    // calculate similarity from the selected face to the current one in the loop
    const res = human.simmilarity(face.embedding, all[canvas.tag.sample][canvas.tag.face].embedding);
    // draw the face canvas and overlay the similarity score
    canvas.title = res;
    await human.tf.browser.toPixels(all[canvas.tag.sample][canvas.tag.face].tensor, canvas);
    const ctx = canvas.getContext('2d');
    ctx.font = 'small-caps 1rem "Lato"';
    ctx.fillStyle = 'rgba(0, 0, 0, 1)';
    ctx.fillText(`${(100 * res).toFixed(1)}%`, 3, 19);
    ctx.fillStyle = 'rgba(255, 255, 255, 1)';
    ctx.fillText(`${(100 * res).toFixed(1)}%`, 4, 20);
  }

  // sort all face canvases by similarity score
  const sorted = document.getElementById('faces');
  [...sorted.children]
    .sort((a, b) => parseFloat(b.title) - parseFloat(a.title))
    .forEach((canvas) => sorted.appendChild(canvas));
}
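// store detection results for one sample image and render a clickable canvas for each detected face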
async function faces(index, res) {
  all[index] = res.face;
  for (const i in res.face) {
    // log(res.face[i]);
    const canvas = document.createElement('canvas');
    canvas.tag = { sample: index, face: i };
    canvas.width = 200;
    canvas.height = 200;
    canvas.className = 'face';

    // mouse click on any face canvas triggers analysis
    canvas.addEventListener('click', (evt) => {
      log('Select:', 'Image:', evt.target.tag.sample, 'Face:', evt.target.tag.face);
      analyze(all[evt.target.tag.sample][evt.target.tag.face]);
    });

    // if we actually got face image tensor, draw canvas with that face
    if (res.face[i].tensor) {
      human.tf.browser.toPixels(res.face[i].tensor, canvas);
      document.getElementById('faces').appendChild(canvas);
    }
  }
}
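// load one sample image, run detection once it has loaded, then append it to the page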
async function add(index) {
  log('Add image:', samples[index]);
  return new Promise((resolve) => {
    const img = new Image(100, 100);
    img.onload = () => { // must wait until image is loaded
      human.detect(img).then((res) => faces(index, res)); // then wait until image is analyzed
      document.getElementById('images').appendChild(img); // and finally we can add it
      resolve(true);
    };
    img.title = samples[index];
    img.src = samples[index];
  });
}
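// load models, then download and analyze all sample images one by one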
async function main() {
  await human.load();
  for (const i in samples) await add(i); // download and analyze all images
  log('Ready');
}
window.onload = main;