update demos

pull/280/head
Vladimir Mandic 2021-03-21 17:47:00 -04:00
parent bf89c7ee98
commit ca511a5385
4 changed files with 33 additions and 25 deletions

@@ -1,6 +1,6 @@
# @vladmandic/human
Version: **1.2.1**
Version: **1.2.2**
Description: **Human: AI-powered 3D Face Detection, Face Embedding & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition**
Author: **Vladimir Mandic <mandic00@live.com>**
@@ -9,8 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
## Changelog
### **HEAD -> main** 2021/03/21 mandic00@live.com
### **1.2.2** 2021/03/21 mandic00@live.com
- precise face rotation
### **1.2.1** 2021/03/21 mandic00@live.com

@@ -82,27 +82,30 @@ function status(msg) {
if (div) div.innerText = msg;
}
let original;
const compare = { enabled: false, original: null };
async function calcSimmilariry(result) {
document.getElementById('compare-container').style.display = human.config.face.embedding.enabled ? 'block' : 'none';
if (!human.config.face.embedding.enabled) return;
if (!(result?.face?.length > 0) || (result?.face[0]?.embedding?.length >= 64)) return;
if (!original) {
original = result;
document.getElementById('compare-container').style.display = compare.enabled ? 'block' : 'none';
if (!compare.enabled) return;
if (!(result?.face?.length > 0) || (result?.face[0]?.embedding?.length <= 64)) return;
if (!compare.original) {
compare.original = result;
log('setting face compare baseline:', result.face[0]);
if (result.face[0].tensor) {
const enhanced = human.enhance(result.face[0]);
if (enhanced) {
const c = document.getElementById('orig');
const squeeze = enhanced.squeeze();
human.tf.browser.toPixels(squeeze, c);
const norm = squeeze.div(255);
human.tf.browser.toPixels(norm, c);
enhanced.dispose();
squeeze.dispose();
norm.dispose();
}
} else {
document.getElementById('compare-canvas').getContext('2d').drawImage(original.canvas, 0, 0, 200, 200);
document.getElementById('compare-canvas').getContext('2d').drawImage(compare.original.canvas, 0, 0, 200, 200);
}
}
const similarity = human.similarity(original?.face[0]?.embedding, result?.face[0]?.embedding);
const similarity = human.similarity(compare.original?.face[0]?.embedding, result?.face[0]?.embedding);
document.getElementById('similarity').innerText = `similarity: ${Math.trunc(1000 * similarity) / 10}%`;
}
@@ -342,7 +345,7 @@ async function processImage(input) {
return new Promise((resolve) => {
const image = new Image();
image.onload = async () => {
log('Processing image:', encodeURI(image.src));
log('processing image:', encodeURI(image.src));
const canvas = document.getElementById('canvas');
image.width = image.naturalWidth;
image.height = image.naturalHeight;
@@ -404,7 +407,7 @@ async function detectSampleImages() {
document.getElementById('play').style.display = 'none';
document.getElementById('canvas').style.display = 'none';
document.getElementById('samples-container').style.display = 'block';
log('Running detection of sample images');
log('running detection of sample images');
status('processing images');
document.getElementById('samples-container').innerHTML = '';
for (const m of Object.values(menu)) m.hide();
@@ -509,8 +512,9 @@ function setupMenu() {
menu.models.addBool('face detect', human.config.face, 'enabled', (val) => human.config.face.enabled = val);
menu.models.addBool('face mesh', human.config.face.mesh, 'enabled', (val) => human.config.face.mesh.enabled = val);
menu.models.addBool('face iris', human.config.face.iris, 'enabled', (val) => human.config.face.iris.enabled = val);
menu.models.addBool('face age', human.config.face.age, 'enabled', (val) => human.config.face.age.enabled = val);
menu.models.addBool('face gender', human.config.face.gender, 'enabled', (val) => human.config.face.gender.enabled = val);
menu.models.addBool('face description', human.config.face.description, 'enabled', (val) => human.config.face.description.enabled = val);
// menu.models.addBool('face age', human.config.face.age, 'enabled', (val) => human.config.face.age.enabled = val);
// menu.models.addBool('face gender', human.config.face.gender, 'enabled', (val) => human.config.face.gender.enabled = val);
menu.models.addBool('face emotion', human.config.face.emotion, 'enabled', (val) => human.config.face.emotion.enabled = val);
menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
menu.models.addBool('body pose', human.config.body, 'enabled', (val) => human.config.body.enabled = val);
@@ -520,9 +524,9 @@ function setupMenu() {
menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
menu.models.addBool('object detection', human.config.object, 'enabled', (val) => human.config.object.enabled = val);
menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
menu.models.addBool('face compare', human.config.face.embedding, 'enabled', (val) => {
human.config.face.embedding.enabled = val;
original = null;
menu.models.addBool('face compare', compare, 'enabled', (val) => {
compare.enabled = val;
compare.original = null;
});
document.getElementById('btnDisplay').addEventListener('click', (evt) => menu.display.toggle(evt));
@@ -543,14 +547,14 @@ async function drawWarmup(res) {
}
async function main() {
log('Demo starting ...');
log('demo starting ...');
setupMenu();
document.getElementById('log').innerText = `Human: version ${human.version}`;
if (ui.modelsPreload && !ui.useWorker) {
status('loading');
await human.load(userConfig); // this is not required, just pre-loads all models
const loaded = Object.keys(human.models).filter((a) => human.models[a]);
log('Demo loaded models:', loaded);
log('demo loaded models:', loaded);
}
if (!ui.useWorker) {
status('initializing');
@@ -560,7 +564,7 @@
status('human: ready');
document.getElementById('loader').style.display = 'none';
document.getElementById('play').style.display = 'block';
log('Demo ready...');
log('demo ready...');
}
window.onload = main;
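The main change in this file replaces the standalone `original` variable with a `compare` state object and normalizes the enhanced face tensor to the 0-1 range before `tf.browser.toPixels` (which expects float values in that range). A minimal sketch of the resulting compare flow, assuming an initialized `human` instance and the demo's `similarity` element; this is an illustration, not the demo code verbatim:

```js
// illustration of the refactored face-compare flow
// assumes `human` is an initialized instance and the page has a '#similarity' element
const compare = { enabled: true, original: null };

async function onDetection(result) {
  if (!compare.enabled || !(result?.face?.length > 0)) return;
  if (!compare.original) {
    compare.original = result; // first valid result becomes the comparison baseline
    return;
  }
  // human.similarity() compares two embedding vectors and returns a value in the 0..1 range
  const similarity = human.similarity(compare.original.face[0].embedding, result.face[0].embedding);
  document.getElementById('similarity').innerText = `similarity: ${Math.trunc(1000 * similarity) / 10}%`;
}
```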

@@ -11,7 +11,7 @@ const Human = require('../dist/human.node.js').default; // or const Human = requ
let human = null;
const myConfig = {
backend: 'tensorflow',
// backend: 'tensorflow',
console: true,
videoOptimized: false,
async: false,
@@ -20,10 +20,11 @@ const myConfig = {
detector: { modelPath: 'file://models/blazeface-back.json', enabled: true, rotation: false },
mesh: { modelPath: 'file://models/facemesh.json', enabled: true },
iris: { modelPath: 'file://models/iris.json', enabled: true },
age: { modelPath: 'file://models/age.json', enabled: true },
gender: { modelPath: 'file://models/gender.json', enabled: true },
description: { modelPath: 'file://models/faceres.json', enabled: true },
emotion: { modelPath: 'file://models/emotion.json', enabled: true },
embedding: { modelPath: 'file://models/mobileface.json', enabled: true },
age: { modelPath: 'file://models/age.json', enabled: false },
gender: { modelPath: 'file://models/gender.json', enabled: false },
embedding: { modelPath: 'file://models/mobileface.json', enabled: false },
},
// body: { modelPath: 'file://models/blazepose.json', enabled: true },
body: { modelPath: 'file://models/posenet.json', enabled: true },

@@ -262,6 +262,8 @@ export class Human {
*/
if (this.config.backend && this.config.backend !== '') {
if (this.tf.ENV.flags.IS_BROWSER && this.config.backend === 'tensorflow') this.config.backend = 'webgl';
if (this.tf.ENV.flags.IS_NODE && (this.config.backend === 'webgl' || this.config.backend === 'wasm')) this.config.backend = 'tensorflow';
if (this.config.debug) log('setting backend:', this.config.backend);
if (this.config.backend === 'wasm') {
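The added line makes backend selection symmetric: in a browser a requested 'tensorflow' backend falls back to 'webgl', and under Node a requested 'webgl' or 'wasm' backend falls back to 'tensorflow'. The same rules expressed as a standalone helper, with hypothetical names for illustration only (the library applies this logic inline on `this.config.backend`):

```js
// backend fallback rules from this hunk, rewritten as a helper for clarity
function resolveBackend(requested, isBrowser, isNode) {
  if (isBrowser && requested === 'tensorflow') return 'webgl';                         // tfjs-node backend is unavailable in browsers
  if (isNode && (requested === 'webgl' || requested === 'wasm')) return 'tensorflow';  // browser backends are swapped for tfjs-node
  return requested;
}

// e.g. resolveBackend('webgl', false, true) === 'tensorflow'
```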