normalize all scores

pull/280/head
Vladimir Mandic 2021-04-01 09:24:56 -04:00
parent 942fa18b52
commit 5931a6f541
6 changed files with 42 additions and 19 deletions

View File

@@ -1,6 +1,6 @@
# @vladmandic/human
Version: **1.3.0**
Version: **1.3.1**
Description: **Human: AI-powered 3D Face Detection, Face Description & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition**
Author: **Vladimir Mandic <mandic00@live.com>**
@@ -9,8 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
## Changelog
### **HEAD -> main** 2021/03/30 mandic00@live.com
### **1.3.1** 2021/03/30 mandic00@live.com
- added face3d demo
- initial work on face3d three.js demo
- enable buffering
- new icons

View File

@@ -12,7 +12,7 @@ const userConfig = {
profile: false,
warmup: 'full',
videoOptimized: false,
filter: { enabled: true },
filter: { enabled: false },
face: { enabled: false,
mesh: { enabled: true },
iris: { enabled: true },
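
For context, a userConfig block like this is a partial override that gets merged over the library defaults when it is passed to the Human constructor. A minimal sketch of that pattern, assuming the published `@vladmandic/human` package as the import and an HTML video element as input (neither is part of this diff):

```ts
// minimal sketch: a partial userConfig applied at construction
// (import path, input element, and logged field are assumptions, not part of this commit)
import Human from '@vladmandic/human';

const userConfig = {
  filter: { enabled: false },
  face: { enabled: false, mesh: { enabled: true }, iris: { enabled: true } },
};
const human = new Human(userConfig); // options not listed here keep their default values

async function run(input: HTMLVideoElement) {
  const result = await human.detect(input);
  console.log(result.performance);
}
```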

View File

@@ -11,8 +11,8 @@ const Human = require('../dist/human.node.js').default; // or const Human = requ
let human = null;
const myConfig = {
// backend: 'tensorflow',
console: true,
backend: 'tensorflow',
debug: true,
videoOptimized: false,
async: false,
face: {
@@ -22,17 +22,15 @@ const myConfig = {
iris: { modelPath: 'file://models/iris.json', enabled: true },
description: { modelPath: 'file://models/faceres.json', enabled: true },
emotion: { modelPath: 'file://models/emotion.json', enabled: true },
age: { modelPath: 'file://models/age.json', enabled: false },
gender: { modelPath: 'file://models/gender.json', enabled: false },
embedding: { modelPath: 'file://models/mobileface.json', enabled: false },
},
// body: { modelPath: 'file://models/blazepose.json', enabled: true },
body: { modelPath: 'file://models/posenet.json', enabled: true },
hand: {
enabled: true,
detector: { modelPath: 'file://models/handdetect.json' },
skeleton: { modelPath: 'file://models/handskeleton.json' },
},
// body: { modelPath: 'file://models/efficientpose.json', enabled: true },
// body: { modelPath: 'file://models/blazepose.json', enabled: true },
body: { modelPath: 'file://models/posenet.json', enabled: true },
object: { modelPath: 'file://models/nanodet.json', enabled: true },
};
@@ -66,6 +64,29 @@ async function detect(input) {
// dispose image tensor as we no longer need it
image.dispose();
// print data to console
log.data('Results:');
for (let i = 0; i < result.face.length; i++) {
const face = result.face[i];
const emotion = face.emotion.reduce((prev, curr) => (prev.score > curr.score ? prev : curr));
log.data(` Face: #${i} boxConfidence:${face.boxConfidence} faceConfidence:${face.faceConfidence} age:${face.age} genderConfidence:${face.genderConfidence} gender:${face.gender} emotionScore:${emotion.score} emotion:${emotion.emotion} iris:${face.iris}`);
}
for (let i = 0; i < result.body.length; i++) {
const body = result.body[i];
log.data(` Body: #${i} score:${body.score}`);
}
for (let i = 0; i < result.hand.length; i++) {
const hand = result.hand[i];
log.data(` Hand: #${i} confidence:${hand.confidence}`);
}
for (let i = 0; i < result.gesture.length; i++) {
const [key, val] = Object.entries(result.gesture[i]);
log.data(` Gesture: ${key[0]}#${key[1]} gesture:${val[1]}`);
}
for (let i = 0; i < result.object.length; i++) {
const object = result.object[i];
log.data(` Object: #${i} score:${object.score} label:${object.label}`);
}
result.face.length = 0;
return result;
}
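
The logging added here leans on two compact patterns: a reduce() that keeps the highest-scoring emotion, and a destructuring of Object.entries() over a gesture entry, which is an object pairing a source index with a gesture label (for example { face: 0, gesture: 'facing center' }). A standalone sketch with hypothetical sample data:

```ts
// minimal sketch of the two logging patterns above, using hypothetical sample data
const emotions = [
  { score: 0.12, emotion: 'neutral' },
  { score: 0.81, emotion: 'happy' },
];
// keep whichever entry has the higher score
const dominant = emotions.reduce((prev, curr) => (prev.score > curr.score ? prev : curr));
console.log(dominant.emotion); // 'happy'

// a gesture entry pairs a source and its index with the gesture label
const entry = { face: 0, gesture: 'facing center' };
const [key, val] = Object.entries(entry); // key = ['face', 0], val = ['gesture', 'facing center']
console.log(`${key[0]}#${key[1]} gesture:${val[1]}`); // face#0 gesture:facing center
```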

View File

@@ -3,7 +3,7 @@ import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';
let model;
let keypoints = { };
let keypoints: Array<any> = [];
let skipped = Number.MAX_SAFE_INTEGER;
const bodyParts = ['head', 'neck', 'rightShoulder', 'rightElbow', 'rightWrist', 'chest', 'leftShoulder', 'leftElbow', 'leftWrist', 'pelvis', 'rightHip', 'rightKnee', 'rightAnkle', 'leftHip', 'leftKnee', 'leftAnkle'];
@@ -56,9 +56,9 @@ export async function predict(image, config) {
let resT;
if (!config.profile) {
if (config.body.enabled) resT = await model.executeAsync(tensor);
if (config.body.enabled) resT = await model.predict(tensor);
} else {
const profileT = config.body.enabled ? await tf.profile(() => model.executeAsync(tensor)) : {};
const profileT = config.body.enabled ? await tf.profile(() => model.predict(tensor)) : {};
resT = profileT.result.clone();
profileT.result.dispose();
profile.run('body', profileT);
@@ -79,7 +79,7 @@ export async function predict(image, config) {
if (score > config.body.scoreThreshold) {
parts.push({
id,
score,
score: Math.round(100 * score) / 100,
part: bodyParts[id],
positionRaw: {
xRaw: x / model.inputs[0].shape[2], // x normalized to 0..1
@@ -95,6 +95,7 @@ export async function predict(image, config) {
stack.forEach((s) => tf.dispose(s));
keypoints = parts;
}
resolve([{ keypoints }]);
const score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
resolve([{ score, keypoints }]);
});
}
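
This hunk is the core of the "normalize all scores" change for the body module: each keypoint score is rounded to two decimals, and the overall score attached to the result is simply the best individual keypoint score. A self-contained sketch of that normalization, with illustrative types and sample data that are not part of this diff:

```ts
// sketch of the score normalization used above (types and sample data are illustrative)
interface Keypoint { id: number; part: string; score: number }

const normalize = (score: number): number => Math.round(100 * score) / 100;

function summarize(keypoints: Keypoint[]) {
  const normalized = keypoints.map((kp) => ({ ...kp, score: normalize(kp.score) }));
  // overall score is the highest keypoint score, or 0 when nothing passed the threshold
  const score = normalized.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
  return { score, keypoints: normalized };
}

console.log(summarize([{ id: 0, part: 'head', score: 0.87654 }])); // { score: 0.88, keypoints: [...] }
```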

View File

@@ -49,7 +49,7 @@ export class HandPose {
(prediction.box.bottomRight[0] - prediction.box.topLeft[0]) / input.shape[2],
(prediction.box.bottomRight[1] - prediction.box.topLeft[1]) / input.shape[1],
];
hands.push({ confidence: prediction.confidence, box, boxRaw, landmarks: prediction.landmarks, annotations });
hands.push({ confidence: Math.round(100 * prediction.confidence) / 100, box, boxRaw, landmarks: prediction.landmarks, annotations });
}
return hands;
}
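
Besides rounding the hand confidence, the hunk shows boxRaw entries computed by dividing pixel distances by the input tensor's width (shape[2]) and height (shape[1]), yielding coordinates normalized to 0..1. A small sketch of that conversion; the sample box values are hypothetical, and the first two boxRaw entries (top-left) are assumed to follow the same pattern as the width/height lines shown above:

```ts
// sketch: normalizing a pixel-space box to 0..1 using the input tensor shape
// input tensor shape is [batch, height, width, channels]; box values are hypothetical
const shape = [1, 720, 1280, 3];
const topLeft = [320, 180];
const bottomRight = [640, 540];
const boxRaw = [
  topLeft[0] / shape[2],                    // x normalized by width
  topLeft[1] / shape[1],                    // y normalized by height
  (bottomRight[0] - topLeft[0]) / shape[2], // box width normalized by width
  (bottomRight[1] - topLeft[1]) / shape[1], // box height normalized by height
];
console.log(boxRaw); // [0.25, 0.25, 0.25, 0.5]
```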

View File

@@ -58,7 +58,7 @@ async function process(res, inputSize, outputShape, config) {
const result = {
id: id++,
strideSize,
score,
score: Math.round(100 * score) / 100,
class: j + 1,
label: labels[j].label,
center: [Math.trunc(outputShape[0] * cx), Math.trunc(outputShape[1] * cy)],
@@ -113,9 +113,9 @@ export async function predict(image, config) {
let objectT;
if (!config.profile) {
if (config.object.enabled) objectT = await model.executeAsync(transpose);
if (config.object.enabled) objectT = await model.predict(transpose);
} else {
const profileObject = config.object.enabled ? await tf.profile(() => model.executeAsync(transpose)) : {};
const profileObject = config.object.enabled ? await tf.profile(() => model.predict(transpose)) : {};
objectT = profileObject.result;
profile.run('object', profileObject);
}
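
Both the body and object modules also switch from model.executeAsync() to model.predict(). In TensorFlow.js, executeAsync() is needed when a graph contains dynamic/control-flow ops, while predict()/execute() runs a static graph synchronously and avoids the extra async scheduling, which is what this change relies on. A hedged sketch of the two call styles; the tfjs-node import, relative model path resolution, and input shape are assumptions for illustration:

```ts
// hedged sketch: predict() vs executeAsync() on a tf.js graph model
// (import, model path resolution, and input shape are illustrative assumptions)
import * as tf from '@tensorflow/tfjs-node';

async function run() {
  const model = await tf.loadGraphModel('file://models/nanodet.json');
  const input = tf.zeros([1, 416, 416, 3]);
  // predict() executes the graph synchronously; it only works when the graph
  // has no dynamic/control-flow ops, which is what this commit assumes
  const out = model.predict(input) as tf.Tensor | tf.Tensor[];
  // the previous call style would have been:
  // const out = await model.executeAsync(input);
  console.log(Array.isArray(out) ? out.map((t) => t.shape) : out.shape);
  tf.dispose([input, out]);
}
run();
```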