normalize all scores

pull/94/head
Vladimir Mandic 2021-04-01 09:24:56 -04:00
parent 65ca7fead2
commit e2ed67e591
20 changed files with 195 additions and 170 deletions
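The pattern common to the source diffs below is that every detection score and confidence is rounded to two decimal places before being returned. A minimal sketch of that rounding, using an illustrative `round2` helper name (the commit itself inlines the expression at each call site rather than adding a helper):

```js
// Round a 0..1 score/confidence to two decimals, as the diffs below do inline
const round2 = (value) => Math.round(100 * value) / 100;

round2(0.87654321); // 0.88
round2(0.5);        // 0.5
```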

@@ -1,6 +1,6 @@
# @vladmandic/human
Version: **1.3.0**
Version: **1.3.1**
Description: **Human: AI-powered 3D Face Detection, Face Description & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition**
Author: **Vladimir Mandic <mandic00@live.com>**
@@ -9,8 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
## Changelog
### **HEAD -> main** 2021/03/30 mandic00@live.com
### **1.3.1** 2021/03/30 mandic00@live.com
- added face3d demo
- initial work on face3d three.js demo
- enable buffering
- new icons

@@ -12,7 +12,7 @@ const userConfig = {
profile: false,
warmup: 'full',
videoOptimized: false,
filter: { enabled: true },
filter: { enabled: false },
face: { enabled: false,
mesh: { enabled: true },
iris: { enabled: true },

@@ -11,8 +11,8 @@ const Human = require('../dist/human.node.js').default; // or const Human = requ
let human = null;
const myConfig = {
// backend: 'tensorflow',
console: true,
backend: 'tensorflow',
debug: true,
videoOptimized: false,
async: false,
face: {
@@ -22,17 +22,15 @@ const myConfig = {
iris: { modelPath: 'file://models/iris.json', enabled: true },
description: { modelPath: 'file://models/faceres.json', enabled: true },
emotion: { modelPath: 'file://models/emotion.json', enabled: true },
age: { modelPath: 'file://models/age.json', enabled: false },
gender: { modelPath: 'file://models/gender.json', enabled: false },
embedding: { modelPath: 'file://models/mobileface.json', enabled: false },
},
// body: { modelPath: 'file://models/blazepose.json', enabled: true },
body: { modelPath: 'file://models/posenet.json', enabled: true },
hand: {
enabled: true,
detector: { modelPath: 'file://models/handdetect.json' },
skeleton: { modelPath: 'file://models/handskeleton.json' },
},
// body: { modelPath: 'file://models/efficientpose.json', enabled: true },
// body: { modelPath: 'file://models/blazepose.json', enabled: true },
body: { modelPath: 'file://models/posenet.json', enabled: true },
object: { modelPath: 'file://models/nanodet.json', enabled: true },
};
@@ -66,6 +64,29 @@ async function detect(input) {
// dispose image tensor as we no longer need it
image.dispose();
// print data to console
log.data('Results:');
for (let i = 0; i < result.face.length; i++) {
const face = result.face[i];
const emotion = face.emotion.reduce((prev, curr) => (prev.score > curr.score ? prev : curr));
log.data(` Face: #${i} boxConfidence:${face.boxConfidence} faceConfidence:${face.faceConfidence} age:${face.age} genderConfidence:${face.genderConfidence} gender:${face.gender} emotionScore:${emotion.score} emotion:${emotion.emotion} iris:${face.iris}`);
}
for (let i = 0; i < result.body.length; i++) {
const body = result.body[i];
log.data(` Body: #${i} score:${body.score}`);
}
for (let i = 0; i < result.hand.length; i++) {
const hand = result.hand[i];
log.data(` Hand: #${i} confidence:${hand.confidence}`);
}
for (let i = 0; i < result.gesture.length; i++) {
const [key, val] = Object.entries(result.gesture[i]);
log.data(` Gesture: ${key[0]}#${key[1]} gesture:${val[1]}`);
}
for (let i = 0; i < result.object.length; i++) {
const object = result.object[i];
log.data(` Object: #${i} score:${object.score} label:${object.label}`);
}
result.face.length = 0;
return result;
}
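The gesture loop above relies on a compact destructuring of `Object.entries()`: each entry is a `[name, value]` pair, and the first two pairs become `key` and `val`. A hedged illustration, assuming a gesture result shaped like the two-property object that indexing implies (the actual shape is not shown in this diff):

```js
// Hypothetical gesture entry matching the indexing used above: the first property
// identifies the source (e.g. face #0), the second holds the gesture description
const entry = { face: 0, gesture: 'facing center' };

const [key, val] = Object.entries(entry); // [['face', 0], ['gesture', 'facing center']]
console.log(`Gesture: ${key[0]}#${key[1]} gesture:${val[1]}`); // Gesture: face#0 gesture:facing center
```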

File diffs suppressed because one or more lines are too long — vendored dist bundles rebuilt: dist/human.esm.js (112 lines), dist/human.js (112 lines), dist/human.js.map (4 lines), dist/human.node.js (16 lines), plus additional suppressed dist files.

@@ -41,9 +41,9 @@ export class MediaPipeFaceMesh {
(prediction.box.endPoint[1] - prediction.box.startPoint[1]) / input.shape[1],
] : [];
results.push({
confidence: prediction.faceConfidence || prediction.boxConfidence || 0,
boxConfidence: prediction.boxConfidence,
faceConfidence: prediction.faceConfidence,
confidence: Math.round(100 * prediction.faceConfidence || 100 * prediction.boxConfidence || 0) / 100,
boxConfidence: Math.round(100 * prediction.boxConfidence) / 100,
faceConfidence: Math.round(100 * prediction.faceConfidence) / 100,
box,
boxRaw,
mesh,
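In the MediaPipeFaceMesh results above, `confidence` falls back from `faceConfidence` to `boxConfidence` before rounding: `*` binds tighter than `||`, so the fallback chain is evaluated first and only the surviving value is rounded and scaled back to 0..1. A small worked example of that evaluation order:

```js
const faceConfidence = undefined; // e.g. mesh stage did not produce a score
const boxConfidence = 0.8765;

// 100 * undefined is NaN (falsy), so the chain falls through to the box confidence
const confidence = Math.round(100 * faceConfidence || 100 * boxConfidence || 0) / 100;
console.log(confidence); // 0.88
```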

@@ -53,5 +53,6 @@ export async function predict(image, config) {
presence: (100 - Math.trunc(100 / (1 + Math.exp(points[depth * i + 4])))) / 100, // reverse sigmoid value
});
}
return [{ keypoints }];
const score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
return [{ score, keypoints }];
}
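The `presence` expression in the hunk above, described in the comment as a reverse sigmoid, tracks the standard sigmoid `1 / (1 + e^-x)` to within one hundredth (always on the high side, since the complement is truncated), and the new overall `score` is simply the highest keypoint score. A quick numerical check of that relationship (values come from evaluating the expressions, not from the model):

```js
const presence = (x) => (100 - Math.trunc(100 / (1 + Math.exp(x)))) / 100;
const sigmoid = (x) => 1 / (1 + Math.exp(-x));

console.log(presence(2), sigmoid(2)); // 0.89 0.8807970779778823
console.log(presence(0), sigmoid(0)); // 0.5 0.5
```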

@@ -3,7 +3,7 @@ import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';
let model;
let keypoints = { };
let keypoints: Array<any> = [];
let skipped = Number.MAX_SAFE_INTEGER;
const bodyParts = ['head', 'neck', 'rightShoulder', 'rightElbow', 'rightWrist', 'chest', 'leftShoulder', 'leftElbow', 'leftWrist', 'pelvis', 'rightHip', 'rightKnee', 'rightAnkle', 'leftHip', 'leftKnee', 'leftAnkle'];
@@ -56,9 +56,9 @@ export async function predict(image, config) {
let resT;
if (!config.profile) {
if (config.body.enabled) resT = await model.executeAsync(tensor);
if (config.body.enabled) resT = await model.predict(tensor);
} else {
const profileT = config.body.enabled ? await tf.profile(() => model.executeAsync(tensor)) : {};
const profileT = config.body.enabled ? await tf.profile(() => model.predict(tensor)) : {};
resT = profileT.result.clone();
profileT.result.dispose();
profile.run('body', profileT);
@@ -79,7 +79,7 @@ export async function predict(image, config) {
if (score > config.body.scoreThreshold) {
parts.push({
id,
score,
score: Math.round(100 * score) / 100,
part: bodyParts[id],
positionRaw: {
xRaw: x / model.inputs[0].shape[2], // x normalized to 0..1
@@ -95,6 +95,7 @@ export async function predict(image, config) {
stack.forEach((s) => tf.dispose(s));
keypoints = parts;
}
resolve([{ keypoints }]);
const score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
resolve([{ score, keypoints }]);
});
}
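Both body-pose diffs above now return an overall `score` alongside `keypoints`, computed as the highest individual part/keypoint score via a `reduce` with a numeric accumulator. A minimal sketch of that aggregation on already-rounded part scores:

```js
// Overall pose score = highest individual part score (each already rounded to 2 decimals)
const keypoints = [
  { part: 'head', score: 0.92 },
  { part: 'leftWrist', score: 0.41 },
];
const score = keypoints.reduce((prev, curr) => (curr.score > prev ? curr.score : prev), 0);
console.log(score); // 0.92
```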

@@ -49,7 +49,7 @@ export class HandPose {
(prediction.box.bottomRight[0] - prediction.box.topLeft[0]) / input.shape[2],
(prediction.box.bottomRight[1] - prediction.box.topLeft[1]) / input.shape[1],
];
hands.push({ confidence: prediction.confidence, box, boxRaw, landmarks: prediction.landmarks, annotations });
hands.push({ confidence: Math.round(100 * prediction.confidence) / 100, box, boxRaw, landmarks: prediction.landmarks, annotations });
}
return hands;
}

@@ -58,7 +58,7 @@ async function process(res, inputSize, outputShape, config) {
const result = {
id: id++,
strideSize,
score,
score: Math.round(100 * score) / 100,
class: j + 1,
label: labels[j].label,
center: [Math.trunc(outputShape[0] * cx), Math.trunc(outputShape[1] * cy)],
@@ -113,9 +113,9 @@ export async function predict(image, config) {
let objectT;
if (!config.profile) {
if (config.object.enabled) objectT = await model.executeAsync(transpose);
if (config.object.enabled) objectT = await model.predict(transpose);
} else {
const profileObject = config.object.enabled ? await tf.profile(() => model.executeAsync(transpose)) : {};
const profileObject = config.object.enabled ? await tf.profile(() => model.predict(transpose)) : {};
objectT = profileObject.result;
profile.run('object', profileObject);
}

@@ -34,7 +34,7 @@ export function decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFw
// Else start a new detection instance at the position of the root.
const keypoints = decodePose.decodePose(root, scoresBuffer, offsetsBuffer, defaultOutputStride, displacementsFwdBuffer, displacementsBwdBuffer);
const score = getInstanceScore(poses, squaredNmsRadius, keypoints);
if (score > scoreThreshold) poses.push({ keypoints, score });
if (score > scoreThreshold) poses.push({ keypoints, score: Math.round(100 * score) / 100 });
}
return poses;
}

@@ -1,5 +1,6 @@
export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<{
score: any;
keypoints: {
id: any;
part: any;