update iris distance calculations

pull/280/head
Vladimir Mandic 2021-05-24 07:16:38 -04:00
parent 4d404c7592
commit edf2d896cf
6 changed files with 23 additions and 22 deletions

View File

@@ -9,13 +9,14 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 ## Changelog
+### **HEAD -> main** 2021/05/23 mandic00@live.com
 ### **1.9.3** 2021/05/23 mandic00@live.com
 - use green weighted for input diff calculation
 - implement experimental drawoptions.bufferedoutput and bufferedfactor
+- use explicit tensor interface
-### **origin/main** 2021/05/22 mandic00@live.com
 - add tfjs types and remove all instances of any
 - enhance strong typing
 - rebuild all for release

View File

@@ -18,18 +18,18 @@ const userConfig = {
     enabled: false,
     flip: false,
   },
-  face: { enabled: false,
-    detector: { return: false },
+  face: { enabled: true,
+    detector: { return: true },
     mesh: { enabled: true },
     iris: { enabled: true },
     description: { enabled: false },
     emotion: { enabled: false },
   },
-  hand: { enabled: true },
-  gesture: { enabled: true },
-  body: { enabled: true, modelPath: 'posenet.json' },
+  hand: { enabled: false },
+  body: { enabled: false, modelPath: 'posenet.json' },
   // body: { enabled: true, modelPath: 'blazepose.json' },
   object: { enabled: false },
+  gesture: { enabled: true },
   */
 };
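The config change above points the demo at the face pipeline that the iris estimate depends on. As a reference, here is a minimal sketch of a configuration enabling only those pieces; the field names mirror the new side of the diff, the object name is illustrative, and anything not listed is assumed to stay at library defaults.

// Minimal sketch: enable only what the iris estimate uses (face detector, mesh, iris model).
// detector.return additionally exposes the cropped face tensor, as in the updated demo config.
const irisOnlyConfig = {
  face: {
    enabled: true,
    detector: { return: true },
    mesh: { enabled: true },
    iris: { enabled: true },
    description: { enabled: false },
    emotion: { enabled: false },
  },
  hand: { enabled: false },
  body: { enabled: false },
  object: { enabled: false },
  gesture: { enabled: true },
};
// Assumed usage, following the library's demo pattern:
//   const human = new Human(irisOnlyConfig);
//   const result = await human.detect(input);
//   result.face[0]?.iris then carries the value produced by the updated calculation below.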

View File

@@ -161,8 +161,8 @@ export const detectFace = async (parent, input): Promise<Face[]> => {
       delete faces[i].annotations.rightEyeIris;
     }
     const irisSize = (faces[i].annotations?.leftEyeIris && faces[i].annotations?.rightEyeIris)
-      /* average human iris size is 11.7mm */
-      ? 11.7 * Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1]))
+      /* note: average human iris size is 11.7mm */
+      ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2]
       : 0;
     // combine results
@@ -174,7 +174,7 @@ export const detectFace = async (parent, input): Promise<Face[]> => {
       genderConfidence: descRes.genderConfidence,
       embedding: descRes.descriptor,
       emotion: emotionRes,
-      iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
+      iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
       rotation,
       tensor: parent.config.face.detector.return ? faces[i].image?.squeeze() : null,
     });
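The two changes above carry the substance of the commit: the measured iris diameter is now normalized by the input tensor's width (input.shape[2]) before the reported value is derived, and the 11.7 mm average-iris-diameter constant moves from the size measurement into the final expression. A minimal sketch of the resulting arithmetic; the helper name and parameters are illustrative and not part of the library's API:

// Illustrative sketch only: leftPx/rightPx stand for the pixel spans taken from the
// leftEyeIris/rightEyeIris annotations, imageWidth for input.shape[2] in the diff above.
function irisDistanceEstimate(leftPx: number, rightPx: number, imageWidth: number): number {
  const irisSize = Math.max(leftPx, rightPx) / imageWidth; // relative iris size, 0..1
  if (irisSize === 0) return 0;                            // no iris annotations available
  return Math.trunc(500 / irisSize / 11.7) / 100;          // same two-decimal truncation as the diff
}

Where the old code reported a value proportional to the iris size in pixels, the new value scales inversely with the relative iris size, so it grows as the face moves away from the camera, which is what the "iris distance" wording in the commit title suggests.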

View File

@@ -20,7 +20,7 @@ export async function load(config) {
 }
 async function process(res, inputSize, outputShape, config) {
-  const results: Array<{ score: number, class: number, label: string, box: number[], boxRaw: number[] }> = [];
+  const results: Array<Item> = [];
   const detections = res.arraySync();
   const squeezeT = tf.squeeze(res);
   res.dispose();
@@ -38,6 +38,7 @@ async function process(res, inputSize, outputShape, config) {
   classesT.dispose();
   const nms = nmsT.dataSync();
   nmsT.dispose();
+  let i = 0;
   for (const id of nms) {
     const score = detections[0][id][4];
     const classVal = detections[0][id][5];
@@ -54,7 +55,7 @@ async function process(res, inputSize, outputShape, config) {
       Math.trunc(boxRaw[2] * outputShape[0]),
       Math.trunc(boxRaw[3] * outputShape[1]),
     ];
-    results.push({ score, class: classVal, label, box, boxRaw });
+    results.push({ id: i++, score, class: classVal, label, box, boxRaw });
   }
   return results;
 }

View File

@@ -23,7 +23,7 @@ export async function load(config) {
 async function process(res, inputSize, outputShape, config) {
   let id = 0;
-  let results: Array<{ score: number, strideSize: number, class: number, label: string, center: number[], centerRaw: number[], box: number[], boxRaw: number[] }> = [];
+  let results: Array<Item> = [];
   for (const strideSize of [1, 2, 4]) { // try each stride size as it detects large/medium/small objects
     // find scores, boxes, classes
     tf.tidy(() => { // wrap in tidy to automatically deallocate temp tensors

View File

@@ -1,8 +1,4 @@
-/**
- * Result interface definition for **Human** library
- *
- * Contains all possible detection results
- */
+import { Tensor } from '../dist/tfjs.esm.js';
 /** Face results
  * Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
@@ -30,8 +26,6 @@
  * - matrix: 3d transofrmation matrix as array of numeric values
  * - tensor: face tensor as Tensor object which contains detected face
  */
-import { Tensor } from '../dist/tfjs.esm.js';
 export interface Face {
   id: number
   confidence: number,
@@ -69,7 +63,6 @@ export interface Face {
  * - score: body part score value
  * - presence: body part presence value
  */
 export interface Body {
   id: number,
   score: number,
@@ -115,6 +108,7 @@ export interface Hand {
  * - boxRaw as array of [x, y, width, height], normalized to range 0..1
  */
 export interface Item {
+  id: number,
   score: number,
   strideSize?: number,
   class: number,
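Both object-detection modules above now type their results as Array<Item>, and Item gains the id field that the detection loop assigns incrementally. Spelled out, the shape implied by the diffs looks roughly like this; the fields not visible in this commit (label, center, centerRaw, box, boxRaw) are inferred from the inline annotations that Array<Item> replaced, so treat the sketch as an assumption rather than the file's exact contents:

// Assumed consolidation of the two inline result types into the shared Item interface;
// optionality of strideSize/center/centerRaw is inferred, not confirmed by this diff.
export interface Item {
  id: number,            // added in this commit, assigned per detection
  score: number,
  strideSize?: number,   // only produced by the stride-based detector
  class: number,
  label: string,
  center?: number[],     // only produced by the stride-based detector
  centerRaw?: number[],
  box: number[],
  boxRaw: number[],
}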
@@ -138,6 +132,11 @@ export type Gesture =
   | { 'body': number, gesture: string }
   | { 'hand': number, gesture: string }
+/**
+ * Result interface definition for **Human** library
+ *
+ * Contains all possible detection results
+ */
 export interface Result {
   /** {@link Face}: detection & analysis results */
   face: Array<Face>,