update iris distance calculations

pull/134/head
Vladimir Mandic 2021-05-24 07:16:38 -04:00
parent 4ed9e9f3a1
commit 53d2241036
20 changed files with 124 additions and 51 deletions

View File

@@ -9,13 +9,14 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
## Changelog
### **HEAD -> main** 2021/05/23 mandic00@live.com
### **1.9.3** 2021/05/23 mandic00@live.com
- use green weighted for input diff calculation
- implement experimental drawoptions.bufferedoutput and bufferedfactor
### **origin/main** 2021/05/22 mandic00@live.com
- use explicit tensor interface
- add tfjs types and remove all instances of any
- enhance strong typing
- rebuild all for release

View File

@@ -18,18 +18,18 @@ const userConfig = {
enabled: false,
flip: false,
},
face: { enabled: false,
detector: { return: false },
face: { enabled: true,
detector: { return: true },
mesh: { enabled: true },
iris: { enabled: true },
description: { enabled: false },
emotion: { enabled: false },
},
hand: { enabled: true },
gesture: { enabled: true },
body: { enabled: true, modelPath: 'posenet.json' },
hand: { enabled: false },
body: { enabled: false, modelPath: 'posenet.json' },
// body: { enabled: true, modelPath: 'blazepose.json' },
object: { enabled: false },
gesture: { enabled: true },
*/
};
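
The iris distance value depends on the face detector, mesh and iris modules all being enabled, as in the configuration above. A minimal standalone sketch of an equivalent configuration, assuming the published @vladmandic/human package and its default Human export; values are illustrative rather than the demo's exact settings:

import Human from '@vladmandic/human'; // assumed default export of the published package

// face detector, mesh and iris must all be enabled for result.face[n].iris to be populated
const human = new Human({
  face: {
    enabled: true,
    mesh: { enabled: true },
    iris: { enabled: true },
    description: { enabled: false },
    emotion: { enabled: false },
  },
  hand: { enabled: false },
  body: { enabled: false },
  object: { enabled: false },
});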

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.js vendored (2 changes)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.js vendored (2 changes)

File diff suppressed because one or more lines are too long

View File

@@ -4453,7 +4453,7 @@ var detectFace = async (parent, input) => {
delete faces[i].annotations.leftEyeIris;
delete faces[i].annotations.rightEyeIris;
}
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? 11.7 * Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) : 0;
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
faceRes.push({
id: i,
...faces[i],
@@ -4462,7 +4462,7 @@ var detectFace = async (parent, input) => {
genderConfidence: descRes.genderConfidence,
embedding: descRes.descriptor,
emotion: emotionRes,
iris: irisSize !== 0 ? Math.trunc(irisSize) / 100 : 0,
iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
rotation,
tensor: parent.config.face.detector.return ? (_g = faces[i].image) == null ? void 0 : _g.squeeze() : null
});
@@ -17399,6 +17399,7 @@ async function process3(res, inputSize, outputShape, config3) {
classesT.dispose();
const nms = nmsT.dataSync();
nmsT.dispose();
let i = 0;
for (const id of nms) {
const score = detections[0][id][4];
const classVal = detections[0][id][5];
@@ -17415,7 +17416,7 @@ async function process3(res, inputSize, outputShape, config3) {
Math.trunc(boxRaw[2] * outputShape[0]),
Math.trunc(boxRaw[3] * outputShape[1])
];
results.push({ score, class: classVal, label, box: box4, boxRaw });
results.push({ id: i++, score, class: classVal, label, box: box4, boxRaw });
}
return results;
}

View File

@@ -4454,7 +4454,7 @@ var detectFace = async (parent, input) => {
delete faces[i].annotations.leftEyeIris;
delete faces[i].annotations.rightEyeIris;
}
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? 11.7 * Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) : 0;
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
faceRes.push({
id: i,
...faces[i],
@@ -4463,7 +4463,7 @@ var detectFace = async (parent, input) => {
genderConfidence: descRes.genderConfidence,
embedding: descRes.descriptor,
emotion: emotionRes,
iris: irisSize !== 0 ? Math.trunc(irisSize) / 100 : 0,
iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
rotation,
tensor: parent.config.face.detector.return ? (_g = faces[i].image) == null ? void 0 : _g.squeeze() : null
});
@@ -17400,6 +17400,7 @@ async function process3(res, inputSize, outputShape, config3) {
classesT.dispose();
const nms = nmsT.dataSync();
nmsT.dispose();
let i = 0;
for (const id of nms) {
const score = detections[0][id][4];
const classVal = detections[0][id][5];
@@ -17416,7 +17417,7 @@ async function process3(res, inputSize, outputShape, config3) {
Math.trunc(boxRaw[2] * outputShape[0]),
Math.trunc(boxRaw[3] * outputShape[1])
];
results.push({ score, class: classVal, label, box: box4, boxRaw });
results.push({ id: i++, score, class: classVal, label, box: box4, boxRaw });
}
return results;
}

dist/human.node.js vendored (7 changes)
View File

@@ -4453,7 +4453,7 @@ var detectFace = async (parent, input) => {
delete faces[i].annotations.leftEyeIris;
delete faces[i].annotations.rightEyeIris;
}
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? 11.7 * Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) : 0;
const irisSize = ((_e = faces[i].annotations) == null ? void 0 : _e.leftEyeIris) && ((_f = faces[i].annotations) == null ? void 0 : _f.rightEyeIris) ? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2] : 0;
faceRes.push({
id: i,
...faces[i],
@@ -4462,7 +4462,7 @@ var detectFace = async (parent, input) => {
genderConfidence: descRes.genderConfidence,
embedding: descRes.descriptor,
emotion: emotionRes,
iris: irisSize !== 0 ? Math.trunc(irisSize) / 100 : 0,
iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
rotation,
tensor: parent.config.face.detector.return ? (_g = faces[i].image) == null ? void 0 : _g.squeeze() : null
});
@@ -17399,6 +17399,7 @@ async function process3(res, inputSize, outputShape, config3) {
classesT.dispose();
const nms = nmsT.dataSync();
nmsT.dispose();
let i = 0;
for (const id of nms) {
const score = detections[0][id][4];
const classVal = detections[0][id][5];
@@ -17415,7 +17416,7 @@ async function process3(res, inputSize, outputShape, config3) {
Math.trunc(boxRaw[2] * outputShape[0]),
Math.trunc(boxRaw[3] * outputShape[1])
];
results.push({ score, class: classVal, label, box: box4, boxRaw });
results.push({ id: i++, score, class: classVal, label, box: box4, boxRaw });
}
return results;
}

View File

@@ -103,3 +103,20 @@
2021-05-23 13:54:50 INFO:  Generate types: ["src/human.ts"]
2021-05-23 13:54:56 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-23 13:54:56 INFO:  Generate TypeDocs: ["src/human.ts"]
2021-05-24 07:15:57 INFO:  @vladmandic/human version 1.9.3
2021-05-24 07:15:57 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-24 07:15:57 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-05-24 07:15:57 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-05-24 07:15:57 STATE: Build for: node type: node: {"imports":36,"importBytes":424920,"outputBytes":381396,"outputFiles":"dist/human.node.js"}
2021-05-24 07:15:57 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-05-24 07:15:57 STATE: Build for: nodeGPU type: node: {"imports":36,"importBytes":424928,"outputBytes":381400,"outputFiles":"dist/human.node-gpu.js"}
2021-05-24 07:15:57 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-05-24 07:15:57 STATE: Build for: nodeWASM type: node: {"imports":36,"importBytes":424995,"outputBytes":381472,"outputFiles":"dist/human.node-wasm.js"}
2021-05-24 07:15:57 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-05-24 07:15:57 STATE: Build for: browserNoBundle type: esm: {"imports":36,"importBytes":425022,"outputBytes":234163,"outputFiles":"dist/human.esm-nobundle.js"}
2021-05-24 07:15:59 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
2021-05-24 07:15:59 STATE: Build for: browserBundle type: iife: {"imports":36,"importBytes":1535042,"outputBytes":1342052,"outputFiles":"dist/human.js"}
2021-05-24 07:16:00 STATE: Build for: browserBundle type: esm: {"imports":36,"importBytes":1535042,"outputBytes":1342044,"outputFiles":"dist/human.esm.js"}
2021-05-24 07:16:00 INFO:  Generate types: ["src/human.ts"]
2021-05-24 07:16:07 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-24 07:16:07 INFO:  Generate TypeDocs: ["src/human.ts"]

View File

@@ -161,8 +161,8 @@ export const detectFace = async (parent, input): Promise<Face[]> => {
delete faces[i].annotations.rightEyeIris;
}
const irisSize = (faces[i].annotations?.leftEyeIris && faces[i].annotations?.rightEyeIris)
/* average human iris size is 11.7mm */
? 11.7 * Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1]))
/* note: average human iris size is 11.7mm */
? Math.max(Math.abs(faces[i].annotations.leftEyeIris[3][0] - faces[i].annotations.leftEyeIris[1][0]), Math.abs(faces[i].annotations.rightEyeIris[4][1] - faces[i].annotations.rightEyeIris[2][1])) / input.shape[2]
: 0;
// combine results
@@ -174,7 +174,7 @@ export const detectFace = async (parent, input): Promise<Face[]> => {
genderConfidence: descRes.genderConfidence,
embedding: descRes.descriptor,
emotion: emotionRes,
iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
iris: irisSize !== 0 ? Math.trunc(500 / irisSize / 11.7) / 100 : 0,
rotation,
tensor: parent.config.face.detector.return ? faces[i].image?.squeeze() : null,
});
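
In the source, irisSize changes from an absolute measurement pre-scaled by the 11.7mm average iris diameter to a relative one: the larger measured iris span divided by the input width (input.shape[2] for an NHWC tensor). The 11.7mm constant moves into the final conversion, so the reported iris value now behaves like a distance: a larger relative iris span (face closer to the camera) yields a smaller number. A self-contained sketch mirroring the updated math; the helper name and signature are hypothetical, and the 500 scaling constant and resulting units are taken from the diff as-is:

// hypothetical helper, not part of the library API
// leftIris/rightIris are the leftEyeIris/rightEyeIris annotation arrays of [x, y, z] points
// inputWidth corresponds to input.shape[2] of the NHWC input tensor
function estimateIrisDistance(leftIris: number[][], rightIris: number[][], inputWidth: number): number {
  // iris span in pixels: horizontal span on the left iris, vertical span on the right iris, as in the diff
  const span = Math.max(
    Math.abs(leftIris[3][0] - leftIris[1][0]),
    Math.abs(rightIris[4][1] - rightIris[2][1]),
  );
  const irisSize = span / inputWidth;    // relative iris size as a fraction of frame width
  if (irisSize === 0) return 0;          // no usable measurement
  // average human iris diameter is ~11.7mm; closer faces produce a larger irisSize and a smaller result
  return Math.trunc(500 / irisSize / 11.7) / 100;
}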

View File

@@ -20,7 +20,7 @@ export async function load(config) {
}
async function process(res, inputSize, outputShape, config) {
const results: Array<{ score: number, class: number, label: string, box: number[], boxRaw: number[] }> = [];
const results: Array<Item> = [];
const detections = res.arraySync();
const squeezeT = tf.squeeze(res);
res.dispose();
@@ -38,6 +38,7 @@ async function process(res, inputSize, outputShape, config) {
classesT.dispose();
const nms = nmsT.dataSync();
nmsT.dispose();
let i = 0;
for (const id of nms) {
const score = detections[0][id][4];
const classVal = detections[0][id][5];
@@ -54,7 +55,7 @@ async function process(res, inputSize, outputShape, config) {
Math.trunc(boxRaw[2] * outputShape[0]),
Math.trunc(boxRaw[3] * outputShape[1]),
];
results.push({ score, class: classVal, label, box, boxRaw });
results.push({ id: i++, score, class: classVal, label, box, boxRaw });
}
return results;
}
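
Each detection that survives non-max suppression is now tagged with a sequential id, and the results array is typed with the shared Item interface instead of an inline type. A self-contained illustration of the pattern; the Item shape below is simplified from src/result.ts and the helper is hypothetical:

// simplified Item shape (the real interface also has optional strideSize/center fields)
interface Item { id: number; score: number; class: number; label: string; box: number[]; boxRaw: number[] }

// hypothetical helper: assign ids in the order detections survive non-max suppression
function tagWithIds(detections: Array<Omit<Item, 'id'>>): Item[] {
  let i = 0;
  return detections.map((d) => ({ id: i++, ...d }));
}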

View File

@@ -23,7 +23,7 @@ export async function load(config) {
async function process(res, inputSize, outputShape, config) {
let id = 0;
let results: Array<{ score: number, strideSize: number, class: number, label: string, center: number[], centerRaw: number[], box: number[], boxRaw: number[] }> = [];
let results: Array<Item> = [];
for (const strideSize of [1, 2, 4]) { // try each stride size as it detects large/medium/small objects
// find scores, boxes, classes
tf.tidy(() => { // wrap in tidy to automatically deallocate temp tensors
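
tf.tidy in the hunk above is the standard TensorFlow.js helper that disposes any intermediate tensors created inside its callback once it returns. A minimal, self-contained illustration using the stock @tensorflow/tfjs package (the library itself imports the same API from its bundled dist/tfjs.esm.js); the values are placeholders unrelated to the detector:

import * as tf from '@tensorflow/tfjs';

// tensors allocated inside tidy() are released when the callback returns;
// only the returned tensor (or tensors preserved with tf.keep) survives
const best = tf.tidy(() => {
  const scores = tf.tensor1d([0.1, 0.9, 0.4]); // placeholder scores
  return scores.max();                         // returned tensor is kept for the caller
});
best.dispose(); // the caller is responsible for disposing what tidy() returns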

View File

@@ -1,8 +1,4 @@
/**
* Result interface definition for **Human** library
*
* Contains all possible detection results
*/
import { Tensor } from '../dist/tfjs.esm.js';
/** Face results
* Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
@@ -30,8 +26,6 @@
* - matrix: 3d transformation matrix as array of numeric values
* - tensor: face tensor as Tensor object which contains detected face
*/
import { Tensor } from '../dist/tfjs.esm.js';
export interface Face {
id: number
confidence: number,
@@ -69,7 +63,6 @@ export interface Face {
* - score: body part score value
* - presence: body part presence value
*/
export interface Body {
id: number,
score: number,
@@ -115,6 +108,7 @@ export interface Hand {
* - boxRaw as array of [x, y, width, height], normalized to range 0..1
*/
export interface Item {
id: number,
score: number,
strideSize?: number,
class: number,
@@ -138,6 +132,11 @@ export type Gesture =
| { 'body': number, gesture: string }
| { 'hand': number, gesture: string }
/**
* Result interface definition for **Human** library
*
* Contains all possible detection results
*/
export interface Result {
/** {@link Face}: detection & analysis results */
face: Array<Face>,
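
A short consumer-side sketch of where these fields surface, assuming the Result type is re-exported from the package root and a Human instance has already run detection; the log format is illustrative:

import type { Result } from '@vladmandic/human'; // type re-export assumed

// hypothetical helper; pass it the value returned by human.detect(input)
function logDetections(result: Result): void {
  for (const face of result.face) {
    // iris is 0 when the iris model is disabled or no measurement was possible
    console.log(`face ${face.id}: confidence ${face.confidence}, iris distance ${face.iris}`);
  }
  for (const obj of result.object) {
    // object detections now carry a sequential id alongside score, class and label
    console.log(`object ${obj.id}: ${obj.label} (${obj.score})`);
  }
}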

File diff suppressed because one or more lines are too long

View File

@@ -63,6 +63,38 @@
<div class="container container-main">
<div class="row">
<div class="col-8 col-content">
<section class="tsd-panel tsd-comment">
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>Face results
Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
Some values may be null if specific model is not enabled</p>
</div>
<p>Array of individual results with one object per detected face
Each result has:</p>
<ul>
<li>id: face number</li>
<li>confidence: overall detection confidence value</li>
<li>boxConfidence: face box detection confidence value</li>
<li>faceConfidence: face keypoints detection confidence value</li>
<li>box: face bounding box as array of [x, y, width, height], normalized to image resolution</li>
<li>boxRaw: face bounding box as array of [x, y, width, height], normalized to range 0..1</li>
<li>mesh: face keypoints as array of [x, y, z] points of face mesh, normalized to image resolution</li>
<li>meshRaw: face keypoints as array of [x, y, z] points of face mesh, normalized to range 0..1</li>
<li>annotations: annotated face keypoints as array of annotated face mesh points</li>
<li>age: age as value</li>
<li>gender: gender as value</li>
<li>genderConfidence: gender detection confidence as value</li>
<li>emotion: emotions as array of possible emotions with their individual scores</li>
<li>embedding: facial descriptor as array of numerical elements</li>
<li>iris: iris distance from current viewpoint as distance value</li>
<li>rotation: face rotation that contains both angles and matrix used for 3d transformations</li>
<li>angle: face angle as object with values for roll, yaw and pitch angles</li>
<li>matrix: 3d transformation matrix as array of numeric values</li>
<li>tensor: face tensor as Tensor object which contains detected face</li>
</ul>
</div>
</section>
<section class="tsd-panel tsd-hierarchy">
<h3>Hierarchy</h3>
<ul class="tsd-hierarchy">

View File

@@ -100,6 +100,7 @@
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="item.html#center" class="tsd-kind-icon">center</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="item.html#centerraw" class="tsd-kind-icon">center<wbr>Raw</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="item.html#class" class="tsd-kind-icon">class</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="item.html#id" class="tsd-kind-icon">id</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="item.html#label" class="tsd-kind-icon">label</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="item.html#score" class="tsd-kind-icon">score</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="item.html#stridesize" class="tsd-kind-icon">stride<wbr>Size</a></li>
@@ -145,6 +146,13 @@
<aside class="tsd-sources">
</aside>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="id" class="tsd-anchor"></a>
<h3>id</h3>
<div class="tsd-signature tsd-kind-icon">id<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div>
<aside class="tsd-sources">
</aside>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="label" class="tsd-anchor"></a>
<h3>label</h3>
@@ -219,6 +227,9 @@
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="item.html#class" class="tsd-kind-icon">class</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="item.html#id" class="tsd-kind-icon">id</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="item.html#label" class="tsd-kind-icon">label</a>
</li>

View File

@@ -63,6 +63,14 @@
<div class="container container-main">
<div class="row">
<div class="col-8 col-content">
<section class="tsd-panel tsd-comment">
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>Result interface definition for <strong>Human</strong> library</p>
</div>
<p>Contains all possible detection results</p>
</div>
</section>
<section class="tsd-panel tsd-hierarchy">
<h3>Hierarchy</h3>
<ul class="tsd-hierarchy">

types/result.d.ts vendored (13 changes)
View File

@@ -1,8 +1,4 @@
/**
* Result interface definition for **Human** library
*
* Contains all possible detection results
*/
import { Tensor } from '../dist/tfjs.esm.js';
/** Face results
* Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
* Some values may be null if specific model is not enabled
@@ -29,7 +25,6 @@
* - matrix: 3d transformation matrix as array of numeric values
* - tensor: face tensor as Tensor object which contains detected face
*/
import { Tensor } from '../dist/tfjs.esm.js';
export interface Face {
id: number;
confidence: number;
@@ -130,6 +125,7 @@ export interface Hand {
* - boxRaw as array of [x, y, width, height], normalized to range 0..1
*/
export interface Item {
id: number;
score: number;
strideSize?: number;
class: number;
@@ -159,6 +155,11 @@ export declare type Gesture = {
'hand': number;
gesture: string;
};
/**
* Result interface definition for **Human** library
*
* Contains all possible detection results
*/
export interface Result {
/** {@link Face}: detection & analysis results */
face: Array<Face>;
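
With id now part of the published Item declaration, downstream TypeScript can rely on it directly. A brief sketch, assuming the Result and Item types are re-exported from the package root; the helper is hypothetical:

import type { Result, Item } from '@vladmandic/human'; // type re-exports assumed

// map each detected object's new sequential id to its label for the current frame
function labelsById(result: Result): Map<number, string> {
  return new Map(result.object.map((item: Item): [number, string] => [item.id, item.label]));
}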