mirror of https://github.com/vladmandic/human
implemented unified result.persons that combines face, body and hands for each person
parent 1d6f8ddff4
commit fd2bd21301
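
The new result.persons getter joins the per-frame face, body, hand, and gesture arrays into one record per detected person. A minimal consumption sketch, modeled on the demo/node.js changes in this commit (the input variable and log formatting are illustrative, not part of the commit):

// sketch: enumerate unified persons after detection, assuming an initialized Human instance
const result = await human.detect(input);
for (const person of result.persons) {
  const faceTxt = person.face ? `face confidence:${person.face.confidence}` : 'no face';
  const bodyTxt = person.body ? `body score:${person.body.score}` : 'no body';
  const handsTxt = `left:${person.hands.left ? 'yes' : 'no'} right:${person.hands.right ? 'yes' : 'no'}`;
  console.log(`person #${person.id}: ${faceTxt} ${bodyTxt} ${handsTxt} gestures:${person.gestures.length}`);
}
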
@@ -9,8 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 
 ## Changelog
 
-### **HEAD -> main** 2021/05/23 mandic00@live.com
+### **HEAD -> main** 2021/05/24 mandic00@live.com
 
+- added experimental results interpolation for smooth draw operations
 
 ### **1.9.3** 2021/05/23 mandic00@live.com
 

@@ -10,7 +10,6 @@ let human;
 
 const userConfig = {
   warmup: 'none',
-  /*
   backend: 'webgl',
   async: false,
   cacheSensitivity: 0,
@@ -25,12 +24,11 @@ const userConfig = {
     description: { enabled: false },
     emotion: { enabled: false },
   },
-  hand: { enabled: false },
-  body: { enabled: false, modelPath: 'posenet.json' },
+  hand: { enabled: true },
+  body: { enabled: true, modelPath: 'posenet.json' },
   // body: { enabled: true, modelPath: 'blazepose.json' },
   object: { enabled: false },
   gesture: { enabled: true },
-  */
 };
 
 const drawOptions = {
@@ -229,6 +227,8 @@ async function drawResults(input) {
 
   // draw all results
   human.draw.all(canvas, result, drawOptions);
+  // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
+  const person = result.persons; // invoke person getter
   /* use individual functions
   human.draw.face(canvas, result.face);
   human.draw.body(canvas, result.body);

demo/node.js (16 changed lines)

@@ -100,7 +100,7 @@ async function detect(input) {
     for (let i = 0; i < result.face.length; i++) {
       const face = result.face[i];
       const emotion = face.emotion.reduce((prev, curr) => (prev.score > curr.score ? prev : curr));
-      log.data(` Face: #${i} boxConfidence:${face.boxConfidence} faceConfidence:${face.boxConfidence} age:${face.age} genderConfidence:${face.genderConfidence} gender:${face.gender} emotionScore:${emotion.score} emotion:${emotion.emotion} iris:${face.iris}`);
+      log.data(` Face: #${i} boxConfidence:${face.boxConfidence} faceConfidence:${face.faceConfidence} age:${face.age} genderConfidence:${face.genderConfidence} gender:${face.gender} emotionScore:${emotion.score} emotion:${emotion.emotion} iris:${face.iris}`);
     }
   } else {
     log.data(' Face: N/A');
@@ -137,6 +137,20 @@ async function detect(input) {
   } else {
     log.data(' Object: N/A');
   }
 
+  // print data to console
+  if (result) {
+    log.data('Persons:');
+    const persons = result.persons;
+    for (let i = 0; i < persons.length; i++) {
+      const face = persons[i].face;
+      const faceTxt = face ? `confidence:${face.confidence} age:${face.age} gender:${face.gender} iris:${face.iris}` : null;
+      const body = persons[i].body;
+      const bodyTxt = body ? `confidence:${body.score} landmarks:${body.keypoints?.length}` : null;
+      log.data(` #${i}: Face:${faceTxt} Body:${bodyTxt} LeftHand:${persons[i].hands.left ? 'yes' : 'no'} RightHand:${persons[i].hands.right ? 'yes' : 'no'} Gestures:${persons[i].gestures.length}`);
+    }
+  }
+
   return result;
 }

@@ -1,6 +1,6 @@
 import { TRI468 as triangulation } from '../blazeface/coords';
 import { mergeDeep } from '../helpers';
-import type { Result, Face, Body, Hand, Item, Gesture } from '../result';
+import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result';
 
 /**
  * Draw Options
@@ -68,7 +68,7 @@ export const options: DrawOptions = {
   calculateHandBox: <boolean>true,
 };
 
-let bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0 };
+let bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
 
 function point(ctx, x, y, z = 0, localOptions) {
   ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + (2 * z)}, ${127.5 - (2 * z)}, 255, 0.3)` : localOptions.color;
@@ -150,8 +150,8 @@ export async function gesture(inCanvas: HTMLCanvasElement, result: Array<Gesture>, drawOptions?: DrawOptions) {
     let what: unknown[] = []; // what&where is a record
     [where, what] = Object.entries(result[j]);
     if ((what.length > 1) && ((what[1] as string).length > 0)) {
-      const person = where[1] as number > 0 ? `#${where[1]}` : '';
-      const label = `${where[0]} ${person}: ${what[1]}`;
+      const who = where[1] as number > 0 ? `#${where[1]}` : '';
+      const label = `${where[0]} ${who}: ${what[1]}`;
       if (localOptions.shadowColor && localOptions.shadowColor !== '') {
         ctx.fillStyle = localOptions.shadowColor;
         ctx.fillText(label, 8, 2 + (i * localOptions.lineHeight));
@@ -473,6 +473,33 @@ export async function object(inCanvas: HTMLCanvasElement, result: Array<Item>, drawOptions?: DrawOptions) {
   }
 }
 
+export async function person(inCanvas: HTMLCanvasElement, result: Array<Person>, drawOptions?: DrawOptions) {
+  const localOptions = mergeDeep(options, drawOptions);
+  if (!result || !inCanvas) return;
+  if (!(inCanvas instanceof HTMLCanvasElement)) return;
+  const ctx = inCanvas.getContext('2d');
+  if (!ctx) return;
+  ctx.lineJoin = 'round';
+  ctx.font = localOptions.font;
+  for (let i = 0; i < result.length; i++) {
+    if (localOptions.drawBoxes) {
+      ctx.strokeStyle = localOptions.color;
+      ctx.fillStyle = localOptions.color;
+      rect(ctx, result[i].box[0], result[i].box[1], result[i].box[2], result[i].box[3], localOptions);
+      if (localOptions.drawLabels) {
+        const label = `person #${i}`;
+        if (localOptions.shadowColor && localOptions.shadowColor !== '') {
+          ctx.fillStyle = localOptions.shadowColor;
+          ctx.fillText(label, result[i].box[0] + 3, 1 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
+        }
+        ctx.fillStyle = localOptions.labelColor;
+        ctx.fillText(label, result[i].box[0] + 2, 0 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
+      }
+      ctx.stroke();
+    }
+  }
+}
+
 function calcBuffered(newResult, localOptions) {
   // if (newResult.timestamp !== bufferedResult?.timestamp) bufferedResult = JSON.parse(JSON.stringify(newResult)); // no need to force update
   // each record is only updated using deep copy when number of detected record changes, otherwise it will converge by itself
@@ -512,9 +539,9 @@ function calcBuffered(newResult, localOptions) {
   }
 
   // no buffering implemented for face, object, gesture
-  bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
-  bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
-  bufferedResult.gesture = JSON.parse(JSON.stringify(newResult.gesture));
+  // bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
+  // bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
+  // bufferedResult.gesture = JSON.parse(JSON.stringify(newResult.gesture));
 }
 
 export async function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasElement) {
@@ -533,9 +560,10 @@ export async function all(inCanvas: HTMLCanvasElement, result: Result, drawOptions?: DrawOptions) {
   } else {
     bufferedResult = result;
   }
-  face(inCanvas, bufferedResult.face, localOptions);
-  body(inCanvas, bufferedResult.body, localOptions);
-  hand(inCanvas, bufferedResult.hand, localOptions);
-  gesture(inCanvas, bufferedResult.gesture, localOptions);
-  object(inCanvas, bufferedResult.object, localOptions);
+  face(inCanvas, result.face, localOptions); // face does have buffering
+  body(inCanvas, bufferedResult.body, localOptions); // use interpolated results if available
+  hand(inCanvas, bufferedResult.hand, localOptions); // use interpolated results if available
+  gesture(inCanvas, result.gesture, localOptions); // gestures do not have buffering
+  // person(inCanvas, result.persons, localOptions); // use interpolated results if available
+  object(inCanvas, result.object, localOptions); // object detection does not have buffering
 }
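
The draw module above now also exports a person() routine that renders one labeled bounding box per unified person; draw.all() keeps that call commented out in this commit, so it has to be invoked explicitly. A hedged sketch (it assumes the draw exports are reachable as human.draw.*, the same way the demo already calls human.draw.all and human.draw.face; the canvas variables are illustrative):

// sketch: overlay unified per-person boxes on top of the regular draw output
const result = await human.detect(inputCanvas);
human.draw.all(outputCanvas, result); // face, body, hand, gesture and object overlays
human.draw.person(outputCanvas, result.persons); // one box labeled "person #n" per joined person
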

src/human.ts (28 changed lines)

@@ -16,6 +16,7 @@ import * as centernet from './object/centernet';
 import * as gesture from './gesture/gesture';
 import * as image from './image/image';
 import * as draw from './draw/draw';
+import * as persons from './persons';
 import * as sample from './sample';
 import * as app from '../package.json';
 import { Tensor } from './tfjs/types';
@@ -179,10 +180,10 @@ export class Human {
   /** @hidden */
   analyze = (...msg) => {
     if (!this.#analyzeMemoryLeaks) return;
-    const current = this.tf.engine().state.numTensors;
-    const previous = this.#numTensors;
-    this.#numTensors = current;
-    const leaked = current - previous;
+    const currentTensors = this.tf.engine().state.numTensors;
+    const previousTensors = this.#numTensors;
+    this.#numTensors = currentTensors;
+    const leaked = currentTensors - previousTensors;
     if (leaked !== 0) log(...msg, leaked);
   }
 
@@ -455,7 +456,7 @@ export class Human {
     let bodyRes;
     let handRes;
    let objectRes;
-    let current;
+    let elapsedTime;
 
     // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
     if (this.config.async) {
@@ -465,8 +466,8 @@
       this.state = 'run:face';
       timeStamp = now();
       faceRes = this.config.face.enabled ? await face.detectFace(this, process.tensor) : [];
-      current = Math.trunc(now() - timeStamp);
-      if (current > 0) this.perf.face = current;
+      elapsedTime = Math.trunc(now() - timeStamp);
+      if (elapsedTime > 0) this.perf.face = elapsedTime;
     }
 
     // run body: can be posenet or blazepose
@@ -480,8 +481,8 @@
       timeStamp = now();
       if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(process.tensor, this.config) : [];
       else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(process.tensor, this.config) : [];
-      current = Math.trunc(now() - timeStamp);
-      if (current > 0) this.perf.body = current;
+      elapsedTime = Math.trunc(now() - timeStamp);
+      if (elapsedTime > 0) this.perf.body = elapsedTime;
     }
     this.analyze('End Body:');
 
@@ -494,8 +495,8 @@
       this.state = 'run:hand';
       timeStamp = now();
       handRes = this.config.hand.enabled ? await handpose.predict(process.tensor, this.config) : [];
-      current = Math.trunc(now() - timeStamp);
-      if (current > 0) this.perf.hand = current;
+      elapsedTime = Math.trunc(now() - timeStamp);
+      if (elapsedTime > 0) this.perf.hand = elapsedTime;
     }
     this.analyze('End Hand:');
 
@@ -510,8 +511,8 @@
       timeStamp = now();
       if (this.config.object.modelPath.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(process.tensor, this.config) : [];
       else if (this.config.object.modelPath.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(process.tensor, this.config) : [];
-      current = Math.trunc(now() - timeStamp);
-      if (current > 0) this.perf.object = current;
+      elapsedTime = Math.trunc(now() - timeStamp);
+      if (elapsedTime > 0) this.perf.object = elapsedTime;
     }
     this.analyze('End Object:');
 
@@ -541,6 +542,7 @@
       performance: this.perf,
       canvas: process.canvas,
       timestamp: Date.now(),
+      get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes); },
     };
     // log('Result:', result);
     resolve(res);

@@ -0,0 +1,48 @@
+import { Face, Body, Hand, Gesture, Person } from './result';
+
+export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>): Array<Person> {
+  let id = 0;
+  const persons: Array<Person> = [];
+  for (const face of faces) { // person is defined primarily by face and then we append other objects as found
+    const person: Person = { id: id++, face, body: null, hands: { left: null, right: null }, gestures: [], box: [0, 0, 0, 0] };
+    for (const body of bodies) {
+      if (face.box[0] > body.box[0] // x within body
+        && face.box[0] < body.box[0] + body.box[2]
+        && face.box[1] + face.box[3] > body.box[1] // y within body
+        && face.box[1] + face.box[3] < body.box[1] + body.box[3]) {
+        person.body = body;
+      }
+    }
+    if (person.body) { // only try to join hands if body is found
+      for (const hand of hands) {
+        if (hand.box[0] + hand.box[2] > person.body.box[0] // x within body for left hand
+          && hand.box[0] + hand.box[2] < person.body.box[0] + person.body.box[2]
+          && hand.box[1] + hand.box[3] > person.body.box[1] // x within body for left hand
+          && hand.box[1] + hand.box[3] < person.body.box[1] + person.body.box[3]) {
+          if (person.hands) person.hands.left = hand;
+        }
+        if (hand.box[0] < person.body.box[0] + person.body.box[2] // x within body for right hand
+          && hand.box[0] > person.body.box[0]
+          && hand.box[1] + hand.box[3] > person.body.box[1] // x within body for right hand
+          && hand.box[1] + hand.box[3] < person.body.box[1] + person.body.box[3]) {
+          if (person.hands) person.hands.right = hand;
+        }
+      }
+    }
+    for (const gesture of gestures) { // append all gestures according to ids
+      if (gesture['face'] !== undefined && gesture['face'] === face.id) person.gestures?.push(gesture);
+      else if (gesture['iris'] !== undefined && gesture['iris'] === face.id) person.gestures?.push(gesture);
+      else if (gesture['body'] !== undefined && gesture['body'] === person.body?.id) person.gestures?.push(gesture);
+      else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.left?.id) person.gestures?.push(gesture);
+      else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.right?.id) person.gestures?.push(gesture);
+    }
+    person.box = [ // this is incorrect as should be a caclulated value
+      Math.min(person.face?.box[0] || Number.MAX_SAFE_INTEGER, person.body?.box[0] || Number.MAX_SAFE_INTEGER, person.hands?.left?.box[0] || Number.MAX_SAFE_INTEGER, person.hands?.right?.box[0] || Number.MAX_SAFE_INTEGER),
+      Math.min(person.face?.box[1] || Number.MAX_SAFE_INTEGER, person.body?.box[1] || Number.MAX_SAFE_INTEGER, person.hands?.left?.box[1] || Number.MAX_SAFE_INTEGER, person.hands?.right?.box[1] || Number.MAX_SAFE_INTEGER),
+      Math.max(person.face?.box[2] || 0, person.body?.box[2] || 0, person.hands?.left?.box[2] || 0, person.hands?.right?.box[2] || 0),
+      Math.max(person.face?.box[3] || 0, person.body?.box[3] || 0, person.hands?.left?.box[3] || 0, person.hands?.right?.box[3] || 0),
+    ];
+    persons.push(person);
+  }
+  return persons;
+}
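
The join() heuristic in the new persons module anchors each person on a detected face, attaches the body whose box contains the bottom-left corner of that face box, attaches hands whose lower corners fall inside the body box, and finally collects gestures whose face, iris, body, or hand id matches. A simplified restatement of the containment test, for illustration only (the helper below is not part of the commit):

// sketch: the box-containment test join() relies on; boxes are [x, y, width, height]
function inside(point: [number, number], box: [number, number, number, number]): boolean {
  return point[0] > box[0] && point[0] < box[0] + box[2]
      && point[1] > box[1] && point[1] < box[1] + box[3];
}
// a face is assigned to a body when the bottom-left corner of the face box lies inside the body box:
// inside([face.box[0], face.box[1] + face.box[3]], body.box)
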

@@ -4,9 +4,8 @@ import { Tensor } from '../dist/tfjs.esm.js';
  * Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
  * Some values may be null if specific model is not enabled
  *
- * Array of individual results with one object per detected face
  * Each result has:
- * - id: face number
+ * - id: face id number
  * - confidence: overal detection confidence value
  * - boxConfidence: face box detection confidence value
  * - faceConfidence: face keypoints detection confidence value
@@ -52,9 +51,8 @@ export interface Face {
 
 /** Body results
  *
- * Array of individual results with one object per detected body
  * Each results has:
- * - id:body id number
+ * - id: body id number
  * - score: overall detection score
  * - box: bounding box: x, y, width, height normalized to input image resolution
  * - boxRaw: bounding box: x, y, width, height normalized to 0..1
@@ -80,13 +78,13 @@ export interface Body {
 
 /** Hand results
  *
- * Array of individual results with one object per detected hand
  * Each result has:
- * - confidence as value
- * - box as array of [x, y, width, height], normalized to image resolution
- * - boxRaw as array of [x, y, width, height], normalized to range 0..1
- * - landmarks as array of [x, y, z] points of hand, normalized to image resolution
- * - annotations as array of annotated face landmark points
+ * - id: hand id number
+ * - confidence: detection confidence score as value
+ * - box: bounding box: x, y, width, height normalized to input image resolution
+ * - boxRaw: bounding box: x, y, width, height normalized to 0..1
+ * - landmarks: landmarks as array of [x, y, z] points of hand, normalized to image resolution
+ * - annotations: annotated landmarks for each hand part
  */
 export interface Hand {
   id: number,
@@ -101,12 +99,13 @@ export interface Hand {
  *
  * Array of individual results with one object per detected gesture
  * Each result has:
+ * - id: object id number
  * - score as value
  * - label as detected class name
- * - center as array of [x, y], normalized to image resolution
- * - centerRaw as array of [x, y], normalized to range 0..1
- * - box as array of [x, y, width, height], normalized to image resolution
- * - boxRaw as array of [x, y, width, height], normalized to range 0..1
+ * - box: bounding box: x, y, width, height normalized to input image resolution
+ * - boxRaw: bounding box: x, y, width, height normalized to 0..1
+ * - center: optional center point as array of [x, y], normalized to image resolution
+ * - centerRaw: optional center point as array of [x, y], normalized to range 0..1
  */
 export interface Item {
   id: number,
@@ -133,6 +132,27 @@ export type Gesture =
   | { 'body': number, gesture: string }
   | { 'hand': number, gesture: string }
 
+/** Person getter
+ *
+ * Each result has:
+ * - id: person id
+ * - face: face object
+ * - body: body object
+ * - hands: array of hand objects
+ * - gestures: array of gestures
+ * - box: bounding box: x, y, width, height normalized to input image resolution
+ * - boxRaw: bounding box: x, y, width, height normalized to 0..1
+ */
+export interface Person {
+  id: number,
+  face: Face,
+  body: Body | null,
+  hands: { left: Hand | null, right: Hand | null },
+  gestures: Array<Gesture>,
+  box: [number, number, number, number],
+  boxRaw?: [number, number, number, number],
+}
+
 /**
  * Result interface definition for **Human** library
  *
@@ -149,7 +169,12 @@ export interface Result {
   gesture: Array<Gesture>,
   /** {@link Object}: detection & analysis results */
   object: Array<Item>
-  performance: Record<string, unknown>,
-  canvas?: OffscreenCanvas | HTMLCanvasElement,
-  timestamp: number,
+  /** global performance object with timing values for each operation */
+  readonly performance: Record<string, unknown>,
+  /** optional processed canvas that can be used to draw input on screen */
+  readonly canvas?: OffscreenCanvas | HTMLCanvasElement,
+  /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
+  readonly timestamp: number,
+  /** getter property that returns unified persons object */
+  readonly persons: Array<Person>,
 }
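
With Person added to the exported types and persons exposed on Result, downstream TypeScript code can consume the getter with full typing. A small sketch (the relative import path matches the library-internal layout shown above; a published build would import from the package entry point instead):

// sketch: typed consumption of the unified persons array
import type { Result, Person } from './result';

function summarizePersons(result: Result): Array<string> {
  return result.persons.map((p: Person) =>
    `#${p.id} box:[${p.box.join(',')}] hands:${(p.hands.left ? 1 : 0) + (p.hands.right ? 1 : 0)} gestures:${p.gestures.length}`);
}
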

@@ -54,7 +54,8 @@ function printResults(detect) {
   const person = (detect.face && detect.face[0]) ? { confidence: detect.face[0].confidence, age: detect.face[0].age, gender: detect.face[0].gender } : {};
   const object = (detect.object && detect.object[0]) ? { score: detect.object[0].score, class: detect.object[0].label } : {};
   const body = (detect.body && detect.body[0]) ? { score: detect.body[0].score, keypoints: detect.body[0].keypoints.length } : {};
-  if (detect.face) log('data', ' result: face:', detect.face?.length, 'body:', detect.body?.length, 'hand:', detect.hand?.length, 'gesture:', detect.gesture?.length, 'object:', detect.object?.length, person, object, body);
+  const persons = detect.persons;
+  if (detect.face) log('data', ' result: face:', detect.face?.length, 'body:', detect.body?.length, 'hand:', detect.hand?.length, 'gesture:', detect.gesture?.length, 'object:', detect.object?.length, 'person:', persons.length, person, object, body);
   if (detect.performance) log('data', ' result: performance:', 'load:', detect?.performance.load, 'total:', detect.performance?.total);
 }

@@ -1,8 +1,11 @@
+const fs = require('fs');
 const path = require('path');
 const process = require('process');
 const { fork } = require('child_process');
 const log = require('@vladmandic/pilogger');
 
+let logFile = 'test.log';
+
 const tests = [
   'test-node.js',
   'test-node-gpu.js',
@@ -57,7 +60,9 @@ async function runTest(test) {
 }
 
 async function testAll() {
-  log.logFile(path.join(__dirname, 'test.log'));
+  logFile = path.join(__dirname, logFile);
+  if (fs.existsSync(logFile)) fs.unlinkSync(logFile);
+  log.logFile(logFile);
   log.header();
   process.on('unhandledRejection', (data) => log.error('nodejs unhandled rejection', data));
   process.on('uncaughtException', (data) => log.error('nodejs unhandled exception', data));

wiki (2 changed lines)

@@ -1 +1 @@
-Subproject commit d3e31ec79f0f7f9b3382576dd246cd86de22bb43
+Subproject commit e7c276c0f521c88a00601bd80c5d08be1345b6aa