mirror of https://github.com/vladmandic/human

update all box calculations
parent 70af13ce8c
commit 4ab5bbb6e6
@@ -1,4 +1,10 @@
-// @ts-nocheck
+// @ts-nocheck // typescript checks disabled as this is pure javascript
+
+/**
+ * Human demo for browsers
+ *
+ * Demo for face descriptor analysis and face similarity analysis
+ */
 
 import Human from '../dist/human.esm.js';
@@ -1,4 +1,10 @@
-// @ts-nocheck
+// @ts-nocheck // typescript checks disabled as this is pure javascript
+/**
+ * Human demo for browsers
+ *
+ * Main demo app that exposes all Human functionality
+ */
 
-import Human from '../dist/human.esm.js';
+import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human
+// import Human from '../dist/human.esm-nobundle.js'; // this requires that tf is loaded manually and bundled before human can be used

@@ -10,6 +16,7 @@ let human;
 
 const userConfig = {
   warmup: 'none',
   /*
   backend: 'webgl',
   async: false,
   cacheSensitivity: 0,

@@ -29,6 +36,7 @@ const userConfig = {
   // body: { enabled: true, modelPath: 'blazepose.json' },
   object: { enabled: false },
   gesture: { enabled: true },
   */
 };
 
 const drawOptions = {
@@ -1,4 +1,9 @@
-// @ts-nocheck
+/**
+ * Human demo for NodeJS
+ *
+ * Used by node-multiprocess.js as an on-demand started worker process
+ * Receives messages from parent process and sends results
+ */
 
 const fs = require('fs');
 const log = require('@vladmandic/pilogger');

@@ -19,16 +24,16 @@ const myConfig = {
     enabled: true,
     detector: { enabled: true, rotation: false },
     mesh: { enabled: true },
-    iris: { enabled: false },
+    iris: { enabled: true },
     description: { enabled: true },
     emotion: { enabled: true },
   },
   hand: {
-    enabled: false,
+    enabled: true,
   },
   // body: { modelPath: 'blazepose.json', enabled: true },
-  body: { enabled: false },
-  object: { enabled: false },
+  body: { enabled: true },
+  object: { enabled: true },
 };
 
 // read image from a file and create tensor to be used by human

@@ -44,8 +49,10 @@ async function image(img) {
 async function detect(img) {
   const tensor = await image(img);
   const result = await human.detect(tensor);
+  if (process.send) { // check if ipc exists
   process.send({ image: img, detected: result }); // send results back to main
   process.send({ ready: true }); // send signal back to main that this worker is now idle and ready for next image
+  }
   tensor.dispose();
 }

@@ -57,8 +64,8 @@ async function main() {
 
   // on worker start first initialize message handler so we don't miss any messages
   process.on('message', (msg) => {
-    if (msg.exit) process.exit(); // if main told worker to exit
-    if (msg.test) process.send({ test: true });
+    if (msg.exit && process.exit) process.exit(); // if main told worker to exit
+    if (msg.test && process.send) process.send({ test: true });
     if (msg.image) detect(msg.image); // if main told worker to process image
     log.data('Worker received message:', process.pid, msg); // generic log
   });

@@ -72,7 +79,7 @@ async function main() {
   await human.load();
 
   // now we're ready, so send message back to main that it knows it can use this worker
-  process.send({ ready: true });
+  if (process.send) process.send({ ready: true });
 }
 
 main();
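The process.send guards added above are needed because process.send exists only when the script is started with an IPC channel (for example via child_process.fork); run standalone, it is undefined. A minimal standalone sketch of the same guard, illustrative rather than part of the commit:

// worker.js: process.send is defined only when spawned via fork() with an IPC channel
if (process.send) process.send({ ready: true }); // safe: skipped when run standalone
else console.log('no ipc channel, running standalone');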
@@ -1,4 +1,10 @@
-// @ts-nocheck
+/**
+ * Human demo for NodeJS
+ *
+ * Uses NodeJS fork functionality with inter-process messaging
+ * Starts a pool of worker processes and dispatches work items to each worker when they are available
+ * Uses node-multiprocess-worker.js for actual processing
+ */
 
 const fs = require('fs');
 const path = require('path');

@@ -7,7 +13,7 @@ const log = require('@vladmandic/pilogger'); // this is my simple logger with fe
 const child_process = require('child_process');
 // note that main process does not import human or tfjs at all
 
-const imgPathRoot = './demo'; // modify to include your sample images
+const imgPathRoot = './assets'; // modify to include your sample images
 const numWorkers = 4; // how many workers will be started
 const workers = []; // this holds worker processes
 const images = []; // this holds queue of enumerated images

@@ -33,14 +39,14 @@ function waitCompletion() {
   if (activeWorkers > 0) setImmediate(() => waitCompletion());
   else {
     t[1] = process.hrtime.bigint();
-    log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(parseInt(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(parseInt(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(parseInt(t[1] - t[2]) / numImages / 1000000), 'ms');
+    log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(Number(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(Number(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(Number(t[1] - t[2]) / numImages / 1000000), 'ms');
   }
 }
 
 function measureLatency() {
   t[3] = process.hrtime.bigint();
-  const latencyInitialization = Math.trunc(parseInt(t[2] - t[0]) / 1000 / 1000);
-  const latencyRoundTrip = Math.trunc(parseInt(t[3] - t[2]) / 1000 / 1000);
+  const latencyInitialization = Math.trunc(Number(t[2] - t[0]) / 1000 / 1000);
+  const latencyRoundTrip = Math.trunc(Number(t[3] - t[2]) / 1000 / 1000);
   log.info('Latency: worker initialization: ', latencyInitialization, 'message round trip:', latencyRoundTrip);
 }

@@ -59,6 +65,7 @@ async function main() {
     if (imgFile.toLocaleLowerCase().endsWith('.jpg')) images.push(path.join(imgPathRoot, imgFile));
   }
   numImages = images.length;
+  log.state('Enumerated images:', imgPathRoot, numImages);
 
   t[0] = process.hrtime.bigint();
   // manage worker processes

@@ -71,7 +78,7 @@ async function main() {
     // otherwise it's an unknown message
     workers[i].on('message', (msg) => {
       if (msg.ready) detect(workers[i]);
-      else if (msg.image) log.data('Main: worker finished:', workers[i].pid, 'detected faces:', msg.detected.face?.length);
+      else if (msg.image) log.data('Main: worker finished:', workers[i].pid, 'detected faces:', msg.detected.face?.length, 'bodies:', msg.detected.body?.length, 'hands:', msg.detected.hand?.length, 'objects:', msg.detected.object?.length);
       else if (msg.test) measureLatency();
       else log.data('Main: worker message:', workers[i].pid, msg);
     });
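process.hrtime.bigint() returns nanoseconds as a BigInt, which cannot be mixed with regular numbers in arithmetic; the change above subtracts while still in BigInt and then converts once with Number(), instead of the old parseInt() call that first coerced the BigInt to a string. A minimal sketch of the pattern:

const t0 = process.hrtime.bigint(); // nanoseconds as BigInt
// ... work being timed ...
const t1 = process.hrtime.bigint();
const elapsedMs = Math.trunc(Number(t1 - t0) / 1000000); // BigInt subtraction, single conversion to number
console.log('elapsed:', elapsedMs, 'ms');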
@@ -1,10 +1,14 @@
-/*
-  Unsupported sample of using external utility ffmpeg to capture to decode video input and process it using Human
-
-  uses ffmpeg to process video input and output stream of motion jpeg images which are then parsed for frame start/end markers by pipe2jpeg
-  each frame triggers an event with jpeg buffer that then can be decoded and passed to human for processing
-  if you want process at specific intervals, set output fps to some value
-  if you want to process an input stream, set real-time flag and set input as required
-*/
+/**
+ * Human demo for NodeJS
+ * Unsupported sample of using external utility ffmpeg to capture and decode video input and process it using Human
+ *
+ * Uses ffmpeg to process video input and output a stream of motion jpeg images which are then parsed for frame start/end markers by pipe2jpeg
+ * Each frame triggers an event with a jpeg buffer that can then be decoded and passed to human for processing
+ * If you want to process at specific intervals, set output fps to some value
+ * If you want to process an input stream, set the real-time flag and set input as required
+ *
+ * Note that pipe2jpeg is not part of Human dependencies and should be installed manually
+ * Working version of ffmpeg must be present on the system
+ */
 
 const spawn = require('child_process').spawn;
@@ -1,5 +1,9 @@
-/*
-  Unsupported sample of using external utility fswebcam to capture screenshot from attached webcam in regular intervals and process it using Human
-*/
+/**
+ * Human demo for NodeJS
+ * Unsupported sample of using external utility fswebcam to capture screenshots from an attached webcam at regular intervals and process them using Human
+ *
+ * Note that node-webcam is not part of Human dependencies and should be installed manually
+ * Working version of fswebcam must be present on the system
+ */
 
 const util = require('util');
@@ -1,3 +1,7 @@
+/**
+ * Human demo for NodeJS
+ */
+
 const log = require('@vladmandic/pilogger');
 const fs = require('fs');
 const process = require('process');
@@ -61,14 +61,14 @@
   "@tensorflow/tfjs-layers": "^3.6.0",
   "@tensorflow/tfjs-node": "^3.6.1",
   "@tensorflow/tfjs-node-gpu": "^3.6.1",
-  "@types/node": "^15.6.0",
-  "@typescript-eslint/eslint-plugin": "^4.24.0",
-  "@typescript-eslint/parser": "^4.24.0",
+  "@types/node": "^15.6.1",
+  "@typescript-eslint/eslint-plugin": "^4.25.0",
+  "@typescript-eslint/parser": "^4.25.0",
   "@vladmandic/pilogger": "^0.2.17",
   "canvas": "^2.8.0",
   "chokidar": "^3.5.1",
   "dayjs": "^1.10.4",
-  "esbuild": "^0.12.1",
+  "esbuild": "^0.12.2",
   "eslint": "^7.27.0",
   "eslint-config-airbnb-base": "^14.2.1",
   "eslint-plugin-import": "^2.23.3",
@@ -1,3 +1,8 @@
+/**
+ * Module that analyzes person age
+ * Obsolete
+ */
+
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
@@ -1,3 +1,7 @@
+/**
+ * Module that implements helper draw functions, exposed as human.draw
+ */
+
 import { TRI468 as triangulation } from '../blazeface/coords';
 import { mergeDeep } from '../helpers';
 import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result';
@@ -22,7 +26,6 @@ import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result'
  * -useCurves: draw polygons as curves or as lines,
  * -bufferedOutput: experimental: allows to call draw methods multiple times for each detection and interpolate results between results thus achieving smoother animations
  * -bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.
- * -useRawBoxes: Boolean: internal: use non-normalized coordinates when performing draw methods,
  */
 export interface DrawOptions {
   color: string,
@@ -42,8 +45,6 @@ export interface DrawOptions {
   useCurves: boolean,
   bufferedOutput: boolean,
   bufferedFactor: number,
-  useRawBoxes: boolean,
-  calculateHandBox: boolean,
 }
 
 export const options: DrawOptions = {
@@ -64,8 +65,6 @@ export const options: DrawOptions = {
   useCurves: <boolean>false,
   bufferedFactor: <number>2,
   bufferedOutput: <boolean>false,
-  useRawBoxes: <boolean>false,
-  calculateHandBox: <boolean>true,
 };
 
 let bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
@@ -173,10 +172,7 @@ export async function face(inCanvas: HTMLCanvasElement, result: Array<Face>, dra
     ctx.font = localOptions.font;
     ctx.strokeStyle = localOptions.color;
     ctx.fillStyle = localOptions.color;
-    if (localOptions.drawBoxes) {
-      if (localOptions.useRawBoxes) rect(ctx, inCanvas.width * f.boxRaw[0], inCanvas.height * f.boxRaw[1], inCanvas.width * f.boxRaw[2], inCanvas.height * f.boxRaw[3], localOptions);
-      else rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);
-    }
+    if (localOptions.drawBoxes) rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);
     // silly hack since fillText does not support new line
     const labels:string[] = [];
     labels.push(`face confidence: ${Math.trunc(100 * f.confidence)}%`);
@@ -374,31 +370,14 @@ export async function hand(inCanvas: HTMLCanvasElement, result: Array<Hand>, dra
     if (localOptions.drawBoxes) {
       ctx.strokeStyle = localOptions.color;
       ctx.fillStyle = localOptions.color;
-      let box;
-      if (!localOptions.calculateHandBox) {
-        box = localOptions.useRawBoxes ? h.boxRaw : h.box;
-      } else {
-        box = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0];
-        if (h.landmarks && h.landmarks.length > 0) {
-          for (const pt of h.landmarks) {
-            if (pt[0] < box[0]) box[0] = pt[0];
-            if (pt[1] < box[1]) box[1] = pt[1];
-            if (pt[0] > box[2]) box[2] = pt[0];
-            if (pt[1] > box[3]) box[3] = pt[1];
-          }
-          box[2] -= box[0];
-          box[3] -= box[1];
-        }
-      }
-      if (localOptions.useRawBoxes) rect(ctx, inCanvas.width * box[0], inCanvas.height * box[1], inCanvas.width * box[2], inCanvas.height * box[3], localOptions);
-      else rect(ctx, box[0], box[1], box[2], box[3], localOptions);
+      rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
       if (localOptions.drawLabels) {
         if (localOptions.shadowColor && localOptions.shadowColor !== '') {
           ctx.fillStyle = localOptions.shadowColor;
-          ctx.fillText('hand', box[0] + 3, 1 + box[1] + localOptions.lineHeight, box[2]);
+          ctx.fillText('hand', h.box[0] + 3, 1 + h.box[1] + localOptions.lineHeight, h.box[2]);
         }
         ctx.fillStyle = localOptions.labelColor;
-        ctx.fillText('hand', box[0] + 2, 0 + box[1] + localOptions.lineHeight, box[2]);
+        ctx.fillText('hand', h.box[0] + 2, 0 + h.box[1] + localOptions.lineHeight, h.box[2]);
       }
       ctx.stroke();
     }
@@ -457,8 +436,7 @@ export async function object(inCanvas: HTMLCanvasElement, result: Array<Item>, d
     if (localOptions.drawBoxes) {
       ctx.strokeStyle = localOptions.color;
       ctx.fillStyle = localOptions.color;
-      if (localOptions.useRawBoxes) rect(ctx, inCanvas.width * h.boxRaw[0], inCanvas.height * h.boxRaw[1], inCanvas.width * h.boxRaw[2], inCanvas.height * h.boxRaw[3], localOptions);
-      else rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
+      rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
       if (localOptions.drawLabels) {
         const label = `${Math.round(100 * h.score)}% ${h.label}`;
         if (localOptions.shadowColor && localOptions.shadowColor !== '') {
@@ -481,6 +459,7 @@ export async function person(inCanvas: HTMLCanvasElement, result: Array<Person>,
   if (!ctx) return;
   ctx.lineJoin = 'round';
   ctx.font = localOptions.font;
 
   for (let i = 0; i < result.length; i++) {
     if (localOptions.drawBoxes) {
       ctx.strokeStyle = localOptions.color;
@@ -504,6 +483,7 @@ function calcBuffered(newResult, localOptions) {
   // if (newResult.timestamp !== bufferedResult?.timestamp) bufferedResult = JSON.parse(JSON.stringify(newResult)); // no need to force update
   // each record is only updated using deep copy when number of detected record changes, otherwise it will converge by itself
 
+  // interpolate body results
   if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
   for (let i = 0; i < newResult.body.length; i++) { // update body: box, boxRaw, keypoints
     bufferedResult.body[i].box = newResult.body[i].box
@@ -521,6 +501,7 @@ function calcBuffered(newResult, localOptions) {
     }));
   }
 
+  // interpolate hand results
   if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
   for (let i = 0; i < newResult.hand.length; i++) { // update hand: box, boxRaw, landmarks, annotations
     bufferedResult.hand[i].box = newResult.hand[i].box
@@ -538,6 +519,14 @@ function calcBuffered(newResult, localOptions) {
     }
   }
 
+  // interpolate person results
+  const newPersons = newResult.persons; // trigger getter function
+  if (!bufferedResult.persons || (newPersons.length !== bufferedResult.persons.length)) bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
+  for (let i = 0; i < newPersons.length; i++) { // update person box, we don't update the rest as it's updated as reference anyhow
+    bufferedResult.persons[i].box = newPersons[i].box
+      .map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box) / localOptions.bufferedFactor);
+  }
+
   // no buffering implemented for face, object, gesture
   // bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
   // bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
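The interpolation above is an exponential moving average: with bufferedFactor f, each pass moves the buffered value 1/f of the remaining distance toward the newest detection, so f=1 snaps immediately and f=2 halves the gap on every frame. A standalone sketch with made-up values:

const interpolate = (prev: number, curr: number, f: number): number => ((f - 1) * prev + curr) / f;

let smoothed = 100; // previously buffered box coordinate
smoothed = interpolate(smoothed, 110, 2); // 105: moves halfway toward the new value
smoothed = interpolate(smoothed, 110, 2); // 107.5: converges geometrically toward 110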
@@ -555,15 +544,12 @@ export async function all(inCanvas: HTMLCanvasElement, result: Result, drawOptio
   const localOptions = mergeDeep(options, drawOptions);
   if (!result || !inCanvas) return;
   if (!(inCanvas instanceof HTMLCanvasElement)) return;
-  if (localOptions.bufferedOutput) {
-    calcBuffered(result, localOptions);
-  } else {
-    bufferedResult = result;
-  }
+  if (localOptions.bufferedOutput) calcBuffered(result, localOptions); // do results interpolation
+  else bufferedResult = result; // just use results as-is
   face(inCanvas, result.face, localOptions); // face does not have buffering
   body(inCanvas, bufferedResult.body, localOptions); // use interpolated results if available
   hand(inCanvas, bufferedResult.hand, localOptions); // use interpolated results if available
-  // person(inCanvas, bufferedResult.persons, localOptions); // use interpolated results if available
   gesture(inCanvas, result.gesture, localOptions); // gestures do not have buffering
+  // person(inCanvas, result.persons, localOptions); // use interpolated results if available
   object(inCanvas, result.object, localOptions); // object detection does not have buffering
 }
@@ -1,3 +1,7 @@
+/**
+ * EfficientPose Module
+ */
+
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import { Body } from '../result';
@@ -1,3 +1,7 @@
+/**
+ * Emotion Module
+ */
+
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
@@ -1,3 +1,8 @@
+/**
+ * Module that analyzes person age
+ * Obsolete
+ */
+
 import { log, now } from './helpers';
 import * as facemesh from './blazeface/facemesh';
 import * as emotion from './emotion/emotion';
@@ -1,3 +1,9 @@
+/**
+ * HSE-FaceRes Module
+ * Returns Age, Gender, Descriptor
+ * Implements Face similarity function
+ */
+
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import { Tensor, GraphModel } from '../tfjs/types';
@@ -1,3 +1,8 @@
+/**
+ * Module that analyzes person gender
+ * Obsolete
+ */
+
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
@@ -1,3 +1,7 @@
+/**
+ * Gesture detection module
+ */
+
 import { Gesture } from '../result';
 
 export const body = (res): Gesture[] => {
@@ -1,3 +1,7 @@
+/**
+ * HandPose module entry point
+ */
+
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as handdetector from './handdetector';
@@ -30,19 +34,35 @@ export async function predict(input, config): Promise<Hand[]> {
         annotations[key] = meshAnnotations[key].map((index) => predictions[i].landmarks[index]);
       }
     }
-    const box: [number, number, number, number] = predictions[i].box ? [
+
+    const landmarks = predictions[i].landmarks as number[];
+
+    let box: [number, number, number, number] = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0]; // maximums so conditionals work
+    let boxRaw: [number, number, number, number] = [0, 0, 0, 0];
+    if (landmarks && landmarks.length > 0) { // if we have landmarks, calculate box based on landmarks
+      for (const pt of landmarks) {
+        if (pt[0] < box[0]) box[0] = pt[0];
+        if (pt[1] < box[1]) box[1] = pt[1];
+        if (pt[0] > box[2]) box[2] = pt[0];
+        if (pt[1] > box[3]) box[3] = pt[1];
+      }
+      box[2] -= box[0];
+      box[3] -= box[1];
+      boxRaw = [box[0] / input.shape[2], box[1] / input.shape[1], box[2] / input.shape[2], box[3] / input.shape[1]];
+    } else { // otherwise use box from prediction
+      box = predictions[i].box ? [
         Math.max(0, predictions[i].box.topLeft[0]),
         Math.max(0, predictions[i].box.topLeft[1]),
         Math.min(input.shape[2], predictions[i].box.bottomRight[0]) - Math.max(0, predictions[i].box.topLeft[0]),
         Math.min(input.shape[1], predictions[i].box.bottomRight[1]) - Math.max(0, predictions[i].box.topLeft[1]),
       ] : [0, 0, 0, 0];
-    const boxRaw: [number, number, number, number] = [
+      boxRaw = [
         (predictions[i].box.topLeft[0]) / input.shape[2],
         (predictions[i].box.topLeft[1]) / input.shape[1],
         (predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / input.shape[2],
         (predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / input.shape[1],
       ];
-    const landmarks = predictions[i].landmarks as number[];
+    }
     hands.push({ id: i, confidence: Math.round(100 * predictions[i].confidence) / 100, box, boxRaw, landmarks, annotations });
   }
   return hands;
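The new branch above derives the hand box from landmark extremes: the minimums start at MAX_SAFE_INTEGER and the maximums at 0 so every comparison wins on first touch, then the two corners are converted into [x, y, width, height]. The same calculation as a standalone sketch (names are illustrative):

type Box = [number, number, number, number]; // [x, y, width, height]

function boxFromPoints(points: Array<[number, number]>): Box {
  const box: Box = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0]; // maximums so conditionals work
  for (const [x, y] of points) {
    if (x < box[0]) box[0] = x;
    if (y < box[1]) box[1] = y;
    if (x > box[2]) box[2] = x;
    if (y > box[3]) box[3] = y;
  }
  box[2] -= box[0]; // right edge becomes width
  box[3] -= box[1]; // bottom edge becomes height
  return box;
}

console.log(boxFromPoints([[10, 20], [50, 80], [30, 40]])); // [10, 20, 40, 60]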
@@ -1,3 +1,7 @@
+/**
+ * Simple helper functions used across codebase
+ */
+
 // helper function: join two paths
 export function join(folder: string, file: string): string {
   const separator = folder.endsWith('/') ? '' : '/';
src/human.ts
@@ -1,3 +1,7 @@
+/**
+ * Human main module
+ */
+
 import { log, now, mergeDeep } from './helpers';
 import { Config, defaults } from './config';
 import { Result, Gesture } from './result';
@@ -517,10 +521,7 @@ export class Human {
       this.analyze('End Object:');
 
       // if async wait for results
-      if (this.config.async) {
-        [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
-      }
-      tf.dispose(process.tensor);
+      if (this.config.async) [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
 
       // run gesture analysis last
       let gestureRes: Gesture[] = [];
@@ -542,8 +543,12 @@ export class Human {
         performance: this.perf,
         canvas: process.canvas,
         timestamp: Date.now(),
-        get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes); },
+        get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes, process?.tensor?.shape); },
       };
+
+      // finally dispose input tensor
+      tf.dispose(process.tensor);
+
       // log('Result:', result);
       resolve(res);
     });
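res.persons is defined as a getter, so persons.join() runs only if a caller actually reads the property; the extra process?.tensor?.shape argument lets join() also compute normalized boxRaw values. A reduced sketch of the lazy-getter behavior (names and data made up):

const faceRes = [{ id: 0 }];
const bodyRes = [{ id: 0 }];
const joinResults = (faces, bodies) => {
  console.log('join evaluated'); // only logged when .persons is read
  return faces.map((face, i) => ({ face, body: bodies[i] }));
};

const res = {
  face: faceRes,
  body: bodyRes,
  get persons() { return joinResults(faceRes, bodyRes); }, // lazy: computed per access
};

console.log(res.persons.length); // 'join evaluated' prints here, not when res was created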
@@ -1,3 +1,7 @@
+/**
+ * Image Processing module used by Human
+ */
+
 import * as tf from '../../dist/tfjs.esm.js';
 import * as fxImage from './imagefx';
 import { Tensor } from '../tfjs/types';
@@ -1,7 +1,5 @@
 /*
-WebGLImageFilter - MIT Licensed
-2013, Dominic Szablewski - phoboslab.org
-<https://github.com/phoboslab/WebGLImageFilter>
+WebGLImageFilter by Dominic Szablewski: <https://github.com/phoboslab/WebGLImageFilter>
 */
 
 function GLProgram(gl, vertexSource, fragmentSource) {
@@ -1,3 +1,7 @@
+/**
+ * CenterNet object detection module
+ */
+
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import { labels } from './labels';
@@ -1,3 +1,6 @@
+/**
+ * CoCo Labels used by object detection modules
+ */
 export const labels = [
   { class: 1, label: 'person' },
   { class: 2, label: 'bicycle' },
@@ -1,3 +1,7 @@
+/**
+ * NanoDet object detection module
+ */
+
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import { labels } from './labels';
@@ -1,6 +1,10 @@
+/**
+ * Module that analyzes existing results and recombines them into a unified person object
+ */
+
 import { Face, Body, Hand, Gesture, Person } from './result';
 
-export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>): Array<Person> {
+export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>, shape: Array<number> | undefined): Array<Person> {
   let id = 0;
   const persons: Array<Person> = [];
   for (const face of faces) { // person is defined primarily by face and then we append other objects as found
@@ -36,12 +40,27 @@ export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>
       else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.left?.id) person.gestures?.push(gesture);
       else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.right?.id) person.gestures?.push(gesture);
     }
-    person.box = [ // this is incorrect as should be a caclulated value
-      Math.min(person.face?.box[0] || Number.MAX_SAFE_INTEGER, person.body?.box[0] || Number.MAX_SAFE_INTEGER, person.hands?.left?.box[0] || Number.MAX_SAFE_INTEGER, person.hands?.right?.box[0] || Number.MAX_SAFE_INTEGER),
-      Math.min(person.face?.box[1] || Number.MAX_SAFE_INTEGER, person.body?.box[1] || Number.MAX_SAFE_INTEGER, person.hands?.left?.box[1] || Number.MAX_SAFE_INTEGER, person.hands?.right?.box[1] || Number.MAX_SAFE_INTEGER),
-      Math.max(person.face?.box[2] || 0, person.body?.box[2] || 0, person.hands?.left?.box[2] || 0, person.hands?.right?.box[2] || 0),
-      Math.max(person.face?.box[3] || 0, person.body?.box[3] || 0, person.hands?.left?.box[3] || 0, person.hands?.right?.box[3] || 0),
-    ];
+
+    // create new overarching box from all boxes belonging to person
+    const x: number[] = [];
+    const y: number[] = [];
+    const extractXY = (box) => { // extract all [x, y] coordinates from boxes [x, y, width, height]
+      if (box && box.length === 4) {
+        x.push(box[0], box[0] + box[2]);
+        y.push(box[1], box[1] + box[3]);
+      }
+    };
+    extractXY(person.face?.box);
+    extractXY(person.body?.box);
+    extractXY(person.hands?.left?.box);
+    extractXY(person.hands?.right?.box);
+    const minX = Math.min(...x);
+    const minY = Math.min(...y);
+    person.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY]; // create new overarching box
+
+    // shape is known so we calculate boxRaw as well
+    if (shape && shape.length === 4) person.boxRaw = [person.box[0] / shape[2], person.box[1] / shape[1], person.box[2] / shape[2], person.box[3] / shape[1]];
+
     persons.push(person);
   }
   return persons;
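boxRaw divides by shape[2] and shape[1] because the input tensor is laid out as [batch, height, width, channels]; the result is the same box in resolution-independent 0..1 coordinates. A worked sketch with made-up values:

type PixelBox = [number, number, number, number]; // [x, y, width, height] in pixels

function normalizeBox(box: PixelBox, shape: number[]): PixelBox {
  const height = shape[1]; // tensor shape is [batch, height, width, channels]
  const width = shape[2];
  return [box[0] / width, box[1] / height, box[2] / width, box[3] / height];
}

console.log(normalizeBox([320, 180, 640, 360], [1, 720, 1280, 3])); // [0.25, 0.25, 0.5, 0.5]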
@@ -1,3 +1,7 @@
+/**
+ * Profiling calculations
+ */
+
 import { log } from './helpers';
 
 export const data = {};
@@ -1,3 +1,7 @@
+/**
+ * Type definitions for Human results
+ */
+
 import { Tensor } from '../dist/tfjs.esm.js';
 
 /** Face results

@@ -176,5 +180,5 @@ export interface Result {
   /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
   readonly timestamp: number,
   /** getter property that returns unified persons object */
-  readonly persons: Array<Person>,
+  persons: Array<Person>,
 }
@@ -1,3 +1,7 @@
+/**
+ * Embedded sample images used during warmup in dataURL format
+ */
+
 // data:image/jpeg;base64,
 export const face = `
 /9j/4AAQSkZJRgABAQEAYABgAAD/4QBoRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUA
@@ -1,3 +1,8 @@
+/**
+ * Custom TFJS backend for Human based on WebGL
+ * Not used by default
+ */
+
 import { log } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
@@ -1,4 +1,6 @@
-// wrapper to load tfjs in a single place so version can be changed quickly
+/**
+ * Creates tfjs bundle used by Human browser build target
+ */
 
 // simplified
 // { modules: 1250, moduleBytes: 4013323, imports: 7, importBytes: 2255, outputBytes: 2991826, outputFiles: 'dist/tfjs.esm.js' }
@@ -1 +1,4 @@
+/**
+ * Creates tfjs bundle used by Human node-gpu build target
+ */
 export * from '@tensorflow/tfjs-node-gpu';
@@ -1,2 +1,5 @@
+/**
+ * Creates tfjs bundle used by Human node-wasm build target
+ */
 export * from '@tensorflow/tfjs';
 export * from '@tensorflow/tfjs-backend-wasm';
@@ -1 +1,4 @@
+/**
+ * Creates tfjs bundle used by Human node build target
+ */
 export * from '@tensorflow/tfjs-node';
@@ -1,3 +1,13 @@
-// export common types
+/**
+ * Export common TensorFlow types
+ */
+
+/**
+ * TensorFlow Tensor type
+ */
 export { Tensor } from '@tensorflow/tfjs-core/dist/index';
+
+/**
+ * TensorFlow GraphModel type
+ */
 export { GraphModel } from '@tensorflow/tfjs-converter/dist/index';
@@ -158,7 +158,7 @@ async function test(Human, inputConfig) {
     testDetect(second, 'assets/human-sample-body.jpg', 'default'),
   ]);
   const t1 = process.hrtime.bigint();
-  log('info', 'test complete:', Math.trunc(parseInt((t1 - t0).toString()) / 1000 / 1000), 'ms');
+  log('info', 'test complete:', Math.trunc(Number(t1 - t0) / 1000 / 1000), 'ms');
 }
 
 exports.test = test;