update all box calculations

pull/134/head
Vladimir Mandic 2021-05-25 08:58:20 -04:00
parent e62f18e34e
commit f357901e36
80 changed files with 1148 additions and 898 deletions

View File

@@ -1,4 +1,10 @@
-// @ts-nocheck
+// @ts-nocheck // typescript checks disabled as this is pure javascript
+/**
+ * Human demo for browsers
+ *
+ * Demo for face mesh detection and projection as 3D object using Three.js
+ */
 import { DoubleSide, Mesh, MeshBasicMaterial, OrthographicCamera, Scene, sRGBEncoding, VideoTexture, WebGLRenderer, BufferGeometry, BufferAttribute } from './helpers/three.js';
 import { OrbitControls } from './helpers/three-orbitControls.js';

View File

@@ -1,4 +1,10 @@
-// @ts-nocheck
+// @ts-nocheck // typescript checks disabled as this is pure javascript
+/**
+ * Human demo for browsers
+ *
+ * Demo for face descriptor analysis and face similarity analysis
+ */
 import Human from '../dist/human.esm.js';

View File

@@ -1,4 +1,10 @@
-// @ts-nocheck
+// @ts-nocheck // typescript checks disabled as this is pure javascript
+/**
+ * Human demo for browsers
+ *
+ * Main demo app that exposes all Human functionality
+ */
 import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human
 // import Human from '../dist/human.esm-nobundle.js'; // this requires that tf is loaded manually and bundled before human can be used
@@ -10,6 +16,7 @@ let human;
 const userConfig = {
   warmup: 'none',
+  /*
   backend: 'webgl',
   async: false,
   cacheSensitivity: 0,
@@ -29,6 +36,7 @@ const userConfig = {
   // body: { enabled: true, modelPath: 'blazepose.json' },
   object: { enabled: false },
   gesture: { enabled: true },
+  */
 };
 const drawOptions = {

View File

@@ -1,4 +1,9 @@
-// @ts-nocheck
+/**
+ * Human demo for NodeJS
+ *
+ * Used by node-multiprocess.js as an on-demand started worker process
+ * Receives messages from parent process and sends results
+ */
 const fs = require('fs');
 const log = require('@vladmandic/pilogger');
@@ -19,16 +24,16 @@ const myConfig = {
     enabled: true,
     detector: { enabled: true, rotation: false },
     mesh: { enabled: true },
-    iris: { enabled: false },
+    iris: { enabled: true },
     description: { enabled: true },
     emotion: { enabled: true },
   },
   hand: {
-    enabled: false,
+    enabled: true,
   },
   // body: { modelPath: 'blazepose.json', enabled: true },
-  body: { enabled: false },
-  object: { enabled: false },
+  body: { enabled: true },
+  object: { enabled: true },
 };
 // read image from a file and create tensor to be used by faceapi
@@ -44,8 +49,10 @@ async function image(img) {
 async function detect(img) {
   const tensor = await image(img);
   const result = await human.detect(tensor);
+  if (process.send) { // check if ipc exists
   process.send({ image: img, detected: result }); // send results back to main
   process.send({ ready: true }); // send signal back to main that this worker is now idle and ready for next image
+  }
   tensor.dispose();
 }
@@ -57,8 +64,8 @@ async function main() {
   // on worker start first initialize message handler so we don't miss any messages
   process.on('message', (msg) => {
-    if (msg.exit) process.exit(); // if main told worker to exit
-    if (msg.test) process.send({ test: true });
+    if (msg.exit && process.exit) process.exit(); // if main told worker to exit
+    if (msg.test && process.send) process.send({ test: true });
     if (msg.image) detect(msg.image); // if main told worker to process image
     log.data('Worker received message:', process.pid, msg); // generic log
   });
@@ -72,7 +79,7 @@ async function main() {
   await human.load();
   // now we're ready, so send message back to main that it knows it can use this worker
-  process.send({ ready: true });
+  if (process.send) process.send({ ready: true });
 }
 main();
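The guards added above matter because `process.send` is only defined when the script is started through an IPC channel. A minimal TypeScript sketch of the same pattern; the message shapes are illustrative and not part of the demo:

```ts
import { fork } from 'child_process';

// parent side: start the worker used by this demo with an IPC channel and react to its messages
const worker = fork('./node-multiprocess-worker.js');
worker.on('message', (msg: Record<string, unknown>) => console.log('from worker:', msg));
worker.send({ test: true });

// worker side: process.send exists only when forked with IPC, hence the guard before every send
if (process.send) process.send({ ready: true });
```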

View File

@@ -1,4 +1,10 @@
-// @ts-nocheck
+/**
+ * Human demo for NodeJS
+ *
+ * Uses NodeJS fork functionality with inter-process messaging
+ * Starts a pool of worker processes and dispatches work items to each worker when they are available
+ * Uses node-multiprocess-worker.js for actual processing
+ */
 const fs = require('fs');
 const path = require('path');
@@ -7,7 +13,7 @@ const log = require('@vladmandic/pilogger'); // this is my simple logger with fe
 const child_process = require('child_process');
 // note that main process import faceapi or tfjs at all
-const imgPathRoot = './demo'; // modify to include your sample images
+const imgPathRoot = './assets'; // modify to include your sample images
 const numWorkers = 4; // how many workers will be started
 const workers = []; // this holds worker processes
 const images = []; // this holds queue of enumerated images
@@ -33,14 +39,14 @@ function waitCompletion() {
   if (activeWorkers > 0) setImmediate(() => waitCompletion());
   else {
     t[1] = process.hrtime.bigint();
-    log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(parseInt(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(parseInt(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(parseInt(t[1] - t[2]) / numImages / 1000000), 'ms');
+    log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(Number(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(Number(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(Number(t[1] - t[2]) / numImages / 1000000), 'ms');
   }
 }
 function measureLatency() {
   t[3] = process.hrtime.bigint();
-  const latencyInitialization = Math.trunc(parseInt(t[2] - t[0]) / 1000 / 1000);
-  const latencyRoundTrip = Math.trunc(parseInt(t[3] - t[2]) / 1000 / 1000);
+  const latencyInitialization = Math.trunc(Number(t[2] - t[0]) / 1000 / 1000);
+  const latencyRoundTrip = Math.trunc(Number(t[3] - t[2]) / 1000 / 1000);
   log.info('Latency: worker initializtion: ', latencyInitialization, 'message round trip:', latencyRoundTrip);
 }
@@ -59,6 +65,7 @@ async function main() {
     if (imgFile.toLocaleLowerCase().endsWith('.jpg')) images.push(path.join(imgPathRoot, imgFile));
   }
   numImages = images.length;
+  log.state('Enumerated images:', imgPathRoot, numImages);
   t[0] = process.hrtime.bigint();
   // manage worker processes
@@ -71,7 +78,7 @@ async function main() {
     // otherwise it's an unknown message
     workers[i].on('message', (msg) => {
       if (msg.ready) detect(workers[i]);
-      else if (msg.image) log.data('Main: worker finished:', workers[i].pid, 'detected faces:', msg.detected.face?.length);
+      else if (msg.image) log.data('Main: worker finished:', workers[i].pid, 'detected faces:', msg.detected.face?.length, 'bodies:', msg.detected.body?.length, 'hands:', msg.detected.hand?.length, 'objects:', msg.detected.object?.length);
       else if (msg.test) measureLatency();
       else log.data('Main: worker message:', workers[i].pid, msg);
     });
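The `parseInt` to `Number` change above is about BigInt handling: `process.hrtime.bigint()` returns nanoseconds as a BigInt, so the difference is converted explicitly before scaling to milliseconds. A minimal sketch with illustrative variable names:

```ts
const t0 = process.hrtime.bigint();                        // nanoseconds as a BigInt
// ... work being measured ...
const t1 = process.hrtime.bigint();
const elapsedMs = Math.trunc(Number(t1 - t0) / 1000000);   // BigInt difference, converted, then scaled to ms
console.log('elapsed:', elapsedMs, 'ms');
```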

View File

@@ -1,10 +1,14 @@
-/*
-  Unsupported sample of using external utility ffmpeg to capture to decode video input and process it using Human
-  uses ffmpeg to process video input and output stream of motion jpeg images which are then parsed for frame start/end markers by pipe2jpeg
-  each frame triggers an event with jpeg buffer that then can be decoded and passed to human for processing
-  if you want process at specific intervals, set output fps to some value
-  if you want to process an input stream, set real-time flag and set input as required
+/**
+ * Human demo for NodeJS
+ * Unsupported sample of using external utility ffmpeg to capture to decode video input and process it using Human
+ *
+ * Uses ffmpeg to process video input and output stream of motion jpeg images which are then parsed for frame start/end markers by pipe2jpeg
+ * Each frame triggers an event with jpeg buffer that then can be decoded and passed to human for processing
+ * If you want process at specific intervals, set output fps to some value
+ * If you want to process an input stream, set real-time flag and set input as required
+ *
+ * Note that pipe2jpeg is not part of Human dependencies and should be installed manually
+ * Working version of ffmpeg must be present on the system
 */
 const spawn = require('child_process').spawn;

View File

@@ -1,5 +1,9 @@
-/*
-  Unsupported sample of using external utility fswebcam to capture screenshot from attached webcam in regular intervals and process it using Human
+/**
+ * Human demo for NodeJS
+ * Unsupported sample of using external utility fswebcam to capture screenshot from attached webcam in regular intervals and process it using Human
+ *
+ * Note that node-webcam is not part of Human dependencies and should be installed manually
+ * Working version of fswebcam must be present on the system
 */
 const util = require('util');

View File

@ -1,3 +1,7 @@
/**
* Human demo for NodeJS
*/
const log = require('@vladmandic/pilogger'); const log = require('@vladmandic/pilogger');
const fs = require('fs'); const fs = require('fs');
const process = require('process'); const process = require('process');

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

416 dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

416 dist/human.js vendored

File diff suppressed because one or more lines are too long

112 dist/human.node-gpu.js vendored
View File

@@ -16971,19 +16971,37 @@ async function predict5(input, config3) {
         annotations3[key] = meshAnnotations[key].map((index) => predictions[i].landmarks[index]);
       }
     }
-    const box4 = predictions[i].box ? [
+    const landmarks = predictions[i].landmarks;
+    let box4 = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0];
+    let boxRaw = [0, 0, 0, 0];
+    if (landmarks && landmarks.length > 0) {
+      for (const pt of landmarks) {
+        if (pt[0] < box4[0])
+          box4[0] = pt[0];
+        if (pt[1] < box4[1])
+          box4[1] = pt[1];
+        if (pt[0] > box4[2])
+          box4[2] = pt[0];
+        if (pt[1] > box4[3])
+          box4[3] = pt[1];
+      }
+      box4[2] -= box4[0];
+      box4[3] -= box4[1];
+      boxRaw = [box4[0] / input.shape[2], box4[1] / input.shape[1], box4[2] / input.shape[2], box4[3] / input.shape[1]];
+    } else {
+      box4 = predictions[i].box ? [
       Math.max(0, predictions[i].box.topLeft[0]),
       Math.max(0, predictions[i].box.topLeft[1]),
       Math.min(input.shape[2], predictions[i].box.bottomRight[0]) - Math.max(0, predictions[i].box.topLeft[0]),
       Math.min(input.shape[1], predictions[i].box.bottomRight[1]) - Math.max(0, predictions[i].box.topLeft[1])
     ] : [0, 0, 0, 0];
-    const boxRaw = [
+      boxRaw = [
       predictions[i].box.topLeft[0] / input.shape[2],
       predictions[i].box.topLeft[1] / input.shape[1],
       (predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / input.shape[2],
       (predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / input.shape[1]
     ];
-    const landmarks = predictions[i].landmarks;
+    }
     hands.push({ id: i, confidence: Math.round(100 * predictions[i].confidence) / 100, box: box4, boxRaw, landmarks, annotations: annotations3 });
   }
   return hands;
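The hunk above derives the hand box from the landmark points when they are available, falling back to the detector box otherwise. A minimal sketch of that calculation with illustrative names; `input.shape` is assumed to be the usual `[batch, height, width, channels]` tensor shape, so width and height are passed in explicitly here:

```ts
type Point = [number, number, number?];
type Box = [number, number, number, number]; // [x, y, width, height]

function boxFromLandmarks(landmarks: Point[], inputWidth: number, inputHeight: number): { box: Box, boxRaw: Box } {
  // start with an inverted box and widen it to the min/max envelope of all landmark points
  const box: Box = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0];
  for (const pt of landmarks) {
    if (pt[0] < box[0]) box[0] = pt[0];
    if (pt[1] < box[1]) box[1] = pt[1];
    if (pt[0] > box[2]) box[2] = pt[0];
    if (pt[1] > box[3]) box[3] = pt[1];
  }
  box[2] -= box[0]; // right edge becomes width
  box[3] -= box[1]; // bottom edge becomes height
  // boxRaw is the same box normalized to 0..1 by the input dimensions
  const boxRaw: Box = [box[0] / inputWidth, box[1] / inputHeight, box[2] / inputWidth, box[3] / inputHeight];
  return { box, boxRaw };
}
```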
@@ -18422,9 +18440,7 @@ var options = {
   useDepth: true,
   useCurves: false,
   bufferedFactor: 2,
-  bufferedOutput: false,
-  useRawBoxes: false,
-  calculateHandBox: true
+  bufferedOutput: false
 };
 var bufferedResult = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
 function point(ctx, x, y, z = 0, localOptions) {
@@ -18533,12 +18549,8 @@ async function face2(inCanvas2, result, drawOptions) {
     ctx.font = localOptions.font;
     ctx.strokeStyle = localOptions.color;
     ctx.fillStyle = localOptions.color;
-    if (localOptions.drawBoxes) {
-      if (localOptions.useRawBoxes)
-        rect(ctx, inCanvas2.width * f.boxRaw[0], inCanvas2.height * f.boxRaw[1], inCanvas2.width * f.boxRaw[2], inCanvas2.height * f.boxRaw[3], localOptions);
-      else
+    if (localOptions.drawBoxes)
       rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);
-    }
     const labels2 = [];
     labels2.push(`face confidence: ${Math.trunc(100 * f.confidence)}%`);
     if (f.genderConfidence)
@@ -18758,37 +18770,14 @@ async function hand2(inCanvas2, result, drawOptions) {
     if (localOptions.drawBoxes) {
       ctx.strokeStyle = localOptions.color;
       ctx.fillStyle = localOptions.color;
-      let box4;
-      if (!localOptions.calculateHandBox) {
-        box4 = localOptions.useRawBoxes ? h.boxRaw : h.box;
-      } else {
-        box4 = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0];
-        if (h.landmarks && h.landmarks.length > 0) {
-          for (const pt of h.landmarks) {
-            if (pt[0] < box4[0])
-              box4[0] = pt[0];
-            if (pt[1] < box4[1])
-              box4[1] = pt[1];
-            if (pt[0] > box4[2])
-              box4[2] = pt[0];
-            if (pt[1] > box4[3])
-              box4[3] = pt[1];
-          }
-          box4[2] -= box4[0];
-          box4[3] -= box4[1];
-        }
-      }
-      if (localOptions.useRawBoxes)
-        rect(ctx, inCanvas2.width * box4[0], inCanvas2.height * box4[1], inCanvas2.width * box4[2], inCanvas2.height * box4[3], localOptions);
-      else
-        rect(ctx, box4[0], box4[1], box4[2], box4[3], localOptions);
+      rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
       if (localOptions.drawLabels) {
         if (localOptions.shadowColor && localOptions.shadowColor !== "") {
           ctx.fillStyle = localOptions.shadowColor;
-          ctx.fillText("hand", box4[0] + 3, 1 + box4[1] + localOptions.lineHeight, box4[2]);
+          ctx.fillText("hand", h.box[0] + 3, 1 + h.box[1] + localOptions.lineHeight, h.box[2]);
         }
         ctx.fillStyle = localOptions.labelColor;
-        ctx.fillText("hand", box4[0] + 2, 0 + box4[1] + localOptions.lineHeight, box4[2]);
+        ctx.fillText("hand", h.box[0] + 2, 0 + h.box[1] + localOptions.lineHeight, h.box[2]);
       }
       ctx.stroke();
     }
@@ -18849,9 +18838,6 @@ async function object(inCanvas2, result, drawOptions) {
     if (localOptions.drawBoxes) {
       ctx.strokeStyle = localOptions.color;
       ctx.fillStyle = localOptions.color;
-      if (localOptions.useRawBoxes)
-        rect(ctx, inCanvas2.width * h.boxRaw[0], inCanvas2.height * h.boxRaw[1], inCanvas2.width * h.boxRaw[2], inCanvas2.height * h.boxRaw[3], localOptions);
-      else
       rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
       if (localOptions.drawLabels) {
         const label = `${Math.round(100 * h.score)}% ${h.label}`;
@@ -18921,6 +18907,12 @@ function calcBuffered(newResult, localOptions) {
       bufferedResult.hand[i].annotations[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
     }
   }
+  const newPersons = newResult.persons;
+  if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length)
+    bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
+  for (let i = 0; i < newPersons.length; i++) {
+    bufferedResult.persons[i].box = newPersons[i].box.map((box4, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box4) / localOptions.bufferedFactor);
+  }
 }
 async function canvas(inCanvas2, outCanvas2) {
   if (!inCanvas2 || !outCanvas2)
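The `calcBuffered` addition above applies the same smoothing to person boxes that is already used for hand and body coordinates: each buffered value moves toward the newest result at a rate set by `bufferedFactor`. A standalone sketch of that interpolation, with illustrative names:

```ts
type Box = [number, number, number, number];

function interpolateBox(previous: Box, next: Box, bufferedFactor = 2): Box {
  // with bufferedFactor = 2 each call moves halfway toward the new value: (1 * previous + next) / 2
  return previous.map((v, i) => ((bufferedFactor - 1) * v + next[i]) / bufferedFactor) as Box;
}

let smoothed: Box = [100, 100, 50, 50];
smoothed = interpolateBox(smoothed, [120, 110, 60, 50]); // -> [110, 105, 55, 50]
```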
@@ -18936,11 +18928,10 @@ async function all(inCanvas2, result, drawOptions) {
     return;
   if (!(inCanvas2 instanceof HTMLCanvasElement))
     return;
-  if (localOptions.bufferedOutput) {
+  if (localOptions.bufferedOutput)
     calcBuffered(result, localOptions);
-  } else {
+  else
     bufferedResult = result;
-  }
   face2(inCanvas2, result.face, localOptions);
   body2(inCanvas2, bufferedResult.body, localOptions);
   hand2(inCanvas2, bufferedResult.hand, localOptions);
@@ -18949,8 +18940,8 @@ async function all(inCanvas2, result, drawOptions) {
 }
 // src/persons.ts
-function join2(faces, bodies, hands, gestures) {
-  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E, _F, _G, _H;
+function join2(faces, bodies, hands, gestures, shape) {
+  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
   let id = 0;
   const persons2 = [];
   for (const face5 of faces) {
@@ -18984,12 +18975,23 @@ function join2(faces, bodies, hands, gestures) {
       else if (gesture3["hand"] !== void 0 && gesture3["hand"] === ((_i = (_h = person2.hands) == null ? void 0 : _h.right) == null ? void 0 : _i.id))
         (_j = person2.gestures) == null ? void 0 : _j.push(gesture3);
     }
-    person2.box = [
-      Math.min(((_k = person2.face) == null ? void 0 : _k.box[0]) || Number.MAX_SAFE_INTEGER, ((_l = person2.body) == null ? void 0 : _l.box[0]) || Number.MAX_SAFE_INTEGER, ((_n = (_m = person2.hands) == null ? void 0 : _m.left) == null ? void 0 : _n.box[0]) || Number.MAX_SAFE_INTEGER, ((_p = (_o = person2.hands) == null ? void 0 : _o.right) == null ? void 0 : _p.box[0]) || Number.MAX_SAFE_INTEGER),
-      Math.min(((_q = person2.face) == null ? void 0 : _q.box[1]) || Number.MAX_SAFE_INTEGER, ((_r = person2.body) == null ? void 0 : _r.box[1]) || Number.MAX_SAFE_INTEGER, ((_t = (_s = person2.hands) == null ? void 0 : _s.left) == null ? void 0 : _t.box[1]) || Number.MAX_SAFE_INTEGER, ((_v = (_u = person2.hands) == null ? void 0 : _u.right) == null ? void 0 : _v.box[1]) || Number.MAX_SAFE_INTEGER),
-      Math.max(((_w = person2.face) == null ? void 0 : _w.box[2]) || 0, ((_x = person2.body) == null ? void 0 : _x.box[2]) || 0, ((_z = (_y = person2.hands) == null ? void 0 : _y.left) == null ? void 0 : _z.box[2]) || 0, ((_B = (_A = person2.hands) == null ? void 0 : _A.right) == null ? void 0 : _B.box[2]) || 0),
-      Math.max(((_C = person2.face) == null ? void 0 : _C.box[3]) || 0, ((_D = person2.body) == null ? void 0 : _D.box[3]) || 0, ((_F = (_E = person2.hands) == null ? void 0 : _E.left) == null ? void 0 : _F.box[3]) || 0, ((_H = (_G = person2.hands) == null ? void 0 : _G.right) == null ? void 0 : _H.box[3]) || 0)
-    ];
+    const x = [];
+    const y = [];
+    const extractXY = (box4) => {
+      if (box4 && box4.length === 4) {
+        x.push(box4[0], box4[0] + box4[2]);
+        y.push(box4[1], box4[1] + box4[3]);
+      }
+    };
+    extractXY((_k = person2.face) == null ? void 0 : _k.box);
+    extractXY((_l = person2.body) == null ? void 0 : _l.box);
+    extractXY((_n = (_m = person2.hands) == null ? void 0 : _m.left) == null ? void 0 : _n.box);
+    extractXY((_p = (_o = person2.hands) == null ? void 0 : _o.right) == null ? void 0 : _p.box);
+    const minX = Math.min(...x);
+    const minY = Math.min(...y);
+    person2.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
+    if (shape && shape.length === 4)
+      person2.boxRaw = [person2.box[0] / shape[2], person2.box[1] / shape[1], person2.box[2] / shape[2], person2.box[3] / shape[1]];
    persons2.push(person2);
  }
  return persons2;
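The reworked `join2` above builds the person box as the envelope of whatever component boxes are present, instead of taking min/max over possibly missing values. A readable sketch of the same idea; the types and helper names are illustrative, not the library API:

```ts
type Box = [number, number, number, number]; // [x, y, width, height]

function personBox(componentBoxes: Array<Box | undefined>, shape?: number[]): { box: Box, boxRaw?: Box } {
  const x: number[] = [];
  const y: number[] = [];
  for (const b of componentBoxes) {
    if (!b || b.length !== 4) continue; // skip missing face/body/hand boxes
    x.push(b[0], b[0] + b[2]);          // left and right edges
    y.push(b[1], b[1] + b[3]);          // top and bottom edges
  }
  const minX = Math.min(...x);
  const minY = Math.min(...y);
  const box: Box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
  // shape is assumed to be the input tensor shape [batch, height, width, channels]
  const boxRaw: Box | undefined = (shape && shape.length === 4)
    ? [box[0] / shape[2], box[1] / shape[1], box[2] / shape[2], box[3] / shape[1]]
    : undefined;
  return { box, boxRaw };
}
```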
@@ -20123,10 +20125,8 @@ var Human = class {
         this.perf.object = elapsedTime;
       }
       this.analyze("End Object:");
-      if (this.config.async) {
+      if (this.config.async)
         [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
-      }
-      tf17.dispose(process5.tensor);
       let gestureRes = [];
       if (this.config.gesture.enabled) {
         timeStamp = now();
@@ -20148,9 +20148,11 @@ var Human = class {
         canvas: process5.canvas,
         timestamp: Date.now(),
         get persons() {
-          return join2(faceRes, bodyRes, handRes, gestureRes);
+          var _a;
+          return join2(faceRes, bodyRes, handRes, gestureRes, (_a = process5 == null ? void 0 : process5.tensor) == null ? void 0 : _a.shape);
         }
       };
+      tf17.dispose(process5.tensor);
       resolve(res);
     });
   }
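Two effects of the change above: `result.persons` is now computed lazily from the individual results plus the input tensor shape, and the tensor is disposed only after the result object is assembled. A hypothetical usage sketch of what a consumer now gets; the wrapper function and its parameters are illustrative:

```ts
async function logPersons(human: any, input: HTMLCanvasElement) { // illustrative wrapper, not library code
  const result = await human.detect(input);
  for (const person of result.persons) {
    console.log(person.box);    // [x, y, width, height] in input pixels
    console.log(person.boxRaw); // same box normalized to 0..1 by input width and height
  }
}
```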


112 dist/human.node.js vendored

File diff suppressed because one or more lines are too long

View File

@@ -61,14 +61,14 @@
     "@tensorflow/tfjs-layers": "^3.6.0",
     "@tensorflow/tfjs-node": "^3.6.1",
     "@tensorflow/tfjs-node-gpu": "^3.6.1",
-    "@types/node": "^15.6.0",
-    "@typescript-eslint/eslint-plugin": "^4.24.0",
-    "@typescript-eslint/parser": "^4.24.0",
+    "@types/node": "^15.6.1",
+    "@typescript-eslint/eslint-plugin": "^4.25.0",
+    "@typescript-eslint/parser": "^4.25.0",
     "@vladmandic/pilogger": "^0.2.17",
     "canvas": "^2.8.0",
     "chokidar": "^3.5.1",
     "dayjs": "^1.10.4",
-    "esbuild": "^0.12.1",
+    "esbuild": "^0.12.2",
     "eslint": "^7.27.0",
     "eslint-config-airbnb-base": "^14.2.1",
     "eslint-plugin-import": "^2.23.3",

View File

@@ -1,3 +1,8 @@
+/**
+ * Implements Human build process
+ * Used to generate prod builds for releases or by dev server to generate on-the-fly debug builds
+ */
 const ts = require('typescript');
 const fs = require('fs');
 const path = require('path');

View File

@@ -1,17 +1,17 @@
-2021-05-24 11:07:05 INFO:  @vladmandic/human version 1.9.3
-2021-05-24 11:07:05 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
-2021-05-24 11:07:05 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
-2021-05-24 11:07:05 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-24 11:07:05 STATE: Build for: node type: node: {"imports":37,"importBytes":429806,"outputBytes":386407,"outputFiles":"dist/human.node.js"}
-2021-05-24 11:07:05 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-24 11:07:05 STATE: Build for: nodeGPU type: node: {"imports":37,"importBytes":429814,"outputBytes":386411,"outputFiles":"dist/human.node-gpu.js"}
-2021-05-24 11:07:05 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-24 11:07:05 STATE: Build for: nodeWASM type: node: {"imports":37,"importBytes":429881,"outputBytes":386483,"outputFiles":"dist/human.node-wasm.js"}
-2021-05-24 11:07:05 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-24 11:07:05 STATE: Build for: browserNoBundle type: esm: {"imports":37,"importBytes":429908,"outputBytes":236959,"outputFiles":"dist/human.esm-nobundle.js"}
-2021-05-24 11:07:06 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-24 11:07:06 STATE: Build for: browserBundle type: iife: {"imports":37,"importBytes":1539928,"outputBytes":1344893,"outputFiles":"dist/human.js"}
-2021-05-24 11:07:07 STATE: Build for: browserBundle type: esm: {"imports":37,"importBytes":1539928,"outputBytes":1344885,"outputFiles":"dist/human.esm.js"}
-2021-05-24 11:07:07 INFO:  Generate types: ["src/human.ts"]
-2021-05-24 11:07:12 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
-2021-05-24 11:07:12 INFO:  Generate TypeDocs: ["src/human.ts"]
+2021-05-25 08:45:35 INFO:  @vladmandic/human version 1.9.3
+2021-05-25 08:45:35 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
+2021-05-25 08:45:35 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
+2021-05-25 08:45:35 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-25 08:45:35 STATE: Build for: node type: node: {"imports":37,"importBytes":431051,"outputBytes":385549,"outputFiles":"dist/human.node.js"}
+2021-05-25 08:45:35 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-25 08:45:35 STATE: Build for: nodeGPU type: node: {"imports":37,"importBytes":431059,"outputBytes":385553,"outputFiles":"dist/human.node-gpu.js"}
+2021-05-25 08:45:35 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-25 08:45:35 STATE: Build for: nodeWASM type: node: {"imports":37,"importBytes":431126,"outputBytes":385625,"outputFiles":"dist/human.node-wasm.js"}
+2021-05-25 08:45:35 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-25 08:45:35 STATE: Build for: browserNoBundle type: esm: {"imports":37,"importBytes":431153,"outputBytes":236268,"outputFiles":"dist/human.esm-nobundle.js"}
+2021-05-25 08:45:36 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2478,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-25 08:45:36 STATE: Build for: browserBundle type: iife: {"imports":37,"importBytes":1541173,"outputBytes":1344154,"outputFiles":"dist/human.js"}
+2021-05-25 08:45:36 STATE: Build for: browserBundle type: esm: {"imports":37,"importBytes":1541173,"outputBytes":1344146,"outputFiles":"dist/human.esm.js"}
+2021-05-25 08:45:36 INFO:  Generate types: ["src/human.ts"]
+2021-05-25 08:45:41 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
+2021-05-25 08:45:41 INFO:  Generate TypeDocs: ["src/human.ts"]

View File

@@ -1,3 +1,7 @@
+/**
+ * Creates changelog in markdown format from git log as part of the build process
+ */
 const fs = require('fs');
 const path = require('path');
 const dayjs = require('dayjs');

View File

@@ -1,5 +1,5 @@
-/*
-  micro http2 server with file monitoring and automatic app rebuild
+/**
+  Micro http/http2 server with file monitoring and automatic app rebuild
   - can process concurrent http requests
   - monitors specified filed and folders for changes
   - triggers library and application rebuild

View File

@@ -1,5 +1,9 @@
 #!/usr/bin/env -S node --no-deprecation --trace-warnings
+/**
+ * Helper app that analyzes any TensorFlow SavedModel or GraphModel for inputs and outputs
+ */
 const fs = require('fs');
 const path = require('path');
 const log = require('@vladmandic/pilogger');

View File

@@ -1,3 +1,8 @@
+/**
+ * Module that analyzes person age
+ * Obsolete
+ */
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';

View File

@ -1,3 +1,7 @@
/**
* FaceMesh & BlazeFace Module entry point
*/
import { log, join } from '../helpers'; import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';
import * as blazeface from './blazeface'; import * as blazeface from './blazeface';

View File

@ -1,3 +1,7 @@
/**
* BlazePose Module
*/
// paper: https://ai.googleblog.com/2020/08/on-device-real-time-body-pose-tracking.html // paper: https://ai.googleblog.com/2020/08/on-device-real-time-body-pose-tracking.html
import { log, join } from '../helpers'; import { log, join } from '../helpers';

View File

@ -1,3 +1,7 @@
/**
* Module that implements helper draw functions, exposed as human.draw
*/
import { TRI468 as triangulation } from '../blazeface/coords'; import { TRI468 as triangulation } from '../blazeface/coords';
import { mergeDeep } from '../helpers'; import { mergeDeep } from '../helpers';
import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result'; import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result';
@ -22,7 +26,6 @@ import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result'
* -useCurves: draw polygons as curves or as lines, * -useCurves: draw polygons as curves or as lines,
* -bufferedOutput: experimental: allows draw methods to be called multiple times for each detection, interpolating between results to achieve smoother animations * -bufferedOutput: experimental: allows draw methods to be called multiple times for each detection, interpolating between results to achieve smoother animations
* -bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc. * -bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.
* -useRawBoxes: Boolean: internal: use non-normalized coordinates when performing draw methods,
*/ */
export interface DrawOptions { export interface DrawOptions {
color: string, color: string,
@ -42,8 +45,6 @@ export interface DrawOptions {
useCurves: boolean, useCurves: boolean,
bufferedOutput: boolean, bufferedOutput: boolean,
bufferedFactor: number, bufferedFactor: number,
useRawBoxes: boolean,
calculateHandBox: boolean,
} }
export const options: DrawOptions = { export const options: DrawOptions = {
@ -64,8 +65,6 @@ export const options: DrawOptions = {
useCurves: <boolean>false, useCurves: <boolean>false,
bufferedFactor: <number>2, bufferedFactor: <number>2,
bufferedOutput: <boolean>false, bufferedOutput: <boolean>false,
useRawBoxes: <boolean>false,
calculateHandBox: <boolean>true,
}; };
let bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 }; let bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
@ -173,10 +172,7 @@ export async function face(inCanvas: HTMLCanvasElement, result: Array<Face>, dra
ctx.font = localOptions.font; ctx.font = localOptions.font;
ctx.strokeStyle = localOptions.color; ctx.strokeStyle = localOptions.color;
ctx.fillStyle = localOptions.color; ctx.fillStyle = localOptions.color;
if (localOptions.drawBoxes) { if (localOptions.drawBoxes) rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);
if (localOptions.useRawBoxes) rect(ctx, inCanvas.width * f.boxRaw[0], inCanvas.height * f.boxRaw[1], inCanvas.width * f.boxRaw[2], inCanvas.height * f.boxRaw[3], localOptions);
else rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);
}
// silly hack since fillText does not support newlines // silly hack since fillText does not support newlines
const labels:string[] = []; const labels:string[] = [];
labels.push(`face confidence: ${Math.trunc(100 * f.confidence)}%`); labels.push(`face confidence: ${Math.trunc(100 * f.confidence)}%`);
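Since the useRawBoxes branch is gone from all draw methods, boxes are drawn directly from the pixel-space box values; the normalized boxRaw values remain available if a caller wants to scale them itself. A minimal sketch of that relationship (the rect helper and option names are taken from the surrounding code, the scaleBox name is hypothetical):

// sketch: scale a normalized boxRaw [x, y, width, height] (values in 0..1) to canvas pixels
function scaleBox(boxRaw: [number, number, number, number], canvas: HTMLCanvasElement): [number, number, number, number] {
  return [
    boxRaw[0] * canvas.width,   // x
    boxRaw[1] * canvas.height,  // y
    boxRaw[2] * canvas.width,   // width
    boxRaw[3] * canvas.height,  // height
  ];
}
// usage: rect(ctx, ...scaleBox(f.boxRaw, inCanvas), localOptions) would reproduce the removed
// useRawBoxes path, while the updated code simply draws the precomputed f.box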
@ -374,31 +370,14 @@ export async function hand(inCanvas: HTMLCanvasElement, result: Array<Hand>, dra
if (localOptions.drawBoxes) { if (localOptions.drawBoxes) {
ctx.strokeStyle = localOptions.color; ctx.strokeStyle = localOptions.color;
ctx.fillStyle = localOptions.color; ctx.fillStyle = localOptions.color;
let box; rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
if (!localOptions.calculateHandBox) {
box = localOptions.useRawBoxes ? h.boxRaw : h.box;
} else {
box = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0];
if (h.landmarks && h.landmarks.length > 0) {
for (const pt of h.landmarks) {
if (pt[0] < box[0]) box[0] = pt[0];
if (pt[1] < box[1]) box[1] = pt[1];
if (pt[0] > box[2]) box[2] = pt[0];
if (pt[1] > box[3]) box[3] = pt[1];
}
box[2] -= box[0];
box[3] -= box[1];
}
}
if (localOptions.useRawBoxes) rect(ctx, inCanvas.width * box[0], inCanvas.height * box[1], inCanvas.width * box[2], inCanvas.height * box[3], localOptions);
else rect(ctx, box[0], box[1], box[2], box[3], localOptions);
if (localOptions.drawLabels) { if (localOptions.drawLabels) {
if (localOptions.shadowColor && localOptions.shadowColor !== '') { if (localOptions.shadowColor && localOptions.shadowColor !== '') {
ctx.fillStyle = localOptions.shadowColor; ctx.fillStyle = localOptions.shadowColor;
ctx.fillText('hand', box[0] + 3, 1 + box[1] + localOptions.lineHeight, box[2]); ctx.fillText('hand', h.box[0] + 3, 1 + h.box[1] + localOptions.lineHeight, h.box[2]);
} }
ctx.fillStyle = localOptions.labelColor; ctx.fillStyle = localOptions.labelColor;
ctx.fillText('hand', box[0] + 2, 0 + box[1] + localOptions.lineHeight, box[2]); ctx.fillText('hand', h.box[0] + 2, 0 + h.box[1] + localOptions.lineHeight, h.box[2]);
} }
ctx.stroke(); ctx.stroke();
} }
@ -457,8 +436,7 @@ export async function object(inCanvas: HTMLCanvasElement, result: Array<Item>, d
if (localOptions.drawBoxes) { if (localOptions.drawBoxes) {
ctx.strokeStyle = localOptions.color; ctx.strokeStyle = localOptions.color;
ctx.fillStyle = localOptions.color; ctx.fillStyle = localOptions.color;
if (localOptions.useRawBoxes) rect(ctx, inCanvas.width * h.boxRaw[0], inCanvas.height * h.boxRaw[1], inCanvas.width * h.boxRaw[2], inCanvas.height * h.boxRaw[3], localOptions); rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
else rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
if (localOptions.drawLabels) { if (localOptions.drawLabels) {
const label = `${Math.round(100 * h.score)}% ${h.label}`; const label = `${Math.round(100 * h.score)}% ${h.label}`;
if (localOptions.shadowColor && localOptions.shadowColor !== '') { if (localOptions.shadowColor && localOptions.shadowColor !== '') {
@ -481,6 +459,7 @@ export async function person(inCanvas: HTMLCanvasElement, result: Array<Person>,
if (!ctx) return; if (!ctx) return;
ctx.lineJoin = 'round'; ctx.lineJoin = 'round';
ctx.font = localOptions.font; ctx.font = localOptions.font;
for (let i = 0; i < result.length; i++) { for (let i = 0; i < result.length; i++) {
if (localOptions.drawBoxes) { if (localOptions.drawBoxes) {
ctx.strokeStyle = localOptions.color; ctx.strokeStyle = localOptions.color;
@ -504,6 +483,7 @@ function calcBuffered(newResult, localOptions) {
// if (newResult.timestamp !== bufferedResult?.timestamp) bufferedResult = JSON.parse(JSON.stringify(newResult)); // no need to force update // if (newResult.timestamp !== bufferedResult?.timestamp) bufferedResult = JSON.parse(JSON.stringify(newResult)); // no need to force update
// each record is only updated using deep copy when number of detected record changes, otherwise it will converge by itself // each record is only updated using deep copy when number of detected record changes, otherwise it will converge by itself
// interpolate body results
if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) bufferedResult.body = JSON.parse(JSON.stringify(newResult.body)); if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
for (let i = 0; i < newResult.body.length; i++) { // update body: box, boxRaw, keypoints for (let i = 0; i < newResult.body.length; i++) { // update body: box, boxRaw, keypoints
bufferedResult.body[i].box = newResult.body[i].box bufferedResult.body[i].box = newResult.body[i].box
@ -521,6 +501,7 @@ function calcBuffered(newResult, localOptions) {
})); }));
} }
// interpolate hand results
if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand)); if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
for (let i = 0; i < newResult.hand.length; i++) { // update body: box, boxRaw, landmarks, annotations for (let i = 0; i < newResult.hand.length; i++) { // update body: box, boxRaw, landmarks, annotations
bufferedResult.hand[i].box = newResult.hand[i].box bufferedResult.hand[i].box = newResult.hand[i].box
@ -538,6 +519,14 @@ function calcBuffered(newResult, localOptions) {
} }
} }
// interpolate person results
const newPersons = newResult.persons; // trigger getter function
if (!bufferedResult.persons || (newPersons.length !== bufferedResult.persons.length)) bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
for (let i = 0; i < newPersons.length; i++) { // update person box; we don't update the rest since it's updated by reference anyhow
bufferedResult.persons[i].box = newPersons[i].box
.map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box) / localOptions.bufferedFactor);
}
// no buffering implemented for face, object, gesture // no buffering implemented for face, object, gesture
// bufferedResult.face = JSON.parse(JSON.stringify(newResult.face)); // bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
// bufferedResult.object = JSON.parse(JSON.stringify(newResult.object)); // bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
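The interpolation rule used throughout calcBuffered is a simple exponential moving average: each buffered value moves toward the new value by 1/bufferedFactor per call, so a factor of 1 jumps immediately and a factor of 2 moves halfway each time. A standalone sketch of that rule as it is applied to box, boxRaw and keypoint values (the interpolate name is illustrative):

// sketch: one interpolation step matching the formula used in calcBuffered above
function interpolate(previous: number[], next: number[], bufferedFactor: number): number[] {
  return next.map((value, i) => ((bufferedFactor - 1) * previous[i] + value) / bufferedFactor);
}

// usage: bufferedBox = interpolate(bufferedBox, newBox, 2); // converges toward newBox over repeated draw calls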
@ -555,15 +544,12 @@ export async function all(inCanvas: HTMLCanvasElement, result: Result, drawOptio
const localOptions = mergeDeep(options, drawOptions); const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas) return; if (!result || !inCanvas) return;
if (!(inCanvas instanceof HTMLCanvasElement)) return; if (!(inCanvas instanceof HTMLCanvasElement)) return;
if (localOptions.bufferedOutput) { if (localOptions.bufferedOutput) calcBuffered(result, localOptions); // do results interpolation
calcBuffered(result, localOptions); else bufferedResult = result; // just use results as-is
} else {
bufferedResult = result;
}
face(inCanvas, result.face, localOptions); // face does have buffering face(inCanvas, result.face, localOptions); // face does have buffering
body(inCanvas, bufferedResult.body, localOptions); // use interpolated results if available body(inCanvas, bufferedResult.body, localOptions); // use interpolated results if available
hand(inCanvas, bufferedResult.hand, localOptions); // use interpolated results if available hand(inCanvas, bufferedResult.hand, localOptions); // use interpolated results if available
// person(inCanvas, bufferedResult.persons, localOptions); // use interpolated results if available
gesture(inCanvas, result.gesture, localOptions); // gestures do not have buffering gesture(inCanvas, result.gesture, localOptions); // gestures do not have buffering
// person(inCanvas, result.persons, localOptions); // use interpolated results if available
object(inCanvas, result.object, localOptions); // object detection does not have buffering object(inCanvas, result.object, localOptions); // object detection does not have buffering
} }

View File

@ -1,3 +1,7 @@
/**
* EfficientPose Module
*/
import { log, join } from '../helpers'; import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';
import { Body } from '../result'; import { Body } from '../result';

View File

@ -1,3 +1,8 @@
/**
* Module that analyzes face descriptors/embedding
* Obsolete
*/
import { log, join } from '../helpers'; import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';
import { Tensor, GraphModel } from '../tfjs/types'; import { Tensor, GraphModel } from '../tfjs/types';

View File

@ -1,3 +1,7 @@
/**
* Emotion Module
*/
import { log, join } from '../helpers'; import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';

View File

@ -1,3 +1,8 @@
/**
* Module that analyzes person age
* Obsolete
*/
import { log, now } from './helpers'; import { log, now } from './helpers';
import * as facemesh from './blazeface/facemesh'; import * as facemesh from './blazeface/facemesh';
import * as emotion from './emotion/emotion'; import * as emotion from './emotion/emotion';

View File

@ -1,3 +1,9 @@
/**
* HSE-FaceRes Module
* Returns Age, Gender, Descriptor
* Implements Face similarity function
*/
import { log, join } from '../helpers'; import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';
import { Tensor, GraphModel } from '../tfjs/types'; import { Tensor, GraphModel } from '../tfjs/types';

View File

@ -1,3 +1,8 @@
/**
* Module that analyzes person gender
* Obsolete
*/
import { log, join } from '../helpers'; import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';

View File

@ -1,3 +1,7 @@
/**
* Gesture detection module
*/
import { Gesture } from '../result'; import { Gesture } from '../result';
export const body = (res): Gesture[] => { export const body = (res): Gesture[] => {

View File

@ -1,3 +1,7 @@
/**
* HandPose module entry point
*/
import { log, join } from '../helpers'; import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';
import * as handdetector from './handdetector'; import * as handdetector from './handdetector';
@ -30,19 +34,35 @@ export async function predict(input, config): Promise<Hand[]> {
annotations[key] = meshAnnotations[key].map((index) => predictions[i].landmarks[index]); annotations[key] = meshAnnotations[key].map((index) => predictions[i].landmarks[index]);
} }
} }
const box: [number, number, number, number] = predictions[i].box ? [
const landmarks = predictions[i].landmarks as number[];
let box: [number, number, number, number] = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0]; // maximums so conditionals work
let boxRaw: [number, number, number, number] = [0, 0, 0, 0];
if (landmarks && landmarks.length > 0) { // if we have landmarks, calculate box based on landmarks
for (const pt of landmarks) {
if (pt[0] < box[0]) box[0] = pt[0];
if (pt[1] < box[1]) box[1] = pt[1];
if (pt[0] > box[2]) box[2] = pt[0];
if (pt[1] > box[3]) box[3] = pt[1];
}
box[2] -= box[0];
box[3] -= box[1];
boxRaw = [box[0] / input.shape[2], box[1] / input.shape[1], box[2] / input.shape[2], box[3] / input.shape[1]];
} else { // otherwise use box from prediction
box = predictions[i].box ? [
Math.max(0, predictions[i].box.topLeft[0]), Math.max(0, predictions[i].box.topLeft[0]),
Math.max(0, predictions[i].box.topLeft[1]), Math.max(0, predictions[i].box.topLeft[1]),
Math.min(input.shape[2], predictions[i].box.bottomRight[0]) - Math.max(0, predictions[i].box.topLeft[0]), Math.min(input.shape[2], predictions[i].box.bottomRight[0]) - Math.max(0, predictions[i].box.topLeft[0]),
Math.min(input.shape[1], predictions[i].box.bottomRight[1]) - Math.max(0, predictions[i].box.topLeft[1]), Math.min(input.shape[1], predictions[i].box.bottomRight[1]) - Math.max(0, predictions[i].box.topLeft[1]),
] : [0, 0, 0, 0]; ] : [0, 0, 0, 0];
const boxRaw: [number, number, number, number] = [ boxRaw = [
(predictions[i].box.topLeft[0]) / input.shape[2], (predictions[i].box.topLeft[0]) / input.shape[2],
(predictions[i].box.topLeft[1]) / input.shape[1], (predictions[i].box.topLeft[1]) / input.shape[1],
(predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / input.shape[2], (predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / input.shape[2],
(predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / input.shape[1], (predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / input.shape[1],
]; ];
const landmarks = predictions[i].landmarks as number[]; }
hands.push({ id: i, confidence: Math.round(100 * predictions[i].confidence) / 100, box, boxRaw, landmarks, annotations }); hands.push({ id: i, confidence: Math.round(100 * predictions[i].confidence) / 100, box, boxRaw, landmarks, annotations });
} }
return hands; return hands;
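In short, the updated predict derives the hand box from landmark extents when landmarks are available and only falls back to the detector box otherwise; boxRaw is the same box normalized by the input tensor shape [batch, height, width, channels]. A condensed sketch of that calculation (assuming landmarks are [x, y, z] points; the helper name is illustrative):

// sketch: tight box [x, y, width, height] around 2D landmark points, plus normalized boxRaw
function boxFromLandmarks(landmarks: number[][], inputShape: number[]): { box: number[], boxRaw: number[] } {
  let [minX, minY, maxX, maxY] = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0]; // maximums so the conditionals below work
  for (const [x, y] of landmarks) {
    if (x < minX) minX = x;
    if (y < minY) minY = y;
    if (x > maxX) maxX = x;
    if (y > maxY) maxY = y;
  }
  const box = [minX, minY, maxX - minX, maxY - minY];
  // inputShape is [batch, height, width, channels]: divide x/width by shape[2] and y/height by shape[1]
  const boxRaw = [box[0] / inputShape[2], box[1] / inputShape[1], box[2] / inputShape[2], box[3] / inputShape[1]];
  return { box, boxRaw };
}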

View File

@ -1,3 +1,7 @@
/**
* Simple helper functions used across codebase
*/
// helper function: join two paths // helper function: join two paths
export function join(folder: string, file: string): string { export function join(folder: string, file: string): string {
const separator = folder.endsWith('/') ? '' : '/'; const separator = folder.endsWith('/') ? '' : '/';

View File

@ -1,3 +1,7 @@
/**
* Human main module
*/
import { log, now, mergeDeep } from './helpers'; import { log, now, mergeDeep } from './helpers';
import { Config, defaults } from './config'; import { Config, defaults } from './config';
import { Result, Gesture } from './result'; import { Result, Gesture } from './result';
@ -517,10 +521,7 @@ export class Human {
this.analyze('End Object:'); this.analyze('End Object:');
// if async wait for results // if async wait for results
if (this.config.async) { if (this.config.async) [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
[faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
}
tf.dispose(process.tensor);
// run gesture analysis last // run gesture analysis last
let gestureRes: Gesture[] = []; let gestureRes: Gesture[] = [];
@ -542,8 +543,12 @@ export class Human {
performance: this.perf, performance: this.perf,
canvas: process.canvas, canvas: process.canvas,
timestamp: Date.now(), timestamp: Date.now(),
get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes); }, get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes, process?.tensor?.shape); },
}; };
// finally dispose input tensor
tf.dispose(process.tensor);
// log('Result:', result); // log('Result:', result);
resolve(res); resolve(res);
}); });
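Note the simplified async path above: when config.async is enabled each detector returns a pending promise and the results are awaited together before gesture analysis runs. A minimal sketch of that pattern, with the model calls stubbed out as hypothetical placeholders:

// sketch: run detectors concurrently when async mode is enabled, sequentially otherwise
type Detection = { kind: string };
const detectFace = async (input: unknown): Promise<Detection> => ({ kind: 'face' }); // hypothetical stub for the real face pipeline
const detectBody = async (input: unknown): Promise<Detection> => ({ kind: 'body' }); // hypothetical stub for the real body pipeline

async function runDetectors(useAsync: boolean, input: unknown) {
  let faceRes: Detection | Promise<Detection> = useAsync ? detectFace(input) : await detectFace(input);
  let bodyRes: Detection | Promise<Detection> = useAsync ? detectBody(input) : await detectBody(input);
  if (useAsync) [faceRes, bodyRes] = await Promise.all([faceRes, bodyRes]); // resolve pending promises together
  return { face: faceRes, body: bodyRes };
}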

View File

@ -1,3 +1,7 @@
/**
* Image Processing module used by Human
*/
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';
import * as fxImage from './imagefx'; import * as fxImage from './imagefx';
import { Tensor } from '../tfjs/types'; import { Tensor } from '../tfjs/types';

View File

@ -1,7 +1,5 @@
/* /*
WebGLImageFilter - MIT Licensed WebGLImageFilter by Dominic Szablewski: <https://github.com/phoboslab/WebGLImageFilter>
2013, Dominic Szablewski - phoboslab.org
<https://github.com/phoboslab/WebGLImageFilter>
*/ */
function GLProgram(gl, vertexSource, fragmentSource) { function GLProgram(gl, vertexSource, fragmentSource) {

View File

@ -1,3 +1,7 @@
/**
* CenterNet object detection module
*/
import { log, join } from '../helpers'; import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';
import { labels } from './labels'; import { labels } from './labels';

View File

@ -1,3 +1,6 @@
/**
* CoCo Labels used by object detection modules
*/
export const labels = [ export const labels = [
{ class: 1, label: 'person' }, { class: 1, label: 'person' },
{ class: 2, label: 'bicycle' }, { class: 2, label: 'bicycle' },

View File

@ -1,3 +1,7 @@
/**
* NanoDet object detection module
*/
import { log, join } from '../helpers'; import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';
import { labels } from './labels'; import { labels } from './labels';

View File

@ -1,6 +1,10 @@
/**
* Module that analyzes existing results and recombines them into a unified person object
*/
import { Face, Body, Hand, Gesture, Person } from './result'; import { Face, Body, Hand, Gesture, Person } from './result';
export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>): Array<Person> { export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>, shape: Array<number> | undefined): Array<Person> {
let id = 0; let id = 0;
const persons: Array<Person> = []; const persons: Array<Person> = [];
for (const face of faces) { // person is defined primarily by face and then we append other objects as found for (const face of faces) { // person is defined primarily by face and then we append other objects as found
@ -36,12 +40,27 @@ export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>
else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.left?.id) person.gestures?.push(gesture); else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.left?.id) person.gestures?.push(gesture);
else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.right?.id) person.gestures?.push(gesture); else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.right?.id) person.gestures?.push(gesture);
} }
person.box = [ // this is incorrect as it should be a calculated value
Math.min(person.face?.box[0] || Number.MAX_SAFE_INTEGER, person.body?.box[0] || Number.MAX_SAFE_INTEGER, person.hands?.left?.box[0] || Number.MAX_SAFE_INTEGER, person.hands?.right?.box[0] || Number.MAX_SAFE_INTEGER), // create new overarching box from all boxes belonging to person
Math.min(person.face?.box[1] || Number.MAX_SAFE_INTEGER, person.body?.box[1] || Number.MAX_SAFE_INTEGER, person.hands?.left?.box[1] || Number.MAX_SAFE_INTEGER, person.hands?.right?.box[1] || Number.MAX_SAFE_INTEGER), const x: number[] = [];
Math.max(person.face?.box[2] || 0, person.body?.box[2] || 0, person.hands?.left?.box[2] || 0, person.hands?.right?.box[2] || 0), const y: number[] = [];
Math.max(person.face?.box[3] || 0, person.body?.box[3] || 0, person.hands?.left?.box[3] || 0, person.hands?.right?.box[3] || 0), const extractXY = (box) => { // extract all [x, y] coordinates from boxes [x, y, width, height]
]; if (box && box.length === 4) {
x.push(box[0], box[0] + box[2]);
y.push(box[1], box[1] + box[3]);
}
};
extractXY(person.face?.box);
extractXY(person.body?.box);
extractXY(person.hands?.left?.box);
extractXY(person.hands?.right?.box);
const minX = Math.min(...x);
const minY = Math.min(...y);
person.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY]; // create new overarching box
// shape is known so we calculate boxRaw as well
if (shape && shape.length === 4) person.boxRaw = [person.box[0] / shape[2], person.box[1] / shape[1], person.box[2] / shape[2], person.box[3] / shape[1]];
persons.push(person); persons.push(person);
} }
return persons; return persons;
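The person box is now the union of whichever component boxes exist (face, body, left/right hand): collect all corner coordinates, take the extremes, and normalize against the input tensor shape when it is known. A standalone sketch of that calculation (the unionBox name is illustrative; it assumes at least one box is present, which holds here since a person is always anchored by a face):

// sketch: union of several [x, y, width, height] boxes into one overarching box
function unionBox(boxes: Array<number[] | undefined>, shape?: number[]): { box: number[], boxRaw?: number[] } {
  const x: number[] = [];
  const y: number[] = [];
  for (const box of boxes) {
    if (box && box.length === 4) {
      x.push(box[0], box[0] + box[2]); // left and right edges
      y.push(box[1], box[1] + box[3]); // top and bottom edges
    }
  }
  const minX = Math.min(...x);
  const minY = Math.min(...y);
  const box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
  // shape is [batch, height, width, channels], so boxRaw is normalized by width (shape[2]) and height (shape[1])
  const boxRaw = (shape && shape.length === 4) ? [box[0] / shape[2], box[1] / shape[1], box[2] / shape[2], box[3] / shape[1]] : undefined;
  return { box, boxRaw };
}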

View File

@ -1,3 +1,7 @@
/**
* PoseNet module entry point
*/
import { log, join } from '../helpers'; import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';
import * as poses from './poses'; import * as poses from './poses';

View File

@ -1,3 +1,7 @@
/**
* Profiling calculations
*/
import { log } from './helpers'; import { log } from './helpers';
export const data = {}; export const data = {};

View File

@ -1,3 +1,7 @@
/**
* Type definitions for Human results
*/
import { Tensor } from '../dist/tfjs.esm.js'; import { Tensor } from '../dist/tfjs.esm.js';
/** Face results /** Face results
@ -176,5 +180,5 @@ export interface Result {
/** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */ /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
readonly timestamp: number, readonly timestamp: number,
/** getter property that returns unified persons object */ /** getter property that returns unified persons object */
readonly persons: Array<Person>, persons: Array<Person>,
} }

View File

@ -1,3 +1,7 @@
/**
* Embedded sample images used during warmup in dataURL format
*/
// data:image/jpeg;base64, // data:image/jpeg;base64,
export const face = ` export const face = `
/9j/4AAQSkZJRgABAQEAYABgAAD/4QBoRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUA /9j/4AAQSkZJRgABAQEAYABgAAD/4QBoRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUA

View File

@ -1,3 +1,6 @@
/**
* Helper function that returns basic system info
*/
export function info(): { platform: string, agent: string } { export function info(): { platform: string, agent: string } {
let platform; let platform;
let agent; let agent;

View File

@ -1,3 +1,8 @@
/**
* Custom TFJS backend for Human based on WebGL
* Not used by default
*/
import { log } from '../helpers'; import { log } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js'; import * as tf from '../../dist/tfjs.esm.js';

View File

@ -1,4 +1,6 @@
// wrapper to load tfjs in a single place so version can be changed quickly /**
* Creates tfjs bundle used by Human browser build target
*/
// simplified // simplified
// { modules: 1250, moduleBytes: 4013323, imports: 7, importBytes: 2255, outputBytes: 2991826, outputFiles: 'dist/tfjs.esm.js' } // { modules: 1250, moduleBytes: 4013323, imports: 7, importBytes: 2255, outputBytes: 2991826, outputFiles: 'dist/tfjs.esm.js' }

View File

@ -1 +1,4 @@
/**
* Creates tfjs bundle used by Human node-gpu build target
*/
export * from '@tensorflow/tfjs-node-gpu'; export * from '@tensorflow/tfjs-node-gpu';

View File

@ -1,2 +1,5 @@
/**
* Creates tfjs bundle used by Human node-wasm build target
*/
export * from '@tensorflow/tfjs'; export * from '@tensorflow/tfjs';
export * from '@tensorflow/tfjs-backend-wasm'; export * from '@tensorflow/tfjs-backend-wasm';

View File

@ -1 +1,4 @@
/**
* Creates tfjs bundle used by Human node build target
*/
export * from '@tensorflow/tfjs-node'; export * from '@tensorflow/tfjs-node';

View File

@ -1,3 +1,13 @@
// export common types /**
* Export common TensorFlow types
*/
/**
* TensorFlow Tensor type
*/
export { Tensor } from '@tensorflow/tfjs-core/dist/index'; export { Tensor } from '@tensorflow/tfjs-core/dist/index';
/**
* TensorFlow GraphModel type
*/
export { GraphModel } from '@tensorflow/tfjs-converter/dist/index'; export { GraphModel } from '@tensorflow/tfjs-converter/dist/index';

View File

@ -158,7 +158,7 @@ async function test(Human, inputConfig) {
testDetect(second, 'assets/human-sample-body.jpg', 'default'), testDetect(second, 'assets/human-sample-body.jpg', 'default'),
]); ]);
const t1 = process.hrtime.bigint(); const t1 = process.hrtime.bigint();
log('info', 'test complete:', Math.trunc(parseInt((t1 - t0).toString()) / 1000 / 1000), 'ms'); log('info', 'test complete:', Math.trunc(Number(t1 - t0) / 1000 / 1000), 'ms');
} }
exports.test = test; exports.test = test;
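The timing fix replaces parseInt on a stringified BigInt with a direct Number conversion; both turn the nanosecond delta from process.hrtime.bigint() into milliseconds, but Number() is the idiomatic route and avoids the string round-trip. A small sketch of the pattern:

// sketch: measure elapsed milliseconds with Node's BigInt high-resolution timer
const t0 = process.hrtime.bigint();
// ... work under test ...
const t1 = process.hrtime.bigint();
const elapsedMs = Math.trunc(Number(t1 - t0) / 1000 / 1000); // nanoseconds -> milliseconds
console.log('test complete:', elapsedMs, 'ms');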

View File

@ -1,120 +1,120 @@
2021-05-24 11:08:29 INFO:  @vladmandic/human version 1.9.3 2021-05-25 08:46:04 INFO:  @vladmandic/human version 1.9.3
2021-05-24 11:08:29 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0 2021-05-25 08:46:04 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-24 11:08:29 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"] 2021-05-25 08:46:04 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-05-24 11:08:29 INFO:  test-node.js start 2021-05-25 08:46:04 INFO:  test-node.js start
2021-05-24 11:08:29 STATE: test-node.js passed: create human 2021-05-25 08:46:05 STATE: test-node.js passed: create human
2021-05-24 11:08:29 INFO:  test-node.js human version: 1.9.3 2021-05-25 08:46:05 INFO:  test-node.js human version: 1.9.3
2021-05-24 11:08:29 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.0.0 2021-05-25 08:46:05 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-24 11:08:29 INFO:  test-node.js tfjs version: 3.6.0 2021-05-25 08:46:05 INFO:  test-node.js tfjs version: 3.6.0
2021-05-24 11:08:30 STATE: test-node.js passed: set backend: tensorflow 2021-05-25 08:46:05 STATE: test-node.js passed: set backend: tensorflow
2021-05-24 11:08:30 STATE: test-node.js passed: load models 2021-05-25 08:46:05 STATE: test-node.js passed: load models
2021-05-24 11:08:30 STATE: test-node.js result: defined models: 13 loaded models: 6 2021-05-25 08:46:05 STATE: test-node.js result: defined models: 13 loaded models: 6
2021-05-24 11:08:30 STATE: test-node.js passed: warmup: none default 2021-05-25 08:46:05 STATE: test-node.js passed: warmup: none default
2021-05-24 11:08:31 STATE: test-node.js passed: warmup: face default 2021-05-25 08:46:07 STATE: test-node.js passed: warmup: face default
2021-05-24 11:08:31 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5} 2021-05-25 08:46:07 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
2021-05-24 11:08:31 DATA:  test-node.js result: performance: load: 312 total: 1646 2021-05-25 08:46:07 DATA:  test-node.js result: performance: load: 346 total: 1673
2021-05-24 11:08:33 STATE: test-node.js passed: warmup: body default 2021-05-25 08:46:09 STATE: test-node.js passed: warmup: body default
2021-05-24 11:08:33 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17} 2021-05-25 08:46:09 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
2021-05-24 11:08:33 DATA:  test-node.js result: performance: load: 312 total: 1613 2021-05-25 08:46:09 DATA:  test-node.js result: performance: load: 346 total: 1665
2021-05-24 11:08:33 INFO:  test-node.js test body variants 2021-05-25 08:46:09 INFO:  test-node.js test body variants
2021-05-24 11:08:34 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-25 08:46:10 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-24 11:08:35 STATE: test-node.js passed: detect: assets/human-sample-body.jpg posenet 2021-05-25 08:46:11 STATE: test-node.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-24 11:08:35 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17} 2021-05-25 08:46:11 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
2021-05-24 11:08:35 DATA:  test-node.js result: performance: load: 312 total: 983 2021-05-25 08:46:11 DATA:  test-node.js result: performance: load: 346 total: 1017
2021-05-24 11:08:36 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-25 08:46:12 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-24 11:08:36 STATE: test-node.js passed: detect: assets/human-sample-body.jpg blazepose 2021-05-25 08:46:12 STATE: test-node.js passed: detect: assets/human-sample-body.jpg blazepose
2021-05-24 11:08:36 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:12 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:08:36 DATA:  test-node.js result: performance: load: 312 total: 387 2021-05-25 08:46:12 DATA:  test-node.js result: performance: load: 346 total: 418
2021-05-24 11:08:38 STATE: test-node.js passed: detect: random default 2021-05-25 08:46:13 STATE: test-node.js passed: detect: random default
2021-05-24 11:08:38 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":1,"keypoints":39} 2021-05-25 08:46:13 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":1,"keypoints":39}
2021-05-24 11:08:38 DATA:  test-node.js result: performance: load: 312 total: 919 2021-05-25 08:46:13 DATA:  test-node.js result: performance: load: 346 total: 888
2021-05-24 11:08:38 INFO:  test-node.js test: first instance 2021-05-25 08:46:13 INFO:  test-node.js test: first instance
2021-05-24 11:08:38 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3] 2021-05-25 08:46:13 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-24 11:08:40 STATE: test-node.js passed: detect: assets/sample-me.jpg default 2021-05-25 08:46:15 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-24 11:08:40 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:15 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:08:40 DATA:  test-node.js result: performance: load: 312 total: 1657 2021-05-25 08:46:15 DATA:  test-node.js result: performance: load: 346 total: 1761
2021-05-24 11:08:40 INFO:  test-node.js test: second instance 2021-05-25 08:46:15 INFO:  test-node.js test: second instance
2021-05-24 11:08:40 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3] 2021-05-25 08:46:16 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-24 11:08:42 STATE: test-node.js passed: detect: assets/sample-me.jpg default 2021-05-25 08:46:17 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-24 11:08:42 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:17 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:08:42 DATA:  test-node.js result: performance: load: 5 total: 1630 2021-05-25 08:46:17 DATA:  test-node.js result: performance: load: 4 total: 1670
2021-05-24 11:08:42 INFO:  test-node.js test: concurrent 2021-05-25 08:46:17 INFO:  test-node.js test: concurrent
2021-05-24 11:08:42 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3] 2021-05-25 08:46:17 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-24 11:08:42 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3] 2021-05-25 08:46:17 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-24 11:08:43 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-25 08:46:18 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-24 11:08:44 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-25 08:46:19 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-24 11:08:50 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default 2021-05-25 08:46:25 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-24 11:08:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:25 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:08:50 DATA:  test-node.js result: performance: load: 312 total: 5886 2021-05-25 08:46:25 DATA:  test-node.js result: performance: load: 346 total: 5919
2021-05-24 11:08:50 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default 2021-05-25 08:46:25 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-24 11:08:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:25 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:08:50 DATA:  test-node.js result: performance: load: 5 total: 5886 2021-05-25 08:46:25 DATA:  test-node.js result: performance: load: 4 total: 5919
2021-05-24 11:08:50 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default 2021-05-25 08:46:25 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-24 11:08:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:25 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:08:50 DATA:  test-node.js result: performance: load: 312 total: 5886 2021-05-25 08:46:25 DATA:  test-node.js result: performance: load: 346 total: 5919
2021-05-24 11:08:50 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default 2021-05-25 08:46:25 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-24 11:08:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:25 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:08:50 DATA:  test-node.js result: performance: load: 5 total: 5886 2021-05-25 08:46:25 DATA:  test-node.js result: performance: load: 4 total: 5919
2021-05-24 11:08:50 INFO:  test-node.js test complete: 20201 ms 2021-05-25 08:46:25 INFO:  test-node.js test complete: 20499 ms
2021-05-24 11:08:50 INFO:  test-node-gpu.js start 2021-05-25 08:46:25 INFO:  test-node-gpu.js start
2021-05-24 11:08:50 WARN:  test-node-gpu.js stderr: 2021-05-24 11:08:50.534311: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory 2021-05-25 08:46:26 WARN:  test-node-gpu.js stderr: 2021-05-25 08:46:26.515096: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-05-24 11:08:50 WARN:  test-node-gpu.js stderr: 2021-05-24 11:08:50.593093: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory 2021-05-25 08:46:26 WARN:  test-node-gpu.js stderr: 2021-05-25 08:46:26.635286: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-05-24 11:08:50 WARN:  test-node-gpu.js stderr: 2021-05-24 11:08:50.593140: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist 2021-05-25 08:46:26 WARN:  test-node-gpu.js stderr: 2021-05-25 08:46:26.635353: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-05-24 11:08:50 STATE: test-node-gpu.js passed: create human 2021-05-25 08:46:26 STATE: test-node-gpu.js passed: create human
2021-05-24 11:08:50 INFO:  test-node-gpu.js human version: 1.9.3 2021-05-25 08:46:26 INFO:  test-node-gpu.js human version: 1.9.3
2021-05-24 11:08:50 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0 2021-05-25 08:46:26 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-24 11:08:50 INFO:  test-node-gpu.js tfjs version: 3.6.0 2021-05-25 08:46:26 INFO:  test-node-gpu.js tfjs version: 3.6.0
2021-05-24 11:08:51 STATE: test-node-gpu.js passed: set backend: tensorflow 2021-05-25 08:46:27 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-05-24 11:08:51 STATE: test-node-gpu.js passed: load models 2021-05-25 08:46:27 STATE: test-node-gpu.js passed: load models
2021-05-24 11:08:51 STATE: test-node-gpu.js result: defined models: 13 loaded models: 6 2021-05-25 08:46:27 STATE: test-node-gpu.js result: defined models: 13 loaded models: 6
2021-05-24 11:08:51 STATE: test-node-gpu.js passed: warmup: none default 2021-05-25 08:46:27 STATE: test-node-gpu.js passed: warmup: none default
2021-05-24 11:08:52 STATE: test-node-gpu.js passed: warmup: face default 2021-05-25 08:46:28 STATE: test-node-gpu.js passed: warmup: face default
2021-05-24 11:08:52 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5} 2021-05-25 08:46:28 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
2021-05-24 11:08:52 DATA:  test-node-gpu.js result: performance: load: 333 total: 1664 2021-05-25 08:46:28 DATA:  test-node-gpu.js result: performance: load: 326 total: 1742
2021-05-24 11:08:54 STATE: test-node-gpu.js passed: warmup: body default 2021-05-25 08:46:30 STATE: test-node-gpu.js passed: warmup: body default
2021-05-24 11:08:54 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17} 2021-05-25 08:46:30 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
2021-05-24 11:08:54 DATA:  test-node-gpu.js result: performance: load: 333 total: 1772 2021-05-25 08:46:30 DATA:  test-node-gpu.js result: performance: load: 326 total: 1599
2021-05-24 11:08:54 INFO:  test-node-gpu.js test body variants 2021-05-25 08:46:30 INFO:  test-node-gpu.js test body variants
2021-05-24 11:08:55 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-25 08:46:31 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-24 11:08:56 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet 2021-05-25 08:46:32 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-24 11:08:56 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17} 2021-05-25 08:46:32 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
2021-05-24 11:08:56 DATA:  test-node-gpu.js result: performance: load: 333 total: 963 2021-05-25 08:46:32 DATA:  test-node-gpu.js result: performance: load: 326 total: 1448
2021-05-24 11:08:57 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-25 08:46:34 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-24 11:08:57 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg blazepose 2021-05-25 08:46:34 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg blazepose
2021-05-24 11:08:57 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:34 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:08:57 DATA:  test-node-gpu.js result: performance: load: 333 total: 397 2021-05-25 08:46:34 DATA:  test-node-gpu.js result: performance: load: 326 total: 627
2021-05-24 11:08:59 STATE: test-node-gpu.js passed: detect: random default 2021-05-25 08:46:35 STATE: test-node-gpu.js passed: detect: random default
2021-05-24 11:08:59 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":1,"keypoints":39} 2021-05-25 08:46:35 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:08:59 DATA:  test-node-gpu.js result: performance: load: 333 total: 887 2021-05-25 08:46:35 DATA:  test-node-gpu.js result: performance: load: 326 total: 258
2021-05-24 11:08:59 INFO:  test-node-gpu.js test: first instance 2021-05-25 08:46:35 INFO:  test-node-gpu.js test: first instance
2021-05-24 11:08:59 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3] 2021-05-25 08:46:35 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-24 11:09:01 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default 2021-05-25 08:46:35 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-24 11:09:01 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:35 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:09:01 DATA:  test-node-gpu.js result: performance: load: 333 total: 1643 2021-05-25 08:46:35 DATA:  test-node-gpu.js result: performance: load: 326 total: 234
2021-05-24 11:09:01 INFO:  test-node-gpu.js test: second instance 2021-05-25 08:46:35 INFO:  test-node-gpu.js test: second instance
2021-05-24 11:09:01 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3] 2021-05-25 08:46:36 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-24 11:09:02 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default 2021-05-25 08:46:38 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-24 11:09:02 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:38 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:09:02 DATA:  test-node-gpu.js result: performance: load: 4 total: 1581 2021-05-25 08:46:38 DATA:  test-node-gpu.js result: performance: load: 4 total: 2409
2021-05-24 11:09:02 INFO:  test-node-gpu.js test: concurrent 2021-05-25 08:46:38 INFO:  test-node-gpu.js test: concurrent
2021-05-24 11:09:03 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3] 2021-05-25 08:46:38 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-24 11:09:03 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3] 2021-05-25 08:46:38 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-24 11:09:04 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-25 08:46:39 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-24 11:09:05 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-25 08:46:40 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-24 11:09:11 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default 2021-05-25 08:46:50 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-24 11:09:11 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:50 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:09:11 DATA:  test-node-gpu.js result: performance: load: 333 total: 6184 2021-05-25 08:46:50 DATA:  test-node-gpu.js result: performance: load: 326 total: 9194
2021-05-24 11:09:11 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default 2021-05-25 08:46:50 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-24 11:09:11 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:50 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:09:11 DATA:  test-node-gpu.js result: performance: load: 4 total: 6184 2021-05-25 08:46:50 DATA:  test-node-gpu.js result: performance: load: 4 total: 9194
2021-05-24 11:09:11 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default 2021-05-25 08:46:50 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-24 11:09:11 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:50 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:09:11 DATA:  test-node-gpu.js result: performance: load: 333 total: 6184 2021-05-25 08:46:50 DATA:  test-node-gpu.js result: performance: load: 326 total: 9194
2021-05-24 11:09:11 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default 2021-05-25 08:46:50 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-24 11:09:11 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39} 2021-05-25 08:46:50 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-24 11:09:11 DATA:  test-node-gpu.js result: performance: load: 4 total: 6184 2021-05-25 08:46:50 DATA:  test-node-gpu.js result: performance: load: 4 total: 9194
2021-05-24 11:09:11 INFO:  test-node-gpu.js test complete: 20649 ms 2021-05-25 08:46:50 INFO:  test-node-gpu.js test complete: 23315 ms
2021-05-24 11:09:11 INFO:  test-node-wasm.js start 2021-05-25 08:46:50 INFO:  test-node-wasm.js start
2021-05-24 11:09:11 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030 2021-05-25 08:46:50 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
2021-05-24 11:09:11 ERROR: test-node-wasm.js aborting test 2021-05-25 08:46:50 ERROR: test-node-wasm.js aborting test
2021-05-24 11:09:11 INFO:  status: {"passed":46,"failed":1} 2021-05-25 08:46:50 INFO:  status: {"passed":46,"failed":1}

File diff suppressed because one or more lines are too long

View File

@ -84,8 +84,7 @@
-useDepth: use z-axis coordinate as color shade, -useDepth: use z-axis coordinate as color shade,
-useCurves: draw polygons as curves or as lines, -useCurves: draw polygons as curves or as lines,
-bufferedOutput: experimental: allows to call draw methods multiple times for each detection and interpolate results between results thus achieving smoother animations -bufferedOutput: experimental: allows to call draw methods multiple times for each detection and interpolate results between results thus achieving smoother animations
-bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc. -bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.</p>
-useRawBoxes: Boolean: internal: use non-normalized coordinates when performing draw methods,</p>
</div> </div>
</div> </div>
</section> </section>
@ -106,7 +105,6 @@
<ul class="tsd-index-list"> <ul class="tsd-index-list">
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#bufferedfactor" class="tsd-kind-icon">buffered<wbr>Factor</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#bufferedfactor" class="tsd-kind-icon">buffered<wbr>Factor</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#bufferedoutput" class="tsd-kind-icon">buffered<wbr>Output</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#bufferedoutput" class="tsd-kind-icon">buffered<wbr>Output</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#calculatehandbox" class="tsd-kind-icon">calculate<wbr>Hand<wbr>Box</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#color" class="tsd-kind-icon">color</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#color" class="tsd-kind-icon">color</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#drawboxes" class="tsd-kind-icon">draw<wbr>Boxes</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#drawboxes" class="tsd-kind-icon">draw<wbr>Boxes</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#drawlabels" class="tsd-kind-icon">draw<wbr>Labels</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#drawlabels" class="tsd-kind-icon">draw<wbr>Labels</a></li>
@ -122,7 +120,6 @@
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#shadowcolor" class="tsd-kind-icon">shadow<wbr>Color</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#shadowcolor" class="tsd-kind-icon">shadow<wbr>Color</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#usecurves" class="tsd-kind-icon">use<wbr>Curves</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#usecurves" class="tsd-kind-icon">use<wbr>Curves</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#usedepth" class="tsd-kind-icon">use<wbr>Depth</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#usedepth" class="tsd-kind-icon">use<wbr>Depth</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#userawboxes" class="tsd-kind-icon">use<wbr>Raw<wbr>Boxes</a></li>
</ul> </ul>
</section> </section>
</div> </div>
@ -144,13 +141,6 @@
<aside class="tsd-sources"> <aside class="tsd-sources">
</aside> </aside>
</section> </section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="calculatehandbox" class="tsd-anchor"></a>
<h3>calculate<wbr>Hand<wbr>Box</h3>
<div class="tsd-signature tsd-kind-icon">calculate<wbr>Hand<wbr>Box<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div>
<aside class="tsd-sources">
</aside>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"> <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="color" class="tsd-anchor"></a> <a name="color" class="tsd-anchor"></a>
<h3>color</h3> <h3>color</h3>
@ -256,13 +246,6 @@
<aside class="tsd-sources"> <aside class="tsd-sources">
</aside> </aside>
</section> </section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="userawboxes" class="tsd-anchor"></a>
<h3>use<wbr>Raw<wbr>Boxes</h3>
<div class="tsd-signature tsd-kind-icon">use<wbr>Raw<wbr>Boxes<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div>
<aside class="tsd-sources">
</aside>
</section>
</section> </section>
</div> </div>
<div class="col-4 col-menu menu-sticky-wrap menu-highlight"> <div class="col-4 col-menu menu-sticky-wrap menu-highlight">
@ -298,9 +281,6 @@
<li class=" tsd-kind-property tsd-parent-kind-interface"> <li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="drawoptions.html#bufferedoutput" class="tsd-kind-icon">buffered<wbr>Output</a> <a href="drawoptions.html#bufferedoutput" class="tsd-kind-icon">buffered<wbr>Output</a>
</li> </li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="drawoptions.html#calculatehandbox" class="tsd-kind-icon">calculate<wbr>Hand<wbr>Box</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-interface"> <li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="drawoptions.html#color" class="tsd-kind-icon">color</a> <a href="drawoptions.html#color" class="tsd-kind-icon">color</a>
</li> </li>
@ -346,9 +326,6 @@
<li class=" tsd-kind-property tsd-parent-kind-interface"> <li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="drawoptions.html#usedepth" class="tsd-kind-icon">use<wbr>Depth</a> <a href="drawoptions.html#usedepth" class="tsd-kind-icon">use<wbr>Depth</a>
</li> </li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="drawoptions.html#userawboxes" class="tsd-kind-icon">use<wbr>Raw<wbr>Boxes</a>
</li>
</ul> </ul>
</li> </li>
</ul> </ul>


@ -188,7 +188,7 @@
</section> </section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"> <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="persons" class="tsd-anchor"></a> <a name="persons" class="tsd-anchor"></a>
<h3><span class="tsd-flag ts-flagReadonly">Readonly</span> persons</h3> <h3>persons</h3>
<div class="tsd-signature tsd-kind-icon">persons<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Person</span><span class="tsd-signature-symbol">[]</span></div> <div class="tsd-signature tsd-kind-icon">persons<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Person</span><span class="tsd-signature-symbol">[]</span></div>
<aside class="tsd-sources"> <aside class="tsd-sources">
</aside> </aside>


@ -1,3 +1,6 @@
/**
* FaceMesh & BlazeFace Module entry point
*/
export declare function predict(input: any, config: any): Promise<{ export declare function predict(input: any, config: any): Promise<{
confidence: any; confidence: any;
boxConfidence: any; boxConfidence: any;


@ -1,3 +1,6 @@
/**
* BlazePose Module
*/
import { Body } from '../result'; import { Body } from '../result';
export declare function load(config: any): Promise<any>; export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<Body[]>; export declare function predict(image: any, config: any): Promise<Body[]>;


@ -1,3 +1,6 @@
/**
* Module that implements helper draw functions, exposed as human.draw
*/
import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result'; import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result';
/** /**
* Draw Options * Draw Options
@ -19,7 +22,6 @@ import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result'
* -useCurves: draw polygons as curves or as lines, * -useCurves: draw polygons as curves or as lines,
* -bufferedOutput: experimental: allows calling draw methods multiple times for each detection, interpolating between successive results for smoother animations * -bufferedOutput: experimental: allows calling draw methods multiple times for each detection, interpolating between successive results for smoother animations
* -bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc. * -bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.
* -useRawBoxes: Boolean: internal: use non-normalized coordinates when performing draw methods,
*/ */
export interface DrawOptions { export interface DrawOptions {
color: string; color: string;
@ -39,8 +41,6 @@ export interface DrawOptions {
useCurves: boolean; useCurves: boolean;
bufferedOutput: boolean; bufferedOutput: boolean;
bufferedFactor: number; bufferedFactor: number;
useRawBoxes: boolean;
calculateHandBox: boolean;
} }
export declare const options: DrawOptions; export declare const options: DrawOptions;
export declare function gesture(inCanvas: HTMLCanvasElement, result: Array<Gesture>, drawOptions?: DrawOptions): Promise<void>; export declare function gesture(inCanvas: HTMLCanvasElement, result: Array<Gesture>, drawOptions?: DrawOptions): Promise<void>;
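
Note on the bufferedOutput/bufferedFactor description above: the convergence it describes is a fractional step toward the latest detection, where each draw pass moves the displayed value 1/bufferedFactor of the remaining distance. A minimal TypeScript sketch of that idea follows; interpolate() is an illustrative helper, not the library's internal implementation.

// Illustrative sketch only: approximates the bufferedFactor behaviour described in the DrawOptions docs
function interpolate(previous: number, target: number, bufferedFactor: number): number {
  // bufferedFactor = 1 -> jump to the target immediately; 2 -> move 50% per call; 4 -> 25% per call, etc.
  return previous + (target - previous) / bufferedFactor;
}

// usage: called once per drawn frame while bufferedOutput is enabled
let drawnX = 100;        // coordinate currently being drawn
const detectedX = 140;   // coordinate from the latest detection
drawnX = interpolate(drawnX, detectedX, 2); // 120
drawnX = interpolate(drawnX, detectedX, 2); // 130, converging toward 140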


@ -1,2 +1,5 @@
/**
* Emotion Module
*/
export declare function load(config: any): Promise<any>; export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any, idx: any, count: any): Promise<unknown>; export declare function predict(image: any, config: any, idx: any, count: any): Promise<unknown>;

4
types/face.d.ts vendored

@ -1,2 +1,6 @@
/**
* Module that analyzes person age
* Obsolete
*/
import { Face } from './result'; import { Face } from './result';
export declare const detectFace: (parent: any, input: any) => Promise<Face[]>; export declare const detectFace: (parent: any, input: any) => Promise<Face[]>;


@ -1,3 +1,8 @@
/**
* HSE-FaceRes Module
* Returns Age, Gender, Descriptor
* Implements Face similarity function
*/
import { Tensor } from '../tfjs/types'; import { Tensor } from '../tfjs/types';
declare type DB = Array<{ declare type DB = Array<{
name: string; name: string;


@ -1,3 +1,6 @@
/**
* Gesture detection module
*/
import { Gesture } from '../result'; import { Gesture } from '../result';
export declare const body: (res: any) => Gesture[]; export declare const body: (res: any) => Gesture[];
export declare const face: (res: any) => Gesture[]; export declare const face: (res: any) => Gesture[];


@ -1,3 +1,6 @@
/**
* HandPose module entry point
*/
import { Hand } from '../result'; import { Hand } from '../result';
export declare function predict(input: any, config: any): Promise<Hand[]>; export declare function predict(input: any, config: any): Promise<Hand[]>;
export declare function load(config: any): Promise<[unknown, unknown]>; export declare function load(config: any): Promise<[unknown, unknown]>;

3
types/helpers.d.ts vendored

@ -1,3 +1,6 @@
/**
* Simple helper functions used across codebase
*/
export declare function join(folder: string, file: string): string; export declare function join(folder: string, file: string): string;
export declare function log(...msg: any[]): void; export declare function log(...msg: any[]): void;
export declare const now: () => number; export declare const now: () => number;

3
types/human.d.ts vendored

@ -1,3 +1,6 @@
/**
* Human main module
*/
import { Config } from './config'; import { Config } from './config';
import { Result } from './result'; import { Result } from './result';
import * as tf from '../dist/tfjs.esm.js'; import * as tf from '../dist/tfjs.esm.js';


@ -1,3 +1,6 @@
/**
* Image Processing module used by Human
*/
import { Tensor } from '../tfjs/types'; import { Tensor } from '../tfjs/types';
export declare function process(input: any, config: any): { export declare function process(input: any, config: any): {
tensor: Tensor | null; tensor: Tensor | null;


@ -1,3 +1,6 @@
/**
* CenterNet object detection module
*/
import { Item } from '../result'; import { Item } from '../result';
export declare function load(config: any): Promise<any>; export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<Item[]>; export declare function predict(image: any, config: any): Promise<Item[]>;


@ -1,3 +1,6 @@
/**
* CoCo Labels used by object detection modules
*/
export declare const labels: { export declare const labels: {
class: number; class: number;
label: string; label: string;


@ -1,3 +1,6 @@
/**
* NanoDet object detection module
*/
import { Item } from '../result'; import { Item } from '../result';
export declare function load(config: any): Promise<any>; export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<Item[]>; export declare function predict(image: any, config: any): Promise<Item[]>;

5
types/persons.d.ts vendored

@ -1,2 +1,5 @@
/**
* Module that analyzes existing results and recombines them into a unified person object
*/
import { Face, Body, Hand, Gesture, Person } from './result'; import { Face, Body, Hand, Gesture, Person } from './result';
export declare function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>): Array<Person>; export declare function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>, shape: Array<number> | undefined): Array<Person>;
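
Given the commit's focus on box calculations, the new shape argument above is presumably the processed input tensor shape used to scale the combined person boxes; that purpose, the [1, height, width, 3] layout, and the import shown in the sketch below are assumptions, not documented behaviour.

// Hedged usage sketch; combine() is a hypothetical wrapper and the shape layout is an assumption
import { join } from './persons';              // assumed direct module import
import type { Result, Person } from './result';

function combine(result: Result): Array<Person> {
  // pass the input tensor shape so person boxes can be related to the source dimensions (assumed purpose)
  return join(result.face, result.body, result.hand, result.gesture, [1, 720, 1280, 3]);
}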


@ -1,3 +1,6 @@
/**
* PoseNet module entry point
*/
import { Body } from '../result'; import { Body } from '../result';
export declare function predict(input: any, config: any): Promise<Body[]>; export declare function predict(input: any, config: any): Promise<Body[]>;
export declare function load(config: any): Promise<any>; export declare function load(config: any): Promise<any>;

5
types/result.d.ts vendored

@ -1,3 +1,6 @@
/**
* Type definitions for Human results
*/
import { Tensor } from '../dist/tfjs.esm.js'; import { Tensor } from '../dist/tfjs.esm.js';
/** Face results /** Face results
* Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models * Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
@ -201,5 +204,5 @@ export interface Result {
/** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */ /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
readonly timestamp: number; readonly timestamp: number;
/** getter property that returns unified persons object */ /** getter property that returns unified persons object */
readonly persons: Array<Person>; persons: Array<Person>;
} }
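
Dropping the readonly modifier does not change how persons is consumed; a minimal sketch of reading the unified persons list, assuming an already configured Human instance and an input canvas:

// Minimal sketch: the Human instance and inputCanvas are assumed to exist already
async function logPersons(human: any, inputCanvas: HTMLCanvasElement): Promise<void> {
  const result = await human.detect(inputCanvas);
  for (const person of result.persons) {
    console.log(person); // each entry combines face, body, hand and gesture results for one detected person
  }
}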

3
types/sample.d.ts vendored

File diff suppressed because one or more lines are too long

3
types/sysinfo.d.ts vendored

@ -1,3 +1,6 @@
/**
* Helper function that returns basic system info
*/
export declare function info(): { export declare function info(): {
platform: string; platform: string;
agent: string; agent: string;


@ -1,3 +1,7 @@
/**
* Custom TFJS backend for Human based on WebGL
* Not used by default
*/
export declare const config: { export declare const config: {
name: string; name: string;
priority: number; priority: number;


@ -1,2 +1,11 @@
/**
* Export common TensorFlow types
*/
/**
* TensorFlow Tensor type
*/
export { Tensor } from '@tensorflow/tfjs-core/dist/index'; export { Tensor } from '@tensorflow/tfjs-core/dist/index';
/**
* TensorFlow GraphModel type
*/
export { GraphModel } from '@tensorflow/tfjs-converter/dist/index'; export { GraphModel } from '@tensorflow/tfjs-converter/dist/index';