mirror of https://github.com/vladmandic/human
update all box calculations
parent e62f18e34e
commit f357901e36
@@ -1,4 +1,10 @@
-// @ts-nocheck
+// @ts-nocheck // typescript checks disabled as this is pure javascript
+
+/**
+ * Human demo for browsers
+ *
+ * Demo for face mesh detection and projection as 3D object using Three.js
+ */
 
 import { DoubleSide, Mesh, MeshBasicMaterial, OrthographicCamera, Scene, sRGBEncoding, VideoTexture, WebGLRenderer, BufferGeometry, BufferAttribute } from './helpers/three.js';
 import { OrbitControls } from './helpers/three-orbitControls.js';
@@ -1,4 +1,10 @@
-// @ts-nocheck
+// @ts-nocheck // typescript checks disabled as this is pure javascript
+
+/**
+ * Human demo for browsers
+ *
+ * Demo for face descriptor analysis and face similarity analysis
+ */
 
 import Human from '../dist/human.esm.js';
 
@@ -1,4 +1,10 @@
-// @ts-nocheck
+// @ts-nocheck // typescript checks disabled as this is pure javascript
+
+/**
+ * Human demo for browsers
+ *
+ * Main demo app that exposes all Human functionality
+ */
 
 import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human
 // import Human from '../dist/human.esm-nobundle.js'; // this requires that tf is loaded manually and bundled before human can be used
@@ -10,6 +16,7 @@ let human;
 
 const userConfig = {
+  warmup: 'none',
   /*
   backend: 'webgl',
   async: false,
   cacheSensitivity: 0,
@@ -29,6 +36,7 @@ const userConfig = {
   // body: { enabled: true, modelPath: 'blazepose.json' },
   object: { enabled: false },
+  gesture: { enabled: true },
   */
 };
 
 const drawOptions = {
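Everything in `userConfig` is merged over Human's built-in defaults when the library is instantiated, so the demo only lists overrides and keeps optional ones in the commented block. A minimal sketch of that usage, with the video element as a hypothetical input:

```js
// minimal sketch: config overrides are merged over Human's defaults
import Human from '../dist/human.esm.js';

const human = new Human({ warmup: 'none', object: { enabled: false } });

async function run() {
  const video = document.getElementById('video'); // hypothetical input element
  const result = await human.detect(video);
  console.log('faces detected:', result.face.length);
}
run();
```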
@@ -1,4 +1,9 @@
 // @ts-nocheck
+/**
+ * Human demo for NodeJS
+ *
+ * Used by node-multiprocess.js as an on-demand started worker process
+ * Receives messages from parent process and sends results
+ */
 
 const fs = require('fs');
 const log = require('@vladmandic/pilogger');
@@ -19,16 +24,16 @@ const myConfig = {
     enabled: true,
     detector: { enabled: true, rotation: false },
     mesh: { enabled: true },
-    iris: { enabled: false },
+    iris: { enabled: true },
     description: { enabled: true },
     emotion: { enabled: true },
   },
   hand: {
-    enabled: false,
+    enabled: true,
   },
   // body: { modelPath: 'blazepose.json', enabled: true },
-  body: { enabled: false },
-  object: { enabled: false },
+  body: { enabled: true },
+  object: { enabled: true },
 };
 
 // read image from a file and create tensor to be used by faceapi
@@ -44,8 +49,10 @@ async function image(img) {
 async function detect(img) {
   const tensor = await image(img);
   const result = await human.detect(tensor);
-  process.send({ image: img, detected: result }); // send results back to main
-  process.send({ ready: true }); // send signal back to main that this worker is now idle and ready for next image
+  if (process.send) { // check if ipc exists
+    process.send({ image: img, detected: result }); // send results back to main
+    process.send({ ready: true }); // send signal back to main that this worker is now idle and ready for next image
+  }
   tensor.dispose();
 }
 
@@ -57,8 +64,8 @@ async function main() {
 
   // on worker start first initialize message handler so we don't miss any messages
   process.on('message', (msg) => {
-    if (msg.exit) process.exit(); // if main told worker to exit
-    if (msg.test) process.send({ test: true });
+    if (msg.exit && process.exit) process.exit(); // if main told worker to exit
+    if (msg.test && process.send) process.send({ test: true });
     if (msg.image) detect(msg.image); // if main told worker to process image
     log.data('Worker received message:', process.pid, msg); // generic log
   });
@@ -72,7 +79,7 @@ async function main() {
   await human.load();
 
   // now we're ready, so send message back to main that it knows it can use this worker
-  process.send({ ready: true });
+  if (process.send) process.send({ ready: true });
 }
 
 main();
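The new `if (process.send)` guards are the point of this hunk: `process.send` only exists when Node starts the script over an IPC channel, as `node-multiprocess.js` does via `child_process.fork`; run directly, it is `undefined` and calling it would throw. The same guard in isolation:

```js
// minimal sketch of guarded ipc: process.send is only defined when this
// script was started via child_process.fork(), so checking it keeps the
// worker safe to run standalone as well
if (process.send) {
  process.send({ ready: true }); // forked: report readiness to the parent
} else {
  console.log('no ipc channel, running standalone');
}
```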
@@ -1,4 +1,10 @@
 // @ts-nocheck
+/**
+ * Human demo for NodeJS
+ *
+ * Uses NodeJS fork functionality with inter-process messaging
+ * Starts a pool of worker processes and dispatches work items to each worker when they are available
+ * Uses node-multiprocess-worker.js for actual processing
+ */
 
 const fs = require('fs');
 const path = require('path');
@@ -7,7 +13,7 @@ const log = require('@vladmandic/pilogger'); // this is my simple logger with fe
 const child_process = require('child_process');
 // note that the main process does not import faceapi or tfjs at all
 
-const imgPathRoot = './demo'; // modify to include your sample images
+const imgPathRoot = './assets'; // modify to include your sample images
 const numWorkers = 4; // how many workers will be started
 const workers = []; // this holds worker processes
 const images = []; // this holds queue of enumerated images
@@ -33,14 +39,14 @@ function waitCompletion() {
   if (activeWorkers > 0) setImmediate(() => waitCompletion());
   else {
     t[1] = process.hrtime.bigint();
-    log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(parseInt(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(parseInt(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(parseInt(t[1] - t[2]) / numImages / 1000000), 'ms');
+    log.info('Processed:', numImages, 'images in', 'total:', Math.trunc(Number(t[1] - t[0]) / 1000000), 'ms', 'working:', Math.trunc(Number(t[1] - t[2]) / 1000000), 'ms', 'average:', Math.trunc(Number(t[1] - t[2]) / numImages / 1000000), 'ms');
   }
 }
 
 function measureLatency() {
   t[3] = process.hrtime.bigint();
-  const latencyInitialization = Math.trunc(parseInt(t[2] - t[0]) / 1000 / 1000);
-  const latencyRoundTrip = Math.trunc(parseInt(t[3] - t[2]) / 1000 / 1000);
+  const latencyInitialization = Math.trunc(Number(t[2] - t[0]) / 1000 / 1000);
+  const latencyRoundTrip = Math.trunc(Number(t[3] - t[2]) / 1000 / 1000);
   log.info('Latency: worker initialization:', latencyInitialization, 'message round trip:', latencyRoundTrip);
 }
 
@@ -59,6 +65,7 @@ async function main() {
     if (imgFile.toLocaleLowerCase().endsWith('.jpg')) images.push(path.join(imgPathRoot, imgFile));
   }
   numImages = images.length;
+  log.state('Enumerated images:', imgPathRoot, numImages);
 
   t[0] = process.hrtime.bigint();
   // manage worker processes
@@ -71,7 +78,7 @@ async function main() {
     // otherwise it's an unknown message
     workers[i].on('message', (msg) => {
       if (msg.ready) detect(workers[i]);
-      else if (msg.image) log.data('Main: worker finished:', workers[i].pid, 'detected faces:', msg.detected.face?.length);
+      else if (msg.image) log.data('Main: worker finished:', workers[i].pid, 'detected faces:', msg.detected.face?.length, 'bodies:', msg.detected.body?.length, 'hands:', msg.detected.hand?.length, 'objects:', msg.detected.object?.length);
       else if (msg.test) measureLatency();
       else log.data('Main: worker message:', workers[i].pid, msg);
     });
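The `parseInt` to `Number` change concerns the `bigint` values returned by `process.hrtime.bigint()`: subtracting two readings yields bigint nanoseconds, which `Number()` converts directly, whereas `parseInt` went through an implicit string conversion first. The timing pattern standalone:

```js
// minimal sketch: time a section with process.hrtime.bigint() and convert
// the bigint nanosecond difference via Number() before float math
const t0 = process.hrtime.bigint();
for (let i = 0; i < 1e6; i++); // hypothetical workload being measured
const t1 = process.hrtime.bigint();
console.log('elapsed:', Math.trunc(Number(t1 - t0) / 1000000), 'ms');
```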
@@ -1,10 +1,14 @@
-/*
-  Unsupported sample of using external utility ffmpeg to capture to decode video input and process it using Human
-
-  uses ffmpeg to process video input and output stream of motion jpeg images which are then parsed for frame start/end markers by pipe2jpeg
-  each frame triggers an event with jpeg buffer that then can be decoded and passed to human for processing
-  if you want process at specific intervals, set output fps to some value
-  if you want to process an input stream, set real-time flag and set input as required
+/**
+ * Human demo for NodeJS
+ * Unsupported sample of using the external utility ffmpeg to capture and decode video input and process it using Human
+ *
+ * Uses ffmpeg to process video input and output a stream of motion jpeg images which are then parsed for frame start/end markers by pipe2jpeg
+ * Each frame triggers an event with a jpeg buffer that can then be decoded and passed to human for processing
+ * If you want to process at specific intervals, set output fps to some value
+ * If you want to process an input stream, set the real-time flag and set input as required
+ *
+ * Note that pipe2jpeg is not part of Human dependencies and should be installed manually
+ * Working version of ffmpeg must be present on the system
 */
 
 const spawn = require('child_process').spawn;
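A rough illustration of the pipeline the header describes; hedged, since pipe2jpeg is not a Human dependency and the per-frame 'jpeg' event name is assumed from its documentation:

```js
// minimal sketch: spawn ffmpeg to emit an mjpeg stream on stdout, let
// pipe2jpeg split it into frames, and hand each frame onward;
// 'video.mp4' is a hypothetical input and decoding/detection is elided
const spawn = require('child_process').spawn;
const Pipe2Jpeg = require('pipe2jpeg'); // install manually, not a Human dependency

const pipe2jpeg = new Pipe2Jpeg();
pipe2jpeg.on('jpeg', (jpegBuffer) => { // event name assumed from pipe2jpeg docs
  // decode jpegBuffer (e.g. tf.node.decodeJpeg) and pass the tensor to human.detect()
  console.log('frame received:', jpegBuffer.length, 'bytes');
});

const ffmpeg = spawn('ffmpeg', [
  '-loglevel', 'quiet',
  '-i', 'video.mp4',  // input file or stream
  '-f', 'image2pipe', // output an image stream
  '-c:v', 'mjpeg',    // encoded as motion jpeg frames
  'pipe:1',           // to stdout
]);
ffmpeg.stdout.pipe(pipe2jpeg);
```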
@@ -1,5 +1,9 @@
-/*
-  Unsupported sample of using external utility fswebcam to capture screenshot from attached webcam in regular intervals and process it using Human
+/**
+ * Human demo for NodeJS
+ * Unsupported sample of using the external utility fswebcam to capture a screenshot from an attached webcam at regular intervals and process it using Human
+ *
+ * Note that node-webcam is not part of Human dependencies and should be installed manually
+ * Working version of fswebcam must be present on the system
 */
 
 const util = require('util');
@@ -1,3 +1,7 @@
+/**
+ * Human demo for NodeJS
+ */
+
 const log = require('@vladmandic/pilogger');
 const fs = require('fs');
 const process = require('process');
File diff suppressed because one or more lines are too long
@@ -16971,19 +16971,37 @@ async function predict5(input, config3) {
         annotations3[key] = meshAnnotations[key].map((index) => predictions[i].landmarks[index]);
       }
     }
-    const box4 = predictions[i].box ? [
-      Math.max(0, predictions[i].box.topLeft[0]),
-      Math.max(0, predictions[i].box.topLeft[1]),
-      Math.min(input.shape[2], predictions[i].box.bottomRight[0]) - Math.max(0, predictions[i].box.topLeft[0]),
-      Math.min(input.shape[1], predictions[i].box.bottomRight[1]) - Math.max(0, predictions[i].box.topLeft[1])
-    ] : [0, 0, 0, 0];
-    const boxRaw = [
-      predictions[i].box.topLeft[0] / input.shape[2],
-      predictions[i].box.topLeft[1] / input.shape[1],
-      (predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / input.shape[2],
-      (predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / input.shape[1]
-    ];
+    const landmarks = predictions[i].landmarks;
+    let box4 = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0];
+    let boxRaw = [0, 0, 0, 0];
+    if (landmarks && landmarks.length > 0) {
+      for (const pt of landmarks) {
+        if (pt[0] < box4[0])
+          box4[0] = pt[0];
+        if (pt[1] < box4[1])
+          box4[1] = pt[1];
+        if (pt[0] > box4[2])
+          box4[2] = pt[0];
+        if (pt[1] > box4[3])
+          box4[3] = pt[1];
+      }
+      box4[2] -= box4[0];
+      box4[3] -= box4[1];
+      boxRaw = [box4[0] / input.shape[2], box4[1] / input.shape[1], box4[2] / input.shape[2], box4[3] / input.shape[1]];
+    } else {
+      box4 = predictions[i].box ? [
+        Math.max(0, predictions[i].box.topLeft[0]),
+        Math.max(0, predictions[i].box.topLeft[1]),
+        Math.min(input.shape[2], predictions[i].box.bottomRight[0]) - Math.max(0, predictions[i].box.topLeft[0]),
+        Math.min(input.shape[1], predictions[i].box.bottomRight[1]) - Math.max(0, predictions[i].box.topLeft[1])
+      ] : [0, 0, 0, 0];
+      boxRaw = [
+        predictions[i].box.topLeft[0] / input.shape[2],
+        predictions[i].box.topLeft[1] / input.shape[1],
+        (predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / input.shape[2],
+        (predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / input.shape[1]
+      ];
+    }
     hands.push({ id: i, confidence: Math.round(100 * predictions[i].confidence) / 100, box: box4, boxRaw, landmarks, annotations: annotations3 });
   }
   return hands;
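This is the heart of the commit for hands: the box is now derived from the detected landmarks by taking the min/max over all keypoints, with the detector box kept only as a fallback when no landmarks exist. The same reduction as a small self-contained sketch:

```js
// minimal sketch: derive an [x, y, width, height] box from [x, y] landmark
// points, mirroring the min/max scan the bundled code performs
function boxFromLandmarks(landmarks) {
  let minX = Number.MAX_SAFE_INTEGER;
  let minY = Number.MAX_SAFE_INTEGER;
  let maxX = 0;
  let maxY = 0;
  for (const [x, y] of landmarks) {
    if (x < minX) minX = x;
    if (y < minY) minY = y;
    if (x > maxX) maxX = x;
    if (y > maxY) maxY = y;
  }
  return [minX, minY, maxX - minX, maxY - minY];
}

// hypothetical three-point example: prints [10, 20, 30, 25]
console.log(boxFromLandmarks([[10, 30], [40, 20], [25, 45]]));
```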
@@ -18422,9 +18440,7 @@ var options = {
   useDepth: true,
   useCurves: false,
   bufferedFactor: 2,
-  bufferedOutput: false,
-  useRawBoxes: false,
-  calculateHandBox: true
+  bufferedOutput: false
 };
 var bufferedResult = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
 function point(ctx, x, y, z = 0, localOptions) {
@@ -18533,12 +18549,8 @@ async function face2(inCanvas2, result, drawOptions) {
     ctx.font = localOptions.font;
     ctx.strokeStyle = localOptions.color;
     ctx.fillStyle = localOptions.color;
-    if (localOptions.drawBoxes) {
-      if (localOptions.useRawBoxes)
-        rect(ctx, inCanvas2.width * f.boxRaw[0], inCanvas2.height * f.boxRaw[1], inCanvas2.width * f.boxRaw[2], inCanvas2.height * f.boxRaw[3], localOptions);
-      else
-        rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);
-    }
+    if (localOptions.drawBoxes)
+      rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);
     const labels2 = [];
     labels2.push(`face confidence: ${Math.trunc(100 * f.confidence)}%`);
     if (f.genderConfidence)
@@ -18758,37 +18770,14 @@ async function hand2(inCanvas2, result, drawOptions) {
     if (localOptions.drawBoxes) {
       ctx.strokeStyle = localOptions.color;
       ctx.fillStyle = localOptions.color;
-      let box4;
-      if (!localOptions.calculateHandBox) {
-        box4 = localOptions.useRawBoxes ? h.boxRaw : h.box;
-      } else {
-        box4 = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0];
-        if (h.landmarks && h.landmarks.length > 0) {
-          for (const pt of h.landmarks) {
-            if (pt[0] < box4[0])
-              box4[0] = pt[0];
-            if (pt[1] < box4[1])
-              box4[1] = pt[1];
-            if (pt[0] > box4[2])
-              box4[2] = pt[0];
-            if (pt[1] > box4[3])
-              box4[3] = pt[1];
-          }
-          box4[2] -= box4[0];
-          box4[3] -= box4[1];
-        }
-      }
-      if (localOptions.useRawBoxes)
-        rect(ctx, inCanvas2.width * box4[0], inCanvas2.height * box4[1], inCanvas2.width * box4[2], inCanvas2.height * box4[3], localOptions);
-      else
-        rect(ctx, box4[0], box4[1], box4[2], box4[3], localOptions);
+      rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
       if (localOptions.drawLabels) {
         if (localOptions.shadowColor && localOptions.shadowColor !== "") {
           ctx.fillStyle = localOptions.shadowColor;
-          ctx.fillText("hand", box4[0] + 3, 1 + box4[1] + localOptions.lineHeight, box4[2]);
+          ctx.fillText("hand", h.box[0] + 3, 1 + h.box[1] + localOptions.lineHeight, h.box[2]);
         }
         ctx.fillStyle = localOptions.labelColor;
-        ctx.fillText("hand", box4[0] + 2, 0 + box4[1] + localOptions.lineHeight, box4[2]);
+        ctx.fillText("hand", h.box[0] + 2, 0 + h.box[1] + localOptions.lineHeight, h.box[2]);
       }
       ctx.stroke();
     }
@@ -18849,10 +18838,7 @@ async function object(inCanvas2, result, drawOptions) {
     if (localOptions.drawBoxes) {
       ctx.strokeStyle = localOptions.color;
       ctx.fillStyle = localOptions.color;
-      if (localOptions.useRawBoxes)
-        rect(ctx, inCanvas2.width * h.boxRaw[0], inCanvas2.height * h.boxRaw[1], inCanvas2.width * h.boxRaw[2], inCanvas2.height * h.boxRaw[3], localOptions);
-      else
-        rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
+      rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
       if (localOptions.drawLabels) {
         const label = `${Math.round(100 * h.score)}% ${h.label}`;
         if (localOptions.shadowColor && localOptions.shadowColor !== "") {
@@ -18921,6 +18907,12 @@ function calcBuffered(newResult, localOptions) {
       bufferedResult.hand[i].annotations[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
     }
   }
+  const newPersons = newResult.persons;
+  if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length)
+    bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
+  for (let i = 0; i < newPersons.length; i++) {
+    bufferedResult.persons[i].box = newPersons[i].box.map((box4, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box4) / localOptions.bufferedFactor);
+  }
 }
 async function canvas(inCanvas2, outCanvas2) {
   if (!inCanvas2 || !outCanvas2)
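Every interpolation in `calcBuffered`, including the new person branch above, uses the same exponential moving average: each buffered value moves `1 / bufferedFactor` of the way toward the new value per call, so a factor of 1 snaps immediately and a factor of 2 converges by 50% per frame. Standalone:

```js
// minimal sketch of the buffered interpolation used by calcBuffered:
// blend a stored box toward the newly detected one by 1/bufferedFactor
const bufferedFactor = 2;           // move 50% of the way on each update
let buffered = [0, 0, 100, 100];    // previously displayed box
const detected = [40, 20, 120, 90]; // box from the latest detection
buffered = buffered.map((val, i) => ((bufferedFactor - 1) * val + detected[i]) / bufferedFactor);
console.log(buffered); // [20, 10, 110, 95], halfway toward the new box
```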
@@ -18936,11 +18928,10 @@ async function all(inCanvas2, result, drawOptions) {
     return;
   if (!(inCanvas2 instanceof HTMLCanvasElement))
     return;
-  if (localOptions.bufferedOutput) {
+  if (localOptions.bufferedOutput)
     calcBuffered(result, localOptions);
-  } else {
+  else
     bufferedResult = result;
-  }
   face2(inCanvas2, result.face, localOptions);
   body2(inCanvas2, bufferedResult.body, localOptions);
   hand2(inCanvas2, bufferedResult.hand, localOptions);
@@ -18949,8 +18940,8 @@ async function all(inCanvas2, result, drawOptions) {
 }
 
 // src/persons.ts
-function join2(faces, bodies, hands, gestures) {
-  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E, _F, _G, _H;
+function join2(faces, bodies, hands, gestures, shape) {
+  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
   let id = 0;
   const persons2 = [];
   for (const face5 of faces) {
@@ -18984,12 +18975,23 @@ function join2(faces, bodies, hands, gestures) {
       else if (gesture3["hand"] !== void 0 && gesture3["hand"] === ((_i = (_h = person2.hands) == null ? void 0 : _h.right) == null ? void 0 : _i.id))
         (_j = person2.gestures) == null ? void 0 : _j.push(gesture3);
     }
-    person2.box = [
-      Math.min(((_k = person2.face) == null ? void 0 : _k.box[0]) || Number.MAX_SAFE_INTEGER, ((_l = person2.body) == null ? void 0 : _l.box[0]) || Number.MAX_SAFE_INTEGER, ((_n = (_m = person2.hands) == null ? void 0 : _m.left) == null ? void 0 : _n.box[0]) || Number.MAX_SAFE_INTEGER, ((_p = (_o = person2.hands) == null ? void 0 : _o.right) == null ? void 0 : _p.box[0]) || Number.MAX_SAFE_INTEGER),
-      Math.min(((_q = person2.face) == null ? void 0 : _q.box[1]) || Number.MAX_SAFE_INTEGER, ((_r = person2.body) == null ? void 0 : _r.box[1]) || Number.MAX_SAFE_INTEGER, ((_t = (_s = person2.hands) == null ? void 0 : _s.left) == null ? void 0 : _t.box[1]) || Number.MAX_SAFE_INTEGER, ((_v = (_u = person2.hands) == null ? void 0 : _u.right) == null ? void 0 : _v.box[1]) || Number.MAX_SAFE_INTEGER),
-      Math.max(((_w = person2.face) == null ? void 0 : _w.box[2]) || 0, ((_x = person2.body) == null ? void 0 : _x.box[2]) || 0, ((_z = (_y = person2.hands) == null ? void 0 : _y.left) == null ? void 0 : _z.box[2]) || 0, ((_B = (_A = person2.hands) == null ? void 0 : _A.right) == null ? void 0 : _B.box[2]) || 0),
-      Math.max(((_C = person2.face) == null ? void 0 : _C.box[3]) || 0, ((_D = person2.body) == null ? void 0 : _D.box[3]) || 0, ((_F = (_E = person2.hands) == null ? void 0 : _E.left) == null ? void 0 : _F.box[3]) || 0, ((_H = (_G = person2.hands) == null ? void 0 : _G.right) == null ? void 0 : _H.box[3]) || 0)
-    ];
+    const x = [];
+    const y = [];
+    const extractXY = (box4) => {
+      if (box4 && box4.length === 4) {
+        x.push(box4[0], box4[0] + box4[2]);
+        y.push(box4[1], box4[1] + box4[3]);
+      }
+    };
+    extractXY((_k = person2.face) == null ? void 0 : _k.box);
+    extractXY((_l = person2.body) == null ? void 0 : _l.box);
+    extractXY((_n = (_m = person2.hands) == null ? void 0 : _m.left) == null ? void 0 : _n.box);
+    extractXY((_p = (_o = person2.hands) == null ? void 0 : _o.right) == null ? void 0 : _p.box);
+    const minX = Math.min(...x);
+    const minY = Math.min(...y);
+    person2.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
+    if (shape && shape.length === 4)
+      person2.boxRaw = [person2.box[0] / shape[2], person2.box[1] / shape[1], person2.box[2] / shape[2], person2.box[3] / shape[1]];
     persons2.push(person2);
   }
   return persons2;
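The previous person box mixed per-coordinate mins and maxes of `[x, y, width, height]` tuples, which does not yield a true enclosing box; the rewrite collects the left/right and top/bottom edges of each available component box (face, body, hands), takes the union, and normalizes against the input tensor shape for `boxRaw`. The union step in isolation:

```js
// minimal sketch: union several [x, y, width, height] boxes, skipping
// missing components, then normalize against a hypothetical 1280x720 input
function unionBoxes(boxes) {
  const xs = [];
  const ys = [];
  for (const box of boxes) {
    if (box && box.length === 4) {
      xs.push(box[0], box[0] + box[2]); // left and right edges
      ys.push(box[1], box[1] + box[3]); // top and bottom edges
    }
  }
  const minX = Math.min(...xs);
  const minY = Math.min(...ys);
  return [minX, minY, Math.max(...xs) - minX, Math.max(...ys) - minY];
}

const shape = [1, 720, 1280, 3]; // [batch, height, width, channels]
const box = unionBoxes([[100, 50, 80, 80], undefined, [60, 200, 40, 120]]); // face, no body, one hand
const boxRaw = [box[0] / shape[2], box[1] / shape[1], box[2] / shape[2], box[3] / shape[1]];
console.log(box); // [60, 50, 120, 270]
```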
@@ -20123,10 +20125,8 @@ var Human = class {
         this.perf.object = elapsedTime;
       }
       this.analyze("End Object:");
-      if (this.config.async) {
+      if (this.config.async)
         [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
-      }
-      tf17.dispose(process5.tensor);
       let gestureRes = [];
       if (this.config.gesture.enabled) {
         timeStamp = now();
@@ -20148,9 +20148,11 @@ var Human = class {
         canvas: process5.canvas,
         timestamp: Date.now(),
         get persons() {
-          return join2(faceRes, bodyRes, handRes, gestureRes);
+          var _a;
+          return join2(faceRes, bodyRes, handRes, gestureRes, (_a = process5 == null ? void 0 : process5.tensor) == null ? void 0 : _a.shape);
         }
       };
+      tf17.dispose(process5.tensor);
       resolve(res);
     });
   }
File diff suppressed because one or more lines are too long
@@ -61,14 +61,14 @@
     "@tensorflow/tfjs-layers": "^3.6.0",
     "@tensorflow/tfjs-node": "^3.6.1",
     "@tensorflow/tfjs-node-gpu": "^3.6.1",
-    "@types/node": "^15.6.0",
-    "@typescript-eslint/eslint-plugin": "^4.24.0",
-    "@typescript-eslint/parser": "^4.24.0",
+    "@types/node": "^15.6.1",
+    "@typescript-eslint/eslint-plugin": "^4.25.0",
+    "@typescript-eslint/parser": "^4.25.0",
     "@vladmandic/pilogger": "^0.2.17",
     "canvas": "^2.8.0",
     "chokidar": "^3.5.1",
     "dayjs": "^1.10.4",
-    "esbuild": "^0.12.1",
+    "esbuild": "^0.12.2",
     "eslint": "^7.27.0",
     "eslint-config-airbnb-base": "^14.2.1",
     "eslint-plugin-import": "^2.23.3",
@@ -1,3 +1,8 @@
+/**
+ * Implements Human build process
+ * Used to generate prod builds for releases or by dev server to generate on-the-fly debug builds
+ */
+
 const ts = require('typescript');
 const fs = require('fs');
 const path = require('path');
@@ -1,17 +1,17 @@
-2021-05-24 11:07:05 INFO: @vladmandic/human version 1.9.3
-2021-05-24 11:07:05 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
-2021-05-24 11:07:05 INFO: Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
-2021-05-24 11:07:05 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-24 11:07:05 STATE: Build for: node type: node: {"imports":37,"importBytes":429806,"outputBytes":386407,"outputFiles":"dist/human.node.js"}
-2021-05-24 11:07:05 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-24 11:07:05 STATE: Build for: nodeGPU type: node: {"imports":37,"importBytes":429814,"outputBytes":386411,"outputFiles":"dist/human.node-gpu.js"}
-2021-05-24 11:07:05 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-24 11:07:05 STATE: Build for: nodeWASM type: node: {"imports":37,"importBytes":429881,"outputBytes":386483,"outputFiles":"dist/human.node-wasm.js"}
-2021-05-24 11:07:05 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-24 11:07:05 STATE: Build for: browserNoBundle type: esm: {"imports":37,"importBytes":429908,"outputBytes":236959,"outputFiles":"dist/human.esm-nobundle.js"}
-2021-05-24 11:07:06 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-24 11:07:06 STATE: Build for: browserBundle type: iife: {"imports":37,"importBytes":1539928,"outputBytes":1344893,"outputFiles":"dist/human.js"}
-2021-05-24 11:07:07 STATE: Build for: browserBundle type: esm: {"imports":37,"importBytes":1539928,"outputBytes":1344885,"outputFiles":"dist/human.esm.js"}
-2021-05-24 11:07:07 INFO: Generate types: ["src/human.ts"]
-2021-05-24 11:07:12 INFO: Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
-2021-05-24 11:07:12 INFO: Generate TypeDocs: ["src/human.ts"]
+2021-05-25 08:45:35 INFO: @vladmandic/human version 1.9.3
+2021-05-25 08:45:35 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
+2021-05-25 08:45:35 INFO: Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
+2021-05-25 08:45:35 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-25 08:45:35 STATE: Build for: node type: node: {"imports":37,"importBytes":431051,"outputBytes":385549,"outputFiles":"dist/human.node.js"}
+2021-05-25 08:45:35 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-25 08:45:35 STATE: Build for: nodeGPU type: node: {"imports":37,"importBytes":431059,"outputBytes":385553,"outputFiles":"dist/human.node-gpu.js"}
+2021-05-25 08:45:35 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-25 08:45:35 STATE: Build for: nodeWASM type: node: {"imports":37,"importBytes":431126,"outputBytes":385625,"outputFiles":"dist/human.node-wasm.js"}
+2021-05-25 08:45:35 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-25 08:45:35 STATE: Build for: browserNoBundle type: esm: {"imports":37,"importBytes":431153,"outputBytes":236268,"outputFiles":"dist/human.esm-nobundle.js"}
+2021-05-25 08:45:36 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2478,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-25 08:45:36 STATE: Build for: browserBundle type: iife: {"imports":37,"importBytes":1541173,"outputBytes":1344154,"outputFiles":"dist/human.js"}
+2021-05-25 08:45:36 STATE: Build for: browserBundle type: esm: {"imports":37,"importBytes":1541173,"outputBytes":1344146,"outputFiles":"dist/human.esm.js"}
+2021-05-25 08:45:36 INFO: Generate types: ["src/human.ts"]
+2021-05-25 08:45:41 INFO: Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
+2021-05-25 08:45:41 INFO: Generate TypeDocs: ["src/human.ts"]
@@ -1,3 +1,7 @@
+/**
+ * Creates changelog in markdown format from git log as part of the build process
+ */
+
 const fs = require('fs');
 const path = require('path');
 const dayjs = require('dayjs');
@@ -1,5 +1,5 @@
-/*
-  micro http2 server with file monitoring and automatic app rebuild
+/**
+  Micro http/http2 server with file monitoring and automatic app rebuild
   - can process concurrent http requests
   - monitors specified files and folders for changes
   - triggers library and application rebuild
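The server's file monitoring uses chokidar, which is already among the project's devDependencies in the package.json hunk above; a minimal sketch of that watch-and-rebuild loop, with the watched paths and the rebuild body as placeholder assumptions:

```js
// minimal sketch: watch source folders and trigger a rebuild on any change;
// the 'src' and 'demo' paths and rebuild() body are hypothetical
const chokidar = require('chokidar');

function rebuild(event, changedPath) {
  console.log('change detected:', event, changedPath);
  // invoke the project's build here (e.g. the esbuild-based build script)
}

const watcher = chokidar.watch(['src', 'demo'], { ignoreInitial: true });
watcher.on('all', rebuild);
```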
@@ -1,5 +1,9 @@
 #!/usr/bin/env -S node --no-deprecation --trace-warnings
 
+/**
+ * Helper app that analyzes any TensorFlow SavedModel or GraphModel for inputs and outputs
+ */
+
 const fs = require('fs');
 const path = require('path');
 const log = require('@vladmandic/pilogger');
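A rough sketch of what such an analysis amounts to with tfjs-node, assuming a local graph model at a hypothetical path; the actual helper likely inspects more than the declared signatures:

```js
// minimal sketch: load a graph model and print its declared inputs/outputs;
// 'file://model/model.json' is a hypothetical path
const tf = require('@tensorflow/tfjs-node');

async function analyze() {
  const model = await tf.loadGraphModel('file://model/model.json');
  for (const t of model.inputs) console.log('input: ', t.name, t.shape, t.dtype);
  for (const t of model.outputs) console.log('output:', t.name, t.shape, t.dtype);
}

analyze();
```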
@@ -1,3 +1,8 @@
+/**
+ * Module that analyzes person age
+ * Obsolete
+ */
+
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 
@@ -1,3 +1,7 @@
+/**
+ * FaceMesh & BlazeFace Module entry point
+ */
+
 import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as blazeface from './blazeface';
@@ -1,3 +1,7 @@
+/**
+ * BlazePose Module
+ */
+
 // paper: https://ai.googleblog.com/2020/08/on-device-real-time-body-pose-tracking.html
 
 import { log, join } from '../helpers';
@@ -1,3 +1,7 @@
+/**
+ * Module that implements helper draw functions, exposed as human.draw
+ */
+
 import { TRI468 as triangulation } from '../blazeface/coords';
 import { mergeDeep } from '../helpers';
 import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result';
@@ -22,7 +26,6 @@ import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result'
  * -useCurves: draw polygons as curves or as lines,
  * -bufferedOutput: experimental: allows to call draw methods multiple times for each detection and interpolate results between results thus achieving smoother animations
  * -bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.
- * -useRawBoxes: Boolean: internal: use non-normalized coordinates when performing draw methods,
  */
 export interface DrawOptions {
   color: string,
@@ -42,8 +45,6 @@ export interface DrawOptions {
   useCurves: boolean,
   bufferedOutput: boolean,
   bufferedFactor: number,
-  useRawBoxes: boolean,
-  calculateHandBox: boolean,
 }
 
 export const options: DrawOptions = {
@@ -64,8 +65,6 @@ export const options: DrawOptions = {
   useCurves: <boolean>false,
   bufferedFactor: <number>2,
   bufferedOutput: <boolean>false,
-  useRawBoxes: <boolean>false,
-  calculateHandBox: <boolean>true,
 };
 
 let bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
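With `useRawBoxes` and `calculateHandBox` removed, callers only toggle the remaining options; a sketch of driving the module through its public surface, where the `human` instance, canvas, and result are assumed to come from an earlier `human.detect()` call:

```js
// minimal sketch: draw all results onto a canvas with smoothing enabled;
// 'human', 'canvas' and 'result' are assumed from earlier detection code
const drawOptions = {
  drawBoxes: true,
  drawLabels: true,
  bufferedOutput: true, // interpolate between frames for smoother animation
  bufferedFactor: 2,    // converge 50% toward each new result per frame
};
human.draw.all(canvas, result, drawOptions);
```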
@@ -173,10 +172,7 @@ export async function face(inCanvas: HTMLCanvasElement, result: Array<Face>, dra
     ctx.font = localOptions.font;
     ctx.strokeStyle = localOptions.color;
     ctx.fillStyle = localOptions.color;
-    if (localOptions.drawBoxes) {
-      if (localOptions.useRawBoxes) rect(ctx, inCanvas.width * f.boxRaw[0], inCanvas.height * f.boxRaw[1], inCanvas.width * f.boxRaw[2], inCanvas.height * f.boxRaw[3], localOptions);
-      else rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);
-    }
+    if (localOptions.drawBoxes) rect(ctx, f.box[0], f.box[1], f.box[2], f.box[3], localOptions);
     // silly hack since fillText does not support new line
     const labels:string[] = [];
     labels.push(`face confidence: ${Math.trunc(100 * f.confidence)}%`);
@@ -374,31 +370,14 @@ export async function hand(inCanvas: HTMLCanvasElement, result: Array<Hand>, dra
     if (localOptions.drawBoxes) {
       ctx.strokeStyle = localOptions.color;
       ctx.fillStyle = localOptions.color;
-      let box;
-      if (!localOptions.calculateHandBox) {
-        box = localOptions.useRawBoxes ? h.boxRaw : h.box;
-      } else {
-        box = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0];
-        if (h.landmarks && h.landmarks.length > 0) {
-          for (const pt of h.landmarks) {
-            if (pt[0] < box[0]) box[0] = pt[0];
-            if (pt[1] < box[1]) box[1] = pt[1];
-            if (pt[0] > box[2]) box[2] = pt[0];
-            if (pt[1] > box[3]) box[3] = pt[1];
-          }
-          box[2] -= box[0];
-          box[3] -= box[1];
-        }
-      }
-      if (localOptions.useRawBoxes) rect(ctx, inCanvas.width * box[0], inCanvas.height * box[1], inCanvas.width * box[2], inCanvas.height * box[3], localOptions);
-      else rect(ctx, box[0], box[1], box[2], box[3], localOptions);
+      rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
       if (localOptions.drawLabels) {
        if (localOptions.shadowColor && localOptions.shadowColor !== '') {
           ctx.fillStyle = localOptions.shadowColor;
-          ctx.fillText('hand', box[0] + 3, 1 + box[1] + localOptions.lineHeight, box[2]);
+          ctx.fillText('hand', h.box[0] + 3, 1 + h.box[1] + localOptions.lineHeight, h.box[2]);
         }
         ctx.fillStyle = localOptions.labelColor;
-        ctx.fillText('hand', box[0] + 2, 0 + box[1] + localOptions.lineHeight, box[2]);
+        ctx.fillText('hand', h.box[0] + 2, 0 + h.box[1] + localOptions.lineHeight, h.box[2]);
       }
       ctx.stroke();
     }
@ -457,8 +436,7 @@ export async function object(inCanvas: HTMLCanvasElement, result: Array<Item>, d
|
|||
if (localOptions.drawBoxes) {
|
||||
ctx.strokeStyle = localOptions.color;
|
||||
ctx.fillStyle = localOptions.color;
|
||||
if (localOptions.useRawBoxes) rect(ctx, inCanvas.width * h.boxRaw[0], inCanvas.height * h.boxRaw[1], inCanvas.width * h.boxRaw[2], inCanvas.height * h.boxRaw[3], localOptions);
|
||||
else rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
|
||||
rect(ctx, h.box[0], h.box[1], h.box[2], h.box[3], localOptions);
|
||||
if (localOptions.drawLabels) {
|
||||
const label = `${Math.round(100 * h.score)}% ${h.label}`;
|
||||
if (localOptions.shadowColor && localOptions.shadowColor !== '') {
|
||||
|
@ -481,6 +459,7 @@ export async function person(inCanvas: HTMLCanvasElement, result: Array<Person>,
|
|||
if (!ctx) return;
|
||||
ctx.lineJoin = 'round';
|
||||
ctx.font = localOptions.font;
|
||||
|
||||
for (let i = 0; i < result.length; i++) {
|
||||
if (localOptions.drawBoxes) {
|
||||
ctx.strokeStyle = localOptions.color;
|
||||
|
@ -504,6 +483,7 @@ function calcBuffered(newResult, localOptions) {
|
|||
// if (newResult.timestamp !== bufferedResult?.timestamp) bufferedResult = JSON.parse(JSON.stringify(newResult)); // no need to force update
|
||||
// each record is only updated using deep copy when the number of detected records changes, otherwise it converges by itself
|
||||
|
||||
// interpolate body results
|
||||
if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
|
||||
for (let i = 0; i < newResult.body.length; i++) { // update body: box, boxRaw, keypoints
|
||||
bufferedResult.body[i].box = newResult.body[i].box
|
||||
|
@ -521,6 +501,7 @@ function calcBuffered(newResult, localOptions) {
|
|||
}));
|
||||
}
|
||||
|
||||
// interpolate hand results
|
||||
if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
|
||||
for (let i = 0; i < newResult.hand.length; i++) { // update hand: box, boxRaw, landmarks, annotations
|
||||
bufferedResult.hand[i].box = newResult.hand[i].box
|
||||
|
@ -538,6 +519,14 @@ function calcBuffered(newResult, localOptions) {
|
|||
}
|
||||
}
|
||||
|
||||
// interpolate person results
|
||||
const newPersons = newResult.persons; // trigger getter function
|
||||
if (!bufferedResult.persons || (newPersons.length !== bufferedResult.persons.length)) bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
|
||||
for (let i = 0; i < newPersons.length; i++) { // update person box; we don't update the rest as it's updated by reference anyhow
|
||||
bufferedResult.persons[i].box = newPersons[i].box
|
||||
.map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box) / localOptions.bufferedFactor);
|
||||
}
|
||||
|
||||
// no buffering implemented for face, object, gesture
|
||||
// bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
|
||||
// bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
|
||||
|
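The interpolation used above is a simple exponential-smoothing rule: each call moves the cached value 1/bufferedFactor of the way toward the new detection. A minimal standalone sketch (the function name is chosen here for illustration):

type Box = [number, number, number, number];

// hypothetical helper mirroring the .map() expressions in calcBuffered() above
function interpolateBox(prev: Box, next: Box, bufferedFactor: number): Box {
  return next.map((coord, j) => ((bufferedFactor - 1) * prev[j] + coord) / bufferedFactor) as Box;
}

// with bufferedFactor = 2 the cached box converges 50% per call:
// interpolateBox([0, 0, 100, 100], [10, 10, 100, 100], 2) -> [5, 5, 100, 100]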
@ -555,15 +544,12 @@ export async function all(inCanvas: HTMLCanvasElement, result: Result, drawOptio
|
|||
const localOptions = mergeDeep(options, drawOptions);
|
||||
if (!result || !inCanvas) return;
|
||||
if (!(inCanvas instanceof HTMLCanvasElement)) return;
|
||||
if (localOptions.bufferedOutput) {
|
||||
calcBuffered(result, localOptions);
|
||||
} else {
|
||||
bufferedResult = result;
|
||||
}
|
||||
if (localOptions.bufferedOutput) calcBuffered(result, localOptions); // do results interpolation
|
||||
else bufferedResult = result; // just use results as-is
|
||||
face(inCanvas, result.face, localOptions); // face does not have buffering
|
||||
body(inCanvas, bufferedResult.body, localOptions); // use interpolated results if available
|
||||
hand(inCanvas, bufferedResult.hand, localOptions); // use interpolated results if available
|
||||
// person(inCanvas, bufferedResult.persons, localOptions); // use interpolated results if available
|
||||
gesture(inCanvas, result.gesture, localOptions); // gestures do not have buffering
|
||||
// person(inCanvas, result.persons, localOptions); // use interpolated results if available
|
||||
object(inCanvas, result.object, localOptions); // object detection does not have buffering
|
||||
}
|
||||
|
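For context, a hedged usage sketch of the draw pipeline above; a configured Human instance, a source video element, and a target canvas are assumed:

// hypothetical usage: enable interpolation when calling draw repeatedly per detection
async function drawFrame(video: HTMLVideoElement, canvas: HTMLCanvasElement) {
  const result = await human.detect(video);
  await human.draw.all(canvas, result, { bufferedOutput: true, bufferedFactor: 2 });
  // body and hand are drawn from interpolated results; face, gesture and object are drawn as-is
}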
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* EfficientPose Module
|
||||
*/
|
||||
|
||||
import { log, join } from '../helpers';
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
import { Body } from '../result';
|
||||
|
|
|
@ -1,3 +1,8 @@
|
|||
/**
|
||||
* Module that analyzes face descriptors/embeddings
|
||||
* Obsolete
|
||||
*/
|
||||
|
||||
import { log, join } from '../helpers';
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
import { Tensor, GraphModel } from '../tfjs/types';
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* Emotion Module
|
||||
*/
|
||||
|
||||
import { log, join } from '../helpers';
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
|
||||
|
|
|
@ -1,3 +1,8 @@
|
|||
/**
|
||||
* Module that analyzes person age
|
||||
* Obsolete
|
||||
*/
|
||||
|
||||
import { log, now } from './helpers';
|
||||
import * as facemesh from './blazeface/facemesh';
|
||||
import * as emotion from './emotion/emotion';
|
||||
|
|
|
@ -1,3 +1,9 @@
|
|||
/**
|
||||
* HSE-FaceRes Module
|
||||
* Returns Age, Gender, Descriptor
|
||||
* Implements Face similarity function
|
||||
*/
|
||||
|
||||
import { log, join } from '../helpers';
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
import { Tensor, GraphModel } from '../tfjs/types';
|
||||
|
|
|
@ -1,3 +1,8 @@
|
|||
/**
|
||||
* Module that analyzes person gender
|
||||
* Obsolete
|
||||
*/
|
||||
|
||||
import { log, join } from '../helpers';
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* Gesture detection module
|
||||
*/
|
||||
|
||||
import { Gesture } from '../result';
|
||||
|
||||
export const body = (res): Gesture[] => {
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* HandPose module entry point
|
||||
*/
|
||||
|
||||
import { log, join } from '../helpers';
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
import * as handdetector from './handdetector';
|
||||
|
@ -30,19 +34,35 @@ export async function predict(input, config): Promise<Hand[]> {
|
|||
annotations[key] = meshAnnotations[key].map((index) => predictions[i].landmarks[index]);
|
||||
}
|
||||
}
|
||||
const box: [number, number, number, number] = predictions[i].box ? [
|
||||
Math.max(0, predictions[i].box.topLeft[0]),
|
||||
Math.max(0, predictions[i].box.topLeft[1]),
|
||||
Math.min(input.shape[2], predictions[i].box.bottomRight[0]) - Math.max(0, predictions[i].box.topLeft[0]),
|
||||
Math.min(input.shape[1], predictions[i].box.bottomRight[1]) - Math.max(0, predictions[i].box.topLeft[1]),
|
||||
] : [0, 0, 0, 0];
|
||||
const boxRaw: [number, number, number, number] = [
|
||||
(predictions[i].box.topLeft[0]) / input.shape[2],
|
||||
(predictions[i].box.topLeft[1]) / input.shape[1],
|
||||
(predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / input.shape[2],
|
||||
(predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / input.shape[1],
|
||||
];
|
||||
|
||||
const landmarks = predictions[i].landmarks as number[];
|
||||
|
||||
let box: [number, number, number, number] = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0]; // initialized to extremes so the min/max comparisons below work
|
||||
let boxRaw: [number, number, number, number] = [0, 0, 0, 0];
|
||||
if (landmarks && landmarks.length > 0) { // if we have landmarks, calculate box based on landmarks
|
||||
for (const pt of landmarks) {
|
||||
if (pt[0] < box[0]) box[0] = pt[0];
|
||||
if (pt[1] < box[1]) box[1] = pt[1];
|
||||
if (pt[0] > box[2]) box[2] = pt[0];
|
||||
if (pt[1] > box[3]) box[3] = pt[1];
|
||||
}
|
||||
box[2] -= box[0];
|
||||
box[3] -= box[1];
|
||||
boxRaw = [box[0] / input.shape[2], box[1] / input.shape[1], box[2] / input.shape[2], box[3] / input.shape[1]];
|
||||
} else { // otherwise use box from prediction
|
||||
box = predictions[i].box ? [
|
||||
Math.max(0, predictions[i].box.topLeft[0]),
|
||||
Math.max(0, predictions[i].box.topLeft[1]),
|
||||
Math.min(input.shape[2], predictions[i].box.bottomRight[0]) - Math.max(0, predictions[i].box.topLeft[0]),
|
||||
Math.min(input.shape[1], predictions[i].box.bottomRight[1]) - Math.max(0, predictions[i].box.topLeft[1]),
|
||||
] : [0, 0, 0, 0];
|
||||
boxRaw = [
|
||||
(predictions[i].box.topLeft[0]) / input.shape[2],
|
||||
(predictions[i].box.topLeft[1]) / input.shape[1],
|
||||
(predictions[i].box.bottomRight[0] - predictions[i].box.topLeft[0]) / input.shape[2],
|
||||
(predictions[i].box.bottomRight[1] - predictions[i].box.topLeft[1]) / input.shape[1],
|
||||
];
|
||||
}
|
||||
hands.push({ id: i, confidence: Math.round(100 * predictions[i].confidence) / 100, box, boxRaw, landmarks, annotations });
|
||||
}
|
||||
return hands;
|
||||
|
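The landmark-based box calculation above reduces to a min/max scan over all points; a self-contained sketch of the same technique (helper name assumed, coordinates assumed non-negative as in the code above):

// derive [x, y, width, height] from a list of [x, y] landmark points
function boxFromLandmarks(landmarks: Array<[number, number]>): [number, number, number, number] {
  const box: [number, number, number, number] = [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, 0, 0];
  for (const [x, y] of landmarks) {
    if (x < box[0]) box[0] = x; // left edge
    if (y < box[1]) box[1] = y; // top edge
    if (x > box[2]) box[2] = x; // right edge, converted to width below
    if (y > box[3]) box[3] = y; // bottom edge, converted to height below
  }
  box[2] -= box[0]; // width = right - left
  box[3] -= box[1]; // height = bottom - top
  return box;
}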
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* Simple helper functions used across the codebase
|
||||
*/
|
||||
|
||||
// helper function: join two paths
|
||||
export function join(folder: string, file: string): string {
|
||||
const separator = folder.endsWith('/') ? '' : '/';
|
||||
|
|
15
src/human.ts
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* Human main module
|
||||
*/
|
||||
|
||||
import { log, now, mergeDeep } from './helpers';
|
||||
import { Config, defaults } from './config';
|
||||
import { Result, Gesture } from './result';
|
||||
|
@ -517,10 +521,7 @@ export class Human {
|
|||
this.analyze('End Object:');
|
||||
|
||||
// if async wait for results
|
||||
if (this.config.async) {
|
||||
[faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
|
||||
}
|
||||
tf.dispose(process.tensor);
|
||||
if (this.config.async) [faceRes, bodyRes, handRes, objectRes] = await Promise.all([faceRes, bodyRes, handRes, objectRes]);
|
||||
|
||||
// run gesture analysis last
|
||||
let gestureRes: Gesture[] = [];
|
||||
|
@ -542,8 +543,12 @@ export class Human {
|
|||
performance: this.perf,
|
||||
canvas: process.canvas,
|
||||
timestamp: Date.now(),
|
||||
get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes); },
|
||||
get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes, process?.tensor?.shape); },
|
||||
};
|
||||
|
||||
// finally dispose input tensor
|
||||
tf.dispose(process.tensor);
|
||||
|
||||
// log('Result:', result);
|
||||
resolve(res);
|
||||
});
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* Image Processing module used by Human
|
||||
*/
|
||||
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
import * as fxImage from './imagefx';
|
||||
import { Tensor } from '../tfjs/types';
|
||||
|
|
|
@ -1,7 +1,5 @@
|
|||
/*
|
||||
WebGLImageFilter - MIT Licensed
|
||||
2013, Dominic Szablewski - phoboslab.org
|
||||
<https://github.com/phoboslab/WebGLImageFilter>
|
||||
WebGLImageFilter by Dominic Szablewski: <https://github.com/phoboslab/WebGLImageFilter>
|
||||
*/
|
||||
|
||||
function GLProgram(gl, vertexSource, fragmentSource) {
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* CenterNet object detection module
|
||||
*/
|
||||
|
||||
import { log, join } from '../helpers';
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
import { labels } from './labels';
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
/**
|
||||
* CoCo Labels used by object detection modules
|
||||
*/
|
||||
export const labels = [
|
||||
{ class: 1, label: 'person' },
|
||||
{ class: 2, label: 'bicycle' },
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* NanoDet object detection module
|
||||
*/
|
||||
|
||||
import { log, join } from '../helpers';
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
import { labels } from './labels';
|
||||
|
|
|
@ -1,6 +1,10 @@
|
|||
/**
|
||||
* Module that analyzes existing results and recombines them into a unified person object
|
||||
*/
|
||||
|
||||
import { Face, Body, Hand, Gesture, Person } from './result';
|
||||
|
||||
export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>): Array<Person> {
|
||||
export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>, shape: Array<number> | undefined): Array<Person> {
|
||||
let id = 0;
|
||||
const persons: Array<Person> = [];
|
||||
for (const face of faces) { // person is defined primarily by face and then we append other objects as found
|
||||
|
@ -36,12 +40,27 @@ export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>
|
|||
else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.left?.id) person.gestures?.push(gesture);
|
||||
else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.right?.id) person.gestures?.push(gesture);
|
||||
}
|
||||
person.box = [ // this is incorrect as it should be a calculated value
|
||||
Math.min(person.face?.box[0] || Number.MAX_SAFE_INTEGER, person.body?.box[0] || Number.MAX_SAFE_INTEGER, person.hands?.left?.box[0] || Number.MAX_SAFE_INTEGER, person.hands?.right?.box[0] || Number.MAX_SAFE_INTEGER),
|
||||
Math.min(person.face?.box[1] || Number.MAX_SAFE_INTEGER, person.body?.box[1] || Number.MAX_SAFE_INTEGER, person.hands?.left?.box[1] || Number.MAX_SAFE_INTEGER, person.hands?.right?.box[1] || Number.MAX_SAFE_INTEGER),
|
||||
Math.max(person.face?.box[2] || 0, person.body?.box[2] || 0, person.hands?.left?.box[2] || 0, person.hands?.right?.box[2] || 0),
|
||||
Math.max(person.face?.box[3] || 0, person.body?.box[3] || 0, person.hands?.left?.box[3] || 0, person.hands?.right?.box[3] || 0),
|
||||
];
|
||||
|
||||
// create new overarching box from all boxes belonging to person
|
||||
const x: number[] = [];
|
||||
const y: number[] = [];
|
||||
const extractXY = (box) => { // extract all [x, y] coordinates from boxes [x, y, width, height]
|
||||
if (box && box.length === 4) {
|
||||
x.push(box[0], box[0] + box[2]);
|
||||
y.push(box[1], box[1] + box[3]);
|
||||
}
|
||||
};
|
||||
extractXY(person.face?.box);
|
||||
extractXY(person.body?.box);
|
||||
extractXY(person.hands?.left?.box);
|
||||
extractXY(person.hands?.right?.box);
|
||||
const minX = Math.min(...x);
|
||||
const minY = Math.min(...y);
|
||||
person.box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY]; // create new overarching box
|
||||
|
||||
// when input tensor shape is known we can calculate boxRaw as well
|
||||
if (shape && shape.length === 4) person.boxRaw = [person.box[0] / shape[2], person.box[1] / shape[1], person.box[2] / shape[2], person.box[3] / shape[1]];
|
||||
|
||||
persons.push(person);
|
||||
}
|
||||
return persons;
|
||||
|
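The overarching person box above is the union of all member boxes, optionally normalized by the input tensor shape [batch, height, width, channels]; a minimal sketch of the same calculation (names chosen here for illustration):

type Box = [number, number, number, number];

// merge several [x, y, width, height] boxes into one enclosing box
function enclosingBox(boxes: Box[], shape?: number[]): { box: Box, boxRaw?: Box } {
  const x: number[] = [];
  const y: number[] = [];
  for (const b of boxes) {
    x.push(b[0], b[0] + b[2]); // left and right edges
    y.push(b[1], b[1] + b[3]); // top and bottom edges
  }
  const minX = Math.min(...x);
  const minY = Math.min(...y);
  const box: Box = [minX, minY, Math.max(...x) - minX, Math.max(...y) - minY];
  const boxRaw = (shape && shape.length === 4) ? [box[0] / shape[2], box[1] / shape[1], box[2] / shape[2], box[3] / shape[1]] as Box : undefined;
  return { box, boxRaw };
}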
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* PoseNet module entry point
|
||||
*/
|
||||
|
||||
import { log, join } from '../helpers';
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
import * as poses from './poses';
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* Profiling calculations
|
||||
*/
|
||||
|
||||
import { log } from './helpers';
|
||||
|
||||
export const data = {};
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* Type definitions for Human results
|
||||
*/
|
||||
|
||||
import { Tensor } from '../dist/tfjs.esm.js';
|
||||
|
||||
/** Face results
|
||||
|
@ -176,5 +180,5 @@ export interface Result {
|
|||
/** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
|
||||
readonly timestamp: number,
|
||||
/** getter property that returns unified persons object */
|
||||
readonly persons: Array<Person>,
|
||||
persons: Array<Person>,
|
||||
}
|
||||
|
|
|
@ -1,3 +1,7 @@
|
|||
/**
|
||||
* Embedded sample images used during warmup in dataURL format
|
||||
*/
|
||||
|
||||
// data:image/jpeg;base64,
|
||||
export const face = `
|
||||
/9j/4AAQSkZJRgABAQEAYABgAAD/4QBoRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUA
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
/**
|
||||
* Helper function that returns basic system info
|
||||
*/
|
||||
export function info(): { platform: string, agent: string } {
|
||||
let platform;
|
||||
let agent;
|
||||
|
|
|
@ -1,3 +1,8 @@
|
|||
/**
|
||||
* Custom TFJS backend for Human based on WebGL
|
||||
* Not used by default
|
||||
*/
|
||||
|
||||
import { log } from '../helpers';
|
||||
import * as tf from '../../dist/tfjs.esm.js';
|
||||
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
// wrapper to load tfjs in a single place so version can be changed quickly
|
||||
/**
|
||||
* Creates tfjs bundle used by Human browser build target
|
||||
*/
|
||||
|
||||
// simplified
|
||||
// { modules: 1250, moduleBytes: 4013323, imports: 7, importBytes: 2255, outputBytes: 2991826, outputFiles: 'dist/tfjs.esm.js' }
|
||||
|
|
|
@ -1 +1,4 @@
|
|||
/**
|
||||
* Creates tfjs bundle used by Human node-gpu build target
|
||||
*/
|
||||
export * from '@tensorflow/tfjs-node-gpu';
|
||||
|
|
|
@ -1,2 +1,5 @@
|
|||
/**
|
||||
* Creates tfjs bundle used by Human node-wasm build target
|
||||
*/
|
||||
export * from '@tensorflow/tfjs';
|
||||
export * from '@tensorflow/tfjs-backend-wasm';
|
||||
|
|
|
@ -1 +1,4 @@
|
|||
/**
|
||||
* Creates tfjs bundle used by Human node build target
|
||||
*/
|
||||
export * from '@tensorflow/tfjs-node';
|
||||
|
|
|
@ -1,3 +1,13 @@
|
|||
// export common types
|
||||
/**
|
||||
* Export common TensorFlow types
|
||||
*/
|
||||
|
||||
/**
|
||||
* TensorFlow Tensor type
|
||||
*/
|
||||
export { Tensor } from '@tensorflow/tfjs-core/dist/index';
|
||||
|
||||
/**
|
||||
* TensorFlow GraphModel type
|
||||
*/
|
||||
export { GraphModel } from '@tensorflow/tfjs-converter/dist/index';
|
||||
|
|
|
@ -158,7 +158,7 @@ async function test(Human, inputConfig) {
|
|||
testDetect(second, 'assets/human-sample-body.jpg', 'default'),
|
||||
]);
|
||||
const t1 = process.hrtime.bigint();
|
||||
log('info', 'test complete:', Math.trunc(parseInt((t1 - t0).toString()) / 1000 / 1000), 'ms');
|
||||
log('info', 'test complete:', Math.trunc(Number(t1 - t0) / 1000 / 1000), 'ms');
|
||||
}
|
||||
|
||||
exports.test = test;
|
||||
|
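The timing fix above avoids a string round-trip: process.hrtime.bigint() returns nanoseconds as a BigInt, and Number() converts the difference directly, whereas parseInt((t1 - t0).toString()) reached the same value via an intermediate string. A standalone sketch:

// measure elapsed milliseconds with Node's high-resolution BigInt clock
const t0 = process.hrtime.bigint(); // nanoseconds from an arbitrary fixed origin
// ... work under measurement ...
const t1 = process.hrtime.bigint();
const elapsedMs = Math.trunc(Number(t1 - t0) / 1000 / 1000); // ns -> ms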
|
240
test/test.log
|
@ -1,120 +1,120 @@
|
|||
2021-05-24 11:08:29 [36mINFO: [39m @vladmandic/human version 1.9.3
|
||||
2021-05-24 11:08:29 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v16.0.0
|
||||
2021-05-24 11:08:29 [36mINFO: [39m tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
|
||||
2021-05-24 11:08:29 [36mINFO: [39m test-node.js start
|
||||
2021-05-24 11:08:29 [35mSTATE:[39m test-node.js passed: create human
|
||||
2021-05-24 11:08:29 [36mINFO: [39m test-node.js human version: 1.9.3
|
||||
2021-05-24 11:08:29 [36mINFO: [39m test-node.js platform: linux x64 agent: NodeJS v16.0.0
|
||||
2021-05-24 11:08:29 [36mINFO: [39m test-node.js tfjs version: 3.6.0
|
||||
2021-05-24 11:08:30 [35mSTATE:[39m test-node.js passed: set backend: tensorflow
|
||||
2021-05-24 11:08:30 [35mSTATE:[39m test-node.js passed: load models
|
||||
2021-05-24 11:08:30 [35mSTATE:[39m test-node.js result: defined models: 13 loaded models: 6
|
||||
2021-05-24 11:08:30 [35mSTATE:[39m test-node.js passed: warmup: none default
|
||||
2021-05-24 11:08:31 [35mSTATE:[39m test-node.js passed: warmup: face default
|
||||
2021-05-24 11:08:31 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
|
||||
2021-05-24 11:08:31 [32mDATA: [39m test-node.js result: performance: load: 312 total: 1646
|
||||
2021-05-24 11:08:33 [35mSTATE:[39m test-node.js passed: warmup: body default
|
||||
2021-05-24 11:08:33 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2021-05-24 11:08:33 [32mDATA: [39m test-node.js result: performance: load: 312 total: 1613
|
||||
2021-05-24 11:08:33 [36mINFO: [39m test-node.js test body variants
|
||||
2021-05-24 11:08:34 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-24 11:08:35 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg posenet
|
||||
2021-05-24 11:08:35 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
|
||||
2021-05-24 11:08:35 [32mDATA: [39m test-node.js result: performance: load: 312 total: 983
|
||||
2021-05-24 11:08:36 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-24 11:08:36 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg blazepose
|
||||
2021-05-24 11:08:36 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:08:36 [32mDATA: [39m test-node.js result: performance: load: 312 total: 387
|
||||
2021-05-24 11:08:38 [35mSTATE:[39m test-node.js passed: detect: random default
|
||||
2021-05-24 11:08:38 [32mDATA: [39m test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:08:38 [32mDATA: [39m test-node.js result: performance: load: 312 total: 919
|
||||
2021-05-24 11:08:38 [36mINFO: [39m test-node.js test: first instance
|
||||
2021-05-24 11:08:38 [35mSTATE:[39m test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
||||
2021-05-24 11:08:40 [35mSTATE:[39m test-node.js passed: detect: assets/sample-me.jpg default
|
||||
2021-05-24 11:08:40 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:08:40 [32mDATA: [39m test-node.js result: performance: load: 312 total: 1657
|
||||
2021-05-24 11:08:40 [36mINFO: [39m test-node.js test: second instance
|
||||
2021-05-24 11:08:40 [35mSTATE:[39m test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
||||
2021-05-24 11:08:42 [35mSTATE:[39m test-node.js passed: detect: assets/sample-me.jpg default
|
||||
2021-05-24 11:08:42 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:08:42 [32mDATA: [39m test-node.js result: performance: load: 5 total: 1630
|
||||
2021-05-24 11:08:42 [36mINFO: [39m test-node.js test: concurrent
|
||||
2021-05-24 11:08:42 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
||||
2021-05-24 11:08:42 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
||||
2021-05-24 11:08:43 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-24 11:08:44 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-24 11:08:50 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-face.jpg default
|
||||
2021-05-24 11:08:50 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:08:50 [32mDATA: [39m test-node.js result: performance: load: 312 total: 5886
|
||||
2021-05-24 11:08:50 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-face.jpg default
|
||||
2021-05-24 11:08:50 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:08:50 [32mDATA: [39m test-node.js result: performance: load: 5 total: 5886
|
||||
2021-05-24 11:08:50 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg default
|
||||
2021-05-24 11:08:50 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:08:50 [32mDATA: [39m test-node.js result: performance: load: 312 total: 5886
|
||||
2021-05-24 11:08:50 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg default
|
||||
2021-05-24 11:08:50 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:08:50 [32mDATA: [39m test-node.js result: performance: load: 5 total: 5886
|
||||
2021-05-24 11:08:50 [36mINFO: [39m test-node.js test complete: 20201 ms
|
||||
2021-05-24 11:08:50 [36mINFO: [39m test-node-gpu.js start
|
||||
2021-05-24 11:08:50 [33mWARN: [39m test-node-gpu.js stderr: 2021-05-24 11:08:50.534311: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
|
||||
2021-05-24 11:08:50 [33mWARN: [39m test-node-gpu.js stderr: 2021-05-24 11:08:50.593093: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
|
||||
2021-05-24 11:08:50 [33mWARN: [39m test-node-gpu.js stderr: 2021-05-24 11:08:50.593140: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
|
||||
2021-05-24 11:08:50 [35mSTATE:[39m test-node-gpu.js passed: create human
|
||||
2021-05-24 11:08:50 [36mINFO: [39m test-node-gpu.js human version: 1.9.3
|
||||
2021-05-24 11:08:50 [36mINFO: [39m test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
|
||||
2021-05-24 11:08:50 [36mINFO: [39m test-node-gpu.js tfjs version: 3.6.0
|
||||
2021-05-24 11:08:51 [35mSTATE:[39m test-node-gpu.js passed: set backend: tensorflow
|
||||
2021-05-24 11:08:51 [35mSTATE:[39m test-node-gpu.js passed: load models
|
||||
2021-05-24 11:08:51 [35mSTATE:[39m test-node-gpu.js result: defined models: 13 loaded models: 6
|
||||
2021-05-24 11:08:51 [35mSTATE:[39m test-node-gpu.js passed: warmup: none default
|
||||
2021-05-24 11:08:52 [35mSTATE:[39m test-node-gpu.js passed: warmup: face default
|
||||
2021-05-24 11:08:52 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
|
||||
2021-05-24 11:08:52 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 1664
|
||||
2021-05-24 11:08:54 [35mSTATE:[39m test-node-gpu.js passed: warmup: body default
|
||||
2021-05-24 11:08:54 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2021-05-24 11:08:54 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 1772
|
||||
2021-05-24 11:08:54 [36mINFO: [39m test-node-gpu.js test body variants
|
||||
2021-05-24 11:08:55 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-24 11:08:56 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet
|
||||
2021-05-24 11:08:56 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
|
||||
2021-05-24 11:08:56 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 963
|
||||
2021-05-24 11:08:57 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-24 11:08:57 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg blazepose
|
||||
2021-05-24 11:08:57 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:08:57 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 397
|
||||
2021-05-24 11:08:59 [35mSTATE:[39m test-node-gpu.js passed: detect: random default
|
||||
2021-05-24 11:08:59 [32mDATA: [39m test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:08:59 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 887
|
||||
2021-05-24 11:08:59 [36mINFO: [39m test-node-gpu.js test: first instance
|
||||
2021-05-24 11:08:59 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
||||
2021-05-24 11:09:01 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/sample-me.jpg default
|
||||
2021-05-24 11:09:01 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:09:01 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 1643
|
||||
2021-05-24 11:09:01 [36mINFO: [39m test-node-gpu.js test: second instance
|
||||
2021-05-24 11:09:01 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
||||
2021-05-24 11:09:02 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/sample-me.jpg default
|
||||
2021-05-24 11:09:02 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:09:02 [32mDATA: [39m test-node-gpu.js result: performance: load: 4 total: 1581
|
||||
2021-05-24 11:09:02 [36mINFO: [39m test-node-gpu.js test: concurrent
|
||||
2021-05-24 11:09:03 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
||||
2021-05-24 11:09:03 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
||||
2021-05-24 11:09:04 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-24 11:09:05 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-24 11:09:11 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
|
||||
2021-05-24 11:09:11 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:09:11 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 6184
|
||||
2021-05-24 11:09:11 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
|
||||
2021-05-24 11:09:11 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:09:11 [32mDATA: [39m test-node-gpu.js result: performance: load: 4 total: 6184
|
||||
2021-05-24 11:09:11 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
|
||||
2021-05-24 11:09:11 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:09:11 [32mDATA: [39m test-node-gpu.js result: performance: load: 333 total: 6184
|
||||
2021-05-24 11:09:11 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
|
||||
2021-05-24 11:09:11 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-24 11:09:11 [32mDATA: [39m test-node-gpu.js result: performance: load: 4 total: 6184
|
||||
2021-05-24 11:09:11 [36mINFO: [39m test-node-gpu.js test complete: 20649 ms
|
||||
2021-05-24 11:09:11 [36mINFO: [39m test-node-wasm.js start
|
||||
2021-05-24 11:09:11 [31mERROR:[39m test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
|
||||
2021-05-24 11:09:11 [31mERROR:[39m test-node-wasm.js aborting test
|
||||
2021-05-24 11:09:11 [36mINFO: [39m status: {"passed":46,"failed":1}
|
||||
2021-05-25 08:46:04 [36mINFO: [39m @vladmandic/human version 1.9.3
|
||||
2021-05-25 08:46:04 [36mINFO: [39m User: vlado Platform: linux Arch: x64 Node: v16.0.0
|
||||
2021-05-25 08:46:04 [36mINFO: [39m tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
|
||||
2021-05-25 08:46:04 [36mINFO: [39m test-node.js start
|
||||
2021-05-25 08:46:05 [35mSTATE:[39m test-node.js passed: create human
|
||||
2021-05-25 08:46:05 [36mINFO: [39m test-node.js human version: 1.9.3
|
||||
2021-05-25 08:46:05 [36mINFO: [39m test-node.js platform: linux x64 agent: NodeJS v16.0.0
|
||||
2021-05-25 08:46:05 [36mINFO: [39m test-node.js tfjs version: 3.6.0
|
||||
2021-05-25 08:46:05 [35mSTATE:[39m test-node.js passed: set backend: tensorflow
|
||||
2021-05-25 08:46:05 [35mSTATE:[39m test-node.js passed: load models
|
||||
2021-05-25 08:46:05 [35mSTATE:[39m test-node.js result: defined models: 13 loaded models: 6
|
||||
2021-05-25 08:46:05 [35mSTATE:[39m test-node.js passed: warmup: none default
|
||||
2021-05-25 08:46:07 [35mSTATE:[39m test-node.js passed: warmup: face default
|
||||
2021-05-25 08:46:07 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
|
||||
2021-05-25 08:46:07 [32mDATA: [39m test-node.js result: performance: load: 346 total: 1673
|
||||
2021-05-25 08:46:09 [35mSTATE:[39m test-node.js passed: warmup: body default
|
||||
2021-05-25 08:46:09 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2021-05-25 08:46:09 [32mDATA: [39m test-node.js result: performance: load: 346 total: 1665
|
||||
2021-05-25 08:46:09 [36mINFO: [39m test-node.js test body variants
|
||||
2021-05-25 08:46:10 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-25 08:46:11 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg posenet
|
||||
2021-05-25 08:46:11 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
|
||||
2021-05-25 08:46:11 [32mDATA: [39m test-node.js result: performance: load: 346 total: 1017
|
||||
2021-05-25 08:46:12 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-25 08:46:12 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg blazepose
|
||||
2021-05-25 08:46:12 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:12 [32mDATA: [39m test-node.js result: performance: load: 346 total: 418
|
||||
2021-05-25 08:46:13 [35mSTATE:[39m test-node.js passed: detect: random default
|
||||
2021-05-25 08:46:13 [32mDATA: [39m test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:13 [32mDATA: [39m test-node.js result: performance: load: 346 total: 888
|
||||
2021-05-25 08:46:13 [36mINFO: [39m test-node.js test: first instance
|
||||
2021-05-25 08:46:13 [35mSTATE:[39m test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
||||
2021-05-25 08:46:15 [35mSTATE:[39m test-node.js passed: detect: assets/sample-me.jpg default
|
||||
2021-05-25 08:46:15 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:15 [32mDATA: [39m test-node.js result: performance: load: 346 total: 1761
|
||||
2021-05-25 08:46:15 [36mINFO: [39m test-node.js test: second instance
|
||||
2021-05-25 08:46:16 [35mSTATE:[39m test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
||||
2021-05-25 08:46:17 [35mSTATE:[39m test-node.js passed: detect: assets/sample-me.jpg default
|
||||
2021-05-25 08:46:17 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:17 [32mDATA: [39m test-node.js result: performance: load: 4 total: 1670
|
||||
2021-05-25 08:46:17 [36mINFO: [39m test-node.js test: concurrent
|
||||
2021-05-25 08:46:17 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
||||
2021-05-25 08:46:17 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
||||
2021-05-25 08:46:18 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-25 08:46:19 [35mSTATE:[39m test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-25 08:46:25 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-face.jpg default
|
||||
2021-05-25 08:46:25 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:25 [32mDATA: [39m test-node.js result: performance: load: 346 total: 5919
|
||||
2021-05-25 08:46:25 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-face.jpg default
|
||||
2021-05-25 08:46:25 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:25 [32mDATA: [39m test-node.js result: performance: load: 4 total: 5919
|
||||
2021-05-25 08:46:25 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg default
|
||||
2021-05-25 08:46:25 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:25 [32mDATA: [39m test-node.js result: performance: load: 346 total: 5919
|
||||
2021-05-25 08:46:25 [35mSTATE:[39m test-node.js passed: detect: assets/human-sample-body.jpg default
|
||||
2021-05-25 08:46:25 [32mDATA: [39m test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:25 [32mDATA: [39m test-node.js result: performance: load: 4 total: 5919
|
||||
2021-05-25 08:46:25 [36mINFO: [39m test-node.js test complete: 20499 ms
|
||||
2021-05-25 08:46:25 [36mINFO: [39m test-node-gpu.js start
|
||||
2021-05-25 08:46:26 [33mWARN: [39m test-node-gpu.js stderr: 2021-05-25 08:46:26.515096: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
|
||||
2021-05-25 08:46:26 [33mWARN: [39m test-node-gpu.js stderr: 2021-05-25 08:46:26.635286: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
|
||||
2021-05-25 08:46:26 [33mWARN: [39m test-node-gpu.js stderr: 2021-05-25 08:46:26.635353: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
|
||||
2021-05-25 08:46:26 [35mSTATE:[39m test-node-gpu.js passed: create human
|
||||
2021-05-25 08:46:26 [36mINFO: [39m test-node-gpu.js human version: 1.9.3
|
||||
2021-05-25 08:46:26 [36mINFO: [39m test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
|
||||
2021-05-25 08:46:26 [36mINFO: [39m test-node-gpu.js tfjs version: 3.6.0
|
||||
2021-05-25 08:46:27 [35mSTATE:[39m test-node-gpu.js passed: set backend: tensorflow
|
||||
2021-05-25 08:46:27 [35mSTATE:[39m test-node-gpu.js passed: load models
|
||||
2021-05-25 08:46:27 [35mSTATE:[39m test-node-gpu.js result: defined models: 13 loaded models: 6
|
||||
2021-05-25 08:46:27 [35mSTATE:[39m test-node-gpu.js passed: warmup: none default
|
||||
2021-05-25 08:46:28 [35mSTATE:[39m test-node-gpu.js passed: warmup: face default
|
||||
2021-05-25 08:46:28 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
|
||||
2021-05-25 08:46:28 [32mDATA: [39m test-node-gpu.js result: performance: load: 326 total: 1742
|
||||
2021-05-25 08:46:30 [35mSTATE:[39m test-node-gpu.js passed: warmup: body default
|
||||
2021-05-25 08:46:30 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
|
||||
2021-05-25 08:46:30 [32mDATA: [39m test-node-gpu.js result: performance: load: 326 total: 1599
|
||||
2021-05-25 08:46:30 [36mINFO: [39m test-node-gpu.js test body variants
|
||||
2021-05-25 08:46:31 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-25 08:46:32 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet
|
||||
2021-05-25 08:46:32 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
|
||||
2021-05-25 08:46:32 [32mDATA: [39m test-node-gpu.js result: performance: load: 326 total: 1448
|
||||
2021-05-25 08:46:34 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-25 08:46:34 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg blazepose
|
||||
2021-05-25 08:46:34 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:34 [32mDATA: [39m test-node-gpu.js result: performance: load: 326 total: 627
|
||||
2021-05-25 08:46:35 [35mSTATE:[39m test-node-gpu.js passed: detect: random default
|
||||
2021-05-25 08:46:35 [32mDATA: [39m test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:35 [32mDATA: [39m test-node-gpu.js result: performance: load: 326 total: 258
|
||||
2021-05-25 08:46:35 [36mINFO: [39m test-node-gpu.js test: first instance
|
||||
2021-05-25 08:46:35 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
||||
2021-05-25 08:46:35 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/sample-me.jpg default
|
||||
2021-05-25 08:46:35 [32mDATA: [39m test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:35 [32mDATA: [39m test-node-gpu.js result: performance: load: 326 total: 234
|
||||
2021-05-25 08:46:35 [36mINFO: [39m test-node-gpu.js test: second instance
|
||||
2021-05-25 08:46:36 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
|
||||
2021-05-25 08:46:38 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/sample-me.jpg default
|
||||
2021-05-25 08:46:38 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:38 [32mDATA: [39m test-node-gpu.js result: performance: load: 4 total: 2409
|
||||
2021-05-25 08:46:38 [36mINFO: [39m test-node-gpu.js test: concurrent
|
||||
2021-05-25 08:46:38 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
||||
2021-05-25 08:46:38 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
|
||||
2021-05-25 08:46:39 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-25 08:46:40 [35mSTATE:[39m test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
|
||||
2021-05-25 08:46:50 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
|
||||
2021-05-25 08:46:50 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:50 [32mDATA: [39m test-node-gpu.js result: performance: load: 326 total: 9194
|
||||
2021-05-25 08:46:50 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
|
||||
2021-05-25 08:46:50 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:50 [32mDATA: [39m test-node-gpu.js result: performance: load: 4 total: 9194
|
||||
2021-05-25 08:46:50 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
|
||||
2021-05-25 08:46:50 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:50 [32mDATA: [39m test-node-gpu.js result: performance: load: 326 total: 9194
|
||||
2021-05-25 08:46:50 [35mSTATE:[39m test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
|
||||
2021-05-25 08:46:50 [32mDATA: [39m test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
|
||||
2021-05-25 08:46:50 [32mDATA: [39m test-node-gpu.js result: performance: load: 4 total: 9194
|
||||
2021-05-25 08:46:50 [36mINFO: [39m test-node-gpu.js test complete: 23315 ms
|
||||
2021-05-25 08:46:50 [36mINFO: [39m test-node-wasm.js start
|
||||
2021-05-25 08:46:50 [31mERROR:[39m test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
|
||||
2021-05-25 08:46:50 [31mERROR:[39m test-node-wasm.js aborting test
|
||||
2021-05-25 08:46:50 [36mINFO: [39m status: {"passed":46,"failed":1}
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -84,8 +84,7 @@
|
|||
-useDepth: use z-axis coordinate as color shade,
|
||||
-useCurves: draw polygons as curves or as lines,
|
||||
-bufferedOutput: experimental: allows draw methods to be called multiple times for each detection, interpolating between results to achieve smoother animations
|
||||
-bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.
|
||||
-useRawBoxes: Boolean: internal: use non-normalized coordinates when performing draw methods,</p>
|
||||
-bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.</p>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
@ -106,7 +105,6 @@
|
|||
<ul class="tsd-index-list">
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#bufferedfactor" class="tsd-kind-icon">buffered<wbr>Factor</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#bufferedoutput" class="tsd-kind-icon">buffered<wbr>Output</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#calculatehandbox" class="tsd-kind-icon">calculate<wbr>Hand<wbr>Box</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#color" class="tsd-kind-icon">color</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#drawboxes" class="tsd-kind-icon">draw<wbr>Boxes</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#drawlabels" class="tsd-kind-icon">draw<wbr>Labels</a></li>
|
||||
|
@ -122,7 +120,6 @@
|
|||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#shadowcolor" class="tsd-kind-icon">shadow<wbr>Color</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#usecurves" class="tsd-kind-icon">use<wbr>Curves</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#usedepth" class="tsd-kind-icon">use<wbr>Depth</a></li>
|
||||
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#userawboxes" class="tsd-kind-icon">use<wbr>Raw<wbr>Boxes</a></li>
|
||||
</ul>
|
||||
</section>
|
||||
</div>
|
||||
|
@ -144,13 +141,6 @@
|
|||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="calculatehandbox" class="tsd-anchor"></a>
|
||||
<h3>calculate<wbr>Hand<wbr>Box</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">calculate<wbr>Hand<wbr>Box<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="color" class="tsd-anchor"></a>
|
||||
<h3>color</h3>
|
||||
|
@ -256,13 +246,6 @@
|
|||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="userawboxes" class="tsd-anchor"></a>
|
||||
<h3>use<wbr>Raw<wbr>Boxes</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">use<wbr>Raw<wbr>Boxes<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">boolean</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
</section>
|
||||
</section>
|
||||
</div>
|
||||
<div class="col-4 col-menu menu-sticky-wrap menu-highlight">
|
||||
|
@ -298,9 +281,6 @@
|
|||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="drawoptions.html#bufferedoutput" class="tsd-kind-icon">buffered<wbr>Output</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="drawoptions.html#calculatehandbox" class="tsd-kind-icon">calculate<wbr>Hand<wbr>Box</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="drawoptions.html#color" class="tsd-kind-icon">color</a>
|
||||
</li>
|
||||
|
@ -346,9 +326,6 @@
|
|||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="drawoptions.html#usedepth" class="tsd-kind-icon">use<wbr>Depth</a>
|
||||
</li>
|
||||
<li class=" tsd-kind-property tsd-parent-kind-interface">
|
||||
<a href="drawoptions.html#userawboxes" class="tsd-kind-icon">use<wbr>Raw<wbr>Boxes</a>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
|
|
|
@ -188,7 +188,7 @@
|
|||
</section>
|
||||
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
|
||||
<a name="persons" class="tsd-anchor"></a>
|
||||
<h3><span class="tsd-flag ts-flagReadonly">Readonly</span> persons</h3>
|
||||
<h3>persons</h3>
|
||||
<div class="tsd-signature tsd-kind-icon">persons<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Person</span><span class="tsd-signature-symbol">[]</span></div>
|
||||
<aside class="tsd-sources">
|
||||
</aside>
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
/**
|
||||
* FaceMesh & BlazeFace Module entry point
|
||||
*/
|
||||
export declare function predict(input: any, config: any): Promise<{
|
||||
confidence: any;
|
||||
boxConfidence: any;
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
/**
|
||||
* BlazePose Module
|
||||
*/
|
||||
import { Body } from '../result';
|
||||
export declare function load(config: any): Promise<any>;
|
||||
export declare function predict(image: any, config: any): Promise<Body[]>;
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
/**
|
||||
* Module that implements helper draw functions, exposed as human.draw
|
||||
*/
|
||||
import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result';
|
||||
/**
|
||||
* Draw Options
|
||||
|
@ -19,7 +22,6 @@ import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result'
|
|||
* -useCurves: draw polygons as curves or as lines,
|
||||
* -bufferedOutput: experimental: allows draw methods to be called multiple times for each detection, interpolating between results to achieve smoother animations
|
||||
* -bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.
|
||||
* -useRawBoxes: Boolean: internal: use non-normalized coordinates when performing draw methods,
|
||||
*/
|
||||
export interface DrawOptions {
|
||||
color: string;
|
||||
|
@ -39,8 +41,6 @@ export interface DrawOptions {
|
|||
useCurves: boolean;
|
||||
bufferedOutput: boolean;
|
||||
bufferedFactor: number;
|
||||
useRawBoxes: boolean;
|
||||
calculateHandBox: boolean;
|
||||
}
|
||||
export declare const options: DrawOptions;
|
||||
export declare function gesture(inCanvas: HTMLCanvasElement, result: Array<Gesture>, drawOptions?: DrawOptions): Promise<void>;
|
||||
|
|
|
@ -1,2 +1,5 @@
|
|||
/**
|
||||
* Emotion Module
|
||||
*/
|
||||
export declare function load(config: any): Promise<any>;
|
||||
export declare function predict(image: any, config: any, idx: any, count: any): Promise<unknown>;
|
||||
|
|
|
@ -1,2 +1,6 @@
|
|||
/**
|
||||
* Module that analyzes person age
|
||||
* Obsolete
|
||||
*/
|
||||
import { Face } from './result';
|
||||
export declare const detectFace: (parent: any, input: any) => Promise<Face[]>;
|
||||
|
|
|
@ -1,3 +1,8 @@
|
|||
/**
|
||||
* HSE-FaceRes Module
|
||||
* Returns Age, Gender, Descriptor
|
||||
* Implements Face similarity function
|
||||
*/
|
||||
import { Tensor } from '../tfjs/types';
|
||||
declare type DB = Array<{
|
||||
name: string;
|
||||
|
|
|
@@ -1,3 +1,6 @@
+/**
+ * Gesture detection module
+ */
 import { Gesture } from '../result';
 export declare const body: (res: any) => Gesture[];
 export declare const face: (res: any) => Gesture[];

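Gestures arrive as plain entries in the result object; a minimal sketch:

```ts
import Human from '../dist/human.esm.js';

const human = new Human();

async function logGestures(input: HTMLVideoElement) {
  const result = await human.detect(input);
  // each entry names the detected gesture and the face/body/hand it belongs to
  for (const g of result.gesture) console.log(g);
}
```
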
@@ -1,3 +1,6 @@
+/**
+ * HandPose module entry point
+ */
 import { Hand } from '../result';
 export declare function predict(input: any, config: any): Promise<Hand[]>;
 export declare function load(config: any): Promise<[unknown, unknown]>;

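A sketch of consuming Hand results; the box layout is an assumption, not confirmed by this diff:

```ts
import Human from '../dist/human.esm.js';

const human = new Human({ hand: { enabled: true } });

async function logHands(input: HTMLVideoElement) {
  const result = await human.detect(input);
  for (const hand of result.hand) {
    // box is assumed to be [x, y, width, height] in input pixel coordinates
    console.log(hand.confidence, hand.box);
  }
}
```
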
@@ -1,3 +1,6 @@
+/**
+ * Simple helper functions used across the codebase
+ */
 export declare function join(folder: string, file: string): string;
 export declare function log(...msg: any[]): void;
 export declare const now: () => number;

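now() is a simple timestamp helper; a sketch with a hypothetical import path:

```ts
import { now, log } from './helpers'; // hypothetical path; adjust to the actual module location

const start = now();
// ... run a detection or any other work here ...
log('elapsed ms:', Math.round(now() - start));
```
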
@@ -1,3 +1,6 @@
+/**
+ * Human main module
+ */
 import { Config } from './config';
 import { Result } from './result';
 import * as tf from '../dist/tfjs.esm.js';

@@ -1,3 +1,6 @@
+/**
+ * Image Processing module used by Human
+ */
 import { Tensor } from '../tfjs/types';
 export declare function process(input: any, config: any): {
     tensor: Tensor | null;

@@ -1,3 +1,6 @@
+/**
+ * CenterNet object detection module
+ */
 import { Item } from '../result';
 export declare function load(config: any): Promise<any>;
 export declare function predict(image: any, config: any): Promise<Item[]>;

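A sketch of reading object detection Items; the field names are assumptions based on the Item type, not confirmed by this diff:

```ts
import Human from '../dist/human.esm.js';

const human = new Human({ object: { enabled: true } });

async function logObjects(input: HTMLImageElement) {
  const result = await human.detect(input);
  for (const item of result.object) {
    // each Item is assumed to carry label, score and box fields
    console.log(item.label, item.score, item.box);
  }
}
```
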
@@ -1,3 +1,6 @@
+/**
+ * CoCo Labels used by object detection modules
+ */
 export declare const labels: {
     class: number;
     label: string;

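The labels table maps numeric class ids to CoCo names; a lookup sketch with a hypothetical import path:

```ts
import { labels } from './labels'; // hypothetical path; adjust to the actual module location

// map a detector's numeric class id back to its CoCo label
function labelFor(classId: number): string {
  return labels.find((l) => l.class === classId)?.label ?? 'unknown';
}
```
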
@@ -1,3 +1,6 @@
+/**
+ * NanoDet object detection module
+ */
 import { Item } from '../result';
 export declare function load(config: any): Promise<any>;
 export declare function predict(image: any, config: any): Promise<Item[]>;

@@ -1,2 +1,5 @@
+/**
+ * Module that analyzes existing results and recombines them into a unified person object
+ */
 import { Face, Body, Hand, Gesture, Person } from './result';
-export declare function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>): Array<Person>;
+export declare function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>, shape: Array<number> | undefined): Array<Person>;

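The new shape parameter presumably carries the processed input dimensions so that person boxes can be scaled and clamped during recombination, in line with this commit's box-calculation changes. Consumers normally reach join() through the persons getter rather than calling it directly:

```ts
import Human from '../dist/human.esm.js';

const human = new Human();

async function logPersons(input: HTMLVideoElement) {
  const result = await human.detect(input);
  // the persons getter invokes join() internally, passing the input shape
  for (const person of result.persons) console.log(person.id, person.box);
}
```
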
@@ -1,3 +1,6 @@
+/**
+ * PoseNet module entry point
+ */
 import { Body } from '../result';
 export declare function predict(input: any, config: any): Promise<Body[]>;
 export declare function load(config: any): Promise<any>;

@@ -1,3 +1,6 @@
+/**
+ * Type definitions for Human results
+ */
 import { Tensor } from '../dist/tfjs.esm.js';
 /** Face results
  * Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models

@@ -201,5 +204,5 @@ export interface Result {
     /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
     readonly timestamp: number;
     /** getter property that returns unified persons object */
-    readonly persons: Array<Person>;
+    persons: Array<Person>;
 }

File diff suppressed because one or more lines are too long

@@ -1,3 +1,6 @@
+/**
+ * Helper function that returns basic system info
+ */
 export declare function info(): {
     platform: string;
     agent: string;

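A sketch of calling info(), with a hypothetical import path:

```ts
import { info } from './sysinfo'; // hypothetical path; adjust to the actual module location

const env = info();
console.log(env.platform, env.agent); // platform and user-agent strings per the declaration above
```
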
@@ -1,3 +1,7 @@
+/**
+ * Custom TFJS backend for Human based on WebGL
+ * Not used by default
+ */
 export declare const config: {
     name: string;
     priority: number;

@@ -1,2 +1,11 @@
+/**
+ * Export common TensorFlow types
+ */
+/**
+ * TensorFlow Tensor type
+ */
 export { Tensor } from '@tensorflow/tfjs-core/dist/index';
+/**
+ * TensorFlow GraphModel type
+ */
 export { GraphModel } from '@tensorflow/tfjs-converter/dist/index';

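These re-exports let application code type tensors and models without importing TFJS internals directly; a sketch with a hypothetical consumer-side path:

```ts
import { Tensor, GraphModel } from './tfjs/types'; // hypothetical path; adjust to the actual module location

let model: GraphModel | null = null; // loaded detection model
let output: Tensor | null = null;    // most recent inference output
```
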