implemented unified result.persons that combines face, body and hands for each person

pull/134/head
Vladimir Mandic 2021-05-24 11:10:13 -04:00
parent 100d142cfa
commit e62f18e34e
30 changed files with 1224 additions and 956 deletions
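The gist of the change: for each detected face, the new result.persons getter attaches the matching body, left/right hands, related gestures, and an enclosing box. A hypothetical illustration of one entry's shape, reconstructed from the src/persons.ts join logic visible in the bundled diffs below (field names follow join2(); the exact exported type may differ):

// Hypothetical shape of one result.persons entry, inferred from join2() below
const examplePerson = {
  id: 0,                               // sequential id assigned during the join
  face: { /* entry from result.face */ },
  body: null,                          // result.body entry whose box contains the face box, or null
  hands: { left: null, right: null },  // result.hand entries whose boxes fall inside the body box
  gestures: [],                        // result.gesture entries referencing this face, body, or hands
  box: [0, 0, 0, 0],                   // box derived from the parts above (min origin, max width/height)
};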

View File

@@ -9,8 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
## Changelog
-### **HEAD -> main** 2021/05/23 mandic00@live.com
+### **HEAD -> main** 2021/05/24 mandic00@live.com
+- added experimental results interpolation for smooth draw operations
### **1.9.3** 2021/05/23 mandic00@live.com

View File

@@ -10,7 +10,6 @@ let human;
const userConfig = {
  warmup: 'none',
-  /*
  backend: 'webgl',
  async: false,
  cacheSensitivity: 0,
@@ -25,12 +24,11 @@ const userConfig = {
    description: { enabled: false },
    emotion: { enabled: false },
  },
-  hand: { enabled: false },
-  body: { enabled: false, modelPath: 'posenet.json' },
+  hand: { enabled: true },
+  body: { enabled: true, modelPath: 'posenet.json' },
  // body: { enabled: true, modelPath: 'blazepose.json' },
  object: { enabled: false },
  gesture: { enabled: true },
-  */
};
const drawOptions = {
@@ -229,6 +227,8 @@ async function drawResults(input) {
  // draw all results
  human.draw.all(canvas, result, drawOptions);
+  // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
+  const person = result.persons; // invoke person getter
  /* use individual functions
  human.draw.face(canvas, result.face);
  human.draw.body(canvas, result.body);
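Besides the getter call above, the bundled diffs below also export a new draw.person helper next to the existing draw functions. A minimal browser-side sketch of how the two could be combined (element ids and the loop are illustrative, not part of this commit):

// Minimal sketch; assumes Human is already imported as in the demo above
const human = new Human({ body: { enabled: true }, hand: { enabled: true } });
async function drawLoop() {
  const video = document.getElementById('video');   // illustrative element ids
  const canvas = document.getElementById('canvas');
  const result = await human.detect(video);
  human.draw.all(canvas, result);                    // existing per-module drawing
  human.draw.person(canvas, result.persons);         // new: one box per joined person
  requestAnimationFrame(drawLoop);
}
requestAnimationFrame(drawLoop);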

View File

@@ -100,7 +100,7 @@ async function detect(input) {
    for (let i = 0; i < result.face.length; i++) {
      const face = result.face[i];
      const emotion = face.emotion.reduce((prev, curr) => (prev.score > curr.score ? prev : curr));
-      log.data(` Face: #${i} boxConfidence:${face.boxConfidence} faceConfidence:${face.boxConfidence} age:${face.age} genderConfidence:${face.genderConfidence} gender:${face.gender} emotionScore:${emotion.score} emotion:${emotion.emotion} iris:${face.iris}`);
+      log.data(` Face: #${i} boxConfidence:${face.boxConfidence} faceConfidence:${face.faceConfidence} age:${face.age} genderConfidence:${face.genderConfidence} gender:${face.gender} emotionScore:${emotion.score} emotion:${emotion.emotion} iris:${face.iris}`);
    }
  } else {
    log.data(' Face: N/A');
@@ -137,6 +137,20 @@
  } else {
    log.data(' Object: N/A');
  }
+  // print data to console
+  if (result) {
+    log.data('Persons:');
+    const persons = result.persons;
+    for (let i = 0; i < persons.length; i++) {
+      const face = persons[i].face;
+      const faceTxt = face ? `confidence:${face.confidence} age:${face.age} gender:${face.gender} iris:${face.iris}` : null;
+      const body = persons[i].body;
+      const bodyTxt = body ? `confidence:${body.score} landmarks:${body.keypoints?.length}` : null;
+      log.data(` #${i}: Face:${faceTxt} Body:${bodyTxt} LeftHand:${persons[i].hands.left ? 'yes' : 'no'} RightHand:${persons[i].hands.right ? 'yes' : 'no'} Gestures:${persons[i].gestures.length}`);
+    }
+  }
  return result;
}
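One detail visible in the bundled result object further below: persons is implemented as a getter, so every access re-runs the face/body/hand/gesture join. Reading it once into a local, as the demo above does, avoids repeating that work. A condensed, self-contained sketch of the same consumption pattern (the require path and default export are assumptions; see the demo above for the actual setup):

// Condensed sketch of consuming result.persons in node (require path and export are assumptions)
const Human = require('@vladmandic/human').default;
const human = new Human({ body: { enabled: true }, hand: { enabled: true } });

async function listPersons(input) {
  const result = await human.detect(input);
  const persons = result.persons; // getter: joins face, body, hands and gestures on access
  for (const p of persons) {
    console.log(`#${p.id} face:${!!p.face} body:${!!p.body} leftHand:${!!p.hands.left} rightHand:${!!p.hands.right} gestures:${p.gestures.length}`);
  }
  return persons;
}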

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

474
dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

474
dist/human.js vendored

File diff suppressed because one or more lines are too long

133
dist/human.node-gpu.js vendored
View File

@@ -18402,7 +18402,8 @@ __export(draw_exports, {
  gesture: () => gesture,
  hand: () => hand2,
  object: () => object,
-  options: () => options
+  options: () => options,
+  person: () => person
});
var options = {
  color: "rgba(173, 216, 230, 0.3)",
@@ -18425,7 +18426,7 @@ var options = {
  useRawBoxes: false,
  calculateHandBox: true
};
-var bufferedResult = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0 };
+var bufferedResult = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
function point(ctx, x, y, z = 0, localOptions) {
  ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
  ctx.beginPath();
@@ -18507,8 +18508,8 @@ async function gesture(inCanvas2, result, drawOptions) {
      let what = [];
      [where, what] = Object.entries(result[j]);
      if (what.length > 1 && what[1].length > 0) {
-        const person = where[1] > 0 ? `#${where[1]}` : "";
-        const label = `${where[0]} ${person}: ${what[1]}`;
+        const who = where[1] > 0 ? `#${where[1]}` : "";
+        const label = `${where[0]} ${who}: ${what[1]}`;
        if (localOptions.shadowColor && localOptions.shadowColor !== "") {
          ctx.fillStyle = localOptions.shadowColor;
          ctx.fillText(label, 8, 2 + i * localOptions.lineHeight);
@@ -18865,6 +18866,35 @@ async function object(inCanvas2, result, drawOptions) {
      }
    }
  }
+async function person(inCanvas2, result, drawOptions) {
+  const localOptions = mergeDeep(options, drawOptions);
+  if (!result || !inCanvas2)
+    return;
+  if (!(inCanvas2 instanceof HTMLCanvasElement))
+    return;
+  const ctx = inCanvas2.getContext("2d");
+  if (!ctx)
+    return;
+  ctx.lineJoin = "round";
+  ctx.font = localOptions.font;
+  for (let i = 0; i < result.length; i++) {
+    if (localOptions.drawBoxes) {
+      ctx.strokeStyle = localOptions.color;
+      ctx.fillStyle = localOptions.color;
+      rect(ctx, result[i].box[0], result[i].box[1], result[i].box[2], result[i].box[3], localOptions);
+      if (localOptions.drawLabels) {
+        const label = `person #${i}`;
+        if (localOptions.shadowColor && localOptions.shadowColor !== "") {
+          ctx.fillStyle = localOptions.shadowColor;
+          ctx.fillText(label, result[i].box[0] + 3, 1 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
+        }
+        ctx.fillStyle = localOptions.labelColor;
+        ctx.fillText(label, result[i].box[0] + 2, 0 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
+      }
+      ctx.stroke();
+    }
+  }
+}
function calcBuffered(newResult, localOptions) {
  if (!bufferedResult.body || newResult.body.length !== bufferedResult.body.length)
    bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
@@ -18891,9 +18921,6 @@ function calcBuffered(newResult, localOptions) {
      bufferedResult.hand[i].annotations[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
    }
  }
-  bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
-  bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
-  bufferedResult.gesture = JSON.parse(JSON.stringify(newResult.gesture));
}
async function canvas(inCanvas2, outCanvas2) {
  if (!inCanvas2 || !outCanvas2)
@@ -18914,11 +18941,58 @@ async function all(inCanvas2, result, drawOptions) {
  } else {
    bufferedResult = result;
  }
-  face2(inCanvas2, bufferedResult.face, localOptions);
+  face2(inCanvas2, result.face, localOptions);
  body2(inCanvas2, bufferedResult.body, localOptions);
  hand2(inCanvas2, bufferedResult.hand, localOptions);
-  gesture(inCanvas2, bufferedResult.gesture, localOptions);
-  object(inCanvas2, bufferedResult.object, localOptions);
+  gesture(inCanvas2, result.gesture, localOptions);
+  object(inCanvas2, result.object, localOptions);
+}
+// src/persons.ts
+function join2(faces, bodies, hands, gestures) {
+  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _A, _B, _C, _D, _E, _F, _G, _H;
+  let id = 0;
+  const persons2 = [];
+  for (const face5 of faces) {
+    const person2 = { id: id++, face: face5, body: null, hands: { left: null, right: null }, gestures: [], box: [0, 0, 0, 0] };
+    for (const body4 of bodies) {
+      if (face5.box[0] > body4.box[0] && face5.box[0] < body4.box[0] + body4.box[2] && face5.box[1] + face5.box[3] > body4.box[1] && face5.box[1] + face5.box[3] < body4.box[1] + body4.box[3]) {
+        person2.body = body4;
+      }
+    }
+    if (person2.body) {
+      for (const hand3 of hands) {
+        if (hand3.box[0] + hand3.box[2] > person2.body.box[0] && hand3.box[0] + hand3.box[2] < person2.body.box[0] + person2.body.box[2] && hand3.box[1] + hand3.box[3] > person2.body.box[1] && hand3.box[1] + hand3.box[3] < person2.body.box[1] + person2.body.box[3]) {
+          if (person2.hands)
+            person2.hands.left = hand3;
+        }
+        if (hand3.box[0] < person2.body.box[0] + person2.body.box[2] && hand3.box[0] > person2.body.box[0] && hand3.box[1] + hand3.box[3] > person2.body.box[1] && hand3.box[1] + hand3.box[3] < person2.body.box[1] + person2.body.box[3]) {
+          if (person2.hands)
+            person2.hands.right = hand3;
+        }
+      }
+    }
+    for (const gesture3 of gestures) {
+      if (gesture3["face"] !== void 0 && gesture3["face"] === face5.id)
+        (_a = person2.gestures) == null ? void 0 : _a.push(gesture3);
+      else if (gesture3["iris"] !== void 0 && gesture3["iris"] === face5.id)
+        (_b = person2.gestures) == null ? void 0 : _b.push(gesture3);
+      else if (gesture3["body"] !== void 0 && gesture3["body"] === ((_c = person2.body) == null ? void 0 : _c.id))
+        (_d = person2.gestures) == null ? void 0 : _d.push(gesture3);
+      else if (gesture3["hand"] !== void 0 && gesture3["hand"] === ((_f = (_e = person2.hands) == null ? void 0 : _e.left) == null ? void 0 : _f.id))
+        (_g = person2.gestures) == null ? void 0 : _g.push(gesture3);
+      else if (gesture3["hand"] !== void 0 && gesture3["hand"] === ((_i = (_h = person2.hands) == null ? void 0 : _h.right) == null ? void 0 : _i.id))
+        (_j = person2.gestures) == null ? void 0 : _j.push(gesture3);
+    }
+    person2.box = [
+      Math.min(((_k = person2.face) == null ? void 0 : _k.box[0]) || Number.MAX_SAFE_INTEGER, ((_l = person2.body) == null ? void 0 : _l.box[0]) || Number.MAX_SAFE_INTEGER, ((_n = (_m = person2.hands) == null ? void 0 : _m.left) == null ? void 0 : _n.box[0]) || Number.MAX_SAFE_INTEGER, ((_p = (_o = person2.hands) == null ? void 0 : _o.right) == null ? void 0 : _p.box[0]) || Number.MAX_SAFE_INTEGER),
+      Math.min(((_q = person2.face) == null ? void 0 : _q.box[1]) || Number.MAX_SAFE_INTEGER, ((_r = person2.body) == null ? void 0 : _r.box[1]) || Number.MAX_SAFE_INTEGER, ((_t = (_s = person2.hands) == null ? void 0 : _s.left) == null ? void 0 : _t.box[1]) || Number.MAX_SAFE_INTEGER, ((_v = (_u = person2.hands) == null ? void 0 : _u.right) == null ? void 0 : _v.box[1]) || Number.MAX_SAFE_INTEGER),
+      Math.max(((_w = person2.face) == null ? void 0 : _w.box[2]) || 0, ((_x = person2.body) == null ? void 0 : _x.box[2]) || 0, ((_z = (_y = person2.hands) == null ? void 0 : _y.left) == null ? void 0 : _z.box[2]) || 0, ((_B = (_A = person2.hands) == null ? void 0 : _A.right) == null ? void 0 : _B.box[2]) || 0),
+      Math.max(((_C = person2.face) == null ? void 0 : _C.box[3]) || 0, ((_D = person2.body) == null ? void 0 : _D.box[3]) || 0, ((_F = (_E = person2.hands) == null ? void 0 : _E.left) == null ? void 0 : _F.box[3]) || 0, ((_H = (_G = person2.hands) == null ? void 0 : _G.right) == null ? void 0 : _H.box[3]) || 0)
+    ];
+    persons2.push(person2);
+  }
+  return persons2;
}
// src/sample.ts
@@ -19660,10 +19734,10 @@ var Human = class {
    this.analyze = (...msg) => {
      if (!__privateGet(this, _analyzeMemoryLeaks))
        return;
-      const current = this.tf.engine().state.numTensors;
-      const previous = __privateGet(this, _numTensors);
-      __privateSet(this, _numTensors, current);
-      const leaked = current - previous;
+      const currentTensors = this.tf.engine().state.numTensors;
+      const previousTensors = __privateGet(this, _numTensors);
+      __privateSet(this, _numTensors, currentTensors);
+      const leaked = currentTensors - previousTensors;
      if (leaked !== 0)
        log(...msg, leaked);
    };
@@ -19982,7 +20056,7 @@ var Human = class {
      let bodyRes;
      let handRes;
      let objectRes;
-      let current;
+      let elapsedTime;
      if (this.config.async) {
        faceRes = this.config.face.enabled ? detectFace(this, process5.tensor) : [];
        if (this.perf.face)
@@ -19991,9 +20065,9 @@ var Human = class {
        this.state = "run:face";
        timeStamp = now();
        faceRes = this.config.face.enabled ? await detectFace(this, process5.tensor) : [];
-        current = Math.trunc(now() - timeStamp);
-        if (current > 0)
-          this.perf.face = current;
+        elapsedTime = Math.trunc(now() - timeStamp);
+        if (elapsedTime > 0)
+          this.perf.face = elapsedTime;
      }
      this.analyze("Start Body:");
      if (this.config.async) {
@@ -20010,9 +20084,9 @@ var Human = class {
          bodyRes = this.config.body.enabled ? await predict4(process5.tensor, this.config) : [];
        else if (this.config.body.modelPath.includes("blazepose"))
          bodyRes = this.config.body.enabled ? await predict6(process5.tensor, this.config) : [];
-        current = Math.trunc(now() - timeStamp);
-        if (current > 0)
-          this.perf.body = current;
+        elapsedTime = Math.trunc(now() - timeStamp);
+        if (elapsedTime > 0)
+          this.perf.body = elapsedTime;
      }
      this.analyze("End Body:");
      this.analyze("Start Hand:");
@@ -20024,9 +20098,9 @@ var Human = class {
        this.state = "run:hand";
        timeStamp = now();
        handRes = this.config.hand.enabled ? await predict5(process5.tensor, this.config) : [];
-        current = Math.trunc(now() - timeStamp);
-        if (current > 0)
-          this.perf.hand = current;
+        elapsedTime = Math.trunc(now() - timeStamp);
+        if (elapsedTime > 0)
+          this.perf.hand = elapsedTime;
      }
      this.analyze("End Hand:");
      this.analyze("Start Object:");
@@ -20044,9 +20118,9 @@ var Human = class {
          objectRes = this.config.object.enabled ? await predict7(process5.tensor, this.config) : [];
        else if (this.config.object.modelPath.includes("centernet"))
          objectRes = this.config.object.enabled ? await predict8(process5.tensor, this.config) : [];
-        current = Math.trunc(now() - timeStamp);
-        if (current > 0)
-          this.perf.object = current;
+        elapsedTime = Math.trunc(now() - timeStamp);
+        if (elapsedTime > 0)
+          this.perf.object = elapsedTime;
      }
      this.analyze("End Object:");
      if (this.config.async) {
@@ -20072,7 +20146,10 @@ var Human = class {
        object: objectRes,
        performance: this.perf,
        canvas: process5.canvas,
-        timestamp: Date.now()
+        timestamp: Date.now(),
+        get persons() {
+          return join2(faceRes, bodyRes, handRes, gestureRes);
+        }
      };
      resolve(res);
    });
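The join in src/persons.ts above is purely geometric: a body is attached to a face when the face box's origin and lower edge fall inside the body box, and a hand is attached to that body when the corresponding edge of the hand box falls inside the body box. A stripped-down restatement of those containment tests (boxes are [x, y, width, height] as in the bundled code; this is an illustration, not an exported API):

// Illustration of the containment heuristics used by join2() above; not an exported API
// box format: [x, y, width, height]
function faceBelongsToBody(faceBox, bodyBox) {
  const faceLeftX = faceBox[0];
  const faceBottomY = faceBox[1] + faceBox[3];
  return faceLeftX > bodyBox[0] && faceLeftX < bodyBox[0] + bodyBox[2]
    && faceBottomY > bodyBox[1] && faceBottomY < bodyBox[1] + bodyBox[3];
}
function handSidesWithinBody(handBox, bodyBox) {
  const bottomInY = handBox[1] + handBox[3] > bodyBox[1] && handBox[1] + handBox[3] < bodyBox[1] + bodyBox[3];
  const rightEdgeInX = handBox[0] + handBox[2] > bodyBox[0] && handBox[0] + handBox[2] < bodyBox[0] + bodyBox[2];
  const leftEdgeInX = handBox[0] > bodyBox[0] && handBox[0] < bodyBox[0] + bodyBox[2];
  // join2 assigns hands.left when the hand's right edge is inside the body box,
  // and hands.right when its left edge is; a fully contained hand matches both.
  return { left: bottomInY && rightEdgeInX, right: bottomInY && leftEdgeInX };
}

The per-person box is then built from the componentwise minimum of the part origins and maximum of the part sizes, which is why missing parts contribute Number.MAX_SAFE_INTEGER or 0 in the bundled code above.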


View File

@@ -1,10 +1,13 @@
const ts = require('typescript');
+const fs = require('fs');
const path = require('path');
const log = require('@vladmandic/pilogger');
const esbuild = require('esbuild');
const TypeDoc = require('typedoc');
const changelog = require('./changelog');
+let logFile = 'build.log';
let busy = false;
let td = null;
const banner = { js: `
@@ -290,7 +293,9 @@ async function build(f, msg, dev = false) {
}
if (require.main === module) {
-  log.logFile(path.join(__dirname, 'build.log'));
+  logFile = path.join(__dirname, logFile);
+  if (fs.existsSync(logFile)) fs.unlinkSync(logFile);
+  log.logFile(logFile);
  log.header();
  build('all', 'startup');
} else {
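The net effect of the build-script change above is that every run starts with a fresh build.log instead of appending to the previous one. The same pattern in isolation (pilogger's logFile and header calls as used above; everything else is illustrative):

// Minimal sketch: remove the previous log before attaching the file logger
const fs = require('fs');
const path = require('path');
const log = require('@vladmandic/pilogger');

const logFile = path.join(__dirname, 'build.log');
if (fs.existsSync(logFile)) fs.unlinkSync(logFile); // drop the last run's log so the file starts empty
log.logFile(logFile);                               // pilogger mirrors output to the fresh file
log.header();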

View File

@@ -1,122 +1,17 @@
-2021-05-22 13:15:16 INFO:  @vladmandic/human version 1.9.2
+2021-05-24 11:07:05 INFO:  @vladmandic/human version 1.9.3
-2021-05-22 13:15:16 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
+2021-05-24 11:07:05 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
-2021-05-22 13:15:16 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
+2021-05-24 11:07:05 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
-2021-05-22 13:15:16 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-24 11:07:05 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-22 13:15:16 STATE: Build for: node type: node: {"imports":36,"importBytes":418395,"outputBytes":378042,"outputFiles":"dist/human.node.js"}
+2021-05-24 11:07:05 STATE: Build for: node type: node: {"imports":37,"importBytes":429806,"outputBytes":386407,"outputFiles":"dist/human.node.js"}
-2021-05-22 13:15:16 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-24 11:07:05 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-22 13:15:16 STATE: Build for: nodeGPU type: node: {"imports":36,"importBytes":418403,"outputBytes":378046,"outputFiles":"dist/human.node-gpu.js"}
+2021-05-24 11:07:05 STATE: Build for: nodeGPU type: node: {"imports":37,"importBytes":429814,"outputBytes":386411,"outputFiles":"dist/human.node-gpu.js"}
-2021-05-22 13:15:16 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-24 11:07:05 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-22 13:15:16 STATE: Build for: nodeWASM type: node: {"imports":36,"importBytes":418470,"outputBytes":378118,"outputFiles":"dist/human.node-wasm.js"}
+2021-05-24 11:07:05 STATE: Build for: nodeWASM type: node: {"imports":37,"importBytes":429881,"outputBytes":386483,"outputFiles":"dist/human.node-wasm.js"}
-2021-05-22 13:15:16 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-24 11:07:05 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-22 13:15:16 STATE: Build for: browserNoBundle type: esm: {"imports":36,"importBytes":418497,"outputBytes":232255,"outputFiles":"dist/human.esm-nobundle.js"}
+2021-05-24 11:07:05 STATE: Build for: browserNoBundle type: esm: {"imports":37,"importBytes":429908,"outputBytes":236959,"outputFiles":"dist/human.esm-nobundle.js"}
-2021-05-22 13:15:17 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-24 11:07:06 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-22 13:15:17 STATE: Build for: browserBundle type: iife: {"imports":36,"importBytes":1528517,"outputBytes":1340115,"outputFiles":"dist/human.js"}
+2021-05-24 11:07:06 STATE: Build for: browserBundle type: iife: {"imports":37,"importBytes":1539928,"outputBytes":1344893,"outputFiles":"dist/human.js"}
-2021-05-22 13:15:17 STATE: Build for: browserBundle type: esm: {"imports":36,"importBytes":1528517,"outputBytes":1340107,"outputFiles":"dist/human.esm.js"}
+2021-05-24 11:07:07 STATE: Build for: browserBundle type: esm: {"imports":37,"importBytes":1539928,"outputBytes":1344885,"outputFiles":"dist/human.esm.js"}
-2021-05-22 13:15:17 INFO:  Generate types: ["src/human.ts"]
+2021-05-24 11:07:07 INFO:  Generate types: ["src/human.ts"]
-2021-05-22 13:15:23 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
+2021-05-24 11:07:12 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
-2021-05-22 13:15:23 INFO:  Generate TypeDocs: ["src/human.ts"]
+2021-05-24 11:07:12 INFO:  Generate TypeDocs: ["src/human.ts"]
2021-05-22 14:53:16 INFO:  @vladmandic/human version 1.9.2
2021-05-22 14:53:16 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-22 14:53:16 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-05-22 14:53:16 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 14:53:16 STATE: Build for: node type: node: {"imports":36,"importBytes":419268,"outputBytes":377986,"outputFiles":"dist/human.node.js"}
2021-05-22 14:53:16 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 14:53:16 STATE: Build for: nodeGPU type: node: {"imports":36,"importBytes":419276,"outputBytes":377990,"outputFiles":"dist/human.node-gpu.js"}
2021-05-22 14:53:16 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 14:53:16 STATE: Build for: nodeWASM type: node: {"imports":36,"importBytes":419343,"outputBytes":378062,"outputFiles":"dist/human.node-wasm.js"}
2021-05-22 14:53:16 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 14:53:16 STATE: Build for: browserNoBundle type: esm: {"imports":36,"importBytes":419370,"outputBytes":232264,"outputFiles":"dist/human.esm-nobundle.js"}
2021-05-22 14:53:17 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 14:53:17 STATE: Build for: browserBundle type: iife: {"imports":36,"importBytes":1529390,"outputBytes":1340125,"outputFiles":"dist/human.js"}
2021-05-22 14:53:18 STATE: Build for: browserBundle type: esm: {"imports":36,"importBytes":1529390,"outputBytes":1340117,"outputFiles":"dist/human.esm.js"}
2021-05-22 14:53:18 INFO:  Generate types: ["src/human.ts"]
2021-05-22 14:53:22 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-22 14:53:22 INFO:  Generate TypeDocs: ["src/human.ts"]
2021-05-22 21:43:32 INFO:  @vladmandic/human version 1.9.2
2021-05-22 21:43:32 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-22 21:43:32 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-05-22 21:43:32 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:43:32 STATE: Build for: node type: node: {"imports":36,"importBytes":420766,"outputBytes":378274,"outputFiles":"dist/human.node.js"}
2021-05-22 21:43:32 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:43:32 STATE: Build for: nodeGPU type: node: {"imports":36,"importBytes":420774,"outputBytes":378278,"outputFiles":"dist/human.node-gpu.js"}
2021-05-22 21:43:32 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:43:32 STATE: Build for: nodeWASM type: node: {"imports":36,"importBytes":420841,"outputBytes":378350,"outputFiles":"dist/human.node-wasm.js"}
2021-05-22 21:43:32 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:43:32 STATE: Build for: browserNoBundle type: esm: {"imports":36,"importBytes":420868,"outputBytes":232338,"outputFiles":"dist/human.esm-nobundle.js"}
2021-05-22 21:43:33 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:43:33 STATE: Build for: browserBundle type: iife: {"imports":36,"importBytes":1530888,"outputBytes":1340202,"outputFiles":"dist/human.js"}
2021-05-22 21:43:34 STATE: Build for: browserBundle type: esm: {"imports":36,"importBytes":1530888,"outputBytes":1340194,"outputFiles":"dist/human.esm.js"}
2021-05-22 21:43:34 INFO:  Generate types: ["src/human.ts"]
2021-05-22 21:43:39 ERROR: TSC: /home/vlado/dev/human/src/posenet/poses.ts [130,50]: Object is of type 'unknown'.
2021-05-22 21:43:39 ERROR: TSC: /home/vlado/dev/human/src/posenet/poses.ts [131,46]: Object is of type 'unknown'.
2021-05-22 21:43:39 ERROR: TSC: /home/vlado/dev/human/src/tfjs/types.ts [2,24]: Cannot find module '@tensorflow/tfjs-core/dist/index.js'. Did you mean to set the 'moduleResolution' option to 'node', or to add aliases to the 'paths' option?
2021-05-22 21:43:39 ERROR: TSC: /home/vlado/dev/human/src/tfjs/types.ts [3,28]: Cannot find module '@tensorflow/tfjs-converter/dist/index.js'. Did you mean to set the 'moduleResolution' option to 'node', or to add aliases to the 'paths' option?
2021-05-22 21:43:39 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-22 21:47:04 INFO:  @vladmandic/human version 1.9.2
2021-05-22 21:47:04 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-22 21:47:04 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-05-22 21:47:04 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:47:04 STATE: Build for: node type: node: {"imports":36,"importBytes":420846,"outputBytes":378274,"outputFiles":"dist/human.node.js"}
2021-05-22 21:47:04 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:47:04 STATE: Build for: nodeGPU type: node: {"imports":36,"importBytes":420854,"outputBytes":378278,"outputFiles":"dist/human.node-gpu.js"}
2021-05-22 21:47:04 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:47:04 STATE: Build for: nodeWASM type: node: {"imports":36,"importBytes":420921,"outputBytes":378350,"outputFiles":"dist/human.node-wasm.js"}
2021-05-22 21:47:04 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:47:04 STATE: Build for: browserNoBundle type: esm: {"imports":36,"importBytes":420948,"outputBytes":232338,"outputFiles":"dist/human.esm-nobundle.js"}
2021-05-22 21:47:05 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:47:05 STATE: Build for: browserBundle type: iife: {"imports":36,"importBytes":1530968,"outputBytes":1340202,"outputFiles":"dist/human.js"}
2021-05-22 21:47:06 STATE: Build for: browserBundle type: esm: {"imports":36,"importBytes":1530968,"outputBytes":1340194,"outputFiles":"dist/human.esm.js"}
2021-05-22 21:47:06 INFO:  Generate types: ["src/human.ts"]
2021-05-22 21:47:11 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-22 21:47:11 INFO:  Generate TypeDocs: ["src/human.ts"]
2021-05-22 21:52:11 INFO:  @vladmandic/human version 1.9.2
2021-05-22 21:52:11 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-22 21:52:11 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-05-22 21:52:11 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:52:11 STATE: Build for: node type: node: {"imports":36,"importBytes":420846,"outputBytes":378274,"outputFiles":"dist/human.node.js"}
2021-05-22 21:52:11 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:52:11 STATE: Build for: nodeGPU type: node: {"imports":36,"importBytes":420854,"outputBytes":378278,"outputFiles":"dist/human.node-gpu.js"}
2021-05-22 21:52:11 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:52:11 STATE: Build for: nodeWASM type: node: {"imports":36,"importBytes":420921,"outputBytes":378350,"outputFiles":"dist/human.node-wasm.js"}
2021-05-22 21:52:11 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:52:11 STATE: Build for: browserNoBundle type: esm: {"imports":36,"importBytes":420948,"outputBytes":232338,"outputFiles":"dist/human.esm-nobundle.js"}
2021-05-22 21:52:12 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
2021-05-22 21:52:12 STATE: Build for: browserBundle type: iife: {"imports":36,"importBytes":1530968,"outputBytes":1340202,"outputFiles":"dist/human.js"}
2021-05-22 21:52:13 STATE: Build for: browserBundle type: esm: {"imports":36,"importBytes":1530968,"outputBytes":1340194,"outputFiles":"dist/human.esm.js"}
2021-05-22 21:52:13 INFO:  Generate types: ["src/human.ts"]
2021-05-22 21:52:18 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-22 21:52:18 INFO:  Generate TypeDocs: ["src/human.ts"]
2021-05-23 13:54:48 INFO:  @vladmandic/human version 1.9.3
2021-05-23 13:54:48 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-23 13:54:48 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-05-23 13:54:48 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-05-23 13:54:48 STATE: Build for: node type: node: {"imports":36,"importBytes":425082,"outputBytes":381351,"outputFiles":"dist/human.node.js"}
2021-05-23 13:54:48 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-05-23 13:54:48 STATE: Build for: nodeGPU type: node: {"imports":36,"importBytes":425090,"outputBytes":381355,"outputFiles":"dist/human.node-gpu.js"}
2021-05-23 13:54:48 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-05-23 13:54:48 STATE: Build for: nodeWASM type: node: {"imports":36,"importBytes":425157,"outputBytes":381427,"outputFiles":"dist/human.node-wasm.js"}
2021-05-23 13:54:48 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-05-23 13:54:48 STATE: Build for: browserNoBundle type: esm: {"imports":36,"importBytes":425184,"outputBytes":234133,"outputFiles":"dist/human.esm-nobundle.js"}
2021-05-23 13:54:49 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
2021-05-23 13:54:50 STATE: Build for: browserBundle type: iife: {"imports":36,"importBytes":1535204,"outputBytes":1342022,"outputFiles":"dist/human.js"}
2021-05-23 13:54:50 STATE: Build for: browserBundle type: esm: {"imports":36,"importBytes":1535204,"outputBytes":1342014,"outputFiles":"dist/human.esm.js"}
2021-05-23 13:54:50 INFO:  Generate types: ["src/human.ts"]
2021-05-23 13:54:56 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-23 13:54:56 INFO:  Generate TypeDocs: ["src/human.ts"]
2021-05-24 07:15:57 INFO:  @vladmandic/human version 1.9.3
2021-05-24 07:15:57 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-24 07:15:57 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-05-24 07:15:57 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-05-24 07:15:57 STATE: Build for: node type: node: {"imports":36,"importBytes":424920,"outputBytes":381396,"outputFiles":"dist/human.node.js"}
2021-05-24 07:15:57 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-05-24 07:15:57 STATE: Build for: nodeGPU type: node: {"imports":36,"importBytes":424928,"outputBytes":381400,"outputFiles":"dist/human.node-gpu.js"}
2021-05-24 07:15:57 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-05-24 07:15:57 STATE: Build for: nodeWASM type: node: {"imports":36,"importBytes":424995,"outputBytes":381472,"outputFiles":"dist/human.node-wasm.js"}
2021-05-24 07:15:57 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-05-24 07:15:57 STATE: Build for: browserNoBundle type: esm: {"imports":36,"importBytes":425022,"outputBytes":234163,"outputFiles":"dist/human.esm-nobundle.js"}
2021-05-24 07:15:59 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2488,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
2021-05-24 07:15:59 STATE: Build for: browserBundle type: iife: {"imports":36,"importBytes":1535042,"outputBytes":1342052,"outputFiles":"dist/human.js"}
2021-05-24 07:16:00 STATE: Build for: browserBundle type: esm: {"imports":36,"importBytes":1535042,"outputBytes":1342044,"outputFiles":"dist/human.esm.js"}
2021-05-24 07:16:00 INFO:  Generate types: ["src/human.ts"]
2021-05-24 07:16:07 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-24 07:16:07 INFO:  Generate TypeDocs: ["src/human.ts"]

View File

@ -1,6 +1,6 @@
import { TRI468 as triangulation } from '../blazeface/coords'; import { TRI468 as triangulation } from '../blazeface/coords';
import { mergeDeep } from '../helpers'; import { mergeDeep } from '../helpers';
import type { Result, Face, Body, Hand, Item, Gesture } from '../result'; import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result';
/** /**
* Draw Options * Draw Options
@ -68,7 +68,7 @@ export const options: DrawOptions = {
calculateHandBox: <boolean>true, calculateHandBox: <boolean>true,
}; };
let bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], performance: {}, timestamp: 0 }; let bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
function point(ctx, x, y, z = 0, localOptions) { function point(ctx, x, y, z = 0, localOptions) {
ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + (2 * z)}, ${127.5 - (2 * z)}, 255, 0.3)` : localOptions.color; ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + (2 * z)}, ${127.5 - (2 * z)}, 255, 0.3)` : localOptions.color;
@ -150,8 +150,8 @@ export async function gesture(inCanvas: HTMLCanvasElement, result: Array<Gesture
let what: unknown[] = []; // what&where is a record let what: unknown[] = []; // what&where is a record
[where, what] = Object.entries(result[j]); [where, what] = Object.entries(result[j]);
if ((what.length > 1) && ((what[1] as string).length > 0)) { if ((what.length > 1) && ((what[1] as string).length > 0)) {
const person = where[1] as number > 0 ? `#${where[1]}` : ''; const who = where[1] as number > 0 ? `#${where[1]}` : '';
const label = `${where[0]} ${person}: ${what[1]}`; const label = `${where[0]} ${who}: ${what[1]}`;
if (localOptions.shadowColor && localOptions.shadowColor !== '') { if (localOptions.shadowColor && localOptions.shadowColor !== '') {
ctx.fillStyle = localOptions.shadowColor; ctx.fillStyle = localOptions.shadowColor;
ctx.fillText(label, 8, 2 + (i * localOptions.lineHeight)); ctx.fillText(label, 8, 2 + (i * localOptions.lineHeight));
@ -473,6 +473,33 @@ export async function object(inCanvas: HTMLCanvasElement, result: Array<Item>, d
} }
} }
export async function person(inCanvas: HTMLCanvasElement, result: Array<Person>, drawOptions?: DrawOptions) {
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas) return;
if (!(inCanvas instanceof HTMLCanvasElement)) return;
const ctx = inCanvas.getContext('2d');
if (!ctx) return;
ctx.lineJoin = 'round';
ctx.font = localOptions.font;
for (let i = 0; i < result.length; i++) {
if (localOptions.drawBoxes) {
ctx.strokeStyle = localOptions.color;
ctx.fillStyle = localOptions.color;
rect(ctx, result[i].box[0], result[i].box[1], result[i].box[2], result[i].box[3], localOptions);
if (localOptions.drawLabels) {
const label = `person #${i}`;
if (localOptions.shadowColor && localOptions.shadowColor !== '') {
ctx.fillStyle = localOptions.shadowColor;
ctx.fillText(label, result[i].box[0] + 3, 1 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
}
ctx.fillStyle = localOptions.labelColor;
ctx.fillText(label, result[i].box[0] + 2, 0 + result[i].box[1] + localOptions.lineHeight, result[i].box[2]);
}
ctx.stroke();
}
}
}
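The person() helper above draws the combined per-person bounding box, but it is left commented out in all() further below, so for now it has to be invoked explicitly. A usage sketch, assuming the new export is surfaced on human.draw like the existing draw helpers:

// sketch: run detection, then draw the unified person boxes on top of the canvas
// assumes an initialized Human instance named `human` and an input canvas element
const result = await human.detect(canvas);
await human.draw.person(canvas, result.persons);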
function calcBuffered(newResult, localOptions) { function calcBuffered(newResult, localOptions) {
// if (newResult.timestamp !== bufferedResult?.timestamp) bufferedResult = JSON.parse(JSON.stringify(newResult)); // no need to force update // if (newResult.timestamp !== bufferedResult?.timestamp) bufferedResult = JSON.parse(JSON.stringify(newResult)); // no need to force update
// each record is only updated using deep copy when the number of detected records changes, otherwise it converges by itself // each record is only updated using deep copy when the number of detected records changes, otherwise it converges by itself
@ -512,9 +539,9 @@ function calcBuffered(newResult, localOptions) {
} }
// no buffering implemented for face, object, gesture // no buffering implemented for face, object, gesture
bufferedResult.face = JSON.parse(JSON.stringify(newResult.face)); // bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
bufferedResult.object = JSON.parse(JSON.stringify(newResult.object)); // bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
bufferedResult.gesture = JSON.parse(JSON.stringify(newResult.gesture)); // bufferedResult.gesture = JSON.parse(JSON.stringify(newResult.gesture));
} }
export async function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasElement) { export async function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasElement) {
@ -533,9 +560,10 @@ export async function all(inCanvas: HTMLCanvasElement, result: Result, drawOptio
} else { } else {
bufferedResult = result; bufferedResult = result;
} }
face(inCanvas, bufferedResult.face, localOptions); face(inCanvas, result.face, localOptions); // face does not have buffering
body(inCanvas, bufferedResult.body, localOptions); body(inCanvas, bufferedResult.body, localOptions); // use interpolated results if available
hand(inCanvas, bufferedResult.hand, localOptions); hand(inCanvas, bufferedResult.hand, localOptions); // use interpolated results if available
gesture(inCanvas, bufferedResult.gesture, localOptions); gesture(inCanvas, result.gesture, localOptions); // gestures do not have buffering
object(inCanvas, bufferedResult.object, localOptions); // person(inCanvas, result.persons, localOptions); // use interpolated results if available
object(inCanvas, result.object, localOptions); // object detection does not have buffering
} }
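The buffering used for body and hand above is what produces the smooth draw operations: calcBuffered() keeps the previously drawn result and lets it converge toward the newest detection instead of replacing it outright. A minimal sketch of that idea; the blend factor and per-field handling here are assumptions, not the library's exact math:

// interpolate buffered keypoints a fraction of the way toward the latest detection each frame
type Point = [number, number, number?];

function interpolatePoints(buffered: Point[], latest: Point[], factor = 0.25): Point[] {
  // if the number of detected points changed, restart from the latest detection
  if (buffered.length !== latest.length) return latest.map((p) => [...p] as Point);
  return latest.map((p, i): Point => [
    buffered[i][0] + factor * (p[0] - buffered[i][0]), // x converges toward the new value
    buffered[i][1] + factor * (p[1] - buffered[i][1]), // y converges toward the new value
    p[2], // depth, when present, is taken from the latest result
  ]);
}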

View File

@ -16,6 +16,7 @@ import * as centernet from './object/centernet';
import * as gesture from './gesture/gesture'; import * as gesture from './gesture/gesture';
import * as image from './image/image'; import * as image from './image/image';
import * as draw from './draw/draw'; import * as draw from './draw/draw';
import * as persons from './persons';
import * as sample from './sample'; import * as sample from './sample';
import * as app from '../package.json'; import * as app from '../package.json';
import { Tensor } from './tfjs/types'; import { Tensor } from './tfjs/types';
@ -179,10 +180,10 @@ export class Human {
/** @hidden */ /** @hidden */
analyze = (...msg) => { analyze = (...msg) => {
if (!this.#analyzeMemoryLeaks) return; if (!this.#analyzeMemoryLeaks) return;
const current = this.tf.engine().state.numTensors; const currentTensors = this.tf.engine().state.numTensors;
const previous = this.#numTensors; const previousTensors = this.#numTensors;
this.#numTensors = current; this.#numTensors = currentTensors;
const leaked = current - previous; const leaked = currentTensors - previousTensors;
if (leaked !== 0) log(...msg, leaked); if (leaked !== 0) log(...msg, leaked);
} }
@ -455,7 +456,7 @@ export class Human {
let bodyRes; let bodyRes;
let handRes; let handRes;
let objectRes; let objectRes;
let current; let elapsedTime;
// run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
if (this.config.async) { if (this.config.async) {
@ -465,8 +466,8 @@ export class Human {
this.state = 'run:face'; this.state = 'run:face';
timeStamp = now(); timeStamp = now();
faceRes = this.config.face.enabled ? await face.detectFace(this, process.tensor) : []; faceRes = this.config.face.enabled ? await face.detectFace(this, process.tensor) : [];
current = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
if (current > 0) this.perf.face = current; if (elapsedTime > 0) this.perf.face = elapsedTime;
} }
// run body: can be posenet or blazepose // run body: can be posenet or blazepose
@ -480,8 +481,8 @@ export class Human {
timeStamp = now(); timeStamp = now();
if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(process.tensor, this.config) : []; if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(process.tensor, this.config) : []; else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(process.tensor, this.config) : [];
current = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
if (current > 0) this.perf.body = current; if (elapsedTime > 0) this.perf.body = elapsedTime;
} }
this.analyze('End Body:'); this.analyze('End Body:');
@ -494,8 +495,8 @@ export class Human {
this.state = 'run:hand'; this.state = 'run:hand';
timeStamp = now(); timeStamp = now();
handRes = this.config.hand.enabled ? await handpose.predict(process.tensor, this.config) : []; handRes = this.config.hand.enabled ? await handpose.predict(process.tensor, this.config) : [];
current = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
if (current > 0) this.perf.hand = current; if (elapsedTime > 0) this.perf.hand = elapsedTime;
} }
this.analyze('End Hand:'); this.analyze('End Hand:');
@ -510,8 +511,8 @@ export class Human {
timeStamp = now(); timeStamp = now();
if (this.config.object.modelPath.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(process.tensor, this.config) : []; if (this.config.object.modelPath.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(process.tensor, this.config) : [];
else if (this.config.object.modelPath.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(process.tensor, this.config) : []; else if (this.config.object.modelPath.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(process.tensor, this.config) : [];
current = Math.trunc(now() - timeStamp); elapsedTime = Math.trunc(now() - timeStamp);
if (current > 0) this.perf.object = current; if (elapsedTime > 0) this.perf.object = elapsedTime;
} }
this.analyze('End Object:'); this.analyze('End Object:');
@ -541,6 +542,7 @@ export class Human {
performance: this.perf, performance: this.perf,
canvas: process.canvas, canvas: process.canvas,
timestamp: Date.now(), timestamp: Date.now(),
get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes); },
}; };
// log('Result:', result); // log('Result:', result);
resolve(res); resolve(res);
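Note that persons is attached to the result object as a getter, so the face/body/hand/gesture join only runs when the property is actually read, not on every detect() call. A small self-contained sketch of the same lazy-getter pattern; the names are illustrative, not the library's internals:

// expose an expensive combination as a getter instead of precomputing it
function buildResult<T, U>(parts: T[], join: (parts: T[]) => U) {
  return {
    parts,
    timestamp: Date.now(),
    get joined() { return join(parts); }, // evaluated on access, not when the object was built
  };
}

// the join callback runs at the property access below, not when buildResult() returned
const res = buildResult(['face', 'body', 'hand'], (p) => p.map((x) => x.toUpperCase()));
console.log(res.joined); // ["FACE", "BODY", "HAND"]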

48 src/persons.ts Normal file
View File

@ -0,0 +1,48 @@
import { Face, Body, Hand, Gesture, Person } from './result';
export function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>): Array<Person> {
let id = 0;
const persons: Array<Person> = [];
for (const face of faces) { // person is defined primarily by face and then we append other objects as found
const person: Person = { id: id++, face, body: null, hands: { left: null, right: null }, gestures: [], box: [0, 0, 0, 0] };
for (const body of bodies) {
if (face.box[0] > body.box[0] // x within body
&& face.box[0] < body.box[0] + body.box[2]
&& face.box[1] + face.box[3] > body.box[1] // y within body
&& face.box[1] + face.box[3] < body.box[1] + body.box[3]) {
person.body = body;
}
}
if (person.body) { // only try to join hands if body is found
for (const hand of hands) {
if (hand.box[0] + hand.box[2] > person.body.box[0] // x within body for left hand
&& hand.box[0] + hand.box[2] < person.body.box[0] + person.body.box[2]
&& hand.box[1] + hand.box[3] > person.body.box[1] // y within body for left hand
&& hand.box[1] + hand.box[3] < person.body.box[1] + person.body.box[3]) {
if (person.hands) person.hands.left = hand;
}
if (hand.box[0] < person.body.box[0] + person.body.box[2] // x within body for right hand
&& hand.box[0] > person.body.box[0]
&& hand.box[1] + hand.box[3] > person.body.box[1] // y within body for right hand
&& hand.box[1] + hand.box[3] < person.body.box[1] + person.body.box[3]) {
if (person.hands) person.hands.right = hand;
}
}
}
for (const gesture of gestures) { // append all gestures according to ids
if (gesture['face'] !== undefined && gesture['face'] === face.id) person.gestures?.push(gesture);
else if (gesture['iris'] !== undefined && gesture['iris'] === face.id) person.gestures?.push(gesture);
else if (gesture['body'] !== undefined && gesture['body'] === person.body?.id) person.gestures?.push(gesture);
else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.left?.id) person.gestures?.push(gesture);
else if (gesture['hand'] !== undefined && gesture['hand'] === person.hands?.right?.id) person.gestures?.push(gesture);
}
person.box = [ // this is incorrect: it should be a calculated union of the part boxes (see the sketch after this function)
Math.min(person.face?.box[0] || Number.MAX_SAFE_INTEGER, person.body?.box[0] || Number.MAX_SAFE_INTEGER, person.hands?.left?.box[0] || Number.MAX_SAFE_INTEGER, person.hands?.right?.box[0] || Number.MAX_SAFE_INTEGER),
Math.min(person.face?.box[1] || Number.MAX_SAFE_INTEGER, person.body?.box[1] || Number.MAX_SAFE_INTEGER, person.hands?.left?.box[1] || Number.MAX_SAFE_INTEGER, person.hands?.right?.box[1] || Number.MAX_SAFE_INTEGER),
Math.max(person.face?.box[2] || 0, person.body?.box[2] || 0, person.hands?.left?.box[2] || 0, person.hands?.right?.box[2] || 0),
Math.max(person.face?.box[3] || 0, person.body?.box[3] || 0, person.hands?.left?.box[3] || 0, person.hands?.right?.box[3] || 0),
];
persons.push(person);
}
return persons;
}
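As the comment inside join() notes, person.box is not a true union yet: it mixes the minimum x/y of the parts with their maximum width/height, so the box can end up too small or misplaced. A sketch of a correct enclosing box computed from whichever part boxes are present (an illustration, not the library's code):

// union bounding box over the detected parts; boxes are [x, y, width, height]
function unionBox(boxes: Array<[number, number, number, number] | undefined | null>): [number, number, number, number] {
  const present = boxes.filter((b): b is [number, number, number, number] => Array.isArray(b));
  if (present.length === 0) return [0, 0, 0, 0];
  const x0 = Math.min(...present.map((b) => b[0]));
  const y0 = Math.min(...present.map((b) => b[1]));
  const x1 = Math.max(...present.map((b) => b[0] + b[2]));
  const y1 = Math.max(...present.map((b) => b[1] + b[3]));
  return [x0, y0, x1 - x0, y1 - y0]; // width and height derived from the opposite corners
}

// usage: unionBox([person.face?.box, person.body?.box, person.hands.left?.box, person.hands.right?.box])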

View File

@ -4,9 +4,8 @@ import { Tensor } from '../dist/tfjs.esm.js';
* Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models * Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
* Some values may be null if specific model is not enabled * Some values may be null if specific model is not enabled
* *
* Array of individual results with one object per detected face
* Each result has: * Each result has:
* - id: face number * - id: face id number
* - confidence: overall detection confidence value * - confidence: overall detection confidence value
* - boxConfidence: face box detection confidence value * - boxConfidence: face box detection confidence value
* - faceConfidence: face keypoints detection confidence value * - faceConfidence: face keypoints detection confidence value
@ -52,7 +51,6 @@ export interface Face {
/** Body results /** Body results
* *
* Array of individual results with one object per detected body
* Each result has: * Each result has:
* - id: body id number * - id: body id number
* - score: overall detection score * - score: overall detection score
@ -80,13 +78,13 @@ export interface Body {
/** Hand results /** Hand results
* *
* Array of individual results with one object per detected hand
* Each result has: * Each result has:
* - confidence as value * - id: hand id number
* - box as array of [x, y, width, height], normalized to image resolution * - confidence: detection confidence score as value
* - boxRaw as array of [x, y, width, height], normalized to range 0..1 * - box: bounding box: x, y, width, height normalized to input image resolution
* - landmarks as array of [x, y, z] points of hand, normalized to image resolution * - boxRaw: bounding box: x, y, width, height normalized to 0..1
* - annotations as array of annotated face landmark points * - landmarks: landmarks as array of [x, y, z] points of hand, normalized to image resolution
* - annotations: annotated landmarks for each hand part
*/ */
export interface Hand { export interface Hand {
id: number, id: number,
@ -101,12 +99,13 @@ export interface Hand {
* *
* Array of individual results with one object per detected gesture * Array of individual results with one object per detected gesture
* Each result has: * Each result has:
* - id: object id number
* - score as value * - score as value
* - label as detected class name * - label as detected class name
* - center as array of [x, y], normalized to image resolution * - box: bounding box: x, y, width, height normalized to input image resolution
* - centerRaw as array of [x, y], normalized to range 0..1 * - boxRaw: bounding box: x, y, width, height normalized to 0..1
* - box as array of [x, y, width, height], normalized to image resolution * - center: optional center point as array of [x, y], normalized to image resolution
* - boxRaw as array of [x, y, width, height], normalized to range 0..1 * - centerRaw: optional center point as array of [x, y], normalized to range 0..1
*/ */
export interface Item { export interface Item {
id: number, id: number,
@ -133,6 +132,27 @@ export type Gesture =
| { 'body': number, gesture: string } | { 'body': number, gesture: string }
| { 'hand': number, gesture: string } | { 'hand': number, gesture: string }
/** Person getter
*
* Each result has:
* - id: person id
* - face: face object
* - body: body object
* - hands: left and right hand objects (either may be null)
* - gestures: array of gestures
* - box: bounding box: x, y, width, height normalized to input image resolution
* - boxRaw: bounding box: x, y, width, height normalized to 0..1
*/
export interface Person {
id: number,
face: Face,
body: Body | null,
hands: { left: Hand | null, right: Hand | null },
gestures: Array<Gesture>,
box: [number, number, number, number],
boxRaw?: [number, number, number, number],
}
/** /**
* Result interface definition for **Human** library * Result interface definition for **Human** library
* *
@ -149,7 +169,12 @@ export interface Result {
gesture: Array<Gesture>, gesture: Array<Gesture>,
/** {@link Object}: detection & analysis results */ /** {@link Object}: detection & analysis results */
object: Array<Item> object: Array<Item>
performance: Record<string, unknown>, /** global performance object with timing values for each operation */
canvas?: OffscreenCanvas | HTMLCanvasElement, readonly performance: Record<string, unknown>,
timestamp: number, /** optional processed canvas that can be used to draw input on screen */
readonly canvas?: OffscreenCanvas | HTMLCanvasElement,
/** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
readonly timestamp: number,
/** getter property that returns unified persons object */
readonly persons: Array<Person>,
} }
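Because body and either hand may be null on a Person, consumers should narrow before dereferencing. A hedged sketch of filtering result.persons down to entries where both a face and a body were matched, using the Person and Body interfaces defined above:

import type { Person, Body } from './result';

// keep only persons where the join found a body; hands may still be null afterwards
function fullyMatched(persons: Person[]): Array<Person & { body: Body }> {
  return persons.filter((p): p is Person & { body: Body } => p.body !== null);
}

// usage, assuming `result` came from human.detect():
// const matched = fullyMatched(result.persons);
// if (matched.length > 0) console.log(matched[0].body.keypoints.length);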

View File

@ -54,7 +54,8 @@ function printResults(detect) {
const person = (detect.face && detect.face[0]) ? { confidence: detect.face[0].confidence, age: detect.face[0].age, gender: detect.face[0].gender } : {}; const person = (detect.face && detect.face[0]) ? { confidence: detect.face[0].confidence, age: detect.face[0].age, gender: detect.face[0].gender } : {};
const object = (detect.object && detect.object[0]) ? { score: detect.object[0].score, class: detect.object[0].label } : {}; const object = (detect.object && detect.object[0]) ? { score: detect.object[0].score, class: detect.object[0].label } : {};
const body = (detect.body && detect.body[0]) ? { score: detect.body[0].score, keypoints: detect.body[0].keypoints.length } : {}; const body = (detect.body && detect.body[0]) ? { score: detect.body[0].score, keypoints: detect.body[0].keypoints.length } : {};
if (detect.face) log('data', ' result: face:', detect.face?.length, 'body:', detect.body?.length, 'hand:', detect.hand?.length, 'gesture:', detect.gesture?.length, 'object:', detect.object?.length, person, object, body); const persons = detect.persons;
if (detect.face) log('data', ' result: face:', detect.face?.length, 'body:', detect.body?.length, 'hand:', detect.hand?.length, 'gesture:', detect.gesture?.length, 'object:', detect.object?.length, 'person:', persons.length, person, object, body);
if (detect.performance) log('data', ' result: performance:', 'load:', detect?.performance.load, 'total:', detect.performance?.total); if (detect.performance) log('data', ' result: performance:', 'load:', detect?.performance.load, 'total:', detect.performance?.total);
} }

View File

@ -1,8 +1,11 @@
const fs = require('fs');
const path = require('path'); const path = require('path');
const process = require('process'); const process = require('process');
const { fork } = require('child_process'); const { fork } = require('child_process');
const log = require('@vladmandic/pilogger'); const log = require('@vladmandic/pilogger');
let logFile = 'test.log';
const tests = [ const tests = [
'test-node.js', 'test-node.js',
'test-node-gpu.js', 'test-node-gpu.js',
@ -57,7 +60,9 @@ async function runTest(test) {
} }
async function testAll() { async function testAll() {
log.logFile(path.join(__dirname, 'test.log')); logFile = path.join(__dirname, logFile);
if (fs.existsSync(logFile)) fs.unlinkSync(logFile);
log.logFile(logFile);
log.header(); log.header();
process.on('unhandledRejection', (data) => log.error('nodejs unhandled rejection', data)); process.on('unhandledRejection', (data) => log.error('nodejs unhandled rejection', data));
process.on('uncaughtException', (data) => log.error('nodejs unhandled exception', data)); process.on('uncaughtException', (data) => log.error('nodejs unhandled exception', data));

View File

@ -1,169 +1,120 @@
2021-05-22 21:52:51 INFO:  @vladmandic/human version 1.9.2 2021-05-24 11:08:29 INFO:  @vladmandic/human version 1.9.3
2021-05-22 21:52:51 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0 2021-05-24 11:08:29 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-22 21:52:51 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"] 2021-05-24 11:08:29 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-05-22 21:52:51 INFO:  test-node.js start 2021-05-24 11:08:29 INFO:  test-node.js start
2021-05-22 21:52:52 STATE: test-node.js passed: create human 2021-05-24 11:08:29 STATE: test-node.js passed: create human
2021-05-22 21:52:52 INFO:  test-node.js human version: 1.9.2 2021-05-24 11:08:29 INFO:  test-node.js human version: 1.9.3
2021-05-22 21:52:52 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.0.0 2021-05-24 11:08:29 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-22 21:52:52 INFO:  test-node.js tfjs version: 3.6.0 2021-05-24 11:08:29 INFO:  test-node.js tfjs version: 3.6.0
2021-05-22 21:52:52 STATE: test-node.js passed: set backend: tensorflow 2021-05-24 11:08:30 STATE: test-node.js passed: set backend: tensorflow
2021-05-22 21:52:52 STATE: test-node.js passed: load models 2021-05-24 11:08:30 STATE: test-node.js passed: load models
2021-05-22 21:52:52 STATE: test-node.js result: defined models: 13 loaded models: 6 2021-05-24 11:08:30 STATE: test-node.js result: defined models: 13 loaded models: 6
2021-05-22 21:52:52 STATE: test-node.js passed: warmup: none default 2021-05-24 11:08:30 STATE: test-node.js passed: warmup: none default
2021-05-22 21:52:54 STATE: test-node.js passed: warmup: face default 2021-05-24 11:08:31 STATE: test-node.js passed: warmup: face default
2021-05-22 21:52:54 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5} 2021-05-24 11:08:31 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
2021-05-22 21:52:54 DATA:  test-node.js result: performance: load: 400 total: 1668 2021-05-24 11:08:31 DATA:  test-node.js result: performance: load: 312 total: 1646
2021-05-22 21:52:55 STATE: test-node.js passed: warmup: body default 2021-05-24 11:08:33 STATE: test-node.js passed: warmup: body default
2021-05-22 21:52:55 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17} 2021-05-24 11:08:33 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
2021-05-22 21:52:55 DATA:  test-node.js result: performance: load: 400 total: 1611 2021-05-24 11:08:33 DATA:  test-node.js result: performance: load: 312 total: 1613
2021-05-22 21:52:55 INFO:  test-node.js test body variants 2021-05-24 11:08:33 INFO:  test-node.js test body variants
2021-05-22 21:52:56 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-24 11:08:34 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:52:57 STATE: test-node.js passed: detect: assets/human-sample-body.jpg posenet 2021-05-24 11:08:35 STATE: test-node.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-22 21:52:57 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17} 2021-05-24 11:08:35 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
2021-05-22 21:52:57 DATA:  test-node.js result: performance: load: 400 total: 1031 2021-05-24 11:08:35 DATA:  test-node.js result: performance: load: 312 total: 983
2021-05-22 21:52:58 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-24 11:08:36 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:52:59 STATE: test-node.js passed: detect: assets/human-sample-body.jpg blazepose 2021-05-24 11:08:36 STATE: test-node.js passed: detect: assets/human-sample-body.jpg blazepose
2021-05-22 21:52:59 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:08:36 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:52:59 DATA:  test-node.js result: performance: load: 400 total: 466 2021-05-24 11:08:36 DATA:  test-node.js result: performance: load: 312 total: 387
2021-05-22 21:53:00 STATE: test-node.js passed: detect: random default 2021-05-24 11:08:38 STATE: test-node.js passed: detect: random default
2021-05-22 21:53:00 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 {} {} {"score":1,"keypoints":39} 2021-05-24 11:08:38 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":1,"keypoints":39}
2021-05-22 21:53:00 DATA:  test-node.js result: performance: load: 400 total: 921 2021-05-24 11:08:38 DATA:  test-node.js result: performance: load: 312 total: 919
2021-05-22 21:53:00 INFO:  test-node.js test: first instance 2021-05-24 11:08:38 INFO:  test-node.js test: first instance
2021-05-22 21:53:00 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3] 2021-05-24 11:08:38 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-22 21:53:02 STATE: test-node.js passed: detect: assets/sample-me.jpg default 2021-05-24 11:08:40 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-22 21:53:02 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:08:40 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:02 DATA:  test-node.js result: performance: load: 400 total: 1652 2021-05-24 11:08:40 DATA:  test-node.js result: performance: load: 312 total: 1657
2021-05-22 21:53:02 INFO:  test-node.js test: second instance 2021-05-24 11:08:40 INFO:  test-node.js test: second instance
2021-05-22 21:53:02 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3] 2021-05-24 11:08:40 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-22 21:53:04 STATE: test-node.js passed: detect: assets/sample-me.jpg default 2021-05-24 11:08:42 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-22 21:53:04 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:08:42 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:04 DATA:  test-node.js result: performance: load: 1 total: 1529 2021-05-24 11:08:42 DATA:  test-node.js result: performance: load: 5 total: 1630
2021-05-22 21:53:04 INFO:  test-node.js test: concurrent 2021-05-24 11:08:42 INFO:  test-node.js test: concurrent
2021-05-22 21:53:04 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3] 2021-05-24 11:08:42 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-22 21:53:04 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3] 2021-05-24 11:08:42 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-22 21:53:05 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-24 11:08:43 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:53:06 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-24 11:08:44 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:53:12 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default 2021-05-24 11:08:50 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-22 21:53:12 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:08:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:12 DATA:  test-node.js result: performance: load: 400 total: 5889 2021-05-24 11:08:50 DATA:  test-node.js result: performance: load: 312 total: 5886
2021-05-22 21:53:12 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default 2021-05-24 11:08:50 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-22 21:53:12 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:08:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:12 DATA:  test-node.js result: performance: load: 1 total: 5889 2021-05-24 11:08:50 DATA:  test-node.js result: performance: load: 5 total: 5886
2021-05-22 21:53:12 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default 2021-05-24 11:08:50 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-22 21:53:12 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:08:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:12 DATA:  test-node.js result: performance: load: 400 total: 5889 2021-05-24 11:08:50 DATA:  test-node.js result: performance: load: 312 total: 5886
2021-05-22 21:53:12 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default 2021-05-24 11:08:50 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-22 21:53:12 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:08:50 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:12 DATA:  test-node.js result: performance: load: 1 total: 5889 2021-05-24 11:08:50 DATA:  test-node.js result: performance: load: 5 total: 5886
2021-05-22 21:53:12 INFO:  test-node.js test complete: 20467 ms 2021-05-24 11:08:50 INFO:  test-node.js test complete: 20201 ms
2021-05-22 21:53:12 INFO:  test-node-gpu.js start 2021-05-24 11:08:50 INFO:  test-node-gpu.js start
2021-05-22 21:53:13 WARN:  test-node-gpu.js stderr: 2021-05-22 21:53:13.374497: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory 2021-05-24 11:08:50 WARN:  test-node-gpu.js stderr: 2021-05-24 11:08:50.534311: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-05-22 21:53:13 WARN:  test-node-gpu.js stderr: 2021-05-22 21:53:13.489445: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory 2021-05-24 11:08:50 WARN:  test-node-gpu.js stderr: 2021-05-24 11:08:50.593093: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-05-22 21:53:13 WARN:  test-node-gpu.js stderr: 2021-05-22 21:53:13.489654: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist 2021-05-24 11:08:50 WARN:  test-node-gpu.js stderr: 2021-05-24 11:08:50.593140: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-05-22 21:53:13 STATE: test-node-gpu.js passed: create human 2021-05-24 11:08:50 STATE: test-node-gpu.js passed: create human
2021-05-22 21:53:13 INFO:  test-node-gpu.js human version: 1.9.2 2021-05-24 11:08:50 INFO:  test-node-gpu.js human version: 1.9.3
2021-05-22 21:53:13 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0 2021-05-24 11:08:50 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-22 21:53:13 INFO:  test-node-gpu.js tfjs version: 3.6.0 2021-05-24 11:08:50 INFO:  test-node-gpu.js tfjs version: 3.6.0
2021-05-22 21:53:13 STATE: test-node-gpu.js passed: set backend: tensorflow 2021-05-24 11:08:51 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-05-22 21:53:13 STATE: test-node-gpu.js passed: load models 2021-05-24 11:08:51 STATE: test-node-gpu.js passed: load models
2021-05-22 21:53:13 STATE: test-node-gpu.js result: defined models: 13 loaded models: 6 2021-05-24 11:08:51 STATE: test-node-gpu.js result: defined models: 13 loaded models: 6
2021-05-22 21:53:13 STATE: test-node-gpu.js passed: warmup: none default 2021-05-24 11:08:51 STATE: test-node-gpu.js passed: warmup: none default
2021-05-22 21:53:15 STATE: test-node-gpu.js passed: warmup: face default 2021-05-24 11:08:52 STATE: test-node-gpu.js passed: warmup: face default
2021-05-22 21:53:15 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5} 2021-05-24 11:08:52 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 5 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
2021-05-22 21:53:15 DATA:  test-node-gpu.js result: performance: load: 330 total: 1688 2021-05-24 11:08:52 DATA:  test-node-gpu.js result: performance: load: 333 total: 1664
2021-05-22 21:53:17 STATE: test-node-gpu.js passed: warmup: body default 2021-05-24 11:08:54 STATE: test-node-gpu.js passed: warmup: body default
2021-05-22 21:53:17 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17} 2021-05-24 11:08:54 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
2021-05-22 21:53:17 DATA:  test-node-gpu.js result: performance: load: 330 total: 1650 2021-05-24 11:08:54 DATA:  test-node-gpu.js result: performance: load: 333 total: 1772
2021-05-22 21:53:17 INFO:  test-node-gpu.js test body variants 2021-05-24 11:08:54 INFO:  test-node-gpu.js test body variants
2021-05-22 21:53:18 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-24 11:08:55 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:53:19 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet 2021-05-24 11:08:56 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-22 21:53:19 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17} 2021-05-24 11:08:56 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
2021-05-22 21:53:19 DATA:  test-node-gpu.js result: performance: load: 330 total: 1058 2021-05-24 11:08:56 DATA:  test-node-gpu.js result: performance: load: 333 total: 963
2021-05-22 21:53:20 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-24 11:08:57 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:53:20 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg blazepose 2021-05-24 11:08:57 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg blazepose
2021-05-22 21:53:20 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:08:57 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:20 DATA:  test-node-gpu.js result: performance: load: 330 total: 437 2021-05-24 11:08:57 DATA:  test-node-gpu.js result: performance: load: 333 total: 397
2021-05-22 21:53:21 STATE: test-node-gpu.js passed: detect: random default 2021-05-24 11:08:59 STATE: test-node-gpu.js passed: detect: random default
2021-05-22 21:53:21 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 {} {} {"score":1,"keypoints":39} 2021-05-24 11:08:59 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":1,"keypoints":39}
2021-05-22 21:53:21 DATA:  test-node-gpu.js result: performance: load: 330 total: 951 2021-05-24 11:08:59 DATA:  test-node-gpu.js result: performance: load: 333 total: 887
2021-05-22 21:53:21 INFO:  test-node-gpu.js test: first instance 2021-05-24 11:08:59 INFO:  test-node-gpu.js test: first instance
2021-05-22 21:53:22 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3] 2021-05-24 11:08:59 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-22 21:53:24 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default 2021-05-24 11:09:01 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-22 21:53:24 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:09:01 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:24 DATA:  test-node-gpu.js result: performance: load: 330 total: 1968 2021-05-24 11:09:01 DATA:  test-node-gpu.js result: performance: load: 333 total: 1643
2021-05-22 21:53:24 INFO:  test-node-gpu.js test: second instance 2021-05-24 11:09:01 INFO:  test-node-gpu.js test: second instance
2021-05-22 21:53:24 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3] 2021-05-24 11:09:01 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-22 21:53:26 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default 2021-05-24 11:09:02 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-22 21:53:26 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:09:02 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:26 DATA:  test-node-gpu.js result: performance: load: 2 total: 1975 2021-05-24 11:09:02 DATA:  test-node-gpu.js result: performance: load: 4 total: 1581
2021-05-22 21:53:26 INFO:  test-node-gpu.js test: concurrent 2021-05-24 11:09:02 INFO:  test-node-gpu.js test: concurrent
2021-05-22 21:53:26 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3] 2021-05-24 11:09:03 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-22 21:53:26 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3] 2021-05-24 11:09:03 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-22 21:53:27 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-24 11:09:04 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:53:28 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3] 2021-05-24 11:09:05 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:53:35 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default 2021-05-24 11:09:11 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-22 21:53:35 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:09:11 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:35 DATA:  test-node-gpu.js result: performance: load: 330 total: 6500 2021-05-24 11:09:11 DATA:  test-node-gpu.js result: performance: load: 333 total: 6184
2021-05-22 21:53:35 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default 2021-05-24 11:09:11 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-22 21:53:35 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:09:11 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:35 DATA:  test-node-gpu.js result: performance: load: 2 total: 6500 2021-05-24 11:09:11 DATA:  test-node-gpu.js result: performance: load: 4 total: 6184
2021-05-22 21:53:35 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default 2021-05-24 11:09:11 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-22 21:53:35 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:09:11 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:35 DATA:  test-node-gpu.js result: performance: load: 330 total: 6500 2021-05-24 11:09:11 DATA:  test-node-gpu.js result: performance: load: 333 total: 6184
2021-05-22 21:53:35 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default 2021-05-24 11:09:11 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-22 21:53:35 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39} 2021-05-24 11:09:11 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-22 21:53:35 DATA:  test-node-gpu.js result: performance: load: 2 total: 6500 2021-05-24 11:09:11 DATA:  test-node-gpu.js result: performance: load: 4 total: 6184
2021-05-22 21:53:35 INFO:  test-node-gpu.js test complete: 21880 ms 2021-05-24 11:09:11 INFO:  test-node-gpu.js test complete: 20649 ms
2021-05-22 21:53:35 INFO:  test-node-wasm.js start 2021-05-24 11:09:11 INFO:  test-node-wasm.js start
2021-05-22 21:53:35 STATE: test-node-wasm.js passed: model server: http://localhost:10030/models/ 2021-05-24 11:09:11 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
2021-05-22 21:53:35 STATE: test-node-wasm.js passed: create human 2021-05-24 11:09:11 ERROR: test-node-wasm.js aborting test
2021-05-22 21:53:35 INFO:  test-node-wasm.js human version: 1.9.2 2021-05-24 11:09:11 INFO:  status: {"passed":46,"failed":1}
2021-05-22 21:53:35 INFO:  test-node-wasm.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-22 21:53:35 INFO:  test-node-wasm.js tfjs version: 3.6.0
2021-05-22 21:53:36 STATE: test-node-wasm.js passed: set backend: wasm
2021-05-22 21:53:36 STATE: test-node-wasm.js passed: load models
2021-05-22 21:53:36 STATE: test-node-wasm.js result: defined models: 13 loaded models: 5
2021-05-22 21:53:36 STATE: test-node-wasm.js passed: warmup: none default
2021-05-22 21:53:36 ERROR: test-node-wasm.js failed: warmup: face default
2021-05-22 21:53:36 ERROR: test-node-wasm.js failed: warmup: body default
2021-05-22 21:53:36 INFO:  test-node-wasm.js test body variants
2021-05-22 21:53:38 STATE: test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:53:41 STATE: test-node-wasm.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-22 21:53:41 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 {"confidence":1,"age":28.5,"gender":"female"} {} {"score":0.91,"keypoints":17}
2021-05-22 21:53:41 DATA:  test-node-wasm.js result: performance: load: 655 total: 3191
2021-05-22 21:53:43 STATE: test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:53:45 STATE: test-node-wasm.js passed: detect: assets/human-sample-body.jpg blazepose
2021-05-22 21:53:45 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 {"confidence":1} {} {"score":1,"keypoints":39}
2021-05-22 21:53:45 DATA:  test-node-wasm.js result: performance: load: 655 total: 2321
2021-05-22 21:53:47 STATE: test-node-wasm.js passed: detect: random default
2021-05-22 21:53:47 DATA:  test-node-wasm.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 {} {} {"score":1,"keypoints":39}
2021-05-22 21:53:47 DATA:  test-node-wasm.js result: performance: load: 655 total: 1791
2021-05-22 21:53:47 INFO:  test-node-wasm.js test: first instance
2021-05-22 21:53:48 STATE: test-node-wasm.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-22 21:53:51 STATE: test-node-wasm.js passed: detect: assets/sample-me.jpg default
2021-05-22 21:53:51 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 0 {"confidence":1,"age":39.2,"gender":"male"} {} {"score":1,"keypoints":39}
2021-05-22 21:53:51 DATA:  test-node-wasm.js result: performance: load: 655 total: 2478
2021-05-22 21:53:51 INFO:  test-node-wasm.js test: second instance
2021-05-22 21:53:51 STATE: test-node-wasm.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-22 21:53:54 STATE: test-node-wasm.js passed: detect: assets/sample-me.jpg default
2021-05-22 21:53:54 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 0 {"confidence":1,"age":39.2,"gender":"male"} {} {"score":1,"keypoints":39}
2021-05-22 21:53:54 DATA:  test-node-wasm.js result: performance: load: 5 total: 2418
2021-05-22 21:53:54 INFO:  test-node-wasm.js test: concurrent
2021-05-22 21:53:54 STATE: test-node-wasm.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-22 21:53:54 STATE: test-node-wasm.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-22 21:53:56 STATE: test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:53:58 STATE: test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-22 21:54:07 STATE: test-node-wasm.js passed: detect: assets/human-sample-face.jpg default
2021-05-22 21:54:07 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 0 {"confidence":1,"age":23.6,"gender":"female"} {} {"score":1,"keypoints":39}
2021-05-22 21:54:07 DATA:  test-node-wasm.js result: performance: load: 655 total: 9455
2021-05-22 21:54:07 STATE: test-node-wasm.js passed: detect: assets/human-sample-face.jpg default
2021-05-22 21:54:07 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 6 object: 0 {"confidence":1,"age":23.6,"gender":"female"} {} {"score":1,"keypoints":39}
2021-05-22 21:54:07 DATA:  test-node-wasm.js result: performance: load: 5 total: 9455
2021-05-22 21:54:07 STATE: test-node-wasm.js passed: detect: assets/human-sample-body.jpg default
2021-05-22 21:54:07 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 {"confidence":1,"age":28.5,"gender":"female"} {} {"score":1,"keypoints":39}
2021-05-22 21:54:07 DATA:  test-node-wasm.js result: performance: load: 655 total: 9455
2021-05-22 21:54:07 STATE: test-node-wasm.js passed: detect: assets/human-sample-body.jpg default
2021-05-22 21:54:07 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 {"confidence":1,"age":28.5,"gender":"female"} {} {"score":1,"keypoints":39}
2021-05-22 21:54:07 DATA:  test-node-wasm.js result: performance: load: 5 total: 9455
2021-05-22 21:54:07 INFO:  test-node-wasm.js test complete: 31722 ms
2021-05-22 21:54:07 INFO:  status: {"passed":68,"failed":2}

File diff suppressed because one or more lines are too long

View File

@ -68,8 +68,7 @@
<div class="lead"> <div class="lead">
<p>Body results</p> <p>Body results</p>
</div> </div>
<p>Array of individual results with one object per detected body <p>Each result has:</p>
Each result has:</p>
<ul> <ul>
<li>id: body id number</li> <li>id: body id number</li>
<li>score: overall detection score</li> <li>score: overall detection score</li>

View File

@ -70,10 +70,9 @@
Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
Some values may be null if specific model is not enabled</p> Some values may be null if specific model is not enabled</p>
</div> </div>
<p>Array of individual results with one object per detected face <p>Each result has:</p>
Each result has:</p>
<ul> <ul>
<li>id: face number</li> <li>id: face id number</li>
<li>confidence: overall detection confidence value</li> <li>confidence: overall detection confidence value</li>
<li>boxConfidence: face box detection confidence value</li> <li>boxConfidence: face box detection confidence value</li>
<li>faceConfidence: face keypoints detection confidence value</li> <li>faceConfidence: face keypoints detection confidence value</li>
@ -87,7 +86,8 @@
<li>genderConfidence: gender detection confidence as value</li> <li>genderConfidence: gender detection confidence as value</li>
<li>emotion: emotions as array of possible emotions with their individual scores</li> <li>emotion: emotions as array of possible emotions with their individual scores</li>
<li>embedding: facial descriptor as array of numerical elements</li> <li>embedding: facial descriptor as array of numerical elements</li>
<li>iris: iris distance from current viewpoint as distance value</li> <li>iris: iris distance from current viewpoint as distance value in centimeters for a typical camera
field of view of 88 degrees. value should be adjusted manually as needed</li>
<li>rotation: face rotation that contains both angles and matrix used for 3d transformations</li> <li>rotation: face rotation that contains both angles and matrix used for 3d transformations</li>
<li>angle: face angle as object with values for roll, yaw and pitch angles</li> <li>angle: face angle as object with values for roll, yaw and pitch angles</li>
<li>matrix: 3d transformation matrix as array of numeric values</li> <li>matrix: 3d transformation matrix as array of numeric values</li>

View File

@ -68,14 +68,14 @@
<div class="lead"> <div class="lead">
<p>Hand results</p> <p>Hand results</p>
</div> </div>
<p>Array of individual results with one object per detected hand <p>Each result has:</p>
Each result has:</p>
<ul> <ul>
<li>confidence as value</li> <li>id: hand id number</li>
<li>box as array of [x, y, width, height], normalized to image resolution</li> <li>confidence: detection confidence score as value</li>
<li>boxRaw as array of [x, y, width, height], normalized to range 0..1</li> <li>box: bounding box: x, y, width, height normalized to input image resolution</li>
<li>landmarks as array of [x, y, z] points of hand, normalized to image resolution</li> <li>boxRaw: bounding box: x, y, width, height normalized to 0..1</li>
<li>annotations as array of annotated face landmark points</li> <li>landmarks: landmarks as array of [x, y, z] points of hand, normalized to image resolution</li>
<li>annotations: annotated landmarks for each hand part</li>
</ul> </ul>
</div> </div>
</section> </section>
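The hand docs above list the per-hand result fields. Purely as a minimal sketch of reading them, assuming `hands` is the `result.hand` array returned by `await human.detect(input)` and that `annotations` is keyed by hand part name:

```ts
// Minimal sketch: log the per-hand fields listed above (assumed shapes, see lead-in).
function logHands(hands: Array<{ id: number; confidence: number; box: [number, number, number, number]; landmarks: number[][]; annotations: Record<string, number[][]> }>) {
  for (const hand of hands) {
    const [x, y, width, height] = hand.box; // normalized to input image resolution
    console.log(`hand #${hand.id} confidence=${hand.confidence} at ${x},${y} size ${width}x${height}`);
    console.log(`  ${hand.landmarks.length} landmark points, parts: ${Object.keys(hand.annotations).join(', ')}`);
  }
}
```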

View File

@ -71,12 +71,13 @@
<p>Array of individual results with one object per detected object <p>Array of individual results with one object per detected object
Each result has:</p> Each result has:</p>
<ul> <ul>
<li>id: object id number</li>
<li>score as value</li> <li>score as value</li>
<li>label as detected class name</li> <li>label as detected class name</li>
<li>center as array of [x, y], normalized to image resolution</li> <li>box: bounding box: x, y, width, height normalized to input image resolution</li>
<li>centerRaw as array of [x, y], normalized to range 0..1</li> <li>boxRaw: bounding box: x, y, width, height normalized to 0..1</li>
<li>box as array of [x, y, width, height], normalized to image resolution</li> <li>center: optional center point as array of [x, y], normalized to image resolution</li>
<li>boxRaw as array of [x, y, width, height], normalized to range 0..1</li> <li>centerRaw: optional center point as array of [x, y], normalized to range 0..1</li>
</ul> </ul>
</div> </div>
</section> </section>
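These are the object-detection (`Item`) result fields. A minimal sketch of consuming them, assuming `items` is the `result.object` array from `await human.detect(input)`; the optional `center` field is only read when present:

```ts
// Minimal sketch: log detected objects using the fields listed above (assumed shapes, see lead-in).
function logObjects(items: Array<{ id: number; score: number; label: string; box: [number, number, number, number]; center?: [number, number] }>) {
  for (const item of items) {
    console.log(`#${item.id} ${item.label} score=${item.score} box=${item.box.join(',')}`);
    if (item.center) console.log(`  center at ${item.center[0]},${item.center[1]}`); // optional center point
  }
}
```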

View File

@ -93,6 +93,7 @@
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="result.html#hand" class="tsd-kind-icon">hand</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="result.html#hand" class="tsd-kind-icon">hand</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="result.html#object" class="tsd-kind-icon">object</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="result.html#object" class="tsd-kind-icon">object</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="result.html#performance" class="tsd-kind-icon">performance</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="result.html#performance" class="tsd-kind-icon">performance</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="result.html#persons" class="tsd-kind-icon">persons</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="result.html#timestamp" class="tsd-kind-icon">timestamp</a></li> <li class="tsd-kind-property tsd-parent-kind-interface"><a href="result.html#timestamp" class="tsd-kind-icon">timestamp</a></li>
</ul> </ul>
</section> </section>
@ -115,10 +116,15 @@
</section> </section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"> <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="canvas" class="tsd-anchor"></a> <a name="canvas" class="tsd-anchor"></a>
<h3><span class="tsd-flag ts-flagOptional">Optional</span> canvas</h3> <h3><span class="tsd-flag ts-flagOptional">Optional</span> <span class="tsd-flag ts-flagReadonly">Readonly</span> canvas</h3>
<div class="tsd-signature tsd-kind-icon">canvas<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span></div> <div class="tsd-signature tsd-kind-icon">canvas<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span></div>
<aside class="tsd-sources"> <aside class="tsd-sources">
</aside> </aside>
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>optional processed canvas that can be used to draw input on screen</p>
</div>
</div>
</section> </section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"> <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="face" class="tsd-anchor"></a> <a name="face" class="tsd-anchor"></a>
@ -170,17 +176,39 @@
</section> </section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"> <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="performance" class="tsd-anchor"></a> <a name="performance" class="tsd-anchor"></a>
<h3>performance</h3> <h3><span class="tsd-flag ts-flagReadonly">Readonly</span> performance</h3>
<div class="tsd-signature tsd-kind-icon">performance<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span></div> <div class="tsd-signature tsd-kind-icon">performance<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span></div>
<aside class="tsd-sources"> <aside class="tsd-sources">
</aside> </aside>
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>global performance object with timing values for each operation</p>
</div>
</div>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="persons" class="tsd-anchor"></a>
<h3><span class="tsd-flag ts-flagReadonly">Readonly</span> persons</h3>
<div class="tsd-signature tsd-kind-icon">persons<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Person</span><span class="tsd-signature-symbol">[]</span></div>
<aside class="tsd-sources">
</aside>
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>getter property that returns unified persons object</p>
</div>
</div>
</section> </section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface"> <section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="timestamp" class="tsd-anchor"></a> <a name="timestamp" class="tsd-anchor"></a>
<h3>timestamp</h3> <h3><span class="tsd-flag ts-flagReadonly">Readonly</span> timestamp</h3>
<div class="tsd-signature tsd-kind-icon">timestamp<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div> <div class="tsd-signature tsd-kind-icon">timestamp<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div>
<aside class="tsd-sources"> <aside class="tsd-sources">
</aside> </aside>
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>timestamp of detection representing the milliseconds elapsed since the UNIX epoch</p>
</div>
</div>
</section> </section>
</section> </section>
</div> </div>
@ -244,6 +272,9 @@
<li class=" tsd-kind-property tsd-parent-kind-interface"> <li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="result.html#performance" class="tsd-kind-icon">performance</a> <a href="result.html#performance" class="tsd-kind-icon">performance</a>
</li> </li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="result.html#persons" class="tsd-kind-icon">persons</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-interface"> <li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="result.html#timestamp" class="tsd-kind-icon">timestamp</a> <a href="result.html#timestamp" class="tsd-kind-icon">timestamp</a>
</li> </li>

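The generated docs above now mark `canvas`, `performance`, `timestamp` and `persons` on `Result` as read-only. A minimal sketch of reading the timing and timestamp values, assuming `result` comes from `await human.detect(input)` and `output` is a hypothetical output canvas element:

```ts
// Minimal sketch: consume the read-only Result properties documented above (assumed shapes, see lead-in).
function reportResult(
  result: { readonly performance: Record<string, unknown>; readonly timestamp: number; readonly canvas?: HTMLCanvasElement | OffscreenCanvas },
  output: HTMLCanvasElement,
) {
  console.log('detected at', new Date(result.timestamp).toISOString()); // timestamp is ms since the UNIX epoch
  console.log('timings', result.performance);                           // per-operation timing values
  if (result.canvas) output.getContext('2d')?.drawImage(result.canvas as HTMLCanvasElement, 0, 0); // optional processed input
}
```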
View File

@ -1,4 +1,4 @@
import type { Result, Face, Body, Hand, Item, Gesture } from '../result'; import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result';
/** /**
* Draw Options * Draw Options
* Accessed via `human.draw.options` or provided per each draw method as the drawOptions optional parameter * Accessed via `human.draw.options` or provided per each draw method as the drawOptions optional parameter
@ -48,5 +48,6 @@ export declare function face(inCanvas: HTMLCanvasElement, result: Array<Face>, d
export declare function body(inCanvas: HTMLCanvasElement, result: Array<Body>, drawOptions?: DrawOptions): Promise<void>; export declare function body(inCanvas: HTMLCanvasElement, result: Array<Body>, drawOptions?: DrawOptions): Promise<void>;
export declare function hand(inCanvas: HTMLCanvasElement, result: Array<Hand>, drawOptions?: DrawOptions): Promise<void>; export declare function hand(inCanvas: HTMLCanvasElement, result: Array<Hand>, drawOptions?: DrawOptions): Promise<void>;
export declare function object(inCanvas: HTMLCanvasElement, result: Array<Item>, drawOptions?: DrawOptions): Promise<void>; export declare function object(inCanvas: HTMLCanvasElement, result: Array<Item>, drawOptions?: DrawOptions): Promise<void>;
export declare function person(inCanvas: HTMLCanvasElement, result: Array<Person>, drawOptions?: DrawOptions): Promise<void>;
export declare function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasElement): Promise<void>; export declare function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasElement): Promise<void>;
export declare function all(inCanvas: HTMLCanvasElement, result: Result, drawOptions?: DrawOptions): Promise<void>; export declare function all(inCanvas: HTMLCanvasElement, result: Result, drawOptions?: DrawOptions): Promise<void>;
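`draw.person()` is the new overlay for the unified person results declared above. A minimal usage sketch, assuming `Human` is imported from `@vladmandic/human` (package name assumed) and that `input` and `output` are hypothetical canvas elements:

```ts
import Human from '@vladmandic/human'; // package name assumed

const human = new Human({ hand: { enabled: true }, body: { enabled: true } });

async function drawPersons(input: HTMLCanvasElement, output: HTMLCanvasElement) {
  const result = await human.detect(input);
  await human.draw.canvas(input, output);          // copy processed input to the output canvas
  await human.draw.person(output, result.persons); // overlay the combined per-person results
}
```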

2
types/persons.d.ts vendored Normal file
View File

@ -0,0 +1,2 @@
import { Face, Body, Hand, Gesture, Person } from './result';
export declare function join(faces: Array<Face>, bodies: Array<Body>, hands: Array<Hand>, gestures: Array<Gesture>): Array<Person>;
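The implementation behind `join()` is not shown in this diff. Purely as an illustration of the idea, a naive join could associate each face with the first body whose bounding box overlaps it; this sketch is an assumption, not the library's algorithm, and it leaves hand left/right assignment and the combined person box unresolved:

```ts
// Illustrative only: a naive box-overlap join, NOT the library's actual implementation.
type Box = [number, number, number, number];

function overlaps(a: Box, b: Box): boolean {
  return a[0] < b[0] + b[2] && b[0] < a[0] + a[2] && a[1] < b[1] + b[3] && b[1] < a[1] + a[3];
}

function naiveJoin(faces: Array<{ box: Box }>, bodies: Array<{ box: Box }>, _hands: Array<{ box: Box }>, gestures: unknown[]) {
  return faces.map((face, id) => ({
    id,
    face,
    body: bodies.find((body) => overlaps(face.box, body.box)) ?? null, // first body overlapping the face box, if any
    hands: { left: null, right: null },                                // hand left/right assignment omitted in this sketch
    gestures,
    box: face.box,                                                     // a real join would compute the combined face+body box
  }));
}
```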

62
types/result.d.ts vendored
View File

@ -3,9 +3,8 @@ import { Tensor } from '../dist/tfjs.esm.js';
* Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models * Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
* Some values may be null if specific model is not enabled * Some values may be null if specific model is not enabled
* *
* Array of individual results with one object per detected face
* Each result has: * Each result has:
* - id: face number * - id: face id number
* - confidence: overall detection confidence value * - confidence: overall detection confidence value
* - boxConfidence: face box detection confidence value * - boxConfidence: face box detection confidence value
* - faceConfidence: face keypoints detection confidence value * - faceConfidence: face keypoints detection confidence value
@ -19,7 +18,8 @@ import { Tensor } from '../dist/tfjs.esm.js';
* - genderConfidence: gender detection confidence as value * - genderConfidence: gender detection confidence as value
* - emotion: emotions as array of possible emotions with their individual scores * - emotion: emotions as array of possible emotions with their individual scores
* - embedding: facial descriptor as array of numerical elements * - embedding: facial descriptor as array of numerical elements
* - iris: iris distance from current viewpoint as distance value * - iris: iris distance from current viewpoint as distance value in centimeters for a typical camera
* field of view of 88 degrees. value should be adjusted manually as needed
* - rotation: face rotation that contains both angles and matrix used for 3d transformations * - rotation: face rotation that contains both angles and matrix used for 3d transformations
* - angle: face angle as object with values for roll, yaw and pitch angles * - angle: face angle as object with values for roll, yaw and pitch angles
* - matrix: 3d transformation matrix as array of numeric values * - matrix: 3d transformation matrix as array of numeric values
@ -59,7 +59,6 @@ export interface Face {
} }
/** Body results /** Body results
* *
* Array of individual results with one object per detected body
* Each result has: * Each result has:
* - id: body id number * - id: body id number
* - score: overall detection score * - score: overall detection score
@ -94,13 +93,13 @@ export interface Body {
} }
/** Hand results /** Hand results
* *
* Array of individual results with one object per detected hand
* Each result has: * Each result has:
* - confidence as value * - id: hand id number
* - box as array of [x, y, width, height], normalized to image resolution * - confidence: detection confidence score as value
* - boxRaw as array of [x, y, width, height], normalized to range 0..1 * - box: bounding box: x, y, width, height normalized to input image resolution
* - landmarks as array of [x, y, z] points of hand, normalized to image resolution * - boxRaw: bounding box: x, y, width, height normalized to 0..1
* - annotations as array of annotated face landmark points * - landmarks: landmarks as array of [x, y, z] points of hand, normalized to image resolution
* - annotations: annotated landmarks for each hand part
*/ */
export interface Hand { export interface Hand {
id: number; id: number;
@ -117,12 +116,13 @@ export interface Hand {
* *
* Array of individual results with one object per detected object * Array of individual results with one object per detected object
* Each result has: * Each result has:
* - id: object id number
* - score as value * - score as value
* - label as detected class name * - label as detected class name
* - center as array of [x, y], normalized to image resolution * - box: bounding box: x, y, width, height normalized to input image resolution
* - centerRaw as array of [x, y], normalized to range 0..1 * - boxRaw: bounding box: x, y, width, height normalized to 0..1
* - box as array of [x, y, width, height], normalized to image resolution * - center: optional center point as array of [x, y], normalized to image resolution
* - boxRaw as array of [x, y, width, height], normalized to range 0..1 * - centerRaw: optional center point as array of [x, y], normalized to range 0..1
*/ */
export interface Item { export interface Item {
id: number; id: number;
@ -155,6 +155,29 @@ export declare type Gesture = {
'hand': number; 'hand': number;
gesture: string; gesture: string;
}; };
/** Person getter
*
* Each result has:
* - id: person id
* - face: face object
* - body: body object
* - hands: array of hand objects
* - gestures: array of gestures
* - box: bounding box: x, y, width, height normalized to input image resolution
* - boxRaw: bounding box: x, y, width, height normalized to 0..1
*/
export interface Person {
id: number;
face: Face;
body: Body | null;
hands: {
left: Hand | null;
right: Hand | null;
};
gestures: Array<Gesture>;
box: [number, number, number, number];
boxRaw?: [number, number, number, number];
}
/** /**
* Result interface definition for **Human** library * Result interface definition for **Human** library
* *
@ -171,7 +194,12 @@ export interface Result {
gesture: Array<Gesture>; gesture: Array<Gesture>;
/** {@link Object}: detection & analysis results */ /** {@link Object}: detection & analysis results */
object: Array<Item>; object: Array<Item>;
performance: Record<string, unknown>; /** global performance object with timing values for each operation */
canvas?: OffscreenCanvas | HTMLCanvasElement; readonly performance: Record<string, unknown>;
timestamp: number; /** optional processed canvas that can be used to draw input on screen */
readonly canvas?: OffscreenCanvas | HTMLCanvasElement;
/** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
readonly timestamp: number;
/** getter property that returns unified persons object */
readonly persons: Array<Person>;
} }
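With the `Person` interface and the read-only `persons` getter in place, per-person results can be consumed directly. A minimal sketch, assuming `Human` is imported from `@vladmandic/human` (package name assumed) and `input` is a hypothetical canvas element:

```ts
import Human from '@vladmandic/human'; // package name assumed

const human = new Human();

async function listPersons(input: HTMLCanvasElement) {
  const result = await human.detect(input);
  for (const person of result.persons) {        // getter that combines face, body and hands per person
    const [x, y, width, height] = person.box;   // combined box, normalized to input resolution
    console.log(`person #${person.id}: region ${width}x${height} at ${x},${y}, gestures: ${person.gestures.length}`);
    if (person.hands.left && person.hands.right) console.log('  both hands tracked');
  }
}
```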

2
wiki

@ -1 +1 @@
Subproject commit d3e31ec79f0f7f9b3382576dd246cd86de22bb43 Subproject commit e7c276c0f521c88a00601bd80c5d08be1345b6aa