implemented human.next global interpolation method

pull/134/head
Vladimir Mandic 2021-05-31 10:40:07 -04:00
parent 1aa8c70d0e
commit 9e51e1a476
29 changed files with 1399 additions and 1261 deletions

View File

@ -11,6 +11,7 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
### **HEAD -> main** 2021/05/30 mandic00@live.com
- finished draw buffering and smoothing and enabled by default
- implemented service worker
- quantized centernet
- release candidate

View File

@ -155,7 +155,7 @@ Additionally, `HTMLVideoElement`, `HTMLMediaElement` can be a standard `<video>`
Live streaming examples:
- **HLS** (*HTTP Live Streaming*) using `hls.js`
- **DASH** (Dynamic Adaptive Streaming over HTTP) using `dash.js`
- **WebRTC** media track
- **WebRTC** media track using built-in support
<br>
@ -197,31 +197,58 @@ or using `async/await`:
```js
// create instance of human with simple configuration using default values
const config = { backend: 'webgl' };
const human = new Human(config);
const human = new Human(config); // create instance of Human
async function detectVideo() {
const inputVideo = document.getElementById('video-id');
const outputCanvas = document.getElementById('canvas-id');
const result = await human.detect(inputVideo);
human.draw.all(outputCanvas, result);
requestAnimationFrame(detectVideo);
const result = await human.detect(inputVideo); // run detection
human.draw.all(outputCanvas, result); // draw all results
requestAnimationFrame(detectVideo); // run loop
}
detectVideo();
detectVideo(); // start loop
```
or using interpolated results for smooth video processing by separating detection and drawing loops:
```js
const human = new Human(); // create instance of Human
const inputVideo = document.getElementById('video-id');
const outputCanvas = document.getElementById('canvas-id');
let result;
async function detectVideo() {
result = await human.detect(inputVideo); // run detection
requestAnimationFrame(detectVideo); // run detect loop
}
async function drawVideo() {
if (result) { // check if result is available
const interpolated = human.next(result); // calculate next interpolated frame
human.draw.all(outputCanvas, interpolated); // draw the frame
}
requestAnimationFrame(drawVideo); // run draw loop
}
detectVideo(); // start detection loop
drawVideo(); // start draw loop
```
And for even better results, you can run detection in a separate web worker thread
<br><hr><br>
## Default models
Default models in Human library are:
- **Face Detection**: MediaPipe BlazeFace (Back version)
- **Face Detection**: MediaPipe BlazeFace - Back variation
- **Face Mesh**: MediaPipe FaceMesh
- **Face Description**: HSE FaceRes
- **Face Iris Analysis**: MediaPipe Iris
- **Face Description**: HSE FaceRes
- **Emotion Detection**: Oarriaga Emotion
- **Body Analysis**: PoseNet (AtomicBits version)
- **Body Analysis**: MoveNet - Lightning variation
Note that alternative models are provided and can be enabled via configuration
For example, the default `MoveNet` model can be switched for `BlazePose`, `EfficientPose` or `PoseNet` model depending on the use case

View File

@ -6,7 +6,6 @@ N/A
## Exploring Features
- Implement results interpolation at the library level instead of inside the draw functions
- Switch to TypeScript 4.3
- Unify score/confidence variables
@ -16,9 +15,10 @@ N/A
## In Progress
## Issues
N/A
## Known Issues
- CenterNet with WebGL: <https://github.com/tensorflow/tfjs/issues/5145>
- CenterNet with WASM: <https://github.com/tensorflow/tfjs/issues/5110>
- NanoDet with WASM: <https://github.com/tensorflow/tfjs/issues/4824>
- BlazeFace and HandPose rotation in NodeJS: <https://github.com/tensorflow/tfjs/issues/4066>

View File

@ -38,25 +38,29 @@ const userConfig = {
enabled: false,
flip: false,
},
face: { enabled: false,
face: { enabled: true,
detector: { return: true },
mesh: { enabled: true },
iris: { enabled: true },
description: { enabled: true },
emotion: { enabled: true },
iris: { enabled: false },
description: { enabled: false },
emotion: { enabled: false },
},
hand: { enabled: false },
// body: { enabled: true, modelPath: 'posenet.json' },
// body: { enabled: true, modelPath: 'blazepose.json' },
body: { enabled: false, modelPath: 'movenet-lightning.json' },
object: { enabled: true },
body: { enabled: false },
object: { enabled: false },
gesture: { enabled: true },
*/
};
const drawOptions = {
bufferedOutput: true, // makes draw functions interpolate results between each detection for smoother movement
bufferedFactor: 4, // speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.
drawBoxes: true,
drawGaze: true,
drawLabels: true,
drawPolygons: true,
drawPoints: false,
};
// ui options
@ -223,17 +227,18 @@ async function drawResults(input) {
ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
}
// draw all results
human.draw.all(canvas, result, drawOptions);
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
const person = result.persons; // invoke person getter
/* use individual functions
// draw all results using interpolated results
const interpolated = human.next(result);
human.draw.all(canvas, interpolated, drawOptions);
/* alternatively use individual functions
human.draw.face(canvas, result.face);
human.draw.body(canvas, result.body);
human.draw.hand(canvas, result.hand);
human.draw.object(canvas, result.object);
human.draw.gesture(canvas, result.gesture);
*/
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
const person = result.persons; // explicitly invoke person getter
await calcSimmilariry(result);
// update log
@ -247,10 +252,9 @@ async function drawResults(input) {
document.getElementById('log').innerHTML = `
video: ${ui.camera.name} | facing: ${ui.camera.facing} | screen: ${window.innerWidth} x ${window.innerHeight} camera: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
backend: ${human.tf.getBackend()} | ${memory}<br>
performance: ${str(result.performance)}ms FPS process:${avgDetect} refresh:${avgDraw}<br>
performance: ${str(lastDetectedResult.performance)}ms FPS process:${avgDetect} refresh:${avgDraw}<br>
${warning}<br>
`;
ui.framesDraw++;
ui.lastFrame = performance.now();
// if buffered, loop immediately but limit the frame rate; it will still run slower than requested since JS is single-threaded

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

406
dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

406
dist/human.js vendored

File diff suppressed because one or more lines are too long

303
dist/human.node-gpu.js vendored
View File

@ -157,7 +157,7 @@ var config = {
modelPath: "blazeface.json",
rotation: false,
maxDetected: 10,
skipFrames: 21,
skipFrames: 15,
minConfidence: 0.2,
iouThreshold: 0.1,
return: false
@ -173,13 +173,13 @@ var config = {
description: {
enabled: true,
modelPath: "faceres.json",
skipFrames: 31,
skipFrames: 16,
minConfidence: 0.1
},
emotion: {
enabled: true,
minConfidence: 0.1,
skipFrames: 32,
skipFrames: 17,
modelPath: "emotion.json"
}
},
@ -191,8 +191,8 @@ var config = {
},
hand: {
enabled: true,
rotation: false,
skipFrames: 32,
rotation: true,
skipFrames: 18,
minConfidence: 0.1,
iouThreshold: 0.1,
maxDetected: 2,
@ -210,7 +210,7 @@ var config = {
minConfidence: 0.2,
iouThreshold: 0.4,
maxDetected: 10,
skipFrames: 41
skipFrames: 19
}
};
@ -4336,10 +4336,10 @@ var calculateGaze = (mesh, box6) => {
(eyeCenter[0] - irisCenter[0]) / eyeSize[0] - offsetIris[0],
eyeRatio * (irisCenter[1] - eyeCenter[1]) / eyeSize[1] - offsetIris[1]
];
let vectorLength = Math.sqrt(eyeDiff[0] ** 2 + eyeDiff[1] ** 2);
vectorLength = Math.min(vectorLength, box6[2] / 2, box6[3] / 2);
const vectorAngle = radians([0, 0], eyeDiff);
return { bearing: vectorAngle, strength: vectorLength };
let strength = Math.sqrt(eyeDiff[0] ** 2 + eyeDiff[1] ** 2);
strength = Math.min(strength, box6[2] / 2, box6[3] / 2);
const bearing = (radians([0, 0], eyeDiff) + Math.PI / 2) % Math.PI;
return { bearing, strength };
};
var calculateFaceAngle = (face5, imageSize) => {
const normalize = (v) => {
@ -4432,7 +4432,9 @@ var detectFace = async (parent, input) => {
parent.state = "run:face";
timeStamp = now();
const faces = await predict(input, parent.config);
parent.perf.face = Math.trunc(now() - timeStamp);
parent.performance.face = Math.trunc(now() - timeStamp);
if (!input.shape || input.shape.length !== 4)
return [];
if (!faces)
return [];
for (let i = 0; i < faces.length; i++) {
@ -4449,7 +4451,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:emotion";
timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await predict2(faces[i].image, parent.config, i, faces.length) : {};
parent.perf.emotion = Math.trunc(now() - timeStamp);
parent.performance.emotion = Math.trunc(now() - timeStamp);
}
parent.analyze("End Emotion:");
parent.analyze("Start Description:");
@ -4459,7 +4461,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:description";
timeStamp = now();
descRes = parent.config.face.description.enabled ? await predict3(faces[i].image, parent.config, i, faces.length) : [];
parent.perf.embedding = Math.trunc(now() - timeStamp);
parent.performance.embedding = Math.trunc(now() - timeStamp);
}
parent.analyze("End Description:");
if (parent.config.async) {
@ -4488,14 +4490,14 @@ var detectFace = async (parent, input) => {
}
parent.analyze("End FaceMesh:");
if (parent.config.async) {
if (parent.perf.face)
delete parent.perf.face;
if (parent.perf.age)
delete parent.perf.age;
if (parent.perf.gender)
delete parent.perf.gender;
if (parent.perf.emotion)
delete parent.perf.emotion;
if (parent.performance.face)
delete parent.performance.face;
if (parent.performance.age)
delete parent.performance.age;
if (parent.performance.gender)
delete parent.performance.gender;
if (parent.performance.emotion)
delete parent.performance.emotion;
}
return faceRes;
};
@ -8088,7 +8090,7 @@ var HandPipeline = class {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox);
const palmCenterNormalized = [palmCenter[0] / image15.shape[2], palmCenter[1] / image15.shape[1]];
const rotatedImage = config3.hand.rotation ? tf11.image.rotateWithOffset(image15, angle, 0, palmCenterNormalized) : image15.clone();
const rotatedImage = config3.hand.rotation && tf11.ENV.flags.IS_BROWSER ? tf11.image.rotateWithOffset(image15, angle, 0, palmCenterNormalized) : image15.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -9809,10 +9811,8 @@ var options = {
fillPolygons: false,
useDepth: true,
useCurves: false,
bufferedFactor: 3,
bufferedOutput: true
};
var bufferedResult = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
var rad2deg = (theta) => Math.round(theta * 180 / Math.PI);
function point(ctx, x, y, z = 0, localOptions) {
ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
@ -9995,22 +9995,19 @@ async function face2(inCanvas2, result, drawOptions) {
}
}
if (localOptions.drawGaze && ((_b = (_a = f.rotation) == null ? void 0 : _a.gaze) == null ? void 0 : _b.strength) && ((_d = (_c = f.rotation) == null ? void 0 : _c.gaze) == null ? void 0 : _d.bearing)) {
ctx.strokeStyle = "pink";
ctx.beginPath();
const leftGaze = [
f.annotations["leftEyeIris"][0][0] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2],
f.annotations["leftEyeIris"][0][1] - Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]
f.annotations["leftEyeIris"][0][0] + Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3],
f.annotations["leftEyeIris"][0][1] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]
];
ctx.beginPath();
ctx.moveTo(f.annotations["leftEyeIris"][0][0], f.annotations["leftEyeIris"][0][1]);
ctx.strokeStyle = "pink";
ctx.lineTo(leftGaze[0], leftGaze[1]);
ctx.stroke();
const rightGaze = [
f.annotations["rightEyeIris"][0][0] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2],
f.annotations["rightEyeIris"][0][1] - Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]
f.annotations["rightEyeIris"][0][0] + Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3],
f.annotations["rightEyeIris"][0][1] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]
];
ctx.beginPath();
ctx.moveTo(f.annotations["rightEyeIris"][0][0], f.annotations["rightEyeIris"][0][1]);
ctx.strokeStyle = "pink";
ctx.lineTo(rightGaze[0], rightGaze[1]);
ctx.stroke();
}
@ -10277,77 +10274,6 @@ async function person(inCanvas2, result, drawOptions) {
}
}
}
// Legacy draw-time smoothing, superseded by the standalone interpolation module
// (human.next / src/interpolate.ts): exponentially averages each new detection
// result into the module-level `bufferedResult` using the fixed weight
// localOptions.bufferedFactor, where
//   buffered = ((factor - 1) * previous + current) / factor
// so factor 1 adopts new values immediately and larger factors converge slower.
// Mutates `bufferedResult` in place and returns nothing.
function calcBuffered(newResult, localOptions) {
// body: if the number of detected bodies changed, reset the buffer with a deep
// copy; otherwise interpolate box, boxRaw and each keypoint position
if (!bufferedResult.body || newResult.body.length !== bufferedResult.body.length) {
bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
} else {
for (let i = 0; i < newResult.body.length; i++) {
const box6 = newResult.body[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.body[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const keypoints3 = newResult.body[i].keypoints.map((keypoint, j) => ({
// score and part label are taken from the new result as-is; only positions are smoothed
score: keypoint.score,
part: keypoint.part,
position: {
// guard: fall back to the raw position when no buffered keypoint exists at this index
x: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.x + keypoint.position.x) / localOptions.bufferedFactor : keypoint.position.x,
y: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / localOptions.bufferedFactor : keypoint.position.y
}
}));
bufferedResult.body[i] = { ...newResult.body[i], box: box6, boxRaw: boxRaw3, keypoints: keypoints3 };
}
}
// hand: interpolate box, boxRaw, every landmark coordinate and every annotation coordinate
if (!bufferedResult.hand || newResult.hand.length !== bufferedResult.hand.length) {
bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
} else {
for (let i = 0; i < newResult.hand.length; i++) {
const box6 = newResult.hand[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.hand[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
const keys = Object.keys(newResult.hand[i].annotations);
const annotations3 = {};
for (const key of keys) {
annotations3[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
}
bufferedResult.hand[i] = { ...newResult.hand[i], box: box6, boxRaw: boxRaw3, landmarks, annotations: annotations3 };
}
}
// face: interpolate box, boxRaw, rotation angles and gaze; the rotation matrix
// is passed through uninterpolated from the new result
if (!bufferedResult.face || newResult.face.length !== bufferedResult.face.length) {
bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
} else {
for (let i = 0; i < newResult.face.length; i++) {
const box6 = newResult.face[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.face[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const matrix = newResult.face[i].rotation.matrix;
const angle = {
roll: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.roll + newResult.face[i].rotation.angle.roll) / localOptions.bufferedFactor,
yaw: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.yaw + newResult.face[i].rotation.angle.yaw) / localOptions.bufferedFactor,
pitch: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.pitch + newResult.face[i].rotation.angle.pitch) / localOptions.bufferedFactor
};
const gaze = {
// NOTE(review): bearing is an angle — linear averaging assumes no wrap-around
// at the period boundary; confirm acceptable for gaze rendering
bearing: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.bearing + newResult.face[i].rotation.gaze.bearing) / localOptions.bufferedFactor,
strength: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.strength + newResult.face[i].rotation.gaze.strength) / localOptions.bufferedFactor
};
const rotation = { angle, matrix, gaze };
bufferedResult.face[i] = { ...newResult.face[i], rotation, box: box6, boxRaw: boxRaw3 };
}
}
// object: interpolate only box and boxRaw
if (!bufferedResult.object || newResult.object.length !== bufferedResult.object.length) {
bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
} else {
for (let i = 0; i < newResult.object.length; i++) {
const box6 = newResult.object[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.object[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.object[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.object[i].boxRaw[j] + b) / localOptions.bufferedFactor);
bufferedResult.object[i] = { ...newResult.object[i], box: box6, boxRaw: boxRaw3 };
}
}
// persons: reading newResult.persons invokes the persons getter on the result
// object (computed aggregation); only the combined box is interpolated
const newPersons = newResult.persons;
if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length) {
bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
} else {
for (let i = 0; i < newPersons.length; i++) {
bufferedResult.persons[i].box = newPersons[i].box.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box6) / localOptions.bufferedFactor);
}
}
}
async function canvas(inCanvas2, outCanvas2) {
if (!inCanvas2 || !outCanvas2)
return;
@ -10357,22 +10283,18 @@ async function canvas(inCanvas2, outCanvas2) {
outCtx == null ? void 0 : outCtx.drawImage(inCanvas2, 0, 0);
}
async function all(inCanvas2, result, drawOptions) {
const timestamp = now();
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas2)
return;
if (!(inCanvas2 instanceof HTMLCanvasElement))
return;
if (!bufferedResult)
bufferedResult = result;
else if (localOptions.bufferedOutput)
calcBuffered(result, localOptions);
else
bufferedResult = result;
face2(inCanvas2, bufferedResult.face, localOptions);
body2(inCanvas2, bufferedResult.body, localOptions);
hand2(inCanvas2, bufferedResult.hand, localOptions);
object(inCanvas2, bufferedResult.object, localOptions);
face2(inCanvas2, result.face, localOptions);
body2(inCanvas2, result.body, localOptions);
hand2(inCanvas2, result.hand, localOptions);
object(inCanvas2, result.object, localOptions);
gesture(inCanvas2, result.gesture, localOptions);
result.performance.draw = Math.trunc(now() - timestamp);
}
// src/persons.ts
@ -10433,6 +10355,84 @@ function join2(faces, bodies, hands, gestures, shape) {
return persons2;
}
// src/interpolate.ts
// Module-level accumulator holding the last interpolated result; mutated in
// place on every call to calc() and returned by reference.
var bufferedResult = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };

// Interpolates a new detection result towards the previously buffered one so a
// fast draw loop renders smooth motion between slower detection frames.
// Per-value weighting: buffered = ((factor - 1) * previous + current) / factor.
// The factor scales inversely with the age of the detection result, so stale
// results converge faster. Fix over the original: the factor is clamped to
// [1, 20] — without the clamp, elapsed > 250ms yields factor < 1 and a
// NEGATIVE previous-value weight (overshoot/extrapolation), and elapsed === 0
// yields factor === Infinity and NaN output values.
// Whenever the number of detected instances in a category changes, that
// category's interpolation state is reset with a deep copy of the new result.
// Mutates and returns the module-level bufferedResult.
function calc(newResult) {
  const elapsed = Date.now() - newResult.timestamp;
  const bufferedFactor = Math.min(Math.max(1e3 / Math.max(elapsed, 1) / 4, 1), 20);
  // body: interpolate box, boxRaw and each keypoint position
  if (!bufferedResult.body || newResult.body.length !== bufferedResult.body.length) {
    bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
  } else {
    for (let i = 0; i < newResult.body.length; i++) {
      const box6 = newResult.body[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / bufferedFactor);
      const boxRaw3 = newResult.body[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / bufferedFactor);
      const keypoints3 = newResult.body[i].keypoints.map((keypoint, j) => ({
        // score and part label are taken from the new result; only positions are smoothed
        score: keypoint.score,
        part: keypoint.part,
        position: {
          // guard: fall back to the raw position when no buffered keypoint exists at this index
          x: bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.x + keypoint.position.x) / bufferedFactor : keypoint.position.x,
          y: bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / bufferedFactor : keypoint.position.y
        }
      }));
      bufferedResult.body[i] = { ...newResult.body[i], box: box6, boxRaw: boxRaw3, keypoints: keypoints3 };
    }
  }
  // hand: interpolate box, boxRaw, every landmark coordinate and every annotation coordinate
  if (!bufferedResult.hand || newResult.hand.length !== bufferedResult.hand.length) {
    bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
  } else {
    for (let i = 0; i < newResult.hand.length; i++) {
      const box6 = newResult.hand[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / bufferedFactor);
      const boxRaw3 = newResult.hand[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / bufferedFactor);
      const landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / bufferedFactor));
      const keys = Object.keys(newResult.hand[i].annotations);
      const annotations3 = {};
      for (const key of keys) {
        annotations3[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / bufferedFactor));
      }
      bufferedResult.hand[i] = { ...newResult.hand[i], box: box6, boxRaw: boxRaw3, landmarks, annotations: annotations3 };
    }
  }
  // face: interpolate box, boxRaw, rotation angles and gaze; the rotation
  // matrix is passed through uninterpolated from the new result
  if (!bufferedResult.face || newResult.face.length !== bufferedResult.face.length) {
    bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
  } else {
    for (let i = 0; i < newResult.face.length; i++) {
      const box6 = newResult.face[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / bufferedFactor);
      const boxRaw3 = newResult.face[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / bufferedFactor);
      const matrix = newResult.face[i].rotation.matrix;
      const angle = {
        roll: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.roll + newResult.face[i].rotation.angle.roll) / bufferedFactor,
        yaw: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.yaw + newResult.face[i].rotation.angle.yaw) / bufferedFactor,
        pitch: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.pitch + newResult.face[i].rotation.angle.pitch) / bufferedFactor
      };
      const gaze = {
        // NOTE(review): bearing is an angle — linear averaging assumes no
        // wrap-around at the period boundary; confirm acceptable for gaze rendering
        bearing: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.bearing + newResult.face[i].rotation.gaze.bearing) / bufferedFactor,
        strength: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.strength + newResult.face[i].rotation.gaze.strength) / bufferedFactor
      };
      const rotation = { angle, matrix, gaze };
      bufferedResult.face[i] = { ...newResult.face[i], rotation, box: box6, boxRaw: boxRaw3 };
    }
  }
  // object: interpolate only box and boxRaw
  if (!bufferedResult.object || newResult.object.length !== bufferedResult.object.length) {
    bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
  } else {
    for (let i = 0; i < newResult.object.length; i++) {
      const box6 = newResult.object[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.object[i].box[j] + b) / bufferedFactor);
      const boxRaw3 = newResult.object[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.object[i].boxRaw[j] + b) / bufferedFactor);
      bufferedResult.object[i] = { ...newResult.object[i], box: box6, boxRaw: boxRaw3 };
    }
  }
  // persons: reading newResult.persons invokes the persons getter on the
  // result object; only the combined person box is interpolated
  const newPersons = newResult.persons;
  if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length) {
    bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
  } else {
    for (let i = 0; i < newPersons.length; i++) {
      bufferedResult.persons[i].box = newPersons[i].box.map((box6, j) => ((bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box6) / bufferedFactor);
    }
  }
  // gesture and performance are not interpolated; latest values pass through
  bufferedResult.gesture = newResult.gesture;
  bufferedResult.performance = newResult.performance;
  return bufferedResult;
}
// src/sample.ts
var face3 = `
/9j/4AAQSkZJRgABAQEAYABgAAD/4QBoRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUA
@ -11162,7 +11162,7 @@ var version = "2.0.0";
// src/human.ts
var _numTensors, _analyzeMemoryLeaks, _checkSanity, _firstRun, _lastInputSum, _lastCacheDiff, _sanity, _checkBackend, _skipFrame, _warmupBitmap, _warmupCanvas, _warmupNode;
var Human = class {
constructor(userConfig = {}) {
constructor(userConfig) {
__privateAdd(this, _numTensors, void 0);
__privateAdd(this, _analyzeMemoryLeaks, void 0);
__privateAdd(this, _checkSanity, void 0);
@ -11233,9 +11233,10 @@ var Human = class {
if (this.tf.getBackend() === "webgl" || this.tf.getBackend() === "humangl") {
this.tf.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
this.tf.ENV.set("WEBGL_CPU_FORWARD", true);
tf19.ENV.set("WEBGL_FORCE_F16_TEXTURES", true);
this.tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", true);
if (typeof this.config["deallocate"] !== "undefined") {
if (!this.config.object.enabled)
this.tf.ENV.set("WEBGL_FORCE_F16_TEXTURES", true);
if (typeof this.config["deallocate"] !== "undefined" && this.config["deallocate"]) {
log("changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:", true);
this.tf.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);
}
@ -11244,9 +11245,10 @@ var Human = class {
log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
}
await this.tf.ready();
this.perf.backend = Math.trunc(now() - timeStamp);
this.performance.backend = Math.trunc(now() - timeStamp);
}
});
this.next = (result) => calc(result || this.result);
__privateAdd(this, _skipFrame, async (input) => {
if (this.config.cacheSensitivity === 0)
return false;
@ -11337,17 +11339,17 @@ var Human = class {
}
return res;
});
this.config = mergeDeep(config, userConfig || {});
this.tf = tf19;
this.draw = draw_exports;
this.version = version;
this.config = mergeDeep(config, userConfig);
this.state = "idle";
__privateSet(this, _numTensors, 0);
__privateSet(this, _analyzeMemoryLeaks, false);
__privateSet(this, _checkSanity, false);
__privateSet(this, _firstRun, true);
__privateSet(this, _lastCacheDiff, 0);
this.perf = {};
this.performance = { backend: 0, load: 0, image: 0, frames: 0, cached: 0, changed: 0, total: 0, draw: 0 };
this.models = {
face: null,
posenet: null,
@ -11355,7 +11357,6 @@ var Human = class {
efficientpose: null,
movenet: null,
handpose: null,
iris: null,
age: null,
gender: null,
emotion: null,
@ -11388,7 +11389,7 @@ var Human = class {
match(faceEmbedding, db, threshold = 0) {
return match(faceEmbedding, db, threshold);
}
async load(userConfig = {}) {
async load(userConfig) {
this.state = "load";
const timeStamp = now();
if (userConfig)
@ -11462,10 +11463,10 @@ var Human = class {
__privateSet(this, _firstRun, false);
}
const current = Math.trunc(now() - timeStamp);
if (current > (this.perf.load || 0))
this.perf.load = current;
if (current > (this.performance.load || 0))
this.performance.load = current;
}
async detect(input, userConfig = {}) {
async detect(input, userConfig) {
return new Promise(async (resolve) => {
this.state = "config";
let timeStamp;
@ -11486,18 +11487,18 @@ var Human = class {
resolve({ error: "could not convert input to tensor" });
return;
}
this.perf.image = Math.trunc(now() - timeStamp);
this.performance.image = Math.trunc(now() - timeStamp);
this.analyze("Get Image:");
timeStamp = now();
this.config.skipFrame = await __privateGet(this, _skipFrame).call(this, process5.tensor);
if (!this.perf.frames)
this.perf.frames = 0;
if (!this.perf.cached)
this.perf.cached = 0;
this.perf.frames++;
if (!this.performance.frames)
this.performance.frames = 0;
if (!this.performance.cached)
this.performance.cached = 0;
this.performance.frames++;
if (this.config.skipFrame)
this.perf.cached++;
this.perf.changed = Math.trunc(now() - timeStamp);
this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes;
let bodyRes;
@ -11506,15 +11507,15 @@ var Human = class {
let elapsedTime;
if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, process5.tensor) : [];
if (this.perf.face)
delete this.perf.face;
if (this.performance.face)
delete this.performance.face;
} else {
this.state = "run:face";
timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, process5.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.face = elapsedTime;
this.performance.face = elapsedTime;
}
this.analyze("Start Body:");
if (this.config.async) {
@ -11526,8 +11527,8 @@ var Human = class {
bodyRes = this.config.body.enabled ? predict7(process5.tensor, this.config) : [];
else if (this.config.body.modelPath.includes("movenet"))
bodyRes = this.config.body.enabled ? predict8(process5.tensor, this.config) : [];
if (this.perf.body)
delete this.perf.body;
if (this.performance.body)
delete this.performance.body;
} else {
this.state = "run:body";
timeStamp = now();
@ -11541,21 +11542,21 @@ var Human = class {
bodyRes = this.config.body.enabled ? await predict8(process5.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.body = elapsedTime;
this.performance.body = elapsedTime;
}
this.analyze("End Body:");
this.analyze("Start Hand:");
if (this.config.async) {
handRes = this.config.hand.enabled ? predict5(process5.tensor, this.config) : [];
if (this.perf.hand)
delete this.perf.hand;
if (this.performance.hand)
delete this.performance.hand;
} else {
this.state = "run:hand";
timeStamp = now();
handRes = this.config.hand.enabled ? await predict5(process5.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.hand = elapsedTime;
this.performance.hand = elapsedTime;
}
this.analyze("End Hand:");
this.analyze("Start Object:");
@ -11564,8 +11565,8 @@ var Human = class {
objectRes = this.config.object.enabled ? predict9(process5.tensor, this.config) : [];
else if (this.config.object.modelPath.includes("centernet"))
objectRes = this.config.object.enabled ? predict10(process5.tensor, this.config) : [];
if (this.perf.object)
delete this.perf.object;
if (this.performance.object)
delete this.performance.object;
} else {
this.state = "run:object";
timeStamp = now();
@ -11575,7 +11576,7 @@ var Human = class {
objectRes = this.config.object.enabled ? await predict10(process5.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.object = elapsedTime;
this.performance.object = elapsedTime;
}
this.analyze("End Object:");
if (this.config.async)
@ -11585,11 +11586,11 @@ var Human = class {
timeStamp = now();
gestureRes = [...face(faceRes), ...body(bodyRes), ...hand(handRes), ...iris(faceRes)];
if (!this.config.async)
this.perf.gesture = Math.trunc(now() - timeStamp);
else if (this.perf.gesture)
delete this.perf.gesture;
this.performance.gesture = Math.trunc(now() - timeStamp);
else if (this.performance.gesture)
delete this.performance.gesture;
}
this.perf.total = Math.trunc(now() - timeStart);
this.performance.total = Math.trunc(now() - timeStart);
this.state = "idle";
this.result = {
face: faceRes,
@ -11597,7 +11598,7 @@ var Human = class {
hand: handRes,
gesture: gestureRes,
object: objectRes,
performance: this.perf,
performance: this.performance,
canvas: process5.canvas,
timestamp: Date.now(),
get persons() {
@ -11609,7 +11610,7 @@ var Human = class {
resolve(this.result);
});
}
async warmup(userConfig = {}) {
async warmup(userConfig) {
const t0 = now();
if (userConfig)
this.config = mergeDeep(this.config, userConfig);

View File

@ -158,7 +158,7 @@ var config = {
modelPath: "blazeface.json",
rotation: false,
maxDetected: 10,
skipFrames: 21,
skipFrames: 15,
minConfidence: 0.2,
iouThreshold: 0.1,
return: false
@ -174,13 +174,13 @@ var config = {
description: {
enabled: true,
modelPath: "faceres.json",
skipFrames: 31,
skipFrames: 16,
minConfidence: 0.1
},
emotion: {
enabled: true,
minConfidence: 0.1,
skipFrames: 32,
skipFrames: 17,
modelPath: "emotion.json"
}
},
@ -192,8 +192,8 @@ var config = {
},
hand: {
enabled: true,
rotation: false,
skipFrames: 32,
rotation: true,
skipFrames: 18,
minConfidence: 0.1,
iouThreshold: 0.1,
maxDetected: 2,
@ -211,7 +211,7 @@ var config = {
minConfidence: 0.2,
iouThreshold: 0.4,
maxDetected: 10,
skipFrames: 41
skipFrames: 19
}
};
@ -4337,10 +4337,10 @@ var calculateGaze = (mesh, box6) => {
(eyeCenter[0] - irisCenter[0]) / eyeSize[0] - offsetIris[0],
eyeRatio * (irisCenter[1] - eyeCenter[1]) / eyeSize[1] - offsetIris[1]
];
let vectorLength = Math.sqrt(eyeDiff[0] ** 2 + eyeDiff[1] ** 2);
vectorLength = Math.min(vectorLength, box6[2] / 2, box6[3] / 2);
const vectorAngle = radians([0, 0], eyeDiff);
return { bearing: vectorAngle, strength: vectorLength };
let strength = Math.sqrt(eyeDiff[0] ** 2 + eyeDiff[1] ** 2);
strength = Math.min(strength, box6[2] / 2, box6[3] / 2);
const bearing = (radians([0, 0], eyeDiff) + Math.PI / 2) % Math.PI;
return { bearing, strength };
};
var calculateFaceAngle = (face5, imageSize) => {
const normalize = (v) => {
@ -4433,7 +4433,9 @@ var detectFace = async (parent, input) => {
parent.state = "run:face";
timeStamp = now();
const faces = await predict(input, parent.config);
parent.perf.face = Math.trunc(now() - timeStamp);
parent.performance.face = Math.trunc(now() - timeStamp);
if (!input.shape || input.shape.length !== 4)
return [];
if (!faces)
return [];
for (let i = 0; i < faces.length; i++) {
@ -4450,7 +4452,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:emotion";
timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await predict2(faces[i].image, parent.config, i, faces.length) : {};
parent.perf.emotion = Math.trunc(now() - timeStamp);
parent.performance.emotion = Math.trunc(now() - timeStamp);
}
parent.analyze("End Emotion:");
parent.analyze("Start Description:");
@ -4460,7 +4462,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:description";
timeStamp = now();
descRes = parent.config.face.description.enabled ? await predict3(faces[i].image, parent.config, i, faces.length) : [];
parent.perf.embedding = Math.trunc(now() - timeStamp);
parent.performance.embedding = Math.trunc(now() - timeStamp);
}
parent.analyze("End Description:");
if (parent.config.async) {
@ -4489,14 +4491,14 @@ var detectFace = async (parent, input) => {
}
parent.analyze("End FaceMesh:");
if (parent.config.async) {
if (parent.perf.face)
delete parent.perf.face;
if (parent.perf.age)
delete parent.perf.age;
if (parent.perf.gender)
delete parent.perf.gender;
if (parent.perf.emotion)
delete parent.perf.emotion;
if (parent.performance.face)
delete parent.performance.face;
if (parent.performance.age)
delete parent.performance.age;
if (parent.performance.gender)
delete parent.performance.gender;
if (parent.performance.emotion)
delete parent.performance.emotion;
}
return faceRes;
};
@ -8089,7 +8091,7 @@ var HandPipeline = class {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox);
const palmCenterNormalized = [palmCenter[0] / image15.shape[2], palmCenter[1] / image15.shape[1]];
const rotatedImage = config3.hand.rotation ? tf11.image.rotateWithOffset(image15, angle, 0, palmCenterNormalized) : image15.clone();
const rotatedImage = config3.hand.rotation && tf11.ENV.flags.IS_BROWSER ? tf11.image.rotateWithOffset(image15, angle, 0, palmCenterNormalized) : image15.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -9810,10 +9812,8 @@ var options = {
fillPolygons: false,
useDepth: true,
useCurves: false,
bufferedFactor: 3,
bufferedOutput: true
};
var bufferedResult = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
var rad2deg = (theta) => Math.round(theta * 180 / Math.PI);
function point(ctx, x, y, z = 0, localOptions) {
ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
@ -9996,22 +9996,19 @@ async function face2(inCanvas2, result, drawOptions) {
}
}
if (localOptions.drawGaze && ((_b = (_a = f.rotation) == null ? void 0 : _a.gaze) == null ? void 0 : _b.strength) && ((_d = (_c = f.rotation) == null ? void 0 : _c.gaze) == null ? void 0 : _d.bearing)) {
ctx.strokeStyle = "pink";
ctx.beginPath();
const leftGaze = [
f.annotations["leftEyeIris"][0][0] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2],
f.annotations["leftEyeIris"][0][1] - Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]
f.annotations["leftEyeIris"][0][0] + Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3],
f.annotations["leftEyeIris"][0][1] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]
];
ctx.beginPath();
ctx.moveTo(f.annotations["leftEyeIris"][0][0], f.annotations["leftEyeIris"][0][1]);
ctx.strokeStyle = "pink";
ctx.lineTo(leftGaze[0], leftGaze[1]);
ctx.stroke();
const rightGaze = [
f.annotations["rightEyeIris"][0][0] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2],
f.annotations["rightEyeIris"][0][1] - Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]
f.annotations["rightEyeIris"][0][0] + Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3],
f.annotations["rightEyeIris"][0][1] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]
];
ctx.beginPath();
ctx.moveTo(f.annotations["rightEyeIris"][0][0], f.annotations["rightEyeIris"][0][1]);
ctx.strokeStyle = "pink";
ctx.lineTo(rightGaze[0], rightGaze[1]);
ctx.stroke();
}
@ -10278,77 +10275,6 @@ async function person(inCanvas2, result, drawOptions) {
}
}
}
// Exponentially smooths a new detection result into the module-level bufferedResult
// using the fixed weight localOptions.bufferedFactor: each smoothed scalar is
// ((factor - 1) * previous + current) / factor. Each result type (body, hand, face,
// object, persons) is smoothed independently; when the number of detections for a
// type changes, smoothing for that type is reset via a JSON deep copy of the new
// result, since elements can no longer be matched index-to-index.
// Mutates bufferedResult in place; returns nothing.
function calcBuffered(newResult, localOptions) {
// body: interpolate box, boxRaw and every keypoint position
if (!bufferedResult.body || newResult.body.length !== bufferedResult.body.length) {
bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
} else {
for (let i = 0; i < newResult.body.length; i++) {
const box6 = newResult.body[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.body[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const keypoints3 = newResult.body[i].keypoints.map((keypoint, j) => ({
score: keypoint.score,
part: keypoint.part,
position: {
// guard: a keypoint may be absent from the previously buffered frame
x: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.x + keypoint.position.x) / localOptions.bufferedFactor : keypoint.position.x,
y: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / localOptions.bufferedFactor : keypoint.position.y
}
}));
bufferedResult.body[i] = { ...newResult.body[i], box: box6, boxRaw: boxRaw3, keypoints: keypoints3 };
}
}
// hand: interpolate box, boxRaw, landmarks and every annotation point
if (!bufferedResult.hand || newResult.hand.length !== bufferedResult.hand.length) {
bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
} else {
for (let i = 0; i < newResult.hand.length; i++) {
const box6 = newResult.hand[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.hand[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
const keys = Object.keys(newResult.hand[i].annotations);
const annotations3 = {};
for (const key of keys) {
annotations3[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
}
bufferedResult.hand[i] = { ...newResult.hand[i], box: box6, boxRaw: boxRaw3, landmarks, annotations: annotations3 };
}
}
// face: interpolate box, boxRaw, rotation angles and gaze; the rotation matrix is passed through unsmoothed
if (!bufferedResult.face || newResult.face.length !== bufferedResult.face.length) {
bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
} else {
for (let i = 0; i < newResult.face.length; i++) {
const box6 = newResult.face[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.face[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const matrix = newResult.face[i].rotation.matrix;
const angle = {
roll: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.roll + newResult.face[i].rotation.angle.roll) / localOptions.bufferedFactor,
yaw: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.yaw + newResult.face[i].rotation.angle.yaw) / localOptions.bufferedFactor,
pitch: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.pitch + newResult.face[i].rotation.angle.pitch) / localOptions.bufferedFactor
};
const gaze = {
// NOTE(review): bearing is an angle; linear interpolation is wrong across the wrap-around point — confirm acceptable
bearing: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.bearing + newResult.face[i].rotation.gaze.bearing) / localOptions.bufferedFactor,
strength: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.strength + newResult.face[i].rotation.gaze.strength) / localOptions.bufferedFactor
};
const rotation = { angle, matrix, gaze };
bufferedResult.face[i] = { ...newResult.face[i], rotation, box: box6, boxRaw: boxRaw3 };
}
}
// object: interpolate box and boxRaw only
if (!bufferedResult.object || newResult.object.length !== bufferedResult.object.length) {
bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
} else {
for (let i = 0; i < newResult.object.length; i++) {
const box6 = newResult.object[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.object[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.object[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.object[i].boxRaw[j] + b) / localOptions.bufferedFactor);
bufferedResult.object[i] = { ...newResult.object[i], box: box6, boxRaw: boxRaw3 };
}
}
// persons: interpolate the combined person boxes only (reading .persons triggers the result getter)
const newPersons = newResult.persons;
if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length) {
bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
} else {
for (let i = 0; i < newPersons.length; i++) {
bufferedResult.persons[i].box = newPersons[i].box.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box6) / localOptions.bufferedFactor);
}
}
}
async function canvas(inCanvas2, outCanvas2) {
if (!inCanvas2 || !outCanvas2)
return;
@ -10358,22 +10284,18 @@ async function canvas(inCanvas2, outCanvas2) {
outCtx == null ? void 0 : outCtx.drawImage(inCanvas2, 0, 0);
}
async function all(inCanvas2, result, drawOptions) {
const timestamp = now();
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas2)
return;
if (!(inCanvas2 instanceof HTMLCanvasElement))
return;
if (!bufferedResult)
bufferedResult = result;
else if (localOptions.bufferedOutput)
calcBuffered(result, localOptions);
else
bufferedResult = result;
face2(inCanvas2, bufferedResult.face, localOptions);
body2(inCanvas2, bufferedResult.body, localOptions);
hand2(inCanvas2, bufferedResult.hand, localOptions);
object(inCanvas2, bufferedResult.object, localOptions);
face2(inCanvas2, result.face, localOptions);
body2(inCanvas2, result.body, localOptions);
hand2(inCanvas2, result.hand, localOptions);
object(inCanvas2, result.object, localOptions);
gesture(inCanvas2, result.gesture, localOptions);
result.performance.draw = Math.trunc(now() - timestamp);
}
// src/persons.ts
@ -10434,6 +10356,84 @@ function join2(faces, bodies, hands, gestures, shape) {
return persons2;
}
// src/interpolate.ts
// Shared interpolation state: the last smoothed result, reused across calls to calc().
var bufferedResult = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
/**
 * Interpolate a new detection result against the previously buffered one to
 * produce temporally smooth output for video processing.
 *
 * The smoothing weight adapts to result age: factor = 1000 / elapsedMs / 4, so
 * fresh results are smoothed heavily and each smoothed scalar is
 * ((factor - 1) * previous + current) / factor. Smoothing resets (JSON deep
 * copy) for a result type whenever its detection count changes, since elements
 * can no longer be matched index-to-index. Gesture and performance data are
 * copied through, not interpolated.
 *
 * Mutates and returns the module-level bufferedResult.
 */
function calc(newResult) {
  // Clamp elapsed to >= 1ms and the factor to >= 1. The unclamped expression
  // produced Infinity (and NaN outputs) when elapsed was 0, a negative weight
  // (extrapolation past the new value) when the timestamp was in the future,
  // and a factor < 1 for results older than ~250ms. Factor 1 degenerates to a
  // clean pass-through of the new result, which is correct for stale input.
  const elapsed = Math.max(1, Date.now() - newResult.timestamp);
  const bufferedFactor = Math.max(1, 1e3 / elapsed / 4);
  // body: interpolate box, boxRaw and every keypoint position
  if (!bufferedResult.body || newResult.body.length !== bufferedResult.body.length) {
    bufferedResult.body = JSON.parse(JSON.stringify(newResult.body)); // reset on count change
  } else {
    for (let i = 0; i < newResult.body.length; i++) {
      const box6 = newResult.body[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / bufferedFactor);
      const boxRaw3 = newResult.body[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / bufferedFactor);
      const keypoints3 = newResult.body[i].keypoints.map((keypoint, j) => ({
        score: keypoint.score,
        part: keypoint.part,
        position: {
          // guard: a keypoint may be absent from the previously buffered frame
          x: bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.x + keypoint.position.x) / bufferedFactor : keypoint.position.x,
          y: bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / bufferedFactor : keypoint.position.y
        }
      }));
      bufferedResult.body[i] = { ...newResult.body[i], box: box6, boxRaw: boxRaw3, keypoints: keypoints3 };
    }
  }
  // hand: interpolate box, boxRaw, landmarks and every annotation point
  if (!bufferedResult.hand || newResult.hand.length !== bufferedResult.hand.length) {
    bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
  } else {
    for (let i = 0; i < newResult.hand.length; i++) {
      const box6 = newResult.hand[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / bufferedFactor);
      const boxRaw3 = newResult.hand[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / bufferedFactor);
      const landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / bufferedFactor));
      const keys = Object.keys(newResult.hand[i].annotations);
      const annotations3 = {};
      for (const key of keys) {
        annotations3[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / bufferedFactor));
      }
      bufferedResult.hand[i] = { ...newResult.hand[i], box: box6, boxRaw: boxRaw3, landmarks, annotations: annotations3 };
    }
  }
  // face: interpolate box, boxRaw, rotation angles and gaze; the rotation matrix passes through unsmoothed
  if (!bufferedResult.face || newResult.face.length !== bufferedResult.face.length) {
    bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
  } else {
    for (let i = 0; i < newResult.face.length; i++) {
      const box6 = newResult.face[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / bufferedFactor);
      const boxRaw3 = newResult.face[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / bufferedFactor);
      const matrix = newResult.face[i].rotation.matrix;
      const angle = {
        roll: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.roll + newResult.face[i].rotation.angle.roll) / bufferedFactor,
        yaw: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.yaw + newResult.face[i].rotation.angle.yaw) / bufferedFactor,
        pitch: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.pitch + newResult.face[i].rotation.angle.pitch) / bufferedFactor
      };
      const gaze = {
        // NOTE(review): bearing is an angle; linear interpolation is wrong across the wrap-around point — confirm acceptable
        bearing: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.bearing + newResult.face[i].rotation.gaze.bearing) / bufferedFactor,
        strength: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.strength + newResult.face[i].rotation.gaze.strength) / bufferedFactor
      };
      const rotation = { angle, matrix, gaze };
      bufferedResult.face[i] = { ...newResult.face[i], rotation, box: box6, boxRaw: boxRaw3 };
    }
  }
  // object: interpolate box and boxRaw only
  if (!bufferedResult.object || newResult.object.length !== bufferedResult.object.length) {
    bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
  } else {
    for (let i = 0; i < newResult.object.length; i++) {
      const box6 = newResult.object[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.object[i].box[j] + b) / bufferedFactor);
      const boxRaw3 = newResult.object[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.object[i].boxRaw[j] + b) / bufferedFactor);
      bufferedResult.object[i] = { ...newResult.object[i], box: box6, boxRaw: boxRaw3 };
    }
  }
  // persons: interpolate the combined person boxes only (reading .persons triggers the result getter)
  const newPersons = newResult.persons;
  if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length) {
    bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
  } else {
    for (let i = 0; i < newPersons.length; i++) {
      bufferedResult.persons[i].box = newPersons[i].box.map((box6, j) => ((bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box6) / bufferedFactor);
    }
  }
  // gestures and performance metrics are copied through, not interpolated
  bufferedResult.gesture = newResult.gesture;
  bufferedResult.performance = newResult.performance;
  return bufferedResult;
}
// src/sample.ts
var face3 = `
/9j/4AAQSkZJRgABAQEAYABgAAD/4QBoRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUA
@ -11163,7 +11163,7 @@ var version = "2.0.0";
// src/human.ts
var _numTensors, _analyzeMemoryLeaks, _checkSanity, _firstRun, _lastInputSum, _lastCacheDiff, _sanity, _checkBackend, _skipFrame, _warmupBitmap, _warmupCanvas, _warmupNode;
var Human = class {
constructor(userConfig = {}) {
constructor(userConfig) {
__privateAdd(this, _numTensors, void 0);
__privateAdd(this, _analyzeMemoryLeaks, void 0);
__privateAdd(this, _checkSanity, void 0);
@ -11234,9 +11234,10 @@ var Human = class {
if (this.tf.getBackend() === "webgl" || this.tf.getBackend() === "humangl") {
this.tf.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
this.tf.ENV.set("WEBGL_CPU_FORWARD", true);
tf19.ENV.set("WEBGL_FORCE_F16_TEXTURES", true);
this.tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", true);
if (typeof this.config["deallocate"] !== "undefined") {
if (!this.config.object.enabled)
this.tf.ENV.set("WEBGL_FORCE_F16_TEXTURES", true);
if (typeof this.config["deallocate"] !== "undefined" && this.config["deallocate"]) {
log("changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:", true);
this.tf.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);
}
@ -11245,9 +11246,10 @@ var Human = class {
log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
}
await this.tf.ready();
this.perf.backend = Math.trunc(now() - timeStamp);
this.performance.backend = Math.trunc(now() - timeStamp);
}
});
this.next = (result) => calc(result || this.result);
__privateAdd(this, _skipFrame, async (input) => {
if (this.config.cacheSensitivity === 0)
return false;
@ -11338,17 +11340,17 @@ var Human = class {
}
return res;
});
this.config = mergeDeep(config, userConfig || {});
this.tf = tf19;
this.draw = draw_exports;
this.version = version;
this.config = mergeDeep(config, userConfig);
this.state = "idle";
__privateSet(this, _numTensors, 0);
__privateSet(this, _analyzeMemoryLeaks, false);
__privateSet(this, _checkSanity, false);
__privateSet(this, _firstRun, true);
__privateSet(this, _lastCacheDiff, 0);
this.perf = {};
this.performance = { backend: 0, load: 0, image: 0, frames: 0, cached: 0, changed: 0, total: 0, draw: 0 };
this.models = {
face: null,
posenet: null,
@ -11356,7 +11358,6 @@ var Human = class {
efficientpose: null,
movenet: null,
handpose: null,
iris: null,
age: null,
gender: null,
emotion: null,
@ -11389,7 +11390,7 @@ var Human = class {
match(faceEmbedding, db, threshold = 0) {
return match(faceEmbedding, db, threshold);
}
async load(userConfig = {}) {
async load(userConfig) {
this.state = "load";
const timeStamp = now();
if (userConfig)
@ -11463,10 +11464,10 @@ var Human = class {
__privateSet(this, _firstRun, false);
}
const current = Math.trunc(now() - timeStamp);
if (current > (this.perf.load || 0))
this.perf.load = current;
if (current > (this.performance.load || 0))
this.performance.load = current;
}
async detect(input, userConfig = {}) {
async detect(input, userConfig) {
return new Promise(async (resolve) => {
this.state = "config";
let timeStamp;
@ -11487,18 +11488,18 @@ var Human = class {
resolve({ error: "could not convert input to tensor" });
return;
}
this.perf.image = Math.trunc(now() - timeStamp);
this.performance.image = Math.trunc(now() - timeStamp);
this.analyze("Get Image:");
timeStamp = now();
this.config.skipFrame = await __privateGet(this, _skipFrame).call(this, process5.tensor);
if (!this.perf.frames)
this.perf.frames = 0;
if (!this.perf.cached)
this.perf.cached = 0;
this.perf.frames++;
if (!this.performance.frames)
this.performance.frames = 0;
if (!this.performance.cached)
this.performance.cached = 0;
this.performance.frames++;
if (this.config.skipFrame)
this.perf.cached++;
this.perf.changed = Math.trunc(now() - timeStamp);
this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes;
let bodyRes;
@ -11507,15 +11508,15 @@ var Human = class {
let elapsedTime;
if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, process5.tensor) : [];
if (this.perf.face)
delete this.perf.face;
if (this.performance.face)
delete this.performance.face;
} else {
this.state = "run:face";
timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, process5.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.face = elapsedTime;
this.performance.face = elapsedTime;
}
this.analyze("Start Body:");
if (this.config.async) {
@ -11527,8 +11528,8 @@ var Human = class {
bodyRes = this.config.body.enabled ? predict7(process5.tensor, this.config) : [];
else if (this.config.body.modelPath.includes("movenet"))
bodyRes = this.config.body.enabled ? predict8(process5.tensor, this.config) : [];
if (this.perf.body)
delete this.perf.body;
if (this.performance.body)
delete this.performance.body;
} else {
this.state = "run:body";
timeStamp = now();
@ -11542,21 +11543,21 @@ var Human = class {
bodyRes = this.config.body.enabled ? await predict8(process5.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.body = elapsedTime;
this.performance.body = elapsedTime;
}
this.analyze("End Body:");
this.analyze("Start Hand:");
if (this.config.async) {
handRes = this.config.hand.enabled ? predict5(process5.tensor, this.config) : [];
if (this.perf.hand)
delete this.perf.hand;
if (this.performance.hand)
delete this.performance.hand;
} else {
this.state = "run:hand";
timeStamp = now();
handRes = this.config.hand.enabled ? await predict5(process5.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.hand = elapsedTime;
this.performance.hand = elapsedTime;
}
this.analyze("End Hand:");
this.analyze("Start Object:");
@ -11565,8 +11566,8 @@ var Human = class {
objectRes = this.config.object.enabled ? predict9(process5.tensor, this.config) : [];
else if (this.config.object.modelPath.includes("centernet"))
objectRes = this.config.object.enabled ? predict10(process5.tensor, this.config) : [];
if (this.perf.object)
delete this.perf.object;
if (this.performance.object)
delete this.performance.object;
} else {
this.state = "run:object";
timeStamp = now();
@ -11576,7 +11577,7 @@ var Human = class {
objectRes = this.config.object.enabled ? await predict10(process5.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.object = elapsedTime;
this.performance.object = elapsedTime;
}
this.analyze("End Object:");
if (this.config.async)
@ -11586,11 +11587,11 @@ var Human = class {
timeStamp = now();
gestureRes = [...face(faceRes), ...body(bodyRes), ...hand(handRes), ...iris(faceRes)];
if (!this.config.async)
this.perf.gesture = Math.trunc(now() - timeStamp);
else if (this.perf.gesture)
delete this.perf.gesture;
this.performance.gesture = Math.trunc(now() - timeStamp);
else if (this.performance.gesture)
delete this.performance.gesture;
}
this.perf.total = Math.trunc(now() - timeStart);
this.performance.total = Math.trunc(now() - timeStart);
this.state = "idle";
this.result = {
face: faceRes,
@ -11598,7 +11599,7 @@ var Human = class {
hand: handRes,
gesture: gestureRes,
object: objectRes,
performance: this.perf,
performance: this.performance,
canvas: process5.canvas,
timestamp: Date.now(),
get persons() {
@ -11610,7 +11611,7 @@ var Human = class {
resolve(this.result);
});
}
async warmup(userConfig = {}) {
async warmup(userConfig) {
const t0 = now();
if (userConfig)
this.config = mergeDeep(this.config, userConfig);

303
dist/human.node.js vendored
View File

@ -157,7 +157,7 @@ var config = {
modelPath: "blazeface.json",
rotation: false,
maxDetected: 10,
skipFrames: 21,
skipFrames: 15,
minConfidence: 0.2,
iouThreshold: 0.1,
return: false
@ -173,13 +173,13 @@ var config = {
description: {
enabled: true,
modelPath: "faceres.json",
skipFrames: 31,
skipFrames: 16,
minConfidence: 0.1
},
emotion: {
enabled: true,
minConfidence: 0.1,
skipFrames: 32,
skipFrames: 17,
modelPath: "emotion.json"
}
},
@ -191,8 +191,8 @@ var config = {
},
hand: {
enabled: true,
rotation: false,
skipFrames: 32,
rotation: true,
skipFrames: 18,
minConfidence: 0.1,
iouThreshold: 0.1,
maxDetected: 2,
@ -210,7 +210,7 @@ var config = {
minConfidence: 0.2,
iouThreshold: 0.4,
maxDetected: 10,
skipFrames: 41
skipFrames: 19
}
};
@ -4336,10 +4336,10 @@ var calculateGaze = (mesh, box6) => {
(eyeCenter[0] - irisCenter[0]) / eyeSize[0] - offsetIris[0],
eyeRatio * (irisCenter[1] - eyeCenter[1]) / eyeSize[1] - offsetIris[1]
];
let vectorLength = Math.sqrt(eyeDiff[0] ** 2 + eyeDiff[1] ** 2);
vectorLength = Math.min(vectorLength, box6[2] / 2, box6[3] / 2);
const vectorAngle = radians([0, 0], eyeDiff);
return { bearing: vectorAngle, strength: vectorLength };
let strength = Math.sqrt(eyeDiff[0] ** 2 + eyeDiff[1] ** 2);
strength = Math.min(strength, box6[2] / 2, box6[3] / 2);
const bearing = (radians([0, 0], eyeDiff) + Math.PI / 2) % Math.PI;
return { bearing, strength };
};
var calculateFaceAngle = (face5, imageSize) => {
const normalize = (v) => {
@ -4432,7 +4432,9 @@ var detectFace = async (parent, input) => {
parent.state = "run:face";
timeStamp = now();
const faces = await predict(input, parent.config);
parent.perf.face = Math.trunc(now() - timeStamp);
parent.performance.face = Math.trunc(now() - timeStamp);
if (!input.shape || input.shape.length !== 4)
return [];
if (!faces)
return [];
for (let i = 0; i < faces.length; i++) {
@ -4449,7 +4451,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:emotion";
timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await predict2(faces[i].image, parent.config, i, faces.length) : {};
parent.perf.emotion = Math.trunc(now() - timeStamp);
parent.performance.emotion = Math.trunc(now() - timeStamp);
}
parent.analyze("End Emotion:");
parent.analyze("Start Description:");
@ -4459,7 +4461,7 @@ var detectFace = async (parent, input) => {
parent.state = "run:description";
timeStamp = now();
descRes = parent.config.face.description.enabled ? await predict3(faces[i].image, parent.config, i, faces.length) : [];
parent.perf.embedding = Math.trunc(now() - timeStamp);
parent.performance.embedding = Math.trunc(now() - timeStamp);
}
parent.analyze("End Description:");
if (parent.config.async) {
@ -4488,14 +4490,14 @@ var detectFace = async (parent, input) => {
}
parent.analyze("End FaceMesh:");
if (parent.config.async) {
if (parent.perf.face)
delete parent.perf.face;
if (parent.perf.age)
delete parent.perf.age;
if (parent.perf.gender)
delete parent.perf.gender;
if (parent.perf.emotion)
delete parent.perf.emotion;
if (parent.performance.face)
delete parent.performance.face;
if (parent.performance.age)
delete parent.performance.age;
if (parent.performance.gender)
delete parent.performance.gender;
if (parent.performance.emotion)
delete parent.performance.emotion;
}
return faceRes;
};
@ -8088,7 +8090,7 @@ var HandPipeline = class {
const angle = config3.hand.rotation ? computeRotation2(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = getBoxCenter2(currentBox);
const palmCenterNormalized = [palmCenter[0] / image15.shape[2], palmCenter[1] / image15.shape[1]];
const rotatedImage = config3.hand.rotation ? tf11.image.rotateWithOffset(image15, angle, 0, palmCenterNormalized) : image15.clone();
const rotatedImage = config3.hand.rotation && tf11.ENV.flags.IS_BROWSER ? tf11.image.rotateWithOffset(image15, angle, 0, palmCenterNormalized) : image15.clone();
const rotationMatrix = buildRotationMatrix2(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = cutBoxFromImageAndResize2(newBox, rotatedImage, [this.inputSize, this.inputSize]);
@ -9809,10 +9811,8 @@ var options = {
fillPolygons: false,
useDepth: true,
useCurves: false,
bufferedFactor: 3,
bufferedOutput: true
};
var bufferedResult = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
var rad2deg = (theta) => Math.round(theta * 180 / Math.PI);
function point(ctx, x, y, z = 0, localOptions) {
ctx.fillStyle = localOptions.useDepth && z ? `rgba(${127.5 + 2 * z}, ${127.5 - 2 * z}, 255, 0.3)` : localOptions.color;
@ -9995,22 +9995,19 @@ async function face2(inCanvas2, result, drawOptions) {
}
}
if (localOptions.drawGaze && ((_b = (_a = f.rotation) == null ? void 0 : _a.gaze) == null ? void 0 : _b.strength) && ((_d = (_c = f.rotation) == null ? void 0 : _c.gaze) == null ? void 0 : _d.bearing)) {
ctx.strokeStyle = "pink";
ctx.beginPath();
const leftGaze = [
f.annotations["leftEyeIris"][0][0] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2],
f.annotations["leftEyeIris"][0][1] - Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]
f.annotations["leftEyeIris"][0][0] + Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3],
f.annotations["leftEyeIris"][0][1] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]
];
ctx.beginPath();
ctx.moveTo(f.annotations["leftEyeIris"][0][0], f.annotations["leftEyeIris"][0][1]);
ctx.strokeStyle = "pink";
ctx.lineTo(leftGaze[0], leftGaze[1]);
ctx.stroke();
const rightGaze = [
f.annotations["rightEyeIris"][0][0] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2],
f.annotations["rightEyeIris"][0][1] - Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]
f.annotations["rightEyeIris"][0][0] + Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3],
f.annotations["rightEyeIris"][0][1] + Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]
];
ctx.beginPath();
ctx.moveTo(f.annotations["rightEyeIris"][0][0], f.annotations["rightEyeIris"][0][1]);
ctx.strokeStyle = "pink";
ctx.lineTo(rightGaze[0], rightGaze[1]);
ctx.stroke();
}
@ -10277,77 +10274,6 @@ async function person(inCanvas2, result, drawOptions) {
}
}
}
// Smooths detection results between frames by converging the module-level
// bufferedResult toward newResult. A section is deep-cloned only when the
// number of detected items changes; otherwise values are blended in place.
function calcBuffered(newResult, localOptions) {
  const factor = localOptions.bufferedFactor;
  // weighted average step: previous value keeps (factor - 1) parts, new value one part
  const mix = (prev, next) => ((factor - 1) * prev + next) / factor;
  // bodies: blend box, boxRaw and every keypoint position
  if (!bufferedResult.body || newResult.body.length !== bufferedResult.body.length) {
    bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
  } else {
    for (let i = 0; i < newResult.body.length; i++) {
      const prev = bufferedResult.body[i];
      const next = newResult.body[i];
      const box = next.box.map((b, j) => mix(prev.box[j], b));
      const boxRaw = next.boxRaw.map((b, j) => mix(prev.boxRaw[j], b));
      const keypoints = next.keypoints.map((kpt, j) => ({
        score: kpt.score,
        part: kpt.part,
        position: {
          // a keypoint may be missing from the previous frame; fall back to the new value
          x: prev.keypoints[j] ? mix(prev.keypoints[j].position.x, kpt.position.x) : kpt.position.x,
          y: prev.keypoints[j] ? mix(prev.keypoints[j].position.y, kpt.position.y) : kpt.position.y
        }
      }));
      bufferedResult.body[i] = { ...next, box, boxRaw, keypoints };
    }
  }
  // hands: blend box, boxRaw, every landmark and every annotation point
  if (!bufferedResult.hand || newResult.hand.length !== bufferedResult.hand.length) {
    bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
  } else {
    for (let i = 0; i < newResult.hand.length; i++) {
      const prev = bufferedResult.hand[i];
      const next = newResult.hand[i];
      const box = next.box.map((b, j) => mix(prev.box[j], b));
      const boxRaw = next.boxRaw.map((b, j) => mix(prev.boxRaw[j], b));
      const landmarks = next.landmarks.map((landmark, j) => landmark.map((coord, k) => mix(prev.landmarks[j][k], coord)));
      const annotations = {};
      for (const key of Object.keys(next.annotations)) {
        annotations[key] = next.annotations[key].map((val, j) => val.map((coord, k) => mix(prev.annotations[key][j][k], coord)));
      }
      bufferedResult.hand[i] = { ...next, box, boxRaw, landmarks, annotations };
    }
  }
  // faces: blend box, boxRaw, rotation angles and gaze; rotation matrix is taken as-is
  if (!bufferedResult.face || newResult.face.length !== bufferedResult.face.length) {
    bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
  } else {
    for (let i = 0; i < newResult.face.length; i++) {
      const prev = bufferedResult.face[i];
      const next = newResult.face[i];
      const box = next.box.map((b, j) => mix(prev.box[j], b));
      const boxRaw = next.boxRaw.map((b, j) => mix(prev.boxRaw[j], b));
      const angle = {
        roll: mix(prev.rotation.angle.roll, next.rotation.angle.roll),
        yaw: mix(prev.rotation.angle.yaw, next.rotation.angle.yaw),
        pitch: mix(prev.rotation.angle.pitch, next.rotation.angle.pitch)
      };
      // NOTE(review): plain averaging of bearing ignores angle wrap-around — matches original behavior
      const gaze = {
        bearing: mix(prev.rotation.gaze.bearing, next.rotation.gaze.bearing),
        strength: mix(prev.rotation.gaze.strength, next.rotation.gaze.strength)
      };
      bufferedResult.face[i] = { ...next, rotation: { angle, matrix: next.rotation.matrix, gaze }, box, boxRaw };
    }
  }
  // objects: blend box and boxRaw only
  if (!bufferedResult.object || newResult.object.length !== bufferedResult.object.length) {
    bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
  } else {
    for (let i = 0; i < newResult.object.length; i++) {
      const prev = bufferedResult.object[i];
      const next = newResult.object[i];
      const box = next.box.map((b, j) => mix(prev.box[j], b));
      const boxRaw = next.boxRaw.map((b, j) => mix(prev.boxRaw[j], b));
      bufferedResult.object[i] = { ...next, box, boxRaw };
    }
  }
  // persons: blend the enclosing person box only
  const newPersons = newResult.persons;
  if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length) {
    bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
  } else {
    for (let i = 0; i < newPersons.length; i++) {
      bufferedResult.persons[i].box = newPersons[i].box.map((b, j) => mix(bufferedResult.persons[i].box[j], b));
    }
  }
}
async function canvas(inCanvas2, outCanvas2) {
if (!inCanvas2 || !outCanvas2)
return;
@ -10357,22 +10283,18 @@ async function canvas(inCanvas2, outCanvas2) {
outCtx == null ? void 0 : outCtx.drawImage(inCanvas2, 0, 0);
}
async function all(inCanvas2, result, drawOptions) {
const timestamp = now();
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas2)
return;
if (!(inCanvas2 instanceof HTMLCanvasElement))
return;
if (!bufferedResult)
bufferedResult = result;
else if (localOptions.bufferedOutput)
calcBuffered(result, localOptions);
else
bufferedResult = result;
face2(inCanvas2, bufferedResult.face, localOptions);
body2(inCanvas2, bufferedResult.body, localOptions);
hand2(inCanvas2, bufferedResult.hand, localOptions);
object(inCanvas2, bufferedResult.object, localOptions);
face2(inCanvas2, result.face, localOptions);
body2(inCanvas2, result.body, localOptions);
hand2(inCanvas2, result.hand, localOptions);
object(inCanvas2, result.object, localOptions);
gesture(inCanvas2, result.gesture, localOptions);
result.performance.draw = Math.trunc(now() - timestamp);
}
// src/persons.ts
@ -10433,6 +10355,84 @@ function join2(faces, bodies, hands, gestures, shape) {
return persons2;
}
// src/interpolate.ts
var bufferedResult = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
function calc(newResult) {
const bufferedFactor = 1e3 / (Date.now() - newResult.timestamp) / 4;
if (!bufferedResult.body || newResult.body.length !== bufferedResult.body.length) {
bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
} else {
for (let i = 0; i < newResult.body.length; i++) {
const box6 = newResult.body[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / bufferedFactor);
const boxRaw3 = newResult.body[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / bufferedFactor);
const keypoints3 = newResult.body[i].keypoints.map((keypoint, j) => ({
score: keypoint.score,
part: keypoint.part,
position: {
x: bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.x + keypoint.position.x) / bufferedFactor : keypoint.position.x,
y: bufferedResult.body[i].keypoints[j] ? ((bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / bufferedFactor : keypoint.position.y
}
}));
bufferedResult.body[i] = { ...newResult.body[i], box: box6, boxRaw: boxRaw3, keypoints: keypoints3 };
}
}
if (!bufferedResult.hand || newResult.hand.length !== bufferedResult.hand.length) {
bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
} else {
for (let i = 0; i < newResult.hand.length; i++) {
const box6 = newResult.hand[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / bufferedFactor);
const boxRaw3 = newResult.hand[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / bufferedFactor);
const landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / bufferedFactor));
const keys = Object.keys(newResult.hand[i].annotations);
const annotations3 = {};
for (const key of keys) {
annotations3[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / bufferedFactor));
}
bufferedResult.hand[i] = { ...newResult.hand[i], box: box6, boxRaw: boxRaw3, landmarks, annotations: annotations3 };
}
}
if (!bufferedResult.face || newResult.face.length !== bufferedResult.face.length) {
bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
} else {
for (let i = 0; i < newResult.face.length; i++) {
const box6 = newResult.face[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / bufferedFactor);
const boxRaw3 = newResult.face[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / bufferedFactor);
const matrix = newResult.face[i].rotation.matrix;
const angle = {
roll: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.roll + newResult.face[i].rotation.angle.roll) / bufferedFactor,
yaw: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.yaw + newResult.face[i].rotation.angle.yaw) / bufferedFactor,
pitch: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.pitch + newResult.face[i].rotation.angle.pitch) / bufferedFactor
};
const gaze = {
bearing: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.bearing + newResult.face[i].rotation.gaze.bearing) / bufferedFactor,
strength: ((bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.strength + newResult.face[i].rotation.gaze.strength) / bufferedFactor
};
const rotation = { angle, matrix, gaze };
bufferedResult.face[i] = { ...newResult.face[i], rotation, box: box6, boxRaw: boxRaw3 };
}
}
if (!bufferedResult.object || newResult.object.length !== bufferedResult.object.length) {
bufferedResult.object = JSON.parse(JSON.stringify(newResult.object));
} else {
for (let i = 0; i < newResult.object.length; i++) {
const box6 = newResult.object[i].box.map((b, j) => ((bufferedFactor - 1) * bufferedResult.object[i].box[j] + b) / bufferedFactor);
const boxRaw3 = newResult.object[i].boxRaw.map((b, j) => ((bufferedFactor - 1) * bufferedResult.object[i].boxRaw[j] + b) / bufferedFactor);
bufferedResult.object[i] = { ...newResult.object[i], box: box6, boxRaw: boxRaw3 };
}
}
const newPersons = newResult.persons;
if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length) {
bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
} else {
for (let i = 0; i < newPersons.length; i++) {
bufferedResult.persons[i].box = newPersons[i].box.map((box6, j) => ((bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box6) / bufferedFactor);
}
}
bufferedResult.gesture = newResult.gesture;
bufferedResult.performance = newResult.performance;
return bufferedResult;
}
// src/sample.ts
var face3 = `
/9j/4AAQSkZJRgABAQEAYABgAAD/4QBoRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUA
@ -11162,7 +11162,7 @@ var version = "2.0.0";
// src/human.ts
var _numTensors, _analyzeMemoryLeaks, _checkSanity, _firstRun, _lastInputSum, _lastCacheDiff, _sanity, _checkBackend, _skipFrame, _warmupBitmap, _warmupCanvas, _warmupNode;
var Human = class {
constructor(userConfig = {}) {
constructor(userConfig) {
__privateAdd(this, _numTensors, void 0);
__privateAdd(this, _analyzeMemoryLeaks, void 0);
__privateAdd(this, _checkSanity, void 0);
@ -11233,9 +11233,10 @@ var Human = class {
if (this.tf.getBackend() === "webgl" || this.tf.getBackend() === "humangl") {
this.tf.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
this.tf.ENV.set("WEBGL_CPU_FORWARD", true);
tf19.ENV.set("WEBGL_FORCE_F16_TEXTURES", true);
this.tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", true);
if (typeof this.config["deallocate"] !== "undefined") {
if (!this.config.object.enabled)
this.tf.ENV.set("WEBGL_FORCE_F16_TEXTURES", true);
if (typeof this.config["deallocate"] !== "undefined" && this.config["deallocate"]) {
log("changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:", true);
this.tf.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);
}
@ -11244,9 +11245,10 @@ var Human = class {
log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
}
await this.tf.ready();
this.perf.backend = Math.trunc(now() - timeStamp);
this.performance.backend = Math.trunc(now() - timeStamp);
}
});
this.next = (result) => calc(result || this.result);
__privateAdd(this, _skipFrame, async (input) => {
if (this.config.cacheSensitivity === 0)
return false;
@ -11337,17 +11339,17 @@ var Human = class {
}
return res;
});
this.config = mergeDeep(config, userConfig || {});
this.tf = tf19;
this.draw = draw_exports;
this.version = version;
this.config = mergeDeep(config, userConfig);
this.state = "idle";
__privateSet(this, _numTensors, 0);
__privateSet(this, _analyzeMemoryLeaks, false);
__privateSet(this, _checkSanity, false);
__privateSet(this, _firstRun, true);
__privateSet(this, _lastCacheDiff, 0);
this.perf = {};
this.performance = { backend: 0, load: 0, image: 0, frames: 0, cached: 0, changed: 0, total: 0, draw: 0 };
this.models = {
face: null,
posenet: null,
@ -11355,7 +11357,6 @@ var Human = class {
efficientpose: null,
movenet: null,
handpose: null,
iris: null,
age: null,
gender: null,
emotion: null,
@ -11388,7 +11389,7 @@ var Human = class {
match(faceEmbedding, db, threshold = 0) {
return match(faceEmbedding, db, threshold);
}
async load(userConfig = {}) {
async load(userConfig) {
this.state = "load";
const timeStamp = now();
if (userConfig)
@ -11462,10 +11463,10 @@ var Human = class {
__privateSet(this, _firstRun, false);
}
const current = Math.trunc(now() - timeStamp);
if (current > (this.perf.load || 0))
this.perf.load = current;
if (current > (this.performance.load || 0))
this.performance.load = current;
}
async detect(input, userConfig = {}) {
async detect(input, userConfig) {
return new Promise(async (resolve) => {
this.state = "config";
let timeStamp;
@ -11486,18 +11487,18 @@ var Human = class {
resolve({ error: "could not convert input to tensor" });
return;
}
this.perf.image = Math.trunc(now() - timeStamp);
this.performance.image = Math.trunc(now() - timeStamp);
this.analyze("Get Image:");
timeStamp = now();
this.config.skipFrame = await __privateGet(this, _skipFrame).call(this, process5.tensor);
if (!this.perf.frames)
this.perf.frames = 0;
if (!this.perf.cached)
this.perf.cached = 0;
this.perf.frames++;
if (!this.performance.frames)
this.performance.frames = 0;
if (!this.performance.cached)
this.performance.cached = 0;
this.performance.frames++;
if (this.config.skipFrame)
this.perf.cached++;
this.perf.changed = Math.trunc(now() - timeStamp);
this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp);
this.analyze("Check Changed:");
let faceRes;
let bodyRes;
@ -11506,15 +11507,15 @@ var Human = class {
let elapsedTime;
if (this.config.async) {
faceRes = this.config.face.enabled ? detectFace(this, process5.tensor) : [];
if (this.perf.face)
delete this.perf.face;
if (this.performance.face)
delete this.performance.face;
} else {
this.state = "run:face";
timeStamp = now();
faceRes = this.config.face.enabled ? await detectFace(this, process5.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.face = elapsedTime;
this.performance.face = elapsedTime;
}
this.analyze("Start Body:");
if (this.config.async) {
@ -11526,8 +11527,8 @@ var Human = class {
bodyRes = this.config.body.enabled ? predict7(process5.tensor, this.config) : [];
else if (this.config.body.modelPath.includes("movenet"))
bodyRes = this.config.body.enabled ? predict8(process5.tensor, this.config) : [];
if (this.perf.body)
delete this.perf.body;
if (this.performance.body)
delete this.performance.body;
} else {
this.state = "run:body";
timeStamp = now();
@ -11541,21 +11542,21 @@ var Human = class {
bodyRes = this.config.body.enabled ? await predict8(process5.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.body = elapsedTime;
this.performance.body = elapsedTime;
}
this.analyze("End Body:");
this.analyze("Start Hand:");
if (this.config.async) {
handRes = this.config.hand.enabled ? predict5(process5.tensor, this.config) : [];
if (this.perf.hand)
delete this.perf.hand;
if (this.performance.hand)
delete this.performance.hand;
} else {
this.state = "run:hand";
timeStamp = now();
handRes = this.config.hand.enabled ? await predict5(process5.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.hand = elapsedTime;
this.performance.hand = elapsedTime;
}
this.analyze("End Hand:");
this.analyze("Start Object:");
@ -11564,8 +11565,8 @@ var Human = class {
objectRes = this.config.object.enabled ? predict9(process5.tensor, this.config) : [];
else if (this.config.object.modelPath.includes("centernet"))
objectRes = this.config.object.enabled ? predict10(process5.tensor, this.config) : [];
if (this.perf.object)
delete this.perf.object;
if (this.performance.object)
delete this.performance.object;
} else {
this.state = "run:object";
timeStamp = now();
@ -11575,7 +11576,7 @@ var Human = class {
objectRes = this.config.object.enabled ? await predict10(process5.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0)
this.perf.object = elapsedTime;
this.performance.object = elapsedTime;
}
this.analyze("End Object:");
if (this.config.async)
@ -11585,11 +11586,11 @@ var Human = class {
timeStamp = now();
gestureRes = [...face(faceRes), ...body(bodyRes), ...hand(handRes), ...iris(faceRes)];
if (!this.config.async)
this.perf.gesture = Math.trunc(now() - timeStamp);
else if (this.perf.gesture)
delete this.perf.gesture;
this.performance.gesture = Math.trunc(now() - timeStamp);
else if (this.performance.gesture)
delete this.performance.gesture;
}
this.perf.total = Math.trunc(now() - timeStart);
this.performance.total = Math.trunc(now() - timeStart);
this.state = "idle";
this.result = {
face: faceRes,
@ -11597,7 +11598,7 @@ var Human = class {
hand: handRes,
gesture: gestureRes,
object: objectRes,
performance: this.perf,
performance: this.performance,
canvas: process5.canvas,
timestamp: Date.now(),
get persons() {
@ -11609,7 +11610,7 @@ var Human = class {
resolve(this.result);
});
}
async warmup(userConfig = {}) {
async warmup(userConfig) {
const t0 = now();
if (userConfig)
this.config = mergeDeep(this.config, userConfig);

View File

@ -1,17 +1,17 @@
2021-05-30 23:20:36 INFO:  @vladmandic/human version 2.0.0
2021-05-30 23:20:36 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-30 23:20:36 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-05-30 23:20:36 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-05-30 23:20:36 STATE: Build for: node type: node: {"imports":39,"importBytes":415373,"outputBytes":369593,"outputFiles":"dist/human.node.js"}
2021-05-30 23:20:36 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-05-30 23:20:36 STATE: Build for: nodeGPU type: node: {"imports":39,"importBytes":415381,"outputBytes":369597,"outputFiles":"dist/human.node-gpu.js"}
2021-05-30 23:20:36 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-05-30 23:20:36 STATE: Build for: nodeWASM type: node: {"imports":39,"importBytes":415448,"outputBytes":369669,"outputFiles":"dist/human.node-wasm.js"}
2021-05-30 23:20:37 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-05-30 23:20:37 STATE: Build for: browserNoBundle type: esm: {"imports":39,"importBytes":415475,"outputBytes":243743,"outputFiles":"dist/human.esm-nobundle.js"}
2021-05-30 23:20:37 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2478,"outputBytes":1111418,"outputFiles":"dist/tfjs.esm.js"}
2021-05-30 23:20:37 STATE: Build for: browserBundle type: iife: {"imports":39,"importBytes":1525499,"outputBytes":1351568,"outputFiles":"dist/human.js"}
2021-05-30 23:20:38 STATE: Build for: browserBundle type: esm: {"imports":39,"importBytes":1525499,"outputBytes":1351560,"outputFiles":"dist/human.esm.js"}
2021-05-30 23:20:38 INFO:  Generate types: ["src/human.ts"]
2021-05-30 23:20:43 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-30 23:20:43 INFO:  Generate TypeDocs: ["src/human.ts"]
2021-05-31 10:37:01 INFO:  @vladmandic/human version 2.0.0
2021-05-31 10:37:01 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-31 10:37:01 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-05-31 10:37:01 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-05-31 10:37:01 STATE: Build for: node type: node: {"imports":40,"importBytes":416611,"outputBytes":369640,"outputFiles":"dist/human.node.js"}
2021-05-31 10:37:01 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-05-31 10:37:01 STATE: Build for: nodeGPU type: node: {"imports":40,"importBytes":416619,"outputBytes":369644,"outputFiles":"dist/human.node-gpu.js"}
2021-05-31 10:37:01 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-05-31 10:37:02 STATE: Build for: nodeWASM type: node: {"imports":40,"importBytes":416686,"outputBytes":369716,"outputFiles":"dist/human.node-wasm.js"}
2021-05-31 10:37:02 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-05-31 10:37:02 STATE: Build for: browserNoBundle type: esm: {"imports":40,"importBytes":416713,"outputBytes":243751,"outputFiles":"dist/human.esm-nobundle.js"}
2021-05-31 10:37:02 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2478,"outputBytes":1111418,"outputFiles":"dist/tfjs.esm.js"}
2021-05-31 10:37:03 STATE: Build for: browserBundle type: iife: {"imports":40,"importBytes":1526737,"outputBytes":1351575,"outputFiles":"dist/human.js"}
2021-05-31 10:37:03 STATE: Build for: browserBundle type: esm: {"imports":40,"importBytes":1526737,"outputBytes":1351567,"outputFiles":"dist/human.esm.js"}
2021-05-31 10:37:03 INFO:  Generate types: ["src/human.ts"]
2021-05-31 10:37:08 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-31 10:37:08 INFO:  Generate TypeDocs: ["src/human.ts"]

View File

@ -246,7 +246,7 @@ const config: Config = {
// this parameter is not valid in nodejs
maxDetected: 10, // maximum number of faces detected in the input
// should be set to the minimum number for performance
skipFrames: 21, // how many max frames to go without re-running the face bounding box detector
skipFrames: 15, // how many max frames to go without re-running the face bounding box detector
// only used when cacheSensitivity is not zero
// e.g., if model is running at 25 FPS, we can re-use existing bounding
// box for updated face analysis as the head probably hasn't moved much
@ -272,7 +272,7 @@ const config: Config = {
// recommended to enable detector.rotation and mesh.enabled
modelPath: 'faceres.json', // face description model
// can be either absolute path or relative to modelBasePath
skipFrames: 31, // how many max frames to go without re-running the detector
skipFrames: 16, // how many max frames to go without re-running the detector
// only used when cacheSensitivity is not zero
minConfidence: 0.1, // threshold for discarding a prediction
},
@ -280,7 +280,7 @@ const config: Config = {
emotion: {
enabled: true,
minConfidence: 0.1, // threshold for discarding a prediction
skipFrames: 32, // how max many frames to go without re-running the detector
skipFrames: 17, // how max many frames to go without re-running the detector
// only used when cacheSensitivity is not zero
modelPath: 'emotion.json', // face emotion model, can be absolute path or relative to modelBasePath
},
@ -298,9 +298,9 @@ const config: Config = {
hand: {
enabled: true,
rotation: false, // use best-guess rotated hand image or just box with rotation as-is
rotation: true, // use best-guess rotated hand image or just box with rotation as-is
// false means higher performance, but incorrect finger mapping if hand is inverted
skipFrames: 32, // how many max frames to go without re-running the hand bounding box detector
skipFrames: 18, // how many max frames to go without re-running the hand bounding box detector
// only used when cacheSensitivity is not zero
// e.g., if model is running at 25 FPS, we can re-use existing bounding
// box for updated hand skeleton analysis as the hand probably
@ -325,7 +325,7 @@ const config: Config = {
minConfidence: 0.2, // threshold for discarding a prediction
iouThreshold: 0.4, // amount of overlap between two detected objects before one object is removed
maxDetected: 10, // maximum number of objects detected in the input
skipFrames: 41, // how many max frames to go without re-running the detector
skipFrames: 19, // how many max frames to go without re-running the detector
// only used when cacheSensitivity is not zero
},
};

View File

@ -3,7 +3,7 @@
*/
import { TRI468 as triangulation } from '../blazeface/coords';
import { mergeDeep } from '../helpers';
import { mergeDeep, now } from '../helpers';
import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result';
/**
@ -25,7 +25,6 @@ import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result'
* -useDepth: use z-axis coordinate as color shade,
* -useCurves: draw polygons as cures or as lines,
* -bufferedOutput: experimental: allows to call draw methods multiple times for each detection and interpolate results between results thus achieving smoother animations
* -bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.
*/
export interface DrawOptions {
color: string,
@ -45,7 +44,6 @@ export interface DrawOptions {
useDepth: boolean,
useCurves: boolean,
bufferedOutput: boolean,
bufferedFactor: number,
}
export const options: DrawOptions = {
@ -65,12 +63,9 @@ export const options: DrawOptions = {
fillPolygons: <boolean>false,
useDepth: <boolean>true,
useCurves: <boolean>false,
bufferedFactor: <number>3,
bufferedOutput: <boolean>true,
};
let bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };
const rad2deg = (theta) => Math.round((theta * 180) / Math.PI);
function point(ctx, x, y, z = 0, localOptions) {
@ -246,24 +241,23 @@ export async function face(inCanvas: HTMLCanvasElement, result: Array<Face>, dra
}
}
if (localOptions.drawGaze && f.rotation?.gaze?.strength && f.rotation?.gaze?.bearing) {
const leftGaze = [
f.annotations['leftEyeIris'][0][0] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),
f.annotations['leftEyeIris'][0][1] - (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),
];
ctx.beginPath();
ctx.moveTo(f.annotations['leftEyeIris'][0][0], f.annotations['leftEyeIris'][0][1]);
ctx.strokeStyle = 'pink';
ctx.beginPath();
const leftGaze = [
f.annotations['leftEyeIris'][0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),
f.annotations['leftEyeIris'][0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),
];
ctx.moveTo(f.annotations['leftEyeIris'][0][0], f.annotations['leftEyeIris'][0][1]);
ctx.lineTo(leftGaze[0], leftGaze[1]);
ctx.stroke();
const rightGaze = [
f.annotations['rightEyeIris'][0][0] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),
f.annotations['rightEyeIris'][0][1] - (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),
f.annotations['rightEyeIris'][0][0] + (Math.sin(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[3]),
f.annotations['rightEyeIris'][0][1] + (Math.cos(f.rotation.gaze.bearing) * f.rotation.gaze.strength * f.box[2]),
];
ctx.beginPath();
ctx.moveTo(f.annotations['rightEyeIris'][0][0], f.annotations['rightEyeIris'][0][1]);
ctx.strokeStyle = 'pink';
ctx.lineTo(rightGaze[0], rightGaze[1]);
ctx.stroke();
}
}
@ -507,110 +501,6 @@ export async function person(inCanvas: HTMLCanvasElement, result: Array<Person>,
}
}
function calcBuffered(newResult: Result, localOptions) {
// each record is only updated using deep clone when number of detected record changes, otherwise it will converge by itself
// otherwise bufferedResult is a shallow clone of result plus updated local calculated values
// thus mixing by-reference and by-value assignments to minimize memory operations
// interpolate body results
if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) {
bufferedResult.body = JSON.parse(JSON.stringify(newResult.body)); // deep clone once
} else {
for (let i = 0; i < newResult.body.length; i++) {
const box = newResult.body[i].box // update box
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / localOptions.bufferedFactor) as [number, number, number, number];
const boxRaw = newResult.body[i].boxRaw // update boxRaw
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / localOptions.bufferedFactor) as [number, number, number, number];
const keypoints = newResult.body[i].keypoints // update keypoints
.map((keypoint, j) => ({
score: keypoint.score,
part: keypoint.part,
position: {
x: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.x + keypoint.position.x) / localOptions.bufferedFactor : keypoint.position.x,
y: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / localOptions.bufferedFactor : keypoint.position.y,
},
}));
bufferedResult.body[i] = { ...newResult.body[i], box, boxRaw, keypoints }; // shallow clone plus updated values
}
}
// interpolate hand results
if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) {
bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand)); // deep clone once
} else {
for (let i = 0; i < newResult.hand.length; i++) {
const box = (newResult.hand[i].box// update box
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / localOptions.bufferedFactor)) as [number, number, number, number];
const boxRaw = (newResult.hand[i].boxRaw // update boxRaw
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / localOptions.bufferedFactor)) as [number, number, number, number];
const landmarks = newResult.hand[i].landmarks // update landmarks
.map((landmark, j) => landmark
.map((coord, k) => (((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor)) as [number, number, number]);
const keys = Object.keys(newResult.hand[i].annotations); // update annotations
const annotations = {};
for (const key of keys) {
annotations[key] = newResult.hand[i].annotations[key]
.map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
}
bufferedResult.hand[i] = { ...newResult.hand[i], box, boxRaw, landmarks, annotations }; // shallow clone plus updated values
}
}
// interpolate face results
if (!bufferedResult.face || (newResult.face.length !== bufferedResult.face.length)) {
bufferedResult.face = JSON.parse(JSON.stringify(newResult.face)); // deep clone once
} else {
for (let i = 0; i < newResult.face.length; i++) {
const box = (newResult.face[i].box // update box
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / localOptions.bufferedFactor)) as [number, number, number, number];
const boxRaw = (newResult.face[i].boxRaw // update boxRaw
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / localOptions.bufferedFactor)) as [number, number, number, number];
const matrix = newResult.face[i].rotation.matrix;
const angle = {
roll: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.roll + newResult.face[i].rotation.angle.roll) / localOptions.bufferedFactor,
yaw: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.yaw + newResult.face[i].rotation.angle.yaw) / localOptions.bufferedFactor,
pitch: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.angle.pitch + newResult.face[i].rotation.angle.pitch) / localOptions.bufferedFactor,
};
const gaze = {
bearing: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.bearing + newResult.face[i].rotation.gaze.bearing) / localOptions.bufferedFactor, // not correct due to wrap-around
/*
angle: Math.atan2( // average angle is calculated differently
Math.sin(bufferedResult.face[i].rotation.gaze.angle) + Math.sin(newResult.face[i].rotation.gaze.angle),
          Math.cos(bufferedResult.face[i].rotation.gaze.angle) + Math.cos(newResult.face[i].rotation.gaze.angle),
),
*/
strength: ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].rotation.gaze.strength + newResult.face[i].rotation.gaze.strength) / localOptions.bufferedFactor,
};
const rotation = { angle, matrix, gaze };
bufferedResult.face[i] = { ...newResult.face[i], rotation, box, boxRaw }; // shallow clone plus updated values
}
}
// interpolate object detection results
if (!bufferedResult.object || (newResult.object.length !== bufferedResult.object.length)) {
bufferedResult.object = JSON.parse(JSON.stringify(newResult.object)); // deep clone once
} else {
for (let i = 0; i < newResult.object.length; i++) {
const box = newResult.object[i].box // update box
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.object[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw = newResult.object[i].boxRaw // update boxRaw
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.object[i].boxRaw[j] + b) / localOptions.bufferedFactor);
bufferedResult.object[i] = { ...newResult.object[i], box, boxRaw }; // shallow clone plus updated values
}
}
// interpolate person results
const newPersons = newResult.persons; // trigger getter function
if (!bufferedResult.persons || (newPersons.length !== bufferedResult.persons.length)) {
bufferedResult.persons = JSON.parse(JSON.stringify(newPersons));
} else {
for (let i = 0; i < newPersons.length; i++) { // update person box, we don't update the rest as it's updated as reference anyhow
bufferedResult.persons[i].box = (newPersons[i].box
.map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.persons[i].box[j] + box) / localOptions.bufferedFactor)) as [number, number, number, number];
}
}
}
export async function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasElement) {
if (!inCanvas || !outCanvas) return;
if (!(inCanvas instanceof HTMLCanvasElement) || !(outCanvas instanceof HTMLCanvasElement)) return;
@ -619,17 +509,30 @@ export async function canvas(inCanvas: HTMLCanvasElement, outCanvas: HTMLCanvasE
}
export async function all(inCanvas: HTMLCanvasElement, result: Result, drawOptions?: DrawOptions) {
const timestamp = now();
const localOptions = mergeDeep(options, drawOptions);
if (!result || !inCanvas) return;
if (!(inCanvas instanceof HTMLCanvasElement)) return;
if (!bufferedResult) bufferedResult = result; // first pass
else if (localOptions.bufferedOutput) calcBuffered(result, localOptions); // do results interpolation
else bufferedResult = result; // or just use results as-is
face(inCanvas, bufferedResult.face, localOptions);
body(inCanvas, bufferedResult.body, localOptions);
hand(inCanvas, bufferedResult.hand, localOptions);
object(inCanvas, bufferedResult.object, localOptions);
// person(inCanvas, bufferedResult.persons, localOptions);
face(inCanvas, result.face, localOptions);
body(inCanvas, result.body, localOptions);
hand(inCanvas, result.hand, localOptions);
object(inCanvas, result.object, localOptions);
// person(inCanvas, result.persons, localOptions);
gesture(inCanvas, result.gesture, localOptions); // gestures do not have buffering
/*
if (!bufferedResult) bufferedResult = result; // first pass
else if (localOptions.bufferedOutput) calcBuffered(result); // do results interpolation
else bufferedResult = result; // or just use results as-is
const promises: Promise<void>[] = [];
promises.push(face(inCanvas, bufferedResult.face, localOptions));
promises.push(body(inCanvas, bufferedResult.body, localOptions));
promises.push(hand(inCanvas, bufferedResult.hand, localOptions));
promises.push(object(inCanvas, bufferedResult.object, localOptions));
// promises.push(person(inCanvas, bufferedResult.persons, localOptions));
promises.push(gesture(inCanvas, result.gesture, localOptions)); // gestures do not have buffering
// await Promise.all(promises);
*/
result.performance.draw = Math.trunc(now() - timestamp);
}

View File

@ -8,9 +8,10 @@ import * as facemesh from './blazeface/facemesh';
import * as emotion from './emotion/emotion';
import * as faceres from './faceres/faceres';
import { Face } from './result';
import { Tensor } from './tfjs/types';
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
const rad2deg = (theta) => (theta * 180) / Math.PI;
const rad2deg = (theta) => Math.round((theta * 180) / Math.PI);
const calculateGaze = (mesh, box): { bearing: number, strength: number } => {
const radians = (pt1, pt2) => Math.atan2(pt1[1] - pt2[1], pt1[0] - pt2[0]); // function to calculate angle between any two points
@ -31,11 +32,11 @@ const calculateGaze = (mesh, box): { bearing: number, strength: number } => {
(eyeCenter[0] - irisCenter[0]) / eyeSize[0] - offsetIris[0],
eyeRatio * (irisCenter[1] - eyeCenter[1]) / eyeSize[1] - offsetIris[1],
];
let vectorLength = Math.sqrt((eyeDiff[0] ** 2) + (eyeDiff[1] ** 2)); // vector length is a diagonal between two differences
vectorLength = Math.min(vectorLength, box[2] / 2, box[3] / 2); // limit strength to half of box size
const vectorAngle = radians([0, 0], eyeDiff); // using eyeDiff instead eyeCenter/irisCenter combo due to manual adjustments
let strength = Math.sqrt((eyeDiff[0] ** 2) + (eyeDiff[1] ** 2)); // vector length is a diagonal between two differences
strength = Math.min(strength, box[2] / 2, box[3] / 2); // limit strength to half of box size to avoid clipping due to low precision
const bearing = (radians([0, 0], eyeDiff) + (Math.PI / 2)) % Math.PI; // using eyeDiff instead eyeCenter/irisCenter combo due to manual adjustments and rotate clockwise 90degrees
return { bearing: vectorAngle, strength: vectorLength };
return { bearing, strength };
};
const calculateFaceAngle = (face, imageSize): {
@ -137,7 +138,7 @@ const calculateFaceAngle = (face, imageSize): {
return { angle, matrix, gaze };
};
export const detectFace = async (parent, input): Promise<Face[]> => {
export const detectFace = async (parent /* instance of human */, input: Tensor): Promise<Face[]> => {
// run facemesh, includes blazeface and iris
// eslint-disable-next-line no-async-promise-executor
let timeStamp;
@ -150,7 +151,8 @@ export const detectFace = async (parent, input): Promise<Face[]> => {
parent.state = 'run:face';
timeStamp = now();
const faces = await facemesh.predict(input, parent.config);
parent.perf.face = Math.trunc(now() - timeStamp);
parent.performance.face = Math.trunc(now() - timeStamp);
if (!input.shape || input.shape.length !== 4) return [];
if (!faces) return [];
// for (const face of faces) {
for (let i = 0; i < faces.length; i++) {
@ -172,7 +174,7 @@ export const detectFace = async (parent, input): Promise<Face[]> => {
parent.state = 'run:emotion';
timeStamp = now();
emotionRes = parent.config.face.emotion.enabled ? await emotion.predict(faces[i].image, parent.config, i, faces.length) : {};
parent.perf.emotion = Math.trunc(now() - timeStamp);
parent.performance.emotion = Math.trunc(now() - timeStamp);
}
parent.analyze('End Emotion:');
@ -184,7 +186,7 @@ export const detectFace = async (parent, input): Promise<Face[]> => {
parent.state = 'run:description';
timeStamp = now();
descRes = parent.config.face.description.enabled ? await faceres.predict(faces[i].image, parent.config, i, faces.length) : [];
parent.perf.embedding = Math.trunc(now() - timeStamp);
parent.performance.embedding = Math.trunc(now() - timeStamp);
}
parent.analyze('End Description:');
@ -226,10 +228,10 @@ export const detectFace = async (parent, input): Promise<Face[]> => {
}
parent.analyze('End FaceMesh:');
if (parent.config.async) {
if (parent.perf.face) delete parent.perf.face;
if (parent.perf.age) delete parent.perf.age;
if (parent.perf.gender) delete parent.perf.gender;
if (parent.perf.emotion) delete parent.perf.emotion;
if (parent.performance.face) delete parent.performance.face;
if (parent.performance.age) delete parent.performance.age;
if (parent.performance.gender) delete parent.performance.gender;
if (parent.performance.emotion) delete parent.performance.emotion;
}
return faceRes;
};

View File

@ -109,7 +109,7 @@ export class HandPipeline {
const angle = config.hand.rotation ? util.computeRotation(currentBox.palmLandmarks[palmLandmarksPalmBase], currentBox.palmLandmarks[palmLandmarksMiddleFingerBase]) : 0;
const palmCenter = box.getBoxCenter(currentBox);
const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];
const rotatedImage = config.hand.rotation ? tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized) : image.clone();
const rotatedImage = config.hand.rotation && tf.ENV.flags.IS_BROWSER ? tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized) : image.clone();
const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);
const newBox = useFreshBox ? this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;
const croppedInput = box.cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]);

View File

@ -23,6 +23,7 @@ import * as gesture from './gesture/gesture';
import * as image from './image/image';
import * as draw from './draw/draw';
import * as persons from './persons';
import * as interpolate from './interpolate';
import * as sample from './sample';
import * as app from '../package.json';
import { Tensor } from './tfjs/types';
@ -60,7 +61,7 @@ type Model = unknown;
* @param userConfig: {@link Config}
*/
export class Human {
/** Current version of Human library in semver format */
/** Current version of Human library in *semver* format */
version: string;
/** Current configuration
* - Details: {@link Config}
@ -72,6 +73,7 @@ export class Human {
result: Result;
/** Current state of Human library
* - Can be polled to determine operations that are currently executed
* - Progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
*/
state: string;
/** @internal: Instance of current image being processed */
@ -105,7 +107,6 @@ export class Human {
efficientpose: Model | null,
movenet: Model | null,
handpose: [Model, Model] | null,
iris: Model | null,
age: Model | null,
gender: Model | null,
emotion: Model | null,
@ -124,14 +125,14 @@ export class Human {
centernet: typeof centernet;
faceres: typeof faceres;
};
/** Face triangualtion array of 468 points, used for triangle references between points */
  /** Reference face triangulation array of 468 points, used for triangle references between points */
faceTriangulation: typeof facemesh.triangulation;
/** UV map of 468 values, used for 3D mapping of the face mesh */
  /** Reference UV map of 468 values, used for 3D mapping of the face mesh */
faceUVMap: typeof facemesh.uvmap;
/** Platform and agent information detected by Human */
sysinfo: { platform: string, agent: string };
/** Performance object that contains values for all recently performed operations */
perf: Record<string, unknown>; // perf members are dynamically defined as needed
performance: Record<string, unknown>; // perf members are dynamically defined as needed
#numTensors: number;
#analyzeMemoryLeaks: boolean;
#checkSanity: boolean;
@ -145,18 +146,18 @@ export class Human {
* Creates instance of Human library that is futher used for all operations
* @param userConfig: {@link Config}
*/
constructor(userConfig: Config | Record<string, unknown> = {}) {
constructor(userConfig?: Config | Record<string, unknown>) {
this.config = mergeDeep(defaults, userConfig || {});
this.tf = tf;
this.draw = draw;
this.version = app.version;
this.config = mergeDeep(defaults, userConfig);
this.state = 'idle';
this.#numTensors = 0;
this.#analyzeMemoryLeaks = false;
this.#checkSanity = false;
this.#firstRun = true;
this.#lastCacheDiff = 0;
this.perf = {};
this.performance = { backend: 0, load: 0, image: 0, frames: 0, cached: 0, changed: 0, total: 0, draw: 0 };
// object that contains all initialized models
this.models = {
face: null,
@ -165,7 +166,6 @@ export class Human {
efficientpose: null,
movenet: null,
handpose: null,
iris: null,
age: null,
gender: null,
emotion: null,
@ -253,9 +253,9 @@ export class Human {
/** Load method preloads all configured models on-demand
* - Not explicitly required as any required model is load implicitly on it's first run
* @param userConfig: {@link Config}
* @param userConfig?: {@link Config}
*/
async load(userConfig: Config | Record<string, unknown> = {}) {
async load(userConfig?: Config | Record<string, unknown>) {
this.state = 'load';
const timeStamp = now();
if (userConfig) this.config = mergeDeep(this.config, userConfig);
@ -315,7 +315,7 @@ export class Human {
}
const current = Math.trunc(now() - timeStamp);
if (current > (this.perf.load as number || 0)) this.perf.load = current;
if (current > (this.performance.load as number || 0)) this.performance.load = current;
}
// check if backend needs initialization if it changed
@ -366,9 +366,9 @@ export class Human {
if (this.tf.getBackend() === 'webgl' || this.tf.getBackend() === 'humangl') {
this.tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
this.tf.ENV.set('WEBGL_CPU_FORWARD', true);
tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
this.tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
if (typeof this.config['deallocate'] !== 'undefined') {
if (!this.config.object.enabled) this.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
if (typeof this.config['deallocate'] !== 'undefined' && this.config['deallocate']) { // hidden param
log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', true);
this.tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
}
@ -376,10 +376,18 @@ export class Human {
if (this.config.debug) log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
}
await this.tf.ready();
this.perf.backend = Math.trunc(now() - timeStamp);
this.performance.backend = Math.trunc(now() - timeStamp);
}
}
/**
* Runs interpolation using last known result and returns smoothened result
* Interpolation is based on time since last known result so can be called independently
* @param result?: use specific result set to run interpolation on
* @returns result: {@link Result}
*/
next = (result?: Result) => interpolate.calc(result || this.result) as Result;
// check if input changed sufficiently to trigger new detections
/** @hidden */
#skipFrame = async (input) => {
@ -414,10 +422,10 @@ export class Human {
* - Run inference for all configured models
* - Process and return result: {@link Result}
* @param input: Input
* @param userConfig: Config
* @param userConfig?: Config
* @returns result: Result
*/
async detect(input: Input, userConfig: Config | Record<string, unknown> = {}): Promise<Result | Error> {
async detect(input: Input, userConfig?: Config | Record<string, unknown>): Promise<Result | Error> {
// detection happens inside a promise
return new Promise(async (resolve) => {
this.state = 'config';
@ -466,18 +474,18 @@ export class Human {
resolve({ error: 'could not convert input to tensor' });
return;
}
this.perf.image = Math.trunc(now() - timeStamp);
this.performance.image = Math.trunc(now() - timeStamp);
this.analyze('Get Image:');
timeStamp = now();
// @ts-ignore hidden dynamic property that is not part of definitions
this.config.skipFrame = await this.#skipFrame(process.tensor);
if (!this.perf.frames) this.perf.frames = 0;
if (!this.perf.cached) this.perf.cached = 0;
(this.perf.frames as number)++;
if (!this.performance.frames) this.performance.frames = 0;
if (!this.performance.cached) this.performance.cached = 0;
(this.performance.frames as number)++;
// @ts-ignore hidden dynamic property that is not part of definitions
if (this.config.skipFrame) this.perf.cached++;
this.perf.changed = Math.trunc(now() - timeStamp);
if (this.config.skipFrame) this.performance.cached++;
this.performance.changed = Math.trunc(now() - timeStamp);
this.analyze('Check Changed:');
// prepare where to store model results
@ -491,13 +499,13 @@ export class Human {
// run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
if (this.config.async) {
faceRes = this.config.face.enabled ? face.detectFace(this, process.tensor) : [];
if (this.perf.face) delete this.perf.face;
if (this.performance.face) delete this.performance.face;
} else {
this.state = 'run:face';
timeStamp = now();
faceRes = this.config.face.enabled ? await face.detectFace(this, process.tensor) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.perf.face = elapsedTime;
if (elapsedTime > 0) this.performance.face = elapsedTime;
}
// run body: can be posenet, blazepose, efficientpose, movenet
@ -507,7 +515,7 @@ export class Human {
else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? blazepose.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('efficientpose')) bodyRes = this.config.body.enabled ? efficientpose.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('movenet')) bodyRes = this.config.body.enabled ? movenet.predict(process.tensor, this.config) : [];
if (this.perf.body) delete this.perf.body;
if (this.performance.body) delete this.performance.body;
} else {
this.state = 'run:body';
timeStamp = now();
@ -516,7 +524,7 @@ export class Human {
else if (this.config.body.modelPath.includes('efficientpose')) bodyRes = this.config.body.enabled ? await efficientpose.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('movenet')) bodyRes = this.config.body.enabled ? await movenet.predict(process.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.perf.body = elapsedTime;
if (elapsedTime > 0) this.performance.body = elapsedTime;
}
this.analyze('End Body:');
@ -524,13 +532,13 @@ export class Human {
this.analyze('Start Hand:');
if (this.config.async) {
handRes = this.config.hand.enabled ? handpose.predict(process.tensor, this.config) : [];
if (this.perf.hand) delete this.perf.hand;
if (this.performance.hand) delete this.performance.hand;
} else {
this.state = 'run:hand';
timeStamp = now();
handRes = this.config.hand.enabled ? await handpose.predict(process.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.perf.hand = elapsedTime;
if (elapsedTime > 0) this.performance.hand = elapsedTime;
}
this.analyze('End Hand:');
@ -539,14 +547,14 @@ export class Human {
if (this.config.async) {
if (this.config.object.modelPath.includes('nanodet')) objectRes = this.config.object.enabled ? nanodet.predict(process.tensor, this.config) : [];
else if (this.config.object.modelPath.includes('centernet')) objectRes = this.config.object.enabled ? centernet.predict(process.tensor, this.config) : [];
if (this.perf.object) delete this.perf.object;
if (this.performance.object) delete this.performance.object;
} else {
this.state = 'run:object';
timeStamp = now();
if (this.config.object.modelPath.includes('nanodet')) objectRes = this.config.object.enabled ? await nanodet.predict(process.tensor, this.config) : [];
else if (this.config.object.modelPath.includes('centernet')) objectRes = this.config.object.enabled ? await centernet.predict(process.tensor, this.config) : [];
elapsedTime = Math.trunc(now() - timeStamp);
if (elapsedTime > 0) this.perf.object = elapsedTime;
if (elapsedTime > 0) this.performance.object = elapsedTime;
}
this.analyze('End Object:');
@ -558,11 +566,11 @@ export class Human {
if (this.config.gesture.enabled) {
timeStamp = now();
gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
else if (this.perf.gesture) delete this.perf.gesture;
if (!this.config.async) this.performance.gesture = Math.trunc(now() - timeStamp);
else if (this.performance.gesture) delete this.performance.gesture;
}
this.perf.total = Math.trunc(now() - timeStart);
this.performance.total = Math.trunc(now() - timeStart);
this.state = 'idle';
this.result = {
face: faceRes,
@ -570,7 +578,7 @@ export class Human {
hand: handRes,
gesture: gestureRes,
object: objectRes,
performance: this.perf,
performance: this.performance,
canvas: process.canvas,
timestamp: Date.now(),
get persons() { return persons.join(faceRes, bodyRes, handRes, gestureRes, process?.tensor?.shape); },
@ -666,9 +674,9 @@ export class Human {
  /** Warmup method pre-initializes all models for faster inference
* - can take significant time on startup
* - only used for `webgl` and `humangl` backends
* @param userConfig: Config
* @param userConfig?: Config
*/
async warmup(userConfig: Config | Record<string, unknown> = {}): Promise<Result | { error }> {
async warmup(userConfig?: Config | Record<string, unknown>): Promise<Result | { error }> {
const t0 = now();
if (userConfig) this.config = mergeDeep(this.config, userConfig);
if (!this.config.warmup || this.config.warmup === 'none') return { error: 'null' };

114
src/interpolate.ts Normal file
View File

@ -0,0 +1,114 @@
/**
 * Module that interpolates results for smoother animations
 */

import type { Result, Face, Body, Hand, Item, Gesture, Person } from './result';

// module-level accumulator: converges toward the latest result over successive calls
const bufferedResult: Result = { face: [], body: [], hand: [], gesture: [], object: [], persons: [], performance: {}, timestamp: 0 };

/**
 * Interpolates a new detection result against the previously buffered one and returns the smoothed result
 * - Each record is deep-cloned only when the number of detected records changes; otherwise values converge by themselves
 * - bufferedResult entries are shallow clones of the new result plus locally recalculated values,
 *   mixing by-reference and by-value assignments to minimize memory operations
 * @param newResult: latest detection result to fold into the buffer
 * @returns smoothed result sharing non-interpolated fields with newResult by reference
 */
export function calc(newResult: Result): Result {
  // time-based smoothing factor: the longer since the last result, the closer output tracks the new values
  // elapsed is clamped to >= 1ms to avoid division by zero (Infinity factor -> NaN coordinates) when
  // called twice within the same millisecond, and the factor is clamped to >= 1 so stale results
  // (elapsed > 250ms) snap to the new values instead of extrapolating past them with a negative weight
  const elapsed = Math.max(1, Date.now() - newResult.timestamp);
  const bufferedFactor = Math.max(1, 1000 / elapsed / 4);
  // weighted average of previous (buffered) and current value
  const mix = (prev: number, next: number) => ((bufferedFactor - 1) * prev + next) / bufferedFactor;

  // interpolate body results
  if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) {
    bufferedResult.body = JSON.parse(JSON.stringify(newResult.body as Body[])); // deep clone once
  } else {
    for (let i = 0; i < newResult.body.length; i++) {
      const box = newResult.body[i].box // update box
        .map((b, j) => mix(bufferedResult.body[i].box[j], b)) as [number, number, number, number];
      const boxRaw = newResult.body[i].boxRaw // update boxRaw
        .map((b, j) => mix(bufferedResult.body[i].boxRaw[j], b)) as [number, number, number, number];
      const keypoints = newResult.body[i].keypoints // update keypoints
        .map((keypoint, j) => ({
          score: keypoint.score,
          part: keypoint.part,
          position: { // previous keypoint may be missing, in which case the new value is used as-is
            x: bufferedResult.body[i].keypoints[j] ? mix(bufferedResult.body[i].keypoints[j].position.x, keypoint.position.x) : keypoint.position.x,
            y: bufferedResult.body[i].keypoints[j] ? mix(bufferedResult.body[i].keypoints[j].position.y, keypoint.position.y) : keypoint.position.y,
          },
        }));
      bufferedResult.body[i] = { ...newResult.body[i], box, boxRaw, keypoints }; // shallow clone plus updated values
    }
  }

  // interpolate hand results
  if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) {
    bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand as Hand[])); // deep clone once
  } else {
    for (let i = 0; i < newResult.hand.length; i++) {
      const box = (newResult.hand[i].box // update box
        .map((b, j) => mix(bufferedResult.hand[i].box[j], b))) as [number, number, number, number];
      const boxRaw = (newResult.hand[i].boxRaw // update boxRaw
        .map((b, j) => mix(bufferedResult.hand[i].boxRaw[j], b))) as [number, number, number, number];
      const landmarks = newResult.hand[i].landmarks // update landmarks
        .map((landmark, j) => landmark
          .map((coord, k) => mix(bufferedResult.hand[i].landmarks[j][k], coord)) as [number, number, number]);
      const keys = Object.keys(newResult.hand[i].annotations); // update annotations
      const annotations = {};
      for (const key of keys) {
        annotations[key] = newResult.hand[i].annotations[key]
          .map((val, j) => val.map((coord, k) => mix(bufferedResult.hand[i].annotations[key][j][k], coord)));
      }
      bufferedResult.hand[i] = { ...newResult.hand[i], box, boxRaw, landmarks, annotations }; // shallow clone plus updated values
    }
  }

  // interpolate face results
  if (!bufferedResult.face || (newResult.face.length !== bufferedResult.face.length)) {
    bufferedResult.face = JSON.parse(JSON.stringify(newResult.face as Face[])); // deep clone once
  } else {
    for (let i = 0; i < newResult.face.length; i++) {
      const box = (newResult.face[i].box // update box
        .map((b, j) => mix(bufferedResult.face[i].box[j], b))) as [number, number, number, number];
      const boxRaw = (newResult.face[i].boxRaw // update boxRaw
        .map((b, j) => mix(bufferedResult.face[i].boxRaw[j], b))) as [number, number, number, number];
      const matrix = newResult.face[i].rotation.matrix; // rotation matrix is not interpolated, latest value is carried over
      const angle = {
        roll: mix(bufferedResult.face[i].rotation.angle.roll, newResult.face[i].rotation.angle.roll),
        yaw: mix(bufferedResult.face[i].rotation.angle.yaw, newResult.face[i].rotation.angle.yaw),
        pitch: mix(bufferedResult.face[i].rotation.angle.pitch, newResult.face[i].rotation.angle.pitch),
      };
      const gaze = {
        // not fully correct due to projection on circle, also causes wrap-around draw on jump from negative to positive
        bearing: mix(bufferedResult.face[i].rotation.gaze.bearing, newResult.face[i].rotation.gaze.bearing),
        strength: mix(bufferedResult.face[i].rotation.gaze.strength, newResult.face[i].rotation.gaze.strength),
      };
      const rotation = { angle, matrix, gaze };
      bufferedResult.face[i] = { ...newResult.face[i], rotation, box, boxRaw }; // shallow clone plus updated values
    }
  }

  // interpolate object detection results
  if (!bufferedResult.object || (newResult.object.length !== bufferedResult.object.length)) {
    bufferedResult.object = JSON.parse(JSON.stringify(newResult.object as Item[])); // deep clone once
  } else {
    for (let i = 0; i < newResult.object.length; i++) {
      const box = newResult.object[i].box // update box
        .map((b, j) => mix(bufferedResult.object[i].box[j], b));
      const boxRaw = newResult.object[i].boxRaw // update boxRaw
        .map((b, j) => mix(bufferedResult.object[i].boxRaw[j], b));
      bufferedResult.object[i] = { ...newResult.object[i], box, boxRaw }; // shallow clone plus updated values
    }
  }

  // interpolate person results
  const newPersons = newResult.persons; // trigger getter function
  if (!bufferedResult.persons || (newPersons.length !== bufferedResult.persons.length)) {
    bufferedResult.persons = JSON.parse(JSON.stringify(newPersons as Person[]));
  } else {
    for (let i = 0; i < newPersons.length; i++) { // update person box, we don't update the rest as it's updated as reference anyhow
      bufferedResult.persons[i].box = (newPersons[i].box
        .map((b, j) => mix(bufferedResult.persons[i].box[j], b))) as [number, number, number, number];
    }
  }

  // just copy latest gestures without interpolation
  bufferedResult.gesture = newResult.gesture as Gesture[];
  bufferedResult.performance = newResult.performance;
  return bufferedResult;
}

View File

@ -176,7 +176,7 @@ export interface Result {
/** {@link Object}: detection & analysis results */
object: Array<Item>
/** global performance object with timing values for each operation */
readonly performance: Record<string, unknown>,
performance: Record<string, unknown>,
/** optional processed canvas that can be used to draw input on screen */
readonly canvas?: OffscreenCanvas | HTMLCanvasElement,
/** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */

View File

@ -1,120 +1,169 @@
2021-05-30 18:44:32 INFO:  @vladmandic/human version 2.0.0
2021-05-30 18:44:32 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-30 18:44:32 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-05-30 18:44:32 INFO:  test-node.js start
2021-05-30 18:44:34 STATE: test-node.js passed: create human
2021-05-30 18:44:34 INFO:  test-node.js human version: 2.0.0
2021-05-30 18:44:34 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-30 18:44:34 INFO:  test-node.js tfjs version: 3.6.0
2021-05-30 18:44:34 STATE: test-node.js passed: set backend: tensorflow
2021-05-30 18:44:34 STATE: test-node.js passed: load models
2021-05-30 18:44:34 STATE: test-node.js result: defined models: 14 loaded models: 6
2021-05-30 18:44:34 STATE: test-node.js passed: warmup: none default
2021-05-30 18:44:36 STATE: test-node.js passed: warmup: face default
2021-05-30 18:44:36 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-05-30 18:44:36 DATA:  test-node.js result: performance: load: 351 total: 1597
2021-05-30 18:44:38 STATE: test-node.js passed: warmup: body default
2021-05-30 18:44:38 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-30 18:44:38 DATA:  test-node.js result: performance: load: 351 total: 1550
2021-05-30 18:44:38 INFO:  test-node.js test body variants
2021-05-30 18:44:39 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-30 18:44:40 STATE: test-node.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-30 18:44:40 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-05-30 18:44:40 DATA:  test-node.js result: performance: load: 351 total: 950
2021-05-30 18:44:40 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-30 18:44:41 STATE: test-node.js passed: detect: assets/human-sample-body.jpg movenet
2021-05-30 18:44:41 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-30 18:44:41 DATA:  test-node.js result: performance: load: 351 total: 386
2021-05-30 18:44:41 STATE: test-node.js passed: detect: random default
2021-05-30 18:44:41 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0,"keypoints":0}
2021-05-30 18:44:41 DATA:  test-node.js result: performance: load: 351 total: 181
2021-05-30 18:44:41 INFO:  test-node.js test: first instance
2021-05-30 18:44:42 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-30 18:44:42 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-30 18:44:42 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-30 18:44:42 DATA:  test-node.js result: performance: load: 351 total: 113
2021-05-30 18:44:42 INFO:  test-node.js test: second instance
2021-05-30 18:44:42 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-30 18:44:43 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-30 18:44:43 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.68,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-30 18:44:43 DATA:  test-node.js result: performance: load: 3 total: 1446
2021-05-30 18:44:43 INFO:  test-node.js test: concurrent
2021-05-30 18:44:44 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-30 18:44:44 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-30 18:44:45 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-30 18:44:46 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-30 18:44:51 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-30 18:44:51 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-30 18:44:51 DATA:  test-node.js result: performance: load: 351 total: 5303
2021-05-30 18:44:51 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-30 18:44:51 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-30 18:44:51 DATA:  test-node.js result: performance: load: 3 total: 5303
2021-05-30 18:44:51 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-30 18:44:51 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-30 18:44:51 DATA:  test-node.js result: performance: load: 351 total: 5303
2021-05-30 18:44:51 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-30 18:44:51 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-30 18:44:51 DATA:  test-node.js result: performance: load: 3 total: 5303
2021-05-30 18:44:51 INFO:  test-node.js test complete: 16908 ms
2021-05-30 18:44:51 INFO:  test-node-gpu.js start
2021-05-30 18:44:52 WARN:  test-node-gpu.js stderr: 2021-05-30 18:44:52.591364: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-05-30 18:44:52 WARN:  test-node-gpu.js stderr: 2021-05-30 18:44:52.796392: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-05-30 18:44:52 WARN:  test-node-gpu.js stderr: 2021-05-30 18:44:52.796422: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-05-30 18:44:52 STATE: test-node-gpu.js passed: create human
2021-05-30 18:44:52 INFO:  test-node-gpu.js human version: 2.0.0
2021-05-30 18:44:52 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-30 18:44:52 INFO:  test-node-gpu.js tfjs version: 3.6.0
2021-05-30 18:44:53 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-05-30 18:44:53 STATE: test-node-gpu.js passed: load models
2021-05-30 18:44:53 STATE: test-node-gpu.js result: defined models: 14 loaded models: 6
2021-05-30 18:44:53 STATE: test-node-gpu.js passed: warmup: none default
2021-05-30 18:44:54 STATE: test-node-gpu.js passed: warmup: face default
2021-05-30 18:44:55 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-05-30 18:44:55 DATA:  test-node-gpu.js result: performance: load: 337 total: 1707
2021-05-30 18:44:56 STATE: test-node-gpu.js passed: warmup: body default
2021-05-30 18:44:56 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-30 18:44:56 DATA:  test-node-gpu.js result: performance: load: 337 total: 1518
2021-05-30 18:44:56 INFO:  test-node-gpu.js test body variants
2021-05-30 18:44:57 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-30 18:44:58 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-30 18:44:58 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-05-30 18:44:58 DATA:  test-node-gpu.js result: performance: load: 337 total: 1060
2021-05-30 18:44:59 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-30 18:44:59 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg movenet
2021-05-30 18:44:59 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-30 18:44:59 DATA:  test-node-gpu.js result: performance: load: 337 total: 321
2021-05-30 18:45:00 STATE: test-node-gpu.js passed: detect: random default
2021-05-30 18:45:00 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0,"keypoints":0}
2021-05-30 18:45:00 DATA:  test-node-gpu.js result: performance: load: 337 total: 182
2021-05-30 18:45:00 INFO:  test-node-gpu.js test: first instance
2021-05-30 18:45:00 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-30 18:45:00 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-30 18:45:00 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.72,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-30 18:45:00 DATA:  test-node-gpu.js result: performance: load: 337 total: 112
2021-05-30 18:45:00 INFO:  test-node-gpu.js test: second instance
2021-05-30 18:45:01 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-30 18:45:02 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-30 18:45:02 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.68,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-30 18:45:02 DATA:  test-node-gpu.js result: performance: load: 4 total: 1445
2021-05-30 18:45:02 INFO:  test-node-gpu.js test: concurrent
2021-05-30 18:45:02 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-30 18:45:02 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-30 18:45:03 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-30 18:45:04 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-30 18:45:09 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-30 18:45:09 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-30 18:45:09 DATA:  test-node-gpu.js result: performance: load: 337 total: 5272
2021-05-30 18:45:09 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-30 18:45:09 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-30 18:45:09 DATA:  test-node-gpu.js result: performance: load: 4 total: 5272
2021-05-30 18:45:09 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-30 18:45:09 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-30 18:45:09 DATA:  test-node-gpu.js result: performance: load: 337 total: 5272
2021-05-30 18:45:09 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-30 18:45:09 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-30 18:45:09 DATA:  test-node-gpu.js result: performance: load: 4 total: 5272
2021-05-30 18:45:09 INFO:  test-node-gpu.js test complete: 17008 ms
2021-05-30 18:45:09 INFO:  test-node-wasm.js start
2021-05-30 18:45:10 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
2021-05-30 18:45:10 ERROR: test-node-wasm.js aborting test
2021-05-30 18:45:10 INFO:  status: {"passed":46,"failed":1}
2021-05-31 10:37:36 INFO:  @vladmandic/human version 2.0.0
2021-05-31 10:37:36 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-31 10:37:36 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-05-31 10:37:36 INFO:  test-node.js start
2021-05-31 10:37:37 STATE: test-node.js passed: create human
2021-05-31 10:37:37 INFO:  test-node.js human version: 2.0.0
2021-05-31 10:37:37 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-31 10:37:37 INFO:  test-node.js tfjs version: 3.6.0
2021-05-31 10:37:37 STATE: test-node.js passed: set backend: tensorflow
2021-05-31 10:37:37 STATE: test-node.js passed: load models
2021-05-31 10:37:37 STATE: test-node.js result: defined models: 13 loaded models: 6
2021-05-31 10:37:37 STATE: test-node.js passed: warmup: none default
2021-05-31 10:37:39 STATE: test-node.js passed: warmup: face default
2021-05-31 10:37:39 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-05-31 10:37:39 DATA:  test-node.js result: performance: load: 341 total: 1576
2021-05-31 10:37:41 STATE: test-node.js passed: warmup: body default
2021-05-31 10:37:41 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-31 10:37:41 DATA:  test-node.js result: performance: load: 341 total: 1531
2021-05-31 10:37:41 INFO:  test-node.js test body variants
2021-05-31 10:37:42 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:37:43 STATE: test-node.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-31 10:37:43 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-05-31 10:37:43 DATA:  test-node.js result: performance: load: 341 total: 1023
2021-05-31 10:37:44 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:37:44 STATE: test-node.js passed: detect: assets/human-sample-body.jpg movenet
2021-05-31 10:37:44 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-31 10:37:44 DATA:  test-node.js result: performance: load: 341 total: 305
2021-05-31 10:37:45 STATE: test-node.js passed: detect: random default
2021-05-31 10:37:45 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-05-31 10:37:45 DATA:  test-node.js result: performance: load: 341 total: 817
2021-05-31 10:37:45 INFO:  test-node.js test: first instance
2021-05-31 10:37:45 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-31 10:37:47 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-31 10:37:47 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.68,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-31 10:37:47 DATA:  test-node.js result: performance: load: 341 total: 1502
2021-05-31 10:37:47 INFO:  test-node.js test: second instance
2021-05-31 10:37:47 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-31 10:37:48 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-31 10:37:48 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.68,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-31 10:37:48 DATA:  test-node.js result: performance: load: 5 total: 1518
2021-05-31 10:37:48 INFO:  test-node.js test: concurrent
2021-05-31 10:37:49 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-31 10:37:49 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-31 10:37:50 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:37:51 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:37:56 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-31 10:37:56 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-31 10:37:56 DATA:  test-node.js result: performance: load: 341 total: 5470
2021-05-31 10:37:56 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-31 10:37:56 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-31 10:37:56 DATA:  test-node.js result: performance: load: 5 total: 5470
2021-05-31 10:37:56 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-31 10:37:56 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-31 10:37:56 DATA:  test-node.js result: performance: load: 341 total: 5470
2021-05-31 10:37:56 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-31 10:37:56 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-31 10:37:56 DATA:  test-node.js result: performance: load: 5 total: 5470
2021-05-31 10:37:56 INFO:  test-node.js test complete: 19120 ms
2021-05-31 10:37:56 INFO:  test-node-gpu.js start
2021-05-31 10:37:57 WARN:  test-node-gpu.js stderr: 2021-05-31 10:37:57.184324: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-05-31 10:37:57 WARN:  test-node-gpu.js stderr: 2021-05-31 10:37:57.237012: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-05-31 10:37:57 WARN:  test-node-gpu.js stderr: 2021-05-31 10:37:57.237106: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-05-31 10:37:57 STATE: test-node-gpu.js passed: create human
2021-05-31 10:37:57 INFO:  test-node-gpu.js human version: 2.0.0
2021-05-31 10:37:57 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-31 10:37:57 INFO:  test-node-gpu.js tfjs version: 3.6.0
2021-05-31 10:37:57 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-05-31 10:37:57 STATE: test-node-gpu.js passed: load models
2021-05-31 10:37:57 STATE: test-node-gpu.js result: defined models: 13 loaded models: 6
2021-05-31 10:37:57 STATE: test-node-gpu.js passed: warmup: none default
2021-05-31 10:37:59 STATE: test-node-gpu.js passed: warmup: face default
2021-05-31 10:37:59 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":5}
2021-05-31 10:37:59 DATA:  test-node-gpu.js result: performance: load: 336 total: 1872
2021-05-31 10:38:01 STATE: test-node-gpu.js passed: warmup: body default
2021-05-31 10:38:01 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-31 10:38:01 DATA:  test-node-gpu.js result: performance: load: 336 total: 1592
2021-05-31 10:38:01 INFO:  test-node-gpu.js test body variants
2021-05-31 10:38:02 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:38:03 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-31 10:38:03 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.72,"class":"person"} {"score":0.96,"keypoints":16}
2021-05-31 10:38:03 DATA:  test-node-gpu.js result: performance: load: 336 total: 945
2021-05-31 10:38:04 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:38:04 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg movenet
2021-05-31 10:38:04 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-31 10:38:04 DATA:  test-node-gpu.js result: performance: load: 336 total: 337
2021-05-31 10:38:05 STATE: test-node-gpu.js passed: detect: random default
2021-05-31 10:38:05 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-05-31 10:38:05 DATA:  test-node-gpu.js result: performance: load: 336 total: 778
2021-05-31 10:38:05 INFO:  test-node-gpu.js test: first instance
2021-05-31 10:38:05 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-31 10:38:07 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-31 10:38:07 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.68,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-31 10:38:07 DATA:  test-node-gpu.js result: performance: load: 336 total: 1591
2021-05-31 10:38:07 INFO:  test-node-gpu.js test: second instance
2021-05-31 10:38:07 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-31 10:38:08 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-31 10:38:08 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.68,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-31 10:38:08 DATA:  test-node-gpu.js result: performance: load: 3 total: 1345
2021-05-31 10:38:08 INFO:  test-node-gpu.js test: concurrent
2021-05-31 10:38:09 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-31 10:38:09 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-31 10:38:10 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:38:11 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:38:16 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-31 10:38:16 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-31 10:38:16 DATA:  test-node-gpu.js result: performance: load: 336 total: 5204
2021-05-31 10:38:16 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-31 10:38:16 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-31 10:38:16 DATA:  test-node-gpu.js result: performance: load: 3 total: 5203
2021-05-31 10:38:16 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-31 10:38:16 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-31 10:38:16 DATA:  test-node-gpu.js result: performance: load: 336 total: 5204
2021-05-31 10:38:16 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-31 10:38:16 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-31 10:38:16 DATA:  test-node-gpu.js result: performance: load: 3 total: 5203
2021-05-31 10:38:16 INFO:  test-node-gpu.js test complete: 19021 ms
2021-05-31 10:38:16 INFO:  test-node-wasm.js start
2021-05-31 10:38:16 STATE: test-node-wasm.js passed: model server: http://localhost:10030/models/
2021-05-31 10:38:16 STATE: test-node-wasm.js passed: create human
2021-05-31 10:38:16 INFO:  test-node-wasm.js human version: 2.0.0
2021-05-31 10:38:16 INFO:  test-node-wasm.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-31 10:38:16 INFO:  test-node-wasm.js tfjs version: 3.6.0
2021-05-31 10:38:17 STATE: test-node-wasm.js passed: set backend: wasm
2021-05-31 10:38:17 STATE: test-node-wasm.js passed: load models
2021-05-31 10:38:17 STATE: test-node-wasm.js result: defined models: 13 loaded models: 5
2021-05-31 10:38:17 STATE: test-node-wasm.js passed: warmup: none default
2021-05-31 10:38:17 ERROR: test-node-wasm.js failed: warmup: face default
2021-05-31 10:38:17 ERROR: test-node-wasm.js failed: warmup: body default
2021-05-31 10:38:17 INFO:  test-node-wasm.js test body variants
2021-05-31 10:38:19 STATE: test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:38:22 STATE: test-node-wasm.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-31 10:38:22 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {} {"score":0.96,"keypoints":16}
2021-05-31 10:38:22 DATA:  test-node-wasm.js result: performance: load: 650 total: 3248
2021-05-31 10:38:24 STATE: test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:38:26 STATE: test-node-wasm.js passed: detect: assets/human-sample-body.jpg movenet
2021-05-31 10:38:26 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"confidence":1} {} {"score":0.93,"keypoints":17}
2021-05-31 10:38:26 DATA:  test-node-wasm.js result: performance: load: 650 total: 1942
2021-05-31 10:38:28 STATE: test-node-wasm.js passed: detect: random default
2021-05-31 10:38:28 DATA:  test-node-wasm.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-05-31 10:38:28 DATA:  test-node-wasm.js result: performance: load: 650 total: 1631
2021-05-31 10:38:28 INFO:  test-node-wasm.js test: first instance
2021-05-31 10:38:29 STATE: test-node-wasm.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-31 10:38:31 STATE: test-node-wasm.js passed: detect: assets/sample-me.jpg default
2021-05-31 10:38:31 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 0 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {} {"score":0.67,"keypoints":7}
2021-05-31 10:38:31 DATA:  test-node-wasm.js result: performance: load: 650 total: 2284
2021-05-31 10:38:31 INFO:  test-node-wasm.js test: second instance
2021-05-31 10:38:32 STATE: test-node-wasm.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-31 10:38:34 STATE: test-node-wasm.js passed: detect: assets/sample-me.jpg default
2021-05-31 10:38:34 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 0 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {} {"score":0.67,"keypoints":7}
2021-05-31 10:38:34 DATA:  test-node-wasm.js result: performance: load: 4 total: 2287
2021-05-31 10:38:34 INFO:  test-node-wasm.js test: concurrent
2021-05-31 10:38:34 STATE: test-node-wasm.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-31 10:38:34 STATE: test-node-wasm.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-31 10:38:36 STATE: test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:38:38 STATE: test-node-wasm.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-31 10:38:47 STATE: test-node-wasm.js passed: detect: assets/human-sample-face.jpg default
2021-05-31 10:38:47 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 0 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {} {"score":0.73,"keypoints":17}
2021-05-31 10:38:47 DATA:  test-node-wasm.js result: performance: load: 650 total: 9230
2021-05-31 10:38:47 STATE: test-node-wasm.js passed: detect: assets/human-sample-face.jpg default
2021-05-31 10:38:47 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 0 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {} {"score":0.73,"keypoints":17}
2021-05-31 10:38:47 DATA:  test-node-wasm.js result: performance: load: 4 total: 9230
2021-05-31 10:38:47 STATE: test-node-wasm.js passed: detect: assets/human-sample-body.jpg default
2021-05-31 10:38:47 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {} {"score":0.93,"keypoints":17}
2021-05-31 10:38:47 DATA:  test-node-wasm.js result: performance: load: 650 total: 9230
2021-05-31 10:38:47 STATE: test-node-wasm.js passed: detect: assets/human-sample-body.jpg default
2021-05-31 10:38:47 DATA:  test-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 0 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {} {"score":0.93,"keypoints":17}
2021-05-31 10:38:47 DATA:  test-node-wasm.js result: performance: load: 4 total: 9230
2021-05-31 10:38:47 INFO:  test-node-wasm.js test complete: 30664 ms
2021-05-31 10:38:47 INFO:  status: {"passed":68,"failed":2}

File diff suppressed because one or more lines are too long

View File

@ -109,7 +109,7 @@
<li class="tsd-kind-property tsd-parent-kind-class"><a href="human.html#faceuvmap" class="tsd-kind-icon">faceUVMap</a></li>
<li class="tsd-kind-property tsd-parent-kind-class"><a href="human.html#image" class="tsd-kind-icon">image</a></li>
<li class="tsd-kind-property tsd-parent-kind-class"><a href="human.html#models" class="tsd-kind-icon">models</a></li>
<li class="tsd-kind-property tsd-parent-kind-class"><a href="human.html#perf" class="tsd-kind-icon">perf</a></li>
<li class="tsd-kind-property tsd-parent-kind-class"><a href="human.html#performance" class="tsd-kind-icon">performance</a></li>
<li class="tsd-kind-property tsd-parent-kind-class"><a href="human.html#result" class="tsd-kind-icon">result</a></li>
<li class="tsd-kind-property tsd-parent-kind-class"><a href="human.html#state" class="tsd-kind-icon">state</a></li>
<li class="tsd-kind-property tsd-parent-kind-class"><a href="human.html#sysinfo" class="tsd-kind-icon">sysinfo</a></li>
@ -124,6 +124,7 @@
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#enhance" class="tsd-kind-icon">enhance</a></li>
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#load" class="tsd-kind-icon">load</a></li>
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#match" class="tsd-kind-icon">match</a></li>
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#next" class="tsd-kind-icon">next</a></li>
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#similarity" class="tsd-kind-icon">similarity</a></li>
<li class="tsd-kind-method tsd-parent-kind-class"><a href="human.html#warmup" class="tsd-kind-icon">warmup</a></li>
</ul>
@ -151,7 +152,7 @@
<h4 class="tsd-parameters-title">Parameters</h4>
<ul class="tsd-parameters">
<li>
<h5>userConfig: <a href="../interfaces/config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span><span class="tsd-signature-symbol"> = {}</span></h5>
<h5><span class="tsd-flag ts-flagOptional">Optional</span> userConfig: <a href="../interfaces/config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span></h5>
</li>
</ul>
<h4 class="tsd-returns-title">Returns <a href="human.html" class="tsd-signature-type" data-tsd-kind="Class">Human</a></h4>
@ -411,7 +412,7 @@
</aside>
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>Face triangualtion array of 468 points, used for triangle references between points</p>
<p>Reference face triangulation array of 468 points, used for triangle references between points</p>
</div>
</div>
</section>
@ -423,7 +424,7 @@
</aside>
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>UV map of 468 values, used for 3D mapping of the face mesh</p>
<p>Reference UV map of 468 values, used for 3D mapping of the face mesh</p>
</div>
</div>
</section>
@ -455,7 +456,7 @@
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-class">
<a name="models" class="tsd-anchor"></a>
<h3>models</h3>
<div class="tsd-signature tsd-kind-icon">models<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>age<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>blazepose<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>centernet<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>efficientpose<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>embedding<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>emotion<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>face<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-symbol">[</span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">]</span><span class="tsd-signature-symbol">; </span>faceres<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>gender<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>handpose<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-symbol">[</span><span 
class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">]</span><span class="tsd-signature-symbol">; </span>iris<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>movenet<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>nanodet<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>posenet<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol"> }</span></div>
<div class="tsd-signature tsd-kind-icon">models<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>age<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>blazepose<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>centernet<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>efficientpose<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>embedding<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>emotion<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>face<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-symbol">[</span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">]</span><span class="tsd-signature-symbol">; </span>faceres<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>gender<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>handpose<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-symbol">[</span><span 
class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">]</span><span class="tsd-signature-symbol">; </span>movenet<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>nanodet<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">; </span>posenet<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol"> }</span></div>
<aside class="tsd-sources">
</aside>
<div class="tsd-comment tsd-typography">
@ -498,9 +499,6 @@
<li class="tsd-parameter">
<h5>handpose<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-symbol">[</span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">]</span></h5>
</li>
<li class="tsd-parameter">
<h5>iris<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span></h5>
</li>
<li class="tsd-parameter">
<h5>movenet<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">unknown</span></h5>
</li>
@ -514,9 +512,9 @@
</div>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-class">
<a name="perf" class="tsd-anchor"></a>
<h3>perf</h3>
<div class="tsd-signature tsd-kind-icon">perf<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span></div>
<a name="performance" class="tsd-anchor"></a>
<h3>performance</h3>
<div class="tsd-signature tsd-kind-icon">performance<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span></div>
<aside class="tsd-sources">
</aside>
<div class="tsd-comment tsd-typography">
@ -551,6 +549,7 @@
<p>Current state of Human library</p>
<ul>
<li>Can be polled to determine operations that are currently executed</li>
<li>Progresses through: &#39;config&#39;, &#39;check&#39;, &#39;backend&#39;, &#39;load&#39;, &#39;run:<model>&#39;, &#39;idle&#39;</li>
</ul>
</div>
</div>
@ -603,7 +602,7 @@
</aside>
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>Current version of Human library in semver format</p>
<p>Current version of Human library in <em>semver</em> format</p>
</div>
</div>
</section>
@ -637,7 +636,7 @@
<h5>input: <a href="../index.html#input" class="tsd-signature-type" data-tsd-kind="Type alias">Input</a></h5>
</li>
<li>
<h5>userConfig: <a href="../interfaces/config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span><span class="tsd-signature-symbol"> = {}</span></h5>
<h5><span class="tsd-flag ts-flagOptional">Optional</span> userConfig: <a href="../interfaces/config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span></h5>
</li>
</ul>
<h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol">&lt;</span><a href="../index.html#error" class="tsd-signature-type" data-tsd-kind="Type alias">Error</a><span class="tsd-signature-symbol"> | </span><a href="../interfaces/result.html" class="tsd-signature-type" data-tsd-kind="Interface">Result</a><span class="tsd-signature-symbol">&gt;</span></h4>
@ -692,7 +691,7 @@
<h4 class="tsd-parameters-title">Parameters</h4>
<ul class="tsd-parameters">
<li>
<h5>userConfig: <a href="../interfaces/config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span><span class="tsd-signature-symbol"> = {}</span></h5>
<h5><span class="tsd-flag ts-flagOptional">Optional</span> userConfig: <a href="../interfaces/config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span></h5>
</li>
</ul>
<h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">void</span><span class="tsd-signature-symbol">&gt;</span></h4>
@ -745,6 +744,33 @@
</li>
</ul>
</section>
<section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class">
<a name="next" class="tsd-anchor"></a>
<h3>next</h3>
<ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class">
<li class="tsd-signature tsd-kind-icon">next<span class="tsd-signature-symbol">(</span>result<span class="tsd-signature-symbol">?: </span><a href="../interfaces/result.html" class="tsd-signature-type" data-tsd-kind="Interface">Result</a><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><a href="../interfaces/result.html" class="tsd-signature-type" data-tsd-kind="Interface">Result</a></li>
</ul>
<ul class="tsd-descriptions">
<li class="tsd-description">
<aside class="tsd-sources">
</aside>
<div class="tsd-comment tsd-typography">
<div class="lead">
<p>Runs interpolation using the last known result and returns a smoothed result.
Interpolation is based on time since the last known result, so it can be called independently</p>
</div>
</div>
<h4 class="tsd-parameters-title">Parameters</h4>
<ul class="tsd-parameters">
<li>
<h5><span class="tsd-flag ts-flagOptional">Optional</span> result: <a href="../interfaces/result.html" class="tsd-signature-type" data-tsd-kind="Interface">Result</a></h5>
</li>
</ul>
<h4 class="tsd-returns-title">Returns <a href="../interfaces/result.html" class="tsd-signature-type" data-tsd-kind="Interface">Result</a></h4>
<p>result: <a href="../interfaces/result.html">Result</a></p>
</li>
</ul>
</section>
<section class="tsd-panel tsd-member tsd-kind-method tsd-parent-kind-class">
<a name="similarity" class="tsd-anchor"></a>
<h3>similarity</h3>
@ -799,7 +825,7 @@
<h4 class="tsd-parameters-title">Parameters</h4>
<ul class="tsd-parameters">
<li>
<h5>userConfig: <a href="../interfaces/config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span><span class="tsd-signature-symbol"> = {}</span></h5>
<h5><span class="tsd-flag ts-flagOptional">Optional</span> userConfig: <a href="../interfaces/config.html" class="tsd-signature-type" data-tsd-kind="Interface">Config</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span></h5>
</li>
</ul>
<h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol">&lt;</span><a href="../interfaces/result.html" class="tsd-signature-type" data-tsd-kind="Interface">Result</a><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-symbol">{ </span>error<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol"> }</span><span class="tsd-signature-symbol">&gt;</span></h4>
@ -851,7 +877,7 @@
<a href="human.html#models" class="tsd-kind-icon">models</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-class">
<a href="human.html#perf" class="tsd-kind-icon">perf</a>
<a href="human.html#performance" class="tsd-kind-icon">performance</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-class">
<a href="human.html#result" class="tsd-kind-icon">result</a>
@ -880,6 +906,9 @@
<li class=" tsd-kind-method tsd-parent-kind-class">
<a href="human.html#match" class="tsd-kind-icon">match</a>
</li>
<li class=" tsd-kind-method tsd-parent-kind-class">
<a href="human.html#next" class="tsd-kind-icon">next</a>
</li>
<li class=" tsd-kind-method tsd-parent-kind-class">
<a href="human.html#similarity" class="tsd-kind-icon">similarity</a>
</li>

View File

@ -83,8 +83,7 @@
-fillPolygons: should drawn polygons be filled,
-useDepth: use z-axis coordinate as color shade,
-useCurves: draw polygons as curves or as lines,
-bufferedOutput: experimental: allows to call draw methods multiple times for each detection and interpolate results between results thus achieving smoother animations
-bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.</p>
-bufferedOutput: experimental: allows to call draw methods multiple times for each detection and interpolate results between results thus achieving smoother animations</p>
</div>
</div>
</section>
@ -103,7 +102,6 @@
<section class="tsd-index-section ">
<h3>Properties</h3>
<ul class="tsd-index-list">
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#bufferedfactor" class="tsd-kind-icon">buffered<wbr>Factor</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#bufferedoutput" class="tsd-kind-icon">buffered<wbr>Output</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#color" class="tsd-kind-icon">color</a></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><a href="drawoptions.html#drawboxes" class="tsd-kind-icon">draw<wbr>Boxes</a></li>
@ -128,13 +126,6 @@
</section>
<section class="tsd-panel-group tsd-member-group ">
<h2>Properties</h2>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="bufferedfactor" class="tsd-anchor"></a>
<h3>buffered<wbr>Factor</h3>
<div class="tsd-signature tsd-kind-icon">buffered<wbr>Factor<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">number</span></div>
<aside class="tsd-sources">
</aside>
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="bufferedoutput" class="tsd-anchor"></a>
<h3>buffered<wbr>Output</h3>
@ -283,9 +274,6 @@
<li class="current tsd-kind-interface">
<a href="drawoptions.html" class="tsd-kind-icon">Draw<wbr>Options</a>
<ul>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="drawoptions.html#bufferedfactor" class="tsd-kind-icon">buffered<wbr>Factor</a>
</li>
<li class=" tsd-kind-property tsd-parent-kind-interface">
<a href="drawoptions.html#bufferedoutput" class="tsd-kind-icon">buffered<wbr>Output</a>
</li>

View File

@ -176,7 +176,7 @@
</section>
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-interface">
<a name="performance" class="tsd-anchor"></a>
<h3><span class="tsd-flag ts-flagReadonly">Readonly</span> performance</h3>
<h3>performance</h3>
<div class="tsd-signature tsd-kind-icon">performance<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-type">Record</span><span class="tsd-signature-symbol">&lt;</span><span class="tsd-signature-type">string</span><span class="tsd-signature-symbol">, </span><span class="tsd-signature-type">unknown</span><span class="tsd-signature-symbol">&gt;</span></div>
<aside class="tsd-sources">
</aside>

View File

@ -21,7 +21,6 @@ import type { Result, Face, Body, Hand, Item, Gesture, Person } from '../result'
* -useDepth: use z-axis coordinate as color shade,
 * -useCurves: draw polygons as curves or as lines,
* -bufferedOutput: experimental: allows to call draw methods multiple times for each detection and interpolate results between results thus achieving smoother animations
* -bufferedFactor: speed of interpolation convergence where 1 means 100% immediately, 2 means 50% at each interpolation, etc.
*/
export interface DrawOptions {
color: string;
@ -41,7 +40,6 @@ export interface DrawOptions {
useDepth: boolean;
useCurves: boolean;
bufferedOutput: boolean;
bufferedFactor: number;
}
export declare const options: DrawOptions;
export declare function gesture(inCanvas: HTMLCanvasElement, result: Array<Gesture>, drawOptions?: DrawOptions): Promise<void>;

23
types/human.d.ts vendored
View File

@ -45,7 +45,7 @@ declare type Model = unknown;
*/
export declare class Human {
#private;
/** Current version of Human library in semver format */
/** Current version of Human library in *semver* format */
version: string;
/** Current configuration
* - Details: {@link Config}
@ -57,6 +57,7 @@ export declare class Human {
result: Result;
/** Current state of Human library
* - Can be polled to determine operations that are currently executed
* - Progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
*/
state: string;
/** @internal: Instance of current image being processed */
@ -93,7 +94,6 @@ export declare class Human {
efficientpose: Model | null;
movenet: Model | null;
handpose: [Model, Model] | null;
iris: Model | null;
age: Model | null;
gender: Model | null;
emotion: Model | null;
@ -112,9 +112,9 @@ export declare class Human {
centernet: typeof centernet;
faceres: typeof faceres;
};
/** Face triangualtion array of 468 points, used for triangle references between points */
/** Reference face triangulation array of 468 points, used for triangle references between points */
faceTriangulation: typeof facemesh.triangulation;
/** UV map of 468 values, used for 3D mapping of the face mesh */
/** Reference UV map of 468 values, used for 3D mapping of the face mesh */
faceUVMap: typeof facemesh.uvmap;
/** Platform and agent information detected by Human */
sysinfo: {
@ -122,7 +122,7 @@ export declare class Human {
agent: string;
};
/** Performance object that contains values for all recently performed operations */
perf: Record<string, unknown>;
performance: Record<string, unknown>;
/**
* Creates instance of Human library that is futher used for all operations
* @param userConfig: {@link Config}
@ -160,23 +160,30 @@ export declare class Human {
};
/** Load method preloads all configured models on-demand
* - Not explicitly required as any required model is load implicitly on it's first run
* @param userConfig: {@link Config}
* @param userConfig?: {@link Config}
*/
load(userConfig?: Config | Record<string, unknown>): Promise<void>;
/**
 * Runs interpolation using the last known result and returns a smoothed result
 * Interpolation is based on time since the last known result, so it can be called independently
* @param result?: use specific result set to run interpolation on
* @returns result: {@link Result}
*/
next: (result?: Result | undefined) => Result;
/** Main detection method
* - Analyze configuration: {@link Config}
* - Pre-process input: {@link Input}
* - Run inference for all configured models
* - Process and return result: {@link Result}
* @param input: Input
* @param userConfig: Config
* @param userConfig?: Config
* @returns result: Result
*/
detect(input: Input, userConfig?: Config | Record<string, unknown>): Promise<Result | Error>;
/** Warmup method pre-initializes all models for faster inference
* - can take significant time on startup
* - only used for `webgl` and `humangl` backends
* @param userConfig: Config
* @param userConfig?: Config
*/
warmup(userConfig?: Config | Record<string, unknown>): Promise<Result | {
error: any;

5
types/interpolate.d.ts vendored Normal file
View File

@ -0,0 +1,5 @@
/**
* Module that interpolates results for smoother animations
*/
import type { Result } from './result';
export declare function calc(newResult: Result): Result;

2
types/result.d.ts vendored
View File

@ -200,7 +200,7 @@ export interface Result {
/** {@link Object}: detection & analysis results */
object: Array<Item>;
/** global performance object with timing values for each operation */
readonly performance: Record<string, unknown>;
performance: Record<string, unknown>;
/** optional processed canvas that can be used to draw input on screen */
readonly canvas?: OffscreenCanvas | HTMLCanvasElement;
/** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */