release candidate

pull/134/head
Vladimir Mandic 2021-05-30 12:03:34 -04:00
parent 261ecda985
commit 3cf15ff629
26 changed files with 635 additions and 577 deletions


@ -1,6 +1,6 @@
# @vladmandic/human
Version: **1.9.4**
Version: **2.0.0**
Description: **Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition**
Author: **Vladimir Mandic <mandic00@live.com>**
@ -9,8 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
## Changelog
### **HEAD -> main** 2021/05/29 mandic00@live.com
### **HEAD -> main** 2021/05/30 mandic00@live.com
- quantize handdetect model
- added experimental movenet-lightning and removed blazepose from default dist
- added experimental face.rotation.gaze
- fix and optimize for mobile platform


@ -66,6 +66,7 @@ Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) fo
- [**Platform Support**](https://github.com/vladmandic/human/wiki/Platforms)
- [**List of Models & Credits**](https://github.com/vladmandic/human/wiki/Models)
- [**Security & Privacy Policy**](https://github.com/vladmandic/human/blob/main/SECURITY.md)
- [**License & Usage Restrictions**](https://github.com/vladmandic/human/blob/main/LICENSE)
<br>
@ -77,8 +78,8 @@ Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) fo
## Options
As presented in the demo application...
> [demo/index.html](demo/index.html)
All options as presented in the demo application...
> [demo/index.html](demo/index.html)
![Options visible in demo](assets/screenshot-menu.png)
@ -88,7 +89,7 @@ As presented in the demo application...
<br>
**Training image:**
**Validation image:**
> [demo/index.html](demo/index.html?image=%22../assets/human-sample-upper.jpg%22)
![Example Training Image](assets/screenshot-sample.png)
@ -104,7 +105,10 @@ As presented in the demo application...
![Example Using WebCam](assets/screenshot-webcam.jpg)
**Face Similarity Matching:**
> [demo/facematch.html](demo/facematch.html)
Extracts all faces from the provided input images,
sorts them by similarity to a selected face,
and optionally matches each detected face against a database of known people to guess their names
> [demo/facematch.html](demo/facematch.html)
![Face Matching](assets/screenshot-facematch.jpg)
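The matching described above boils down to comparing face descriptors; a minimal sketch, assuming each detection exposes a numeric `embedding` array and using the `human.similarity()` helper (the database shape and variable names here are assumptions, not the demo's exact code):

```js
// sketch: rank detected faces by similarity to a selected face descriptor
const selected = detections[0].embedding; // descriptor of the face picked by the user
const ranked = detections
  .map((face) => ({ face, similarity: human.similarity(selected, face.embedding) }))
  .sort((a, b) => b.similarity - a.similarity); // most similar first

// optional: guess a name by comparing against a database of known descriptors
const db = [{ name: 'alice', embedding: [/* ... */] }]; // hypothetical database shape
const best = db
  .map((entry) => ({ name: entry.name, similarity: human.similarity(selected, entry.embedding) }))
  .sort((a, b) => b.similarity - a.similarity)[0];
```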

TODO.md

@ -7,7 +7,7 @@ N/A
## Exploring Features
- Implement demo as installable PWA with model caching
- Implement results interpolation on library level
- Implement results interpolation on library level instead of inside draw functions
- Switch to TypeScript 4.3
## Explore Models
@ -16,6 +16,14 @@ N/A
## In Progress
- Face interpolation
- Gaze interpolation
- Face rotation interpolation
- Object detection interpolation
- Unify score/confidence variables
## Issues
- CenterNet WebGL: <https://github.com/tensorflow/tfjs/issues/5145>
- CenterNet WASM: <https://github.com/tensorflow/tfjs/issues/5110>
- NanoDet WASM: <https://github.com/tensorflow/tfjs/issues/4824>
- BlazeFace and HandPose rotation in NodeJS: <https://github.com/tensorflow/tfjs/issues/4066>
- TypeDoc with TS 4.3: <https://github.com/TypeStrong/typedoc/issues/1589>

assets/screenshot-kiara.jpg: new executable binary file (50 KiB), not shown.


@ -13,6 +13,7 @@ function log(...msg) {
onmessage = async (msg) => {
if (busy) return;
busy = true;
// received from index.js using:
// worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
let result = {};
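For context, the sending side that the comment above describes might look like this on the main thread (a sketch; `canvas`, `worker`, and `config` come from the demo):

```js
// main thread: grab pixels and transfer the underlying buffer to the worker (zero-copy)
const ctx = canvas.getContext('2d');
const image = ctx.getImageData(0, 0, canvas.width, canvas.height);
worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
// note: image.data.buffer is detached after the transfer and can no longer be used here
```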


@ -1,13 +1,26 @@
// @ts-nocheck // typescript checks disabled as this is pure javascript
/**
* Human demo for browsers
*
* Main demo app that exposes all Human functionality
* @description Main demo app that exposes all Human functionality
*
* @params Optional URL parameters:
* image=<imagePath:string>: perform detection on specific image and finish
* worker=<true|false>: use WebWorkers
* backend=<webgl|wasm|cpu>: use specific TF backend for operations
* preload=<true|false>: pre-load all configured models
* warmup=<true|false>: warmup all configured models
*
* @example <https://wyse:10031/?backend=wasm&worker=true&image="/assets/sample-me.jpg">
*
* @configuration
* userConfig={}: contains all model configuration used by human
* drawOptions={}: contains all draw variables used by human.draw
* ui={}: contains all variables exposed in the UI
*/
// @ts-nocheck // typescript checks disabled as this is pure javascript
import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human
// import Human from '../dist/human.esm-nobundle.js'; // this requires that tf is loaded manually and bundled before human can be used
import Menu from './helpers/menu.js';
import GLBench from './helpers/gl-bench.js';
import webRTC from './helpers/webrtc.js';
@ -15,8 +28,7 @@ import webRTC from './helpers/webrtc.js';
let human;
const userConfig = {
warmup: 'full',
/*
warmup: 'none',
backend: 'webgl',
async: false,
cacheSensitivity: 0,
@ -34,10 +46,9 @@ const userConfig = {
hand: { enabled: false },
// body: { enabled: true, modelPath: 'posenet.json' },
// body: { enabled: true, modelPath: 'blazepose.json' },
body: { enabled: true, modelPath: 'movenet-lightning.json' },
object: { enabled: false },
body: { enabled: false, modelPath: 'movenet-lightning.json' },
object: { enabled: true },
gesture: { enabled: true },
*/
};
const drawOptions = {
@ -53,7 +64,7 @@ const ui = {
facing: true, // camera facing front or back
baseBackground: 'rgba(50, 50, 50, 1)', // 'grey'
columns: 2, // when processing sample images create this many columns
useWorker: false, // use web workers for processing
useWorker: true, // use web workers for processing
worker: 'index-worker.js',
maxFPSframes: 10, // keep fps history for how many frames
modelsPreload: true, // preload human models on startup
@ -84,6 +95,7 @@ const ui = {
// sample images
compare: '../assets/sample-me.jpg', // base image for face compare
/*
samples: [
'../assets/sample6.jpg',
'../assets/sample1.jpg',
@ -92,45 +104,10 @@ const ui = {
'../assets/sample3.jpg',
'../assets/sample2.jpg',
],
/*
ui.samples = [
'../private/daz3d/daz3d-brianna.jpg',
'../private/daz3d/daz3d-chiyo.jpg',
'../private/daz3d/daz3d-cody.jpg',
'../private/daz3d/daz3d-drew-01.jpg',
'../private/daz3d/daz3d-drew-02.jpg',
'../private/daz3d/daz3d-ella-01.jpg',
'../private/daz3d/daz3d-ella-02.jpg',
'../private/daz3d/daz3d-_emotions01.jpg',
'../private/daz3d/daz3d-_emotions02.jpg',
'../private/daz3d/daz3d-_emotions03.jpg',
'../private/daz3d/daz3d-_emotions04.jpg',
'../private/daz3d/daz3d-_emotions05.jpg',
'../private/daz3d/daz3d-gillian.jpg',
'../private/daz3d/daz3d-ginnifer.jpg',
'../private/daz3d/daz3d-hye-01.jpg',
'../private/daz3d/daz3d-hye-02.jpg',
'../private/daz3d/daz3d-kaia.jpg',
'../private/daz3d/daz3d-karen.jpg',
'../private/daz3d/daz3d-kiaria-01.jpg',
'../private/daz3d/daz3d-kiaria-02.jpg',
'../private/daz3d/daz3d-lilah-01.jpg',
'../private/daz3d/daz3d-lilah-02.jpg',
'../private/daz3d/daz3d-lilah-03.jpg',
'../private/daz3d/daz3d-lila.jpg',
'../private/daz3d/daz3d-lindsey.jpg',
'../private/daz3d/daz3d-megah.jpg',
'../private/daz3d/daz3d-selina-01.jpg',
'../private/daz3d/daz3d-selina-02.jpg',
'../private/daz3d/daz3d-snow.jpg',
'../private/daz3d/daz3d-sunshine.jpg',
'../private/daz3d/daz3d-taia.jpg',
'../private/daz3d/daz3d-tuesday-01.jpg',
'../private/daz3d/daz3d-tuesday-02.jpg',
'../private/daz3d/daz3d-tuesday-03.jpg',
'../private/daz3d/daz3d-zoe.jpg',
];
*/
samples: [
'../private/daz3d/daz3d-kiaria-02.jpg',
],
};
// global variables
@ -267,9 +244,9 @@ async function drawResults(input) {
// if buffered, immediate loop but limit frame rate although it's going to run slower as JS is singlethreaded
if (ui.buffered) {
ui.drawThread = requestAnimationFrame(() => drawResults(input, canvas));
} else if (!ui.buffered && ui.drawThread) {
} else {
log('stopping buffered refresh');
cancelAnimationFrame(ui.drawThread);
if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
ui.drawThread = null;
}
}
@ -435,7 +412,7 @@ function runHumanDetect(input, canvas, timestamp) {
offscreen.width = canvas.width;
offscreen.height = canvas.height;
const ctx = offscreen.getContext('2d');
ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
ctx.drawImage(input, 0, 0, canvas.width, canvas.height);
const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
// perform detection in worker
webWorker(input, data, canvas, userConfig, timestamp);
@ -522,6 +499,7 @@ async function detectVideo() {
// just initialize everything and call main function
async function detectSampleImages() {
document.getElementById('play').style.display = 'none';
document.getElementById('canvas').style.display = 'none';
document.getElementById('samples-container').style.display = 'block';
log('running detection of sample images');
@ -530,6 +508,9 @@ async function detectSampleImages() {
for (const m of Object.values(menu)) m.hide();
for (const image of ui.samples) await processImage(image);
status();
document.getElementById('play').style.display = 'none';
document.getElementById('loader').style.display = 'none';
if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
}
function setupMenu() {
@ -692,6 +673,12 @@ async function main() {
document.documentElement.style.setProperty('--icon-size', ui.iconSize);
// sanity check for webworker compatibility
if (typeof Worker === 'undefined' || typeof OffscreenCanvas === 'undefined') {
ui.useWorker = false;
log('workers are disabled due to missing browser functionality');
}
// parse url search params
const params = new URLSearchParams(location.search);
log('url options:', params.toString());
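A minimal sketch of how the URL options documented in this file's header could be mapped onto the demo's objects (illustrative only; the demo's actual parser may differ):

```js
// apply supported URL options to the demo configuration
const params = new URLSearchParams(location.search);
if (params.has('backend')) userConfig.backend = params.get('backend'); // webgl | wasm | cpu
if (params.has('worker')) ui.useWorker = params.get('worker') === 'true'; // use WebWorkers
if (params.has('preload')) ui.modelsPreload = params.get('preload') === 'true'; // pre-load all configured models
if (params.has('warmup')) userConfig.warmup = params.get('warmup') === 'true' ? 'full' : 'none';
if (params.has('image')) ui.detectImage = params.get('image'); // hypothetical field: detect a single image and finish
```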


@ -4,6 +4,7 @@
const log = require('@vladmandic/pilogger');
const fs = require('fs');
const path = require('path');
const process = require('process');
const fetch = require('node-fetch').default;
@ -142,6 +143,7 @@ async function detect(input) {
log.data(' Object: N/A');
}
fs.writeFileSync('result.json', JSON.stringify(result, null, 2));
// print data to console
if (result) {
log.data('Persons:');
@ -182,13 +184,26 @@ async function main() {
log.header();
log.info('Current folder:', process.env.PWD);
await init();
const f = process.argv[2];
if (process.argv.length !== 3) {
log.warn('Parameters: <input image> missing');
log.warn('Parameters: <input image | folder> missing');
await test();
} else if (!fs.existsSync(process.argv[2]) && !process.argv[2].startsWith('http')) {
} else if (!fs.existsSync(f) && !f.startsWith('http')) {
log.error(`File not found: ${process.argv[2]}`);
} else {
await detect(process.argv[2]);
if (fs.existsSync(f)) {
const stat = fs.statSync(f);
if (stat.isDirectory()) {
const dir = fs.readdirSync(f);
for (const file of dir) {
await detect(path.join(f, file));
}
} else {
await detect(f);
}
} else {
await detect(f);
}
}
}

Diffs suppressed for several vendored bundles because one or more lines are too long, including dist/human.esm.js and dist/human.js.

@ -17598,6 +17598,8 @@ async function load11(config3) {
return model8;
}
async function process3(res, inputSize, outputShape, config3) {
if (!res)
return [];
const results = [];
const detections = res.arraySync();
const squeezeT = tf17.squeeze(res);
@ -17617,7 +17619,7 @@ async function process3(res, inputSize, outputShape, config3) {
nmsT.dispose();
let i = 0;
for (const id of nms) {
const score3 = detections[0][id][4];
const score3 = Math.trunc(100 * detections[0][id][4]) / 100;
const classVal = detections[0][id][5];
const label = labels[classVal].label;
const boxRaw3 = [
@ -17636,18 +17638,16 @@ async function process3(res, inputSize, outputShape, config3) {
}
return results;
}
async function predict10(image15, config3) {
async function predict10(input, config3) {
if (skipped6 < config3.object.skipFrames && config3.skipFrame && last4.length > 0) {
skipped6++;
return last4;
}
skipped6 = 0;
return new Promise(async (resolve) => {
const outputSize = [image15.shape[2], image15.shape[1]];
const resize = tf17.image.resizeBilinear(image15, [model8.inputSize, model8.inputSize], false);
let objectT;
if (config3.object.enabled)
objectT = model8.execute(resize, "tower_0/detections");
const outputSize = [input.shape[2], input.shape[1]];
const resize = tf17.image.resizeBilinear(input, [model8.inputSize, model8.inputSize]);
const objectT = config3.object.enabled ? model8.execute(resize, ["tower_0/detections"]) : null;
resize.dispose();
const obj = await process3(objectT, model8.inputSize, outputSize, config3);
last4 = obj;
@ -19105,9 +19105,9 @@ function calcBuffered(newResult, localOptions) {
if (!bufferedResult.body || newResult.body.length !== bufferedResult.body.length)
bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
for (let i = 0; i < newResult.body.length; i++) {
bufferedResult.body[i].box = newResult.body[i].box.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + box6) / localOptions.bufferedFactor);
bufferedResult.body[i].boxRaw = newResult.body[i].boxRaw.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + box6) / localOptions.bufferedFactor);
bufferedResult.body[i].keypoints = newResult.body[i].keypoints.map((keypoint, j) => ({
const box6 = newResult.body[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.body[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const keypoints3 = newResult.body[i].keypoints.map((keypoint, j) => ({
score: keypoint.score,
part: keypoint.part,
position: {
@ -19115,17 +19115,27 @@ function calcBuffered(newResult, localOptions) {
y: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / localOptions.bufferedFactor : keypoint.position.y
}
}));
bufferedResult.body[i] = { ...newResult.body[i], box: box6, boxRaw: boxRaw3, keypoints: keypoints3 };
}
if (!bufferedResult.hand || newResult.hand.length !== bufferedResult.hand.length)
bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
for (let i = 0; i < newResult.hand.length; i++) {
bufferedResult.hand[i].box = newResult.hand[i].box.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + box6) / localOptions.bufferedFactor);
bufferedResult.hand[i].boxRaw = newResult.hand[i].boxRaw.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + box6) / localOptions.bufferedFactor);
bufferedResult.hand[i].landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
const box6 = newResult.hand[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.hand[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
const keys = Object.keys(newResult.hand[i].annotations);
const annotations3 = [];
for (const key of keys) {
bufferedResult.hand[i].annotations[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
annotations3[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
}
bufferedResult.hand[i] = { ...newResult.hand[i], box: box6, boxRaw: boxRaw3, landmarks, annotations: annotations3 };
}
if (!bufferedResult.face || newResult.face.length !== bufferedResult.face.length)
bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
for (let i = 0; i < newResult.face.length; i++) {
const box6 = newResult.face[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.face[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / localOptions.bufferedFactor);
bufferedResult.face[i] = { ...newResult.face[i], box: box6, boxRaw: boxRaw3 };
}
const newPersons = newResult.persons;
if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length)
@ -19152,7 +19162,7 @@ async function all(inCanvas2, result, drawOptions) {
calcBuffered(result, localOptions);
else
bufferedResult = result;
face2(inCanvas2, result.face, localOptions);
face2(inCanvas2, bufferedResult.face, localOptions);
body2(inCanvas2, bufferedResult.body, localOptions);
hand2(inCanvas2, bufferedResult.hand, localOptions);
gesture(inCanvas2, result.gesture, localOptions);
@ -19941,7 +19951,7 @@ lBhEMohlFerLlBjEMohMVTEARDKCITsAk2AEgAAAkAAAAAAAAAAAAAAAAAAAAAAAASAAAAAAAAD/
2Q==`;
// package.json
var version = "1.9.4";
var version = "2.0.0";
// src/human.ts
var _numTensors, _analyzeMemoryLeaks, _checkSanity, _firstRun, _lastInputSum, _lastCacheDiff, _sanity, _checkBackend, _skipFrame, _warmupBitmap, _warmupCanvas, _warmupNode;


@ -17599,6 +17599,8 @@ async function load11(config3) {
return model8;
}
async function process3(res, inputSize, outputShape, config3) {
if (!res)
return [];
const results = [];
const detections = res.arraySync();
const squeezeT = tf17.squeeze(res);
@ -17618,7 +17620,7 @@ async function process3(res, inputSize, outputShape, config3) {
nmsT.dispose();
let i = 0;
for (const id of nms) {
const score3 = detections[0][id][4];
const score3 = Math.trunc(100 * detections[0][id][4]) / 100;
const classVal = detections[0][id][5];
const label = labels[classVal].label;
const boxRaw3 = [
@ -17637,18 +17639,16 @@ async function process3(res, inputSize, outputShape, config3) {
}
return results;
}
async function predict10(image15, config3) {
async function predict10(input, config3) {
if (skipped6 < config3.object.skipFrames && config3.skipFrame && last4.length > 0) {
skipped6++;
return last4;
}
skipped6 = 0;
return new Promise(async (resolve) => {
const outputSize = [image15.shape[2], image15.shape[1]];
const resize = tf17.image.resizeBilinear(image15, [model8.inputSize, model8.inputSize], false);
let objectT;
if (config3.object.enabled)
objectT = model8.execute(resize, "tower_0/detections");
const outputSize = [input.shape[2], input.shape[1]];
const resize = tf17.image.resizeBilinear(input, [model8.inputSize, model8.inputSize]);
const objectT = config3.object.enabled ? model8.execute(resize, ["tower_0/detections"]) : null;
resize.dispose();
const obj = await process3(objectT, model8.inputSize, outputSize, config3);
last4 = obj;
@ -19106,9 +19106,9 @@ function calcBuffered(newResult, localOptions) {
if (!bufferedResult.body || newResult.body.length !== bufferedResult.body.length)
bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
for (let i = 0; i < newResult.body.length; i++) {
bufferedResult.body[i].box = newResult.body[i].box.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + box6) / localOptions.bufferedFactor);
bufferedResult.body[i].boxRaw = newResult.body[i].boxRaw.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + box6) / localOptions.bufferedFactor);
bufferedResult.body[i].keypoints = newResult.body[i].keypoints.map((keypoint, j) => ({
const box6 = newResult.body[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.body[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const keypoints3 = newResult.body[i].keypoints.map((keypoint, j) => ({
score: keypoint.score,
part: keypoint.part,
position: {
@ -19116,17 +19116,27 @@ function calcBuffered(newResult, localOptions) {
y: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / localOptions.bufferedFactor : keypoint.position.y
}
}));
bufferedResult.body[i] = { ...newResult.body[i], box: box6, boxRaw: boxRaw3, keypoints: keypoints3 };
}
if (!bufferedResult.hand || newResult.hand.length !== bufferedResult.hand.length)
bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
for (let i = 0; i < newResult.hand.length; i++) {
bufferedResult.hand[i].box = newResult.hand[i].box.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + box6) / localOptions.bufferedFactor);
bufferedResult.hand[i].boxRaw = newResult.hand[i].boxRaw.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + box6) / localOptions.bufferedFactor);
bufferedResult.hand[i].landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
const box6 = newResult.hand[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.hand[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
const keys = Object.keys(newResult.hand[i].annotations);
const annotations3 = [];
for (const key of keys) {
bufferedResult.hand[i].annotations[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
annotations3[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
}
bufferedResult.hand[i] = { ...newResult.hand[i], box: box6, boxRaw: boxRaw3, landmarks, annotations: annotations3 };
}
if (!bufferedResult.face || newResult.face.length !== bufferedResult.face.length)
bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
for (let i = 0; i < newResult.face.length; i++) {
const box6 = newResult.face[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.face[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / localOptions.bufferedFactor);
bufferedResult.face[i] = { ...newResult.face[i], box: box6, boxRaw: boxRaw3 };
}
const newPersons = newResult.persons;
if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length)
@ -19153,7 +19163,7 @@ async function all(inCanvas2, result, drawOptions) {
calcBuffered(result, localOptions);
else
bufferedResult = result;
face2(inCanvas2, result.face, localOptions);
face2(inCanvas2, bufferedResult.face, localOptions);
body2(inCanvas2, bufferedResult.body, localOptions);
hand2(inCanvas2, bufferedResult.hand, localOptions);
gesture(inCanvas2, result.gesture, localOptions);
@ -19942,7 +19952,7 @@ lBhEMohlFerLlBjEMohMVTEARDKCITsAk2AEgAAAkAAAAAAAAAAAAAAAAAAAAAAAASAAAAAAAAD/
2Q==`;
// package.json
var version = "1.9.4";
var version = "2.0.0";
// src/human.ts
var _numTensors, _analyzeMemoryLeaks, _checkSanity, _firstRun, _lastInputSum, _lastCacheDiff, _sanity, _checkBackend, _skipFrame, _warmupBitmap, _warmupCanvas, _warmupNode;

dist/human.node.js (vendored)

@ -17598,6 +17598,8 @@ async function load11(config3) {
return model8;
}
async function process3(res, inputSize, outputShape, config3) {
if (!res)
return [];
const results = [];
const detections = res.arraySync();
const squeezeT = tf17.squeeze(res);
@ -17617,7 +17619,7 @@ async function process3(res, inputSize, outputShape, config3) {
nmsT.dispose();
let i = 0;
for (const id of nms) {
const score3 = detections[0][id][4];
const score3 = Math.trunc(100 * detections[0][id][4]) / 100;
const classVal = detections[0][id][5];
const label = labels[classVal].label;
const boxRaw3 = [
@ -17636,18 +17638,16 @@ async function process3(res, inputSize, outputShape, config3) {
}
return results;
}
async function predict10(image15, config3) {
async function predict10(input, config3) {
if (skipped6 < config3.object.skipFrames && config3.skipFrame && last4.length > 0) {
skipped6++;
return last4;
}
skipped6 = 0;
return new Promise(async (resolve) => {
const outputSize = [image15.shape[2], image15.shape[1]];
const resize = tf17.image.resizeBilinear(image15, [model8.inputSize, model8.inputSize], false);
let objectT;
if (config3.object.enabled)
objectT = model8.execute(resize, "tower_0/detections");
const outputSize = [input.shape[2], input.shape[1]];
const resize = tf17.image.resizeBilinear(input, [model8.inputSize, model8.inputSize]);
const objectT = config3.object.enabled ? model8.execute(resize, ["tower_0/detections"]) : null;
resize.dispose();
const obj = await process3(objectT, model8.inputSize, outputSize, config3);
last4 = obj;
@ -19105,9 +19105,9 @@ function calcBuffered(newResult, localOptions) {
if (!bufferedResult.body || newResult.body.length !== bufferedResult.body.length)
bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
for (let i = 0; i < newResult.body.length; i++) {
bufferedResult.body[i].box = newResult.body[i].box.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + box6) / localOptions.bufferedFactor);
bufferedResult.body[i].boxRaw = newResult.body[i].boxRaw.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + box6) / localOptions.bufferedFactor);
bufferedResult.body[i].keypoints = newResult.body[i].keypoints.map((keypoint, j) => ({
const box6 = newResult.body[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.body[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const keypoints3 = newResult.body[i].keypoints.map((keypoint, j) => ({
score: keypoint.score,
part: keypoint.part,
position: {
@ -19115,17 +19115,27 @@ function calcBuffered(newResult, localOptions) {
y: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / localOptions.bufferedFactor : keypoint.position.y
}
}));
bufferedResult.body[i] = { ...newResult.body[i], box: box6, boxRaw: boxRaw3, keypoints: keypoints3 };
}
if (!bufferedResult.hand || newResult.hand.length !== bufferedResult.hand.length)
bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
for (let i = 0; i < newResult.hand.length; i++) {
bufferedResult.hand[i].box = newResult.hand[i].box.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + box6) / localOptions.bufferedFactor);
bufferedResult.hand[i].boxRaw = newResult.hand[i].boxRaw.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + box6) / localOptions.bufferedFactor);
bufferedResult.hand[i].landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
const box6 = newResult.hand[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.hand[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
const keys = Object.keys(newResult.hand[i].annotations);
const annotations3 = [];
for (const key of keys) {
bufferedResult.hand[i].annotations[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
annotations3[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
}
bufferedResult.hand[i] = { ...newResult.hand[i], box: box6, boxRaw: boxRaw3, landmarks, annotations: annotations3 };
}
if (!bufferedResult.face || newResult.face.length !== bufferedResult.face.length)
bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
for (let i = 0; i < newResult.face.length; i++) {
const box6 = newResult.face[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw3 = newResult.face[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / localOptions.bufferedFactor);
bufferedResult.face[i] = { ...newResult.face[i], box: box6, boxRaw: boxRaw3 };
}
const newPersons = newResult.persons;
if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length)
@ -19152,7 +19162,7 @@ async function all(inCanvas2, result, drawOptions) {
calcBuffered(result, localOptions);
else
bufferedResult = result;
face2(inCanvas2, result.face, localOptions);
face2(inCanvas2, bufferedResult.face, localOptions);
body2(inCanvas2, bufferedResult.body, localOptions);
hand2(inCanvas2, bufferedResult.hand, localOptions);
gesture(inCanvas2, result.gesture, localOptions);
@ -19941,7 +19951,7 @@ lBhEMohlFerLlBjEMohMVTEARDKCITsAk2AEgAAAkAAAAAAAAAAAAAAAAAAAAAAAASAAAAAAAAD/
2Q==`;
// package.json
var version = "1.9.4";
var version = "2.0.0";
// src/human.ts
var _numTensors, _analyzeMemoryLeaks, _checkSanity, _firstRun, _lastInputSum, _lastCacheDiff, _sanity, _checkBackend, _skipFrame, _warmupBitmap, _warmupCanvas, _warmupNode;

dist/tfjs.esm.js (vendored): diff suppressed because one or more lines are too long. Additional large-file diffs suppressed and one binary file not shown.

@ -1,6 +1,6 @@
{
"name": "@vladmandic/human",
"version": "1.9.4",
"version": "2.0.0",
"description": "Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition",
"sideEffects": false,
"main": "dist/human.node.js",
@ -68,10 +68,10 @@
"canvas": "^2.8.0",
"chokidar": "^3.5.1",
"dayjs": "^1.10.5",
"esbuild": "^0.12.4",
"esbuild": "^0.12.5",
"eslint": "^7.27.0",
"eslint-config-airbnb-base": "^14.2.1",
"eslint-plugin-import": "^2.23.3",
"eslint-plugin-import": "^2.23.4",
"eslint-plugin-json": "^3.0.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^5.1.0",


@ -1,17 +1,17 @@
2021-05-29 18:28:57 INFO:  @vladmandic/human version 1.9.4
2021-05-29 18:28:57 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-29 18:28:57 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-05-29 18:28:57 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-05-29 18:28:57 STATE: Build for: node type: node: {"imports":39,"importBytes":443782,"outputBytes":396032,"outputFiles":"dist/human.node.js"}
2021-05-29 18:28:57 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-05-29 18:28:57 STATE: Build for: nodeGPU type: node: {"imports":39,"importBytes":443790,"outputBytes":396036,"outputFiles":"dist/human.node-gpu.js"}
2021-05-29 18:28:57 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-05-29 18:28:57 STATE: Build for: nodeWASM type: node: {"imports":39,"importBytes":443857,"outputBytes":396108,"outputFiles":"dist/human.node-wasm.js"}
2021-05-29 18:28:57 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-05-29 18:28:57 STATE: Build for: browserNoBundle type: esm: {"imports":39,"importBytes":443884,"outputBytes":242161,"outputFiles":"dist/human.esm-nobundle.js"}
2021-05-29 18:28:58 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2478,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
2021-05-29 18:28:59 STATE: Build for: browserBundle type: iife: {"imports":39,"importBytes":1553904,"outputBytes":1349968,"outputFiles":"dist/human.js"}
2021-05-29 18:28:59 STATE: Build for: browserBundle type: esm: {"imports":39,"importBytes":1553904,"outputBytes":1349960,"outputFiles":"dist/human.esm.js"}
2021-05-29 18:28:59 INFO:  Generate types: ["src/human.ts"]
2021-05-29 18:29:05 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-29 18:29:05 INFO:  Generate TypeDocs: ["src/human.ts"]
2021-05-30 12:02:00 INFO:  @vladmandic/human version 2.0.0
2021-05-30 12:02:00 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-30 12:02:00 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
2021-05-30 12:02:00 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
2021-05-30 12:02:00 STATE: Build for: node type: node: {"imports":39,"importBytes":444757,"outputBytes":396794,"outputFiles":"dist/human.node.js"}
2021-05-30 12:02:00 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
2021-05-30 12:02:00 STATE: Build for: nodeGPU type: node: {"imports":39,"importBytes":444765,"outputBytes":396798,"outputFiles":"dist/human.node-gpu.js"}
2021-05-30 12:02:00 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
2021-05-30 12:02:01 STATE: Build for: nodeWASM type: node: {"imports":39,"importBytes":444832,"outputBytes":396870,"outputFiles":"dist/human.node-wasm.js"}
2021-05-30 12:02:01 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-05-30 12:02:01 STATE: Build for: browserNoBundle type: esm: {"imports":39,"importBytes":444859,"outputBytes":242556,"outputFiles":"dist/human.esm-nobundle.js"}
2021-05-30 12:02:01 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2478,"outputBytes":1111418,"outputFiles":"dist/tfjs.esm.js"}
2021-05-30 12:02:02 STATE: Build for: browserBundle type: iife: {"imports":39,"importBytes":1554883,"outputBytes":1350365,"outputFiles":"dist/human.js"}
2021-05-30 12:02:02 STATE: Build for: browserBundle type: esm: {"imports":39,"importBytes":1554883,"outputBytes":1350357,"outputFiles":"dist/human.esm.js"}
2021-05-30 12:02:02 INFO:  Generate types: ["src/human.ts"]
2021-05-30 12:02:07 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-05-30 12:02:07 INFO:  Generate TypeDocs: ["src/human.ts"]


@ -503,17 +503,18 @@ export async function person(inCanvas: HTMLCanvasElement, result: Array<Person>,
}
function calcBuffered(newResult, localOptions) {
// if (newResult.timestamp !== bufferedResult?.timestamp) bufferedResult = JSON.parse(JSON.stringify(newResult)); // no need to force update
// each record is only updated using deep copy when number of detected record changes, otherwise it will converge by itself
// each record is only updated using a deep clone when the number of detected records changes, otherwise it will converge by itself
// otherwise bufferedResult is a shallow clone of result plus updated local calculated values
// thus mixing by-reference and by-value assignments to minimize memory operations
// interpolate body results
if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
for (let i = 0; i < newResult.body.length; i++) { // update body: box, boxRaw, keypoints
bufferedResult.body[i].box = newResult.body[i].box
.map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + box) / localOptions.bufferedFactor) as [number, number, number, number];
bufferedResult.body[i].boxRaw = newResult.body[i].boxRaw
.map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + box) / localOptions.bufferedFactor) as [number, number, number, number];
bufferedResult.body[i].keypoints = newResult.body[i].keypoints
if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) bufferedResult.body = JSON.parse(JSON.stringify(newResult.body)); // deep clone once
for (let i = 0; i < newResult.body.length; i++) {
const box = newResult.body[i].box // update box
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / localOptions.bufferedFactor) as [number, number, number, number];
const boxRaw = newResult.body[i].boxRaw // update boxRaw
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / localOptions.bufferedFactor) as [number, number, number, number];
const keypoints = newResult.body[i].keypoints // update keypoints
.map((keypoint, j) => ({
score: keypoint.score,
part: keypoint.part,
@ -522,24 +523,37 @@ function calcBuffered(newResult, localOptions) {
y: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / localOptions.bufferedFactor : keypoint.position.y,
},
}));
bufferedResult.body[i] = { ...newResult.body[i], box, boxRaw, keypoints }; // shallow clone plus updated values
}
// interpolate hand results
if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
for (let i = 0; i < newResult.hand.length; i++) { // update body: box, boxRaw, landmarks, annotations
bufferedResult.hand[i].box = newResult.hand[i].box
.map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + box) / localOptions.bufferedFactor);
bufferedResult.hand[i].boxRaw = newResult.hand[i].boxRaw
.map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + box) / localOptions.bufferedFactor);
bufferedResult.hand[i].landmarks = newResult.hand[i].landmarks
if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand)); // deep clone once
for (let i = 0; i < newResult.hand.length; i++) {
const box = newResult.hand[i].box // update box
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw = newResult.hand[i].boxRaw // update boxRaw
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / localOptions.bufferedFactor);
const landmarks = newResult.hand[i].landmarks // update landmarks
.map((landmark, j) => landmark
.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
const keys = Object.keys(newResult.hand[i].annotations);
const keys = Object.keys(newResult.hand[i].annotations); // update annotations
const annotations = [];
for (const key of keys) {
bufferedResult.hand[i].annotations[key] = newResult.hand[i].annotations[key]
annotations[key] = newResult.hand[i].annotations[key]
.map((val, j) => val
.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
}
bufferedResult.hand[i] = { ...newResult.hand[i], box, boxRaw, landmarks, annotations }; // shallow clone plus updated values
}
// interpolate face results
if (!bufferedResult.face || (newResult.face.length !== bufferedResult.face.length)) bufferedResult.face = JSON.parse(JSON.stringify(newResult.face)); // deep clone once
for (let i = 0; i < newResult.face.length; i++) {
const box = newResult.face[i].box // update box
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / localOptions.bufferedFactor);
const boxRaw = newResult.face[i].boxRaw // update boxRaw
.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / localOptions.bufferedFactor);
bufferedResult.face[i] = { ...newResult.face[i], box, boxRaw }; // shallow clone plus updated values
}
// interpolate person results
@ -569,7 +583,7 @@ export async function all(inCanvas: HTMLCanvasElement, result: Result, drawOptio
if (!(inCanvas instanceof HTMLCanvasElement)) return;
if (localOptions.bufferedOutput) calcBuffered(result, localOptions); // do results interpolation
else bufferedResult = result; // just use results as-is
face(inCanvas, result.face, localOptions); // face does have buffering
face(inCanvas, bufferedResult.face, localOptions); // face does have buffering
body(inCanvas, bufferedResult.body, localOptions); // use interpolated results if available
hand(inCanvas, bufferedResult.hand, localOptions); // use interpolated results if available
// person(inCanvas, bufferedResult.persons, localOptions); // use interpolated results if available
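The per-coordinate update used throughout `calcBuffered` is a simple exponential moving average controlled by `bufferedFactor`; isolated as a sketch:

```js
// smooth(prev, next, factor): factor = 1 returns the raw value,
// larger factors weigh history more heavily (smoother but laggier output)
function smooth(prev, next, factor) {
  return ((factor - 1) * prev + next) / factor;
}
// example: factor = 3, previous x = 100, new x = 130 -> (2 * 100 + 130) / 3 = 110
```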


@ -24,18 +24,18 @@ export async function load(config) {
}
async function process(res, inputSize, outputShape, config) {
if (!res) return [];
const results: Array<Item> = [];
const detections = res.arraySync();
const squeezeT = tf.squeeze(res);
res.dispose();
const arr = tf.split(squeezeT, 6, 1); // x1, y1, x2, y2, score, class
squeezeT.dispose();
const stackT = tf.stack([arr[1], arr[0], arr[3], arr[2]], 1); // tf.nms expects y, x
const stackT = tf.stack([arr[1], arr[0], arr[3], arr[2]], 1); // reorder dims as tf.nms expects y, x
const boxesT = stackT.squeeze();
const scoresT = arr[4].squeeze();
const classesT = arr[5].squeeze();
arr.forEach((t) => t.dispose());
// @ts-ignore boxesT type is not correctly inferred
const nmsT = await tf.image.nonMaxSuppressionAsync(boxesT, scoresT, config.object.maxDetected, config.object.iouThreshold, config.object.minConfidence);
boxesT.dispose();
scoresT.dispose();
@ -44,7 +44,7 @@ async function process(res, inputSize, outputShape, config) {
nmsT.dispose();
let i = 0;
for (const id of nms) {
const score = detections[0][id][4];
const score = Math.trunc(100 * detections[0][id][4]) / 100;
const classVal = detections[0][id][5];
const label = labels[classVal].label;
const boxRaw = [
@ -64,18 +64,16 @@ async function process(res, inputSize, outputShape, config) {
return results;
}
export async function predict(image, config): Promise<Item[]> {
export async function predict(input, config): Promise<Item[]> {
if ((skipped < config.object.skipFrames) && config.skipFrame && (last.length > 0)) {
skipped++;
return last;
}
skipped = 0;
return new Promise(async (resolve) => {
const outputSize = [image.shape[2], image.shape[1]];
const resize = tf.image.resizeBilinear(image, [model.inputSize, model.inputSize], false);
let objectT;
if (config.object.enabled) objectT = model.execute(resize, 'tower_0/detections');
const outputSize = [input.shape[2], input.shape[1]];
const resize = tf.image.resizeBilinear(input, [model.inputSize, model.inputSize]);
const objectT = config.object.enabled ? model.execute(resize, ['tower_0/detections']) : null;
resize.dispose();
const obj = await process(objectT, model.inputSize, outputSize, config);
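The thresholds consumed by the non-max-suppression call above come from the object section of the user configuration; a hedged sketch of the relevant options (the values and the model file name are illustrative, not library defaults):

```js
const userConfig = {
  object: {
    enabled: true,
    // modelPath: 'centernet.json', // assumption: model file used for object detection
    minConfidence: 0.2, // discard detections scoring below this value
    iouThreshold: 0.4, // overlap threshold passed to non-max suppression
    maxDetected: 10, // maximum number of boxes returned by non-max suppression
    skipFrames: 15, // reuse the last result for this many frames (see predict() above)
  },
};
```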


@ -1,120 +1,120 @@
2021-05-28 15:52:57 INFO:  @vladmandic/human version 1.9.4
2021-05-28 15:52:57 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-28 15:52:57 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-05-28 15:52:57 INFO:  test-node.js start
2021-05-28 15:52:59 STATE: test-node.js passed: create human
2021-05-28 15:52:59 INFO:  test-node.js human version: 1.9.4
2021-05-28 15:52:59 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-28 15:52:59 INFO:  test-node.js tfjs version: 3.6.0
2021-05-28 15:52:59 STATE: test-node.js passed: set backend: tensorflow
2021-05-28 15:52:59 STATE: test-node.js passed: load models
2021-05-28 15:52:59 STATE: test-node.js result: defined models: 13 loaded models: 6
2021-05-28 15:52:59 STATE: test-node.js passed: warmup: none default
2021-05-28 15:53:01 STATE: test-node.js passed: warmup: face default
2021-05-28 15:53:01 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
2021-05-28 15:53:01 DATA:  test-node.js result: performance: load: 397 total: 1679
2021-05-28 15:53:02 STATE: test-node.js passed: warmup: body default
2021-05-28 15:53:02 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
2021-05-28 15:53:02 DATA:  test-node.js result: performance: load: 397 total: 1462
2021-05-28 15:53:02 INFO:  test-node.js test body variants
2021-05-28 15:53:03 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-28 15:53:04 STATE: test-node.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-28 15:53:04 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
2021-05-28 15:53:04 DATA:  test-node.js result: performance: load: 397 total: 957
2021-05-28 15:53:05 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-28 15:53:06 STATE: test-node.js passed: detect: assets/human-sample-body.jpg blazepose
2021-05-28 15:53:06 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:06 DATA:  test-node.js result: performance: load: 397 total: 523
2021-05-28 15:53:07 STATE: test-node.js passed: detect: random default
2021-05-28 15:53:07 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":1,"keypoints":39}
2021-05-28 15:53:07 DATA:  test-node.js result: performance: load: 397 total: 970
2021-05-28 15:53:07 INFO:  test-node.js test: first instance
2021-05-28 15:53:07 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-28 15:53:09 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-28 15:53:09 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:09 DATA:  test-node.js result: performance: load: 397 total: 1591
2021-05-28 15:53:09 INFO:  test-node.js test: second instance
2021-05-28 15:53:09 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-28 15:53:11 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-28 15:53:11 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:11 DATA:  test-node.js result: performance: load: 4 total: 1475
2021-05-28 15:53:11 INFO:  test-node.js test: concurrent
2021-05-28 15:53:11 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-28 15:53:11 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-28 15:53:12 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-28 15:53:13 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-28 15:53:18 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-28 15:53:18 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:18 DATA:  test-node.js result: performance: load: 397 total: 5615
2021-05-28 15:53:18 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-28 15:53:18 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:18 DATA:  test-node.js result: performance: load: 4 total: 5615
2021-05-28 15:53:18 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-28 15:53:18 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:18 DATA:  test-node.js result: performance: load: 397 total: 5615
2021-05-28 15:53:18 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-28 15:53:18 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:18 DATA:  test-node.js result: performance: load: 4 total: 5615
2021-05-28 15:53:18 INFO:  test-node.js test complete: 19711 ms
2021-05-28 15:53:18 INFO:  test-node-gpu.js start
2021-05-28 15:53:20 WARN:  test-node-gpu.js stderr: 2021-05-28 15:53:20.148509: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-05-28 15:53:20 WARN:  test-node-gpu.js stderr: 2021-05-28 15:53:20.400714: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-05-28 15:53:20 WARN:  test-node-gpu.js stderr: 2021-05-28 15:53:20.400783: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-05-28 15:53:20 STATE: test-node-gpu.js passed: create human
2021-05-28 15:53:20 INFO:  test-node-gpu.js human version: 1.9.4
2021-05-28 15:53:20 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-28 15:53:20 INFO:  test-node-gpu.js tfjs version: 3.6.0
2021-05-28 15:53:20 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-05-28 15:53:20 STATE: test-node-gpu.js passed: load models
2021-05-28 15:53:20 STATE: test-node-gpu.js result: defined models: 13 loaded models: 6
2021-05-28 15:53:20 STATE: test-node-gpu.js passed: warmup: none default
2021-05-28 15:53:22 STATE: test-node-gpu.js passed: warmup: face default
2021-05-28 15:53:22 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
2021-05-28 15:53:22 DATA:  test-node-gpu.js result: performance: load: 310 total: 1956
2021-05-28 15:53:24 STATE: test-node-gpu.js passed: warmup: body default
2021-05-28 15:53:24 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
2021-05-28 15:53:24 DATA:  test-node-gpu.js result: performance: load: 310 total: 1601
2021-05-28 15:53:24 INFO:  test-node-gpu.js test body variants
2021-05-28 15:53:25 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-28 15:53:26 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-28 15:53:26 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
2021-05-28 15:53:26 DATA:  test-node-gpu.js result: performance: load: 310 total: 1047
2021-05-28 15:53:27 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-28 15:53:27 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg blazepose
2021-05-28 15:53:27 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:27 DATA:  test-node-gpu.js result: performance: load: 310 total: 436
2021-05-28 15:53:28 STATE: test-node-gpu.js passed: detect: random default
2021-05-28 15:53:28 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:28 DATA:  test-node-gpu.js result: performance: load: 310 total: 209
2021-05-28 15:53:28 INFO:  test-node-gpu.js test: first instance
2021-05-28 15:53:28 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-28 15:53:28 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-28 15:53:28 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:28 DATA:  test-node-gpu.js result: performance: load: 310 total: 184
2021-05-28 15:53:28 INFO:  test-node-gpu.js test: second instance
2021-05-28 15:53:29 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-28 15:53:30 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-28 15:53:30 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:30 DATA:  test-node-gpu.js result: performance: load: 2 total: 1487
2021-05-28 15:53:30 INFO:  test-node-gpu.js test: concurrent
2021-05-28 15:53:30 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-28 15:53:30 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-28 15:53:31 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-28 15:53:32 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-28 15:53:38 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-28 15:53:38 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:38 DATA:  test-node-gpu.js result: performance: load: 310 total: 5528
2021-05-28 15:53:38 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-28 15:53:38 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:38 DATA:  test-node-gpu.js result: performance: load: 2 total: 5528
2021-05-28 15:53:38 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-28 15:53:38 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:38 DATA:  test-node-gpu.js result: performance: load: 310 total: 5528
2021-05-28 15:53:38 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-28 15:53:38 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
2021-05-28 15:53:38 DATA:  test-node-gpu.js result: performance: load: 2 total: 5528
2021-05-28 15:53:38 INFO:  test-node-gpu.js test complete: 17887 ms
2021-05-28 15:53:38 INFO:  test-node-wasm.js start
2021-05-28 15:53:38 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
2021-05-28 15:53:38 ERROR: test-node-wasm.js aborting test
2021-05-28 15:53:38 INFO:  status: {"passed":46,"failed":1}
2021-05-29 18:30:17 INFO:  @vladmandic/human version 1.9.4
2021-05-29 18:30:17 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-05-29 18:30:17 INFO:  tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
2021-05-29 18:30:17 INFO:  test-node.js start
2021-05-29 18:30:19 STATE: test-node.js passed: create human
2021-05-29 18:30:19 INFO:  test-node.js human version: 1.9.4
2021-05-29 18:30:19 INFO:  test-node.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-29 18:30:19 INFO:  test-node.js tfjs version: 3.6.0
2021-05-29 18:30:19 STATE: test-node.js passed: set backend: tensorflow
2021-05-29 18:30:19 STATE: test-node.js passed: load models
2021-05-29 18:30:19 STATE: test-node.js result: defined models: 14 loaded models: 6
2021-05-29 18:30:19 STATE: test-node.js passed: warmup: none default
2021-05-29 18:30:21 STATE: test-node.js passed: warmup: face default
2021-05-29 18:30:21 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.73,"keypoints":5}
2021-05-29 18:30:21 DATA:  test-node.js result: performance: load: 345 total: 1666
2021-05-29 18:30:22 STATE: test-node.js passed: warmup: body default
2021-05-29 18:30:22 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-29 18:30:22 DATA:  test-node.js result: performance: load: 345 total: 1523
2021-05-29 18:30:22 INFO:  test-node.js test body variants
2021-05-29 18:30:23 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-29 18:30:24 STATE: test-node.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-29 18:30:24 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.96,"keypoints":16}
2021-05-29 18:30:24 DATA:  test-node.js result: performance: load: 345 total: 987
2021-05-29 18:30:25 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-29 18:30:26 STATE: test-node.js passed: detect: assets/human-sample-body.jpg movenet
2021-05-29 18:30:26 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-29 18:30:26 DATA:  test-node.js result: performance: load: 345 total: 318
2021-05-29 18:30:27 STATE: test-node.js passed: detect: random default
2021-05-29 18:30:27 DATA:  test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-05-29 18:30:27 DATA:  test-node.js result: performance: load: 345 total: 788
2021-05-29 18:30:27 INFO:  test-node.js test: first instance
2021-05-29 18:30:27 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-29 18:30:28 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-29 18:30:28 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-29 18:30:28 DATA:  test-node.js result: performance: load: 345 total: 1516
2021-05-29 18:30:28 INFO:  test-node.js test: second instance
2021-05-29 18:30:29 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-29 18:30:30 STATE: test-node.js passed: detect: assets/sample-me.jpg default
2021-05-29 18:30:30 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-29 18:30:30 DATA:  test-node.js result: performance: load: 7 total: 1435
2021-05-29 18:30:30 INFO:  test-node.js test: concurrent
2021-05-29 18:30:30 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-29 18:30:30 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-29 18:30:31 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-29 18:30:32 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-29 18:30:38 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-29 18:30:38 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-29 18:30:38 DATA:  test-node.js result: performance: load: 345 total: 5657
2021-05-29 18:30:38 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
2021-05-29 18:30:38 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-29 18:30:38 DATA:  test-node.js result: performance: load: 7 total: 5657
2021-05-29 18:30:38 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-29 18:30:38 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-29 18:30:38 DATA:  test-node.js result: performance: load: 345 total: 5657
2021-05-29 18:30:38 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
2021-05-29 18:30:38 DATA:  test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-29 18:30:38 DATA:  test-node.js result: performance: load: 7 total: 5657
2021-05-29 18:30:38 INFO:  test-node.js test complete: 19401 ms
2021-05-29 18:30:38 INFO:  test-node-gpu.js start
2021-05-29 18:30:39 WARN:  test-node-gpu.js stderr: 2021-05-29 18:30:39.644962: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-05-29 18:30:39 WARN:  test-node-gpu.js stderr: 2021-05-29 18:30:39.887957: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-05-29 18:30:39 WARN:  test-node-gpu.js stderr: 2021-05-29 18:30:39.888022: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
2021-05-29 18:30:39 STATE: test-node-gpu.js passed: create human
2021-05-29 18:30:39 INFO:  test-node-gpu.js human version: 1.9.4
2021-05-29 18:30:39 INFO:  test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
2021-05-29 18:30:39 INFO:  test-node-gpu.js tfjs version: 3.6.0
2021-05-29 18:30:40 STATE: test-node-gpu.js passed: set backend: tensorflow
2021-05-29 18:30:40 STATE: test-node-gpu.js passed: load models
2021-05-29 18:30:40 STATE: test-node-gpu.js result: defined models: 14 loaded models: 6
2021-05-29 18:30:40 STATE: test-node-gpu.js passed: warmup: none default
2021-05-29 18:30:42 STATE: test-node-gpu.js passed: warmup: face default
2021-05-29 18:30:42 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.73,"keypoints":5}
2021-05-29 18:30:42 DATA:  test-node-gpu.js result: performance: load: 348 total: 1692
2021-05-29 18:30:43 STATE: test-node-gpu.js passed: warmup: body default
2021-05-29 18:30:43 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-29 18:30:43 DATA:  test-node-gpu.js result: performance: load: 348 total: 1521
2021-05-29 18:30:43 INFO:  test-node-gpu.js test body variants
2021-05-29 18:30:44 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-29 18:30:45 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet
2021-05-29 18:30:45 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.96,"keypoints":16}
2021-05-29 18:30:45 DATA:  test-node-gpu.js result: performance: load: 348 total: 1028
2021-05-29 18:30:46 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-29 18:30:46 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg movenet
2021-05-29 18:30:46 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-29 18:30:46 DATA:  test-node-gpu.js result: performance: load: 348 total: 327
2021-05-29 18:30:47 STATE: test-node-gpu.js passed: detect: random default
2021-05-29 18:30:47 DATA:  test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
2021-05-29 18:30:47 DATA:  test-node-gpu.js result: performance: load: 348 total: 757
2021-05-29 18:30:47 INFO:  test-node-gpu.js test: first instance
2021-05-29 18:30:48 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-29 18:30:49 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-29 18:30:49 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-29 18:30:49 DATA:  test-node-gpu.js result: performance: load: 348 total: 1413
2021-05-29 18:30:49 INFO:  test-node-gpu.js test: second instance
2021-05-29 18:30:50 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
2021-05-29 18:30:51 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
2021-05-29 18:30:51 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":0.67,"keypoints":7}
2021-05-29 18:30:51 DATA:  test-node-gpu.js result: performance: load: 2 total: 1429
2021-05-29 18:30:51 INFO:  test-node-gpu.js test: concurrent
2021-05-29 18:30:51 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-29 18:30:51 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
2021-05-29 18:30:52 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-29 18:30:53 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
2021-05-29 18:30:59 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-29 18:30:59 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-29 18:30:59 DATA:  test-node-gpu.js result: performance: load: 348 total: 5404
2021-05-29 18:30:59 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
2021-05-29 18:30:59 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":0.73,"keypoints":17}
2021-05-29 18:30:59 DATA:  test-node-gpu.js result: performance: load: 2 total: 5404
2021-05-29 18:30:59 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-29 18:30:59 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-29 18:30:59 DATA:  test-node-gpu.js result: performance: load: 348 total: 5404
2021-05-29 18:30:59 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
2021-05-29 18:30:59 DATA:  test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":0.93,"keypoints":17}
2021-05-29 18:30:59 DATA:  test-node-gpu.js result: performance: load: 2 total: 5404
2021-05-29 18:30:59 INFO:  test-node-gpu.js test complete: 19062 ms
2021-05-29 18:30:59 INFO:  test-node-wasm.js start
2021-05-29 18:30:59 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
2021-05-29 18:30:59 ERROR: test-node-wasm.js aborting test
2021-05-29 18:30:59 INFO:  status: {"passed":46,"failed":1}
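
Both runs end the same way: test-node.js and test-node-gpu.js pass, while test-node-wasm.js aborts because nothing is listening on http://localhost:10030/models/ (ECONNREFUSED). The WASM test fetches its models over HTTP, so a static server must be serving the models directory on that port before the test starts. Below is a minimal sketch of such a server — the port and URL come from the log, but the serving approach, file layout, and content types are assumptions, not part of the test suite.

```ts
// Minimal static model server sketch (assumptions: repo root contains models/,
// port 10030 as seen in the failing request; this is not the project's own dev server).
import http from 'http';
import { promises as fs } from 'fs';
import path from 'path';

const root = process.cwd(); // serve from repo root so /models/<file> maps to ./models/<file>
const port = 10030;

http.createServer(async (req, res) => {
  const target = path.join(root, decodeURIComponent((req.url || '/').split('?')[0]));
  try {
    const stat = await fs.stat(target);
    if (stat.isDirectory()) {
      // return a JSON listing for directory requests such as /models/
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify(await fs.readdir(target)));
    } else {
      res.writeHead(200, { 'Content-Type': target.endsWith('.json') ? 'application/json' : 'application/octet-stream' });
      res.end(await fs.readFile(target));
    }
  } catch {
    res.writeHead(404);
    res.end();
  }
}).listen(port, () => console.log(`model server listening on http://localhost:${port}/`));
```

Any static file server would do here (for example `npx http-server . -p 10030`); the only requirement implied by the log is that the models are reachable at that address before test-node-wasm.js runs.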

View File

@@ -3,4 +3,4 @@
*/
import { Item } from '../result';
export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<Item[]>;
export declare function predict(input: any, config: any): Promise<Item[]>;
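
The only change in this declaration is the rename of predict's first parameter from image to input; the signatures are otherwise unchanged. A hedged sketch of how the declared API is called follows — the import path and config shape are illustrative placeholders, and only the load/predict signatures are taken from the declaration above.

```ts
// Illustrative call pattern for the declared API (hypothetical module path; only the
// load/predict signatures come from the declaration above).
import { load, predict } from '../object'; // hypothetical path
import type { Item } from '../result';

async function detectObjects(input: any, config: any): Promise<Item[]> {
  await load(config);            // load(config): Promise<any> — loads the detection model
  return predict(input, config); // predict(input, config): Promise<Item[]> — first parameter renamed from 'image'
}
```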

2
wiki

@@ -1 +1 @@
Subproject commit 317a8fc76cd933cc38f59948ffade324fc8f1df2
Subproject commit 78e6de4516ab49f47a906ec7778073b2dbbfed3f