mirror of https://github.com/vladmandic/human

release candidate

parent 261ecda985
commit 3cf15ff629
@@ -1,6 +1,6 @@
 # @vladmandic/human
 
-Version: **1.9.4**
+Version: **2.0.0**
 
 Description: **Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition**
 
 Author: **Vladimir Mandic <mandic00@live.com>**
@@ -9,8 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 
 ## Changelog
 
-### **HEAD -> main** 2021/05/29 mandic00@live.com
+### **HEAD -> main** 2021/05/30 mandic00@live.com
 
+- quantize handdetect model
 - added experimental movenet-lightning and removed blazepose from default dist
 - added experimental face.rotation.gaze
 - fix and optimize for mobile platform

README.md (12 changed lines)
@@ -66,6 +66,7 @@ Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) fo
 - [**Platform Support**](https://github.com/vladmandic/human/wiki/Platforms)
 - [**List of Models & Credits**](https://github.com/vladmandic/human/wiki/Models)
 - [**Security & Privacy Policy**](https://github.com/vladmandic/human/blob/main/SECURITY.md)
+- [**License & Usage Restrictions**](https://github.com/vladmandic/human/blob/main/LICENSE)
 
 <br>
 
@@ -77,8 +78,8 @@ Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) fo
 
 ## Options
 
-As presented in the demo application...
+All options as presented in the demo application...
 > [demo/index.html](demo/index.html)
 
 
 
@@ -88,7 +89,7 @@ As presented in the demo application...
 
 <br>
 
-**Training image:**
+**Validation image:**
 > [demo/index.html](demo/index.html?image=%22../assets/human-sample-upper.jpg%22)
 
 
 
@@ -104,7 +105,10 @@ As presented in the demo application...
 
 
 **Face Similarity Matching:**
-> [demo/facematch.html](demo/facematch.html)
+Extracts all faces from provided input images,
+sorts them by similarity to selected face
+and optionally matches detected face with database of known people to guess their names
+> [demo/facematch.html](demo/facematch.html)
 
 

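For orientation only: face matching of the kind described above comes down to comparing face descriptor vectors and ranking candidates by similarity. The sketch below is not the project's actual API; the idea that each detected face carries a numeric descriptor array is taken from the README text, while the function names and the use of cosine similarity are assumptions for illustration.

// Illustrative sketch: rank candidate face descriptors by similarity to a selected descriptor.
function cosineSimilarity(a: number[], b: number[]): number {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < Math.min(a.length, b.length); i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return normA && normB ? dot / (Math.sqrt(normA) * Math.sqrt(normB)) : 0;
}

// sort candidate faces by similarity to the selected face, most similar first
function sortBySimilarity(selected: number[], candidates: number[][]): number[][] {
  return [...candidates].sort((x, y) => cosineSimilarity(selected, y) - cosineSimilarity(selected, x));
}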
TODO.md (14 changed lines)
@@ -7,7 +7,7 @@ N/A
 ## Exploring Features
 
 - Implement demo as installable PWA with model caching
-- Implement results interpolation on library level
+- Implement results interpolation on library level instead inside draw functions
 - Switch to TypeScript 4.3
 
 ## Explore Models
@@ -16,6 +16,14 @@ N/A
 
 ## In Progress
 
-- Face interpolation
-- Gaze interpolation
+- Face rotation interpolation
+- Object detection interpolation
 - Unify score/confidence variables
 
+## Issues
+
+- CenterNet WebGL: <https://github.com/tensorflow/tfjs/issues/5145>
+- CenterNet WASM: <https://github.com/tensorflow/tfjs/issues/5110>
+- NanoDet WASM: <https://github.com/tensorflow/tfjs/issues/4824>
+- BlazeFace and HandPose rotation in NodeJS: <https://github.com/tensorflow/tfjs/issues/4066>
+- TypeDoc with TS 4.3: <https://github.com/TypeStrong/typedoc/issues/1589>
Binary file not shown (after: 50 KiB)
@@ -13,6 +13,7 @@ function log(...msg) {
 onmessage = async (msg) => {
   if (busy) return;
   busy = true;
+  // received from index.js using:
   // worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
   const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
   let result = {};
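For context, the sending side of this exchange is the postMessage call quoted in the comment above. A minimal sketch of that main-thread side follows; the worker file name matches ui.worker in the demo, but the element id, the empty config placeholder, and the shape of the reply are assumptions.

// Sketch of the main-thread side matching the worker's onmessage handler above.
const worker = new Worker('index-worker.js');
const canvas = document.getElementById('canvas') as HTMLCanvasElement;
const ctx = canvas.getContext('2d') as CanvasRenderingContext2D;
const image = ctx.getImageData(0, 0, canvas.width, canvas.height);
// transfer the pixel buffer instead of copying it; image.data.buffer is detached after this call
worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config: {} }, [image.data.buffer]);
worker.onmessage = (msg) => {
  // the worker is expected to post a result object back once detection completes (shape assumed here)
  console.log('detection result', msg.data);
};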
@@ -1,13 +1,26 @@
-// @ts-nocheck // typescript checks disabled as this is pure javascript
-
 /**
  * Human demo for browsers
  *
- * Main demo app that exposes all Human functionality
+ * @description Main demo app that exposes all Human functionality
+ *
+ * @params Optional URL parameters:
+ * image=<imagePath:string>: perform detection on specific image and finish
+ * worker=<true|false>: use WebWorkers
+ * backend=<webgl|wasm|cpu>: use specific TF backend for operations
+ * preload=<true|false>: pre-load all configured models
+ * warmup=<true|false>: warmup all configured models
+ *
+ * @example <https://wyse:10031/?backend=wasm&worker=true&image="/assets/sample-me.jpg">
+ *
+ * @configuration
+ * userConfig={}: contains all model configuration used by human
+ * drawOptions={}: contains all draw variables used by human.draw
+ * ui={}: contains all variables exposed in the UI
  */
 
+// @ts-nocheck // typescript checks disabled as this is pure javascript
+
 import Human from '../dist/human.esm.js'; // equivalent of @vladmandic/human
-// import Human from '../dist/human.esm-nobundle.js'; // this requires that tf is loaded manually and bundled before human can be used
 import Menu from './helpers/menu.js';
 import GLBench from './helpers/gl-bench.js';
 import webRTC from './helpers/webrtc.js';
@@ -15,8 +28,7 @@ import webRTC from './helpers/webrtc.js';
 let human;
 
 const userConfig = {
-  warmup: 'full',
-  /*
+  warmup: 'none',
   backend: 'webgl',
   async: false,
   cacheSensitivity: 0,
@@ -34,10 +46,9 @@ const userConfig = {
   hand: { enabled: false },
   // body: { enabled: true, modelPath: 'posenet.json' },
   // body: { enabled: true, modelPath: 'blazepose.json' },
-  body: { enabled: true, modelPath: 'movenet-lightning.json' },
-  object: { enabled: false },
+  body: { enabled: false, modelPath: 'movenet-lightning.json' },
+  object: { enabled: true },
   gesture: { enabled: true },
-  */
 };
 
 const drawOptions = {
@@ -53,7 +64,7 @@ const ui = {
   facing: true, // camera facing front or back
   baseBackground: 'rgba(50, 50, 50, 1)', // 'grey'
   columns: 2, // when processing sample images create this many columns
-  useWorker: false, // use web workers for processing
+  useWorker: true, // use web workers for processing
   worker: 'index-worker.js',
   maxFPSframes: 10, // keep fps history for how many frames
   modelsPreload: true, // preload human models on startup
@@ -84,6 +95,7 @@ const ui = {
 
   // sample images
   compare: '../assets/sample-me.jpg', // base image for face compare
+  /*
   samples: [
     '../assets/sample6.jpg',
     '../assets/sample1.jpg',
@@ -92,45 +104,10 @@ const ui = {
     '../assets/sample3.jpg',
     '../assets/sample2.jpg',
   ],
-  /*
-  ui.samples = [
-    '../private/daz3d/daz3d-brianna.jpg',
-    '../private/daz3d/daz3d-chiyo.jpg',
-    '../private/daz3d/daz3d-cody.jpg',
-    '../private/daz3d/daz3d-drew-01.jpg',
-    '../private/daz3d/daz3d-drew-02.jpg',
-    '../private/daz3d/daz3d-ella-01.jpg',
-    '../private/daz3d/daz3d-ella-02.jpg',
-    '../private/daz3d/daz3d-_emotions01.jpg',
-    '../private/daz3d/daz3d-_emotions02.jpg',
-    '../private/daz3d/daz3d-_emotions03.jpg',
-    '../private/daz3d/daz3d-_emotions04.jpg',
-    '../private/daz3d/daz3d-_emotions05.jpg',
-    '../private/daz3d/daz3d-gillian.jpg',
-    '../private/daz3d/daz3d-ginnifer.jpg',
-    '../private/daz3d/daz3d-hye-01.jpg',
-    '../private/daz3d/daz3d-hye-02.jpg',
-    '../private/daz3d/daz3d-kaia.jpg',
-    '../private/daz3d/daz3d-karen.jpg',
-    '../private/daz3d/daz3d-kiaria-01.jpg',
-    '../private/daz3d/daz3d-kiaria-02.jpg',
-    '../private/daz3d/daz3d-lilah-01.jpg',
-    '../private/daz3d/daz3d-lilah-02.jpg',
-    '../private/daz3d/daz3d-lilah-03.jpg',
-    '../private/daz3d/daz3d-lila.jpg',
-    '../private/daz3d/daz3d-lindsey.jpg',
-    '../private/daz3d/daz3d-megah.jpg',
-    '../private/daz3d/daz3d-selina-01.jpg',
-    '../private/daz3d/daz3d-selina-02.jpg',
-    '../private/daz3d/daz3d-snow.jpg',
-    '../private/daz3d/daz3d-sunshine.jpg',
-    '../private/daz3d/daz3d-taia.jpg',
-    '../private/daz3d/daz3d-tuesday-01.jpg',
-    '../private/daz3d/daz3d-tuesday-02.jpg',
-    '../private/daz3d/daz3d-tuesday-03.jpg',
-    '../private/daz3d/daz3d-zoe.jpg',
-  ];
   */
+  samples: [
+    '../private/daz3d/daz3d-kiaria-02.jpg',
+  ],
 };
 
 // global variables
@@ -267,9 +244,9 @@ async function drawResults(input) {
   // if buffered, immediate loop but limit frame rate although it's going to run slower as JS is singlethreaded
   if (ui.buffered) {
     ui.drawThread = requestAnimationFrame(() => drawResults(input, canvas));
-  } else if (!ui.buffered && ui.drawThread) {
+  } else {
     log('stopping buffered refresh');
-    cancelAnimationFrame(ui.drawThread);
+    if (ui.drawThread) cancelAnimationFrame(ui.drawThread);
     ui.drawThread = null;
   }
 }
@@ -435,7 +412,7 @@ function runHumanDetect(input, canvas, timestamp) {
     offscreen.width = canvas.width;
     offscreen.height = canvas.height;
     const ctx = offscreen.getContext('2d');
-    ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
+    ctx.drawImage(input, 0, 0, canvas.width, canvas.height);
     const data = ctx.getImageData(0, 0, canvas.width, canvas.height);
     // perform detection in worker
     webWorker(input, data, canvas, userConfig, timestamp);
@@ -522,6 +499,7 @@ async function detectVideo() {
 
 // just initialize everything and call main function
 async function detectSampleImages() {
+  document.getElementById('play').style.display = 'none';
   document.getElementById('canvas').style.display = 'none';
   document.getElementById('samples-container').style.display = 'block';
   log('running detection of sample images');
@@ -530,6 +508,9 @@ async function detectSampleImages() {
   for (const m of Object.values(menu)) m.hide();
   for (const image of ui.samples) await processImage(image);
   status();
+  document.getElementById('play').style.display = 'none';
+  document.getElementById('loader').style.display = 'none';
+  if (ui.detectThread) cancelAnimationFrame(ui.detectThread);
 }
 
 function setupMenu() {
@@ -692,6 +673,12 @@ async function main() {
 
   document.documentElement.style.setProperty('--icon-size', ui.iconSize);
 
+  // sanity check for webworker compatibility
+  if (typeof Worker === 'undefined' || typeof OffscreenCanvas === 'undefined') {
+    ui.useWorker = false;
+    log('workers are disabled due to missing browser functionality');
+  }
+
   // parse url search params
   const params = new URLSearchParams(location.search);
   log('url options:', params.toString());
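The new JSDoc block at the top of this file documents the supported URL parameters, and the last hunk shows that the demo reads them through URLSearchParams. A condensed sketch of that pattern is below; the parameter names follow the JSDoc, while the default values are placeholders, not the demo's actual defaults.

// Reading the documented URL parameters with URLSearchParams.
const params = new URLSearchParams(location.search);
const image = params.get('image');                   // image=<imagePath>: detect a single image and finish
const useWorker = params.get('worker') === 'true';   // worker=<true|false>
const backend = params.get('backend') ?? 'webgl';    // backend=<webgl|wasm|cpu> (fallback here is assumed)
const preload = params.get('preload') === 'true';    // preload=<true|false>
const warmup = params.get('warmup') === 'true';      // warmup=<true|false>
console.log({ image, useWorker, backend, preload, warmup });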
demo/node.js (21 changed lines)
@@ -4,6 +4,7 @@
 
 const log = require('@vladmandic/pilogger');
 const fs = require('fs');
+const path = require('path');
 const process = require('process');
 const fetch = require('node-fetch').default;
 
@@ -142,6 +143,7 @@ async function detect(input) {
     log.data(' Object: N/A');
   }
 
+  fs.writeFileSync('result.json', JSON.stringify(result, null, 2));
   // print data to console
   if (result) {
     log.data('Persons:');
@@ -182,13 +184,26 @@ async function main() {
   log.header();
   log.info('Current folder:', process.env.PWD);
   await init();
+  const f = process.argv[2];
   if (process.argv.length !== 3) {
-    log.warn('Parameters: <input image> missing');
+    log.warn('Parameters: <input image | folder> missing');
     await test();
-  } else if (!fs.existsSync(process.argv[2]) && !process.argv[2].startsWith('http')) {
+  } else if (!fs.existsSync(f) && !f.startsWith('http')) {
     log.error(`File not found: ${process.argv[2]}`);
   } else {
-    await detect(process.argv[2]);
+    if (fs.existsSync(f)) {
+      const stat = fs.statSync(f);
+      if (stat.isDirectory()) {
+        const dir = fs.readdirSync(f);
+        for (const file of dir) {
+          await detect(path.join(f, file));
+        }
+      } else {
+        await detect(f);
+      }
+    } else {
+      await detect(f);
+    }
   }
 }
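Since detect() now also writes its output to result.json, the saved results can be inspected offline after a run. A small sketch of such post-processing follows; the top-level face/body/hand/gesture arrays match the Human result structure seen elsewhere in this commit, but the script itself is hypothetical.

// Hypothetical post-processing of the result.json file written by demo/node.js above.
import { readFileSync } from 'fs';

const result = JSON.parse(readFileSync('result.json', 'utf8'));
console.log('faces:', result.face?.length ?? 0);
console.log('bodies:', result.body?.length ?? 0);
console.log('hands:', result.hand?.length ?? 0);
console.log('gestures:', result.gesture?.length ?? 0);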
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -17598,6 +17598,8 @@ async function load11(config3) {
   return model8;
 }
 async function process3(res, inputSize, outputShape, config3) {
+  if (!res)
+    return [];
   const results = [];
   const detections = res.arraySync();
   const squeezeT = tf17.squeeze(res);
@@ -17617,7 +17619,7 @@ async function process3(res, inputSize, outputShape, config3) {
   nmsT.dispose();
   let i = 0;
   for (const id of nms) {
-    const score3 = detections[0][id][4];
+    const score3 = Math.trunc(100 * detections[0][id][4]) / 100;
     const classVal = detections[0][id][5];
     const label = labels[classVal].label;
     const boxRaw3 = [
@@ -17636,18 +17638,16 @@ async function process3(res, inputSize, outputShape, config3) {
   }
   return results;
 }
-async function predict10(image15, config3) {
+async function predict10(input, config3) {
   if (skipped6 < config3.object.skipFrames && config3.skipFrame && last4.length > 0) {
     skipped6++;
     return last4;
   }
   skipped6 = 0;
   return new Promise(async (resolve) => {
-    const outputSize = [image15.shape[2], image15.shape[1]];
-    const resize = tf17.image.resizeBilinear(image15, [model8.inputSize, model8.inputSize], false);
-    let objectT;
-    if (config3.object.enabled)
-      objectT = model8.execute(resize, "tower_0/detections");
+    const outputSize = [input.shape[2], input.shape[1]];
+    const resize = tf17.image.resizeBilinear(input, [model8.inputSize, model8.inputSize]);
+    const objectT = config3.object.enabled ? model8.execute(resize, ["tower_0/detections"]) : null;
     resize.dispose();
     const obj = await process3(objectT, model8.inputSize, outputSize, config3);
     last4 = obj;
@@ -19105,9 +19105,9 @@ function calcBuffered(newResult, localOptions) {
   if (!bufferedResult.body || newResult.body.length !== bufferedResult.body.length)
     bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
   for (let i = 0; i < newResult.body.length; i++) {
-    bufferedResult.body[i].box = newResult.body[i].box.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + box6) / localOptions.bufferedFactor);
-    bufferedResult.body[i].boxRaw = newResult.body[i].boxRaw.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + box6) / localOptions.bufferedFactor);
-    bufferedResult.body[i].keypoints = newResult.body[i].keypoints.map((keypoint, j) => ({
+    const box6 = newResult.body[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / localOptions.bufferedFactor);
+    const boxRaw3 = newResult.body[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / localOptions.bufferedFactor);
+    const keypoints3 = newResult.body[i].keypoints.map((keypoint, j) => ({
       score: keypoint.score,
       part: keypoint.part,
       position: {
@@ -19115,17 +19115,27 @@ function calcBuffered(newResult, localOptions) {
         y: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / localOptions.bufferedFactor : keypoint.position.y
       }
     }));
+    bufferedResult.body[i] = { ...newResult.body[i], box: box6, boxRaw: boxRaw3, keypoints: keypoints3 };
   }
   if (!bufferedResult.hand || newResult.hand.length !== bufferedResult.hand.length)
     bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
   for (let i = 0; i < newResult.hand.length; i++) {
-    bufferedResult.hand[i].box = newResult.hand[i].box.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + box6) / localOptions.bufferedFactor);
-    bufferedResult.hand[i].boxRaw = newResult.hand[i].boxRaw.map((box6, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + box6) / localOptions.bufferedFactor);
-    bufferedResult.hand[i].landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
+    const box6 = newResult.hand[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / localOptions.bufferedFactor);
+    const boxRaw3 = newResult.hand[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / localOptions.bufferedFactor);
+    const landmarks = newResult.hand[i].landmarks.map((landmark, j) => landmark.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
     const keys = Object.keys(newResult.hand[i].annotations);
+    const annotations3 = [];
     for (const key of keys) {
-      bufferedResult.hand[i].annotations[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
+      annotations3[key] = newResult.hand[i].annotations[key].map((val, j) => val.map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
     }
+    bufferedResult.hand[i] = { ...newResult.hand[i], box: box6, boxRaw: boxRaw3, landmarks, annotations: annotations3 };
+  }
+  if (!bufferedResult.face || newResult.face.length !== bufferedResult.face.length)
+    bufferedResult.face = JSON.parse(JSON.stringify(newResult.face));
+  for (let i = 0; i < newResult.face.length; i++) {
+    const box6 = newResult.face[i].box.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / localOptions.bufferedFactor);
+    const boxRaw3 = newResult.face[i].boxRaw.map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / localOptions.bufferedFactor);
+    bufferedResult.face[i] = { ...newResult.face[i], box: box6, boxRaw: boxRaw3 };
   }
   const newPersons = newResult.persons;
   if (!bufferedResult.persons || newPersons.length !== bufferedResult.persons.length)
@@ -19152,7 +19162,7 @@ async function all(inCanvas2, result, drawOptions) {
     calcBuffered(result, localOptions);
   else
     bufferedResult = result;
-  face2(inCanvas2, result.face, localOptions);
+  face2(inCanvas2, bufferedResult.face, localOptions);
   body2(inCanvas2, bufferedResult.body, localOptions);
   hand2(inCanvas2, bufferedResult.hand, localOptions);
   gesture(inCanvas2, result.gesture, localOptions);
@@ -19941,7 +19951,7 @@ lBhEMohlFerLlBjEMohMVTEARDKCITsAk2AEgAAAkAAAAAAAAAAAAAAAAAAAAAAAASAAAAAAAAD/
 2Q==`;
 
 // package.json
-var version = "1.9.4";
+var version = "2.0.0";
 
 // src/human.ts
 var _numTensors, _analyzeMemoryLeaks, _checkSanity, _firstRun, _lastInputSum, _lastCacheDiff, _sanity, _checkBackend, _skipFrame, _warmupBitmap, _warmupCanvas, _warmupNode;
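Two small behavioral changes are visible in this bundle hunk: object detection now yields a null tensor when the module is disabled, with process3 guarding against it, and raw scores are truncated to two decimals. A stand-alone sketch of both patterns, with illustrative names rather than the bundle's internals:

// Guard-plus-truncation pattern, illustrated outside the bundle.
function truncScore(raw: number): number {
  return Math.trunc(100 * raw) / 100; // e.g. 0.8763 -> 0.87
}

function processDetections(res: number[][] | null): { score: number }[] {
  if (!res) return []; // model disabled or no tensor produced
  return res.map((det) => ({ score: truncScore(det[4]) })); // index 4 holds the raw confidence, as in the hunk above
}

console.log(processDetections(null));                   // []
console.log(processDetections([[0, 0, 0, 0, 0.8763]])); // [ { score: 0.87 } ]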
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,6 +1,6 @@
 {
   "name": "@vladmandic/human",
-  "version": "1.9.4",
+  "version": "2.0.0",
   "description": "Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition",
   "sideEffects": false,
   "main": "dist/human.node.js",
@@ -68,10 +68,10 @@
     "canvas": "^2.8.0",
     "chokidar": "^3.5.1",
     "dayjs": "^1.10.5",
-    "esbuild": "^0.12.4",
+    "esbuild": "^0.12.5",
     "eslint": "^7.27.0",
     "eslint-config-airbnb-base": "^14.2.1",
-    "eslint-plugin-import": "^2.23.3",
+    "eslint-plugin-import": "^2.23.4",
     "eslint-plugin-json": "^3.0.0",
     "eslint-plugin-node": "^11.1.0",
     "eslint-plugin-promise": "^5.1.0",
@@ -1,17 +1,17 @@
-2021-05-29 18:28:57 INFO:  @vladmandic/human version 1.9.4
-2021-05-29 18:28:57 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
-2021-05-29 18:28:57 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
-2021-05-29 18:28:57 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-29 18:28:57 STATE: Build for: node type: node: {"imports":39,"importBytes":443782,"outputBytes":396032,"outputFiles":"dist/human.node.js"}
-2021-05-29 18:28:57 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-29 18:28:57 STATE: Build for: nodeGPU type: node: {"imports":39,"importBytes":443790,"outputBytes":396036,"outputFiles":"dist/human.node-gpu.js"}
-2021-05-29 18:28:57 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-29 18:28:57 STATE: Build for: nodeWASM type: node: {"imports":39,"importBytes":443857,"outputBytes":396108,"outputFiles":"dist/human.node-wasm.js"}
-2021-05-29 18:28:57 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-29 18:28:57 STATE: Build for: browserNoBundle type: esm: {"imports":39,"importBytes":443884,"outputBytes":242161,"outputFiles":"dist/human.esm-nobundle.js"}
-2021-05-29 18:28:58 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2478,"outputBytes":1111414,"outputFiles":"dist/tfjs.esm.js"}
-2021-05-29 18:28:59 STATE: Build for: browserBundle type: iife: {"imports":39,"importBytes":1553904,"outputBytes":1349968,"outputFiles":"dist/human.js"}
-2021-05-29 18:28:59 STATE: Build for: browserBundle type: esm: {"imports":39,"importBytes":1553904,"outputBytes":1349960,"outputFiles":"dist/human.esm.js"}
-2021-05-29 18:28:59 INFO:  Generate types: ["src/human.ts"]
-2021-05-29 18:29:05 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
-2021-05-29 18:29:05 INFO:  Generate TypeDocs: ["src/human.ts"]
+2021-05-30 12:02:00 INFO:  @vladmandic/human version 2.0.0
+2021-05-30 12:02:00 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
+2021-05-30 12:02:00 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
+2021-05-30 12:02:00 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1292,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-30 12:02:00 STATE: Build for: node type: node: {"imports":39,"importBytes":444757,"outputBytes":396794,"outputFiles":"dist/human.node.js"}
+2021-05-30 12:02:00 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1300,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-30 12:02:00 STATE: Build for: nodeGPU type: node: {"imports":39,"importBytes":444765,"outputBytes":396798,"outputFiles":"dist/human.node-gpu.js"}
+2021-05-30 12:02:00 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1367,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-30 12:02:01 STATE: Build for: nodeWASM type: node: {"imports":39,"importBytes":444832,"outputBytes":396870,"outputFiles":"dist/human.node-wasm.js"}
+2021-05-30 12:02:01 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-30 12:02:01 STATE: Build for: browserNoBundle type: esm: {"imports":39,"importBytes":444859,"outputBytes":242556,"outputFiles":"dist/human.esm-nobundle.js"}
+2021-05-30 12:02:01 STATE: Build for: browserBundle type: tfjs: {"modules":1274,"moduleBytes":4114813,"imports":7,"importBytes":2478,"outputBytes":1111418,"outputFiles":"dist/tfjs.esm.js"}
+2021-05-30 12:02:02 STATE: Build for: browserBundle type: iife: {"imports":39,"importBytes":1554883,"outputBytes":1350365,"outputFiles":"dist/human.js"}
+2021-05-30 12:02:02 STATE: Build for: browserBundle type: esm: {"imports":39,"importBytes":1554883,"outputBytes":1350357,"outputFiles":"dist/human.esm.js"}
+2021-05-30 12:02:02 INFO:  Generate types: ["src/human.ts"]
+2021-05-30 12:02:07 INFO:  Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
+2021-05-30 12:02:07 INFO:  Generate TypeDocs: ["src/human.ts"]
@ -503,17 +503,18 @@ export async function person(inCanvas: HTMLCanvasElement, result: Array<Person>,
|
||||||
}
|
}
|
||||||
|
|
||||||
function calcBuffered(newResult, localOptions) {
|
function calcBuffered(newResult, localOptions) {
|
||||||
-  // if (newResult.timestamp !== bufferedResult?.timestamp) bufferedResult = JSON.parse(JSON.stringify(newResult)); // no need to force update
-  // each record is only updated using deep clone when number of detected record changes, otherwise it will converge by itself
+  // each record is only updated using deep copy when number of detected record changes, otherwise it will converge by itself
+  // otherwise bufferedResult is a shallow clone of result plus updated local calculated values
+  // thus mixing by-reference and by-value assignments to minimize memory operations
 
   // interpolate body results
-  if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) bufferedResult.body = JSON.parse(JSON.stringify(newResult.body));
+  if (!bufferedResult.body || (newResult.body.length !== bufferedResult.body.length)) bufferedResult.body = JSON.parse(JSON.stringify(newResult.body)); // deep clone once
-  for (let i = 0; i < newResult.body.length; i++) { // update body: box, boxRaw, keypoints
+  for (let i = 0; i < newResult.body.length; i++) {
-    bufferedResult.body[i].box = newResult.body[i].box
-      .map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + box) / localOptions.bufferedFactor) as [number, number, number, number];
+    const box = newResult.body[i].box // update box
+      .map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].box[j] + b) / localOptions.bufferedFactor) as [number, number, number, number];
-    bufferedResult.body[i].boxRaw = newResult.body[i].boxRaw
-      .map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + box) / localOptions.bufferedFactor) as [number, number, number, number];
+    const boxRaw = newResult.body[i].boxRaw // update boxRaw
+      .map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].boxRaw[j] + b) / localOptions.bufferedFactor) as [number, number, number, number];
-    bufferedResult.body[i].keypoints = newResult.body[i].keypoints
+    const keypoints = newResult.body[i].keypoints // update keypoints
       .map((keypoint, j) => ({
        score: keypoint.score,
        part: keypoint.part,
@@ -522,24 +523,37 @@ function calcBuffered(newResult, localOptions) {
          y: bufferedResult.body[i].keypoints[j] ? ((localOptions.bufferedFactor - 1) * bufferedResult.body[i].keypoints[j].position.y + keypoint.position.y) / localOptions.bufferedFactor : keypoint.position.y,
        },
      }));
+    bufferedResult.body[i] = { ...newResult.body[i], box, boxRaw, keypoints }; // shallow clone plus updated values
   }
 
   // interpolate hand results
-  if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand));
+  if (!bufferedResult.hand || (newResult.hand.length !== bufferedResult.hand.length)) bufferedResult.hand = JSON.parse(JSON.stringify(newResult.hand)); // deep clone once
-  for (let i = 0; i < newResult.hand.length; i++) { // update body: box, boxRaw, landmarks, annotations
+  for (let i = 0; i < newResult.hand.length; i++) {
-    bufferedResult.hand[i].box = newResult.hand[i].box
-      .map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + box) / localOptions.bufferedFactor);
+    const box = newResult.hand[i].box // update box
+      .map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].box[j] + b) / localOptions.bufferedFactor);
-    bufferedResult.hand[i].boxRaw = newResult.hand[i].boxRaw
-      .map((box, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + box) / localOptions.bufferedFactor);
+    const boxRaw = newResult.hand[i].boxRaw // update boxRaw
+      .map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].boxRaw[j] + b) / localOptions.bufferedFactor);
-    bufferedResult.hand[i].landmarks = newResult.hand[i].landmarks
+    const landmarks = newResult.hand[i].landmarks // update landmarks
       .map((landmark, j) => landmark
        .map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].landmarks[j][k] + coord) / localOptions.bufferedFactor));
-    const keys = Object.keys(newResult.hand[i].annotations);
+    const keys = Object.keys(newResult.hand[i].annotations); // update annotations
+    const annotations = [];
     for (const key of keys) {
-      bufferedResult.hand[i].annotations[key] = newResult.hand[i].annotations[key]
+      annotations[key] = newResult.hand[i].annotations[key]
        .map((val, j) => val
          .map((coord, k) => ((localOptions.bufferedFactor - 1) * bufferedResult.hand[i].annotations[key][j][k] + coord) / localOptions.bufferedFactor));
     }
+    bufferedResult.hand[i] = { ...newResult.hand[i], box, boxRaw, landmarks, annotations }; // shallow clone plus updated values
   }
 
+  // interpolate face results
+  if (!bufferedResult.face || (newResult.face.length !== bufferedResult.face.length)) bufferedResult.face = JSON.parse(JSON.stringify(newResult.face)); // deep clone once
+  for (let i = 0; i < newResult.face.length; i++) {
+    const box = newResult.face[i].box // update box
+      .map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].box[j] + b) / localOptions.bufferedFactor);
+    const boxRaw = newResult.face[i].boxRaw // update boxRaw
+      .map((b, j) => ((localOptions.bufferedFactor - 1) * bufferedResult.face[i].boxRaw[j] + b) / localOptions.bufferedFactor);
+    bufferedResult.face[i] = { ...newResult.face[i], box, boxRaw }; // shallow clone plus updated values
+  }
 
   // interpolate person results
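For reference, the per-value smoothing applied throughout calcBuffered above is a simple exponential moving average toward the newest detection; a minimal TypeScript sketch (the helper name and sample numbers are illustrative, not part of the library API):

```ts
// Each buffered value converges toward the newly detected value over roughly
// `bufferedFactor` frames; a higher factor gives smoother but laggier output.
function smooth(previous: number, current: number, bufferedFactor: number): number {
  return ((bufferedFactor - 1) * previous + current) / bufferedFactor;
}

// example: previous box edge at 100px, new detection at 120px, factor 3
console.log(smooth(100, 120, 3).toFixed(2)); // "106.67" -- moves 1/3 of the way each frame
```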
@@ -569,7 +583,7 @@ export async function all(inCanvas: HTMLCanvasElement, result: Result, drawOptio
   if (!(inCanvas instanceof HTMLCanvasElement)) return;
   if (localOptions.bufferedOutput) calcBuffered(result, localOptions); // do results interpolation
   else bufferedResult = result; // just use results as-is
-  face(inCanvas, result.face, localOptions); // face does have buffering
+  face(inCanvas, bufferedResult.face, localOptions); // face does have buffering
   body(inCanvas, bufferedResult.body, localOptions); // use interpolated results if available
   hand(inCanvas, bufferedResult.hand, localOptions); // use interpolated results if available
   // person(inCanvas, bufferedResult.persons, localOptions); // use interpolated results if available
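A hedged usage sketch of the buffered draw path above, assuming a constructed `human` instance and the option names visible in the code (`bufferedOutput`, `bufferedFactor`); the loop structure itself is illustrative, not prescribed by the library:

```ts
// Illustrative pattern: detect in the background, draw every animation frame;
// with bufferedOutput enabled, draw.all() interpolates between detections so
// overlays stay smooth even when detection runs at a lower rate.
let lastResult: any = null;

async function detectLoop(human: any, video: HTMLVideoElement) {
  lastResult = await human.detect(video);
  setTimeout(() => detectLoop(human, video), 0); // run as fast as detection allows
}

async function drawLoop(human: any, canvas: HTMLCanvasElement) {
  if (lastResult) await human.draw.all(canvas, lastResult, { bufferedOutput: true, bufferedFactor: 3 });
  requestAnimationFrame(() => drawLoop(human, canvas));
}
```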
@@ -24,18 +24,18 @@ export async function load(config) {
 }
 
 async function process(res, inputSize, outputShape, config) {
+  if (!res) return [];
   const results: Array<Item> = [];
   const detections = res.arraySync();
   const squeezeT = tf.squeeze(res);
   res.dispose();
   const arr = tf.split(squeezeT, 6, 1); // x1, y1, x2, y2, score, class
   squeezeT.dispose();
-  const stackT = tf.stack([arr[1], arr[0], arr[3], arr[2]], 1); // tf.nms expects y, x
+  const stackT = tf.stack([arr[1], arr[0], arr[3], arr[2]], 1); // reorder dims as tf.nms expects y, x
   const boxesT = stackT.squeeze();
   const scoresT = arr[4].squeeze();
   const classesT = arr[5].squeeze();
   arr.forEach((t) => t.dispose());
-  // @ts-ignore boxesT type is not correctly inferred
   const nmsT = await tf.image.nonMaxSuppressionAsync(boxesT, scoresT, config.object.maxDetected, config.object.iouThreshold, config.object.minConfidence);
   boxesT.dispose();
   scoresT.dispose();
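The x/y reorder above matters because tf.image.nonMaxSuppressionAsync takes boxes as [y1, x1, y2, x2]; a self-contained tfjs sketch of the same step, with made-up boxes and thresholds:

```ts
import * as tf from '@tensorflow/tfjs';

async function nmsExample() {
  // detector output in [x1, y1, x2, y2] order; values are illustrative
  const xyxy = tf.tensor2d([[10, 20, 110, 220], [12, 22, 112, 222], [300, 40, 380, 120]]);
  const scores = tf.tensor1d([0.9, 0.8, 0.7]);
  // swap columns so boxes are [y1, x1, y2, x2] as NMS expects
  const [x1, y1, x2, y2] = tf.split(xyxy, 4, 1);
  const yxyx = tf.concat([y1, x1, y2, x2], 1);
  const keep = await tf.image.nonMaxSuppressionAsync(yxyx as tf.Tensor2D, scores, 10, 0.45, 0.2);
  console.log(await keep.array()); // [0, 2]: the overlapping second box is suppressed
  tf.dispose([xyxy, scores, x1, y1, x2, y2, yxyx, keep]);
}

nmsExample();
```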
@@ -44,7 +44,7 @@ async function process(res, inputSize, outputShape, config) {
   nmsT.dispose();
   let i = 0;
   for (const id of nms) {
-    const score = detections[0][id][4];
+    const score = Math.trunc(100 * detections[0][id][4]) / 100;
     const classVal = detections[0][id][5];
     const label = labels[classVal].label;
     const boxRaw = [
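For context, each kept detection row is [x1, y1, x2, y2, score, class] and the score is now truncated to two decimals. A hedged sketch of turning one such row into a result item; the normalization to inputSize and scaling to outputShape are assumptions here, since those lines are folded out of the hunk above:

```ts
type Item = { id: number; score: number; class: number; label: string; box: number[]; boxRaw: number[] };

function toItem(row: number[], id: number, inputSize: number, outputShape: [number, number], labels: { label: string }[]): Item {
  const [x1, y1, x2, y2, rawScore, classVal] = row;
  const score = Math.trunc(100 * rawScore) / 100; // truncate to two decimals, as above
  const boxRaw = [x1 / inputSize, y1 / inputSize, (x2 - x1) / inputSize, (y2 - y1) / inputSize]; // normalized 0..1 (assumed)
  const box = [boxRaw[0] * outputShape[0], boxRaw[1] * outputShape[1], boxRaw[2] * outputShape[0], boxRaw[3] * outputShape[1]]; // scaled to output pixels (assumed)
  return { id, score, class: classVal, label: labels[classVal].label, box, boxRaw };
}
```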
@@ -64,18 +64,16 @@ async function process(res, inputSize, outputShape, config) {
   return results;
 }
 
-export async function predict(image, config): Promise<Item[]> {
+export async function predict(input, config): Promise<Item[]> {
   if ((skipped < config.object.skipFrames) && config.skipFrame && (last.length > 0)) {
     skipped++;
     return last;
   }
   skipped = 0;
   return new Promise(async (resolve) => {
-    const outputSize = [image.shape[2], image.shape[1]];
+    const outputSize = [input.shape[2], input.shape[1]];
-    const resize = tf.image.resizeBilinear(image, [model.inputSize, model.inputSize], false);
+    const resize = tf.image.resizeBilinear(input, [model.inputSize, model.inputSize]);
-    let objectT;
-    if (config.object.enabled) objectT = model.execute(resize, 'tower_0/detections');
+    const objectT = config.object.enabled ? model.execute(resize, ['tower_0/detections']) : null;
     resize.dispose();
 
     const obj = await process(objectT, model.inputSize, outputSize, config);
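The guard at the top of predict() above is a simple frame-skipping cache: the model is only re-executed every `skipFrames` calls, and the previous detections are returned in between. A generic sketch of the same pattern (names are illustrative, not the module's actual internals beyond what is shown above):

```ts
// Re-run an expensive detector only every `skipFrames` calls; serve cached results otherwise.
let last: unknown[] = [];
let skipped = Number.MAX_SAFE_INTEGER; // force a real run on the first call

async function cachedPredict<T>(run: () => Promise<T[]>, skipFrames: number, skipFrame: boolean): Promise<T[]> {
  if (skipFrame && skipped < skipFrames && last.length > 0) {
    skipped++;
    return last as T[]; // reuse previous detections
  }
  skipped = 0;
  last = await run(); // run the actual model
  return last as T[];
}
```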
test/test.log
@@ -1,120 +1,120 @@
-2021-05-28 15:52:57 INFO: @vladmandic/human version 1.9.4
+2021-05-29 18:30:17 INFO: @vladmandic/human version 1.9.4
-2021-05-28 15:52:57 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
+2021-05-29 18:30:17 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
-2021-05-28 15:52:57 INFO: tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
+2021-05-29 18:30:17 INFO: tests: ["test-node.js","test-node-gpu.js","test-node-wasm.js"]
-2021-05-28 15:52:57 INFO: test-node.js start
+2021-05-29 18:30:17 INFO: test-node.js start
-2021-05-28 15:52:59 STATE: test-node.js passed: create human
+2021-05-29 18:30:19 STATE: test-node.js passed: create human
-2021-05-28 15:52:59 INFO: test-node.js human version: 1.9.4
+2021-05-29 18:30:19 INFO: test-node.js human version: 1.9.4
-2021-05-28 15:52:59 INFO: test-node.js platform: linux x64 agent: NodeJS v16.0.0
+2021-05-29 18:30:19 INFO: test-node.js platform: linux x64 agent: NodeJS v16.0.0
-2021-05-28 15:52:59 INFO: test-node.js tfjs version: 3.6.0
+2021-05-29 18:30:19 INFO: test-node.js tfjs version: 3.6.0
-2021-05-28 15:52:59 STATE: test-node.js passed: set backend: tensorflow
+2021-05-29 18:30:19 STATE: test-node.js passed: set backend: tensorflow
-2021-05-28 15:52:59 STATE: test-node.js passed: load models
+2021-05-29 18:30:19 STATE: test-node.js passed: load models
-2021-05-28 15:52:59 STATE: test-node.js result: defined models: 13 loaded models: 6
+2021-05-29 18:30:19 STATE: test-node.js result: defined models: 14 loaded models: 6
-2021-05-28 15:52:59 STATE: test-node.js passed: warmup: none default
+2021-05-29 18:30:19 STATE: test-node.js passed: warmup: none default
-2021-05-28 15:53:01 STATE: test-node.js passed: warmup: face default
+2021-05-29 18:30:21 STATE: test-node.js passed: warmup: face default
-2021-05-28 15:53:01 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
+2021-05-29 18:30:21 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.73,"keypoints":5}
-2021-05-28 15:53:01 DATA: test-node.js result: performance: load: 397 total: 1679
+2021-05-29 18:30:21 DATA: test-node.js result: performance: load: 345 total: 1666
-2021-05-28 15:53:02 STATE: test-node.js passed: warmup: body default
+2021-05-29 18:30:22 STATE: test-node.js passed: warmup: body default
-2021-05-28 15:53:02 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
+2021-05-29 18:30:22 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.93,"keypoints":17}
-2021-05-28 15:53:02 DATA: test-node.js result: performance: load: 397 total: 1462
+2021-05-29 18:30:22 DATA: test-node.js result: performance: load: 345 total: 1523
-2021-05-28 15:53:02 INFO: test-node.js test body variants
+2021-05-29 18:30:22 INFO: test-node.js test body variants
-2021-05-28 15:53:03 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
+2021-05-29 18:30:23 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
-2021-05-28 15:53:04 STATE: test-node.js passed: detect: assets/human-sample-body.jpg posenet
+2021-05-29 18:30:24 STATE: test-node.js passed: detect: assets/human-sample-body.jpg posenet
-2021-05-28 15:53:04 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
+2021-05-29 18:30:24 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.96,"keypoints":16}
-2021-05-28 15:53:04 DATA: test-node.js result: performance: load: 397 total: 957
+2021-05-29 18:30:24 DATA: test-node.js result: performance: load: 345 total: 987
-2021-05-28 15:53:05 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
+2021-05-29 18:30:25 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
-2021-05-28 15:53:06 STATE: test-node.js passed: detect: assets/human-sample-body.jpg blazepose
+2021-05-29 18:30:26 STATE: test-node.js passed: detect: assets/human-sample-body.jpg movenet
-2021-05-28 15:53:06 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:26 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.93,"keypoints":17}
-2021-05-28 15:53:06 DATA: test-node.js result: performance: load: 397 total: 523
+2021-05-29 18:30:26 DATA: test-node.js result: performance: load: 345 total: 318
-2021-05-28 15:53:07 STATE: test-node.js passed: detect: random default
+2021-05-29 18:30:27 STATE: test-node.js passed: detect: random default
-2021-05-28 15:53:07 DATA: test-node.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 0 person: 0 {} {} {"score":1,"keypoints":39}
+2021-05-29 18:30:27 DATA: test-node.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
-2021-05-28 15:53:07 DATA: test-node.js result: performance: load: 397 total: 970
+2021-05-29 18:30:27 DATA: test-node.js result: performance: load: 345 total: 788
-2021-05-28 15:53:07 INFO: test-node.js test: first instance
+2021-05-29 18:30:27 INFO: test-node.js test: first instance
-2021-05-28 15:53:07 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
+2021-05-29 18:30:27 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
-2021-05-28 15:53:09 STATE: test-node.js passed: detect: assets/sample-me.jpg default
+2021-05-29 18:30:28 STATE: test-node.js passed: detect: assets/sample-me.jpg default
-2021-05-28 15:53:09 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:28 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":0.67,"keypoints":7}
-2021-05-28 15:53:09 DATA: test-node.js result: performance: load: 397 total: 1591
+2021-05-29 18:30:28 DATA: test-node.js result: performance: load: 345 total: 1516
-2021-05-28 15:53:09 INFO: test-node.js test: second instance
+2021-05-29 18:30:28 INFO: test-node.js test: second instance
-2021-05-28 15:53:09 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
+2021-05-29 18:30:29 STATE: test-node.js passed: load image: assets/sample-me.jpg [1,700,700,3]
-2021-05-28 15:53:11 STATE: test-node.js passed: detect: assets/sample-me.jpg default
+2021-05-29 18:30:30 STATE: test-node.js passed: detect: assets/sample-me.jpg default
-2021-05-28 15:53:11 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:30 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":0.67,"keypoints":7}
-2021-05-28 15:53:11 DATA: test-node.js result: performance: load: 4 total: 1475
+2021-05-29 18:30:30 DATA: test-node.js result: performance: load: 7 total: 1435
-2021-05-28 15:53:11 INFO: test-node.js test: concurrent
+2021-05-29 18:30:30 INFO: test-node.js test: concurrent
-2021-05-28 15:53:11 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
+2021-05-29 18:30:30 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
-2021-05-28 15:53:11 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
+2021-05-29 18:30:30 STATE: test-node.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
-2021-05-28 15:53:12 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
+2021-05-29 18:30:31 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
-2021-05-28 15:53:13 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
+2021-05-29 18:30:32 STATE: test-node.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
-2021-05-28 15:53:18 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
+2021-05-29 18:30:38 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
-2021-05-28 15:53:18 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:38 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":0.73,"keypoints":17}
-2021-05-28 15:53:18 DATA: test-node.js result: performance: load: 397 total: 5615
+2021-05-29 18:30:38 DATA: test-node.js result: performance: load: 345 total: 5657
-2021-05-28 15:53:18 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
+2021-05-29 18:30:38 STATE: test-node.js passed: detect: assets/human-sample-face.jpg default
-2021-05-28 15:53:18 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:38 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":0.73,"keypoints":17}
-2021-05-28 15:53:18 DATA: test-node.js result: performance: load: 4 total: 5615
+2021-05-29 18:30:38 DATA: test-node.js result: performance: load: 7 total: 5657
-2021-05-28 15:53:18 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
+2021-05-29 18:30:38 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
-2021-05-28 15:53:18 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:38 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":0.93,"keypoints":17}
-2021-05-28 15:53:18 DATA: test-node.js result: performance: load: 397 total: 5615
+2021-05-29 18:30:38 DATA: test-node.js result: performance: load: 345 total: 5657
-2021-05-28 15:53:18 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
+2021-05-29 18:30:38 STATE: test-node.js passed: detect: assets/human-sample-body.jpg default
-2021-05-28 15:53:18 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:38 DATA: test-node.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":0.93,"keypoints":17}
-2021-05-28 15:53:18 DATA: test-node.js result: performance: load: 4 total: 5615
+2021-05-29 18:30:38 DATA: test-node.js result: performance: load: 7 total: 5657
-2021-05-28 15:53:18 INFO: test-node.js test complete: 19711 ms
+2021-05-29 18:30:38 INFO: test-node.js test complete: 19401 ms
-2021-05-28 15:53:18 INFO: test-node-gpu.js start
+2021-05-29 18:30:38 INFO: test-node-gpu.js start
-2021-05-28 15:53:20 WARN: test-node-gpu.js stderr: 2021-05-28 15:53:20.148509: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
+2021-05-29 18:30:39 WARN: test-node-gpu.js stderr: 2021-05-29 18:30:39.644962: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
-2021-05-28 15:53:20 WARN: test-node-gpu.js stderr: 2021-05-28 15:53:20.400714: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
+2021-05-29 18:30:39 WARN: test-node-gpu.js stderr: 2021-05-29 18:30:39.887957: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
-2021-05-28 15:53:20 WARN: test-node-gpu.js stderr: 2021-05-28 15:53:20.400783: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
+2021-05-29 18:30:39 WARN: test-node-gpu.js stderr: 2021-05-29 18:30:39.888022: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (wyse): /proc/driver/nvidia/version does not exist
-2021-05-28 15:53:20 STATE: test-node-gpu.js passed: create human
+2021-05-29 18:30:39 STATE: test-node-gpu.js passed: create human
-2021-05-28 15:53:20 INFO: test-node-gpu.js human version: 1.9.4
+2021-05-29 18:30:39 INFO: test-node-gpu.js human version: 1.9.4
-2021-05-28 15:53:20 INFO: test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
+2021-05-29 18:30:39 INFO: test-node-gpu.js platform: linux x64 agent: NodeJS v16.0.0
-2021-05-28 15:53:20 INFO: test-node-gpu.js tfjs version: 3.6.0
+2021-05-29 18:30:39 INFO: test-node-gpu.js tfjs version: 3.6.0
-2021-05-28 15:53:20 STATE: test-node-gpu.js passed: set backend: tensorflow
+2021-05-29 18:30:40 STATE: test-node-gpu.js passed: set backend: tensorflow
-2021-05-28 15:53:20 STATE: test-node-gpu.js passed: load models
+2021-05-29 18:30:40 STATE: test-node-gpu.js passed: load models
-2021-05-28 15:53:20 STATE: test-node-gpu.js result: defined models: 13 loaded models: 6
+2021-05-29 18:30:40 STATE: test-node-gpu.js result: defined models: 14 loaded models: 6
-2021-05-28 15:53:20 STATE: test-node-gpu.js passed: warmup: none default
+2021-05-29 18:30:40 STATE: test-node-gpu.js passed: warmup: none default
-2021-05-28 15:53:22 STATE: test-node-gpu.js passed: warmup: face default
+2021-05-29 18:30:42 STATE: test-node-gpu.js passed: warmup: face default
-2021-05-28 15:53:22 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.96,"keypoints":5}
+2021-05-29 18:30:42 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8246909379959106,"class":"person"} {"score":0.73,"keypoints":5}
-2021-05-28 15:53:22 DATA: test-node-gpu.js result: performance: load: 310 total: 1956
+2021-05-29 18:30:42 DATA: test-node-gpu.js result: performance: load: 348 total: 1692
-2021-05-28 15:53:24 STATE: test-node-gpu.js passed: warmup: body default
+2021-05-29 18:30:43 STATE: test-node-gpu.js passed: warmup: body default
-2021-05-28 15:53:24 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.92,"keypoints":17}
+2021-05-29 18:30:43 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":29.5,"gender":"female"} {"score":0.7261000871658325,"class":"person"} {"score":0.93,"keypoints":17}
-2021-05-28 15:53:24 DATA: test-node-gpu.js result: performance: load: 310 total: 1601
+2021-05-29 18:30:43 DATA: test-node-gpu.js result: performance: load: 348 total: 1521
-2021-05-28 15:53:24 INFO: test-node-gpu.js test body variants
+2021-05-29 18:30:43 INFO: test-node-gpu.js test body variants
-2021-05-28 15:53:25 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
+2021-05-29 18:30:44 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
-2021-05-28 15:53:26 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet
+2021-05-29 18:30:45 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg posenet
-2021-05-28 15:53:26 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.91,"keypoints":17}
+2021-05-29 18:30:45 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.96,"keypoints":16}
-2021-05-28 15:53:26 DATA: test-node-gpu.js result: performance: load: 310 total: 1047
+2021-05-29 18:30:45 DATA: test-node-gpu.js result: performance: load: 348 total: 1028
-2021-05-28 15:53:27 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
+2021-05-29 18:30:46 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
-2021-05-28 15:53:27 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg blazepose
+2021-05-29 18:30:46 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg movenet
-2021-05-28 15:53:27 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:46 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1} {"score":0.7261000871658325,"class":"person"} {"score":0.93,"keypoints":17}
-2021-05-28 15:53:27 DATA: test-node-gpu.js result: performance: load: 310 total: 436
+2021-05-29 18:30:46 DATA: test-node-gpu.js result: performance: load: 348 total: 327
-2021-05-28 15:53:28 STATE: test-node-gpu.js passed: detect: random default
+2021-05-29 18:30:47 STATE: test-node-gpu.js passed: detect: random default
-2021-05-28 15:53:28 DATA: test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:47 DATA: test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 0 object: 0 person: 0 {} {} {"score":0,"keypoints":0}
-2021-05-28 15:53:28 DATA: test-node-gpu.js result: performance: load: 310 total: 209
+2021-05-29 18:30:47 DATA: test-node-gpu.js result: performance: load: 348 total: 757
-2021-05-28 15:53:28 INFO: test-node-gpu.js test: first instance
+2021-05-29 18:30:47 INFO: test-node-gpu.js test: first instance
-2021-05-28 15:53:28 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
+2021-05-29 18:30:48 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
-2021-05-28 15:53:28 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
+2021-05-29 18:30:49 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
-2021-05-28 15:53:28 DATA: test-node-gpu.js result: face: 0 body: 1 hand: 0 gesture: 1 object: 1 person: 0 {} {"score":0.7261000871658325,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:49 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":0.67,"keypoints":7}
-2021-05-28 15:53:28 DATA: test-node-gpu.js result: performance: load: 310 total: 184
+2021-05-29 18:30:49 DATA: test-node-gpu.js result: performance: load: 348 total: 1413
-2021-05-28 15:53:28 INFO: test-node-gpu.js test: second instance
+2021-05-29 18:30:49 INFO: test-node-gpu.js test: second instance
-2021-05-28 15:53:29 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
+2021-05-29 18:30:50 STATE: test-node-gpu.js passed: load image: assets/sample-me.jpg [1,700,700,3]
-2021-05-28 15:53:30 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
+2021-05-29 18:30:51 STATE: test-node-gpu.js passed: detect: assets/sample-me.jpg default
-2021-05-28 15:53:30 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:51 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 2 person: 1 {"confidence":1,"age":39.2,"gender":"male"} {"score":0.6820425987243652,"class":"person"} {"score":0.67,"keypoints":7}
-2021-05-28 15:53:30 DATA: test-node-gpu.js result: performance: load: 2 total: 1487
+2021-05-29 18:30:51 DATA: test-node-gpu.js result: performance: load: 2 total: 1429
-2021-05-28 15:53:30 INFO: test-node-gpu.js test: concurrent
+2021-05-29 18:30:51 INFO: test-node-gpu.js test: concurrent
-2021-05-28 15:53:30 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
+2021-05-29 18:30:51 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
-2021-05-28 15:53:30 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
+2021-05-29 18:30:51 STATE: test-node-gpu.js passed: load image: assets/human-sample-face.jpg [1,256,256,3]
-2021-05-28 15:53:31 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
+2021-05-29 18:30:52 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
-2021-05-28 15:53:32 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
+2021-05-29 18:30:53 STATE: test-node-gpu.js passed: load image: assets/human-sample-body.jpg [1,1200,1200,3]
-2021-05-28 15:53:38 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
+2021-05-29 18:30:59 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
-2021-05-28 15:53:38 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:59 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":0.73,"keypoints":17}
-2021-05-28 15:53:38 DATA: test-node-gpu.js result: performance: load: 310 total: 5528
+2021-05-29 18:30:59 DATA: test-node-gpu.js result: performance: load: 348 total: 5404
-2021-05-28 15:53:38 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
+2021-05-29 18:30:59 STATE: test-node-gpu.js passed: detect: assets/human-sample-face.jpg default
-2021-05-28 15:53:38 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:59 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"confidence":1,"age":23.6,"gender":"female"} {"score":0.8257162570953369,"class":"person"} {"score":0.73,"keypoints":17}
-2021-05-28 15:53:38 DATA: test-node-gpu.js result: performance: load: 2 total: 5528
+2021-05-29 18:30:59 DATA: test-node-gpu.js result: performance: load: 2 total: 5404
-2021-05-28 15:53:38 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
+2021-05-29 18:30:59 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
-2021-05-28 15:53:38 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:59 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":0.93,"keypoints":17}
-2021-05-28 15:53:38 DATA: test-node-gpu.js result: performance: load: 310 total: 5528
+2021-05-29 18:30:59 DATA: test-node-gpu.js result: performance: load: 348 total: 5404
-2021-05-28 15:53:38 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
+2021-05-29 18:30:59 STATE: test-node-gpu.js passed: detect: assets/human-sample-body.jpg default
-2021-05-28 15:53:38 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":1,"keypoints":39}
+2021-05-29 18:30:59 DATA: test-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 3 object: 1 person: 1 {"confidence":1,"age":28.5,"gender":"female"} {"score":0.7273815870285034,"class":"person"} {"score":0.93,"keypoints":17}
-2021-05-28 15:53:38 DATA: test-node-gpu.js result: performance: load: 2 total: 5528
+2021-05-29 18:30:59 DATA: test-node-gpu.js result: performance: load: 2 total: 5404
-2021-05-28 15:53:38 INFO: test-node-gpu.js test complete: 17887 ms
+2021-05-29 18:30:59 INFO: test-node-gpu.js test complete: 19062 ms
-2021-05-28 15:53:38 INFO: test-node-wasm.js start
+2021-05-29 18:30:59 INFO: test-node-wasm.js start
-2021-05-28 15:53:38 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
+2021-05-29 18:30:59 ERROR: test-node-wasm.js failed: model server: request to http://localhost:10030/models/ failed, reason: connect ECONNREFUSED 127.0.0.1:10030
-2021-05-28 15:53:38 ERROR: test-node-wasm.js aborting test
+2021-05-29 18:30:59 ERROR: test-node-wasm.js aborting test
-2021-05-28 15:53:38 INFO: status: {"passed":46,"failed":1}
+2021-05-29 18:30:59 INFO: status: {"passed":46,"failed":1}
@@ -3,4 +3,4 @@
 */
 import { Item } from '../result';
 export declare function load(config: any): Promise<any>;
-export declare function predict(image: any, config: any): Promise<Item[]>;
+export declare function predict(input: any, config: any): Promise<Item[]>;

wiki
@@ -1 +1 @@
-Subproject commit 317a8fc76cd933cc38f59948ffade324fc8f1df2
+Subproject commit 78e6de4516ab49f47a906ec7778073b2dbbfed3f