added node build and demo

pull/293/head
Vladimir Mandic 2020-10-13 20:52:30 -04:00
parent b19e6372c8
commit 11137f4523
7 changed files with 192 additions and 75 deletions

View File

@@ -5,6 +5,8 @@
**Package**: <https://www.npmjs.com/package/@vladmandic/human>
**Live Demo**: <https://vladmandic.github.io/human/demo/demo-esm.html>
+Compatible with **Browser, WebWorker and NodeJS** execution!
*Suggestions are welcome!*
<hr>
@@ -88,13 +90,15 @@ You also need to install and include `tfjs` in your project
Install with:
```shell
-npm install @tensorflow/tfjs @vladmandic/human
+npm install @tensorflow/tfjs-node @vladmandic/human
```
And then use with:
```js
-import * as tf from '@tensorflow/tfjs';
-import human from '@vladmandic/Human';
+const tf = require('@tensorflow/tfjs-node');
+const human = require('@vladmandic/human');
```
+*See limitations for NodeJS usage under `demo`*
### Weights
@@ -108,10 +112,20 @@ If your application resides in a different folder, modify `modelPath` property in
Demos are included in `/demo`:
-- `demo-esm`: Demo using ESM module
-- `demo-iife`: Demo using IIFE module
-Both demos are identical, they just illustrate different ways to load `Human` library
+Browser:
+- `demo-esm`: Demo using Browser with ESM module
+- `demo-iife`: Demo using Browser with IIFE module
+- `demo-webworker`: Demo using Browser with ESM module and Web Workers
+*All three browser demos are identical; they just illustrate different ways to load and work with the `Human` library*
+NodeJS:
+- `demo-node`: Demo using NodeJS with CJS module
+This is a very simple demo: although the `Human` library is compatible with NodeJS execution and can load images and models from the local filesystem, the `tfjs-node` backend does not implement some functions required to run certain models.
+Currently only body pose detection works; face and hand models are not supported.
+See `tfjs-node` issue <https://github.com/tensorflow/tfjs/issues/4066> for details; a sample invocation is shown below.
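
For reference, a minimal sketch of how the NodeJS demo is invoked (the image file names are placeholders; `demo-node.js` expects an input and an output image path):

```shell
node demo/demo-node.js input.jpg output.jpg
```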
<hr>
@@ -222,31 +236,31 @@ result = {
face: // <array of detected objects>
[
  {
-    confidence: // <number>
-    box: // <array [x, y, width, height]>
-    mesh: // <array of 3D points [x, y, z]> (468 base points & 10 iris points)
-    annotations: // <list of object { landmark: array of points }> (32 base annotated landmarks & 2 iris annotations)
-    iris: // <number> (relative distance of iris to camera, multiple by focal lenght to get actual distance)
-    age: // <number> (estimated age)
-    gender: // <string> (male or female)
+    confidence, // <number>
+    box, // <array [x, y, width, height]>
+    mesh, // <array of 3D points [x, y, z]> (468 base points & 10 iris points)
+    annotations, // <list of object { landmark: array of points }> (32 base annotated landmarks & 2 iris annotations)
+    iris, // <number> (relative distance of iris to camera, multiply by focal length to get actual distance)
+    age, // <number> (estimated age)
+    gender, // <string> (male or female)
  }
],
body: // <array of detected objects>
[
  {
-    score: // <number>,
-    keypoints: // <array of 2D landmarks [ score, landmark, position [x, y] ]> (17 annotated landmarks)
+    score, // <number>
+    keypoints, // <array of 2D landmarks [ score, landmark, position [x, y] ]> (17 annotated landmarks)
  }
],
hand: // <array of detected objects>
[
  {
-    confidence: // <number>,
-    box: // <array [x, y, width, height]>,
-    landmarks: // <array of 3D points [x, y,z]> (21 points)
-    annotations: // <array of 3D landmarks [ landmark: <array of points> ]> (5 annotated landmakrs)
+    confidence, // <number>
+    box, // <array [x, y, width, height]>
+    landmarks, // <array of 3D points [x, y, z]> (21 points)
+    annotations, // <array of 3D landmarks [ landmark: <array of points> ]> (5 annotated landmarks)
  }
-]
+],
}
```
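
A rough sketch of how this result structure can be consumed once detection has run (the function name is a placeholder):

```js
// minimal sketch: print a summary of a detection result returned by human.detect()
function logResult(result) {
  for (const face of result.face) {
    console.log(`face: confidence=${face.confidence} age=${face.age} gender=${face.gender}`);
  }
  for (const body of result.body) {
    console.log(`body: score=${body.score} keypoints=${body.keypoints.length}`);
  }
  for (const hand of result.hand) {
    console.log(`hand: confidence=${hand.confidence} landmarks=${hand.landmarks.length}`);
  }
}
```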
@@ -286,4 +300,6 @@ Library can also be used on mobile devices
## Todo
- Improve detection of smaller faces
+- Tweak default parameters
- Verify age/gender models
+- Make it work with multiple hands

View File

@@ -215,9 +215,11 @@ function setupGUI() {
settings = QuickSettings.create(10, 10, 'Settings', document.getElementById('main'));
settings.addRange('FPS', 0, 100, 0, 1);
settings.addBoolean('Pause', false, (val) => {
-  if (val) document.getElementById('video').pause();
-  else document.getElementById('video').play();
-  runHumanDetect();
+  const video = document.getElementById('video');
+  const canvas = document.getElementById('canvas');
+  if (val) video.pause();
+  else video.play();
+  runHumanDetect(video, canvas);
});
settings.addHTML('line1', '<hr>'); settings.hideTitle('line1');
settings.addBoolean('Draw Boxes', false);
@@ -283,10 +285,10 @@ async function setupCamera() {
video.srcObject = stream;
return new Promise((resolve) => {
  video.onloadedmetadata = () => {
-    resolve(video);
    video.width = video.videoWidth;
    video.height = video.videoHeight;
    video.play();
+    resolve(video);
  };
});
}

demo/demo-node.js Normal file
View File

@@ -0,0 +1,68 @@
const fs = require('fs');
const process = require('process');
const console = require('console');
const tf = require('@tensorflow/tfjs-node');
const human = require('..'); // this would be '@vladmandic/human'
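// console logger configured for deep, colorized inspection of detection result objects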
const logger = new console.Console({
stdout: process.stdout,
stderr: process.stderr,
ignoreErrors: true,
groupIndentation: 2,
inspectOptions: {
showHidden: true,
depth: 5,
colors: true,
showProxy: true,
maxArrayLength: 1024,
maxStringLength: 10240,
breakLength: 200,
compact: 64,
sorted: false,
getters: true,
},
});
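// detection configuration: modelPath entries use the file:// protocol so tfjs-node loads weights
// from the local filesystem; face and hand are disabled as they are not supported under tfjs-node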
const config = {
face: {
enabled: false,
detector: { modelPath: 'file://models/blazeface/model.json', inputSize: 128, maxFaces: 10, skipFrames: 5, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
mesh: { enabled: true, modelPath: 'file://models/facemesh/model.json', inputSize: 192 },
iris: { enabled: true, modelPath: 'file://models/iris/model.json', inputSize: 192 },
age: { enabled: true, modelPath: 'file://models/ssrnet-age/imdb/model.json', inputSize: 64, skipFrames: 5 },
gender: { enabled: true, modelPath: 'file://models/ssrnet-gender/imdb/model.json' },
},
body: { enabled: true, modelPath: 'file://models/posenet/model.json', inputResolution: 257, outputStride: 16, maxDetections: 5, scoreThreshold: 0.75, nmsRadius: 20 },
hand: {
enabled: false,
inputSize: 256,
skipFrames: 5,
minConfidence: 0.8,
iouThreshold: 0.3,
scoreThreshold: 0.75,
detector: { anchors: 'file://models/handdetect/anchors.json', modelPath: 'file://models/handdetect/model.json' },
skeleton: { modelPath: 'file://models/handskeleton/model.json' },
},
};
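// read the input image from disk, decode it into a tensor and run detection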
async function detect(input, output) {
await tf.setBackend('tensorflow');
await tf.ready();
logger.info('TFJS Flags:', tf.env().features);
logger.log('Loading:', input);
const buffer = fs.readFileSync(input);
const image = tf.node.decodeImage(buffer);
logger.log('Processing:', image.shape);
const result = await human.detect(image, config);
logger.log(result);
  // TODO: draw detected data and save the processed image (not implemented in this demo yet)
logger.log('Saving:', output);
}
async function main() {
if (process.argv.length !== 4) logger.error('Parameters: <input image> <output image>');
else if (!fs.existsSync(process.argv[2])) logger.error(`File not found: ${process.argv[2]}`);
else detect(process.argv[2], process.argv[3]);
}
main();

View File

@@ -0,0 +1,22 @@
import human from '../dist/human.esm.js';
onmessage = async (msg) => {
const result = await human.detect(msg.data.image, msg.data.config);
postMessage(result);
};
/*
  web workers are finicky:
  - an HTMLImage or HTMLVideo cannot be passed to a web worker, so a canvas must be passed instead
  - a canvas can call transferControlToOffscreen() to become an OffscreenCanvas that can be passed to a worker,
    but a canvas that already has a rendering context (i.e. getContext() has been called on it) cannot be
    transferred; that means if we pass the main canvas used to render results, all operations on it must happen
    inside the web worker and the main thread cannot touch it at all. doable, but how do we paint a video frame
    on it before passing it?
  - alternatively, we can create a new OffscreenCanvas, draw the video frame on it, pass its imageData to the
    worker and return results, but the overhead of creating it makes this slower than executing in the main thread
*/
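
A minimal sketch of the matching main-thread side, assuming this worker is saved as `demo-webworker-worker.js` (the file name and the use of ImageData are assumptions; the worker above expects `msg.data.image` and `msg.data.config`):

```js
// create the worker as a module worker since the worker script uses an ESM import
const worker = new Worker('demo-webworker-worker.js', { type: 'module' });
worker.onmessage = (msg) => console.log('detection result:', msg.data);

function detectFrame(video, config) {
  // draw the current video frame to a scratch canvas and pass its pixel data to the worker
  const canvas = document.createElement('canvas');
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  const ctx = canvas.getContext('2d');
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
  const image = ctx.getImageData(0, 0, canvas.width, canvas.height);
  worker.postMessage({ image, config });
}
```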

View File

@@ -3,7 +3,7 @@
"version": "0.2.8", "version": "0.2.8",
"description": "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction", "description": "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
"sideEffects": false, "sideEffects": false,
"main": "src/index.js", "main": "dist/human.node.js",
"module": "dist/human.esm.js", "module": "dist/human.esm.js",
"browser": "dist/human.js", "browser": "dist/human.js",
"author": "Vladimir Mandic <mandic00@live.com>", "author": "Vladimir Mandic <mandic00@live.com>",
@@ -20,7 +20,8 @@
"url": "git+https://github.com/vladmandic/human.git" "url": "git+https://github.com/vladmandic/human.git"
}, },
"dependencies": { "dependencies": {
"@tensorflow/tfjs": "^2.6.0" "@tensorflow/tfjs": "^2.6.0",
"@tensorflow/tfjs-node": "^2.6.0"
}, },
"devDependencies": { "devDependencies": {
"esbuild": "^0.7.13", "esbuild": "^0.7.13",
@@ -33,10 +34,12 @@
"rimraf": "^3.0.2" "rimraf": "^3.0.2"
}, },
"scripts": { "scripts": {
"lint": "eslint src/*", "start": "node --trace-warnings --trace-uncaught --no-deprecation demo/demo-node.js",
"build": "rimraf dist/ && npm run build-esm && npm run build-iife", "lint": "eslint src/*.js demo/*.js",
"build-esm": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --outfile=dist/human.esm.js src/index.js", "build": "rimraf dist/ && npm run build-esm && npm run build-iife && npm run build-node",
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --global-name=human --outfile=dist/human.js src/index.js" "build-esm": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --outfile=dist/human.esm.js src/index.js",
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --outfile=dist/human.js src/index.js",
"build-node": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --outfile=dist/human.node.js src/index.js"
}, },
"keywords": [ "keywords": [
"tensorflowjs", "tensorflowjs",

View File

@@ -17,9 +17,13 @@ async function loadHandPoseModel(url) {
// of bounding boxes, each of which is assigned a score during prediction. The
// anchors define the coordinates of these boxes.
async function loadAnchors(url) {
-  return tf.util
-    .fetch(url)
-    .then((d) => d.json());
+  if (tf.env().features.IS_NODE) {
+    // eslint-disable-next-line global-require
+    const fs = require('fs');
+    // readFileSync is synchronous, so no await is needed
+    const data = fs.readFileSync(url.replace('file://', ''));
+    return JSON.parse(data);
+  }
+  return tf.util.fetch(url).then((d) => d.json());
}
/**

View File

@@ -31,14 +31,16 @@ function mergeDeep(...objects) {
}
async function detect(input, userConfig) {
+  // eslint-disable-next-line no-async-promise-executor
+  return new Promise(async (resolve) => {
  const config = mergeDeep(defaults, userConfig);
  // load models if enabled
-  if (config.face.age.enabled) await ssrnet.loadAge(config);
-  if (config.face.gender.enabled) await ssrnet.loadGender(config);
  if (config.body.enabled && !models.posenet) models.posenet = await posenet.load(config.body);
  if (config.hand.enabled && !models.handpose) models.handpose = await handpose.load(config.hand);
  if (config.face.enabled && !models.facemesh) models.facemesh = await facemesh.load(config.face);
+  if (config.face.age.enabled) await ssrnet.loadAge(config);
+  if (config.face.gender.enabled) await ssrnet.loadGender(config);
  tf.engine().startScope();
@@ -75,9 +77,9 @@ async function detect(input, userConfig) {
}
tf.engine().endScope();
// combine results
-  return { face: faceRes, body: poseRes, hand: handRes };
+  resolve({ face: faceRes, body: poseRes, hand: handRes });
+  });
}
exports.detect = detect;
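
Since `detect` resolves a Promise either way, callers can `await` it or chain `.then()`; a minimal usage sketch (the partial config shown is an assumption, merged over defaults by `mergeDeep`):

```js
const human = require('@vladmandic/human');

async function run(input) {
  // input can be a tensor or, in the browser, an image/video/canvas element
  const result = await human.detect(input, { body: { enabled: true } });
  console.log(result.face, result.body, result.hand);
}
```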