autodetect skipFrames

pull/50/head
Vladimir Mandic 2020-10-18 08:07:45 -04:00
parent d29b16a8f4
commit 5e90ab0d51
25 changed files with 324 additions and 303 deletions

View File

@ -12,7 +12,7 @@
Compatible with Browser, WebWorker and NodeJS execution!
(and maybe with React-Native as it doesn't use any DOM objects)
*This is a pre-release project, see [issues](https://github.com/vladmandic/human/issues) for a list of known limitations*
*This is a pre-release project, see [issues](https://github.com/vladmandic/human/issues) for a list of known limitations and planned enhancements*
*Suggestions are welcome!*
@ -124,8 +124,8 @@ And then use with:
const human = require('@vladmandic/human'); // points to @vladmandic/human/dist/human.cjs
```
Since NodeJS projects load `weights` from the local filesystem instead of using `http` calls, you must modify the default configuration to include correct paths with the `file://` prefix
For example:
```js
const config = {
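  // a minimal sketch, assuming models sit in a local 'models' folder;
  // these paths mirror the demo/node.js configuration later in this commit
  face: {
    detector: { modelPath: 'file://models/blazeface/back/model.json' },
    mesh: { modelPath: 'file://models/facemesh/model.json' },
    iris: { modelPath: 'file://models/iris/model.json' },
  },
  body: { modelPath: 'file://models/posenet/model.json' },
};
```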
@ -213,7 +213,6 @@ Note that user object and default configuration are merged using deep-merge, so
The configuration object is large, but typically you only need to modify a few values:
- `enabled`: Choose which models to use
- `skipFrames`: Must be set to 0 for static images
- `modelPath`: Update as needed to reflect your application's relative path
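Since the merge is deep, a user configuration only needs the values being changed. A minimal sketch (the specific values are illustrative, and `detect()` is assumed to be invoked as in the demo code):
```js
const human = require('@vladmandic/human');

// only the overrides are listed; everything else falls back to library defaults
const config = {
  face: { detector: { maxFaces: 1 } }, // hypothetical single-face use case
  hand: { enabled: false },            // skip hand detection entirely
};
const result = await human.detect(input, config); // inside an async function
```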
@ -234,8 +233,9 @@ config = {
inputSize: 256, // fixed value: 128 for front and 256 for 'back'
maxFaces: 10, // maximum number of faces detected in the input, should be set to the minimum number for performance
skipFrames: 10, // how many frames to go without re-running the face bounding box detector
// only used for video inputs, ignored for static inputs
// if model is running at 25 FPS, we can re-use existing bounding box for updated face mesh analysis
// as face probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
// as the face probably hasn't moved much in a short time (10 * 1/25 = 0.25 sec)
minConfidence: 0.5, // threshold for discarding a prediction
iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression
scoreThreshold: 0.7, // threshold for deciding when to remove boxes based on score in non-maximum suppression
@ -256,7 +256,7 @@ config = {
modelPath: '../models/ssrnet-age/imdb/model.json', // can be 'imdb' or 'wiki'
// which determines training set for model
inputSize: 64, // fixed value
skipFrames: 10, // how many frames to go without re-running the detector
skipFrames: 10, // how many frames to go without re-running the detector, only used for video inputs
},
gender: {
enabled: true,
@ -267,7 +267,7 @@ config = {
enabled: true,
inputSize: 64, // fixed value
minConfidence: 0.5, // threshold for discarding a prediction
skipFrames: 10, // how many frames to go without re-running the detector
skipFrames: 10, // how many frames to go without re-running the detector, only used for video inputs
useGrayscale: true, // convert image to grayscale before prediction or use highest channel
modelPath: '../models/emotion/model.json',
},
@ -285,8 +285,9 @@ config = {
enabled: true,
inputSize: 256, // fixed value
skipFrames: 10, // how many frames to go without re-running the hand bounding box detector
// only used for video inputs
// if model is running at 25 FPS, we can re-use existing bounding box for updated hand skeleton analysis
// as face probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
// as the hand probably hasn't moved much in a short time (10 * 1/25 = 0.25 sec)
minConfidence: 0.5, // threshold for discarding a prediction
iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression
scoreThreshold: 0.7, // threshold for deciding when to remove boxes based on score in non-maximum suppression

View File

@ -16,7 +16,7 @@ export default {
// 'front' is optimized for large faces such as front-facing camera and 'back' is optimized for distant faces.
inputSize: 256, // fixed value: 128 for front and 256 for 'back'
maxFaces: 10, // maximum number of faces detected in the input, should be set to the minimum number for performance
skipFrames: 10, // how many frames to go without re-running the face bounding box detector
skipFrames: 10, // how many frames to go without re-running the face bounding box detector, only used for video inputs
// if model is running at 25 FPS, we can re-use existing bounding box for updated face mesh analysis
// as the face probably hasn't moved much in a short time (10 * 1/25 = 0.25 sec)
minConfidence: 0.5, // threshold for discarding a prediction
@ -39,7 +39,7 @@ export default {
modelPath: '../models/ssrnet-age/imdb/model.json', // can be 'imdb' or 'wiki'
// which determines training set for model
inputSize: 64, // fixed value
skipFrames: 10, // how many frames to go without re-running the detector
skipFrames: 10, // how many frames to go without re-running the detector, only used for video inputs
},
gender: {
enabled: true,
@ -67,9 +67,9 @@ export default {
hand: {
enabled: true,
inputSize: 256, // fixed value
skipFrames: 10, // how many frames to go without re-running the hand bounding box detector
skipFrames: 10, // how many frames to go without re-running the hand bounding box detector, only used for video inputs
// if model is running at 25 FPS, we can re-use existing bounding box for updated hand skeleton analysis
// as face probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
// as the hand probably hasn't moved much in a short time (10 * 1/25 = 0.25 sec)
minConfidence: 0.5, // threshold for discarding a prediction
iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression
scoreThreshold: 0.7, // threshold for deciding when to remove boxes based on score in non-maximum suppression

View File

@ -180,12 +180,6 @@ function runHumanDetect(input, canvas) {
// main processing function when input is an image; can use direct invocation or a web worker
async function processImage(input) {
// must be zero for images
config.face.detector.skipFrames = 0;
config.face.emotion.skipFrames = 0;
config.face.age.skipFrames = 0;
config.hand.skipFrames = 0;
timeStamp = performance.now();
return new Promise((resolve) => {
const image = document.getElementById('image');
@ -234,7 +228,7 @@ async function detectVideo() {
// just initialize everything and call main function
async function detectSampleImages() {
ui.baseFont = ui.baseFontProto.replace(/{size}/, `${ui.columns}rem`);
ui.baseFont = ui.baseFontProto.replace(/{size}/, `${1.2 * ui.columns}rem`);
ui.baseLineHeight = ui.baseLineHeightProto * ui.columns;
document.getElementById('canvas').style.display = 'none';
document.getElementById('samples').style.display = 'block';
@ -244,6 +238,7 @@ async function detectSampleImages() {
function setupMenu() {
menu = new Menu(document.body);
menu.addTitle('...');
menu.addButton('Start Video', 'Pause Video', (evt) => detectVideo(evt));
menu.addButton('Process Images', 'Process Images', () => detectSampleImages());
@ -297,7 +292,6 @@ function setupMenu() {
menu.addBool('Fill Polygons', ui, 'fillPolygons');
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addValue('State', '');
menu.addChart('FPS', 'FPS');
}

View File

@ -13,7 +13,7 @@ async function drawFace(result, canvas, ui, triangulation) {
// silly hack since fillText does not support newlines
const labels = [];
if (face.agConfidence) labels.push(`${Math.trunc(100 * face.agConfidence)}% ${face.gender || ''}`);
if (face.age) labels.push(`Age:${face.age || ''}`);
if (face.age) labels.push(`age:${face.age || ''}`);
if (face.iris) labels.push(`iris: ${face.iris}`);
if (face.emotion && face.emotion[0]) labels.push(`${Math.trunc(100 * face.emotion[0].score)}% ${face.emotion[0].emotion}`);
ctx.fillStyle = ui.baseLabel;

View File

@ -1,19 +1,22 @@
const css = `
.menu-container { display: block; background: darkslategray; position: fixed; top: 0rem; right: 0; width: fit-content; padding: 0 0.8rem 0 0.8rem; line-height: 1.8rem; z-index: 10; max-height: calc(100% - 4rem); }
.menu-container { display: block; background: darkslategray; position: fixed; top: 0rem; right: 0; width: fit-content; padding: 0 0.8rem 0 0.8rem; line-height: 1.8rem; z-index: 10; max-height: calc(100% - 4rem); box-shadow: 0 0 8px dimgrey; }
.menu-container:hover { box-shadow: 0 0 8px lightgrey; }
.menu { display: flex; white-space: nowrap; background: darkslategray; padding: 0.2rem; width: max-content; }
.menu-title { padding: 0; }
.menu-title { text-align: right; cursor: pointer; }
.menu-hr { margin: 0.2rem; border: 1px solid rgba(0, 0, 0, 0.5) }
.menu-label { width: 1.3rem; height: 0.8rem; cursor: pointer; position: absolute; top: 0.1rem; left: 0.1rem; z-index: 1; background: lightcoral; border-radius: 1rem; transition: left 0.6s ease; }
.menu-label { padding: 0; }
.menu-chart-title { align-items: center; }
.menu-chart-canvas { background: transparent; height: 40px; width: 180px; margin: 0.2rem 0.2rem 0.2rem 1rem; }
.menu-button { border: 0; background: lightblue; width: -webkit-fill-available; padding: 8px; margin: 8px 0 8px 0; cursor: pointer; box-shadow: 4px 4px 4px 0 dimgrey; }
.menu-button:hover { background: lightgreen; }
.menu-button:hover { background: lightgreen; box-shadow: 4px 4px 4px 0 black; }
.menu-button:focus { outline: none; }
.menu-checkbox { width: 2.8rem; height: 1rem; background: black; margin: 0.5rem 0.8rem 0 0; position: relative; border-radius: 1rem; }
.menu-checkbox:after { content: 'OFF'; color: lightcoral; position: absolute; right: 0.2rem; top: -0.4rem; font-weight: 800; font-size: 0.5rem; }
.menu-checkbox:before { content: 'ON'; color: lightgreen; position: absolute; left: 0.3rem; top: -0.4rem; font-weight: 800; font-size: 0.5rem; }
.menu-checkbox-label { width: 1.3rem; height: 0.8rem; cursor: pointer; position: absolute; top: 0.1rem; left: 0.1rem; z-index: 1; background: lightcoral; border-radius: 1rem; transition: left 0.6s ease; }
input[type=checkbox] { visibility: hidden; }
input[type=checkbox]:checked + label { left: 1.4rem; background: lightgreen; }
@ -45,6 +48,7 @@ class Menu {
this.menu = createElem(parent);
this._id = 0;
this._maxFPS = 0;
this.hidden = 0;
}
get newID() {
@ -64,9 +68,22 @@ class Menu {
return this.menu.offsetHeight;
}
async addTitle(title) {
const el = document.createElement('div');
el.className = 'menu-title';
el.id = this.newID;
el.innerHTML = title;
this.menu.appendChild(el);
el.addEventListener('click', () => {
this.hidden = !this.hidden;
const all = document.getElementsByClassName('menu');
for (const item of all) item.style.display = this.hidden ? 'none' : 'flex';
});
}
async addLabel(title) {
const el = document.createElement('div');
el.className = 'menu menu-title';
el.className = 'menu menu-label';
el.id = this.newID;
el.innerHTML = title;
this.menu.appendChild(el);
@ -75,9 +92,9 @@ class Menu {
async addBool(title, object, variable, callback) {
const el = document.createElement('div');
el.className = 'menu';
el.innerHTML = `<div class="menu-checkbox"><input class="menu-checkbox" type="checkbox" id="${this.newID}" ${object[variable] ? 'checked' : ''}/><label class="menu-label" for="${this.ID}"></label></div>${title}`;
el.innerHTML = `<div class="menu-checkbox"><input class="menu-checkbox" type="checkbox" id="${this.newID}" ${object[variable] ? 'checked' : ''}/><label class="menu-checkbox-label" for="${this.ID}"></label></div>${title}`;
this.menu.appendChild(el);
document.getElementById(this.ID).addEventListener('change', (evt) => {
el.addEventListener('change', (evt) => {
object[variable] = evt.target.checked;
if (callback) callback(evt.target.checked);
});
@ -88,7 +105,7 @@ class Menu {
el.className = 'menu';
el.innerHTML = `<input class="menu-range" type="range" id="${this.newID}" min="${min}" max="${max}" step="${step}" value="${object[variable]}">${title}`;
this.menu.appendChild(el);
document.getElementById(this.ID).addEventListener('change', (evt) => {
el.addEventListener('change', (evt) => {
object[variable] = evt.target.value;
evt.target.setAttribute('value', evt.target.value);
if (callback) callback(evt.target.value);
@ -106,11 +123,14 @@ class Menu {
async addButton(titleOn, titleOff, callback) {
const el = document.createElement('button');
el.className = 'menu menu-button';
el.style.fontFamily = document.body.style.fontFamily;
el.style.fontSize = document.body.style.fontSize;
el.style.fontVariant = document.body.style.fontVariant;
el.type = 'button';
el.id = this.newID;
el.innerText = titleOn;
this.menu.appendChild(el);
document.getElementById(this.ID).addEventListener('click', () => {
el.addEventListener('click', () => {
if (el.innerText === titleOn) el.innerText = titleOff;
else el.innerText = titleOn;
if (callback) callback(el.innerText !== titleOn);
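For reference, the demo wires these helpers together in `setupMenu()`; a condensed usage sketch (the titles and the `ui` object are illustrative):
```js
const menu = new Menu(document.body);
menu.addTitle('Human Demo'); // clicking the title now collapses or expands the whole menu
menu.addButton('Start Video', 'Pause Video', (isOn) => { /* start or pause the detection loop */ });
menu.addBool('Fill Polygons', ui, 'fillPolygons', (checked) => { /* redraw overlays */ });
```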

View File

@ -27,21 +27,15 @@ const config = {
backend: 'tensorflow',
console: true,
face: {
enabled: false,
detector: { modelPath: 'file://models/blazeface/model.json', inputSize: 128, maxFaces: 10, skipFrames: 10, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
mesh: { enabled: true, modelPath: 'file://models/facemesh/model.json', inputSize: 192 },
iris: { enabled: true, modelPath: 'file://models/iris/model.json', inputSize: 192 },
age: { enabled: true, modelPath: 'file://models/ssrnet-age/imdb/model.json', inputSize: 64, skipFrames: 5 },
gender: { enabled: true, modelPath: 'file://models/ssrnet-gender/imdb/model.json' },
detector: { modelPath: 'file://models/blazeface/back/model.json' },
mesh: { modelPath: 'file://models/facemesh/model.json' },
iris: { modelPath: 'file://models/iris/model.json' },
age: { modelPath: 'file://models/ssrnet-age/imdb/model.json' },
gender: { modelPath: 'file://models/ssrnet-gender/imdb/model.json' },
emotion: { modelPath: 'file://models/emotion/model.json' },
},
body: { enabled: true, modelPath: 'file://models/posenet/model.json', inputResolution: 257, outputStride: 16, maxDetections: 5, scoreThreshold: 0.75, nmsRadius: 20 },
body: { modelPath: 'file://models/posenet/model.json' },
hand: {
enabled: false,
inputSize: 256,
skipFrames: 10,
minConfidence: 0.8,
iouThreshold: 0.3,
scoreThreshold: 0.75,
detector: { anchors: 'file://models/handdetect/anchors.json', modelPath: 'file://models/handdetect/model.json' },
skeleton: { modelPath: 'file://models/handskeleton/model.json' },
},

71
dist/human.cjs vendored
View File

@ -531,6 +531,7 @@ var require_pipeline = __commonJS((exports2) => {
async predict(input, config2) {
this.skipFrames = config2.detector.skipFrames;
this.maxFaces = config2.detector.maxFaces;
this.runsWithoutFaceDetector++;
if (this.shouldUpdateRegionsOfInterest()) {
const detector = await this.boundingBoxDetector.getBoundingBoxes(input);
if (detector.boxes.length === 0) {
@ -557,8 +558,6 @@ var require_pipeline = __commonJS((exports2) => {
});
this.updateRegionsOfInterest(scaledBoxes);
this.runsWithoutFaceDetector = 0;
} else {
this.runsWithoutFaceDetector++;
}
const results = tf2.tidy(() => this.regionsOfInterest.map((box, i) => {
let angle = 0;
@ -664,12 +663,9 @@ var require_pipeline = __commonJS((exports2) => {
}
}
shouldUpdateRegionsOfInterest() {
const roisCount = this.regionsOfInterest.length;
const noROIs = roisCount === 0;
if (this.maxFaces === 1 || noROIs) {
return noROIs;
}
return roisCount !== this.maxFaces && this.runsWithoutFaceDetector >= this.skipFrames;
if (this.regionsOfInterest.length === 0)
return true;
return this.regionsOfInterest.length !== this.maxFaces && this.runsWithoutFaceDetector >= this.skipFrames;
}
calculateLandmarksBoundingBox(landmarks) {
const xs = landmarks.map((d) => d[0]);
@ -3900,13 +3896,11 @@ var require_ssrnet = __commonJS((exports2) => {
return models2.gender;
}
async function predict(image, config2) {
if (frame > config2.face.age.skipFrames) {
frame = 0;
} else {
if (frame < config2.face.age.skipFrames) {
frame += 1;
}
if (frame === 0)
return last;
}
frame = 0;
let enhance;
if (image instanceof tf2.Tensor) {
const resize = tf2.image.resizeBilinear(image, [config2.face.age.inputSize, config2.face.age.inputSize], false);
@ -3970,11 +3964,11 @@ var require_emotion = __commonJS((exports2) => {
return models2.emotion;
}
async function predict(image, config2) {
frame += 1;
if (frame >= config2.face.emotion.skipFrames) {
frame = 0;
if (frame < config2.face.emotion.skipFrames) {
frame += 1;
return last;
}
frame = 0;
const enhance = tf2.tidy(() => {
if (image instanceof tf2.Tensor) {
const resize = tf2.image.resizeBilinear(image, [config2.face.emotion.inputSize, config2.face.emotion.inputSize], false);
@ -4895,6 +4889,7 @@ var require_pipeline2 = __commonJS((exports2) => {
this.maxContinuousChecks = config2.skipFrames;
this.detectionConfidence = config2.minConfidence;
this.maxHands = config2.maxHands;
this.runsWithoutHandDetector++;
const useFreshBox = this.shouldUpdateRegionsOfInterest();
if (useFreshBox === true) {
const boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image, config2);
@ -4903,8 +4898,6 @@ var require_pipeline2 = __commonJS((exports2) => {
this.updateRegionsOfInterest(boundingBoxPredictions[i], true, i);
}
this.runsWithoutHandDetector = 0;
} else {
this.runsWithoutHandDetector++;
}
const hands = [];
if (!this.regionsOfInterest)
@ -4983,7 +4976,7 @@ var require_pipeline2 = __commonJS((exports2) => {
}
}
shouldUpdateRegionsOfInterest() {
return !this.regionsOfInterest || this.regionsOfInterest.length === 0 || this.runsWithoutHandDetector >= this.maxContinuousChecks;
return !this.regionsOfInterest || this.regionsOfInterest.length === 0 || this.runsWithoutHandDetector >= this.skipFrames;
}
}
exports2.HandPipeline = HandPipeline;
@ -5000,7 +4993,7 @@ var require_handpose = __commonJS((exports2) => {
this.pipeline = pipeline;
}
async estimateHands(input, config2) {
this.maxContinuousChecks = config2.skipFrames;
this.skipFrames = config2.skipFrames;
this.detectionConfidence = config2.minConfidence;
this.maxHands = config2.maxHands;
const image = tf2.tidy(() => {
@ -5138,7 +5131,7 @@ var require_config = __commonJS((exports2) => {
var require_package = __commonJS((exports2, module2) => {
module2.exports = {
name: "@vladmandic/human",
version: "0.3.6",
version: "0.3.8",
description: "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
sideEffects: false,
main: "dist/human.cjs",
@ -5175,12 +5168,12 @@ var require_package = __commonJS((exports2, module2) => {
rimraf: "^3.0.2"
},
scripts: {
start: "node --trace-warnings --trace-uncaught --no-deprecation demo/node.js",
start: "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation demo/node.js",
lint: "eslint src/*.js demo/*.js",
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --metafile=dist/human.json --outfile=dist/human.js src/index.js",
"build-esm-bundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/index.js",
"build-esm-nobundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:@tensorflow --external:fs --metafile=dist/human.esm-nobundle.json --outfile=dist/human.esm-nobundle.js src/index.js",
"build-node": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --metafile=dist/human.cjs.json --outfile=dist/human.cjs src/index.js",
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --metafile=dist/human.json --outfile=dist/human.js src/human.js",
"build-esm-bundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/human.js",
"build-esm-nobundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:@tensorflow --external:fs --metafile=dist/human.esm-nobundle.json --outfile=dist/human.esm-nobundle.js src/human.js",
"build-node": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --metafile=dist/human.cjs.json --outfile=dist/human.cjs src/human.js",
build: "rimraf dist/* && npm run build-iife && npm run build-esm-bundle && npm run build-esm-nobundle && npm run build-node && ls -l dist/",
update: "npm update --depth 20 && npm dedupe && npm prune && npm audit",
changelog: "node changelog.js"
@ -5200,7 +5193,7 @@ var require_package = __commonJS((exports2, module2) => {
};
});
// src/index.js
// src/human.js
const tf = require("@tensorflow/tfjs");
const facemesh = require_facemesh();
const ssrnet = require_ssrnet();
@ -5220,6 +5213,10 @@ const models = {
gender: null,
emotion: null
};
const override = {
face: {detector: {skipFrames: 0}, age: {skipFrames: 0}, emotion: {skipFrames: 0}},
hand: {skipFrames: 0}
};
const now = () => {
if (typeof performance !== "undefined")
return performance.now();
@ -5261,11 +5258,18 @@ function mergeDeep(...objects) {
function sanity(input) {
if (!input)
return "input is not defined";
const width = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
if (!width || width === 0)
return "input is empty";
if (input.readyState && input.readyState <= 2)
return "input is not ready";
if (tf.ENV.flags.IS_BROWSER && (input instanceof ImageData || input instanceof HTMLImageElement || input instanceof HTMLCanvasElement || input instanceof HTMLVideoElement || input instanceof HTMLMediaElement)) {
const width = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
if (!width || width === 0)
return "input is empty";
}
if (tf.ENV.flags.IS_BROWSER && (input instanceof HTMLVideoElement || input instanceof HTMLMediaElement)) {
if (input.readyState && input.readyState <= 2)
return "input is not ready";
}
if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {
return "input must be a tensor";
}
try {
tf.getBackend();
} catch {
@ -5294,7 +5298,8 @@ async function detect(input, userConfig = {}) {
const perf = {};
let timeStamp;
timeStamp = now();
config = mergeDeep(defaults, userConfig);
const shouldOverride = tf.ENV.flags.IS_NODE || tf.ENV.flags.IS_BROWSER && !(input instanceof HTMLVideoElement || input instanceof HTMLMediaElement);
config = mergeDeep(defaults, userConfig, shouldOverride ? override : {});
perf.config = Math.trunc(now() - timeStamp);
timeStamp = now();
state = "check";

38
dist/human.cjs.json vendored
View File

@ -1,15 +1,15 @@
{
"inputs": {
"config.js": {
"bytes": 4774,
"bytes": 4862,
"imports": []
},
"package.json": {
"bytes": 2605,
"bytes": 2635,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2020,
"bytes": 2019,
"imports": []
},
"src/facemesh/blazeface.js": {
@ -45,7 +45,7 @@
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14393,
"bytes": 14262,
"imports": [
{
"path": "src/facemesh/box.js"
@ -83,7 +83,7 @@
]
},
"src/handpose/handpose.js": {
"bytes": 2365,
"bytes": 2356,
"imports": [
{
"path": "src/handpose/handdetector.js"
@ -101,7 +101,7 @@
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8202,
"bytes": 8178,
"imports": [
{
"path": "src/handpose/box.js"
@ -115,8 +115,8 @@
"bytes": 2488,
"imports": []
},
"src/index.js": {
"bytes": 7526,
"src/human.js": {
"bytes": 8299,
"imports": [
{
"path": "src/facemesh/facemesh.js"
@ -245,7 +245,7 @@
]
},
"src/ssrnet/ssrnet.js": {
"bytes": 1965,
"bytes": 1937,
"imports": []
}
},
@ -253,7 +253,7 @@
"dist/human.cjs.map": {
"imports": [],
"inputs": {},
"bytes": 219894
"bytes": 220934
},
"dist/human.cjs": {
"imports": [],
@ -271,7 +271,7 @@
"bytesInOutput": 3027
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 13366
"bytesInOutput": 13270
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 20586
@ -283,10 +283,10 @@
"bytesInOutput": 2950
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 2194
"bytesInOutput": 2158
},
"src/emotion/emotion.js": {
"bytesInOutput": 2134
"bytesInOutput": 2133
},
"src/posenet/modelBase.js": {
"bytesInOutput": 1120
@ -334,22 +334,22 @@
"bytesInOutput": 2671
},
"src/handpose/pipeline.js": {
"bytesInOutput": 7651
"bytesInOutput": 7625
},
"src/handpose/handpose.js": {
"bytesInOutput": 2518
"bytesInOutput": 2509
},
"config.js": {
"bytesInOutput": 1872
},
"package.json": {
"bytesInOutput": 2748
"bytesInOutput": 2778
},
"src/index.js": {
"bytesInOutput": 6514
"src/human.js": {
"bytesInOutput": 7273
}
},
"bytes": 134107
"bytes": 134728
}
}
}

6
dist/human.cjs.map vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,15 +1,15 @@
{
"inputs": {
"config.js": {
"bytes": 4774,
"bytes": 4862,
"imports": []
},
"package.json": {
"bytes": 2605,
"bytes": 2635,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2020,
"bytes": 2019,
"imports": []
},
"src/facemesh/blazeface.js": {
@ -45,7 +45,7 @@
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14393,
"bytes": 14262,
"imports": [
{
"path": "src/facemesh/box.js"
@ -83,7 +83,7 @@
]
},
"src/handpose/handpose.js": {
"bytes": 2365,
"bytes": 2356,
"imports": [
{
"path": "src/handpose/handdetector.js"
@ -101,7 +101,7 @@
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8202,
"bytes": 8178,
"imports": [
{
"path": "src/handpose/box.js"
@ -115,8 +115,8 @@
"bytes": 2488,
"imports": []
},
"src/index.js": {
"bytes": 7526,
"src/human.js": {
"bytes": 8299,
"imports": [
{
"path": "src/facemesh/facemesh.js"
@ -245,7 +245,7 @@
]
},
"src/ssrnet/ssrnet.js": {
"bytes": 1965,
"bytes": 1937,
"imports": []
}
},
@ -253,7 +253,7 @@
"dist/human.esm-nobundle.js.map": {
"imports": [],
"inputs": {},
"bytes": 198188
"bytes": 199199
},
"dist/human.esm-nobundle.js": {
"imports": [],
@ -271,7 +271,7 @@
"bytesInOutput": 1176
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 5602
"bytesInOutput": 5593
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 16790
@ -283,10 +283,10 @@
"bytesInOutput": 1391
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1149
"bytesInOutput": 1142
},
"src/emotion/emotion.js": {
"bytesInOutput": 1148
"bytesInOutput": 1147
},
"src/posenet/modelBase.js": {
"bytesInOutput": 597
@ -334,22 +334,22 @@
"bytesInOutput": 984
},
"src/handpose/pipeline.js": {
"bytesInOutput": 3232
"bytesInOutput": 3218
},
"src/handpose/handpose.js": {
"bytesInOutput": 1326
"bytesInOutput": 1317
},
"config.js": {
"bytesInOutput": 1146
},
"package.json": {
"bytesInOutput": 2275
"bytesInOutput": 2305
},
"src/index.js": {
"bytesInOutput": 3564
"src/human.js": {
"bytesInOutput": 4135
}
},
"bytes": 69404
"bytes": 69965
}
}
}

110
dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

38
dist/human.esm.json vendored
View File

@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
"bytes": 4774,
"bytes": 4862,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@ -149,11 +149,11 @@
]
},
"package.json": {
"bytes": 2605,
"bytes": 2635,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2020,
"bytes": 2019,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -204,7 +204,7 @@
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14393,
"bytes": 14262,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -252,7 +252,7 @@
]
},
"src/handpose/handpose.js": {
"bytes": 2365,
"bytes": 2356,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -273,7 +273,7 @@
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8202,
"bytes": 8178,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -290,8 +290,8 @@
"bytes": 2488,
"imports": []
},
"src/index.js": {
"bytes": 7526,
"src/human.js": {
"bytes": 8299,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -436,7 +436,7 @@
]
},
"src/ssrnet/ssrnet.js": {
"bytes": 1965,
"bytes": 1937,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -464,7 +464,7 @@
"dist/human.esm.js.map": {
"imports": [],
"inputs": {},
"bytes": 4959239
"bytes": 4960250
},
"dist/human.esm.js": {
"imports": [],
@ -539,7 +539,7 @@
"bytesInOutput": 1195
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 5577
"bytesInOutput": 5568
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 16791
@ -551,10 +551,10 @@
"bytesInOutput": 1376
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1150
"bytesInOutput": 1143
},
"src/emotion/emotion.js": {
"bytesInOutput": 1143
"bytesInOutput": 1142
},
"src/posenet/modelBase.js": {
"bytesInOutput": 575
@ -602,22 +602,22 @@
"bytesInOutput": 993
},
"src/handpose/pipeline.js": {
"bytesInOutput": 3228
"bytesInOutput": 3214
},
"src/handpose/handpose.js": {
"bytesInOutput": 1312
"bytesInOutput": 1303
},
"config.js": {
"bytesInOutput": 1147
},
"package.json": {
"bytesInOutput": 2276
"bytesInOutput": 2306
},
"src/index.js": {
"bytesInOutput": 3669
"src/human.js": {
"bytesInOutput": 4246
}
},
"bytes": 1106324
"bytes": 1106891
}
}
}

110
dist/human.js vendored

File diff suppressed because one or more lines are too long

6
dist/human.js.map vendored

File diff suppressed because one or more lines are too long

38
dist/human.json vendored
View File

@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
"bytes": 4774,
"bytes": 4862,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@ -149,11 +149,11 @@
]
},
"package.json": {
"bytes": 2605,
"bytes": 2635,
"imports": []
},
"src/emotion/emotion.js": {
"bytes": 2020,
"bytes": 2019,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -204,7 +204,7 @@
"imports": []
},
"src/facemesh/pipeline.js": {
"bytes": 14393,
"bytes": 14262,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -252,7 +252,7 @@
]
},
"src/handpose/handpose.js": {
"bytes": 2365,
"bytes": 2356,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -273,7 +273,7 @@
"imports": []
},
"src/handpose/pipeline.js": {
"bytes": 8202,
"bytes": 8178,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -290,8 +290,8 @@
"bytes": 2488,
"imports": []
},
"src/index.js": {
"bytes": 7526,
"src/human.js": {
"bytes": 8299,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -436,7 +436,7 @@
]
},
"src/ssrnet/ssrnet.js": {
"bytes": 1965,
"bytes": 1937,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@ -464,7 +464,7 @@
"dist/human.js.map": {
"imports": [],
"inputs": {},
"bytes": 4959239
"bytes": 4960250
},
"dist/human.js": {
"imports": [],
@ -539,7 +539,7 @@
"bytesInOutput": 1195
},
"src/facemesh/pipeline.js": {
"bytesInOutput": 5577
"bytesInOutput": 5568
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 16791
@ -551,10 +551,10 @@
"bytesInOutput": 1376
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 1150
"bytesInOutput": 1143
},
"src/emotion/emotion.js": {
"bytesInOutput": 1143
"bytesInOutput": 1142
},
"src/posenet/modelBase.js": {
"bytesInOutput": 575
@ -602,22 +602,22 @@
"bytesInOutput": 993
},
"src/handpose/pipeline.js": {
"bytesInOutput": 3228
"bytesInOutput": 3214
},
"src/handpose/handpose.js": {
"bytesInOutput": 1312
"bytesInOutput": 1303
},
"config.js": {
"bytesInOutput": 1147
},
"package.json": {
"bytesInOutput": 2276
"bytesInOutput": 2306
},
"src/index.js": {
"bytesInOutput": 3669
"src/human.js": {
"bytesInOutput": 4246
}
},
"bytes": 1106333
"bytes": 1106900
}
}
}

View File

@ -37,12 +37,12 @@
"rimraf": "^3.0.2"
},
"scripts": {
"start": "node --trace-warnings --trace-uncaught --no-deprecation demo/node.js",
"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation demo/node.js",
"lint": "eslint src/*.js demo/*.js",
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --metafile=dist/human.json --outfile=dist/human.js src/index.js",
"build-esm-bundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/index.js",
"build-esm-nobundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:@tensorflow --external:fs --metafile=dist/human.esm-nobundle.json --outfile=dist/human.esm-nobundle.js src/index.js",
"build-node": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --metafile=dist/human.cjs.json --outfile=dist/human.cjs src/index.js",
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --metafile=dist/human.json --outfile=dist/human.js src/human.js",
"build-esm-bundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/human.js",
"build-esm-nobundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:@tensorflow --external:fs --metafile=dist/human.esm-nobundle.json --outfile=dist/human.esm-nobundle.js src/human.js",
"build-node": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --metafile=dist/human.cjs.json --outfile=dist/human.cjs src/human.js",
"build": "rimraf dist/* && npm run build-iife && npm run build-esm-bundle && npm run build-esm-nobundle && npm run build-node && ls -l dist/",
"update": "npm update --depth 20 && npm dedupe && npm prune && npm audit",
"changelog": "node changelog.js"

View File

@ -22,11 +22,11 @@ async function load(config) {
}
async function predict(image, config) {
frame += 1;
if (frame >= config.face.emotion.skipFrames) {
frame = 0;
if (frame < config.face.emotion.skipFrames) {
frame += 1;
return last;
}
frame = 0;
const enhance = tf.tidy(() => {
if (image instanceof tf.Tensor) {
const resize = tf.image.resizeBilinear(image, [config.face.emotion.inputSize, config.face.emotion.inputSize], false);

View File

@ -131,8 +131,8 @@ class Pipeline {
async predict(input, config) {
this.skipFrames = config.detector.skipFrames;
this.maxFaces = config.detector.maxFaces;
this.runsWithoutFaceDetector++;
if (this.shouldUpdateRegionsOfInterest()) {
// const { boxes, scaleFactor } = await this.boundingBoxDetector.getBoundingBoxes(input);
const detector = await this.boundingBoxDetector.getBoundingBoxes(input);
if (detector.boxes.length === 0) {
this.regionsOfInterest = [];
@ -158,8 +158,6 @@ class Pipeline {
});
this.updateRegionsOfInterest(scaledBoxes);
this.runsWithoutFaceDetector = 0;
} else {
this.runsWithoutFaceDetector++;
}
const results = tf.tidy(() => this.regionsOfInterest.map((box, i) => {
let angle = 0;
@ -272,12 +270,8 @@ class Pipeline {
}
shouldUpdateRegionsOfInterest() {
const roisCount = this.regionsOfInterest.length;
const noROIs = roisCount === 0;
if (this.maxFaces === 1 || noROIs) {
return noROIs;
}
return roisCount !== this.maxFaces && this.runsWithoutFaceDetector >= this.skipFrames;
if (this.regionsOfInterest.length === 0) return true; // nothing detected, so run detector on the next frame
return (this.regionsOfInterest.length !== this.maxFaces) && (this.runsWithoutFaceDetector >= this.skipFrames);
}
calculateLandmarksBoundingBox(landmarks) {

View File

@ -9,7 +9,7 @@ class HandPose {
}
async estimateHands(input, config) {
this.maxContinuousChecks = config.skipFrames;
this.skipFrames = config.skipFrames;
this.detectionConfidence = config.minConfidence;
this.maxHands = config.maxHands;
const image = tf.tidy(() => {

View File

@ -79,6 +79,7 @@ class HandPipeline {
this.maxContinuousChecks = config.skipFrames;
this.detectionConfidence = config.minConfidence;
this.maxHands = config.maxHands;
this.runsWithoutHandDetector++;
const useFreshBox = this.shouldUpdateRegionsOfInterest();
if (useFreshBox === true) {
const boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image, config);
@ -87,8 +88,6 @@ class HandPipeline {
this.updateRegionsOfInterest(boundingBoxPredictions[i], true /* force update */, i);
}
this.runsWithoutHandDetector = 0;
} else {
this.runsWithoutHandDetector++;
}
// Rotate input so the hand is vertically oriented.
const hands = [];
@ -172,7 +171,7 @@ class HandPipeline {
}
shouldUpdateRegionsOfInterest() {
return !this.regionsOfInterest || (this.regionsOfInterest.length === 0) || (this.runsWithoutHandDetector >= this.maxContinuousChecks);
return !this.regionsOfInterest || (this.regionsOfInterest.length === 0) || (this.runsWithoutHandDetector >= this.skipFrames);
}
}
exports.HandPipeline = HandPipeline;
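Both the facemesh and handpose pipelines now follow the same counter pattern: increment the run counter unconditionally at the top of `predict`, and reset it only when the bounding-box detector actually runs. A condensed sketch of the pattern (names are abbreviated, and the two pipelines differ slightly in their exact trigger conditions):
```js
// not the exact library code; a distilled sketch of the shared skip-frames pattern
class CachedDetector {
  constructor(skipFrames) {
    this.skipFrames = skipFrames;
    this.runsWithoutDetector = 0;
    this.regionsOfInterest = [];
  }

  shouldRunDetector() {
    if (this.regionsOfInterest.length === 0) return true; // nothing cached, must detect
    return this.runsWithoutDetector >= this.skipFrames;   // cache considered stale
  }

  async predict(input, detect, refine) {
    this.runsWithoutDetector++; // counted on every call, as in this commit
    if (this.shouldRunDetector()) {
      this.regionsOfInterest = await detect(input); // full bounding-box detection
      this.runsWithoutDetector = 0;
    }
    return refine(input, this.regionsOfInterest);   // cheap per-frame refinement
  }
}
```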

View File

@ -21,6 +21,11 @@ const models = {
emotion: null,
};
const override = {
face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, emotion: { skipFrames: 0 } },
hand: { skipFrames: 0 },
};
// helper function: gets elapsed time on both browser and nodejs
const now = () => {
if (typeof performance !== 'undefined') return performance.now();
@ -66,9 +71,16 @@ function mergeDeep(...objects) {
function sanity(input) {
if (!input) return 'input is not defined';
const width = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
if (!width || (width === 0)) return 'input is empty';
if (input.readyState && (input.readyState <= 2)) return 'input is not ready';
if (tf.ENV.flags.IS_BROWSER && (input instanceof ImageData || input instanceof HTMLImageElement || input instanceof HTMLCanvasElement || input instanceof HTMLVideoElement || input instanceof HTMLMediaElement)) {
const width = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
if (!width || (width === 0)) return 'input is empty';
}
if (tf.ENV.flags.IS_BROWSER && (input instanceof HTMLVideoElement || input instanceof HTMLMediaElement)) {
if (input.readyState && (input.readyState <= 2)) return 'input is not ready';
}
if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {
return 'input must be a tensor';
}
try {
tf.getBackend();
} catch {
@ -93,7 +105,8 @@ async function detect(input, userConfig = {}) {
let timeStamp;
timeStamp = now();
config = mergeDeep(defaults, userConfig);
const shouldOverride = tf.ENV.flags.IS_NODE || (tf.ENV.flags.IS_BROWSER && !((input instanceof HTMLVideoElement) || (input instanceof HTMLMediaElement)));
config = mergeDeep(defaults, userConfig, shouldOverride ? override : {});
perf.config = Math.trunc(now() - timeStamp);
// sanity checks
@ -222,3 +235,5 @@ exports.handpose = handpose;
exports.tf = tf;
exports.version = app.version;
exports.state = state;
// Error: Failed to compile fragment shader
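The net effect of the `override` object and the `shouldOverride` check above: callers no longer need to zero out `skipFrames` for static inputs, as the demo's `processImage()` used to do. A minimal behavior sketch (assuming `detect` is exported alongside the symbols above):
```js
const human = require('@vladmandic/human');

async function example(videoElement, imageElement) {
  // video element: the configured skipFrames (default 10) stays in effect,
  // so bounding-box detectors re-use cached regions between frames
  const videoResult = await human.detect(videoElement);

  // image element (or any non-video input): the override is merged last,
  // forcing all skipFrames values to 0 so every detector runs on every call
  const imageResult = await human.detect(imageElement);

  return { videoResult, imageResult };
}
```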

View File

@ -22,12 +22,11 @@ async function loadGender(config) {
}
async function predict(image, config) {
if (frame > config.face.age.skipFrames) {
frame = 0;
} else {
if (frame < config.face.age.skipFrames) {
frame += 1;
return last;
}
if (frame === 0) return last;
frame = 0;
let enhance;
if (image instanceof tf.Tensor) {
const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);
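Both the ssrnet and emotion predictors now share this early-return idiom: serve the cached result until `skipFrames` calls have elapsed, then recompute. A condensed sketch of the idiom (the initializer is an assumption chosen so the very first call always computes):
```js
let last = null;
let frame = Number.MAX_SAFE_INTEGER; // assumed initial value, forcing a run on the first call

async function cachedPredict(image, skipFrames, compute) {
  if (frame < skipFrames) {
    frame += 1;
    return last;               // inside the skip window: return cached prediction
  }
  frame = 0;                   // window elapsed: reset counter and recompute
  last = await compute(image); // actual model inference
  return last;
}
```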