mirror of https://github.com/vladmandic/human

commit 5e90ab0d51 (parent d29b16a8f4): autodetect skipFrames

README.md (15 changes)
@@ -12,7 +12,7 @@
 Compatible with Browser, WebWorker and NodeJS execution!
 (and maybe with React-Native as it doesn't use any DOM objects)
 
-*This is a pre-release project, see [issues](https://github.com/vladmandic/human/issues) for list of known limitations*
+*This is a pre-release project, see [issues](https://github.com/vladmandic/human/issues) for list of known limitations and planned enhancements*
 
 *Suggestions are welcome!*
 
@@ -124,8 +124,8 @@ And then use with:
 const human = require('@vladmandic/human'); // points to @vladmandic/human/dist/human.cjs
 ```
 
 Since NodeJS projects load `weights` from local filesystem instead of using `http` calls, you must modify default configuration to include correct paths with `file://` prefix
 
 For example:
 ```js
 const config = {
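The hunk above truncates the README example. A minimal sketch of a NodeJS config in that style, using the `file://` model paths that appear in the demo/node.js changes later in this same commit (the exact paths depend on where your copy of the weights lives):

```js
// Sketch only: point the file:// paths at your local model files.
const config = {
  backend: 'tensorflow',
  face: {
    detector: { modelPath: 'file://models/blazeface/back/model.json' },
    mesh: { modelPath: 'file://models/facemesh/model.json' },
    iris: { modelPath: 'file://models/iris/model.json' },
  },
  body: { modelPath: 'file://models/posenet/model.json' },
};
```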
@@ -213,7 +213,6 @@ Note that user object and default configuration are merged using deep-merge, so
 Configuration object is large, but typically you only need to modify a few values:
 
 - `enabled`: Choose which models to use
-- `skipFrames`: Must be set to 0 for static images
 - `modelPath`: Update as needed to reflect your application's relative path
 
 
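The `skipFrames` bullet can be dropped because this commit makes the library zero out `skipFrames` automatically whenever the input is not a video stream (see the `override` object added to the bundled source further down). A hedged sketch of the resulting call pattern, with `human` and `image` assumed to be already loaded:

```js
// Sketch: static inputs no longer need skipFrames set to 0 by hand.
async function run(image) {
  // a non-video input triggers the internal skipFrames override
  const result = await human.detect(image, { face: { enabled: true } });
  return result;
}
```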
@@ -234,8 +233,9 @@ config = {
 inputSize: 256, // fixed value: 128 for front and 256 for 'back'
 maxFaces: 10, // maximum number of faces detected in the input, should be set to the minimum number for performance
 skipFrames: 10, // how many frames to go without re-running the face bounding box detector
+// only used for video inputs, ignored for static inputs
 // if model is running at 25 FPS, we can re-use existing bounding box for updated face mesh analysis
-// as face probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
+// as the face probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
 minConfidence: 0.5, // threshold for discarding a prediction
 iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression
 scoreThreshold: 0.7, // threshold for deciding when to remove boxes based on score in non-maximum suppression
@@ -256,7 +256,7 @@ config = {
 modelPath: '../models/ssrnet-age/imdb/model.json', // can be 'imdb' or 'wiki'
 // which determines training set for model
 inputSize: 64, // fixed value
-skipFrames: 10, // how many frames to go without re-running the detector
+skipFrames: 10, // how many frames to go without re-running the detector, only used for video inputs
 },
 gender: {
 enabled: true,
@@ -267,7 +267,7 @@ config = {
 enabled: true,
 inputSize: 64, // fixed value
 minConfidence: 0.5, // threshold for discarding a prediction
-skipFrames: 10, // how many frames to go without re-running the detector
+skipFrames: 10, // how many frames to go without re-running the detector, only used for video inputs
 useGrayscale: true, // convert image to grayscale before prediction or use highest channel
 modelPath: '../models/emotion/model.json',
 },
@@ -285,8 +285,9 @@ config = {
 enabled: true,
 inputSize: 256, // fixed value
 skipFrames: 10, // how many frames to go without re-running the hand bounding box detector
+// only used for video inputs
 // if model is running at 25 FPS, we can re-use existing bounding box for updated hand skeleton analysis
-// as face probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
+// as the hand probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
 minConfidence: 0.5, // threshold for discarding a prediction
 iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression
 scoreThreshold: 0.7, // threshold for deciding when to remove boxes based on score in non-maximum suppression
@@ -16,7 +16,7 @@ export default {
 // 'front' is optimized for large faces such as front-facing camera and 'back' is optimized for distant faces.
 inputSize: 256, // fixed value: 128 for front and 256 for 'back'
 maxFaces: 10, // maximum number of faces detected in the input, should be set to the minimum number for performance
-skipFrames: 10, // how many frames to go without re-running the face bounding box detector
+skipFrames: 10, // how many frames to go without re-running the face bounding box detector, only used for video inputs
 // if model is running at 25 FPS, we can re-use existing bounding box for updated face mesh analysis
 // as face probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
 minConfidence: 0.5, // threshold for discarding a prediction
@@ -39,7 +39,7 @@ export default {
 modelPath: '../models/ssrnet-age/imdb/model.json', // can be 'imdb' or 'wiki'
 // which determines training set for model
 inputSize: 64, // fixed value
-skipFrames: 10, // how many frames to go without re-running the detector
+skipFrames: 10, // how many frames to go without re-running the detector, only used for video inputs
 },
 gender: {
 enabled: true,
@@ -67,9 +67,9 @@ export default {
 hand: {
 enabled: true,
 inputSize: 256, // fixed value
-skipFrames: 10, // how many frames to go without re-running the hand bounding box detector
+skipFrames: 10, // how many frames to go without re-running the hand bounding box detector, only used for video inputs
 // if model is running at 25 FPS, we can re-use existing bounding box for updated hand skeleton analysis
-// as face probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
+// as the hand probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
 minConfidence: 0.5, // threshold for discarding a prediction
 iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression
 scoreThreshold: 0.7, // threshold for deciding when to remove boxes based on score in non-maximum suppression
@@ -180,12 +180,6 @@ function runHumanDetect(input, canvas) {
 
 // main processing function when input is image, can use direct invocation or web worker
 async function processImage(input) {
-  // must be zero for images
-  config.face.detector.skipFrames = 0;
-  config.face.emotion.skipFrames = 0;
-  config.face.age.skipFrames = 0;
-  config.hand.skipFrames = 0;
 
   timeStamp = performance.now();
   return new Promise((resolve) => {
     const image = document.getElementById('image');
@@ -234,7 +228,7 @@ async function detectVideo() {
 
 // just initialize everything and call main function
 async function detectSampleImages() {
-  ui.baseFont = ui.baseFontProto.replace(/{size}/, `${ui.columns}rem`);
+  ui.baseFont = ui.baseFontProto.replace(/{size}/, `${1.2 * ui.columns}rem`);
   ui.baseLineHeight = ui.baseLineHeightProto * ui.columns;
   document.getElementById('canvas').style.display = 'none';
   document.getElementById('samples').style.display = 'block';
@@ -244,6 +238,7 @@ async function detectSampleImages() {
 
 function setupMenu() {
   menu = new Menu(document.body);
+  menu.addTitle('...');
   menu.addButton('Start Video', 'Pause Video', (evt) => detectVideo(evt));
   menu.addButton('Process Images', 'Process Images', () => detectSampleImages());
 
@@ -297,7 +292,6 @@ function setupMenu() {
   menu.addBool('Fill Polygons', ui, 'fillPolygons');
 
   menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
-  menu.addValue('State', '');
   menu.addChart('FPS', 'FPS');
 }
 
@@ -13,7 +13,7 @@ async function drawFace(result, canvas, ui, triangulation) {
   // silly hack since fillText does not support new line
   const labels = [];
   if (face.agConfidence) labels.push(`${Math.trunc(100 * face.agConfidence)}% ${face.gender || ''}`);
-  if (face.age) labels.push(`Age:${face.age || ''}`);
+  if (face.age) labels.push(`age:${face.age || ''}`);
   if (face.iris) labels.push(`iris: ${face.iris}`);
   if (face.emotion && face.emotion[0]) labels.push(`${Math.trunc(100 * face.emotion[0].score)}% ${face.emotion[0].emotion}`);
   ctx.fillStyle = ui.baseLabel;
demo/menu.js (38 changes)
@@ -1,19 +1,22 @@
 const css = `
-  .menu-container { display: block; background: darkslategray; position: fixed; top: 0rem; right: 0; width: fit-content; padding: 0 0.8rem 0 0.8rem; line-height: 1.8rem; z-index: 10; max-height: calc(100% - 4rem); }
+  .menu-container { display: block; background: darkslategray; position: fixed; top: 0rem; right: 0; width: fit-content; padding: 0 0.8rem 0 0.8rem; line-height: 1.8rem; z-index: 10; max-height: calc(100% - 4rem); box-shadow: 0 0 8px dimgrey; }
+  .menu-container:hover { box-shadow: 0 0 8px lightgrey; }
   .menu { display: flex; white-space: nowrap; background: darkslategray; padding: 0.2rem; width: max-content; }
-  .menu-title { padding: 0; }
+  .menu-title { text-align: right; cursor: pointer; }
   .menu-hr { margin: 0.2rem; border: 1px solid rgba(0, 0, 0, 0.5) }
-  .menu-label { width: 1.3rem; height: 0.8rem; cursor: pointer; position: absolute; top: 0.1rem; left: 0.1rem; z-index: 1; background: lightcoral; border-radius: 1rem; transition: left 0.6s ease; }
+  .menu-label { padding: 0; }
 
   .menu-chart-title { align-items: center; }
   .menu-chart-canvas { background: transparent; height: 40px; width: 180px; margin: 0.2rem 0.2rem 0.2rem 1rem; }
 
   .menu-button { border: 0; background: lightblue; width: -webkit-fill-available; padding: 8px; margin: 8px 0 8px 0; cursor: pointer; box-shadow: 4px 4px 4px 0 dimgrey; }
-  .menu-button:hover { background: lightgreen; }
+  .menu-button:hover { background: lightgreen; box-shadow: 4px 4px 4px 0 black; }
+  .menu-button:focus { outline: none; }
 
   .menu-checkbox { width: 2.8rem; height: 1rem; background: black; margin: 0.5rem 0.8rem 0 0; position: relative; border-radius: 1rem; }
   .menu-checkbox:after { content: 'OFF'; color: lightcoral; position: absolute; right: 0.2rem; top: -0.4rem; font-weight: 800; font-size: 0.5rem; }
   .menu-checkbox:before { content: 'ON'; color: lightgreen; position: absolute; left: 0.3rem; top: -0.4rem; font-weight: 800; font-size: 0.5rem; }
+  .menu-checkbox-label { width: 1.3rem; height: 0.8rem; cursor: pointer; position: absolute; top: 0.1rem; left: 0.1rem; z-index: 1; background: lightcoral; border-radius: 1rem; transition: left 0.6s ease; }
   input[type=checkbox] { visibility: hidden; }
   input[type=checkbox]:checked + label { left: 1.4rem; background: lightgreen; }
 
@@ -45,6 +48,7 @@ class Menu {
     this.menu = createElem(parent);
     this._id = 0;
     this._maxFPS = 0;
+    this.hidden = 0;
   }
 
   get newID() {
@@ -64,9 +68,22 @@ class Menu {
     return this.menu.offsetHeight;
   }
 
+  async addTitle(title) {
+    const el = document.createElement('div');
+    el.className = 'menu-title';
+    el.id = this.newID;
+    el.innerHTML = title;
+    this.menu.appendChild(el);
+    el.addEventListener('click', () => {
+      this.hidden = !this.hidden;
+      const all = document.getElementsByClassName('menu');
+      for (const item of all) item.style.display = this.hidden ? 'none' : 'flex';
+    });
+  }
+
   async addLabel(title) {
     const el = document.createElement('div');
-    el.className = 'menu menu-title';
+    el.className = 'menu menu-label';
     el.id = this.newID;
     el.innerHTML = title;
     this.menu.appendChild(el);
|
||||||
async addBool(title, object, variable, callback) {
|
async addBool(title, object, variable, callback) {
|
||||||
const el = document.createElement('div');
|
const el = document.createElement('div');
|
||||||
el.className = 'menu';
|
el.className = 'menu';
|
||||||
el.innerHTML = `<div class="menu-checkbox"><input class="menu-checkbox" type="checkbox" id="${this.newID}" ${object[variable] ? 'checked' : ''}/><label class="menu-label" for="${this.ID}"></label></div>${title}`;
|
el.innerHTML = `<div class="menu-checkbox"><input class="menu-checkbox" type="checkbox" id="${this.newID}" ${object[variable] ? 'checked' : ''}/><label class="menu-checkbox-label" for="${this.ID}"></label></div>${title}`;
|
||||||
this.menu.appendChild(el);
|
this.menu.appendChild(el);
|
||||||
document.getElementById(this.ID).addEventListener('change', (evt) => {
|
el.addEventListener('change', (evt) => {
|
||||||
object[variable] = evt.target.checked;
|
object[variable] = evt.target.checked;
|
||||||
if (callback) callback(evt.target.checked);
|
if (callback) callback(evt.target.checked);
|
||||||
});
|
});
|
||||||
|
@@ -88,7 +105,7 @@ class Menu {
     el.className = 'menu';
     el.innerHTML = `<input class="menu-range" type="range" id="${this.newID}" min="${min}" max="${max}" step="${step}" value="${object[variable]}">${title}`;
     this.menu.appendChild(el);
-    document.getElementById(this.ID).addEventListener('change', (evt) => {
+    el.addEventListener('change', (evt) => {
       object[variable] = evt.target.value;
       evt.target.setAttribute('value', evt.target.value);
       if (callback) callback(evt.target.value);
@@ -106,11 +123,14 @@ class Menu {
   async addButton(titleOn, titleOff, callback) {
     const el = document.createElement('button');
     el.className = 'menu menu-button';
+    el.style.fontFamily = document.body.style.fontFamily;
+    el.style.fontSize = document.body.style.fontSize;
+    el.style.fontVariant = document.body.style.fontVariant;
     el.type = 'button';
     el.id = this.newID;
     el.innerText = titleOn;
     this.menu.appendChild(el);
-    document.getElementById(this.ID).addEventListener('click', () => {
+    el.addEventListener('click', () => {
       if (el.innerText === titleOn) el.innerText = titleOff;
       else el.innerText = titleOn;
       if (callback) callback(el.innerText !== titleOn);
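For reference, a small usage sketch of the reworked Menu class; `addTitle` is the new collapsible header added above, while the `ui` object and the button callback body are illustrative:

```js
// Sketch: the title row toggles visibility of every .menu row when clicked.
const menu = new Menu(document.body);
menu.addTitle('Menu');                              // click to collapse or expand
menu.addBool('Fill Polygons', ui, 'fillPolygons');  // as used in demo/browser.js
menu.addButton('Start Video', 'Pause Video', (playing) => {
  // callback receives true when the button has flipped to its "off" title
  if (playing) detectVideo();
});
```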
demo/node.js (20 changes)
@@ -27,21 +27,15 @@ const config = {
   backend: 'tensorflow',
   console: true,
   face: {
-    enabled: false,
-    detector: { modelPath: 'file://models/blazeface/model.json', inputSize: 128, maxFaces: 10, skipFrames: 10, minConfidence: 0.8, iouThreshold: 0.3, scoreThreshold: 0.75 },
-    mesh: { enabled: true, modelPath: 'file://models/facemesh/model.json', inputSize: 192 },
-    iris: { enabled: true, modelPath: 'file://models/iris/model.json', inputSize: 192 },
-    age: { enabled: true, modelPath: 'file://models/ssrnet-age/imdb/model.json', inputSize: 64, skipFrames: 5 },
-    gender: { enabled: true, modelPath: 'file://models/ssrnet-gender/imdb/model.json' },
+    detector: { modelPath: 'file://models/blazeface/back/model.json' },
+    mesh: { modelPath: 'file://models/facemesh/model.json' },
+    iris: { modelPath: 'file://models/iris/model.json' },
+    age: { modelPath: 'file://models/ssrnet-age/imdb/model.json' },
+    gender: { modelPath: 'file://models/ssrnet-gender/imdb/model.json' },
+    emotion: { modelPath: 'file://models/emotion/model.json' },
   },
-  body: { enabled: true, modelPath: 'file://models/posenet/model.json', inputResolution: 257, outputStride: 16, maxDetections: 5, scoreThreshold: 0.75, nmsRadius: 20 },
+  body: { modelPath: 'file://models/posenet/model.json' },
   hand: {
-    enabled: false,
-    inputSize: 256,
-    skipFrames: 10,
-    minConfidence: 0.8,
-    iouThreshold: 0.3,
-    scoreThreshold: 0.75,
     detector: { anchors: 'file://models/handdetect/anchors.json', modelPath: 'file://models/handdetect/model.json' },
     skeleton: { modelPath: 'file://models/handskeleton/model.json' },
   },
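The demo config can shrink like this because every unspecified value now comes from the built-in defaults via deep merge (the README hunk above notes that "user object and default configuration are merged using deep-merge"). The real `mergeDeep` lives in the library source; this is only an illustrative sketch of the semantics the sparse config relies on:

```js
// Sketch: nested objects merge recursively, later arguments win on conflicts.
function mergeDeep(...objects) {
  return objects.reduce((prev, obj) => {
    for (const key of Object.keys(obj || {})) {
      const pVal = prev[key];
      const oVal = obj[key];
      const bothObjects = pVal && oVal && typeof pVal === 'object' && typeof oVal === 'object'
        && !Array.isArray(pVal) && !Array.isArray(oVal);
      prev[key] = bothObjects ? mergeDeep(pVal, oVal) : oVal;
    }
    return prev;
  }, {});
}

// A sparse user config only overrides what differs from defaults, e.g.:
// mergeDeep(defaults, { face: { detector: { modelPath: 'file://models/blazeface/back/model.json' } } })
```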
@@ -531,6 +531,7 @@ var require_pipeline = __commonJS((exports2) => {
   async predict(input, config2) {
     this.skipFrames = config2.detector.skipFrames;
     this.maxFaces = config2.detector.maxFaces;
+    this.runsWithoutFaceDetector++;
     if (this.shouldUpdateRegionsOfInterest()) {
       const detector = await this.boundingBoxDetector.getBoundingBoxes(input);
       if (detector.boxes.length === 0) {
@@ -557,8 +558,6 @@ var require_pipeline = __commonJS((exports2) => {
       });
       this.updateRegionsOfInterest(scaledBoxes);
       this.runsWithoutFaceDetector = 0;
-    } else {
-      this.runsWithoutFaceDetector++;
     }
     const results = tf2.tidy(() => this.regionsOfInterest.map((box, i) => {
       let angle = 0;
@@ -664,12 +663,9 @@ var require_pipeline = __commonJS((exports2) => {
     }
   }
   shouldUpdateRegionsOfInterest() {
-    const roisCount = this.regionsOfInterest.length;
-    const noROIs = roisCount === 0;
-    if (this.maxFaces === 1 || noROIs) {
-      return noROIs;
-    }
-    return roisCount !== this.maxFaces && this.runsWithoutFaceDetector >= this.skipFrames;
+    if (this.regionsOfInterest.length === 0)
+      return true;
+    return this.regionsOfInterest.length !== this.maxFaces && this.runsWithoutFaceDetector >= this.skipFrames;
   }
   calculateLandmarksBoundingBox(landmarks) {
     const xs = landmarks.map((d) => d[0]);
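The rewritten predicate reads as a two-step policy: always run the detector when nothing is tracked yet, otherwise only when the tracked count differs from `maxFaces` and at least `skipFrames` frames have elapsed. As a standalone sketch (field names mirror the pipeline above):

```js
// Sketch of the ROI refresh policy implemented by this hunk.
function shouldRefresh({ regionsOfInterest, maxFaces, runsWithoutFaceDetector, skipFrames }) {
  if (regionsOfInterest.length === 0) return true; // nothing tracked: must detect
  return regionsOfInterest.length !== maxFaces && runsWithoutFaceDetector >= skipFrames;
}
```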
@@ -3900,13 +3896,11 @@ var require_ssrnet = __commonJS((exports2) => {
     return models2.gender;
   }
   async function predict(image, config2) {
-    if (frame > config2.face.age.skipFrames) {
-      frame = 0;
-    } else {
+    if (frame < config2.face.age.skipFrames) {
       frame += 1;
-    }
-    if (frame === 0)
       return last;
+    }
+    frame = 0;
     let enhance;
     if (image instanceof tf2.Tensor) {
       const resize = tf2.image.resizeBilinear(image, [config2.face.age.inputSize, config2.face.age.inputSize], false);
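The age predictor now uses a plain frame-counting cache: return the previous result for up to `skipFrames` calls, then recompute. The emotion module below gets the identical rewrite. A standalone sketch of the pattern (the `runModel` callback and the guard on `last` are illustrative additions, not part of the bundled code):

```js
// Sketch: reuse a cached prediction for `skipFrames` consecutive calls.
let frame = 0;
let last = null;
async function cachedPredict(image, skipFrames, runModel) {
  if (last && frame < skipFrames) {
    frame += 1;
    return last;       // serve the cached result
  }
  frame = 0;           // counter elapsed (or nothing cached yet): recompute
  last = await runModel(image);
  return last;
}
```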
@@ -3970,11 +3964,11 @@ var require_emotion = __commonJS((exports2) => {
     return models2.emotion;
   }
   async function predict(image, config2) {
+    if (frame < config2.face.emotion.skipFrames) {
       frame += 1;
-    if (frame >= config2.face.emotion.skipFrames) {
-      frame = 0;
       return last;
     }
+    frame = 0;
     const enhance = tf2.tidy(() => {
       if (image instanceof tf2.Tensor) {
         const resize = tf2.image.resizeBilinear(image, [config2.face.emotion.inputSize, config2.face.emotion.inputSize], false);
@@ -4895,6 +4889,7 @@ var require_pipeline2 = __commonJS((exports2) => {
     this.maxContinuousChecks = config2.skipFrames;
     this.detectionConfidence = config2.minConfidence;
     this.maxHands = config2.maxHands;
+    this.runsWithoutHandDetector++;
     const useFreshBox = this.shouldUpdateRegionsOfInterest();
     if (useFreshBox === true) {
       const boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image, config2);
@@ -4903,8 +4898,6 @@ var require_pipeline2 = __commonJS((exports2) => {
         this.updateRegionsOfInterest(boundingBoxPredictions[i], true, i);
       }
       this.runsWithoutHandDetector = 0;
-    } else {
-      this.runsWithoutHandDetector++;
     }
     const hands = [];
     if (!this.regionsOfInterest)
@@ -4983,7 +4976,7 @@ var require_pipeline2 = __commonJS((exports2) => {
     }
   }
   shouldUpdateRegionsOfInterest() {
-    return !this.regionsOfInterest || this.regionsOfInterest.length === 0 || this.runsWithoutHandDetector >= this.maxContinuousChecks;
+    return !this.regionsOfInterest || this.regionsOfInterest.length === 0 || this.runsWithoutHandDetector >= this.skipFrames;
   }
 }
 exports2.HandPipeline = HandPipeline;
@@ -5000,7 +4993,7 @@ var require_handpose = __commonJS((exports2) => {
     this.pipeline = pipeline;
   }
   async estimateHands(input, config2) {
-    this.maxContinuousChecks = config2.skipFrames;
+    this.skipFrames = config2.skipFrames;
     this.detectionConfidence = config2.minConfidence;
     this.maxHands = config2.maxHands;
     const image = tf2.tidy(() => {
@@ -5138,7 +5131,7 @@ var require_config = __commonJS((exports2) => {
 var require_package = __commonJS((exports2, module2) => {
   module2.exports = {
     name: "@vladmandic/human",
-    version: "0.3.6",
+    version: "0.3.8",
     description: "human: 3D Face Detection, Iris Tracking and Age & Gender Prediction",
     sideEffects: false,
     main: "dist/human.cjs",
@@ -5175,12 +5168,12 @@ var require_package = __commonJS((exports2, module2) => {
       rimraf: "^3.0.2"
     },
     scripts: {
-      start: "node --trace-warnings --trace-uncaught --no-deprecation demo/node.js",
+      start: "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation demo/node.js",
       lint: "eslint src/*.js demo/*.js",
-      "build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --metafile=dist/human.json --outfile=dist/human.js src/index.js",
-      "build-esm-bundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/index.js",
-      "build-esm-nobundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:@tensorflow --external:fs --metafile=dist/human.esm-nobundle.json --outfile=dist/human.esm-nobundle.js src/index.js",
-      "build-node": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --metafile=dist/human.cjs.json --outfile=dist/human.cjs src/index.js",
+      "build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --metafile=dist/human.json --outfile=dist/human.js src/human.js",
+      "build-esm-bundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/human.js",
+      "build-esm-nobundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:@tensorflow --external:fs --metafile=dist/human.esm-nobundle.json --outfile=dist/human.esm-nobundle.js src/human.js",
+      "build-node": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --metafile=dist/human.cjs.json --outfile=dist/human.cjs src/human.js",
       build: "rimraf dist/* && npm run build-iife && npm run build-esm-bundle && npm run build-esm-nobundle && npm run build-node && ls -l dist/",
       update: "npm update --depth 20 && npm dedupe && npm prune && npm audit",
       changelog: "node changelog.js"
@@ -5200,7 +5193,7 @@ var require_package = __commonJS((exports2, module2) => {
   };
 });
 
-// src/index.js
+// src/human.js
 const tf = require("@tensorflow/tfjs");
 const facemesh = require_facemesh();
 const ssrnet = require_ssrnet();
@@ -5220,6 +5213,10 @@ const models = {
   gender: null,
   emotion: null
 };
+const override = {
+  face: {detector: {skipFrames: 0}, age: {skipFrames: 0}, emotion: {skipFrames: 0}},
+  hand: {skipFrames: 0}
+};
 const now = () => {
   if (typeof performance !== "undefined")
     return performance.now();
@@ -5261,11 +5258,18 @@ function mergeDeep(...objects) {
 function sanity(input) {
   if (!input)
     return "input is not defined";
+  if (tf.ENV.flags.IS_BROWSER && (input instanceof ImageData || input instanceof HTMLImageElement || input instanceof HTMLCanvasElement || input instanceof HTMLVideoElement || input instanceof HTMLMediaElement)) {
     const width = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
     if (!width || width === 0)
       return "input is empty";
+  }
+  if (tf.ENV.flags.IS_BROWSER && (input instanceof HTMLVideoElement || input instanceof HTMLMediaElement)) {
     if (input.readyState && input.readyState <= 2)
       return "input is not ready";
+  }
+  if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {
+    return "input must be a tensor";
+  }
   try {
     tf.getBackend();
   } catch {
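With the new `IS_NODE` branch, NodeJS callers must hand `detect` a tensor rather than a DOM element. A hedged sketch of preparing such an input with @tensorflow/tfjs-node (`decodeImage` is a tfjs-node API; the file path and the final shape handling are illustrative):

```js
const tf = require('@tensorflow/tfjs-node');
const fs = require('fs');

// NodeJS inputs must already be tensors, so decode the image file first.
const buffer = fs.readFileSync('sample.jpg');   // illustrative path
const tensor = tf.node.decodeImage(buffer, 3);  // height x width x 3 uint8 tensor
// pass `tensor` (possibly expandDims(0)-ed, depending on what the models expect) to human.detect()
```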
@@ -5294,7 +5298,8 @@ async function detect(input, userConfig = {}) {
   const perf = {};
   let timeStamp;
   timeStamp = now();
-  config = mergeDeep(defaults, userConfig);
+  const shouldOverride = tf.ENV.flags.IS_NODE || tf.ENV.flags.IS_BROWSER && !(input instanceof HTMLVideoElement || input instanceof HTMLMediaElement);
+  config = mergeDeep(defaults, userConfig, shouldOverride ? override : {});
   perf.config = Math.trunc(now() - timeStamp);
   timeStamp = now();
   state = "check";
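Taken together with the `override` object above, this is the whole "autodetect skipFrames" mechanism: a predicate over the input type picks whether the zeroing override joins the deep merge. A condensed sketch (`mergeDeep` as in the bundle; the `tf.ENV` flag checks are abbreviated to a browser-only test):

```js
// Condensed sketch of the commit's core idea.
const override = {
  face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, emotion: { skipFrames: 0 } },
  hand: { skipFrames: 0 },
};

function buildConfig(defaults, userConfig, input) {
  const isVideo = typeof HTMLVideoElement !== 'undefined'
    && (input instanceof HTMLVideoElement || input instanceof HTMLMediaElement);
  // NodeJS, or any non-video browser input, gets skipFrames forced to 0
  return mergeDeep(defaults, userConfig, isVideo ? {} : override);
}
```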
@@ -1,15 +1,15 @@
 {
   "inputs": {
     "config.js": {
-      "bytes": 4774,
+      "bytes": 4862,
       "imports": []
     },
     "package.json": {
-      "bytes": 2605,
+      "bytes": 2635,
       "imports": []
     },
     "src/emotion/emotion.js": {
-      "bytes": 2020,
+      "bytes": 2019,
       "imports": []
     },
     "src/facemesh/blazeface.js": {
@@ -45,7 +45,7 @@
       "imports": []
     },
     "src/facemesh/pipeline.js": {
-      "bytes": 14393,
+      "bytes": 14262,
       "imports": [
         {
           "path": "src/facemesh/box.js"
@@ -83,7 +83,7 @@
       ]
     },
     "src/handpose/handpose.js": {
-      "bytes": 2365,
+      "bytes": 2356,
       "imports": [
         {
           "path": "src/handpose/handdetector.js"
@@ -101,7 +101,7 @@
       "imports": []
     },
     "src/handpose/pipeline.js": {
-      "bytes": 8202,
+      "bytes": 8178,
       "imports": [
         {
           "path": "src/handpose/box.js"
@@ -115,8 +115,8 @@
       "bytes": 2488,
       "imports": []
     },
-    "src/index.js": {
-      "bytes": 7526,
+    "src/human.js": {
+      "bytes": 8299,
       "imports": [
         {
           "path": "src/facemesh/facemesh.js"
@@ -245,7 +245,7 @@
       ]
     },
     "src/ssrnet/ssrnet.js": {
-      "bytes": 1965,
+      "bytes": 1937,
       "imports": []
     }
   },
@@ -253,7 +253,7 @@
   "dist/human.cjs.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 219894
+    "bytes": 220934
   },
   "dist/human.cjs": {
     "imports": [],
@@ -271,7 +271,7 @@
       "bytesInOutput": 3027
     },
     "src/facemesh/pipeline.js": {
-      "bytesInOutput": 13366
+      "bytesInOutput": 13270
     },
     "src/facemesh/uvcoords.js": {
       "bytesInOutput": 20586
@@ -283,10 +283,10 @@
       "bytesInOutput": 2950
     },
     "src/ssrnet/ssrnet.js": {
-      "bytesInOutput": 2194
+      "bytesInOutput": 2158
     },
     "src/emotion/emotion.js": {
-      "bytesInOutput": 2134
+      "bytesInOutput": 2133
     },
     "src/posenet/modelBase.js": {
       "bytesInOutput": 1120
@@ -334,22 +334,22 @@
       "bytesInOutput": 2671
     },
     "src/handpose/pipeline.js": {
-      "bytesInOutput": 7651
+      "bytesInOutput": 7625
     },
     "src/handpose/handpose.js": {
-      "bytesInOutput": 2518
+      "bytesInOutput": 2509
     },
     "config.js": {
       "bytesInOutput": 1872
     },
     "package.json": {
-      "bytesInOutput": 2748
+      "bytesInOutput": 2778
     },
-    "src/index.js": {
-      "bytesInOutput": 6514
+    "src/human.js": {
+      "bytesInOutput": 7273
     }
   },
-  "bytes": 134107
+  "bytes": 134728
 }
 }
 }
File diff suppressed because one or more lines are too long
@@ -1,15 +1,15 @@
 {
   "inputs": {
     "config.js": {
-      "bytes": 4774,
+      "bytes": 4862,
       "imports": []
     },
     "package.json": {
-      "bytes": 2605,
+      "bytes": 2635,
       "imports": []
     },
     "src/emotion/emotion.js": {
-      "bytes": 2020,
+      "bytes": 2019,
       "imports": []
     },
     "src/facemesh/blazeface.js": {
@@ -45,7 +45,7 @@
       "imports": []
     },
     "src/facemesh/pipeline.js": {
-      "bytes": 14393,
+      "bytes": 14262,
       "imports": [
         {
           "path": "src/facemesh/box.js"
@@ -83,7 +83,7 @@
       ]
     },
     "src/handpose/handpose.js": {
-      "bytes": 2365,
+      "bytes": 2356,
       "imports": [
         {
           "path": "src/handpose/handdetector.js"
@@ -101,7 +101,7 @@
       "imports": []
     },
     "src/handpose/pipeline.js": {
-      "bytes": 8202,
+      "bytes": 8178,
       "imports": [
         {
           "path": "src/handpose/box.js"
@@ -115,8 +115,8 @@
       "bytes": 2488,
       "imports": []
     },
-    "src/index.js": {
-      "bytes": 7526,
+    "src/human.js": {
+      "bytes": 8299,
       "imports": [
         {
           "path": "src/facemesh/facemesh.js"
@@ -245,7 +245,7 @@
       ]
     },
     "src/ssrnet/ssrnet.js": {
-      "bytes": 1965,
+      "bytes": 1937,
       "imports": []
     }
   },
@@ -253,7 +253,7 @@
   "dist/human.esm-nobundle.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 198188
+    "bytes": 199199
   },
   "dist/human.esm-nobundle.js": {
     "imports": [],
@@ -271,7 +271,7 @@
       "bytesInOutput": 1176
     },
     "src/facemesh/pipeline.js": {
-      "bytesInOutput": 5602
+      "bytesInOutput": 5593
     },
     "src/facemesh/uvcoords.js": {
       "bytesInOutput": 16790
@@ -283,10 +283,10 @@
       "bytesInOutput": 1391
     },
     "src/ssrnet/ssrnet.js": {
-      "bytesInOutput": 1149
+      "bytesInOutput": 1142
     },
     "src/emotion/emotion.js": {
-      "bytesInOutput": 1148
+      "bytesInOutput": 1147
     },
     "src/posenet/modelBase.js": {
       "bytesInOutput": 597
@@ -334,22 +334,22 @@
       "bytesInOutput": 984
     },
     "src/handpose/pipeline.js": {
-      "bytesInOutput": 3232
+      "bytesInOutput": 3218
     },
     "src/handpose/handpose.js": {
-      "bytesInOutput": 1326
+      "bytesInOutput": 1317
     },
     "config.js": {
       "bytesInOutput": 1146
     },
     "package.json": {
-      "bytesInOutput": 2275
+      "bytesInOutput": 2305
     },
-    "src/index.js": {
-      "bytesInOutput": 3564
+    "src/human.js": {
+      "bytesInOutput": 4135
     }
   },
-  "bytes": 69404
+  "bytes": 69965
 }
 }
 }
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
   "inputs": {
     "config.js": {
-      "bytes": 4774,
+      "bytes": 4862,
       "imports": []
     },
     "node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@@ -149,11 +149,11 @@
       ]
     },
     "package.json": {
-      "bytes": 2605,
+      "bytes": 2635,
       "imports": []
     },
     "src/emotion/emotion.js": {
-      "bytes": 2020,
+      "bytes": 2019,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -204,7 +204,7 @@
       "imports": []
     },
     "src/facemesh/pipeline.js": {
-      "bytes": 14393,
+      "bytes": 14262,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -252,7 +252,7 @@
       ]
     },
     "src/handpose/handpose.js": {
-      "bytes": 2365,
+      "bytes": 2356,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -273,7 +273,7 @@
       "imports": []
     },
     "src/handpose/pipeline.js": {
-      "bytes": 8202,
+      "bytes": 8178,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -290,8 +290,8 @@
       "bytes": 2488,
       "imports": []
     },
-    "src/index.js": {
-      "bytes": 7526,
+    "src/human.js": {
+      "bytes": 8299,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -436,7 +436,7 @@
       ]
     },
     "src/ssrnet/ssrnet.js": {
-      "bytes": 1965,
+      "bytes": 1937,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -464,7 +464,7 @@
   "dist/human.esm.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 4959239
+    "bytes": 4960250
   },
   "dist/human.esm.js": {
     "imports": [],
@@ -539,7 +539,7 @@
       "bytesInOutput": 1195
     },
     "src/facemesh/pipeline.js": {
-      "bytesInOutput": 5577
+      "bytesInOutput": 5568
     },
     "src/facemesh/uvcoords.js": {
       "bytesInOutput": 16791
@@ -551,10 +551,10 @@
       "bytesInOutput": 1376
     },
     "src/ssrnet/ssrnet.js": {
-      "bytesInOutput": 1150
+      "bytesInOutput": 1143
     },
     "src/emotion/emotion.js": {
-      "bytesInOutput": 1143
+      "bytesInOutput": 1142
     },
     "src/posenet/modelBase.js": {
       "bytesInOutput": 575
@@ -602,22 +602,22 @@
       "bytesInOutput": 993
     },
     "src/handpose/pipeline.js": {
-      "bytesInOutput": 3228
+      "bytesInOutput": 3214
     },
     "src/handpose/handpose.js": {
-      "bytesInOutput": 1312
+      "bytesInOutput": 1303
     },
     "config.js": {
       "bytesInOutput": 1147
     },
     "package.json": {
-      "bytesInOutput": 2276
+      "bytesInOutput": 2306
     },
-    "src/index.js": {
-      "bytesInOutput": 3669
+    "src/human.js": {
+      "bytesInOutput": 4246
     }
   },
-  "bytes": 1106324
+  "bytes": 1106891
 }
 }
 }
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
   "inputs": {
     "config.js": {
-      "bytes": 4774,
+      "bytes": 4862,
       "imports": []
     },
     "node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@@ -149,11 +149,11 @@
       ]
     },
     "package.json": {
-      "bytes": 2605,
+      "bytes": 2635,
       "imports": []
     },
     "src/emotion/emotion.js": {
-      "bytes": 2020,
+      "bytes": 2019,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -204,7 +204,7 @@
       "imports": []
     },
     "src/facemesh/pipeline.js": {
-      "bytes": 14393,
+      "bytes": 14262,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -252,7 +252,7 @@
       ]
     },
     "src/handpose/handpose.js": {
-      "bytes": 2365,
+      "bytes": 2356,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -273,7 +273,7 @@
       "imports": []
     },
     "src/handpose/pipeline.js": {
-      "bytes": 8202,
+      "bytes": 8178,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -290,8 +290,8 @@
       "bytes": 2488,
       "imports": []
     },
-    "src/index.js": {
-      "bytes": 7526,
+    "src/human.js": {
+      "bytes": 8299,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -436,7 +436,7 @@
       ]
     },
     "src/ssrnet/ssrnet.js": {
-      "bytes": 1965,
+      "bytes": 1937,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -464,7 +464,7 @@
   "dist/human.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 4959239
+    "bytes": 4960250
   },
   "dist/human.js": {
     "imports": [],
@@ -539,7 +539,7 @@
       "bytesInOutput": 1195
     },
     "src/facemesh/pipeline.js": {
-      "bytesInOutput": 5577
+      "bytesInOutput": 5568
     },
     "src/facemesh/uvcoords.js": {
       "bytesInOutput": 16791
@@ -551,10 +551,10 @@
       "bytesInOutput": 1376
     },
     "src/ssrnet/ssrnet.js": {
-      "bytesInOutput": 1150
+      "bytesInOutput": 1143
     },
     "src/emotion/emotion.js": {
-      "bytesInOutput": 1143
+      "bytesInOutput": 1142
     },
     "src/posenet/modelBase.js": {
       "bytesInOutput": 575
@@ -602,22 +602,22 @@
       "bytesInOutput": 993
     },
     "src/handpose/pipeline.js": {
-      "bytesInOutput": 3228
+      "bytesInOutput": 3214
     },
     "src/handpose/handpose.js": {
-      "bytesInOutput": 1312
+      "bytesInOutput": 1303
     },
     "config.js": {
       "bytesInOutput": 1147
     },
     "package.json": {
-      "bytesInOutput": 2276
+      "bytesInOutput": 2306
    },
-    "src/index.js": {
-      "bytesInOutput": 3669
+    "src/human.js": {
+      "bytesInOutput": 4246
     }
   },
-  "bytes": 1106333
+  "bytes": 1106900
 }
 }
 }
package.json (10 changes)
@ -37,12 +37,12 @@
|
||||||
"rimraf": "^3.0.2"
|
"rimraf": "^3.0.2"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"start": "node --trace-warnings --trace-uncaught --no-deprecation demo/node.js",
|
"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation demo/node.js",
|
||||||
"lint": "eslint src/*.js demo/*.js",
|
"lint": "eslint src/*.js demo/*.js",
|
||||||
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --metafile=dist/human.json --outfile=dist/human.js src/index.js",
|
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --metafile=dist/human.json --outfile=dist/human.js src/human.js",
|
||||||
"build-esm-bundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/index.js",
|
"build-esm-bundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/human.js",
|
||||||
"build-esm-nobundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:@tensorflow --external:fs --metafile=dist/human.esm-nobundle.json --outfile=dist/human.esm-nobundle.js src/index.js",
|
"build-esm-nobundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:@tensorflow --external:fs --metafile=dist/human.esm-nobundle.json --outfile=dist/human.esm-nobundle.js src/human.js",
|
||||||
"build-node": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --metafile=dist/human.cjs.json --outfile=dist/human.cjs src/index.js",
|
"build-node": "esbuild --bundle --platform=node --sourcemap --target=esnext --format=cjs --external:@tensorflow --metafile=dist/human.cjs.json --outfile=dist/human.cjs src/human.js",
|
||||||
"build": "rimraf dist/* && npm run build-iife && npm run build-esm-bundle && npm run build-esm-nobundle && npm run build-node && ls -l dist/",
|
"build": "rimraf dist/* && npm run build-iife && npm run build-esm-bundle && npm run build-esm-nobundle && npm run build-node && ls -l dist/",
|
||||||
"update": "npm update --depth 20 && npm dedupe && npm prune && npm audit",
|
"update": "npm update --depth 20 && npm dedupe && npm prune && npm audit",
|
||||||
"changelog": "node changelog.js"
|
"changelog": "node changelog.js"
|
||||||
|
|
|
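One behavioral note on the added `--unhandled-rejections=strict` flag: it makes NodeJS treat any unhandled promise rejection in the demo as an uncaught exception, so a failed async call terminates the process instead of merely logging a warning. A minimal illustration with a hypothetical failure:

```js
// node --unhandled-rejections=strict demo.js
// With the flag, this crashes the process immediately;
// without it, NodeJS only logs an UnhandledPromiseRejectionWarning.
Promise.reject(new Error('model failed to load'));
```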
src/emotion/emotion.js

@@ -22,11 +22,11 @@ async function load(config) {
 }
 
 async function predict(image, config) {
-  frame += 1;
-  if (frame >= config.face.emotion.skipFrames) {
-    frame = 0;
+  if (frame < config.face.emotion.skipFrames) {
+    frame += 1;
     return last;
   }
+  frame = 0;
   const enhance = tf.tidy(() => {
     if (image instanceof tf.Tensor) {
       const resize = tf.image.resizeBilinear(image, [config.face.emotion.inputSize, config.face.emotion.inputSize], false);
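The previous skip logic was inverted: it ran full inference on most frames and returned the cached result only once per cycle. The corrected version returns the cached `last` result while fewer than `skipFrames` frames have elapsed, then resets the counter and re-runs the model. The same pattern as a standalone sketch, where `predictCached` and `runModel` are hypothetical stand-ins for the module's predict function and actual inference call:

```js
// Frame-skip cache pattern shared by the emotion and age/gender modules
let frame = 0; // frames since the last full inference
let last = []; // cached result served on skipped frames

async function predictCached(image, skipFrames, runModel) {
  if (frame < skipFrames) {
    frame += 1;
    return last; // video input: the previous result is still a good approximation
  }
  frame = 0;
  last = await runModel(image); // full inference
  return last;
}
```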
src/facemesh/pipeline.js

@@ -131,8 +131,8 @@ class Pipeline {
   async predict(input, config) {
     this.skipFrames = config.detector.skipFrames;
     this.maxFaces = config.detector.maxFaces;
+    this.runsWithoutFaceDetector++;
     if (this.shouldUpdateRegionsOfInterest()) {
-      // const { boxes, scaleFactor } = await this.boundingBoxDetector.getBoundingBoxes(input);
       const detector = await this.boundingBoxDetector.getBoundingBoxes(input);
       if (detector.boxes.length === 0) {
         this.regionsOfInterest = [];
@@ -158,8 +158,6 @@ class Pipeline {
       });
       this.updateRegionsOfInterest(scaledBoxes);
       this.runsWithoutFaceDetector = 0;
-    } else {
-      this.runsWithoutFaceDetector++;
     }
     const results = tf.tidy(() => this.regionsOfInterest.map((box, i) => {
       let angle = 0;
@@ -272,12 +270,8 @@ class Pipeline {
   }
 
   shouldUpdateRegionsOfInterest() {
-    const roisCount = this.regionsOfInterest.length;
-    const noROIs = roisCount === 0;
-    if (this.maxFaces === 1 || noROIs) {
-      return noROIs;
-    }
-    return roisCount !== this.maxFaces && this.runsWithoutFaceDetector >= this.skipFrames;
+    if (this.regionsOfInterest.length === 0) return true; // nothing detected, so run detector on the next frame
+    return (this.regionsOfInterest.length !== this.maxFaces) && (this.runsWithoutFaceDetector >= this.skipFrames);
   }
 
   calculateLandmarksBoundingBox(landmarks) {
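Two things change here. Moving the `runsWithoutFaceDetector++` increment ahead of the decision removes the `else` branch, since the counter now always reflects the current frame. And `shouldUpdateRegionsOfInterest()` collapses to two rules: run the detector whenever nothing is tracked, otherwise only when the tracked-face count differs from `maxFaces` and at least `skipFrames` frames have passed. A standalone harness traces the behavior; `shouldUpdate` is a hypothetical re-implementation for illustration, not the library's API:

```js
function shouldUpdate(roisCount, maxFaces, runsWithoutDetector, skipFrames) {
  if (roisCount === 0) return true; // nothing tracked: run the detector
  return (roisCount !== maxFaces) && (runsWithoutDetector >= skipFrames);
}

console.log(shouldUpdate(0, 2, 0, 10));  // true:  cold start
console.log(shouldUpdate(1, 2, 5, 10));  // false: keep reusing the tracked box
console.log(shouldUpdate(1, 2, 10, 10)); // true:  re-scan for the missing second face
console.log(shouldUpdate(2, 2, 99, 10)); // false: already tracking maxFaces faces
```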
src/handpose/handpose.js

@@ -9,7 +9,7 @@ class HandPose {
   }
 
   async estimateHands(input, config) {
-    this.maxContinuousChecks = config.skipFrames;
+    this.skipFrames = config.skipFrames;
     this.detectionConfidence = config.minConfidence;
     this.maxHands = config.maxHands;
     const image = tf.tidy(() => {
src/handpose/pipeline.js

@@ -79,6 +79,7 @@ class HandPipeline {
     this.maxContinuousChecks = config.skipFrames;
     this.detectionConfidence = config.minConfidence;
     this.maxHands = config.maxHands;
+    this.runsWithoutHandDetector++;
     const useFreshBox = this.shouldUpdateRegionsOfInterest();
     if (useFreshBox === true) {
       const boundingBoxPredictions = await this.boundingBoxDetector.estimateHandBounds(image, config);
@@ -87,8 +88,6 @@ class HandPipeline {
         this.updateRegionsOfInterest(boundingBoxPredictions[i], true /* force update */, i);
       }
       this.runsWithoutHandDetector = 0;
-    } else {
-      this.runsWithoutHandDetector++;
     }
     // Rotate input so the hand is vertically oriented.
     const hands = [];
@@ -172,7 +171,7 @@ class HandPipeline {
   }
 
   shouldUpdateRegionsOfInterest() {
-    return !this.regionsOfInterest || (this.regionsOfInterest.length === 0) || (this.runsWithoutHandDetector >= this.maxContinuousChecks);
+    return !this.regionsOfInterest || (this.regionsOfInterest.length === 0) || (this.runsWithoutHandDetector >= this.skipFrames);
   }
 }
 exports.HandPipeline = HandPipeline;
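The hand pipeline gets the matching treatment: the counter increments unconditionally before the decision, the `else` branch disappears, and the comparison reads the renamed `skipFrames` property instead of `maxContinuousChecks`. The resulting detect-then-track cadence, sketched as a self-contained gate (`DetectorGate` is a hypothetical class for illustration, not the library's API):

```js
class DetectorGate {
  constructor(skipFrames) {
    this.skipFrames = skipFrames;
    this.runsWithoutDetector = 0;
    this.tracking = false;
  }
  shouldScan() {
    this.runsWithoutDetector++; // count the current frame up front
    const scan = !this.tracking || (this.runsWithoutDetector >= this.skipFrames);
    if (scan) {
      this.runsWithoutDetector = 0; // reset only when the detector actually runs
      this.tracking = true;
    }
    return scan;
  }
}

const gate = new DetectorGate(3);
console.log([1, 2, 3, 4, 5, 6].map(() => gate.shouldScan())); // [ true, false, false, true, false, false ]
```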
src/human.js (renamed from src/index.js)

@@ -21,6 +21,11 @@ const models = {
   emotion: null,
 };
 
+const override = {
+  face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, emotion: { skipFrames: 0 } },
+  hand: { skipFrames: 0 },
+};
+
 // helper function: gets elapsed time on both browser and nodejs
 const now = () => {
   if (typeof performance !== 'undefined') return performance.now();
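This `override` object is the heart of the skipFrames autodetection: frame skipping is only meaningful for video, so for static inputs every `skipFrames` value is forced to 0 by appending the override as the last argument of the deep merge in `detect()` (see the hunk below). A minimal sketch with a simplified merge, since the real `mergeDeep` in `src/human.js` differs in detail:

```js
// Simplified deep merge: later arguments win, nested objects merge recursively
function mergeDeep(...objects) {
  return objects.reduce((prev, obj) => {
    for (const key of Object.keys(obj || {})) {
      const nested = obj[key] && typeof obj[key] === 'object' && !Array.isArray(obj[key]);
      prev[key] = nested ? mergeDeep(prev[key] || {}, obj[key]) : obj[key];
    }
    return prev;
  }, {});
}

const defaults = { face: { detector: { skipFrames: 10 } }, hand: { skipFrames: 10 } };
const override = { face: { detector: { skipFrames: 0 } }, hand: { skipFrames: 0 } };
const isStaticInput = true; // e.g. an HTMLImageElement or a tf.Tensor rather than a video
const config = mergeDeep(defaults, { /* userConfig */ }, isStaticInput ? override : {});
console.log(config.face.detector.skipFrames); // 0: the detector runs on every detect() call
```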
@@ -66,9 +71,16 @@ function mergeDeep(...objects) {
 
 function sanity(input) {
   if (!input) return 'input is not defined';
+  if (tf.ENV.flags.IS_BROWSER && (input instanceof ImageData || input instanceof HTMLImageElement || input instanceof HTMLCanvasElement || input instanceof HTMLVideoElement || input instanceof HTMLMediaElement)) {
     const width = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
     if (!width || (width === 0)) return 'input is empty';
+  }
+  if (tf.ENV.flags.IS_BROWSER && (input instanceof HTMLVideoElement || input instanceof HTMLMediaElement)) {
     if (input.readyState && (input.readyState <= 2)) return 'input is not ready';
+  }
+  if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {
+    return 'input must be a tensor';
+  }
   try {
     tf.getBackend();
   } catch {
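The extended `sanity()` checks also spell out the NodeJS contract: with no DOM elements to read pixels from, the input must already be a `tf.Tensor`. A sketch of preparing such an input with `tfjs-node` (the sample path is hypothetical):

```js
const tf = require('@tensorflow/tfjs-node');
const fs = require('fs');

const buffer = fs.readFileSync('samples/face.jpg'); // hypothetical sample image
const tensor = tf.node.decodeImage(buffer); // tf.Tensor3D: passes the IS_NODE check
// pass `tensor` to human.detect(tensor, config), then release its memory:
tensor.dispose();
```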
@@ -93,7 +105,8 @@ async function detect(input, userConfig = {}) {
   let timeStamp;
 
   timeStamp = now();
-  config = mergeDeep(defaults, userConfig);
+  const shouldOverride = tf.ENV.flags.IS_NODE || (tf.ENV.flags.IS_BROWSER && !((input instanceof HTMLVideoElement) || (input instanceof HTMLMediaElement)));
+  config = mergeDeep(defaults, userConfig, shouldOverride ? override : {});
   perf.config = Math.trunc(now() - timeStamp);
 
   // sanity checks
@@ -222,3 +235,5 @@ exports.handpose = handpose;
 exports.tf = tf;
 exports.version = app.version;
 exports.state = state;
+
+// Error: Failed to compile fragment shader
src/ssrnet/ssrnet.js

@@ -22,12 +22,11 @@ async function loadGender(config) {
 }
 
 async function predict(image, config) {
-  if (frame > config.face.age.skipFrames) {
-    frame = 0;
-  } else {
+  if (frame < config.face.age.skipFrames) {
     frame += 1;
+    return last;
   }
-  if (frame === 0) return last;
+  frame = 0;
   let enhance;
   if (image instanceof tf.Tensor) {
     const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);