mirror of https://github.com/vladmandic/human
staggered skipframes
parent 1f51188392
commit 1d0bff46ec
config.js
@@ -66,9 +66,10 @@ export default {
 // 'back' is optimized for distant faces
 inputSize: 256, // fixed value: 128 for front and 256 for 'back'
 rotation: false, // use best-guess rotated face image or just box with rotation as-is
 // false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
 maxFaces: 10, // maximum number of faces detected in the input
 // should be set to the minimum number for performance
-skipFrames: 15, // how many frames to go without re-running the face bounding box detector
+skipFrames: 20, // how many frames to go without re-running the face bounding box detector
 // only used for video inputs
 // e.g., if model is running at 25 FPS, we can re-use existing bounding
 // box for updated face analysis as the head probably hasn't moved much
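
The skipFrames values in these hunks control how many video frames pass before the heavy bounding-box detector runs again; in between, the previous result is reused for the downstream analysis. A minimal sketch of that pattern (not the library's actual internals; detector stands in for whatever model call produces the boxes):

// minimal sketch of the skip-frame pattern, not the library's actual internals
let skipped = 0;
let lastBoxes = [];
async function detectWithSkip(input, skipFrames, detector) {
  if (lastBoxes.length === 0 || skipped >= skipFrames) {
    lastBoxes = await detector(input); // expensive: re-run the bounding box model
    skipped = 0;
  } else {
    skipped++; // cheap: reuse the previous boxes, the subject has probably not moved much
  }
  return lastBoxes;
}
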
@@ -98,7 +99,7 @@ export default {
 modelPath: '../models/age-ssrnet-imdb.json', // can be 'age-ssrnet-imdb' or 'age-ssrnet-wiki'
 // which determines training set for model
 inputSize: 64, // fixed value
-skipFrames: 15, // how many frames to go without re-running the detector
+skipFrames: 41, // how many frames to go without re-running the detector
 // only used for video inputs
 },
@@ -107,7 +108,7 @@ export default {
 minConfidence: 0.1, // threshold for discarding a prediction
 modelPath: '../models/gender-ssrnet-imdb.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'
 inputSize: 64, // fixed value
-skipFrames: 15, // how many frames to go without re-running the detector
+skipFrames: 42, // how many frames to go without re-running the detector
 // only used for video inputs
 },
@@ -115,7 +116,7 @@ export default {
 enabled: true,
 inputSize: 64, // fixed value
 minConfidence: 0.2, // threshold for discarding a prediction
-skipFrames: 15, // how many frames to go without re-running the detector
+skipFrames: 21, // how many frames to go without re-running the detector
 modelPath: '../models/emotion-large.json', // can be 'mini', 'large'
 },
@@ -140,15 +141,15 @@ export default {
 hand: {
 enabled: true,
 inputSize: 256, // fixed value
-skipFrames: 15, // how many frames to go without re-running the hand bounding box detector
+skipFrames: 19, // how many frames to go without re-running the hand bounding box detector
 // only used for video inputs
 // e.g., if model is running at 25 FPS, we can re-use existing bounding
 // box for updated hand skeleton analysis as the hand probably
 // hasn't moved much in short time (10 * 1/25 = 0.4 sec)
-minConfidence: 0.5, // threshold for discarding a prediction
+minConfidence: 0.1, // threshold for discarding a prediction
 iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much
 // in non-maximum suppression
-scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on
+scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on
 // score in non-maximum suppression
 maxHands: 1, // maximum number of hands detected in the input
 // should be set to the minimum number for performance
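
Taken together, the new values (face 20, age 41, gender 42, emotion 21, hand 19) replace a uniform 15 and are staggered so the per-module re-detections no longer all land on the same frame. A small arithmetic check of that effect (illustration only, not library code):

// with a uniform skipFrames of 15, all five detectors came due together every 15th frame;
// with the staggered values they never align within a typical session
const staggered = { face: 20, age: 41, gender: 42, emotion: 21, hand: 19 };
const allDue = (cfg, frame) => Object.values(cfg).every((n) => frame % n === 0);
let aligned = 0;
for (let frame = 1; frame <= 10000; frame++) if (allDue(staggered, frame)) aligned++;
console.log('frames where every detector re-runs at once:', aligned); // 0 here; a uniform 15 would give 666
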
@@ -3,7 +3,10 @@ import draw from './draw.js';
 import Menu from './menu.js';
 import GLBench from './gl-bench.js';

-const userConfig = {}; // add any user configuration overrides
+// const userConfig = {}; // add any user configuration overrides
+const userConfig = {
+  async: false,
+};

 const human = new Human(userConfig);
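
With the override object now passed into the constructor, any of the config.js defaults shown above can be adjusted per instance in the same way. A sketch of such an override; the import path and the face.detector / face.age / hand nesting are assumptions based on the config fragments in this diff:

// sketch of per-instance overrides passed to the constructor (paths and nesting assumed)
import Human from '../dist/human.esm.js'; // illustrative path
const myHuman = new Human({
  async: false,
  face: { detector: { skipFrames: 20 }, age: { skipFrames: 41 } },
  hand: { skipFrames: 19, maxHands: 1 },
});
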
@@ -30,7 +33,7 @@ const ui = {
 console: true,
 maxFPSframes: 10,
 modelsPreload: true,
-modelsWarmup: true,
+modelsWarmup: false,
 menuWidth: 0,
 menuHeight: 0,
 camera: {},
@@ -41,7 +44,7 @@ const ui = {
 detectThread: null,
 framesDraw: 0,
 framesDetect: 0,
-bench: false,
+bench: true,
 };

 // global variables
@@ -283,6 +286,7 @@ function runHumanDetect(input, canvas, timestamp) {
 clearTimeout(ui.drawThread);
 ui.drawThread = null;
 log('frame statistics: process:', ui.framesDetect, 'refresh:', ui.framesDraw);
+log('memory', human.tf.engine().memory());
 return;
 }
 status('');
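
The added log('memory', human.tf.engine().memory()) line prints the TensorFlow.js engine memory counters (numTensors, numBytes, and so on) when detection stops, which makes tensor leaks visible across a run. A rough sketch of the same idea around a single detection pass, reusing the demo's log() and assuming the standard human.detect() call:

// sketch: snapshot TensorFlow.js tensor counts around one detection pass to spot leaks;
// human.tf is the tf namespace bundled with the library, as used in the added log line
const before = human.tf.engine().memory().numTensors;
await human.detect(input);
const after = human.tf.engine().memory().numTensors;
log('tensors before/after detect:', before, after, 'bytes now:', human.tf.engine().memory().numBytes);
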
@@ -44,7 +44,7 @@
 "eslint-plugin-promise": "^4.2.1",
 "rimraf": "^3.0.2",
 "seedrandom": "^3.0.5",
-"simple-git": "^2.25.0"
+"simple-git": "^2.26.0"
 },
 "scripts": {
 "start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",