mirror of https://github.com/vladmandic/human

commit 971f8508bb (parent f1a431f3ef): updated model defaults
@@ -76,10 +76,10 @@ export default {
 // e.g., if model is running st 25 FPS, we can re-use existing bounding
 // box for updated face analysis as the head probably hasn't moved much
 // in short time (10 * 1/25 = 0.25 sec)
-minConfidence: 0.5, // threshold for discarding a prediction
+minConfidence: 0.2, // threshold for discarding a prediction
 iouThreshold: 0.2, // threshold for deciding whether boxes overlap too much in
 // non-maximum suppression (0.1 means drop if overlap 10%)
-scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score
+scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score
 // in non-maximum suppression,
 // this is applied on detection objects only and before minConfidence
 },
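These detector thresholds feed standard non-maximum suppression. A minimal sketch of how such values are typically consumed via the tfjs NMS op (the boxes/scores tensors and the max-detections cap are placeholders, not variables from this repository):

import * as tf from '@tensorflow/tfjs';

// Illustrative only: how iouThreshold / scoreThreshold map onto tfjs non-maximum suppression.
async function filterDetections(boxes, scores) {
  // boxes: [numBoxes, 4] tensor of candidate bounding boxes; scores: [numBoxes] tensor of confidences
  const keep = await tf.image.nonMaxSuppressionAsync(
    boxes,
    scores,
    10,   // placeholder cap on how many boxes to keep
    0.2,  // iouThreshold: drop a box that overlaps an already-kept box by more than 20%
    0.2,  // scoreThreshold: drop boxes scoring below 0.2 before suppression
  );
  return keep.array(); // indices of the surviving boxes
}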
@@ -108,8 +108,8 @@ export default {

 gender: {
 enabled: true,
-minConfidence: 0.1, // threshold for discarding a prediction
-modelPath: '../models/gender-ssrnet-imdb.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'
+minConfidence: 0.4, // threshold for discarding a prediction
+modelPath: '../models/gender.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'
 inputSize: 64, // fixed value
 skipFrames: 41, // how many frames to go without re-running the detector
 // only used for video inputs
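The same defaults can be overridden from application code; a hypothetical user config, assuming the usual deep-merge of a user-supplied object over these defaults and that the gender block sits under face as in the library's config tree:

// Hypothetical override of the gender defaults above (field names mirror this config file):
const userConfig = {
  face: {
    gender: {
      enabled: true,
      minConfidence: 0.4,                 // discard predictions below 40% confidence
      modelPath: '../models/gender.json', // or the 'gender-ssrnet-imdb' / 'gender-ssrnet-wiki' variants
    },
  },
};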
@@ -36,7 +36,7 @@ const ui = {
 facing: true,
 useWorker: false,
 worker: 'worker.js',
-samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'],
+samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg', '../assets/sample7.jpg', '../assets/sample8.jpg'],
 compare: '../assets/sample-me.jpg',
 drawBoxes: true,
 drawPoints: false,
@@ -395,7 +395,7 @@ async function detectVideo() {
 async function detectSampleImages() {
 document.getElementById('play').style.display = 'none';
 userConfig.videoOptimized = false;
-const size = Math.trunc(window.devicePixelRatio * (8 + (4 * ui.columns)));
+const size = Math.trunc(window.devicePixelRatio * (12 + (4 * ui.columns)));
 ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);
 ui.baseLineHeight = size + 2;
 document.getElementById('canvas').style.display = 'none';
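The bumped constant simply makes the sample-image labels a bit larger; a worked example with assumed values (a 2x display and two columns):

// Illustrative values, not taken from the demo:
const devicePixelRatio = 2; // high-DPI display
const columns = 2;          // ui.columns
const size = Math.trunc(devicePixelRatio * (12 + (4 * columns))); // 2 * 20 = 40 -> 40px font
// with the previous constant 8 the same inputs gave Math.trunc(2 * (8 + 8)) = 32px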
@@ -219,8 +219,8 @@ class Human {
 log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);
 this.tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 0 : -1);
 }
-this.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
-this.tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
+// this.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
+// this.tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
 const gl = await this.tf.backend().getGPGPUContext().gl;
 log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
 }
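WEBGL_FORCE_F16_TEXTURES and WEBGL_PACK_DEPTHWISECONV are regular tfjs environment flags, so dropping them here does not stop an application from turning them back on; a sketch under the assumption that the webgl backend is active (the helper name is made up):

import * as tf from '@tensorflow/tfjs';

// Hypothetical helper re-applying the flags the class no longer forces:
async function configureWebGL(deallocate = false) {
  await tf.setBackend('webgl');
  tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', deallocate ? 0 : -1); // 0 = free GPU textures aggressively
  tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // half-float textures: less GPU memory, lower precision
  tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true); // packed depthwise convolution kernels
  return tf.ENV.getBool('WEBGL_FORCE_F16_TEXTURES');
}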
@@ -43,7 +43,7 @@ export function process(input, config) {
 outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');
 if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;
 if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;
-log('created FX filter');
+// log('created FX filter');
 fx = tf.ENV.flags.IS_BROWSER ? new fxImage.GLImageFilter({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
 }
 if (!fx) return inCanvas;
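The ternary above prefers OffscreenCanvas (available in web workers, where document is not) and falls back to a DOM canvas; the same check in isolation, with a helper name invented for illustration:

// Hypothetical helper mirroring the fallback in process():
function createCanvas(width, height) {
  if (typeof OffscreenCanvas !== 'undefined') return new OffscreenCanvas(width, height);
  const canvas = document.createElement('canvas'); // only reachable where a DOM exists
  canvas.width = width;
  canvas.height = height;
  return canvas;
}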
@@ -63,9 +63,9 @@ export function register() {
 try {
 tf.ENV.set('WEBGL_VERSION', 2);
 // @ts-ignore
-tf.ENV.set('WEBGL_MAX_TEXTURE_SIZE', config.gl.getParameter(config.gl.MAX_TEXTURE_SIZE));
-tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
-tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
+// tf.ENV.set('WEBGL_MAX_TEXTURE_SIZE', config.gl.getParameter(config.gl.MAX_TEXTURE_SIZE));
+// tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
+// tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
 } catch (err) {
 log('error: cannot set WebGL backend flags:', err);
 return;
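register() keeps the flag setup inside try/catch so an environment that rejects a flag logs an error instead of breaking initialization; a generic sketch of that pattern (not the library's actual function, and gl here stands in for whatever WebGL context the caller provides):

import * as tf from '@tensorflow/tfjs';

// Generic guarded-flag-setup sketch modeled on the hunk above:
export function applyWebGLFlags(gl) {
  try {
    tf.ENV.set('WEBGL_VERSION', 2);
    tf.ENV.set('WEBGL_MAX_TEXTURE_SIZE', gl.getParameter(gl.MAX_TEXTURE_SIZE)); // query the context's real limit
  } catch (err) {
    console.log('error: cannot set WebGL backend flags:', err); // mirrors the diff's log call
  }
}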