updated model defaults

pull/70/head
Vladimir Mandic 2021-02-22 09:13:11 -05:00
parent 2161a7df3a
commit 10f4b300f9
25 changed files with 329 additions and 327 deletions

assets/sample7.jpg (new executable binary file, 425 KiB)

assets/sample8.jpg (new executable binary file, 394 KiB)


@@ -76,10 +76,10 @@ export default {
// e.g., if model is running at 25 FPS, we can re-use existing bounding
// box for updated face analysis as the head probably hasn't moved much
// in short time (10 * 1/25 = 0.4 sec)
- minConfidence: 0.5, // threshold for discarding a prediction
+ minConfidence: 0.2, // threshold for discarding a prediction
iouThreshold: 0.2, // threshold for deciding whether boxes overlap too much in
// non-maximum suppression (0.1 means drop if overlap 10%)
- scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score
+ scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score
// in non-maximum suppression,
// this is applied on detection objects only and before minConfidence
},
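Note on the thresholds above: per the config comments, scoreThreshold and iouThreshold feed non-maximum suppression over the raw detector boxes, and minConfidence is applied afterwards to whatever survives. A minimal TypeScript sketch of that ordering, assuming TensorFlow.js; filterDetections is a hypothetical helper and the maxOutputSize of 10 is an arbitrary example value, not the library's actual detector code:

import * as tf from '@tensorflow/tfjs';

// hypothetical helper illustrating how the three config thresholds interact
async function filterDetections(boxes: tf.Tensor2D, scores: tf.Tensor1D, cfg: { iouThreshold: number, scoreThreshold: number, minConfidence: number }): Promise<number[]> {
  // 1. non-maximum suppression drops overlapping and low-score boxes first
  const keep = await tf.image.nonMaxSuppressionAsync(boxes, scores, 10, cfg.iouThreshold, cfg.scoreThreshold);
  const indices = Array.from(await keep.data());
  const allScores = await scores.data();
  keep.dispose();
  // 2. minConfidence then discards any remaining weak predictions
  return indices.filter((i) => allScores[i] >= cfg.minConfidence);
}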
@@ -108,8 +108,8 @@ export default {
gender: {
enabled: true,
- minConfidence: 0.1, // threshold for discarding a prediction
- modelPath: '../models/gender-ssrnet-imdb.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'
+ minConfidence: 0.4, // threshold for discarding a prediction
+ modelPath: '../models/gender.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'
inputSize: 64, // fixed value
skipFrames: 41, // how many frames to go without re-running the detector
// only used for video inputs
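Note on skipFrames: per the comment it only matters for video input, where the last gender result is reused for a number of frames instead of re-running the model every frame (the demo below turns videoOptimized off for still images). A rough self-contained sketch of that caching pattern; cachedPredict, GenderResult and the cache object are made-up names, not the library's internals:

// made-up sketch of the skipFrames caching pattern used for video inputs
type GenderResult = { gender: string, confidence: number };
const cache = { result: null as GenderResult | null, skipped: 0 };

async function cachedPredict(run: () => Promise<GenderResult>, skipFrames: number): Promise<GenderResult> {
  if (cache.result && cache.skipped < skipFrames) {
    cache.skipped++;            // reuse the previous prediction for this frame
    return cache.result;
  }
  const result = await run();   // cache is empty or stale: run the model again
  cache.result = result;
  cache.skipped = 0;
  return result;
}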


@@ -36,7 +36,7 @@ const ui = {
facing: true,
useWorker: false,
worker: 'worker.js',
- samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'],
+ samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg', '../assets/sample7.jpg', '../assets/sample8.jpg'],
compare: '../assets/sample-me.jpg',
drawBoxes: true,
drawPoints: false,
@@ -395,7 +395,7 @@ async function detectVideo() {
async function detectSampleImages() {
document.getElementById('play').style.display = 'none';
userConfig.videoOptimized = false;
- const size = Math.trunc(window.devicePixelRatio * (8 + (4 * ui.columns)));
+ const size = Math.trunc(window.devicePixelRatio * (12 + (4 * ui.columns)));
ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);
ui.baseLineHeight = size + 2;
document.getElementById('canvas').style.display = 'none';
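As a worked example of the new sizing (using a devicePixelRatio of 2 and ui.columns of 3 purely for illustration): the label size becomes Math.trunc(2 * (12 + 4 * 3)) = 48px instead of the previous Math.trunc(2 * (8 + 4 * 3)) = 40px, and baseLineHeight becomes 50.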


@@ -32,7 +32,7 @@ async function drawFace(result, canvas, ui, triangulation) {
}
// silly hack since fillText does not support new lines
const labels = [];
- // labels.push(`${Math.trunc(100 * face.confidence)}% face`);
+ labels.push(`detect confidence: ${Math.trunc(100 * face.confidence)}%`);
if (face.genderConfidence) labels.push(`${face.gender || ''} ${Math.trunc(100 * face.genderConfidence)}% confident`);
// if (face.genderConfidence) labels.push(face.gender);
if (face.age) labels.push(`age: ${face.age || ''}`);
@@ -43,11 +43,13 @@ async function drawFace(result, canvas, ui, triangulation) {
}
if (labels.length === 0) labels.push('face');
ctx.fillStyle = ui.baseLabel;
- for (let i = 0; i < labels.length; i++) {
+ for (let i = labels.length - 1; i >= 0; i--) {
ctx.fillStyle = 'black';
- ctx.fillText(labels[i], face.box[0] + 1, face.box[1] - ((labels.length - i) * ui.baseLineHeight) + 6);
+ const x = Math.max(face.box[0], 0);
+ const y = i * ui.baseLineHeight + face.box[1];
+ ctx.fillText(labels[i], x + 5, y + 16);
ctx.fillStyle = ui.baseLabel;
- ctx.fillText(labels[i], face.box[0] + 0, face.box[1] - ((labels.length - i) * ui.baseLineHeight) + 5);
+ ctx.fillText(labels[i], x + 4, y + 15);
}
ctx.fillStyle = ui.baseColor;
ctx.stroke();
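The change above moves the labels from above the bounding box to inside it and keeps the existing two-pass trick: CanvasRenderingContext2D.fillText has no notion of line breaks, so each label is drawn on its own line, first in black with a 1px offset and then in the label color, giving a cheap drop shadow. A standalone sketch of that technique; drawLabels is a hypothetical helper, not the demo's own function:

// hypothetical helper: multi-line labels with a 1px shadow, since fillText cannot wrap text
function drawLabels(ctx: CanvasRenderingContext2D, labels: string[], boxX: number, boxY: number, lineHeight: number, color: string): void {
  for (let i = labels.length - 1; i >= 0; i--) {
    const x = Math.max(boxX, 0);
    const y = boxY + i * lineHeight;
    ctx.fillStyle = 'black';
    ctx.fillText(labels[i], x + 5, y + 16); // shadow pass, offset by 1px
    ctx.fillStyle = color;
    ctx.fillText(labels[i], x + 4, y + 15); // foreground pass on top
  }
}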

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -1,11 +1,11 @@
{
"inputs": {
"dist/human.esm.js": {
- "bytes": 1347292,
+ "bytes": 1347010,
"imports": []
},
"demo/draw.js": {
- "bytes": 10798,
+ "bytes": 10783,
"imports": []
},
"demo/menu.js": {
@@ -17,7 +17,7 @@
"imports": []
},
"demo/browser.js": {
- "bytes": 26720,
+ "bytes": 26771,
"imports": [
{
"path": "dist/human.esm.js",
@@ -43,17 +43,17 @@
"imports": [],
"exports": [],
"inputs": {},
- "bytes": 2022090
+ "bytes": 2022011
},
"dist/demo-browser-index.js": {
"imports": [],
"exports": [],
"inputs": {
"dist/human.esm.js": {
- "bytesInOutput": 1339816
+ "bytesInOutput": 1339534
},
"demo/draw.js": {
- "bytesInOutput": 6204
+ "bytesInOutput": 6241
},
"demo/menu.js": {
"bytesInOutput": 10696
@@ -62,10 +62,10 @@
"bytesInOutput": 6759
},
"demo/browser.js": {
- "bytesInOutput": 17340
+ "bytesInOutput": 17389
}
},
- "bytes": 1388200
+ "bytes": 1388004
}
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.js (vendored, 94 lines changed)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.json (vendored, 20 lines changed)

@@ -9,7 +9,7 @@
"imports": []
},
"src/tfjs/backend.ts": {
- "bytes": 2228,
+ "bytes": 2237,
"imports": [
{
"path": "src/log.ts",
@@ -392,7 +392,7 @@
"imports": []
},
"src/image.ts": {
- "bytes": 5869,
+ "bytes": 5872,
"imports": [
{
"path": "src/log.ts",
@@ -409,7 +409,7 @@
]
},
"config.js": {
- "bytes": 9786,
+ "bytes": 9774,
"imports": []
},
"src/sample.ts": {
@@ -421,7 +421,7 @@
"imports": []
},
"src/human.ts": {
- "bytes": 19720,
+ "bytes": 19726,
"imports": [
{
"path": "src/log.ts",
@@ -499,7 +499,7 @@
"imports": [],
"exports": [],
"inputs": {},
- "bytes": 1924855
+ "bytes": 1924674
},
"dist/human.esm.js": {
"imports": [],
@@ -520,7 +520,7 @@
"bytesInOutput": 1064610
},
"src/tfjs/backend.ts": {
- "bytesInOutput": 1205
+ "bytesInOutput": 1053
},
"src/blazeface/blazeface.ts": {
"bytesInOutput": 2185
@@ -538,7 +538,7 @@
"bytesInOutput": 5040
},
"src/human.ts": {
- "bytesInOutput": 10441
+ "bytesInOutput": 10347
},
"src/faceboxes/faceboxes.ts": {
"bytesInOutput": 1549
@@ -610,10 +610,10 @@
"bytesInOutput": 10975
},
"src/image.ts": {
- "bytesInOutput": 2379
+ "bytesInOutput": 2355
},
"config.js": {
- "bytesInOutput": 1426
+ "bytesInOutput": 1414
},
"src/sample.ts": {
"bytesInOutput": 55295
@@ -622,7 +622,7 @@
"bytesInOutput": 2572
}
},
- "bytes": 1347292
+ "bytes": 1347010
}
}
}

dist/human.iife.json (vendored, 20 lines changed)

@@ -9,7 +9,7 @@
"imports": []
},
"src/tfjs/backend.ts": {
- "bytes": 2228,
+ "bytes": 2237,
"imports": [
{
"path": "src/log.ts",
@@ -392,7 +392,7 @@
"imports": []
},
"src/image.ts": {
- "bytes": 5869,
+ "bytes": 5872,
"imports": [
{
"path": "src/log.ts",
@@ -409,7 +409,7 @@
]
},
"config.js": {
- "bytes": 9786,
+ "bytes": 9774,
"imports": []
},
"src/sample.ts": {
@@ -421,7 +421,7 @@
"imports": []
},
"src/human.ts": {
- "bytes": 19720,
+ "bytes": 19726,
"imports": [
{
"path": "src/log.ts",
@@ -499,7 +499,7 @@
"imports": [],
"exports": [],
"inputs": {},
- "bytes": 1924866
+ "bytes": 1924685
},
"dist/human.ts": {
"imports": [],
@@ -512,7 +512,7 @@
"bytesInOutput": 1690
},
"src/human.ts": {
- "bytesInOutput": 10477
+ "bytesInOutput": 10383
},
"src/log.ts": {
"bytesInOutput": 252
@@ -521,7 +521,7 @@
"bytesInOutput": 1064610
},
"src/tfjs/backend.ts": {
- "bytesInOutput": 1205
+ "bytesInOutput": 1053
},
"src/blazeface/blazeface.ts": {
"bytesInOutput": 2185
@@ -608,10 +608,10 @@
"bytesInOutput": 10975
},
"src/image.ts": {
- "bytesInOutput": 2379
+ "bytesInOutput": 2355
},
"config.js": {
- "bytesInOutput": 1426
+ "bytesInOutput": 1414
},
"src/sample.ts": {
"bytesInOutput": 55295
@@ -620,7 +620,7 @@
"bytesInOutput": 2572
}
},
- "bytes": 1347334
+ "bytes": 1347052
}
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.js (vendored, 16 lines changed)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.json (vendored, 20 lines changed)

@@ -9,7 +9,7 @@
"imports": []
},
"src/tfjs/backend.ts": {
- "bytes": 2228,
+ "bytes": 2237,
"imports": [
{
"path": "src/log.ts",
@@ -392,7 +392,7 @@
"imports": []
},
"src/image.ts": {
- "bytes": 5869,
+ "bytes": 5872,
"imports": [
{
"path": "src/log.ts",
@@ -409,7 +409,7 @@
]
},
"config.js": {
- "bytes": 9786,
+ "bytes": 9774,
"imports": []
},
"src/sample.ts": {
@@ -421,7 +421,7 @@
"imports": []
},
"src/human.ts": {
- "bytes": 19720,
+ "bytes": 19726,
"imports": [
{
"path": "src/log.ts",
@@ -499,7 +499,7 @@
"imports": [],
"exports": [],
"inputs": {},
- "bytes": 705440
+ "bytes": 705259
},
"dist/human.node-gpu.js": {
"imports": [],
@@ -515,13 +515,13 @@
"bytesInOutput": 1677
},
"src/human.ts": {
- "bytesInOutput": 10449
+ "bytesInOutput": 10355
},
"src/log.ts": {
"bytesInOutput": 251
},
"src/tfjs/backend.ts": {
- "bytesInOutput": 1304
+ "bytesInOutput": 1145
},
"src/blazeface/blazeface.ts": {
"bytesInOutput": 2329
@@ -605,13 +605,13 @@
"bytesInOutput": 2391
},
"src/image.ts": {
- "bytesInOutput": 2362
+ "bytesInOutput": 2339
},
"src/imagefx.js": {
"bytesInOutput": 10973
},
"config.js": {
- "bytesInOutput": 1426
+ "bytesInOutput": 1414
},
"src/sample.ts": {
"bytesInOutput": 55295
@@ -620,7 +620,7 @@
"bytesInOutput": 2569
}
},
- "bytes": 276845
+ "bytes": 276557
}
}
}

dist/human.ts (vendored, 94 lines changed)

File diff suppressed because one or more lines are too long

dist/human.ts.map (vendored, 4 lines changed)

File diff suppressed because one or more lines are too long


@@ -30,7 +30,7 @@ export function cutBoxFromImageAndResize(box, image, cropSize) {
return tf.image.cropAndResize(image, boxes, [0], cropSize);
}
- export function enlargeBox(box, factor = 1.5) {
+ export function enlargeBox(box, factor = 1.6) {
const center = getBoxCenter(box);
const size = getBoxSize(box);
const newHalfSize = [factor * size[0] / 2, factor * size[1] / 2];
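The tail of enlargeBox falls outside this hunk; the visible part computes the box center, the box size and the scaled half-size, so the function presumably rebuilds the box around the same center, and the new default grows the detected box by 60% instead of 50%. A standalone sketch of that geometry under that assumption; enlargeBoxSketch and the Box type are illustrative names, and the real function may return additional fields:

// standalone sketch of enlarging a box around its center; the library's enlargeBox may differ in return shape
type Box = { startPoint: [number, number], endPoint: [number, number] };

function enlargeBoxSketch(box: Box, factor = 1.6): Box {
  const center: [number, number] = [(box.startPoint[0] + box.endPoint[0]) / 2, (box.startPoint[1] + box.endPoint[1]) / 2];
  const size: [number, number] = [box.endPoint[0] - box.startPoint[0], box.endPoint[1] - box.startPoint[1]];
  const newHalfSize = [factor * size[0] / 2, factor * size[1] / 2];
  return {
    startPoint: [center[0] - newHalfSize[0], center[1] - newHalfSize[1]],
    endPoint: [center[0] + newHalfSize[0], center[1] + newHalfSize[1]],
  };
}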


@@ -219,8 +219,8 @@ class Human {
log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);
this.tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 0 : -1);
}
- this.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
- this.tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
+ // this.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
+ // this.tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
const gl = await this.tf.backend().getGPGPUContext().gl;
log(`gl version:${gl.getParameter(gl.VERSION)} renderer:${gl.getParameter(gl.RENDERER)}`);
}
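With this change the WebGL tuning flags are no longer forced on: WEBGL_FORCE_F16_TEXTURES trades float32 precision for speed and WEBGL_PACK_DEPTHWISECONV enables packed depthwise convolutions, and both now stay at the TensorFlow.js defaults. If they were ever reintroduced, one option would be to make them opt-in; a sketch of that idea, where applyWebglOptimizations is a hypothetical helper that is not part of the library:

import * as tf from '@tensorflow/tfjs';

// hypothetical: only force the aggressive WebGL flags when the caller explicitly opts in
function applyWebglOptimizations(enabled: boolean): void {
  if (!enabled) return; // leave the TFJS defaults untouched, matching the new behaviour
  tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
  tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
}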


@@ -43,7 +43,7 @@ export function process(input, config) {
outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');
if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;
if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;
- log('created FX filter');
+ // log('created FX filter');
fx = tf.ENV.flags.IS_BROWSER ? new fxImage.GLImageFilter({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
}
if (!fx) return inCanvas;


@@ -63,9 +63,9 @@ export function register() {
try {
tf.ENV.set('WEBGL_VERSION', 2);
// @ts-ignore
- tf.ENV.set('WEBGL_MAX_TEXTURE_SIZE', config.gl.getParameter(config.gl.MAX_TEXTURE_SIZE));
- tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
- tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
+ // tf.ENV.set('WEBGL_MAX_TEXTURE_SIZE', config.gl.getParameter(config.gl.MAX_TEXTURE_SIZE));
+ // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
+ // tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
} catch (err) {
log('error: cannot set WebGL backend flags:', err);
return;
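With these overrides commented out, the custom backend only pins WEBGL_VERSION and otherwise inherits the TensorFlow.js flag defaults. One way to confirm what is actually in effect after registration is to read the flags back from the environment; a small ad-hoc diagnostic sketch, not part of the library:

import * as tf from '@tensorflow/tfjs';

// ad-hoc check of the effective WebGL flags after backend registration
function logWebglFlags(): void {
  console.log('WEBGL_VERSION:', tf.ENV.get('WEBGL_VERSION'));
  console.log('WEBGL_FORCE_F16_TEXTURES:', tf.ENV.get('WEBGL_FORCE_F16_TEXTURES')); // now the TFJS default
  console.log('WEBGL_PACK_DEPTHWISECONV:', tf.ENV.get('WEBGL_PACK_DEPTHWISECONV')); // now the TFJS default
}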