fine-tuning age and face models

pull/280/head
Vladimir Mandic 2021-03-03 09:59:04 -05:00
parent 4eb6fa709c
commit b77b98e8d4
4 changed files with 23 additions and 9 deletions

.markdownlint.json (new file, 6 lines added)

@@ -0,0 +1,6 @@
+{
+  "MD012": false,
+  "MD013": false,
+  "MD033": false,
+  "MD036": false
+}
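For reference, the four disabled markdownlint rules are MD012 (no multiple consecutive blank lines), MD013 (line length), MD033 (no inline HTML), and MD036 (no emphasis used in place of a heading), so the project's markdown can use long lines, raw HTML, and bold pseudo-headings without lint warnings.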

@@ -82,10 +82,10 @@ export default {
                          // in short time (10 * 1/25 = 0.25 sec)
     skipInitial: false,  // if previous detection resulted in no faces detected,
                          // should skipFrames be reset immediately
-    minConfidence: 0.2,  // threshold for discarding a prediction
-    iouThreshold: 0.2,   // threshold for deciding whether boxes overlap too much in
+    minConfidence: 0.1,  // threshold for discarding a prediction
+    iouThreshold: 0.1,   // threshold for deciding whether boxes overlap too much in
                          // non-maximum suppression (0.1 means drop if overlap 10%)
-    scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score
+    scoreThreshold: 0.1, // threshold for deciding when to remove boxes based on score
                          // in non-maximum suppression,
                          // this is applied on detection objects only and before minConfidence
   },
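For context on how these three thresholds interact: iouThreshold and scoreThreshold are the standard arguments to TFJS non-maximum suppression, while minConfidence is applied to the candidates that survive it. A minimal sketch, assuming BlazeFace-style box and score tensors; the helper name and wiring are illustrative, not the library's actual internals:

```js
import * as tf from '@tensorflow/tfjs';

// hypothetical helper showing where each config threshold plugs in
async function filterDetections(boxes, scores, config) {
  const keep = await tf.image.nonMaxSuppressionAsync(
    boxes,                  // [numBoxes, 4] tensor of box coordinates
    scores,                 // [numBoxes] tensor of detection scores
    10,                     // maximum number of detections to keep
    config.iouThreshold,    // drop boxes overlapping a kept box by more than 10%
    config.scoreThreshold); // drop boxes scoring below 0.1 inside NMS itself
  const indices = await keep.data();
  const allScores = await scores.data();
  keep.dispose();
  // minConfidence gates the final predictions, after NMS has already run
  return Array.from(indices).filter((i) => allScores[i] >= config.minConfidence);
}
```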
@@ -114,7 +114,7 @@ export default {
   gender: {
     enabled: true,
-    minConfidence: 0.4, // threshold for discarding a prediction
+    minConfidence: 0.1, // threshold for discarding a prediction
     modelPath: '../models/gender.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'
     inputSize: 64, // fixed value
     skipFrames: 41, // how many frames to go without re-running the detector
@@ -124,7 +124,7 @@ export default {
   emotion: {
     enabled: true,
     inputSize: 64, // fixed value
-    minConfidence: 0.2, // threshold for discarding a prediction
+    minConfidence: 0.1, // threshold for discarding a prediction
     skipFrames: 21, // how many frames to go without re-running the detector
     modelPath: '../models/emotion-large.json', // can be 'mini', 'large'
   },
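The practical effect of dropping minConfidence from 0.2 to 0.1 is that borderline classifier outputs are no longer discarded. A small illustrative sketch (hypothetical helper, not the library's code) of how such a gate typically filters softmax probabilities:

```js
// hypothetical helper: gate softmax outputs by minConfidence
function topEmotions(probabilities, labels, minConfidence) {
  return probabilities
    .map((score, i) => ({ score, emotion: labels[i] }))
    .filter((e) => e.score >= minConfidence) // discard weak predictions
    .sort((a, b) => b.score - a.score);      // strongest first
}

// with minConfidence at 0.1, borderline scores now survive:
topEmotions([0.15, 0.7, 0.05, 0.1], ['angry', 'happy', 'sad', 'neutral'], 0.1);
// -> happy 70%, angry 15%, neutral 10% (sad at 5% is still dropped)
```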

@@ -31,7 +31,7 @@ const ui = {
   baseFontProto: 'small-caps {size} "Segoe UI"',
   baseLineWidth: 12,
   crop: true,
-  columns: 2,
+  columns: 4,
   busy: false,
   facing: true,
   useWorker: false,
@@ -339,7 +339,7 @@ async function processImage(input) {
   return new Promise((resolve) => {
     const image = new Image();
     image.onload = async () => {
-      log('Processing image:', image.src);
+      log('Processing image:', encodeURI(image.src));
       const canvas = document.getElementById('canvas');
       image.width = image.naturalWidth;
       image.height = image.naturalHeight;
@@ -351,7 +351,12 @@
       const thumb = document.createElement('canvas');
       thumb.className = 'thumbnail';
       thumb.width = window.innerWidth / (ui.columns + 0.1);
-      thumb.height = canvas.height / (window.innerWidth / thumb.width);
+      thumb.height = thumb.width * canvas.height / canvas.width;
+      if (result.face && result.face.length > 0) {
+        thumb.title = result.face.map((a, i) => `#${i} face: ${Math.trunc(100 * a.faceConfidence)}% box: ${Math.trunc(100 * a.boxConfidence)}% age: ${Math.trunc(a.age)} gender: ${Math.trunc(100 * a.genderConfidence)}% ${a.gender}`).join(' | ');
+      } else {
+        thumb.title = 'no face detected';
+      }
       const ctx = thumb.getContext('2d');
       ctx.drawImage(canvas, 0, 0, canvas.width, canvas.height, 0, 0, thumb.width, thumb.height);
       document.getElementById('samples-container').appendChild(thumb);
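The thumbnail height fix is worth spelling out: the old expression only preserved the source aspect ratio when canvas.width happened to equal window.innerWidth, while the new one scales height from the canvas's own proportions. A quick numeric check:

```js
// with a 1280x720 source canvas and a 300px-wide thumbnail:
const canvas = { width: 1280, height: 720 };
const thumbWidth = 300;

// old formula depended on window width, so the thumbnail distorted
// whenever the canvas was not exactly window-sized:
//   thumb.height = canvas.height / (window.innerWidth / thumbWidth);

// new formula: 300 * 720 / 1280 = 168.75px, a correct 16:9 scale
const thumbHeight = thumbWidth * canvas.height / canvas.width;
```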
@@ -403,6 +408,7 @@ async function detectSampleImages() {
   log('Running detection of sample images');
   status('processing images');
   document.getElementById('samples-container').innerHTML = '';
+  for (const m of Object.values(menu)) m.hide();
   for (const image of ui.samples) await processImage(image);
   status('');
 }

@@ -235,7 +235,7 @@ class Human {
     let genderRes;
     let emotionRes;
     let embeddingRes;
-    const faceRes: Array<{ confidence: number, box: any, mesh: any, meshRaw: any, boxRaw: any, annotations: any, age: number, gender: string, genderConfidence: number, emotion: string, embedding: any, iris: number }> = [];
+    const faceRes: Array<{ confidence: number, boxConfidence: number, faceConfidence: number, box: any, mesh: any, meshRaw: any, boxRaw: any, annotations: any, age: number, gender: string, genderConfidence: number, emotion: string, embedding: any, iris: number }> = [];
     this.state = 'run:face';
     timeStamp = now();
     const faces = await this.models.face?.estimateFaces(input, this.config);
@@ -316,6 +316,8 @@
       // combine results
       faceRes.push({
         confidence: face.confidence,
+        faceConfidence: face.faceConfidence,
+        boxConfidence: face.boxConfidence,
         box: face.box,
         mesh: face.mesh,
         boxRaw: face.boxRaw,
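With these two fields exposed, callers can distinguish the box detector's score from the face model's score instead of relying on the single combined confidence. A minimal consumer sketch, assuming an already-constructed Human instance and an input image or video element; the mirroring of the demo tooltip above is intentional, and the detector-vs-mesh attribution in the comments is an assumption, not documented API:

```js
const result = await human.detect(input);
for (const [i, face] of result.face.entries()) {
  // boxConfidence: score of the detected face box (assumed to come from the detector)
  // faceConfidence: score of the refined face prediction (assumed to come from the face model)
  console.log(`#${i}`,
    `face: ${Math.trunc(100 * face.faceConfidence)}%`,
    `box: ${Math.trunc(100 * face.boxConfidence)}%`,
    `age: ${Math.trunc(face.age)}`,
    `gender: ${face.gender} (${Math.trunc(100 * face.genderConfidence)}%)`);
}
```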