// human/src/gender/gender.js
// (repository raw-view page header removed; blame metadata lives in git history)
import { tf } from '../../dist/tfjs.esm.js';
// blame: 2020-11-10 02:13:38 +01:00
import * as profile from '../profile.js';
// blame: 2020-11-06 17:39:39 +01:00
// Cache for the lazily-loaded gender model (populated by load())
const models = {};
// Most recent successful prediction, reused on skipped video frames
let last = { gender: '' };
// Frames elapsed since the last full prediction; starts high so the first call always runs
let frame = Number.MAX_SAFE_INTEGER;
// True when the loaded model expects single-channel (grayscale) input
let alternative = false;
// tuning values
const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale
// blame: 2020-11-06 17:39:39 +01:00
/**
 * Lazily loads the gender-detection graph model on first use and caches it.
 * @param {object} config - global config; reads config.face.gender.modelPath
 * @returns {Promise<object>} the loaded tf.GraphModel (same cached instance on later calls)
 */
async function load(config) {
  if (!models.gender) {
    models.gender = await tf.loadGraphModel(config.face.gender.modelPath);
    // a single-channel input tensor identifies the alternative (grayscale) model variant
    alternative = models.gender.inputs[0].shape[3] === 1;
    // guard the match: a path without '/<name>.<ext>' would make match() return null
    // and the original unguarded [1] access would throw a TypeError
    const matched = config.face.gender.modelPath.match(/\/(.*)\./);
    // eslint-disable-next-line no-console
    console.log(`Human: load model: ${matched ? matched[1] : config.face.gender.modelPath}`);
  }
  return models.gender;
}
/**
 * Runs gender detection on a face image tensor.
 * @param {object} image - input image tensor (assumes NHWC with 3 RGB channels on axis 3 — TODO confirm against caller)
 * @param {object} config - global config; reads config.face.gender.*, config.videoOptimized, config.profile
 * @returns {Promise<object|null>} { gender, confidence } when confident, {} when not, or null if the model is not loaded
 */
async function predict(image, config) {
  if (!models.gender) return null;
  // on intermediate video frames, reuse the cached result instead of re-running the model
  if ((frame < config.face.gender.skipFrames) && config.videoOptimized && last.gender !== '') {
    frame += 1;
    return last;
  }
  frame = 0;
  const resize = tf.image.resizeBilinear(image, [config.face.gender.inputSize, config.face.gender.inputSize], false);
  let enhance;
  if (alternative) {
    // alternative model expects grayscale input normalized to -1..1
    enhance = tf.tidy(() => {
      const [red, green, blue] = tf.split(resize, 3, 3);
      const redNorm = tf.mul(red, rgb[0]);
      const greenNorm = tf.mul(green, rgb[1]);
      const blueNorm = tf.mul(blue, rgb[2]);
      const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
      return grayscale.sub(0.5).mul(2);
    });
  } else {
    // default model expects RGB input scaled to 0..255
    enhance = tf.mul(resize, [255.0]);
  }
  tf.dispose(resize);
  let genderT;
  const obj = {};
  if (!config.profile) {
    if (config.face.gender.enabled) genderT = await models.gender.predict(enhance);
  } else if (config.face.gender.enabled) {
    // original code built `profileGender` as {} when disabled and then called
    // `.result.clone()` unconditionally, which threw; only profile when enabled
    const profileGender = await tf.profile(() => models.gender.predict(enhance));
    genderT = profileGender.result.clone();
    profileGender.result.dispose();
    profile.run('gender', profileGender);
  }
  enhance.dispose();
  if (genderT) {
    const data = genderT.dataSync();
    if (alternative) {
      // returns two values 0..1, bigger one is prediction
      const confidence = Math.trunc(100 * Math.abs(data[0] - data[1])) / 100;
      if (confidence > config.face.gender.minConfidence) {
        obj.gender = data[0] > data[1] ? 'female' : 'male';
        obj.confidence = confidence;
      }
    } else {
      // returns one value 0..1, .5 is prediction threshold
      const confidence = Math.trunc(200 * Math.abs((data[0] - 0.5))) / 100;
      if (confidence > config.face.gender.minConfidence) {
        obj.gender = data[0] <= 0.5 ? 'female' : 'male';
        obj.confidence = Math.min(0.99, confidence);
      }
    }
    // dispose only when a tensor was produced; the original disposed
    // unconditionally and crashed when gender detection was disabled
    genderT.dispose();
  }
  last = obj;
  return obj;
}
// NOTE(review): file uses ESM `import` at the top but CommonJS `exports` here —
// presumably reconciled by the build/transpile step; confirm the module format.
exports.predict = predict;
exports.load = load;