human/src/emotion/emotion.js

import * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';
import * as profile from '../profile.js';
const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral']; // label order must match the emotion model's output order
const models = {};
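// cached results and frame counter used to reuse the last prediction while skipping frames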
let last = [];
let frame = Number.MAX_SAFE_INTEGER; // start above skipFrames so the first call always runs inference
// tuning values
const zoom = [0, 0]; // 0..1 meaning 0%..100%
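// e.g. zoom of [0.1, 0.1] crops 10% from each edge of the input frame before resizing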
const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale
const scale = 1; // score multiplication factor
async function load(config) {
  if (!models.emotion) {
    models.emotion = await tf.loadGraphModel(config.face.emotion.modelPath);
    // eslint-disable-next-line no-console
    console.log(`Human: load model: ${config.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
  }
  return models.emotion;
}
async function predict(image, config) {
  // reuse the previous result for up to skipFrames frames to reduce inference load
  if ((frame < config.face.emotion.skipFrames) && (last.length > 0)) {
    frame += 1;
    return last;
  }
  frame = 0;
  return new Promise(async (resolve) => {
    // crop box in normalized [y1, x1, y2, x2] coordinates derived from the zoom tuning value
    const box = [[
      (image.shape[1] * zoom[0]) / image.shape[1],
      (image.shape[2] * zoom[1]) / image.shape[2],
      (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],
      (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],
    ]];
    const resize = tf.image.cropAndResize(image, box, [0], [config.face.emotion.inputSize, config.face.emotion.inputSize]);
    // const resize = tf.image.resizeBilinear(image, [config.face.emotion.inputSize, config.face.emotion.inputSize], false);
    const [red, green, blue] = tf.split(resize, 3, 3);
    resize.dispose();
    // weighted rgb to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html
    const redNorm = tf.mul(red, rgb[0]);
    const greenNorm = tf.mul(green, rgb[1]);
    const blueNorm = tf.mul(blue, rgb[2]);
    red.dispose();
    green.dispose();
    blue.dispose();
    const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
    redNorm.dispose();
    greenNorm.dispose();
    blueNorm.dispose();
    // rescale grayscale values from 0..1 to -1..1 before inference
    const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));
    grayscale.dispose();
    const obj = [];
    if (config.face.emotion.enabled) {
      let data;
      if (!config.profile) {
        const emotionT = await models.emotion.predict(normalize);
        data = emotionT.dataSync();
        tf.dispose(emotionT);
      } else {
        const profileData = await tf.profile(() => models.emotion.predict(normalize));
        data = profileData.result.dataSync();
        profileData.result.dispose();
        profile.run('emotion', profileData);
      }
      for (let i = 0; i < data.length; i++) {
        if (scale * data[i] > config.face.emotion.minConfidence) obj.push({ score: Math.min(0.99, Math.trunc(100 * scale * data[i]) / 100), emotion: annotations[i] });
      }
      obj.sort((a, b) => b.score - a.score);
    }
    normalize.dispose();
    last = obj;
    resolve(obj);
  });
}
exports.predict = predict;
exports.load = load;
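
// Usage sketch (illustrative, not part of the module): assumes a 4D RGB image tensor
// shaped [1, height, width, 3] with values in 0..1, and a config object carrying the
// config.face.emotion fields this module reads; the modelPath and tuning values below are placeholders.
//
// const config = {
//   profile: false,
//   face: { emotion: { enabled: true, modelPath: '../models/emotion.json', inputSize: 64, minConfidence: 0.2, skipFrames: 10 } },
// };
// await load(config);
// const result = await predict(imageTensor, config); // e.g. [{ score: 0.78, emotion: 'happy' }]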