human/src/faceboxes/faceboxes.ts

import { log } from '../log';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';
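
/** FaceBoxes face detection wrapper around a TFJS graph model */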
export class FaceBoxes {
  enlarge: number;
  model: any;
  config: any;

  constructor(model, config) {
    this.enlarge = 1.1; // factor used to slightly enlarge detected boxes before cropping
    this.model = model;
    this.config = config;
  }
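
  /**
   * Runs face detection on an input image tensor
   * @param input 4D image tensor in [batch, height, width, channels] layout
   * @param config optional configuration that overrides the one passed at construction time
   * @returns detections with confidence score, box in pixel coordinates, optional normalized boxRaw and a cropped face image tensor
   */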
  async estimateFaces(input, config) {
    if (config) this.config = config;
    const results: Array<{ confidence: number, box: any, boxRaw: any, image: any }> = [];
    // resize input to the detector's square input size and cast to int32
    const resizeT = tf.image.resizeBilinear(input, [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
    const castT = resizeT.toInt();
    let scores;
    let boxes;
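    // two execution paths: plain model execution, or execution wrapped in tf.profile() when profiling is enabled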
    if (!this.config.profile) {
      // direct execution: model outputs detection scores, boxes and detection count tensors
      const [scoresT, boxesT, numT] = await this.model.executeAsync(castT);
      scores = scoresT.dataSync();
      const squeezeT = boxesT.squeeze();
      boxes = squeezeT.arraySync();
      scoresT.dispose();
      boxesT.dispose();
      squeezeT.dispose();
      numT.dispose();
    } else {
      // profiled execution: wrap the model call in tf.profile() and hand the collected data to the profiler
      const profileData = await tf.profile(() => this.model.executeAsync(castT));
      scores = profileData.result[0].dataSync();
      const squeezeT = profileData.result[1].squeeze();
      boxes = squeezeT.arraySync();
      profileData.result.forEach((t) => t.dispose());
      profile.run('faceboxes', profileData);
    }
    castT.dispose();
    resizeT.dispose();
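    // convert each detection above the configured confidence threshold into pixel coordinates and a cropped, normalized face tensor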
    for (const i in boxes) {
      if (scores[i] && scores[i] > this.config.face.detector.minConfidence) {
        // enlarge the detection box (model output is [y1, x1, y2, x2] in normalized coordinates)
        const crop = [boxes[i][0] / this.enlarge, boxes[i][1] / this.enlarge, boxes[i][2] * this.enlarge, boxes[i][3] * this.enlarge];
        const boxRaw = [crop[1], crop[0], crop[3] - crop[1], crop[2] - crop[0]]; // [x, y, width, height] normalized to 0..1
        // scale normalized box to pixel coordinates of the original input
        const box = [
          parseInt((boxRaw[0] * input.shape[2]).toString()),
          parseInt((boxRaw[1] * input.shape[1]).toString()),
          parseInt((boxRaw[2] * input.shape[2]).toString()),
          parseInt((boxRaw[3] * input.shape[1]).toString())];
        const resized = tf.image.cropAndResize(input, [crop], [0], [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
        const image = resized.div([255]); // normalize cropped face pixels to the 0..1 range
        resized.dispose();
        results.push({ confidence: scores[i], box, boxRaw: this.config.face.mesh.returnRawData ? boxRaw : null, image });
        // TODO: add mesh, meshRaw, annotations
      }
    }
    return results;
  }
}
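
/**
 * Loads the face detector graph model referenced by config.face.detector.modelPath
 * and returns a FaceBoxes instance bound to that model and configuration
 */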
export async function load(config) {
  const model = await tf.loadGraphModel(config.face.detector.modelPath);
  log(`load model: ${config.face.detector.modelPath.match(/\/(.*)\./)[1]}`);
  const faceboxes = new FaceBoxes(model, config);
  // mesh and iris model names are only logged here; this module does not load them
  if (config.face.mesh.enabled) log(`load model: ${config.face.mesh.modelPath.match(/\/(.*)\./)[1]}`);
  if (config.face.iris.enabled) log(`load model: ${config.face.iris.modelPath.match(/\/(.*)\./)[1]}`);
  return faceboxes;
}
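
// Usage sketch (illustrative only; `config` and `inputTensor` are assumed to be supplied by the caller):
//   const detector = await load(config);
//   const faces = await detector.estimateFaces(inputTensor); // inputTensor: [1, height, width, channels]
//   faces.forEach((f) => console.log(f.confidence, f.box));  // box is [x, y, width, height] in pixels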