const tf = require('@tensorflow/tfjs');
const facemesh = require('./facemesh/facemesh.js');
const ssrnet = require('./ssrnet/ssrnet.js');
const emotion = require('./emotion/emotion.js');
const posenet = require('./posenet/posenet.js');
const handpose = require('./handpose/handpose.js');
const fxImage = require('./imagefx.js');
const profile = require('./profile.js');
const defaults = require('../config.js').default;
const app = require('../package.json');

let first = true;

// static config override for non-video detection
const override = {
  face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, emotion: { skipFrames: 0 } },
  hand: { skipFrames: 0 },
};
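
// note (illustrative, based on how detect() uses this object below): when config.videoOptimized is false,
// detect() merges this override into the active config, zeroing all skipFrames values so every model runs
// on every call instead of reusing cached results between video frames, e.g.:
//   if (!this.config.videoOptimized) this.config = mergeDeep(this.config, override);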

// helper function: gets elapsed time on both browser and nodejs
const now = () => {
  if (typeof performance !== 'undefined') return performance.now();
  return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);
};

// helper function: performs deep merge of multiple objects, allowing full inheritance with overrides
function mergeDeep(...objects) {
  const isObject = (obj) => obj && typeof obj === 'object';
  return objects.reduce((prev, obj) => {
    Object.keys(obj || {}).forEach((key) => {
      const pVal = prev[key];
      const oVal = obj[key];
      if (Array.isArray(pVal) && Array.isArray(oVal)) {
        prev[key] = pVal.concat(...oVal);
      } else if (isObject(pVal) && isObject(oVal)) {
        prev[key] = mergeDeep(pVal, oVal);
      } else {
        prev[key] = oVal;
      }
    });
    return prev;
  }, {});
}
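
// example (illustrative only, not part of the library API):
//   mergeDeep({ a: 1, b: { c: 2 }, d: [1] }, { a: 9, b: { e: 3 }, d: [2] })
//   returns { a: 9, b: { c: 2, e: 3 }, d: [1, 2] }
// nested objects merge recursively, arrays concatenate, and scalar values from later objects win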

// helper function: checks that the input and the tf backend are in a usable state before detection
function sanity(input) {
  if (!input) return 'input is not defined';
  if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {
    return 'input must be a tensor';
  }
  try {
    tf.getBackend();
  } catch {
    return 'backend not loaded';
  }
  return null;
}
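
// in nodejs, sanity() requires the caller to pass a tensor rather than a DOM element;
// a minimal sketch of preparing one, assuming @tensorflow/tfjs-node and a local image file (names are illustrative):
//   const fs = require('fs');
//   const buffer = fs.readFileSync('image.jpg');
//   const decoded = tf.node.decodeImage(buffer);    // [height, width, channels] int32 tensor
//   const input = decoded.toFloat().expandDims(0);  // [1, height, width, channels], matching what tfImage() produces for browser inputs
//   decoded.dispose();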

class Human {
  constructor() {
    this.tf = tf;
    this.version = app.version;
    this.defaults = defaults;
    this.config = defaults;
    this.fx = null;
    this.state = 'idle';
    this.numTensors = 0;
    this.analyzeMemoryLeaks = false;
    // internal temp canvases
    this.inCanvas = null;
    this.outCanvas = null;
    // object that contains all initialized models
    this.models = {
      facemesh: null,
      posenet: null,
      handpose: null,
      iris: null,
      age: null,
      gender: null,
      emotion: null,
    };
    // export raw access to underlying models
    this.facemesh = facemesh;
    this.ssrnet = ssrnet;
    this.emotion = emotion;
    this.posenet = posenet;
    this.handpose = handpose;
  }

  // helper function: wrapper around console output
  log(...msg) {
    // eslint-disable-next-line no-console
    if (msg && this.config.console) console.log('Human:', ...msg);
  }

  profile() {
    if (this.config.profile) return profile.data;
    return {};
  }

  // helper function: measure tensor leaks
  analyze(...msg) {
    if (!this.analyzeMemoryLeaks) return;
    const current = tf.engine().state.numTensors;
    const previous = this.numTensors;
    this.numTensors = current;
    const leaked = current - previous;
    if (leaked !== 0) this.log(...msg, leaked);
  }

  async load(userConfig) {
    if (userConfig) this.config = mergeDeep(defaults, userConfig);

    if (first) {
      this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);
      this.log('configuration:', this.config);
      this.log('flags:', tf.ENV.flags);
      first = false;
    }

    if (this.config.face.enabled && !this.models.facemesh) {
      this.log('load model: face');
      this.models.facemesh = await facemesh.load(this.config.face);
    }
    if (this.config.body.enabled && !this.models.posenet) {
      this.log('load model: body');
      this.models.posenet = await posenet.load(this.config.body);
    }
    if (this.config.hand.enabled && !this.models.handpose) {
      this.log('load model: hand');
      this.models.handpose = await handpose.load(this.config.hand);
    }
    if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) {
      this.log('load model: age');
      this.models.age = await ssrnet.loadAge(this.config);
    }
    if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) {
      this.log('load model: gender');
      this.models.gender = await ssrnet.loadGender(this.config);
    }
    if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) {
      this.log('load model: emotion');
      this.models.emotion = await emotion.load(this.config);
    }
  }
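
  // illustrative sketch (not part of the library): models can be preloaded before the first detect() call
  // with a partial config that is deep-merged over the defaults, e.g.:
  //   const human = new Human();
  //   await human.load({ face: { enabled: true }, body: { enabled: true }, hand: { enabled: false } });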

  async checkBackend() {
    if (tf.getBackend() !== this.config.backend) {
      this.state = 'backend';
      /* force backend reload
      if (this.config.backend in tf.engine().registry) {
        const backendFactory = tf.findBackendFactory(this.config.backend);
        tf.removeBackend(this.config.backend);
        tf.registerBackend(this.config.backend, backendFactory);
      } else {
        this.log('Backend not registered:', this.config.backend);
      }
      */
      this.log('Setting backend:', this.config.backend);
      await tf.setBackend(this.config.backend);
      tf.enableProdMode();
      /* debug mode is really too much overhead
      if (this.config.profile) tf.enableDebugMode();
      else tf.enableProdMode();
      */
      if (this.config.deallocate && this.config.backend === 'webgl') {
        this.log('Changing WebGL: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);
        tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 0 : -1);
      }
      await tf.ready();
    }
  }
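
  // illustrative sketch (not part of the library): the target backend comes from config.backend and is only
  // switched here when it differs from the currently active tf backend, e.g.:
  //   const human = new Human();
  //   const result = await human.detect(input, { backend: 'webgl' }); // or 'cpu'; any other backend must already be registered with tf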

  tfImage(input) {
    let tensor;
    if (input instanceof tf.Tensor) {
      tensor = tf.clone(input);
    } else {
      const originalWidth = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));
      const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));
      let targetWidth = originalWidth;
      let targetHeight = originalHeight;
      if (this.config.filter.width > 0) targetWidth = this.config.filter.width;
      else if (this.config.filter.height > 0) targetWidth = originalWidth * (this.config.filter.height / originalHeight);
      if (this.config.filter.height > 0) targetHeight = this.config.filter.height;
      else if (this.config.filter.width > 0) targetHeight = originalHeight * (this.config.filter.width / originalWidth);
      if (!this.inCanvas || (this.inCanvas.width !== originalWidth) || (this.inCanvas.height !== originalHeight)) {
        this.inCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');
        if (this.inCanvas.width !== targetWidth) this.inCanvas.width = targetWidth;
        if (this.inCanvas.height !== targetHeight) this.inCanvas.height = targetHeight;
      }
      const ctx = this.inCanvas.getContext('2d');
      if (input instanceof ImageData) ctx.putImageData(input, 0, 0);
      else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, this.inCanvas.width, this.inCanvas.height);
      if (this.config.filter.enabled) {
        if (!this.fx || !this.outCanvas || (this.inCanvas.width !== this.outCanvas.width) || (this.inCanvas.height !== this.outCanvas.height)) {
          this.outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(this.inCanvas.width, this.inCanvas.height) : document.createElement('canvas');
          if (this.outCanvas.width !== this.inCanvas.width) this.outCanvas.width = this.inCanvas.width;
          if (this.outCanvas.height !== this.inCanvas.height) this.outCanvas.height = this.inCanvas.height;
          this.fx = (tf.ENV.flags.IS_BROWSER && (typeof document !== 'undefined')) ? new fxImage.Canvas({ canvas: this.outCanvas }) : null;
        }
        this.fx.reset();
        this.fx.addFilter('brightness', this.config.filter.brightness); // must have at least one filter enabled
        if (this.config.filter.contrast !== 0) this.fx.addFilter('contrast', this.config.filter.contrast);
        if (this.config.filter.sharpness !== 0) this.fx.addFilter('sharpen', this.config.filter.sharpness);
        if (this.config.filter.blur !== 0) this.fx.addFilter('blur', this.config.filter.blur);
        if (this.config.filter.saturation !== 0) this.fx.addFilter('saturation', this.config.filter.saturation);
        if (this.config.filter.hue !== 0) this.fx.addFilter('hue', this.config.filter.hue);
        if (this.config.filter.negative) this.fx.addFilter('negative');
        if (this.config.filter.sepia) this.fx.addFilter('sepia');
        if (this.config.filter.vintage) this.fx.addFilter('brownie');
        if (this.config.filter.kodachrome) this.fx.addFilter('kodachrome');
        if (this.config.filter.technicolor) this.fx.addFilter('technicolor');
        if (this.config.filter.polaroid) this.fx.addFilter('polaroid');
        if (this.config.filter.pixelate !== 0) this.fx.addFilter('pixelate', this.config.filter.pixelate);
        this.fx.apply(this.inCanvas);
      }
      if (!this.outCanvas) this.outCanvas = this.inCanvas;
      let pixels;
      if ((this.config.backend === 'webgl') || (this.outCanvas instanceof ImageData)) {
        // tf kernel-optimized method to get image data; if input is already ImageData, use it directly
        pixels = tf.browser.fromPixels(this.outCanvas);
      } else {
        // cpu and wasm kernels do not implement an efficient fromPixels method and cannot use the canvas as-is, so draw it through one more temporary canvas
        const tempCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');
        tempCanvas.width = targetWidth;
        tempCanvas.height = targetHeight;
        const tempCtx = tempCanvas.getContext('2d');
        tempCtx.drawImage(this.outCanvas, 0, 0);
        const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
        pixels = tf.browser.fromPixels(data);
      }
      const casted = pixels.toFloat();
      tensor = casted.expandDims(0);
      pixels.dispose();
      casted.dispose();
    }
    return { tensor, canvas: this.config.filter.return ? this.outCanvas : null };
  }
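
  // note (descriptive, based on the code above): tfImage() returns { tensor, canvas }; for canvas/image/video
  // inputs the tensor is a float32 batch of shape [1, height, width, 3], while tensor inputs are passed through
  // as a clone; canvas is the processed canvas only when config.filter.return is set, otherwise null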

  async detect(input, userConfig = {}) {
    this.state = 'config';
    const perf = {};
    let timeStamp;

    this.config = mergeDeep(defaults, userConfig);
    if (!this.config.videoOptimized) this.config = mergeDeep(this.config, override);

    // sanity checks
    this.state = 'check';
    const error = sanity(input);
    if (error) {
      this.log(error, input);
      return { error };
    }

    // eslint-disable-next-line no-async-promise-executor
    return new Promise(async (resolve) => {
      let poseRes;
      let handRes;
      let ssrRes;
      let emotionRes;

      const timeStart = now();

      // configure backend
      timeStamp = now();
      await this.checkBackend();
      perf.backend = Math.trunc(now() - timeStamp);

      // load models if enabled
      timeStamp = now();
      this.state = 'load';
      await this.load();
      perf.load = Math.trunc(now() - timeStamp);

      if (this.config.scoped) tf.engine().startScope();

      this.analyze('Start Detect:');

      timeStamp = now();
      const image = this.tfImage(input);
      perf.image = Math.trunc(now() - timeStamp);
      const imageTensor = image.tensor;

      // run posenet
      if (this.config.async) {
        poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
      } else {
        this.state = 'run:body';
        timeStamp = now();
        this.analyze('Start PoseNet');
        poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(imageTensor, this.config.body) : [];
        this.analyze('End PoseNet:');
        perf.body = Math.trunc(now() - timeStamp);
      }

      // run handpose
      if (this.config.async) {
        handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
      } else {
        this.state = 'run:hand';
        timeStamp = now();
        this.analyze('Start HandPose:');
        handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(imageTensor, this.config.hand) : [];
        this.analyze('End HandPose:');
        perf.hand = Math.trunc(now() - timeStamp);
      }

      // in async mode, body and hand detection were started without await, so resolve both promises here
      if (this.config.async) [poseRes, handRes] = await Promise.all([poseRes, handRes]);

      // run facemesh, includes blazeface and iris
      const faceRes = [];
      if (this.config.face.enabled) {
        this.state = 'run:face';
        timeStamp = now();
        this.analyze('Start FaceMesh:');
        const faces = await this.models.facemesh.estimateFaces(imageTensor, this.config.face);
        perf.face = Math.trunc(now() - timeStamp);
        for (const face of faces) {
          // if something went wrong, skip the face
          if (!face.image || face.image.isDisposedInternal) {
            this.log('Face object is disposed:', face.image);
            continue;
          }
          // run ssr-net age & gender, inherits face from blazeface
          this.state = 'run:agegender';
          timeStamp = now();
          ssrRes = (this.config.face.age.enabled || this.config.face.gender.enabled) ? await ssrnet.predict(face.image, this.config) : {};
          perf.agegender = Math.trunc(now() - timeStamp);
          // run emotion, inherits face from blazeface
          this.state = 'run:emotion';
          timeStamp = now();
          emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
          perf.emotion = Math.trunc(now() - timeStamp);

          // face tensor is no longer needed
          face.image.dispose();

          // calculate iris distance
          // iris: array[ bottom, left, top, right, center ]
          const iris = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)
            ? Math.max(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0], face.annotations.rightEyeIris[3][0] - face.annotations.rightEyeIris[1][0])
            : 0;

          faceRes.push({
            confidence: face.confidence,
            box: face.box,
            mesh: face.mesh,
            annotations: face.annotations,
            age: ssrRes.age,
            gender: ssrRes.gender,
            agConfidence: ssrRes.confidence,
            emotion: emotionRes,
            iris: (iris !== 0) ? Math.trunc(100 * 11.7 /* human iris size in mm */ / iris) / 100 : 0,
          });
          this.analyze('End FaceMesh:');
        }
      }

      imageTensor.dispose();
      this.state = 'idle';

      if (this.config.scoped) tf.engine().endScope();
      this.analyze('End Scope:');

      perf.total = Math.trunc(now() - timeStart);
      resolve({ face: faceRes, body: poseRes, hand: handRes, performance: perf, canvas: image.canvas });
    });
  }
}

export { Human as default };
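
// usage sketch (illustrative, not part of this module): the class is the default export, so a browser caller
// might do something like the following (element id is hypothetical):
//   import Human from './human.js';
//   const human = new Human();
//   const result = await human.detect(document.getElementById('video'));
//   console.log(result.face, result.body, result.hand, result.performance);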