import { tf, setWasmPaths } from './tf.js';
import * as facemesh from './face/facemesh.js';
import * as age from './age/age.js';
import * as gender from './gender/gender.js';
import * as emotion from './emotion/emotion.js';
import * as posenet from './body/posenet.js';
import * as handpose from './hand/handpose.js';
import * as gesture from './gesture.js';
import * as image from './image.js';
import * as profile from './profile.js';
import * as config from '../config.js';
import * as app from '../package.json';

// static config override for non-video detection
const disableSkipFrames = {
  face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, gender: { skipFrames: 0 }, emotion: { skipFrames: 0 } },
  hand: { skipFrames: 0 },
};

// helper function: gets elapsed time on both browser and nodejs
const now = () => {
  if (typeof performance !== 'undefined') return performance.now();
  return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);
};

// helper function: perform deep merge of multiple objects so it allows full inheritance with overrides
function mergeDeep(...objects) {
  const isObject = (obj) => obj && typeof obj === 'object';
  return objects.reduce((prev, obj) => {
    Object.keys(obj || {}).forEach((key) => {
      const pVal = prev[key];
      const oVal = obj[key];
      if (Array.isArray(pVal) && Array.isArray(oVal)) {
        prev[key] = pVal.concat(...oVal);
      } else if (isObject(pVal) && isObject(oVal)) {
        prev[key] = mergeDeep(pVal, oVal);
      } else {
        prev[key] = oVal;
      }
    });
    return prev;
  }, {});
}
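
// illustrative example (not part of the original source): mergeDeep merges nested keys rather than
// replacing whole branches, so a partial user config only overrides the leaves it specifies, e.g.
//   mergeDeep({ face: { enabled: true, age: { enabled: true } } }, { face: { age: { enabled: false } } })
//   // -> { face: { enabled: true, age: { enabled: false } } }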

class Human {
  constructor(userConfig = {}) {
    this.tf = tf;
    this.version = app.version;
    this.config = mergeDeep(config.default, userConfig);
    this.fx = null;
    this.state = 'idle';
    this.numTensors = 0;
    this.analyzeMemoryLeaks = false;
    this.checkSanity = false;
    this.firstRun = true;
    this.perf = {};
    // object that contains all initialized models
    this.models = {
      facemesh: null,
      posenet: null,
      handpose: null,
      iris: null,
      age: null,
      gender: null,
      emotion: null,
    };
    // export raw access to underlying models
    this.facemesh = facemesh;
    this.age = age;
    this.gender = gender;
    this.emotion = emotion;
    this.body = posenet;
    this.hand = handpose;
  }

  // helper function: wrapper around console output
  log(...msg) {
    // eslint-disable-next-line no-console
    if (msg && this.config.console) console.log('Human:', ...msg);
  }

  profile() {
    if (this.config.profile) return profile.data;
    return {};
  }

  // helper function: measure tensor leak
  analyze(...msg) {
    if (!this.analyzeMemoryLeaks) return;
    const current = tf.engine().state.numTensors;
    const previous = this.numTensors;
    this.numTensors = current;
    const leaked = current - previous;
    if (leaked !== 0) this.log(...msg, leaked);
  }

  // quick sanity check on inputs
  sanity(input) {
    if (!this.checkSanity) return null;
    if (!input) return 'input is not defined';
    if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {
      return 'input must be a tensor';
    }
    try {
      tf.getBackend();
    } catch {
      return 'backend not loaded';
    }
    return null;
  }

  // preload models, not explicitly required as it's done automatically on first use
  async load(userConfig) {
    this.state = 'load';
    const timeStamp = now();
    if (userConfig) this.config = mergeDeep(this.config, userConfig);

    if (this.firstRun) {
      this.checkBackend(true);
      this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);
      this.log('configuration:', this.config);
      this.log('flags:', tf.ENV.flags);
      this.firstRun = false;
    }

    if (this.config.async) {
      [
        this.models.facemesh,
        this.models.age,
        this.models.gender,
        this.models.emotion,
        this.models.posenet,
        this.models.handpose,
      ] = await Promise.all([
        this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),
        this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
        this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
        this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
        this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
        this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),
      ]);
    } else {
      if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);
      if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
      if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
      if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
      if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);
      if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);
    }
    const current = Math.trunc(now() - timeStamp);
    if (current > (this.perf.load || 0)) this.perf.load = current;
  }
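
  // illustrative usage (assumption, not part of this file): load() can be called ahead of time to
  // avoid the model download/compile cost on the first detect() call, e.g.
  //   const human = new Human();
  //   await human.load({ face: { enabled: true }, body: { enabled: false } });
  // otherwise detect() invokes load() automatically.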

  // check if backend needs initialization if it changed
  async checkBackend(force) {
    const timeStamp = now();
    if (this.config.backend && (this.config.backend !== '') && force || (tf.getBackend() !== this.config.backend)) {
      this.state = 'backend';
      /* force backend reload
      if (this.config.backend in tf.engine().registry) {
        const backendFactory = tf.findBackendFactory(this.config.backend);
        tf.removeBackend(this.config.backend);
        tf.registerBackend(this.config.backend, backendFactory);
      } else {
        this.log('Backend not registered:', this.config.backend);
      }
      */

      this.log('setting backend:', this.config.backend);

      if (this.config.backend === 'wasm') {
        this.log('setting wasm path:', this.config.wasmPath);
        setWasmPaths(this.config.wasmPath);
        const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');
        if (!simd) this.log('warning: wasm simd support is not enabled');
      }

      await tf.setBackend(this.config.backend);
      tf.enableProdMode();
      /* debug mode is really too much
      tf.enableDebugMode();
      */
      if (this.config.backend === 'webgl') {
        if (this.config.deallocate) {
          this.log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);
          tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 0 : -1);
        }
        // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
        tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
      }
      await tf.ready();
    }
    const current = Math.trunc(now() - timeStamp);
    if (current > (this.perf.backend || 0)) this.perf.backend = current;
  }
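
  // illustrative (assumption): the active backend is purely config-driven, e.g. passing
  //   { backend: 'wasm', wasmPath: '<path to tfjs wasm binaries>' }
  // to the constructor or to detect() routes through the wasm branch above, while 'webgl'
  // additionally applies the texture deallocation and packing flags.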

  async detectFace(input) {
    // run facemesh, includes blazeface and iris
    // eslint-disable-next-line no-async-promise-executor
    let timeStamp;
    let ageRes;
    let genderRes;
    let emotionRes;
    const faceRes = [];
    this.state = 'run:face';
    timeStamp = now();
    const faces = await this.models.facemesh.estimateFaces(input, this.config.face);
    this.perf.face = Math.trunc(now() - timeStamp);
    for (const face of faces) {
      this.analyze('Get Face');
      // if something went wrong, skip the face
      if (!face.image || face.image.isDisposedInternal) {
        this.log('Face object is disposed:', face.image);
        continue;
      }
      // run age, inherits face from blazeface
      this.analyze('Start Age:');
      if (this.config.async) {
        ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};
      } else {
        this.state = 'run:age';
        timeStamp = now();
        ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};
        this.perf.age = Math.trunc(now() - timeStamp);
      }

      // run gender, inherits face from blazeface
      this.analyze('Start Gender:');
      if (this.config.async) {
        genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};
      } else {
        this.state = 'run:gender';
        timeStamp = now();
        genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};
        this.perf.gender = Math.trunc(now() - timeStamp);
      }

      // run emotion, inherits face from blazeface
      this.analyze('Start Emotion:');
      if (this.config.async) {
        emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};
      } else {
        this.state = 'run:emotion';
        timeStamp = now();
        emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
        this.perf.emotion = Math.trunc(now() - timeStamp);
      }
      this.analyze('End Emotion:');

      // if async wait for results
      if (this.config.async) {
        [ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);
      }

      this.analyze('Finish Face:');
      // don't need face anymore
      face.image.dispose();

      // calculate iris distance
      // iris: array [ center, left, top, right, bottom ]
      const irisSize = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)
        /* average human iris size is 11.7mm */
        ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))
        : 0;

      // combine results
      faceRes.push({
        confidence: face.confidence,
        box: face.box,
        mesh: face.mesh,
        annotations: face.annotations,
        age: ageRes.age,
        gender: genderRes.gender,
        genderConfidence: genderRes.confidence,
        emotion: emotionRes,
        iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
      });
      this.analyze('End Face');
    }
    this.analyze('End FaceMesh:');
    if (this.config.async) {
      if (this.perf.face) delete this.perf.face;
      if (this.perf.age) delete this.perf.age;
      if (this.perf.gender) delete this.perf.gender;
      if (this.perf.emotion) delete this.perf.emotion;
    }
    return faceRes;
  }

  async image(input, userConfig = {}) {
    this.state = 'image';
    this.config = mergeDeep(this.config, userConfig);
    const process = image.process(input, this.config);
    process.tensor.dispose();
    return process.canvas;
  }

  // main detect function
  async detect(input, userConfig = {}) {
    this.state = 'config';
    let timeStamp;

    // update configuration
    this.config = mergeDeep(this.config, userConfig);
    if (!this.config.videoOptimized) this.config = mergeDeep(this.config, disableSkipFrames);

    // sanity checks
    this.state = 'check';
    const error = this.sanity(input);
    if (error) {
      this.log(error, input);
      return { error };
    }

    // detection happens inside a promise
    return new Promise(async (resolve) => {
      let poseRes;
      let handRes;
      let faceRes;

      const timeStart = now();

      // configure backend
      await this.checkBackend();

      // load models if enabled
      await this.load();

      if (this.config.scoped) tf.engine().startScope();
      this.analyze('Start Scope:');

      timeStamp = now();
      const process = image.process(input, this.config);
      this.perf.image = Math.trunc(now() - timeStamp);
      this.analyze('Get Image:');

      // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion
      if (this.config.async) {
        faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];
        if (this.perf.face) delete this.perf.face;
      } else {
        this.state = 'run:face';
        timeStamp = now();
        faceRes = this.config.face.enabled ? await this.detectFace(process.tensor) : [];
        this.perf.face = Math.trunc(now() - timeStamp);
      }

      // run posenet
      this.analyze('Start Body:');
      if (this.config.async) {
        poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];
        if (this.perf.body) delete this.perf.body;
      } else {
        this.state = 'run:body';
        timeStamp = now();
        poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];
        this.perf.body = Math.trunc(now() - timeStamp);
      }
      this.analyze('End Body:');

      // run handpose
      this.analyze('Start Hand:');
      if (this.config.async) {
        handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];
        if (this.perf.hand) delete this.perf.hand;
      } else {
        this.state = 'run:hand';
        timeStamp = now();
        handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];
        this.perf.hand = Math.trunc(now() - timeStamp);
      }
      // this.analyze('End Hand:');

      // if async wait for results
      if (this.config.async) {
        [faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);
      }
      process.tensor.dispose();

      if (this.config.scoped) tf.engine().endScope();
      this.analyze('End Scope:');

      let gestureRes = [];
      if (this.config.gesture.enabled) {
        timeStamp = now();
        gestureRes = { face: gesture.face(faceRes), body: gesture.body(poseRes), hand: gesture.hand(handRes) };
        if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
        else if (this.perf.gesture) delete this.perf.gesture;
      }

      this.perf.total = Math.trunc(now() - timeStart);
      this.state = 'idle';
      resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });
    });
  }

  async warmup(userConfig) {
    const warmup = new ImageData(255, 255);
    await this.detect(warmup, userConfig);
    this.log('warmed up');
  }
}

export { Human as default };
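
// illustrative usage sketch (assumptions: a browser environment with an existing video element,
// and a bundler that resolves this module path):
//   import Human from './human.js';
//   const human = new Human({ backend: 'webgl' });
//   const result = await human.detect(document.getElementById('video'));
//   // result: { face, body, hand, gesture, performance, canvas }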