mirror of https://github.com/vladmandic/human
enable cross origin isolation
parent 65cabb2693
commit 1619f6655f
@@ -1,6 +1,6 @@
# @vladmandic/human
Version: **1.6.1**
Version: **1.7.0**
Description: **Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition**
Author: **Vladimir Mandic <mandic00@live.com>**
@@ -11,9 +11,6 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
### **HEAD -> main** 2021/04/24 mandic00@live.com
### **origin/main** 2021/04/24 mandic00@live.com
- remove efficientpose
- major version rebuild
@@ -122,7 +122,7 @@ const OrbitControls = function (object, domElement) {
// so camera.up is the orbit axis
const quat = new Quaternion().setFromUnitVectors(object.up, new Vector3(0, 1, 0));
const quatInverse = quat.clone().inverse();
const quatInverse = quat.clone().invert();
const lastPosition = new Vector3();
const lastQuaternion = new Quaternion();
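The one-line change above tracks the three.js API: `Quaternion.inverse()` was deprecated in favour of the in-place `invert()`. A minimal sketch of the same construction, assuming a standard `three` import (the demo carries its own OrbitControls copy, as the hunk header suggests):

```ts
import { Quaternion, Vector3 } from 'three';

// align an arbitrary unit up-vector with +Y so that camera.up becomes the orbit axis
const up = new Vector3(0, 0, 1);
const quat = new Quaternion().setFromUnitVectors(up, new Vector3(0, 1, 0));

// invert() replaces the deprecated inverse(); it mutates in place, hence the clone()
const quatInverse = quat.clone().invert();
```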
@@ -505,12 +505,10 @@ function setupMenu() {
menu.process.addRange('skip frames', human.config.face.detector, 'skipFrames', 0, 50, 1, (val) => {
human.config.face.detector.skipFrames = parseInt(val);
human.config.face.emotion.skipFrames = parseInt(val);
human.config.face.age.skipFrames = parseInt(val);
human.config.hand.skipFrames = parseInt(val);
});
menu.process.addRange('min confidence', human.config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
human.config.face.detector.minConfidence = parseFloat(val);
human.config.face.gender.minConfidence = parseFloat(val);
human.config.face.emotion.minConfidence = parseFloat(val);
human.config.hand.minConfidence = parseFloat(val);
});
@@ -536,9 +534,7 @@ function setupMenu() {
menu.models.addBool('face detect', human.config.face, 'enabled', (val) => human.config.face.enabled = val);
menu.models.addBool('face mesh', human.config.face.mesh, 'enabled', (val) => human.config.face.mesh.enabled = val);
menu.models.addBool('face iris', human.config.face.iris, 'enabled', (val) => human.config.face.iris.enabled = val);
menu.models.addBool('face description', human.config.face.description, 'enabled', (val) => human.config.face.age.description = val);
// menu.models.addBool('face age', human.config.face.age, 'enabled', (val) => human.config.face.age.enabled = val);
// menu.models.addBool('face gender', human.config.face.gender, 'enabled', (val) => human.config.face.gender.enabled = val);
menu.models.addBool('face description', human.config.face.description, 'enabled', (val) => human.config.face.description.enabled = val);
menu.models.addBool('face emotion', human.config.face.emotion, 'enabled', (val) => human.config.face.emotion.enabled = val);
menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
menu.models.addBool('body pose', human.config.body, 'enabled', (val) => human.config.body.enabled = val);
File diff suppressed because one or more lines are too long (12 files)
@@ -1,6 +1,6 @@
{
"name": "@vladmandic/human",
"version": "1.6.1",
"version": "1.7.0",
"description": "Human: AI-powered 3D Face Detection & Rotation Tracking, Face Description & Recognition, Body Pose Tracking, 3D Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction, Gesture Recognition",
"sideEffects": false,
"main": "dist/human.node.js",
@@ -185,3 +185,20 @@
2021-04-24 16:03:58 INFO: Generate types: ["src/human.ts"]
2021-04-24 16:04:03 INFO: Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-04-24 16:04:03 INFO: Generate TypeDocs: ["src/human.ts"]
2021-04-24 18:43:24 INFO: @vladmandic/human version 1.7.0
2021-04-24 18:43:24 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0
2021-04-24 18:43:24 INFO: Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true,"sourcemap":true,"bundle":true,"metafile":true,"target":"es2018"}
2021-04-24 18:43:24 STATE: Build for: node type: tfjs: {"imports":1,"importBytes":39,"outputBytes":733,"outputFiles":"dist/tfjs.esm.js"}
2021-04-24 18:43:24 STATE: Build for: node type: node: {"imports":36,"importBytes":521048,"outputBytes":296170,"outputFiles":"dist/human.node.js"}
2021-04-24 18:43:24 STATE: Build for: nodeGPU type: tfjs: {"imports":1,"importBytes":43,"outputBytes":737,"outputFiles":"dist/tfjs.esm.js"}
2021-04-24 18:43:24 STATE: Build for: nodeGPU type: node: {"imports":36,"importBytes":521052,"outputBytes":296178,"outputFiles":"dist/human.node-gpu.js"}
2021-04-24 18:43:24 STATE: Build for: nodeWASM type: tfjs: {"imports":1,"importBytes":81,"outputBytes":783,"outputFiles":"dist/tfjs.esm.js"}
2021-04-24 18:43:24 STATE: Build for: nodeWASM type: node: {"imports":36,"importBytes":521098,"outputBytes":296222,"outputFiles":"dist/human.node-wasm.js"}
2021-04-24 18:43:24 STATE: Build for: browserNoBundle type: tfjs: {"imports":1,"importBytes":2488,"outputBytes":1394,"outputFiles":"dist/tfjs.esm.js"}
2021-04-24 18:43:24 STATE: Build for: browserNoBundle type: esm: {"imports":36,"importBytes":521709,"outputBytes":296317,"outputFiles":"dist/human.esm-nobundle.js"}
2021-04-24 18:43:25 STATE: Build for: browserBundle type: tfjs: {"modules":1267,"moduleBytes":4085087,"imports":7,"importBytes":2488,"outputBytes":1101728,"outputFiles":"dist/tfjs.esm.js"}
2021-04-24 18:43:25 STATE: Build for: browserBundle type: iife: {"imports":36,"importBytes":1622043,"outputBytes":1394426,"outputFiles":"dist/human.js"}
2021-04-24 18:43:26 STATE: Build for: browserBundle type: esm: {"imports":36,"importBytes":1622043,"outputBytes":1394384,"outputFiles":"dist/human.esm.js"}
2021-04-24 18:43:26 INFO: Generate types: ["src/human.ts"]
2021-04-24 18:43:31 INFO: Update Change log: ["/home/vlado/dev/human/CHANGELOG.md"]
2021-04-24 18:43:31 INFO: Generate TypeDocs: ["src/human.ts"]
@@ -81,31 +81,6 @@ async function watch() {
.on('ready', () => log.state('Monitoring:', options.monitor));
}
// get file content for a valid url request
/*
function handle(url) {
return new Promise(async (resolve) => {
let obj = { ok: false, file: decodeURI(url) };
if (!fs.existsSync(obj.file)) {
resolve(obj);
} else {
obj.stat = fs.statSync(obj.file);
if (obj.stat.isFile()) obj.ok = true;
if (!obj.ok && obj.stat.isDirectory()) {
if (fs.existsSync(path.join(obj.file, options.defaultFile))) {
obj = await handle(path.join(obj.file, options.defaultFile));
} else if (fs.existsSync(path.join(obj.file, options.defaultFolder, options.defaultFile))) {
obj = await handle(path.join(obj.file, options.defaultFolder, options.defaultFile));
} else {
obj.ok = obj.stat.isDirectory();
}
}
resolve(obj);
}
});
}
*/
function handle(url) {
url = url.split(/[?#]/)[0];
const result = { ok: false, stat: {}, file: '' };
@@ -159,20 +134,26 @@ async function httpRequest(req, res) {
const accept = req.headers['accept-encoding'] ? req.headers['accept-encoding'].includes('br') : false; // does target accept brotli compressed data
res.writeHead(200, {
// 'Content-Length': result.stat.size, // not using as it's misleading for compressed streams
'Content-Language': 'en', 'Content-Type': contentType, 'Content-Encoding': accept ? 'br' : '', 'Last-Modified': result.stat.mtime, 'Cache-Control': 'no-cache', 'X-Content-Type-Options': 'nosniff',
'Content-Language': 'en',
'Content-Type': contentType,
'Content-Encoding': accept ? 'br' : '',
'Last-Modified': result.stat.mtime,
'Cache-Control': 'no-cache',
'X-Content-Type-Options': 'nosniff',
'Cross-Origin-Embedder-Policy': 'require-corp',
'Cross-Origin-Opener-Policy': 'same-origin',
});
const compress = zlib.createBrotliCompress({ params: { [zlib.constants.BROTLI_PARAM_QUALITY]: 5 } }); // instance of brotli compression with level 5
const stream = fs.createReadStream(result.file);
if (!accept) stream.pipe(res); // don't compress data
else stream.pipe(compress).pipe(res); // compress data
// alternative methods of sending data
// 2. read stream and send by chunk
// alternative #2 read stream and send by chunk
// const stream = fs.createReadStream(result.file);
// stream.on('data', (chunk) => res.write(chunk));
// stream.on('end', () => res.end());
// 3. read entire file and send it as blob
// alternative #3 read entire file and send it as blob
// const data = fs.readFileSync(result.file);
// res.write(data);
log.data(`${req.method}/${req.httpVersion}`, res.statusCode, contentType, result.stat.size, req.url, ip);
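This hunk is the point of the commit: serving every asset with `Cross-Origin-Embedder-Policy: require-corp` and `Cross-Origin-Opener-Policy: same-origin` makes the page cross-origin isolated, which browsers require before exposing `SharedArrayBuffer` (and therefore multi-threaded WASM backends). A stripped-down sketch of a Node server doing only that; the port and payload are hypothetical, the dev server above does considerably more:

```ts
import * as http from 'http';

// minimal sketch: any page served with these two headers reports
// self.crossOriginIsolated === true and may use SharedArrayBuffer
const server = http.createServer((req, res) => {
  res.writeHead(200, {
    'Content-Type': 'text/html',
    'Cross-Origin-Embedder-Policy': 'require-corp',
    'Cross-Origin-Opener-Policy': 'same-origin',
  });
  res.end('<html><body><script>document.write(String(self.crossOriginIsolated));</script></body></html>');
});

server.listen(8001); // hypothetical port
```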
@@ -269,7 +269,7 @@ export class Pipeline {
const transformedCoords = tf.tensor2d(transformedCoordsData);
// do rotation one more time with mesh keypoints if we want to return perfect image
if (config.face.detector.rotation && config.face.mesh.enabled && (config.face.description.enabled || config.face.embedding.enabled) && tf.ENV.flags.IS_BROWSER) {
if (config.face.detector.rotation && config.face.mesh.enabled && config.face.description.enabled && tf.ENV.flags.IS_BROWSER) {
const [indexOfMouth, indexOfForehead] = (box.landmarks.length >= meshLandmarks.count) ? meshLandmarks.symmetryLine : blazeFaceLandmarks.symmetryLine;
angle = util.computeRotation(box.landmarks[indexOfMouth], box.landmarks[indexOfForehead]);
const faceCenter = bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint });
@@ -129,17 +129,7 @@ export interface Config {
enabled: boolean,
modelPath: string,
skipFrames: number,
},
age: {
enabled: boolean,
modelPath: string,
skipFrames: number,
},
gender: {
enabled: boolean,
minConfidence: number,
modelPath: string,
skipFrames: number,
},
emotion: {
enabled: boolean,
@@ -147,10 +137,6 @@ export interface Config {
skipFrames: number,
modelPath: string,
},
embedding: {
enabled: boolean,
modelPath: string,
},
},
/** Controlls and configures all body detection specific options
* - enabled: true/false
@@ -323,6 +309,7 @@ const config: Config = {
// can be either absolute path or relative to modelBasePath
skipFrames: 31, // how many frames to go without re-running the detector
// only used for video inputs
minConfidence: 0.1, // threshold for discarding a prediction
},
emotion: {
@@ -332,29 +319,6 @@ const config: Config = {
modelPath: 'emotion.json', // face emotion model
// can be either absolute path or relative to modelBasePath
},
age: {
enabled: false, // obsolete, replaced by description module
modelPath: 'age.json', // age model
// can be either absolute path or relative to modelBasePath
skipFrames: 33, // how many frames to go without re-running the detector
// only used for video inputs
},
gender: {
enabled: false, // obsolete, replaced by description module
minConfidence: 0.1, // threshold for discarding a prediction
modelPath: 'gender.json', // gender model
// can be either absolute path or relative to modelBasePath
skipFrames: 34, // how many frames to go without re-running the detector
// only used for video inputs
},
embedding: {
enabled: false, // obsolete, replaced by description module
modelPath: 'mobileface.json', // face descriptor model
// can be either absolute path or relative to modelBasePath
},
},
body: {
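With the `age`, `gender` and `embedding` blocks removed, the combined `face.description` (faceres) model is the single source for age, gender and the face descriptor. A hedged sketch of the corresponding user configuration; the partial-config constructor follows the library's usual pattern, and the `modelBasePath` value is an assumption:

```ts
import { Human } from '@vladmandic/human';

// partial config merged over the defaults shown above; only the relevant switches
const human = new Human({
  modelBasePath: '../models', // assumption: models served relative to the demo page
  face: {
    enabled: true,
    description: { enabled: true }, // replaces the obsolete age/gender/embedding modules
    emotion: { enabled: true },
  },
});
```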
@@ -1,9 +1,6 @@
import { log, now } from './helpers';
import * as tf from '../dist/tfjs.esm.js';
import * as age from './age/age';
import * as gender from './gender/gender';
import * as emotion from './emotion/emotion';
import * as embedding from './embedding/embedding';
import * as faceres from './faceres/faceres';
type Tensor = typeof tf.Tensor;
@@ -147,28 +144,6 @@ export const detectFace = async (parent, input): Promise<any> => {
const rotation = calculateFaceAngle(face, [input.shape[2], input.shape[1]]);
// run age, inherits face from blazeface
parent.analyze('Start Age:');
if (parent.config.async) {
ageRes = parent.config.face.age.enabled ? age.predict(face.image, parent.config) : {};
} else {
parent.state = 'run:age';
timeStamp = now();
ageRes = parent.config.face.age.enabled ? await age.predict(face.image, parent.config) : {};
parent.perf.age = Math.trunc(now() - timeStamp);
}
// run gender, inherits face from blazeface
parent.analyze('Start Gender:');
if (parent.config.async) {
genderRes = parent.config.face.gender.enabled ? gender.predict(face.image, parent.config) : {};
} else {
parent.state = 'run:gender';
timeStamp = now();
genderRes = parent.config.face.gender.enabled ? await gender.predict(face.image, parent.config) : {};
parent.perf.gender = Math.trunc(now() - timeStamp);
}
// run emotion, inherits face from blazeface
parent.analyze('Start Emotion:');
if (parent.config.async) {
@@ -181,18 +156,6 @@ export const detectFace = async (parent, input): Promise<any> => {
}
parent.analyze('End Emotion:');
// run emotion, inherits face from blazeface
parent.analyze('Start Embedding:');
if (parent.config.async) {
embeddingRes = parent.config.face.embedding.enabled ? embedding.predict(face, parent.config) : [];
} else {
parent.state = 'run:embedding';
timeStamp = now();
embeddingRes = parent.config.face.embedding.enabled ? await embedding.predict(face, parent.config) : [];
parent.perf.embedding = Math.trunc(now() - timeStamp);
}
parent.analyze('End Embedding:');
// run emotion, inherits face from blazeface
parent.analyze('Start Description:');
if (parent.config.async) {
@@ -226,10 +189,10 @@ export const detectFace = async (parent, input): Promise<any> => {
// combine results
faceRes.push({
...face,
age: descRes.age || ageRes.age,
gender: descRes.gender || genderRes.gender,
genderConfidence: descRes.genderConfidence || genderRes.confidence,
embedding: descRes.descriptor || embeddingRes,
age: descRes.age,
gender: descRes.gender,
genderConfidence: descRes.genderConfidence,
embedding: descRes.descriptor,
emotion: emotionRes,
iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
rotation,
@@ -121,7 +121,7 @@ export async function predict(image, config) {
tf.tidy(() => {
const gender = resT.find((t) => t.shape[1] === 1).dataSync();
const confidence = Math.trunc(200 * Math.abs((gender[0] - 0.5))) / 100;
if (confidence > config.face.gender.minConfidence) {
if (confidence > config.face.description.minConfidence) {
obj.gender = gender[0] <= 0.5 ? 'female' : 'male';
obj.genderConfidence = Math.min(0.99, confidence);
}
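For reference, the confidence formula above folds the raw sigmoid output symmetrically around 0.5: a score of 0.9 gives Math.trunc(200 * 0.4) / 100 = 0.8, while 0.5 gives 0. A standalone sketch of that mapping (not the library code):

```ts
// map a raw gender score in [0, 1] to a confidence in [0, 1], symmetric around 0.5
const genderConfidence = (score: number): number => Math.trunc(200 * Math.abs(score - 0.5)) / 100;

console.log(genderConfidence(0.9));  // 0.8 -> would be reported as 'male' (score > 0.5)
console.log(genderConfidence(0.75)); // 0.5
console.log(genderConfidence(0.5));  // 0   -> below any positive minConfidence, so discarded
```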
src/human.ts
@@ -4,15 +4,11 @@ import * as tf from '../dist/tfjs.esm.js';
import * as backend from './tfjs/backend';
import * as faceall from './faceall';
import * as facemesh from './blazeface/facemesh';
import * as age from './age/age';
import * as gender from './gender/gender';
import * as faceres from './faceres/faceres';
import * as emotion from './emotion/emotion';
import * as embedding from './embedding/embedding';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
import * as blazepose from './blazepose/blazepose';
import * as efficientpose from './efficientpose/efficientpose';
import * as nanodet from './nanodet/nanodet';
import * as gesture from './gesture/gesture';
import * as image from './image/image';
@@ -103,8 +99,6 @@ export class Human {
/** Internal: Currently loaded classes */
classes: {
facemesh: typeof facemesh;
age: typeof age;
gender: typeof gender;
emotion: typeof emotion;
body: typeof posenet | typeof blazepose;
hand: typeof handpose;
@@ -162,8 +156,6 @@ export class Human {
// export raw access to underlying models
this.classes = {
facemesh,
age,
gender,
emotion,
faceres,
body: this.config.body.modelPath.includes('posenet') ? posenet : blazepose,
@@ -212,10 +204,9 @@ export class Human {
/** Simmilarity method calculates simmilarity between two provided face descriptors (face embeddings)
* - Calculation is based on normalized Minkowski distance between
*/
// eslint-disable-next-line class-methods-use-this
similarity(embedding1: Array<number>, embedding2: Array<number>): number {
if (this.config.face.description.enabled) return faceres.similarity(embedding1, embedding2);
if (this.config.face.embedding.enabled) return embedding.similarity(embedding1, embedding2);
return 0;
return faceres.similarity(embedding1, embedding2);
}
/** Enhance method performs additional enhacements to face image previously detected for futher processing
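After this change `similarity()` always delegates to faceres instead of checking which of the two descriptor modules is enabled. A hedged usage sketch against the public API shown above; the function name and canvas inputs are illustrative:

```ts
import { Human } from '@vladmandic/human';

// imageA/imageB stand in for any supported input (canvas, image, video, tensor, ...)
async function compareFaces(human: Human, imageA: HTMLCanvasElement, imageB: HTMLCanvasElement): Promise<number> {
  const first = await human.detect(imageA);
  const second = await human.detect(imageB);
  // per-face descriptors come from the embedding field populated in faceall above
  return human.similarity(first.face[0].embedding, second.face[0].embedding);
}
```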
@@ -262,39 +253,27 @@ export class Human {
if (this.config.async) {
[
this.models.face,
this.models.age,
this.models.gender,
this.models.emotion,
this.models.embedding,
this.models.handpose,
this.models.posenet,
this.models.blazepose,
this.models.efficientpose,
this.models.nanodet,
this.models.faceres,
] = await Promise.all([
this.models.face || (this.config.face.enabled ? facemesh.load(this.config) : null),
this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
this.models.embedding || ((this.config.face.enabled && this.config.face.embedding.enabled) ? embedding.load(this.config) : null),
this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
this.models.posenet || (this.config.body.enabled && this.config.body.modelPath.includes('posenet') ? <any>posenet.load(this.config) : null),
this.models.posenet || (this.config.body.enabled && this.config.body.modelPath.includes('posenet') ? posenet.load(this.config) : null),
this.models.blazepose || (this.config.body.enabled && this.config.body.modelPath.includes('blazepose') ? blazepose.load(this.config) : null),
this.models.efficientpose || (this.config.body.enabled && this.config.body.modelPath.includes('efficientpose') ? efficientpose.load(this.config) : null),
this.models.nanodet || (this.config.object.enabled ? nanodet.load(this.config) : null),
this.models.faceres || ((this.config.face.enabled && this.config.face.description.enabled) ? faceres.load(this.config) : null),
]);
} else {
if (this.config.face.enabled && !this.models.face) this.models.face = await facemesh.load(this.config);
if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding) this.models.embedding = await embedding.load(this.config);
if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
if (this.config.body.enabled && !this.models.posenet && this.config.body.modelPath.includes('posenet')) this.models.posenet = await posenet.load(this.config);
if (this.config.body.enabled && !this.models.blazepose && this.config.body.modelPath.includes('blazepose')) this.models.blazepose = await blazepose.load(this.config);
if (this.config.body.enabled && !this.models.efficientpose && this.config.body.modelPath.includes('efficientpose')) this.models.efficientpose = await efficientpose.load(this.config);
if (this.config.object.enabled && !this.models.nanodet) this.models.nanodet = await nanodet.load(this.config);
if (this.config.face.enabled && this.config.face.description.enabled && !this.models.faceres) this.models.faceres = await faceres.load(this.config);
}
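The async branch above destructures a single `Promise.all` so every enabled model downloads in parallel, while the `this.models.x || ...` guards make the call idempotent. A generic sketch of that pattern with hypothetical loaders standing in for `facemesh.load()`, `emotion.load()`, and so on:

```ts
type Model = { name: string };

// hypothetical loaders; the real ones fetch and initialize TFJS graph models
const loadFace = async (): Promise<Model> => ({ name: 'facemesh' });
const loadEmotion = async (): Promise<Model> => ({ name: 'emotion' });

const models: { face?: Model | null; emotion?: Model | null } = {};

async function loadEnabled(config: { face: boolean; emotion: boolean }) {
  // each slot loads at most once (models.x || ...) and only when enabled, all in parallel
  [models.face, models.emotion] = await Promise.all([
    models.face || (config.face ? loadFace() : null),
    models.emotion || (config.emotion ? loadEmotion() : null),
  ]);
}

loadEnabled({ face: true, emotion: false }).then(() => console.log(models)); // emotion stays null
```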
@@ -446,14 +425,12 @@ export class Human {
if (this.config.async) {
if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? posenet.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? blazepose.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('efficientpose')) bodyRes = this.config.body.enabled ? efficientpose.predict(process.tensor, this.config) : [];
if (this.perf.body) delete this.perf.body;
} else {
this.state = 'run:body';
timeStamp = now();
if (this.config.body.modelPath.includes('posenet')) bodyRes = this.config.body.enabled ? await posenet.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('blazepose')) bodyRes = this.config.body.enabled ? await blazepose.predict(process.tensor, this.config) : [];
else if (this.config.body.modelPath.includes('efficientpose')) bodyRes = this.config.body.enabled ? await efficientpose.predict(process.tensor, this.config) : [];
current = Math.trunc(now() - timeStamp);
if (current > 0) this.perf.body = current;
}
File diff suppressed because one or more lines are too long
@@ -162,7 +162,7 @@
<section class="tsd-panel tsd-member tsd-kind-property tsd-parent-kind-class">
<a name="classes" class="tsd-anchor"></a>
<h3>classes</h3>
<div class="tsd-signature tsd-kind-icon">classes<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>age<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>body<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>emotion<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>facemesh<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>faceres<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>gender<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>hand<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>nanodet<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol"> }</span></div>
<div class="tsd-signature tsd-kind-icon">classes<span class="tsd-signature-symbol">:</span> <span class="tsd-signature-symbol">{ </span>body<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>emotion<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>facemesh<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>faceres<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>hand<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol">; </span>nanodet<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol"> }</span></div>
<aside class="tsd-sources">
</aside>
<div class="tsd-comment tsd-typography">
@@ -173,9 +173,6 @@
<div class="tsd-type-declaration">
<h4>Type declaration</h4>
<ul class="tsd-parameters">
<li class="tsd-parameter">
<h5>age<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span></h5>
</li>
<li class="tsd-parameter">
<h5>body<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">__module</span></h5>
</li>
@@ -188,9 +185,6 @@
<li class="tsd-parameter">
<h5>faceres<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span></h5>
</li>
<li class="tsd-parameter">
<h5>gender<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span></h5>
</li>
<li class="tsd-parameter">
<h5>hand<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">__module</span></h5>
</li>
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<unknown>;
@@ -124,17 +124,7 @@ export interface Config {
enabled: boolean;
modelPath: string;
skipFrames: number;
};
age: {
enabled: boolean;
modelPath: string;
skipFrames: number;
};
gender: {
enabled: boolean;
minConfidence: number;
modelPath: string;
skipFrames: number;
};
emotion: {
enabled: boolean;
@@ -142,10 +132,6 @@ export interface Config {
skipFrames: number;
modelPath: string;
};
embedding: {
enabled: boolean;
modelPath: string;
};
};
/** Controlls and configures all body detection specific options
* - enabled: true/false
@@ -1,2 +0,0 @@
export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<unknown>;
@@ -1,18 +0,0 @@
import * as tf from '../../dist/tfjs.esm.js';
declare type Tensor = typeof tf.Tensor;
declare type DB = Array<{
name: string;
source: string;
embedding: number[];
}>;
export declare function load(config: any): Promise<any>;
export declare function similarity(embedding1: any, embedding2: any, order?: number): number;
export declare function match(embedding: Array<number>, db: DB, threshold?: number): {
similarity: number;
name: string;
source: string;
embedding: number[];
};
export declare function enhance(input: any): Tensor;
export declare function predict(input: any, config: any): Promise<number[]>;
export {};
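The deleted declaration documents the descriptor-matching surface (a similar shape lives on in the faceres module): `similarity()`, `enhance()`, and `match()` against a small in-memory DB of labeled embeddings. A generic sketch of what such a `match(embedding, db, threshold)` helper boils down to; the scoring function here is an illustrative cosine similarity, not the library's normalized Minkowski distance:

```ts
type Entry = { name: string; source: string; embedding: number[] };

// illustrative scoring only: cosine similarity between two descriptors
const score = (a: number[], b: number[]): number => {
  const dot = a.reduce((sum, v, i) => sum + v * b[i], 0);
  const norm = (v: number[]) => Math.sqrt(v.reduce((sum, x) => sum + x * x, 0));
  return dot / ((norm(a) * norm(b)) || 1);
};

// keep the best-scoring entry above the threshold; empty result if nothing qualifies
function matchDescriptor(embedding: number[], db: Entry[], threshold = 0) {
  let best: Entry & { similarity: number } = { name: '', source: '', embedding: [], similarity: 0 };
  for (const entry of db) {
    const s = score(embedding, entry.embedding);
    if (s > threshold && s > best.similarity) best = { ...entry, similarity: s };
  }
  return best;
}
```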
@@ -1,2 +0,0 @@
export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<unknown>;
@@ -1,7 +1,5 @@
import * as tf from '../dist/tfjs.esm.js';
import * as facemesh from './blazeface/facemesh';
import * as age from './age/age';
import * as gender from './gender/gender';
import * as faceres from './faceres/faceres';
import * as emotion from './emotion/emotion';
import * as posenet from './posenet/posenet';
@@ -91,8 +89,6 @@ export declare class Human {
/** Internal: Currently loaded classes */
classes: {
facemesh: typeof facemesh;
age: typeof age;
gender: typeof gender;
emotion: typeof emotion;
body: typeof posenet | typeof blazepose;
hand: typeof handpose;
wiki
@@ -1 +1 @@
Subproject commit ee4cf3aa27940b10e275ef9e8119e220c4b2d70d
Subproject commit e06119f8538250b84e4ff4caed40746b749a3531