mirror of https://github.com/vladmandic/human

add modelBasePath option

parent 50d3a7697f
commit 57fe43ab5d

@@ -1,6 +1,6 @@
 # @vladmandic/human

-Version: **1.3.5**
+Version: **1.4.0**
 Description: **Human: AI-powered 3D Face Detection, Face Description & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition**

 Author: **Vladimir Mandic <mandic00@live.com>**

@@ -9,6 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**

 ## Changelog

+### **HEAD -> main** 2021/04/08 mandic00@live.com

 ### **1.3.5** 2021/04/06 mandic00@live.com

 - add dynamic viewport and fix web worker

@@ -23,8 +23,8 @@ const userConfig = {
   hand: { enabled: false },
   gesture: { enabled: false },
   body: { enabled: false },
-  // body: { enabled: true, modelPath: '../models/blazepose.json' },
-  // body: { enabled: true, modelPath: '../models/efficientpose.json' },
+  // body: { enabled: true, modelPath: 'blazepose.json' },
+  // body: { enabled: true, modelPath: 'efficientpose.json' },
   object: { enabled: true },
 };
 */

demo/node.js

@@ -12,26 +12,25 @@ let human = null;

 const myConfig = {
   backend: 'tensorflow',
+  modelBasePath: 'file://models/',
   debug: true,
   videoOptimized: false,
   async: false,
   face: {
     enabled: true,
-    detector: { modelPath: 'file://models/blazeface-back.json', enabled: true, rotation: false },
-    mesh: { modelPath: 'file://models/facemesh.json', enabled: true },
-    iris: { modelPath: 'file://models/iris.json', enabled: true },
-    description: { modelPath: 'file://models/faceres.json', enabled: true },
-    emotion: { modelPath: 'file://models/emotion.json', enabled: true },
+    detector: { enabled: true, rotation: false },
+    mesh: { enabled: true },
+    iris: { enabled: true },
+    description: { enabled: true },
+    emotion: { enabled: true },
   },
   hand: {
     enabled: true,
-    detector: { modelPath: 'file://models/handdetect.json' },
-    skeleton: { modelPath: 'file://models/handskeleton.json' },
   },
-  // body: { modelPath: 'file://models/efficientpose.json', enabled: true },
-  // body: { modelPath: 'file://models/blazepose.json', enabled: true },
-  body: { modelPath: 'file://models/posenet.json', enabled: true },
-  object: { modelPath: 'file://models/nanodet.json', enabled: true },
+  // body: { modelPath: 'efficientpose.json', enabled: true },
+  // body: { modelPath: 'blazepose.json', enabled: true },
+  body: { enabled: true },
+  object: { enabled: true },
 };

 async function init() {

@@ -1,6 +1,6 @@
 {
   "name": "@vladmandic/human",
-  "version": "1.3.5",
+  "version": "1.4.0",
   "description": "Human: AI-powered 3D Face Detection, Face Description & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition",
   "sideEffects": false,
   "main": "dist/human.node.js",

@@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';

@@ -8,8 +8,9 @@ let skipped = Number.MAX_SAFE_INTEGER;

 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.face.age.modelPath);
-    if (config.debug) log(`load model: ${config.face.age.modelPath.match(/\/(.*)\./)[1]}`);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath));
+    if (!model || !model.modelUrl) log('load model failed:', config.face.age.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }
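
The same change is repeated in every model module below (body, emotion, face description, gender, hand and object detection): load() now resolves the configured modelPath against config.modelBasePath through the new join() helper and logs either the resolved model.modelUrl or a load failure. A consolidated sketch of that shared pattern, with the module-level model cache shown explicitly (the cache declaration is implied by the diff rather than part of it):

```ts
import { log, join } from '../helpers';
import * as tf from '../../dist/tfjs.esm.js';

let model; // module-level cache so the graph model is only loaded once

export async function load(config) {
  if (!model) {
    // resolve the configured (usually relative) modelPath against modelBasePath
    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath));
    // report the resolved URL on success, or the original modelPath on failure
    if (!model || !model.modelUrl) log('load model failed:', config.face.age.modelPath);
    else if (config.debug) log('load model:', model.modelUrl);
  }
  return model;
}
```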

@@ -16,6 +16,7 @@ export interface Config {
   scoped: boolean,
   videoOptimized: boolean,
   warmup: string,
+  modelBasePath: string,
   filter: {
     enabled: boolean,
     width: number,

@@ -125,6 +126,7 @@ const config: Config = {
   // can be 'webgl', 'wasm', 'cpu', or 'humangl' which is a custom version of webgl
   // leave as empty string to continue using default backend
   // when backend is set outside of Human library
+  modelBasePath: '../models/', // base path for all models
   wasmPath: '../assets/', // path for wasm binaries
   // only used for backend: wasm
   debug: true, // print additional status messages to console

@@ -185,7 +187,8 @@ const config: Config = {
     // detector, mesh, iris, age, gender, emotion
     // (note: module is not loaded until it is required)
     detector: {
-      modelPath: '../models/blazeface-back.json',
+      modelPath: 'blazeface-back.json', // detector model
+      // can be either absolute path or relative to modelBasePath
       rotation: false, // use best-guess rotated face image or just box with rotation as-is
       // false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
       // this parameter is not valid in nodejs

@@ -209,18 +212,21 @@ const config: Config = {

     mesh: {
       enabled: true,
-      modelPath: '../models/facemesh.json',
+      modelPath: 'facemesh.json', // facemesh model
+      // can be either absolute path or relative to modelBasePath
     },

     iris: {
       enabled: true,
-      modelPath: '../models/iris.json',
+      modelPath: 'iris.json', // face iris model
+      // can be either absolute path or relative to modelBasePath
     },

     description: {
       enabled: true, // to improve accuracy of face description extraction it is
       // recommended to enable detector.rotation and mesh.enabled
-      modelPath: '../models/faceres.json',
+      modelPath: 'faceres.json', // face description model
+      // can be either absolute path or relative to modelBasePath
       skipFrames: 31, // how many frames to go without re-running the detector
       // only used for video inputs
     },

@@ -229,12 +235,14 @@ const config: Config = {
       enabled: true,
       minConfidence: 0.1, // threshold for discarding a prediction
       skipFrames: 32, // how many frames to go without re-running the detector
-      modelPath: '../models/emotion.json',
+      modelPath: 'emotion.json', // face emotion model
+      // can be either absolute path or relative to modelBasePath
     },

     age: {
       enabled: false, // obsolete, replaced by description module
-      modelPath: '../models/age.json',
+      modelPath: 'age.json', // age model
+      // can be either absolute path or relative to modelBasePath
       skipFrames: 33, // how many frames to go without re-running the detector
       // only used for video inputs
     },

@@ -242,20 +250,24 @@ const config: Config = {
     gender: {
       enabled: false, // obsolete, replaced by description module
       minConfidence: 0.1, // threshold for discarding a prediction
-      modelPath: '../models/gender.json',
+      modelPath: 'gender.json', // gender model
+      // can be either absolute path or relative to modelBasePath
       skipFrames: 34, // how many frames to go without re-running the detector
       // only used for video inputs
     },

     embedding: {
       enabled: false, // obsolete, replaced by description module
-      modelPath: '../models/mobileface.json',
-    },
+      modelPath: 'mobileface.json', // face descriptor model
+      // can be either absolute path or relative to modelBasePath
+    },
   },

   body: {
     enabled: true,
-    modelPath: '../models/posenet.json', // can be 'posenet', 'blazepose' or 'efficientpose'
+    modelPath: 'posenet.json', // body model
+    // can be either absolute path or relative to modelBasePath
+    // can be 'posenet', 'blazepose' or 'efficientpose'
     // 'blazepose' and 'efficientpose' are experimental
     maxDetections: 10, // maximum number of people detected in the input
     // should be set to the minimum number for performance

@@ -287,16 +299,19 @@ const config: Config = {
     // should be set to the minimum number for performance
     landmarks: true, // detect hand landmarks or just hand boundary box
     detector: {
-      modelPath: '../models/handdetect.json',
+      modelPath: 'handdetect.json', // hand detector model
+      // can be either absolute path or relative to modelBasePath
     },
     skeleton: {
-      modelPath: '../models/handskeleton.json',
+      modelPath: 'handskeleton.json', // hand skeleton model
+      // can be either absolute path or relative to modelBasePath
     },
   },

   object: {
     enabled: false,
-    modelPath: '../models/nanodet.json',
+    modelPath: 'nanodet.json', // object detection model
+    // can be either absolute path or relative to modelBasePath
     // 'nanodet' is experimental
     minConfidence: 0.20, // threshold for discarding a prediction
     iouThreshold: 0.40, // threshold for deciding whether boxes overlap too much
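
With these defaults in place, a user configuration only needs to override modelBasePath to point at wherever the models are hosted; individual modelPath entries stay relative to it unless a specific model should come from somewhere else. A minimal sketch, assuming a hypothetical hosting URL that is not part of this commit:

```ts
const userConfig = {
  modelBasePath: 'https://example.com/human-models/', // hypothetical model host
  face: {
    enabled: true,
    detector: { modelPath: 'blazeface-back.json' }, // resolved against modelBasePath
  },
  // a path starting with '.', '/', 'http:', 'https:' or 'file:' bypasses modelBasePath
  // (see the join() helper added to helpers further below)
  object: { enabled: true, modelPath: 'https://example.com/extra/nanodet.json' },
};
```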

@@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';

@@ -10,8 +10,9 @@ const bodyParts = ['head', 'neck', 'rightShoulder', 'rightElbow', 'rightWrist',

 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.body.modelPath);
-    if (config.debug) log(`load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
+    if (!model || !model.modelUrl) log('load model failed:', config.body.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

@@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';

@@ -12,8 +12,9 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when

 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.face.emotion.modelPath);
-    if (config.debug) log(`load model: ${config.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.emotion.modelPath));
+    if (!model || !model.modelUrl) log('load model failed:', config.face.emotion.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

@@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';

@@ -11,8 +11,9 @@ type DB = Array<{ name: string, source: string, embedding: number[] }>;

 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.face.description.modelPath);
-    if (config.debug) log(`load model: ${config.face.description.modelPath.match(/\/(.*)\./)[1]}`);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.description.modelPath));
+    if (!model || !model.modelUrl) log('load model failed:', config.face.description.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

@@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';

@@ -12,9 +12,10 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when

 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.face.gender.modelPath);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.gender.modelPath));
     alternative = model.inputs[0].shape[3] === 1;
-    if (config.debug) log(`load model: ${config.face.gender.modelPath.match(/\/(.*)\./)[1]}`);
+    if (!model || !model.modelUrl) log('load model failed:', config.face.gender.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

@@ -1,6 +1,4 @@
-// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html
-
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as handdetector from './handdetector';
 import * as handpipeline from './handpipeline';

@@ -57,13 +55,18 @@ export class HandPose {

 export async function load(config): Promise<HandPose> {
   const [handDetectorModel, handPoseModel] = await Promise.all([
-    config.hand.enabled ? tf.loadGraphModel(config.hand.detector.modelPath, { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
-    config.hand.landmarks ? tf.loadGraphModel(config.hand.skeleton.modelPath, { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
+    config.hand.enabled ? tf.loadGraphModel(join(config.modelBasePath, config.hand.detector.modelPath), { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
+    config.hand.landmarks ? tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton.modelPath), { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
   ]);
   const handDetector = new handdetector.HandDetector(handDetectorModel, handDetectorModel?.inputs[0].shape[2], anchors.anchors);
   const handPipeline = new handpipeline.HandPipeline(handDetector, handPoseModel, handPoseModel?.inputs[0].shape[2]);
   const handPose = new HandPose(handPipeline);
-  if (config.hand.enabled && config.debug) log(`load model: ${config.hand.detector.modelPath.match(/\/(.*)\./)[1]}`);
-  if (config.hand.landmarks && config.debug) log(`load model: ${config.hand.skeleton.modelPath.match(/\/(.*)\./)[1]}`);
+
+  if (config.hand.enabled) {
+    if (!handDetectorModel || !handDetectorModel.modelUrl) log('load model failed:', config.hand.detector.modelPath);
+    else if (config.debug) log('load model:', handDetectorModel.modelUrl);
+    if (!handPoseModel || !handPoseModel.modelUrl) log('load model failed:', config.hand.skeleton.modelPath);
+    else if (config.debug) log('load model:', handPoseModel.modelUrl);
+  }
   return handPose;
 }

@@ -1,3 +1,12 @@
+// helper function: join two paths
+export function join(folder: string, file: string): string {
+  const separator = folder.endsWith('/') ? '' : '/';
+  const skipJoin = file.startsWith('.') || file.startsWith('/') || file.startsWith('http:') || file.startsWith('https:') || file.startsWith('file:');
+  const path = skipJoin ? `${file}` : `${folder}${separator}${file}`;
+  if (!path.toLocaleLowerCase().includes('.json')) throw new Error(`Human: ModelPath Error: ${path} Expecting JSON file`);
+  return path;
+}
+
 // helper function: wrapper around console output
 export function log(...msg) {
   const dt = new Date();
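
A quick usage sketch of the join() helper added above, with illustrative paths; this mirrors how the model modules call it when resolving modelPath against modelBasePath:

```ts
import { join } from '../helpers'; // same import the model modules use

// relative file names are appended to the base path, inserting '/' when needed
join('../models/', 'faceres.json');           // '../models/faceres.json'
join('file://models', 'blazeface-back.json'); // 'file://models/blazeface-back.json'

// paths starting with '.', '/', 'http:', 'https:' or 'file:' are returned as-is
join('../models/', 'https://example.com/models/nanodet.json'); // 'https://example.com/models/nanodet.json'

// anything that does not reference a .json manifest throws
// join('../models/', 'posenet.bin'); // Error: Human: ModelPath Error: ../models/posenet.bin Expecting JSON file
```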

@@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
 import { labels } from './labels';

@@ -11,10 +11,11 @@ const scaleBox = 2.5; // increase box size

 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.object.modelPath);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.object.modelPath));
     // @ts-ignore
     model.inputSize = parseInt(Object.values(model.modelSignature['inputs'])[0].tensorShape.dim[2].size);
-    if (config.debug) log(`load model: ${config.object.modelPath.match(/\/(.*)\./)[1]}`);
+    if (!model || !model.modelUrl) log('load model failed:', config.object.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

@@ -9,26 +9,25 @@ const config = {
   videoOptimized: false,
   async: false,
   warmup: 'full',
+  modelBasePath: 'file://models/',
   filter: {
     enabled: true,
   },
   face: {
     enabled: true,
-    detector: { modelPath: 'file://models/blazeface-back.json', enabled: true, rotation: false },
-    mesh: { modelPath: 'file://models/facemesh.json', enabled: true },
-    iris: { modelPath: 'file://models/iris.json', enabled: true },
-    description: { modelPath: 'file://models/faceres.json', enabled: true },
-    emotion: { modelPath: 'file://models/emotion.json', enabled: true },
+    detector: { enabled: true, rotation: false },
+    mesh: { enabled: true },
+    iris: { enabled: true },
+    description: { enabled: true },
+    emotion: { enabled: true },
   },
   hand: {
     enabled: true,
-    detector: { modelPath: 'file://models/handdetect.json' },
-    skeleton: { modelPath: 'file://models/handskeleton.json' },
   },
-  // body: { modelPath: 'file://models/efficientpose.json', enabled: true },
-  // body: { modelPath: 'file://models/blazepose.json', enabled: true },
-  body: { modelPath: 'file://models/posenet.json', enabled: true },
-  object: { modelPath: 'file://models/nanodet.json', enabled: true },
+  // body: { modelPath: 'efficientpose.json', enabled: true },
+  // body: { modelPath: 'blazepose.json', enabled: true },
+  body: { modelPath: 'posenet.json', enabled: true },
+  object: { enabled: true },
 };

 async function test() {

wiki

@@ -1 +1 @@
-Subproject commit f5850bc4ed575a9973854d3d35242d181d7f4b65
+Subproject commit d09d36cf9de309728504402bf301ac3ab018de65