add modelBasePath option

pull/134/head
Vladimir Mandic 2021-04-09 08:07:58 -04:00
parent 4b44d7829a
commit ea8a96ad47
34 changed files with 820 additions and 774 deletions
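In practice, the new option lets one base path replace the per-model '../models/...' or 'file://models/...' prefixes: configuration sets a single modelBasePath and refers to model files by name. A minimal before/after sketch, based purely on the config changes in this commit:

// before: each model spelled out its full path
//   face: { detector: { modelPath: '../models/blazeface-back.json' } },
//   body: { modelPath: '../models/posenet.json' },
// after: one base path, relative model names
const config = {
  modelBasePath: '../models/', // base path for all models
  face: { detector: { modelPath: 'blazeface-back.json' } },
  body: { modelPath: 'posenet.json' },
};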

View File

@ -1,6 +1,6 @@
 # @vladmandic/human
-Version: **1.3.5**
+Version: **1.4.0**
 Description: **Human: AI-powered 3D Face Detection, Face Description & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition**
 Author: **Vladimir Mandic <mandic00@live.com>**
@ -9,6 +9,9 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 ## Changelog
+### **HEAD -> main** 2021/04/08 mandic00@live.com
 ### **1.3.5** 2021/04/06 mandic00@live.com
 - add dynamic viewport and fix web worker

View File

@ -23,8 +23,8 @@ const userConfig = {
   hand: { enabled: false },
   gesture: { enabled: false },
   body: { enabled: false },
-  // body: { enabled: true, modelPath: '../models/blazepose.json' },
-  // body: { enabled: true, modelPath: '../models/efficientpose.json' },
+  // body: { enabled: true, modelPath: 'blazepose.json' },
+  // body: { enabled: true, modelPath: 'efficientpose.json' },
   object: { enabled: true },
 };
 */

View File

@ -12,26 +12,25 @@ let human = null;
 const myConfig = {
   backend: 'tensorflow',
+  modelBasePath: 'file://models/',
   debug: true,
   videoOptimized: false,
   async: false,
   face: {
     enabled: true,
-    detector: { modelPath: 'file://models/blazeface-back.json', enabled: true, rotation: false },
-    mesh: { modelPath: 'file://models/facemesh.json', enabled: true },
-    iris: { modelPath: 'file://models/iris.json', enabled: true },
-    description: { modelPath: 'file://models/faceres.json', enabled: true },
-    emotion: { modelPath: 'file://models/emotion.json', enabled: true },
+    detector: { enabled: true, rotation: false },
+    mesh: { enabled: true },
+    iris: { enabled: true },
+    description: { enabled: true },
+    emotion: { enabled: true },
   },
   hand: {
     enabled: true,
-    detector: { modelPath: 'file://models/handdetect.json' },
-    skeleton: { modelPath: 'file://models/handskeleton.json' },
   },
-  // body: { modelPath: 'file://models/efficientpose.json', enabled: true },
-  // body: { modelPath: 'file://models/blazepose.json', enabled: true },
-  body: { modelPath: 'file://models/posenet.json', enabled: true },
-  object: { modelPath: 'file://models/nanodet.json', enabled: true },
+  // body: { modelPath: 'efficientpose.json', enabled: true },
+  // body: { modelPath: 'blazepose.json', enabled: true },
+  body: { enabled: true },
+  object: { enabled: true },
 };
 async function init() {

dist/human.esm.js vendored (650 changes): file diff suppressed because one or more lines are too long

dist/human.js vendored (650 changes): file diff suppressed because one or more lines are too long

dist/human.js.map vendored (4 changes): file diff suppressed because one or more lines are too long

dist/human.node.js vendored (16 changes): file diff suppressed because one or more lines are too long

Diffs for several additional files are likewise suppressed because one or more lines are too long.

View File

@ -1,6 +1,6 @@
 {
   "name": "@vladmandic/human",
-  "version": "1.3.5",
+  "version": "1.4.0",
   "description": "Human: AI-powered 3D Face Detection, Face Description & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition",
   "sideEffects": false,
   "main": "dist/human.node.js",

View File

@ -212,7 +212,7 @@ async function typedoc(entryPoint) {
 async function build(f, msg, dev = false) {
   if (busy) {
     log.state('Build: busy...');
-    setTimeout(() => build(f, msg), 500);
+    setTimeout(() => build(f, msg, dev), 500);
     return;
   }
   busy = true;

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
@ -8,8 +8,9 @@ let skipped = Number.MAX_SAFE_INTEGER;
 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.face.age.modelPath);
-    if (config.debug) log(`load model: ${config.face.age.modelPath.match(/\/(.*)\./)[1]}`);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.age.modelPath));
+    if (!model || !model.modelUrl) log('load model failed:', config.face.age.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 const NUM_LANDMARKS = 6;
@ -123,8 +123,9 @@ export class BlazeFaceModel {
 }
 export async function load(config) {
-  const blazeface = await tf.loadGraphModel(config.face.detector.modelPath, { fromTFHub: config.face.detector.modelPath.includes('tfhub.dev') });
-  const model = new BlazeFaceModel(blazeface, config);
-  if (config.debug) log(`load model: ${config.face.detector.modelPath.match(/\/(.*)\./)[1]}`);
-  return model;
+  const model = await tf.loadGraphModel(join(config.modelBasePath, config.face.detector.modelPath), { fromTFHub: config.face.detector.modelPath.includes('tfhub.dev') });
+  const blazeFace = new BlazeFaceModel(model, config);
+  if (!model || !model.modelUrl) log('load model failed:', config.face.detector.modelPath);
+  else if (config.debug) log('load model:', model.modelUrl);
+  return blazeFace;
 }

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as blazeface from './blazeface';
 import * as facepipeline from './facepipeline';
@ -58,17 +58,22 @@ export class MediaPipeFaceMesh {
   }
 }
-let faceModels = [null, null, null];
+let faceModels:[any, any, any] = [null, null, null];
 export async function load(config): Promise<MediaPipeFaceMesh> {
+  // @ts-ignore
   faceModels = await Promise.all([
     (!faceModels[0] && config.face.enabled) ? blazeface.load(config) : null,
-    (!faceModels[1] && config.face.mesh.enabled) ? tf.loadGraphModel(config.face.mesh.modelPath, { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }) : null,
-    (!faceModels[2] && config.face.iris.enabled) ? tf.loadGraphModel(config.face.iris.modelPath, { fromTFHub: config.face.iris.modelPath.includes('tfhub.dev') }) : null,
+    (!faceModels[1] && config.face.mesh.enabled) ? tf.loadGraphModel(join(config.modelBasePath, config.face.mesh.modelPath), { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }) : null,
+    (!faceModels[2] && config.face.iris.enabled) ? tf.loadGraphModel(join(config.modelBasePath, config.face.iris.modelPath), { fromTFHub: config.face.iris.modelPath.includes('tfhub.dev') }) : null,
   ]);
   const faceMesh = new MediaPipeFaceMesh(faceModels[0], faceModels[1], faceModels[2], config);
-  if (config.face.mesh.enabled && config.debug) log(`load model: ${config.face.mesh.modelPath.match(/\/(.*)\./)[1]}`);
-  if (config.face.iris.enabled && config.debug) log(`load model: ${config.face.iris.modelPath.match(/\/(.*)\./)[1]}`);
+  if (config.face.mesh.enabled) {
+    if (!faceModels[1] || !faceModels[1].modelUrl) log('load model failed:', config.face.age.modelPath);
+    else if (config.debug) log('load model:', faceModels[1].modelUrl);
+  }
+  if (config.face.iris.enabled) {
+    if (!faceModels[2] || !faceModels[1].modelUrl) log('load model failed:', config.face.age.modelPath);
+    else if (config.debug) log('load model:', faceModels[2].modelUrl);
+  }
   return faceMesh;
 }

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
 import * as annotations from './annotations';
@ -7,10 +7,11 @@ let model;
 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.body.modelPath);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
     model.width = parseInt(model.signature.inputs['input_1:0'].tensorShape.dim[2].size);
     model.height = parseInt(model.signature.inputs['input_1:0'].tensorShape.dim[1].size);
-    if (config.debug) log(`load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);
+    if (!model || !model.modelUrl) log('load model failed:', config.body.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

View File

@ -16,6 +16,7 @@ export interface Config {
   scoped: boolean,
   videoOptimized: boolean,
   warmup: string,
+  modelBasePath: string,
   filter: {
     enabled: boolean,
     width: number,
@ -125,6 +126,7 @@ const config: Config = {
   // can be 'webgl', 'wasm', 'cpu', or 'humangl' which is a custom version of webgl
   // leave as empty string to continue using default backend
   // when backend is set outside of Human library
+  modelBasePath: '../models/', // base path for all models
   wasmPath: '../assets/', // path for wasm binaries
   // only used for backend: wasm
   debug: true, // print additional status messages to console
@ -185,7 +187,8 @@ const config: Config = {
     // detector, mesh, iris, age, gender, emotion
     // (note: module is not loaded until it is required)
     detector: {
-      modelPath: '../models/blazeface-back.json',
+      modelPath: 'blazeface-back.json', // detector model
+      // can be either absolute path or relative to modelBasePath
       rotation: false, // use best-guess rotated face image or just box with rotation as-is
       // false means higher performance, but incorrect mesh mapping if face angle is above 20 degrees
       // this parameter is not valid in nodejs
@ -209,18 +212,21 @@ const config: Config = {
     mesh: {
       enabled: true,
-      modelPath: '../models/facemesh.json',
+      modelPath: 'facemesh.json', // facemesh model
+      // can be either absolute path or relative to modelBasePath
     },
     iris: {
       enabled: true,
-      modelPath: '../models/iris.json',
+      modelPath: 'iris.json', // face iris model
+      // can be either absolute path or relative to modelBasePath
     },
     description: {
       enabled: true, // to improve accuracy of face description extraction it is
       // recommended to enable detector.rotation and mesh.enabled
-      modelPath: '../models/faceres.json',
+      modelPath: 'faceres.json', // face description model
+      // can be either absolute path or relative to modelBasePath
       skipFrames: 31, // how many frames to go without re-running the detector
       // only used for video inputs
     },
@ -229,12 +235,14 @@ const config: Config = {
       enabled: true,
       minConfidence: 0.1, // threshold for discarding a prediction
       skipFrames: 32, // how many frames to go without re-running the detector
-      modelPath: '../models/emotion.json',
+      modelPath: 'emotion.json', // face emotion model
+      // can be either absolute path or relative to modelBasePath
     },
     age: {
       enabled: false, // obsolete, replaced by description module
-      modelPath: '../models/age.json',
+      modelPath: 'age.json', // age model
+      // can be either absolute path or relative to modelBasePath
       skipFrames: 33, // how many frames to go without re-running the detector
       // only used for video inputs
     },
@ -242,20 +250,24 @@ const config: Config = {
     gender: {
       enabled: false, // obsolete, replaced by description module
       minConfidence: 0.1, // threshold for discarding a prediction
-      modelPath: '../models/gender.json',
+      modelPath: 'gender.json', // gender model
+      // can be either absolute path or relative to modelBasePath
       skipFrames: 34, // how many frames to go without re-running the detector
       // only used for video inputs
     },
     embedding: {
       enabled: false, // obsolete, replaced by description module
-      modelPath: '../models/mobileface.json',
-    },
+      modelPath: 'mobileface.json', // face descriptor model
+      // can be either absolute path or relative to modelBasePath
+    },
   },
   body: {
     enabled: true,
-    modelPath: '../models/posenet.json', // can be 'posenet', 'blazepose' or 'efficientpose'
+    modelPath: 'posenet.json', // body model
+    // can be either absolute path or relative to modelBasePath
+    // can be 'posenet', 'blazepose' or 'efficientpose'
     // 'blazepose' and 'efficientpose' are experimental
     maxDetections: 10, // maximum number of people detected in the input
     // should be set to the minimum number for performance
@ -287,16 +299,19 @@ const config: Config = {
     // should be set to the minimum number for performance
     landmarks: true, // detect hand landmarks or just hand boundary box
     detector: {
-      modelPath: '../models/handdetect.json',
+      modelPath: 'handdetect.json', // hand detector model
+      // can be either absolute path or relative to modelBasePath
     },
     skeleton: {
-      modelPath: '../models/handskeleton.json',
+      modelPath: 'handskeleton.json', // hand skeleton model
+      // can be either absolute path or relative to modelBasePath
     },
   },
   object: {
     enabled: false,
-    modelPath: '../models/nanodet.json',
+    modelPath: 'nanodet.json', // object detection model
+    // can be either absolute path or relative to modelBasePath
     // 'nanodet' is experimental
     minConfidence: 0.20, // threshold for discarding a prediction
     iouThreshold: 0.40, // threshold for deciding whether boxes overlap too much
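As the new inline comments state, each modelPath can be either relative to modelBasePath or an absolute path; per the new join() helper further down in this diff, an entry starting with '.', '/', 'http:', 'https:' or 'file:' is used as-is. A short sketch mixing the two forms; the CDN URL below is illustrative only:

const config = {
  modelBasePath: '../models/',
  face: {
    mesh: { enabled: true, modelPath: 'facemesh.json' }, // resolves to '../models/facemesh.json'
    iris: { enabled: true, modelPath: 'https://example.com/models/iris.json' }, // absolute URL (hypothetical), bypasses modelBasePath
  },
};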

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
@ -10,8 +10,9 @@ const bodyParts = ['head', 'neck', 'rightShoulder', 'rightElbow', 'rightWrist',
 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.body.modelPath);
-    if (config.debug) log(`load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
+    if (!model || !model.modelUrl) log('load model failed:', config.body.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
@ -8,8 +8,9 @@ let model;
 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.face.embedding.modelPath);
-    if (config.debug) log(`load model: ${config.face.embedding.modelPath.match(/\/(.*)\./)[1]}`);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.embedding.modelPath));
+    if (!model || !model.modelUrl) log('load model failed:', config.face.embedding.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
@ -12,8 +12,9 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.face.emotion.modelPath);
-    if (config.debug) log(`load model: ${config.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.emotion.modelPath));
+    if (!model || !model.modelUrl) log('load model failed:', config.face.emotion.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
@ -11,8 +11,9 @@ type DB = Array<{ name: string, source: string, embedding: number[] }>;
 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.face.description.modelPath);
-    if (config.debug) log(`load model: ${config.face.description.modelPath.match(/\/(.*)\./)[1]}`);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.description.modelPath));
+    if (!model || !model.modelUrl) log('load model failed:', config.face.description.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
@ -12,9 +12,10 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.face.gender.modelPath);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.face.gender.modelPath));
     alternative = model.inputs[0].shape[3] === 1;
-    if (config.debug) log(`load model: ${config.face.gender.modelPath.match(/\/(.*)\./)[1]}`);
+    if (!model || !model.modelUrl) log('load model failed:', config.face.gender.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

View File

@ -1,6 +1,4 @@
-// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as handdetector from './handdetector';
 import * as handpipeline from './handpipeline';
@ -57,13 +55,18 @@ export class HandPose {
 export async function load(config): Promise<HandPose> {
   const [handDetectorModel, handPoseModel] = await Promise.all([
-    config.hand.enabled ? tf.loadGraphModel(config.hand.detector.modelPath, { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
-    config.hand.landmarks ? tf.loadGraphModel(config.hand.skeleton.modelPath, { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
+    config.hand.enabled ? tf.loadGraphModel(join(config.modelBasePath, config.hand.detector.modelPath), { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
+    config.hand.landmarks ? tf.loadGraphModel(join(config.modelBasePath, config.hand.skeleton.modelPath), { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
   ]);
   const handDetector = new handdetector.HandDetector(handDetectorModel, handDetectorModel?.inputs[0].shape[2], anchors.anchors);
   const handPipeline = new handpipeline.HandPipeline(handDetector, handPoseModel, handPoseModel?.inputs[0].shape[2]);
   const handPose = new HandPose(handPipeline);
-  if (config.hand.enabled && config.debug) log(`load model: ${config.hand.detector.modelPath.match(/\/(.*)\./)[1]}`);
-  if (config.hand.landmarks && config.debug) log(`load model: ${config.hand.skeleton.modelPath.match(/\/(.*)\./)[1]}`);
+  if (config.hand.enabled) {
+    if (!handDetectorModel || !handDetectorModel.modelUrl) log('load model failed:', config.hand.detector.modelPath);
+    else if (config.debug) log('load model:', handDetectorModel.modelUrl);
+    if (!handPoseModel || !handPoseModel.modelUrl) log('load model failed:', config.hand.skeleton.modelPath);
+    else if (config.debug) log('load model:', handPoseModel.modelUrl);
+  }
   return handPose;
 }

View File

@ -1,3 +1,12 @@
+// helper function: join two paths
+export function join(folder: string, file: string): string {
+  const separator = folder.endsWith('/') ? '' : '/';
+  const skipJoin = file.startsWith('.') || file.startsWith('/') || file.startsWith('http:') || file.startsWith('https:') || file.startsWith('file:');
+  const path = skipJoin ? `${file}` : `${folder}${separator}${file}`;
+  if (!path.toLocaleLowerCase().includes('.json')) throw new Error(`Human: ModelPath Error: ${path} Expecting JSON file`);
+  return path;
+}
 // helper function: wrapper around console output
 export function log(...msg) {
   const dt = new Date();
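The join() helper above is what combines modelBasePath with each relative modelPath before the result is handed to tf.loadGraphModel. A few illustrative calls and the outcomes implied by its logic (values are examples, not taken from the sources):

// assuming join() is imported from the helpers module shown above
join('../models/', 'faceres.json');             // '../models/faceres.json' — relative name appended to the base
join('../models', 'faceres.json');              // '../models/faceres.json' — separator inserted when the base lacks a trailing '/'
join('../models/', 'file://models/iris.json');  // 'file://models/iris.json' — '.', '/', 'http:', 'https:' and 'file:' prefixes skip the join
join('../models/', 'blazeface.bin');            // throws 'Human: ModelPath Error: ... Expecting JSON file' — only .json manifests are accepted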

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
 import { labels } from './labels';
@ -11,10 +11,11 @@ const scaleBox = 2.5; // increase box size
 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.object.modelPath);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.object.modelPath));
     // @ts-ignore
     model.inputSize = parseInt(Object.values(model.modelSignature['inputs'])[0].tensorShape.dim[2].size);
-    if (config.debug) log(`load model: ${config.object.modelPath.match(/\/(.*)\./)[1]}`);
+    if (!model || !model.modelUrl) log('load model failed:', config.object.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as modelBase from './modelBase';
 import * as decodeMultiple from './decodeMultiple';
@ -58,8 +58,9 @@ export class PoseNet {
 }
 export async function load(config) {
-  const model = await tf.loadGraphModel(config.body.modelPath);
+  const model = await tf.loadGraphModel(join(config.modelBasePath, config.body.modelPath));
   const mobilenet = new modelBase.BaseModel(model);
-  if (config.debug) log(`load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);
+  if (!model || !model.modelUrl) log('load model failed:', config.body.modelPath);
+  else if (config.debug) log('load model:', model.modelUrl);
   return new PoseNet(mobilenet);
 }

View File

@ -1,4 +1,4 @@
-import { log } from '../helpers';
+import { log, join } from '../helpers';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';
@ -17,8 +17,9 @@ let skipped = Number.MAX_SAFE_INTEGER;
 export async function load(config) {
   if (!model) {
-    model = await tf.loadGraphModel(config.prototype.modelPath);
-    if (config.debug) log(`load model: ${config.prototype.modelPath.match(/\/(.*)\./)[1]}`);
+    model = await tf.loadGraphModel(join(config.modelBasePath, config.prototype.modelPath));
+    if (!model || !model.modelUrl) log('load model failed:', config.prototype.modelPath);
+    else if (config.debug) log('load model:', model.modelUrl);
   }
   return model;
 }

View File

@ -9,26 +9,25 @@ const config = {
   videoOptimized: false,
   async: false,
   warmup: 'full',
+  modelBasePath: 'file://models/',
   filter: {
     enabled: true,
   },
   face: {
     enabled: true,
-    detector: { modelPath: 'file://models/blazeface-back.json', enabled: true, rotation: false },
-    mesh: { modelPath: 'file://models/facemesh.json', enabled: true },
-    iris: { modelPath: 'file://models/iris.json', enabled: true },
-    description: { modelPath: 'file://models/faceres.json', enabled: true },
-    emotion: { modelPath: 'file://models/emotion.json', enabled: true },
+    detector: { enabled: true, rotation: false },
+    mesh: { enabled: true },
+    iris: { enabled: true },
+    description: { enabled: true },
+    emotion: { enabled: true },
   },
   hand: {
     enabled: true,
-    detector: { modelPath: 'file://models/handdetect.json' },
-    skeleton: { modelPath: 'file://models/handskeleton.json' },
   },
-  // body: { modelPath: 'file://models/efficientpose.json', enabled: true },
-  // body: { modelPath: 'file://models/blazepose.json', enabled: true },
-  body: { modelPath: 'file://models/posenet.json', enabled: true },
-  object: { modelPath: 'file://models/nanodet.json', enabled: true },
+  // body: { modelPath: 'efficientpose.json', enabled: true },
+  // body: { modelPath: 'blazepose.json', enabled: true },
+  body: { modelPath: 'posenet.json', enabled: true },
+  object: { enabled: true },
 };
 async function test() {

types/config.d.ts vendored (1 change)
View File

@ -13,6 +13,7 @@ export interface Config {
   scoped: boolean;
   videoOptimized: boolean;
   warmup: string;
+  modelBasePath: string;
   filter: {
     enabled: boolean;
     width: number;

types/helpers.d.ts vendored (1 change)
View File

@ -1,3 +1,4 @@
+export declare function join(folder: string, file: string): string;
 export declare function log(...msg: any[]): void;
 export declare const now: () => number;
 export declare function mergeDeep(...objects: any[]): any;

wiki (submodule, 2 changes)

@ -1 +1 @@
-Subproject commit f5850bc4ed575a9973854d3d35242d181d7f4b65
+Subproject commit d09d36cf9de309728504402bf301ac3ab018de65