mirror of https://github.com/vladmandic/human
added configuration validation
parent cb1ff858e9
commit ccd5ba1e46

@@ -9,8 +9,9 @@
 ## Changelog

-### **HEAD -> main** 2021/09/17 mandic00@live.com
+### **HEAD -> main** 2021/09/18 mandic00@live.com

+- prevent validation failed on some model combinations
+- webgl exception handling

 ### **2.2.2** 2021/09/17 mandic00@live.com

14 TODO.md

@@ -30,6 +30,15 @@ Feature is automatically disabled in NodeJS without user impact
 <br>

+### Face Emotion Detection
+
+Face Emotion detection using WASM backend has reduced precision due to math errors in backend
+
+- Backend WASM incorrect handling of `int32` tensors
+  <https://github.com/tensorflow/tfjs/issues/5641>
+
+<br>
+
 ### Hand Detection

 Enhanced rotation correction for hand detection is not working in NodeJS due to missing kernel op in TFJS

@@ -38,7 +47,10 @@ Feature is automatically disabled in NodeJS without user impact
 - Backend NodeJS missing kernel op `RotateWithOffset`
   <https://github.com/tensorflow/tfjs/issues/5473>

-Hand detection using WASM backend has reduced precision due to math rounding errors in backend
+Hand detection using WASM backend has reduced precision due to math errors in backend

+- Backend WASM incorrect handling of `int32` tensors
+  <https://github.com/tensorflow/tfjs/issues/5641>

 <br>
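
A hedged aside on the `int32` issue above: until the upstream TFJS bug is resolved, the practical mitigation used elsewhere in this commit is to keep image tensors in `float32`. A minimal sketch of that idea (illustrative names only, not the library's pipeline code):

```js
// Minimal sketch: keep image tensors in float32 to avoid the WASM int32 precision issue above.
// Assumes @tensorflow/tfjs is available; function and variable names are illustrative only.
const tf = require('@tensorflow/tfjs');

function rgbToTensor(data, height, width) {
  // data: flat RGB array of length height * width * 3
  // 'float32' instead of 'int32' sidesteps the precision loss in the WASM backend
  return tf.tensor3d(data, [height, width, 3], 'float32');
}
```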

@@ -66,11 +66,11 @@
 "@tensorflow/tfjs-layers": "^3.9.0",
 "@tensorflow/tfjs-node": "^3.9.0",
 "@tensorflow/tfjs-node-gpu": "^3.9.0",
-"@types/node": "^16.9.2",
+"@types/node": "^16.9.3",
 "@typescript-eslint/eslint-plugin": "^4.31.1",
 "@typescript-eslint/parser": "^4.31.1",
-"@vladmandic/build": "^0.5.1",
-"@vladmandic/pilogger": "^0.3.2",
+"@vladmandic/build": "^0.5.2",
+"@vladmandic/pilogger": "^0.3.3",
 "canvas": "^2.8.0",
 "dayjs": "^1.10.7",
 "esbuild": "^0.12.28",

@@ -21,6 +21,7 @@ function calculateSlope(point1x, point1y, point2x, point2y) {
 // point1, point2 are 2d or 3d point arrays (xy[z])
 // returns either a single scalar (2d) or array of two slopes (3d)
 function getSlopes(point1, point2) {
+  if (!point1 || !point2) return [0, 0];
   const slopeXY = calculateSlope(point1[0], point1[1], point2[0], point2[1]);
   if (point1.length === 2) return slopeXY;
   const slopeYZ = calculateSlope(point1[1], point1[2], point2[1], point2[2]);
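
For orientation, a small hypothetical illustration of the return shapes the comments above describe (values are arbitrary):

```js
// Hypothetical calls; shapes follow the comments on getSlopes()
const s2d = getSlopes([0, 0], [1, 2]);       // single scalar slope for 2d points
const s3d = getSlopes([0, 0, 0], [1, 2, 3]); // [slopeXY, slopeYZ] pair for 3d points
const none = getSlopes(null, [1, 2]);        // [0, 0] from the null/undefined guard
```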

@@ -124,7 +124,7 @@ export const hand = (res): GestureResult[] => {
   for (let i = 0; i < res.length; i++) {
     const fingers: Array<{ name: string, position: number }> = [];
     for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
-      if (finger !== 'palmBase' && Array.isArray(pos)) fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
+      if (finger !== 'palmBase' && Array.isArray(pos) && pos[0]) fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
     }
     if (fingers && fingers.length > 0) {
       const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));

@@ -47,7 +47,8 @@ export class HandDetector {
     const scores = await t.scores.data();
     t.boxes = tf.slice(t.predictions, [0, 1], [-1, 4]);
     t.norm = this.normalizeBoxes(t.boxes);
-    t.nms = await tf.image.nonMaxSuppressionAsync(t.norm, t.scores, 10 * config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);
+    // box detection is flaky so we look for 3x boxes than we need results
+    t.nms = await tf.image.nonMaxSuppressionAsync(t.norm, t.scores, 3 * config.hand.maxDetected, config.hand.iouThreshold, config.hand.minConfidence);
     const nms = await t.nms.array() as Array<number>;
     const hands: Array<{ box: Tensor, palmLandmarks: Tensor, confidence: number }> = [];
     for (const index of nms) {
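
The in-code comment above explains the switch from 10x to 3x: NMS is asked for a few times more candidate boxes than `config.hand.maxDetected`, and the surplus is trimmed later in the pipeline (which caps `hands.length` to `maxDetected`). A rough sketch of that pattern with hypothetical names, not the library's exact code:

```js
// Sketch of the over-detect-then-trim idea (hypothetical helper, illustrative only)
async function candidateBoxes(tf, boxes, scores, maxDetected, iouThreshold, minConfidence) {
  // ask NMS for 3x the boxes we ultimately want, since raw palm detections are flaky
  const nms = await tf.image.nonMaxSuppressionAsync(boxes, scores, 3 * maxDetected, iouThreshold, minConfidence);
  const indices = await nms.array(); // selected candidate indices, highest scores first
  tf.dispose(nms);
  return indices; // caller keeps at most maxDetected of these after refinement
}
```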

@@ -22,7 +22,7 @@ export class HandPipeline {
   constructor(handDetector, handPoseModel) {
     this.handDetector = handDetector;
     this.handPoseModel = handPoseModel;
-    this.inputSize = this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0;
+    this.inputSize = this.handPoseModel && this.handPoseModel.inputs[0].shape ? this.handPoseModel.inputs[0].shape[2] : 0;
     this.storedBoxes = [];
     this.skipped = 0;
     this.detectedHands = 0;

@@ -152,6 +152,7 @@ export class HandPipeline {
     }
+    this.storedBoxes = this.storedBoxes.filter((a) => a !== null);
     this.detectedHands = hands.length;
     if (hands.length > config.hand.maxDetected) hands.length = config.hand.maxDetected;
     return hands;
   }
 }

@@ -25,6 +25,23 @@ export const now = () => {
   return parseInt((Number(process.hrtime.bigint()) / 1000 / 1000).toString());
 };

+// helper function: checks current config validity
+export function validate(defaults, config, parent = 'config', msgs: Array<{ reason: string, where: string, expected?: string }> = []) {
+  for (const key of Object.keys(config)) {
+    if (typeof config[key] === 'object') {
+      validate(defaults[key], config[key], key, msgs);
+    } else {
+      const defined = (typeof defaults[key] !== 'undefined');
+      if (!defined) msgs.push({ reason: 'unknown property', where: `${parent}.${key} = ${config[key]}` });
+      const same = typeof defaults[key] === typeof config[key];
+      if (defined && !same) msgs.push({ reason: 'property type mismatch', where: `${parent}.${key} = ${config[key]}`, expected: typeof defaults[key] });
+    }
+    // ok = ok && defined && same;
+  }
+  if (config.debug && parent === 'config' && msgs.length > 0) log('invalid configuration', msgs);
+  return msgs;
+}
+
 // helper function: perform deep merge of multiple objects so it allows full inheriance with overrides
 export function mergeDeep(...objects) {
   const isObject = (obj) => obj && typeof obj === 'object';
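
To make the message format concrete, a hedged example of calling the new helper with a slightly wrong user config (the keys exist in the real defaults, which are imported from the config module; the expected output is inferred from the code above):

```js
// Illustrative only: what validate() collects for a bad user config
const msgs = validate(defaults, { backend: 'humangl', cacheSensitivity: 'high', bogus: true });
// msgs would look roughly like:
// [
//   { reason: 'property type mismatch', where: 'config.cacheSensitivity = high', expected: 'number' },
//   { reason: 'unknown property', where: 'config.bogus = true' },
// ]
```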
|
||||
|
|
13
src/human.ts
13
src/human.ts
|

@@ -2,7 +2,7 @@
  * Human main module
  */

-import { log, now, mergeDeep } from './helpers';
+import { log, now, mergeDeep, validate } from './helpers';
 import { Config, defaults } from './config';
 import type { Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, PersonResult } from './result';
 import * as tf from '../dist/tfjs.esm.js';

@@ -173,7 +173,10 @@ export class Human {
     defaults.backend = this.env.browser ? 'humangl' : 'tensorflow';
-    this.version = app.version; // expose version property on instance of class
+    Object.defineProperty(this, 'version', { value: app.version }); // expose version property directly on class itself
-    this.config = mergeDeep(defaults, userConfig || {});
+    this.config = JSON.parse(JSON.stringify(defaults));
+    Object.seal(this.config);
+    if (userConfig) this.config = mergeDeep(this.config, userConfig);
+    validate(defaults, this.config);
     this.tf = tf;
     this.state = 'idle';
     this.#numTensors = 0;
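
A short, hypothetical illustration of the constructor changes above: the configuration is now an independent deep clone of the defaults, and `version` is exposed as a read-only property:

```js
// Hypothetical usage; Human and defaults as in the code above
const human = new Human();
human.config.cacheSensitivity = 0;      // changes this instance's cloned config only
console.log(defaults.cacheSensitivity); // the shared defaults object is left untouched
console.log(human.version);             // read-only string defined via Object.defineProperty
```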

@@ -248,6 +251,12 @@ export class Human {
     return null;
   }

+  /** Reset configuration to default values */
+  reset = () => this.config = JSON.parse(JSON.stringify(defaults));
+
+  /** Validate current configuration schema */
+  validate = (userConfig?: Partial<Config>) => validate(defaults, userConfig || this.config);
+
   /** Process input as return canvas and tensor
    *
    * @param input: {@link Input}
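
The new instance methods give callers the same checks the test suite uses further below: `validate()` returns the list of configuration problems and `reset()` restores the defaults. A hedged usage sketch:

```js
// Hypothetical usage of the new reset()/validate() methods
const human = new Human();
let issues = human.validate();              // [] when the active config matches the schema
issues = human.validate({ invalid: true }); // one 'unknown property' entry, as exercised in the tests
human.reset();                              // drop any overrides and return to library defaults
```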

@@ -166,7 +166,7 @@ export function process(input: Input, config: Config): { tensor: Tensor | null,
   let pixels;
   if (outCanvas.data) { // if we have data, just convert to tensor
     const shape = [outCanvas.height, outCanvas.width, 3];
-    pixels = tf.tensor3d(outCanvas.data, shape, 'int32');
+    pixels = tf.tensor3d(outCanvas.data, shape, 'float32');
   } else if ((typeof ImageData !== 'undefined') && (outCanvas instanceof ImageData)) { // if input is imagedata, just use it
     pixels = tf.browser ? tf.browser.fromPixels(outCanvas) : null;
   } else if (config.backend === 'webgl' || config.backend === 'humangl') { // tf kernel-optimized method to get imagedata

@@ -40,14 +40,16 @@ async function getImage(human, input) {
   ctx.drawImage(img, 0, 0, img.width, img.height);
   const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
   const res = human.tf.tidy(() => {
-    const tensor = human.tf.tensor(Array.from(imageData.data), [canvas.height, canvas.width, 4], 'int32'); // create rgba image tensor from flat array
+    const tensor = human.tf.tensor(Array.from(imageData.data), [canvas.height, canvas.width, 4], 'float32'); // create rgba image tensor from flat array
     const channels = human.tf.split(tensor, 4, 2); // split rgba to channels
     const rgb = human.tf.stack([channels[0], channels[1], channels[2]], 2); // stack channels back to rgb
     const reshape = human.tf.reshape(rgb, [1, canvas.height, canvas.width, 3]); // move extra dim from the end of tensor and use it as batch number instead
     return reshape;
   });
-  if (res && res.shape[0] === 1 && res.shape[3] === 3) log('state', 'passed: load image:', input, res.shape);
+  const sum = human.tf.sum(res);
+  if (res && res.shape[0] === 1 && res.shape[3] === 3) log('state', 'passed: load image:', input, res.shape, { checksum: sum.dataSync()[0] });
   else log('error', 'failed: load image:', input, res);
+  human.tf.dispose(sum);
   return res;
 }

@@ -98,10 +100,10 @@ async function testWarmup(human, title) {
     // const count = human.tf.engine().state.numTensors;
     // if (count - tensors > 0) log('warn', 'failed: memory', config.warmup, title, 'tensors:', count - tensors);
     printResults(warmup);
-    return true;
+  } else {
+    log('error', 'failed: warmup:', config.warmup, title);
   }
-  log('error', 'failed: warmup:', config.warmup, title);
-  return false;
+  return warmup;
 }

 async function testDetect(human, input, title) {

@@ -124,10 +126,10 @@ async function testDetect(human, input, title) {
     // const count = human.tf.engine().state.numTensors;
     // if (count - tensors > 0) log('warn', 'failed: memory', config.warmup, title, 'tensors:', count - tensors);
     printResults(detect);
-    return true;
+  } else {
+    log('error', 'failed: detect', input || 'random', title);
   }
-  log('error', 'failed: detect', input || 'random', title);
-  return false;
+  return detect;
 }
 const evt = { image: 0, detect: 0, warmup: 0 };
 async function events(event) {

@@ -144,33 +146,94 @@ async function test(Human, inputConfig) {
     return;
   }
   const t0 = process.hrtime.bigint();
-  let human;
+  let res;

   // test event emitter
-  human = new Human(config);
+  const human = new Human(config);

   human.events.addEventListener('warmup', () => events('warmup'));
   human.events.addEventListener('image', () => events('image'));
   human.events.addEventListener('detect', () => events('detect'));

+  // test configuration validation
+  let invalid = human.validate();
+  if (invalid.length === 0) log('state', 'passed: configuration default validation', invalid);
+  else log('error', 'failed: configuration default validation', invalid);
+  config.invalid = true;
+  invalid = human.validate(config);
+  if (invalid.length === 1) log('state', 'passed: configuration invalid validation', invalid);
+  else log('error', 'failed: configuration default validation', invalid);
+  delete config.invalid;
+
   // test warmup sequences
   await testInstance(human);
   config.warmup = 'none';
-  await testWarmup(human, 'default');
+  res = await testWarmup(human, 'default');
+  if (res.error !== 'null') log('error', 'failed: warmup none result mismatch');
+  else log('state', 'passed: warmup none result match');
   config.warmup = 'face';
-  await testWarmup(human, 'default');
+  res = await testWarmup(human, 'default');
+  if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 0 || res?.gesture?.length !== 3) log('error', 'failed: warmup face result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
+  else log('state', 'passed: warmup face result match');
+
   config.warmup = 'body';
-  await testWarmup(human, 'default');
+  res = await testWarmup(human, 'default');
+  if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 0 || res?.gesture?.length !== 3) log('error', 'failed: warmup body result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
+  else log('state', 'passed: warmup body result match');

   // test default config
   log('info', 'test default');
-  human = new Human(config);
-  await testDetect(human, 'samples/ai-body.jpg', 'default');
+  human.reset();
+  config.cacheSensitivity = 0;
+  res = await testDetect(human, 'samples/ai-body.jpg', 'default');
+  if (!res || res?.face?.length !== 1 || res?.face[0].gender !== 'female') log('error', 'failed: default result face mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
+  else log('state', 'passed: default result face match');
+
+  // test default config
+  log('info', 'test object');
+  human.reset();
+  config.object = { enabled: true };
+  res = await testDetect(human, 'samples/ai-body.jpg', 'default');
+  if (!res || res?.object?.length !== 1 || res?.object[0]?.label !== 'person') log('error', 'failed: object result mismatch', res?.object?.length);
+  else log('state', 'passed: object result match');
+
+  // test sensitive config
+  log('info', 'test sensitive');
+  human.reset();
+  config.cacheSensitivity = 0;
+  config.face = { detector: { minConfidence: 0.0001, maxDetected: 1 } };
+  config.body = { minConfidence: 0.0001, maxDetected: 1 };
+  config.hand = { minConfidence: 0.0001, maxDetected: 3 };
+  res = await testDetect(human, 'samples/ai-body.jpg', 'default');
+  if (!res || res?.face?.length !== 1 || res?.body?.length !== 1 || res?.hand?.length !== 3 || res?.gesture?.length !== 9) log('error', 'failed: sensitive result mismatch', res?.face?.length, res?.body?.length, res?.hand?.length, res?.gesture?.length);
+  else log('state', 'passed: sensitive result match');
+
+  // test sensitive details face
+  const face = res && res.face ? res.face[0] : null;
+  if (!face || face?.box?.length !== 4 || face?.mesh?.length !== 478 || face?.emotion?.length !== 4 || face?.embedding?.length !== 1024 || face?.rotation?.matrix?.length !== 9) {
+    log('error', 'failed: sensitive face result mismatch', res?.face?.length, face?.box?.length, face?.mesh?.length, face?.emotion?.length, face?.embedding?.length, face?.rotation?.matrix?.length);
+  } else log('state', 'passed: sensitive face result match');
+
+  // test sensitive details body
+  const body = res && res.body ? res.body[0] : null;
+  if (!body || body?.box?.length !== 4 || body?.keypoints?.length !== 17) log('error', 'failed: sensitive body result mismatch', body);
+  else log('state', 'passed: sensitive body result match');
+
+  // test sensitive details hand
+  const hand = res && res.hand ? res.hand[0] : null;
+  if (!hand || hand?.box?.length !== 4 || hand?.keypoints?.length !== 21) log('error', 'failed: sensitive hand result mismatch', hand?.keypoints?.length);
+  else log('state', 'passed: sensitive hand result match');

   // test detectors only
   log('info', 'test detectors');
-  config.face = { mesh: { enabled: false }, iris: { enabled: false }, hand: { landmarks: false } };
-  human = new Human(config);
-  await testDetect(human, 'samples/ai-body.jpg', 'default');
+  human.reset();
+  config.face = { mesh: { enabled: false }, iris: { enabled: false }, description: { enabled: false }, emotion: { enabled: false } };
+  config.hand = { landmarks: false };
+  res = await testDetect(human, 'samples/ai-body.jpg', 'default');
+  if (!res || res?.face?.length !== 1 || res?.face[0]?.gender || res?.face[0]?.age || res?.face[0]?.embedding) log('error', 'failed: detectors result face mismatch', res?.face);
+  else log('state', 'passed: detector result face match');
+  if (!res || res?.hand?.length !== 1 || res?.hand[0]?.landmarks) log('error', 'failed: detectors result hand mismatch', res?.hand?.length);
+  else log('state', 'passed: detector result hand match');

   // test posenet and movenet
   log('info', 'test body variants');

@@ -8,7 +8,7 @@ const config = {
   async: true,
   face: {
     enabled: true,
-    detector: { enabled: true, rotation: true },
+    detector: { rotation: true },
     mesh: { enabled: true },
     iris: { enabled: true },
     description: { enabled: true },

@@ -19,7 +19,7 @@ const config = {
   async: false,
   face: {
     enabled: true,
-    detector: { enabled: true, rotation: false },
+    detector: { rotation: false },
     mesh: { enabled: true },
     iris: { enabled: true },
     description: { enabled: true },

@@ -5,19 +5,19 @@ const config = {
   modelBasePath: 'file://models/',
   backend: 'tensorflow',
   debug: false,
-  async: false,
+  async: true,
   face: {
     enabled: true,
-    detector: { enabled: true, rotation: true },
+    detector: { rotation: true },
     mesh: { enabled: true },
     iris: { enabled: true },
     description: { enabled: true },
     emotion: { enabled: true },
   },
-  hand: { enabled: false, rotation: true },
-  body: { enabled: false },
-  object: { enabled: false },
-  segmentation: { enabled: false },
+  hand: { enabled: true },
+  body: { enabled: true },
+  object: { enabled: true },
+  segmentation: { enabled: true },
   filter: { enabled: false },
 };

17 test/test.js

@@ -23,15 +23,18 @@ const ignoreMessages = [
   'ExperimentalWarning',
 ];

-const status = {
-  passed: 0,
-  failed: 0,
-};
+const status = {};

 function logMessage(test, data) {
-  log[data[0]](test, ...data[1]);
-  if (data[1][0].startsWith('passed')) status.passed++;
-  if (data[1][0].startsWith('failed')) status.failed++;
+  if (!status[test]) status[test] = { passed: 0, failed: 0 };
+  if (log[data[0]]) {
+    log[data[0]](test, ...data[1]);
+  } else {
+    log.error('unknown facility', test, ...data[1]);
+    status[test].failed++;
+  }
+  if (data[1][0].startsWith('passed')) status[test].passed++;
+  if (data[1][0].startsWith('failed')) status[test].failed++;
 }

 function logStdIO(ok, test, buffer) {
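
Since `status` is now keyed per test file, an end-of-run summary could be derived from it; a hypothetical sketch (not part of this commit, and assuming the logger exposes an `info` method):

```js
// Hypothetical end-of-run summary over the per-test status map
for (const [test, counts] of Object.entries(status)) {
  log.info(test, 'passed:', counts.passed, 'failed:', counts.failed);
}
```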

2 wiki

@@ -1 +1 @@
-Subproject commit 740fcd1b5b2cc92bde0111b630e872cbf7670c81
+Subproject commit b24eafa265bda331788e0d36cf5c854a494e33d6