new human.compare api

pull/356/head
Vladimir Mandic 2021-11-07 10:03:33 -05:00
parent 39d45e1e2b
commit 7d58d02ca2
7 changed files with 117 additions and 34 deletions

View File

@ -9,8 +9,12 @@
## Changelog
### **HEAD -> main** 2021/11/05 mandic00@live.com
### **HEAD -> main** 2021/11/07 mandic00@live.com
### **origin/main** 2021/11/06 mandic00@live.com
- new frame change detection algorithm
- add histogram equalization
- implement wasm missing ops
- performance and memory optimizations

25
TODO.md
View File

@ -42,18 +42,27 @@ MoveNet MultiPose model does not work with WASM backend due to missing F32 broad
### Pending release
New:
- New frame change detection algorithm used for cache determination
- New frame change detection algorithm used for [cache determination](https://vladmandic.github.io/human/typedoc/interfaces/Config.html#cacheSensitivity)
based on temporal input difference
- New optional input histogram equalization
auto-level input for optimal brightness/contrast via `config.filter.equalization`
- New built-in Tensorflow profiling [human.profile](https://vladmandic.github.io/human/typedoc/classes/Human.html#profile)
- New optional input histogram equalization [config.filter.equalization](https://vladmandic.github.io/human/)
auto-level input for optimal brightness/contrast
- New event-based interface [human.events](https://vladmandic.github.io/human/typedoc/classes/Human.html#events)
- New configuration validation [human.validate](https://vladmandic.github.io/human/typedoc/classes/Human.html#validate)
- New input compare function [human.compare](https://vladmandic.github.io/human/typedoc/classes/Human.html#compare)
this function is internally used by `human` to determine frame changes and cache validation
- New [custom built TFJS](https://github.com/vladmandic/tfjs) for bundled version
result is a pure module with reduced bundle size and include built-in support for all backends
note: **nobundle** and **node** versions link to standard `@tensorflow` packages
Changed:
- Supports all modules on all backends
- [Default configuration values](https://github.com/vladmandic/human/blob/main/src/config.ts#L262) have been tuned for precision and performance
- Supports all built-in modules on all backends
via custom implementation of missing kernel ops
- Performance and precision improvements
**face**, **hand** and **gestures** modules
- Use custom built TFJS for bundled version
reduced bundle size and built-in support for all backends
`nobundle` and `node` versions link to standard `@tensorflow` packages
- **face**, **hand**
- **gestures** modules
- **face matching**
- Fix **ReactJS** compatibility
- Better precision using **WASM**
Previous issues due to math low-precision in WASM implementation

View File

@ -184,7 +184,7 @@ export interface GestureConfig {
* Contains all configurable parameters
* @typedef Config
*
* Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L292)
* Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262)
*/
export interface Config {
/** Backend used for TFJS operations
@ -196,16 +196,19 @@ export interface Config {
backend: '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow' | 'webgpu',
/** Path to *.wasm files if backend is set to `wasm`
*
* default: auto-detects to link to CDN `jsdelivr` when running in browser
*/
wasmPath: string,
/** Print debug statements to console
*
* default: `true`
*/
debug: boolean,
/** Perform model loading and inference concurrently or sequentially
*
* default: `true`
*/
async: boolean,
@ -213,6 +216,7 @@ export interface Config {
/** What to use for `human.warmup()`
* - warmup pre-initializes all models for faster inference but can take significant time on startup
* - used by `webgl`, `humangl` and `webgpu` backends
*
* default: `full`
*/
warmup: 'none' | 'face' | 'full' | 'body',
@ -220,6 +224,7 @@ export interface Config {
/** Base model path (typically starting with file://, http:// or https://) for all models
* - individual modelPath values are relative to this path
*
* default: `../models/` for browsers and `file://models/` for nodejs
*/
modelBasePath: string,
@ -227,6 +232,7 @@ export interface Config {
/** Cache sensitivity
* - values 0..1 where 0.01 means reset cache if input changed more than 1%
* - set to 0 to disable caching
*
* default: 0.7
*/
cacheSensitivity: number;
@ -259,7 +265,7 @@ export interface Config {
segmentation: Partial<SegmentationConfig>,
}
/** - [See all default Config values...](https://github.com/vladmandic/human/blob/main/src/config.ts#L253) */
/** - [See all default Config values...](https://github.com/vladmandic/human/blob/main/src/config.ts#L262) */
const config: Config = {
backend: '',
modelBasePath: '',

View File

@ -68,7 +68,7 @@ export class Human {
version: string;
/** Current configuration
* - Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L250)
* - Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262)
*/
config: Config;
@ -267,6 +267,18 @@ export class Human {
return faceres.enhance(input);
}
/** Compare two input tensors for pixel similarity
 * - use `human.image` to process any valid input and get a tensor that can be used for compare
 * - when passing manually generated tensors:
 *   - both input tensors must be in format [1, height, width, 3]
 *   - if resolution of tensors does not match, second tensor will be resized to match resolution of the first tensor
 * @returns promise resolving to a pixel difference score normalized by input resolution and rgb channels;
 *   resolves to 0 for identical inputs and also for invalid inputs (mismatched or non-[1,h,w,3] shapes)
 *   — used internally by `human` for frame-change detection and cache validation
 */
compare(firstImageTensor: Tensor, secondImageTensor: Tensor): Promise<number> {
return image.compare(this.config, firstImageTensor, secondImageTensor);
}
/** Explicit backend initialization
* - Normally done implicitly during initial load phase
* - Call to explictly register and initialize TFJS backend without any other operations

View File

@ -272,3 +272,24 @@ export async function skip(config, input: Tensor) {
}
return skipFrame;
}
/** Calculates pixel difference between two image tensors
 * - both tensors must be of shape [1, height, width, 3]
 * - if resolutions differ, the second tensor is resized (bilinear) to match the first
 * @param config human configuration; only `config.debug` is read, to gate diagnostic logging
 * @param input1 reference image tensor in [1, height, width, 3] format
 * @param input2 image tensor to compare against the reference
 * @returns sum of squared pixel differences normalized by the first input's resolution
 *   and rgb channel range; 0 for identical or invalid inputs
 */
export async function compare(config, input1: Tensor, input2: Tensor): Promise<number> {
  if (!input1 || !input2 || input1.shape.length !== 4 || input1.shape.length !== input2.shape.length) {
    // fixed: diagnostics are logged when debug is enabled (condition was inverted);
    // optional chaining guards against reading .shape of a null/undefined input
    if (config.debug) log('invalid input tensor or tensor shapes do not match:', input1?.shape, input2?.shape);
    return 0;
  }
  if (input1.shape[0] !== 1 || input2.shape[0] !== 1 || input1.shape[3] !== 3 || input2.shape[3] !== 3) {
    if (config.debug) log('input tensors must be of shape [1, height, width, 3]:', input1.shape, input2.shape);
    return 0;
  }
  const t: Record<string, Tensor> = {};
  t.input1 = tf.clone(input1);
  // resize second input to match first input's resolution when they differ
  t.input2 = (input1.shape[1] !== input2.shape[1] || input1.shape[2] !== input2.shape[2]) ? tf.image.resizeBilinear(input2, [input1.shape[1], input1.shape[2]]) : tf.clone(input2);
  t.diff = tf.sub(t.input1, t.input2);
  t.squared = tf.mul(t.diff, t.diff);
  t.sum = tf.sum(t.squared);
  const diffSum = await t.sum.data();
  // normalize by resolution (|| 1 guards zero-sized dims), max pixel value 255 and 3 rgb channels
  const diffRelative = diffSum[0] / (input1.shape[1] || 1) / (input1.shape[2] || 1) / 255 / 3;
  // dispose every intermediate tensor to avoid a memory leak; Object.values covers all entries in t
  tf.dispose(Object.values(t));
  return diffRelative;
}

View File

@ -6,11 +6,10 @@ Not required for normal functioning of library
### NodeJS using TensorFlow library
- Image filters are disabled due to lack of Canvas and WeBGL access
- Image filters are disabled due to lack of Canvas and WebGL access
- Face rotation is disabled for `NodeJS` platform:
`Kernel 'RotateWithOffset' not registered for backend 'tensorflow'`
<https://github.com/tensorflow/tfjs/issues/4606>
Work has recently been completed and will likely be included in TFJS 3.9.0
### NodeJS using WASM
@ -18,27 +17,11 @@ Not required for normal functioning of library
See <https://github.com/tensorflow/tfjs/issues/4927>
- Image filters are disabled due to lack of Canvas and WeBGL access
- Only supported input is Tensor due to missing image decoders
- Warmup returns null and is marked as failed
Missing image decode in `tfjs-core`
- Fails on object detection:
`Kernel 'SparseToDense' not registered for backend 'wasm'`
<https://github.com/tensorflow/tfjs/issues/4824>
<br>
## Manual Tests
## Browser Tests
### Browser using WebGL backend
- Chrome/Edge: All Passing
- Firefox: WebWorkers not supported due to missing support for OffscreenCanvas
- Safari: Limited Testing
### Browser using WASM backend
- Chrome/Edge: All Passing
- Firefox: WebWorkers not supported due to missing support for OffscreenCanvas
- Safari: Limited Testing
- Fails on object detection:
`Kernel 'SparseToDense' not registered for backend 'wasm'`
<https://github.com/tensorflow/tfjs/issues/4824>
- Chrome/Edge: **All Passing**
- Firefox: WebWorkers not supported due to missing support for `OffscreenCanvas`
- Safari: **Limited Testing**

View File

@ -135,12 +135,55 @@ async function testDetect(human, input, title, checkLeak = true) {
}
return detect;
}
// Counters tracking how many times each human event type fired during the test run
const evt = { image: 0, detect: 0, warmup: 0 };
// Event listener: logs the fired event and bumps its counter in `evt`
async function events(eventName) {
  log('state', 'event:', eventName);
  evt[eventName] += 1;
}
// Logs a single pass/fail line for a test condition, with any detail values appended
const verify = (condition, ...details) => {
  const [level, prefix] = condition ? ['state', 'passed:'] : ['error', 'failed:'];
  log(level, prefix, ...details);
};
// Runs a full detection pass on a known sample image and validates every result
// category (face, body, hand, gesture, object) against values expected for that
// exact image. NOTE(review): thresholds and counts are tuned to
// 'samples/in/ai-body.jpg' with the default models — changing either requires re-tuning.
async function verifyDetails(human) {
const res = await testDetect(human, 'samples/in/ai-body.jpg', 'default');
// exactly one face expected in the sample image
verify(res.face.length === 1, 'details face length', res.face.length);
for (const face of res.face) {
// all face confidence scores must be high
verify(face.score > 0.9 && face.boxScore > 0.9 && face.faceScore > 0.9, 'details face score', face.score, face.boxScore, face.faceScore);
// demographic analysis: age ~29, female, iris distance in 70..80 range
verify(face.age > 29 && face.age < 30 && face.gender === 'female' && face.genderScore > 0.9 && face.iris > 70 && face.iris < 80, 'details face age/gender', face.age, face.gender, face.genderScore, face.iris);
// array sizes: 4-element boxes, 478-point mesh, 1024-dim embedding
verify(face.box.length === 4 && face.boxRaw.length === 4 && face.mesh.length === 478 && face.meshRaw.length === 478 && face.embedding.length === 1024, 'details face arrays', face.box.length, face.mesh.length, face.embedding.length);
// top emotion for this sample is 'angry' with score above 0.5
verify(face.emotion.length === 3 && face.emotion[0].score > 0.5 && face.emotion[0].emotion === 'angry', 'details face emotion', face.emotion.length, face.emotion[0]);
}
// exactly one body expected
verify(res.body.length === 1, 'details body length', res.body.length);
for (const body of res.body) {
// 17 keypoints and 6 annotation groups for the default body model
verify(body.score > 0.9 && body.box.length === 4 && body.boxRaw.length === 4 && body.keypoints.length === 17 && Object.keys(body.annotations).length === 6, 'details body', body.score, body.keypoints.length, Object.keys(body.annotations).length);
}
// exactly one hand expected, labeled as a 'point' gesture
verify(res.hand.length === 1, 'details hand length', res.hand.length);
for (const hand of res.hand) {
verify(hand.score > 0.5 && hand.boxScore > 0.5 && hand.fingerScore > 0.5 && hand.box.length === 4 && hand.boxRaw.length === 4 && hand.label === 'point', 'details hand', hand.boxScore, hand.fingerScore, hand.label);
// 21 keypoints, 5 landmark groups (one per finger), 6 annotation groups
verify(hand.keypoints.length === 21 && Object.keys(hand.landmarks).length === 5 && Object.keys(hand.annotations).length === 6, 'details hand arrays', hand.keypoints.length, Object.keys(hand.landmarks).length, Object.keys(hand.annotations).length);
}
// five gestures expected, first being 'facing right'
verify(res.gesture.length === 5, 'details gesture length', res.gesture.length);
verify(res.gesture[0].gesture === 'facing right', 'details gesture first', res.gesture[0]);
// one detected object expected, classified as 'person'
verify(res.object.length === 1, 'details object length', res.object.length);
for (const obj of res.object) {
verify(obj.score > 0.7 && obj.label === 'person' && obj.box.length === 4 && obj.boxRaw.length === 4, 'details object', obj.score, obj.label);
}
}
// Validates human.compare: identical inputs score 0, argument order is symmetric
// (after rounding), and different images score in the expected 20..30 band
async function verifyCompare(human) {
  const faceTensor = await getImage(human, 'samples/in/ai-face.jpg');
  const bodyTensor = await getImage(human, 'samples/in/ai-body.jpg');
  const faceSelf = await human.compare(faceTensor, faceTensor);
  const faceVsBody = await human.compare(faceTensor, bodyTensor);
  const bodyVsFace = await human.compare(bodyTensor, faceTensor);
  const bodySelf = await human.compare(bodyTensor, bodyTensor);
  const symmetric = Math.round(faceVsBody) === Math.round(bodyVsFace);
  const inBand = faceVsBody > 20 && faceVsBody < 30;
  verify(faceSelf === 0 && bodySelf === 0 && symmetric && inBand, 'image compare', faceSelf, faceVsBody);
  human.tf.dispose([faceTensor, bodyTensor]);
}
async function test(Human, inputConfig) {
config = inputConfig;
fetch = (await import('node-fetch')).default;
@ -202,6 +245,8 @@ async function test(Human, inputConfig) {
gestures: res.gesture,
});
await verifyDetails(human);
// test default config async
log('info', 'test default');
human.reset();
@ -357,6 +402,9 @@ async function test(Human, inputConfig) {
if (tensors1 === tensors2 && tensors1 === tensors3 && tensors2 === tensors3) log('state', 'passeed: equal usage');
else log('error', 'failed: equal usage', tensors1, tensors2, tensors3);
// validate cache compare algorithm
await verifyCompare(human);
// tests end
const t1 = process.hrtime.bigint();