new release

pull/356/head
Vladimir Mandic 2021-09-24 09:55:27 -04:00
parent 7b23c7f0a8
commit 894dde3edd
7 changed files with 65 additions and 38 deletions

View File

@ -9,8 +9,12 @@
## Changelog
### **HEAD -> main** 2021/09/22 mandic00@live.com
### **2.2.3** 2021/09/24 mandic00@live.com
### **origin/main** 2021/09/23 mandic00@live.com
- support segmentation for nodejs
- redo segmentation and handtracking
- prototype handtracking
- automated browser tests

View File

@ -6,9 +6,6 @@
### Handtrack
- Finish implementation
- Set defaults and image sizes
- Optimize model
- Add tests
<br>

View File

@ -211,13 +211,26 @@ export interface GestureConfig {
*
* Contains all configurable parameters
* @typedef Config
*
* Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L292)
*/
export interface Config {
/** Backend used for TFJS operations */
/** Backend used for TFJS operations
* Valid built-in backends are:
* - Browser: `cpu`, `wasm`, `webgl`, `humangl`
* - NodeJS: `cpu`, `wasm`, `tensorflow`
*
* Experimental:
* - Browser: `webgpu` - requires custom build of `tfjs-backend-webgpu`
*
* Defaults: `humangl` for browser and `tensorflow` for nodejs
*/
backend: '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow' | 'webgpu',
// backend: string;
/** Path to *.wasm files if backend is set to `wasm` */
/** Path to *.wasm files if backend is set to `wasm`
* - if not set, automatically resolves to the `jsdelivr` CDN when running in a browser
*/
wasmPath: string,
/** Print debug statements to console */
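
A hedged usage sketch for the `backend` and `wasmPath` options above; the default-import style and the exact wasm asset URL are assumptions and should be pinned to the installed tfjs version:

```ts
import Human from '@vladmandic/human';

// select the wasm backend and point it at CDN-hosted wasm binaries
// (URL pattern is an assumption; pin it to the tfjs version actually installed)
const human = new Human({
  backend: 'wasm',
  wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/',
});
```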
@ -248,20 +261,27 @@ export interface Config {
/** Run input through image filters before inference
* - image filters run with near-zero latency as they are executed on the GPU
*
* {@link FilterConfig}
*/
filter: Partial<FilterConfig>,
// type definition end
/** {@link GestureConfig} */
gesture: Partial<GestureConfig>;
/** {@link FaceConfig} */
face: Partial<FaceConfig>,
/** {@link BodyConfig} */
body: Partial<BodyConfig>,
/** {@link HandConfig} */
hand: Partial<HandConfig>,
/** {@link ObjectConfig} */
object: Partial<ObjectConfig>,
/** {@link SegmentationConfig} */
segmentation: Partial<SegmentationConfig>,
}
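
Since every section of `Config` is typed as `Partial<>`, a user configuration only needs to list overrides; everything else falls back to the defaults linked above. A minimal sketch (the specific `FilterConfig` and section fields shown are assumptions):

```ts
const userConfig: Partial<Config> = {
  filter: { enabled: true, brightness: 0.1 }, // assumed FilterConfig fields
  gesture: { enabled: true },
  face: { enabled: true },
  hand: { enabled: false },
};
```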
@ -332,7 +352,7 @@ const config: Config = {
skipFrames: 15, // maximum number of frames to go without re-running the face bounding box detector
// only used when cacheSensitivity is not zero
// e.g., if the model is running at 25 FPS, we can re-use the existing bounding
// box for updated face analysis as the head probably hasn't moved much
// box for updated face analysis as the head does not move fast
// in a short time (10 * 1/25 = 0.4 sec)
minConfidence: 0.2, // threshold for discarding a prediction
iouThreshold: 0.1, // amount of overlap between two detected objects before one object is removed
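
The `skipFrames`/`cacheSensitivity` interplay described in the comments above can be shown as a small sketch; this is an illustration of the documented caching rule, not the library's implementation:

```ts
// re-run the detector only when the cached bounding box has expired;
// cacheSensitivity = 0 disables caching entirely
let framesSinceDetect = 0;
function needsDetect(skipFrames: number, cacheSensitivity: number): boolean {
  if (cacheSensitivity === 0) return true; // caching disabled, always detect
  if (++framesSinceDetect > skipFrames) {  // cached box is too old
    framesSinceDetect = 0;
    return true;
  }
  return false;                            // reuse the cached bounding box
}
// e.g. at 25 FPS with skipFrames = 15, a box is reused for at most 15/25 = 0.6 sec
```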
@ -386,10 +406,11 @@ const config: Config = {
enabled: true,
rotation: true, // use best-guess rotated hand image or just box with rotation as-is
// false means higher performance, but incorrect finger mapping if hand is inverted
// only valid for `handdetect` variation
skipFrames: 18, // maximum number of frames to go without re-running the hand bounding box detector
// only used when cacheSensitivity is not zero
// e.g., if the model is running at 25 FPS, we can re-use the existing bounding
// box for updated hand skeleton analysis as the hand probably
// box for updated hand skeleton analysis as the hand
// hasn't moved much in a short time (10 * 1/25 = 0.4 sec)
minConfidence: 0.8, // threshold for discarding a prediction
iouThreshold: 0.2, // amount of overlap between two detected objects before one object is removed
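
For reference, a hedged sketch of selecting between the two hand pipeline variations mentioned above; the model file names follow the test changes later in this commit:

```ts
// `rotation` only has an effect with the handdetect variation
const useHanddetect: Partial<Config> = {
  hand: { enabled: true, rotation: true, detector: { modelPath: 'handdetect.json' } },
};
const useHandtrack: Partial<Config> = {
  hand: { enabled: true, rotation: false, detector: { modelPath: 'handtrack.json' } },
};
```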
@ -427,4 +448,5 @@ const config: Config = {
blur: 8, // blur segmentation output by n pixels for more realistic image
},
};
export { config as defaults };

View File

@ -58,6 +58,7 @@ const fingerMap = {
};
export async function loadDetect(config: Config): Promise<GraphModel> {
// HandTrack Model: Original: <https://github.com/victordibia/handtracking> TFJS Port: <https://github.com/victordibia/handtrack.js/>
if (env.initial) models[0] = null;
if (!models[0]) {
// handtrack model defines some kernel ops in its graph that are never referenced and do not exist in tfjs
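
One way to keep such a model loadable is to register placeholder kernels before loading it; the sketch below uses the public `tf.registerKernel` API, but the op name is hypothetical and this is not necessarily what this commit does:

```ts
import * as tf from '@tensorflow/tfjs';

// register a no-op kernel for each op the graph declares but never executes,
// so model deserialization does not fail on a missing kernel
const unusedOps = ['tensorlistreserve']; // hypothetical op name for illustration
for (const kernelName of unusedOps) {
  tf.registerKernel({
    kernelName,
    backendName: tf.getBackend(),
    kernelFunc: () => { throw new Error(`kernel '${kernelName}' should never be called`); },
  });
}
```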
@ -224,26 +225,3 @@ export async function predict(input: Tensor, config: Config): Promise<HandResult
cache.fingerBoxes = [...cache.tmpBoxes]; // repopulate cache with validated hands
return hands as HandResult[];
}
/*
- Live Site: <https://victordibia.com/handtrack.js/#/>
- TFJS Port: <https://github.com/victordibia/handtrack.js/>
- Original: <https://github.com/victordibia/handtracking>
- Writeup: <https://medium.com/@victor.dibia/how-to-build-a-real-time-hand-detector-using-neural-networks-ssd-on-tensorflow-d6bac0e4b2ce>
- Convert:
tensorflowjs_converter --input_format=tf_frozen_model --output_format=tfjs_graph_model \
--output_node_names='num_detections,detection_boxes,detection_scores,detection_classes' --saved_model_tags=serve --quantize_uint8=* \
--strip_debug_ops=* --weight_shard_size_bytes=10000000000 --control_flow_v2=true frozen_inference_graph.pb graph
webmodel/efficientdet512d0/base/model.json
webmodel/centernet512fpn/base/model.json
https://github.com/victordibia/handtrack.js/commit/70d5d9c98e69688414cddaad044bd8730bc982d1#diff-c40e819be4ec1dc29f26913f5cdeb05202261b3a1725ab259cb235ea0f0fc5d6
git rev-list HEAD -- webmodel/*
9ba7220fb31e9168aa248500cc70800566f4c719
70d5d9c98e69688414cddaad044bd8730bc982d1
git checkout 9ba7220fb31e9168aa248500cc70800566f4c719^ -- webmodel
git checkout 70d5d9c98e69688414cddaad044bd8730bc982d1^ -- webmodel
*/

View File

@ -80,13 +80,15 @@ export class Human {
version: string;
/** Current configuration
* - Details: {@link Config}
* - Definition: {@link Config}
* - Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L292)
*/
config: Config;
/** Last known result of detect run
* - Can be accessed anytime after initial detection
*/
* - Definition: {@link Result}
*/
result: Result;
/** Current state of Human library
@ -101,6 +103,8 @@ export class Human {
/** Instance of TensorFlow/JS used by Human
* - Can be embedded or externally provided
* @internal
*
* [TFJS API](https://js.tensorflow.org/api/latest/)
*/
tf: TensorFlow;
@ -119,6 +123,7 @@ export class Human {
/** Currently loaded models
* @internal
* {@link Models}
*/
models: models.Models;
@ -150,7 +155,7 @@ export class Human {
*
* @param userConfig: {@link Config}
*
* @return instance
* @return instance: {@link Human}
*/
constructor(userConfig?: Partial<Config>) {
env.get();
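
A hedged usage sketch for the constructor and properties documented above (the default-import style is an assumption about the build in use):

```ts
import Human from '@vladmandic/human';

async function run(input: HTMLVideoElement) {
  const human = new Human({ backend: 'humangl' }); // userConfig is optional and partial
  const result = await human.detect(input);        // also remains accessible as human.result
  console.log(human.version, result.gesture);
}
```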

View File

@ -31,6 +31,8 @@ const ignoreMessages = [
'ExperimentalWarning',
];
const failedMessages = [];
const status = {};
function logMessage(test, data) {
@ -42,7 +44,10 @@ function logMessage(test, data) {
status[test].failed++;
}
if (data[1][0].startsWith('passed')) status[test].passed++;
if (data[1][0].startsWith('failed')) status[test].failed++;
if (data[1][0].startsWith('failed')) {
status[test].failed++;
failedMessages.push({ test, data });
}
}
function logStdIO(ok, test, buffer) {
@ -100,6 +105,7 @@ async function testAll() {
// for (const demo of demos) await runDemo(demo);
for (const test of tests) await runTest(test);
log.info();
log.info('failed', failedMessages);
log.info('status:', status);
}

View File

@ -302,9 +302,24 @@ async function test(Human, inputConfig) {
// test posenet and movenet
log('info', 'test body variants');
config.body = { modelPath: 'posenet.json' };
await testDetect(human, 'samples/ai-body.jpg', 'posenet');
res = await testDetect(human, 'samples/ai-body.jpg', 'posenet');
if (!res || res?.body?.length !== 1) log('error', 'failed: body posenet');
else log('state', 'passed: body posenet');
config.body = { modelPath: 'movenet-lightning.json' };
await testDetect(human, 'samples/ai-body.jpg', 'movenet');
res = await testDetect(human, 'samples/ai-body.jpg', 'movenet');
if (!res || res?.body?.length !== 1) log('error', 'failed: body movenet');
else log('state', 'passed: body movenet');
// test handdetect and handtrack
log('info', 'test hand variants');
config.hand = { enabled: true, maxDetected: 2, minConfidence: 0.1, detector: { modelPath: 'handdetect.json' } };
res = await testDetect(human, 'samples/ai-body.jpg', 'handdetect');
if (!res || res?.hand?.length !== 2) log('error', 'failed: hand handdetect');
else log('state', 'passed: hand handdetect');
config.hand = { enabled: true, maxDetected: 2, minConfidence: 0.1, detector: { modelPath: 'handtrack.json' } };
res = await testDetect(human, 'samples/ai-body.jpg', 'handtrack');
if (!res || res?.hand?.length !== 2) log('error', 'failed: hand handtrack');
else log('state', 'passed: hand handtrack');
// test multiple instances
const first = new Human(config);