From bc7cb66846f150664df61777dea298bdc99492b2 Mon Sep 17 00:00:00 2001 From: Vladimir Mandic Date: Tue, 1 Jun 2021 08:56:36 -0400 Subject: [PATCH] update demos and output docs with breaking changes --- Demos.md | 235 +++++++++++++++++++++++++++++++---------------------- Outputs.md | 27 ++++-- 2 files changed, 155 insertions(+), 107 deletions(-) diff --git a/Demos.md b/Demos.md index a02ce19..98d3d41 100644 --- a/Demos.md +++ b/Demos.md @@ -94,7 +94,7 @@ human.draw.options = { fillPolygons: false, useDepth: true, useCurves: false, - bufferedOutput: false, + bufferedOutput: true, useRawBoxes: false, }; ``` @@ -142,69 +142,94 @@ node demo/node.js ``` ```json -10:28:53.444 Human: version: 0.40.5 TensorFlow/JS version: 3.4.0 -10:28:53.445 Human: platform: linux x64 -10:28:53.445 Human: agent: NodeJS v15.7.0 -10:28:53.445 Human: setting backend: tensorflow -10:28:53.505 Human: load model: /models/faceboxes -10:28:53.505 Human: load model: /models/iris -10:28:53.522 Human: load model: /models/age -10:28:53.529 Human: load model: /models/gender -10:28:53.535 Human: load model: /models/emotion -10:28:53.607 Human: load model: /models/handdetect -10:28:53.608 Human: load model: /models/handskeleton -10:28:53.698 Human: load model: /models/posenet -10:28:53.698 Human: tf engine state: 31020964 bytes 932 tensors -2021-03-06 10:28:53 INFO: Loaded: [ 'posenet', 'handpose', 'age', 'gender', 'emotion', 'face', [length]: 6 ] -2021-03-06 10:28:53 INFO: Memory state: { numTensors: 932, numDataBuffers: 932, numBytes: 31020964 } -2021-03-06 10:28:53 WARN: Parameters: missing -2021-03-06 10:28:53 STATE: Processing embedded warmup image: full -2021-03-06 10:28:54 DATA: Face: [ - { - confidence: 0.9981339573860168, - faceConfidence: undefined, - boxConfidence: undefined, - box: [ 43, 20, 182, 231, [length]: 4 ], - mesh: undefined, - boxRaw: null, - meshRaw: undefined, - annotations: undefined, - age: 24.3, - gender: 'female', - genderConfidence: 0.84, - emotion: [ { score: 0.83, emotion: 'neutral' }, { score: 0.12, emotion: 'sad' }, [length]: 2 ], - embedding: [ [length]: 0 ], - iris: 0 +2021-06-01 08:52:15 INFO: @vladmandic/human version 2.0.0 +2021-06-01 08:52:15 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0 +2021-06-01 08:52:15 INFO: Current folder: /home/vlado/dev/human +2021-06-01 08:52:15 INFO: Human: 2.0.0 +2021-06-01 08:52:15 INFO: Active Configuration { + backend: 'tensorflow', + modelBasePath: 'file://models/', + wasmPath: '../node_modules/@tensorflow/tfjs-backend-wasm/dist/', + debug: true, + async: false, + warmup: 'full', + cacheSensitivity: 0.75, + filter: { + enabled: true, + width: 0, + height: 0, + flip: true, + return: true, + brightness: 0, + contrast: 0, + sharpness: 0, + blur: 0, + saturation: 0, + hue: 0, + negative: false, + sepia: false, + vintage: false, + kodachrome: false, + technicolor: false, + polaroid: false, + pixelate: 0 }, -] -2021-03-06 10:28:54 DATA: Body: [ - { - score: 0.9466612444204443, - keypoints: [ - { score: 0.9937239289283752, part: 'nose', position: { x: 597, y: 126 } }, - { score: 0.994640588760376, part: 'leftEye', position: { x: 602, y: 113 } }, - { score: 0.9851681590080261, part: 'rightEye', position: { x: 597, y: 114 } }, - { score: 0.9937878251075745, part: 'leftEar', position: { x: 633, y: 131 } }, - { score: 0.8690065145492554, part: 'rightEar', position: { x: 584, y: 146 } }, - { score: 0.9881162643432617, part: 'leftShoulder', position: { x: 661, y: 228 } }, - { score: 0.9983603954315186, part: 'rightShoulder', position: { x: 541, y: 253 } }, - 
{ score: 0.9678125381469727, part: 'leftElbow', position: { x: 808, y: 392 } }, - { score: 0.9479317665100098, part: 'rightElbow', position: { x: 461, y: 387 } }, - { score: 0.9611830711364746, part: 'leftWrist', position: { x: 896, y: 521 } }, - { score: 0.8795050382614136, part: 'rightWrist', position: { x: 323, y: 503 } }, - { score: 0.9769214391708374, part: 'leftHip', position: { x: 655, y: 540 } }, - { score: 0.9489732384681702, part: 'rightHip', position: { x: 567, y: 533 } }, - { score: 0.9663040041923523, part: 'leftKnee', position: { x: 646, y: 827 } }, - { score: 0.9643898010253906, part: 'rightKnee', position: { x: 561, y: 818 } }, - { score: 0.9095755815505981, part: 'leftAnkle', position: { x: 667, y: 1103 } }, - { score: 0.7478410005569458, part: 'rightAnkle', position: { x: 624, y: 1059 } }, - [length]: 17 - ] + gesture: { enabled: true }, + face: { + enabled: true, + detector: { modelPath: 'blazeface.json', rotation: false, maxDetected: 10, skipFrames: 15, minConfidence: 0.2, iouThreshold: 0.1, return: false, enabled: true }, + mesh: { enabled: true, modelPath: 'facemesh.json' }, + iris: { enabled: true, modelPath: 'iris.json' }, + description: { enabled: true, modelPath: 'faceres.json', skipFrames: 16, minConfidence: 0.1 }, + emotion: { enabled: true, minConfidence: 0.1, skipFrames: 17, modelPath: 'emotion.json' } }, -] -2021-03-06 10:28:54 DATA: Hand: [ [length]: 0 ] -2021-03-06 10:28:54 DATA: Gesture: [ { body: 0, gesture: 'leaning right' }, [length]: 1 ] -10:28:54.968 Human: Warmup full 621 ms + body: { enabled: true, modelPath: 'movenet-lightning.json', maxDetected: 1, minConfidence: 0.2 }, + hand: { + enabled: true, + rotation: true, + skipFrames: 18, + minConfidence: 0.1, + iouThreshold: 0.1, + maxDetected: 2, + landmarks: true, + detector: { modelPath: 'handdetect.json' }, + skeleton: { modelPath: 'handskeleton.json' } + }, + object: { enabled: true, modelPath: 'mb3-centernet.json', minConfidence: 0.2, iouThreshold: 0.4, maxDetected: 10, skipFrames: 19 } +} +08:52:15.673 Human: version: 2.0.0 +08:52:15.674 Human: tfjs version: 3.6.0 +08:52:15.674 Human: platform: linux x64 +08:52:15.674 Human: agent: NodeJS v16.0.0 +08:52:15.674 Human: setting backend: tensorflow +08:52:15.710 Human: load model: file://models/blazeface.json +08:52:15.743 Human: load model: file://models/facemesh.json +08:52:15.744 Human: load model: file://models/iris.json +08:52:15.760 Human: load model: file://models/emotion.json +08:52:15.847 Human: load model: file://models/handdetect.json +08:52:15.847 Human: load model: file://models/handskeleton.json +08:52:15.914 Human: load model: file://models/movenet-lightning.json +08:52:15.957 Human: load model: file://models/mb3-centernet.json +08:52:16.015 Human: load model: file://models/faceres.json +08:52:16.015 Human: tf engine state: 50796152 bytes 1318 tensors +2021-06-01 08:52:16 INFO: Loaded: [ 'face', 'movenet', 'handpose', 'emotion', 'centernet', 'faceres', [length]: 6 ] +2021-06-01 08:52:16 INFO: Memory state: { unreliable: true, numTensors: 1318, numDataBuffers: 1318, numBytes: 50796152 } +2021-06-01 08:52:16 INFO: Loading image: private/daz3d/daz3d-kiaria-02.jpg +2021-06-01 08:52:16 STATE: Processing: [ 1, 1300, 1000, 3, [length]: 4 ] +2021-06-01 08:52:17 DATA: Results: +2021-06-01 08:52:17 DATA: Face: #0 boxScore:0.88 faceScore:1 age:16.3 genderScore:0.97 gender:female emotionScore:0.85 emotion:happy iris:61.05 +2021-06-01 08:52:17 DATA: Body: #0 score:0.82 keypoints:17 +2021-06-01 08:52:17 DATA: Hand: #0 score:0.89 +2021-06-01 08:52:17 
DATA: Hand: #1 score:0.97 +2021-06-01 08:52:17 DATA: Gesture: face#0 gesture:facing left +2021-06-01 08:52:17 DATA: Gesture: body#0 gesture:leaning right +2021-06-01 08:52:17 DATA: Gesture: hand#0 gesture:pinky forward middlefinger up +2021-06-01 08:52:17 DATA: Gesture: hand#1 gesture:pinky forward middlefinger up +2021-06-01 08:52:17 DATA: Gesture: iris#0 gesture:looking left +2021-06-01 08:52:17 DATA: Object: #0 score:0.55 label:person +2021-06-01 08:52:17 DATA: Object: #1 score:0.23 label:bottle +2021-06-01 08:52:17 DATA: Persons: +2021-06-01 08:52:17 DATA: #0: Face:score:1 age:16.3 gender:female iris:61.05 Body:score:0.82 keypoints:17 LeftHand:no RightHand:yes Gestures:4 ```


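For orientation, the flow behind output like the above boils down to creating a `Human` instance, decoding an image into a tensor, and calling `human.detect()`. The sketch below is an illustrative outline only, not the contents of `demo/node.js`; the Node bundle import path and the sample image path are assumptions.

```js
// Illustrative outline of a Node.js detection run; not the actual demo/node.js source.
// Assumptions: the Node bundle path and the input image path may differ in the repository.
const fs = require('fs');
const tf = require('@tensorflow/tfjs-node'); // provides the native tensorflow backend
const Human = require('@vladmandic/human/dist/human.node.js').default; // assumed bundle path

const config = { backend: 'tensorflow', modelBasePath: 'file://models/', debug: true };

async function main() {
  const human = new Human(config);
  await human.load(); // pre-loads all enabled models, same as the "load model" lines in the log

  const buffer = fs.readFileSync('assets/sample1.jpg'); // hypothetical input image
  const tensor = tf.node.decodeImage(buffer, 3); // decode jpg/png into a 3-channel tensor

  const result = await human.detect(tensor); // runs all enabled modules
  tf.dispose(tensor); // release the input tensor

  console.log('faces:', result.face.length, 'bodies:', result.body.length, 'hands:', result.hand.length);
  console.log('persons:', result.persons.length); // persons getter groups results per detected person
}

main();
```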
@@ -219,42 +244,56 @@ node node-multiprocess.js ``` ```json -2021-04-16 08:33:13 INFO: @vladmandic/face-api version 1.1.12 -2021-04-16 08:33:13 INFO: User: vlado Platform: linux Arch: x64 Node: v15.7.0 -2021-04-16 08:33:13 INFO: FaceAPI multi-process test -2021-04-16 08:33:13 STATE: Main: started worker: 268453 -2021-04-16 08:33:13 STATE: Main: started worker: 268459 -2021-04-16 08:33:13 STATE: Main: started worker: 268460 -2021-04-16 08:33:13 STATE: Main: started worker: 268466 -2021-04-16 08:33:14 STATE: Worker: PID: 268459 TensorFlow/JS 3.4.0 FaceAPI 1.1.12 Backend: tensorflow -2021-04-16 08:33:14 STATE: Worker: PID: 268466 TensorFlow/JS 3.4.0 FaceAPI 1.1.12 Backend: tensorflow -2021-04-16 08:33:14 STATE: Worker: PID: 268460 TensorFlow/JS 3.4.0 FaceAPI 1.1.12 Backend: tensorflow -2021-04-16 08:33:14 STATE: Worker: PID: 268453 TensorFlow/JS 3.4.0 FaceAPI 1.1.12 Backend: tensorflow -2021-04-16 08:33:15 STATE: Main: dispatching to worker: 268466 -2021-04-16 08:33:15 STATE: Main: dispatching to worker: 268460 -2021-04-16 08:33:15 INFO: Latency: worker initializtion: 1860 message round trip: 39 -2021-04-16 08:33:15 DATA: Worker received message: 268466 { test: true } -2021-04-16 08:33:15 STATE: Main: dispatching to worker: 268459 -2021-04-16 08:33:15 STATE: Main: dispatching to worker: 268453 -2021-04-16 08:33:15 DATA: Worker received message: 268460 { image: 'demo/sample2.jpg' } -2021-04-16 08:33:15 DATA: Worker received message: 268459 { image: 'demo/sample3.jpg' } -2021-04-16 08:33:15 DATA: Worker received message: 268453 { image: 'demo/sample4.jpg' } -2021-04-16 08:33:15 DATA: Worker received message: 268466 { image: 'demo/sample1.jpg' } -2021-04-16 08:33:17 DATA: Main: worker finished: 268466 detected faces: 3 -2021-04-16 08:33:17 STATE: Main: dispatching to worker: 268466 -2021-04-16 08:33:17 DATA: Main: worker finished: 268460 detected faces: 3 -2021-04-16 08:33:17 STATE: Main: dispatching to worker: 268460 -2021-04-16 08:33:17 DATA: Worker received message: 268466 { image: 'demo/sample5.jpg' } -2021-04-16 08:33:17 DATA: Worker received message: 268460 { image: 'demo/sample6.jpg' } -2021-04-16 08:33:17 DATA: Main: worker finished: 268453 detected faces: 4 -2021-04-16 08:33:17 STATE: Main: worker exit: 268453 0 -2021-04-16 08:33:17 DATA: Main: worker finished: 268459 detected faces: 3 -2021-04-16 08:33:17 STATE: Main: worker exit: 268459 0 -2021-04-16 08:33:18 DATA: Main: worker finished: 268466 detected faces: 5 -2021-04-16 08:33:18 STATE: Main: worker exit: 268466 0 -2021-04-16 08:33:18 DATA: Main: worker finished: 268460 detected faces: 4 -2021-04-16 08:33:18 INFO: Processed: 6 images in total: 4930 ms working: 3069 ms average: 511 ms -2021-04-16 08:33:18 STATE: Main: worker exit: 268460 0 +2021-06-01 08:54:19 INFO: @vladmandic/human version 2.0.0 +2021-06-01 08:54:19 INFO: User: vlado Platform: linux Arch: x64 Node: v16.0.0 +2021-06-01 08:54:19 INFO: FaceAPI multi-process test +2021-06-01 08:54:19 STATE: Enumerated images: ./assets 15 +2021-06-01 08:54:19 STATE: Main: started worker: 130362 +2021-06-01 08:54:19 STATE: Main: started worker: 130363 +2021-06-01 08:54:19 STATE: Main: started worker: 130369 +2021-06-01 08:54:19 STATE: Main: started worker: 130370 +2021-06-01 08:54:20 STATE: Worker: PID: 130370 TensorFlow/JS 3.6.0 Human 2.0.0 Backend: tensorflow +2021-06-01 08:54:20 STATE: Worker: PID: 130362 TensorFlow/JS 3.6.0 Human 2.0.0 Backend: tensorflow +2021-06-01 08:54:20 STATE: Worker: PID: 130369 TensorFlow/JS 3.6.0 Human 2.0.0 Backend: tensorflow +2021-06-01 08:54:20 STATE: 
Worker: PID: 130363 TensorFlow/JS 3.6.0 Human 2.0.0 Backend: tensorflow +2021-06-01 08:54:21 STATE: Main: dispatching to worker: 130370 +2021-06-01 08:54:21 INFO: Latency: worker initializtion: 1348 message round trip: 0 +2021-06-01 08:54:21 DATA: Worker received message: 130370 { test: true } +2021-06-01 08:54:21 STATE: Main: dispatching to worker: 130362 +2021-06-01 08:54:21 DATA: Worker received message: 130362 { image: 'assets/human-sample-face.jpg' } +2021-06-01 08:54:21 DATA: Worker received message: 130370 { image: 'assets/human-sample-body.jpg' } +2021-06-01 08:54:21 STATE: Main: dispatching to worker: 130369 +2021-06-01 08:54:21 STATE: Main: dispatching to worker: 130363 +2021-06-01 08:54:21 DATA: Worker received message: 130369 { image: 'assets/human-sample-upper.jpg' } +2021-06-01 08:54:21 DATA: Worker received message: 130363 { image: 'assets/sample-me.jpg' } +2021-06-01 08:54:24 DATA: Main: worker finished: 130362 detected faces: 1 bodies: 1 hands: 0 objects: 1 +2021-06-01 08:54:24 STATE: Main: dispatching to worker: 130362 +2021-06-01 08:54:24 DATA: Worker received message: 130362 { image: 'assets/sample1.jpg' } +2021-06-01 08:54:25 DATA: Main: worker finished: 130369 detected faces: 1 bodies: 1 hands: 0 objects: 1 +2021-06-01 08:54:25 STATE: Main: dispatching to worker: 130369 +2021-06-01 08:54:25 DATA: Main: worker finished: 130370 detected faces: 1 bodies: 1 hands: 0 objects: 1 +2021-06-01 08:54:25 STATE: Main: dispatching to worker: 130370 +2021-06-01 08:54:25 DATA: Worker received message: 130369 { image: 'assets/sample2.jpg' } +2021-06-01 08:54:25 DATA: Main: worker finished: 130363 detected faces: 1 bodies: 1 hands: 0 objects: 2 +2021-06-01 08:54:25 STATE: Main: dispatching to worker: 130363 +2021-06-01 08:54:25 DATA: Worker received message: 130370 { image: 'assets/sample3.jpg' } +2021-06-01 08:54:25 DATA: Worker received message: 130363 { image: 'assets/sample4.jpg' } +2021-06-01 08:54:30 DATA: Main: worker finished: 130362 detected faces: 3 bodies: 1 hands: 0 objects: 7 +2021-06-01 08:54:30 STATE: Main: dispatching to worker: 130362 +2021-06-01 08:54:30 DATA: Worker received message: 130362 { image: 'assets/sample5.jpg' } +2021-06-01 08:54:31 DATA: Main: worker finished: 130369 detected faces: 3 bodies: 1 hands: 0 objects: 5 +2021-06-01 08:54:31 STATE: Main: dispatching to worker: 130369 +2021-06-01 08:54:31 DATA: Worker received message: 130369 { image: 'assets/sample6.jpg' } +2021-06-01 08:54:31 DATA: Main: worker finished: 130363 detected faces: 4 bodies: 1 hands: 2 objects: 2 +2021-06-01 08:54:31 STATE: Main: dispatching to worker: 130363 +2021-06-01 08:54:39 STATE: Main: worker exit: 130370 0 +2021-06-01 08:54:39 DATA: Main: worker finished: 130362 detected faces: 1 bodies: 1 hands: 0 objects: 1 +2021-06-01 08:54:39 DATA: Main: worker finished: 130369 detected faces: 1 bodies: 1 hands: 1 objects: 3 +2021-06-01 08:54:39 STATE: Main: worker exit: 130362 0 +2021-06-01 08:54:39 STATE: Main: worker exit: 130369 0 +2021-06-01 08:54:41 DATA: Main: worker finished: 130363 detected faces: 9 bodies: 1 hands: 0 objects: 10 +2021-06-01 08:54:41 STATE: Main: worker exit: 130363 0 +2021-06-01 08:54:41 INFO: Processed: 15 images in total: 22006 ms working: 20658 ms average: 1377 ms ```


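The main/worker exchange visible in this log follows a standard Node.js fork-and-dispatch pattern: the parent forks a fixed pool of workers and hands each one the next image as soon as it reports a result. The sketch below illustrates that pattern only; the worker script path, message shape, and image list are assumptions, not the `node-multiprocess.js` source.

```js
// Parent-side sketch of the fork-and-dispatch pattern shown in the log above.
// Hypothetical names: worker script path, message fields, and the image queue.
const { fork } = require('child_process');

const images = ['assets/sample1.jpg', 'assets/sample2.jpg', 'assets/sample3.jpg']; // hypothetical queue
const numWorkers = 4;

function dispatch(worker) {
  const image = images.shift(); // take the next image off the queue
  if (image) worker.send({ image }); // worker runs detection and posts a message back
  else worker.disconnect(); // queue drained: closing the IPC channel lets the worker exit
}

for (let i = 0; i < numWorkers; i++) {
  const worker = fork('demo/node-multiprocess-worker.js'); // assumed worker script
  worker.on('message', (msg) => {
    console.log('worker finished:', worker.pid, msg); // detection summary sent by the worker
    dispatch(worker); // keep the worker busy until the queue is empty
  });
  worker.on('exit', (code) => console.log('worker exit:', worker.pid, code));
  dispatch(worker); // initial assignment; IPC messages are buffered until the worker is ready
}
```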
diff --git a/Outputs.md b/Outputs.md
index 143b926..c3423d7 100644
--- a/Outputs.md
+++ b/Outputs.md
@@ -1,20 +1,29 @@
 # Outputs
-[**Result Interface Specification**](https://vladmandic.github.io/human/typedoc/interfaces/result.html)
+Result of the `human.detect()` method is a single object that includes data for all enabled modules and all detected objects
+The `Result` object also includes a `persons` getter which, when invoked, sorts results according to the person each detected body part belongs to
+A `Result` object can also be generated as a smoothed, time-based interpolation of the last known `Result` using the `human.next()` method
-Result of `humand.detect()` is a single object that includes data for all enabled modules and all detected objects:
+
+- [**Result Interface Specification**](https://vladmandic.github.io/human/typedoc/interfaces/result.html)
+- [**Sample Result JSON**](../assets/sample-result.json)
+- [**Sample Persons JSON**](../assets/sample-persons.json)
+
+
+
+Simplified documentation of the `Result` object type:
 ```js
-result = {
+result: Result = {
   timestamp:           // timestamp in miliseconds when detection occured
   canvas:              // optional processed canvas
   face:                //
   [
     {
       id,              // face id number
-      confidence,      // returns faceConfidence if exists, otherwise boxConfidence
-      faceConfidence   // confidence in detection box after running mesh
-      boxConfidence    // confidence in detection box before running mesh
+      score,           // overall detection score; returns faceScore if it exists, otherwise boxScore
+      faceScore        // confidence score in detection box after running mesh
+      boxScore         // confidence score in detection box before running mesh
       box,             //  , clamped and normalized to input image size
       boxRaw,          //  , unclamped and normalized to range of 0..1
       mesh,            //  468 base points & 10 iris points, normalized to input impact size
@@ -22,7 +31,7 @@ result = {
       annotations,     //  32 base annotated landmarks & 2 iris annotations
       age,             // estimated age
       gender,          // 'male', 'female'
-      genderConfidence // confidence in gender detection
+      genderScore      // confidence score in gender detection
       embedding,       // [float] vector of number values used for face similarity compare
       iris,            // relative distance of iris to camera, multiple by focal lenght to get actual distance
       emotion:         // returns multiple possible emotions for a given face, each with probability
@@ -68,10 +77,10 @@ result = {
     [
       {
         id,            // hand id number
-        confidence,    //  ,
+        score,         //  , overall detection confidence score
         box,           //  , clamped and normalized to input image size
         boxRaw,        //  , unclamped and normalized to range of 0..1
-        landmarks,     //  21 points
+        keypoints,     //  21 points
         annotations,   //  ]> 5 annotated landmakrs
       }
     ],
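Tying back to the `Result` changes above, downstream code reads the renamed fields (`score`, `boxScore`, `faceScore`, `genderScore`, hand `keypoints`) and can use the `persons` getter and `human.next()` described there. The sketch below is a hypothetical consumer, not part of the patch; it assumes an existing `human` instance and a `result` returned by `human.detect()`, and the field selection follows the linked Result interface specification.

```js
// Sketch of consuming the renamed Result fields; assumes `human` and a `result`
// obtained from an earlier `await human.detect(input)` call.
for (const face of result.face) {
  // score/boxScore/faceScore/genderScore replace the former *Confidence fields
  console.log(`face score=${face.score} gender=${face.gender} genderScore=${face.genderScore}`);
}

for (const hand of result.hand) {
  // hand landmark points are now exposed as `keypoints` instead of `landmarks`
  console.log(`hand score=${hand.score} keypoints=${hand.keypoints.length}`);
}

// persons getter regroups face/body/hand/gesture results per detected person
for (const person of result.persons) {
  console.log(`person gestures=${person.gestures.length}`);
}

// smoothed, time-interpolated copy of the last known result, useful for video rendering
const interpolated = human.next(result);
console.log(`interpolated faces=${interpolated.face.length}`);
```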