implement blazepose and update demos

pull/91/head
Vladimir Mandic 2021-03-04 10:33:08 -05:00
parent db3e5da390
commit f32eb96b39
50 changed files with 1888 additions and 1725 deletions

README.md

@@ -1,20 +1,23 @@
 # Human Library
-### 3D Face Detection, Face Embedding & Recognition,
-### Body Pose Tracking, Hand & Finger Tracking,
-### Iris Analysis, Age & Gender & Emotion Prediction
-### & Gesture Recognition
+**3D Face Detection, Face Embedding & Recognition,**
+**Body Pose Tracking, Hand & Finger Tracking,**
+**Iris Analysis, Age & Gender & Emotion Prediction**
+**& Gesture Recognition**
 <br>
 Native JavaScript module using TensorFlow/JS Machine Learning library
 Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows and Linux
 - Browser/WebWorker: Compatible with *CPU*, *WebGL*, *WASM* and *WebGPU* backends
 - NodeJS: Compatible with software *tfjs-node* and CUDA accelerated backends *tfjs-node-gpu*
+Check out [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) for processing of live WebCam video or static images
 <br>
-### Project pages:
+## Project pages
 - [**Live Demo**](https://vladmandic.github.io/human/demo/index.html)
 - [**Code Repository**](https://github.com/vladmandic/human)
@@ -24,7 +27,7 @@ Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows an
 <br>
-### Wiki pages:
+## Wiki pages
 - [**Home**](https://github.com/vladmandic/human/wiki)
 - [**Demos**](https://github.com/vladmandic/human/wiki/Demos)
@@ -37,7 +40,7 @@ Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows an
 <br>
-### Additional notes:
+## Additional notes
 - [**Notes on Backends**](https://github.com/vladmandic/human/wiki/Backends)
 - [**Development Server**](https://github.com/vladmandic/human/wiki/Development-Server)
@@ -49,13 +52,35 @@ Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows an
 <br>
-*See [issues](https://github.com/vladmandic/human/issues?q=) and [discussions](https://github.com/vladmandic/human/discussions) for list of known limitations and planned enhancements*
+## Default models
+Default models in Human library are:
+- **Face Detection**: MediaPipe BlazeFace-Back
+- **Face Mesh**: MediaPipe FaceMesh
+- **Face Iris Analysis**: MediaPipe Iris
+- **Emotion Detection**: Oarriaga Emotion
+- **Gender Detection**: Oarriaga Gender
+- **Age Detection**: SSR-Net Age IMDB
+- **Body Analysis**: PoseNet
+- **Face Embedding**: Sirius-AI MobileFaceNet Embedding
+Note that alternative models are provided and can be enabled via configuration
+For example, `PoseNet` model can be switched for `BlazePose` model depending on the use case
+For more info, see [**Configuration Details**](https://github.com/vladmandic/human/wiki/Configuration) and [**List of Models**](https://github.com/vladmandic/human/wiki/Models)
+<br>
+*See [**issues**](https://github.com/vladmandic/human/issues?q=) and [**discussions**](https://github.com/vladmandic/human/discussions) for list of known limitations and planned enhancements*
 *Suggestions are welcome!*
 <br><hr><br>
-## Options ##
+## Options
+As presented in the demo application...
 ![Options visible in demo](assets/screenshot-menu.png)
@@ -65,12 +90,17 @@ Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows an
 <br>
+**Training image:**
+![Example Training Image](assets/screenshot-sample.png)
 **Using static images:**
-![Example Using Image](assets/screenshot1.jpg)
+![Example Using Image](assets/screenshot-images.jpg)
+**Live WebCam view:**
+![Example Using WebCam](assets/screenshot-webcam.jpg)
 <br>
-**Using webcam:**
-![Example Using WebCam](assets/screenshot2.jpg)
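The new Default models section above notes that `PoseNet` can be swapped for `BlazePose` via configuration. A minimal sketch of that override (option names and values mirror config.js as changed in this commit; the package import path is an assumption, adjust for local builds):

```js
// Sketch: switch body analysis from PoseNet to BlazePose via configuration overrides.
// Option names come from config.js in this commit; the import path is assumed.
import Human from '@vladmandic/human';

const human = new Human({
  body: {
    enabled: true,
    modelPath: '../models/blazepose.json', // default: '../models/posenet.json'
    modelType: 'blazepose',                // default: 'posenet-mobilenet'
    inputSize: 256,                        // 257 for posenet, 256 for blazepose
  },
});

// const result = await human.detect(inputImageOrVideo); // result.body holds the keypoints
```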

TODO.md

@@ -1,7 +1,5 @@
 # To-Do list for Human library
-- Explore BlazePose model
-- Explore MediaPipe TFLite models
 - Fix BlazeFace NodeJS missing ops
 - Prune pre-packaged models
 - Build Face embedding database

[Binary image assets changed (previews not shown): three added (434 KiB, 113 KiB, 177 KiB), one replaced (315 KiB before → 58 KiB after), two removed (360 KiB, 274 KiB)]

config.js

@@ -105,8 +105,7 @@ export default {
   age: {
     enabled: true,
-    modelPath: '../models/age-ssrnet-imdb.json', // can be 'age-ssrnet-imdb' or 'age-ssrnet-wiki'
-    // which determines training set for model
+    modelPath: '../models/age-ssrnet-imdb.json',
     inputSize: 64, // fixed value
     skipFrames: 31, // how many frames to go without re-running the detector
     // only used for video inputs
@@ -115,7 +114,7 @@
   gender: {
     enabled: true,
     minConfidence: 0.1, // threshold for discarding a prediction
-    modelPath: '../models/gender.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'
+    modelPath: '../models/gender.json', // can be 'gender' or 'gender-ssrnet-imdb'
     inputSize: 64, // fixed value
     skipFrames: 41, // how many frames to go without re-running the detector
     // only used for video inputs
@@ -126,7 +125,7 @@
     inputSize: 64, // fixed value
     minConfidence: 0.1, // threshold for discarding a prediction
     skipFrames: 21, // how many frames to go without re-running the detector
-    modelPath: '../models/emotion-large.json', // can be 'mini', 'large'
+    modelPath: '../models/emotion.json',
   },
   embedding: {
@@ -138,16 +137,17 @@
   body: {
     enabled: true,
-    modelPath: '../models/posenet.json',
-    inputSize: 257, // fixed value
+    modelPath: '../models/posenet.json', // can be 'posenet' or 'blazepose'
+    inputSize: 257, // fixed value, 257 for posenet and 256 for blazepose
     maxDetections: 10, // maximum number of people detected in the input
     // should be set to the minimum number for performance
+    // only valid for posenet as blazepose only detects single pose
     scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score
     // in non-maximum suppression
+    // only valid for posenet as blazepose only detects single pose
     nmsRadius: 20, // radius for deciding points are too close in non-maximum suppression
-    outputStride: 16, // size of block in which to run point detection, smaller value means higher resolution
-    // defined by model itself, can be 8, 16, or 32
-    modelType: 'MobileNet', // Human includes MobileNet version, but you can switch to ResNet
+    // only valid for posenet as blazepose only detects single pose
+    modelType: 'posenet-mobilenet', // can be 'posenet-mobilenet', 'posenet-resnet', 'blazepose'
   },
   hand: {
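The `skipFrames` options above describe simple frame-based caching on video inputs: a module re-runs its model only every N frames and reuses the last result in between. A hypothetical sketch of that pattern (names are ours, not the library's):

```js
// Hypothetical sketch of the skipFrames caching pattern described by the
// config comments: re-run inference every `skipFrames` frames on video,
// otherwise reuse the cached prediction.
let cached = null;
let framesSinceRun = 0;

async function runWithSkip(model, input, skipFrames, videoOptimized) {
  if (!videoOptimized || !cached || framesSinceRun >= skipFrames) {
    cached = await model.predict(input); // full inference
    framesSinceRun = 0;
  } else {
    framesSinceRun += 1; // skip: reuse last prediction
  }
  return cached;
}
```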

demo/browser.js

@@ -14,10 +14,12 @@ const userConfig = {}; // add any user configuration overrides
 const userConfig = {
   backend: 'wasm',
   async: false,
+  warmup: 'full',
   videoOptimized: false,
-  face: { enabled: true, iris: { enabled: false }, mesh: { enabled: true }, age: { enabled: true }, gender: { enabled: true }, emotion: { enabled: true }, embedding: { enabled: true } },
-  body: { enabled: false },
-  hand: { enabled: false },
+  face: { enabled: true, iris: { enabled: true }, mesh: { enabled: true }, age: { enabled: true }, gender: { enabled: true }, emotion: { enabled: true }, embedding: { enabled: true } },
+  hand: { enabled: true },
+  gestures: { enabled: true },
+  body: { enabled: true, modelType: 'blazepose', modelPath: '../models/blazepose.json' },
 };
 */
@@ -31,15 +33,16 @@ const ui = {
   baseFontProto: 'small-caps {size} "Segoe UI"',
   baseLineWidth: 12,
   crop: true,
-  columns: 4,
+  columns: 2,
   busy: false,
   facing: true,
   useWorker: false,
   worker: 'worker.js',
   samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'],
   compare: '../assets/sample-me.jpg',
+  drawLabels: true,
   drawBoxes: true,
-  drawPoints: false,
+  drawPoints: true,
   drawPolygons: true,
   fillPolygons: false,
   useDepth: true,
@@ -52,6 +55,7 @@
   detectFPS: [],
   drawFPS: [],
   buffered: false,
+  drawWarmup: false,
   drawThread: null,
   detectThread: null,
   framesDraw: 0,
@@ -120,7 +124,7 @@ async function drawResults(input) {
   await menu.process.updateChart('FPS', ui.detectFPS);
   // get updated canvas
-  if (ui.buffered || !result.canvas) result.canvas = await human.image(input, userConfig);
+  if (ui.buffered || !result.canvas) result.canvas = await human.image(input).canvas;
   // draw image from video
   const ctx = canvas.getContext('2d');
@@ -436,6 +440,7 @@ function setupMenu() {
   });
   menu.display.addHTML('<hr style="border-style: inset; border-color: dimgray">');
   menu.display.addBool('use 3D depth', ui, 'useDepth');
+  menu.display.addBool('print labels', ui, 'drawLabels');
   menu.display.addBool('draw boxes', ui, 'drawBoxes');
   menu.display.addBool('draw polygons', ui, 'drawPolygons');
   menu.display.addBool('Fill Polygons', ui, 'fillPolygons');
@@ -530,6 +535,18 @@
   document.getElementById('play').addEventListener('click', () => detectVideo());
 }
+
+async function drawWarmup(res) {
+  const canvas = document.getElementById('canvas');
+  canvas.width = res.canvas.width;
+  canvas.height = res.canvas.height;
+  const ctx = canvas.getContext('2d');
+  ctx.drawImage(res.canvas, 0, 0, res.canvas.width, res.canvas.height, 0, 0, canvas.width, canvas.height);
+  await draw.face(res.face, canvas, ui, human.facemesh.triangulation);
+  await draw.body(res.body, canvas, ui);
+  await draw.hand(res.hand, canvas, ui);
+  await draw.gesture(res.gesture, canvas, ui);
+}
 async function main() {
   log('Demo starting ...');
   log('Browser:', navigator?.userAgent);
@@ -543,7 +560,9 @@
   }
   if (!ui.useWorker) {
     status('initializing');
-    await human.warmup(userConfig); // this is not required, just pre-warms all models for faster initial inference
+    const res = await human.warmup(userConfig); // this is not required, just pre-warms all models for faster initial inference
+    ui.baseFont = ui.baseFontProto.replace(/{size}/, '16px');
+    if (res && res.canvas && ui.drawWarmup) await drawWarmup(res);
   }
   status('human: ready');
   document.getElementById('loader').style.display = 'none';
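Read together, the warmup changes mean `human.warmup()` now returns the detection result it produced on the built-in sample image, which the demo can render with the new `drawWarmup()` helper. A condensed sketch of that startup path (error handling omitted):

```js
// Startup sketch: pre-warm all models, then optionally draw the warmup result
// with the same helpers the demo uses for live detection.
const res = await human.warmup(userConfig); // optional; speeds up first real inference
if (res && res.canvas && ui.drawWarmup) await drawWarmup(res); // res has face/body/hand/gesture
```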

demo/draw.js

@@ -1,3 +1,12 @@
+async function drawPoint(canvas, x = 0, y = 0, radius = 0, color = 'black', label) {
+  const ctx = canvas.getContext('2d');
+  ctx.fillStyle = color;
+  ctx.beginPath();
+  ctx.arc(x, y, radius, 0, 2 * Math.PI);
+  ctx.fill();
+  if (label) ctx.fillText(label, x + 4, y + 4);
+}
+
 async function drawGesture(result, canvas, ui) {
   if (!result) return;
   const ctx = canvas.getContext('2d');
@@ -11,7 +20,9 @@ async function drawGesture(result, canvas, ui) {
     const label = `${where[0]} ${person}: ${what[1]}`;
     ctx.fillStyle = 'black';
     ctx.fillText(label, 8, 2 + (i * ui.baseLineHeight));
+    // ctx.fillText(label, 151, i * 16 + 101);
     ctx.fillStyle = ui.baseLabel;
+    // ctx.fillText(label, 150, i * 16 + 100);
     ctx.fillText(label, 6, 0 + (i * ui.baseLineHeight));
     i += 1;
   }
@@ -48,8 +59,10 @@ async function drawFace(result, canvas, ui, triangulation) {
     const x = Math.max(face.box[0], 0);
     const y = i * ui.baseLineHeight + face.box[1];
     ctx.fillText(labels[i], x + 5, y + 16);
+    // ctx.fillText(labels[i], 151, i * 16 + 28);
     ctx.fillStyle = ui.baseLabel;
     ctx.fillText(labels[i], x + 4, y + 15);
+    // ctx.fillText(labels[i], 150, i * 16 + 27);
   }
   ctx.fillStyle = ui.baseColor;
   ctx.stroke();
@@ -119,13 +132,17 @@ async function drawBody(result, canvas, ui) {
   const ctx = canvas.getContext('2d');
   ctx.lineJoin = 'round';
   for (let i = 0; i < result.length; i++) {
+    // result[i].keypoints = result[i].keypoints.filter((a) => a.score > 0.5);
     if (!lastDrawnPose[i] && ui.buffered) lastDrawnPose[i] = { ...result[i] };
-    ctx.fillStyle = ui.baseColor;
     ctx.strokeStyle = ui.baseColor;
     ctx.font = ui.baseFont;
     ctx.lineWidth = ui.baseLineWidth;
     if (ui.drawPoints) {
       for (let pt = 0; pt < result[i].keypoints.length; pt++) {
+        ctx.fillStyle = ui.useDepth && result[i].keypoints[pt].position.z ? `rgba(${127.5 + (2 * result[i].keypoints[pt].position.z)}, ${127.5 - (2 * result[i].keypoints[pt].position.z)}, 255, 0.5)` : ui.baseColor;
+        if (ui.drawLabels) {
+          ctx.fillText(`${result[i].keypoints[pt].part}`, result[i].keypoints[pt].position.x + 4, result[i].keypoints[pt].position.y + 4);
+        }
         ctx.beginPath();
         if (ui.buffered) {
           lastDrawnPose[i].keypoints[pt].position.x = (lastDrawnPose[i].keypoints[pt].position.x + result[i].keypoints[pt].position.x) / 2;
@@ -162,6 +179,10 @@ async function drawBody(result, canvas, ui) {
       if (part) path.lineTo(part.position.x, part.position.y);
       part = result[i].keypoints.find((a) => a.part === 'leftAnkle');
       if (part) path.lineTo(part.position.x, part.position.y);
+      part = result[i].keypoints.find((a) => a.part === 'leftHeel');
+      if (part) path.lineTo(part.position.x, part.position.y);
+      part = result[i].keypoints.find((a) => a.part === 'leftFoot');
+      if (part) path.lineTo(part.position.x, part.position.y);
     }
     // leg right
     root = result[i].keypoints.find((a) => a.part === 'rightHip');
@@ -171,6 +192,10 @@ async function drawBody(result, canvas, ui) {
       if (part) path.lineTo(part.position.x, part.position.y);
       part = result[i].keypoints.find((a) => a.part === 'rightAnkle');
       if (part) path.lineTo(part.position.x, part.position.y);
+      part = result[i].keypoints.find((a) => a.part === 'rightHeel');
+      if (part) path.lineTo(part.position.x, part.position.y);
+      part = result[i].keypoints.find((a) => a.part === 'rightFoot');
+      if (part) path.lineTo(part.position.x, part.position.y);
     }
     // arm left
     root = result[i].keypoints.find((a) => a.part === 'leftShoulder');
@@ -180,6 +205,8 @@ async function drawBody(result, canvas, ui) {
       if (part) path.lineTo(part.position.x, part.position.y);
       part = result[i].keypoints.find((a) => a.part === 'leftWrist');
       if (part) path.lineTo(part.position.x, part.position.y);
+      part = result[i].keypoints.find((a) => a.part === 'leftPalm');
+      if (part) path.lineTo(part.position.x, part.position.y);
     }
     // arm right
     root = result[i].keypoints.find((a) => a.part === 'rightShoulder');
@@ -189,6 +216,8 @@ async function drawBody(result, canvas, ui) {
       if (part) path.lineTo(part.position.x, part.position.y);
       part = result[i].keypoints.find((a) => a.part === 'rightWrist');
       if (part) path.lineTo(part.position.x, part.position.y);
+      part = result[i].keypoints.find((a) => a.part === 'rightPalm');
+      if (part) path.lineTo(part.position.x, part.position.y);
     }
     // draw all
     ctx.stroke(path);
@@ -253,4 +282,5 @@ export default {
   body: drawBody,
   hand: drawHand,
   gesture: drawGesture,
+  point: drawPoint,
 };
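The skeleton-drawing code above repeats a find-then-lineTo pattern for every body part, now extended with the BlazePose-only parts (heel, foot, palm). A hypothetical helper expressing the same logic (the `connectParts` name is ours, not part of the library):

```js
// Hypothetical helper: extend a Path2D through named keypoints, silently
// skipping parts the active model does not report (e.g. 'leftHeel' exists
// for blazepose but not for posenet).
function connectParts(path, keypoints, partNames) {
  for (const name of partNames) {
    const part = keypoints.find((a) => a.part === name);
    if (part) path.lineTo(part.position.x, part.position.y);
  }
}

// usage sketch for the left-leg section of drawBody:
// connectParts(path, result[i].keypoints, ['leftKnee', 'leftAnkle', 'leftHeel', 'leftFoot']);
```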

demo/node.js

@@ -14,16 +14,19 @@ const myConfig = {
   videoOptimized: false,
   async: false,
   face: {
-    // detector: { modelPath: 'file://models/faceboxes.json' },
-    detector: { modelPath: 'file://models/blazeface-back.json' }, // cannot use blazeface in nodejs due to missing required kernel function in tfjs-node
-    mesh: { modelPath: 'file://models/facemesh.json' },
-    iris: { modelPath: 'file://models/iris.json' },
-    age: { modelPath: 'file://models/age-ssrnet-imdb.json' },
-    gender: { modelPath: 'file://models/gender.json' },
-    emotion: { modelPath: 'file://models/emotion-large.json' },
+    enabled: true,
+    detector: { modelPath: 'file://models/faceboxes.json', enabled: true, minConfidence: 0.5 },
+    // detector: { modelPath: 'file://models/blazeface-back.json', enabled: false }, // cannot use blazeface in nodejs due to missing required kernel function in tfjs-node
+    mesh: { modelPath: 'file://models/facemesh.json', enabled: false }, // depends on blazeface detector
+    iris: { modelPath: 'file://models/iris.json', enabled: true },
+    age: { modelPath: 'file://models/age-ssrnet-imdb.json', enabled: true },
+    gender: { modelPath: 'file://models/gender.json', enabled: true },
+    emotion: { modelPath: 'file://models/emotion.json', enabled: true },
   },
-  body: { modelPath: 'file://models/posenet.json' },
+  // body: { modelPath: 'file://models/blazepose.json', modelType: 'blazepose', inputSize: 256, enabled: true },
+  body: { modelPath: 'file://models/posenet.json', modelType: 'posenet', inputSize: 257, enabled: true },
   hand: {
+    enabled: true,
     detector: { modelPath: 'file://models/handdetect.json' },
     skeleton: { modelPath: 'file://models/handskeleton.json' },
   },
@@ -35,7 +38,14 @@ async function init() {
   // create instance of human
   human = new Human(myConfig);
   // pre-load models
+  log.info('Human:', human.version);
+  log.info('Active Configuration', human.config);
+  log.info('TFJS Version:', human.tf.version_core, 'Backend:', tf.getBackend());
+  log.info('TFJS Flags:', human.tf.env().features);
   await human.load();
+  const loaded = Object.keys(human.models).filter((a) => human.models[a]);
+  log.info('Loaded:', loaded);
+  log.info('Memory state:', human.tf.engine().memory());
 }
 async function detect(input) {
@@ -74,6 +84,7 @@ async function test() {
 async function main() {
   log.info('NodeJS:', process.version);
+  log.info('Current folder:', process.env.PWD);
   await init();
   if (process.argv.length !== 3) {
     log.warn('Parameters: <input image> missing');
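For reference, the demo above expects a single input image argument (per its `process.argv` check), i.e. `node demo/node.js <input image>`. A minimal standalone sketch of the same NodeJS flow (the `require` form and package name are assumptions; adjust for local builds):

```js
// Minimal NodeJS detection sketch mirroring the demo's flow.
const fs = require('fs');
const Human = require('@vladmandic/human').default; // assumed package entry point

async function run(imagePath) {
  const human = new Human({ videoOptimized: false });
  await human.load(); // pre-load configured models
  const buffer = fs.readFileSync(imagePath);
  const tensor = human.tf.node.decodeImage(buffer); // decode via tfjs-node
  const result = await human.detect(tensor);
  tensor.dispose();
  console.log(result.body); // body keypoints from posenet or blazepose
}

run(process.argv[2]);
```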

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/demo-browser-index.json

@@ -1,11 +1,11 @@
 {
   "inputs": {
     "dist/human.esm.js": {
-      "bytes": 1339605,
+      "bytes": 1341917,
       "imports": []
     },
     "demo/draw.js": {
-      "bytes": 10783,
+      "bytes": 12536,
       "imports": []
     },
     "demo/menu.js": {
@@ -17,7 +17,7 @@
       "imports": []
     },
     "demo/browser.js": {
-      "bytes": 27252,
+      "bytes": 28101,
       "imports": [
         {
           "path": "dist/human.esm.js",
@@ -43,7 +43,7 @@
       "imports": [],
       "exports": [],
       "inputs": {},
-      "bytes": 2040655
+      "bytes": 2051266
     },
     "dist/demo-browser-index.js": {
@@ -51,10 +51,10 @@
       "imports": [],
       "entryPoint": "demo/browser.js",
       "inputs": {
         "dist/human.esm.js": {
-          "bytesInOutput": 1332127
+          "bytesInOutput": 1334439
         },
         "demo/draw.js": {
-          "bytesInOutput": 6241
+          "bytesInOutput": 7178
         },
         "demo/menu.js": {
           "bytesInOutput": 10696
@@ -63,10 +63,10 @@
           "bytesInOutput": 6759
         },
         "demo/browser.js": {
-          "bytesInOutput": 17737
+          "bytesInOutput": 18268
         }
       },
-      "bytes": 1380945
+      "bytes": 1384725
     }
   }
 }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.js (vendored, 516 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.json (vendored, 56 changed lines)

@@ -192,7 +192,7 @@
       ]
     },
     "src/posenet/modelBase.ts": {
-      "bytes": 1333,
+      "bytes": 1349,
       "imports": [
         {
           "path": "dist/tfjs.esm.js",
@@ -214,7 +214,7 @@
       ]
     },
     "src/posenet/keypoints.ts": {
-      "bytes": 2025,
+      "bytes": 2041,
       "imports": []
     },
     "src/posenet/vectors.ts": {
@@ -240,7 +240,7 @@
       ]
     },
     "src/posenet/decodePose.ts": {
-      "bytes": 5152,
+      "bytes": 5182,
       "imports": [
         {
           "path": "src/posenet/keypoints.ts",
@@ -257,7 +257,7 @@
       ]
     },
     "src/posenet/decodeMultiple.ts": {
-      "bytes": 2301,
+      "bytes": 2323,
       "imports": [
         {
           "path": "src/posenet/buildParts.ts",
@@ -383,6 +383,23 @@
         }
       ]
     },
+    "src/blazepose/blazepose.ts": {
+      "bytes": 3327,
+      "imports": [
+        {
+          "path": "src/log.ts",
+          "kind": "import-statement"
+        },
+        {
+          "path": "dist/tfjs.esm.js",
+          "kind": "import-statement"
+        },
+        {
+          "path": "src/profile.ts",
+          "kind": "import-statement"
+        }
+      ]
+    },
     "src/gesture/gesture.ts": {
       "bytes": 4496,
       "imports": []
@@ -409,7 +426,7 @@
       ]
     },
     "config.js": {
-      "bytes": 10438,
+      "bytes": 10403,
       "imports": []
     },
     "src/sample.ts": {
@@ -417,11 +434,11 @@
       "imports": []
     },
     "package.json": {
-      "bytes": 2560,
+      "bytes": 2594,
       "imports": []
     },
     "src/human.ts": {
-      "bytes": 20100,
+      "bytes": 21042,
       "imports": [
         {
           "path": "src/log.ts",
@@ -467,6 +484,10 @@
           "path": "src/handpose/handpose.ts",
           "kind": "import-statement"
         },
+        {
+          "path": "src/blazepose/blazepose.ts",
+          "kind": "import-statement"
+        },
         {
           "path": "src/gesture/gesture.ts",
           "kind": "import-statement"
@@ -499,7 +520,7 @@
       "imports": [],
       "exports": [],
       "inputs": {},
-      "bytes": 1941875
+      "bytes": 1948302
     },
     "dist/human.esm.js": {
       "imports": [],
@@ -518,7 +539,7 @@
           "bytesInOutput": 252
         },
         "dist/tfjs.esm.js": {
-          "bytesInOutput": 1056626
+          "bytesInOutput": 1056636
         },
         "src/tfjs/backend.ts": {
           "bytesInOutput": 1053
@@ -539,7 +560,7 @@
           "bytesInOutput": 5054
         },
         "src/human.ts": {
-          "bytesInOutput": 10612
+          "bytesInOutput": 11312
         },
         "src/faceboxes/faceboxes.ts": {
           "bytesInOutput": 1576
@@ -563,7 +584,7 @@
           "bytesInOutput": 1016
         },
         "src/posenet/modelBase.ts": {
-          "bytesInOutput": 646
+          "bytesInOutput": 662
         },
         "src/posenet/heapSort.ts": {
           "bytesInOutput": 1017
@@ -572,7 +593,7 @@
           "bytesInOutput": 456
         },
         "src/posenet/decodePose.ts": {
-          "bytesInOutput": 1292
+          "bytesInOutput": 1283
         },
         "src/posenet/vectors.ts": {
           "bytesInOutput": 346
@@ -581,7 +602,7 @@
           "bytesInOutput": 768
         },
         "src/posenet/decodeMultiple.ts": {
-          "bytesInOutput": 557
+          "bytesInOutput": 529
         },
         "src/posenet/util.ts": {
           "bytesInOutput": 354
@@ -604,6 +625,9 @@
         "src/handpose/anchors.ts": {
          "bytesInOutput": 126985
         },
+        "src/blazepose/blazepose.ts": {
+          "bytesInOutput": 1613
+        },
         "src/gesture/gesture.ts": {
           "bytesInOutput": 2391
         },
@@ -614,16 +638,16 @@
           "bytesInOutput": 2355
         },
         "config.js": {
-          "bytesInOutput": 1453
+          "bytesInOutput": 1439
         },
         "src/sample.ts": {
           "bytesInOutput": 55295
         },
         "package.json": {
-          "bytesInOutput": 2572
+          "bytesInOutput": 2596
         }
       },
-      "bytes": 1339605
+      "bytes": 1341917
     }
   }
 }

dist/human.iife.json (vendored, 56 changed lines)

@@ -192,7 +192,7 @@
       ]
     },
     "src/posenet/modelBase.ts": {
-      "bytes": 1333,
+      "bytes": 1349,
       "imports": [
         {
           "path": "dist/tfjs.esm.js",
@@ -214,7 +214,7 @@
       ]
     },
     "src/posenet/keypoints.ts": {
-      "bytes": 2025,
+      "bytes": 2041,
       "imports": []
     },
     "src/posenet/vectors.ts": {
@@ -240,7 +240,7 @@
       ]
     },
     "src/posenet/decodePose.ts": {
-      "bytes": 5152,
+      "bytes": 5182,
      "imports": [
         {
           "path": "src/posenet/keypoints.ts",
@@ -257,7 +257,7 @@
       ]
     },
     "src/posenet/decodeMultiple.ts": {
-      "bytes": 2301,
+      "bytes": 2323,
       "imports": [
         {
           "path": "src/posenet/buildParts.ts",
@@ -383,6 +383,23 @@
         }
       ]
     },
+    "src/blazepose/blazepose.ts": {
+      "bytes": 3327,
+      "imports": [
+        {
+          "path": "src/log.ts",
+          "kind": "import-statement"
+        },
+        {
+          "path": "dist/tfjs.esm.js",
+          "kind": "import-statement"
+        },
+        {
+          "path": "src/profile.ts",
+          "kind": "import-statement"
+        }
+      ]
+    },
     "src/gesture/gesture.ts": {
       "bytes": 4496,
       "imports": []
@@ -409,7 +426,7 @@
       ]
     },
     "config.js": {
-      "bytes": 10438,
+      "bytes": 10403,
       "imports": []
     },
     "src/sample.ts": {
@@ -417,11 +434,11 @@
       "imports": []
     },
     "package.json": {
-      "bytes": 2560,
+      "bytes": 2594,
       "imports": []
     },
     "src/human.ts": {
-      "bytes": 20100,
+      "bytes": 21042,
       "imports": [
         {
           "path": "src/log.ts",
@@ -467,6 +484,10 @@
           "path": "src/handpose/handpose.ts",
           "kind": "import-statement"
         },
+        {
+          "path": "src/blazepose/blazepose.ts",
+          "kind": "import-statement"
+        },
         {
           "path": "src/gesture/gesture.ts",
           "kind": "import-statement"
@@ -499,7 +520,7 @@
       "imports": [],
       "exports": [],
       "inputs": {},
-      "bytes": 1941886
+      "bytes": 1948313
     },
     "dist/human.ts": {
       "imports": [],
@@ -513,13 +534,13 @@
           "bytesInOutput": 1690
         },
         "src/human.ts": {
-          "bytesInOutput": 10648
+          "bytesInOutput": 11348
         },
         "src/log.ts": {
           "bytesInOutput": 252
         },
         "dist/tfjs.esm.js": {
-          "bytesInOutput": 1056626
+          "bytesInOutput": 1056636
         },
         "src/tfjs/backend.ts": {
           "bytesInOutput": 1053
@@ -561,7 +582,7 @@
           "bytesInOutput": 1016
         },
         "src/posenet/modelBase.ts": {
-          "bytesInOutput": 646
+          "bytesInOutput": 662
         },
         "src/posenet/heapSort.ts": {
           "bytesInOutput": 1017
@@ -570,7 +591,7 @@
           "bytesInOutput": 456
         },
         "src/posenet/decodePose.ts": {
-          "bytesInOutput": 1292
+          "bytesInOutput": 1283
         },
         "src/posenet/vectors.ts": {
           "bytesInOutput": 346
@@ -579,7 +600,7 @@
           "bytesInOutput": 768
         },
         "src/posenet/decodeMultiple.ts": {
-          "bytesInOutput": 557
+          "bytesInOutput": 529
         },
         "src/posenet/util.ts": {
           "bytesInOutput": 354
@@ -602,6 +623,9 @@
         "src/handpose/anchors.ts": {
           "bytesInOutput": 126985
         },
+        "src/blazepose/blazepose.ts": {
+          "bytesInOutput": 1613
+        },
         "src/gesture/gesture.ts": {
           "bytesInOutput": 2391
         },
@@ -612,16 +636,16 @@
           "bytesInOutput": 2355
         },
         "config.js": {
-          "bytesInOutput": 1453
+          "bytesInOutput": 1439
         },
         "src/sample.ts": {
           "bytesInOutput": 55295
         },
         "package.json": {
-          "bytesInOutput": 2572
+          "bytesInOutput": 2596
         }
       },
-      "bytes": 1339647
+      "bytes": 1341959
     }
   }
 }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.js (vendored, 18 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.json (vendored, 58 changed lines)

@@ -192,7 +192,7 @@
       ]
     },
     "src/posenet/modelBase.ts": {
-      "bytes": 1333,
+      "bytes": 1349,
       "imports": [
         {
           "path": "dist/tfjs.esm.js",
@@ -214,7 +214,7 @@
       ]
     },
     "src/posenet/keypoints.ts": {
-      "bytes": 2025,
+      "bytes": 2041,
       "imports": []
     },
     "src/posenet/vectors.ts": {
@@ -240,7 +240,7 @@
       ]
     },
     "src/posenet/decodePose.ts": {
-      "bytes": 5152,
+      "bytes": 5182,
       "imports": [
         {
           "path": "src/posenet/keypoints.ts",
@@ -257,7 +257,7 @@
       ]
     },
     "src/posenet/decodeMultiple.ts": {
-      "bytes": 2301,
+      "bytes": 2323,
       "imports": [
         {
           "path": "src/posenet/buildParts.ts",
@@ -383,6 +383,23 @@
         }
       ]
     },
+    "src/blazepose/blazepose.ts": {
+      "bytes": 3327,
+      "imports": [
+        {
+          "path": "src/log.ts",
+          "kind": "import-statement"
+        },
+        {
+          "path": "dist/tfjs.esm.js",
+          "kind": "import-statement"
+        },
+        {
+          "path": "src/profile.ts",
+          "kind": "import-statement"
+        }
+      ]
+    },
     "src/gesture/gesture.ts": {
       "bytes": 4496,
       "imports": []
@@ -409,7 +426,7 @@
       ]
     },
     "config.js": {
-      "bytes": 10438,
+      "bytes": 10403,
       "imports": []
     },
     "src/sample.ts": {
@@ -417,11 +434,11 @@
       "imports": []
     },
     "package.json": {
-      "bytes": 2560,
+      "bytes": 2594,
       "imports": []
     },
     "src/human.ts": {
-      "bytes": 20100,
+      "bytes": 21042,
       "imports": [
         {
           "path": "src/log.ts",
@@ -467,6 +484,10 @@
           "path": "src/handpose/handpose.ts",
           "kind": "import-statement"
         },
+        {
+          "path": "src/blazepose/blazepose.ts",
+          "kind": "import-statement"
+        },
         {
           "path": "src/gesture/gesture.ts",
           "kind": "import-statement"
@@ -499,7 +520,7 @@
       "imports": [],
       "exports": [],
       "inputs": {},
-      "bytes": 708630
+      "bytes": 715070
     },
     "dist/human.node-gpu.js": {
       "imports": [],
@@ -510,13 +531,13 @@
           "bytesInOutput": 551
         },
         "src/blazeface/facemesh.ts": {
-          "bytesInOutput": 1577
+          "bytesInOutput": 1585
         },
         "src/posenet/keypoints.ts": {
           "bytesInOutput": 1677
         },
         "src/human.ts": {
-          "bytesInOutput": 10620
+          "bytesInOutput": 11320
         },
         "src/log.ts": {
           "bytesInOutput": 251
@@ -540,7 +561,7 @@
           "bytesInOutput": 28973
         },
         "src/faceboxes/faceboxes.ts": {
-          "bytesInOutput": 1613
+          "bytesInOutput": 1618
         },
         "src/profile.ts": {
           "bytesInOutput": 604
@@ -561,7 +582,7 @@
           "bytesInOutput": 1039
         },
         "src/posenet/modelBase.ts": {
-          "bytesInOutput": 656
+          "bytesInOutput": 672
         },
         "src/posenet/heapSort.ts": {
           "bytesInOutput": 1017
@@ -570,7 +591,7 @@
           "bytesInOutput": 454
         },
         "src/posenet/decodePose.ts": {
-          "bytesInOutput": 1282
+          "bytesInOutput": 1271
         },
         "src/posenet/vectors.ts": {
           "bytesInOutput": 345
@@ -579,7 +600,7 @@
           "bytesInOutput": 823
         },
         "src/posenet/decodeMultiple.ts": {
-          "bytesInOutput": 553
+          "bytesInOutput": 525
         },
         "src/posenet/util.ts": {
           "bytesInOutput": 352
@@ -602,6 +623,9 @@
         "src/handpose/anchors.ts": {
           "bytesInOutput": 126985
         },
+        "src/blazepose/blazepose.ts": {
+          "bytesInOutput": 1637
+        },
         "src/gesture/gesture.ts": {
           "bytesInOutput": 2391
         },
@@ -612,16 +636,16 @@
           "bytesInOutput": 10973
         },
         "config.js": {
-          "bytesInOutput": 1453
+          "bytesInOutput": 1439
         },
         "src/sample.ts": {
           "bytesInOutput": 55295
         },
         "package.json": {
-          "bytesInOutput": 2569
+          "bytesInOutput": 2593
         }
       },
-      "bytes": 277134
+      "bytes": 279471
     }
   }
 }

dist/human.ts (vendored, 516 changed lines)

File diff suppressed because one or more lines are too long

dist/human.ts.map (vendored, 6 changed lines)

File diff suppressed because one or more lines are too long


File diff suppressed because one or more lines are too long

models/blazepose.bin (new binary file, not shown)

models/blazepose.json (new file, 631 lines)

File diff suppressed because one or more lines are too long

[removed model file follows; its corresponding binary weights file is not shown]

@@ -1,105 +0,0 @@
{
"format": "graph-model",
"generatedBy": "2.3.1",
"convertedBy": "TensorFlow.js Converter v2.4.0",
"userDefinedMetadata":
{
"signature":
{
"inputs": {"input_1:0":{"name":"input_1:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"64"},{"size":"64"},{"size":"1"}]}}},
"outputs": {"Identity:0":{"name":"Identity:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"7"}]}}}
}
},
"modelTopology":
{
"node":
[
{"name":"unknown_60","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"64"},{"size":"1"}]}}}}},
{"name":"unknown_66","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"128"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_43","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_49","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"64"},{"size":"1"}]}}}}},
{"name":"unknown_26","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"16"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_32","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_9","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"8"},{"size":"1"}]}}}}},
{"name":"unknown_15","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"16"},{"size":"1"}]}}}}},
{"name":"unknown_77","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"128"},{"size":"7"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_78","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"7"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}}}},
{"name":"input_1","op":"Placeholder","attr":{"dtype":{"type":"DT_FLOAT"},"shape":{"shape":{"dim":[{"size":"-1"},{"size":"64"},{"size":"64"},{"size":"1"}]}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"1"},{"size":"8"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"128"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"8"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"8"},{"size":"8"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"8"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"8"},{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"128"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"8"},{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"16"},{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"16"},{"size":"32"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"16"},{"size":"32"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"128"},{"size":"128"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"64"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/activation_1/Relu","op":"_FusedConv2D","input":["input_1","StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0}}},
{"name":"StatefulPartitionedCall/model_1/activation_2/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/activation_1/Relu","StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"explicit_paddings":{"list":{}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_2/Relu","unknown_9"],"attr":{"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_3/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/activation_2/Relu","StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"strides":{"list":{"i":["1","2","2","1"]}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"num_args":{"i":"1"},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/activation_3/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"VkFMSUQ="},"strides":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"explicit_paddings":{"list":{}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_3/Relu","unknown_15"],"attr":{"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_5/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"padding":{"s":"VkFMSUQ="},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/max_pooling2d_1/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_5/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","2","2","1"]}},"data_format":{"s":"TkhXQw=="},"ksize":{"list":{"i":["1","3","3","1"]}},"padding":{"s":"U0FNRQ=="}}},
{"name":"StatefulPartitionedCall/model_1/add_1/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_1/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_3/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/add_1/add","unknown_26"],"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_6/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_1/add","StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","2","2","1"]}},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true}}},
{"name":"StatefulPartitionedCall/model_1/activation_4/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"epsilon":{"f":0},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="},"strides":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_4/Relu","unknown_32"],"attr":{"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"padding":{"s":"U0FNRQ=="}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_8/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"padding":{"s":"VkFMSUQ="}}},
{"name":"StatefulPartitionedCall/model_1/max_pooling2d_2/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_8/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","2","2","1"]}},"ksize":{"list":{"i":["1","3","3","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/add_2/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_2/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_6/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/add_2/add","unknown_43"],"attr":{"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_9/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_2/add","StatefulPartitionedCall/model_1/conv2d_5/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_5/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"U0FNRQ=="},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/activation_5/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"epsilon":{"f":0},"num_args":{"i":"1"},"padding":{"s":"VkFMSUQ="},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"explicit_paddings":{"list":{}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_5/Relu","unknown_49"],"attr":{"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_11/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"padding":{"s":"VkFMSUQ="}}},
{"name":"StatefulPartitionedCall/model_1/max_pooling2d_3/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_11/FusedBatchNormV3"],"attr":{"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","2","2","1"]}},"data_format":{"s":"TkhXQw=="},"ksize":{"list":{"i":["1","3","3","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/add_3/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_3/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_9/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_12/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_3/add","StatefulPartitionedCall/model_1/conv2d_6/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_6/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"strides":{"list":{"i":["1","2","2","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/add_3/add","unknown_60"],"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="}}},
{"name":"StatefulPartitionedCall/model_1/activation_6/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"epsilon":{"f":0},"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="},"use_cudnn_on_gpu":{"b":true}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_6/Relu","unknown_66"],"attr":{"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_14/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"VkFMSUQ="},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="}}},
{"name":"StatefulPartitionedCall/model_1/max_pooling2d_4/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_14/FusedBatchNormV3"],"attr":{"data_format":{"s":"TkhXQw=="},"ksize":{"list":{"i":["1","3","3","1"]}},"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/add_4/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_4/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_12/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_7/BiasAdd","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_4/add","unknown_77","unknown_78"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean","op":"Mean","input":["StatefulPartitionedCall/model_1/conv2d_7/BiasAdd","StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices"],"attr":{"keep_dims":{"b":false},"Tidx":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/predictions/Softmax","op":"Softmax","input":["StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Identity","op":"Identity","input":["StatefulPartitionedCall/model_1/predictions/Softmax"],"attr":{"T":{"type":"DT_FLOAT"}}}
],
"library": {},
"versions":
{
"producer": 440
}
},
"weightsManifest":
[
{
"paths": ["emotion-mini.bin"],
"weights": [{"name":"unknown_60","shape":[3,3,64,1],"dtype":"float32"},{"name":"unknown_66","shape":[3,3,128,1],"dtype":"float32"},{"name":"unknown_43","shape":[3,3,32,1],"dtype":"float32"},{"name":"unknown_49","shape":[3,3,64,1],"dtype":"float32"},{"name":"unknown_26","shape":[3,3,16,1],"dtype":"float32"},{"name":"unknown_32","shape":[3,3,32,1],"dtype":"float32"},{"name":"unknown_9","shape":[3,3,8,1],"dtype":"float32"},{"name":"unknown_15","shape":[3,3,16,1],"dtype":"float32"},{"name":"unknown_77","shape":[3,3,128,7],"dtype":"float32"},{"name":"unknown_78","shape":[7],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","shape":[3,3,1,8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset","shape":[8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","shape":[3,3,8,8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset","shape":[8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","shape":[1,1,8,16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","shape":[1,1,8,16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","shape":[1,1,16,16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","shape":[1,1,16,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","shape":[1,1,16,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_weights","shape":[1,1,128,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","shape":[1,1,32,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_weights","shape":[1,1,32,64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_bn_offset","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_weights","shape":[1,1,32,64],"dtype":"float32"},{"name":"Statef
ulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_bn_offset","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_weights","shape":[1,1,64,64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_bn_offset","shape":[64],"dtype":"float32"}]
}
]
}
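For reference, the string attributes in the graph definition above (`fused_ops`, `data_format`, `padding`) are base64-encoded. A quick NodeJS sketch, not part of the commit, decodes the values that appear in this file:

```ts
// Decode the base64-encoded string attributes seen in the graph nodes above.
const attrs = ['Qmlhc0FkZA==', 'UmVsdQ==', 'TkhXQw==', 'U0FNRQ==', 'VkFMSUQ='];
for (const a of attrs) console.log(a, '->', Buffer.from(a, 'base64').toString('utf-8'));
// Qmlhc0FkZA== -> BiasAdd, UmVsdQ== -> Relu, TkhXQw== -> NHWC, U0FNRQ== -> SAME, VkFMSUQ= -> VALID
```

So the fused ops are BiasAdd and Relu, the data format is NHWC, and padding alternates between SAME and VALID.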

View File

@ -69,7 +69,7 @@
"weightsManifest": "weightsManifest":
[ [
{ {
"paths": ["emotion-large.bin"], "paths": ["emotion.bin"],
"weights": [{"name":"unknown_26","shape":[3,3,128,1],"dtype":"float32"},{"name":"unknown_32","shape":[3,3,256,1],"dtype":"float32"},{"name":"unknown_9","shape":[3,3,64,1],"dtype":"float32"},{"name":"unknown_15","shape":[3,3,128,1],"dtype":"float32"},{"name":"unknown_43","shape":[3,3,256,7],"dtype":"float32"},{"name":"unknown_44","shape":[7],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","shape":[3,3,1,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","shape":[3,3,32,64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","shape":[1,1,256,256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset","shape":[256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","shape":[1,1,128,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","shape":[1,1,128,256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset","shape":[256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","shape":[1,1,128,256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset","shape":[256],"dtype":"float32"}] "weights": 
[{"name":"unknown_26","shape":[3,3,128,1],"dtype":"float32"},{"name":"unknown_32","shape":[3,3,256,1],"dtype":"float32"},{"name":"unknown_9","shape":[3,3,64,1],"dtype":"float32"},{"name":"unknown_15","shape":[3,3,128,1],"dtype":"float32"},{"name":"unknown_43","shape":[3,3,256,7],"dtype":"float32"},{"name":"unknown_44","shape":[7],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","shape":[3,3,1,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","shape":[3,3,32,64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","shape":[1,1,256,256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset","shape":[256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","shape":[1,1,128,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","shape":[1,1,128,256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset","shape":[256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","shape":[1,1,128,256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset","shape":[256],"dtype":"float32"}]
   }
 ]

Binary file not shown.

File diff suppressed because one or more lines are too long

2
package-lock.json generated
View File

@ -1,6 +1,6 @@
{ {
"name": "@vladmandic/human", "name": "@vladmandic/human",
"version": "0.30.6", "version": "0.40.0",
"lockfileVersion": 1, "lockfileVersion": 1,
"requires": true, "requires": true,
"dependencies": { "dependencies": {

View File

@ -1,6 +1,6 @@
{ {
"name": "@vladmandic/human", "name": "@vladmandic/human",
"version": "0.30.6", "version": "0.40.0",
"description": "Human: AI-powered 3D Face Detection, Face Embedding & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition", "description": "Human: AI-powered 3D Face Detection, Face Embedding & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition",
"sideEffects": false, "sideEffects": false,
"main": "dist/human.node.js", "main": "dist/human.node.js",
@ -71,6 +71,8 @@
"age-estimation", "age-estimation",
"emotion-detection", "emotion-detection",
"gender-prediction", "gender-prediction",
"gesture-recognition" "gesture-recognition",
"blazeface",
"blazepose"
] ]
} }

104
src/blazepose/blazepose.ts Normal file
View File

@ -0,0 +1,104 @@
import { log } from '../log';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';
let model;
const labels = [
'nose',
'leftEyeInside',
'leftEye',
'leftEyeOutside',
'rightEyeInside',
'rightEye',
'rightEyeOutside',
'leftEar',
'rightEar',
'leftMouth',
'rightMouth',
'leftShoulder',
'rightShoulder',
'leftElbow',
'rightElbow',
'leftWrist',
'rightWrist',
'leftPalm',
'rightPalm',
'leftIndex',
'rightIndex',
'leftPinky',
'rightPinky',
'leftHip',
'rightHip',
'leftKnee',
'rightKnee',
'leftAnkle',
'rightAnkle',
'leftHeel',
'rightHeel',
'leftFoot',
'rightFoot',
'midHip',
'forehead',
'leftThumb',
'leftHand',
'rightThumb',
'rightHand',
];
export async function load(config) {
if (!model) {
model = await tf.loadGraphModel(config.body.modelPath);
model.width = parseInt(model.signature.inputs['input_1:0'].tensorShape.dim[2].size);
model.height = parseInt(model.signature.inputs['input_1:0'].tensorShape.dim[1].size);
if (config.debug) log(`load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);
}
return model;
}
export async function predict(image, config) {
if (!model) return null;
if (!config.body.enabled) return null;
const imgSize = { width: image.shape[2], height: image.shape[1] };
const resize = tf.image.resizeBilinear(image, [model.width || config.body.inputSize, model.height || config.body.inputSize], false);
const normalize = tf.div(resize, [255.0]);
resize.dispose();
// let segmentation; // not used right now since we have keypoints and don't need to go through matrix using strides
// let poseflag; // irrelevant
let points;
if (!config.profile) {
const resT = await model.predict(normalize);
// segmentation = resT[0].dataSync();
// poseflag = resT[1].dataSync();
points = resT.find((t) => (t.size === 195 || t.size === 155)).dataSync(); // keypoint tensor: 195 values for the full-body model (39 points) or 155 for the upper-body model (31 points), 5 values per point
resT.forEach((t) => t.dispose());
} else {
const profileData = await tf.profile(() => model.predict(normalize));
// segmentation = profileData.result[0].dataSync();
// poseflag = profileData.result[1].dataSync();
points = profileData.result.find((t) => (t.size === 195 || t.size === 155)).dataSync(); // same keypoint tensor as above: 195 values is 39 points with 5 properties, 155 is the upper-body variant with 31 points
profileData.result.forEach((t) => t.dispose());
profile.run('blazepose', profileData);
}
normalize.dispose();
const keypoints: Array<{ id, part, position: { x, y, z }, score, presence }> = [];
for (let i = 0; i < points.length / 5; i++) {
keypoints.push({
id: i,
part: labels[i],
position: {
x: Math.trunc(imgSize.width * points[5 * i + 0] / 255),
y: Math.trunc(imgSize.height * points[5 * i + 1] / 255),
z: Math.trunc(points[5 * i + 2]) + 0, // fix negative zero
},
score: (100 - Math.trunc(100 / (1 + Math.exp(points[5 * i + 3])))) / 100, // reverse sigmoid value
presence: (100 - Math.trunc(100 / (1 + Math.exp(points[5 * i + 4])))) / 100, // reverse sigmoid value
});
}
// console.log('POINTS', imgSize, pts.length, pts);
return [{ keypoints }];
}
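The `score` and `presence` expressions in `predict()` above are worth unpacking: since 1 / (1 + e^x) = 1 - sigmoid(x), each expression equals the sigmoid of the raw value, rounded up to the next hundredth. A minimal check, not part of the commit:

```ts
// Verifies that 100 - trunc(100 / (1 + e^x)) equals ceil(100 * sigmoid(x)).
function sigmoid(x: number): number {
  return 1 / (1 + Math.exp(-x));
}
const x = 2.0;
console.log((100 - Math.trunc(100 / (1 + Math.exp(x)))) / 100); // 0.89
console.log(Math.ceil(100 * sigmoid(x)) / 100);                 // 0.89
```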
/*
Model card: https://drive.google.com/file/d/10IU-DRP2ioSNjKFdiGbmmQX81xAYj88s/view
Download: https://github.com/PINTO0309/PINTO_model_zoo/tree/main/058_BlazePose_Full_Keypoints
*/
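A hedged sketch of driving the new module directly under NodeJS; the `models/blazepose.json` path, the `inputSize` value, and the import paths are assumptions for illustration, not defaults confirmed by this commit:

```ts
import { readFileSync } from 'fs';
import * as tf from '@tensorflow/tfjs-node';
import * as blazepose from './src/blazepose/blazepose'; // assumes a TypeScript-aware runner such as ts-node

async function main() {
  // modelPath and inputSize are assumed example values
  const config = { debug: true, profile: false, body: { enabled: true, modelPath: 'file://models/blazepose.json', inputSize: 256 } };
  await blazepose.load(config);
  const tensor = tf.node.decodeImage(readFileSync('sample.jpg')).expandDims(0); // shape [1, height, width, 3]
  const result = await blazepose.predict(tensor, config);
  tensor.dispose();
  // each keypoint carries id, part name, x/y in original image pixels, z, score and presence
  for (const kpt of result?.[0].keypoints ?? []) console.log(kpt.id, kpt.part, kpt.position, kpt.score, kpt.presence);
}

main();
```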

View File

@ -9,6 +9,7 @@ import * as emotion from './emotion/emotion';
 import * as embedding from './embedding/embedding';
 import * as posenet from './posenet/posenet';
 import * as handpose from './handpose/handpose';
+import * as blazepose from './blazepose/blazepose';
 import * as gesture from './gesture/gesture';
 import * as image from './image';
 import * as profile from './profile';
@ -49,6 +50,7 @@ class Human {
   checkSanity: boolean;
   firstRun: boolean;
   perf: any;
+  image: any;
   models: any;
   // models
   facemesh: any;
@ -74,18 +76,21 @@
   this.models = {
     facemesh: null,
     posenet: null,
+    blazepose: null,
     handpose: null,
     iris: null,
     age: null,
     gender: null,
     emotion: null,
   };
+  // export access to image processing
+  this.image = (input) => image.process(input, this.config);
   // export raw access to underlying models
   this.facemesh = facemesh;
   this.age = age;
   this.gender = gender;
   this.emotion = emotion;
-  this.body = posenet;
+  this.body = this.config.body.modelType.startsWith('posenet') ? posenet : blazepose;
   this.hand = handpose;
 }
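The newly exported `image` accessor above makes the preprocessing pipeline callable on its own, replacing the commented-out `processImage` method removed further down. A hedged usage sketch; the `{ tensor, canvas }` result shape is inferred from how `image.process` results are consumed elsewhere in this diff:

```ts
import Human from '@vladmandic/human'; // default export assumed for this sketch

const human = new Human();
// runs image.process(input, human.config) without any model inference
const processed = human.image(document.getElementById('video'));
console.log(processed?.canvas?.width, processed?.canvas?.height); // inspect the preprocessed frame
processed?.tensor?.dispose(); // caller is responsible for releasing the tensor
```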
@ -146,16 +151,18 @@ class Human {
   this.models.gender,
   this.models.emotion,
   this.models.embedding,
-  this.models.posenet,
   this.models.handpose,
+  this.models.posenet,
+  this.models.blazepose,
 ] = await Promise.all([
   this.models.face || (this.config.face.enabled ? face.load(this.config) : null),
   this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
   this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
   this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
   this.models.embedding || ((this.config.face.enabled && this.config.face.embedding.enabled) ? embedding.load(this.config) : null),
-  this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
   this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
+  this.models.posenet || (this.config.body.enabled && this.config.body.modelType.startsWith('posenet') ? posenet.load(this.config) : null),
+  this.models.blazepose || (this.config.body.enabled && this.config.body.modelType.startsWith('blazepose') ? blazepose.load(this.config) : null),
 ]);
} else {
@ -163,8 +170,9 @@ class Human {
   if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
   if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
   if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding) this.models.embedding = await embedding.load(this.config);
-  if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);
   if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
+  if (this.config.body.enabled && !this.models.posenet && this.config.body.modelType.startsWith('posenet')) this.models.posenet = await posenet.load(this.config);
+  if (this.config.body.enabled && !this.models.blazepose && this.config.body.modelType.startsWith('blazepose')) this.models.blazepose = await blazepose.load(this.config);
 }
 if (this.firstRun) {
@ -346,16 +354,6 @@ class Human {
   return faceRes;
 }
-/*
-async processImage(input, userConfig = {}) {
-  this.state = 'image';
-  this.config = mergeDeep(this.config, userConfig);
-  const process = image.process(input, this.config);
-  process?.tensor?.dispose();
-  return process?.canvas;
-}
-*/
 // main detect function
 async detect(input, userConfig = {}) {
   // detection happens inside a promise
@ -374,7 +372,7 @@ class Human {
   resolve({ error });
 }
-let poseRes;
+let bodyRes;
 let handRes;
 let faceRes;
@ -410,15 +408,17 @@ class Human {
   this.perf.face = Math.trunc(now() - timeStamp);
 }
-// run posenet
+// run body: can be posenet or blazepose
 this.analyze('Start Body:');
 if (this.config.async) {
-  poseRes = this.config.body.enabled ? this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
+  if (this.config.body.modelType.startsWith('posenet')) bodyRes = this.config.body.enabled ? this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
+  else bodyRes = this.config.body.enabled ? blazepose.predict(process.tensor, this.config) : [];
   if (this.perf.body) delete this.perf.body;
 } else {
   this.state = 'run:body';
   timeStamp = now();
-  poseRes = this.config.body.enabled ? await this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
+  if (this.config.body.modelType.startsWith('posenet')) bodyRes = this.config.body.enabled ? await this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
+  else bodyRes = this.config.body.enabled ? await blazepose.predict(process.tensor, this.config) : [];
   this.perf.body = Math.trunc(now() - timeStamp);
 }
 this.analyze('End Body:');
@ -438,7 +438,7 @@ class Human {
 // if async wait for results
 if (this.config.async) {
-  [faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);
+  [faceRes, bodyRes, handRes] = await Promise.all([faceRes, bodyRes, handRes]);
 }
 process.tensor.dispose();
@ -449,14 +449,14 @@ class Human {
 if (this.config.gesture.enabled) {
   timeStamp = now();
   // @ts-ignore
-  gestureRes = [...gesture.face(faceRes), ...gesture.body(poseRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
+  gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
   if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
   else if (this.perf.gesture) delete this.perf.gesture;
 }
 this.perf.total = Math.trunc(now() - timeStart);
 this.state = 'idle';
-resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });
+resolve({ face: faceRes, body: bodyRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });
 });
 }
@ -487,21 +487,24 @@ class Human {
     src = 'data:image/jpeg;base64,' + sample.face;
     break;
   case 'full':
+  case 'body':
     size = 1200;
     src = 'data:image/jpeg;base64,' + sample.body;
     break;
   default:
     src = null;
 }
-const img = new Image(size, size);
-img.onload = () => {
+// src = encodeURI('../assets/human-sample-face.jpg');
+const img = new Image();
+img.onload = async () => {
   const canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(size, size) : document.createElement('canvas');
-  canvas.width = size;
-  canvas.height = size;
+  canvas.width = img.naturalWidth;
+  canvas.height = img.naturalHeight;
   const ctx = canvas.getContext('2d');
   ctx?.drawImage(img, 0, 0);
-  const data = ctx?.getImageData(0, 0, size, size);
-  this.detect(data, this.config).then((res) => resolve(res));
+  // const data = ctx?.getImageData(0, 0, canvas.height, canvas.width);
+  const res = await this.detect(canvas, this.config);
+  resolve(res);
 };
 if (src) img.src = src;
 else resolve(null);
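With body detection now routed on the `modelType` prefix, switching from PoseNet to BlazePose is a configuration change passed to `detect`. A hedged sketch; only the `'posenet'` / `'blazepose'` prefixes are confirmed by this diff, so the `'blazepose-full'` value and `models/blazepose.json` path are assumptions:

```ts
import Human from '@vladmandic/human'; // default export assumed for this sketch

const human = new Human();

async function run(input: HTMLVideoElement) {
  // any modelType starting with 'blazepose' routes body detection to the new module
  const result = await human.detect(input, {
    body: { enabled: true, modelType: 'blazepose-full', modelPath: 'models/blazepose.json' },
  });
  // BlazePose returns a single pose with 39 labeled keypoints, each with x/y/z, score and presence
  for (const kpt of result.body?.[0]?.keypoints ?? []) console.log(kpt.part, kpt.position);
}
```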

View File

@ -32,7 +32,7 @@ const config = {
   iris: { modelPath: 'file://models/iris.json' },
   age: { modelPath: 'file://models/age-ssrnet-imdb.json' },
   gender: { modelPath: 'file://models/gender.json' },
-  emotion: { modelPath: 'file://models/emotion-large.json' },
+  emotion: { modelPath: 'file://models/emotion.json' },
   embedding: { modelPath: 'file://models/mobilefacenet.json' },
 },
 body: { modelPath: 'file://models/posenet.json' },

View File

@ -3,6 +3,7 @@ import * as decodePose from './decodePose';
 import * as vectors from './vectors';
 const kLocalMaximumRadius = 1;
+const defaultOutputStride = 16;
 function withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, { x, y }, keypointId) {
   return poses.some(({ keypoints }) => {
@ -28,10 +29,10 @@ export function decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFw
 // The top element in the queue is the next root candidate.
 const root = queue.dequeue();
 // Part-based non-maximum suppression: We reject a root candidate if it is within a disk of `nmsRadius` pixels from the corresponding part of a previously detected instance.
-const rootImageCoords = vectors.getImageCoords(root.part, config.body.outputStride, offsetsBuffer);
+const rootImageCoords = vectors.getImageCoords(root.part, defaultOutputStride, offsetsBuffer);
 if (withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, rootImageCoords, root.part.id)) continue;
 // Else start a new detection instance at the position of the root.
-const keypoints = decodePose.decodePose(root, scoresBuffer, offsetsBuffer, config.body.outputStride, displacementsFwdBuffer, displacementsBwdBuffer);
+const keypoints = decodePose.decodePose(root, scoresBuffer, offsetsBuffer, defaultOutputStride, displacementsFwdBuffer, displacementsBwdBuffer);
 const score = getInstanceScore(poses, squaredNmsRadius, keypoints);
 if (score > config.body.scoreThreshold) poses.push({ keypoints, score });
 }
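The change above hard-codes PoseNet's output stride at 16 instead of reading it from user configuration. The stride is what maps heatmap grid cells back to input-image pixels; a sketch of the mapping that `getImageCoords` in `vectors.ts` is assumed to perform:

```ts
const defaultOutputStride = 16;

// a 257x257 input with stride 16 yields a 17x17 heatmap; each heatmap cell covers
// a 16-pixel square, and the offsets tensor refines the coarse position within it
function toImageCoords(heatmapY: number, heatmapX: number, offsetY: number, offsetX: number) {
  return {
    y: heatmapY * defaultOutputStride + offsetY,
    x: heatmapX * defaultOutputStride + offsetX,
  };
}

console.log(toImageCoords(5, 3, 7.2, -4.5)); // { y: 87.2, x: 43.5 }
```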

View File

@ -5,6 +5,9 @@ import * as decoders from './decoders';
 const parentChildrenTuples = keypoints.poseChain.map(([parentJoinName, childJoinName]) => ([keypoints.partIds[parentJoinName], keypoints.partIds[childJoinName]]));
 const parentToChildEdges = parentChildrenTuples.map(([, childJointId]) => childJointId);
 const childToParentEdges = parentChildrenTuples.map(([parentJointId]) => parentJointId);
+
+const defaultOutputStride = 16;
+
 function getDisplacement(edgeId, point, displacements) {
   const numEdges = displacements.shape[2] / 2;
   return {
@ -12,6 +15,7 @@ function getDisplacement(edgeId, point, displacements) {
     x: displacements.get(point.y, point.x, numEdges + edgeId),
   };
 }
+
 function getStridedIndexNearPoint(point, outputStride, height, width) {
   return {
     y: vectors.clamp(Math.round(point.y / outputStride), 0, height - 1),
@ -77,7 +81,7 @@ export async function decodeSinglePose(heatmapScores, offsets, config) {
 const scoresBuffer = allTensorBuffers[0];
 const offsetsBuffer = allTensorBuffers[1];
 const heatmapValuesBuffer = allTensorBuffers[2];
-const offsetPoints = decoders.getOffsetPoints(heatmapValuesBuffer, config.body.outputStride, offsetsBuffer);
+const offsetPoints = decoders.getOffsetPoints(heatmapValuesBuffer, defaultOutputStride, offsetsBuffer);
 const offsetPointsBuffer = await offsetPoints.buffer();
 const keypointConfidence = Array.from(decoders.getPointsConfidence(scoresBuffer, heatmapValuesBuffer));
 const instanceKeypoints = keypointConfidence.map((score, i) => {

View File

@ -4,7 +4,7 @@ export const partNames = [
   'leftHip', 'rightHip', 'leftKnee', 'rightKnee', 'leftAnkle', 'rightAnkle',
 ];
-export const NUM_KEYPOINTS = exports.partNames.length;
+export const NUM_KEYPOINTS = exports.partNames.length; // 17 keypoints
 export const partIds = exports.partNames.reduce((result, jointName, i) => {
   result[jointName] = i;
View File

@ -20,11 +20,11 @@ export class BaseModel {
 predict(input, config) {
   return tf.tidy(() => {
-    const asFloat = (config.body.modelType === 'ResNet') ? input.toFloat().add(imageNetMean) : input.toFloat().div(127.5).sub(1.0);
+    const asFloat = (config.body.modelType === 'posenet-resnet') ? input.toFloat().add(imageNetMean) : input.toFloat().div(127.5).sub(1.0);
     const asBatch = asFloat.expandDims(0);
     const results = this.model.predict(asBatch);
     const results3d = results.map((y) => y.squeeze([0]));
-    const namedResults = (config.body.modelType === 'ResNet') ? nameOutputResultsResNet(results3d) : nameOutputResultsMobileNet(results3d);
+    const namedResults = (config.body.modelType === 'posenet-resnet') ? nameOutputResultsResNet(results3d) : nameOutputResultsMobileNet(results3d);
     return {
       heatmapScores: namedResults.heatmap.sigmoid(),
       offsets: namedResults.offsets,
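The rename from `'ResNet'` to `'posenet-resnet'` frees the `modelType` namespace for the blazepose variants while keeping both preprocessing schemes intact. A sketch of the two paths; the `imageNetMean` values are assumed from the standard PoseNet implementation and are not shown in this hunk:

```ts
import * as tf from '@tensorflow/tfjs';

const imageNetMean = [-123.15, -115.9, -103.06]; // assumed per-channel means, defined elsewhere in modelBase.ts

function normalize(input: tf.Tensor3D, modelType: string): tf.Tensor {
  return modelType === 'posenet-resnet'
    ? input.toFloat().add(imageNetMean)    // ResNet: subtract ImageNet channel means
    : input.toFloat().div(127.5).sub(1.0); // MobileNet: map [0, 255] to [-1, 1]
}
```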

14
types/blazepose/blazepose.d.ts vendored Normal file
View File

@ -0,0 +1,14 @@
export declare function load(config: any): Promise<any>;
export declare function predict(image: any, config: any): Promise<{
keypoints: {
id: any;
part: any;
position: {
x;
y;
z;
};
score: any;
presence: any;
}[];
}[] | null>;

1
types/human.d.ts vendored
View File

@ -10,6 +10,7 @@ declare class Human {
   checkSanity: boolean;
   firstRun: boolean;
   perf: any;
+  image: any;
   models: any;
   facemesh: any;
   age: any;

2
wiki

@ -1 +1 @@
-Subproject commit 55e854ea9263ca0eae7ffbb7d60b87e1ca3a7065
+Subproject commit c60f442714b1b5887ae25feb35fa413bc9996402