diff --git a/README.md b/README.md
index 28ccf3c7..7c23cce3 100644
--- a/README.md
+++ b/README.md
@@ -1,20 +1,23 @@
# Human Library
-### 3D Face Detection, Face Embedding & Recognition,
-### Body Pose Tracking, Hand & Finger Tracking,
-### Iris Analysis, Age & Gender & Emotion Prediction
-### & Gesture Recognition
+**3D Face Detection, Face Embedding & Recognition,**
+**Body Pose Tracking, Hand & Finger Tracking,**
+**Iris Analysis, Age & Gender & Emotion Prediction**
+**& Gesture Recognition**
Native JavaScript module using TensorFlow/JS Machine Learning library
-Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows and Linux
+Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows and Linux
+
- Browser/WebWorker: Compatible with *CPU*, *WebGL*, *WASM* and *WebGPU* backends
- NodeJS: Compatible with software *tfjs-node* and CUDA accelerated backends *tfjs-node-gpu*
+Check out the [**Live Demo**](https://vladmandic.github.io/human/demo/index.html) for processing of live WebCam video or static images
+
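+For example, a minimal sketch of selecting a backend via configuration (the `wasm` value mirrors the demo configuration in this change; `inputElement` is a hypothetical placeholder for any supported input):
+
+```js
+import Human from '@vladmandic/human';
+
+// request the WASM backend; other values follow the list above
+const human = new Human({ backend: 'wasm' });
+const result = await human.detect(inputElement); // inputElement: an image, video or canvas element
+```
+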
-### Project pages:
+## Project pages
- [**Live Demo**](https://vladmandic.github.io/human/demo/index.html)
- [**Code Repository**](https://github.com/vladmandic/human)
@@ -24,7 +27,7 @@ Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows an
-### Wiki pages:
+## Wiki pages
- [**Home**](https://github.com/vladmandic/human/wiki)
- [**Demos**](https://github.com/vladmandic/human/wiki/Demos)
@@ -37,7 +40,7 @@ Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows an
-### Additional notes:
+## Additional notes
- [**Notes on Backends**](https://github.com/vladmandic/human/wiki/Backends)
- [**Development Server**](https://github.com/vladmandic/human/wiki/Development-Server)
@@ -49,13 +52,35 @@ Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows an
-*See [issues](https://github.com/vladmandic/human/issues?q=) and [discussions](https://github.com/vladmandic/human/discussions) for list of known limitations and planned enhancements*
+## Default models
+
+Default models in the Human library are:
+
+- **Face Detection**: MediaPipe BlazeFace-Back
+- **Face Mesh**: MediaPipe FaceMesh
+- **Face Iris Analysis**: MediaPipe Iris
+- **Emotion Detection**: Oarriaga Emotion
+- **Gender Detection**: Oarriaga Gender
+- **Age Detection**: SSR-Net Age IMDB
+- **Body Analysis**: PoseNet
+- **Face Embedding**: Sirius-AI MobileFaceNet Embedding
+
+Note that alternative models are provided and can be enabled via configuration.
+For example, the `PoseNet` model can be switched for the `BlazePose` model depending on the use case, as sketched below.
+
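+A minimal sketch of that switch, assuming the values used elsewhere in this change (`modelType` and `modelPath` from demo/browser.js, `inputSize` from config.js); `inputImage` is a hypothetical placeholder:
+
+```js
+import Human from '@vladmandic/human';
+
+// switch body analysis from the default PoseNet to BlazePose
+const human = new Human({
+  body: { enabled: true, modelType: 'blazepose', modelPath: '../models/blazepose.json', inputSize: 256 },
+});
+const result = await human.detect(inputImage); // result.body holds the detected pose
+```
+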
+For more info, see [**Configuration Details**](https://github.com/vladmandic/human/wiki/Configuration) and [**List of Models**](https://github.com/vladmandic/human/wiki/Models)
+
+
+
+*See [**issues**](https://github.com/vladmandic/human/issues?q=) and [**discussions**](https://github.com/vladmandic/human/discussions) for a list of known limitations and planned enhancements*
*Suggestions are welcome!*
-## Options ##
+## Options
+
+As presented in the demo application...

@@ -65,12 +90,17 @@ Compatible with *Browser*, *WebWorker* and *NodeJS* execution on both Windows an
+**Training image:**
+
+
+
**Using static images:**
-
+
+
+**Live WebCam view:**
+
+
+
-
-**Using webcam:**
-
-
diff --git a/TODO.md b/TODO.md
index 0fd667cc..7197fbe0 100644
--- a/TODO.md
+++ b/TODO.md
@@ -1,7 +1,5 @@
# To-Do list for Human library
-- Explore BlazePose model
-- Explore MediaPipe TFLite models
- Fix BlazeFace NodeJS missing ops
- Prune pre-packaged models
- Build Face embedding database
diff --git a/assets/screenshot-menu.png b/assets/screenshot-menu.png
index 495c7521..53b03f22 100644
Binary files a/assets/screenshot-menu.png and b/assets/screenshot-menu.png differ
diff --git a/config.js b/config.js
index 8d8be31e..dd44ff2d 100644
--- a/config.js
+++ b/config.js
@@ -105,8 +105,7 @@ export default {
age: {
enabled: true,
- modelPath: '../models/age-ssrnet-imdb.json', // can be 'age-ssrnet-imdb' or 'age-ssrnet-wiki'
- // which determines training set for model
+ modelPath: '../models/age-ssrnet-imdb.json',
inputSize: 64, // fixed value
skipFrames: 31, // how many frames to go without re-running the detector
// only used for video inputs
@@ -115,7 +114,7 @@ export default {
gender: {
enabled: true,
minConfidence: 0.1, // threshold for discarding a prediction
- modelPath: '../models/gender.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'
+ modelPath: '../models/gender.json', // can be 'gender' or 'gender-ssrnet-imdb'
inputSize: 64, // fixed value
skipFrames: 41, // how many frames to go without re-running the detector
// only used for video inputs
@@ -126,7 +125,7 @@ export default {
inputSize: 64, // fixed value
minConfidence: 0.1, // threshold for discarding a prediction
skipFrames: 21, // how many frames to go without re-running the detector
- modelPath: '../models/emotion-large.json', // can be 'mini', 'large'
+ modelPath: '../models/emotion.json',
},
embedding: {
@@ -138,16 +137,17 @@ export default {
body: {
enabled: true,
- modelPath: '../models/posenet.json',
- inputSize: 257, // fixed value
+ modelPath: '../models/posenet.json', // can be 'posenet' or 'blazepose'
+ inputSize: 257, // fixed value, 257 for posenet and 256 for blazepose
maxDetections: 10, // maximum number of people detected in the input
// should be set to the minimum number for performance
+ // only valid for posenet, as blazepose only detects a single pose
scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score
// in non-maximum suppression
+ // only valid for posenet, as blazepose only detects a single pose
nmsRadius: 20, // radius for deciding points are too close in non-maximum suppression
- outputStride: 16, // size of block in which to run point detection, smaller value means higher resolution
- // defined by model itself, can be 8, 16, or 32
- modelType: 'MobileNet', // Human includes MobileNet version, but you can switch to ResNet
+ // only valid for posenet, as blazepose only detects a single pose
+ modelType: 'posenet-mobilenet', // can be 'posenet-mobilenet', 'posenet-resnet', 'blazepose'
},
hand: {
diff --git a/demo/browser.js b/demo/browser.js
index 350e6848..8b8fbcf9 100644
--- a/demo/browser.js
+++ b/demo/browser.js
@@ -14,10 +14,12 @@ const userConfig = {}; // add any user configuration overrides
const userConfig = {
backend: 'wasm',
async: false,
+ warmup: 'full',
videoOptimized: false,
- face: { enabled: true, iris: { enabled: false }, mesh: { enabled: true }, age: { enabled: true }, gender: { enabled: true }, emotion: { enabled: true }, embedding: { enabled: true } },
- body: { enabled: false },
- hand: { enabled: false },
+ face: { enabled: true, iris: { enabled: true }, mesh: { enabled: true }, age: { enabled: true }, gender: { enabled: true }, emotion: { enabled: true }, embedding: { enabled: true } },
+ hand: { enabled: true },
+ gesture: { enabled: true },
+ body: { enabled: true, modelType: 'blazepose', modelPath: '../models/blazepose.json' },
};
*/
@@ -31,15 +33,16 @@ const ui = {
baseFontProto: 'small-caps {size} "Segoe UI"',
baseLineWidth: 12,
crop: true,
- columns: 4,
+ columns: 2,
busy: false,
facing: true,
useWorker: false,
worker: 'worker.js',
samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'],
compare: '../assets/sample-me.jpg',
+ drawLabels: true,
drawBoxes: true,
- drawPoints: false,
+ drawPoints: true,
drawPolygons: true,
fillPolygons: false,
useDepth: true,
@@ -52,6 +55,7 @@ const ui = {
detectFPS: [],
drawFPS: [],
buffered: false,
+ drawWarmup: false,
drawThread: null,
detectThread: null,
framesDraw: 0,
@@ -120,7 +124,7 @@ async function drawResults(input) {
await menu.process.updateChart('FPS', ui.detectFPS);
// get updated canvas
- if (ui.buffered || !result.canvas) result.canvas = await human.image(input, userConfig);
+ if (ui.buffered || !result.canvas) result.canvas = await human.image(input).canvas;
// draw image from video
const ctx = canvas.getContext('2d');
@@ -436,6 +440,7 @@ function setupMenu() {
});
menu.display.addHTML('');
menu.display.addBool('use 3D depth', ui, 'useDepth');
+ menu.display.addBool('print labels', ui, 'drawLabels');
menu.display.addBool('draw boxes', ui, 'drawBoxes');
menu.display.addBool('draw polygons', ui, 'drawPolygons');
menu.display.addBool('Fill Polygons', ui, 'fillPolygons');
@@ -530,6 +535,18 @@ function setupMenu() {
document.getElementById('play').addEventListener('click', () => detectVideo());
}
+async function drawWarmup(res) {
+ const canvas = document.getElementById('canvas');
+ canvas.width = res.canvas.width;
+ canvas.height = res.canvas.height;
+ const ctx = canvas.getContext('2d');
+ ctx.drawImage(res.canvas, 0, 0, res.canvas.width, res.canvas.height, 0, 0, canvas.width, canvas.height);
+ await draw.face(res.face, canvas, ui, human.facemesh.triangulation);
+ await draw.body(res.body, canvas, ui);
+ await draw.hand(res.hand, canvas, ui);
+ await draw.gesture(res.gesture, canvas, ui);
+}
+
async function main() {
log('Demo starting ...');
log('Browser:', navigator?.userAgent);
@@ -543,7 +560,9 @@ async function main() {
}
if (!ui.useWorker) {
status('initializing');
- await human.warmup(userConfig); // this is not required, just pre-warms all models for faster initial inference
+ const res = await human.warmup(userConfig); // this is not required, just pre-warms all models for faster initial inference
+ ui.baseFont = ui.baseFontProto.replace(/{size}/, '16px');
+ if (res && res.canvas && ui.drawWarmup) await drawWarmup(res);
}
status('human: ready');
document.getElementById('loader').style.display = 'none';
diff --git a/demo/node.js b/demo/node.js
index 07d75e3a..29442994 100644
--- a/demo/node.js
+++ b/demo/node.js
@@ -14,16 +14,19 @@ const myConfig = {
videoOptimized: false,
async: false,
face: {
- // detector: { modelPath: 'file://models/faceboxes.json' },
- detector: { modelPath: 'file://models/blazeface-back.json' }, // cannot use blazeface in nodejs due to missing required kernel function in tfjs-node
- mesh: { modelPath: 'file://models/facemesh.json' },
- iris: { modelPath: 'file://models/iris.json' },
- age: { modelPath: 'file://models/age-ssrnet-imdb.json' },
- gender: { modelPath: 'file://models/gender.json' },
- emotion: { modelPath: 'file://models/emotion-large.json' },
+ enabled: true,
+ detector: { modelPath: 'file://models/faceboxes.json', enabled: true, minConfidence: 0.5 },
+ // detector: { modelPath: 'file://models/blazeface-back.json', enabled: false }, // cannot use blazeface in nodejs due to missing required kernel function in tfjs-node
+ mesh: { modelPath: 'file://models/facemesh.json', enabled: false }, // depends on blazeface detector
+ iris: { modelPath: 'file://models/iris.json', enabled: true },
+ age: { modelPath: 'file://models/age-ssrnet-imdb.json', enabled: true },
+ gender: { modelPath: 'file://models/gender.json', enabled: true },
+ emotion: { modelPath: 'file://models/emotion.json', enabled: true },
},
- body: { modelPath: 'file://models/posenet.json' },
+ // body: { modelPath: 'file://models/blazepose.json', modelType: 'blazepose', inputSize: 256, enabled: true },
+ body: { modelPath: 'file://models/posenet.json', modelType: 'posenet', inputSize: 257, enabled: true },
hand: {
+ enabled: true,
detector: { modelPath: 'file://models/handdetect.json' },
skeleton: { modelPath: 'file://models/handskeleton.json' },
},
@@ -35,7 +38,14 @@ async function init() {
// create instance of human
human = new Human(myConfig);
// pre-load models
+ log.info('Human:', human.version);
+ log.info('Active Configuration', human.config);
+ log.info('TFJS Version:', human.tf.version_core, 'Backend:', tf.getBackend());
+ log.info('TFJS Flags:', human.tf.env().features);
await human.load();
+ const loaded = Object.keys(human.models).filter((a) => human.models[a]);
+ log.info('Loaded:', loaded);
+ log.info('Memory state:', human.tf.engine().memory());
}
async function detect(input) {
@@ -74,6 +84,7 @@ async function test() {
async function main() {
log.info('NodeJS:', process.version);
+ log.info('Current folder:', process.env.PWD);
await init();
if (process.argv.length !== 3) {
log.warn('Parameters: missing');
diff --git a/models/emotion-mini.bin b/models/emotion-mini.bin
deleted file mode 100644
index 6a7612c3..00000000
Binary files a/models/emotion-mini.bin and /dev/null differ
diff --git a/models/emotion-mini.json b/models/emotion-mini.json
deleted file mode 100644
index 70d3bbe1..00000000
--- a/models/emotion-mini.json
+++ /dev/null
@@ -1,105 +0,0 @@
-{
- "format": "graph-model",
- "generatedBy": "2.3.1",
- "convertedBy": "TensorFlow.js Converter v2.4.0",
- "userDefinedMetadata":
- {
- "signature":
- {
- "inputs": {"input_1:0":{"name":"input_1:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"64"},{"size":"64"},{"size":"1"}]}}},
- "outputs": {"Identity:0":{"name":"Identity:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"7"}]}}}
- }
- },
- "modelTopology":
- {
- "node":
- [
- {"name":"unknown_60","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"64"},{"size":"1"}]}}}}},
- {"name":"unknown_66","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"128"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"unknown_43","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"unknown_49","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"64"},{"size":"1"}]}}}}},
- {"name":"unknown_26","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"16"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"unknown_32","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"unknown_9","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"8"},{"size":"1"}]}}}}},
- {"name":"unknown_15","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"16"},{"size":"1"}]}}}}},
- {"name":"unknown_77","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"128"},{"size":"7"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"unknown_78","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"7"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}}}},
- {"name":"input_1","op":"Placeholder","attr":{"dtype":{"type":"DT_FLOAT"},"shape":{"shape":{"dim":[{"size":"-1"},{"size":"64"},{"size":"64"},{"size":"1"}]}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"1"},{"size":"8"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"128"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"8"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"8"},{"size":"8"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"8"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"8"},{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"128"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"8"},{"size":"16"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"16"},{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"16"},{"size":"32"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"16"},{"size":"32"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"128"},{"size":"128"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"64"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}}}},
- {"name":"StatefulPartitionedCall/model_1/activation_1/Relu","op":"_FusedConv2D","input":["input_1","StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0}}},
- {"name":"StatefulPartitionedCall/model_1/activation_2/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/activation_1/Relu","StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"explicit_paddings":{"list":{}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_2/Relu","unknown_9"],"attr":{"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="}}},
- {"name":"StatefulPartitionedCall/model_1/batch_normalization_3/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/activation_2/Relu","StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"strides":{"list":{"i":["1","2","2","1"]}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"num_args":{"i":"1"},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}}}},
- {"name":"StatefulPartitionedCall/model_1/activation_3/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"VkFMSUQ="},"strides":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"explicit_paddings":{"list":{}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_3/Relu","unknown_15"],"attr":{"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}}}},
- {"name":"StatefulPartitionedCall/model_1/batch_normalization_5/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"padding":{"s":"VkFMSUQ="},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}}}},
- {"name":"StatefulPartitionedCall/model_1/max_pooling2d_1/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_5/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","2","2","1"]}},"data_format":{"s":"TkhXQw=="},"ksize":{"list":{"i":["1","3","3","1"]}},"padding":{"s":"U0FNRQ=="}}},
- {"name":"StatefulPartitionedCall/model_1/add_1/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_1/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_3/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/add_1/add","unknown_26"],"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}}}},
- {"name":"StatefulPartitionedCall/model_1/batch_normalization_6/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_1/add","StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","2","2","1"]}},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true}}},
- {"name":"StatefulPartitionedCall/model_1/activation_4/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"epsilon":{"f":0},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="},"strides":{"list":{"i":["1","1","1","1"]}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_4/Relu","unknown_32"],"attr":{"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"padding":{"s":"U0FNRQ=="}}},
- {"name":"StatefulPartitionedCall/model_1/batch_normalization_8/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"padding":{"s":"VkFMSUQ="}}},
- {"name":"StatefulPartitionedCall/model_1/max_pooling2d_2/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_8/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","2","2","1"]}},"ksize":{"list":{"i":["1","3","3","1"]}}}},
- {"name":"StatefulPartitionedCall/model_1/add_2/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_2/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_6/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/add_2/add","unknown_43"],"attr":{"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="}}},
- {"name":"StatefulPartitionedCall/model_1/batch_normalization_9/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_2/add","StatefulPartitionedCall/model_1/conv2d_5/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_5/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"U0FNRQ=="},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"T":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/activation_5/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"epsilon":{"f":0},"num_args":{"i":"1"},"padding":{"s":"VkFMSUQ="},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"explicit_paddings":{"list":{}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_5/Relu","unknown_49"],"attr":{"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}}}},
- {"name":"StatefulPartitionedCall/model_1/batch_normalization_11/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"padding":{"s":"VkFMSUQ="}}},
- {"name":"StatefulPartitionedCall/model_1/max_pooling2d_3/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_11/FusedBatchNormV3"],"attr":{"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","2","2","1"]}},"data_format":{"s":"TkhXQw=="},"ksize":{"list":{"i":["1","3","3","1"]}}}},
- {"name":"StatefulPartitionedCall/model_1/add_3/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_3/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_9/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/batch_normalization_12/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_3/add","StatefulPartitionedCall/model_1/conv2d_6/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_6/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"strides":{"list":{"i":["1","2","2","1"]}}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/add_3/add","unknown_60"],"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="}}},
- {"name":"StatefulPartitionedCall/model_1/activation_6/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"epsilon":{"f":0},"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="},"use_cudnn_on_gpu":{"b":true}}},
- {"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_6/Relu","unknown_66"],"attr":{"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/batch_normalization_14/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"VkFMSUQ="},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="}}},
- {"name":"StatefulPartitionedCall/model_1/max_pooling2d_4/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_14/FusedBatchNormV3"],"attr":{"data_format":{"s":"TkhXQw=="},"ksize":{"list":{"i":["1","3","3","1"]}},"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/add_4/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_4/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_12/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/conv2d_7/BiasAdd","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_4/add","unknown_77","unknown_78"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean","op":"Mean","input":["StatefulPartitionedCall/model_1/conv2d_7/BiasAdd","StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices"],"attr":{"keep_dims":{"b":false},"Tidx":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}},
- {"name":"StatefulPartitionedCall/model_1/predictions/Softmax","op":"Softmax","input":["StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean"],"attr":{"T":{"type":"DT_FLOAT"}}},
- {"name":"Identity","op":"Identity","input":["StatefulPartitionedCall/model_1/predictions/Softmax"],"attr":{"T":{"type":"DT_FLOAT"}}}
- ],
- "library": {},
- "versions":
- {
- "producer": 440
- }
- },
- "weightsManifest":
- [
- {
- "paths": ["emotion-mini.bin"],
- "weights": [{"name":"unknown_60","shape":[3,3,64,1],"dtype":"float32"},{"name":"unknown_66","shape":[3,3,128,1],"dtype":"float32"},{"name":"unknown_43","shape":[3,3,32,1],"dtype":"float32"},{"name":"unknown_49","shape":[3,3,64,1],"dtype":"float32"},{"name":"unknown_26","shape":[3,3,16,1],"dtype":"float32"},{"name":"unknown_32","shape":[3,3,32,1],"dtype":"float32"},{"name":"unknown_9","shape":[3,3,8,1],"dtype":"float32"},{"name":"unknown_15","shape":[3,3,16,1],"dtype":"float32"},{"name":"unknown_77","shape":[3,3,128,7],"dtype":"float32"},{"name":"unknown_78","shape":[7],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","shape":[3,3,1,8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset","shape":[8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","shape":[3,3,8,8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset","shape":[8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","shape":[1,1,8,16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","shape":[1,1,8,16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","shape":[1,1,16,16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","shape":[1,1,16,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","shape":[1,1,16,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_weights","shape":[1,1,128,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","shape":[1,1,32,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_weights","shape":[1,1,32,64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_bn_offset","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_weights","shape":[1,1,32,64],"dtype":"float32"},{"name":"Stat
efulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_bn_offset","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_weights","shape":[1,1,64,64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_bn_offset","shape":[64],"dtype":"float32"}]
- }
- ]
-}
\ No newline at end of file
diff --git a/models/emotion-large.bin b/models/emotion.bin
similarity index 100%
rename from models/emotion-large.bin
rename to models/emotion.bin
diff --git a/models/emotion-large.json b/models/emotion.json
similarity index 99%
rename from models/emotion-large.json
rename to models/emotion.json
index 60f9baad..a08b4fa7 100644
--- a/models/emotion-large.json
+++ b/models/emotion.json
@@ -69,7 +69,7 @@
"weightsManifest":
[
{
- "paths": ["emotion-large.bin"],
+ "paths": ["emotion.bin"],
"weights": [{"name":"unknown_26","shape":[3,3,128,1],"dtype":"float32"},{"name":"unknown_32","shape":[3,3,256,1],"dtype":"float32"},{"name":"unknown_9","shape":[3,3,64,1],"dtype":"float32"},{"name":"unknown_15","shape":[3,3,128,1],"dtype":"float32"},{"name":"unknown_43","shape":[3,3,256,7],"dtype":"float32"},{"name":"unknown_44","shape":[7],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","shape":[3,3,1,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","shape":[3,3,32,64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","shape":[1,1,256,256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset","shape":[256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","shape":[1,1,128,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","shape":[1,1,128,256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset","shape":[256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","shape":[1,1,128,256],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset","shape":[256],"dtype":"float32"}]
}
]
diff --git a/package.json b/package.json
index 5d63fadf..328b4740 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@vladmandic/human",
- "version": "0.30.6",
+ "version": "0.40.0",
"description": "Human: AI-powered 3D Face Detection, Face Embedding & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition",
"sideEffects": false,
"main": "dist/human.node.js",
@@ -71,6 +71,8 @@
"age-estimation",
"emotion-detection",
"gender-prediction",
- "gesture-recognition"
+ "gesture-recognition",
+ "blazeface",
+ "blazepose"
]
}
diff --git a/src/human.ts b/src/human.ts
index 165b344b..f28523ee 100644
--- a/src/human.ts
+++ b/src/human.ts
@@ -9,6 +9,7 @@ import * as emotion from './emotion/emotion';
import * as embedding from './embedding/embedding';
import * as posenet from './posenet/posenet';
import * as handpose from './handpose/handpose';
+import * as blazepose from './blazepose/blazepose';
import * as gesture from './gesture/gesture';
import * as image from './image';
import * as profile from './profile';
@@ -49,6 +50,7 @@ class Human {
checkSanity: boolean;
firstRun: boolean;
perf: any;
+ image: any;
models: any;
// models
facemesh: any;
@@ -74,18 +76,21 @@ class Human {
this.models = {
facemesh: null,
posenet: null,
+ blazepose: null,
handpose: null,
iris: null,
age: null,
gender: null,
emotion: null,
};
+ // export access to image processing
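+ // human.image(input) returns the intermediate { tensor, canvas } pair; demo/browser.js reads .canvas from it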
+ this.image = (input) => image.process(input, this.config);
// export raw access to underlying models
this.facemesh = facemesh;
this.age = age;
this.gender = gender;
this.emotion = emotion;
- this.body = posenet;
+ this.body = this.config.body.modelType.startsWith('posenet') ? posenet : blazepose;
this.hand = handpose;
}
@@ -146,16 +151,18 @@ class Human {
this.models.gender,
this.models.emotion,
this.models.embedding,
- this.models.posenet,
this.models.handpose,
+ this.models.posenet,
+ this.models.blazepose,
] = await Promise.all([
this.models.face || (this.config.face.enabled ? face.load(this.config) : null),
this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
this.models.embedding || ((this.config.face.enabled && this.config.face.embedding.enabled) ? embedding.load(this.config) : null),
- this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
+ this.models.posenet || (this.config.body.enabled && this.config.body.modelType.startsWith('posenet') ? posenet.load(this.config) : null),
+ this.models.blazepose || (this.config.body.enabled && this.config.body.modelType.startsWith('blazepose') ? blazepose.load(this.config) : null),
]);
} else {
if (this.config.face.enabled && !this.models.face) this.models.face = await face.load(this.config);
@@ -163,8 +170,9 @@ class Human {
if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding) this.models.embedding = await embedding.load(this.config);
- if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);
if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
+ if (this.config.body.enabled && !this.models.posenet && this.config.body.modelType.startsWith('posenet')) this.models.posenet = await posenet.load(this.config);
+ if (this.config.body.enabled && !this.models.blazepose && this.config.body.modelType.startsWith('blazepose')) this.models.blazepose = await blazepose.load(this.config);
}
if (this.firstRun) {
@@ -346,16 +354,6 @@ class Human {
return faceRes;
}
- /*
- async processImage(input, userConfig = {}) {
- this.state = 'image';
- this.config = mergeDeep(this.config, userConfig);
- const process = image.process(input, this.config);
- process?.tensor?.dispose();
- return process?.canvas;
- }
- */
-
// main detect function
async detect(input, userConfig = {}) {
// detection happens inside a promise
@@ -374,7 +372,7 @@ class Human {
resolve({ error });
}
- let poseRes;
+ let bodyRes;
let handRes;
let faceRes;
@@ -410,15 +408,17 @@ class Human {
this.perf.face = Math.trunc(now() - timeStamp);
}
- // run posenet
+ // run body: can be posenet or blazepose
this.analyze('Start Body:');
if (this.config.async) {
- poseRes = this.config.body.enabled ? this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
+ if (this.config.body.modelType.startsWith('posenet')) bodyRes = this.config.body.enabled ? this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
+ else bodyRes = this.config.body.enabled ? blazepose.predict(process.tensor, this.config) : [];
if (this.perf.body) delete this.perf.body;
} else {
this.state = 'run:body';
timeStamp = now();
- poseRes = this.config.body.enabled ? await this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
+ if (this.config.body.modelType.startsWith('posenet')) bodyRes = this.config.body.enabled ? await this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
+ else bodyRes = this.config.body.enabled ? await blazepose.predict(process.tensor, this.config) : [];
this.perf.body = Math.trunc(now() - timeStamp);
}
this.analyze('End Body:');
@@ -438,7 +438,7 @@ class Human {
// if async wait for results
if (this.config.async) {
- [faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);
+ [faceRes, bodyRes, handRes] = await Promise.all([faceRes, bodyRes, handRes]);
}
process.tensor.dispose();
@@ -449,14 +449,14 @@ class Human {
if (this.config.gesture.enabled) {
timeStamp = now();
// @ts-ignore
- gestureRes = [...gesture.face(faceRes), ...gesture.body(poseRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
+ gestureRes = [...gesture.face(faceRes), ...gesture.body(bodyRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
else if (this.perf.gesture) delete this.perf.gesture;
}
this.perf.total = Math.trunc(now() - timeStart);
this.state = 'idle';
- resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });
+ resolve({ face: faceRes, body: bodyRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });
});
}
@@ -487,21 +487,24 @@ class Human {
src = 'data:image/jpeg;base64,' + sample.face;
break;
case 'full':
+ case 'body':
size = 1200;
src = 'data:image/jpeg;base64,' + sample.body;
break;
default:
src = null;
}
- const img = new Image(size, size);
- img.onload = () => {
+ // src = encodeURI('../assets/human-sample-face.jpg');
+ const img = new Image();
+ img.onload = async () => {
const canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(size, size) : document.createElement('canvas');
- canvas.width = size;
- canvas.height = size;
+ canvas.width = img.naturalWidth;
+ canvas.height = img.naturalHeight;
const ctx = canvas.getContext('2d');
ctx?.drawImage(img, 0, 0);
- const data = ctx?.getImageData(0, 0, size, size);
- this.detect(data, this.config).then((res) => resolve(res));
+ // const data = ctx?.getImageData(0, 0, canvas.width, canvas.height);
+ const res = await this.detect(canvas, this.config);
+ resolve(res);
};
if (src) img.src = src;
else resolve(null);
diff --git a/wiki b/wiki
index 55e854ea..c60f4427 160000
--- a/wiki
+++ b/wiki
@@ -1 +1 @@
-Subproject commit 55e854ea9263ca0eae7ffbb7d60b87e1ca3a7065
+Subproject commit c60f442714b1b5887ae25feb35fa413bc9996402