mirror of https://github.com/vladmandic/human

commit ea5c9b9be6 (parent ce92c60b54): switched face embedding to mobileface

CHANGELOG.md (10 changes)
@@ -1,6 +1,6 @@
 # @vladmandic/human
 
-Version: **1.0.3**
+Version: **1.1.0**
 Description: **Human: AI-powered 3D Face Detection, Face Embedding & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition**
 
 Author: **Vladimir Mandic <mandic00@live.com>**
@@ -11,14 +11,16 @@ Repository: **<git+https://github.com/vladmandic/human.git>**
 
 ### **HEAD -> main** 2021/03/11 mandic00@live.com
 
+### **1.0.4** 2021/03/11 mandic00@live.com
+
+- add face return tensor
+- add test for face descriptors
 - wip on embedding
 - simplify face box coordinate calculations
 - annotated models and removed gender-ssrnet
 - autodetect inputsizes
 
-### **origin/main** 2021/03/10 mandic00@live.com
-
 
 ### **1.0.3** 2021/03/10 mandic00@live.com
 
 - strong typing for public classes and hide private classes

@@ -69,7 +69,7 @@ Default models in Human library are:
 - **Gender Detection**: Oarriaga Gender
 - **Age Detection**: SSR-Net Age IMDB
 - **Body Analysis**: PoseNet
-- **Face Embedding**: Sirius-AI MobileFaceNet Embedding
+- **Face Embedding**: BecauseofAI MobileFace Embedding
 
 Note that alternative models are provided and can be enabled via configuration
 For example, `PoseNet` model can be switched for `BlazePose` model depending on the use case
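Switching models really is just a configuration change. A minimal sketch (the BlazePose path mirrors the commented-out alternative in the demo/node.js diff further down; the exact option set is an assumption):

// hypothetical sketch: swap body analysis from PoseNet to BlazePose via configuration
const human = new Human({
  body: { enabled: true, modelPath: 'file://models/blazepose.json' }, // default is posenet.json
});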

TODO.md (10 changes)
@@ -8,3 +8,13 @@
 - Explore EfficientPose
   <https://github.com/daniegr/EfficientPose>
   <https://github.com/PINTO0309/PINTO_model_zoo/tree/main/084_EfficientPose>
+
+## WiP: Embedding
+
+- Implement offsetRaw
+
+full with and without rotation
+full with and without embedding
+full with and without mesh
+embedding with and without mesh
+boxRaw and meshRaw with and without mesh

@@ -121,9 +121,9 @@ export default {
     },
 
     embedding: {
-      enabled: false, // to improve accuracy of face embedding extraction it is recommended
-      // to enable detector.rotation and mesh.enabled
-      modelPath: '../models/mobilefacenet.json',
+      enabled: false, // to improve accuracy of face embedding extraction it is
+      // highly recommended to enable detector.rotation and mesh.enabled
+      modelPath: '../models/mobileface.json',
     },
   },
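Following that comment's advice, an embedding-focused user configuration would look roughly like this (a sketch assembled from options used elsewhere in this commit, not a verbatim file):

// minimal sketch: enable embedding plus the two options the config comment recommends
const userConfig = {
  face: {
    enabled: true,
    detector: { rotation: true },  // rotate/align the face crop before embedding
    mesh: { enabled: true },       // mesh tightens the crop around the face
    embedding: { enabled: true, modelPath: '../models/mobileface.json' },
  },
};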

@@ -84,11 +84,22 @@ let original;
 async function calcSimmilariry(result) {
   document.getElementById('compare-container').style.display = human.config.face.embedding.enabled ? 'block' : 'none';
   if (!human.config.face.embedding.enabled) return;
-  if (!(result?.face?.length > 0) || (result?.face[0]?.embedding?.length !== 192)) return;
+  if (!(result?.face?.length > 0) || (result?.face[0]?.embedding?.length !== 256)) return;
   if (!original) {
     original = result;
+    if (result.face[0].tensor) {
+      const enhanced = human.enhance(result.face[0]);
+      if (enhanced) {
+        const c = document.getElementById('orig');
+        const squeeze = enhanced.squeeze();
+        human.tf.browser.toPixels(squeeze, c);
+        enhanced.dispose();
+        squeeze.dispose();
+      }
+    } else {
       document.getElementById('compare-canvas').getContext('2d').drawImage(original.canvas, 0, 0, 200, 200);
     }
+  }
   const simmilarity = human.simmilarity(original?.face[0]?.embedding, result?.face[0]?.embedding);
   document.getElementById('simmilarity').innerText = `simmilarity: ${Math.trunc(1000 * simmilarity) / 10}%`;
 }
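The 192 -> 256 change tracks the new model: MobileFace emits a 256-element descriptor where MobileFaceNet emitted 192. `human.simmilarity()` then reduces two descriptors to a single score; that library call is the real API, and the sketch below is only an assumption about what such a comparison typically computes (cosine similarity):

// hypothetical stand-in for human.simmilarity(): cosine similarity of two descriptors
function cosineSimilarity(a, b) {
  if (!a || !b || a.length !== b.length) return 0;
  let dot = 0; let magA = 0; let magB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    magA += a[i] * a[i];
    magB += b[i] * b[i];
  }
  return dot / (Math.sqrt(magA) * Math.sqrt(magB)); // ~1 means same face, ~0 unrelated
}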

@@ -26,9 +26,9 @@
 <body>
   <br>Sample Images:
   <div id="images"></div>
-  <br>Selected Face<br>
+  <br>Selected Face (Enhanced)<br>
   <canvas id="orig" style="width: 200px; height: 200px;"></canvas>
-  <br>Extracted Faces - click on a face to sort by simmilarity:<br>
+  <br><br>Extracted Faces - click on a face to sort by simmilarity:<br>
   <div id="faces"></div>
 </body>
 </html>

@@ -11,7 +11,7 @@ const userConfig = {
     enabled: true,
     detector: { rotation: true, return: true },
     mesh: { enabled: true },
-    embedding: { enabled: true, modelPath: '../models/mobilefacenet.json' },
+    embedding: { enabled: true },
     iris: { enabled: false },
     age: { enabled: false },
     gender: { enabled: false },
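Dropping the explicit modelPath here is deliberate: the library default, changed in the config hunk above, now already points at '../models/mobileface.json'.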

@@ -21,12 +21,15 @@ const userConfig = {
   gesture: { enabled: false },
   body: { enabled: false },
 };
-const human = new Human(userConfig);
+
+const human = new Human(userConfig); // new instance of human
 
 const samples = ['../assets/sample-me.jpg', '../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'];
 // const samples = ['../assets/sample-me.jpg', '../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg',
 //   '../private/me (1).jpg', '../private/me (2).jpg', '../private/me (3).jpg', '../private/me (4).jpg', '../private/me (5).jpg', '../private/me (6).jpg', '../private/me (7).jpg', '../private/me (8).jpg',
 //   '../private/me (9).jpg', '../private/me (10).jpg', '../private/me (11).jpg', '../private/me (12).jpg', '../private/me (13).jpg'];
-const all = [];
+
+const all = []; // array that will hold all detected faces
 
 function log(...msg) {
   const dt = new Date();

@@ -38,14 +41,24 @@ function log(...msg) {
 async function analyze(face) {
   log('Face:', face);
 
-  const box = [[0.05, 0.15, 0.90, 0.85]]; // top, left, bottom, right
-  const crop = human.tf.image.cropAndResize(face.tensor.expandDims(0), box, [0], [200, 200]); // optionally do a tight box crop
-  const c = document.getElementById('orig');
-  human.tf.browser.toPixels(crop.squeeze(), c);
+  // if we have face image tensor, enhance it and display it
+  if (face.tensor) {
+    const enhanced = human.enhance(face);
+    if (enhanced) {
+      const c = document.getElementById('orig');
+      const squeeze = enhanced.squeeze();
+      human.tf.browser.toPixels(squeeze, c);
+      enhanced.dispose();
+      squeeze.dispose();
+    }
+  }
 
+  // loop through all canvases that contain faces
   const canvases = document.getElementsByClassName('face');
   for (const canvas of canvases) {
+    // calculate simmilarity from selected face to current one in the loop
     const res = human.simmilarity(face.embedding, all[canvas.tag.sample][canvas.tag.face].embedding);
+    // draw the canvas and simmilarity score
     canvas.title = res;
     await human.tf.browser.toPixels(all[canvas.tag.sample][canvas.tag.face].tensor, canvas);
     const ctx = canvas.getContext('2d');
@@ -55,6 +68,8 @@ async function analyze(face) {
     ctx.fillStyle = 'rgba(255, 255, 255, 1)';
     ctx.fillText(`${(100 * res).toFixed(1)}%`, 4, 20);
   }
 
+  // sort all faces by simmilarity
   const sorted = document.getElementById('faces');
   [...sorted.children]
     .sort((a, b) => parseFloat(b.title) - parseFloat(a.title))

@@ -70,22 +85,26 @@ async function faces(index, res) {
     canvas.width = 200;
     canvas.height = 200;
     canvas.className = 'face';
+    // mouse click on any face canvas triggers analysis
     canvas.addEventListener('click', (evt) => {
       log('Select:', 'Image:', evt.target.tag.sample, 'Face:', evt.target.tag.face);
       analyze(all[evt.target.tag.sample][evt.target.tag.face]);
     });
+    // if we actually got face image tensor, draw canvas with that face
+    if (res.face[i].tensor) {
       human.tf.browser.toPixels(res.face[i].tensor, canvas);
       document.getElementById('faces').appendChild(canvas);
     }
+  }
 }
 
 async function add(index) {
   log('Add image:', samples[index]);
   return new Promise((resolve) => {
     const img = new Image(100, 100);
-    img.onload = () => {
-      human.detect(img).then((res) => faces(index, res));
-      document.getElementById('images').appendChild(img);
+    img.onload = () => { // must wait until image is loaded
+      human.detect(img).then((res) => faces(index, res)); // then wait until image is analyzed
+      document.getElementById('images').appendChild(img); // and finally we can add it
       resolve(true);
     };
     img.title = samples[index];
@@ -95,7 +114,7 @@ async function add(index) {
 
 async function main() {
   await human.load();
-  for (const i in samples) await add(i);
+  for (const i in samples) await add(i); // download and analyze all images
   log('Ready');
 }
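One pattern worth noting across calcSimmilariry() and analyze(): every tensor created for display (the enhance() result and its squeeze()) is explicitly disposed, since tfjs does not garbage-collect tensor memory. An equivalent sketch using tf.tidy for the intermediate tensor (a hypothetical helper, not part of this commit):

// sketch: tf.tidy reclaims the enhance() intermediate automatically; the squeezed
// tensor is returned out of the tidy scope, so it is disposed manually after
// the async toPixels completes
async function showEnhanced(face, canvas) {
  const squeezed = human.tf.tidy(() => human.enhance(face).squeeze());
  await human.tf.browser.toPixels(squeezed, canvas);
  squeezed.dispose();
}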

demo/node.js (11 changes)

@@ -1,8 +1,10 @@
 const log = require('@vladmandic/pilogger');
 const fs = require('fs');
 const process = require('process');
-// for Node, `tfjs-node` or `tfjs-node-gpu` should be loaded before using Human
+
+// for NodeJS, `tfjs-node` or `tfjs-node-gpu` should be loaded before using Human
 const tf = require('@tensorflow/tfjs-node'); // or const tf = require('@tensorflow/tfjs-node-gpu');
 
 // load specific version of Human library that matches TensorFlow mode
 const Human = require('../dist/human.node.js').default; // or const Human = require('../dist/human.node-gpu.js').default;
@@ -15,15 +17,16 @@ const myConfig = {
   async: false,
   face: {
     enabled: true,
-    detector: { modelPath: 'file://models/blazeface-back.json', enabled: true },
+    detector: { modelPath: 'file://models/blazeface-back.json', enabled: true, rotation: false },
     mesh: { modelPath: 'file://models/facemesh.json', enabled: true },
     iris: { modelPath: 'file://models/iris.json', enabled: true },
     age: { modelPath: 'file://models/age.json', enabled: true },
     gender: { modelPath: 'file://models/gender.json', enabled: true },
     emotion: { modelPath: 'file://models/emotion.json', enabled: true },
+    embedding: { modelPath: 'file://models/mobileface.json', enabled: true },
   },
-  // body: { modelPath: 'file://models/blazepose.json', modelType: 'blazepose', enabled: true },
-  body: { modelPath: 'file://models/posenet.json', modelType: 'posenet', enabled: true },
+  // body: { modelPath: 'file://models/blazepose.json', enabled: true },
+  body: { modelPath: 'file://models/posenet.json', enabled: true },
   hand: {
     enabled: true,
     detector: { modelPath: 'file://models/handdetect.json' },
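With embedding now part of myConfig, a minimal end-to-end run in Node would look roughly like this (the detect call and result shape follow the browser demo above; decoding the input through tf.node.decodeImage and the human instance name are assumptions, since the rest of the file is not shown in this diff):

// sketch: detect one image file and report the face descriptor length
const human = new Human(myConfig);

async function detectFile(input) {
  const buffer = fs.readFileSync(input);
  const tensor = tf.node.decodeImage(buffer); // jpg/png -> [h, w, 3] tensor
  const result = await human.detect(tensor, myConfig);
  tensor.dispose();
  if (result?.face?.length > 0) log.data('embedding length:', result.face[0].embedding?.length); // 256 with mobileface
}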
File diff suppressed because one or more lines are too long
Binary file not shown.

models/mobileface.json (new file; filename inferred from the weightsManifest paths below)
@@ -0,0 +1,202 @@
{
"format": "graph-model",
"generatedBy": "2.4.1",
"convertedBy": "https://github.com/vladmandic",
"signature":
{
"inputs":
{
"data:0": {"name":"data:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"112"},{"size":"112"},{"size":"3"}]}}
},
"outputs":
{
"batchnorm0/add_1:0": {"name":"batchnorm0/add_1:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"256"}]}}
}
},
"modelTopology":
{
"node":
[
{"name":"Maximum_12/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"stage4_unit1_prelu2_gamma","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}}}},
{"name":"Minimum_12/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Maximum_11/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"stage4_unit1_prelu1_gamma","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Minimum_11/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Pad_11/paddings","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}},
{"name":"stage4_unit1_conv1_df","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"128"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Pad_12/paddings","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}},
{"name":"stage4_unit1_conv2_df","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"128"},{"size":"1"}]}}}}},
{"name":"Maximum_10/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"stage4_unit1_prelu0_gamma","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Minimum_10/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"Maximum_9/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"stage3_unit1_prelu2_gamma","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}}}},
{"name":"Minimum_9/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"Maximum_8/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"stage3_unit1_prelu1_gamma","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Minimum_8/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Pad_8/paddings","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}},
{"name":"stage3_unit1_conv1_df","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"64"},{"size":"1"}]}}}}},
{"name":"Pad_9/paddings","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}}}},
{"name":"stage3_unit1_conv2_df","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"64"},{"size":"1"}]}}}}},
{"name":"Maximum_7/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"stage3_unit1_prelu0_gamma","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Minimum_7/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"Maximum_6/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"stage2_unit1_prelu2_gamma","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
{"name":"Minimum_6/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Maximum_5/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"stage2_unit1_prelu1_gamma","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
{"name":"Minimum_5/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"Pad_5/paddings","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}}}},
{"name":"stage2_unit1_conv1_df","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Pad_6/paddings","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}},
{"name":"stage2_unit1_conv2_df","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Maximum_4/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"stage2_unit1_prelu0_gamma","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Minimum_4/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"Maximum_3/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"stage1_unit1_prelu2_gamma","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Minimum_3/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Maximum_2/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"stage1_unit1_prelu1_gamma","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
{"name":"Minimum_2/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Pad_2/paddings","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}}}},
{"name":"stage1_unit1_conv1_df","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}}}},
{"name":"Pad_3/paddings","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}},
{"name":"stage1_unit1_conv2_df","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}}}},
{"name":"Maximum_1/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"stage1_unit1_prelu0_gamma","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Minimum_1/x","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}},
{"name":"Maximum/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"prelu1_gamma","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
{"name":"Minimum/x","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"minusscalar0_second","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"mulscalar0_second","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"}]}}}}},
{"name":"Pad/paddings","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}}}},
{"name":"conv1_weight","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"3"},{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"stage1_unit1_conv0_0_weight","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Pad_1/paddings","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}}}},
{"name":"stage1_unit1_conv0_1_df","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}}}},
{"name":"stage2_unit1_conv0_0_weight","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"32"}]}}}}},
{"name":"Pad_4/paddings","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}}}},
{"name":"stage2_unit1_conv0_1_df","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"stage3_unit1_conv0_0_weight","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Pad_7/paddings","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}},
{"name":"stage3_unit1_conv0_1_df","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"64"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"stage4_unit1_conv0_0_weight","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"Pad_10/paddings","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"4"},{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}},
{"name":"stage4_unit1_conv0_1_df","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"128"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"flatten/Const","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}},
{"name":"dense/kernel","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"2048"},{"size":"256"}]}}}}},
{"name":"batchnorm0/mul","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"256"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"batchnorm0/sub","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"256"}]}}}}},
{"name":"data","op":"Placeholder","attr":{"shape":{"shape":{"dim":[{"size":"-1"},{"size":"112"},{"size":"112"},{"size":"3"}]}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"sub","op":"Sub","input":["data","minusscalar0_second"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul","op":"Mul","input":["sub","mulscalar0_second"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Pad","op":"Pad","input":["mul","Pad/paddings"],"attr":{"Tpaddings":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}},
{"name":"conv1","op":"Conv2D","input":["Pad","conv1_weight"],"device":"/device:CPU:0","attr":{"padding":{"s":"VkFMSUQ="},"use_cudnn_on_gpu":{"b":true},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","2","2","1"]}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}}}},
{"name":"Maximum","op":"Maximum","input":["Maximum/x","conv1"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum","op":"Minimum","input":["Minimum/x","conv1"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_1","op":"Mul","input":["prelu1_gamma","Minimum"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add","op":"AddV2","input":["Maximum","mul_1"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"stage1_unit1_conv0_0","op":"Conv2D","input":["add","stage1_unit1_conv0_0_weight"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}}}},
{"name":"Pad_1","op":"Pad","input":["stage1_unit1_conv0_0","Pad_1/paddings"],"attr":{"T":{"type":"DT_FLOAT"},"Tpaddings":{"type":"DT_INT32"}}},
{"name":"depthwise","op":"DepthwiseConv2dNative","input":["Pad_1","stage1_unit1_conv0_1_df"],"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","2","2","1"]}},"T":{"type":"DT_FLOAT"}}},
{"name":"Maximum_1","op":"Maximum","input":["Maximum_1/x","depthwise"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_1","op":"Minimum","input":["Minimum_1/x","depthwise"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_2","op":"Mul","input":["stage1_unit1_prelu0_gamma","Minimum_1"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_1","op":"AddV2","input":["Maximum_1","mul_2"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Pad_2","op":"Pad","input":["add_1","Pad_2/paddings"],"attr":{"Tpaddings":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}},
{"name":"depthwise_1","op":"DepthwiseConv2dNative","input":["Pad_2","stage1_unit1_conv1_df"],"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"padding":{"s":"VkFMSUQ="},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}}}},
{"name":"Maximum_2","op":"Maximum","input":["Maximum_2/x","depthwise_1"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_2","op":"Minimum","input":["Minimum_2/x","depthwise_1"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_3","op":"Mul","input":["stage1_unit1_prelu1_gamma","Minimum_2"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_2","op":"AddV2","input":["Maximum_2","mul_3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Pad_3","op":"Pad","input":["add_2","Pad_3/paddings"],"attr":{"Tpaddings":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}},
{"name":"depthwise_2","op":"DepthwiseConv2dNative","input":["Pad_3","stage1_unit1_conv2_df"],"attr":{"data_format":{"s":"TkhXQw=="},"padding":{"s":"VkFMSUQ="},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}}}},
{"name":"Maximum_3","op":"Maximum","input":["Maximum_3/x","depthwise_2"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_3","op":"Minimum","input":["Minimum_3/x","depthwise_2"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_4","op":"Mul","input":["stage1_unit1_prelu2_gamma","Minimum_3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_3","op":"AddV2","input":["Maximum_3","mul_4"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_4","op":"AddV2","input":["add_3","add_1"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"stage2_unit1_conv0_0","op":"Conv2D","input":["add_4","stage2_unit1_conv0_0_weight"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="}}},
{"name":"Pad_4","op":"Pad","input":["stage2_unit1_conv0_0","Pad_4/paddings"],"attr":{"T":{"type":"DT_FLOAT"},"Tpaddings":{"type":"DT_INT32"}}},
{"name":"depthwise_3","op":"DepthwiseConv2dNative","input":["Pad_4","stage2_unit1_conv0_1_df"],"attr":{"padding":{"s":"VkFMSUQ="},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","2","2","1"]}},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}}}},
{"name":"Maximum_4","op":"Maximum","input":["Maximum_4/x","depthwise_3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_4","op":"Minimum","input":["Minimum_4/x","depthwise_3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_5","op":"Mul","input":["stage2_unit1_prelu0_gamma","Minimum_4"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_5","op":"AddV2","input":["Maximum_4","mul_5"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Pad_5","op":"Pad","input":["add_5","Pad_5/paddings"],"attr":{"T":{"type":"DT_FLOAT"},"Tpaddings":{"type":"DT_INT32"}}},
{"name":"depthwise_4","op":"DepthwiseConv2dNative","input":["Pad_5","stage2_unit1_conv1_df"],"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="},"data_format":{"s":"TkhXQw=="}}},
{"name":"Maximum_5","op":"Maximum","input":["Maximum_5/x","depthwise_4"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_5","op":"Minimum","input":["Minimum_5/x","depthwise_4"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_6","op":"Mul","input":["stage2_unit1_prelu1_gamma","Minimum_5"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_6","op":"AddV2","input":["Maximum_5","mul_6"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Pad_6","op":"Pad","input":["add_6","Pad_6/paddings"],"attr":{"T":{"type":"DT_FLOAT"},"Tpaddings":{"type":"DT_INT32"}}},
{"name":"depthwise_5","op":"DepthwiseConv2dNative","input":["Pad_6","stage2_unit1_conv2_df"],"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="}}},
{"name":"Maximum_6","op":"Maximum","input":["Maximum_6/x","depthwise_5"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_6","op":"Minimum","input":["Minimum_6/x","depthwise_5"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_7","op":"Mul","input":["stage2_unit1_prelu2_gamma","Minimum_6"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_7","op":"AddV2","input":["Maximum_6","mul_7"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_8","op":"AddV2","input":["add_7","add_5"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"stage3_unit1_conv0_0","op":"Conv2D","input":["add_8","stage3_unit1_conv0_0_weight"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"Pad_7","op":"Pad","input":["stage3_unit1_conv0_0","Pad_7/paddings"],"attr":{"Tpaddings":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}},
{"name":"depthwise_6","op":"DepthwiseConv2dNative","input":["Pad_7","stage3_unit1_conv0_1_df"],"attr":{"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","2","2","1"]}}}},
{"name":"Maximum_7","op":"Maximum","input":["Maximum_7/x","depthwise_6"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_7","op":"Minimum","input":["Minimum_7/x","depthwise_6"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_8","op":"Mul","input":["stage3_unit1_prelu0_gamma","Minimum_7"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_9","op":"AddV2","input":["Maximum_7","mul_8"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Pad_8","op":"Pad","input":["add_9","Pad_8/paddings"],"attr":{"Tpaddings":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}},
{"name":"depthwise_7","op":"DepthwiseConv2dNative","input":["Pad_8","stage3_unit1_conv1_df"],"attr":{"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"padding":{"s":"VkFMSUQ="},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="}}},
{"name":"Maximum_8","op":"Maximum","input":["Maximum_8/x","depthwise_7"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_8","op":"Minimum","input":["Minimum_8/x","depthwise_7"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_9","op":"Mul","input":["stage3_unit1_prelu1_gamma","Minimum_8"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_10","op":"AddV2","input":["Maximum_8","mul_9"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Pad_9","op":"Pad","input":["add_10","Pad_9/paddings"],"attr":{"T":{"type":"DT_FLOAT"},"Tpaddings":{"type":"DT_INT32"}}},
{"name":"depthwise_8","op":"DepthwiseConv2dNative","input":["Pad_9","stage3_unit1_conv2_df"],"attr":{"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"padding":{"s":"VkFMSUQ="},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"Maximum_9","op":"Maximum","input":["Maximum_9/x","depthwise_8"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_9","op":"Minimum","input":["Minimum_9/x","depthwise_8"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_10","op":"Mul","input":["stage3_unit1_prelu2_gamma","Minimum_9"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_11","op":"AddV2","input":["Maximum_9","mul_10"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_12","op":"AddV2","input":["add_11","add_9"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"stage4_unit1_conv0_0","op":"Conv2D","input":["add_12","stage4_unit1_conv0_0_weight"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}}}},
{"name":"Pad_10","op":"Pad","input":["stage4_unit1_conv0_0","Pad_10/paddings"],"attr":{"T":{"type":"DT_FLOAT"},"Tpaddings":{"type":"DT_INT32"}}},
{"name":"depthwise_9","op":"DepthwiseConv2dNative","input":["Pad_10","stage4_unit1_conv0_1_df"],"attr":{"data_format":{"s":"TkhXQw=="},"padding":{"s":"VkFMSUQ="},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","2","2","1"]}}}},
{"name":"Maximum_10","op":"Maximum","input":["Maximum_10/x","depthwise_9"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_10","op":"Minimum","input":["Minimum_10/x","depthwise_9"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_11","op":"Mul","input":["stage4_unit1_prelu0_gamma","Minimum_10"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_13","op":"AddV2","input":["Maximum_10","mul_11"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Pad_11","op":"Pad","input":["add_13","Pad_11/paddings"],"attr":{"T":{"type":"DT_FLOAT"},"Tpaddings":{"type":"DT_INT32"}}},
{"name":"depthwise_10","op":"DepthwiseConv2dNative","input":["Pad_11","stage4_unit1_conv1_df"],"attr":{"data_format":{"s":"TkhXQw=="},"padding":{"s":"VkFMSUQ="},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"Maximum_11","op":"Maximum","input":["Maximum_11/x","depthwise_10"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_11","op":"Minimum","input":["Minimum_11/x","depthwise_10"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_12","op":"Mul","input":["stage4_unit1_prelu1_gamma","Minimum_11"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_14","op":"AddV2","input":["Maximum_11","mul_12"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Pad_12","op":"Pad","input":["add_14","Pad_12/paddings"],"attr":{"T":{"type":"DT_FLOAT"},"Tpaddings":{"type":"DT_INT32"}}},
{"name":"depthwise_11","op":"DepthwiseConv2dNative","input":["Pad_12","stage4_unit1_conv2_df"],"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"padding":{"s":"VkFMSUQ="}}},
{"name":"Maximum_12","op":"Maximum","input":["Maximum_12/x","depthwise_11"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Minimum_12","op":"Minimum","input":["Minimum_12/x","depthwise_11"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"mul_13","op":"Mul","input":["stage4_unit1_prelu2_gamma","Minimum_12"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_15","op":"AddV2","input":["Maximum_12","mul_13"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"add_16","op":"AddV2","input":["add_15","add_13"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"flatten/Reshape","op":"Reshape","input":["add_16","flatten/Const"],"attr":{"Tshape":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}},
{"name":"dense/MatMul","op":"MatMul","input":["flatten/Reshape","dense/kernel"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"transpose_a":{"b":false},"transpose_b":{"b":false}}},
{"name":"batchnorm0/mul_1","op":"Mul","input":["dense/MatMul","batchnorm0/mul"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"batchnorm0/add_1","op":"AddV2","input":["batchnorm0/mul_1","batchnorm0/sub"],"attr":{"T":{"type":"DT_FLOAT"}}}
],
"library": {},
"versions":
{
"producer": 561
}
},
"weightsManifest":
[
{
"paths": ["mobileface.bin"],
"weights": [{"name":"Maximum_12/x","shape":[],"dtype":"float32"},{"name":"stage4_unit1_prelu2_gamma","shape":[128],"dtype":"float32"},{"name":"Minimum_12/x","shape":[],"dtype":"float32"},{"name":"Maximum_11/x","shape":[],"dtype":"float32"},{"name":"stage4_unit1_prelu1_gamma","shape":[128],"dtype":"float32"},{"name":"Minimum_11/x","shape":[],"dtype":"float32"},{"name":"Pad_11/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage4_unit1_conv1_df","shape":[3,3,128,1],"dtype":"float32"},{"name":"Pad_12/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage4_unit1_conv2_df","shape":[3,3,128,1],"dtype":"float32"},{"name":"Maximum_10/x","shape":[],"dtype":"float32"},{"name":"stage4_unit1_prelu0_gamma","shape":[128],"dtype":"float32"},{"name":"Minimum_10/x","shape":[],"dtype":"float32"},{"name":"Maximum_9/x","shape":[],"dtype":"float32"},{"name":"stage3_unit1_prelu2_gamma","shape":[64],"dtype":"float32"},{"name":"Minimum_9/x","shape":[],"dtype":"float32"},{"name":"Maximum_8/x","shape":[],"dtype":"float32"},{"name":"stage3_unit1_prelu1_gamma","shape":[64],"dtype":"float32"},{"name":"Minimum_8/x","shape":[],"dtype":"float32"},{"name":"Pad_8/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage3_unit1_conv1_df","shape":[3,3,64,1],"dtype":"float32"},{"name":"Pad_9/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage3_unit1_conv2_df","shape":[3,3,64,1],"dtype":"float32"},{"name":"Maximum_7/x","shape":[],"dtype":"float32"},{"name":"stage3_unit1_prelu0_gamma","shape":[64],"dtype":"float32"},{"name":"Minimum_7/x","shape":[],"dtype":"float32"},{"name":"Maximum_6/x","shape":[],"dtype":"float32"},{"name":"stage2_unit1_prelu2_gamma","shape":[32],"dtype":"float32"},{"name":"Minimum_6/x","shape":[],"dtype":"float32"},{"name":"Maximum_5/x","shape":[],"dtype":"float32"},{"name":"stage2_unit1_prelu1_gamma","shape":[32],"dtype":"float32"},{"name":"Minimum_5/x","shape":[],"dtype":"float32"},{"name":"Pad_5/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage2_unit1_conv1_df","shape":[3,3,32,1],"dtype":"float32"},{"name":"Pad_6/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage2_unit1_conv2_df","shape":[3,3,32,1],"dtype":"float32"},{"name":"Maximum_4/x","shape":[],"dtype":"float32"},{"name":"stage2_unit1_prelu0_gamma","shape":[32],"dtype":"float32"},{"name":"Minimum_4/x","shape":[],"dtype":"float32"},{"name":"Maximum_3/x","shape":[],"dtype":"float32"},{"name":"stage1_unit1_prelu2_gamma","shape":[32],"dtype":"float32"},{"name":"Minimum_3/x","shape":[],"dtype":"float32"},{"name":"Maximum_2/x","shape":[],"dtype":"float32"},{"name":"stage1_unit1_prelu1_gamma","shape":[32],"dtype":"float32"},{"name":"Minimum_2/x","shape":[],"dtype":"float32"},{"name":"Pad_2/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage1_unit1_conv1_df","shape":[3,3,32,1],"dtype":"float32"},{"name":"Pad_3/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage1_unit1_conv2_df","shape":[3,3,32,1],"dtype":"float32"},{"name":"Maximum_1/x","shape":[],"dtype":"float32"},{"name":"stage1_unit1_prelu0_gamma","shape":[32],"dtype":"float32"},{"name":"Minimum_1/x","shape":[],"dtype":"float32"},{"name":"Maximum/x","shape":[],"dtype":"float32"},{"name":"prelu1_gamma","shape":[32],"dtype":"float32"},{"name":"Minimum/x","shape":[],"dtype":"float32"},{"name":"minusscalar0_second","shape":[1],"dtype":"float32"},{"name":"mulscalar0_second","shape":[1],"dtype":"float32"},{"name":"Pad/paddings","shape":[4,2],"dtype":"int32"},{"name":"conv1_weight","shape":[3,3,3,32],"dtype":"float32"},{"name":"stage1_unit1_conv0_0_weight","shape":[1,1,32,32],"dtype":"f
loat32"},{"name":"Pad_1/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage1_unit1_conv0_1_df","shape":[3,3,32,1],"dtype":"float32"},{"name":"stage2_unit1_conv0_0_weight","shape":[1,1,32,32],"dtype":"float32"},{"name":"Pad_4/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage2_unit1_conv0_1_df","shape":[3,3,32,1],"dtype":"float32"},{"name":"stage3_unit1_conv0_0_weight","shape":[1,1,32,64],"dtype":"float32"},{"name":"Pad_7/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage3_unit1_conv0_1_df","shape":[3,3,64,1],"dtype":"float32"},{"name":"stage4_unit1_conv0_0_weight","shape":[1,1,64,128],"dtype":"float32"},{"name":"Pad_10/paddings","shape":[4,2],"dtype":"int32"},{"name":"stage4_unit1_conv0_1_df","shape":[3,3,128,1],"dtype":"float32"},{"name":"flatten/Const","shape":[2],"dtype":"int32"},{"name":"dense/kernel","shape":[2048,256],"dtype":"float32"},{"name":"batchnorm0/mul","shape":[256],"dtype":"float32"},{"name":"batchnorm0/sub","shape":[256],"dtype":"float32"}]
}
]
}
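The signature block above is the embedding contract in one place: input "data" takes a batch of 112x112 RGB images and output "batchnorm0/add_1" returns one 256-element vector per image, which is exactly what the demo's `length !== 256` guard checks. A minimal sketch of exercising the model directly with tfjs, bypassing Human (the model path comes from this commit; everything else is an assumption):

// sketch: load mobileface as a plain tfjs graph-model and embed one 112x112 crop
const tf = require('@tensorflow/tfjs-node');

async function embed(faceTensor) {          // faceTensor: float32 [112, 112, 3]
  const model = await tf.loadGraphModel('file://models/mobileface.json');
  const batch = faceTensor.expandDims(0);   // -> [1, 112, 112, 3]
  const result = model.execute(batch);      // -> [1, 256] from batchnorm0/add_1
  const embedding = await result.data();    // Float32Array(256)
  tf.dispose([batch, result]);
  return embedding;
}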
Binary file not shown.
File diff suppressed because one or more lines are too long

package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@vladmandic/human",
-  "version": "1.0.4",
+  "version": "1.1.0",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@vladmandic/human",
-      "version": "1.0.4",
+      "version": "1.1.0",
       "license": "MIT",
       "devDependencies": {
         "@tensorflow/tfjs": "^3.3.0",
@@ -19,13 +19,13 @@
         "@tensorflow/tfjs-layers": "^3.3.0",
         "@tensorflow/tfjs-node": "^3.3.0",
         "@tensorflow/tfjs-node-gpu": "^3.3.0",
-        "@types/node": "^14.14.33",
+        "@types/node": "^14.14.34",
         "@typescript-eslint/eslint-plugin": "^4.17.0",
         "@typescript-eslint/parser": "^4.17.0",
         "@vladmandic/pilogger": "^0.2.14",
         "chokidar": "^3.5.1",
         "dayjs": "^1.10.4",
-        "esbuild": "^0.9.0",
+        "esbuild": "=0.9.0",
         "eslint": "^7.21.0",
         "eslint-config-airbnb-base": "^14.2.1",
         "eslint-plugin-import": "^2.22.1",
@@ -419,9 +419,9 @@
       "dev": true
     },
     "node_modules/@types/node": {
-      "version": "14.14.33",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.33.tgz",
-      "integrity": "sha512-oJqcTrgPUF29oUP8AsUqbXGJNuPutsetaa9kTQAQce5Lx5dTYWV02ScBiT/k1BX/Z7pKeqedmvp39Wu4zR7N7g==",
+      "version": "14.14.34",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.34.tgz",
+      "integrity": "sha512-dBPaxocOK6UVyvhbnpFIj2W+S+1cBTkHQbFQfeeJhoKFbzYcVUGHvddeWPSucKATb3F0+pgDq0i6ghEaZjsugA==",
       "dev": true
     },
     "node_modules/@types/node-fetch": {
@@ -1962,9 +1962,9 @@
       }
     },
     "node_modules/google-protobuf": {
-      "version": "3.15.5",
-      "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.15.5.tgz",
-      "integrity": "sha512-6bLpAI4nMIQODlegR7OevgkCoyOj5frLVDArUpeuBWad7XWUNWMGP0v5lz1/aeUI6Yf3cG9XA6acZkPxom4SEw==",
+      "version": "3.15.6",
+      "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.15.6.tgz",
+      "integrity": "sha512-p65NyhIZFHFUxbIPOm6cygg2rCjK+2uDCxruOG3RaWKM9R4rBGX0STmlJoSOhoyAG8Fha7U8FP4pQomAV1JXsA==",
       "dev": true
     },
     "node_modules/graceful-fs": {
@@ -4236,9 +4236,9 @@
      "dev": true
    },
    "@types/node": {
-      "version": "14.14.33",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.33.tgz",
-      "integrity": "sha512-oJqcTrgPUF29oUP8AsUqbXGJNuPutsetaa9kTQAQce5Lx5dTYWV02ScBiT/k1BX/Z7pKeqedmvp39Wu4zR7N7g==",
+      "version": "14.14.34",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.34.tgz",
+      "integrity": "sha512-dBPaxocOK6UVyvhbnpFIj2W+S+1cBTkHQbFQfeeJhoKFbzYcVUGHvddeWPSucKATb3F0+pgDq0i6ghEaZjsugA==",
      "dev": true
    },
    "@types/node-fetch": {
@@ -5408,9 +5408,9 @@
      }
    },
    "google-protobuf": {
-      "version": "3.15.5",
-      "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.15.5.tgz",
-      "integrity": "sha512-6bLpAI4nMIQODlegR7OevgkCoyOj5frLVDArUpeuBWad7XWUNWMGP0v5lz1/aeUI6Yf3cG9XA6acZkPxom4SEw==",
+      "version": "3.15.6",
+      "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.15.6.tgz",
+      "integrity": "sha512-p65NyhIZFHFUxbIPOm6cygg2rCjK+2uDCxruOG3RaWKM9R4rBGX0STmlJoSOhoyAG8Fha7U8FP4pQomAV1JXsA==",
      "dev": true
    },
    "graceful-fs": {

package.json
@@ -1,6 +1,6 @@
 {
   "name": "@vladmandic/human",
-  "version": "1.0.4",
+  "version": "1.1.0",
   "description": "Human: AI-powered 3D Face Detection, Face Embedding & Recognition, Body Pose Tracking, Hand & Finger Tracking, Iris Analysis, Age & Gender & Emotion Prediction & Gesture Recognition",
   "sideEffects": false,
   "main": "dist/human.node.js",
@@ -54,13 +54,13 @@
     "@tensorflow/tfjs-layers": "^3.3.0",
     "@tensorflow/tfjs-node": "^3.3.0",
     "@tensorflow/tfjs-node-gpu": "^3.3.0",
-    "@types/node": "^14.14.33",
+    "@types/node": "^14.14.34",
     "@typescript-eslint/eslint-plugin": "^4.17.0",
     "@typescript-eslint/parser": "^4.17.0",
     "@vladmandic/pilogger": "^0.2.14",
     "chokidar": "^3.5.1",
     "dayjs": "^1.10.4",
-    "esbuild": "^0.9.0",
+    "esbuild": "=0.9.0",
     "eslint": "^7.21.0",
     "eslint-config-airbnb-base": "^14.2.1",
     "eslint-plugin-import": "^2.22.1",
src/blazeface/facemesh.ts
@@ -18,19 +18,42 @@ export class MediaPipeFaceMesh {
     const results: Array<{}> = [];
     for (const prediction of (predictions || [])) {
       if (prediction.isDisposedInternal) continue; // guard against disposed tensors on long running operations such as pause in middle of processing
-      const mesh = prediction.coords ? prediction.coords.arraySync() : null;
-      const meshRaw = prediction.rawCoords;
+      const mesh = prediction.coords ? prediction.coords.arraySync() : [];
+      const meshRaw = mesh.map((pt) => [
+        pt[0] / input.shape[2],
+        pt[1] / input.shape[1],
+        pt[2] / this.facePipeline.meshSize,
+      ]);
       const annotations = {};
       if (mesh && mesh.length > 0) {
         for (const key of Object.keys(coords.MESH_ANNOTATIONS)) annotations[key] = coords.MESH_ANNOTATIONS[key].map((index) => mesh[index]);
       }
-      const boxRaw = (prediction.box) ? { topLeft: prediction.box.startPoint, bottomRight: prediction.box.endPoint } : null;
+      // const boxRaw = (prediction.box) ? { topLeft: prediction.box.startPoint, bottomRight: prediction.box.endPoint } : null;
       const box = prediction.box ? [
         Math.max(0, prediction.box.startPoint[0]),
         Math.max(0, prediction.box.startPoint[1]),
         Math.min(input.shape[2], prediction.box.endPoint[0]) - prediction.box.startPoint[0],
         Math.min(input.shape[1], prediction.box.endPoint[1]) - prediction.box.startPoint[1],
       ] : 0;
+      const boxRaw = prediction.box ? [
+        Math.max(0, prediction.box.startPoint[0] / input.shape[2]),
+        Math.max(0, prediction.box.startPoint[1] / input.shape[1]),
+        Math.min(input.shape[2], (prediction.box.endPoint[0]) - prediction.box.startPoint[0]) / input.shape[2],
+        Math.min(input.shape[1], (prediction.box.endPoint[1]) - prediction.box.startPoint[1]) / input.shape[2],
+      ] : [];
+      /*
+      let offsetRaw = <any>[];
+      if (meshRaw.length > 0 && boxRaw.length > 0) {
+        const dimX = meshRaw.map((pt) => pt[0]);
+        const dimY = meshRaw.map((pt) => pt[1]);
+        offsetRaw = [
+          Math.max(0, 0 + Math.min(...dimY) - boxRaw[0]), // distance of detected face border to box top edge
+          Math.max(0, 0 + Math.min(...dimX) - boxRaw[1]), // distance of detected face border to box left edge
+          Math.min(1, 1 - Math.max(...dimY) + boxRaw[2]), // distance of detected face border to box bottom edge
+          Math.min(1, 1 - Math.max(...dimX) + boxRaw[3]), // distance of detected face border to box right edge
+        ];
+      }
+      */
       results.push({
         confidence: prediction.faceConfidence || prediction.boxConfidence || 0,
         boxConfidence: prediction.boxConfidence,
@@ -39,6 +62,7 @@ export class MediaPipeFaceMesh {
         mesh,
         boxRaw,
         meshRaw,
+        // offsetRaw,
         annotations,
         image: prediction.image ? tf.clone(prediction.image) : null,
       });
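Note on the coordinate change above: mesh points and boxes now also come in raw form, normalized to the 0..1 range against the input tensor dimensions, so callers can scale them to any output resolution. A minimal sketch of that scaling (the helper name and sample values are illustrative, not part of the commit):

// map a normalized meshRaw point back to pixel space of any target canvas
function scalePoint(pt: [number, number, number], width: number, height: number): [number, number] {
  return [pt[0] * width, pt[1] * height];
}
// e.g. a normalized point [0.5, 0.25, 0] on a 1280x720 canvas lands at [640, 180]
const pixel = scalePoint([0.5, 0.25, 0], 1280, 720);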
src/blazeface/facepipeline.ts
@@ -197,6 +197,8 @@ export class Pipeline {
     }

     let results = tf.tidy(() => this.storedBoxes.map((box, i) => {
+      const boxConfidence = box.confidence;
+
       // The facial bounding box landmarks could come either from blazeface (if we are using a fresh box), or from the mesh model (if we are reusing an old box).
       let face;
       let angle = 0;
@@ -282,7 +284,7 @@ export class Pipeline {
         coords: transformedCoords,
         box,
         faceConfidence,
-        boxConfidence: box.confidence,
+        boxConfidence,
         image: face,
         rawCoords,
       };
src/embedding/embedding.ts
@@ -2,10 +2,6 @@ import { log } from '../log';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile';

-// original: https://github.com/sirius-ai/MobileFaceNet_TF
-// modified: https://github.com/sirius-ai/MobileFaceNet_TF/issues/46
-// download: https://github.com/sirius-ai/MobileFaceNet_TF/files/3551493/FaceMobileNet192_train_false.zip
-
 let model;

 export async function load(config) {
@@ -26,32 +22,63 @@ export function simmilarity(embedding1, embedding2, order = 2) {
     .map((val, i) => (Math.abs(embedding1[i] - embedding2[i]) ** order)) // distance squared
     .reduce((sum, now) => (sum + now), 0) // sum all distances
     ** (1 / order); // get root of
-  const res = Math.max(Math.trunc(1000 * (1 - (50 * distance))) / 1000, 0);
+  const res = Math.max(Math.trunc(1000 * (1 - (1 * distance))) / 1000, 0);
   return res;
 }
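The only functional change to simmilarity is the scaling factor: MobileFace embeddings yield Lp distances that are already in a usable 0..1 range, so the old 50x multiplier is dropped. A quick sketch of how the score now behaves (the sample vectors are made up for illustration):

// identical embeddings: distance 0, similarity 1.0
const same = simmilarity([0.1, 0.2, 0.3], [0.1, 0.2, 0.3]); // 1

// L2 distance of 0.5 between these vectors yields similarity 0.5
const partial = simmilarity([0.1, 0.2, 0.3], [0.6, 0.2, 0.3]); // 0.5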
+export function enhance(input) {
+  const image = tf.tidy(() => {
+    // input received from detector is already normalized to 0..1
+    // input is also assumed to be straightened
+    // const data = tf.image.resizeBilinear(input, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false); // just resize to fit the embedding model
+
+    // do a tight crop of image and resize it to fit the model
+    // maybe offsets are already prepared by face model, if not use empirical values
+    const box = input.offsetRaw
+      ? [input.offsetRaw] // crop based on face mesh borders
+      : [[0.05, 0.15, 0.85, 0.85]]; // fixed crop for top, left, bottom, right
+    const tensor = input.image || input.tensor;
+    const crop = tensor.shape.length === 3
+      ? tf.image.cropAndResize(tensor.expandDims(0), box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]) // add batch if missing
+      : tf.image.cropAndResize(tensor, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
+
+    // convert to black&white to avoid colorization impact
+    const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html
+    const [red, green, blue] = tf.split(crop, 3, 3);
+    const redNorm = tf.mul(red, rgb[0]);
+    const greenNorm = tf.mul(green, rgb[1]);
+    const blueNorm = tf.mul(blue, rgb[2]);
+    const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
+    const merge = tf.stack([grayscale, grayscale, grayscale], 3).squeeze(4);
+
+    // normalize brightness from 0..1
+    const darken = merge.sub(merge.min());
+    const lighten = darken.div(darken.max());
+
+    return lighten;
+  });
+  return image;
+}
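The new enhance() centralizes all preprocessing for the embedding model: a tight crop (from mesh-derived offsets when available, otherwise fixed empirical margins), grayscale conversion using the standard rgb2gray weights, and brightness stretching to the full 0..1 range. For a single pixel the grayscale step reduces to a weighted sum; a worked illustration (the pixel values are arbitrary):

// grayscale = 0.2989*R + 0.5870*G + 0.1140*B
// e.g. for a pixel [0.8, 0.4, 0.2]:
const gray = 0.2989 * 0.8 + 0.5870 * 0.4 + 0.1140 * 0.2; // ~0.497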
 export async function predict(input, config) {
   if (!model) return null;
   return new Promise(async (resolve) => {
-    const image = tf.tidy(() => {
-      const data = tf.image.resizeBilinear(input, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false); // input is already normalized to 0..1
-      const box = [[0.05, 0.15, 0.90, 0.85]]; // top, left, bottom, right
-      const crop = tf.image.cropAndResize(data, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]); // optionally do a tight box crop
-      // const norm = crop.sub(crop.min()).sub(0.5); // trick to normalize around image mean value
-      const norm = crop.sub(0.5);
-      return norm;
-    });
-    let data: Array<[]> = [];
+    const image = enhance(input);
+    // let data: Array<[]> = [];
+    let data: Array<number> = [];
     if (config.face.embedding.enabled) {
       if (!config.profile) {
-        const res = await model.predict({ img_inputs: image });
+        const res = await model.predict(image);
+        // optional normalize outputs with l2 normalization
+        /*
         const scaled = tf.tidy(() => {
           const l2 = res.norm('euclidean');
           const scale = res.div(l2);
           return scale;
         });
-        data = scaled.dataSync(); // convert object array to standard array
-        tf.dispose(scaled);
+        */
+        data = res.dataSync();
+        // tf.dispose(scaled);
         tf.dispose(res);
       } else {
         const profileData = await tf.profile(() => model.predict({ img_inputs: image }));
@@ -64,3 +91,19 @@ export async function predict(input, config) {
     resolve(data);
   });
 }
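The l2 normalization block is left commented out rather than removed, so embeddings can be re-normalized to unit length before comparison if needed. A standalone sketch of the same operation, assuming tf is the tfjs module already imported in this file (the helper name is illustrative):

// normalize an embedding tensor to unit L2 length, mirroring the commented-out block
function l2normalize(res) {
  return tf.tidy(() => res.div(res.norm('euclidean')));
}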
+/*
+  git clone https://github.com/becauseofAI/MobileFace
+  cd MobileFace/MobileFace_Identification
+  mmconvert --srcFramework mxnet --inputWeight MobileFace_Identification_V3-0000.params --inputNetwork MobileFace_Identification_V3-symbol.json --inputShape 3,112,112 --dstFramework tensorflow --outputModel saved
+  saved_model_cli show --dir saved/
+  tensorflowjs_converter --input_format tf_saved_model --output_format tfjs_graph_model --saved_model_tags train saved/ graph/
+  ~/dev/detector/signature.js graph/
+  2021-03-12 08:25:12 DATA:  created on: 2021-03-12T13:17:11.960Z
+  2021-03-12 08:25:12 INFO:  graph model: /home/vlado/dev/face/MobileFace/MobileFace_Identification/graph/model.json
+  2021-03-12 08:25:12 INFO:  size: { unreliable: true, numTensors: 75, numDataBuffers: 75, numBytes: 2183192 }
+  2021-03-12 08:25:12 INFO:  model inputs based on signature
+  2021-03-12 08:25:12 INFO:  model outputs based on signature
+  2021-03-12 08:25:12 DATA:  inputs: [ { name: 'data:0', dtype: 'DT_FLOAT', shape: [ -1, 112, 112, 3, [length]: 4 ] }, [length]: 1 ]
+  2021-03-12 08:25:12 DATA:  outputs: [ { id: 0, name: 'batchnorm0/add_1:0', dtype: 'DT_FLOAT', shape: [ -1, 256, [length]: 2 ] }, [length]: 1 ]
+*/
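Once converted, the graph model can be loaded with tfjs to sanity-check the signature reported above. A minimal sketch, assuming the converted model sits in graph/ as produced by the steps in the comment (the path and test flow are assumptions):

import * as tf from '@tensorflow/tfjs-node';

async function checkModel() {
  const model = await tf.loadGraphModel('file://graph/model.json');
  const input = tf.zeros([1, 112, 112, 3]); // signature expects [batch, 112, 112, 3] float input
  const output = model.predict(input) as tf.Tensor;
  console.log(output.shape); // expected [1, 256] embedding
  tf.dispose([input, output]);
}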
src/human.ts
@@ -151,6 +151,11 @@ class Human {
     return 0;
   }

+  enhance(input: any): any {
+    if (this.config.face.embedding.enabled) return embedding.enhance(input);
+    return null;
+  }
+
   // preload models, not explicitly required as it's done automatically on first use
   async load(userConfig = null) {
     this.state = 'load';
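With enhance() exposed on the Human class, the embedding preprocessing becomes inspectable from the public API, and a full face comparison reduces to a few calls. A hedged end-to-end sketch (the configuration shape and the embedding field on face results are assumptions based on this commit):

// const human = new Human({ face: { embedding: { enabled: true } } });
// const res1 = await human.detect(image1);
// const res2 = await human.detect(image2);
// const score = human.simmilarity(res1.face[0].embedding, res2.face[0].embedding); // 0..1
// const preprocessed = human.enhance(res1.face[0]); // tensor the embedding model actually sees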
@@ -359,11 +364,11 @@ class Human {
     // run emotion, inherits face from blazeface
     this.#analyze('Start Embedding:');
     if (this.config.async) {
-      embeddingRes = this.config.face.embedding.enabled ? embedding.predict(face.image, this.config) : [];
+      embeddingRes = this.config.face.embedding.enabled ? embedding.predict(face, this.config) : [];
     } else {
       this.state = 'run:embedding';
       timeStamp = now();
-      embeddingRes = this.config.face.embedding.enabled ? await embedding.predict(face.image, this.config) : [];
+      embeddingRes = this.config.face.embedding.enabled ? await embedding.predict(face, this.config) : [];
       this.#perf.embedding = Math.trunc(now() - timeStamp);
     }
     this.#analyze('End Emotion:');
@@ -388,6 +393,8 @@ class Human {

     // combine results
     faceRes.push({
+      ...face,
+      /*
       confidence: face.confidence,
       faceConfidence: face.faceConfidence,
       boxConfidence: face.boxConfidence,
@@ -395,7 +402,9 @@ class Human {
       mesh: face.mesh,
       boxRaw: face.boxRaw,
       meshRaw: face.meshRaw,
+      offsetRaw: face.offsetRaw,
       annotations: face.annotations,
+      */
       age: ageRes.age,
       gender: genderRes.gender,
       genderConfidence: genderRes.confidence,
src/posenet/modelPoseNet.ts
@@ -32,6 +32,7 @@ export class PoseNet {
   constructor(model) {
     this.baseModel = model;
     this.inputSize = model.model.inputs[0].shape[1];
+    if (this.inputSize < 128) this.inputSize = 257;
   }

   async estimatePoses(input, config) {
types/embedding.d.ts
@@ -1,3 +1,4 @@
 export declare function load(config: any): Promise<any>;
 export declare function simmilarity(embedding1: any, embedding2: any, order?: number): number;
+export declare function enhance(input: any): any;
 export declare function predict(input: any, config: any): Promise<unknown>;

types/human.d.ts
@@ -53,6 +53,7 @@ declare class Human {
     largestKernelOps: any;
   } | {};
   simmilarity(embedding1: any, embedding2: any): number;
+  enhance(input: any): any;
   load(userConfig?: null): Promise<void>;
   detect(input: any, userConfig?: {}): Promise<{
     face: any;