mirror of https://github.com/vladmandic/human
model tuning
parent
db85fdb895
commit
3b9338d47b
config.js: 22 changes
@@ -56,9 +56,9 @@ export default {
 skipFrames: 15, // how many frames to go without re-running the face bounding box detector, only used for video inputs
 // if the model is running at 25 FPS, we can re-use the existing bounding box for updated face mesh analysis
 // as the face probably hasn't moved much in that time (15 * 1/25 = 0.6 sec)
-minConfidence: 0.5, // threshold for discarding a prediction
+minConfidence: 0.1, // threshold for discarding a prediction
-iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression
+iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much in non-maximum suppression (0.1 means drop if overlap 10%)
-scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on score in non-maximum suppression
+scoreThreshold: 0.1, // threshold for deciding when to remove boxes based on score in non-maximum suppression, this is applied on detection objects only and before minConfidence
 },
 mesh: {
 enabled: true,
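
For context, these three values are passed straight through to TensorFlow.js non-maximum suppression, as the blazeface hunks further down show. A minimal sketch of how they interact (illustrative tensors and values, not the library internals):

    const tf = require('@tensorflow/tfjs');

    async function pickBoxes(boxes, scores) {
      // scoreThreshold first prunes low-scoring candidates,
      // then iouThreshold drops any box overlapping a better one by more than 10%
      const keep = await tf.image.nonMaxSuppressionAsync(
        boxes,  // [numBoxes, 4] box coordinates
        scores, // [numBoxes] detection scores
        10,     // maxFaces: maximum number of boxes to keep
        0.1,    // iouThreshold
        0.1);   // scoreThreshold
      return keep.arraySync(); // indices of the surviving boxes
    }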
@@ -73,20 +73,22 @@ export default {
 },
 age: {
 enabled: true,
-modelPath: '../models/ssrnet-age-imdb.json', // can be 'imdb' or 'wiki'
+modelPath: '../models/age-ssrnet-imdb.json', // can be 'age-ssrnet-imdb' or 'age-ssrnet-wiki'
 // which determines training set for model
 inputSize: 64, // fixed value
 skipFrames: 15, // how many frames to go without re-running the detector, only used for video inputs
 },
 gender: {
 enabled: true,
-minConfidence: 0.5, // threshold for discarding a prediction
+minConfidence: 0.1, // threshold for discarding a prediction
-modelPath: '../models/ssrnet-gender-imdb.json',
+modelPath: '../models/gender-ssrnet-imdb.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'
+inputSize: 64, // fixed value
+skipFrames: 15, // how many frames to go without re-running the detector, only used for video inputs
 },
 emotion: {
 enabled: true,
 inputSize: 64, // fixed value
-minConfidence: 0.5, // threshold for discarding a prediction
+minConfidence: 0.2, // threshold for discarding a prediction
 skipFrames: 15, // how many frames to go without re-running the detector
 modelPath: '../models/emotion-large.json', // can be 'mini', 'large'
 },
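
The skipFrames mechanics are visible verbatim in the gender hunk below (`if (frame < config.face.gender.skipFrames && last.gender !== "")`); the general caching pattern, sketched standalone with a hypothetical runModel():

    let frame = Number.MAX_SAFE_INTEGER; // forces a model run on the first call
    let last = null;                     // cached result from the last real run

    async function analyze(image, config) {
      // re-use the cached result until skipFrames frames have elapsed
      if (frame < config.skipFrames && last) {
        frame += 1;
        return last;
      }
      last = await runModel(image); // hypothetical inference call
      frame = 0;
      return last;
    }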
@@ -106,9 +108,9 @@ export default {
 skipFrames: 15, // how many frames to go without re-running the hand bounding box detector, only used for video inputs
 // if the model is running at 25 FPS, we can re-use the existing bounding box for updated hand skeleton analysis
 // as the hand probably hasn't moved much in that time (15 * 1/25 = 0.6 sec)
-minConfidence: 0.5, // threshold for discarding a prediction
+minConfidence: 0.1, // threshold for discarding a prediction
-iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression
+iouThreshold: 0.2, // threshold for deciding whether boxes overlap too much in non-maximum suppression
-scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on score in non-maximum suppression
+scoreThreshold: 0.1, // threshold for deciding when to remove boxes based on score in non-maximum suppression
 enlargeFactor: 1.65, // empiric tuning as skeleton prediction prefers hand box with some whitespace
 maxHands: 10, // maximum number of hands detected in the input, should be set to the minimum number for performance
 detector: {
@@ -69,7 +69,7 @@ function drawResults(input, result, canvas) {
 // console.log(result.performance);

 // eslint-disable-next-line no-use-before-define
-requestAnimationFrame(() => runHumanDetect(input, canvas)); // immediate loop before we even draw results
+if (input.srcObject) requestAnimationFrame(() => runHumanDetect(input, canvas)); // immediate loop before we even draw results

 // draw fps chart
 menu.updateChart('FPS', fps);
@@ -187,7 +187,7 @@ function runHumanDetect(input, canvas) {
 timeStamp = performance.now();
 // if live video
 const live = input.srcObject && (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);
-if (!live) {
+if (!live && input.srcObject) {
 // if we want to continue and camera not ready, retry in 0.5sec, else just give up
 if ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);
 else log(`camera not ready: track state: ${input.srcObject?.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
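
The added input.srcObject checks matter because the demo drives both webcam streams and static inputs through the same loop: a video element fed from getUserMedia has srcObject set, while a file or URL input does not, so the retry/give-up path should only ever fire for camera sources. The distinction, sketched (assumed element wiring, not the demo's full setup code):

    async function attachCamera(video) {
      // webcam: srcObject carries the live MediaStream
      video.srcObject = await navigator.mediaDevices.getUserMedia({ video: true });
    }

    function attachClip(video, url) {
      // file playback: src is a URL, srcObject stays null
      video.src = url;
    }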
@@ -317,6 +317,7 @@ function setupMenu() {
 });
 menu.addRange('Min Confidence', human.config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
 human.config.face.detector.minConfidence = parseFloat(val);
+human.config.face.gender.minConfidence = parseFloat(val);
 human.config.face.emotion.minConfidence = parseFloat(val);
 human.config.hand.minConfidence = parseFloat(val);
 });
@@ -213,7 +213,7 @@ class Menu {
 el.innerHTML = `<input class="menu-range" type="range" id="${this.newID}" min="${min}" max="${max}" step="${step}" value="${object[variable]}">${title}`;
 this.container.appendChild(el);
 el.addEventListener('change', (evt) => {
-object[variable] = evt.target.value;
+object[variable] = parseInt(evt.target.value) === parseFloat(evt.target.value) ? parseInt(evt.target.value) : parseFloat(evt.target.value);
 evt.target.setAttribute('value', evt.target.value);
 if (callback) callback(evt.target.value);
 });
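
The reason for the new expression: evt.target.value on a range input is always a string, so the old assignment silently turned numeric config values into strings. The parseInt/parseFloat comparison keeps integers as integers and fractions as floats, for example:

    const coerce = (v) => (parseInt(v) === parseFloat(v) ? parseInt(v) : parseFloat(v));
    coerce('15');   // 15   (integer, e.g. skipFrames)
    coerce('0.05'); // 0.05 (float, e.g. minConfidence)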
@@ -67088,20 +67088,18 @@ var require_blazeface = __commonJS((exports) => {
 this.blazeFaceModel = model;
 this.width = config.detector.inputSize;
 this.height = config.detector.inputSize;
-this.maxFaces = config.detector.maxFaces;
 this.anchorsData = generateAnchors(config.detector.inputSize);
 this.anchors = tf2.tensor2d(this.anchorsData);
 this.inputSize = tf2.tensor1d([this.width, this.height]);
-this.iouThreshold = config.detector.iouThreshold;
+this.config = config;
 this.scaleFaces = 0.8;
-this.scoreThreshold = config.detector.scoreThreshold;
 }
 async getBoundingBoxes(inputImage) {
 if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
 return null;
 const [detectedOutputs, boxes, scores] = tf2.tidy(() => {
 const resizedImage = inputImage.resizeBilinear([this.width, this.height]);
-const normalizedImage = tf2.mul(tf2.sub(resizedImage.div(255), 0.5), 2);
+const normalizedImage = tf2.sub(resizedImage.div(127.5), 1);
 const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);
 let prediction;
 if (Array.isArray(batchedPrediction)) {
@@ -67115,10 +67113,10 @@ var require_blazeface = __commonJS((exports) => {
 }
 const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);
 const logits = tf2.slice(prediction, [0, 0], [-1, 1]);
-const scoresOut = tf2.sigmoid(logits).squeeze();
+const scoresOut = logits.squeeze();
 return [prediction, decodedBounds, scoresOut];
 });
-const boxIndicesTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxFaces, this.iouThreshold, this.scoreThreshold);
+const boxIndicesTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);
 const boxIndices = boxIndicesTensor.arraySync();
 boxIndicesTensor.dispose();
 const boundingBoxesMap = boxIndices.map((boxIndex) => tf2.slice(boxes, [boxIndex, 0], [1, -1]));
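
Two of the blazeface changes are worth unpacking. The normalization rewrite is algebraically identical, just one op shorter: (x / 255 - 0.5) * 2 = x / 127.5 - 1, mapping pixels from [0, 255] to [-1, 1]. And with tf2.sigmoid removed, NMS now ranks raw logits rather than probabilities; sigmoid is monotonic so the ordering is unchanged, but scoreThreshold is now compared against a logit, which presumably motivates the config change from 0.8 to 0.1. A quick check of the normalization identity:

    const x = 200; // any pixel value in [0, 255]
    const a = (x / 255 - 0.5) * 2;
    const b = x / 127.5 - 1;
    console.log(a === b, b); // true 0.5686...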
@@ -70877,7 +70875,7 @@ var require_profile = __commonJS((exports) => {
 exports.run = profile2;
 exports.data = profileData;
 });
-var require_ssrnet = __commonJS((exports) => {
+var require_age = __commonJS((exports) => {
 const tf2 = require_tf_node();
 const profile2 = require_profile();
 const models = {};
@@ -70929,20 +70927,23 @@
 exports.predict = predict;
 exports.load = load;
 });
-var require_ssrnet2 = __commonJS((exports) => {
+var require_gender = __commonJS((exports) => {
 const tf2 = require_tf_node();
 const profile2 = require_profile();
 const models = {};
 let last = {gender: ""};
 let frame = Number.MAX_SAFE_INTEGER;
+let alternative = false;
 const zoom = [0, 0];
+const rgb = [0.2989, 0.587, 0.114];
 async function load(config) {
 if (!models.gender)
 models.gender = await tf2.loadGraphModel(config.face.gender.modelPath);
+alternative = models.gender.inputs[0].shape[3] === 1;
 return models.gender;
 }
 async function predict(image2, config) {
-if (frame < config.face.age.skipFrames && last.gender !== "") {
+if (frame < config.face.gender.skipFrames && last.gender !== "") {
 frame += 1;
 return last;
 }
@@ -70954,8 +70955,20 @@
 (image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
 (image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
 ]];
-const resize = tf2.image.cropAndResize(image2, box, [0], [config.face.age.inputSize, config.face.age.inputSize]);
+const resize = tf2.image.cropAndResize(image2, box, [0], [config.face.gender.inputSize, config.face.gender.inputSize]);
-const enhance = tf2.mul(resize, [255]);
+let enhance;
+if (alternative) {
+enhance = tf2.tidy(() => {
+const [red, green, blue] = tf2.split(resize, 3, 3);
+const redNorm = tf2.mul(red, rgb[0]);
+const greenNorm = tf2.mul(green, rgb[1]);
+const blueNorm = tf2.mul(blue, rgb[2]);
+const grayscale = tf2.addN([redNorm, greenNorm, blueNorm]);
+return grayscale.sub(0.5).mul(2);
+});
+} else {
+enhance = tf2.mul(resize, [255]);
+}
 tf2.dispose(resize);
 let genderT;
 const obj = {};
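
The new rgb constants are the standard ITU-R BT.601 luma weights, so the "alternative" (single-channel) model path converts the RGB crop to grayscale before rescaling to [-1, 1]. The same weighting applied to one pixel, for illustration:

    const rgb = [0.2989, 0.587, 0.114]; // BT.601 luma weights, summing to ~1
    const toGray = (r, g, b) => rgb[0] * r + rgb[1] * g + rgb[2] * b;
    toGray(1.0, 0.5, 0.0); // 0.5924 for an orange-ish pixel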
@@ -70971,10 +70984,18 @@
 enhance.dispose();
 if (genderT) {
 const data = genderT.dataSync();
-const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
-if (confidence > config.face.gender.minConfidence) {
-obj.gender = data[0] <= 0.5 ? "female" : "male";
-obj.confidence = confidence;
+if (alternative) {
+const confidence = Math.trunc(100 * Math.abs(data[0] - data[1])) / 100;
+if (confidence > config.face.gender.minConfidence) {
+obj.gender = data[0] > data[1] ? "female" : "male";
+obj.confidence = confidence;
+}
+} else {
+const confidence = Math.trunc(200 * Math.abs(data[0] - 0.5)) / 100;
+if (confidence > config.face.gender.minConfidence) {
+obj.gender = data[0] <= 0.5 ? "female" : "male";
+obj.confidence = confidence;
+}
 }
 }
 genderT.dispose();
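
The two branches score confidence differently because the models' outputs differ: the alternative model emits two class scores, so confidence is their gap |data[0] - data[1]|, while the original SSR-Net head emits a single value in [0, 1], so confidence is the distance from the 0.5 decision boundary scaled back to [0, 1] (the old ad-hoc 1.9 multiplier becomes a clean 200 / 100). Worked numbers:

    // two-output model: data = [0.85, 0.15] (female, male scores)
    Math.trunc(100 * Math.abs(0.85 - 0.15)) / 100; // 0.7 -> "female"

    // single-output model: data = [0.2]
    Math.trunc(200 * Math.abs(0.2 - 0.5)) / 100;   // 0.6 -> "female" (value <= 0.5)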
@@ -90659,9 +90680,9 @@ var require_config = __commonJS((exports) => {
 inputSize: 256,
 maxFaces: 10,
 skipFrames: 15,
-minConfidence: 0.5,
+minConfidence: 0.1,
-iouThreshold: 0.3,
+iouThreshold: 0.1,
-scoreThreshold: 0.8
+scoreThreshold: 0.1
 },
 mesh: {
 enabled: true,
@@ -90676,19 +90697,21 @@
 },
 age: {
 enabled: true,
-modelPath: "../models/ssrnet-age-imdb.json",
+modelPath: "../models/age-ssrnet-imdb.json",
 inputSize: 64,
 skipFrames: 15
 },
 gender: {
 enabled: true,
-minConfidence: 0.5,
+minConfidence: 0.1,
-modelPath: "../models/ssrnet-gender-imdb.json"
+modelPath: "../models/gender-ssrnet-imdb.json",
+inputSize: 64,
+skipFrames: 15
 },
 emotion: {
 enabled: true,
 inputSize: 64,
-minConfidence: 0.5,
+minConfidence: 0.2,
 skipFrames: 15,
 modelPath: "../models/emotion-large.json"
 }
@@ -90706,9 +90729,9 @@
 enabled: true,
 inputSize: 256,
 skipFrames: 15,
-minConfidence: 0.5,
+minConfidence: 0.1,
-iouThreshold: 0.3,
+iouThreshold: 0.2,
-scoreThreshold: 0.8,
+scoreThreshold: 0.1,
 enlargeFactor: 1.65,
 maxHands: 10,
 detector: {
@@ -90791,8 +90814,8 @@ var require_package = __commonJS((exports, module) => {
 });
 const tf = require_tf_node();
 const facemesh = require_facemesh();
-const age = require_ssrnet();
+const age = require_age();
-const gender = require_ssrnet2();
+const gender = require_gender();
 const emotion = require_emotion();
 const posenet = require_posenet();
 const handpose = require_handpose();
@@ -90802,7 +90825,7 @@ const profile = require_profile();
 const defaults = require_config().default;
 const app = require_package();
 const override = {
-face: {detector: {skipFrames: 0}, age: {skipFrames: 0}, emotion: {skipFrames: 0}},
+face: {detector: {skipFrames: 0}, age: {skipFrames: 0}, gender: {skipFrames: 0}, emotion: {skipFrames: 0}},
 hand: {skipFrames: 0}
 };
 const now = () => {
@@ -90831,7 +90854,6 @@ class Human {
 constructor() {
 this.tf = tf;
 this.version = app.version;
-this.defaults = defaults;
 this.config = defaults;
 this.fx = null;
 this.state = "idle";
@@ -90894,7 +90916,7 @@ class Human {
 this.state = "load";
 const timeStamp2 = now();
 if (userConfig)
-this.config = mergeDeep(defaults, userConfig);
+this.config = mergeDeep(this.config, userConfig);
 if (this.firstRun) {
 this.checkBackend(true);
 this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);
@@ -91045,7 +91067,7 @@ class Human {
 async detect(input, userConfig = {}) {
 this.state = "config";
 let timeStamp2;
-this.config = mergeDeep(defaults, userConfig);
+this.config = mergeDeep(this.config, userConfig);
 if (!this.config.videoOptimized)
 this.config = mergeDeep(this.config, override);
 this.state = "check";
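
Switching the mergeDeep base from defaults to this.config makes configuration cumulative: overrides passed to an earlier load() or detect() call now survive subsequent calls instead of being reset to the defaults every time. The behavior, sketched with an assumed minimal deep merge (the real mergeDeep helper lives in src/human.js; this one is illustrative only):

    const mergeDeep = (target, source) => {
      for (const key of Object.keys(source)) {
        if (source[key] instanceof Object && key in target) mergeDeep(target[key], source[key]);
        else target[key] = source[key]; // leaf values overwrite
      }
      return target;
    };

    let config = { face: { detector: { minConfidence: 0.1, skipFrames: 15 } } };
    config = mergeDeep(config, { face: { detector: { minConfidence: 0.4 } } });
    // after the first call: minConfidence 0.4, skipFrames 15
    config = mergeDeep(config, {});
    // a later call without overrides keeps 0.4 instead of reverting to 0.1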
@@ -91532,7 +91554,7 @@ class Menu {
 el.innerHTML = `<input class="menu-range" type="range" id="${this.newID}" min="${min}" max="${max}" step="${step}" value="${object[variable]}">${title}`;
 this.container.appendChild(el);
 el.addEventListener("change", (evt) => {
-object[variable] = evt.target.value;
+object[variable] = parseInt(evt.target.value) === parseFloat(evt.target.value) ? parseInt(evt.target.value) : parseFloat(evt.target.value);
 evt.target.setAttribute("value", evt.target.value);
 if (callback)
 callback(evt.target.value);
@@ -91673,7 +91695,8 @@ function drawResults(input, result, canvas) {
 fps.push(1e3 / (performance.now() - timeStamp));
 if (fps.length > ui.maxFrames)
 fps.shift();
-requestAnimationFrame(() => runHumanDetect(input, canvas));
+if (input.srcObject)
+requestAnimationFrame(() => runHumanDetect(input, canvas));
 menu2.updateChart("FPS", fps);
 const ctx = canvas.getContext("2d");
 ctx.fillStyle = ui.baseBackground;
@@ -91787,7 +91810,7 @@ function runHumanDetect(input, canvas) {
 var _a;
 timeStamp = performance.now();
 const live = input.srcObject && input.srcObject.getVideoTracks()[0].readyState === "live" && input.readyState > 2 && !input.paused;
-if (!live) {
+if (!live && input.srcObject) {
 if (input.srcObject.getVideoTracks()[0].readyState === "live" && input.readyState <= 2)
 setTimeout(() => runHumanDetect(input, canvas), 500);
 else
@@ -91911,6 +91934,7 @@ function setupMenu() {
 });
 menu2.addRange("Min Confidence", human.config.face.detector, "minConfidence", 0, 1, 0.05, (val) => {
 human.config.face.detector.minConfidence = parseFloat(val);
+human.config.face.gender.minConfidence = parseFloat(val);
 human.config.face.emotion.minConfidence = parseFloat(val);
 human.config.hand.minConfidence = parseFloat(val);
 });
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
 "inputs": {
 "demo/browser.js": {
-"bytes": 17412,
+"bytes": 17514,
 "imports": [
 {
 "path": "dist/human.esm.js"
@@ -19,11 +19,11 @@
 "imports": []
 },
 "demo/menu.js": {
-"bytes": 12357,
+"bytes": 12460,
 "imports": []
 },
 "dist/human.esm.js": {
-"bytes": 3196136,
+"bytes": 3196946,
 "imports": []
 }
 },
@@ -31,25 +31,25 @@
 "dist/demo-browser-index.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 5557260
+"bytes": 5559544
 },
 "dist/demo-browser-index.js": {
 "imports": [],
 "inputs": {
 "dist/human.esm.js": {
-"bytesInOutput": 3193996
+"bytesInOutput": 3194809
 },
 "demo/draw.js": {
 "bytesInOutput": 7453
 },
 "demo/menu.js": {
-"bytesInOutput": 12359
+"bytesInOutput": 12462
 },
 "demo/browser.js": {
-"bytesInOutput": 15694
+"bytesInOutput": 15800
 }
 },
-"bytes": 3229624
+"bytes": 3230646
 }
 }
 }
@@ -67127,20 +67127,18 @@ var require_blazeface = __commonJS((exports) => {
 this.blazeFaceModel = model;
 this.width = config.detector.inputSize;
 this.height = config.detector.inputSize;
-this.maxFaces = config.detector.maxFaces;
 this.anchorsData = generateAnchors(config.detector.inputSize);
 this.anchors = tf2.tensor2d(this.anchorsData);
 this.inputSize = tf2.tensor1d([this.width, this.height]);
-this.iouThreshold = config.detector.iouThreshold;
+this.config = config;
 this.scaleFaces = 0.8;
-this.scoreThreshold = config.detector.scoreThreshold;
 }
 async getBoundingBoxes(inputImage) {
 if (!inputImage || inputImage.isDisposedInternal || inputImage.shape.length !== 4 || inputImage.shape[1] < 1 || inputImage.shape[2] < 1)
 return null;
 const [detectedOutputs, boxes, scores] = tf2.tidy(() => {
 const resizedImage = inputImage.resizeBilinear([this.width, this.height]);
-const normalizedImage = tf2.mul(tf2.sub(resizedImage.div(255), 0.5), 2);
+const normalizedImage = tf2.sub(resizedImage.div(127.5), 1);
 const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);
 let prediction;
 if (Array.isArray(batchedPrediction)) {
@@ -67154,10 +67152,10 @@ var require_blazeface = __commonJS((exports) => {
 }
 const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);
 const logits = tf2.slice(prediction, [0, 0], [-1, 1]);
-const scoresOut = tf2.sigmoid(logits).squeeze();
+const scoresOut = logits.squeeze();
 return [prediction, decodedBounds, scoresOut];
 });
-const boxIndicesTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.maxFaces, this.iouThreshold, this.scoreThreshold);
+const boxIndicesTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);
 const boxIndices = boxIndicesTensor.arraySync();
 boxIndicesTensor.dispose();
 const boundingBoxesMap = boxIndices.map((boxIndex) => tf2.slice(boxes, [boxIndex, 0], [1, -1]));
@@ -70933,8 +70931,8 @@ var require_profile = __commonJS((exports) => {
 exports.data = profileData;
 });

-// src/age/ssrnet.js
+// src/age/age.js
-var require_ssrnet = __commonJS((exports) => {
+var require_age = __commonJS((exports) => {
 const tf2 = require_tf_node();
 const profile2 = require_profile();
 const models = {};
@@ -70987,21 +70985,24 @@
 exports.load = load;
 });

-// src/gender/ssrnet.js
+// src/gender/gender.js
-var require_ssrnet2 = __commonJS((exports) => {
+var require_gender = __commonJS((exports) => {
 const tf2 = require_tf_node();
 const profile2 = require_profile();
 const models = {};
 let last = {gender: ""};
 let frame = Number.MAX_SAFE_INTEGER;
+let alternative = false;
 const zoom = [0, 0];
+const rgb = [0.2989, 0.587, 0.114];
 async function load(config) {
 if (!models.gender)
 models.gender = await tf2.loadGraphModel(config.face.gender.modelPath);
+alternative = models.gender.inputs[0].shape[3] === 1;
 return models.gender;
 }
 async function predict(image2, config) {
-if (frame < config.face.age.skipFrames && last.gender !== "") {
+if (frame < config.face.gender.skipFrames && last.gender !== "") {
 frame += 1;
 return last;
 }
@@ -71013,8 +71014,20 @@
 (image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
 (image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
 ]];
-const resize = tf2.image.cropAndResize(image2, box, [0], [config.face.age.inputSize, config.face.age.inputSize]);
+const resize = tf2.image.cropAndResize(image2, box, [0], [config.face.gender.inputSize, config.face.gender.inputSize]);
-const enhance = tf2.mul(resize, [255]);
+let enhance;
+if (alternative) {
+enhance = tf2.tidy(() => {
+const [red, green, blue] = tf2.split(resize, 3, 3);
+const redNorm = tf2.mul(red, rgb[0]);
+const greenNorm = tf2.mul(green, rgb[1]);
+const blueNorm = tf2.mul(blue, rgb[2]);
+const grayscale = tf2.addN([redNorm, greenNorm, blueNorm]);
+return grayscale.sub(0.5).mul(2);
+});
+} else {
+enhance = tf2.mul(resize, [255]);
+}
 tf2.dispose(resize);
 let genderT;
 const obj = {};
@@ -71030,10 +71043,18 @@
 enhance.dispose();
 if (genderT) {
 const data = genderT.dataSync();
-const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
-if (confidence > config.face.gender.minConfidence) {
-obj.gender = data[0] <= 0.5 ? "female" : "male";
-obj.confidence = confidence;
+if (alternative) {
+const confidence = Math.trunc(100 * Math.abs(data[0] - data[1])) / 100;
+if (confidence > config.face.gender.minConfidence) {
+obj.gender = data[0] > data[1] ? "female" : "male";
+obj.confidence = confidence;
+}
+} else {
+const confidence = Math.trunc(200 * Math.abs(data[0] - 0.5)) / 100;
+if (confidence > config.face.gender.minConfidence) {
+obj.gender = data[0] <= 0.5 ? "female" : "male";
+obj.confidence = confidence;
+}
 }
 }
 genderT.dispose();
@@ -90762,9 +90783,9 @@ var require_config = __commonJS((exports) => {
 inputSize: 256,
 maxFaces: 10,
 skipFrames: 15,
-minConfidence: 0.5,
+minConfidence: 0.1,
-iouThreshold: 0.3,
+iouThreshold: 0.1,
-scoreThreshold: 0.8
+scoreThreshold: 0.1
 },
 mesh: {
 enabled: true,
@@ -90779,19 +90800,21 @@
 },
 age: {
 enabled: true,
-modelPath: "../models/ssrnet-age-imdb.json",
+modelPath: "../models/age-ssrnet-imdb.json",
 inputSize: 64,
 skipFrames: 15
 },
 gender: {
 enabled: true,
-minConfidence: 0.5,
+minConfidence: 0.1,
-modelPath: "../models/ssrnet-gender-imdb.json"
+modelPath: "../models/gender-ssrnet-imdb.json",
+inputSize: 64,
+skipFrames: 15
 },
 emotion: {
 enabled: true,
 inputSize: 64,
-minConfidence: 0.5,
+minConfidence: 0.2,
 skipFrames: 15,
 modelPath: "../models/emotion-large.json"
 }
@@ -90809,9 +90832,9 @@
 enabled: true,
 inputSize: 256,
 skipFrames: 15,
-minConfidence: 0.5,
+minConfidence: 0.1,
-iouThreshold: 0.3,
+iouThreshold: 0.2,
-scoreThreshold: 0.8,
+scoreThreshold: 0.1,
 enlargeFactor: 1.65,
 maxHands: 10,
 detector: {
@@ -90898,8 +90921,8 @@ var require_package = __commonJS((exports, module) => {
 // src/human.js
 const tf = require_tf_node();
 const facemesh = require_facemesh();
-const age = require_ssrnet();
+const age = require_age();
-const gender = require_ssrnet2();
+const gender = require_gender();
 const emotion = require_emotion();
 const posenet = require_posenet();
 const handpose = require_handpose();
@@ -90909,7 +90932,7 @@ const profile = require_profile();
 const defaults = require_config().default;
 const app = require_package();
 const override = {
-face: {detector: {skipFrames: 0}, age: {skipFrames: 0}, emotion: {skipFrames: 0}},
+face: {detector: {skipFrames: 0}, age: {skipFrames: 0}, gender: {skipFrames: 0}, emotion: {skipFrames: 0}},
 hand: {skipFrames: 0}
 };
 const now = () => {
@@ -90938,7 +90961,6 @@ class Human {
 constructor() {
 this.tf = tf;
 this.version = app.version;
-this.defaults = defaults;
 this.config = defaults;
 this.fx = null;
 this.state = "idle";
@@ -91001,7 +91023,7 @@ class Human {
 this.state = "load";
 const timeStamp = now();
 if (userConfig)
-this.config = mergeDeep(defaults, userConfig);
+this.config = mergeDeep(this.config, userConfig);
 if (this.firstRun) {
 this.checkBackend(true);
 this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);
@@ -91152,7 +91174,7 @@ class Human {
 async detect(input, userConfig = {}) {
 this.state = "config";
 let timeStamp;
-this.config = mergeDeep(defaults, userConfig);
+this.config = mergeDeep(this.config, userConfig);
 if (!this.config.videoOptimized)
 this.config = mergeDeep(this.config, override);
 this.state = "check";
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
 "inputs": {
 "config.js": {
-"bytes": 7319,
+"bytes": 7664,
 "imports": []
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@@ -152,7 +152,7 @@
 "bytes": 3389,
 "imports": []
 },
-"src/age/ssrnet.js": {
+"src/age/age.js": {
 "bytes": 1766,
 "imports": [
 {
@@ -288,7 +288,7 @@
 ]
 },
 "src/face/blazeface.js": {
-"bytes": 6991,
+"bytes": 7096,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -359,8 +359,8 @@
 "bytes": 19592,
 "imports": []
 },
-"src/gender/ssrnet.js": {
+"src/gender/gender.js": {
-"bytes": 2015,
+"bytes": 3042,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -433,7 +433,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 14051,
+"bytes": 14049,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -442,10 +442,10 @@
 "path": "src/face/facemesh.js"
 },
 {
-"path": "src/age/ssrnet.js"
+"path": "src/age/age.js"
 },
 {
-"path": "src/gender/ssrnet.js"
+"path": "src/gender/gender.js"
 },
 {
 "path": "src/emotion/emotion.js"
@@ -513,7 +513,7 @@
 "dist/human.esm.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 5607938
+"bytes": 5609921
 },
 "dist/human.esm.js": {
 "imports": [],
@@ -576,7 +576,7 @@
 "bytesInOutput": 3025
 },
 "src/face/blazeface.js": {
-"bytesInOutput": 7123
+"bytesInOutput": 7010
 },
 "src/face/keypoints.js": {
 "bytesInOutput": 2768
@@ -602,11 +602,11 @@
 "src/profile.js": {
 "bytesInOutput": 1092
 },
-"src/age/ssrnet.js": {
+"src/age/age.js": {
-"bytesInOutput": 1747
+"bytesInOutput": 1744
 },
-"src/gender/ssrnet.js": {
+"src/gender/gender.js": {
-"bytesInOutput": 2007
+"bytesInOutput": 2892
 },
 "src/emotion/emotion.js": {
 "bytesInOutput": 2612
@@ -672,19 +672,19 @@
 "bytesInOutput": 4482
 },
 "config.js": {
-"bytesInOutput": 2230
+"bytesInOutput": 2277
 },
 "package.json": {
 "bytesInOutput": 3533
 },
 "src/human.js": {
-"bytesInOutput": 11852
+"bytesInOutput": 11849
 },
 "src/human.js": {
 "bytesInOutput": 0
 }
 },
-"bytes": 3196136
+"bytes": 3196946
 }
 }
 }
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -0,0 +1,105 @@
{
"format": "graph-model",
"generatedBy": "2.3.1",
"convertedBy": "TensorFlow.js Converter v2.7.0",
"userDefinedMetadata":
{
"signature":
{
"inputs": {"input_1:0":{"name":"input_1:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"64"},{"size":"64"},{"size":"1"}]}}},
"outputs": {"Identity:0":{"name":"Identity:0","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"-1"},{"size":"2"}]}}}
}
},
"modelTopology":
{
"node":
[
{"name":"unknown_60","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"64"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_66","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"128"},{"size":"1"}]}}}}},
{"name":"unknown_43","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}}}},
{"name":"unknown_49","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"64"},{"size":"1"}]}}}}},
{"name":"unknown_26","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"16"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_32","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"32"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_9","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"8"},{"size":"1"}]}}}}},
{"name":"unknown_15","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"16"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_77","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"128"},{"size":"2"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"unknown_78","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"2"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}},
{"name":"input_1","op":"Placeholder","attr":{"dtype":{"type":"DT_FLOAT"},"shape":{"shape":{"dim":[{"size":"-1"},{"size":"64"},{"size":"64"},{"size":"1"}]}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"1"},{"size":"8"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"8"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"8"},{"size":"8"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"8"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"8"},{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"8"},{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"16"},{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"16"},{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"16"},{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"128"},{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/activation_1/Relu","op":"_FusedConv2D","input":["input_1","StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"epsilon":{"f":0},"T":{"type":"DT_FLOAT"},"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"padding":{"s":"VkFMSUQ="},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/activation_2/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/activation_1/Relu","StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_2/Relu","unknown_9"],"attr":{"explicit_paddings":{"list":{}},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_3/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/activation_2/Relu","StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"T":{"type":"DT_FLOAT"},"num_args":{"i":"1"},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"U0FNRQ=="},"use_cudnn_on_gpu":{"b":true}}},
{"name":"StatefulPartitionedCall/model_1/activation_3/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"VkFMSUQ="},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_3/Relu","unknown_15"],"attr":{"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_5/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"VkFMSUQ="},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"num_args":{"i":"1"},"epsilon":{"f":0},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/max_pooling2d_1/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_5/FusedBatchNormV3"],"attr":{"ksize":{"list":{"i":["1","3","3","1"]}},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","2","2","1"]}},"T":{"type":"DT_FLOAT"},"padding":{"s":"U0FNRQ=="}}},
{"name":"StatefulPartitionedCall/model_1/add_1/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_1/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_3/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/add_1/add","unknown_26"],"attr":{"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_6/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_1/add","StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","2","2","1"]}},"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}}}},
{"name":"StatefulPartitionedCall/model_1/activation_4/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"padding":{"s":"VkFMSUQ="},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"epsilon":{"f":0},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_4/Relu","unknown_32"],"attr":{"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_8/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}}}},
{"name":"StatefulPartitionedCall/model_1/max_pooling2d_2/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_8/FusedBatchNormV3"],"attr":{"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"ksize":{"list":{"i":["1","3","3","1"]}},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","2","2","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/add_2/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_2/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_6/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/add_2/add","unknown_43"],"attr":{"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_9/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_2/add","StatefulPartitionedCall/model_1/conv2d_5/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_5/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","2","2","1"]}},"explicit_paddings":{"list":{}},"num_args":{"i":"1"},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}}}},
{"name":"StatefulPartitionedCall/model_1/activation_5/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"padding":{"s":"VkFMSUQ="},"num_args":{"i":"1"},"epsilon":{"f":0},"strides":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_5/Relu","unknown_49"],"attr":{"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_11/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"VkFMSUQ="},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"epsilon":{"f":0}}},
{"name":"StatefulPartitionedCall/model_1/max_pooling2d_3/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_11/FusedBatchNormV3"],"attr":{"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"ksize":{"list":{"i":["1","3","3","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/add_3/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_3/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_9/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_12/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_3/add","StatefulPartitionedCall/model_1/conv2d_6/Conv2D_weights","StatefulPartitionedCall/model_1/conv2d_6/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","2","2","1"]}},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true},"epsilon":{"f":0},"num_args":{"i":"1"}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/add_3/add","unknown_60"],"attr":{"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/activation_6/Relu","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"VkFMSUQ="},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0}}},
{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["StatefulPartitionedCall/model_1/activation_6/Relu","unknown_66"],"attr":{"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/batch_normalization_14/FusedBatchNormV3","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d/depthwise","StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_weights","StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"epsilon":{"f":0},"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}}}},
{"name":"StatefulPartitionedCall/model_1/max_pooling2d_4/MaxPool","op":"MaxPool","input":["StatefulPartitionedCall/model_1/batch_normalization_14/FusedBatchNormV3"],"attr":{"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"U0FNRQ=="},"ksize":{"list":{"i":["1","3","3","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/add_4/add","op":"AddV2","input":["StatefulPartitionedCall/model_1/max_pooling2d_4/MaxPool","StatefulPartitionedCall/model_1/batch_normalization_12/FusedBatchNormV3"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/conv2d_7/BiasAdd","op":"_FusedConv2D","input":["StatefulPartitionedCall/model_1/add_4/add","unknown_77","unknown_78"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"T":{"type":"DT_FLOAT"},"padding":{"s":"U0FNRQ=="},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"epsilon":{"f":0},"dilations":{"list":{"i":["1","1","1","1"]}}}},
{"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean","op":"Mean","input":["StatefulPartitionedCall/model_1/conv2d_7/BiasAdd","StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices"],"attr":{"Tidx":{"type":"DT_INT32"},"keep_dims":{"b":false},"T":{"type":"DT_FLOAT"}}},
{"name":"StatefulPartitionedCall/model_1/predictions/Softmax","op":"Softmax","input":["StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean"],"attr":{"T":{"type":"DT_FLOAT"}}},
{"name":"Identity","op":"Identity","input":["StatefulPartitionedCall/model_1/predictions/Softmax"],"attr":{"T":{"type":"DT_FLOAT"}}}
],
"library": {},
"versions":
{
"producer": 440
}
},
"weightsManifest":
[
{
"paths": ["gender.bin"],
"weights": [{"name":"unknown_60","shape":[3,3,64,1],"dtype":"float32"},{"name":"unknown_66","shape":[3,3,128,1],"dtype":"float32"},{"name":"unknown_43","shape":[3,3,32,1],"dtype":"float32"},{"name":"unknown_49","shape":[3,3,64,1],"dtype":"float32"},{"name":"unknown_26","shape":[3,3,16,1],"dtype":"float32"},{"name":"unknown_32","shape":[3,3,32,1],"dtype":"float32"},{"name":"unknown_9","shape":[3,3,8,1],"dtype":"float32"},{"name":"unknown_15","shape":[3,3,16,1],"dtype":"float32"},{"name":"unknown_77","shape":[3,3,128,2],"dtype":"float32"},{"name":"unknown_78","shape":[2],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/global_average_pooling2d_1/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_weights","shape":[3,3,1,8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_1/Conv2D_bn_offset","shape":[8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_weights","shape":[3,3,8,8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_6/Conv2D_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_2/Conv2D_bn_offset","shape":[8],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_weights","shape":[1,1,8,16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_weights","shape":[1,1,64,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_3/Conv2D_bn_offset","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_weights","shape":[1,1,8,16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_1/separable_conv2d_bn_offset","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_7/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_weights","shape":[1,1,16,16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_2/separable_conv2d_bn_offset","shape":[16],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_weights","shape":[1,1,16,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_4/Conv2D_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_weights","shape":[1,1,16,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_3/separable_conv2d_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_weights","shape":[1,1,128,128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_weights","shape":[1,1,32,32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_4/separable_conv2d_bn_offset","shape":[32],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_weights","shape":[1,1,32,64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_8/separable_conv2d_bn_offset","shape":[128],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/conv2d_5/Conv2D_bn_offset","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_weights","shape":[1,1,32,64],"dtype":"float32"},{"name":"Statef
ulPartitionedCall/model_1/separable_conv2d_5/separable_conv2d_bn_offset","shape":[64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_weights","shape":[1,1,64,64],"dtype":"float32"},{"name":"StatefulPartitionedCall/model_1/separable_conv2d_6/separable_conv2d_bn_offset","shape":[64],"dtype":"float32"}]
}
]
}
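Note: the graph above is a compact Xception-style classifier: fused Conv2D+BiasAdd+Relu stems, depthwise-separable blocks with strided 1x1 shortcuts and max-pooling, a final 3x3 convolution down to two channels, global average pooling and a two-class softmax. The conv2d_1 weights of shape [3,3,1,8] show the input is single-channel. A minimal sketch of driving this graph model directly with TFJS follows; the file:// path and the 0..1 grayscale input are assumptions for illustration, not part of the commit.

const tf = require('@tensorflow/tfjs-node');

// classify a single 64x64 face crop with the gender graph model above
// assumes `gray` is a [64, 64, 1] tensor with values in 0..1
async function classifyGender(gray) {
  const model = await tf.loadGraphModel('file://models/gender.json'); // hypothetical local path
  const input = gray.sub(0.5).mul(2).expandDims(0); // rescale to -1..1 and add batch dimension
  const scores = model.predict(input); // softmax over [female, male]
  const [female, male] = scores.dataSync();
  tf.dispose([input, scores]);
  return { gender: female > male ? 'female' : 'male', confidence: Math.abs(female - male) };
}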
@ -69,22 +69,20 @@ class BlazeFaceModel {
     this.blazeFaceModel = model;
     this.width = config.detector.inputSize;
     this.height = config.detector.inputSize;
-    this.maxFaces = config.detector.maxFaces;
     this.anchorsData = generateAnchors(config.detector.inputSize);
     this.anchors = tf.tensor2d(this.anchorsData);
     this.inputSize = tf.tensor1d([this.width, this.height]);
-    this.iouThreshold = config.detector.iouThreshold;
+    this.config = config;
     this.scaleFaces = 0.8;
-    this.scoreThreshold = config.detector.scoreThreshold;
   }
 
-  // toto blazeface leaks two tensors per run
   async getBoundingBoxes(inputImage) {
     // sanity check on input
     if ((!inputImage) || (inputImage.isDisposedInternal) || (inputImage.shape.length !== 4) || (inputImage.shape[1] < 1) || (inputImage.shape[2] < 1)) return null;
     const [detectedOutputs, boxes, scores] = tf.tidy(() => {
       const resizedImage = inputImage.resizeBilinear([this.width, this.height]);
-      const normalizedImage = tf.mul(tf.sub(resizedImage.div(255), 0.5), 2);
+      // const normalizedImage = tf.mul(tf.sub(resizedImage.div(255), 0.5), 2);
+      const normalizedImage = tf.sub(resizedImage.div(127.5), 1);
       const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);
       let prediction;
       // are we using tfhub or pinto converted model?
@ -99,10 +97,12 @@ class BlazeFaceModel {
       }
       const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);
       const logits = tf.slice(prediction, [0, 0], [-1, 1]);
-      const scoresOut = tf.sigmoid(logits).squeeze();
+      // const scoresOut = tf.sigmoid(logits).squeeze();
+      const scoresOut = logits.squeeze();
       return [prediction, decodedBounds, scoresOut];
     });
-    const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.maxFaces, this.iouThreshold, this.scoreThreshold);
+    // activation ('elu'|'hardSigmoid'|'linear'|'relu'|'relu6'| 'selu'|'sigmoid'|'softmax'|'softplus'|'softsign'|'tanh')
+    const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);
     const boxIndices = boxIndicesTensor.arraySync();
     boxIndicesTensor.dispose();
     const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));
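Two changes in blazeface.js are worth spelling out. Input normalization is rewritten from mul(sub(x/255, 0.5), 2) to sub(x/127.5, 1), which is algebraically the same mapping of 0..255 pixels onto -1..1 with one fewer tensor op. And since the class now keeps the whole config object, maxFaces, iouThreshold and scoreThreshold are read at call time; with the sigmoid removed, scoresOut is a raw logit rather than a probability, which lines up with scoreThreshold dropping from 0.8 to 0.1 in config.js. A quick illustrative check of the normalization equivalence:

const tf = require('@tensorflow/tfjs');

const x = tf.tensor1d([0, 127.5, 255]);
tf.mul(tf.sub(x.div(255), 0.5), 2).print(); // [-1, 0, 1] -- old form
tf.sub(x.div(127.5), 1).print();            // [-1, 0, 1] -- new form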
@ -4,17 +4,20 @@ const profile = require('../profile.js');
 const models = {};
 let last = { gender: '' };
 let frame = Number.MAX_SAFE_INTEGER;
+let alternative = false;
 
 // tuning values
 const zoom = [0, 0]; // 0..1 meaning 0%..100%
+const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale
 
 async function load(config) {
   if (!models.gender) models.gender = await tf.loadGraphModel(config.face.gender.modelPath);
+  alternative = models.gender.inputs[0].shape[3] === 1;
   return models.gender;
 }
 
 async function predict(image, config) {
-  if ((frame < config.face.age.skipFrames) && last.gender !== '') {
+  if ((frame < config.face.gender.skipFrames) && last.gender !== '') {
     frame += 1;
     return last;
   }
@ -26,9 +29,21 @@ async function predict(image, config) {
     (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],
     (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],
   ]];
-  const resize = tf.image.cropAndResize(image, box, [0], [config.face.age.inputSize, config.face.age.inputSize]);
+  const resize = tf.image.cropAndResize(image, box, [0], [config.face.gender.inputSize, config.face.gender.inputSize]);
+  let enhance;
+  if (alternative) {
+    enhance = tf.tidy(() => {
+      const [red, green, blue] = tf.split(resize, 3, 3);
+      const redNorm = tf.mul(red, rgb[0]);
+      const greenNorm = tf.mul(green, rgb[1]);
+      const blueNorm = tf.mul(blue, rgb[2]);
+      const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
+      return grayscale.sub(0.5).mul(2);
+    });
+  } else {
+    enhance = tf.mul(resize, [255.0]);
+  }
   // const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);
-  const enhance = tf.mul(resize, [255.0]);
   tf.dispose(resize);
 
   let genderT;
@ -46,10 +61,20 @@ async function predict(image, config) {
 
   if (genderT) {
     const data = genderT.dataSync();
-    const confidence = Math.trunc(Math.abs(1.9 * 100 * (data[0] - 0.5))) / 100;
-    if (confidence > config.face.gender.minConfidence) {
-      obj.gender = data[0] <= 0.5 ? 'female' : 'male';
-      obj.confidence = confidence;
+    if (alternative) {
+      // returns two values 0..1, bigger one is prediction
+      const confidence = Math.trunc(100 * Math.abs(data[0] - data[1])) / 100;
+      if (confidence > config.face.gender.minConfidence) {
+        obj.gender = data[0] > data[1] ? 'female' : 'male';
+        obj.confidence = confidence;
+      }
+    } else {
+      // returns one value 0..1, .5 is prediction threshold
+      const confidence = Math.trunc(200 * Math.abs((data[0] - 0.5))) / 100;
+      if (confidence > config.face.gender.minConfidence) {
+        obj.gender = data[0] <= 0.5 ? 'female' : 'male';
+        obj.confidence = confidence;
+      }
     }
   }
   genderT.dispose();
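The new alternative path in gender.js handles the single-channel model detected at load time: the rgb constants are the standard ITU-R BT.601 luma weights, so the branch converts the RGB crop to grayscale before rescaling to -1..1, and confidence becomes the gap between the two softmax outputs instead of the distance from the 0.5 decision boundary. The conversion as a standalone helper, illustrative only:

const tf = require('@tensorflow/tfjs');

// BT.601 grayscale conversion as used in the alternative branch
// input: [batch, height, width, 3] tensor with values in 0..1
function toGrayscale(rgbImage) {
  return tf.tidy(() => {
    const [r, g, b] = tf.split(rgbImage, 3, 3); // split along the channel axis
    return tf.addN([r.mul(0.2989), g.mul(0.5870), b.mul(0.1140)]); // -> [batch, height, width, 1]
  });
}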
12
src/human.js
@ -1,7 +1,7 @@
 const tf = require('@tensorflow/tfjs');
 const facemesh = require('./face/facemesh.js');
-const age = require('./age/ssrnet.js');
-const gender = require('./gender/ssrnet.js');
+const age = require('./age/age.js');
+const gender = require('./gender/gender.js');
 const emotion = require('./emotion/emotion.js');
 const posenet = require('./body/posenet.js');
 const handpose = require('./hand/handpose.js');
@ -13,8 +13,7 @@ const app = require('../package.json');
 
 // static config override for non-video detection
 const override = {
-  face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, emotion: { skipFrames: 0 } },
-  hand: { skipFrames: 0 },
+  face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, gender: { skipFrames: 0 }, emotion: { skipFrames: 0 } }, hand: { skipFrames: 0 },
 };
 
 // helper function: gets elapsed time on both browser and nodejs
@ -46,7 +45,6 @@ class Human {
   constructor() {
     this.tf = tf;
     this.version = app.version;
-    this.defaults = defaults;
     this.config = defaults;
     this.fx = null;
     this.state = 'idle';
@ -114,7 +112,7 @@ class Human {
   async load(userConfig) {
     this.state = 'load';
     const timeStamp = now();
-    if (userConfig) this.config = mergeDeep(defaults, userConfig);
+    if (userConfig) this.config = mergeDeep(this.config, userConfig);
 
     if (this.firstRun) {
       this.checkBackend(true);
@ -300,7 +298,7 @@ class Human {
     let timeStamp;
 
     // update configuration
-    this.config = mergeDeep(defaults, userConfig);
+    this.config = mergeDeep(this.config, userConfig);
     if (!this.config.videoOptimized) this.config = mergeDeep(this.config, override);
 
     // sanity checks
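The mergeDeep change is small but behavioral: merging userConfig into this.config instead of into defaults means per-call overrides accumulate on top of the current configuration rather than resetting everything else back to defaults on every call. A hypothetical minimal deep-merge with the semantics this relies on (the repo's actual mergeDeep helper is defined elsewhere and may differ):

// illustrative only -- later objects win, nested objects merge recursively
function mergeDeep(...objects) {
  return objects.reduce((prev, obj) => {
    for (const key of Object.keys(obj || {})) {
      const pVal = prev[key];
      const oVal = obj[key];
      if (pVal && oVal && typeof pVal === 'object' && typeof oVal === 'object' && !Array.isArray(pVal) && !Array.isArray(oVal)) {
        prev[key] = mergeDeep(pVal, oVal); // recurse into nested objects
      } else {
        prev[key] = oVal; // primitives and arrays are replaced wholesale
      }
    }
    return prev;
  }, {});
}

// usage: overrides now stick across calls
// this.config = mergeDeep(this.config, { face: { gender: { minConfidence: 0.4 } } });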