mirror of https://github.com/vladmandic/human

commit db85fdb895 (parent b65c824d88): cache invalidation improvements
config.js (14 changed lines)

@@ -56,9 +56,9 @@ export default {
 skipFrames: 15, // how many frames to go without re-running the face bounding box detector, only used for video inputs
 // if model is running at 25 FPS, we can re-use existing bounding box for updated face mesh analysis
 // as face probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
-minConfidence: 0.3, // threshold for discarding a prediction
+minConfidence: 0.5, // threshold for discarding a prediction
 iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression
-scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score in non-maximum suppression
+scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on score in non-maximum suppression
 },
 mesh: {
 enabled: true,
@@ -80,13 +80,13 @@ export default {
 },
 gender: {
 enabled: true,
-minConfidence: 0.3, // threshold for discarding a prediction
+minConfidence: 0.5, // threshold for discarding a prediction
 modelPath: '../models/ssrnet-gender-imdb.json',
 },
 emotion: {
 enabled: true,
 inputSize: 64, // fixed value
-minConfidence: 0.3, // threshold for discarding a prediction
+minConfidence: 0.5, // threshold for discarding a prediction
 skipFrames: 15, // how many frames to go without re-running the detector
 modelPath: '../models/emotion-large.json', // can be 'mini', 'large'
 },
@@ -97,7 +97,7 @@ export default {
 inputResolution: 257, // fixed value
 outputStride: 16, // fixed value
 maxDetections: 10, // maximum number of people detected in the input, should be set to the minimum number for performance
-scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score in non-maximum suppression
+scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on score in non-maximum suppression
 nmsRadius: 20, // radius for deciding points are too close in non-maximum suppression
 },
 hand: {
@@ -106,9 +106,9 @@ export default {
 skipFrames: 15, // how many frames to go without re-running the hand bounding box detector, only used for video inputs
 // if model is running at 25 FPS, we can re-use existing bounding box for updated hand skeleton analysis
 // as the hand probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
-minConfidence: 0.3, // threshold for discarding a prediction
+minConfidence: 0.5, // threshold for discarding a prediction
 iouThreshold: 0.3, // threshold for deciding whether boxes overlap too much in non-maximum suppression
-scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score in non-maximum suppression
+scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on score in non-maximum suppression
 enlargeFactor: 1.65, // empiric tuning as skeleton prediction prefers hand box with some whitespace
 maxHands: 10, // maximum number of hands detected in the input, should be set to the minimum number for performance
 detector: {
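Note: the skipFrames settings above implement a simple frame-skip cache for video inputs: the heavy bounding-box detector runs only every N frames and the cached box is reused in between. A minimal sketch of the idea, with illustrative names (detectBoxes is a hypothetical stand-in, not the library's actual internals):

// minimal frame-skip cache sketch, assuming an expensive detectBoxes(input)
let framesSinceDetect = 0;
let cachedBoxes = null;

async function getBoxes(input, skipFrames) {
  // reuse cached boxes while they are fresh enough
  if (cachedBoxes && framesSinceDetect < skipFrames) {
    framesSinceDetect += 1;
    return cachedBoxes;
  }
  // otherwise run the expensive detector and reset the counter
  cachedBoxes = await detectBoxes(input);
  framesSinceDetect = 0;
  return cachedBoxes;
}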

demo/browser.js (155 changed lines)
@@ -16,7 +16,7 @@ const ui = {
 busy: false,
 facing: true,
 useWorker: false,
-worker: 'worker.js',
+worker: 'demo/worker.js',
 samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'],
 drawBoxes: true,
 drawPoints: false,
@@ -29,45 +29,6 @@ const ui = {
 modelsWarmup: true,
 };

-// configuration overrides
-const config = {
-backend: 'webgl',
-profile: false,
-deallocate: false,
-wasm: { path: '../assets' },
-async: true,
-filter: {
-enabled: true,
-width: 0,
-height: 0,
-brightness: 0,
-contrast: 0,
-sharpness: 0,
-blur: 0,
-saturation: 0,
-hue: 0,
-negative: false,
-sepia: false,
-vintage: false,
-kodachrome: false,
-technicolor: false,
-polaroid: false,
-pixelate: 0 },
-videoOptimized: true,
-face: {
-enabled: true,
-detector: { maxFaces: 10, skipFrames: 15, minConfidence: 0.3, iouThreshold: 0.3, scoreThreshold: 0.5 },
-mesh: { enabled: true },
-iris: { enabled: true },
-age: { enabled: true, skipFrames: 15 },
-gender: { enabled: true },
-emotion: { enabled: true, minConfidence: 0.3, useGrayscale: true },
-},
-body: { enabled: true, maxDetections: 10, scoreThreshold: 0.5, nmsRadius: 20 },
-hand: { enabled: true, skipFrames: 15, minConfidence: 0.3, iouThreshold: 0.3, scoreThreshold: 0.5 },
-gesture: { enabled: true },
-};
-
 // global variables
 let menu;
 let menuFX;
@@ -218,7 +179,7 @@ function webWorker(input, image, canvas) {
 });
 }
 // pass image data as arraybuffer to worker by reference to avoid copy
-worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
+worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height }, [image.data.buffer]);
 }

 // main processing function when input is webcam, can use direct invocation or web worker
@@ -242,10 +203,10 @@ function runHumanDetect(input, canvas) {
 // perform detection in worker
 webWorker(input, data, canvas);
 } else {
-human.detect(input, config).then((result) => {
+human.detect(input).then((result) => {
 if (result.error) log(result.error);
 else drawResults(input, result, canvas);
-if (config.profile) log('profile data:', human.profile());
+if (human.config.profile) log('profile data:', human.profile());
 });
 }
 }
@@ -260,9 +221,9 @@ async function processImage(input) {
 const canvas = document.getElementById('canvas');
 image.width = image.naturalWidth;
 image.height = image.naturalHeight;
-canvas.width = config.filter.width && config.filter.width > 0 ? config.filter.width : image.naturalWidth;
-canvas.height = config.filter.height && config.filter.height > 0 ? config.filter.height : image.naturalHeight;
-const result = await human.detect(image, config);
+canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image.naturalWidth;
+canvas.height = human.config.filter.height && human.config.filter.height > 0 ? human.config.filter.height : image.naturalHeight;
+const result = await human.detect(image);
 drawResults(image, result, canvas);
 const thumb = document.createElement('canvas');
 thumb.className = 'thumbnail';
@@ -280,7 +241,7 @@ async function processImage(input) {

 // just initialize everything and call main function
 async function detectVideo() {
-config.videoOptimized = true;
+human.config.videoOptimized = true;
 document.getElementById('samples-container').style.display = 'none';
 document.getElementById('canvas').style.display = 'block';
 const video = document.getElementById('video');
@@ -304,7 +265,7 @@ async function detectVideo() {
 // just initialize everything and call main function
 async function detectSampleImages() {
 document.getElementById('play').style.display = 'none';
-config.videoOptimized = false;
+human.config.videoOptimized = false;
 const size = Math.trunc(ui.columns * 25600 / window.innerWidth);
 ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);
 ui.baseLineHeight = ui.baseLineHeightProto * ui.columns;
@@ -324,49 +285,49 @@ function setupMenu() {
 document.getElementById('play').addEventListener('click', () => btn.click());

 menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
-menu.addList('Backend', ['cpu', 'webgl', 'wasm', 'webgpu'], config.backend, (val) => config.backend = val);
-menu.addBool('Async Operations', config, 'async');
-menu.addBool('Enable Profiler', config, 'profile');
-menu.addBool('Memory Shield', config, 'deallocate');
+menu.addList('Backend', ['cpu', 'webgl', 'wasm', 'webgpu'], human.config.backend, (val) => human.config.backend = val);
+menu.addBool('Async Operations', human.config, 'async');
+menu.addBool('Enable Profiler', human.config, 'profile');
+menu.addBool('Memory Shield', human.config, 'deallocate');
 menu.addBool('Use Web Worker', ui, 'useWorker');
 menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
 menu.addLabel('Enabled Models');
-menu.addBool('Face Detect', config.face, 'enabled');
-menu.addBool('Face Mesh', config.face.mesh, 'enabled');
-menu.addBool('Face Iris', config.face.iris, 'enabled');
-menu.addBool('Face Age', config.face.age, 'enabled');
-menu.addBool('Face Gender', config.face.gender, 'enabled');
-menu.addBool('Face Emotion', config.face.emotion, 'enabled');
-menu.addBool('Body Pose', config.body, 'enabled');
-menu.addBool('Hand Pose', config.hand, 'enabled');
-menu.addBool('Gesture Analysis', config.gesture, 'enabled');
+menu.addBool('Face Detect', human.config.face, 'enabled');
+menu.addBool('Face Mesh', human.config.face.mesh, 'enabled');
+menu.addBool('Face Iris', human.config.face.iris, 'enabled');
+menu.addBool('Face Age', human.config.face.age, 'enabled');
+menu.addBool('Face Gender', human.config.face.gender, 'enabled');
+menu.addBool('Face Emotion', human.config.face.emotion, 'enabled');
+menu.addBool('Body Pose', human.config.body, 'enabled');
+menu.addBool('Hand Pose', human.config.hand, 'enabled');
+menu.addBool('Gesture Analysis', human.config.gesture, 'enabled');

 menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
 menu.addLabel('Model Parameters');
-menu.addRange('Max Objects', config.face.detector, 'maxFaces', 1, 50, 1, (val) => {
-config.face.detector.maxFaces = parseInt(val);
-config.body.maxDetections = parseInt(val);
-config.hand.maxHands = parseInt(val);
+menu.addRange('Max Objects', human.config.face.detector, 'maxFaces', 1, 50, 1, (val) => {
+human.config.face.detector.maxFaces = parseInt(val);
+human.config.body.maxDetections = parseInt(val);
+human.config.hand.maxHands = parseInt(val);
 });
-menu.addRange('Skip Frames', config.face.detector, 'skipFrames', 0, 50, 1, (val) => {
-config.face.detector.skipFrames = parseInt(val);
-config.face.emotion.skipFrames = parseInt(val);
-config.face.age.skipFrames = parseInt(val);
-config.hand.skipFrames = parseInt(val);
+menu.addRange('Skip Frames', human.config.face.detector, 'skipFrames', 0, 50, 1, (val) => {
+human.config.face.detector.skipFrames = parseInt(val);
+human.config.face.emotion.skipFrames = parseInt(val);
+human.config.face.age.skipFrames = parseInt(val);
+human.config.hand.skipFrames = parseInt(val);
 });
-menu.addRange('Min Confidence', config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
-config.face.detector.minConfidence = parseFloat(val);
-config.face.emotion.minConfidence = parseFloat(val);
-config.hand.minConfidence = parseFloat(val);
+menu.addRange('Min Confidence', human.config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
+human.config.face.detector.minConfidence = parseFloat(val);
+human.config.face.emotion.minConfidence = parseFloat(val);
+human.config.hand.minConfidence = parseFloat(val);
 });
-menu.addRange('Score Threshold', config.face.detector, 'scoreThreshold', 0.1, 1.0, 0.05, (val) => {
-config.face.detector.scoreThreshold = parseFloat(val);
-config.hand.scoreThreshold = parseFloat(val);
-config.body.scoreThreshold = parseFloat(val);
+menu.addRange('Score Threshold', human.config.face.detector, 'scoreThreshold', 0.1, 1.0, 0.05, (val) => {
+human.config.face.detector.scoreThreshold = parseFloat(val);
+human.config.hand.scoreThreshold = parseFloat(val);
+human.config.body.scoreThreshold = parseFloat(val);
 });
-menu.addRange('IOU Threshold', config.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {
-config.face.detector.iouThreshold = parseFloat(val);
-config.hand.iouThreshold = parseFloat(val);
+menu.addRange('IOU Threshold', human.config.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {
+human.config.face.detector.iouThreshold = parseFloat(val);
+human.config.hand.iouThreshold = parseFloat(val);
 });

 menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
@@ -382,22 +343,22 @@ function setupMenu() {
 menuFX.addBool('Fill Polygons', ui, 'fillPolygons');
 menuFX.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
 menuFX.addLabel('Image Processing');
-menuFX.addBool('Enabled', config.filter, 'enabled');
-menuFX.addRange('Image width', config.filter, 'width', 0, 3840, 10, (val) => config.filter.width = parseInt(val));
-menuFX.addRange('Image height', config.filter, 'height', 0, 2160, 10, (val) => config.filter.height = parseInt(val));
-menuFX.addRange('Brightness', config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => config.filter.brightness = parseFloat(val));
-menuFX.addRange('Contrast', config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => config.filter.contrast = parseFloat(val));
-menuFX.addRange('Sharpness', config.filter, 'sharpness', 0, 1.0, 0.05, (val) => config.filter.sharpness = parseFloat(val));
-menuFX.addRange('Blur', config.filter, 'blur', 0, 20, 1, (val) => config.filter.blur = parseInt(val));
-menuFX.addRange('Saturation', config.filter, 'saturation', -1.0, 1.0, 0.05, (val) => config.filter.saturation = parseFloat(val));
-menuFX.addRange('Hue', config.filter, 'hue', 0, 360, 5, (val) => config.filter.hue = parseInt(val));
-menuFX.addRange('Pixelate', config.filter, 'pixelate', 0, 32, 1, (val) => config.filter.pixelate = parseInt(val));
-menuFX.addBool('Negative', config.filter, 'negative');
-menuFX.addBool('Sepia', config.filter, 'sepia');
-menuFX.addBool('Vintage', config.filter, 'vintage');
-menuFX.addBool('Kodachrome', config.filter, 'kodachrome');
-menuFX.addBool('Technicolor', config.filter, 'technicolor');
-menuFX.addBool('Polaroid', config.filter, 'polaroid');
+menuFX.addBool('Enabled', human.config.filter, 'enabled');
+menuFX.addRange('Image width', human.config.filter, 'width', 0, 3840, 10, (val) => human.config.filter.width = parseInt(val));
+menuFX.addRange('Image height', human.config.filter, 'height', 0, 2160, 10, (val) => human.config.filter.height = parseInt(val));
+menuFX.addRange('Brightness', human.config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => human.config.filter.brightness = parseFloat(val));
+menuFX.addRange('Contrast', human.config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => human.config.filter.contrast = parseFloat(val));
+menuFX.addRange('Sharpness', human.config.filter, 'sharpness', 0, 1.0, 0.05, (val) => human.config.filter.sharpness = parseFloat(val));
+menuFX.addRange('Blur', human.config.filter, 'blur', 0, 20, 1, (val) => human.config.filter.blur = parseInt(val));
+menuFX.addRange('Saturation', human.config.filter, 'saturation', -1.0, 1.0, 0.05, (val) => human.config.filter.saturation = parseFloat(val));
+menuFX.addRange('Hue', human.config.filter, 'hue', 0, 360, 5, (val) => human.config.filter.hue = parseInt(val));
+menuFX.addRange('Pixelate', human.config.filter, 'pixelate', 0, 32, 1, (val) => human.config.filter.pixelate = parseInt(val));
+menuFX.addBool('Negative', human.config.filter, 'negative');
+menuFX.addBool('Sepia', human.config.filter, 'sepia');
+menuFX.addBool('Vintage', human.config.filter, 'vintage');
+menuFX.addBool('Kodachrome', human.config.filter, 'kodachrome');
+menuFX.addBool('Technicolor', human.config.filter, 'technicolor');
+menuFX.addBool('Polaroid', human.config.filter, 'polaroid');
 }

 async function main() {
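Note: the demo no longer keeps its own config object or passes one into every detect() call; it mutates the library's live human.config instead. A sketch of the new pattern as the calls in this diff suggest (import path and default-export name assumed from the demo's setup, not verified here):

// sketch: configure once via the live config object, then call detect()
// without a config argument; detect() reads human.config internally
import human from '../dist/human.esm.js';

async function run(input) {
  human.config.backend = 'webgl';
  human.config.face.detector.minConfidence = 0.5; // matches the new default
  human.config.videoOptimized = true;
  const result = await human.detect(input);
  return result;
}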

demo/draw.js

@@ -27,6 +27,7 @@ async function drawFace(result, canvas, ui, triangulation) {
 }
 // silly hack since fillText does not support new line
 const labels = [];
+// labels.push(`${Math.trunc(100 * face.confidence)}% face`);
 if (face.genderConfidence) labels.push(`${Math.trunc(100 * face.genderConfidence)}% ${face.gender || ''}`);
 if (face.age) labels.push(`age: ${face.age || ''}`);
 if (face.iris) labels.push(`iris: ${face.iris}`);

demo/worker.js

@@ -14,10 +14,9 @@ onmessage = async (msg) => {
 busy = true;
 // worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, config }, [image.data.buffer]);
 const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
-config = msg.data.config;
 let result = {};
 try {
-result = await human.detect(image, config);
+result = await human.detect(image);
 } catch (err) {
 result.error = err.message;
 log('worker thread error:', err.message);
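Note: the "pass image data as arraybuffer to worker by reference to avoid copy" comment refers to the transferable-objects form of postMessage: listing the buffer in the second argument moves ownership to the worker instead of structured-cloning it. A minimal standalone sketch of both sides (variable names illustrative; the demo's actual wiring differs slightly):

// main thread: send pixels as a transferable ArrayBuffer; after the call
// the buffer is detached (byteLength 0) on this thread, i.e. moved not copied
const worker = new Worker('demo/worker.js');
const ctx = canvas.getContext('2d');
const image = ctx.getImageData(0, 0, canvas.width, canvas.height);
worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height }, [image.data.buffer]);

// worker thread: rebuild ImageData from the transferred buffer and detect
onmessage = async (msg) => {
  const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
  const result = await human.detect(image); // human.config set at worker init
  postMessage({ result });
};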

dev-server.js

@@ -25,7 +25,7 @@ const options = {
 key: fs.readFileSync('/home/vlado/dev/piproxy/cert/private.pem'),
 cert: fs.readFileSync('/home/vlado/dev/piproxy/cert/fullchain.pem'),
 root: '.',
-default: 'index.html',
+default: 'demo/index.html',
 port: 8000,
 monitor: ['package.json', 'config.js', 'demo', 'src'],
 };
@@ -114,7 +114,7 @@ function content(url) {
 obj.stat = fs.statSync(obj.file);
 // should really use streams here instead of reading entire content in-memory, but this is micro-http2 not intended to serve huge files
 if (obj.stat.isFile()) obj.ok = true;
-if (obj.stat.isDirectory()) {
+if (!obj.ok && obj.stat.isDirectory()) {
 obj.file = path.join(obj.file, options.default);
 obj = content(obj.file);
 }
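Note: the added !obj.ok guard makes the directory fallback run only when the path has not already resolved to a file; a directory URL then resolves recursively to its default document (now 'demo/index.html'). A simplified sketch of that resolution flow, not the verbatim server code:

// simplified view of content() resolution in dev-server.js:
// a path that is already a file wins; a directory falls back to defaultDoc
const fs = require('fs');
const path = require('path');

function resolve(file, defaultDoc) {
  const stat = fs.statSync(file);
  if (stat.isFile()) return file; // serve this file directly
  if (stat.isDirectory()) return resolve(path.join(file, defaultDoc), defaultDoc);
  return null; // neither file nor directory: not servable
}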

File diff suppressed because one or more lines are too long

dist/demo-browser-index.json

@@ -1,7 +1,7 @@
 {
 "inputs": {
 "demo/browser.js": {
-"bytes": 18066,
+"bytes": 17412,
 "imports": [
 {
 "path": "dist/human.esm.js"
@@ -15,7 +15,7 @@
 ]
 },
 "demo/draw.js": {
-"bytes": 7561,
+"bytes": 7627,
 "imports": []
 },
 "demo/menu.js": {
@@ -23,7 +23,7 @@
 "imports": []
 },
 "dist/human.esm.js": {
-"bytes": 1277557,
+"bytes": 3196136,
 "imports": []
 }
 },
@@ -31,28 +31,25 @@
 "dist/demo-browser-index.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 5529553
+"bytes": 5557260
 },
 "dist/demo-browser-index.js": {
 "imports": [],
 "inputs": {
 "dist/human.esm.js": {
-"bytesInOutput": 1663845
-},
-"dist/human.esm.js": {
-"bytesInOutput": 8716
+"bytesInOutput": 3193996
 },
 "demo/draw.js": {
-"bytesInOutput": 7451
+"bytesInOutput": 7453
 },
 "demo/menu.js": {
 "bytesInOutput": 12359
 },
 "demo/browser.js": {
-"bytesInOutput": 16281
+"bytesInOutput": 15694
 }
 },
-"bytes": 1708774
+"bytes": 3229624
 }
 }
 }

File diff suppressed because one or more lines are too long

dist/human.esm.json

@@ -149,11 +149,11 @@
 ]
 },
 "package.json": {
-"bytes": 3374,
+"bytes": 3389,
 "imports": []
 },
 "src/age/ssrnet.js": {
-"bytes": 1746,
+"bytes": 1766,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -277,7 +277,7 @@
 ]
 },
 "src/emotion/emotion.js": {
-"bytes": 2767,
+"bytes": 2778,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -304,7 +304,7 @@
 ]
 },
 "src/face/facemesh.js": {
-"bytes": 2572,
+"bytes": 2355,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -327,7 +327,7 @@
 ]
 },
 "src/face/facepipeline.js": {
-"bytes": 14368,
+"bytes": 14674,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -360,7 +360,7 @@
 "imports": []
 },
 "src/gender/ssrnet.js": {
-"bytes": 2003,
+"bytes": 2015,
 "imports": [
 {
 "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -513,178 +513,178 @@
 "dist/human.esm.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 5415919
+"bytes": 5607938
 },
 "dist/human.esm.js": {
 "imports": [],
 "inputs": {
 "empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": {
-"bytesInOutput": 13
+"bytesInOutput": 45
 },
 "empty:util": {
-"bytesInOutput": 13
+"bytesInOutput": 42
 },
 "empty:crypto": {
-"bytesInOutput": 13
+"bytesInOutput": 44
 },
 "node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js": {
-"bytesInOutput": 295162
+"bytesInOutput": 1010341
 },
 "node_modules/@tensorflow/tfjs-layers/dist/tf-layers.node.js": {
-"bytesInOutput": 238778
+"bytesInOutput": 514491
 },
 "node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js": {
-"bytesInOutput": 115231
+"bytesInOutput": 258962
 },
 "empty:/home/vlado/dev/human/node_modules/string_decoder/lib/string_decoder.js": {
-"bytesInOutput": 13
+"bytesInOutput": 52
 },
 "node_modules/@tensorflow/tfjs-data/dist/tf-data.node.js": {
-"bytesInOutput": 52364
+"bytesInOutput": 129585
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/alea.js": {
-"bytesInOutput": 990
+"bytesInOutput": 2112
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xor128.js": {
-"bytesInOutput": 755
+"bytesInOutput": 1699
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xorwow.js": {
-"bytesInOutput": 845
+"bytesInOutput": 1897
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xorshift7.js": {
-"bytesInOutput": 1001
+"bytesInOutput": 2307
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/xor4096.js": {
-"bytesInOutput": 1164
+"bytesInOutput": 2742
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/lib/tychei.js": {
-"bytesInOutput": 880
+"bytesInOutput": 1940
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/seedrandom.js": {
-"bytesInOutput": 1614
+"bytesInOutput": 4019
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/node_modules/seedrandom/index.js": {
-"bytesInOutput": 171
+"bytesInOutput": 458
 },
 "node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
-"bytesInOutput": 82510
+"bytesInOutput": 272412
 },
 "node_modules/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js": {
-"bytesInOutput": 261415
+"bytesInOutput": 561667
 },
 "node_modules/@tensorflow/tfjs/dist/tf.node.js": {
-"bytesInOutput": 760
+"bytesInOutput": 3025
 },
 "src/face/blazeface.js": {
-"bytesInOutput": 3093
+"bytesInOutput": 7123
 },
 "src/face/keypoints.js": {
-"bytesInOutput": 1946
+"bytesInOutput": 2768
 },
 "src/face/box.js": {
-"bytesInOutput": 1006
+"bytesInOutput": 2070
 },
 "src/face/util.js": {
-"bytesInOutput": 1190
+"bytesInOutput": 3017
 },
 "src/face/facepipeline.js": {
-"bytesInOutput": 5577
+"bytesInOutput": 13458
 },
 "src/face/uvcoords.js": {
-"bytesInOutput": 16786
+"bytesInOutput": 20584
 },
 "src/face/triangulation.js": {
-"bytesInOutput": 9991
+"bytesInOutput": 23309
 },
 "src/face/facemesh.js": {
-"bytesInOutput": 1237
+"bytesInOutput": 2420
 },
 "src/profile.js": {
-"bytesInOutput": 620
+"bytesInOutput": 1092
 },
 "src/age/ssrnet.js": {
-"bytesInOutput": 877
+"bytesInOutput": 1747
 },
 "src/gender/ssrnet.js": {
-"bytesInOutput": 1007
+"bytesInOutput": 2007
 },
 "src/emotion/emotion.js": {
-"bytesInOutput": 1334
+"bytesInOutput": 2612
 },
 "src/body/modelBase.js": {
-"bytesInOutput": 433
+"bytesInOutput": 900
 },
 "src/body/modelMobileNet.js": {
-"bytesInOutput": 245
+"bytesInOutput": 494
 },
 "src/body/heapSort.js": {
-"bytesInOutput": 1042
+"bytesInOutput": 1637
 },
 "src/body/buildParts.js": {
-"bytesInOutput": 547
+"bytesInOutput": 1752
 },
 "src/body/keypoints.js": {
-"bytesInOutput": 1633
+"bytesInOutput": 2277
 },
 "src/body/vectors.js": {
-"bytesInOutput": 616
+"bytesInOutput": 1408
 },
 "src/body/decodePose.js": {
-"bytesInOutput": 1024
+"bytesInOutput": 3773
 },
 "src/body/decodeMultiple.js": {
-"bytesInOutput": 604
+"bytesInOutput": 1990
 },
 "src/body/util.js": {
-"bytesInOutput": 1062
+"bytesInOutput": 2398
 },
 "src/body/modelPoseNet.js": {
-"bytesInOutput": 846
+"bytesInOutput": 2020
 },
 "src/body/posenet.js": {
-"bytesInOutput": 474
+"bytesInOutput": 903
 },
 "src/hand/box.js": {
-"bytesInOutput": 1398
+"bytesInOutput": 3555
 },
 "src/hand/handdetector.js": {
-"bytesInOutput": 1812
+"bytesInOutput": 4551
 },
 "src/hand/util.js": {
-"bytesInOutput": 1005
+"bytesInOutput": 3419
 },
 "src/hand/handpipeline.js": {
-"bytesInOutput": 3055
+"bytesInOutput": 8366
 },
 "src/hand/anchors.js": {
-"bytesInOutput": 127001
+"bytesInOutput": 256590
 },
 "src/hand/handpose.js": {
-"bytesInOutput": 1105
+"bytesInOutput": 2946
 },
 "src/gesture.js": {
-"bytesInOutput": 1220
+"bytesInOutput": 2270
 },
 "src/imagefx.js": {
-"bytesInOutput": 11014
+"bytesInOutput": 20097
 },
 "src/image.js": {
-"bytesInOutput": 2365
+"bytesInOutput": 4482
 },
 "config.js": {
-"bytesInOutput": 1300
+"bytesInOutput": 2230
 },
 "package.json": {
-"bytesInOutput": 3005
+"bytesInOutput": 3533
 },
 "src/human.js": {
-"bytesInOutput": 7374
+"bytesInOutput": 11852
 },
 "src/human.js": {
 "bytesInOutput": 0
 }
 },
-"bytes": 1277557
+"bytes": 3196136
 }
 }
 }

package.json

@@ -41,7 +41,7 @@
 "scripts": {
 "start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",
 "lint": "eslint src/*.js demo/*.js",
-"dev": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation dev-server.js",
+"dev": "npm install && node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation dev-server.js",
 "build-iife": "esbuild --bundle --minify --platform=browser --sourcemap --target=es2018 --format=iife --external:fs --global-name=Human --metafile=dist/human.json --outfile=dist/human.js src/human.js",
 "build-esm-bundle": "esbuild --bundle --minify --platform=browser --sourcemap --target=es2018 --format=esm --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/human.js",
 "build-esm-nobundle": "esbuild --bundle --minify --platform=browser --sourcemap --target=es2018 --format=esm --external:@tensorflow --external:fs --metafile=dist/human.esm-nobundle.json --outfile=dist/human.esm-nobundle.js src/human.js",

src/age/ssrnet.js

@@ -14,12 +14,12 @@ async function load(config) {
 }

 async function predict(image, config) {
-return new Promise(async (resolve) => {
-if (frame < config.face.age.skipFrames) {
+if ((frame < config.face.age.skipFrames) && last.age && (last.age > 0)) {
 frame += 1;
-resolve(last);
+return last;
 }
 frame = 0;
+return new Promise(async (resolve) => {
 const box = [[
 (image.shape[1] * zoom[0]) / image.shape[1],
 (image.shape[2] * zoom[1]) / image.shape[2],
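Note: the age, gender, and emotion predict() functions now reuse a cached result only while it is both fresh (within skipFrames) and non-empty, so a failed or empty earlier detection invalidates the cache instead of being replayed, and the cache hit returns directly rather than resolving inside the Promise. A generic sketch of the pattern (runModel and isValid are hypothetical stand-ins for the per-model logic):

// generic frame-skip cache with a validity check, as in predict() above
let last = null;
let frame = 0;

async function cachedPredict(input, skipFrames, isValid) {
  // reuse only a result that is fresh AND valid; empty results force a re-run
  if (frame < skipFrames && isValid(last)) {
    frame += 1;
    return last;
  }
  frame = 0;
  last = await runModel(input); // hypothetical expensive model call
  return last;
}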

src/emotion/emotion.js

@@ -17,12 +17,12 @@ async function load(config) {
 }

 async function predict(image, config) {
-return new Promise(async (resolve) => {
-if (frame < config.face.emotion.skipFrames) {
+if ((frame < config.face.emotion.skipFrames) && (last.length > 0)) {
 frame += 1;
-resolve(last);
+return last;
 }
 frame = 0;
+return new Promise(async (resolve) => {
 const box = [[
 (image.shape[1] * zoom[0]) / image.shape[1],
 (image.shape[2] * zoom[1]) / image.shape[2],

src/face/facemesh.js

@@ -18,8 +18,6 @@ class MediaPipeFaceMesh {
 for (const prediction of (predictions || [])) {
 // guard against disposed tensors on long running operations such as pause in middle of processing
 if (prediction.isDisposedInternal) continue;
-const confidence = prediction.confidence.arraySync();
-if (confidence >= this.config.detector.minConfidence) {
 const mesh = prediction.coords ? prediction.coords.arraySync() : null;
 const annotations = {};
 if (mesh && mesh.length > 0) {
@@ -30,14 +28,12 @@ class MediaPipeFaceMesh {
 }
 }
 results.push({
-confidence: confidence || 0,
+confidence: prediction.confidence || 0,
 box: prediction.box ? [prediction.box.startPoint[0], prediction.box.startPoint[1], prediction.box.endPoint[0] - prediction.box.startPoint[0], prediction.box.endPoint[1] - prediction.box.startPoint[1]] : 0,
 mesh,
 annotations,
 image: prediction.image ? tf.clone(prediction.image) : null,
 });
-}
-if (prediction.confidence) prediction.confidence.dispose();
 if (prediction.coords) prediction.coords.dispose();
 if (prediction.image) prediction.image.dispose();
 }

src/face/facepipeline.js

@@ -129,13 +129,19 @@ class Pipeline {
 }

 async predict(input, config) {
-this.skipFrames = config.detector.skipFrames;
-this.maxFaces = config.detector.maxFaces;
-this.runsWithoutFaceDetector++;
-if (this.shouldUpdateRegionsOfInterest()) {
-const detector = await this.boundingBoxDetector.getBoundingBoxes(input);
+this.runsWithoutFaceDetector += 1;
+let useFreshBox = (this.detectedFaces === 0) || (this.detectedFaces !== this.regionsOfInterest.length);
+let detector;
+// but every skipFrames check if detected boxes number changed
+if (useFreshBox || (this.runsWithoutFaceDetector > config.detector.skipFrames)) detector = await this.boundingBoxDetector.getBoundingBoxes(input);
+// if there are new boxes and number of boxes doesn't match use new boxes, but not if maxFaces is fixed to 1
+if (config.detector.maxFaces > 1 && detector && detector.boxes && detector.boxes.length > 0 && detector.boxes.length !== this.detectedFaces) useFreshBox = true;
+if (useFreshBox) {
+// const detector = await this.boundingBoxDetector.getBoundingBoxes(input);
 if (!detector || !detector.boxes || (detector.boxes.length === 0)) {
 this.regionsOfInterest = [];
+this.detectedFaces = 0;
 return null;
 }
 const scaledBoxes = detector.boxes.map((prediction) => {
@@ -159,7 +165,7 @@ class Pipeline {
 this.updateRegionsOfInterest(scaledBoxes);
 this.runsWithoutFaceDetector = 0;
 }
-const results = tf.tidy(() => this.regionsOfInterest.map((box, i) => {
+let results = tf.tidy(() => this.regionsOfInterest.map((box, i) => {
 let angle = 0;
 // The facial bounding box landmarks could come either from blazeface (if we are using a fresh box), or from the mesh model (if we are reusing an old box).
 const boxLandmarksFromMeshModel = box.landmarks.length >= LANDMARKS_COUNT;
@@ -173,14 +179,19 @@ class Pipeline {
 let rotatedImage = input;
 let rotationMatrix = util.IDENTITY_MATRIX;
 if (angle !== 0) {
-// bug: input becomes disposed here when running in async mode!
 rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
 rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);
 }
 const boxCPU = { startPoint: box.startPoint, endPoint: box.endPoint };
 const face = bounding.cutBoxFromImageAndResize(boxCPU, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);
 // The first returned tensor represents facial contours, which are included in the coordinates.
-const [, flag, coords] = this.meshDetector.predict(face);
+const [, confidence, coords] = this.meshDetector.predict(face);
+const confidenceVal = confidence.dataSync()[0];
+confidence.dispose();
+if (confidenceVal < config.detector.minConfidence) {
+coords.dispose();
+return null;
+}
 const coordsReshaped = tf.reshape(coords, [-1, 3]);
 let rawCoords = coordsReshaped.arraySync();
 if (config.iris.enabled) {
@@ -210,27 +221,21 @@ class Pipeline {
 const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
 tf.dispose(rawCoords);
 const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
-const confidence = flag.squeeze();
-tf.dispose(flag);
-if (config.mesh.enabled) {
-const transformedCoords = tf.tensor2d(transformedCoordsData);
-this.regionsOfInterest[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync() };
-const prediction = {
-coords: transformedCoords,
-box: landmarksBox,
-confidence,
-image: face,
-};
-return prediction;
-}
 const prediction = {
 coords: null,
 box: landmarksBox,
-confidence,
+confidence: confidenceVal,
 image: face,
 };
+if (config.mesh.enabled) {
+const transformedCoords = tf.tensor2d(transformedCoordsData);
+this.regionsOfInterest[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync() };
+prediction.coords = transformedCoords;
+}
 return prediction;
 }));
+results = results.filter((a) => a !== null);
+this.detectedFaces = results.length;
 return results;
 }
@@ -270,11 +275,6 @@ class Pipeline {
 }
 }

-shouldUpdateRegionsOfInterest() {
-if (this.regionsOfInterest.length === 0) return true; // nothing detected, so run detector on the next frame
-return (this.regionsOfInterest.length !== this.maxFaces) && (this.runsWithoutFaceDetector >= this.skipFrames);
-}
-
 calculateLandmarksBoundingBox(landmarks) {
 const xs = landmarks.map((d) => d[0]);
 const ys = landmarks.map((d) => d[1]);
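Note: this is the core cache-invalidation change of the commit. Instead of the old shouldUpdateRegionsOfInterest() heuristic keyed to maxFaces, the pipeline re-runs the bounding-box detector whenever the cached box count looks stale or the skipFrames budget is spent, and it tracks detectedFaces from the confidence-filtered results. A condensed sketch of the new decision flow, paraphrased from the diff above rather than copied verbatim:

// condensed decision flow: can cached face boxes still be trusted?
async function shouldRefreshBoxes(state, config, input, detectorModel) {
  state.runsWithoutFaceDetector += 1;
  let useFreshBox = (state.detectedFaces === 0) || (state.detectedFaces !== state.regionsOfInterest.length);
  let detector = null;
  // run the detector when the cache looks stale or the skip budget is spent
  if (useFreshBox || (state.runsWithoutFaceDetector > config.detector.skipFrames)) {
    detector = await detectorModel.getBoundingBoxes(input);
  }
  // a changed box count also invalidates the cache, unless maxFaces is 1
  if (config.detector.maxFaces > 1 && detector && detector.boxes && detector.boxes.length > 0 && detector.boxes.length !== state.detectedFaces) {
    useFreshBox = true;
  }
  return { useFreshBox, detector };
}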

src/gender/ssrnet.js

@@ -14,12 +14,12 @@ async function load(config) {
 }

 async function predict(image, config) {
-return new Promise(async (resolve) => {
-if (frame < config.face.age.skipFrames) {
+if ((frame < config.face.age.skipFrames) && last.gender !== '') {
 frame += 1;
-resolve(last);
+return last;
 }
 frame = 0;
+return new Promise(async (resolve) => {
 const box = [[
 (image.shape[1] * zoom[0]) / image.shape[1],
 (image.shape[2] * zoom[1]) / image.shape[2],