mirror of https://github.com/vladmandic/human
implemented face embedding
parent 58698d6173
commit 8d38c977be

Binary file not shown (new image, 152 KiB)
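For orientation before the diff: this commit adds an optional face-embedding stage (the mobilefacenet graph model referenced in config, producing a 192-value descriptor per face) plus a simmilarity() helper on the Human class (spelled as in the source), and wires both into the browser demo as a live comparison against a reference photo. A minimal usage sketch, assuming an already-constructed Human instance as in demo/browser.js; the image inputs, variable names, and the per-call config override are illustrative:

  // enable the new stage per call; it is off by default in config.js
  const userConfig = { face: { embedding: { enabled: true, modelPath: '../models/mobilefacenet.json' } } };

  async function compareFaces(imageA, imageB) {
    const a = await human.detect(imageA, userConfig);   // each detected face now carries face[n].embedding (192 values)
    const b = await human.detect(imageB, userConfig);
    const e1 = a.face[0] && a.face[0].embedding;
    const e2 = b.face[0] && b.face[0].embedding;
    if (!e1 || !e2 || e1.length !== 192 || e2.length !== 192) return 0;
    return human.simmilarity(e1, e2);                   // 0..1, higher means the faces are more alike
  }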
@@ -66,6 +66,7 @@ export default {
       // such as front-facing camera and
       // 'back' is optimized for distanct faces.
       inputSize: 256,  // fixed value: 128 for front and 256 for 'back'
+      rotation: false, // use best-guess rotated face image or just box with rotation as-is
       maxFaces: 10,    // maximum number of faces detected in the input
       // should be set to the minimum number for performance
       skipFrames: 15,  // how many frames to go without re-running the face bounding box detector
@@ -118,6 +119,12 @@ export default {
       skipFrames: 15,  // how many frames to go without re-running the detector
       modelPath: '../models/emotion-large.json', // can be 'mini', 'large'
     },
+    embedding: {
+      enabled: false,
+      inputSize: 112,  // fixed value
+      modelPath: '../models/mobilefacenet.json',
+    },
   },

   body: {
@@ -22,6 +22,7 @@ const ui = {
   useWorker: false,
   worker: 'demo/worker.js',
   samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg'],
+  compare: '../assets/sample-me.jpg',
   drawBoxes: true,
   drawPoints: false,
   drawPolygons: true,
@@ -48,6 +49,7 @@ let menu;
 let menuFX;
 let worker;
 let bench;
+let sample;
 let lastDetectedResult = {};

 // helper function: translates json to human readable string
@@ -72,6 +74,16 @@ const status = (msg) => {
   document.getElementById('status').innerText = msg;
 };

+async function calcSimmilariry(faces) {
+  if (!faces || !faces[0] || (faces[0].embedding?.length !== 192)) return;
+  const current = faces[0].embedding;
+  const original = (sample && sample.face && sample.face[0] && sample.face[0].embedding) ? sample.face[0].embedding : null;
+  if (original && original.length === 192) {
+    const simmilarity = human.simmilarity(current, original);
+    document.getElementById('simmilarity').innerText = `simmilarity: ${Math.trunc(1000 * simmilarity) / 10}%`;
+  }
+}
+
 // draws processed results and starts processing of a next frame
 async function drawResults(input) {
   const result = lastDetectedResult;
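calcSimmilariry() compares the live frame's 192-value descriptor against the reference captured during warmup (see the main() change further down). The score itself is computed in src/embedding/embedding.js, bundled later in this diff, as a scaled Euclidean distance mapped into a rough 0..1 similarity. A standalone restatement of that formula together with the demo's percentage conversion; the function name here is illustrative and not part of the library API:

  function embeddingSimilarity(a, b) {
    if (!a || !b || a.length !== b.length) return 0;
    const ssd = a.reduce((sum, val, i) => sum + (val - b[i]) ** 2, 0); // sum of squared differences
    const distance = 10 * Math.sqrt(ssd);
    return Math.trunc(1000 * (2 * (0.5 - distance))) / 1000;          // 3 decimals; can go negative for very different faces
  }

  // the demo then renders the score as a percentage with one decimal:
  // Math.trunc(1000 * 0.876) / 10  ->  87.6  ->  "simmilarity: 87.6%"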
@@ -79,7 +91,7 @@ async function drawResults(input) {

   // update fps data
   // const elapsed = performance.now() - timeStamp;
-  ui.fps.push(1000 / result.performance.total);
+  if (result.performance && result.performance.total) ui.fps.push(1000 / result.performance.total);
   if (ui.fps.length > ui.maxFPSframes) ui.fps.shift();

   // enable for continous performance monitoring
@@ -89,7 +101,7 @@ async function drawResults(input) {
   await menu.updateChart('FPS', ui.fps);

   // get updated canvas
-  result.canvas = await human.image(input, userConfig);
+  if (ui.buffered || !result.canvas) result.canvas = await human.image(input, userConfig);

   // draw image from video
   const ctx = canvas.getContext('2d');
@@ -102,17 +114,20 @@ async function drawResults(input) {
   } else {
     ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
   }

   // draw all results
   await draw.face(result.face, canvas, ui, human.facemesh.triangulation);
   await draw.body(result.body, canvas, ui);
   await draw.hand(result.hand, canvas, ui);
   await draw.gesture(result.gesture, canvas, ui);
+  await calcSimmilariry(result.face);

   // update log
   const engine = human.tf.engine();
   const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';
   const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;
   const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : '';
-  const avg = Math.trunc(10 * ui.fps.reduce((a, b) => a + b) / ui.fps.length) / 10;
+  const avg = Math.trunc(10 * ui.fps.reduce((a, b) => a + b, 0) / ui.fps.length) / 10;
   const warning = (ui.fps.length > 5) && (avg < 5) ? '<font color="lightcoral">warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models</font>' : '';
   document.getElementById('log').innerHTML = `
     video: ${ui.camera.name} | facing: ${ui.camera.facing} | resolution: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
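Two of the changes above are defensive rather than feature work: fps samples are only pushed once a measured performance.total exists, and the average now passes an initial value to reduce so an empty fps array cannot throw on the first frame. For the latter, plain JavaScript semantics:

  [].reduce((a, b) => a + b);      // TypeError: Reduce of empty array with no initial value
  [].reduce((a, b) => a + b, 0);   // 0, safe before the first measurement arrives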
@@ -277,7 +292,8 @@ async function processImage(input) {
   canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image.naturalWidth;
   canvas.height = human.config.filter.height && human.config.filter.height > 0 ? human.config.filter.height : image.naturalHeight;
   const result = await human.detect(image, userConfig);
-  drawResults(image, result, canvas);
+  lastDetectedResult = result;
+  await drawResults(image);
   const thumb = document.createElement('canvas');
   thumb.className = 'thumbnail';
   thumb.width = window.innerWidth / (ui.columns + 0.1);
@@ -325,11 +341,12 @@ async function detectSampleImages() {
   log('Running detection of sample images');
   status('processing images');
   document.getElementById('samples-container').innerHTML = '';
-  for (const sample of ui.samples) await processImage(sample);
+  for (const image of ui.samples) await processImage(image);
   status('');
 }

 function setupMenu() {
+  document.getElementById('compare-container').style.display = human.config.face.embedding.enabled ? 'block' : 'none';
   menu = new Menu(document.body, '', { top: '1rem', right: '1rem' });
   const btn = menu.addButton('start video', 'pause video', () => detectVideo());
   menu.addButton('process images', 'process images', () => detectSampleImages());
@@ -449,7 +466,7 @@ async function main() {
   // this is not required, just pre-warms all models for faster initial inference
   if (ui.modelsWarmup) {
     status('initializing');
-    await human.warmup(userConfig);
+    sample = await human.warmup(userConfig, document.getElementById('sample-image'));
   }
   status('human: ready');
   document.getElementById('loader').style.display = 'none';
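The warmup() change above is what feeds the comparison: warmup now accepts an optional sample input and returns the full detection result instead of discarding it, so the demo keeps that result in `sample` as the reference face. The same call in isolation, assuming embedding is enabled in userConfig:

  // runs a full detect() on the reference image while models warm up
  sample = await human.warmup(userConfig, document.getElementById('sample-image'));
  // sample.face[0].embedding (192 values) is what calcSimmilariry() compares each live frame against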
@@ -34,6 +34,7 @@
   .video { display: none; }
   .canvas { margin: 0 auto; }
   .bench { position: absolute; right: 0; bottom: 0; }
+  .compare-image { width: 10vw; position: absolute; top: 150px; left: 30px; box-shadow: 0 0 2px 2px black; background: black; }
   .loader { width: 300px; height: 300px; border: 3px solid transparent; border-radius: 50%; border-top: 4px solid #f15e41; animation: spin 4s linear infinite; position: absolute; top: 30%; left: 50%; margin-left: -150px; z-index: 15; }
   .loader::before, .loader::after { content: ""; position: absolute; top: 6px; bottom: 6px; left: 6px; right: 6px; border-radius: 50%; border: 4px solid transparent; }
   .loader::before { border-top-color: #bad375; animation: 3s spin linear infinite; }
@@ -70,6 +71,10 @@
   <canvas id="canvas" class="canvas"></canvas>
   <video id="video" playsinline class="video"></video>
   </div>
+  <div id="compare-container" style="display: none" class="compare-image">
+    <img id="sample-image" style="width: 100%" src="../assets/sample-me.jpg"></img>
+    <div id="simmilarity"></div>
+  </div>
   <div id="samples-container" class="samples-container"></div>
   <canvas id="bench-canvas" class="bench"></canvas>
   <div id="log" class="log"></div>
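The three new element ids above are the demo-side hooks for the feature; for reference, this is where each one is used elsewhere in this commit:

  document.getElementById('compare-container');  // shown by setupMenu() only when config.face.embedding.enabled
  document.getElementById('sample-image');       // passed to human.warmup() to capture the reference embedding
  document.getElementById('simmilarity');        // updated every frame by calcSimmilariry()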
@ -34106,7 +34106,7 @@ Expected: ${expectedFlat}.`);
|
||||||
function permute(args) {
|
function permute(args) {
|
||||||
return new Permute(args);
|
return new Permute(args);
|
||||||
}
|
}
|
||||||
function embedding(args) {
|
function embedding2(args) {
|
||||||
return new Embedding(args);
|
return new Embedding(args);
|
||||||
}
|
}
|
||||||
function add$3(args) {
|
function add$3(args) {
|
||||||
|
@ -34264,7 +34264,7 @@ Expected: ${expectedFlat}.`);
|
||||||
repeatVector,
|
repeatVector,
|
||||||
reshape: reshape$1,
|
reshape: reshape$1,
|
||||||
permute,
|
permute,
|
||||||
embedding,
|
embedding: embedding2,
|
||||||
add: add$3,
|
add: add$3,
|
||||||
average: average$1,
|
average: average$1,
|
||||||
concatenate: concatenate$2,
|
concatenate: concatenate$2,
|
||||||
|
@ -65732,13 +65732,13 @@ var require_blazeface = __commonJS((exports) => {
|
||||||
return faces;
|
return faces;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
async function load(config2) {
|
async function load2(config2) {
|
||||||
const blazeface = await loadGraphModel(config2.detector.modelPath, {fromTFHub: config2.detector.modelPath.includes("tfhub.dev")});
|
const blazeface = await loadGraphModel(config2.detector.modelPath, {fromTFHub: config2.detector.modelPath.includes("tfhub.dev")});
|
||||||
const model = new BlazeFaceModel(blazeface, config2);
|
const model = new BlazeFaceModel(blazeface, config2);
|
||||||
console.log(`Human: load model: ${config2.detector.modelPath.match(/\/(.*)\./)[1]}`);
|
console.log(`Human: load model: ${config2.detector.modelPath.match(/\/(.*)\./)[1]}`);
|
||||||
return model;
|
return model;
|
||||||
}
|
}
|
||||||
exports.load = load;
|
exports.load = load2;
|
||||||
exports.BlazeFaceModel = BlazeFaceModel;
|
exports.BlazeFaceModel = BlazeFaceModel;
|
||||||
exports.disposeBox = disposeBox;
|
exports.disposeBox = disposeBox;
|
||||||
});
|
});
|
||||||
|
@ -69816,15 +69816,15 @@ var require_facepipeline = __commonJS((exports) => {
|
||||||
rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
|
rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
|
||||||
rotationMatrix = util30.buildRotationMatrix(-angle, faceCenter);
|
rotationMatrix = util30.buildRotationMatrix(-angle, faceCenter);
|
||||||
}
|
}
|
||||||
const boxCPU = {startPoint: box.startPoint, endPoint: box.endPoint};
|
const face2 = bounding.cutBoxFromImageAndResize({startPoint: box.startPoint, endPoint: box.endPoint}, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);
|
||||||
const face2 = bounding.cutBoxFromImageAndResize(boxCPU, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);
|
const outputFace = config2.detector.rotation ? tf.image.rotateWithOffset(face2, angle) : face2;
|
||||||
if (!config2.mesh.enabled) {
|
if (!config2.mesh.enabled) {
|
||||||
const prediction2 = {
|
const prediction2 = {
|
||||||
coords: null,
|
coords: null,
|
||||||
box,
|
box,
|
||||||
faceConfidence: null,
|
faceConfidence: null,
|
||||||
confidence: box.confidence,
|
confidence: box.confidence,
|
||||||
image: face2
|
image: outputFace
|
||||||
};
|
};
|
||||||
return prediction2;
|
return prediction2;
|
||||||
}
|
}
|
||||||
|
@ -69869,7 +69869,7 @@ var require_facepipeline = __commonJS((exports) => {
|
||||||
box: landmarksBox,
|
box: landmarksBox,
|
||||||
faceConfidence: confidenceVal,
|
faceConfidence: confidenceVal,
|
||||||
confidence: box.confidence,
|
confidence: box.confidence,
|
||||||
image: face2
|
image: outputFace
|
||||||
};
|
};
|
||||||
this.storedBoxes[i] = {...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal};
|
this.storedBoxes[i] = {...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal};
|
||||||
return prediction;
|
return prediction;
|
||||||
|
@ -69930,7 +69930,7 @@ var require_facemesh = __commonJS((exports) => {
|
||||||
return results;
|
return results;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
async function load(config2) {
|
async function load2(config2) {
|
||||||
const models = await Promise.all([
|
const models = await Promise.all([
|
||||||
blazeface.load(config2),
|
blazeface.load(config2),
|
||||||
loadGraphModel(config2.mesh.modelPath, {fromTFHub: config2.mesh.modelPath.includes("tfhub.dev")}),
|
loadGraphModel(config2.mesh.modelPath, {fromTFHub: config2.mesh.modelPath.includes("tfhub.dev")}),
|
||||||
|
@ -69941,7 +69941,7 @@ var require_facemesh = __commonJS((exports) => {
|
||||||
console.log(`Human: load model: ${config2.iris.modelPath.match(/\/(.*)\./)[1]}`);
|
console.log(`Human: load model: ${config2.iris.modelPath.match(/\/(.*)\./)[1]}`);
|
||||||
return faceMesh;
|
return faceMesh;
|
||||||
}
|
}
|
||||||
exports.load = load;
|
exports.load = load2;
|
||||||
exports.MediaPipeFaceMesh = MediaPipeFaceMesh;
|
exports.MediaPipeFaceMesh = MediaPipeFaceMesh;
|
||||||
exports.triangulation = coords.TRI468;
|
exports.triangulation = coords.TRI468;
|
||||||
});
|
});
|
||||||
|
@ -69975,28 +69975,23 @@ var require_age = __commonJS((exports) => {
|
||||||
const models = {};
|
const models = {};
|
||||||
let last = {age: 0};
|
let last = {age: 0};
|
||||||
let frame = Number.MAX_SAFE_INTEGER;
|
let frame = Number.MAX_SAFE_INTEGER;
|
||||||
const zoom = [0, 0];
|
async function load2(config2) {
|
||||||
async function load(config2) {
|
|
||||||
if (!models.age) {
|
if (!models.age) {
|
||||||
models.age = await loadGraphModel(config2.face.age.modelPath);
|
models.age = await loadGraphModel(config2.face.age.modelPath);
|
||||||
console.log(`Human: load model: ${config2.face.age.modelPath.match(/\/(.*)\./)[1]}`);
|
console.log(`Human: load model: ${config2.face.age.modelPath.match(/\/(.*)\./)[1]}`);
|
||||||
}
|
}
|
||||||
return models.age;
|
return models.age;
|
||||||
}
|
}
|
||||||
async function predict(image2, config2) {
|
async function predict2(image2, config2) {
|
||||||
|
if (!models.age)
|
||||||
|
return null;
|
||||||
if (frame < config2.face.age.skipFrames && last.age && last.age > 0) {
|
if (frame < config2.face.age.skipFrames && last.age && last.age > 0) {
|
||||||
frame += 1;
|
frame += 1;
|
||||||
return last;
|
return last;
|
||||||
}
|
}
|
||||||
frame = 0;
|
frame = 0;
|
||||||
return new Promise(async (resolve) => {
|
return new Promise(async (resolve) => {
|
||||||
const box = [[
|
const resize = tf.image.resizeBilinear(image2, [config2.face.age.inputSize, config2.face.age.inputSize], false);
|
||||||
image2.shape[1] * zoom[0] / image2.shape[1],
|
|
||||||
image2.shape[2] * zoom[1] / image2.shape[2],
|
|
||||||
(image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
|
|
||||||
(image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
|
|
||||||
]];
|
|
||||||
const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.age.inputSize, config2.face.age.inputSize]);
|
|
||||||
const enhance = tf.mul(resize, [255]);
|
const enhance = tf.mul(resize, [255]);
|
||||||
tf.dispose(resize);
|
tf.dispose(resize);
|
||||||
let ageT;
|
let ageT;
|
||||||
|
@ -70020,8 +70015,8 @@ var require_age = __commonJS((exports) => {
|
||||||
resolve(obj);
|
resolve(obj);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
exports.predict = predict;
|
exports.predict = predict2;
|
||||||
exports.load = load;
|
exports.load = load2;
|
||||||
});
|
});
|
||||||
var require_gender = __commonJS((exports) => {
|
var require_gender = __commonJS((exports) => {
|
||||||
const profile2 = __toModule(require_profile());
|
const profile2 = __toModule(require_profile());
|
||||||
|
@ -70029,9 +70024,8 @@ var require_gender = __commonJS((exports) => {
|
||||||
let last = {gender: ""};
|
let last = {gender: ""};
|
||||||
let frame = Number.MAX_SAFE_INTEGER;
|
let frame = Number.MAX_SAFE_INTEGER;
|
||||||
let alternative = false;
|
let alternative = false;
|
||||||
const zoom = [0, 0];
|
|
||||||
const rgb = [0.2989, 0.587, 0.114];
|
const rgb = [0.2989, 0.587, 0.114];
|
||||||
async function load(config2) {
|
async function load2(config2) {
|
||||||
if (!models.gender) {
|
if (!models.gender) {
|
||||||
models.gender = await loadGraphModel(config2.face.gender.modelPath);
|
models.gender = await loadGraphModel(config2.face.gender.modelPath);
|
||||||
alternative = models.gender.inputs[0].shape[3] === 1;
|
alternative = models.gender.inputs[0].shape[3] === 1;
|
||||||
|
@ -70039,20 +70033,16 @@ var require_gender = __commonJS((exports) => {
|
||||||
}
|
}
|
||||||
return models.gender;
|
return models.gender;
|
||||||
}
|
}
|
||||||
async function predict(image2, config2) {
|
async function predict2(image2, config2) {
|
||||||
|
if (!models.gender)
|
||||||
|
return null;
|
||||||
if (frame < config2.face.gender.skipFrames && last.gender !== "") {
|
if (frame < config2.face.gender.skipFrames && last.gender !== "") {
|
||||||
frame += 1;
|
frame += 1;
|
||||||
return last;
|
return last;
|
||||||
}
|
}
|
||||||
frame = 0;
|
frame = 0;
|
||||||
return new Promise(async (resolve) => {
|
return new Promise(async (resolve) => {
|
||||||
const box = [[
|
const resize = tf.image.resizeBilinear(image2, [config2.face.gender.inputSize, config2.face.gender.inputSize], false);
|
||||||
image2.shape[1] * zoom[0] / image2.shape[1],
|
|
||||||
image2.shape[2] * zoom[1] / image2.shape[2],
|
|
||||||
(image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
|
|
||||||
(image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
|
|
||||||
]];
|
|
||||||
const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.gender.inputSize, config2.face.gender.inputSize]);
|
|
||||||
let enhance;
|
let enhance;
|
||||||
if (alternative) {
|
if (alternative) {
|
||||||
enhance = tf.tidy(() => {
|
enhance = tf.tidy(() => {
|
||||||
|
@ -70100,8 +70090,8 @@ var require_gender = __commonJS((exports) => {
|
||||||
resolve(obj);
|
resolve(obj);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
exports.predict = predict;
|
exports.predict = predict2;
|
||||||
exports.load = load;
|
exports.load = load2;
|
||||||
});
|
});
|
||||||
var require_emotion = __commonJS((exports) => {
|
var require_emotion = __commonJS((exports) => {
|
||||||
const profile2 = __toModule(require_profile());
|
const profile2 = __toModule(require_profile());
|
||||||
|
@ -70109,30 +70099,25 @@ var require_emotion = __commonJS((exports) => {
|
||||||
const models = {};
|
const models = {};
|
||||||
let last = [];
|
let last = [];
|
||||||
let frame = Number.MAX_SAFE_INTEGER;
|
let frame = Number.MAX_SAFE_INTEGER;
|
||||||
const zoom = [0, 0];
|
|
||||||
const rgb = [0.2989, 0.587, 0.114];
|
const rgb = [0.2989, 0.587, 0.114];
|
||||||
const scale = 1;
|
const scale = 1;
|
||||||
async function load(config2) {
|
async function load2(config2) {
|
||||||
if (!models.emotion) {
|
if (!models.emotion) {
|
||||||
models.emotion = await loadGraphModel(config2.face.emotion.modelPath);
|
models.emotion = await loadGraphModel(config2.face.emotion.modelPath);
|
||||||
console.log(`Human: load model: ${config2.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
|
console.log(`Human: load model: ${config2.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
|
||||||
}
|
}
|
||||||
return models.emotion;
|
return models.emotion;
|
||||||
}
|
}
|
||||||
async function predict(image2, config2) {
|
async function predict2(image2, config2) {
|
||||||
|
if (!models.emotion)
|
||||||
|
return null;
|
||||||
if (frame < config2.face.emotion.skipFrames && last.length > 0) {
|
if (frame < config2.face.emotion.skipFrames && last.length > 0) {
|
||||||
frame += 1;
|
frame += 1;
|
||||||
return last;
|
return last;
|
||||||
}
|
}
|
||||||
frame = 0;
|
frame = 0;
|
||||||
return new Promise(async (resolve) => {
|
return new Promise(async (resolve) => {
|
||||||
const box = [[
|
const resize = tf.image.resizeBilinear(image2, [config2.face.emotion.inputSize, config2.face.emotion.inputSize], false);
|
||||||
image2.shape[1] * zoom[0] / image2.shape[1],
|
|
||||||
image2.shape[2] * zoom[1] / image2.shape[2],
|
|
||||||
(image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
|
|
||||||
(image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
|
|
||||||
]];
|
|
||||||
const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.emotion.inputSize, config2.face.emotion.inputSize]);
|
|
||||||
const [red, green, blue] = tf.split(resize, 3, 3);
|
const [red, green, blue] = tf.split(resize, 3, 3);
|
||||||
resize.dispose();
|
resize.dispose();
|
||||||
const redNorm = tf.mul(red, rgb[0]);
|
const redNorm = tf.mul(red, rgb[0]);
|
||||||
|
@ -70171,8 +70156,51 @@ var require_emotion = __commonJS((exports) => {
|
||||||
resolve(obj);
|
resolve(obj);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
exports.predict = predict;
|
exports.predict = predict2;
|
||||||
exports.load = load;
|
exports.load = load2;
|
||||||
|
});
|
||||||
|
var require_embedding = __commonJS((exports) => {
|
||||||
|
const profile2 = __toModule(require_profile());
|
||||||
|
const models = {};
|
||||||
|
async function load2(config2) {
|
||||||
|
if (!models.embedding) {
|
||||||
|
models.embedding = await loadGraphModel(config2.face.embedding.modelPath);
|
||||||
|
console.log(`Human: load model: ${config2.face.embedding.modelPath.match(/\/(.*)\./)[1]}`);
|
||||||
|
}
|
||||||
|
return models.embedding;
|
||||||
|
}
|
||||||
|
function simmilarity2(embedding1, embedding2) {
|
||||||
|
if ((embedding1 == null ? void 0 : embedding1.length) !== (embedding2 == null ? void 0 : embedding2.length))
|
||||||
|
return 0;
|
||||||
|
const distance = 10 * Math.sqrt(embedding1.map((val, i) => val - embedding2[i]).reduce((dist2, diff) => dist2 + diff ** 2, 0));
|
||||||
|
const confidence = 2 * (0.5 - distance);
|
||||||
|
return Math.trunc(1e3 * confidence) / 1e3;
|
||||||
|
}
|
||||||
|
async function predict2(image2, config2) {
|
||||||
|
if (!models.embedding)
|
||||||
|
return null;
|
||||||
|
return new Promise(async (resolve) => {
|
||||||
|
const resize = tf.image.resizeBilinear(image2, [config2.face.embedding.inputSize, config2.face.embedding.inputSize], false);
|
||||||
|
let data2 = [];
|
||||||
|
if (config2.face.embedding.enabled) {
|
||||||
|
if (!config2.profile) {
|
||||||
|
const embeddingT = await models.embedding.predict({img_inputs: resize});
|
||||||
|
data2 = [...embeddingT.dataSync()];
|
||||||
|
tf.dispose(embeddingT);
|
||||||
|
} else {
|
||||||
|
const profileData = await tf.profile(() => models.embedding.predict({img_inputs: resize}));
|
||||||
|
data2 = [...profileData.result.dataSync()];
|
||||||
|
profileData.result.dispose();
|
||||||
|
profile2.run("emotion", profileData);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
resize.dispose();
|
||||||
|
resolve(data2);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
exports.predict = predict2;
|
||||||
|
exports.simmilarity = simmilarity2;
|
||||||
|
exports.load = load2;
|
||||||
});
|
});
|
||||||
var require_modelBase = __commonJS((exports) => {
|
var require_modelBase = __commonJS((exports) => {
|
||||||
class BaseModel {
|
class BaseModel {
|
||||||
|
@ -70654,13 +70682,13 @@ var require_modelPoseNet = __commonJS((exports) => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
exports.PoseNet = PoseNet;
|
exports.PoseNet = PoseNet;
|
||||||
async function load(config2) {
|
async function load2(config2) {
|
||||||
const graphModel = await loadGraphModel(config2.body.modelPath);
|
const graphModel = await loadGraphModel(config2.body.modelPath);
|
||||||
const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);
|
const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);
|
||||||
console.log(`Human: load model: ${config2.body.modelPath.match(/\/(.*)\./)[1]}`);
|
console.log(`Human: load model: ${config2.body.modelPath.match(/\/(.*)\./)[1]}`);
|
||||||
return new PoseNet(mobilenet);
|
return new PoseNet(mobilenet);
|
||||||
}
|
}
|
||||||
exports.load = load;
|
exports.load = load2;
|
||||||
});
|
});
|
||||||
var require_posenet = __commonJS((exports) => {
|
var require_posenet = __commonJS((exports) => {
|
||||||
const modelMobileNet = __toModule(require_modelMobileNet());
|
const modelMobileNet = __toModule(require_modelMobileNet());
|
||||||
|
@ -88665,7 +88693,7 @@ var require_handpose = __commonJS((exports) => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
exports.HandPose = HandPose;
|
exports.HandPose = HandPose;
|
||||||
async function load(config2) {
|
async function load2(config2) {
|
||||||
const [handDetectorModel, handPoseModel] = await Promise.all([
|
const [handDetectorModel, handPoseModel] = await Promise.all([
|
||||||
loadGraphModel(config2.detector.modelPath, {fromTFHub: config2.detector.modelPath.includes("tfhub.dev")}),
|
loadGraphModel(config2.detector.modelPath, {fromTFHub: config2.detector.modelPath.includes("tfhub.dev")}),
|
||||||
loadGraphModel(config2.skeleton.modelPath, {fromTFHub: config2.skeleton.modelPath.includes("tfhub.dev")})
|
loadGraphModel(config2.skeleton.modelPath, {fromTFHub: config2.skeleton.modelPath.includes("tfhub.dev")})
|
||||||
|
@ -88677,7 +88705,7 @@ var require_handpose = __commonJS((exports) => {
|
||||||
console.log(`Human: load model: ${config2.skeleton.modelPath.match(/\/(.*)\./)[1]}`);
|
console.log(`Human: load model: ${config2.skeleton.modelPath.match(/\/(.*)\./)[1]}`);
|
||||||
return handpose2;
|
return handpose2;
|
||||||
}
|
}
|
||||||
exports.load = load;
|
exports.load = load2;
|
||||||
});
|
});
|
||||||
var require_gesture = __commonJS((exports) => {
|
var require_gesture = __commonJS((exports) => {
|
||||||
exports.body = (res) => {
|
exports.body = (res) => {
|
||||||
|
@ -98221,6 +98249,7 @@ const facemesh = __toModule(require_facemesh());
|
||||||
const age = __toModule(require_age());
|
const age = __toModule(require_age());
|
||||||
const gender = __toModule(require_gender());
|
const gender = __toModule(require_gender());
|
||||||
const emotion = __toModule(require_emotion());
|
const emotion = __toModule(require_emotion());
|
||||||
|
const embedding = __toModule(require_embedding());
|
||||||
const posenet = __toModule(require_posenet());
|
const posenet = __toModule(require_posenet());
|
||||||
/**
|
/**
|
||||||
* @license
|
* @license
|
||||||
|
@ -98414,6 +98443,7 @@ var config_default = {
|
||||||
detector: {
|
detector: {
|
||||||
modelPath: "../models/blazeface-back.json",
|
modelPath: "../models/blazeface-back.json",
|
||||||
inputSize: 256,
|
inputSize: 256,
|
||||||
|
rotation: false,
|
||||||
maxFaces: 10,
|
maxFaces: 10,
|
||||||
skipFrames: 15,
|
skipFrames: 15,
|
||||||
minConfidence: 0.5,
|
minConfidence: 0.5,
|
||||||
|
@ -98449,6 +98479,11 @@ var config_default = {
|
||||||
minConfidence: 0.2,
|
minConfidence: 0.2,
|
||||||
skipFrames: 15,
|
skipFrames: 15,
|
||||||
modelPath: "../models/emotion-large.json"
|
modelPath: "../models/emotion-large.json"
|
||||||
|
},
|
||||||
|
embedding: {
|
||||||
|
enabled: false,
|
||||||
|
inputSize: 112,
|
||||||
|
modelPath: "../models/mobilefacenet.json"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
body: {
|
body: {
|
||||||
|
@ -98565,6 +98600,11 @@ class Human {
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
simmilarity(embedding1, embedding2) {
|
||||||
|
if (this.config.face.embedding.enabled)
|
||||||
|
return embedding.simmilarity(embedding1, embedding2);
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
async load(userConfig2) {
|
async load(userConfig2) {
|
||||||
this.state = "load";
|
this.state = "load";
|
||||||
const timeStamp = now2();
|
const timeStamp = now2();
|
||||||
|
@ -98583,6 +98623,7 @@ class Human {
|
||||||
this.models.age,
|
this.models.age,
|
||||||
this.models.gender,
|
this.models.gender,
|
||||||
this.models.emotion,
|
this.models.emotion,
|
||||||
|
this.models.embedding,
|
||||||
this.models.posenet,
|
this.models.posenet,
|
||||||
this.models.handpose
|
this.models.handpose
|
||||||
] = await Promise.all([
|
] = await Promise.all([
|
||||||
|
@ -98590,6 +98631,7 @@ class Human {
|
||||||
this.models.age || (this.config.face.enabled && this.config.face.age.enabled ? age.load(this.config) : null),
|
this.models.age || (this.config.face.enabled && this.config.face.age.enabled ? age.load(this.config) : null),
|
||||||
this.models.gender || (this.config.face.enabled && this.config.face.gender.enabled ? gender.load(this.config) : null),
|
this.models.gender || (this.config.face.enabled && this.config.face.gender.enabled ? gender.load(this.config) : null),
|
||||||
this.models.emotion || (this.config.face.enabled && this.config.face.emotion.enabled ? emotion.load(this.config) : null),
|
this.models.emotion || (this.config.face.enabled && this.config.face.emotion.enabled ? emotion.load(this.config) : null),
|
||||||
|
this.models.embedding || (this.config.face.enabled && this.config.face.embedding.enabled ? embedding.load(this.config) : null),
|
||||||
this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
|
this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
|
||||||
this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null)
|
this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null)
|
||||||
]);
|
]);
|
||||||
|
@ -98602,6 +98644,8 @@ class Human {
|
||||||
this.models.gender = await gender.load(this.config);
|
this.models.gender = await gender.load(this.config);
|
||||||
if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion)
|
if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion)
|
||||||
this.models.emotion = await emotion.load(this.config);
|
this.models.emotion = await emotion.load(this.config);
|
||||||
|
if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding)
|
||||||
|
this.models.embedding = await embedding.load(this.config);
|
||||||
if (this.config.body.enabled && !this.models.posenet)
|
if (this.config.body.enabled && !this.models.posenet)
|
||||||
this.models.posenet = await posenet.load(this.config);
|
this.models.posenet = await posenet.load(this.config);
|
||||||
if (this.config.hand.enabled && !this.models.handpose)
|
if (this.config.hand.enabled && !this.models.handpose)
|
||||||
|
@ -98643,6 +98687,7 @@ class Human {
|
||||||
let ageRes;
|
let ageRes;
|
||||||
let genderRes;
|
let genderRes;
|
||||||
let emotionRes;
|
let emotionRes;
|
||||||
|
let embeddingRes;
|
||||||
const faceRes = [];
|
const faceRes = [];
|
||||||
this.state = "run:face";
|
this.state = "run:face";
|
||||||
timeStamp = now2();
|
timeStamp = now2();
|
||||||
|
@ -98682,8 +98727,18 @@ class Human {
|
||||||
this.perf.emotion = Math.trunc(now2() - timeStamp);
|
this.perf.emotion = Math.trunc(now2() - timeStamp);
|
||||||
}
|
}
|
||||||
this.analyze("End Emotion:");
|
this.analyze("End Emotion:");
|
||||||
|
this.analyze("Start Embedding:");
|
||||||
if (this.config.async) {
|
if (this.config.async) {
|
||||||
[ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);
|
embeddingRes = this.config.face.embedding.enabled ? embedding.predict(face2.image, this.config) : {};
|
||||||
|
} else {
|
||||||
|
this.state = "run:embedding";
|
||||||
|
timeStamp = now2();
|
||||||
|
embeddingRes = this.config.face.embedding.enabled ? await embedding.predict(face2.image, this.config) : {};
|
||||||
|
this.perf.embedding = Math.trunc(now2() - timeStamp);
|
||||||
|
}
|
||||||
|
this.analyze("End Emotion:");
|
||||||
|
if (this.config.async) {
|
||||||
|
[ageRes, genderRes, emotionRes, embeddingRes] = await Promise.all([ageRes, genderRes, emotionRes, embeddingRes]);
|
||||||
}
|
}
|
||||||
this.analyze("Finish Face:");
|
this.analyze("Finish Face:");
|
||||||
face2.image.dispose();
|
face2.image.dispose();
|
||||||
|
@ -98697,6 +98752,7 @@ class Human {
|
||||||
gender: genderRes.gender,
|
gender: genderRes.gender,
|
||||||
genderConfidence: genderRes.confidence,
|
genderConfidence: genderRes.confidence,
|
||||||
emotion: emotionRes,
|
emotion: emotionRes,
|
||||||
|
embedding: embeddingRes,
|
||||||
iris: irisSize !== 0 ? Math.trunc(irisSize) / 100 : 0
|
iris: irisSize !== 0 ? Math.trunc(irisSize) / 100 : 0
|
||||||
});
|
});
|
||||||
this.analyze("End Face");
|
this.analyze("End Face");
|
||||||
|
@ -98722,18 +98778,18 @@ class Human {
|
||||||
return process3.canvas;
|
return process3.canvas;
|
||||||
}
|
}
|
||||||
async detect(input, userConfig2 = {}) {
|
async detect(input, userConfig2 = {}) {
|
||||||
this.state = "config";
|
|
||||||
let timeStamp;
|
|
||||||
this.config = mergeDeep(this.config, userConfig2);
|
|
||||||
if (!this.config.videoOptimized)
|
|
||||||
this.config = mergeDeep(this.config, disableSkipFrames);
|
|
||||||
this.state = "check";
|
|
||||||
const error = this.sanity(input);
|
|
||||||
if (error) {
|
|
||||||
this.log(error, input);
|
|
||||||
return {error};
|
|
||||||
}
|
|
||||||
return new Promise(async (resolve) => {
|
return new Promise(async (resolve) => {
|
||||||
|
this.state = "config";
|
||||||
|
let timeStamp;
|
||||||
|
this.config = mergeDeep(this.config, userConfig2);
|
||||||
|
if (!this.config.videoOptimized)
|
||||||
|
this.config = mergeDeep(this.config, disableSkipFrames);
|
||||||
|
this.state = "check";
|
||||||
|
const error = this.sanity(input);
|
||||||
|
if (error) {
|
||||||
|
this.log(error, input);
|
||||||
|
resolve({error});
|
||||||
|
}
|
||||||
let poseRes;
|
let poseRes;
|
||||||
let handRes;
|
let handRes;
|
||||||
let faceRes;
|
let faceRes;
|
||||||
|
@ -98801,10 +98857,12 @@ class Human {
|
||||||
resolve({face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process3.canvas});
|
resolve({face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process3.canvas});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
async warmup(userConfig2) {
|
async warmup(userConfig2, sample2) {
|
||||||
const warmup = new ImageData(255, 255);
|
if (!sample2)
|
||||||
await this.detect(warmup, userConfig2);
|
sample2 = new ImageData(255, 255);
|
||||||
|
const warmup = await this.detect(sample2, userConfig2);
|
||||||
this.log("warmed up");
|
this.log("warmed up");
|
||||||
|
return warmup;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -99598,6 +99656,7 @@ const ui = {
|
||||||
useWorker: false,
|
useWorker: false,
|
||||||
worker: "demo/worker.js",
|
worker: "demo/worker.js",
|
||||||
samples: ["../assets/sample6.jpg", "../assets/sample1.jpg", "../assets/sample4.jpg", "../assets/sample5.jpg", "../assets/sample3.jpg", "../assets/sample2.jpg"],
|
samples: ["../assets/sample6.jpg", "../assets/sample1.jpg", "../assets/sample4.jpg", "../assets/sample5.jpg", "../assets/sample3.jpg", "../assets/sample2.jpg"],
|
||||||
|
compare: "../assets/sample-me.jpg",
|
||||||
drawBoxes: true,
|
drawBoxes: true,
|
||||||
drawPoints: false,
|
drawPoints: false,
|
||||||
drawPolygons: true,
|
drawPolygons: true,
|
||||||
|
@ -99622,6 +99681,7 @@ let menu2;
|
||||||
let menuFX;
|
let menuFX;
|
||||||
let worker;
|
let worker;
|
||||||
let bench;
|
let bench;
|
||||||
|
let sample;
|
||||||
let lastDetectedResult = {};
|
let lastDetectedResult = {};
|
||||||
function str(...msg) {
|
function str(...msg) {
|
||||||
if (!Array.isArray(msg))
|
if (!Array.isArray(msg))
|
||||||
|
@ -99642,14 +99702,27 @@ const log2 = (...msg) => {
|
||||||
const status = (msg) => {
|
const status = (msg) => {
|
||||||
document.getElementById("status").innerText = msg;
|
document.getElementById("status").innerText = msg;
|
||||||
};
|
};
|
||||||
|
async function calcSimmilariry(faces) {
|
||||||
|
var _a;
|
||||||
|
if (!faces || !faces[0] || ((_a = faces[0].embedding) == null ? void 0 : _a.length) !== 192)
|
||||||
|
return;
|
||||||
|
const current = faces[0].embedding;
|
||||||
|
const original = sample && sample.face && sample.face[0] && sample.face[0].embedding ? sample.face[0].embedding : null;
|
||||||
|
if (original && original.length === 192) {
|
||||||
|
const simmilarity = human.simmilarity(current, original);
|
||||||
|
document.getElementById("simmilarity").innerText = `simmilarity: ${Math.trunc(1e3 * simmilarity) / 10}%`;
|
||||||
|
}
|
||||||
|
}
|
||||||
async function drawResults(input) {
|
async function drawResults(input) {
|
||||||
const result = lastDetectedResult;
|
const result = lastDetectedResult;
|
||||||
const canvas = document.getElementById("canvas");
|
const canvas = document.getElementById("canvas");
|
||||||
ui.fps.push(1e3 / result.performance.total);
|
if (result.performance && result.performance.total)
|
||||||
|
ui.fps.push(1e3 / result.performance.total);
|
||||||
if (ui.fps.length > ui.maxFPSframes)
|
if (ui.fps.length > ui.maxFPSframes)
|
||||||
ui.fps.shift();
|
ui.fps.shift();
|
||||||
await menu2.updateChart("FPS", ui.fps);
|
await menu2.updateChart("FPS", ui.fps);
|
||||||
result.canvas = await human.image(input, userConfig);
|
if (ui.buffered || !result.canvas)
|
||||||
|
result.canvas = await human.image(input, userConfig);
|
||||||
const ctx = canvas.getContext("2d");
|
const ctx = canvas.getContext("2d");
|
||||||
ctx.fillStyle = ui.baseBackground;
|
ctx.fillStyle = ui.baseBackground;
|
||||||
ctx.fillRect(0, 0, canvas.width, canvas.height);
|
ctx.fillRect(0, 0, canvas.width, canvas.height);
|
||||||
|
@ -99666,11 +99739,12 @@ async function drawResults(input) {
|
||||||
await draw_default.body(result.body, canvas, ui);
|
await draw_default.body(result.body, canvas, ui);
|
||||||
await draw_default.hand(result.hand, canvas, ui);
|
await draw_default.hand(result.hand, canvas, ui);
|
||||||
await draw_default.gesture(result.gesture, canvas, ui);
|
await draw_default.gesture(result.gesture, canvas, ui);
|
||||||
|
await calcSimmilariry(result.face);
|
||||||
const engine = human.tf.engine();
|
const engine = human.tf.engine();
|
||||||
const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : "";
|
const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : "";
|
||||||
const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;
|
const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;
|
||||||
const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : "";
|
const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : "";
|
||||||
const avg = Math.trunc(10 * ui.fps.reduce((a, b) => a + b) / ui.fps.length) / 10;
|
const avg = Math.trunc(10 * ui.fps.reduce((a, b) => a + b, 0) / ui.fps.length) / 10;
|
||||||
const warning = ui.fps.length > 5 && avg < 5 ? '<font color="lightcoral">warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models</font>' : "";
|
const warning = ui.fps.length > 5 && avg < 5 ? '<font color="lightcoral">warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models</font>' : "";
|
||||||
document.getElementById("log").innerHTML = `
|
document.getElementById("log").innerHTML = `
|
||||||
video: ${ui.camera.name} | facing: ${ui.camera.facing} | resolution: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
|
video: ${ui.camera.name} | facing: ${ui.camera.facing} | resolution: ${ui.camera.width} x ${ui.camera.height} ${processing}<br>
|
||||||
|
@ -99833,7 +99907,8 @@ async function processImage(input) {
|
||||||
canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image2.naturalWidth;
|
canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image2.naturalWidth;
|
||||||
canvas.height = human.config.filter.height && human.config.filter.height > 0 ? human.config.filter.height : image2.naturalHeight;
|
canvas.height = human.config.filter.height && human.config.filter.height > 0 ? human.config.filter.height : image2.naturalHeight;
|
||||||
const result = await human.detect(image2, userConfig);
|
const result = await human.detect(image2, userConfig);
|
||||||
drawResults(image2, result, canvas);
|
lastDetectedResult = result;
|
||||||
|
await drawResults(image2);
|
||||||
const thumb = document.createElement("canvas");
|
const thumb = document.createElement("canvas");
|
||||||
thumb.className = "thumbnail";
|
thumb.className = "thumbnail";
|
||||||
thumb.width = window.innerWidth / (ui.columns + 0.1);
|
thumb.width = window.innerWidth / (ui.columns + 0.1);
|
||||||
|
@ -99877,11 +99952,12 @@ async function detectSampleImages() {
|
||||||
log2("Running detection of sample images");
|
log2("Running detection of sample images");
|
||||||
status("processing images");
|
status("processing images");
|
||||||
document.getElementById("samples-container").innerHTML = "";
|
document.getElementById("samples-container").innerHTML = "";
|
||||||
for (const sample of ui.samples)
|
for (const image2 of ui.samples)
|
||||||
await processImage(sample);
|
await processImage(image2);
|
||||||
status("");
|
status("");
|
||||||
}
|
}
|
||||||
function setupMenu() {
|
function setupMenu() {
|
||||||
|
document.getElementById("compare-container").style.display = human.config.face.embedding.enabled ? "block" : "none";
|
||||||
menu2 = new menu_default(document.body, "", {top: "1rem", right: "1rem"});
|
menu2 = new menu_default(document.body, "", {top: "1rem", right: "1rem"});
|
||||||
const btn = menu2.addButton("start video", "pause video", () => detectVideo());
|
const btn = menu2.addButton("start video", "pause video", () => detectVideo());
|
||||||
menu2.addButton("process images", "process images", () => detectSampleImages());
|
menu2.addButton("process images", "process images", () => detectSampleImages());
|
||||||
|
@ -99985,7 +100061,7 @@ async function main() {
|
||||||
}
|
}
|
||||||
if (ui.modelsWarmup) {
|
if (ui.modelsWarmup) {
|
||||||
status("initializing");
|
status("initializing");
|
||||||
await human.warmup(userConfig);
|
sample = await human.warmup(userConfig, document.getElementById("sample-image"));
|
||||||
}
|
}
|
||||||
status("human: ready");
|
status("human: ready");
|
||||||
document.getElementById("loader").style.display = "none";
|
document.getElementById("loader").style.display = "none";
|
||||||
|
|
File diff suppressed because one or more lines are too long
|
@ -5,7 +5,7 @@
|
||||||
"imports": []
|
"imports": []
|
||||||
},
|
},
|
||||||
"demo/browser.js": {
|
"demo/browser.js": {
|
||||||
"bytes": 20443,
|
"bytes": 21317,
|
||||||
"imports": [
|
"imports": [
|
||||||
{
|
{
|
||||||
"path": "dist/human.esm.js"
|
"path": "dist/human.esm.js"
|
||||||
|
@ -30,7 +30,7 @@
|
||||||
"imports": []
|
"imports": []
|
||||||
},
|
},
|
||||||
"dist/human.esm.js": {
|
"dist/human.esm.js": {
|
||||||
"bytes": 3453585,
|
"bytes": 3456039,
|
||||||
"imports": []
|
"imports": []
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
@ -38,13 +38,13 @@
|
||||||
"dist/demo-browser-index.js.map": {
|
"dist/demo-browser-index.js.map": {
|
||||||
"imports": [],
|
"imports": [],
|
||||||
"inputs": {},
|
"inputs": {},
|
||||||
"bytes": 5439521
|
"bytes": 5448434
|
||||||
},
|
},
|
||||||
"dist/demo-browser-index.js": {
|
"dist/demo-browser-index.js": {
|
||||||
"imports": [],
|
"imports": [],
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"dist/human.esm.js": {
|
"dist/human.esm.js": {
|
||||||
"bytesInOutput": 3442938
|
"bytesInOutput": 3445365
|
||||||
},
|
},
|
||||||
"demo/draw.js": {
|
"demo/draw.js": {
|
||||||
"bytesInOutput": 9599
|
"bytesInOutput": 9599
|
||||||
|
@ -56,10 +56,10 @@
|
||||||
"bytesInOutput": 9770
|
"bytesInOutput": 9770
|
||||||
},
|
},
|
||||||
"demo/browser.js": {
|
"demo/browser.js": {
|
||||||
"bytesInOutput": 17794
|
"bytesInOutput": 18714
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"bytes": 3494059
|
"bytes": 3497406
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
|
@ -1,7 +1,7 @@
|
||||||
{
|
{
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"config.js": {
|
"config.js": {
|
||||||
"bytes": 8483,
|
"bytes": 8623,
|
||||||
"imports": []
|
"imports": []
|
||||||
},
|
},
|
||||||
"package.json": {
|
"package.json": {
|
||||||
|
@ -132,6 +132,17 @@
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
"src/embedding/embedding.js": {
|
||||||
|
"bytes": 3111,
|
||||||
|
"imports": [
|
||||||
|
{
|
||||||
|
"path": "src/tf.js"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": "src/profile.js"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
"src/emotion/emotion.js": {
|
"src/emotion/emotion.js": {
|
||||||
"bytes": 2928,
|
"bytes": 2928,
|
||||||
"imports": [
|
"imports": [
|
||||||
|
@ -159,8 +170,12 @@
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
"src/face/coords.js": {
|
||||||
|
"bytes": 37909,
|
||||||
|
"imports": []
|
||||||
|
},
|
||||||
"src/face/facemesh.js": {
|
"src/face/facemesh.js": {
|
||||||
"bytes": 2604,
|
"bytes": 2471,
|
||||||
"imports": [
|
"imports": [
|
||||||
{
|
{
|
||||||
"path": "src/tf.js"
|
"path": "src/tf.js"
|
||||||
|
@ -168,22 +183,16 @@
|
||||||
{
|
{
|
||||||
"path": "src/face/blazeface.js"
|
"path": "src/face/blazeface.js"
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"path": "src/face/keypoints.js"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"path": "src/face/facepipeline.js"
|
"path": "src/face/facepipeline.js"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"path": "src/face/uvcoords.js"
|
"path": "src/face/coords.js"
|
||||||
},
|
|
||||||
{
|
|
||||||
"path": "src/face/triangulation.js"
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"src/face/facepipeline.js": {
|
"src/face/facepipeline.js": {
|
||||||
"bytes": 13686,
|
"bytes": 13680,
|
||||||
"imports": [
|
"imports": [
|
||||||
{
|
{
|
||||||
"path": "src/tf.js"
|
"path": "src/tf.js"
|
||||||
|
@ -192,29 +201,17 @@
|
||||||
"path": "src/face/box.js"
|
"path": "src/face/box.js"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"path": "src/face/keypoints.js"
|
"path": "src/face/util.js"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"path": "src/face/util.js"
|
"path": "src/face/coords.js"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"src/face/keypoints.js": {
|
|
||||||
"bytes": 2507,
|
|
||||||
"imports": []
|
|
||||||
},
|
|
||||||
"src/face/triangulation.js": {
|
|
||||||
"bytes": 12940,
|
|
||||||
"imports": []
|
|
||||||
},
|
|
||||||
"src/face/util.js": {
|
"src/face/util.js": {
|
||||||
"bytes": 3078,
|
"bytes": 3078,
|
||||||
"imports": []
|
"imports": []
|
||||||
},
|
},
|
||||||
"src/face/uvcoords.js": {
|
|
||||||
"bytes": 19592,
|
|
||||||
"imports": []
|
|
||||||
},
|
|
||||||
"src/gender/gender.js": {
|
"src/gender/gender.js": {
|
||||||
"bytes": 3209,
|
"bytes": 3209,
|
||||||
"imports": [
|
"imports": [
|
||||||
|
@ -289,7 +286,7 @@
|
||||||
"imports": []
|
"imports": []
|
||||||
},
|
},
|
||||||
"src/human.js": {
|
"src/human.js": {
|
||||||
"bytes": 14787,
|
"bytes": 15788,
|
||||||
"imports": [
|
"imports": [
|
||||||
{
|
{
|
||||||
"path": "src/tf.js"
|
"path": "src/tf.js"
|
||||||
|
@ -306,6 +303,9 @@
|
||||||
{
|
{
|
||||||
"path": "src/emotion/emotion.js"
|
"path": "src/emotion/emotion.js"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"path": "src/embedding/embedding.js"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"path": "src/body/posenet.js"
|
"path": "src/body/posenet.js"
|
||||||
},
|
},
|
||||||
|
@ -357,16 +357,13 @@
|
||||||
"dist/human.esm-nobundle.js.map": {
|
"dist/human.esm-nobundle.js.map": {
|
||||||
"imports": [],
|
"imports": [],
|
||||||
"inputs": {},
|
"inputs": {},
|
||||||
"bytes": 769294
|
"bytes": 786297
|
||||||
},
|
},
|
||||||
"dist/human.esm-nobundle.js": {
|
"dist/human.esm-nobundle.js": {
|
||||||
"imports": [],
|
"imports": [],
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"src/face/blazeface.js": {
|
"src/face/blazeface.js": {
|
||||||
"bytesInOutput": 6970
|
"bytesInOutput": 6972
|
||||||
},
|
|
||||||
"src/face/keypoints.js": {
|
|
||||||
"bytesInOutput": 2768
|
|
||||||
},
|
},
|
||||||
"src/face/box.js": {
|
"src/face/box.js": {
|
||||||
"bytesInOutput": 2052
|
"bytesInOutput": 2052
|
||||||
|
@ -374,26 +371,29 @@
|
||||||
"src/face/util.js": {
|
"src/face/util.js": {
|
||||||
"bytesInOutput": 3043
|
"bytesInOutput": 3043
|
||||||
},
|
},
|
||||||
"src/face/facepipeline.js": {
|
"src/face/coords.js": {
|
||||||
"bytesInOutput": 12379
|
"bytesInOutput": 51519
|
||||||
},
|
},
|
||||||
"src/face/uvcoords.js": {
|
"src/face/facepipeline.js": {
|
||||||
"bytesInOutput": 20584
|
"bytesInOutput": 12370
|
||||||
},
|
},
|
||||||
"src/face/facemesh.js": {
|
"src/face/facemesh.js": {
|
||||||
"bytesInOutput": 2564
|
"bytesInOutput": 2461
|
||||||
},
|
},
|
||||||
"src/profile.js": {
|
"src/profile.js": {
|
||||||
"bytesInOutput": 1118
|
"bytesInOutput": 1118
|
||||||
},
|
},
|
||||||
"src/age/age.js": {
|
"src/age/age.js": {
|
||||||
"bytesInOutput": 1826
|
"bytesInOutput": 1830
|
||||||
},
|
},
|
||||||
"src/gender/gender.js": {
|
"src/gender/gender.js": {
|
||||||
"bytesInOutput": 2996
|
"bytesInOutput": 3000
|
||||||
},
|
},
|
||||||
"src/emotion/emotion.js": {
|
"src/emotion/emotion.js": {
|
||||||
"bytesInOutput": 2697
|
"bytesInOutput": 2701
|
||||||
|
},
|
||||||
|
"src/embedding/embedding.js": {
|
||||||
|
"bytesInOutput": 2487
|
||||||
},
|
},
|
||||||
"src/body/modelBase.js": {
|
"src/body/modelBase.js": {
|
||||||
"bytesInOutput": 866
|
"bytesInOutput": 866
|
||||||
|
@ -408,25 +408,25 @@
|
||||||
"bytesInOutput": 1764
|
"bytesInOutput": 1764
|
||||||
},
|
},
|
||||||
"src/body/keypoints.js": {
|
"src/body/keypoints.js": {
|
||||||
"bytesInOutput": 2277
|
"bytesInOutput": 2276
|
||||||
},
|
},
|
||||||
"src/body/vectors.js": {
|
"src/body/vectors.js": {
|
||||||
"bytesInOutput": 1420
|
"bytesInOutput": 1419
|
||||||
},
|
},
|
||||||
"src/body/decodePose.js": {
|
"src/body/decodePose.js": {
|
||||||
"bytesInOutput": 3797
|
"bytesInOutput": 3796
|
||||||
},
|
},
|
||||||
"src/body/decodeMultiple.js": {
|
"src/body/decodeMultiple.js": {
|
||||||
"bytesInOutput": 2026
|
"bytesInOutput": 2026
|
||||||
},
|
},
|
||||||
"src/body/util.js": {
|
"src/body/util.js": {
|
||||||
"bytesInOutput": 2410
|
"bytesInOutput": 2409
|
||||||
},
|
},
|
||||||
"src/body/modelPoseNet.js": {
|
"src/body/modelPoseNet.js": {
|
||||||
"bytesInOutput": 2012
|
"bytesInOutput": 2014
|
||||||
},
|
},
|
||||||
"src/body/posenet.js": {
|
"src/body/posenet.js": {
|
||||||
"bytesInOutput": 963
|
"bytesInOutput": 962
|
||||||
},
|
},
|
||||||
"src/hand/handdetector.js": {
|
"src/hand/handdetector.js": {
|
||||||
"bytesInOutput": 4402
|
"bytesInOutput": 4402
|
||||||
|
@ -438,7 +438,7 @@
|
||||||
"bytesInOutput": 256590
|
"bytesInOutput": 256590
|
||||||
},
|
},
|
||||||
"src/hand/handpose.js": {
|
"src/hand/handpose.js": {
|
||||||
"bytesInOutput": 3064
|
"bytesInOutput": 3066
|
||||||
},
|
},
|
||||||
"src/gesture.js": {
|
"src/gesture.js": {
|
||||||
"bytesInOutput": 3038
|
"bytesInOutput": 3038
|
||||||
|
@ -452,11 +452,8 @@
|
||||||
"src/tf.js": {
|
"src/tf.js": {
|
||||||
"bytesInOutput": 174
|
"bytesInOutput": 174
|
||||||
},
|
},
|
||||||
"src/face/triangulation.js": {
|
|
||||||
"bytesInOutput": 17898
|
|
||||||
},
|
|
||||||
"src/human.js": {
|
"src/human.js": {
|
||||||
"bytesInOutput": 227
|
"bytesInOutput": 278
|
||||||
},
|
},
|
||||||
"src/hand/box.js": {
|
"src/hand/box.js": {
|
||||||
"bytesInOutput": 3037
|
"bytesInOutput": 3037
|
||||||
|
@ -468,7 +465,7 @@
|
||||||
"bytesInOutput": 186
|
"bytesInOutput": 186
|
||||||
},
|
},
|
||||||
"config.js": {
|
"config.js": {
|
||||||
"bytesInOutput": 1933
|
"bytesInOutput": 2048
|
||||||
},
|
},
|
||||||
"package.json": {
|
"package.json": {
|
||||||
"bytesInOutput": 0
|
"bytesInOutput": 0
|
||||||
|
@ -477,13 +474,13 @@
|
||||||
"bytesInOutput": 23
|
"bytesInOutput": 23
|
||||||
},
|
},
|
||||||
"src/human.js": {
|
"src/human.js": {
|
||||||
"bytesInOutput": 12333
|
"bytesInOutput": 13232
|
||||||
},
|
},
|
||||||
"src/human.js": {
|
"src/human.js": {
|
||||||
"bytesInOutput": 0
|
"bytesInOutput": 0
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"bytes": 419161
|
"bytes": 432856
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -34113,7 +34113,7 @@ Expected: ${expectedFlat}.`);
   function permute(args) {
     return new Permute(args);
   }
-  function embedding(args) {
+  function embedding2(args) {
     return new Embedding(args);
   }
   function add$3(args) {
@@ -34271,7 +34271,7 @@ Expected: ${expectedFlat}.`);
     repeatVector,
     reshape: reshape$1,
     permute,
-    embedding,
+    embedding: embedding2,
     add: add$3,
     average: average$1,
     concatenate: concatenate$2,
@@ -65751,13 +65751,13 @@ var require_blazeface = __commonJS((exports) => {
       return faces;
     }
   }
-  async function load(config2) {
+  async function load2(config2) {
     const blazeface = await loadGraphModel(config2.detector.modelPath, {fromTFHub: config2.detector.modelPath.includes("tfhub.dev")});
     const model = new BlazeFaceModel(blazeface, config2);
     console.log(`Human: load model: ${config2.detector.modelPath.match(/\/(.*)\./)[1]}`);
     return model;
   }
-  exports.load = load;
+  exports.load = load2;
   exports.BlazeFaceModel = BlazeFaceModel;
   exports.disposeBox = disposeBox;
 });
@@ -69843,15 +69843,15 @@ var require_facepipeline = __commonJS((exports) => {
        rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
        rotationMatrix = util30.buildRotationMatrix(-angle, faceCenter);
      }
-     const boxCPU = {startPoint: box.startPoint, endPoint: box.endPoint};
-     const face2 = bounding.cutBoxFromImageAndResize(boxCPU, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);
+     const face2 = bounding.cutBoxFromImageAndResize({startPoint: box.startPoint, endPoint: box.endPoint}, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);
+     const outputFace = config2.detector.rotation ? tf.image.rotateWithOffset(face2, angle) : face2;
      if (!config2.mesh.enabled) {
        const prediction2 = {
          coords: null,
          box,
          faceConfidence: null,
          confidence: box.confidence,
-         image: face2
+         image: outputFace
        };
        return prediction2;
      }
@@ -69896,7 +69896,7 @@ var require_facepipeline = __commonJS((exports) => {
        box: landmarksBox,
        faceConfidence: confidenceVal,
        confidence: box.confidence,
-       image: face2
+       image: outputFace
      };
      this.storedBoxes[i] = {...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal};
      return prediction;
@@ -69959,7 +69959,7 @@ var require_facemesh = __commonJS((exports) => {
      return results;
    }
  }
-  async function load(config2) {
+  async function load2(config2) {
    const models = await Promise.all([
      blazeface.load(config2),
      loadGraphModel(config2.mesh.modelPath, {fromTFHub: config2.mesh.modelPath.includes("tfhub.dev")}),
@@ -69970,7 +69970,7 @@ var require_facemesh = __commonJS((exports) => {
    console.log(`Human: load model: ${config2.iris.modelPath.match(/\/(.*)\./)[1]}`);
    return faceMesh;
  }
-  exports.load = load;
+  exports.load = load2;
  exports.MediaPipeFaceMesh = MediaPipeFaceMesh;
  exports.triangulation = coords.TRI468;
 });
@@ -70008,28 +70008,23 @@ var require_age = __commonJS((exports) => {
  const models = {};
  let last = {age: 0};
  let frame = Number.MAX_SAFE_INTEGER;
-  const zoom = [0, 0];
-  async function load(config2) {
+  async function load2(config2) {
    if (!models.age) {
      models.age = await loadGraphModel(config2.face.age.modelPath);
      console.log(`Human: load model: ${config2.face.age.modelPath.match(/\/(.*)\./)[1]}`);
    }
    return models.age;
  }
-  async function predict(image2, config2) {
+  async function predict2(image2, config2) {
+    if (!models.age)
+      return null;
    if (frame < config2.face.age.skipFrames && last.age && last.age > 0) {
      frame += 1;
      return last;
    }
    frame = 0;
    return new Promise(async (resolve) => {
-      const box = [[
-        image2.shape[1] * zoom[0] / image2.shape[1],
-        image2.shape[2] * zoom[1] / image2.shape[2],
-        (image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
-        (image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
-      ]];
-      const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.age.inputSize, config2.face.age.inputSize]);
+      const resize = tf.image.resizeBilinear(image2, [config2.face.age.inputSize, config2.face.age.inputSize], false);
      const enhance = tf.mul(resize, [255]);
      tf.dispose(resize);
      let ageT;
@@ -70053,8 +70048,8 @@ var require_age = __commonJS((exports) => {
      resolve(obj);
    });
  }
-  exports.predict = predict;
-  exports.load = load;
+  exports.predict = predict2;
+  exports.load = load2;
 });

 // src/gender/gender.js
@@ -70064,9 +70059,8 @@ var require_gender = __commonJS((exports) => {
  let last = {gender: ""};
  let frame = Number.MAX_SAFE_INTEGER;
  let alternative = false;
-  const zoom = [0, 0];
  const rgb = [0.2989, 0.587, 0.114];
-  async function load(config2) {
+  async function load2(config2) {
    if (!models.gender) {
      models.gender = await loadGraphModel(config2.face.gender.modelPath);
      alternative = models.gender.inputs[0].shape[3] === 1;
@@ -70074,20 +70068,16 @@ var require_gender = __commonJS((exports) => {
    }
    return models.gender;
  }
-  async function predict(image2, config2) {
+  async function predict2(image2, config2) {
+    if (!models.gender)
+      return null;
    if (frame < config2.face.gender.skipFrames && last.gender !== "") {
      frame += 1;
      return last;
    }
    frame = 0;
    return new Promise(async (resolve) => {
-      const box = [[
-        image2.shape[1] * zoom[0] / image2.shape[1],
-        image2.shape[2] * zoom[1] / image2.shape[2],
-        (image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
-        (image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
-      ]];
-      const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.gender.inputSize, config2.face.gender.inputSize]);
+      const resize = tf.image.resizeBilinear(image2, [config2.face.gender.inputSize, config2.face.gender.inputSize], false);
      let enhance;
      if (alternative) {
        enhance = tf.tidy(() => {
@@ -70135,8 +70125,8 @@ var require_gender = __commonJS((exports) => {
      resolve(obj);
    });
  }
-  exports.predict = predict;
-  exports.load = load;
+  exports.predict = predict2;
+  exports.load = load2;
 });

 // src/emotion/emotion.js
@@ -70146,30 +70136,25 @@ var require_emotion = __commonJS((exports) => {
  const models = {};
  let last = [];
  let frame = Number.MAX_SAFE_INTEGER;
-  const zoom = [0, 0];
  const rgb = [0.2989, 0.587, 0.114];
  const scale = 1;
-  async function load(config2) {
+  async function load2(config2) {
    if (!models.emotion) {
      models.emotion = await loadGraphModel(config2.face.emotion.modelPath);
      console.log(`Human: load model: ${config2.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
    }
    return models.emotion;
  }
-  async function predict(image2, config2) {
+  async function predict2(image2, config2) {
+    if (!models.emotion)
+      return null;
    if (frame < config2.face.emotion.skipFrames && last.length > 0) {
      frame += 1;
      return last;
    }
    frame = 0;
    return new Promise(async (resolve) => {
-      const box = [[
-        image2.shape[1] * zoom[0] / image2.shape[1],
-        image2.shape[2] * zoom[1] / image2.shape[2],
-        (image2.shape[1] - image2.shape[1] * zoom[0]) / image2.shape[1],
-        (image2.shape[2] - image2.shape[2] * zoom[1]) / image2.shape[2]
-      ]];
-      const resize = tf.image.cropAndResize(image2, box, [0], [config2.face.emotion.inputSize, config2.face.emotion.inputSize]);
+      const resize = tf.image.resizeBilinear(image2, [config2.face.emotion.inputSize, config2.face.emotion.inputSize], false);
      const [red, green, blue] = tf.split(resize, 3, 3);
      resize.dispose();
      const redNorm = tf.mul(red, rgb[0]);
@@ -70208,8 +70193,53 @@ var require_emotion = __commonJS((exports) => {
      resolve(obj);
    });
  }
-  exports.predict = predict;
-  exports.load = load;
+  exports.predict = predict2;
+  exports.load = load2;
+});
+
+// src/embedding/embedding.js
+var require_embedding = __commonJS((exports) => {
+  const profile2 = __toModule(require_profile());
+  const models = {};
+  async function load2(config2) {
+    if (!models.embedding) {
+      models.embedding = await loadGraphModel(config2.face.embedding.modelPath);
+      console.log(`Human: load model: ${config2.face.embedding.modelPath.match(/\/(.*)\./)[1]}`);
+    }
+    return models.embedding;
+  }
+  function simmilarity2(embedding1, embedding2) {
+    if ((embedding1 == null ? void 0 : embedding1.length) !== (embedding2 == null ? void 0 : embedding2.length))
+      return 0;
+    const distance = 10 * Math.sqrt(embedding1.map((val, i) => val - embedding2[i]).reduce((dist2, diff) => dist2 + diff ** 2, 0));
+    const confidence = 2 * (0.5 - distance);
+    return Math.trunc(1e3 * confidence) / 1e3;
+  }
+  async function predict2(image2, config2) {
+    if (!models.embedding)
+      return null;
+    return new Promise(async (resolve) => {
+      const resize = tf.image.resizeBilinear(image2, [config2.face.embedding.inputSize, config2.face.embedding.inputSize], false);
+      let data2 = [];
+      if (config2.face.embedding.enabled) {
+        if (!config2.profile) {
+          const embeddingT = await models.embedding.predict({img_inputs: resize});
+          data2 = [...embeddingT.dataSync()];
+          tf.dispose(embeddingT);
+        } else {
+          const profileData = await tf.profile(() => models.embedding.predict({img_inputs: resize}));
+          data2 = [...profileData.result.dataSync()];
+          profileData.result.dispose();
+          profile2.run("emotion", profileData);
+        }
+      }
+      resize.dispose();
+      resolve(data2);
+    });
+  }
+  exports.predict = predict2;
+  exports.simmilarity = simmilarity2;
+  exports.load = load2;
 });

 // src/body/modelBase.js
@@ -70711,13 +70741,13 @@ var require_modelPoseNet = __commonJS((exports) => {
    }
  }
  exports.PoseNet = PoseNet;
-  async function load(config2) {
+  async function load2(config2) {
    const graphModel = await loadGraphModel(config2.body.modelPath);
    const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);
    console.log(`Human: load model: ${config2.body.modelPath.match(/\/(.*)\./)[1]}`);
    return new PoseNet(mobilenet);
  }
-  exports.load = load;
+  exports.load = load2;
 });

 // src/body/posenet.js
@@ -88732,7 +88762,7 @@ var require_handpose = __commonJS((exports) => {
    }
  }
  exports.HandPose = HandPose;
-  async function load(config2) {
+  async function load2(config2) {
    const [handDetectorModel, handPoseModel] = await Promise.all([
      loadGraphModel(config2.detector.modelPath, {fromTFHub: config2.detector.modelPath.includes("tfhub.dev")}),
      loadGraphModel(config2.skeleton.modelPath, {fromTFHub: config2.skeleton.modelPath.includes("tfhub.dev")})
@@ -88744,7 +88774,7 @@ var require_handpose = __commonJS((exports) => {
    console.log(`Human: load model: ${config2.skeleton.modelPath.match(/\/(.*)\./)[1]}`);
    return handpose2;
  }
-  exports.load = load;
+  exports.load = load2;
 });

 // src/gesture.js
@@ -98582,6 +98612,7 @@ const facemesh = __toModule(require_facemesh());
 const age = __toModule(require_age());
 const gender = __toModule(require_gender());
 const emotion = __toModule(require_emotion());
+const embedding = __toModule(require_embedding());
 const posenet = __toModule(require_posenet());

 // src/hand/box.js
@@ -98783,6 +98814,7 @@ var config_default = {
    detector: {
      modelPath: "../models/blazeface-back.json",
      inputSize: 256,
+      rotation: false,
      maxFaces: 10,
      skipFrames: 15,
      minConfidence: 0.5,
@@ -98818,6 +98850,11 @@ var config_default = {
      minConfidence: 0.2,
      skipFrames: 15,
      modelPath: "../models/emotion-large.json"
+    },
+    embedding: {
+      enabled: false,
+      inputSize: 112,
+      modelPath: "../models/mobilefacenet.json"
    }
  },
  body: {
@@ -98938,6 +98975,11 @@ class Human {
    }
    return null;
  }
+  simmilarity(embedding1, embedding2) {
+    if (this.config.face.embedding.enabled)
+      return embedding.simmilarity(embedding1, embedding2);
+    return 0;
+  }
  async load(userConfig) {
    this.state = "load";
    const timeStamp = now2();
@@ -98956,6 +98998,7 @@ class Human {
      this.models.age,
      this.models.gender,
      this.models.emotion,
+      this.models.embedding,
      this.models.posenet,
      this.models.handpose
    ] = await Promise.all([
@@ -98963,6 +99006,7 @@ class Human {
      this.models.age || (this.config.face.enabled && this.config.face.age.enabled ? age.load(this.config) : null),
      this.models.gender || (this.config.face.enabled && this.config.face.gender.enabled ? gender.load(this.config) : null),
      this.models.emotion || (this.config.face.enabled && this.config.face.emotion.enabled ? emotion.load(this.config) : null),
+      this.models.embedding || (this.config.face.enabled && this.config.face.embedding.enabled ? embedding.load(this.config) : null),
      this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
      this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null)
    ]);
@@ -98975,6 +99019,8 @@ class Human {
      this.models.gender = await gender.load(this.config);
    if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion)
      this.models.emotion = await emotion.load(this.config);
+    if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding)
+      this.models.embedding = await embedding.load(this.config);
    if (this.config.body.enabled && !this.models.posenet)
      this.models.posenet = await posenet.load(this.config);
    if (this.config.hand.enabled && !this.models.handpose)
@@ -99016,6 +99062,7 @@ class Human {
    let ageRes;
    let genderRes;
    let emotionRes;
+    let embeddingRes;
    const faceRes = [];
    this.state = "run:face";
    timeStamp = now2();
@@ -99055,8 +99102,18 @@ class Human {
      this.perf.emotion = Math.trunc(now2() - timeStamp);
    }
    this.analyze("End Emotion:");
+    this.analyze("Start Embedding:");
    if (this.config.async) {
-      [ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);
+      embeddingRes = this.config.face.embedding.enabled ? embedding.predict(face2.image, this.config) : {};
+    } else {
+      this.state = "run:embedding";
+      timeStamp = now2();
+      embeddingRes = this.config.face.embedding.enabled ? await embedding.predict(face2.image, this.config) : {};
+      this.perf.embedding = Math.trunc(now2() - timeStamp);
+    }
+    this.analyze("End Emotion:");
+    if (this.config.async) {
+      [ageRes, genderRes, emotionRes, embeddingRes] = await Promise.all([ageRes, genderRes, emotionRes, embeddingRes]);
    }
    this.analyze("Finish Face:");
    face2.image.dispose();
@@ -99070,6 +99127,7 @@ class Human {
      gender: genderRes.gender,
      genderConfidence: genderRes.confidence,
      emotion: emotionRes,
+      embedding: embeddingRes,
      iris: irisSize !== 0 ? Math.trunc(irisSize) / 100 : 0
    });
    this.analyze("End Face");
@@ -99095,18 +99153,18 @@ class Human {
    return process3.canvas;
  }
  async detect(input, userConfig = {}) {
-    this.state = "config";
-    let timeStamp;
-    this.config = mergeDeep(this.config, userConfig);
-    if (!this.config.videoOptimized)
-      this.config = mergeDeep(this.config, disableSkipFrames);
-    this.state = "check";
-    const error = this.sanity(input);
-    if (error) {
-      this.log(error, input);
-      return {error};
-    }
    return new Promise(async (resolve) => {
+      this.state = "config";
+      let timeStamp;
+      this.config = mergeDeep(this.config, userConfig);
+      if (!this.config.videoOptimized)
+        this.config = mergeDeep(this.config, disableSkipFrames);
+      this.state = "check";
+      const error = this.sanity(input);
+      if (error) {
+        this.log(error, input);
+        resolve({error});
+      }
      let poseRes;
      let handRes;
      let faceRes;
@@ -99174,10 +99232,12 @@ class Human {
      resolve({face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process3.canvas});
    });
  }
-  async warmup(userConfig) {
-    const warmup = new ImageData(255, 255);
-    await this.detect(warmup, userConfig);
+  async warmup(userConfig, sample) {
+    if (!sample)
+      sample = new ImageData(255, 255);
+    const warmup = await this.detect(sample, userConfig);
    this.log("warmed up");
+    return warmup;
  }
 }
 export {
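Taken together, the bundle changes above add a config.face.embedding section, load the MobileFaceNet model alongside the other face models, attach an embedding array to each face result, and expose a new simmilarity() method on the Human class. A minimal usage sketch (not part of this commit; the pre-constructed human instance and the already-loaded imageA/imageB inputs are assumed):

// sketch only: compare the faces found in two already-loaded images or canvases
async function compareFaces(imageA, imageB) {
  const config = { face: { embedding: { enabled: true } } }; // embedding is disabled by default
  const first = await human.detect(imageA, config);
  const second = await human.detect(imageB, config);
  if (!first.face[0] || !second.face[0]) return 0;
  // embeddings are plain number arrays, so they can also be stored and compared later
  return human.simmilarity(first.face[0].embedding, second.face[0].embedding);
}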
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
   "inputs": {
     "config.js": {
-      "bytes": 8483,
+      "bytes": 8721,
       "imports": []
     },
     "node_modules/@tensorflow/tfjs-backend-wasm/dist/backend_wasm.js": {
@@ -12320,7 +12320,7 @@
       "imports": []
     },
     "src/age/age.js": {
-      "bytes": 1912,
+      "bytes": 1941,
       "imports": [
         {
           "path": "src/tf.js"
@@ -12443,8 +12443,19 @@
         }
       ]
     },
+    "src/embedding/embedding.js": {
+      "bytes": 2105,
+      "imports": [
+        {
+          "path": "src/tf.js"
+        },
+        {
+          "path": "src/profile.js"
+        }
+      ]
+    },
     "src/emotion/emotion.js": {
-      "bytes": 2928,
+      "bytes": 2979,
       "imports": [
         {
           "path": "src/tf.js"
@@ -12492,7 +12503,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 13680,
+      "bytes": 13762,
       "imports": [
         {
           "path": "src/tf.js"
@@ -12513,7 +12524,7 @@
       "imports": []
     },
     "src/gender/gender.js": {
-      "bytes": 3209,
+      "bytes": 3382,
       "imports": [
         {
           "path": "src/tf.js"
@@ -12586,7 +12597,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 14787,
+      "bytes": 16020,
       "imports": [
         {
           "path": "src/tf.js"
@@ -12603,6 +12614,9 @@
       {
         "path": "src/emotion/emotion.js"
       },
+      {
+        "path": "src/embedding/embedding.js"
+      },
       {
         "path": "src/body/posenet.js"
       },
@@ -12681,7 +12695,7 @@
   "dist/human.esm.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 5469818
+    "bytes": 5477319
   },
   "dist/human.esm.js": {
     "imports": [],
@@ -12696,7 +12710,7 @@
       "bytesInOutput": 6782
     },
     "node_modules/@tensorflow/tfjs/dist/tf.es2017.js": {
-      "bytesInOutput": 2548114
+      "bytesInOutput": 2548127
     },
     "empty:path": {
       "bytesInOutput": 42
@@ -12714,7 +12728,7 @@
       "bytesInOutput": 39631
     },
     "src/face/blazeface.js": {
-      "bytesInOutput": 6972
+      "bytesInOutput": 6974
     },
     "src/face/box.js": {
       "bytesInOutput": 2052
@@ -12726,22 +12740,25 @@
       "bytesInOutput": 51519
     },
     "src/face/facepipeline.js": {
-      "bytesInOutput": 12388
+      "bytesInOutput": 12471
     },
     "src/face/facemesh.js": {
-      "bytesInOutput": 2459
+      "bytesInOutput": 2461
     },
     "src/profile.js": {
       "bytesInOutput": 1118
     },
     "src/age/age.js": {
-      "bytesInOutput": 1826
+      "bytesInOutput": 1563
     },
     "src/gender/gender.js": {
-      "bytesInOutput": 2996
+      "bytesInOutput": 2736
     },
     "src/emotion/emotion.js": {
-      "bytesInOutput": 2697
+      "bytesInOutput": 2438
+    },
+    "src/embedding/embedding.js": {
+      "bytesInOutput": 1769
     },
     "src/body/modelBase.js": {
       "bytesInOutput": 866
@@ -12771,7 +12788,7 @@
       "bytesInOutput": 2411
     },
     "src/body/modelPoseNet.js": {
-      "bytesInOutput": 2020
+      "bytesInOutput": 2022
     },
     "src/body/posenet.js": {
       "bytesInOutput": 974
@@ -12786,7 +12803,7 @@
       "bytesInOutput": 256590
     },
     "src/hand/handpose.js": {
-      "bytesInOutput": 3064
+      "bytesInOutput": 3066
     },
     "src/gesture.js": {
       "bytesInOutput": 3038
@@ -13392,7 +13409,7 @@
       "bytesInOutput": 42
     },
     "src/human.js": {
-      "bytesInOutput": 227
+      "bytesInOutput": 278
     },
     "src/hand/box.js": {
       "bytesInOutput": 3037
@@ -13404,7 +13421,7 @@
       "bytesInOutput": 186
     },
     "config.js": {
-      "bytesInOutput": 1933
+      "bytesInOutput": 2072
     },
     "package.json": {
       "bytesInOutput": 0
@@ -13413,13 +13430,13 @@
       "bytesInOutput": 24
     },
     "src/human.js": {
-      "bytesInOutput": 12359
+      "bytesInOutput": 13501
     },
     "src/human.js": {
       "bytesInOutput": 0
     }
   },
-  "bytes": 3453585
+  "bytes": 3456039
 }
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
   "inputs": {
     "config.js": {
-      "bytes": 8483,
+      "bytes": 8623,
       "imports": []
     },
     "node_modules/@tensorflow/tfjs-backend-wasm/dist/backend_wasm.js": {
@@ -12443,6 +12443,17 @@
         }
       ]
     },
+    "src/embedding/embedding.js": {
+      "bytes": 3111,
+      "imports": [
+        {
+          "path": "src/tf.js"
+        },
+        {
+          "path": "src/profile.js"
+        }
+      ]
+    },
     "src/emotion/emotion.js": {
       "bytes": 2928,
       "imports": [
@@ -12470,8 +12481,12 @@
         }
       ]
     },
+    "src/face/coords.js": {
+      "bytes": 37909,
+      "imports": []
+    },
     "src/face/facemesh.js": {
-      "bytes": 2604,
+      "bytes": 2471,
       "imports": [
         {
           "path": "src/tf.js"
@@ -12479,22 +12494,16 @@
       {
         "path": "src/face/blazeface.js"
       },
-      {
-        "path": "src/face/keypoints.js"
-      },
       {
         "path": "src/face/facepipeline.js"
       },
       {
-        "path": "src/face/uvcoords.js"
-      },
-      {
-        "path": "src/face/triangulation.js"
+        "path": "src/face/coords.js"
       }
     ]
   },
     "src/face/facepipeline.js": {
-      "bytes": 13686,
+      "bytes": 13680,
       "imports": [
         {
           "path": "src/tf.js"
@@ -12503,29 +12512,17 @@
       {
         "path": "src/face/box.js"
       },
       {
-        "path": "src/face/keypoints.js"
+        "path": "src/face/util.js"
       },
       {
-        "path": "src/face/util.js"
+        "path": "src/face/coords.js"
       }
     ]
   },
-    "src/face/keypoints.js": {
-      "bytes": 2507,
-      "imports": []
-    },
-    "src/face/triangulation.js": {
-      "bytes": 12940,
-      "imports": []
-    },
     "src/face/util.js": {
       "bytes": 3078,
       "imports": []
     },
-    "src/face/uvcoords.js": {
-      "bytes": 19592,
-      "imports": []
-    },
     "src/gender/gender.js": {
       "bytes": 3209,
       "imports": [
@@ -12600,7 +12597,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 14787,
+      "bytes": 15788,
       "imports": [
         {
           "path": "src/tf.js"
@@ -12617,6 +12614,9 @@
       {
         "path": "src/emotion/emotion.js"
       },
+      {
+        "path": "src/embedding/embedding.js"
+      },
       {
         "path": "src/body/posenet.js"
       },
@@ -12695,7 +12695,7 @@
   "dist/human.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 5436159
+    "bytes": 5451131
   },
   "dist/human.js": {
     "imports": [],
@@ -12730,23 +12730,20 @@
     "src/face/blazeface.js": {
       "bytesInOutput": 7270
     },
-    "src/face/keypoints.js": {
-      "bytesInOutput": 2936
-    },
     "src/face/box.js": {
       "bytesInOutput": 2154
     },
     "src/face/util.js": {
       "bytesInOutput": 3205
     },
-    "src/face/facepipeline.js": {
-      "bytesInOutput": 12869
+    "src/face/coords.js": {
+      "bytesInOutput": 59051
     },
-    "src/face/uvcoords.js": {
-      "bytesInOutput": 21528
+    "src/face/facepipeline.js": {
+      "bytesInOutput": 12860
     },
     "src/face/facemesh.js": {
-      "bytesInOutput": 2665
+      "bytesInOutput": 2556
     },
     "src/profile.js": {
       "bytesInOutput": 1156
@@ -12760,6 +12757,9 @@
     "src/emotion/emotion.js": {
       "bytesInOutput": 2809
     },
+    "src/embedding/embedding.js": {
+      "bytesInOutput": 2590
+    },
     "src/body/modelBase.js": {
       "bytesInOutput": 920
     },
@@ -12773,25 +12773,25 @@
       "bytesInOutput": 1846
     },
     "src/body/keypoints.js": {
-      "bytesInOutput": 2447
+      "bytesInOutput": 2446
     },
     "src/body/vectors.js": {
-      "bytesInOutput": 1534
+      "bytesInOutput": 1533
     },
     "src/body/decodePose.js": {
-      "bytesInOutput": 3929
+      "bytesInOutput": 3928
     },
     "src/body/decodeMultiple.js": {
       "bytesInOutput": 2100
     },
     "src/body/util.js": {
-      "bytesInOutput": 2536
+      "bytesInOutput": 2535
    },
     "src/body/modelPoseNet.js": {
       "bytesInOutput": 2095
     },
     "src/body/posenet.js": {
-      "bytesInOutput": 1015
+      "bytesInOutput": 1014
     },
     "src/hand/handdetector.js": {
       "bytesInOutput": 4572
@@ -12815,7 +12815,7 @@
       "bytesInOutput": 5588
     },
     "src/human.js": {
-      "bytesInOutput": 14241
+      "bytesInOutput": 15261
     },
     "src/tf.js": {
       "bytesInOutput": 46
@@ -13411,9 +13411,6 @@
     "src/tf.js": {
       "bytesInOutput": 44
     },
-    "src/face/triangulation.js": {
-      "bytesInOutput": 23182
-    },
     "src/hand/box.js": {
       "bytesInOutput": 3183
     },
@@ -13421,7 +13418,7 @@
       "bytesInOutput": 3038
     },
     "config.js": {
-      "bytesInOutput": 2129
+      "bytesInOutput": 2254
     },
     "package.json": {
       "bytesInOutput": 0
@@ -13430,7 +13427,7 @@
       "bytesInOutput": 26
     }
   },
-  "bytes": 3632392
+  "bytes": 3647380
 }
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
 {
   "inputs": {
     "config.js": {
-      "bytes": 8483,
+      "bytes": 8623,
       "imports": []
     },
     "package.json": {
@@ -132,6 +132,17 @@
         }
       ]
     },
+    "src/embedding/embedding.js": {
+      "bytes": 3111,
+      "imports": [
+        {
+          "path": "src/tf.js"
+        },
+        {
+          "path": "src/profile.js"
+        }
+      ]
+    },
     "src/emotion/emotion.js": {
       "bytes": 2928,
       "imports": [
@@ -159,8 +170,12 @@
         }
       ]
     },
+    "src/face/coords.js": {
+      "bytes": 37909,
+      "imports": []
+    },
     "src/face/facemesh.js": {
-      "bytes": 2604,
+      "bytes": 2471,
       "imports": [
         {
           "path": "src/tf.js"
@@ -168,22 +183,16 @@
       {
         "path": "src/face/blazeface.js"
       },
-      {
-        "path": "src/face/keypoints.js"
-      },
       {
         "path": "src/face/facepipeline.js"
       },
       {
-        "path": "src/face/uvcoords.js"
-      },
-      {
-        "path": "src/face/triangulation.js"
+        "path": "src/face/coords.js"
       }
     ]
   },
     "src/face/facepipeline.js": {
-      "bytes": 13686,
+      "bytes": 13680,
       "imports": [
         {
           "path": "src/tf.js"
@@ -192,29 +201,17 @@
       {
         "path": "src/face/box.js"
       },
       {
-        "path": "src/face/keypoints.js"
+        "path": "src/face/util.js"
      },
       {
-        "path": "src/face/util.js"
+        "path": "src/face/coords.js"
       }
     ]
   },
-    "src/face/keypoints.js": {
-      "bytes": 2507,
-      "imports": []
-    },
-    "src/face/triangulation.js": {
-      "bytes": 12940,
-      "imports": []
-    },
     "src/face/util.js": {
       "bytes": 3078,
       "imports": []
     },
-    "src/face/uvcoords.js": {
-      "bytes": 19592,
-      "imports": []
-    },
     "src/gender/gender.js": {
       "bytes": 3209,
       "imports": [
@@ -289,7 +286,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 14787,
+      "bytes": 15788,
       "imports": [
         {
           "path": "src/tf.js"
@@ -306,6 +303,9 @@
       {
         "path": "src/emotion/emotion.js"
       },
+      {
+        "path": "src/embedding/embedding.js"
+      },
       {
         "path": "src/body/posenet.js"
       },
@@ -357,16 +357,13 @@
   "dist/human.node-nobundle.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 784541
+    "bytes": 802566
   },
   "dist/human.node-nobundle.js": {
     "imports": [],
     "inputs": {
       "src/face/blazeface.js": {
-        "bytesInOutput": 6974
-      },
-      "src/face/keypoints.js": {
-        "bytesInOutput": 2771
+        "bytesInOutput": 6976
       },
       "src/face/box.js": {
         "bytesInOutput": 2059
@@ -374,26 +371,29 @@
       "src/face/util.js": {
         "bytesInOutput": 3054
       },
-      "src/face/facepipeline.js": {
-        "bytesInOutput": 12381
+      "src/face/coords.js": {
+        "bytesInOutput": 51530
       },
-      "src/face/uvcoords.js": {
-        "bytesInOutput": 20586
+      "src/face/facepipeline.js": {
+        "bytesInOutput": 12372
       },
       "src/face/facemesh.js": {
-        "bytesInOutput": 2569
+        "bytesInOutput": 2465
       },
       "src/profile.js": {
         "bytesInOutput": 1120
       },
       "src/age/age.js": {
-        "bytesInOutput": 1829
+        "bytesInOutput": 1833
       },
       "src/gender/gender.js": {
-        "bytesInOutput": 2999
+        "bytesInOutput": 3003
       },
       "src/emotion/emotion.js": {
-        "bytesInOutput": 2700
+        "bytesInOutput": 2704
+      },
+      "src/embedding/embedding.js": {
+        "bytesInOutput": 2490
       },
       "src/body/modelBase.js": {
         "bytesInOutput": 868
@@ -408,25 +408,25 @@
         "bytesInOutput": 1766
       },
       "src/body/keypoints.js": {
-        "bytesInOutput": 2288
+        "bytesInOutput": 2287
       },
       "src/body/vectors.js": {
-        "bytesInOutput": 1428
+        "bytesInOutput": 1427
       },
       "src/body/decodePose.js": {
-        "bytesInOutput": 3799
+        "bytesInOutput": 3798
       },
       "src/body/decodeMultiple.js": {
         "bytesInOutput": 2028
       },
       "src/body/util.js": {
-        "bytesInOutput": 2418
+        "bytesInOutput": 2417
       },
       "src/body/modelPoseNet.js": {
-        "bytesInOutput": 2015
+        "bytesInOutput": 2017
      },
       "src/body/posenet.js": {
-        "bytesInOutput": 977
+        "bytesInOutput": 976
       },
       "src/hand/handdetector.js": {
         "bytesInOutput": 4404
@@ -438,7 +438,7 @@
         "bytesInOutput": 256592
       },
       "src/hand/handpose.js": {
-        "bytesInOutput": 3067
+        "bytesInOutput": 3069
       },
       "src/gesture.js": {
         "bytesInOutput": 3042
@@ -455,11 +455,8 @@
       "src/tf.js": {
         "bytesInOutput": 193
       },
-      "src/face/triangulation.js": {
-        "bytesInOutput": 17898
-      },
       "src/human.js": {
-        "bytesInOutput": 227
+        "bytesInOutput": 278
       },
       "src/hand/box.js": {
         "bytesInOutput": 3037
@@ -471,7 +468,7 @@
         "bytesInOutput": 186
       },
       "config.js": {
-        "bytesInOutput": 1933
+        "bytesInOutput": 2048
       },
       "package.json": {
         "bytesInOutput": 0
@@ -480,10 +477,10 @@
         "bytesInOutput": 23
       },
       "src/human.js": {
-        "bytesInOutput": 12338
+        "bytesInOutput": 13237
       }
     },
-    "bytes": 419508
+    "bytes": 433211
   }
 }
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -5,9 +5,6 @@ const models = {};
 let last = { age: 0 };
 let frame = Number.MAX_SAFE_INTEGER;

-// tuning values
-const zoom = [0, 0]; // 0..1 meaning 0%..100%
-
 async function load(config) {
   if (!models.age) {
     models.age = await loadGraphModel(config.face.age.modelPath);
@@ -18,12 +15,15 @@ async function load(config) {
 }

 async function predict(image, config) {
+  if (!models.age) return null;
   if ((frame < config.face.age.skipFrames) && last.age && (last.age > 0)) {
     frame += 1;
     return last;
   }
   frame = 0;
   return new Promise(async (resolve) => {
+    /*
+    const zoom = [0, 0]; // 0..1 meaning 0%..100%
     const box = [[
       (image.shape[1] * zoom[0]) / image.shape[1],
       (image.shape[2] * zoom[1]) / image.shape[2],
@@ -31,7 +31,8 @@ async function predict(image, config) {
       (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],
     ]];
     const resize = tf.image.cropAndResize(image, box, [0], [config.face.age.inputSize, config.face.age.inputSize]);
-    // const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);
+    */
+    const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);
     const enhance = tf.mul(resize, [255.0]);
     tf.dispose(resize);

@@ -0,0 +1,56 @@
+import { tf, loadGraphModel } from '../tf.js';
+import * as profile from '../profile.js';
+
+// based on https://github.com/sirius-ai/MobileFaceNet_TF
+// model converted from https://github.com/sirius-ai/MobileFaceNet_TF/files/3551493/FaceMobileNet192_train_false.zip
+
+const models = {};
+
+async function load(config) {
+  if (!models.embedding) {
+    models.embedding = await loadGraphModel(config.face.embedding.modelPath);
+    // eslint-disable-next-line no-console
+    console.log(`Human: load model: ${config.face.embedding.modelPath.match(/\/(.*)\./)[1]}`);
+  }
+  return models.embedding;
+}
+
+function simmilarity(embedding1, embedding2) {
+  if (embedding1?.length !== embedding2?.length) return 0;
+  // euclidean distance
+  const distance = 10 * Math.sqrt(
+    embedding1
+      .map((val, i) => (val - embedding2[i]))
+      .reduce((dist, diff) => dist + (diff ** 2), 0),
+  );
+  const confidence = 2 * (0.5 - distance); // double confidence output for bigger differences and round to three decimals
+  return Math.trunc(1000 * confidence) / 1000;
+}
+
+async function predict(image, config) {
+  if (!models.embedding) return null;
+  return new Promise(async (resolve) => {
+    const resize = tf.image.resizeBilinear(image, [config.face.embedding.inputSize, config.face.embedding.inputSize], false);
+    // const normalize = tf.tidy(() => resize.div(127.5).sub(0.5)); // this is -0.5...0.5 ???
+    let data = [];
+    if (config.face.embedding.enabled) {
+      if (!config.profile) {
+        const embeddingT = await models.embedding.predict({ img_inputs: resize });
+        data = [...embeddingT.dataSync()]; // convert object array to standard array
+        tf.dispose(embeddingT);
+      } else {
+        const profileData = await tf.profile(() => models.embedding.predict({ img_inputs: resize }));
+        data = [...profileData.result.dataSync()];
+        profileData.result.dispose();
+        profile.run('emotion', profileData);
+      }
+    }
+    resize.dispose();
+    // normalize.dispose();
+    resolve(data);
+  });
+}
+
+exports.predict = predict;
+exports.simmilarity = simmilarity;
+exports.load = load;
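For reference, a tiny standalone sketch (not part of the commit) of the same distance-to-score mapping used by simmilarity() above, on made-up 3-element vectors instead of real 192-element embeddings:

// identical embeddings score 1.0; larger euclidean distances score lower and can go negative
function score(a, b) {
  if (a?.length !== b?.length) return 0;
  const distance = 10 * Math.sqrt(a.map((val, i) => val - b[i]).reduce((sum, diff) => sum + (diff ** 2), 0));
  return Math.trunc(1000 * 2 * (0.5 - distance)) / 1000;
}
console.log(score([0.1, 0.2, 0.3], [0.1, 0.2, 0.3]));  // 1    (identical)
console.log(score([0.1, 0.2, 0.3], [0.1, 0.2, 0.31])); // 0.8  (small difference)
console.log(score([0.1, 0.2, 0.3], [0.4, 0.0, 0.3]));  // negative for clearly different inputs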
@@ -7,7 +7,6 @@ let last = [];
 let frame = Number.MAX_SAFE_INTEGER;

 // tuning values
-const zoom = [0, 0]; // 0..1 meaning 0%..100%
 const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale
 const scale = 1; // score multiplication factor

@@ -21,12 +20,15 @@ async function load(config) {
 }

 async function predict(image, config) {
+  if (!models.emotion) return null;
   if ((frame < config.face.emotion.skipFrames) && (last.length > 0)) {
     frame += 1;
     return last;
   }
   frame = 0;
   return new Promise(async (resolve) => {
+    /*
+    const zoom = [0, 0]; // 0..1 meaning 0%..100%
     const box = [[
       (image.shape[1] * zoom[0]) / image.shape[1],
       (image.shape[2] * zoom[1]) / image.shape[2],
@@ -34,7 +36,8 @@ async function predict(image, config) {
       (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],
     ]];
     const resize = tf.image.cropAndResize(image, box, [0], [config.face.emotion.inputSize, config.face.emotion.inputSize]);
-    // const resize = tf.image.resizeBilinear(image, [config.face.emotion.inputSize, config.face.emotion.inputSize], false);
+    */
+    const resize = tf.image.resizeBilinear(image, [config.face.emotion.inputSize, config.face.emotion.inputSize], false);
     const [red, green, blue] = tf.split(resize, 3, 3);
     resize.dispose();
     // weighted rgb to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html
@ -192,8 +192,8 @@ class Pipeline {
       rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
       rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);
     }
-    const boxCPU = { startPoint: box.startPoint, endPoint: box.endPoint };
-    const face = bounding.cutBoxFromImageAndResize(boxCPU, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);
+    const face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);
+    const outputFace = config.detector.rotation ? tf.image.rotateWithOffset(face, angle) : face;

     // if we're not going to produce mesh, don't spend time with further processing
     if (!config.mesh.enabled) {
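This hunk is what the new detector rotation option (added to the config in this commit) controls: when enabled, the cropped face is rotated before being handed to the downstream age/gender/emotion/embedding models. A usage sketch; it assumes Human is the library class instantiated as in the demo scripts:

// usage sketch: enable rotation-corrected face crops for downstream face models
async function detectWithRotation(human, inputImage) {
  return human.detect(inputImage, {
    face: { detector: { rotation: true } },
  });
}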
@ -202,7 +202,7 @@ class Pipeline {
         box,
         faceConfidence: null,
         confidence: box.confidence,
-        image: face,
+        image: outputFace,
       };
       return prediction;
     }
@ -250,7 +250,7 @@ class Pipeline {
       box: landmarksBox,
       faceConfidence: confidenceVal,
       confidence: box.confidence,
-      image: face,
+      image: outputFace,
     };
     this.storedBoxes[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal };
     return prediction;
@ -7,7 +7,6 @@ let frame = Number.MAX_SAFE_INTEGER;
 let alternative = false;

 // tuning values
-const zoom = [0, 0]; // 0..1 meaning 0%..100%
 const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale

 async function load(config) {
@ -21,12 +20,15 @@ async function load(config) {
 }

 async function predict(image, config) {
+  if (!models.gender) return null;
   if ((frame < config.face.gender.skipFrames) && last.gender !== '') {
     frame += 1;
     return last;
   }
   frame = 0;
   return new Promise(async (resolve) => {
+    /*
+    const zoom = [0, 0]; // 0..1 meaning 0%..100%
     const box = [[
       (image.shape[1] * zoom[0]) / image.shape[1],
       (image.shape[2] * zoom[1]) / image.shape[2],
@ -34,6 +36,8 @@ async function predict(image, config) {
       (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],
     ]];
     const resize = tf.image.cropAndResize(image, box, [0], [config.face.gender.inputSize, config.face.gender.inputSize]);
+    */
+    const resize = tf.image.resizeBilinear(image, [config.face.gender.inputSize, config.face.gender.inputSize], false);
     let enhance;
     if (alternative) {
       enhance = tf.tidy(() => {
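The gender path gets the same treatment as emotion: the zoom-based crop is commented out in favor of a straight resizeBilinear, and the resized tensor then feeds an enhance step wrapped in tf.tidy. As a reminder of why tf.tidy is used there, a generic sketch; the normalization inside is illustrative and is not the module's actual enhance math:

// generic sketch: tf.tidy disposes every intermediate tensor created inside the callback,
// so a multi-op enhance step cannot leak tensor memory
const enhanced = tf.tidy(() => {
  const normalized = resize.div(127.5).sub(1); // illustrative scaling to roughly -1..1
  return normalized;
});
// `resize` itself is still disposed explicitly by the caller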
src/human.js (65 lines changed)
@ -3,6 +3,7 @@ import * as facemesh from './face/facemesh.js';
 import * as age from './age/age.js';
 import * as gender from './gender/gender.js';
 import * as emotion from './emotion/emotion.js';
+import * as embedding from './embedding/embedding.js';
 import * as posenet from './body/posenet.js';
 import * as handpose from './hand/handpose.js';
 import * as gesture from './gesture.js';
@ -108,6 +109,11 @@ class Human {
     return null;
   }

+  simmilarity(embedding1, embedding2) {
+    if (this.config.face.embedding.enabled) return embedding.simmilarity(embedding1, embedding2);
+    return 0;
+  }
+
   // preload models, not explicitly required as it's done automatically on first use
   async load(userConfig) {
     this.state = 'load';
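The new simmilarity method only returns a meaningful score when face.embedding is enabled, since it delegates to the embedding module and otherwise falls back to 0. A usage sketch comparing the first face from two separate detections; it assumes an async context and a human instance, and the variable names are illustrative:

// compare the first detected face from two detect() calls
const resultA = await human.detect(imageA, { face: { embedding: { enabled: true } } });
const resultB = await human.detect(imageB, { face: { embedding: { enabled: true } } });
const score = (resultA.face[0] && resultB.face[0])
  ? human.simmilarity(resultA.face[0].embedding, resultB.face[0].embedding)
  : 0;
console.log(`face simmilarity: ${Math.trunc(100 * score)}%`);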
@ -127,6 +133,7 @@ class Human {
       this.models.age,
       this.models.gender,
       this.models.emotion,
+      this.models.embedding,
       this.models.posenet,
       this.models.handpose,
     ] = await Promise.all([
@ -134,6 +141,7 @@ class Human {
       this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
       this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
       this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
+      this.models.embedding || ((this.config.face.enabled && this.config.face.embedding.enabled) ? embedding.load(this.config) : null),
       this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
       this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),
     ]);
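The embedding model is only fetched when face.embedding.enabled is set, mirroring how the other optional face models are loaded. A configuration sketch, using the modelPath declared in the default config:

// preload only what is needed for face embedding work
await human.load({
  face: {
    enabled: true,
    embedding: { enabled: true, modelPath: '../models/mobilefacenet.json' },
  },
});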
@ -142,6 +150,7 @@ class Human {
       if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
       if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
       if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
+      if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding) this.models.embedding = await embedding.load(this.config);
       if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);
       if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);
     }
@ -199,6 +208,7 @@ class Human {
     let ageRes;
     let genderRes;
     let emotionRes;
+    let embeddingRes;
     const faceRes = [];
     this.state = 'run:face';
     timeStamp = now();
@ -206,11 +216,13 @@ class Human {
     this.perf.face = Math.trunc(now() - timeStamp);
     for (const face of faces) {
       this.analyze('Get Face');

       // is something went wrong, skip the face
       if (!face.image || face.image.isDisposedInternal) {
         this.log('Face object is disposed:', face.image);
         continue;
       }

       // run age, inherits face from blazeface
       this.analyze('Start Age:');
       if (this.config.async) {
@ -232,6 +244,7 @@ class Human {
         genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};
         this.perf.gender = Math.trunc(now() - timeStamp);
       }

       // run emotion, inherits face from blazeface
       this.analyze('Start Emotion:');
       if (this.config.async) {
@ -244,9 +257,21 @@ class Human {
       }
       this.analyze('End Emotion:');

+      // run emotion, inherits face from blazeface
+      this.analyze('Start Embedding:');
+      if (this.config.async) {
+        embeddingRes = this.config.face.embedding.enabled ? embedding.predict(face.image, this.config) : {};
+      } else {
+        this.state = 'run:embedding';
+        timeStamp = now();
+        embeddingRes = this.config.face.embedding.enabled ? await embedding.predict(face.image, this.config) : {};
+        this.perf.embedding = Math.trunc(now() - timeStamp);
+      }
+      this.analyze('End Emotion:');
+
       // if async wait for results
       if (this.config.async) {
-        [ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);
+        [ageRes, genderRes, emotionRes, embeddingRes] = await Promise.all([ageRes, genderRes, emotionRes, embeddingRes]);
       }

       this.analyze('Finish Face:');
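With config.async enabled, the per-face analyzers (age, gender, emotion, and now embedding) are started without awaiting and then collected in a single Promise.all, so the embedding pass overlaps with the other models. A sketch of that pattern in isolation; the runAge/runGender/runEmbedding names are purely illustrative stand-ins:

// illustrative pattern: start all analyzers, then await them together
async function analyzeFace(faceImage, config) {
  const agePromise = runAge(faceImage, config);           // not awaited yet
  const genderPromise = runGender(faceImage, config);     // not awaited yet
  const embeddingPromise = runEmbedding(faceImage, config);
  const [age, gender, embedding] = await Promise.all([agePromise, genderPromise, embeddingPromise]);
  return { age, gender, embedding };
}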
@ -270,6 +295,7 @@ class Human {
         gender: genderRes.gender,
         genderConfidence: genderRes.confidence,
         emotion: emotionRes,
+        embedding: embeddingRes,
         iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0,
       });
       this.analyze('End Face');
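Each face record now carries the raw embedding vector alongside age, gender, emotion and iris, so callers can store or compare it directly. A sketch of reading it from a detection result:

// read the embedding vector from the first detected face (empty when embedding is disabled)
const result = await human.detect(image);
const firstFace = result.face && result.face[0];
const vector = (firstFace && firstFace.embedding) ? firstFace.embedding : [];
console.log('embedding length:', vector.length); // expected 192 with the mobilefacenet model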
@ -294,23 +320,23 @@ class Human {

   // main detect function
   async detect(input, userConfig = {}) {
-    this.state = 'config';
-    let timeStamp;
-
-    // update configuration
-    this.config = mergeDeep(this.config, userConfig);
-    if (!this.config.videoOptimized) this.config = mergeDeep(this.config, disableSkipFrames);
-
-    // sanity checks
-    this.state = 'check';
-    const error = this.sanity(input);
-    if (error) {
-      this.log(error, input);
-      return { error };
-    }
-
     // detection happens inside a promise
     return new Promise(async (resolve) => {
+      this.state = 'config';
+      let timeStamp;
+
+      // update configuration
+      this.config = mergeDeep(this.config, userConfig);
+      if (!this.config.videoOptimized) this.config = mergeDeep(this.config, disableSkipFrames);
+
+      // sanity checks
+      this.state = 'check';
+      const error = this.sanity(input);
+      if (error) {
+        this.log(error, input);
+        resolve({ error });
+      }
+
       let poseRes;
       let handRes;
       let faceRes;
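The configuration merge and sanity checks now run inside the returned promise, and a failed check resolves with an error object rather than returning early from detect. Either way, callers receive a promise and should check for an error field before using the results:

// detect() always resolves; check for an error field before reading results
const detection = await human.detect(input);
if (detection.error) {
  console.error('detect failed:', detection.error);
} else {
  console.log('faces detected:', detection.face.length);
}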
@ -391,10 +417,11 @@ class Human {
     });
   }

-  async warmup(userConfig) {
-    const warmup = new ImageData(255, 255);
-    await this.detect(warmup, userConfig);
+  async warmup(userConfig, sample) {
+    if (!sample) sample = new ImageData(255, 255);
+    const warmup = await this.detect(sample, userConfig);
     this.log('warmed up');
+    return warmup;
   }
 }

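warmup can now be fed a specific sample image instead of a blank ImageData, and it returns the detection result from that warmup pass so the caller can keep it for later comparisons. A browser-side usage sketch; the image path is illustrative:

// warm up the models on a known image and keep the result for later use
const img = new Image();
img.src = 'assets/known-face.jpg'; // illustrative path
await img.decode();
const warmupResult = await human.warmup(userConfig, img);
console.log('warmup faces:', warmupResult.face ? warmupResult.face.length : 0);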