mirror of https://github.com/vladmandic/human

enable body segmentation and background replacement in demo

parent 5bfe5655a7
commit f345538794

CHANGELOG.md

@@ -11,10 +11,12 @@ Repository: **<git+https://github.com/vladmandic/human.git>**

-### **HEAD -> main** 2021/06/05 mandic00@live.com
+### **origin/main** 2021/06/05 mandic00@live.com
+
+- unified build
+- enable body segmentation and background replacement
 
 ### **origin/main** 2021/06/04 mandic00@live.com
 
 - work on body segmentation
 - added experimental body segmentation module
 - add meet and selfie models
 - add live hints to demo

8 TODO.md

@@ -4,14 +4,9 @@

 N/A
 
 ## Explore Models
 
 - InsightFace: RetinaFace detector and ArcFace recognition: <https://github.com/deepinsight/insightface>
 
-## In Progress
-
-- Switch to TypeScript 4.3
-- Add backgrounds to segmentation
+## Work in Progress
 
 ## Known Issues
 
@@ -19,4 +14,3 @@ N/A

 - NanoDet with WASM: <https://github.com/tensorflow/tfjs/issues/4824>
 - BlazeFace and HandPose rotation in NodeJS: <https://github.com/tensorflow/tfjs/issues/4066>
-- TypeDoc with TypeScript 4.3: <https://github.com/TypeStrong/typedoc/issues/1589>
 - HandPose lower precision with WASM

135 demo/index.js

@@ -27,7 +27,7 @@ import webRTC from './helpers/webrtc.js';

 let human;
 
-const userConfig = {
+let userConfig = {
   warmup: 'none',
   backend: 'humangl',
   wasmPath: 'https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@3.6.0/dist/',
@@ -96,6 +96,7 @@ const ui = {

   bench: true, // show gl fps benchmark window
   lastFrame: 0, // time of last frame processing
   viewportSet: false, // internal, has custom viewport been set
+  background: null, // holds instance of segmentation background image
 
   // webrtc
   useWebRTC: false, // use webrtc as camera source instead of local webcam
@@ -210,8 +211,8 @@ async function drawResults(input) {

   await menu.process.updateChart('FPS', ui.detectFPS);
 
   // get updated canvas if missing or if we want buffering, but skip if segmentation is enabled
-  if (human.config.segmentation.enabled) {
-    result.canvas = await human.segmentation(input);
+  if (userConfig.segmentation.enabled) {
+    result.canvas = await human.segmentation(input, ui.background, userConfig);
   } else if (!result.canvas || ui.buffered) {
     const image = await human.image(input);
     result.canvas = image.canvas;
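The key demo change above: segmentation now gates on the runtime `userConfig` rather than the instance's resolved `human.config`, and `human.segmentation()` receives the user-selected background plus the current config. A minimal sketch of the per-frame call pattern as the demo uses it (`renderFrame` and its arguments are illustrative, not demo functions):

```js
// per-frame background replacement, following the demo's drawResults() logic
async function renderFrame(video, output, background, config) {
  if (config.segmentation.enabled) {
    // resolves to a composited canvas, or null when a previous pass is still running
    const composited = await human.segmentation(video, background, config);
    if (composited) output.getContext('2d').drawImage(composited, 0, 0);
  }
}
```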
@@ -496,8 +497,8 @@ async function processImage(input, title) {

       const canvas = document.getElementById('canvas');
       image.width = image.naturalWidth;
       image.height = image.naturalHeight;
-      canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image.naturalWidth;
-      canvas.height = human.config.filter.height && human.config.filter.height > 0 ? human.config.filter.height : image.naturalHeight;
+      canvas.width = userConfig.filter.width && userConfig.filter.width > 0 ? userConfig.filter.width : image.naturalWidth;
+      canvas.height = userConfig.filter.height && userConfig.filter.height > 0 ? userConfig.filter.height : image.naturalHeight;
       const origCacheSensitiry = userConfig.cacheSensitivity;
       userConfig.cacheSensitivity = 0;
       const result = await human.detect(image, userConfig);
@@ -614,55 +615,55 @@ function setupMenu() {

   menu.display.addBool('fill polygons', human.draw.options, 'fillPolygons');
 
   menu.image = new Menu(document.body, '', { top, left: x[1] });
-  menu.image.addBool('enabled', human.config.filter, 'enabled', (val) => human.config.filter.enabled = val);
-  ui.menuWidth = menu.image.addRange('image width', human.config.filter, 'width', 0, 3840, 10, (val) => human.config.filter.width = parseInt(val));
-  ui.menuHeight = menu.image.addRange('image height', human.config.filter, 'height', 0, 2160, 10, (val) => human.config.filter.height = parseInt(val));
+  menu.image.addBool('enabled', userConfig.filter, 'enabled', (val) => userConfig.filter.enabled = val);
+  ui.menuWidth = menu.image.addRange('image width', userConfig.filter, 'width', 0, 3840, 10, (val) => userConfig.filter.width = parseInt(val));
+  ui.menuHeight = menu.image.addRange('image height', userConfig.filter, 'height', 0, 2160, 10, (val) => userConfig.filter.height = parseInt(val));
   menu.image.addHTML('<hr style="border-style: inset; border-color: dimgray">');
-  menu.image.addRange('brightness', human.config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => human.config.filter.brightness = parseFloat(val));
-  menu.image.addRange('contrast', human.config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => human.config.filter.contrast = parseFloat(val));
-  menu.image.addRange('sharpness', human.config.filter, 'sharpness', 0, 1.0, 0.05, (val) => human.config.filter.sharpness = parseFloat(val));
-  menu.image.addRange('blur', human.config.filter, 'blur', 0, 20, 1, (val) => human.config.filter.blur = parseInt(val));
-  menu.image.addRange('saturation', human.config.filter, 'saturation', -1.0, 1.0, 0.05, (val) => human.config.filter.saturation = parseFloat(val));
-  menu.image.addRange('hue', human.config.filter, 'hue', 0, 360, 5, (val) => human.config.filter.hue = parseInt(val));
-  menu.image.addRange('pixelate', human.config.filter, 'pixelate', 0, 32, 1, (val) => human.config.filter.pixelate = parseInt(val));
+  menu.image.addRange('brightness', userConfig.filter, 'brightness', -1.0, 1.0, 0.05, (val) => userConfig.filter.brightness = parseFloat(val));
+  menu.image.addRange('contrast', userConfig.filter, 'contrast', -1.0, 1.0, 0.05, (val) => userConfig.filter.contrast = parseFloat(val));
+  menu.image.addRange('sharpness', userConfig.filter, 'sharpness', 0, 1.0, 0.05, (val) => userConfig.filter.sharpness = parseFloat(val));
+  menu.image.addRange('blur', userConfig.filter, 'blur', 0, 20, 1, (val) => userConfig.filter.blur = parseInt(val));
+  menu.image.addRange('saturation', userConfig.filter, 'saturation', -1.0, 1.0, 0.05, (val) => userConfig.filter.saturation = parseFloat(val));
+  menu.image.addRange('hue', userConfig.filter, 'hue', 0, 360, 5, (val) => userConfig.filter.hue = parseInt(val));
+  menu.image.addRange('pixelate', userConfig.filter, 'pixelate', 0, 32, 1, (val) => userConfig.filter.pixelate = parseInt(val));
   menu.image.addHTML('<hr style="border-style: inset; border-color: dimgray">');
-  menu.image.addBool('negative', human.config.filter, 'negative', (val) => human.config.filter.negative = val);
-  menu.image.addBool('sepia', human.config.filter, 'sepia', (val) => human.config.filter.sepia = val);
-  menu.image.addBool('vintage', human.config.filter, 'vintage', (val) => human.config.filter.vintage = val);
-  menu.image.addBool('kodachrome', human.config.filter, 'kodachrome', (val) => human.config.filter.kodachrome = val);
-  menu.image.addBool('technicolor', human.config.filter, 'technicolor', (val) => human.config.filter.technicolor = val);
-  menu.image.addBool('polaroid', human.config.filter, 'polaroid', (val) => human.config.filter.polaroid = val);
+  menu.image.addBool('negative', userConfig.filter, 'negative', (val) => userConfig.filter.negative = val);
+  menu.image.addBool('sepia', userConfig.filter, 'sepia', (val) => userConfig.filter.sepia = val);
+  menu.image.addBool('vintage', userConfig.filter, 'vintage', (val) => userConfig.filter.vintage = val);
+  menu.image.addBool('kodachrome', userConfig.filter, 'kodachrome', (val) => userConfig.filter.kodachrome = val);
+  menu.image.addBool('technicolor', userConfig.filter, 'technicolor', (val) => userConfig.filter.technicolor = val);
+  menu.image.addBool('polaroid', userConfig.filter, 'polaroid', (val) => userConfig.filter.polaroid = val);
   menu.image.addHTML('<input type="file" id="file-input" class="input-file"></input>   input');
+  menu.image.addHTML('<input type="file" id="file-background" class="input-file"></input>   background');
 
   menu.process = new Menu(document.body, '', { top, left: x[2] });
-  menu.process.addList('backend', ['cpu', 'webgl', 'wasm', 'humangl'], human.config.backend, (val) => human.config.backend = val);
-  menu.process.addBool('async operations', human.config, 'async', (val) => human.config.async = val);
+  menu.process.addList('backend', ['cpu', 'webgl', 'wasm', 'humangl'], userConfig.backend, (val) => userConfig.backend = val);
+  menu.process.addBool('async operations', userConfig, 'async', (val) => userConfig.async = val);
   menu.process.addBool('use web worker', ui, 'useWorker');
   menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
   menu.process.addLabel('model parameters');
-  menu.process.addRange('max objects', human.config.face.detector, 'maxDetected', 1, 50, 1, (val) => {
-    human.config.face.detector.maxDetected = parseInt(val);
-    human.config.body.maxDetected = parseInt(val);
-    human.config.hand.maxDetected = parseInt(val);
+  menu.process.addRange('max objects', userConfig.face.detector, 'maxDetected', 1, 50, 1, (val) => {
+    userConfig.face.detector.maxDetected = parseInt(val);
+    userConfig.body.maxDetected = parseInt(val);
+    userConfig.hand.maxDetected = parseInt(val);
   });
-  menu.process.addRange('skip frames', human.config.face.detector, 'skipFrames', 0, 50, 1, (val) => {
-    human.config.face.detector.skipFrames = parseInt(val);
-    human.config.face.emotion.skipFrames = parseInt(val);
-    human.config.hand.skipFrames = parseInt(val);
+  menu.process.addRange('skip frames', userConfig.face.detector, 'skipFrames', 0, 50, 1, (val) => {
+    userConfig.face.detector.skipFrames = parseInt(val);
+    userConfig.face.emotion.skipFrames = parseInt(val);
+    userConfig.hand.skipFrames = parseInt(val);
   });
-  menu.process.addRange('min confidence', human.config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
-    human.config.face.detector.minConfidence = parseFloat(val);
-    human.config.face.emotion.minConfidence = parseFloat(val);
-    human.config.hand.minConfidence = parseFloat(val);
+  menu.process.addRange('min confidence', userConfig.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
+    userConfig.face.detector.minConfidence = parseFloat(val);
+    userConfig.face.emotion.minConfidence = parseFloat(val);
+    userConfig.hand.minConfidence = parseFloat(val);
   });
-  menu.process.addRange('overlap', human.config.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {
-    human.config.face.detector.iouThreshold = parseFloat(val);
-    human.config.hand.iouThreshold = parseFloat(val);
+  menu.process.addRange('overlap', userConfig.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {
+    userConfig.face.detector.iouThreshold = parseFloat(val);
+    userConfig.hand.iouThreshold = parseFloat(val);
   });
-  menu.process.addBool('rotation detection', human.config.face.detector, 'rotation', (val) => {
-    human.config.face.detector.rotation = val;
-    human.config.hand.rotation = val;
+  menu.process.addBool('rotation detection', userConfig.face.detector, 'rotation', (val) => {
+    userConfig.face.detector.rotation = val;
+    userConfig.hand.rotation = val;
   });
   menu.process.addHTML('<hr style="border-style: inset; border-color: dimgray">');
   // menu.process.addButton('process sample images', 'process images', () => detectSampleImages());
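All of these controls follow the same demo `Menu` helper pattern: a label, a target object, a property name, and a change callback that writes the parsed value back into `userConfig`, so the next `detect()` call picks it up. A sketch of adding one more control in that style; the control itself is hypothetical, but `cacheSensitivity` is a config property the demo already touches in `processImage()`:

```js
// hypothetical extra slider, following addRange(label, object, property, min, max, step, callback)
menu.process.addRange('cache sensitivity', userConfig, 'cacheSensitivity', 0, 1.0, 0.05,
  (val) => userConfig.cacheSensitivity = parseFloat(val));
```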
@@ -670,20 +671,20 @@ function setupMenu() {

   menu.process.addChart('FPS', 'FPS');
 
   menu.models = new Menu(document.body, '', { top, left: x[3] });
-  menu.models.addBool('face detect', human.config.face, 'enabled', (val) => human.config.face.enabled = val);
-  menu.models.addBool('face mesh', human.config.face.mesh, 'enabled', (val) => human.config.face.mesh.enabled = val);
-  menu.models.addBool('face iris', human.config.face.iris, 'enabled', (val) => human.config.face.iris.enabled = val);
-  menu.models.addBool('face description', human.config.face.description, 'enabled', (val) => human.config.face.description.enabled = val);
-  menu.models.addBool('face emotion', human.config.face.emotion, 'enabled', (val) => human.config.face.emotion.enabled = val);
+  menu.models.addBool('face detect', userConfig.face, 'enabled', (val) => userConfig.face.enabled = val);
+  menu.models.addBool('face mesh', userConfig.face.mesh, 'enabled', (val) => userConfig.face.mesh.enabled = val);
+  menu.models.addBool('face iris', userConfig.face.iris, 'enabled', (val) => userConfig.face.iris.enabled = val);
+  menu.models.addBool('face description', userConfig.face.description, 'enabled', (val) => userConfig.face.description.enabled = val);
+  menu.models.addBool('face emotion', userConfig.face.emotion, 'enabled', (val) => userConfig.face.emotion.enabled = val);
   menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
-  menu.models.addBool('body pose', human.config.body, 'enabled', (val) => human.config.body.enabled = val);
-  menu.models.addBool('hand pose', human.config.hand, 'enabled', (val) => human.config.hand.enabled = val);
+  menu.models.addBool('body pose', userConfig.body, 'enabled', (val) => userConfig.body.enabled = val);
+  menu.models.addBool('hand pose', userConfig.hand, 'enabled', (val) => userConfig.hand.enabled = val);
   menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
-  menu.models.addBool('gestures', human.config.gesture, 'enabled', (val) => human.config.gesture.enabled = val);
+  menu.models.addBool('gestures', userConfig.gesture, 'enabled', (val) => userConfig.gesture.enabled = val);
   menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
-  menu.models.addBool('body segmentation', human.config.segmentation, 'enabled', (val) => human.config.segmentation.enabled = val);
+  menu.models.addBool('body segmentation', userConfig.segmentation, 'enabled', (val) => userConfig.segmentation.enabled = val);
   menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
-  menu.models.addBool('object detection', human.config.object, 'enabled', (val) => human.config.object.enabled = val);
+  menu.models.addBool('object detection', userConfig.object, 'enabled', (val) => userConfig.object.enabled = val);
   menu.models.addHTML('<hr style="border-style: inset; border-color: dimgray">');
   menu.models.addBool('face compare', compare, 'enabled', (val) => {
     compare.enabled = val;
@@ -748,12 +749,34 @@ async function processDataURL(f, action) {

       await processImage(dataURL, f.name);
       document.getElementById('canvas').style.display = 'none';
     }
+    if (action === 'background') {
+      const image = new Image();
+      image.onerror = async () => status('image loading error');
+      image.onload = async () => {
+        ui.background = image;
+        document.getElementById('canvas').style.display = 'block';
+        const canvas = document.getElementById('canvas');
+        const ctx = canvas.getContext('2d');
+        const overlaid = await human.segmentation(canvas, ui.background, userConfig);
+        if (overlaid) ctx.drawImage(overlaid, 0, 0);
+      };
+      image.src = dataURL;
+    }
     resolve(true);
   };
   reader.readAsDataURL(f);
   });
 }
 
+async function runSegmentation() {
+  document.getElementById('file-background').onchange = async (evt) => {
+    userConfig.segmentation.enabled = true;
+    evt.preventDefault();
+    if (evt.target.files.length < 2) ui.columns = 1;
+    for (const f of evt.target.files) await processDataURL(f, 'background');
+  };
+}
+
 async function dragAndDrop() {
   document.body.addEventListener('dragenter', (evt) => evt.preventDefault());
   document.body.addEventListener('dragleave', (evt) => evt.preventDefault());
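The new `'background'` action decodes the uploaded file into an `Image`, stores it on `ui.background`, and immediately previews the composite. The same load-then-use flow in isolation, a sketch using only standard browser APIs (`loadBackground` is illustrative, not a demo function):

```js
// read a user-selected file into an Image usable as a segmentation background
function loadBackground(file, onReady) {
  const reader = new FileReader();
  reader.onerror = () => console.log('image loading error');
  reader.onload = (e) => {
    const image = new Image();
    image.onload = () => onReady(image); // decoded and ready for human.segmentation()
    image.src = e.target.result;         // data URL produced by readAsDataURL
  };
  reader.readAsDataURL(file);
}
```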
@@ -764,6 +787,11 @@ async function dragAndDrop() {

     if (evt.dataTransfer.files.length < 2) ui.columns = 1;
     for (const f of evt.dataTransfer.files) await processDataURL(f, 'process');
   });
+  document.getElementById('file-input').onchange = async (evt) => {
+    evt.preventDefault();
+    if (evt.target.files.length < 2) ui.columns = 1;
+    for (const f of evt.target.files) await processDataURL(f, 'process');
+  };
 }
 
 async function drawHints() {
@@ -861,6 +889,7 @@ async function main() {

   // create instance of human
   human = new Human(userConfig);
+  userConfig = { ...human.config, ...userConfig };
   if (typeof tf !== 'undefined') {
     // eslint-disable-next-line no-undef
     log('TensorFlow external version:', tf.version);
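Changing `userConfig` from `const` to `let` exists to support this merge: once the `Human` instance has resolved its full default configuration, the demo overlays its own overrides on top, so later calls such as `human.detect(image, userConfig)` see a complete config object. Note that object spread is a shallow merge; a quick sketch of the semantics:

```js
// shallow spread merge: top-level keys from overrides win; nested objects are replaced wholesale
const defaults = { backend: 'humangl', filter: { enabled: true, blur: 0 } };
const overrides = { warmup: 'none', filter: { enabled: false } };
const merged = { ...defaults, ...overrides };
// merged.warmup === 'none', merged.backend === 'humangl'
// merged.filter is overrides.filter: { enabled: false }; defaults.filter.blur is gone, not deep-merged
```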
@@ -895,9 +924,11 @@ async function main() {

   for (const m of Object.values(menu)) m.hide();
 
   // init drag & drop
   await dragAndDrop();
 
+  // init segmentation
+  await runSegmentation();
+
   if (params.has('image')) {
     try {
       const image = JSON.parse(params.get('image'));

File diff suppressed because one or more lines are too long

dist bundle

@@ -1,10 +1,10 @@

-/*
-  Human library
-  homepage: <https://github.com/vladmandic/human>
-  author: <https://github.com/vladmandic>'
-*/
+/*
+  Human library
+  homepage: <https://github.com/vladmandic/human>
+  author: <https://github.com/vladmandic>'
+*/
 
 var __create = Object.create;
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;

@@ -4318,6 +4318,8 @@ async function predict3(image18, config3, idx, count2) {

 // src/face.ts
 var calculateGaze = (face5) => {
   const radians = (pt1, pt2) => Math.atan2(pt1[1] - pt2[1], pt1[0] - pt2[0]);
+  if (!face5.annotations["rightEyeIris"] || !face5.annotations["leftEyeIris"])
+    return { bearing: 0, strength: 0 };
   const offsetIris = [0, -0.1];
   const eyeRatio = 1;
   const left = face5.mesh[33][2] > face5.mesh[263][2];

@@ -9970,7 +9972,7 @@ async function face2(inCanvas2, result, drawOptions) {

       ctx.fill();
     }
   }
-  if (localOptions.drawGaze && ((_b = (_a = f.rotation) == null ? void 0 : _a.gaze) == null ? void 0 : _b.strength) && ((_d = (_c = f.rotation) == null ? void 0 : _c.gaze) == null ? void 0 : _d.bearing)) {
+  if (localOptions.drawGaze && ((_b = (_a = f.rotation) == null ? void 0 : _a.gaze) == null ? void 0 : _b.strength) && ((_d = (_c = f.rotation) == null ? void 0 : _c.gaze) == null ? void 0 : _d.bearing) && f.annotations["leftEyeIris"] && f.annotations["rightEyeIris"] && f.annotations["leftEyeIris"][0] && f.annotations["rightEyeIris"][0]) {
     ctx.strokeStyle = "pink";
     ctx.beginPath();
     const leftGaze = [

@@ -10419,6 +10421,7 @@ function calc(newResult) {

 // src/segmentation/segmentation.ts
 var tf20 = __toModule(require_tfjs_esm());
 var model9;
+var busy = false;
 async function load12(config3) {
   if (!model9) {
     model9 = await tf20.loadGraphModel(join(config3.modelBasePath, config3.segmentation.modelPath));

@@ -10489,6 +10492,9 @@ async function predict11(input, config3) {

 }
 async function process5(input, background, config3) {
   var _a;
+  if (busy)
+    return null;
+  busy = true;
   if (!config3.segmentation.enabled)
     config3.segmentation.enabled = true;
   if (!model9)

@@ -10516,8 +10522,9 @@ async function process5(input, background, config3) {

     cData.data[4 * i + 3] = (255 - alpha[4 * i + 3]) / 255 * cData.data[4 * i + 3] + alpha[4 * i + 3] / 255 * fgData[4 * i + 3];
   }
   ctx.putImageData(cData, 0, 0);
-  return c;
+  img.canvas = c;
 }
+busy = false;
 return img.canvas;
 }

@@ -11322,8 +11329,6 @@ var Human = class {

 this.tf.ENV.set("CHECK_COMPUTATION_FOR_ERRORS", false);
 this.tf.ENV.set("WEBGL_CPU_FORWARD", true);
 this.tf.ENV.set("WEBGL_PACK_DEPTHWISECONV", true);
-if (!this.config.object.enabled)
-  this.tf.ENV.set("WEBGL_FORCE_F16_TEXTURES", true);
 if (typeof this.config["deallocate"] !== "undefined" && this.config["deallocate"]) {
   log("changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:", true);
   this.tf.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD", 0);

File diff suppressed because it is too large
File diff suppressed because one or more lines are too long

build log

@@ -1,21 +1,21 @@

-2021-06-05 12:59:25 INFO:  @vladmandic/human version 2.0.0
-2021-06-05 12:59:25 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
-2021-06-05 12:59:25 INFO:  Toolchain: tfjs: 3.7.0 esbuild 0.12.6; typescript 4.2.4; typedoc: 0.20.36 eslint: 7.27.0
-2021-06-05 12:59:25 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
-2021-06-05 12:59:25 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1298,"outputFiles":"dist/tfjs.esm.js"}
-2021-06-05 12:59:25 STATE: target: node type: node: {"imports":41,"importBytes":429859,"outputBytes":375893,"outputFiles":"dist/human.node.js"}
-2021-06-05 12:59:25 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1306,"outputFiles":"dist/tfjs.esm.js"}
-2021-06-05 12:59:25 STATE: target: nodeGPU type: node: {"imports":41,"importBytes":429867,"outputBytes":375897,"outputFiles":"dist/human.node-gpu.js"}
-2021-06-05 12:59:25 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1373,"outputFiles":"dist/tfjs.esm.js"}
-2021-06-05 12:59:26 STATE: target: nodeWASM type: node: {"imports":41,"importBytes":429934,"outputBytes":375969,"outputFiles":"dist/human.node-wasm.js"}
-2021-06-05 12:59:26 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1400,"outputFiles":"dist/tfjs.esm.js"}
-2021-06-05 12:59:26 STATE: target: browserNoBundle type: esm: {"imports":41,"importBytes":429961,"outputBytes":247750,"outputFiles":"dist/human.esm-nobundle.js"}
-2021-06-05 12:59:26 STATE: target: browserBundle type: tfjs: {"modules":1299,"moduleBytes":4230827,"imports":7,"importBytes":2478,"outputBytes":1140326,"outputFiles":"dist/tfjs.esm.js"}
-2021-06-05 12:59:27 STATE: target: browserBundle type: iife: {"imports":41,"importBytes":1568887,"outputBytes":1383936,"outputFiles":"dist/human.js"}
-2021-06-05 12:59:27 STATE: target: browserBundle type: esm: {"imports":41,"importBytes":1568887,"outputBytes":1383928,"outputFiles":"dist/human.esm.js"}
-2021-06-05 12:59:27 INFO:  Running Linter: ["server/","demo/","src/","test/"]
-2021-06-05 12:59:55 INFO:  Linter complete: files: 69 errors: 0 warnings: 0
-2021-06-05 12:59:55 INFO:  Generate Typings: ["src/human.ts"] outDir: ["types"]
-2021-06-05 13:00:13 INFO:  Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
-2021-06-05 13:00:13 INFO:  Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
-2021-06-05 13:00:29 INFO:  Documentation generated at /home/vlado/dev/human/typedoc 1
+2021-06-05 16:11:51 INFO:  @vladmandic/human version 2.0.0
+2021-06-05 16:11:51 INFO:  User: vlado Platform: linux Arch: x64 Node: v16.0.0
+2021-06-05 16:11:51 INFO:  Toolchain: tfjs: 3.7.0 esbuild 0.12.6; typescript 4.2.4; typedoc: 0.20.36 eslint: 7.27.0
+2021-06-05 16:11:51 INFO:  Build: file startup all type: production config: {"minifyWhitespace":true,"minifyIdentifiers":true,"minifySyntax":true}
+2021-06-05 16:11:51 STATE: target: node type: tfjs: {"imports":1,"importBytes":102,"outputBytes":1298,"outputFiles":"dist/tfjs.esm.js"}
+2021-06-05 16:11:51 STATE: target: node type: node: {"imports":41,"importBytes":430197,"outputBytes":376126,"outputFiles":"dist/human.node.js"}
+2021-06-05 16:11:51 STATE: target: nodeGPU type: tfjs: {"imports":1,"importBytes":110,"outputBytes":1306,"outputFiles":"dist/tfjs.esm.js"}
+2021-06-05 16:11:51 STATE: target: nodeGPU type: node: {"imports":41,"importBytes":430205,"outputBytes":376130,"outputFiles":"dist/human.node-gpu.js"}
+2021-06-05 16:11:51 STATE: target: nodeWASM type: tfjs: {"imports":1,"importBytes":149,"outputBytes":1373,"outputFiles":"dist/tfjs.esm.js"}
+2021-06-05 16:11:51 STATE: target: nodeWASM type: node: {"imports":41,"importBytes":430272,"outputBytes":376202,"outputFiles":"dist/human.node-wasm.js"}
+2021-06-05 16:11:51 STATE: target: browserNoBundle type: tfjs: {"imports":1,"importBytes":2478,"outputBytes":1400,"outputFiles":"dist/tfjs.esm.js"}
+2021-06-05 16:11:51 STATE: target: browserNoBundle type: esm: {"imports":41,"importBytes":430299,"outputBytes":247921,"outputFiles":"dist/human.esm-nobundle.js"}
+2021-06-05 16:11:52 STATE: target: browserBundle type: tfjs: {"modules":1299,"moduleBytes":4230827,"imports":7,"importBytes":2478,"outputBytes":1140326,"outputFiles":"dist/tfjs.esm.js"}
+2021-06-05 16:11:52 STATE: target: browserBundle type: iife: {"imports":41,"importBytes":1569225,"outputBytes":1384109,"outputFiles":"dist/human.js"}
+2021-06-05 16:11:52 STATE: target: browserBundle type: esm: {"imports":41,"importBytes":1569225,"outputBytes":1384101,"outputFiles":"dist/human.esm.js"}
+2021-06-05 16:11:52 INFO:  Running Linter: ["server/","demo/","src/","test/"]
+2021-06-05 16:12:25 INFO:  Linter complete: files: 69 errors: 0 warnings: 0
+2021-06-05 16:12:25 INFO:  Generate Typings: ["src/human.ts"] outDir: ["types"]
+2021-06-05 16:12:44 INFO:  Generate ChangeLog: ["/home/vlado/dev/human/CHANGELOG.md"]
+2021-06-05 16:12:44 INFO:  Generate TypeDocs: ["src/human.ts"] outDir: ["typedoc"]
+2021-06-05 16:13:00 INFO:  Documentation generated at /home/vlado/dev/human/typedoc 1

@@ -240,7 +240,7 @@ export async function face(inCanvas: HTMLCanvasElement, result: Array<Face>, drawOptions) {

       ctx.fill();
     }
   }
-  if (localOptions.drawGaze && f.rotation?.gaze?.strength && f.rotation?.gaze?.bearing) {
+  if (localOptions.drawGaze && f.rotation?.gaze?.strength && f.rotation?.gaze?.bearing && f.annotations['leftEyeIris'] && f.annotations['rightEyeIris'] && f.annotations['leftEyeIris'][0] && f.annotations['rightEyeIris'][0]) {
     ctx.strokeStyle = 'pink';
     ctx.beginPath();

src/face.ts

@@ -16,6 +16,7 @@ const rad2deg = (theta) => Math.round((theta * 180) / Math.PI);

 const calculateGaze = (face): { bearing: number, strength: number } => {
   const radians = (pt1, pt2) => Math.atan2(pt1[1] - pt2[1], pt1[0] - pt2[0]); // function to calculate angle between any two points
+  if (!face.annotations['rightEyeIris'] || !face.annotations['leftEyeIris']) return { bearing: 0, strength: 0 };
 
   const offsetIris = [0, -0.1]; // iris center may not align with average of eye extremes
   const eyeRatio = 1; // factor to normalize changes x vs y
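The added guard returns a neutral gaze when either iris annotation is missing, since the bearing math needs both eyes. For reference, a small worked example of the `atan2`-based helper that `calculateGaze` builds on (the point values are made up; the real function additionally applies the `offsetIris` and `eyeRatio` corrections):

```js
// angle between two 2-d points, identical to calculateGaze's radians() helper
const radians = (pt1, pt2) => Math.atan2(pt1[1] - pt2[1], pt1[0] - pt2[0]);
const rad2deg = (theta) => Math.round((theta * 180) / Math.PI);

const eyeCenter = [100, 120];
const irisCenter = [104, 118]; // iris shifted right and slightly up in image coordinates
console.log(rad2deg(radians(irisCenter, eyeCenter))); // -27 degrees (negative = upward in canvas space)
```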

src/human.ts

@@ -370,7 +370,7 @@ export class Human {

   this.tf.ENV.set('CHECK_COMPUTATION_FOR_ERRORS', false);
   this.tf.ENV.set('WEBGL_CPU_FORWARD', true);
   this.tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
-  if (!this.config.object.enabled) this.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
+  // if (!this.config.object.enabled) this.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true); // safe to use 16bit precision
   if (typeof this.config['deallocate'] !== 'undefined' && this.config['deallocate']) { // hidden param
     log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', true);
     this.tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0);
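The commented-out line had forced 16-bit float WebGL textures whenever object detection was off; the commit disables that heuristic, keeping full 32-bit texture precision for all pipelines. For reference, a sketch of reading and overriding such a flag through the `tf` instance Human embeds (the flag name is a standard tfjs-backend-webgl flag; availability depends on the active backend):

```js
// inspect and override a TF.js WebGL backend flag via the bundled tf instance
const tf = human.tf;
console.log('f16 textures:', tf.ENV.getBool('WEBGL_FORCE_F16_TEXTURES'));
tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', false); // keep 32-bit float textures
```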

src/segmentation/segmentation.ts

@@ -12,6 +12,7 @@ import { Config } from '../config';

 type Input = Tensor | typeof Image | ImageData | ImageBitmap | HTMLImageElement | HTMLMediaElement | HTMLVideoElement | HTMLCanvasElement | OffscreenCanvas;
 
 let model: GraphModel;
+let busy = false;
 // let blurKernel;
 
 export async function load(config: Config): Promise<GraphModel> {
@@ -95,7 +96,9 @@ export async function predict(input: { tensor: Tensor | null, canvas: OffscreenC

   return alpha;
 }
 
-export async function process(input: Input, background: Input | undefined, config: Config): Promise<HTMLCanvasElement | OffscreenCanvas> {
+export async function process(input: Input, background: Input | undefined, config: Config): Promise<HTMLCanvasElement | OffscreenCanvas | null> {
+  if (busy) return null;
+  busy = true;
   if (!config.segmentation.enabled) config.segmentation.enabled = true; // override config
   if (!model) await load(config);
   const img = image.process(input, config);
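The module-level `busy` flag makes `process()` non-reentrant: a call that arrives while a previous segmentation pass is still running returns `null` immediately instead of queuing work, which is why the return type gains `| null`. The same drop-frame guard in isolation, as a sketch (`expensiveSegmentation` is a placeholder for the model run):

```js
// non-reentrant async pipeline: overlapping calls are dropped, not queued
let busy = false;
async function processFrame(frame) {
  if (busy) return null; // previous frame still in flight; caller should reuse its last result
  busy = true;
  try {
    return await expensiveSegmentation(frame); // placeholder for the real work
  } finally {
    busy = false; // always release, even if the pipeline throws
  }
}
```

The committed code resets `busy` inline before returning; the `try/finally` here is a common hardening so an exception cannot leave the flag stuck.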
@@ -124,8 +127,8 @@ export async function process(input: Input, background: Input | undefined, config: Config) {

       cData.data[4 * i + 3] = ((255 - alpha[4 * i + 3]) / 255.0 * cData.data[4 * i + 3]) + (alpha[4 * i + 3] / 255.0 * fgData[4 * i + 3]);
     }
     ctx.putImageData(cData, 0, 0);
-    return c;
+    img.canvas = c;
   }
+  busy = false;
   return img.canvas;
 }
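The compositing loop above is a per-byte linear interpolation: with mask weight `a = alpha / 255`, each output byte is `(1 - a) * background + a * foreground`. A standalone sketch of that blend over two RGBA buffers:

```js
// alpha-blend foreground over background, byte by byte, as in the loop above
// bg, fg, alpha: Uint8ClampedArray RGBA buffers of equal length
function blend(bg, fg, alpha) {
  const out = new Uint8ClampedArray(bg.length);
  for (let i = 0; i < bg.length; i++) {
    const a = alpha[i] / 255;             // 0 = keep background, 1 = keep foreground
    out[i] = (1 - a) * bg[i] + a * fg[i]; // Uint8ClampedArray rounds and clamps on store
  }
  return out;
}
```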

@@ -738,7 +738,7 @@

 <a name="segmentation-1" class="tsd-anchor"></a>
 <h3>segmentation</h3>
 <ul class="tsd-signatures tsd-kind-method tsd-parent-kind-class">
-<li class="tsd-signature tsd-kind-icon">segmentation<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span>, background<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">></span></li>
+<li class="tsd-signature tsd-kind-icon">segmentation<span class="tsd-signature-symbol">(</span>input<span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">any</span>, background<span class="tsd-signature-symbol">?: </span><span class="tsd-signature-type">any</span><span class="tsd-signature-symbol">)</span><span class="tsd-signature-symbol">: </span><span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">></span></li>
 </ul>
 <ul class="tsd-descriptions">
 <li class="tsd-description">

@@ -760,7 +760,7 @@

 <h5><span class="tsd-flag ts-flagOptional">Optional</span> background: <span class="tsd-signature-type">any</span></h5>
 </li>
 </ul>
-<h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">></span></h4>
+<h4 class="tsd-returns-title">Returns <span class="tsd-signature-type">Promise</span><span class="tsd-signature-symbol"><</span><span class="tsd-signature-type">null</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">HTMLCanvasElement</span><span class="tsd-signature-symbol"> | </span><span class="tsd-signature-type">OffscreenCanvas</span><span class="tsd-signature-symbol">></span></h4>
 <p>Canvas</p>
 </li>
 </ul>

@@ -130,7 +130,7 @@ export declare class Human {

    * @param background?: {@link Input}
    * @returns Canvas
    */
-  segmentation(input: Input, background?: Input): Promise<OffscreenCanvas | HTMLCanvasElement>;
+  segmentation(input: Input, background?: Input): Promise<OffscreenCanvas | HTMLCanvasElement | null>;
   /** Enhance method performs additional enhacements to face image previously detected for futher processing
    * @param input: Tensor as provided in human.result.face[n].tensor
    * @returns Tensor
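Since `segmentation()` can now resolve to `null` (the busy case above), callers should treat the result as optional. A usage sketch against the updated declaration:

```js
// human.segmentation() may resolve to null; fall back to the unprocessed input
const composited = await human.segmentation(input, background);
const frame = composited ?? input; // keep showing the raw frame when a pass is skipped
```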

@@ -9,5 +9,5 @@ export declare function predict(input: {

     tensor: Tensor | null;
     canvas: OffscreenCanvas | HTMLCanvasElement;
 }, config: Config): Promise<Uint8ClampedArray | null>;
-export declare function process(input: Input, background: Input | undefined, config: Config): Promise<HTMLCanvasElement | OffscreenCanvas>;
+export declare function process(input: Input, background: Input | undefined, config: Config): Promise<HTMLCanvasElement | OffscreenCanvas | null>;
 export {};